arize-phoenix 4.4.4rc5__py3-none-any.whl → 4.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of arize-phoenix might be problematic.

Files changed (118)
  1. {arize_phoenix-4.4.4rc5.dist-info → arize_phoenix-4.5.0.dist-info}/METADATA +5 -5
  2. {arize_phoenix-4.4.4rc5.dist-info → arize_phoenix-4.5.0.dist-info}/RECORD +56 -117
  3. {arize_phoenix-4.4.4rc5.dist-info → arize_phoenix-4.5.0.dist-info}/WHEEL +1 -1
  4. phoenix/__init__.py +27 -0
  5. phoenix/config.py +7 -21
  6. phoenix/core/model.py +25 -25
  7. phoenix/core/model_schema.py +62 -64
  8. phoenix/core/model_schema_adapter.py +25 -27
  9. phoenix/db/bulk_inserter.py +14 -54
  10. phoenix/db/insertion/evaluation.py +6 -6
  11. phoenix/db/insertion/helpers.py +2 -13
  12. phoenix/db/migrations/versions/cf03bd6bae1d_init.py +28 -2
  13. phoenix/db/models.py +4 -236
  14. phoenix/inferences/fixtures.py +23 -23
  15. phoenix/inferences/inferences.py +7 -7
  16. phoenix/inferences/validation.py +1 -1
  17. phoenix/server/api/context.py +0 -18
  18. phoenix/server/api/dataloaders/__init__.py +0 -18
  19. phoenix/server/api/dataloaders/span_descendants.py +3 -2
  20. phoenix/server/api/routers/v1/__init__.py +2 -77
  21. phoenix/server/api/routers/v1/evaluations.py +2 -4
  22. phoenix/server/api/routers/v1/spans.py +1 -3
  23. phoenix/server/api/routers/v1/traces.py +4 -1
  24. phoenix/server/api/schema.py +303 -2
  25. phoenix/server/api/types/Cluster.py +19 -19
  26. phoenix/server/api/types/Dataset.py +63 -282
  27. phoenix/server/api/types/DatasetRole.py +23 -0
  28. phoenix/server/api/types/Dimension.py +29 -30
  29. phoenix/server/api/types/EmbeddingDimension.py +34 -40
  30. phoenix/server/api/types/Event.py +16 -16
  31. phoenix/server/api/{mutations/export_events_mutations.py → types/ExportEventsMutation.py} +14 -17
  32. phoenix/server/api/types/Model.py +42 -43
  33. phoenix/server/api/types/Project.py +12 -26
  34. phoenix/server/api/types/Span.py +2 -79
  35. phoenix/server/api/types/TimeSeries.py +6 -6
  36. phoenix/server/api/types/Trace.py +4 -15
  37. phoenix/server/api/types/UMAPPoints.py +1 -1
  38. phoenix/server/api/types/node.py +111 -5
  39. phoenix/server/api/types/pagination.py +52 -10
  40. phoenix/server/app.py +49 -101
  41. phoenix/server/main.py +27 -49
  42. phoenix/server/openapi/docs.py +0 -3
  43. phoenix/server/static/index.js +2595 -3523
  44. phoenix/server/templates/index.html +0 -1
  45. phoenix/services.py +15 -15
  46. phoenix/session/client.py +21 -438
  47. phoenix/session/session.py +37 -47
  48. phoenix/trace/exporter.py +9 -14
  49. phoenix/trace/fixtures.py +7 -133
  50. phoenix/trace/schemas.py +2 -1
  51. phoenix/trace/span_evaluations.py +3 -3
  52. phoenix/trace/trace_dataset.py +6 -6
  53. phoenix/version.py +1 -1
  54. phoenix/datasets/__init__.py +0 -0
  55. phoenix/datasets/evaluators/__init__.py +0 -18
  56. phoenix/datasets/evaluators/code_evaluators.py +0 -99
  57. phoenix/datasets/evaluators/llm_evaluators.py +0 -244
  58. phoenix/datasets/evaluators/utils.py +0 -292
  59. phoenix/datasets/experiments.py +0 -550
  60. phoenix/datasets/tracing.py +0 -85
  61. phoenix/datasets/types.py +0 -178
  62. phoenix/db/insertion/dataset.py +0 -237
  63. phoenix/db/migrations/types.py +0 -29
  64. phoenix/db/migrations/versions/10460e46d750_datasets.py +0 -291
  65. phoenix/server/api/dataloaders/dataset_example_revisions.py +0 -100
  66. phoenix/server/api/dataloaders/dataset_example_spans.py +0 -43
  67. phoenix/server/api/dataloaders/experiment_annotation_summaries.py +0 -85
  68. phoenix/server/api/dataloaders/experiment_error_rates.py +0 -43
  69. phoenix/server/api/dataloaders/experiment_run_counts.py +0 -42
  70. phoenix/server/api/dataloaders/experiment_sequence_number.py +0 -49
  71. phoenix/server/api/dataloaders/project_by_name.py +0 -31
  72. phoenix/server/api/dataloaders/span_projects.py +0 -33
  73. phoenix/server/api/dataloaders/trace_row_ids.py +0 -39
  74. phoenix/server/api/helpers/dataset_helpers.py +0 -179
  75. phoenix/server/api/input_types/AddExamplesToDatasetInput.py +0 -16
  76. phoenix/server/api/input_types/AddSpansToDatasetInput.py +0 -14
  77. phoenix/server/api/input_types/ClearProjectInput.py +0 -15
  78. phoenix/server/api/input_types/CreateDatasetInput.py +0 -12
  79. phoenix/server/api/input_types/DatasetExampleInput.py +0 -14
  80. phoenix/server/api/input_types/DatasetSort.py +0 -17
  81. phoenix/server/api/input_types/DatasetVersionSort.py +0 -16
  82. phoenix/server/api/input_types/DeleteDatasetExamplesInput.py +0 -13
  83. phoenix/server/api/input_types/DeleteDatasetInput.py +0 -7
  84. phoenix/server/api/input_types/DeleteExperimentsInput.py +0 -9
  85. phoenix/server/api/input_types/PatchDatasetExamplesInput.py +0 -35
  86. phoenix/server/api/input_types/PatchDatasetInput.py +0 -14
  87. phoenix/server/api/mutations/__init__.py +0 -13
  88. phoenix/server/api/mutations/auth.py +0 -11
  89. phoenix/server/api/mutations/dataset_mutations.py +0 -520
  90. phoenix/server/api/mutations/experiment_mutations.py +0 -65
  91. phoenix/server/api/mutations/project_mutations.py +0 -47
  92. phoenix/server/api/openapi/__init__.py +0 -0
  93. phoenix/server/api/openapi/main.py +0 -6
  94. phoenix/server/api/openapi/schema.py +0 -16
  95. phoenix/server/api/queries.py +0 -503
  96. phoenix/server/api/routers/v1/dataset_examples.py +0 -178
  97. phoenix/server/api/routers/v1/datasets.py +0 -965
  98. phoenix/server/api/routers/v1/experiment_evaluations.py +0 -66
  99. phoenix/server/api/routers/v1/experiment_runs.py +0 -108
  100. phoenix/server/api/routers/v1/experiments.py +0 -174
  101. phoenix/server/api/types/AnnotatorKind.py +0 -10
  102. phoenix/server/api/types/CreateDatasetPayload.py +0 -8
  103. phoenix/server/api/types/DatasetExample.py +0 -85
  104. phoenix/server/api/types/DatasetExampleRevision.py +0 -34
  105. phoenix/server/api/types/DatasetVersion.py +0 -14
  106. phoenix/server/api/types/ExampleRevisionInterface.py +0 -14
  107. phoenix/server/api/types/Experiment.py +0 -140
  108. phoenix/server/api/types/ExperimentAnnotationSummary.py +0 -13
  109. phoenix/server/api/types/ExperimentComparison.py +0 -19
  110. phoenix/server/api/types/ExperimentRun.py +0 -91
  111. phoenix/server/api/types/ExperimentRunAnnotation.py +0 -57
  112. phoenix/server/api/types/Inferences.py +0 -80
  113. phoenix/server/api/types/InferencesRole.py +0 -23
  114. phoenix/utilities/json.py +0 -61
  115. phoenix/utilities/re.py +0 -50
  116. {arize_phoenix-4.4.4rc5.dist-info → arize_phoenix-4.5.0.dist-info}/licenses/IP_NOTICE +0 -0
  117. {arize_phoenix-4.4.4rc5.dist-info → arize_phoenix-4.5.0.dist-info}/licenses/LICENSE +0 -0
  118. /phoenix/server/api/{helpers/__init__.py → helpers.py} +0 -0
phoenix/server/app.py CHANGED
@@ -33,6 +33,7 @@ from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
 from starlette.requests import Request
 from starlette.responses import FileResponse, PlainTextResponse, Response
 from starlette.routing import Mount, Route
+from starlette.schemas import SchemaGenerator
 from starlette.staticfiles import StaticFiles
 from starlette.templating import Jinja2Templates
 from starlette.types import Scope, StatefulLifespan
@@ -57,28 +58,18 @@ from phoenix.pointcloud.umap_parameters import UMAPParameters
 from phoenix.server.api.context import Context, DataLoaders
 from phoenix.server.api.dataloaders import (
     CacheForDataLoaders,
-    DatasetExampleRevisionsDataLoader,
-    DatasetExampleSpansDataLoader,
     DocumentEvaluationsDataLoader,
     DocumentEvaluationSummaryDataLoader,
     DocumentRetrievalMetricsDataLoader,
     EvaluationSummaryDataLoader,
-    ExperimentAnnotationSummaryDataLoader,
-    ExperimentErrorRatesDataLoader,
-    ExperimentRunCountsDataLoader,
-    ExperimentSequenceNumberDataLoader,
     LatencyMsQuantileDataLoader,
     MinStartOrMaxEndTimeDataLoader,
-    ProjectByNameDataLoader,
     RecordCountDataLoader,
     SpanDescendantsDataLoader,
     SpanEvaluationsDataLoader,
-    SpanProjectsDataLoader,
     TokenCountDataLoader,
     TraceEvaluationsDataLoader,
-    TraceRowIdsDataLoader,
 )
-from phoenix.server.api.openapi.schema import OPENAPI_SCHEMA_GENERATOR
 from phoenix.server.api.routers.v1 import V1_ROUTES
 from phoenix.server.api.schema import schema
 from phoenix.server.grpc_server import GrpcServer
@@ -93,6 +84,10 @@ logger = logging.getLogger(__name__)
 
 templates = Jinja2Templates(directory=SERVER_DIR / "templates")
 
+schemas = SchemaGenerator(
+    {"openapi": "3.0.0", "info": {"title": "ArizePhoenix API", "version": "1.0"}}
+)
+
 
 class AppConfig(NamedTuple):
     has_inferences: bool
@@ -131,7 +126,6 @@ class Static(StaticFiles):
                 "n_neighbors": self._app_config.n_neighbors,
                 "n_samples": self._app_config.n_samples,
                 "basename": request.scope.get("root_path", ""),
-                "platform_version": phoenix.__version__,
                 "request": request,
             },
         )
@@ -191,8 +185,6 @@ class GraphQLWithContext(GraphQL):  # type: ignore
             export_path=self.export_path,
             streaming_last_updated_at=self.streaming_last_updated_at,
             data_loaders=DataLoaders(
-                dataset_example_revisions=DatasetExampleRevisionsDataLoader(self.db),
-                dataset_example_spans=DatasetExampleSpansDataLoader(self.db),
                 document_evaluation_summaries=DocumentEvaluationSummaryDataLoader(
                     self.db,
                     cache_map=self.cache_for_dataloaders.document_evaluation_summary
@@ -207,10 +199,6 @@ class GraphQLWithContext(GraphQL):  # type: ignore
                     if self.cache_for_dataloaders
                     else None,
                 ),
-                experiment_annotation_summaries=ExperimentAnnotationSummaryDataLoader(self.db),
-                experiment_error_rates=ExperimentErrorRatesDataLoader(self.db),
-                experiment_run_counts=ExperimentRunCountsDataLoader(self.db),
-                experiment_sequence_number=ExperimentSequenceNumberDataLoader(self.db),
                 latency_ms_quantile=LatencyMsQuantileDataLoader(
                     self.db,
                     cache_map=self.cache_for_dataloaders.latency_ms_quantile
@@ -231,7 +219,6 @@ class GraphQLWithContext(GraphQL):  # type: ignore
                 ),
                 span_descendants=SpanDescendantsDataLoader(self.db),
                 span_evaluations=SpanEvaluationsDataLoader(self.db),
-                span_projects=SpanProjectsDataLoader(self.db),
                 token_counts=TokenCountDataLoader(
                     self.db,
                     cache_map=self.cache_for_dataloaders.token_count
@@ -239,8 +226,6 @@ class GraphQLWithContext(GraphQL):  # type: ignore
                     else None,
                 ),
                 trace_evaluations=TraceEvaluationsDataLoader(self.db),
-                trace_row_ids=TraceRowIdsDataLoader(self.db),
-                project_by_name=ProjectByNameDataLoader(self.db),
             ),
             cache_for_dataloaders=self.cache_for_dataloaders,
             read_only=self.read_only,
@@ -287,11 +272,7 @@ def _lifespan(
 ) -> StatefulLifespan[Starlette]:
     @contextlib.asynccontextmanager
     async def lifespan(_: Starlette) -> AsyncIterator[Dict[str, Any]]:
-        async with bulk_inserter as (
-            queue_span,
-            queue_evaluation,
-            enqueue_operation,
-        ), GrpcServer(
+        async with bulk_inserter as (queue_span, queue_evaluation), GrpcServer(
             queue_span,
             disabled=read_only,
             tracer_provider=tracer_provider,
@@ -300,7 +281,6 @@ def _lifespan(
             yield {
                 "queue_span_for_bulk_insert": queue_span,
                 "queue_evaluation_for_bulk_insert": queue_evaluation,
-                "enqueue_operation": enqueue_operation,
             }
         for clean_up in clean_ups:
            clean_up()
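
Note on the lifespan hunk above: Starlette supports stateful lifespans, where the dict yielded by the lifespan context manager is copied into each request's scope and read back as request.state.<key>. A minimal, self-contained sketch of that mechanism, with a generic app, an in-memory queue, and a hypothetical ingest endpoint standing in for Phoenix's BulkInserter and GrpcServer:

import contextlib
from typing import Any, AsyncIterator, Dict, List

from starlette.applications import Starlette
from starlette.requests import Request
from starlette.responses import PlainTextResponse
from starlette.routing import Route


@contextlib.asynccontextmanager
async def lifespan(_: Starlette) -> AsyncIterator[Dict[str, Any]]:
    # The yielded dict is shallow-copied into each request's scope,
    # so handlers can reach its entries as request.state.<key>.
    queue: List[str] = []  # stand-in for the bulk inserter's span queue
    yield {"queue_span_for_bulk_insert": queue.append}
    queue.clear()  # runs at shutdown, like the clean_up() calls above


async def ingest(request: Request) -> PlainTextResponse:
    request.state.queue_span_for_bulk_insert("serialized-span")  # hypothetical payload
    return PlainTextResponse("queued")


app = Starlette(routes=[Route("/ingest", ingest, methods=["POST"])], lifespan=lifespan)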
@@ -313,63 +293,15 @@ async def check_healthz(_: Request) -> PlainTextResponse:
 
 
 async def openapi_schema(request: Request) -> Response:
-    return OPENAPI_SCHEMA_GENERATOR.OpenAPIResponse(request=request)
+    return schemas.OpenAPIResponse(request=request)
 
 
 async def api_docs(request: Request) -> Response:
     return get_swagger_ui_html(openapi_url="/schema", title="arize-phoenix API")
 
 
-class SessionFactory:
-    def __init__(
-        self,
-        session_factory: Callable[[], AsyncContextManager[AsyncSession]],
-        dialect: str,
-    ):
-        self.session_factory = session_factory
-        self.dialect = SupportedSQLDialect(dialect)
-
-    def __call__(self) -> AsyncContextManager[AsyncSession]:
-        return self.session_factory()
-
-
-def create_engine_and_run_migrations(
-    database_url: str,
-) -> AsyncEngine:
-    try:
-        return create_engine(database_url)
-    except PhoenixMigrationError as e:
-        msg = (
-            "\n\n⚠️⚠️ Phoenix failed to migrate the database to the latest version. ⚠️⚠️\n\n"
-            "The database may be in a dirty state. To resolve this, the Alembic CLI can be used\n"
-            "from the `src/phoenix/db` directory inside the Phoenix project root. From here,\n"
-            "revert any partial migrations and run `alembic stamp` to reset the migration state,\n"
-            "then try starting Phoenix again.\n\n"
-            "If issues persist, please reach out for support in the Arize community Slack:\n"
-            "https://arize-ai.slack.com\n\n"
-            "You can also refer to the Alembic documentation for more information:\n"
-            "https://alembic.sqlalchemy.org/en/latest/tutorial.html\n\n"
-            ""
-        )
-        raise PhoenixMigrationError(msg) from e
-
-
-def instrument_engine_if_enabled(engine: AsyncEngine) -> List[Callable[[], None]]:
-    instrumentation_cleanups = []
-    if server_instrumentation_is_enabled():
-        from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
-
-        tracer_provider = initialize_opentelemetry_tracer_provider()
-        SQLAlchemyInstrumentor().instrument(
-            engine=engine.sync_engine,
-            tracer_provider=tracer_provider,
-        )
-        instrumentation_cleanups.append(SQLAlchemyInstrumentor().uninstrument)
-    return instrumentation_cleanups
-
-
 def create_app(
-    db: SessionFactory,
+    database_url: str,
     export_path: Path,
     model: Model,
     umap_params: UMAPParameters,
@@ -379,10 +311,8 @@
     enable_prometheus: bool = False,
     initial_spans: Optional[Iterable[Union[Span, Tuple[Span, str]]]] = None,
     initial_evaluations: Optional[Iterable[pb.Evaluation]] = None,
-    serve_ui: bool = True,
-    clean_up_callbacks: List[Callable[[], None]] = [],
 ) -> Starlette:
-    clean_ups: List[Callable[[], None]] = clean_up_callbacks  # To be called at app shutdown.
+    clean_ups: List[Callable[[], None]] = []  # To be called at app shutdown.
     initial_batch_of_spans: Iterable[Tuple[Span, str]] = (
         ()
         if initial_spans is None
@@ -392,10 +322,28 @@
         )
     )
     initial_batch_of_evaluations = () if initial_evaluations is None else initial_evaluations
+    try:
+        engine = create_engine(database_url)
+    except PhoenixMigrationError as e:
+        msg = (
+            "\n\n⚠️⚠️ Phoenix failed to migrate the database to the latest version. ⚠️⚠️\n\n"
+            "The database may be in a dirty state. To resolve this, the Alembic CLI can be used\n"
+            "from the `src/phoenix/db` directory inside the Phoenix project root. From here,\n"
+            "revert any partial migrations and run `alembic stamp` to reset the migration state,\n"
+            "then try starting Phoenix again.\n\n"
+            "If issues persist, please reach out for support in the Arize community Slack:\n"
+            "https://arize-ai.slack.com\n\n"
+            "You can also refer to the Alembic documentation for more information:\n"
+            "https://alembic.sqlalchemy.org/en/latest/tutorial.html\n\n"
+            ""
+        )
+        raise PhoenixMigrationError(msg) from e
     cache_for_dataloaders = (
-        CacheForDataLoaders() if db.dialect is SupportedSQLDialect.SQLITE else None
+        CacheForDataLoaders()
+        if SupportedSQLDialect(engine.dialect.name) is SupportedSQLDialect.SQLITE
+        else None
     )
-
+    db = _db(engine)
     bulk_inserter = BulkInserter(
         db,
         enable_prometheus=enable_prometheus,
@@ -406,9 +354,16 @@
     tracer_provider = None
     strawberry_extensions = schema.get_extensions()
     if server_instrumentation_is_enabled():
+        from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
         from opentelemetry.trace import TracerProvider
         from strawberry.extensions.tracing import OpenTelemetryExtension
 
+        tracer_provider = initialize_opentelemetry_tracer_provider()
+        SQLAlchemyInstrumentor().instrument(
+            engine=engine.sync_engine,
+            tracer_provider=tracer_provider,
+        )
+        clean_ups.append(SQLAlchemyInstrumentor().uninstrument)
         if TYPE_CHECKING:
             # Type-check the class before monkey-patching its private attribute.
             assert OpenTelemetryExtension._tracer
@@ -422,7 +377,6 @@
             self._tracer = cast(TracerProvider, tracer_provider).get_tracer("strawberry")
 
         strawberry_extensions.append(_OpenTelemetryExtension)
-
     graphql = GraphQLWithContext(
         db=db,
         schema=strawberry.Schema(
@@ -479,27 +433,21 @@ def create_app(
                 "/graphql",
                 graphql,
             ),
-        ]
-        + (
-            [
-                Mount(
-                    "/",
-                    app=Static(
-                        directory=SERVER_DIR / "static",
-                        app_config=AppConfig(
-                            has_inferences=model.is_empty is not True,
-                            has_corpus=corpus is not None,
-                            min_dist=umap_params.min_dist,
-                            n_neighbors=umap_params.n_neighbors,
-                            n_samples=umap_params.n_samples,
-                        ),
+            Mount(
+                "/",
+                app=Static(
+                    directory=SERVER_DIR / "static",
+                    app_config=AppConfig(
+                        has_inferences=model.is_empty is not True,
+                        has_corpus=corpus is not None,
+                        min_dist=umap_params.min_dist,
+                        n_neighbors=umap_params.n_neighbors,
+                        n_samples=umap_params.n_samples,
                    ),
-                    name="static",
                ),
-            ]
-            if serve_ui
-            else []
-        ),
+                name="static",
+            ),
+        ],
     )
     app.state.read_only = read_only
     app.state.db = db
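
The net effect of the OpenAPI plumbing changes in app.py: openapi_schema now delegates to Starlette's built-in SchemaGenerator (the module-level schemas object added above) instead of the removed phoenix.server.api.openapi module. A minimal sketch of how that generator behaves, with a hypothetical ping endpoint standing in for the real V1 routes; PyYAML must be installed for the YAML response:

from starlette.applications import Starlette
from starlette.requests import Request
from starlette.responses import Response
from starlette.routing import Route
from starlette.schemas import SchemaGenerator

schemas = SchemaGenerator(
    {"openapi": "3.0.0", "info": {"title": "ArizePhoenix API", "version": "1.0"}}
)


async def ping(request: Request) -> Response:
    """
    responses:
      200:
        description: Liveness check.
    """
    return Response("pong")


async def openapi_schema(request: Request) -> Response:
    # Walks request.app.routes and assembles the OpenAPI document from
    # endpoint docstrings, merged into the base schema passed above.
    return schemas.OpenAPIResponse(request=request)


app = Starlette(
    routes=[
        Route("/ping", ping),
        Route("/schema", openapi_schema, include_in_schema=False),
    ]
)

SchemaGenerator parses the YAML in each endpoint's docstring, so routes documented this way appear in /schema without a separate spec file.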
phoenix/server/main.py CHANGED
@@ -22,9 +22,9 @@ from phoenix.config import (
     get_pids_path,
     get_working_dir,
 )
-from phoenix.core.model_schema_adapter import create_model_from_inferences
+from phoenix.core.model_schema_adapter import create_model_from_datasets
 from phoenix.db import get_printable_db_url
-from phoenix.inferences.fixtures import FIXTURES, get_inferences
+from phoenix.inferences.fixtures import FIXTURES, get_datasets
 from phoenix.inferences.inferences import EMPTY_INFERENCES, Inferences
 from phoenix.pointcloud.umap_parameters import (
     DEFAULT_MIN_DIST,
@@ -32,22 +32,14 @@ from phoenix.pointcloud.umap_parameters import (
     DEFAULT_N_SAMPLES,
     UMAPParameters,
 )
-from phoenix.server.app import (
-    SessionFactory,
-    _db,
-    create_app,
-    create_engine_and_run_migrations,
-    instrument_engine_if_enabled,
-)
+from phoenix.server.app import create_app
 from phoenix.settings import Settings
 from phoenix.trace.fixtures import (
     TRACES_FIXTURES,
     download_traces_fixture,
-    get_dataset_fixtures,
     get_evals_from_fixture,
     get_trace_fixture_by_name,
     reset_fixture_span_ids_and_timestamps,
-    send_dataset_fixtures,
 )
 from phoenix.trace.otel import decode_otlp_span, encode_span_to_otlp
 from phoenix.trace.schemas import Span
@@ -107,14 +99,14 @@ def _get_pid_file() -> Path:
 DEFAULT_UMAP_PARAMS_STR = f"{DEFAULT_MIN_DIST},{DEFAULT_N_NEIGHBORS},{DEFAULT_N_SAMPLES}"
 
 if __name__ == "__main__":
-    primary_inferences_name: str
-    reference_inferences_name: Optional[str]
+    primary_dataset_name: str
+    reference_dataset_name: Optional[str]
     trace_dataset_name: Optional[str] = None
     simulate_streaming: Optional[bool] = None
 
-    primary_inferences: Inferences = EMPTY_INFERENCES
-    reference_inferences: Optional[Inferences] = None
-    corpus_inferences: Optional[Inferences] = None
+    primary_dataset: Inferences = EMPTY_INFERENCES
+    reference_dataset: Optional[Inferences] = None
+    corpus_dataset: Optional[Inferences] = None
 
     # Initialize the settings for the Server
     Settings.log_migrations = True
@@ -158,34 +150,34 @@ if __name__ == "__main__":
     )
     export_path = Path(args.export_path) if args.export_path else EXPORT_DIR
     if args.command == "datasets":
-        primary_inferences_name = args.primary
-        reference_inferences_name = args.reference
-        corpus_inferences_name = args.corpus
-        primary_inferences = Inferences.from_name(primary_inferences_name)
-        reference_inferences = (
-            Inferences.from_name(reference_inferences_name)
-            if reference_inferences_name is not None
+        primary_dataset_name = args.primary
+        reference_dataset_name = args.reference
+        corpus_dataset_name = args.corpus
+        primary_dataset = Inferences.from_name(primary_dataset_name)
+        reference_dataset = (
+            Inferences.from_name(reference_dataset_name)
+            if reference_dataset_name is not None
             else None
         )
-        corpus_inferences = (
-            None if corpus_inferences_name is None else Inferences.from_name(corpus_inferences_name)
+        corpus_dataset = (
+            None if corpus_dataset_name is None else Inferences.from_name(corpus_dataset_name)
         )
     elif args.command == "fixture":
         fixture_name = args.fixture
         primary_only = args.primary_only
-        primary_inferences, reference_inferences, corpus_inferences = get_inferences(
+        primary_dataset, reference_dataset, corpus_dataset = get_datasets(
            fixture_name,
            args.no_internet,
        )
        if primary_only:
-            reference_inferences_name = None
-            reference_inferences = None
+            reference_dataset_name = None
+            reference_dataset = None
    elif args.command == "trace-fixture":
        trace_dataset_name = args.fixture
        simulate_streaming = args.simulate_streaming
    elif args.command == "demo":
        fixture_name = args.fixture
-        primary_inferences, reference_inferences, corpus_inferences = get_inferences(
+        primary_dataset, reference_dataset, corpus_dataset = get_datasets(
            fixture_name,
            args.no_internet,
        )
@@ -205,11 +197,9 @@ if __name__ == "__main__":
 
     port = args.port or get_env_port()
     host_root_path = get_env_host_root_path()
-    read_only = args.read_only
-
-    model = create_model_from_inferences(
-        primary_inferences,
-        reference_inferences,
+    model = create_model_from_datasets(
+        primary_dataset,
+        reference_dataset,
     )
 
     fixture_spans: List[Span] = []
@@ -226,19 +216,13 @@ if __name__ == "__main__":
             ),
             get_evals_from_fixture(trace_dataset_name),
         )
-        dataset_fixtures = list(get_dataset_fixtures(trace_dataset_name))
-        if not read_only:
-            Thread(
-                target=send_dataset_fixtures,
-                args=(f"http://{host}:{port}", dataset_fixtures),
-            ).start()
     umap_params_list = args.umap_params.split(",")
     umap_params = UMAPParameters(
         min_dist=float(umap_params_list[0]),
         n_neighbors=int(umap_params_list[1]),
         n_samples=int(umap_params_list[2]),
     )
-
+    read_only = args.read_only
     logger.info(f"Server umap params: {umap_params}")
@@ -246,23 +230,17 @@ if __name__ == "__main__":
         start_prometheus()
 
     working_dir = get_working_dir().resolve()
-    engine = create_engine_and_run_migrations(db_connection_str)
-    instrumentation_cleanups = instrument_engine_if_enabled(engine)
-    factory = SessionFactory(session_factory=_db(engine), dialect=engine.dialect.name)
     app = create_app(
-        db=factory,
+        database_url=db_connection_str,
         export_path=export_path,
         model=model,
         umap_params=umap_params,
-        corpus=None
-        if corpus_inferences is None
-        else create_model_from_inferences(corpus_inferences),
+        corpus=None if corpus_dataset is None else create_model_from_datasets(corpus_dataset),
         debug=args.debug,
         read_only=read_only,
         enable_prometheus=enable_prometheus,
         initial_spans=fixture_spans,
         initial_evaluations=fixture_evals,
-        clean_up_callbacks=instrumentation_cleanups,
     )
     server = Server(config=Config(app, host=host, port=port, root_path=host_root_path))  # type: ignore
     Thread(target=_write_pid_file_when_ready, args=(server,), daemon=True).start()
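
With this change, embedding Phoenix no longer requires building an engine and SessionFactory by hand: create_app takes the database URL directly and runs migrations itself. A hedged sketch of the new entry point, mirroring the arguments main.py passes above (the URL, export path, port, and UMAP values below are illustrative, not defaults taken from the source):

from pathlib import Path

from uvicorn import Config, Server

from phoenix.core.model_schema_adapter import create_model_from_datasets
from phoenix.inferences.inferences import EMPTY_INFERENCES
from phoenix.pointcloud.umap_parameters import UMAPParameters
from phoenix.server.app import create_app

app = create_app(
    database_url="sqlite:///phoenix.db",  # engine creation and migrations happen inside
    export_path=Path("exports"),  # illustrative path
    model=create_model_from_datasets(EMPTY_INFERENCES, None),
    umap_params=UMAPParameters(min_dist=0.0, n_neighbors=30, n_samples=500),
    corpus=None,
    debug=False,
    read_only=False,
)
Server(config=Config(app, host="127.0.0.1", port=6006)).run()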
phoenix/server/openapi/docs.py CHANGED
@@ -43,9 +43,6 @@ def get_swagger_ui_html(
     <div id="swagger-ui">
     </div>
     <script src="{swagger_js_url}"></script>
-    <style type="text/css">
-    div[id^="operations-private"]{{display:none}} #operations-tag-private{{display:none}}
-    </style>
     <!-- `SwaggerUIBundle` is now available on the page -->
     <script>
     const ui = SwaggerUIBundle({{