arize-phoenix 3.25.0__py3-none-any.whl → 4.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of arize-phoenix might be problematic; see the registry advisory for details.

Files changed (113)
  1. {arize_phoenix-3.25.0.dist-info → arize_phoenix-4.0.1.dist-info}/METADATA +26 -4
  2. {arize_phoenix-3.25.0.dist-info → arize_phoenix-4.0.1.dist-info}/RECORD +80 -75
  3. phoenix/__init__.py +9 -5
  4. phoenix/config.py +109 -53
  5. phoenix/datetime_utils.py +18 -1
  6. phoenix/db/README.md +25 -0
  7. phoenix/db/__init__.py +4 -0
  8. phoenix/db/alembic.ini +119 -0
  9. phoenix/db/bulk_inserter.py +206 -0
  10. phoenix/db/engines.py +152 -0
  11. phoenix/db/helpers.py +47 -0
  12. phoenix/db/insertion/evaluation.py +209 -0
  13. phoenix/db/insertion/helpers.py +51 -0
  14. phoenix/db/insertion/span.py +142 -0
  15. phoenix/db/migrate.py +71 -0
  16. phoenix/db/migrations/env.py +121 -0
  17. phoenix/db/migrations/script.py.mako +26 -0
  18. phoenix/db/migrations/versions/cf03bd6bae1d_init.py +280 -0
  19. phoenix/db/models.py +371 -0
  20. phoenix/exceptions.py +5 -1
  21. phoenix/server/api/context.py +40 -3
  22. phoenix/server/api/dataloaders/__init__.py +97 -0
  23. phoenix/server/api/dataloaders/cache/__init__.py +3 -0
  24. phoenix/server/api/dataloaders/cache/two_tier_cache.py +67 -0
  25. phoenix/server/api/dataloaders/document_evaluation_summaries.py +152 -0
  26. phoenix/server/api/dataloaders/document_evaluations.py +37 -0
  27. phoenix/server/api/dataloaders/document_retrieval_metrics.py +98 -0
  28. phoenix/server/api/dataloaders/evaluation_summaries.py +151 -0
  29. phoenix/server/api/dataloaders/latency_ms_quantile.py +198 -0
  30. phoenix/server/api/dataloaders/min_start_or_max_end_times.py +93 -0
  31. phoenix/server/api/dataloaders/record_counts.py +125 -0
  32. phoenix/server/api/dataloaders/span_descendants.py +64 -0
  33. phoenix/server/api/dataloaders/span_evaluations.py +37 -0
  34. phoenix/server/api/dataloaders/token_counts.py +138 -0
  35. phoenix/server/api/dataloaders/trace_evaluations.py +37 -0
  36. phoenix/server/api/input_types/SpanSort.py +138 -68
  37. phoenix/server/api/routers/v1/__init__.py +11 -0
  38. phoenix/server/api/routers/v1/evaluations.py +275 -0
  39. phoenix/server/api/routers/v1/spans.py +126 -0
  40. phoenix/server/api/routers/v1/traces.py +82 -0
  41. phoenix/server/api/schema.py +112 -48
  42. phoenix/server/api/types/DocumentEvaluationSummary.py +1 -1
  43. phoenix/server/api/types/Evaluation.py +29 -12
  44. phoenix/server/api/types/EvaluationSummary.py +29 -44
  45. phoenix/server/api/types/MimeType.py +2 -2
  46. phoenix/server/api/types/Model.py +9 -9
  47. phoenix/server/api/types/Project.py +240 -171
  48. phoenix/server/api/types/Span.py +87 -131
  49. phoenix/server/api/types/Trace.py +29 -20
  50. phoenix/server/api/types/pagination.py +151 -10
  51. phoenix/server/app.py +263 -35
  52. phoenix/server/grpc_server.py +93 -0
  53. phoenix/server/main.py +75 -60
  54. phoenix/server/openapi/docs.py +218 -0
  55. phoenix/server/prometheus.py +23 -7
  56. phoenix/server/static/index.js +662 -643
  57. phoenix/server/telemetry.py +68 -0
  58. phoenix/services.py +4 -0
  59. phoenix/session/client.py +34 -30
  60. phoenix/session/data_extractor.py +8 -3
  61. phoenix/session/session.py +176 -155
  62. phoenix/settings.py +13 -0
  63. phoenix/trace/attributes.py +349 -0
  64. phoenix/trace/dsl/README.md +116 -0
  65. phoenix/trace/dsl/filter.py +660 -192
  66. phoenix/trace/dsl/helpers.py +24 -5
  67. phoenix/trace/dsl/query.py +562 -185
  68. phoenix/trace/fixtures.py +69 -7
  69. phoenix/trace/otel.py +44 -200
  70. phoenix/trace/schemas.py +14 -8
  71. phoenix/trace/span_evaluations.py +5 -2
  72. phoenix/utilities/__init__.py +0 -26
  73. phoenix/utilities/span_store.py +0 -23
  74. phoenix/version.py +1 -1
  75. phoenix/core/project.py +0 -773
  76. phoenix/core/traces.py +0 -96
  77. phoenix/datasets/dataset.py +0 -214
  78. phoenix/datasets/fixtures.py +0 -24
  79. phoenix/datasets/schema.py +0 -31
  80. phoenix/experimental/evals/__init__.py +0 -73
  81. phoenix/experimental/evals/evaluators.py +0 -413
  82. phoenix/experimental/evals/functions/__init__.py +0 -4
  83. phoenix/experimental/evals/functions/classify.py +0 -453
  84. phoenix/experimental/evals/functions/executor.py +0 -353
  85. phoenix/experimental/evals/functions/generate.py +0 -138
  86. phoenix/experimental/evals/functions/processing.py +0 -76
  87. phoenix/experimental/evals/models/__init__.py +0 -14
  88. phoenix/experimental/evals/models/anthropic.py +0 -175
  89. phoenix/experimental/evals/models/base.py +0 -170
  90. phoenix/experimental/evals/models/bedrock.py +0 -221
  91. phoenix/experimental/evals/models/litellm.py +0 -134
  92. phoenix/experimental/evals/models/openai.py +0 -453
  93. phoenix/experimental/evals/models/rate_limiters.py +0 -246
  94. phoenix/experimental/evals/models/vertex.py +0 -173
  95. phoenix/experimental/evals/models/vertexai.py +0 -186
  96. phoenix/experimental/evals/retrievals.py +0 -96
  97. phoenix/experimental/evals/templates/__init__.py +0 -50
  98. phoenix/experimental/evals/templates/default_templates.py +0 -472
  99. phoenix/experimental/evals/templates/template.py +0 -195
  100. phoenix/experimental/evals/utils/__init__.py +0 -172
  101. phoenix/experimental/evals/utils/threads.py +0 -27
  102. phoenix/server/api/routers/evaluation_handler.py +0 -110
  103. phoenix/server/api/routers/span_handler.py +0 -70
  104. phoenix/server/api/routers/trace_handler.py +0 -60
  105. phoenix/storage/span_store/__init__.py +0 -23
  106. phoenix/storage/span_store/text_file.py +0 -85
  107. phoenix/trace/dsl/missing.py +0 -60
  108. {arize_phoenix-3.25.0.dist-info → arize_phoenix-4.0.1.dist-info}/WHEEL +0 -0
  109. {arize_phoenix-3.25.0.dist-info → arize_phoenix-4.0.1.dist-info}/licenses/IP_NOTICE +0 -0
  110. {arize_phoenix-3.25.0.dist-info → arize_phoenix-4.0.1.dist-info}/licenses/LICENSE +0 -0
  111. /phoenix/{datasets → db/insertion}/__init__.py +0 -0
  112. /phoenix/{experimental → db/migrations}/__init__.py +0 -0
  113. /phoenix/{storage → server/openapi}/__init__.py +0 -0
phoenix/server/app.py CHANGED
@@ -1,7 +1,29 @@
1
+ import contextlib
1
2
  import logging
3
+ from datetime import datetime
2
4
  from pathlib import Path
3
- from typing import Any, NamedTuple, Optional, Union
5
+ from typing import (
6
+ TYPE_CHECKING,
7
+ Any,
8
+ AsyncContextManager,
9
+ AsyncIterator,
10
+ Callable,
11
+ Dict,
12
+ Iterable,
13
+ List,
14
+ NamedTuple,
15
+ Optional,
16
+ Tuple,
17
+ Union,
18
+ cast,
19
+ )
4
20
 
21
+ import strawberry
22
+ from sqlalchemy.ext.asyncio import (
23
+ AsyncEngine,
24
+ AsyncSession,
25
+ async_sessionmaker,
26
+ )
5
27
  from starlette.applications import Starlette
6
28
  from starlette.datastructures import QueryParams
7
29
  from starlette.endpoints import HTTPEndpoint
@@ -11,29 +33,61 @@ from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoin
11
33
  from starlette.requests import Request
12
34
  from starlette.responses import FileResponse, PlainTextResponse, Response
13
35
  from starlette.routing import Mount, Route
36
+ from starlette.schemas import SchemaGenerator
14
37
  from starlette.staticfiles import StaticFiles
15
38
  from starlette.templating import Jinja2Templates
16
- from starlette.types import Scope
39
+ from starlette.types import Scope, StatefulLifespan
17
40
  from starlette.websockets import WebSocket
18
41
  from strawberry.asgi import GraphQL
19
42
  from strawberry.schema import BaseSchema
43
+ from typing_extensions import TypeAlias
20
44
 
21
45
  import phoenix
22
- from phoenix.config import SERVER_DIR
46
+ import phoenix.trace.v1 as pb
47
+ from phoenix.config import (
48
+ DEFAULT_PROJECT_NAME,
49
+ SERVER_DIR,
50
+ server_instrumentation_is_enabled,
51
+ )
23
52
  from phoenix.core.model_schema import Model
24
- from phoenix.core.traces import Traces
53
+ from phoenix.db.bulk_inserter import BulkInserter
54
+ from phoenix.db.engines import create_engine
55
+ from phoenix.db.helpers import SupportedSQLDialect
56
+ from phoenix.exceptions import PhoenixMigrationError
25
57
  from phoenix.pointcloud.umap_parameters import UMAPParameters
26
- from phoenix.server.api.context import Context
27
- from phoenix.server.api.routers.evaluation_handler import EvaluationHandler
28
- from phoenix.server.api.routers.span_handler import SpanHandler
29
- from phoenix.server.api.routers.trace_handler import TraceHandler
58
+ from phoenix.server.api.context import Context, DataLoaders
59
+ from phoenix.server.api.dataloaders import (
60
+ CacheForDataLoaders,
61
+ DocumentEvaluationsDataLoader,
62
+ DocumentEvaluationSummaryDataLoader,
63
+ DocumentRetrievalMetricsDataLoader,
64
+ EvaluationSummaryDataLoader,
65
+ LatencyMsQuantileDataLoader,
66
+ MinStartOrMaxEndTimeDataLoader,
67
+ RecordCountDataLoader,
68
+ SpanDescendantsDataLoader,
69
+ SpanEvaluationsDataLoader,
70
+ TokenCountDataLoader,
71
+ TraceEvaluationsDataLoader,
72
+ )
73
+ from phoenix.server.api.routers.v1 import V1_ROUTES
30
74
  from phoenix.server.api.schema import schema
31
- from phoenix.storage.span_store import SpanStore
75
+ from phoenix.server.grpc_server import GrpcServer
76
+ from phoenix.server.openapi.docs import get_swagger_ui_html
77
+ from phoenix.server.telemetry import initialize_opentelemetry_tracer_provider
78
+ from phoenix.trace.schemas import Span
79
+
80
+ if TYPE_CHECKING:
81
+ from opentelemetry.trace import TracerProvider
32
82
 
33
83
  logger = logging.getLogger(__name__)
34
84
 
35
85
  templates = Jinja2Templates(directory=SERVER_DIR / "templates")
36
86
 
87
+ schemas = SchemaGenerator(
88
+ {"openapi": "3.0.0", "info": {"title": "ArizePhoenix API", "version": "1.0"}}
89
+ )
90
+
37
91
 
38
92
  class AppConfig(NamedTuple):
39
93
  has_inferences: bool
@@ -92,20 +146,27 @@ class HeadersMiddleware(BaseHTTPMiddleware):
92
146
  return response
93
147
 
94
148
 
149
+ ProjectRowId: TypeAlias = int
150
+
151
+
95
152
  class GraphQLWithContext(GraphQL): # type: ignore
96
153
  def __init__(
97
154
  self,
98
155
  schema: BaseSchema,
156
+ db: Callable[[], AsyncContextManager[AsyncSession]],
99
157
  model: Model,
100
158
  export_path: Path,
101
159
  graphiql: bool = False,
102
160
  corpus: Optional[Model] = None,
103
- traces: Optional[Traces] = None,
161
+ streaming_last_updated_at: Callable[[ProjectRowId], Optional[datetime]] = lambda _: None,
162
+ cache_for_dataloaders: Optional[CacheForDataLoaders] = None,
104
163
  ) -> None:
164
+ self.db = db
105
165
  self.model = model
106
166
  self.corpus = corpus
107
- self.traces = traces
108
167
  self.export_path = export_path
168
+ self.streaming_last_updated_at = streaming_last_updated_at
169
+ self.cache_for_dataloaders = cache_for_dataloaders
109
170
  super().__init__(schema, graphiql=graphiql)
110
171
 
111
172
  async def get_context(
@@ -116,10 +177,55 @@ class GraphQLWithContext(GraphQL): # type: ignore
116
177
  return Context(
117
178
  request=request,
118
179
  response=response,
180
+ db=self.db,
119
181
  model=self.model,
120
182
  corpus=self.corpus,
121
- traces=self.traces,
122
183
  export_path=self.export_path,
184
+ streaming_last_updated_at=self.streaming_last_updated_at,
185
+ data_loaders=DataLoaders(
186
+ document_evaluation_summaries=DocumentEvaluationSummaryDataLoader(
187
+ self.db,
188
+ cache_map=self.cache_for_dataloaders.document_evaluation_summary
189
+ if self.cache_for_dataloaders
190
+ else None,
191
+ ),
192
+ document_evaluations=DocumentEvaluationsDataLoader(self.db),
193
+ document_retrieval_metrics=DocumentRetrievalMetricsDataLoader(self.db),
194
+ evaluation_summaries=EvaluationSummaryDataLoader(
195
+ self.db,
196
+ cache_map=self.cache_for_dataloaders.evaluation_summary
197
+ if self.cache_for_dataloaders
198
+ else None,
199
+ ),
200
+ latency_ms_quantile=LatencyMsQuantileDataLoader(
201
+ self.db,
202
+ cache_map=self.cache_for_dataloaders.latency_ms_quantile
203
+ if self.cache_for_dataloaders
204
+ else None,
205
+ ),
206
+ min_start_or_max_end_times=MinStartOrMaxEndTimeDataLoader(
207
+ self.db,
208
+ cache_map=self.cache_for_dataloaders.min_start_or_max_end_time
209
+ if self.cache_for_dataloaders
210
+ else None,
211
+ ),
212
+ record_counts=RecordCountDataLoader(
213
+ self.db,
214
+ cache_map=self.cache_for_dataloaders.record_count
215
+ if self.cache_for_dataloaders
216
+ else None,
217
+ ),
218
+ span_descendants=SpanDescendantsDataLoader(self.db),
219
+ span_evaluations=SpanEvaluationsDataLoader(self.db),
220
+ token_counts=TokenCountDataLoader(
221
+ self.db,
222
+ cache_map=self.cache_for_dataloaders.token_count
223
+ if self.cache_for_dataloaders
224
+ else None,
225
+ ),
226
+ trace_evaluations=TraceEvaluationsDataLoader(self.db),
227
+ ),
228
+ cache_for_dataloaders=self.cache_for_dataloaders,
123
229
  )
124
230
 
125
231
 
@@ -142,28 +248,146 @@ async def version(_: Request) -> PlainTextResponse:
142
248
  return PlainTextResponse(f"{phoenix.__version__}")
143
249
 
144
250
 
251
+ def _db(engine: AsyncEngine) -> Callable[[], AsyncContextManager[AsyncSession]]:
252
+ Session = async_sessionmaker(engine, expire_on_commit=False)
253
+
254
+ @contextlib.asynccontextmanager
255
+ async def factory() -> AsyncIterator[AsyncSession]:
256
+ async with Session.begin() as session:
257
+ yield session
258
+
259
+ return factory
260
+
261
+
262
+ def _lifespan(
263
+ *,
264
+ bulk_inserter: BulkInserter,
265
+ tracer_provider: Optional["TracerProvider"] = None,
266
+ enable_prometheus: bool = False,
267
+ clean_ups: Iterable[Callable[[], None]] = (),
268
+ read_only: bool = False,
269
+ ) -> StatefulLifespan[Starlette]:
270
+ @contextlib.asynccontextmanager
271
+ async def lifespan(_: Starlette) -> AsyncIterator[Dict[str, Any]]:
272
+ async with bulk_inserter as (queue_span, queue_evaluation), GrpcServer(
273
+ queue_span,
274
+ disabled=read_only,
275
+ tracer_provider=tracer_provider,
276
+ enable_prometheus=enable_prometheus,
277
+ ):
278
+ yield {
279
+ "queue_span_for_bulk_insert": queue_span,
280
+ "queue_evaluation_for_bulk_insert": queue_evaluation,
281
+ }
282
+ for clean_up in clean_ups:
283
+ clean_up()
284
+
285
+ return lifespan
286
+
287
+
145
288
  async def check_healthz(_: Request) -> PlainTextResponse:
146
289
  return PlainTextResponse("OK")
147
290
 
148
291
 
292
+ async def openapi_schema(request: Request) -> Response:
293
+ return schemas.OpenAPIResponse(request=request)
294
+
295
+
296
+ async def api_docs(request: Request) -> Response:
297
+ return get_swagger_ui_html(openapi_url="/schema", title="arize-phoenix API")
298
+
299
+
149
300
  def create_app(
301
+ database_url: str,
150
302
  export_path: Path,
151
303
  model: Model,
152
304
  umap_params: UMAPParameters,
153
305
  corpus: Optional[Model] = None,
154
- traces: Optional[Traces] = None,
155
- span_store: Optional[SpanStore] = None,
156
306
  debug: bool = False,
157
307
  read_only: bool = False,
158
308
  enable_prometheus: bool = False,
309
+ initial_spans: Optional[Iterable[Union[Span, Tuple[Span, str]]]] = None,
310
+ initial_evaluations: Optional[Iterable[pb.Evaluation]] = None,
159
311
  ) -> Starlette:
312
+ clean_ups: List[Callable[[], None]] = [] # To be called at app shutdown.
313
+ initial_batch_of_spans: Iterable[Tuple[Span, str]] = (
314
+ ()
315
+ if initial_spans is None
316
+ else (
317
+ ((item, DEFAULT_PROJECT_NAME) if isinstance(item, Span) else item)
318
+ for item in initial_spans
319
+ )
320
+ )
321
+ initial_batch_of_evaluations = () if initial_evaluations is None else initial_evaluations
322
+ try:
323
+ engine = create_engine(database_url)
324
+ except PhoenixMigrationError as e:
325
+ msg = (
326
+ "\n\n⚠️⚠️ Phoenix failed to migrate the database to the latest version. ⚠️⚠️\n\n"
327
+ "The database may be in a dirty state. To resolve this, the Alembic CLI can be used\n"
328
+ "from the `src/phoenix/db` directory inside the Phoenix project root. From here,\n"
329
+ "revert any partial migrations and run `alembic stamp` to reset the migration state,\n"
330
+ "then try starting Phoenix again.\n\n"
331
+ "If issues persist, please reach out for support in the Arize community Slack:\n"
332
+ "https://arize-ai.slack.com\n\n"
333
+ "You can also refer to the Alembic documentation for more information:\n"
334
+ "https://alembic.sqlalchemy.org/en/latest/tutorial.html\n\n"
335
+ ""
336
+ )
337
+ raise PhoenixMigrationError(msg) from e
338
+ cache_for_dataloaders = (
339
+ CacheForDataLoaders()
340
+ if SupportedSQLDialect(engine.dialect.name) is SupportedSQLDialect.SQLITE
341
+ else None
342
+ )
343
+ db = _db(engine)
344
+ bulk_inserter = BulkInserter(
345
+ db,
346
+ enable_prometheus=enable_prometheus,
347
+ cache_for_dataloaders=cache_for_dataloaders,
348
+ initial_batch_of_spans=initial_batch_of_spans,
349
+ initial_batch_of_evaluations=initial_batch_of_evaluations,
350
+ )
351
+ tracer_provider = None
352
+ strawberry_extensions = schema.get_extensions()
353
+ if server_instrumentation_is_enabled():
354
+ from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
355
+ from opentelemetry.trace import TracerProvider
356
+ from strawberry.extensions.tracing import OpenTelemetryExtension
357
+
358
+ tracer_provider = initialize_opentelemetry_tracer_provider()
359
+ SQLAlchemyInstrumentor().instrument(
360
+ engine=engine.sync_engine,
361
+ tracer_provider=tracer_provider,
362
+ )
363
+ clean_ups.append(SQLAlchemyInstrumentor().uninstrument)
364
+ if TYPE_CHECKING:
365
+ # Type-check the class before monkey-patching its private attribute.
366
+ assert OpenTelemetryExtension._tracer
367
+
368
+ class _OpenTelemetryExtension(OpenTelemetryExtension):
369
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
370
+ super().__init__(*args, **kwargs)
371
+ # Monkey-patch its private tracer to eliminate usage of the global
372
+ # TracerProvider, which in a notebook setting could be the one
373
+ # used by OpenInference.
374
+ self._tracer = cast(TracerProvider, tracer_provider).get_tracer("strawberry")
375
+
376
+ strawberry_extensions.append(_OpenTelemetryExtension)
160
377
  graphql = GraphQLWithContext(
161
- schema=schema,
378
+ db=db,
379
+ schema=strawberry.Schema(
380
+ query=schema.query,
381
+ mutation=schema.mutation,
382
+ subscription=schema.subscription,
383
+ extensions=strawberry_extensions,
384
+ ),
162
385
  model=model,
163
386
  corpus=corpus,
164
- traces=traces,
165
387
  export_path=export_path,
166
388
  graphiql=True,
389
+ streaming_last_updated_at=bulk_inserter.last_updated_at,
390
+ cache_for_dataloaders=cache_for_dataloaders,
167
391
  )
168
392
  if enable_prometheus:
169
393
  from phoenix.server.prometheus import PrometheusMiddleware
@@ -171,31 +395,22 @@ def create_app(
171
395
  prometheus_middlewares = [Middleware(PrometheusMiddleware)]
172
396
  else:
173
397
  prometheus_middlewares = []
174
- return Starlette(
398
+ app = Starlette(
399
+ lifespan=_lifespan(
400
+ read_only=read_only,
401
+ bulk_inserter=bulk_inserter,
402
+ tracer_provider=tracer_provider,
403
+ enable_prometheus=enable_prometheus,
404
+ clean_ups=clean_ups,
405
+ ),
175
406
  middleware=[
176
407
  Middleware(HeadersMiddleware),
177
408
  *prometheus_middlewares,
178
409
  ],
179
410
  debug=debug,
180
- routes=(
181
- []
182
- if traces is None or read_only
183
- else [
184
- Route(
185
- "/v1/spans",
186
- type("SpanEndpoint", (SpanHandler,), {"traces": traces}),
187
- ),
188
- Route(
189
- "/v1/traces",
190
- type("TraceEndpoint", (TraceHandler,), {"traces": traces, "store": span_store}),
191
- ),
192
- Route(
193
- "/v1/evaluations",
194
- type("EvaluationEndpoint", (EvaluationHandler,), {"traces": traces}),
195
- ),
196
- ]
197
- )
411
+ routes=V1_ROUTES
198
412
  + [
413
+ Route("/schema", endpoint=openapi_schema, include_in_schema=False),
199
414
  Route("/arize_phoenix_version", version),
200
415
  Route("/healthz", check_healthz),
201
416
  Route(
@@ -206,6 +421,10 @@ def create_app(
206
421
  {"path": export_path},
207
422
  ),
208
423
  ),
424
+ Route(
425
+ "/docs",
426
+ api_docs,
427
+ ),
209
428
  Route(
210
429
  "/graphql",
211
430
  graphql,
@@ -226,3 +445,12 @@ def create_app(
226
445
  ),
227
446
  ],
228
447
  )
448
+ app.state.read_only = read_only
449
+ app.state.db = db
450
+ if tracer_provider:
451
+ from opentelemetry.instrumentation.starlette import StarletteInstrumentor
452
+
453
+ StarletteInstrumentor().instrument(tracer_provider=tracer_provider)
454
+ StarletteInstrumentor.instrument_app(app, tracer_provider=tracer_provider)
455
+ clean_ups.append(StarletteInstrumentor().uninstrument)
456
+ return app
@@ -0,0 +1,93 @@
1
+ from typing import TYPE_CHECKING, Any, Awaitable, Callable, List, Optional
2
+
3
+ import grpc
4
+ from grpc.aio import RpcContext, Server, ServerInterceptor
5
+ from opentelemetry.proto.collector.trace.v1.trace_service_pb2 import (
6
+ ExportTraceServiceRequest,
7
+ ExportTraceServiceResponse,
8
+ )
9
+ from opentelemetry.proto.collector.trace.v1.trace_service_pb2_grpc import (
10
+ TraceServiceServicer,
11
+ add_TraceServiceServicer_to_server,
12
+ )
13
+ from typing_extensions import TypeAlias
14
+
15
+ from phoenix.config import get_env_grpc_port
16
+ from phoenix.trace.otel import decode_otlp_span
17
+ from phoenix.trace.schemas import Span
18
+ from phoenix.utilities.project import get_project_name
19
+
20
+ if TYPE_CHECKING:
21
+ from opentelemetry.trace import TracerProvider
22
+
23
+ ProjectName: TypeAlias = str
24
+
25
+
26
+ class Servicer(TraceServiceServicer):
27
+ def __init__(
28
+ self,
29
+ callback: Callable[[Span, ProjectName], Awaitable[None]],
30
+ ) -> None:
31
+ super().__init__()
32
+ self._callback = callback
33
+
34
+ async def Export(
35
+ self,
36
+ request: ExportTraceServiceRequest,
37
+ context: RpcContext,
38
+ ) -> ExportTraceServiceResponse:
39
+ for resource_spans in request.resource_spans:
40
+ project_name = get_project_name(resource_spans.resource.attributes)
41
+ for scope_span in resource_spans.scope_spans:
42
+ for otlp_span in scope_span.spans:
43
+ span = decode_otlp_span(otlp_span)
44
+ await self._callback(span, project_name)
45
+ return ExportTraceServiceResponse()
46
+
47
+
48
+ class GrpcServer:
49
+ def __init__(
50
+ self,
51
+ callback: Callable[[Span, ProjectName], Awaitable[None]],
52
+ tracer_provider: Optional["TracerProvider"] = None,
53
+ enable_prometheus: bool = False,
54
+ disabled: bool = False,
55
+ ) -> None:
56
+ self._callback = callback
57
+ self._server: Optional[Server] = None
58
+ self._tracer_provider = tracer_provider
59
+ self._enable_prometheus = enable_prometheus
60
+ self._disabled = disabled
61
+
62
+ async def __aenter__(self) -> None:
63
+ if self._disabled:
64
+ return
65
+ interceptors: List[ServerInterceptor] = []
66
+ if self._enable_prometheus:
67
+ ...
68
+ # TODO: convert to async interceptor
69
+ # from py_grpc_prometheus.prometheus_server_interceptor import PromServerInterceptor
70
+ #
71
+ # interceptors.append(PromServerInterceptor())
72
+ if self._tracer_provider is not None:
73
+ from opentelemetry.instrumentation.grpc import GrpcAioInstrumentorServer
74
+
75
+ GrpcAioInstrumentorServer().instrument(tracer_provider=self._tracer_provider) # type: ignore
76
+ server = grpc.aio.server(
77
+ options=(("grpc.so_reuseport", 0),),
78
+ interceptors=interceptors,
79
+ )
80
+ server.add_insecure_port(f"[::]:{get_env_grpc_port()}")
81
+ add_TraceServiceServicer_to_server(Servicer(self._callback), server) # type: ignore
82
+ await server.start()
83
+ self._server = server
84
+
85
+ async def __aexit__(self, *args: Any, **kwargs: Any) -> None:
86
+ if self._server is None:
87
+ return
88
+ await self._server.stop(5)
89
+ self._server = None
90
+ if self._tracer_provider is not None:
91
+ from opentelemetry.instrumentation.grpc import GrpcAioInstrumentorServer
92
+
93
+ GrpcAioInstrumentorServer().uninstrument() # type: ignore