arize-phoenix 11.9.0__py3-none-any.whl → 11.10.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of arize-phoenix might be problematic.
- {arize_phoenix-11.9.0.dist-info → arize_phoenix-11.10.1.dist-info}/METADATA +1 -1
- {arize_phoenix-11.9.0.dist-info → arize_phoenix-11.10.1.dist-info}/RECORD +21 -21
- phoenix/server/api/helpers/playground_clients.py +146 -63
- phoenix/server/api/helpers/playground_registry.py +2 -2
- phoenix/server/api/types/Project.py +45 -21
- phoenix/server/static/.vite/manifest.json +41 -41
- phoenix/server/static/assets/{components-IBd-PDxA.js → components-XAeml0-1.js} +70 -74
- phoenix/server/static/assets/{index-B8EBC_Z5.js → index-D7EtHUpz.js} +31 -9
- phoenix/server/static/assets/{pages-6D1duYIe.js → pages-CPfaxiKa.js} +506 -424
- phoenix/server/static/assets/vendor-CqDb5u4o.css +1 -0
- phoenix/server/static/assets/{vendor-arizeai-CvjUqTrl.js → vendor-arizeai-4fVwwnrI.js} +1 -1
- phoenix/server/static/assets/{vendor-codemirror-CKK25Gd7.js → vendor-codemirror-DRfFHb57.js} +1 -1
- phoenix/server/static/assets/{vendor-recharts-CWtaRhQC.js → vendor-recharts-w6bSawXG.js} +1 -1
- phoenix/server/static/assets/{vendor-shiki-D30GF-p9.js → vendor-shiki-CplrhwOk.js} +1 -1
- phoenix/server/templates/index.html +3 -4
- phoenix/version.py +1 -1
- phoenix/server/static/assets/vendor-WIZid84E.css +0 -1
- {arize_phoenix-11.9.0.dist-info → arize_phoenix-11.10.1.dist-info}/WHEEL +0 -0
- {arize_phoenix-11.9.0.dist-info → arize_phoenix-11.10.1.dist-info}/entry_points.txt +0 -0
- {arize_phoenix-11.9.0.dist-info → arize_phoenix-11.10.1.dist-info}/licenses/IP_NOTICE +0 -0
- {arize_phoenix-11.9.0.dist-info → arize_phoenix-11.10.1.dist-info}/licenses/LICENSE +0 -0
- /phoenix/server/static/assets/{vendor-BzZ0oklU.js → vendor-DhvamIr8.js} +0 -0
{arize_phoenix-11.9.0.dist-info → arize_phoenix-11.10.1.dist-info}/RECORD

@@ -6,7 +6,7 @@ phoenix/exceptions.py,sha256=n2L2KKuecrdflB9MsCdAYCiSEvGJptIsfRkXMoJle7A,169
 phoenix/py.typed,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 phoenix/services.py,sha256=ngkyKGVatX3cO2WJdo2hKdaVKP-xJCMvqthvga6kJss,5196
 phoenix/settings.py,sha256=2kHfT3BNOVd4dAO1bq-syEQbHSG8oX2-7NhOwK2QREk,896
-phoenix/version.py,sha256=
+phoenix/version.py,sha256=xvvOboOAs4Htuc0_lAWkZ6Ru9ZUbUhsIFFXYCIMHAAE,24
 phoenix/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 phoenix/core/embedding_dimension.py,sha256=zKGbcvwOXgLf-yrJBpQyKtd-LEOPRKHnUToyAU8Owis,87
 phoenix/core/model.py,sha256=qBFraOtmwCCnWJltKNP18DDG0mULXigytlFsa6YOz6k,4837
@@ -176,8 +176,8 @@ phoenix/server/api/helpers/__init__.py,sha256=m2-xaSPqUiSs91k62JaRDjFNfl-1byxBfY
 phoenix/server/api/helpers/annotations.py,sha256=9gMXKpMTfWEChoSCnvdWYuyB0hlSnNOp-qUdar9Vono,262
 phoenix/server/api/helpers/dataset_helpers.py,sha256=3bdGBoUzqrtg-sr5p2wpQLOU6dhg_3TKFHNeJj8p0TU,9155
 phoenix/server/api/helpers/experiment_run_filters.py,sha256=DOnVwrmn39eAkk2mwuZP8kIcAnR5jrOgllEwWSjsw94,29893
-phoenix/server/api/helpers/playground_clients.py,sha256=
-phoenix/server/api/helpers/playground_registry.py,sha256=
+phoenix/server/api/helpers/playground_clients.py,sha256=Fq4DNVIdnCiiVt0bh5mrZ7dJb2oOQcLjTttfq0Wcuv0,73589
+phoenix/server/api/helpers/playground_registry.py,sha256=n0v4-KnvZJxeaEwOla5qBbnOQjSWznKmMhZnh9ziJt0,2584
 phoenix/server/api/helpers/playground_spans.py,sha256=QpXwPl_fFNwm_iA1A77XApUyXMl1aDmonw8aXuNZ_4k,17132
 phoenix/server/api/helpers/prompts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 phoenix/server/api/helpers/prompts/models.py,sha256=nlPtLZaGcHfWNRR0iNRaBUv8eoKOnoGqRm6zadrTt0I,23547
@@ -320,7 +320,7 @@ phoenix/server/api/types/ModelInterface.py,sha256=Qe7H23wDb_Q2-HmeY2t0R5Jsn4aAfY
 phoenix/server/api/types/NumericRange.py,sha256=afEjgF97Go_OvmjMggbPBt-zGM8IONewAyEiKEHRds0,192
 phoenix/server/api/types/PerformanceMetric.py,sha256=KFkmJDqP43eDUtARQOUqR7NYcxvL6Vh2uisHWU6H3ko,387
 phoenix/server/api/types/PlaygroundModel.py,sha256=IqJFxsAAJMRyaFI9ryI3GQrpFOJ5Llf6kIutEO-tFvM,321
-phoenix/server/api/types/Project.py,sha256=
+phoenix/server/api/types/Project.py,sha256=FE93mSrWKv1rcCPECfp1hsCen2012SPp-1pCEVuq5ic,64039
 phoenix/server/api/types/ProjectSession.py,sha256=uwqTsDTfSGz13AvP-cwS_mJR5JZ1lHqu10ungbl7g5s,6245
 phoenix/server/api/types/ProjectTraceRetentionPolicy.py,sha256=tYy2kgalPDyuaYZr0VUHjH0YpXaiF_QOzg5yfaV_c7c,3782
 phoenix/server/api/types/Prompt.py,sha256=ccP4eq1e38xbF0afclGWLOuDpBVpNbJ3AOSRClF8yFQ,4955
@@ -387,19 +387,19 @@ phoenix/server/static/apple-touch-icon-76x76.png,sha256=CT_xT12I0u2i0WU8JzBZBuOQ
 phoenix/server/static/apple-touch-icon.png,sha256=fOfpjqGpWYbJ0eAurKsyoZP1EAs6ZVooBJ_SGk2ZkDs,3801
 phoenix/server/static/favicon.ico,sha256=bY0vvCKRftemZfPShwZtE93DiiQdaYaozkPGwNFr6H8,34494
 phoenix/server/static/modernizr.js,sha256=mvK-XtkNqjOral-QvzoqsyOMECXIMu5BQwSVN_wcU9c,2564
-phoenix/server/static/.vite/manifest.json,sha256=
-phoenix/server/static/assets/components-
-phoenix/server/static/assets/index-
-phoenix/server/static/assets/pages-
-phoenix/server/static/assets/vendor-
-phoenix/server/static/assets/vendor-
-phoenix/server/static/assets/vendor-arizeai-
-phoenix/server/static/assets/vendor-codemirror-
-phoenix/server/static/assets/vendor-recharts-
-phoenix/server/static/assets/vendor-shiki-
+phoenix/server/static/.vite/manifest.json,sha256=67Jr1Pbvt4QUZP4O98Fe_9tPLJvVHSKqSJeYcfC2oVk,2165
+phoenix/server/static/assets/components-XAeml0-1.js,sha256=aCeHz5UUtnMLqWDeVRMmHJnjMHgwEDjpdRoz_tJ063s,620501
+phoenix/server/static/assets/index-D7EtHUpz.js,sha256=FBp-_XLUtdcJ3wLAdu0peRKAPTkU2Ceu_U2Fu-QRfDg,62601
+phoenix/server/static/assets/pages-CPfaxiKa.js,sha256=EyoqjFXTFOgpww-GDBYnH7KfsLfV84OI06WYG0xmUjo,1190498
+phoenix/server/static/assets/vendor-CqDb5u4o.css,sha256=zIyFiNJKxMaQk8AvtLgt1rR01oO10d1MFndSDKH9Clw,5517
+phoenix/server/static/assets/vendor-DhvamIr8.js,sha256=hZdQcaVySUPWh4GXRhuL-PhUy2K4sJLs6gYxde6qB_I,2748936
+phoenix/server/static/assets/vendor-arizeai-4fVwwnrI.js,sha256=8eBZfGf2fjjNoWDW31mHXdxkqSlWItGj7eHhbnH92Qk,151750
+phoenix/server/static/assets/vendor-codemirror-DRfFHb57.js,sha256=v-q8mq6f-EbjtvgzLTQYF-SNKX4R6LOGJe5ucOMqx1w,781264
+phoenix/server/static/assets/vendor-recharts-w6bSawXG.js,sha256=uTuxE0vslP7_y-F1dOU5K2nXke7qe8JMdE2SnRGCVKM,231651
+phoenix/server/static/assets/vendor-shiki-CplrhwOk.js,sha256=ISu7sYmhh_FDTmnBN-icbipm6fa2InqOeQTo2JFr3LI,8980312
 phoenix/server/static/assets/vendor-three-C5WAXd5r.js,sha256=ELkg06u70N7h8oFmvqdoHyPuUf9VgGEWeT4LKFx4VWo,620975
 phoenix/server/templates/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-phoenix/server/templates/index.html,sha256=
+phoenix/server/templates/index.html,sha256=3VMDmbxYwo3OoqiQyFojU6JaMLKr5k8rITacYS7HTbs,6922
 phoenix/session/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 phoenix/session/client.py,sha256=uw5WlCuFcN_eEj7Ko2bhJVcaihEIp7Evy50KnL6Sq-k,35602
 phoenix/session/data_extractor.py,sha256=Y0RzYFaNy9fQj8PEIeQ76TBZ90_E1FW7bXu3K5x0EZY,2782
@@ -437,9 +437,9 @@ phoenix/utilities/project.py,sha256=auVpARXkDb-JgeX5f2aStyFIkeKvGwN9l7qrFeJMVxI,
 phoenix/utilities/re.py,sha256=6YyUWIkv0zc2SigsxfOWIHzdpjKA_TZo2iqKq7zJKvw,2081
 phoenix/utilities/span_store.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 phoenix/utilities/template_formatters.py,sha256=gh9PJD6WEGw7TEYXfSst1UR4pWWwmjxMLrDVQ_CkpkQ,2779
-arize_phoenix-11.
-arize_phoenix-11.
-arize_phoenix-11.
-arize_phoenix-11.
-arize_phoenix-11.
-arize_phoenix-11.
+arize_phoenix-11.10.1.dist-info/METADATA,sha256=wd_5sRdhTNXTvCYeob4xrZbZiPxZ81ENs-yF8PMEIyE,30851
+arize_phoenix-11.10.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+arize_phoenix-11.10.1.dist-info/entry_points.txt,sha256=Pgpn8Upxx9P8z8joPXZWl2LlnAlGc3gcQoVchb06X1Q,94
+arize_phoenix-11.10.1.dist-info/licenses/IP_NOTICE,sha256=JBqyyCYYxGDfzQ0TtsQgjts41IJoa-hiwDrBjCb9gHM,469
+arize_phoenix-11.10.1.dist-info/licenses/LICENSE,sha256=HFkW9REuMOkvKRACuwLPT0hRydHb3zNg-fdFt94td18,3794
+arize_phoenix-11.10.1.dist-info/RECORD,,
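Each RECORD entry above has the form path,sha256=<digest>,<size>, where the digest is the URL-safe base64 encoding of the file's SHA-256 hash with trailing padding stripped (per the wheel spec). A minimal sketch for recomputing such a digest locally so an entry can be checked by hand; the path passed at the bottom is only an example:

import base64
import hashlib
from pathlib import Path


def record_hash(path: str) -> str:
    # Wheel RECORD format: "sha256=" + urlsafe-base64(SHA-256 digest) without "=" padding.
    digest = hashlib.sha256(Path(path).read_bytes()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")


print(record_hash("phoenix/version.py"))  # compare against the RECORD line above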
phoenix/server/api/helpers/playground_clients.py

@@ -20,7 +20,7 @@ from openinference.semconv.trace import (
 )
 from strawberry import UNSET
 from strawberry.scalars import JSON as JSONScalarType
-from typing_extensions import TypeAlias, assert_never
+from typing_extensions import TypeAlias, assert_never, override

 from phoenix.config import getenv
 from phoenix.evals.models.rate_limiters import (
@@ -437,9 +437,9 @@ class OpenAIBaseStreamingClient(PlaygroundStreamingClient):
         if role is ChatCompletionMessageRole.TOOL:
             if tool_call_id is None:
                 raise ValueError("tool_call_id is required for tool messages")
-
-
-
+            return ChatCompletionToolMessageParam(
+                {"content": content, "role": "tool", "tool_call_id": tool_call_id}
+            )
         assert_never(role)

     def to_openai_tool_call_param(
@@ -1140,27 +1140,28 @@ class OpenAIStreamingClient(OpenAIBaseStreamingClient):
         self._attributes[LLM_SYSTEM] = OpenInferenceLLMSystemValues.OPENAI.value


-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-class
+_OPENAI_REASONING_MODELS = [
+    "o1",
+    "o1-pro",
+    "o1-2024-12-17",
+    "o1-pro-2025-03-19",
+    "o1-mini",
+    "o1-mini-2024-09-12",
+    "o1-preview",
+    "o1-preview-2024-09-12",
+    "o3",
+    "o3-pro",
+    "o3-2025-04-16",
+    "o3-mini",
+    "o3-mini-2025-01-31",
+    "o4-mini",
+    "o4-mini-2025-04-16",
+]
+
+
+class OpenAIReasoningReasoningModelsMixin:
+    """Mixin class for OpenAI-style reasoning model clients (o1, o3 series)."""
+
     @classmethod
     def supported_invocation_parameters(cls) -> list[InvocationParameter]:
         return [
@@ -1191,6 +1192,16 @@ class OpenAIReasoningStreamingClient(OpenAIStreamingClient):
             ),
         ]

+
+@register_llm_client(
+    provider_key=GenerativeProviderKey.OPENAI,
+    model_names=_OPENAI_REASONING_MODELS,
+)
+class OpenAIReasoningNonStreamingClient(
+    OpenAIReasoningReasoningModelsMixin,
+    OpenAIStreamingClient,
+):
+    @override
     async def chat_completion_create(
         self,
         messages: list[
@@ -1283,46 +1294,11 @@ class OpenAIReasoningStreamingClient(OpenAIStreamingClient):
         if role is ChatCompletionMessageRole.TOOL:
             if tool_call_id is None:
                 raise ValueError("tool_call_id is required for tool messages")
-
-
-
+            return ChatCompletionToolMessageParam(
+                {"content": content, "role": "tool", "tool_call_id": tool_call_id}
+            )
         assert_never(role)

-    @staticmethod
-    def _llm_token_counts(usage: "CompletionUsage") -> Iterator[tuple[str, Any]]:
-        yield LLM_TOKEN_COUNT_PROMPT, usage.prompt_tokens
-        yield LLM_TOKEN_COUNT_COMPLETION, usage.completion_tokens
-        yield LLM_TOKEN_COUNT_TOTAL, usage.total_tokens
-
-        if hasattr(usage, "prompt_tokens_details") and usage.prompt_tokens_details is not None:
-            prompt_details = usage.prompt_tokens_details
-            if (
-                hasattr(prompt_details, "cached_tokens")
-                and prompt_details.cached_tokens is not None
-            ):
-                yield LLM_TOKEN_COUNT_PROMPT_DETAILS_CACHE_READ, prompt_details.cached_tokens
-            if hasattr(prompt_details, "audio_tokens") and prompt_details.audio_tokens is not None:
-                yield LLM_TOKEN_COUNT_PROMPT_DETAILS_AUDIO, prompt_details.audio_tokens
-
-        if (
-            hasattr(usage, "completion_tokens_details")
-            and usage.completion_tokens_details is not None
-        ):
-            completion_details = usage.completion_tokens_details
-            if (
-                hasattr(completion_details, "reasoning_tokens")
-                and completion_details.reasoning_tokens is not None
-            ):
-                yield (
-                    LLM_TOKEN_COUNT_COMPLETION_DETAILS_REASONING,
-                    completion_details.reasoning_tokens,
-                )
-            if (
-                hasattr(completion_details, "audio_tokens")
-                and completion_details.audio_tokens is not None
-            ):
-                yield LLM_TOKEN_COUNT_COMPLETION_DETAILS_AUDIO, completion_details.audio_tokens
-

 @register_llm_client(
     provider_key=GenerativeProviderKey.AZURE_OPENAI,
@@ -1376,6 +1352,113 @@ class AzureOpenAIStreamingClient(OpenAIBaseStreamingClient):
         self._attributes[LLM_SYSTEM] = OpenInferenceLLMSystemValues.OPENAI.value


+
+@register_llm_client(
+    provider_key=GenerativeProviderKey.AZURE_OPENAI,
+    model_names=_OPENAI_REASONING_MODELS,
+)
+class AzureOpenAIReasoningNonStreamingClient(
+    OpenAIReasoningReasoningModelsMixin,
+    AzureOpenAIStreamingClient,
+):
+    @override
+    async def chat_completion_create(
+        self,
+        messages: list[
+            tuple[ChatCompletionMessageRole, str, Optional[str], Optional[list[JSONScalarType]]]
+        ],
+        tools: list[JSONScalarType],
+        **invocation_parameters: Any,
+    ) -> AsyncIterator[ChatCompletionChunk]:
+        from openai import NOT_GIVEN
+
+        # Convert standard messages to OpenAI messages
+        openai_messages = []
+        for message in messages:
+            openai_message = self.to_openai_chat_completion_param(*message)
+            if openai_message is not None:
+                openai_messages.append(openai_message)
+
+        throttled_create = self.rate_limiter._alimit(self.client.chat.completions.create)
+        response = await throttled_create(
+            messages=openai_messages,
+            model=self.model_name,
+            stream=False,
+            tools=tools or NOT_GIVEN,
+            **invocation_parameters,
+        )
+
+        if response.usage is not None:
+            self._attributes.update(dict(self._llm_token_counts(response.usage)))
+
+        choice = response.choices[0]
+        if choice.message.content:
+            yield TextChunk(content=choice.message.content)
+
+        if choice.message.tool_calls:
+            for tool_call in choice.message.tool_calls:
+                yield ToolCallChunk(
+                    id=tool_call.id,
+                    function=FunctionCallChunk(
+                        name=tool_call.function.name,
+                        arguments=tool_call.function.arguments,
+                    ),
+                )
+
+    def to_openai_chat_completion_param(
+        self,
+        role: ChatCompletionMessageRole,
+        content: JSONScalarType,
+        tool_call_id: Optional[str] = None,
+        tool_calls: Optional[list[JSONScalarType]] = None,
+    ) -> Optional["ChatCompletionMessageParam"]:
+        from openai.types.chat import (
+            ChatCompletionAssistantMessageParam,
+            ChatCompletionDeveloperMessageParam,
+            ChatCompletionToolMessageParam,
+            ChatCompletionUserMessageParam,
+        )
+
+        if role is ChatCompletionMessageRole.USER:
+            return ChatCompletionUserMessageParam(
+                {
+                    "content": content,
+                    "role": "user",
+                }
+            )
+        if role is ChatCompletionMessageRole.SYSTEM:
+            return ChatCompletionDeveloperMessageParam(
+                {
+                    "content": content,
+                    "role": "developer",
+                }
+            )
+        if role is ChatCompletionMessageRole.AI:
+            if tool_calls is None:
+                return ChatCompletionAssistantMessageParam(
+                    {
+                        "content": content,
+                        "role": "assistant",
+                    }
+                )
+            else:
+                return ChatCompletionAssistantMessageParam(
+                    {
+                        "content": content,
+                        "role": "assistant",
+                        "tool_calls": [
+                            self.to_openai_tool_call_param(tool_call) for tool_call in tool_calls
+                        ],
+                    }
+                )
+        if role is ChatCompletionMessageRole.TOOL:
+            if tool_call_id is None:
+                raise ValueError("tool_call_id is required for tool messages")
+            return ChatCompletionToolMessageParam(
+                {"content": content, "role": "tool", "tool_call_id": tool_call_id}
+            )
+        assert_never(role)
+
+
 @register_llm_client(
     provider_key=GenerativeProviderKey.ANTHROPIC,
     model_names=[
phoenix/server/api/helpers/playground_registry.py

@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING, Any, Callable, Optional, Union
+from typing import TYPE_CHECKING, Any, Callable, Optional, Sequence, Union

 from phoenix.server.api.types.GenerativeProvider import GenerativeProviderKey

@@ -59,7 +59,7 @@ PLAYGROUND_CLIENT_REGISTRY: PlaygroundClientRegistry = PlaygroundClientRegistry(

 def register_llm_client(
     provider_key: GenerativeProviderKey,
-    model_names:
+    model_names: Sequence[ModelName],
 ) -> Callable[[type["PlaygroundStreamingClient"]], type["PlaygroundStreamingClient"]]:
     def decorator(cls: type["PlaygroundStreamingClient"]) -> type["PlaygroundStreamingClient"]:
         provider_registry = PLAYGROUND_CLIENT_REGISTRY._registry.setdefault(provider_key, {})
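The registry change above only widens the type accepted for model_names from a concrete list to any Sequence, so reasoning-model client registrations can pass the shared _OPENAI_REASONING_MODELS list (or a tuple) to the decorator. A minimal, self-contained sketch of the same decorator-registry pattern; the ProviderKey alias, _REGISTRY dict, and FakeReasoningClient below are illustrative stand-ins, not Phoenix's actual implementation:

from typing import Callable, Sequence

ProviderKey = str  # stand-in for GenerativeProviderKey
ModelName = str

_REGISTRY: dict[ProviderKey, dict[ModelName, type]] = {}


def register_llm_client(
    provider_key: ProviderKey,
    model_names: Sequence[ModelName],  # Sequence: a list, a tuple, etc. all work
) -> Callable[[type], type]:
    def decorator(cls: type) -> type:
        provider_registry = _REGISTRY.setdefault(provider_key, {})
        for model_name in model_names:
            provider_registry[model_name] = cls  # map each model name to the client class
        return cls

    return decorator


@register_llm_client(provider_key="openai", model_names=("o1", "o3-mini"))
class FakeReasoningClient:
    pass


assert _REGISTRY["openai"]["o1"] is FakeReasoningClient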
phoenix/server/api/types/Project.py

@@ -7,7 +7,7 @@ from typing import TYPE_CHECKING, Annotated, Any, ClassVar, Literal, Optional, c
 import strawberry
 from aioitertools.itertools import groupby, islice
 from openinference.semconv.trace import SpanAttributes
-from sqlalchemy import and_, desc, distinct, exists, func, or_, select
+from sqlalchemy import and_, case, desc, distinct, exists, func, or_, select
 from sqlalchemy.dialects import postgresql, sqlite
 from sqlalchemy.sql.elements import ColumnElement
 from sqlalchemy.sql.expression import tuple_
@@ -719,6 +719,7 @@ class Project(Node):
         info: Info[Context, None],
         time_range: TimeRange,
         time_bin_config: Optional[TimeBinConfig] = UNSET,
+        filter_condition: Optional[str] = UNSET,
     ) -> SpanCountTimeSeries:
         if time_range.start is None:
             raise BadRequest("Start time is required")
@@ -742,7 +743,17 @@ class Project(Node):
             field = "year"
         bucket = date_trunc(dialect, field, models.Span.start_time, utc_offset_minutes)
         stmt = (
-            select(
+            select(
+                bucket,
+                func.count(models.Span.id).label("total_count"),
+                func.sum(case((models.Span.status_code == "OK", 1), else_=0)).label("ok_count"),
+                func.sum(case((models.Span.status_code == "ERROR", 1), else_=0)).label(
+                    "error_count"
+                ),
+                func.sum(case((models.Span.status_code == "UNSET", 1), else_=0)).label(
+                    "unset_count"
+                ),
+            )
             .join_from(models.Span, models.Trace)
             .where(models.Trace.project_rowid == self.project_rowid)
             .group_by(bucket)
@@ -752,21 +763,31 @@ class Project(Node):
             stmt = stmt.where(time_range.start <= models.Span.start_time)
         if time_range.end:
             stmt = stmt.where(models.Span.start_time < time_range.end)
+        if filter_condition:
+            span_filter = SpanFilter(condition=filter_condition)
+            stmt = span_filter(stmt)

         data = {}
         async with info.context.db() as session:
-            async for t,
+            async for t, total_count, ok_count, error_count, unset_count in await session.stream(
+                stmt
+            ):
                 timestamp = _as_datetime(t)
-                data[timestamp] =
+                data[timestamp] = SpanCountTimeSeriesDataPoint(
+                    timestamp=timestamp,
+                    ok_count=ok_count,
+                    error_count=error_count,
+                    unset_count=unset_count,
+                    total_count=total_count,
+                )

         data_timestamps: list[datetime] = [data_point.timestamp for data_point in data.values()]
         min_time = min([*data_timestamps, time_range.start])
         max_time = max(
             [
                 *data_timestamps,
-                *([time_range.end] if time_range.end else []),
+                *([time_range.end] if time_range.end else [datetime.now(timezone.utc)]),
             ],
-            default=datetime.now(timezone.utc),
         )
         for timestamp in get_timestamp_range(
             start_time=min_time,
@@ -775,7 +796,7 @@ class Project(Node):
             utc_offset_minutes=utc_offset_minutes,
         ):
             if timestamp not in data:
-                data[timestamp] =
+                data[timestamp] = SpanCountTimeSeriesDataPoint(timestamp=timestamp)
         return SpanCountTimeSeries(data=sorted(data.values(), key=lambda x: x.timestamp))

     @strawberry.field
@@ -828,9 +849,8 @@ class Project(Node):
         max_time = max(
             [
                 *data_timestamps,
-                *([time_range.end] if time_range.end else []),
+                *([time_range.end] if time_range.end else [datetime.now(timezone.utc)]),
             ],
-            default=datetime.now(timezone.utc),
         )
         for timestamp in get_timestamp_range(
             start_time=min_time,
@@ -917,9 +937,8 @@ class Project(Node):
         max_time = max(
             [
                 *data_timestamps,
-                *([time_range.end] if time_range.end else []),
+                *([time_range.end] if time_range.end else [datetime.now(timezone.utc)]),
             ],
-            default=datetime.now(timezone.utc),
         )
         for timestamp in get_timestamp_range(
             start_time=min_time,
@@ -1025,9 +1044,8 @@ class Project(Node):
         max_time = max(
             [
                 *data_timestamps,
-                *([time_range.end] if time_range.end else []),
+                *([time_range.end] if time_range.end else [datetime.now(timezone.utc)]),
             ],
-            default=datetime.now(timezone.utc),
         )
         for timestamp in get_timestamp_range(
             start_time=min_time,
@@ -1111,9 +1129,8 @@ class Project(Node):
         max_time = max(
             [
                 *data_timestamps,
-                *([time_range.end] if time_range.end else []),
+                *([time_range.end] if time_range.end else [datetime.now(timezone.utc)]),
             ],
-            default=datetime.now(timezone.utc),
         )
         for timestamp in get_timestamp_range(
             start_time=min_time,
@@ -1195,9 +1212,8 @@ class Project(Node):
         max_time = max(
             [
                 *data_timestamps,
-                *([time_range.end] if time_range.end else []),
+                *([time_range.end] if time_range.end else [datetime.now(timezone.utc)]),
             ],
-            default=datetime.now(timezone.utc),
         )
         for timestamp in get_timestamp_range(
             start_time=min_time,
@@ -1281,9 +1297,8 @@ class Project(Node):
         max_time = max(
             [
                 *score_timestamps,
-                *([time_range.end] if time_range.end else []),
+                *([time_range.end] if time_range.end else [datetime.now(timezone.utc)]),
             ],
-            default=datetime.now(timezone.utc),
         )
         data: dict[datetime, SpanAnnotationScoreTimeSeriesDataPoint] = {
             timestamp: SpanAnnotationScoreTimeSeriesDataPoint(
@@ -1313,8 +1328,17 @@ class Project(Node):


 @strawberry.type
-class
-
+class SpanCountTimeSeriesDataPoint:
+    timestamp: datetime
+    ok_count: Optional[int] = None
+    error_count: Optional[int] = None
+    unset_count: Optional[int] = None
+    total_count: Optional[int] = None
+
+
+@strawberry.type
+class SpanCountTimeSeries:
+    data: list[SpanCountTimeSeriesDataPoint]


 @strawberry.type
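The Project.py hunks above replace a single span count per time bucket with per-status counts (ok/error/unset plus a total) computed in one grouped query and exposed through the new SpanCountTimeSeriesDataPoint type. A minimal sketch of the same func.sum(case(...)) aggregation pattern in SQLAlchemy 2.x; the Span model below is a toy table, not Phoenix's actual models module:

from sqlalchemy import Column, DateTime, Integer, String, case, func, select
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Span(Base):
    __tablename__ = "spans"
    id = Column(Integer, primary_key=True)
    status_code = Column(String)  # "OK", "ERROR", or "UNSET"
    start_time = Column(DateTime)


# One pass over the table: a total plus one conditional sum per status code.
stmt = select(
    func.count(Span.id).label("total_count"),
    func.sum(case((Span.status_code == "OK", 1), else_=0)).label("ok_count"),
    func.sum(case((Span.status_code == "ERROR", 1), else_=0)).label("error_count"),
    func.sum(case((Span.status_code == "UNSET", 1), else_=0)).label("unset_count"),
)

print(stmt)  # inspect the generated SQL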
phoenix/server/static/.vite/manifest.json

@@ -1,67 +1,67 @@
 {
-  "_components-
-    "file": "assets/components-
+  "_components-XAeml0-1.js": {
+    "file": "assets/components-XAeml0-1.js",
     "name": "components",
     "imports": [
-      "_vendor-
-      "_pages-
-      "_vendor-arizeai-
-      "_vendor-codemirror-
+      "_vendor-DhvamIr8.js",
+      "_pages-CPfaxiKa.js",
+      "_vendor-arizeai-4fVwwnrI.js",
+      "_vendor-codemirror-DRfFHb57.js",
       "_vendor-three-C5WAXd5r.js"
     ]
   },
-  "_pages-
-    "file": "assets/pages-
+  "_pages-CPfaxiKa.js": {
+    "file": "assets/pages-CPfaxiKa.js",
     "name": "pages",
     "imports": [
-      "_vendor-
-      "_vendor-arizeai-
-      "_components-
-      "_vendor-codemirror-
-      "_vendor-recharts-
+      "_vendor-DhvamIr8.js",
+      "_vendor-arizeai-4fVwwnrI.js",
+      "_components-XAeml0-1.js",
+      "_vendor-codemirror-DRfFHb57.js",
+      "_vendor-recharts-w6bSawXG.js"
     ]
   },
-  "_vendor-
-    "file": "assets/vendor-
+  "_vendor-CqDb5u4o.css": {
+    "file": "assets/vendor-CqDb5u4o.css",
+    "src": "_vendor-CqDb5u4o.css"
+  },
+  "_vendor-DhvamIr8.js": {
+    "file": "assets/vendor-DhvamIr8.js",
     "name": "vendor",
     "imports": [
       "_vendor-three-C5WAXd5r.js"
     ],
     "css": [
-      "assets/vendor-
+      "assets/vendor-CqDb5u4o.css"
     ]
   },
-  "_vendor-
-    "file": "assets/vendor-
-    "src": "_vendor-WIZid84E.css"
-  },
-  "_vendor-arizeai-CvjUqTrl.js": {
-    "file": "assets/vendor-arizeai-CvjUqTrl.js",
+  "_vendor-arizeai-4fVwwnrI.js": {
+    "file": "assets/vendor-arizeai-4fVwwnrI.js",
     "name": "vendor-arizeai",
     "imports": [
-      "_vendor-
+      "_vendor-DhvamIr8.js"
     ]
   },
-  "_vendor-codemirror-
-    "file": "assets/vendor-codemirror-
+  "_vendor-codemirror-DRfFHb57.js": {
+    "file": "assets/vendor-codemirror-DRfFHb57.js",
     "name": "vendor-codemirror",
     "imports": [
-      "_vendor-
-      "_vendor-shiki-
+      "_vendor-DhvamIr8.js",
+      "_vendor-shiki-CplrhwOk.js"
     ]
   },
-  "_vendor-recharts-
-    "file": "assets/vendor-recharts-
+  "_vendor-recharts-w6bSawXG.js": {
+    "file": "assets/vendor-recharts-w6bSawXG.js",
     "name": "vendor-recharts",
     "imports": [
-      "_vendor-
+      "_vendor-DhvamIr8.js"
     ]
   },
-  "_vendor-shiki-
-    "file": "assets/vendor-shiki-
+  "_vendor-shiki-CplrhwOk.js": {
+    "file": "assets/vendor-shiki-CplrhwOk.js",
     "name": "vendor-shiki",
     "imports": [
-      "_vendor-
+      "_vendor-DhvamIr8.js"
     ]
   },
   "_vendor-three-C5WAXd5r.js": {
@@ -69,19 +69,19 @@
     "name": "vendor-three"
   },
   "index.tsx": {
-    "file": "assets/index-
+    "file": "assets/index-D7EtHUpz.js",
     "name": "index",
     "src": "index.tsx",
     "isEntry": true,
     "imports": [
-      "_vendor-
-      "_vendor-arizeai-
-      "_pages-
-      "_components-
+      "_vendor-DhvamIr8.js",
+      "_vendor-arizeai-4fVwwnrI.js",
+      "_pages-CPfaxiKa.js",
+      "_components-XAeml0-1.js",
       "_vendor-three-C5WAXd5r.js",
-      "_vendor-codemirror-
-      "_vendor-shiki-
-      "_vendor-recharts-
+      "_vendor-codemirror-DRfFHb57.js",
+      "_vendor-shiki-CplrhwOk.js",
+      "_vendor-recharts-w6bSawXG.js"
     ]
   }
 }