arize-phoenix 5.9.1__py3-none-any.whl → 5.10.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to their public registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in that registry.

This release of arize-phoenix has been flagged as potentially problematic.

Files changed (21)
  1. {arize_phoenix-5.9.1.dist-info → arize_phoenix-5.10.0.dist-info}/METADATA +1 -1
  2. {arize_phoenix-5.9.1.dist-info → arize_phoenix-5.10.0.dist-info}/RECORD +21 -21
  3. phoenix/config.py +3 -1
  4. phoenix/server/api/helpers/playground_clients.py +44 -6
  5. phoenix/server/api/helpers/playground_spans.py +34 -8
  6. phoenix/server/api/subscriptions.py +1 -1
  7. phoenix/server/api/types/GenerativeProvider.py +44 -0
  8. phoenix/server/api/types/Span.py +4 -5
  9. phoenix/server/static/.vite/manifest.json +31 -31
  10. phoenix/server/static/assets/{components-BcvRmBnN.js → components-BXIz9ZO8.js} +124 -124
  11. phoenix/server/static/assets/{index-BF4RUiOz.js → index-DTut7g1y.js} +2 -2
  12. phoenix/server/static/assets/{pages-CM_Zho_x.js → pages-B8FpJuXu.js} +264 -264
  13. phoenix/server/static/assets/{vendor-Bjm5T3cE.js → vendor-BX8_Znqy.js} +146 -146
  14. phoenix/server/static/assets/{vendor-arizeai-CQhWGEdL.js → vendor-arizeai-CtHir-Ua.js} +1 -1
  15. phoenix/server/static/assets/{vendor-codemirror-CdtiO80y.js → vendor-codemirror-DLlGiguX.js} +2 -2
  16. phoenix/server/static/assets/{vendor-recharts-BqWon6Py.js → vendor-recharts-CJRple0d.js} +1 -1
  17. phoenix/version.py +1 -1
  18. {arize_phoenix-5.9.1.dist-info → arize_phoenix-5.10.0.dist-info}/WHEEL +0 -0
  19. {arize_phoenix-5.9.1.dist-info → arize_phoenix-5.10.0.dist-info}/entry_points.txt +0 -0
  20. {arize_phoenix-5.9.1.dist-info → arize_phoenix-5.10.0.dist-info}/licenses/IP_NOTICE +0 -0
  21. {arize_phoenix-5.9.1.dist-info → arize_phoenix-5.10.0.dist-info}/licenses/LICENSE +0 -0
{arize_phoenix-5.9.1.dist-info → arize_phoenix-5.10.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: arize-phoenix
- Version: 5.9.1
+ Version: 5.10.0
  Summary: AI Observability and Evaluation
  Project-URL: Documentation, https://docs.arize.com/phoenix/
  Project-URL: Issues, https://github.com/Arize-ai/phoenix/issues
{arize_phoenix-5.9.1.dist-info → arize_phoenix-5.10.0.dist-info}/RECORD CHANGED
@@ -1,12 +1,12 @@
  phoenix/__init__.py,sha256=X3eUEwd2rG8KKWWYVNNDJoqo08ihfjgHhlP29dcdNJE,5481
  phoenix/auth.py,sha256=JpkwJbis2INlIXWcQ-M_Nk5Ln9LBgHMdWNnaAQp0D2w,10940
- phoenix/config.py,sha256=kg1ABxnzgmBK2Uv277VLlY2_Hd3k8DGgZCKYUqSdwwI,25405
+ phoenix/config.py,sha256=xOM5eupLzXXCfZ4dzCYW-4pKG1xcLDNuu7vxvyiGfoM,25591
  phoenix/datetime_utils.py,sha256=iJzNG6YJ6V7_u8B2iA7P2Z26FyxYbOPtx0dhJ7kNDHA,3398
  phoenix/exceptions.py,sha256=n2L2KKuecrdflB9MsCdAYCiSEvGJptIsfRkXMoJle7A,169
  phoenix/py.typed,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
  phoenix/services.py,sha256=kpW1WL0kiB8XJsO6XycvZVJ-lBkNoenhQ7atCvBoSe8,5365
  phoenix/settings.py,sha256=ht-0oN-sMV6SPXrk7Tu1EZlngpAYkGNLYPhO8DyrdQI,661
- phoenix/version.py,sha256=cJut-wsFrREu2PRwluu5_M66aMFR7RNxG41cS7q4-vM,22
+ phoenix/version.py,sha256=PtYE7255x6FNDU5KsxORw9DOsNBQOKOzcRu8NtdUMBk,23
  phoenix/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  phoenix/core/embedding_dimension.py,sha256=zKGbcvwOXgLf-yrJBpQyKtd-LEOPRKHnUToyAU8Owis,87
  phoenix/core/model.py,sha256=qBFraOtmwCCnWJltKNP18DDG0mULXigytlFsa6YOz6k,4837
@@ -94,7 +94,7 @@ phoenix/server/api/exceptions.py,sha256=TA0JuY2YRnj35qGuMSQ8d0ToHum9gWm9W--3fSKH
  phoenix/server/api/interceptor.py,sha256=ykDnoC_apUd-llVli3m1CW18kNSIgjz2qZ6m5JmPDu8,1294
  phoenix/server/api/queries.py,sha256=4KJz8TUz3VUTup9MDjr_GoKX0SttWSvHBq2ncWZGxf8,27343
  phoenix/server/api/schema.py,sha256=tHyw2jTbue_-gu0fe9Sw7LUYtzJUCwp9SvccDgOkNPw,1696
- phoenix/server/api/subscriptions.py,sha256=Xu1k-UL893Ao1FsGk7OGfxuuyO6A3FzbSK8a2TTSDe4,19692
+ phoenix/server/api/subscriptions.py,sha256=uTGlf6WKrnC6Nogsx_GtIVSMOzgIKMJwyP1oe47QCHg,19688
  phoenix/server/api/utils.py,sha256=quCBRcusc6PUq9tJq7M8PgwFZp7nXgVAxtbw8feribY,833
  phoenix/server/api/dataloaders/__init__.py,sha256=jNYvfXjnZzgA2HWTG7AZdqWGla3ZysBUDUei8Zkz6N8,3290
  phoenix/server/api/dataloaders/annotation_summaries.py,sha256=2sHmIDX7n8tuPeBTs9bMKtlMKWn_Ph9awTZqmwn2Owc,5505
@@ -125,9 +125,9 @@ phoenix/server/api/dataloaders/cache/__init__.py,sha256=SYoOM9n8FJaMdQarma5d1blu
  phoenix/server/api/dataloaders/cache/two_tier_cache.py,sha256=cmo8FUT3E91R139IEzh4yCga-6nTamc5KPXAfMrzNDM,2315
  phoenix/server/api/helpers/__init__.py,sha256=m2-xaSPqUiSs91k62JaRDjFNfl-1byxBfY-m_Vxw16U,272
  phoenix/server/api/helpers/dataset_helpers.py,sha256=14mldZp9to3rr9BdvvoFqEwZHHV_k2e7jPm8q9z2OdQ,6896
- phoenix/server/api/helpers/playground_clients.py,sha256=Bt3rj0zZ2nmxb-b2vBNNzKA2N9GEUH_LyP3j166EFOI,34486
+ phoenix/server/api/helpers/playground_clients.py,sha256=Ah-f8jDr3uFC-MtofG1k9f4WYP_Lpb5eHxYaK154eRA,36471
  phoenix/server/api/helpers/playground_registry.py,sha256=CPLMziFB2wmr-dfbx7VbzO2f8YIG_k5RftzvGXYGQ1w,2570
- phoenix/server/api/helpers/playground_spans.py,sha256=j7WlFTolCgrpkazwVuXqvTj8JSJ-nf-Gf_VZKCbzAvU,14775
+ phoenix/server/api/helpers/playground_spans.py,sha256=ecQv7lTBue_vRW_2giIcAyYOEWIwU06C2371Gd92gK4,16253
  phoenix/server/api/input_types/AddExamplesToDatasetInput.py,sha256=mIQz0S_z8YdrktKIY6RCvtNJ2yZF9pYvTGgasUsI-54,430
  phoenix/server/api/input_types/AddSpansToDatasetInput.py,sha256=-StSstyMAVrba3tG1U30b-srkKCtu_svflQuSM19iJA,362
  phoenix/server/api/input_types/ChatCompletionInput.py,sha256=g_5ARuwylt-uCVAsGyZPEVtidEQiOhbKakvDQsZumzw,1451
@@ -227,7 +227,7 @@ phoenix/server/api/types/ExperimentRunAnnotation.py,sha256=iBxDaD9DgiF-Qymp5QyxW
  phoenix/server/api/types/ExportedFile.py,sha256=e3GTn7B5LgsTbqiwjhMCQH7VsiqXitrBO4aCMS1lHsg,163
  phoenix/server/api/types/Functionality.py,sha256=tzV9xdhB8zqfsjWxP66NDC7EZsplYkYO7jRbLWJIeeg,382
  phoenix/server/api/types/GenerativeModel.py,sha256=P7eBUMXbeqaLwSSGBKdZy3a5gOLd9I0fuP8o1st6H08,193
- phoenix/server/api/types/GenerativeProvider.py,sha256=4Vm82blTCvj9tCreLhghiYOz4dsJPkYbFmfDRaLae9s,1169
+ phoenix/server/api/types/GenerativeProvider.py,sha256=3zOR7SssR3mkGAaj8j-_qVzN6H7Qll52J_i98jVyEpA,3127
  phoenix/server/api/types/Inferences.py,sha256=wv88PjcK-KwnzmTdukiAX9EV2KX4GqsKXVAUm1JtnDA,3383
  phoenix/server/api/types/InferencesRole.py,sha256=mLfeHpyhGUVX1-tWzT9IwC_cD18BZrD3RA4YsHYuSpA,595
  phoenix/server/api/types/LabelFraction.py,sha256=zsDxdFALrNiGA1eNykeP8o65gbA0HOhRp54MPH_iRAM,93
@@ -241,7 +241,7 @@ phoenix/server/api/types/Retrieval.py,sha256=OhMK2ncjoyp5h1yjKhjlKpoTbQrMHuxmgSF
  phoenix/server/api/types/ScalarDriftMetricEnum.py,sha256=IUAcRPpgL41WdoIgK6cNk2Te38SspXGyEs-S1fY23_A,232
  phoenix/server/api/types/Segments.py,sha256=vT2v0efoa5cuBKxLtxTnsUP5YJJCZfTloM71Spu0tMI,2915
  phoenix/server/api/types/SortDir.py,sha256=OUpXhlCzCxPoXSDkJJygEs9Rw9pMymfaZUG5zPTrw4Y,152
- phoenix/server/api/types/Span.py,sha256=kbUeDnuXdCDEpt2UAqyxj4Hz0JL53IbtDyamyeGgzJ4,16953
+ phoenix/server/api/types/Span.py,sha256=6GS6MpJ3f8P2LrQUe2TWPrPf7ENxmde_wisQkJguphw,16919
  phoenix/server/api/types/SpanAnnotation.py,sha256=6b5G-b_OoRvDL2ayWk7MkbqarLK-F-pQMx21CpUuNGY,1168
  phoenix/server/api/types/SystemApiKey.py,sha256=2ym8EgsTBIvxx1l9xZ-2YMovz58ZwYb_MaHBTJ9NH2E,166
  phoenix/server/api/types/TemplateLanguage.py,sha256=9yxW3zGXgHPnA35svT4tznDyRKGuaz_WlbcpiUtC7Ec,142
@@ -273,15 +273,15 @@ phoenix/server/static/apple-touch-icon-76x76.png,sha256=CT_xT12I0u2i0WU8JzBZBuOQ
  phoenix/server/static/apple-touch-icon.png,sha256=fOfpjqGpWYbJ0eAurKsyoZP1EAs6ZVooBJ_SGk2ZkDs,3801
  phoenix/server/static/favicon.ico,sha256=bY0vvCKRftemZfPShwZtE93DiiQdaYaozkPGwNFr6H8,34494
  phoenix/server/static/modernizr.js,sha256=mvK-XtkNqjOral-QvzoqsyOMECXIMu5BQwSVN_wcU9c,2564
- phoenix/server/static/.vite/manifest.json,sha256=ToIrbND1XCIzmfOfMwomFfDVuzLm9Guraz0xtvELOW8,1929
- phoenix/server/static/assets/components-BcvRmBnN.js,sha256=NS45I5Vt1azRNaNNZWJDO2qv1m5junln5PSbZ62iIJo,306054
- phoenix/server/static/assets/index-BF4RUiOz.js,sha256=0QMhyXmx-SNTtEoJnAUDoC4_TKd8qN4N1ABCtDA7jZ0,7285
- phoenix/server/static/assets/pages-CM_Zho_x.js,sha256=4EimxhNy3yGgwydDhTH_1sWNrtPoaQc4mISioDsEUh8,628225
- phoenix/server/static/assets/vendor-Bjm5T3cE.js,sha256=6kC6MC_f0HVJtmOO022zWk7ti6RIwD5eC_QBceDDjJU,10898295
+ phoenix/server/static/.vite/manifest.json,sha256=uCPxIoF7qBxFkaV0H9ur1eYr2sV8go_LqjcguNA6CbQ,1929
+ phoenix/server/static/assets/components-BXIz9ZO8.js,sha256=clW5yRW2_5BRWWn3FLFSPhCAVqTsGXJmfUTJpHflHf0,306000
+ phoenix/server/static/assets/index-DTut7g1y.js,sha256=iM98orZiM2-8YVnpvZgBgVkteOcvDGZgJo3tWQCxkqo,7290
+ phoenix/server/static/assets/pages-B8FpJuXu.js,sha256=y7Q9_wYtuaTanNRZk-OvN1ffiyN_6wCsERnHuqOHHUY,629525
+ phoenix/server/static/assets/vendor-BX8_Znqy.js,sha256=M5d1J040pAmV0PhGLJLz3-QQ1m3bogn54ekym9iaAPg,10898641
  phoenix/server/static/assets/vendor-DxkFTwjz.css,sha256=nZrkr0u6NNElFGvpWHk9GTHeGoibCXCli1bE7mXZGZg,1816
- phoenix/server/static/assets/vendor-arizeai-CQhWGEdL.js,sha256=CaTXxr2IuxlTr_R58IRptZyzFgZDsWALFfWfVmhU85Y,307005
- phoenix/server/static/assets/vendor-codemirror-CdtiO80y.js,sha256=Dw-1RGNxuzb-klAW-Gs7uFkocAVBxjmCCUc31sn4V8g,392711
- phoenix/server/static/assets/vendor-recharts-BqWon6Py.js,sha256=AHlTyMC6UvNgLHOhSfFBal2Knb8azXGzH2qlLuShoYs,282859
+ phoenix/server/static/assets/vendor-arizeai-CtHir-Ua.js,sha256=mb8CF7PUkSisMt5HjNC2_HZd48HST1LLCOrUnxZ63iI,307000
+ phoenix/server/static/assets/vendor-codemirror-DLlGiguX.js,sha256=0bqHkVJOI4zakkaJLSppKz87jnbPhu3c7my6qqYFycE,392709
+ phoenix/server/static/assets/vendor-recharts-CJRple0d.js,sha256=YiimiK6M9fdba3tItYH1sl1-Ye61dM67qfAdWpnBjq8,282859
  phoenix/server/static/assets/vendor-three-DwGkEfCM.js,sha256=0D12ZgKzfKCTSdSTKJBFR2RZO_xxeMXrqDp0AszZqHY,620972
  phoenix/server/templates/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  phoenix/server/templates/index.html,sha256=ram6sfy2obf_F053ay35V30v-mnRWZ86rK-PstXLy1c,4457
@@ -322,9 +322,9 @@ phoenix/utilities/project.py,sha256=auVpARXkDb-JgeX5f2aStyFIkeKvGwN9l7qrFeJMVxI,
  phoenix/utilities/re.py,sha256=x8Xbk-Wa6qDMAtUd_7JtZvKtrYEuMY-bchB0n163_5c,2006
  phoenix/utilities/span_store.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  phoenix/utilities/template_formatters.py,sha256=JuOyvukMPLDHa1uVNw0kCFBUnIxy02dwAWNZimdIZU4,2423
- arize_phoenix-5.9.1.dist-info/METADATA,sha256=7a966eolx4IkP09ctz4Iif96WmGCrGrCQlmTS7hdGK4,22613
- arize_phoenix-5.9.1.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
- arize_phoenix-5.9.1.dist-info/entry_points.txt,sha256=Pgpn8Upxx9P8z8joPXZWl2LlnAlGc3gcQoVchb06X1Q,94
- arize_phoenix-5.9.1.dist-info/licenses/IP_NOTICE,sha256=JBqyyCYYxGDfzQ0TtsQgjts41IJoa-hiwDrBjCb9gHM,469
- arize_phoenix-5.9.1.dist-info/licenses/LICENSE,sha256=HFkW9REuMOkvKRACuwLPT0hRydHb3zNg-fdFt94td18,3794
- arize_phoenix-5.9.1.dist-info/RECORD,,
+ arize_phoenix-5.10.0.dist-info/METADATA,sha256=MYCUDgCZbLC0v04dTkj7SxbQ_hbJZ2FHJS42vzg-Kv4,22614
+ arize_phoenix-5.10.0.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
+ arize_phoenix-5.10.0.dist-info/entry_points.txt,sha256=Pgpn8Upxx9P8z8joPXZWl2LlnAlGc3gcQoVchb06X1Q,94
+ arize_phoenix-5.10.0.dist-info/licenses/IP_NOTICE,sha256=JBqyyCYYxGDfzQ0TtsQgjts41IJoa-hiwDrBjCb9gHM,469
+ arize_phoenix-5.10.0.dist-info/licenses/LICENSE,sha256=HFkW9REuMOkvKRACuwLPT0hRydHb3zNg-fdFt94td18,3794
+ arize_phoenix-5.10.0.dist-info/RECORD,,
phoenix/config.py CHANGED
@@ -424,7 +424,9 @@ class OAuth2ClientConfig:
  f"An OpenID Connect configuration URL must be set for the {idp_name} OAuth2 IDP "
  f"via the {oidc_config_url_env_var} environment variable"
  )
- if urlparse(oidc_config_url).scheme != "https":
+ parsed_oidc_config_url = urlparse(oidc_config_url)
+ is_local_oidc_config_url = parsed_oidc_config_url.hostname in ("localhost", "127.0.0.1")
+ if parsed_oidc_config_url.scheme != "https" and not is_local_oidc_config_url:
  raise ValueError(
  f"Server metadata URL for {idp_name} OAuth2 IDP "
  "must be a valid URL using the https protocol"
phoenix/server/api/helpers/playground_clients.py CHANGED
@@ -9,7 +9,11 @@ from functools import wraps
  from typing import TYPE_CHECKING, Any, Hashable, Mapping, Optional, Union

  from openinference.instrumentation import safe_json_dumps
- from openinference.semconv.trace import SpanAttributes
+ from openinference.semconv.trace import (
+ OpenInferenceLLMProviderValues,
+ OpenInferenceLLMSystemValues,
+ SpanAttributes,
+ )
  from strawberry import UNSET
  from strawberry.scalars import JSON as JSONScalarType
  from typing_extensions import TypeAlias, assert_never
@@ -44,7 +48,7 @@ from phoenix.server.api.types.ChatCompletionSubscriptionPayload import (
  from phoenix.server.api.types.GenerativeProvider import GenerativeProviderKey

  if TYPE_CHECKING:
- from anthropic.types import MessageParam
+ from anthropic.types import MessageParam, TextBlockParam, ToolResultBlockParam
  from google.generativeai.types import ContentType
  from openai.types import CompletionUsage
  from openai.types.chat import ChatCompletionMessageParam, ChatCompletionMessageToolCallParam
@@ -255,6 +259,8 @@ class OpenAIStreamingClient(PlaygroundStreamingClient):
  from openai import RateLimitError as OpenAIRateLimitError

  super().__init__(model=model, api_key=api_key)
+ self._attributes[LLM_PROVIDER] = OpenInferenceLLMProviderValues.OPENAI.value
+ self._attributes[LLM_SYSTEM] = OpenInferenceLLMSystemValues.OPENAI.value
  self.client = AsyncOpenAI(api_key=api_key)
  self.model_name = model.name
  self.rate_limiter = PlaygroundRateLimiter(model.provider_key, OpenAIRateLimitError)
@@ -610,6 +616,8 @@ class AzureOpenAIStreamingClient(OpenAIStreamingClient):
  from openai import AsyncAzureOpenAI

  super().__init__(model=model, api_key=api_key)
+ self._attributes[LLM_PROVIDER] = OpenInferenceLLMProviderValues.AZURE.value
+ self._attributes[LLM_SYSTEM] = OpenInferenceLLMSystemValues.OPENAI.value
  if model.endpoint is None or model.api_version is None:
  raise ValueError("endpoint and api_version are required for Azure OpenAI models")
  self.client = AsyncAzureOpenAI(
@@ -638,6 +646,8 @@ class AnthropicStreamingClient(PlaygroundStreamingClient):
  import anthropic

  super().__init__(model=model, api_key=api_key)
+ self._attributes[LLM_PROVIDER] = OpenInferenceLLMProviderValues.ANTHROPIC.value
+ self._attributes[LLM_SYSTEM] = OpenInferenceLLMSystemValues.ANTHROPIC.value
  self.client = anthropic.AsyncAnthropic(api_key=api_key)
  self.model_name = model.name
  self.rate_limiter = PlaygroundRateLimiter(model.provider_key, anthropic.RateLimitError)
@@ -693,7 +703,6 @@ class AnthropicStreamingClient(PlaygroundStreamingClient):
  import anthropic.types as anthropic_types

  anthropic_messages, system_prompt = self._build_anthropic_messages(messages)
-
  anthropic_params = {
  "messages": anthropic_messages,
  "model": self.model_name,
@@ -751,19 +760,44 @@
  anthropic_messages: list["MessageParam"] = []
  system_prompt = ""
  for role, content, _tool_call_id, _tool_calls in messages:
+ tool_aware_content = self._anthropic_message_content(content, _tool_calls)
  if role == ChatCompletionMessageRole.USER:
- anthropic_messages.append({"role": "user", "content": content})
+ anthropic_messages.append({"role": "user", "content": tool_aware_content})
  elif role == ChatCompletionMessageRole.AI:
- anthropic_messages.append({"role": "assistant", "content": content})
+ anthropic_messages.append({"role": "assistant", "content": tool_aware_content})
  elif role == ChatCompletionMessageRole.SYSTEM:
  system_prompt += content + "\n"
  elif role == ChatCompletionMessageRole.TOOL:
- raise NotImplementedError
+ anthropic_messages.append(
+ {
+ "role": "user",
+ "content": [
+ {
+ "type": "tool_result",
+ "tool_use_id": _tool_call_id or "",
+ "content": content or "",
+ }
+ ],
+ }
+ )
  else:
  assert_never(role)

  return anthropic_messages, system_prompt

+ def _anthropic_message_content(
+ self, content: str, tool_calls: Optional[list[JSONScalarType]]
+ ) -> Union[str, list[Union["ToolResultBlockParam", "TextBlockParam"]]]:
+ if tool_calls:
+ # Anthropic combines tool calls and the reasoning text into a single message object
+ tool_use_content: list[Union["ToolResultBlockParam", "TextBlockParam"]] = []
+ if content:
+ tool_use_content.append({"type": "text", "text": content})
+ tool_use_content.extend(tool_calls)
+ return tool_use_content
+
+ return content
+

  @register_llm_client(
  provider_key=GenerativeProviderKey.GEMINI,
@@ -784,6 +818,8 @@ class GeminiStreamingClient(PlaygroundStreamingClient):
  import google.generativeai as google_genai

  super().__init__(model=model, api_key=api_key)
+ self._attributes[LLM_PROVIDER] = OpenInferenceLLMProviderValues.GOOGLE.value
+ self._attributes[LLM_SYSTEM] = OpenInferenceLLMSystemValues.VERTEXAI.value
  google_genai.configure(api_key=api_key)
  self.model_name = model.name

@@ -905,6 +941,8 @@ def initialize_playground_clients() -> None:
  pass


+ LLM_PROVIDER = SpanAttributes.LLM_PROVIDER
+ LLM_SYSTEM = SpanAttributes.LLM_SYSTEM
  LLM_TOKEN_COUNT_PROMPT = SpanAttributes.LLM_TOKEN_COUNT_PROMPT
  LLM_TOKEN_COUNT_COMPLETION = SpanAttributes.LLM_TOKEN_COUNT_COMPLETION
  LLM_TOKEN_COUNT_TOTAL = SpanAttributes.LLM_TOKEN_COUNT_TOTAL
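Beyond tagging each client with llm.provider and llm.system, the Anthropic client now forwards tool interactions instead of raising NotImplementedError: an assistant turn that issued tool calls is sent as a single content list mixing a text block with the tool_use blocks, and a TOOL-role message becomes a user message wrapping a tool_result block. A rough sketch of that mapping (standalone helpers with plain dicts, not the Phoenix class itself):

    from typing import Any, Optional, Union

    def anthropic_content(
        content: str, tool_calls: Optional[list[dict[str, Any]]]
    ) -> Union[str, list[dict[str, Any]]]:
        # Mirrors _anthropic_message_content: merge reasoning text and tool_use
        # blocks into one content list when the assistant made tool calls.
        if tool_calls:
            blocks: list[dict[str, Any]] = []
            if content:
                blocks.append({"type": "text", "text": content})
            blocks.extend(tool_calls)
            return blocks
        return content

    def tool_result_message(tool_call_id: Optional[str], content: str) -> dict[str, Any]:
        # TOOL-role messages are replayed to Anthropic as user messages carrying
        # a single tool_result block that references the originating tool_use id.
        return {
            "role": "user",
            "content": [
                {
                    "type": "tool_result",
                    "tool_use_id": tool_call_id or "",
                    "content": content or "",
                }
            ],
        }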
phoenix/server/api/helpers/playground_spans.py CHANGED
@@ -330,20 +330,45 @@ def llm_input_messages(
  tuple[ChatCompletionMessageRole, str, Optional[str], Optional[list[JSONScalarType]]]
  ],
  ) -> Iterator[tuple[str, Any]]:
- for i, (role, content, _tool_call_id, tool_calls) in enumerate(messages):
+ for i, (role, content, tool_call_id, tool_calls) in enumerate(messages):
  yield f"{LLM_INPUT_MESSAGES}.{i}.{MESSAGE_ROLE}", role.value.lower()
  yield f"{LLM_INPUT_MESSAGES}.{i}.{MESSAGE_CONTENT}", content
+ if role == ChatCompletionMessageRole.TOOL and tool_call_id:
+ # Anthropic tool result spans
+ yield f"{LLM_INPUT_MESSAGES}.{i}.{MESSAGE_TOOL_CALL_ID}", tool_call_id
+
  if tool_calls is not None:
  for tool_call_index, tool_call in enumerate(tool_calls):
- yield (
- f"{LLM_INPUT_MESSAGES}.{i}.{MESSAGE_TOOL_CALLS}.{tool_call_index}.{TOOL_CALL_FUNCTION_NAME}",
- tool_call["function"]["name"],
- )
- if arguments := tool_call["function"]["arguments"]:
+ if tool_call.get("type") == "tool_use":
+ # Anthropic tool call spans
+ yield (
+ f"{LLM_INPUT_MESSAGES}.{i}.{MESSAGE_TOOL_CALLS}.{tool_call_index}.{TOOL_CALL_FUNCTION_NAME}",
+ tool_call["name"],
+ )
  yield (
  f"{LLM_INPUT_MESSAGES}.{i}.{MESSAGE_TOOL_CALLS}.{tool_call_index}.{TOOL_CALL_FUNCTION_ARGUMENTS_JSON}",
- safe_json_dumps(jsonify(arguments)),
+ safe_json_dumps(jsonify(tool_call["input"])),
  )
+ yield (
+ f"{LLM_INPUT_MESSAGES}.{i}.{MESSAGE_TOOL_CALLS}.{tool_call_index}.{TOOL_CALL_ID}",
+ tool_call["id"],
+ )
+ elif tool_call_function := tool_call.get("function"):
+ # OpenAI tool call spans
+ yield (
+ f"{LLM_INPUT_MESSAGES}.{i}.{MESSAGE_TOOL_CALLS}.{tool_call_index}.{TOOL_CALL_FUNCTION_NAME}",
+ tool_call_function["name"],
+ )
+ if arguments := tool_call_function["arguments"]:
+ yield (
+ f"{LLM_INPUT_MESSAGES}.{i}.{MESSAGE_TOOL_CALLS}.{tool_call_index}.{TOOL_CALL_FUNCTION_ARGUMENTS_JSON}",
+ safe_json_dumps(jsonify(arguments)),
+ )
+ if tool_call_id := tool_call.get("id"):
+ yield (
+ f"{LLM_INPUT_MESSAGES}.{i}.{MESSAGE_TOOL_CALLS}.{tool_call_index}.{TOOL_CALL_ID}",
+ tool_call_id,
+ )


  def _llm_output_messages(
@@ -418,5 +443,6 @@ MESSAGE_TOOL_CALLS = MessageAttributes.MESSAGE_TOOL_CALLS

  TOOL_CALL_FUNCTION_NAME = ToolCallAttributes.TOOL_CALL_FUNCTION_NAME
  TOOL_CALL_FUNCTION_ARGUMENTS_JSON = ToolCallAttributes.TOOL_CALL_FUNCTION_ARGUMENTS_JSON
-
+ TOOL_CALL_ID = ToolCallAttributes.TOOL_CALL_ID
+ MESSAGE_TOOL_CALL_ID = MessageAttributes.MESSAGE_TOOL_CALL_ID
  TOOL_JSON_SCHEMA = ToolAttributes.TOOL_JSON_SCHEMA
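The updated llm_input_messages helper now recognizes two tool-call shapes when flattening playground messages into span attributes: Anthropic blocks (type "tool_use" with name, input, and id) and OpenAI calls (a nested function object plus an id), and it also records the tool call id on tool-result messages. A small illustration with made-up data; the attribute keys are written out by hand to approximate the OpenInference conventions, not taken verbatim from the semconv package:

    import json

    anthropic_call = {"type": "tool_use", "id": "toolu_1", "name": "get_weather", "input": {"city": "Paris"}}
    openai_call = {"id": "call_1", "function": {"name": "get_weather", "arguments": '{"city": "Paris"}'}}

    def flatten_tool_call(msg_index: int, call_index: int, tool_call: dict) -> dict[str, str]:
        # Approximate attribute-key layout for a tool call on an input message.
        prefix = f"llm.input_messages.{msg_index}.message.tool_calls.{call_index}"
        if tool_call.get("type") == "tool_use":  # Anthropic shape
            return {
                f"{prefix}.tool_call.function.name": tool_call["name"],
                f"{prefix}.tool_call.function.arguments": json.dumps(tool_call["input"]),
                f"{prefix}.tool_call.id": tool_call["id"],
            }
        function = tool_call["function"]  # OpenAI shape
        return {
            f"{prefix}.tool_call.function.name": function["name"],
            f"{prefix}.tool_call.function.arguments": function["arguments"],
            f"{prefix}.tool_call.id": tool_call["id"],
        }

    print(flatten_tool_call(0, 0, anthropic_call))
    print(flatten_tool_call(0, 0, openai_call))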
phoenix/server/api/subscriptions.py CHANGED
@@ -127,7 +127,7 @@ class Subscription:
  ):
  span.add_response_chunk(chunk)
  yield chunk
- span.set_attributes(llm_client.attributes)
+ span.set_attributes(llm_client.attributes)
  if span.status_message is not None:
  yield ChatCompletionSubscriptionError(message=span.status_message)
  async with info.context.db() as session:
phoenix/server/api/types/GenerativeProvider.py CHANGED
@@ -1,6 +1,10 @@
  from enum import Enum
+ from typing import Any, ClassVar, Optional, Union

  import strawberry
+ from openinference.semconv.trace import OpenInferenceLLMProviderValues, SpanAttributes
+
+ from phoenix.trace.attributes import get_attribute_value


  @strawberry.enum
@@ -16,6 +20,20 @@ class GenerativeProvider:
  name: str
  key: GenerativeProviderKey

+ model_provider_to_model_prefix_map: ClassVar[dict[GenerativeProviderKey, list[str]]] = {
+ GenerativeProviderKey.AZURE_OPENAI: [],
+ GenerativeProviderKey.ANTHROPIC: ["claude"],
+ GenerativeProviderKey.OPENAI: ["gpt", "o1"],
+ GenerativeProviderKey.GEMINI: ["gemini"],
+ }
+
+ attribute_provider_to_generative_provider_map: ClassVar[dict[str, GenerativeProviderKey]] = {
+ OpenInferenceLLMProviderValues.OPENAI.value: GenerativeProviderKey.OPENAI,
+ OpenInferenceLLMProviderValues.ANTHROPIC.value: GenerativeProviderKey.ANTHROPIC,
+ OpenInferenceLLMProviderValues.AZURE.value: GenerativeProviderKey.AZURE_OPENAI,
+ OpenInferenceLLMProviderValues.GOOGLE.value: GenerativeProviderKey.GEMINI,
+ }
+
  @strawberry.field
  async def dependencies(self) -> list[str]:
  from phoenix.server.api.helpers.playground_registry import (
@@ -39,3 +57,29 @@ class GenerativeProvider:
  if default_client:
  return default_client.dependencies_are_installed()
  return False
+
+ @classmethod
+ def _infer_model_provider_from_model_name(
+ cls,
+ model_name: str,
+ ) -> Union[GenerativeProviderKey, None]:
+ for provider, prefixes in cls.model_provider_to_model_prefix_map.items():
+ if any(prefix.lower() in model_name.lower() for prefix in prefixes):
+ return provider
+ return None
+
+ @classmethod
+ def get_model_provider_from_attributes(
+ cls,
+ attributes: dict[str, Any],
+ ) -> Union[GenerativeProviderKey, None]:
+ llm_provider: Optional[str] = get_attribute_value(attributes, SpanAttributes.LLM_PROVIDER)
+
+ if isinstance(llm_provider, str) and (
+ provider := cls.attribute_provider_to_generative_provider_map.get(llm_provider)
+ ):
+ return provider
+ llm_model = get_attribute_value(attributes, SpanAttributes.LLM_MODEL_NAME)
+ if isinstance(llm_model, str):
+ return cls._infer_model_provider_from_model_name(llm_model)
+ return None
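Taken together with the client changes above, these class-level maps let the API resolve a playground provider for an arbitrary span: it first trusts an explicit llm.provider attribute and only then falls back to prefix-matching the model name (claude, gpt/o1, gemini). A simplified standalone sketch of that lookup order, with the attribute keys and provider values spelled out literally rather than taken from the semconv constants:

    from typing import Any, Optional

    PROVIDER_BY_ATTRIBUTE = {"openai": "OPENAI", "anthropic": "ANTHROPIC", "azure": "AZURE_OPENAI", "google": "GEMINI"}
    PREFIXES_BY_PROVIDER = {"ANTHROPIC": ("claude",), "OPENAI": ("gpt", "o1"), "GEMINI": ("gemini",)}

    def infer_provider(attributes: dict[str, Any]) -> Optional[str]:
        # 1) Prefer the explicit llm.provider attribute written by the playground clients.
        provider = attributes.get("llm.provider")
        if isinstance(provider, str) and provider in PROVIDER_BY_ATTRIBUTE:
            return PROVIDER_BY_ATTRIBUTE[provider]
        # 2) Otherwise guess from the model name prefix.
        model = attributes.get("llm.model_name")
        if isinstance(model, str):
            for key, prefixes in PREFIXES_BY_PROVIDER.items():
                if any(p in model.lower() for p in prefixes):
                    return key
        return None

    assert infer_provider({"llm.provider": "anthropic"}) == "ANTHROPIC"
    assert infer_provider({"llm.model_name": "gpt-4o-mini"}) == "OPENAI"
    assert infer_provider({"llm.model_name": "mistral-large"}) is None  # Span.py (below) then returns []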
phoenix/server/api/types/Span.py CHANGED
@@ -25,7 +25,7 @@ from phoenix.server.api.input_types.SpanAnnotationSort import (
  SpanAnnotationColumn,
  SpanAnnotationSort,
  )
- from phoenix.server.api.types.GenerativeProvider import GenerativeProviderKey
+ from phoenix.server.api.types.GenerativeProvider import GenerativeProvider
  from phoenix.server.api.types.SortDir import SortDir
  from phoenix.server.api.types.SpanAnnotation import to_gql_span_annotation
  from phoenix.trace.attributes import get_attribute_value
@@ -300,10 +300,9 @@ class Span(Node):

  db_span = self.db_span
  attributes = db_span.attributes
- llm_provider: GenerativeProviderKey = (
- get_attribute_value(attributes, SpanAttributes.LLM_PROVIDER)
- or GenerativeProviderKey.OPENAI
- )
+ llm_provider = GenerativeProvider.get_model_provider_from_attributes(attributes)
+ if llm_provider is None:
+ return []
  llm_model = get_attribute_value(attributes, SpanAttributes.LLM_MODEL_NAME)
  invocation_parameters = get_attribute_value(
  attributes, SpanAttributes.LLM_INVOCATION_PARAMETERS
phoenix/server/static/.vite/manifest.json CHANGED
@@ -1,32 +1,32 @@
  {
- "_components-BcvRmBnN.js": {
- "file": "assets/components-BcvRmBnN.js",
+ "_components-BXIz9ZO8.js": {
+ "file": "assets/components-BXIz9ZO8.js",
  "name": "components",
  "imports": [
- "_vendor-Bjm5T3cE.js",
- "_vendor-arizeai-CQhWGEdL.js",
- "_vendor-codemirror-CdtiO80y.js",
- "_pages-CM_Zho_x.js",
+ "_vendor-BX8_Znqy.js",
+ "_pages-B8FpJuXu.js",
+ "_vendor-arizeai-CtHir-Ua.js",
+ "_vendor-codemirror-DLlGiguX.js",
  "_vendor-three-DwGkEfCM.js"
  ]
  },
- "_pages-CM_Zho_x.js": {
- "file": "assets/pages-CM_Zho_x.js",
+ "_pages-B8FpJuXu.js": {
+ "file": "assets/pages-B8FpJuXu.js",
  "name": "pages",
  "imports": [
- "_vendor-Bjm5T3cE.js",
- "_vendor-arizeai-CQhWGEdL.js",
- "_components-BcvRmBnN.js",
- "_vendor-recharts-BqWon6Py.js",
- "_vendor-codemirror-CdtiO80y.js"
+ "_vendor-BX8_Znqy.js",
+ "_vendor-arizeai-CtHir-Ua.js",
+ "_components-BXIz9ZO8.js",
+ "_vendor-recharts-CJRple0d.js",
+ "_vendor-codemirror-DLlGiguX.js"
  ]
  },
  "_vendor-!~{003}~.js": {
  "file": "assets/vendor-DxkFTwjz.css",
  "src": "_vendor-!~{003}~.js"
  },
- "_vendor-Bjm5T3cE.js": {
- "file": "assets/vendor-Bjm5T3cE.js",
+ "_vendor-BX8_Znqy.js": {
+ "file": "assets/vendor-BX8_Znqy.js",
  "name": "vendor",
  "imports": [
  "_vendor-three-DwGkEfCM.js"
@@ -35,25 +35,25 @@
  "assets/vendor-DxkFTwjz.css"
  ]
  },
- "_vendor-arizeai-CQhWGEdL.js": {
- "file": "assets/vendor-arizeai-CQhWGEdL.js",
+ "_vendor-arizeai-CtHir-Ua.js": {
+ "file": "assets/vendor-arizeai-CtHir-Ua.js",
  "name": "vendor-arizeai",
  "imports": [
- "_vendor-Bjm5T3cE.js"
+ "_vendor-BX8_Znqy.js"
  ]
  },
- "_vendor-codemirror-CdtiO80y.js": {
- "file": "assets/vendor-codemirror-CdtiO80y.js",
+ "_vendor-codemirror-DLlGiguX.js": {
+ "file": "assets/vendor-codemirror-DLlGiguX.js",
  "name": "vendor-codemirror",
  "imports": [
- "_vendor-Bjm5T3cE.js"
+ "_vendor-BX8_Znqy.js"
  ]
  },
- "_vendor-recharts-BqWon6Py.js": {
- "file": "assets/vendor-recharts-BqWon6Py.js",
+ "_vendor-recharts-CJRple0d.js": {
+ "file": "assets/vendor-recharts-CJRple0d.js",
  "name": "vendor-recharts",
  "imports": [
- "_vendor-Bjm5T3cE.js"
+ "_vendor-BX8_Znqy.js"
  ]
  },
  "_vendor-three-DwGkEfCM.js": {
@@ -61,18 +61,18 @@
  "name": "vendor-three"
  },
  "index.tsx": {
- "file": "assets/index-BF4RUiOz.js",
+ "file": "assets/index-DTut7g1y.js",
  "name": "index",
  "src": "index.tsx",
  "isEntry": true,
  "imports": [
- "_vendor-Bjm5T3cE.js",
- "_vendor-arizeai-CQhWGEdL.js",
- "_pages-CM_Zho_x.js",
- "_components-BcvRmBnN.js",
+ "_vendor-BX8_Znqy.js",
+ "_vendor-arizeai-CtHir-Ua.js",
+ "_pages-B8FpJuXu.js",
+ "_components-BXIz9ZO8.js",
  "_vendor-three-DwGkEfCM.js",
- "_vendor-recharts-BqWon6Py.js",
- "_vendor-codemirror-CdtiO80y.js"
+ "_vendor-recharts-CJRple0d.js",
+ "_vendor-codemirror-DLlGiguX.js"
  ]
  }
  }