arize-phoenix 10.13.2__py3-none-any.whl → 10.15.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this release of arize-phoenix has been flagged as potentially problematic.
Files changed (25)
  1. {arize_phoenix-10.13.2.dist-info → arize_phoenix-10.15.0.dist-info}/METADATA +3 -2
  2. {arize_phoenix-10.13.2.dist-info → arize_phoenix-10.15.0.dist-info}/RECORD +25 -24
  3. phoenix/config.py +3 -1
  4. phoenix/db/types/model_provider.py +1 -0
  5. phoenix/server/api/helpers/playground_clients.py +459 -0
  6. phoenix/server/api/helpers/prompts/conversions/aws.py +83 -0
  7. phoenix/server/api/helpers/prompts/models.py +67 -0
  8. phoenix/server/api/input_types/GenerativeModelInput.py +2 -0
  9. phoenix/server/api/types/GenerativeProvider.py +33 -20
  10. phoenix/server/email/sender.py +2 -2
  11. phoenix/server/main.py +9 -6
  12. phoenix/server/static/.vite/manifest.json +36 -36
  13. phoenix/server/static/assets/{components-D-yJsfVa.js → components-SpUMF1qV.js} +257 -257
  14. phoenix/server/static/assets/{index-BXRcSHM6.js → index-DIlhmbjB.js} +3 -3
  15. phoenix/server/static/assets/{pages-Dn4XFHMU.js → pages-YX47cEoQ.js} +369 -386
  16. phoenix/server/static/assets/{vendor-BKYy4SMr.js → vendor-DCZoBorz.js} +2 -2
  17. phoenix/server/static/assets/{vendor-arizeai-CaqmrQdQ.js → vendor-arizeai-Ckci3irT.js} +1 -1
  18. phoenix/server/static/assets/{vendor-codemirror-BlmFw5CA.js → vendor-codemirror-BODM513D.js} +1 -1
  19. phoenix/server/static/assets/{vendor-recharts-Bz7zqjbW.js → vendor-recharts-C9O2a-N3.js} +1 -1
  20. phoenix/server/static/assets/{vendor-shiki-BitvudxD.js → vendor-shiki-Dq54rRC7.js} +1 -1
  21. phoenix/version.py +1 -1
  22. {arize_phoenix-10.13.2.dist-info → arize_phoenix-10.15.0.dist-info}/WHEEL +0 -0
  23. {arize_phoenix-10.13.2.dist-info → arize_phoenix-10.15.0.dist-info}/entry_points.txt +0 -0
  24. {arize_phoenix-10.13.2.dist-info → arize_phoenix-10.15.0.dist-info}/licenses/IP_NOTICE +0 -0
  25. {arize_phoenix-10.13.2.dist-info → arize_phoenix-10.15.0.dist-info}/licenses/LICENSE +0 -0
{arize_phoenix-10.13.2.dist-info → arize_phoenix-10.15.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: arize-phoenix
- Version: 10.13.2
+ Version: 10.15.0
  Summary: AI Observability and Evaluation
  Project-URL: Documentation, https://arize.com/docs/phoenix/
  Project-URL: Issues, https://github.com/Arize-ai/phoenix/issues
@@ -58,6 +58,7 @@ Provides-Extra: container
  Requires-Dist: aiohttp; extra == 'container'
  Requires-Dist: anthropic>=0.49.0; extra == 'container'
  Requires-Dist: azure-identity; extra == 'container'
+ Requires-Dist: boto3; extra == 'container'
  Requires-Dist: fast-hdbscan>=0.2.0; extra == 'container'
  Requires-Dist: google-generativeai; extra == 'container'
  Requires-Dist: numba>=0.60.0; extra == 'container'
@@ -218,7 +219,7 @@ Phoenix is built on top of OpenTelemetry and is vendor, language, and framework
  | [LangChain](https://arize.com/docs/phoenix/tracing/integrations-tracing/langchain) | `openinference-instrumentation-langchain` | [![PyPI Version](https://img.shields.io/pypi/v/openinference-instrumentation-langchain.svg)](https://pypi.python.org/pypi/openinference-instrumentation-langchain) |
  | [MistralAI](https://arize.com/docs/phoenix/tracing/integrations-tracing/mistralai) | `openinference-instrumentation-mistralai` | [![PyPI Version](https://img.shields.io/pypi/v/openinference-instrumentation-mistralai.svg)](https://pypi.python.org/pypi/openinference-instrumentation-mistralai) |
  | [Google GenAI](https://arize.com/docs/phoenix/tracing/integrations-tracing/google-gen-ai) | `openinference-instrumentation-google-genai` | [![PyPI Version](https://img.shields.io/pypi/v/openinference-instrumentation-google-genai.svg)](https://pypi.python.org/pypi/openinference-instrumentation-google-genai) |
- | [Google ADK](https://arize.com/docs/phoenix/tracing/integrations-tracing/google-adk) | `openinference-instrumentation-google-adk` | [![PyPI Version](https://img.shields.io/pypi/v/openinference-instrumentation-google-adk.svg)](https://pypi.python.org/pypi/openinference-instrumentation-google-adk) |
+ | [Google ADK](https://arize.com/docs/phoenix/integrations/llm-providers/google-gen-ai/google-adk-tracing) | `openinference-instrumentation-google-adk` | [![PyPI Version](https://img.shields.io/pypi/v/openinference-instrumentation-google-adk.svg)](https://pypi.python.org/pypi/openinference-instrumentation-google-adk) |
  | [Guardrails](https://arize.com/docs/phoenix/tracing/integrations-tracing/guardrails) | `openinference-instrumentation-guardrails` | [![PyPI Version](https://img.shields.io/pypi/v/openinference-instrumentation-guardrails.svg)](https://pypi.python.org/pypi/openinference-instrumentation-guardrails) |
  | [VertexAI](https://arize.com/docs/phoenix/tracing/integrations-tracing/vertexai) | `openinference-instrumentation-vertexai` | [![PyPI Version](https://img.shields.io/pypi/v/openinference-instrumentation-vertexai.svg)](https://pypi.python.org/pypi/openinference-instrumentation-vertexai) |
  | [CrewAI](https://arize.com/docs/phoenix/tracing/integrations-tracing/crewai) | `openinference-instrumentation-crewai` | [![PyPI Version](https://img.shields.io/pypi/v/openinference-instrumentation-crewai.svg)](https://pypi.python.org/pypi/openinference-instrumentation-crewai) |
{arize_phoenix-10.13.2.dist-info → arize_phoenix-10.15.0.dist-info}/RECORD CHANGED
@@ -1,12 +1,12 @@
  phoenix/__init__.py,sha256=xkpXH76HFbEDCq8IhiFp-2GnEHx39xPMdOpV5Skew1w,5481
  phoenix/auth.py,sha256=yW78f1xWNjTE30ACGUM14nOd5BzkukhlzA9B45kSUkM,11053
- phoenix/config.py,sha256=1K086wVZDsu8GC1qwk3EBe2Uuw0ZSAhiZhI62PlspaU,57463
+ phoenix/config.py,sha256=qWSfDE4xH_83XawfFH2oxawx2aBNWA64E_b69aZSi8I,57571
  phoenix/datetime_utils.py,sha256=iJzNG6YJ6V7_u8B2iA7P2Z26FyxYbOPtx0dhJ7kNDHA,3398
  phoenix/exceptions.py,sha256=n2L2KKuecrdflB9MsCdAYCiSEvGJptIsfRkXMoJle7A,169
  phoenix/py.typed,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
  phoenix/services.py,sha256=ngkyKGVatX3cO2WJdo2hKdaVKP-xJCMvqthvga6kJss,5196
  phoenix/settings.py,sha256=2kHfT3BNOVd4dAO1bq-syEQbHSG8oX2-7NhOwK2QREk,896
- phoenix/version.py,sha256=FMFeYOnSdHf5LB9PTDzKa9mK3ewZi3uYY2A6xGvYYGY,24
+ phoenix/version.py,sha256=jpWizMuX7YqQHh4siflLCEQbxrdSqz8Okj76Y23IUME,24
  phoenix/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  phoenix/core/embedding_dimension.py,sha256=zKGbcvwOXgLf-yrJBpQyKtd-LEOPRKHnUToyAU8Owis,87
  phoenix/core/model.py,sha256=qBFraOtmwCCnWJltKNP18DDG0mULXigytlFsa6YOz6k,4837
@@ -53,7 +53,7 @@ phoenix/db/types/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
  phoenix/db/types/annotation_configs.py,sha256=keiQ9mzLZWE8Eqsu1xFxQBXvBiym0s-yPzoL8qIR4PY,2904
  phoenix/db/types/db_models.py,sha256=nMSd9gWHwObnVO3_slztlHqoeh04czS-Jxu-omS6M6E,1135
  phoenix/db/types/identifier.py,sha256=Opr3_1di6e5ncrBDn30WfBSr-jN_VGBnkkA4BMuSoyc,244
- phoenix/db/types/model_provider.py,sha256=vrG0yIQqy-SwvRJ1aGJqpCjPZ29vRAi6550CnvSyj2E,221
+ phoenix/db/types/model_provider.py,sha256=zKYGcEQqbAxtPwnq5dL0fYPgDC8nrh_ABLBMR94___4,237
  phoenix/db/types/trace_retention.py,sha256=fyqAQCvDiD7mpJ_WUqbPyQvuSdERof4DpKpHLJsdROk,9897
  phoenix/experiments/__init__.py,sha256=6JGwgUd7xCbGpuHqYZlsmErmYvVgv7N_j43bn3dUqsk,123
  phoenix/experiments/functions.py,sha256=QoNkMW_EamcFyDBwq4WzKkmHVXD5ZFW9MlUaGRssrTQ,38227
@@ -96,7 +96,7 @@ phoenix/server/dml_event.py,sha256=MjJmVEKytq75chBOSyvYDusUnEbg1pHpIjR3pZkUaJA,2
  phoenix/server/dml_event_handler.py,sha256=EZLXmCvx4pJrCkz29gxwKwmvmUkTtPCHw6klR-XM8qE,8258
  phoenix/server/grpc_server.py,sha256=dod29zE_Zlir7NyLcdVM8GH3P8sy-9ykzfaBfVifyE4,4656
  phoenix/server/jwt_store.py,sha256=B6uVildN_dQDTG_-aHHvuVSI7wIVK1yvED-_y6se2GU,16905
- phoenix/server/main.py,sha256=j00TIU7QYOIXaJW9EpqsjEACKtwtSy70s0zWdwfuPw0,18436
+ phoenix/server/main.py,sha256=SvLh2gB1F1Rh2LMcEx-W5gtOoGTpBjGl1tT10fG62Ns,18792
  phoenix/server/oauth2.py,sha256=GvUqZBoZ5dG-l2G1RMl1SUcN10jNAjaMXFznMSWz2Zs,3336
  phoenix/server/prometheus.py,sha256=1KjvSfjSa2-BPjDybVMM_Kag316CsN-Zwt64YNr_snc,7825
  phoenix/server/rate_limiters.py,sha256=cFc73D2NaxqNZZDbwfIDw4So-fRVOJPBtqxOZ8Qky_s,7155
@@ -158,13 +158,14 @@ phoenix/server/api/helpers/__init__.py,sha256=m2-xaSPqUiSs91k62JaRDjFNfl-1byxBfY
  phoenix/server/api/helpers/annotations.py,sha256=9gMXKpMTfWEChoSCnvdWYuyB0hlSnNOp-qUdar9Vono,262
  phoenix/server/api/helpers/dataset_helpers.py,sha256=3bdGBoUzqrtg-sr5p2wpQLOU6dhg_3TKFHNeJj8p0TU,9155
  phoenix/server/api/helpers/experiment_run_filters.py,sha256=DOnVwrmn39eAkk2mwuZP8kIcAnR5jrOgllEwWSjsw94,29893
- phoenix/server/api/helpers/playground_clients.py,sha256=laFDXVnj-n5-nze5OeJSLT5nwdz2IWFTVccKNcLqK7U,47932
+ phoenix/server/api/helpers/playground_clients.py,sha256=7QdE8WsggSCOVm4pp94nqg2QmAWni9ygHl7aeL2EkeY,66648
  phoenix/server/api/helpers/playground_registry.py,sha256=CPLMziFB2wmr-dfbx7VbzO2f8YIG_k5RftzvGXYGQ1w,2570
  phoenix/server/api/helpers/playground_spans.py,sha256=QpXwPl_fFNwm_iA1A77XApUyXMl1aDmonw8aXuNZ_4k,17132
  phoenix/server/api/helpers/prompts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- phoenix/server/api/helpers/prompts/models.py,sha256=NXdPfs2ktl-btP33qVUXVbzv6GEgadFLurO6QLUw97w,21208
+ phoenix/server/api/helpers/prompts/models.py,sha256=nlPtLZaGcHfWNRR0iNRaBUv8eoKOnoGqRm6zadrTt0I,23547
  phoenix/server/api/helpers/prompts/conversions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  phoenix/server/api/helpers/prompts/conversions/anthropic.py,sha256=ZT--UqBwoGf7QMusajB6aeB7zyWGttaZigb113kgiY8,3571
+ phoenix/server/api/helpers/prompts/conversions/aws.py,sha256=6vaT8K13r0bMXB9XHA8qY1MCjVsmR9TO0VIwyBMjQoY,2941
  phoenix/server/api/helpers/prompts/conversions/openai.py,sha256=a43WAftFn_me6ePHDufqvlg-4Z2C31owUSsqYC0YUP8,2589
  phoenix/server/api/input_types/AddExamplesToDatasetInput.py,sha256=mIQz0S_z8YdrktKIY6RCvtNJ2yZF9pYvTGgasUsI-54,430
  phoenix/server/api/input_types/AddSpansToDatasetInput.py,sha256=-StSstyMAVrba3tG1U30b-srkKCtu_svflQuSM19iJA,362
@@ -188,7 +189,7 @@ phoenix/server/api/input_types/DeleteExperimentsInput.py,sha256=4d9N0vSLYbuysAam
  phoenix/server/api/input_types/DimensionFilter.py,sha256=eBYcn7ECSJQlEePvbStqkHBRicbIL4vEAzFJwX7bacQ,3137
  phoenix/server/api/input_types/DimensionInput.py,sha256=Vfx5FmiMKey4-EHDQsQRPzSAMRJMN5oVMLDUl4NKAa8,164
  phoenix/server/api/input_types/GenerativeCredentialInput.py,sha256=sEM9UtgDMMuhImLwGXctD8BWDs2V3hNQ1mosoklRZvc,219
- phoenix/server/api/input_types/GenerativeModelInput.py,sha256=n6OCkX44I1AIovMAHCxy8SvqPKDb_BYDPA-fn_JnckQ,634
+ phoenix/server/api/input_types/GenerativeModelInput.py,sha256=ceJ65f7Mf1JmOuH9TV3KJYbNnQ-_Z85Dl4_vPMB1K_o,713
  phoenix/server/api/input_types/Granularity.py,sha256=dbBlD_GsIBa8_xrx4JlLuR59bQ0NRB5H-cv1zvcb-cw,2299
  phoenix/server/api/input_types/InvocationParameters.py,sha256=62xL0iIKvuQherkuJaJ6Lha4TTEoYLpvH-pEP9awK6k,5260
  phoenix/server/api/input_types/PatchAnnotationInput.py,sha256=2wxC-ibQU59I28amTxLMYMo9SSvJwAIor0w0YvS0e48,676
@@ -286,7 +287,7 @@ phoenix/server/api/types/ExperimentRunAnnotation.py,sha256=YGw5zIbjRXUK3zH475DnE
  phoenix/server/api/types/ExportedFile.py,sha256=e3GTn7B5LgsTbqiwjhMCQH7VsiqXitrBO4aCMS1lHsg,163
  phoenix/server/api/types/Functionality.py,sha256=zDDl2bANIqjwfooSOHg-VQk6-wQy05mREwjV_-VbSIg,262
  phoenix/server/api/types/GenerativeModel.py,sha256=P7eBUMXbeqaLwSSGBKdZy3a5gOLd9I0fuP8o1st6H08,193
- phoenix/server/api/types/GenerativeProvider.py,sha256=rWMJYJfm8wAIl6pxP3ARKLHf2zaAhJ7_yv0ulXv7Wn4,6087
+ phoenix/server/api/types/GenerativeProvider.py,sha256=blXHIzZwe-xlu3-iF2dexvqnb4xxiD2XynS4Vw0iLx4,6750
  phoenix/server/api/types/Identifier.py,sha256=n3rxpoKNCwEvZu7QY8yr7g3AW2mU-U62BxFXYaiHLKk,306
  phoenix/server/api/types/Inferences.py,sha256=wv88PjcK-KwnzmTdukiAX9EV2KX4GqsKXVAUm1JtnDA,3383
  phoenix/server/api/types/InferencesRole.py,sha256=mLfeHpyhGUVX1-tWzT9IwC_cD18BZrD3RA4YsHYuSpA,595
@@ -332,7 +333,7 @@ phoenix/server/cost_tracking/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NM
  phoenix/server/cost_tracking/cost_lookup.py,sha256=c9COURDSW-LFAeuX1k2PX-kKpy8WZeIiwwjJr_YZOqY,9416
  phoenix/server/cost_tracking/model_cost_manifest.json,sha256=tlOYj69-K0ru53ql3UtX-ynRU_J3C_g5BUGZR6aSirM,19270
  phoenix/server/email/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- phoenix/server/email/sender.py,sha256=eC6RcLANVJH0mh20mGZ2qr-bU-OWo9po2e5og2tMzJw,4127
+ phoenix/server/email/sender.py,sha256=kH94CtAQACyu9KgtVBLmJwMNICxY1XVtbfthibAOC-8,4067
  phoenix/server/email/types.py,sha256=IO2bTtCh-1cve-xiM4MWnunCCVNOQ3Z2cqTqF7vH-do,466
  phoenix/server/email/templates/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  phoenix/server/email/templates/password_reset.html,sha256=jv0Pe-06JloPZcubRWxPdAFHYEn9eDj_4SjmuoIwshI,441
@@ -350,16 +351,16 @@ phoenix/server/static/apple-touch-icon-76x76.png,sha256=CT_xT12I0u2i0WU8JzBZBuOQ
  phoenix/server/static/apple-touch-icon.png,sha256=fOfpjqGpWYbJ0eAurKsyoZP1EAs6ZVooBJ_SGk2ZkDs,3801
  phoenix/server/static/favicon.ico,sha256=bY0vvCKRftemZfPShwZtE93DiiQdaYaozkPGwNFr6H8,34494
  phoenix/server/static/modernizr.js,sha256=mvK-XtkNqjOral-QvzoqsyOMECXIMu5BQwSVN_wcU9c,2564
- phoenix/server/static/.vite/manifest.json,sha256=2GjIVsxtqZR-W8GiECWeJ70uvc5YkZ5F9HjZHNopO4w,2165
- phoenix/server/static/assets/components-D-yJsfVa.js,sha256=F_pbfhJ-b_zJ6Fjksart0Oub_5u7DuqLKnnKzM6irJo,566746
- phoenix/server/static/assets/index-BXRcSHM6.js,sha256=I6uVSHSqpGWkjNyMy4DhAGZTJaNfBnbh5CUC910tmWs,61125
- phoenix/server/static/assets/pages-Dn4XFHMU.js,sha256=Z-SO-VnjVkCdL2es8uf1qZoQvvjNiQ8zS_Yg-t68Rn8,1061315
- phoenix/server/static/assets/vendor-BKYy4SMr.js,sha256=lUYFHNxWckrqWult9uTiNwc6-Qv5Bfr1IcjrHoos1oU,2735976
+ phoenix/server/static/.vite/manifest.json,sha256=Y-z3nmvX-iQaJQgQqKOx0pmLPxCcQKIXlOxEdf93p7U,2165
+ phoenix/server/static/assets/components-SpUMF1qV.js,sha256=308uVnx0PRUHJivRbpcxWOICksl87Mqg6dOsCskOBJ4,571610
+ phoenix/server/static/assets/index-DIlhmbjB.js,sha256=SNXaSGqR8bCnJoX09au6MNGKULaeRDWpFHKXxTB1dzI,61125
+ phoenix/server/static/assets/pages-YX47cEoQ.js,sha256=1QQjMVGrxbdcLuKlDishQ-M4w4-BHeEAH19_mKr9ZUY,1065207
+ phoenix/server/static/assets/vendor-DCZoBorz.js,sha256=frseZBHZYP76tArtLTfi4U93GacwBT5Q8uIoAJVuB5g,2736000
  phoenix/server/static/assets/vendor-WIZid84E.css,sha256=spZD2r7XL5GfLO13ln-IuXfnjAref8l6g_n_AvxxOlI,5517
- phoenix/server/static/assets/vendor-arizeai-CaqmrQdQ.js,sha256=LX_KD8wdk8F7lQlpwybw9f6c5FPgolO1zOQCOGpQRBE,181763
- phoenix/server/static/assets/vendor-codemirror-BlmFw5CA.js,sha256=QODyQdDaw_c_n-qaHcjfGxzU2ECerJcAVtnaF-8L6O4,781264
- phoenix/server/static/assets/vendor-recharts-Bz7zqjbW.js,sha256=c7rl5xi-366k-e16Udyr6M_ZADAZGELnUOJrBR-1PBc,282150
- phoenix/server/static/assets/vendor-shiki-BitvudxD.js,sha256=cVscU0RuS8MzpAVyJc4N28iswI9hNRUM_9rUO0RR0ug,8980312
+ phoenix/server/static/assets/vendor-arizeai-Ckci3irT.js,sha256=mvXNc2HVkfkRYdt7hKi645pZRZsw9y4rb5lB0mtTB9M,181763
+ phoenix/server/static/assets/vendor-codemirror-BODM513D.js,sha256=KqaMvoGxjvY1zT5I1A3xBW7cm3s_q8UoGdebxO_ZXZI,781264
+ phoenix/server/static/assets/vendor-recharts-C9O2a-N3.js,sha256=jfbV8EPYh47srUYI8Zw_-SM034D0SHaqKVnzldXgbos,282150
+ phoenix/server/static/assets/vendor-shiki-Dq54rRC7.js,sha256=TmLQSXawTOMzS2o06MqwGLwhUOfr7VgeFscIE0dLFhk,8980312
  phoenix/server/static/assets/vendor-three-C5WAXd5r.js,sha256=ELkg06u70N7h8oFmvqdoHyPuUf9VgGEWeT4LKFx4VWo,620975
  phoenix/server/templates/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  phoenix/server/templates/index.html,sha256=TxaZTZKUz7-xQ3XlPO3DAPMj6S1rMEr5v6g1UmgaW70,6761
@@ -400,9 +401,9 @@ phoenix/utilities/project.py,sha256=auVpARXkDb-JgeX5f2aStyFIkeKvGwN9l7qrFeJMVxI,
  phoenix/utilities/re.py,sha256=6YyUWIkv0zc2SigsxfOWIHzdpjKA_TZo2iqKq7zJKvw,2081
  phoenix/utilities/span_store.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  phoenix/utilities/template_formatters.py,sha256=gh9PJD6WEGw7TEYXfSst1UR4pWWwmjxMLrDVQ_CkpkQ,2779
- arize_phoenix-10.13.2.dist-info/METADATA,sha256=9o9REU_H5SBEINtyPUxfM8pytKBs3rNBAn9AU1IlZBI,27307
- arize_phoenix-10.13.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- arize_phoenix-10.13.2.dist-info/entry_points.txt,sha256=Pgpn8Upxx9P8z8joPXZWl2LlnAlGc3gcQoVchb06X1Q,94
- arize_phoenix-10.13.2.dist-info/licenses/IP_NOTICE,sha256=JBqyyCYYxGDfzQ0TtsQgjts41IJoa-hiwDrBjCb9gHM,469
- arize_phoenix-10.13.2.dist-info/licenses/LICENSE,sha256=HFkW9REuMOkvKRACuwLPT0hRydHb3zNg-fdFt94td18,3794
- arize_phoenix-10.13.2.dist-info/RECORD,,
+ arize_phoenix-10.15.0.dist-info/METADATA,sha256=eujKx3278NoP3MqiE1KFzAuRrmivH7_B3pMl6BCJmls,27370
+ arize_phoenix-10.15.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ arize_phoenix-10.15.0.dist-info/entry_points.txt,sha256=Pgpn8Upxx9P8z8joPXZWl2LlnAlGc3gcQoVchb06X1Q,94
+ arize_phoenix-10.15.0.dist-info/licenses/IP_NOTICE,sha256=JBqyyCYYxGDfzQ0TtsQgjts41IJoa-hiwDrBjCb9gHM,469
+ arize_phoenix-10.15.0.dist-info/licenses/LICENSE,sha256=HFkW9REuMOkvKRACuwLPT0hRydHb3zNg-fdFt94td18,3794
+ arize_phoenix-10.15.0.dist-info/RECORD,,
phoenix/config.py CHANGED
@@ -24,6 +24,8 @@ if TYPE_CHECKING:

  logger = logging.getLogger(__name__)

+ ENV_OTEL_EXPORTER_OTLP_ENDPOINT = "OTEL_EXPORTER_OTLP_ENDPOINT"
+
  # Phoenix environment variables
  ENV_PHOENIX_PORT = "PHOENIX_PORT"
  ENV_PHOENIX_GRPC_PORT = "PHOENIX_GRPC_PORT"
@@ -1276,7 +1278,7 @@ def get_env_host_root_path() -> str:


  def get_env_collector_endpoint() -> Optional[str]:
-     return getenv(ENV_PHOENIX_COLLECTOR_ENDPOINT)
+     return getenv(ENV_PHOENIX_COLLECTOR_ENDPOINT) or getenv(ENV_OTEL_EXPORTER_OTLP_ENDPOINT)


  def get_env_project_name() -> str:
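
The config change above makes collector-endpoint resolution fall back to the standard OpenTelemetry variable when the Phoenix-specific one is unset. A minimal sketch of that resolution order, assuming only environment variables (the helper name below is illustrative, not part of Phoenix's API):

import os
from typing import Optional


def resolve_collector_endpoint() -> Optional[str]:
    # PHOENIX_COLLECTOR_ENDPOINT wins; otherwise fall back to
    # OTEL_EXPORTER_OTLP_ENDPOINT, mirroring get_env_collector_endpoint() above.
    return os.getenv("PHOENIX_COLLECTOR_ENDPOINT") or os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT")


# Example: with only the OTel variable set, it becomes the collector endpoint.
os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = "http://localhost:4317"
print(resolve_collector_endpoint())  # -> "http://localhost:4317" (unless PHOENIX_COLLECTOR_ENDPOINT is set)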
phoenix/db/types/model_provider.py CHANGED
@@ -9,3 +9,4 @@ class ModelProvider(Enum):
      DEEPSEEK = "DEEPSEEK"
      XAI = "XAI"
      OLLAMA = "OLLAMA"
+     AWS = "AWS"
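
With the new member, AWS can be selected like any other provider key. A tiny sketch of the enum after this change (other members elided; see the full class in phoenix/db/types/model_provider.py):

from enum import Enum


class ModelProvider(Enum):
    # abridged copy for illustration only
    OLLAMA = "OLLAMA"
    AWS = "AWS"


assert ModelProvider("AWS") is ModelProvider.AWS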
phoenix/server/api/helpers/playground_clients.py CHANGED
@@ -597,6 +597,465 @@ class OllamaStreamingClient(OpenAIBaseStreamingClient):
          self._attributes[LLM_SYSTEM] = OpenInferenceLLMSystemValues.OPENAI.value


+ @register_llm_client(
+     provider_key=GenerativeProviderKey.AWS,
+     model_names=[
+         PROVIDER_DEFAULT,
+         "anthropic.claude-3-5-sonnet-20240620-v1:0",
+         "anthropic.claude-3-7-sonnet-20250219-v1:0",
+         "anthropic.claude-3-haiku-20240307-v1:0",
+         "anthropic.claude-3-5-sonnet-20241022-v2:0",
+         "anthropic.claude-3-5-haiku-20241022-v1:0",
+         "anthropic.claude-opus-4-20250514-v1:0",
+         "anthropic.claude-sonnet-4-20250514-v1:0",
+         "amazon.titan-embed-text-v2:0",
+         "amazon.nova-pro-v1:0",
+         "amazon.nova-premier-v1:0:8k",
+         "amazon.nova-premier-v1:0:20k",
+         "amazon.nova-premier-v1:0:1000k",
+         "amazon.nova-premier-v1:0:mm",
+         "amazon.nova-premier-v1:0",
+         "amazon.nova-lite-v1:0",
+         "amazon.nova-micro-v1:0",
+         "deepseek.r1-v1:0",
+         "mistral.pixtral-large-2502-v1:0",
+         "meta.llama3-1-8b-instruct-v1:0:128k",
+         "meta.llama3-1-8b-instruct-v1:0",
+         "meta.llama3-1-70b-instruct-v1:0:128k",
+         "meta.llama3-1-70b-instruct-v1:0",
+         "meta.llama3-1-405b-instruct-v1:0",
+         "meta.llama3-2-11b-instruct-v1:0",
+         "meta.llama3-2-90b-instruct-v1:0",
+         "meta.llama3-2-1b-instruct-v1:0",
+         "meta.llama3-2-3b-instruct-v1:0",
+         "meta.llama3-3-70b-instruct-v1:0",
+         "meta.llama4-scout-17b-instruct-v1:0",
+         "meta.llama4-maverick-17b-instruct-v1:0",
+     ],
+ )
+ class BedrockStreamingClient(PlaygroundStreamingClient):
+     def __init__(
+         self,
+         model: GenerativeModelInput,
+         credentials: Optional[list[PlaygroundClientCredential]] = None,
+     ) -> None:
+         import boto3  # type: ignore[import-untyped]
+
+         super().__init__(model=model, credentials=credentials)
+         self.region = model.region or "us-east-1"
+         self.api = "converse"
+         self.aws_access_key_id = _get_credential_value(credentials, "AWS_ACCESS_KEY_ID") or getenv(
+             "AWS_ACCESS_KEY_ID"
+         )
+         self.aws_secret_access_key = _get_credential_value(
+             credentials, "AWS_SECRET_ACCESS_KEY"
+         ) or getenv("AWS_SECRET_ACCESS_KEY")
+         self.aws_session_token = _get_credential_value(credentials, "AWS_SESSION_TOKEN") or getenv(
+             "AWS_SESSION_TOKEN"
+         )
+         self.model_name = model.name
+         self.client = boto3.client(
+             service_name="bedrock-runtime",
+             region_name="us-east-1",  # match the default region in the UI
+             aws_access_key_id=self.aws_access_key_id,
+             aws_secret_access_key=self.aws_secret_access_key,
+             aws_session_token=self.aws_session_token,
+         )
+
+         self._attributes[LLM_PROVIDER] = "aws"
+         self._attributes[LLM_SYSTEM] = "aws"
+
+     @classmethod
+     def dependencies(cls) -> list[Dependency]:
+         return [Dependency(name="boto3")]
+
+     @classmethod
+     def supported_invocation_parameters(cls) -> list[InvocationParameter]:
+         return [
+             IntInvocationParameter(
+                 invocation_name="max_tokens",
+                 canonical_name=CanonicalParameterName.MAX_COMPLETION_TOKENS,
+                 label="Max Tokens",
+                 default_value=1024,
+             ),
+             BoundedFloatInvocationParameter(
+                 invocation_name="temperature",
+                 canonical_name=CanonicalParameterName.TEMPERATURE,
+                 label="Temperature",
+                 default_value=1.0,
+                 min_value=0.0,
+                 max_value=1.0,
+             ),
+             BoundedFloatInvocationParameter(
+                 invocation_name="top_p",
+                 canonical_name=CanonicalParameterName.TOP_P,
+                 label="Top P",
+                 default_value=1.0,
+                 min_value=0.0,
+                 max_value=1.0,
+             ),
+             JSONInvocationParameter(
+                 invocation_name="tool_choice",
+                 label="Tool Choice",
+                 canonical_name=CanonicalParameterName.TOOL_CHOICE,
+             ),
+         ]
+
+     async def chat_completion_create(
+         self,
+         messages: list[
+             tuple[ChatCompletionMessageRole, str, Optional[str], Optional[list[JSONScalarType]]]
+         ],
+         tools: list[JSONScalarType],
+         **invocation_parameters: Any,
+     ) -> AsyncIterator[ChatCompletionChunk]:
+         import boto3
+
+         if (
+             self.client.meta.region_name != self.region
+         ):  # override the region if it's different from the default
+             self.client = boto3.client(
+                 "bedrock-runtime",
+                 region_name=self.region,
+                 aws_access_key_id=self.aws_access_key_id,
+                 aws_secret_access_key=self.aws_secret_access_key,
+                 aws_session_token=self.aws_session_token,
+             )
+         if self.api == "invoke":
+             async for chunk in self._handle_invoke_api(messages, tools, invocation_parameters):
+                 yield chunk
+         else:
+             async for chunk in self._handle_converse_api(messages, tools, invocation_parameters):
+                 yield chunk
+
+     async def _handle_converse_api(
+         self,
+         messages: list[
+             tuple[ChatCompletionMessageRole, str, Optional[str], Optional[list[JSONScalarType]]]
+         ],
+         tools: list[JSONScalarType],
+         invocation_parameters: dict[str, Any],
+     ) -> AsyncIterator[ChatCompletionChunk]:
+         """
+         Handle the converse API.
+         """
+         # Build messages in Converse API format
+         converse_messages = self._build_converse_messages(messages)
+
+         # Build the request parameters for Converse API
+         converse_params: dict[str, Any] = {
+             "modelId": f"us.{self.model_name}",
+             "messages": converse_messages,
+             "inferenceConfig": {
+                 "maxTokens": invocation_parameters["max_tokens"],
+                 "temperature": invocation_parameters["temperature"],
+                 "topP": invocation_parameters["top_p"],
+             },
+         }
+
+         # Add system prompt if available
+         system_prompt = self._extract_system_prompt(messages)
+         if system_prompt:
+             converse_params["system"] = [{"text": system_prompt}]
+
+         # Add tools if provided
+         if tools:
+             converse_params["toolConfig"] = {"tools": tools}
+             if (
+                 "tool_choice" in invocation_parameters
+                 and invocation_parameters["tool_choice"]["type"] != "none"
+             ):
+                 converse_params["toolConfig"]["toolChoice"] = {}
+
+                 if invocation_parameters["tool_choice"]["type"] == "auto":
+                     converse_params["toolConfig"]["toolChoice"]["auto"] = {}
+                 elif invocation_parameters["tool_choice"]["type"] == "any":
+                     converse_params["toolConfig"]["toolChoice"]["any"] = {}
+                 else:
+                     converse_params["toolConfig"]["toolChoice"]["tool"] = {
+                         "name": invocation_parameters["tool_choice"]["name"],
+                     }
+
+         # Make the streaming API call
+         response = self.client.converse_stream(**converse_params)
+
+         # Track active tool calls
+         active_tool_calls = {}  # contentBlockIndex -> {id, name, arguments_buffer}
+
+         # Process the event stream
+         event_stream = response.get("stream")
+
+         for event in event_stream:
+             # Handle content block start events
+             if "contentBlockStart" in event:
+                 content_block_start = event["contentBlockStart"]
+                 start_event = content_block_start.get("start", {})
+                 block_index = content_block_start.get(
+                     "contentBlockIndex", 0
+                 )  # Get the actual index
+
+                 if "toolUse" in start_event:
+                     tool_use = start_event["toolUse"]
+                     active_tool_calls[block_index] = {  # Use the actual block index
+                         "id": tool_use.get("toolUseId"),
+                         "name": tool_use.get("name"),
+                         "arguments_buffer": "",
+                     }
+
+                     # Yield initial tool call chunk
+                     yield ToolCallChunk(
+                         id=tool_use.get("toolUseId"),
+                         function=FunctionCallChunk(
+                             name=tool_use.get("name"),
+                             arguments="",
+                         ),
+                     )
+
+             # Handle content block delta events
+             elif "contentBlockDelta" in event:
+                 content_delta = event["contentBlockDelta"]
+                 delta = content_delta.get("delta", {})
+                 delta_index = content_delta.get("contentBlockIndex", 0)
+
+                 # Handle text delta
+                 if "text" in delta:
+                     yield TextChunk(content=delta["text"])
+
+                 # Handle tool use delta
+                 elif "toolUse" in delta:
+                     tool_delta = delta["toolUse"]
+                     if "input" in tool_delta and delta_index in active_tool_calls:
+                         # Accumulate tool arguments
+                         json_chunk = tool_delta["input"]
+                         active_tool_calls[delta_index]["arguments_buffer"] += json_chunk
+
+                         # Yield incremental argument update
+                         yield ToolCallChunk(
+                             id=active_tool_calls[delta_index]["id"],
+                             function=FunctionCallChunk(
+                                 name=active_tool_calls[delta_index]["name"],
+                                 arguments=json_chunk,
+                             ),
+                         )
+
+             # Handle content block stop events
+             elif "contentBlockStop" in event:
+                 stop_index = event["contentBlockStop"].get("contentBlockIndex", 0)
+                 if stop_index in active_tool_calls:
+                     del active_tool_calls[stop_index]
+
+             elif "metadata" in event:
+                 self._attributes.update(
+                     {
+                         LLM_TOKEN_COUNT_PROMPT: event.get("metadata")
+                         .get("usage", {})
+                         .get("inputTokens", 0)
+                     }
+                 )
+
+                 self._attributes.update(
+                     {
+                         LLM_TOKEN_COUNT_COMPLETION: event.get("metadata")
+                         .get("usage", {})
+                         .get("outputTokens", 0)
+                     }
+                 )
+
+                 self._attributes.update(
+                     {
+                         LLM_TOKEN_COUNT_TOTAL: event.get("metadata")
+                         .get("usage", {})
+                         .get("totalTokens", 0)
+                     }
+                 )
+
+     async def _handle_invoke_api(
+         self,
+         messages: list[
+             tuple[ChatCompletionMessageRole, str, Optional[str], Optional[list[JSONScalarType]]]
+         ],
+         tools: list[JSONScalarType],
+         invocation_parameters: dict[str, Any],
+     ) -> AsyncIterator[ChatCompletionChunk]:
+         if "anthropic" not in self.model_name:
+             raise ValueError("Invoke API is only supported for Anthropic models")
+
+         bedrock_messages, system_prompt = self._build_bedrock_messages(messages)
+         bedrock_params = {
+             "anthropic_version": "bedrock-2023-05-31",
+             "max_tokens": invocation_parameters["max_tokens"],
+             "messages": bedrock_messages,
+             "system": system_prompt,
+             "temperature": invocation_parameters["temperature"],
+             "top_p": invocation_parameters["top_p"],
+             "tools": tools,
+         }
+
+         response = self.client.invoke_model_with_response_stream(
+             modelId=f"us.{self.model_name}",  # or another Claude model
+             contentType="application/json",
+             accept="application/json",
+             body=json.dumps(bedrock_params),
+             trace="ENABLED_FULL",
+         )
+
+         # The response['body'] is an EventStream object
+         event_stream = response["body"]
+
+         # Track active tool calls and their accumulating arguments
+         active_tool_calls: dict[int, dict[str, Any]] = {}  # index -> {id, name, arguments_buffer}
+
+         for event in event_stream:
+             if "chunk" in event:
+                 chunk_data = json.loads(event["chunk"]["bytes"].decode("utf-8"))
+
+                 # Handle text content
+                 if chunk_data.get("type") == "content_block_delta":
+                     delta = chunk_data.get("delta", {})
+                     index = chunk_data.get("index", 0)
+
+                     if delta.get("type") == "text_delta" and "text" in delta:
+                         yield TextChunk(content=delta["text"])
+
+                     elif delta.get("type") == "input_json_delta":
+                         # Accumulate tool arguments
+                         if index in active_tool_calls:
+                             active_tool_calls[index]["arguments_buffer"] += delta.get(
+                                 "partial_json", ""
+                             )
+                             # Yield incremental argument update
+                             yield ToolCallChunk(
+                                 id=active_tool_calls[index]["id"],
+                                 function=FunctionCallChunk(
+                                     name=active_tool_calls[index]["name"],
+                                     arguments=delta.get("partial_json", ""),
+                                 ),
+                             )
+
+                 # Handle tool call start
+                 elif chunk_data.get("type") == "content_block_start":
+                     content_block = chunk_data.get("content_block", {})
+                     index = chunk_data.get("index", 0)
+
+                     if content_block.get("type") == "tool_use":
+                         # Initialize tool call tracking
+                         active_tool_calls[index] = {
+                             "id": content_block.get("id"),
+                             "name": content_block.get("name"),
+                             "arguments_buffer": "",
+                         }
+
+                         # Yield initial tool call chunk
+                         yield ToolCallChunk(
+                             id=content_block.get("id"),
+                             function=FunctionCallChunk(
+                                 name=content_block.get("name"),
+                                 arguments="",  # Start with empty, will be filled by deltas
+                             ),
+                         )
+
+                 # Handle content block stop (tool call complete)
+                 elif chunk_data.get("type") == "content_block_stop":
+                     index = chunk_data.get("index", 0)
+                     if index in active_tool_calls:
+                         # Tool call is complete, clean up
+                         del active_tool_calls[index]
+
+                 elif chunk_data.get("type") == "message_stop":
+                     self._attributes.update(
+                         {
+                             LLM_TOKEN_COUNT_COMPLETION: chunk_data.get(
+                                 "amazon-bedrock-invocationMetrics", {}
+                             ).get("outputTokenCount", 0)
+                         }
+                     )
+
+                     self._attributes.update(
+                         {
+                             LLM_TOKEN_COUNT_PROMPT: chunk_data.get(
+                                 "amazon-bedrock-invocationMetrics", {}
+                             ).get("inputTokenCount", 0)
+                         }
+                     )
+
+     def _build_bedrock_messages(
+         self,
+         messages: list[
+             tuple[ChatCompletionMessageRole, str, Optional[str], Optional[list[JSONScalarType]]]
+         ],
+     ) -> tuple[list[dict[str, Any]], str]:
+         bedrock_messages = []
+         system_prompt = ""
+         for role, content, _, _ in messages:
+             if role == ChatCompletionMessageRole.USER:
+                 bedrock_messages.append(
+                     {
+                         "role": "user",
+                         "content": content,
+                     }
+                 )
+             elif role == ChatCompletionMessageRole.AI:
+                 bedrock_messages.append(
+                     {
+                         "role": "assistant",
+                         "content": content,
+                     }
+                 )
+             elif role == ChatCompletionMessageRole.SYSTEM:
+                 system_prompt += content + "\n"
+         return bedrock_messages, system_prompt
+
+     def _extract_system_prompt(
+         self,
+         messages: list[
+             tuple[ChatCompletionMessageRole, str, Optional[str], Optional[list[JSONScalarType]]]
+         ],
+     ) -> str:
+         """Extract system prompt from messages."""
+         system_prompts = []
+         for role, content, _, _ in messages:
+             if role == ChatCompletionMessageRole.SYSTEM:
+                 system_prompts.append(content)
+         return "\n".join(system_prompts)
+
+     def _build_converse_messages(
+         self,
+         messages: list[
+             tuple[ChatCompletionMessageRole, str, Optional[str], Optional[list[JSONScalarType]]]
+         ],
+     ) -> list[dict[str, Any]]:
+         """Convert messages to Converse API format."""
+         converse_messages: list[dict[str, Any]] = []
+         for role, content, _id, tool_calls in messages:
+             if role == ChatCompletionMessageRole.USER:
+                 converse_messages.append({"role": "user", "content": [{"text": content}]})
+             elif role == ChatCompletionMessageRole.TOOL:
+                 converse_messages.append(
+                     {
+                         "role": "user",
+                         "content": [
+                             {
+                                 "toolResult": {
+                                     "toolUseId": _id,
+                                     "content": [{"json": json.loads(content)}],
+                                 }
+                             }
+                         ],
+                     }
+                 )
+
+             elif role == ChatCompletionMessageRole.AI:
+                 # Handle assistant messages with potential tool calls
+                 message: dict[str, Any] = {"role": "assistant", "content": []}
+                 if content:
+                     message["content"].append({"text": content})
+                 if tool_calls:
+                     for tool_call in tool_calls:
+                         message["content"].append(tool_call)
+                 converse_messages.append(message)
+         return converse_messages
+
+
  @register_llm_client(
      provider_key=GenerativeProviderKey.OPENAI,
      model_names=[
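
The Converse path added above ultimately wraps the standard boto3 bedrock-runtime streaming call. A minimal sketch of that underlying call, assuming AWS credentials are available in the environment; the region, prompt, and model ID are placeholders (the "us."-prefixed model ID mirrors the f"us.{self.model_name}" construction in the diff):

import boto3

client = boto3.client("bedrock-runtime", region_name="us-east-1")

response = client.converse_stream(
    modelId="us.anthropic.claude-3-5-sonnet-20240620-v1:0",
    messages=[{"role": "user", "content": [{"text": "Hello!"}]}],
    inferenceConfig={"maxTokens": 1024, "temperature": 1.0, "topP": 1.0},
)

# Iterate the event stream the same way BedrockStreamingClient._handle_converse_api does.
for event in response["stream"]:
    if "contentBlockDelta" in event:
        delta = event["contentBlockDelta"].get("delta", {})
        if "text" in delta:
            print(delta["text"], end="")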