arize-phoenix 10.6.2__py3-none-any.whl → 10.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of arize-phoenix might be problematic. Click here for more details.
- {arize_phoenix-10.6.2.dist-info → arize_phoenix-10.7.0.dist-info}/METADATA +2 -2
- {arize_phoenix-10.6.2.dist-info → arize_phoenix-10.7.0.dist-info}/RECORD +25 -24
- phoenix/db/types/model_provider.py +1 -0
- phoenix/experiments/functions.py +21 -9
- phoenix/experiments/tracing.py +1 -1
- phoenix/server/api/helpers/playground_clients.py +132 -21
- phoenix/server/api/helpers/playground_spans.py +6 -3
- phoenix/server/api/helpers/prompts/models.py +21 -0
- phoenix/server/api/input_types/ChatCompletionInput.py +3 -2
- phoenix/server/api/input_types/GenerativeCredentialInput.py +9 -0
- phoenix/server/api/mutations/chat_mutations.py +20 -3
- phoenix/server/api/subscriptions.py +19 -2
- phoenix/server/api/types/GenerativeProvider.py +44 -12
- phoenix/server/main.py +1 -0
- phoenix/server/static/.vite/manifest.json +9 -9
- phoenix/server/static/assets/{components-J06J_j9O.js → components-CuV9v4w3.js} +243 -243
- phoenix/server/static/assets/{index-DfT39tc3.js → index-BeIqg4nl.js} +2 -2
- phoenix/server/static/assets/{pages-nxs-tDxQ.js → pages-DTLdnakm.js} +401 -398
- phoenix/server/telemetry.py +1 -1
- phoenix/trace/projects.py +1 -1
- phoenix/version.py +1 -1
- {arize_phoenix-10.6.2.dist-info → arize_phoenix-10.7.0.dist-info}/WHEEL +0 -0
- {arize_phoenix-10.6.2.dist-info → arize_phoenix-10.7.0.dist-info}/entry_points.txt +0 -0
- {arize_phoenix-10.6.2.dist-info → arize_phoenix-10.7.0.dist-info}/licenses/IP_NOTICE +0 -0
- {arize_phoenix-10.6.2.dist-info → arize_phoenix-10.7.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: arize-phoenix
|
|
3
|
-
Version: 10.
|
|
3
|
+
Version: 10.7.0
|
|
4
4
|
Summary: AI Observability and Evaluation
|
|
5
5
|
Project-URL: Documentation, https://arize.com/docs/phoenix/
|
|
6
6
|
Project-URL: Issues, https://github.com/Arize-ai/phoenix/issues
|
|
@@ -22,7 +22,7 @@ Requires-Dist: aiosqlite
|
|
|
22
22
|
Requires-Dist: alembic<2,>=1.3.0
|
|
23
23
|
Requires-Dist: arize-phoenix-client
|
|
24
24
|
Requires-Dist: arize-phoenix-evals>=0.20.6
|
|
25
|
-
Requires-Dist: arize-phoenix-otel>=0.10.
|
|
25
|
+
Requires-Dist: arize-phoenix-otel>=0.10.3
|
|
26
26
|
Requires-Dist: authlib
|
|
27
27
|
Requires-Dist: cachetools
|
|
28
28
|
Requires-Dist: email-validator
|
|
@@ -6,7 +6,7 @@ phoenix/exceptions.py,sha256=n2L2KKuecrdflB9MsCdAYCiSEvGJptIsfRkXMoJle7A,169
|
|
|
6
6
|
phoenix/py.typed,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
|
|
7
7
|
phoenix/services.py,sha256=ngkyKGVatX3cO2WJdo2hKdaVKP-xJCMvqthvga6kJss,5196
|
|
8
8
|
phoenix/settings.py,sha256=x87BX7hWGQQZbrW_vrYqFR_izCGfO9gFc--JXUG4Tdk,754
|
|
9
|
-
phoenix/version.py,sha256=
|
|
9
|
+
phoenix/version.py,sha256=9VjBfsEYJF5PsmYbaZqem3HOFjLlVd77_XvGC9hBJrQ,23
|
|
10
10
|
phoenix/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
11
11
|
phoenix/core/embedding_dimension.py,sha256=zKGbcvwOXgLf-yrJBpQyKtd-LEOPRKHnUToyAU8Owis,87
|
|
12
12
|
phoenix/core/model.py,sha256=qBFraOtmwCCnWJltKNP18DDG0mULXigytlFsa6YOz6k,4837
|
|
@@ -53,11 +53,11 @@ phoenix/db/types/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
|
|
|
53
53
|
phoenix/db/types/annotation_configs.py,sha256=keiQ9mzLZWE8Eqsu1xFxQBXvBiym0s-yPzoL8qIR4PY,2904
|
|
54
54
|
phoenix/db/types/db_models.py,sha256=nMSd9gWHwObnVO3_slztlHqoeh04czS-Jxu-omS6M6E,1135
|
|
55
55
|
phoenix/db/types/identifier.py,sha256=Opr3_1di6e5ncrBDn30WfBSr-jN_VGBnkkA4BMuSoyc,244
|
|
56
|
-
phoenix/db/types/model_provider.py,sha256=
|
|
56
|
+
phoenix/db/types/model_provider.py,sha256=vrG0yIQqy-SwvRJ1aGJqpCjPZ29vRAi6550CnvSyj2E,221
|
|
57
57
|
phoenix/db/types/trace_retention.py,sha256=fyqAQCvDiD7mpJ_WUqbPyQvuSdERof4DpKpHLJsdROk,9897
|
|
58
58
|
phoenix/experiments/__init__.py,sha256=6JGwgUd7xCbGpuHqYZlsmErmYvVgv7N_j43bn3dUqsk,123
|
|
59
|
-
phoenix/experiments/functions.py,sha256=
|
|
60
|
-
phoenix/experiments/tracing.py,sha256=
|
|
59
|
+
phoenix/experiments/functions.py,sha256=JVhHImgta_ILxXbV17hFSo0k8O0qCIoHC-JDNwKw5Mg,38257
|
|
60
|
+
phoenix/experiments/tracing.py,sha256=bpw9OtrBxjOZKkpf4-Fmn3uCUXUrlhCjHPN9XqW6cp0,2888
|
|
61
61
|
phoenix/experiments/types.py,sha256=yntt6fnAny1U4Q9Y5Mm4ZYIb9319OaJovl-kyXFtGQE,23475
|
|
62
62
|
phoenix/experiments/utils.py,sha256=MZ1-OnTcavk_KUtbfGqt55Fk9TGtJpYG_K71WsN-zDk,785
|
|
63
63
|
phoenix/experiments/evaluators/__init__.py,sha256=CPWW1EiufLqc0JWghE4wVAPG_z6Wt4mD_-yf_4IckB4,772
|
|
@@ -96,12 +96,12 @@ phoenix/server/dml_event.py,sha256=MjJmVEKytq75chBOSyvYDusUnEbg1pHpIjR3pZkUaJA,2
|
|
|
96
96
|
phoenix/server/dml_event_handler.py,sha256=EZLXmCvx4pJrCkz29gxwKwmvmUkTtPCHw6klR-XM8qE,8258
|
|
97
97
|
phoenix/server/grpc_server.py,sha256=dod29zE_Zlir7NyLcdVM8GH3P8sy-9ykzfaBfVifyE4,4656
|
|
98
98
|
phoenix/server/jwt_store.py,sha256=B6uVildN_dQDTG_-aHHvuVSI7wIVK1yvED-_y6se2GU,16905
|
|
99
|
-
phoenix/server/main.py,sha256=
|
|
99
|
+
phoenix/server/main.py,sha256=ZL9cCqhaawTWOr6DIs6x09QjZ2xDk-jdoiyJcrjJFMc,18294
|
|
100
100
|
phoenix/server/oauth2.py,sha256=GvUqZBoZ5dG-l2G1RMl1SUcN10jNAjaMXFznMSWz2Zs,3336
|
|
101
101
|
phoenix/server/prometheus.py,sha256=1KjvSfjSa2-BPjDybVMM_Kag316CsN-Zwt64YNr_snc,7825
|
|
102
102
|
phoenix/server/rate_limiters.py,sha256=cFc73D2NaxqNZZDbwfIDw4So-fRVOJPBtqxOZ8Qky_s,7155
|
|
103
103
|
phoenix/server/retention.py,sha256=MQe1FWuc_NxhqgIq5q2hfFhWT8ddAmpppgI74xYEQ6c,3064
|
|
104
|
-
phoenix/server/telemetry.py,sha256=
|
|
104
|
+
phoenix/server/telemetry.py,sha256=yKAAEy9rvdLvQ-BCINM-H3U6hxlXq2bM8FkiQ_4VazU,2785
|
|
105
105
|
phoenix/server/thread_server.py,sha256=Ea2AWreN1lwJsT2wYvGaRaiXrzBqH4kgkZpx0FO5Ocw,2144
|
|
106
106
|
phoenix/server/types.py,sha256=b17xahdt6uwDdUYul0xctu7TbBC65AjarlhUzOiXFNE,7443
|
|
107
107
|
phoenix/server/api/README.md,sha256=Pyq1PLPgTzXAswrfIhGXrjI3Skq8it2jTVnanT6Ba4Q,1162
|
|
@@ -112,7 +112,7 @@ phoenix/server/api/exceptions.py,sha256=TA0JuY2YRnj35qGuMSQ8d0ToHum9gWm9W--3fSKH
|
|
|
112
112
|
phoenix/server/api/interceptor.py,sha256=ykDnoC_apUd-llVli3m1CW18kNSIgjz2qZ6m5JmPDu8,1294
|
|
113
113
|
phoenix/server/api/queries.py,sha256=MzfCwdR2oLlZn8p0t0VMBp2-1ZFDwleKiYvtnAU9bFc,40710
|
|
114
114
|
phoenix/server/api/schema.py,sha256=fcs36xQwFF_Qe41_5cWR8wYpDvOrnbcyTeo5WNMbDsA,1702
|
|
115
|
-
phoenix/server/api/subscriptions.py,sha256=
|
|
115
|
+
phoenix/server/api/subscriptions.py,sha256=73s6TzwI2M_bjIZDYwgohdI_13iv7pgpLCvZYNuExnw,23777
|
|
116
116
|
phoenix/server/api/utils.py,sha256=quCBRcusc6PUq9tJq7M8PgwFZp7nXgVAxtbw8feribY,833
|
|
117
117
|
phoenix/server/api/dataloaders/__init__.py,sha256=jAdt8Tuoj8X2iszbjqJ-LVNo5PicmyxyCTQWL51NMxU,4659
|
|
118
118
|
phoenix/server/api/dataloaders/annotation_summaries.py,sha256=U-mVB6pY65umyjtmF-cLGJAp5QCAaB7psgTie6_gLGI,12943
|
|
@@ -158,17 +158,17 @@ phoenix/server/api/helpers/__init__.py,sha256=m2-xaSPqUiSs91k62JaRDjFNfl-1byxBfY
|
|
|
158
158
|
phoenix/server/api/helpers/annotations.py,sha256=9gMXKpMTfWEChoSCnvdWYuyB0hlSnNOp-qUdar9Vono,262
|
|
159
159
|
phoenix/server/api/helpers/dataset_helpers.py,sha256=DoMBTg-qXTnC_K4Evx1WKpCCYgRbITpVqyY-8efJRf0,8984
|
|
160
160
|
phoenix/server/api/helpers/experiment_run_filters.py,sha256=DOnVwrmn39eAkk2mwuZP8kIcAnR5jrOgllEwWSjsw94,29893
|
|
161
|
-
phoenix/server/api/helpers/playground_clients.py,sha256=
|
|
161
|
+
phoenix/server/api/helpers/playground_clients.py,sha256=ycvccG8bzwtIeU0LQpR6IMOn3B3DftmCwqfuYYZ7YYM,47957
|
|
162
162
|
phoenix/server/api/helpers/playground_registry.py,sha256=CPLMziFB2wmr-dfbx7VbzO2f8YIG_k5RftzvGXYGQ1w,2570
|
|
163
|
-
phoenix/server/api/helpers/playground_spans.py,sha256=
|
|
163
|
+
phoenix/server/api/helpers/playground_spans.py,sha256=QpXwPl_fFNwm_iA1A77XApUyXMl1aDmonw8aXuNZ_4k,17132
|
|
164
164
|
phoenix/server/api/helpers/prompts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
165
|
-
phoenix/server/api/helpers/prompts/models.py,sha256=
|
|
165
|
+
phoenix/server/api/helpers/prompts/models.py,sha256=NXdPfs2ktl-btP33qVUXVbzv6GEgadFLurO6QLUw97w,21208
|
|
166
166
|
phoenix/server/api/helpers/prompts/conversions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
167
167
|
phoenix/server/api/helpers/prompts/conversions/anthropic.py,sha256=ZT--UqBwoGf7QMusajB6aeB7zyWGttaZigb113kgiY8,3571
|
|
168
168
|
phoenix/server/api/helpers/prompts/conversions/openai.py,sha256=a43WAftFn_me6ePHDufqvlg-4Z2C31owUSsqYC0YUP8,2589
|
|
169
169
|
phoenix/server/api/input_types/AddExamplesToDatasetInput.py,sha256=mIQz0S_z8YdrktKIY6RCvtNJ2yZF9pYvTGgasUsI-54,430
|
|
170
170
|
phoenix/server/api/input_types/AddSpansToDatasetInput.py,sha256=-StSstyMAVrba3tG1U30b-srkKCtu_svflQuSM19iJA,362
|
|
171
|
-
phoenix/server/api/input_types/ChatCompletionInput.py,sha256=
|
|
171
|
+
phoenix/server/api/input_types/ChatCompletionInput.py,sha256=f72ZoJKc-Pi6icaZ1yMUbHcvx_3MbrFUDkEu7sLakUI,1774
|
|
172
172
|
phoenix/server/api/input_types/ChatCompletionMessageInput.py,sha256=0_YQBcoOS3BBTluHIB68DSP8FGAn2D9ZAw-Ht-dkbAQ,822
|
|
173
173
|
phoenix/server/api/input_types/ClearProjectInput.py,sha256=cpPFRyQ3ffy2dLbCZgYpway-mCzhdm4QqnUg8caOBfQ,382
|
|
174
174
|
phoenix/server/api/input_types/ClusterInput.py,sha256=AfhuYYHlYgdMO6Ap8cLXqAp70S0Wutx-RTzZYetN62A,173
|
|
@@ -186,6 +186,7 @@ phoenix/server/api/input_types/DeleteDatasetInput.py,sha256=p7xjCyWnVCIXHnezmDiW
|
|
|
186
186
|
phoenix/server/api/input_types/DeleteExperimentsInput.py,sha256=4d9N0vSLYbuysAamGoPUP_m8vdVhwrZmXoi2vhy_HdI,141
|
|
187
187
|
phoenix/server/api/input_types/DimensionFilter.py,sha256=eBYcn7ECSJQlEePvbStqkHBRicbIL4vEAzFJwX7bacQ,3137
|
|
188
188
|
phoenix/server/api/input_types/DimensionInput.py,sha256=Vfx5FmiMKey4-EHDQsQRPzSAMRJMN5oVMLDUl4NKAa8,164
|
|
189
|
+
phoenix/server/api/input_types/GenerativeCredentialInput.py,sha256=sEM9UtgDMMuhImLwGXctD8BWDs2V3hNQ1mosoklRZvc,219
|
|
189
190
|
phoenix/server/api/input_types/GenerativeModelInput.py,sha256=n6OCkX44I1AIovMAHCxy8SvqPKDb_BYDPA-fn_JnckQ,634
|
|
190
191
|
phoenix/server/api/input_types/Granularity.py,sha256=dbBlD_GsIBa8_xrx4JlLuR59bQ0NRB5H-cv1zvcb-cw,2299
|
|
191
192
|
phoenix/server/api/input_types/InvocationParameters.py,sha256=62xL0iIKvuQherkuJaJ6Lha4TTEoYLpvH-pEP9awK6k,5260
|
|
@@ -208,7 +209,7 @@ phoenix/server/api/input_types/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5
|
|
|
208
209
|
phoenix/server/api/mutations/__init__.py,sha256=fGFokUtX4J1q9yYzRtPOwRyfJNyL_8z60gaWX9IPvtM,1872
|
|
209
210
|
phoenix/server/api/mutations/annotation_config_mutations.py,sha256=i7NsQhYICcQ-I-tnFjGtVAYc8WVmMBacmRaqHWJ25t4,15433
|
|
210
211
|
phoenix/server/api/mutations/api_key_mutations.py,sha256=nfnRjALCaQMi_jIbEPW4G3Dn3tPnmZVU11tpBbBijGA,6242
|
|
211
|
-
phoenix/server/api/mutations/chat_mutations.py,sha256=
|
|
212
|
+
phoenix/server/api/mutations/chat_mutations.py,sha256=pwrScthRbMcgXsOucQvSEx6PPiMLw8pwtx08e37SXsg,23715
|
|
212
213
|
phoenix/server/api/mutations/dataset_mutations.py,sha256=KRlF-Ag3twqaBpLR_6WYxf57DffaGuFBm-soaBPStbI,27787
|
|
213
214
|
phoenix/server/api/mutations/experiment_mutations.py,sha256=p3CoLAa8nFPa3D759Y2A7De_PVJNGOL98mA3HoZBrRQ,3188
|
|
214
215
|
phoenix/server/api/mutations/export_events_mutations.py,sha256=xoDnVWC7eA_8wNQP0-oyiHojyUZ0EhVVSrsAnztetC0,3993
|
|
@@ -284,7 +285,7 @@ phoenix/server/api/types/ExperimentRunAnnotation.py,sha256=YGw5zIbjRXUK3zH475DnE
|
|
|
284
285
|
phoenix/server/api/types/ExportedFile.py,sha256=e3GTn7B5LgsTbqiwjhMCQH7VsiqXitrBO4aCMS1lHsg,163
|
|
285
286
|
phoenix/server/api/types/Functionality.py,sha256=zDDl2bANIqjwfooSOHg-VQk6-wQy05mREwjV_-VbSIg,262
|
|
286
287
|
phoenix/server/api/types/GenerativeModel.py,sha256=P7eBUMXbeqaLwSSGBKdZy3a5gOLd9I0fuP8o1st6H08,193
|
|
287
|
-
phoenix/server/api/types/GenerativeProvider.py,sha256=
|
|
288
|
+
phoenix/server/api/types/GenerativeProvider.py,sha256=yLOR8qqPSeMiB1HxmAYTXamVQo8GIZVe-VYFsJUQqqo,5901
|
|
288
289
|
phoenix/server/api/types/Identifier.py,sha256=n3rxpoKNCwEvZu7QY8yr7g3AW2mU-U62BxFXYaiHLKk,306
|
|
289
290
|
phoenix/server/api/types/Inferences.py,sha256=wv88PjcK-KwnzmTdukiAX9EV2KX4GqsKXVAUm1JtnDA,3383
|
|
290
291
|
phoenix/server/api/types/InferencesRole.py,sha256=mLfeHpyhGUVX1-tWzT9IwC_cD18BZrD3RA4YsHYuSpA,595
|
|
@@ -348,10 +349,10 @@ phoenix/server/static/apple-touch-icon-76x76.png,sha256=CT_xT12I0u2i0WU8JzBZBuOQ
|
|
|
348
349
|
phoenix/server/static/apple-touch-icon.png,sha256=fOfpjqGpWYbJ0eAurKsyoZP1EAs6ZVooBJ_SGk2ZkDs,3801
|
|
349
350
|
phoenix/server/static/favicon.ico,sha256=bY0vvCKRftemZfPShwZtE93DiiQdaYaozkPGwNFr6H8,34494
|
|
350
351
|
phoenix/server/static/modernizr.js,sha256=mvK-XtkNqjOral-QvzoqsyOMECXIMu5BQwSVN_wcU9c,2564
|
|
351
|
-
phoenix/server/static/.vite/manifest.json,sha256=
|
|
352
|
-
phoenix/server/static/assets/components-
|
|
353
|
-
phoenix/server/static/assets/index-
|
|
354
|
-
phoenix/server/static/assets/pages-
|
|
352
|
+
phoenix/server/static/.vite/manifest.json,sha256=Ic1-HpR1J5oVDoJ3iYQe-pAy6FLhbmn0VnFuZMdhj1M,2165
|
|
353
|
+
phoenix/server/static/assets/components-CuV9v4w3.js,sha256=nTIWvCvLEtn2E0or0I3wDCM0rDSrz5Qlshfnv0l0c20,564814
|
|
354
|
+
phoenix/server/static/assets/index-BeIqg4nl.js,sha256=StSS6Rh7FmF3IPAGUYyttIrRqEg7YRBNbvUHwNjYR1g,61125
|
|
355
|
+
phoenix/server/static/assets/pages-DTLdnakm.js,sha256=ZbCPahoScQDXAWTdWAi6ClGSVCVA7Ycnbbu8lIVQ464,1034584
|
|
355
356
|
phoenix/server/static/assets/vendor-B52WHALA.js,sha256=qNJdtbj4rc-YYJc4JF9crIVk9G3iSnPB5NbDhXzS4VM,2731862
|
|
356
357
|
phoenix/server/static/assets/vendor-WIZid84E.css,sha256=spZD2r7XL5GfLO13ln-IuXfnjAref8l6g_n_AvxxOlI,5517
|
|
357
358
|
phoenix/server/static/assets/vendor-arizeai-DGHetzZW.js,sha256=0GqwRIhagIyYxB33qbBQaHZaxLVExlvEXiOTCy7Ybc4,181777
|
|
@@ -373,7 +374,7 @@ phoenix/trace/evaluation_conventions.py,sha256=t8jydM3U0-T5YpiQKRJ3tWdWGlHtzKytt
|
|
|
373
374
|
phoenix/trace/exporter.py,sha256=bUXh8fjJIbHurrnt4bAm-cCWqUN5FqNsIc8DZzzklkQ,4695
|
|
374
375
|
phoenix/trace/fixtures.py,sha256=1c7fsyvmxC53Fib9T_Qxp_Ly3OZdDbkLQ0XpFzikEjk,20298
|
|
375
376
|
phoenix/trace/otel.py,sha256=RJSbAuzS4KBS0t-fntXQaaYwv7FmIXRMrw65DI67z8k,10622
|
|
376
|
-
phoenix/trace/projects.py,sha256=
|
|
377
|
+
phoenix/trace/projects.py,sha256=URKEHEaxnr3uHM4nn8kcVN6tcn4MXAt_OEca4TPDtco,2208
|
|
377
378
|
phoenix/trace/schemas.py,sha256=Su6e567Bei9oo6PsWO2srTcPAj9C2bMgbGtx64Sgqeg,6332
|
|
378
379
|
phoenix/trace/span_evaluations.py,sha256=x3nye9r2SQk1mrR3N05YbuWsgUKpMWwTRBtJTDBSj3Y,13156
|
|
379
380
|
phoenix/trace/span_json_decoder.py,sha256=J1_oDViuUoC4vxPg61U4EOZC1uEMcCzoj-kVjOFEE8k,3224
|
|
@@ -398,9 +399,9 @@ phoenix/utilities/project.py,sha256=auVpARXkDb-JgeX5f2aStyFIkeKvGwN9l7qrFeJMVxI,
|
|
|
398
399
|
phoenix/utilities/re.py,sha256=6YyUWIkv0zc2SigsxfOWIHzdpjKA_TZo2iqKq7zJKvw,2081
|
|
399
400
|
phoenix/utilities/span_store.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
400
401
|
phoenix/utilities/template_formatters.py,sha256=gh9PJD6WEGw7TEYXfSst1UR4pWWwmjxMLrDVQ_CkpkQ,2779
|
|
401
|
-
arize_phoenix-10.
|
|
402
|
-
arize_phoenix-10.
|
|
403
|
-
arize_phoenix-10.
|
|
404
|
-
arize_phoenix-10.
|
|
405
|
-
arize_phoenix-10.
|
|
406
|
-
arize_phoenix-10.
|
|
402
|
+
arize_phoenix-10.7.0.dist-info/METADATA,sha256=5XcIXrXlk4HhfC39_LICUDXrJyBeRt5bWlAfYDSBpfc,27004
|
|
403
|
+
arize_phoenix-10.7.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
|
404
|
+
arize_phoenix-10.7.0.dist-info/entry_points.txt,sha256=Pgpn8Upxx9P8z8joPXZWl2LlnAlGc3gcQoVchb06X1Q,94
|
|
405
|
+
arize_phoenix-10.7.0.dist-info/licenses/IP_NOTICE,sha256=JBqyyCYYxGDfzQ0TtsQgjts41IJoa-hiwDrBjCb9gHM,469
|
|
406
|
+
arize_phoenix-10.7.0.dist-info/licenses/LICENSE,sha256=HFkW9REuMOkvKRACuwLPT0hRydHb3zNg-fdFt94td18,3794
|
|
407
|
+
arize_phoenix-10.7.0.dist-info/RECORD,,
|
phoenix/experiments/functions.py
CHANGED
|
@@ -25,7 +25,7 @@ from openinference.semconv.trace import (
|
|
|
25
25
|
)
|
|
26
26
|
from opentelemetry.context import Context
|
|
27
27
|
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
|
|
28
|
-
from opentelemetry.sdk.resources import Resource
|
|
28
|
+
from opentelemetry.sdk.resources import Resource # type: ignore[attr-defined]
|
|
29
29
|
from opentelemetry.sdk.trace import Span
|
|
30
30
|
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
|
|
31
31
|
from opentelemetry.trace import Status, StatusCode, Tracer
|
|
@@ -274,8 +274,11 @@ def run_experiment(
|
|
|
274
274
|
error: Optional[BaseException] = None
|
|
275
275
|
status = Status(StatusCode.OK)
|
|
276
276
|
with ExitStack() as stack:
|
|
277
|
-
span
|
|
278
|
-
|
|
277
|
+
span = cast(
|
|
278
|
+
Span,
|
|
279
|
+
stack.enter_context(
|
|
280
|
+
tracer.start_as_current_span(root_span_name, context=Context())
|
|
281
|
+
),
|
|
279
282
|
)
|
|
280
283
|
stack.enter_context(capture_spans(resource))
|
|
281
284
|
try:
|
|
@@ -391,8 +394,11 @@ def run_experiment(
|
|
|
391
394
|
error: Optional[BaseException] = None
|
|
392
395
|
status = Status(StatusCode.OK)
|
|
393
396
|
with ExitStack() as stack:
|
|
394
|
-
span
|
|
395
|
-
|
|
397
|
+
span = cast(
|
|
398
|
+
Span,
|
|
399
|
+
stack.enter_context(
|
|
400
|
+
tracer.start_as_current_span(root_span_name, context=Context())
|
|
401
|
+
),
|
|
396
402
|
)
|
|
397
403
|
stack.enter_context(capture_spans(resource))
|
|
398
404
|
try:
|
|
@@ -622,8 +628,11 @@ def evaluate_experiment(
|
|
|
622
628
|
status = Status(StatusCode.OK)
|
|
623
629
|
root_span_name = f"Evaluation: {evaluator.name}"
|
|
624
630
|
with ExitStack() as stack:
|
|
625
|
-
span
|
|
626
|
-
|
|
631
|
+
span = cast(
|
|
632
|
+
Span,
|
|
633
|
+
stack.enter_context(
|
|
634
|
+
tracer.start_as_current_span(root_span_name, context=Context())
|
|
635
|
+
),
|
|
627
636
|
)
|
|
628
637
|
stack.enter_context(capture_spans(resource))
|
|
629
638
|
try:
|
|
@@ -674,8 +683,11 @@ def evaluate_experiment(
|
|
|
674
683
|
status = Status(StatusCode.OK)
|
|
675
684
|
root_span_name = f"Evaluation: {evaluator.name}"
|
|
676
685
|
with ExitStack() as stack:
|
|
677
|
-
span
|
|
678
|
-
|
|
686
|
+
span = cast(
|
|
687
|
+
Span,
|
|
688
|
+
stack.enter_context(
|
|
689
|
+
tracer.start_as_current_span(root_span_name, context=Context())
|
|
690
|
+
),
|
|
679
691
|
)
|
|
680
692
|
stack.enter_context(capture_spans(resource))
|
|
681
693
|
try:
|
phoenix/experiments/tracing.py
CHANGED
|
@@ -6,7 +6,7 @@ from contextvars import ContextVar
|
|
|
6
6
|
from threading import Lock
|
|
7
7
|
from typing import Any, Optional
|
|
8
8
|
|
|
9
|
-
from opentelemetry.sdk.resources import Resource
|
|
9
|
+
from opentelemetry.sdk.resources import Resource # type: ignore[attr-defined]
|
|
10
10
|
from opentelemetry.sdk.trace import ReadableSpan
|
|
11
11
|
from opentelemetry.trace import INVALID_TRACE_ID
|
|
12
12
|
from wrapt import apply_patch, resolve_path, wrap_function_wrapper
|
|
@@ -7,6 +7,7 @@ import json
|
|
|
7
7
|
import time
|
|
8
8
|
from abc import ABC, abstractmethod
|
|
9
9
|
from collections.abc import AsyncIterator, Callable, Iterator
|
|
10
|
+
from dataclasses import dataclass
|
|
10
11
|
from functools import wraps
|
|
11
12
|
from typing import TYPE_CHECKING, Any, Hashable, Mapping, MutableMapping, Optional, Union
|
|
12
13
|
|
|
@@ -66,6 +67,16 @@ SetSpanAttributesFn: TypeAlias = Callable[[Mapping[str, Any]], None]
|
|
|
66
67
|
ChatCompletionChunk: TypeAlias = Union[TextChunk, ToolCallChunk]
|
|
67
68
|
|
|
68
69
|
|
|
70
|
+
@dataclass
|
|
71
|
+
class PlaygroundClientCredential:
|
|
72
|
+
"""
|
|
73
|
+
Represents a credential for LLM providers.
|
|
74
|
+
"""
|
|
75
|
+
|
|
76
|
+
env_var_name: str
|
|
77
|
+
value: str
|
|
78
|
+
|
|
79
|
+
|
|
69
80
|
class Dependency:
|
|
70
81
|
"""
|
|
71
82
|
Set the module_name to the import name if it is different from the install name
|
|
@@ -172,9 +183,10 @@ class PlaygroundStreamingClient(ABC):
|
|
|
172
183
|
def __init__(
|
|
173
184
|
self,
|
|
174
185
|
model: GenerativeModelInput,
|
|
175
|
-
|
|
186
|
+
credentials: Optional[list[PlaygroundClientCredential]] = None,
|
|
176
187
|
) -> None:
|
|
177
188
|
self._attributes: dict[str, AttributeValue] = dict()
|
|
189
|
+
self._credentials = credentials or []
|
|
178
190
|
|
|
179
191
|
@classmethod
|
|
180
192
|
@abstractmethod
|
|
@@ -243,11 +255,11 @@ class OpenAIBaseStreamingClient(PlaygroundStreamingClient):
|
|
|
243
255
|
*,
|
|
244
256
|
client: Union["AsyncOpenAI", "AsyncAzureOpenAI"],
|
|
245
257
|
model: GenerativeModelInput,
|
|
246
|
-
|
|
258
|
+
credentials: Optional[list[PlaygroundClientCredential]] = None,
|
|
247
259
|
) -> None:
|
|
248
260
|
from openai import RateLimitError as OpenAIRateLimitError
|
|
249
261
|
|
|
250
|
-
super().__init__(model=model,
|
|
262
|
+
super().__init__(model=model, credentials=credentials)
|
|
251
263
|
self.client = client
|
|
252
264
|
self.model_name = model.name
|
|
253
265
|
self.rate_limiter = PlaygroundRateLimiter(model.provider_key, OpenAIRateLimitError)
|
|
@@ -453,6 +465,28 @@ class OpenAIBaseStreamingClient(PlaygroundStreamingClient):
|
|
|
453
465
|
yield LLM_TOKEN_COUNT_TOTAL, usage.total_tokens
|
|
454
466
|
|
|
455
467
|
|
|
468
|
+
def _get_credential_value(
|
|
469
|
+
credentials: Optional[list[PlaygroundClientCredential]], env_var_name: str
|
|
470
|
+
) -> Optional[str]:
|
|
471
|
+
"""Helper function to extract credential value from credentials list."""
|
|
472
|
+
if not credentials:
|
|
473
|
+
return None
|
|
474
|
+
return next(
|
|
475
|
+
(credential.value for credential in credentials if credential.env_var_name == env_var_name),
|
|
476
|
+
None,
|
|
477
|
+
)
|
|
478
|
+
|
|
479
|
+
|
|
480
|
+
def _require_credential(
|
|
481
|
+
credentials: Optional[list[PlaygroundClientCredential]], env_var_name: str, provider_name: str
|
|
482
|
+
) -> str:
|
|
483
|
+
"""Helper function to require a credential value, raising an exception if not found."""
|
|
484
|
+
value = _get_credential_value(credentials, env_var_name)
|
|
485
|
+
if value is None:
|
|
486
|
+
raise BadRequest(f"Missing required credential '{env_var_name}' for {provider_name}")
|
|
487
|
+
return value
|
|
488
|
+
|
|
489
|
+
|
|
456
490
|
@register_llm_client(
|
|
457
491
|
provider_key=GenerativeProviderKey.DEEPSEEK,
|
|
458
492
|
model_names=[
|
|
@@ -465,17 +499,24 @@ class DeepSeekStreamingClient(OpenAIBaseStreamingClient):
|
|
|
465
499
|
def __init__(
|
|
466
500
|
self,
|
|
467
501
|
model: GenerativeModelInput,
|
|
468
|
-
|
|
502
|
+
credentials: Optional[list[PlaygroundClientCredential]] = None,
|
|
469
503
|
) -> None:
|
|
470
504
|
from openai import AsyncOpenAI
|
|
471
505
|
|
|
472
506
|
base_url = model.base_url or getenv("DEEPSEEK_BASE_URL")
|
|
473
|
-
|
|
507
|
+
|
|
508
|
+
# Try to get API key from credentials first, then fallback to env
|
|
509
|
+
api_key = _get_credential_value(credentials, "DEEPSEEK_API_KEY") or getenv(
|
|
510
|
+
"DEEPSEEK_API_KEY"
|
|
511
|
+
)
|
|
512
|
+
|
|
513
|
+
if not api_key:
|
|
474
514
|
if not base_url:
|
|
475
515
|
raise BadRequest("An API key is required for DeepSeek models")
|
|
476
516
|
api_key = "sk-fake-api-key"
|
|
517
|
+
|
|
477
518
|
client = AsyncOpenAI(api_key=api_key, base_url=base_url or "https://api.deepseek.com")
|
|
478
|
-
super().__init__(client=client, model=model,
|
|
519
|
+
super().__init__(client=client, model=model, credentials=credentials)
|
|
479
520
|
# DeepSeek uses OpenAI-compatible API but we'll track it as a separate provider
|
|
480
521
|
# Adding a custom "deepseek" provider value to make it distinguishable in traces
|
|
481
522
|
self._attributes[LLM_PROVIDER] = "deepseek"
|
|
@@ -498,23 +539,65 @@ class XAIStreamingClient(OpenAIBaseStreamingClient):
|
|
|
498
539
|
def __init__(
|
|
499
540
|
self,
|
|
500
541
|
model: GenerativeModelInput,
|
|
501
|
-
|
|
542
|
+
credentials: Optional[list[PlaygroundClientCredential]] = None,
|
|
502
543
|
) -> None:
|
|
503
544
|
from openai import AsyncOpenAI
|
|
504
545
|
|
|
505
546
|
base_url = model.base_url or getenv("XAI_BASE_URL")
|
|
506
|
-
|
|
547
|
+
|
|
548
|
+
# Try to get API key from credentials first, then fallback to env
|
|
549
|
+
api_key = _get_credential_value(credentials, "XAI_API_KEY") or getenv("XAI_API_KEY")
|
|
550
|
+
|
|
551
|
+
if not api_key:
|
|
507
552
|
if not base_url:
|
|
508
553
|
raise BadRequest("An API key is required for xAI models")
|
|
509
554
|
api_key = "sk-fake-api-key"
|
|
555
|
+
|
|
510
556
|
client = AsyncOpenAI(api_key=api_key, base_url=base_url or "https://api.x.ai/v1")
|
|
511
|
-
super().__init__(client=client, model=model,
|
|
557
|
+
super().__init__(client=client, model=model, credentials=credentials)
|
|
512
558
|
# xAI uses OpenAI-compatible API but we'll track it as a separate provider
|
|
513
559
|
# Adding a custom "xai" provider value to make it distinguishable in traces
|
|
514
560
|
self._attributes[LLM_PROVIDER] = "xai"
|
|
515
561
|
self._attributes[LLM_SYSTEM] = OpenInferenceLLMSystemValues.OPENAI.value
|
|
516
562
|
|
|
517
563
|
|
|
564
|
+
@register_llm_client(
|
|
565
|
+
provider_key=GenerativeProviderKey.OLLAMA,
|
|
566
|
+
model_names=[
|
|
567
|
+
PROVIDER_DEFAULT,
|
|
568
|
+
"llama3.3",
|
|
569
|
+
"llama3.2",
|
|
570
|
+
"llama3.1",
|
|
571
|
+
"llama3",
|
|
572
|
+
"llama2",
|
|
573
|
+
"mistral",
|
|
574
|
+
"mixtral",
|
|
575
|
+
"codellama",
|
|
576
|
+
"phi3",
|
|
577
|
+
"qwen2.5",
|
|
578
|
+
"gemma2",
|
|
579
|
+
],
|
|
580
|
+
)
|
|
581
|
+
class OllamaStreamingClient(OpenAIBaseStreamingClient):
|
|
582
|
+
def __init__(
|
|
583
|
+
self,
|
|
584
|
+
model: GenerativeModelInput,
|
|
585
|
+
credentials: Optional[list[PlaygroundClientCredential]] = None,
|
|
586
|
+
) -> None:
|
|
587
|
+
from openai import AsyncOpenAI
|
|
588
|
+
|
|
589
|
+
base_url = model.base_url or getenv("OLLAMA_BASE_URL")
|
|
590
|
+
if not base_url:
|
|
591
|
+
raise BadRequest("An Ollama base URL is required for Ollama models")
|
|
592
|
+
api_key = "ollama"
|
|
593
|
+
client = AsyncOpenAI(api_key=api_key, base_url=base_url)
|
|
594
|
+
super().__init__(client=client, model=model, credentials=credentials)
|
|
595
|
+
# Ollama uses OpenAI-compatible API but we'll track it as a separate provider
|
|
596
|
+
# Adding a custom "ollama" provider value to make it distinguishable in traces
|
|
597
|
+
self._attributes[LLM_PROVIDER] = "ollama"
|
|
598
|
+
self._attributes[LLM_SYSTEM] = OpenInferenceLLMSystemValues.OPENAI.value
|
|
599
|
+
|
|
600
|
+
|
|
518
601
|
@register_llm_client(
|
|
519
602
|
provider_key=GenerativeProviderKey.OPENAI,
|
|
520
603
|
model_names=[
|
|
@@ -550,17 +633,22 @@ class OpenAIStreamingClient(OpenAIBaseStreamingClient):
|
|
|
550
633
|
def __init__(
|
|
551
634
|
self,
|
|
552
635
|
model: GenerativeModelInput,
|
|
553
|
-
|
|
636
|
+
credentials: Optional[list[PlaygroundClientCredential]] = None,
|
|
554
637
|
) -> None:
|
|
555
638
|
from openai import AsyncOpenAI
|
|
556
639
|
|
|
557
640
|
base_url = model.base_url or getenv("OPENAI_BASE_URL")
|
|
558
|
-
|
|
641
|
+
|
|
642
|
+
# Try to get API key from credentials first, then fallback to env
|
|
643
|
+
api_key = _get_credential_value(credentials, "OPENAI_API_KEY") or getenv("OPENAI_API_KEY")
|
|
644
|
+
|
|
645
|
+
if not api_key:
|
|
559
646
|
if not base_url:
|
|
560
647
|
raise BadRequest("An API key is required for OpenAI models")
|
|
561
648
|
api_key = "sk-fake-api-key"
|
|
649
|
+
|
|
562
650
|
client = AsyncOpenAI(api_key=api_key, base_url=base_url)
|
|
563
|
-
super().__init__(client=client, model=model,
|
|
651
|
+
super().__init__(client=client, model=model, credentials=credentials)
|
|
564
652
|
self._attributes[LLM_PROVIDER] = OpenInferenceLLMProviderValues.OPENAI.value
|
|
565
653
|
self._attributes[LLM_SYSTEM] = OpenInferenceLLMSystemValues.OPENAI.value
|
|
566
654
|
|
|
@@ -723,7 +811,7 @@ class AzureOpenAIStreamingClient(OpenAIBaseStreamingClient):
|
|
|
723
811
|
def __init__(
|
|
724
812
|
self,
|
|
725
813
|
model: GenerativeModelInput,
|
|
726
|
-
|
|
814
|
+
credentials: Optional[list[PlaygroundClientCredential]] = None,
|
|
727
815
|
):
|
|
728
816
|
from openai import AsyncAzureOpenAI
|
|
729
817
|
|
|
@@ -731,7 +819,13 @@ class AzureOpenAIStreamingClient(OpenAIBaseStreamingClient):
|
|
|
731
819
|
raise BadRequest("An Azure endpoint is required for Azure OpenAI models")
|
|
732
820
|
if not (api_version := model.api_version or getenv("OPENAI_API_VERSION")):
|
|
733
821
|
raise BadRequest("An OpenAI API version is required for Azure OpenAI models")
|
|
734
|
-
|
|
822
|
+
|
|
823
|
+
# Try to get API key from credentials first, then fallback to env
|
|
824
|
+
api_key = _get_credential_value(credentials, "AZURE_OPENAI_API_KEY") or getenv(
|
|
825
|
+
"AZURE_OPENAI_API_KEY"
|
|
826
|
+
)
|
|
827
|
+
|
|
828
|
+
if api_key:
|
|
735
829
|
client = AsyncAzureOpenAI(
|
|
736
830
|
api_key=api_key,
|
|
737
831
|
azure_endpoint=endpoint,
|
|
@@ -754,7 +848,7 @@ class AzureOpenAIStreamingClient(OpenAIBaseStreamingClient):
|
|
|
754
848
|
azure_endpoint=endpoint,
|
|
755
849
|
api_version=api_version,
|
|
756
850
|
)
|
|
757
|
-
super().__init__(client=client, model=model,
|
|
851
|
+
super().__init__(client=client, model=model, credentials=credentials)
|
|
758
852
|
self._attributes[LLM_PROVIDER] = OpenInferenceLLMProviderValues.AZURE.value
|
|
759
853
|
self._attributes[LLM_SYSTEM] = OpenInferenceLLMSystemValues.OPENAI.value
|
|
760
854
|
|
|
@@ -783,15 +877,22 @@ class AnthropicStreamingClient(PlaygroundStreamingClient):
|
|
|
783
877
|
def __init__(
|
|
784
878
|
self,
|
|
785
879
|
model: GenerativeModelInput,
|
|
786
|
-
|
|
880
|
+
credentials: Optional[list[PlaygroundClientCredential]] = None,
|
|
787
881
|
) -> None:
|
|
788
882
|
import anthropic
|
|
789
883
|
|
|
790
|
-
super().__init__(model=model,
|
|
884
|
+
super().__init__(model=model, credentials=credentials)
|
|
791
885
|
self._attributes[LLM_PROVIDER] = OpenInferenceLLMProviderValues.ANTHROPIC.value
|
|
792
886
|
self._attributes[LLM_SYSTEM] = OpenInferenceLLMSystemValues.ANTHROPIC.value
|
|
793
|
-
|
|
887
|
+
|
|
888
|
+
# Try to get API key from credentials first, then fallback to env
|
|
889
|
+
api_key = _get_credential_value(credentials, "ANTHROPIC_API_KEY") or getenv(
|
|
890
|
+
"ANTHROPIC_API_KEY"
|
|
891
|
+
)
|
|
892
|
+
|
|
893
|
+
if not api_key:
|
|
794
894
|
raise BadRequest("An API key is required for Anthropic models")
|
|
895
|
+
|
|
795
896
|
self.client = anthropic.AsyncAnthropic(api_key=api_key)
|
|
796
897
|
self.model_name = model.name
|
|
797
898
|
self.rate_limiter = PlaygroundRateLimiter(model.provider_key, anthropic.RateLimitError)
|
|
@@ -991,15 +1092,25 @@ class GoogleStreamingClient(PlaygroundStreamingClient):
|
|
|
991
1092
|
def __init__(
|
|
992
1093
|
self,
|
|
993
1094
|
model: GenerativeModelInput,
|
|
994
|
-
|
|
1095
|
+
credentials: Optional[list[PlaygroundClientCredential]] = None,
|
|
995
1096
|
) -> None:
|
|
996
1097
|
import google.generativeai as google_genai
|
|
997
1098
|
|
|
998
|
-
super().__init__(model=model,
|
|
1099
|
+
super().__init__(model=model, credentials=credentials)
|
|
999
1100
|
self._attributes[LLM_PROVIDER] = OpenInferenceLLMProviderValues.GOOGLE.value
|
|
1000
1101
|
self._attributes[LLM_SYSTEM] = OpenInferenceLLMSystemValues.VERTEXAI.value
|
|
1001
|
-
|
|
1102
|
+
|
|
1103
|
+
# Try to get API key from credentials first, then fallback to env
|
|
1104
|
+
api_key = (
|
|
1105
|
+
_get_credential_value(credentials, "GEMINI_API_KEY")
|
|
1106
|
+
or _get_credential_value(credentials, "GOOGLE_API_KEY")
|
|
1107
|
+
or getenv("GEMINI_API_KEY")
|
|
1108
|
+
or getenv("GOOGLE_API_KEY")
|
|
1109
|
+
)
|
|
1110
|
+
|
|
1111
|
+
if not api_key:
|
|
1002
1112
|
raise BadRequest("An API key is required for Gemini models")
|
|
1113
|
+
|
|
1003
1114
|
google_genai.configure(api_key=api_key)
|
|
1004
1115
|
self.model_name = model.name
|
|
1005
1116
|
|
|
@@ -263,10 +263,13 @@ def llm_tools(tools: list[JSONScalarType]) -> Iterator[tuple[str, Any]]:
|
|
|
263
263
|
def input_value_and_mime_type(
    input: Union[ChatCompletionInput, ChatCompletionOverDatasetInput],
) -> Iterator[tuple[str, Any]]:
    """Yield span attributes recording the chat-completion input as JSON.

    Sensitive fields (credentials / API keys) and the verbose invocation
    parameters are stripped before serialization so that they are never
    written into trace data.

    Args:
        input: The chat-completion request to record.

    Yields:
        ``(attribute_name, value)`` pairs: the MIME type marker followed by
        the sanitized, JSON-serialized input payload.
    """
    input_data = jsonify(input)
    # Filter out sensitive credential information and invocation parameters.
    # This comprehension is the actual guarantee that the keys are absent;
    # the previous follow-up `assert ... not in input_data` checks were
    # tautological and would be stripped under `python -O`, so they have
    # been removed.
    disallowed_keys = {"api_key", "credentials", "invocation_parameters"}
    input_data = {k: v for k, v in input_data.items() if k not in disallowed_keys}
    yield INPUT_MIME_TYPE, JSON
    yield INPUT_VALUE, safe_json_dumps(input_data)
|
|
272
275
|
|
|
@@ -340,6 +340,10 @@ class PromptXAIInvocationParametersContent(PromptOpenAIInvocationParametersConte
|
|
|
340
340
|
pass
|
|
341
341
|
|
|
342
342
|
|
|
343
|
+
class PromptOllamaInvocationParametersContent(PromptOpenAIInvocationParametersContent):
    """Invocation-parameter payload for Ollama models.

    Reuses the OpenAI parameter content model unchanged; elsewhere in this
    module Ollama is routed through the same OpenAI-style handling.
    """

    pass
|
|
345
|
+
|
|
346
|
+
|
|
343
347
|
class PromptAzureOpenAIInvocationParameters(DBBaseModel):
|
|
344
348
|
type: Literal["azure_openai"]
|
|
345
349
|
azure_openai: PromptAzureOpenAIInvocationParametersContent
|
|
@@ -355,6 +359,11 @@ class PromptXAIInvocationParameters(DBBaseModel):
|
|
|
355
359
|
xai: PromptXAIInvocationParametersContent
|
|
356
360
|
|
|
357
361
|
|
|
362
|
+
class PromptOllamaInvocationParameters(DBBaseModel):
    """Tagged wrapper for Ollama invocation parameters.

    The literal ``type`` field is the discriminator used by the
    ``PromptInvocationParameters`` annotated union
    (``Field(..., discriminator="type")``).
    """

    # Discriminator value identifying this variant in the tagged union.
    type: Literal["ollama"]
    # Provider-specific parameter payload.
    ollama: PromptOllamaInvocationParametersContent
|
|
365
|
+
|
|
366
|
+
|
|
358
367
|
class PromptAnthropicThinkingConfigDisabled(DBBaseModel):
|
|
359
368
|
type: Literal["disabled"]
|
|
360
369
|
|
|
@@ -411,6 +420,7 @@ PromptInvocationParameters: TypeAlias = Annotated[
|
|
|
411
420
|
PromptGoogleInvocationParameters,
|
|
412
421
|
PromptDeepSeekInvocationParameters,
|
|
413
422
|
PromptXAIInvocationParameters,
|
|
423
|
+
PromptOllamaInvocationParameters,
|
|
414
424
|
],
|
|
415
425
|
Field(..., discriminator="type"),
|
|
416
426
|
]
|
|
@@ -431,6 +441,8 @@ def get_raw_invocation_parameters(
|
|
|
431
441
|
return invocation_parameters.deepseek.model_dump()
|
|
432
442
|
if isinstance(invocation_parameters, PromptXAIInvocationParameters):
|
|
433
443
|
return invocation_parameters.xai.model_dump()
|
|
444
|
+
if isinstance(invocation_parameters, PromptOllamaInvocationParameters):
|
|
445
|
+
return invocation_parameters.ollama.model_dump()
|
|
434
446
|
assert_never(invocation_parameters)
|
|
435
447
|
|
|
436
448
|
|
|
@@ -446,6 +458,7 @@ def is_prompt_invocation_parameters(
|
|
|
446
458
|
PromptGoogleInvocationParameters,
|
|
447
459
|
PromptDeepSeekInvocationParameters,
|
|
448
460
|
PromptXAIInvocationParameters,
|
|
461
|
+
PromptOllamaInvocationParameters,
|
|
449
462
|
),
|
|
450
463
|
)
|
|
451
464
|
|
|
@@ -494,6 +507,11 @@ def validate_invocation_parameters(
|
|
|
494
507
|
type="xai",
|
|
495
508
|
xai=PromptXAIInvocationParametersContent.model_validate(invocation_parameters),
|
|
496
509
|
)
|
|
510
|
+
elif model_provider is ModelProvider.OLLAMA:
|
|
511
|
+
return PromptOllamaInvocationParameters(
|
|
512
|
+
type="ollama",
|
|
513
|
+
ollama=PromptOllamaInvocationParametersContent.model_validate(invocation_parameters),
|
|
514
|
+
)
|
|
497
515
|
assert_never(model_provider)
|
|
498
516
|
|
|
499
517
|
|
|
@@ -508,6 +526,7 @@ def normalize_tools(
|
|
|
508
526
|
or model_provider is ModelProvider.AZURE_OPENAI
|
|
509
527
|
or model_provider is ModelProvider.DEEPSEEK
|
|
510
528
|
or model_provider is ModelProvider.XAI
|
|
529
|
+
or model_provider is ModelProvider.OLLAMA
|
|
511
530
|
):
|
|
512
531
|
openai_tools = [OpenAIToolDefinition.model_validate(schema) for schema in schemas]
|
|
513
532
|
tools = [_openai_to_prompt_tool(openai_tool) for openai_tool in openai_tools]
|
|
@@ -523,6 +542,7 @@ def normalize_tools(
|
|
|
523
542
|
or model_provider is ModelProvider.AZURE_OPENAI
|
|
524
543
|
or model_provider is ModelProvider.DEEPSEEK
|
|
525
544
|
or model_provider is ModelProvider.XAI
|
|
545
|
+
or model_provider is ModelProvider.OLLAMA
|
|
526
546
|
):
|
|
527
547
|
ans.tool_choice = OpenAIToolChoiceConversion.from_openai(tool_choice) # type: ignore[arg-type]
|
|
528
548
|
elif model_provider is ModelProvider.ANTHROPIC:
|
|
@@ -546,6 +566,7 @@ def denormalize_tools(
|
|
|
546
566
|
or model_provider is ModelProvider.AZURE_OPENAI
|
|
547
567
|
or model_provider is ModelProvider.DEEPSEEK
|
|
548
568
|
or model_provider is ModelProvider.XAI
|
|
569
|
+
or model_provider is ModelProvider.OLLAMA
|
|
549
570
|
):
|
|
550
571
|
denormalized_tools = [_prompt_to_openai_tool(tool) for tool in tools.tools]
|
|
551
572
|
if tools.tool_choice:
|
|
@@ -8,6 +8,7 @@ from strawberry.scalars import JSON
|
|
|
8
8
|
from phoenix.server.api.helpers.prompts.models import (
|
|
9
9
|
PromptTemplateFormat,
|
|
10
10
|
)
|
|
11
|
+
from phoenix.server.api.input_types.GenerativeCredentialInput import GenerativeCredentialInput
|
|
11
12
|
from phoenix.server.api.types.Identifier import Identifier
|
|
12
13
|
|
|
13
14
|
from .ChatCompletionMessageInput import ChatCompletionMessageInput
|
|
@@ -22,7 +23,7 @@ class ChatCompletionInput:
|
|
|
22
23
|
model: GenerativeModelInput
|
|
23
24
|
invocation_parameters: list[InvocationParameterInput] = strawberry.field(default_factory=list)
|
|
24
25
|
tools: Optional[list[JSON]] = UNSET
|
|
25
|
-
|
|
26
|
+
credentials: Optional[list[GenerativeCredentialInput]] = UNSET
|
|
26
27
|
template: Optional[PromptTemplateOptions] = UNSET
|
|
27
28
|
prompt_name: Optional[Identifier] = None
|
|
28
29
|
|
|
@@ -33,7 +34,7 @@ class ChatCompletionOverDatasetInput:
|
|
|
33
34
|
model: GenerativeModelInput
|
|
34
35
|
invocation_parameters: list[InvocationParameterInput] = strawberry.field(default_factory=list)
|
|
35
36
|
tools: Optional[list[JSON]] = UNSET
|
|
36
|
-
|
|
37
|
+
credentials: Optional[list[GenerativeCredentialInput]] = UNSET
|
|
37
38
|
template_format: PromptTemplateFormat = PromptTemplateFormat.MUSTACHE
|
|
38
39
|
dataset_id: GlobalID
|
|
39
40
|
dataset_version_id: Optional[GlobalID] = None
|