arize-phoenix 3.4.0__py3-none-any.whl → 3.5.0__py3-none-any.whl

This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of arize-phoenix has been flagged as potentially problematic.

Files changed (39)
  1. {arize_phoenix-3.4.0.dist-info → arize_phoenix-3.5.0.dist-info}/METADATA +31 -31
  2. {arize_phoenix-3.4.0.dist-info → arize_phoenix-3.5.0.dist-info}/RECORD +39 -38
  3. phoenix/core/evals.py +9 -9
  4. phoenix/core/model.py +18 -18
  5. phoenix/core/model_schema.py +22 -44
  6. phoenix/experimental/evals/functions/executor.py +1 -2
  7. phoenix/experimental/evals/functions/processing.py +33 -1
  8. phoenix/experimental/evals/models/base.py +4 -8
  9. phoenix/experimental/evals/models/litellm.py +1 -1
  10. phoenix/experimental/evals/models/rate_limiters.py +1 -2
  11. phoenix/metrics/__init__.py +2 -4
  12. phoenix/metrics/binning.py +3 -6
  13. phoenix/metrics/mixins.py +1 -0
  14. phoenix/metrics/wrappers.py +1 -0
  15. phoenix/pointcloud/pointcloud.py +2 -4
  16. phoenix/server/api/input_types/SpanSort.py +1 -2
  17. phoenix/server/api/interceptor.py +1 -2
  18. phoenix/server/api/routers/trace_handler.py +1 -2
  19. phoenix/server/api/schema.py +20 -3
  20. phoenix/server/api/types/Project.py +72 -0
  21. phoenix/server/api/types/Segments.py +2 -4
  22. phoenix/server/api/types/Span.py +18 -0
  23. phoenix/server/app.py +4 -0
  24. phoenix/server/main.py +35 -2
  25. phoenix/server/static/index.js +534 -494
  26. phoenix/server/templates/index.html +2 -1
  27. phoenix/session/data_extractor.py +2 -4
  28. phoenix/session/evaluation.py +1 -0
  29. phoenix/trace/dsl/filter.py +1 -2
  30. phoenix/trace/dsl/helpers.py +3 -2
  31. phoenix/trace/dsl/query.py +3 -7
  32. phoenix/trace/langchain/tracer.py +1 -0
  33. phoenix/trace/span_evaluations.py +1 -2
  34. phoenix/trace/span_json_encoder.py +13 -3
  35. phoenix/trace/tracer.py +2 -2
  36. phoenix/version.py +1 -1
  37. {arize_phoenix-3.4.0.dist-info → arize_phoenix-3.5.0.dist-info}/WHEEL +0 -0
  38. {arize_phoenix-3.4.0.dist-info → arize_phoenix-3.5.0.dist-info}/licenses/IP_NOTICE +0 -0
  39. {arize_phoenix-3.4.0.dist-info → arize_phoenix-3.5.0.dist-info}/licenses/LICENSE +0 -0
{arize_phoenix-3.4.0.dist-info → arize_phoenix-3.5.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: arize-phoenix
- Version: 3.4.0
+ Version: 3.5.0
  Summary: ML Observability in your notebook
  Project-URL: Documentation, https://docs.arize.com/phoenix/
  Project-URL: Issues, https://github.com/Arize-ai/phoenix/issues
@@ -59,7 +59,7 @@ Requires-Dist: pytest-asyncio; extra == 'dev'
  Requires-Dist: pytest-cov; extra == 'dev'
  Requires-Dist: pytest-lazy-fixture; extra == 'dev'
  Requires-Dist: pytest==7.4.4; extra == 'dev'
- Requires-Dist: ruff==0.1.5; extra == 'dev'
+ Requires-Dist: ruff==0.3.0; extra == 'dev'
  Requires-Dist: strawberry-graphql[debug-server]==0.208.2; extra == 'dev'
  Provides-Extra: evals
  Requires-Dist: arize-phoenix-evals>=0.0.3; extra == 'evals'
@@ -134,14 +134,11 @@ Phoenix provides MLOps and LLMOps insights at lightning speed with zero-config o
  Install Phoenix via `pip` or or `conda` as well as any of its subpackages.

  ```shell
- pip install arize-phoenix
+ pip install arize-phoenix[evals]
  ```

- Some functionality such as LLM evals are under the `experimental` subpackage.
-
- ```shell
- pip install arize-phoenix[experimental]
- ```
+ > [!NOTE]
+ > The above will install Phoenix and its `evals` subpackage. To just install phoenix's evaluation package, you can run `pip install arize-phoenix-evals` instead.

  ## LLM Traces

@@ -159,39 +156,44 @@ To extract traces from your LlamaIndex application, you will have to add Phoenix

  ```shell
  # Install phoenix as well as llama_index and your LLM of choice
- pip install arize-phoenix llama-index openai
-
+ pip install "arize-phoenix[evals]" "openai>=1" "llama-index>=0.10.3" "openinference-instrumentation-llama-index>=1.0.0" "llama-index-callbacks-arize-phoenix>=0.1.2" llama-index-llms-openai
  ```

  Launch Phoenix in a notebook and view the traces of your LlamaIndex application in the Phoenix UI.

  ```python
+ import os
  import phoenix as px
+ from llama_index.core import (
+ Settings,
+ VectorStoreIndex,
+ SimpleDirectoryReader,
+ set_global_handler,
+ )
+ from llama_index.embeddings.openai import OpenAIEmbedding
+ from llama_index.llms.openai import OpenAI
+
+ os.environ["OPENAI_API_KEY"] = "YOUR_OPENAI_API_KEY"

  # To view traces in Phoenix, you will first have to start a Phoenix server. You can do this by running the following:
  session = px.launch_app()


- # Once you have started a Phoenix server, you can start your LlamaIndex application with the `OpenInferenceTraceCallback` as a callback. To do this, you will have to add the callback to the initialization of your LlamaIndex application:
+ # Once you have started a Phoenix server, you can start your LlamaIndex application and configure it to send traces to Phoenix. To do this, you will have to add configure Phoenix as the global handler

- from phoenix.trace.llama_index import (
- OpenInferenceTraceCallbackHandler,
- )
+ set_global_handler("arize_phoenix")

- # Initialize the callback handler
- callback_handler = OpenInferenceTraceCallbackHandler()

  # LlamaIndex application initialization may vary
  # depending on your application
- service_context = ServiceContext.from_defaults(
- llm_predictor=LLMPredictor(llm=ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0)),
- embed_model=OpenAIEmbedding(model="text-embedding-ada-002"),
- callback_manager=CallbackManager(handlers=[callback_handler]),
- )
- index = load_index_from_storage(
- storage_context,
- service_context=service_context,
- )
+ Settings.llm = OpenAI(model="gpt-4-turbo-preview")
+ Settings.embed_model = OpenAIEmbedding(model="text-embedding-ada-002")
+
+
+ # Load your data and create an index. Note you usually want to store your index in a persistent store like a database or the file system
+ documents = SimpleDirectoryReader("YOUR_DATA_DIRECTORY").load_data()
+ index = VectorStoreIndex.from_documents(documents)
+
  query_engine = index.as_query_engine()

  # Query your LlamaIndex application
@@ -267,8 +269,6 @@ session.url

  ## LLM Evals

- 🚧 LLM Evals is still under construction under a sub-module `arize-phoenix[experimental]`
-
  [![Open in Colab](https://img.shields.io/static/v1?message=Open%20in%20Colab&logo=googlecolab&labelColor=grey&color=blue&logoColor=orange&label=%20)](https://colab.research.google.com/github/Arize-ai/phoenix/blob/main/tutorials/evals/evaluate_relevance_classifications.ipynb) [![Open in GitHub](https://img.shields.io/static/v1?message=Open%20in%20GitHub&logo=github&labelColor=grey&color=blue&logoColor=white&label=%20)](https://github.com/Arize-ai/phoenix/blob/main/tutorials/evals/evaluate_relevance_classifications.ipynb)

  Phoenix provides tooling to evaluate LLM applications, including tools to determine the relevance or irrelevance of documents retrieved by retrieval-augmented generation (RAG) application, whether or not the response is toxic, and much more.
@@ -283,12 +283,12 @@ Phoenix's approach to LLM evals is notable for the following reasons:
  Here is an example of running the RAG relevance eval on a dataset of Wikipedia questions and answers:

  ```shell
- # Install phoenix as well as the experimental subpackage
- pip install arize-phoenix[experimental] ipython matplotlib openai pycm scikit-learn
+ # Install phoenix as well as the evals subpackage
+ pip install 'arize-phoenix[evals]' ipython matplotlib openai pycm scikit-learn
  ```

  ```python
- from phoenix.experimental.evals import (
+ from phoenix.evals import (
  RAG_RELEVANCY_PROMPT_TEMPLATE,
  RAG_RELEVANCY_PROMPT_RAILS_MAP,
  OpenAIModel,
@@ -324,7 +324,7 @@ y_pred = df["eval_relevance"]
  precision, recall, f1, support = precision_recall_fscore_support(y_true, y_pred)
  ```

- To learn more about LLM Evals, see the [LLM Evals documentation](https://docs.arize.com/phoenix/concepts/llm-evals/).
+ To learn more about LLM Evals, see the [Evals documentation](https://docs.arize.com/phoenix/concepts/llm-evals/).

  ## Embedding Analysis

{arize_phoenix-3.4.0.dist-info → arize_phoenix-3.5.0.dist-info}/RECORD CHANGED
@@ -4,12 +4,12 @@ phoenix/datetime_utils.py,sha256=D955QLrkgrrSdUM6NyqbCeAu2SMsjhR5rHVQEsVUdng,277
  phoenix/exceptions.py,sha256=X5k9ipUDfwSCwZB-H5zFJLas86Gf9tAx0W4l5TZxp5k,108
  phoenix/py.typed,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
  phoenix/services.py,sha256=f6AeyKTuOpy9RCcTCjVH3gx5nYZhbTMFOuv1WSUOB5o,4992
- phoenix/version.py,sha256=SaDAYHfdUx0zZwEMJs8CthhSY2X4I0doARTPgX2vOwM,22
+ phoenix/version.py,sha256=j0HWOe68EJbDKQHKURp6LgON8HyoWHbKmvvCvTB1OzA,22
  phoenix/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  phoenix/core/embedding_dimension.py,sha256=zKGbcvwOXgLf-yrJBpQyKtd-LEOPRKHnUToyAU8Owis,87
- phoenix/core/evals.py,sha256=gJyqQzpud5YjtoY8h4pgXvHDsdubGfqmEewLuZHPPmQ,10224
- phoenix/core/model.py,sha256=vQ6RxpUPlncezJvur5u6xBN0Lkrk2gW0cTyb-qqaSqA,4713
- phoenix/core/model_schema.py,sha256=rR9VdhL_oXxbprDTPQJBXs5hw5sMPQmzx__m6Kwsxug,50394
+ phoenix/core/evals.py,sha256=s2f1nTF3apQz5LRM6dJMo9-jmbxZs8jltgtBRkU8eeU,10230
+ phoenix/core/model.py,sha256=C-kDATyJEgP-oqYVKOiQM76Ljs66F6VZdT93_b8kTGk,4725
+ phoenix/core/model_schema.py,sha256=lQaTvKS34yurHOJ53YD020uURLfgG3dqKC1NLQftOjA,50222
  phoenix/core/model_schema_adapter.py,sha256=3GkyzqUST4fYi-Bgs8qAam5hwMCdQRZTDLjZ9Bnzdm4,8268
  phoenix/core/traces.py,sha256=IHaRP-SK0iKuGpq8hpt-0fio3LhqjTmvtfPzo_X2tS8,13798
  phoenix/datasets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -24,16 +24,16 @@ phoenix/experimental/evals/evaluators.py,sha256=r7fXrS-l4gn58SUhLAZSfY3P8lxysouS
  phoenix/experimental/evals/retrievals.py,sha256=o3fqrsYbYZjyGj_jWkN_9VQVyXjLkDKDw5Ws7l8bwdI,3828
  phoenix/experimental/evals/functions/__init__.py,sha256=NNd0-_cmIopdV7vm3rspjfgM726qoQJ4DPq_vqbnaxQ,180
  phoenix/experimental/evals/functions/classify.py,sha256=rIQLf0GRXPNT2d5Oni0lrpFrubBorAscjN0ievjWx9A,19092
- phoenix/experimental/evals/functions/executor.py,sha256=bM7PI2rcPukQQzZ2rWqN_-Kfo_a935YJj0bh1Red8Ps,13406
+ phoenix/experimental/evals/functions/executor.py,sha256=vPvvR7XTqPqWBqnA_ht2oOESKVJHcqIjOtSA09LLFrE,13398
  phoenix/experimental/evals/functions/generate.py,sha256=8LnnPAjBM9yxitdkaGZ67OabuDTOWBF3fvinJ_uCFRg,5584
- phoenix/experimental/evals/functions/processing.py,sha256=F4xtLsulLV4a8CkuLldRddsCim75dSTIShEJUYN6I6w,1823
+ phoenix/experimental/evals/functions/processing.py,sha256=xyFHYVmYq2lphdMvUaW_dRFi15NuqtAdAVg1G1dQb38,2960
  phoenix/experimental/evals/models/__init__.py,sha256=j1N7DhiOPbcaemtVBONcQ0miNnGQwEXz4u3P3Vwe6-4,320
  phoenix/experimental/evals/models/anthropic.py,sha256=BZmLvepkSMj_opCWsZoL34a3yAwRdl7qbJB86DFR84E,6688
- phoenix/experimental/evals/models/base.py,sha256=RWz_Jzj3Z1fENl2WUXIz-4eMsk6HfYXc0K8IZ-BJss4,6306
+ phoenix/experimental/evals/models/base.py,sha256=qOX36f4H1PA7YW3fckUsoh4apSz3bzURVtqlXw6Xcqo,6274
  phoenix/experimental/evals/models/bedrock.py,sha256=nVOXRZr-iDwHEINozpO2bqZR2KEeDHNyj6jgQPONQYs,8565
- phoenix/experimental/evals/models/litellm.py,sha256=Xo415fJehxIj32zpzVtvzQXADJURPUvNZJHmc_FAKvE,4759
+ phoenix/experimental/evals/models/litellm.py,sha256=YHZvwBHo4Y0_b589gjtwPN8N7sYgyIm9Hy5nnBiUtzA,4788
  phoenix/experimental/evals/models/openai.py,sha256=v2qkdFZc-u0ZAfxnV4v5UMWkMc9P3k4Gx1XaTnVFTz4,17922
- phoenix/experimental/evals/models/rate_limiters.py,sha256=5GVN0RQKt36Przg3-9jLgocRmyg-tbeO-cdbuLIx89w,10160
+ phoenix/experimental/evals/models/rate_limiters.py,sha256=L8xf7mc2d6axmWNjg2wwWIHWeTZ-tiI8m1GoBNFfJd0,10156
  phoenix/experimental/evals/models/vertex.py,sha256=3kj0tdwyCcgs39x1XnMgFoSbZwXvvBAPL7AwHYe-qIE,6236
  phoenix/experimental/evals/models/vertexai.py,sha256=nFN5isv1GPNsvUA4OxSnQd1hTMi-3BcxfDz1y7QcoA0,6189
  phoenix/experimental/evals/templates/__init__.py,sha256=GSJSoWJ4jwyoUANniidmWMUtXQhNQYbTJbfFqCvuYuo,1470
@@ -42,27 +42,27 @@ phoenix/experimental/evals/templates/template.py,sha256=ImFSaTPo9oalPNwq7cNdOCnd
  phoenix/experimental/evals/utils/__init__.py,sha256=FE1tbb022t3pwn0CrprR7QXcIsgpsdsotV7-iB3khsI,5601
  phoenix/experimental/evals/utils/threads.py,sha256=ksI-egarPnlxit0qKKjtjZ2L82qGLxqxZ6s92O0eBA4,1005
  phoenix/metrics/README.md,sha256=5gekqTU-5gGdMwvcfNp2Wlu8p1ul9kGY_jq0XXQusoI,1964
- phoenix/metrics/__init__.py,sha256=sLp7td1GIt_0Z8dPUyP4L0-_4x9c871yAaGX30oMsvg,2433
- phoenix/metrics/binning.py,sha256=MWI0jle3qj2Rk2wFu0vN27TKvbyX2Q0ZpXiil8iNOko,12763
+ phoenix/metrics/__init__.py,sha256=W8lVORvjBo66pFgUmU9P8Fi8i4yI75wOPkhU42sfeQU,2417
+ phoenix/metrics/binning.py,sha256=e20P5W5ineqDtfyVO0KDmwI2KY9As31pDhJdjAofNIo,12739
  phoenix/metrics/metrics.py,sha256=7SfkDmSnpzGATtBXlYHb42r-2BfV8ELMcMgjdw3c8yA,7907
- phoenix/metrics/mixins.py,sha256=O-U7q3VCTlULkAcdsLf_r68phvoTE_va4UFnez9i4h8,7439
+ phoenix/metrics/mixins.py,sha256=moZ5hENIKzUQt2IRhWOd5EFXnoqQkVrpqEqMH7KQzyA,7440
  phoenix/metrics/retrieval_metrics.py,sha256=XFQPo66h16w7-1AJ92M1VL_BUIXIWxXHGKF_QVOABZI,4384
  phoenix/metrics/timeseries.py,sha256=Cib3E0njJzi0vZpmyADvbakFQA98rIkfDaYAOmsmBz8,6277
- phoenix/metrics/wrappers.py,sha256=g3rDzQt_zI_S_VfaZnrzUxhPSbGQ72UunmIGKYf1M_E,8391
+ phoenix/metrics/wrappers.py,sha256=umZqa_5lf1wZSFe3FgzxF-qp1xbPdKD54W628GlGCUI,8392
  phoenix/pointcloud/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
  phoenix/pointcloud/clustering.py,sha256=IzcG67kJ2hPP7pcqVmKPSL_6gKRonKdOT3bCtbTOqnk,820
- phoenix/pointcloud/pointcloud.py,sha256=ms-h1FLC0xXb3sk256zpSuZQDE2hdOAJzRNBklP0tDg,2161
+ phoenix/pointcloud/pointcloud.py,sha256=4zAIkKs2xOUbchpj4XDAV-iPMXrfAJ15TG6rlIYGrao,2145
  phoenix/pointcloud/projectors.py,sha256=zO_RrtDYSv2rqVOfIP2_9Cv11Dc8EmcZR94xhFcBYPU,1057
  phoenix/pointcloud/umap_parameters.py,sha256=lJsEOrbSuSiqI7g4Yt6xj7kgYxEqoep4ZHWLr6VWBqw,1760
  phoenix/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- phoenix/server/app.py,sha256=wsW8kYMdnso9wsr_YW9YJXRtaEvlxaXImYsVs3kvUIA,6801
- phoenix/server/main.py,sha256=i2CrYj4PJoJaaMdgX471Lgr4JB4I2jDhjMbUAQP38Dw,7470
+ phoenix/server/app.py,sha256=vQ694jL76f-6Nv1nn862ur2C668pw65dLKZ3mFLqOYo,7034
+ phoenix/server/main.py,sha256=CPRsifuFTN1XBEK2OAELF_jlXOZPlOj01sbFhZ0tEiU,9006
  phoenix/server/thread_server.py,sha256=dP6cm6Cf08jNhDA1TRlVZpziu1YgtPDmaeIJMm725eI,2154
  phoenix/server/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  phoenix/server/api/context.py,sha256=02vRgyLFpDCmh97QwsjWD5cdNZkoCUtDPPs1YItbdbI,583
  phoenix/server/api/helpers.py,sha256=_V1eVkchZmTkhOfRC4QqR1sUB2xtIxdsMJkDouZq_IE,251
- phoenix/server/api/interceptor.py,sha256=do_J4HjPPQ_C7bMmqe1YpTmt_hoxcwC2I8P3n5sZBo4,1302
- phoenix/server/api/schema.py,sha256=lEahYCASRgRTw6nOme7zQtyKaVbHqK5CQUbg5XTT5nU,15293
+ phoenix/server/api/interceptor.py,sha256=ykDnoC_apUd-llVli3m1CW18kNSIgjz2qZ6m5JmPDu8,1294
+ phoenix/server/api/schema.py,sha256=jJfNV662qDGg11WUzd4ePNraug9HTKlFLUBaLqo-UIk,16004
  phoenix/server/api/input_types/ClusterInput.py,sha256=EL4ftvZxQ8mVdruUPcdhMhByORmSmM8S-X6RPqU6GX0,179
  phoenix/server/api/input_types/Coordinates.py,sha256=meTwbIjwTfqx5DGD2DBlH9wQzdQVNM5a8x9dp1FfIgA,173
  phoenix/server/api/input_types/DataQualityMetricInput.py,sha256=LazvmQCCM5m9SDZTpyxQXO1rYF4cmsc3lsR2S9S65X4,1292
@@ -70,13 +70,13 @@ phoenix/server/api/input_types/DimensionFilter.py,sha256=vcXgglSnZcB5pGh-6oEtRmG
  phoenix/server/api/input_types/DimensionInput.py,sha256=Vfx5FmiMKey4-EHDQsQRPzSAMRJMN5oVMLDUl4NKAa8,164
  phoenix/server/api/input_types/Granularity.py,sha256=6SVfZ5yTZYq1PI6vdpjfkBUc4YilLSkF-k6okuSNbbQ,2301
  phoenix/server/api/input_types/PerformanceMetricInput.py,sha256=fElsLTSEYYgGFGMYTEGcYid39tXUKFdV_JkdHavMcbA,591
- phoenix/server/api/input_types/SpanSort.py,sha256=72oWhYLLIZLy7K5_tFhMg7RcmDgatqMB-ZoRJhHGIR0,3527
+ phoenix/server/api/input_types/SpanSort.py,sha256=frSxKIo8eYI_9D2M2zKV_z25IwLbKdK0Gj8JmDldU2k,3519
  phoenix/server/api/input_types/TimeRange.py,sha256=yzx-gxj8mDeGLft1FzU_x1MVEgIG5Pt6-f8PUVDgipQ,522
  phoenix/server/api/input_types/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  phoenix/server/api/routers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  phoenix/server/api/routers/evaluation_handler.py,sha256=1Wg7cajuLBQ4g0ZzrahOkTlYQSgnzUx7j9xeIuVP7Tc,3524
  phoenix/server/api/routers/span_handler.py,sha256=UEgeTj9b4KBsq7qUvcGKUlZwZtH7iODjV1NpGZKdr8k,3213
- phoenix/server/api/routers/trace_handler.py,sha256=pXanrp9L21Mh7MnyJbj202NJ-Rn4bCjG0oL4DtdKcls,2074
+ phoenix/server/api/routers/trace_handler.py,sha256=XBN5u7CqVInitrCalBCVfDMzByBABOKN1FmYy9fznWw,2066
  phoenix/server/api/routers/utils.py,sha256=M41BoH-fl37izhRuN2aX7lWm7jOC20A_3uClv9TVUUY,583
  phoenix/server/api/types/Cluster.py,sha256=R08ZKrLl1KK8colxHU57N5XIOTMUwg5ZI50ofPoxxSM,5618
  phoenix/server/api/types/DataQualityMetric.py,sha256=zRKsNvHBu-NdcsunuLhqFpZhi6ks-HMqA1PJD27jTak,590
@@ -104,12 +104,13 @@ phoenix/server/api/types/MimeType.py,sha256=VIUQBqveSctiRFStJjDUdZ85TeKhwNNEosMC
  phoenix/server/api/types/Model.py,sha256=IFhO2nouLQoMPNhl8vnm7nEXXSpr14CL3ihw8RnVWNY,7826
  phoenix/server/api/types/NumericRange.py,sha256=afEjgF97Go_OvmjMggbPBt-zGM8IONewAyEiKEHRds0,192
  phoenix/server/api/types/PerformanceMetric.py,sha256=W92B7OghEOgzFvmY0LCqpgavHaQggTGshdgfD0yqHX4,350
+ phoenix/server/api/types/Project.py,sha256=NoW9hda7lFO_xvA5wQe6LAh2v4xmZLzYFtwCfNVkoGM,1795
  phoenix/server/api/types/PromptResponse.py,sha256=Q8HKtpp8GpUOcxPCzZpkkokidDd6u0aZOv_SuPZZd5Q,630
  phoenix/server/api/types/Retrieval.py,sha256=OhMK2ncjoyp5h1yjKhjlKpoTbQrMHuxmgSFw-AO1rWw,285
  phoenix/server/api/types/ScalarDriftMetricEnum.py,sha256=IUAcRPpgL41WdoIgK6cNk2Te38SspXGyEs-S1fY23_A,232
- phoenix/server/api/types/Segments.py,sha256=zogJI9MdmctBL7J-fDSR_8tUJLvuISlVYgCLnTaigKE,2937
+ phoenix/server/api/types/Segments.py,sha256=B6UUWjalZONjWjl_l61A6USPSu15ICXRgzZ4m3vA1yw,2921
  phoenix/server/api/types/SortDir.py,sha256=OUpXhlCzCxPoXSDkJJygEs9Rw9pMymfaZUG5zPTrw4Y,152
- phoenix/server/api/types/Span.py,sha256=ao8LusgrdXVLc39jGtrL-9RuaUbPOzI98EsQNb6xpiE,11610
+ phoenix/server/api/types/Span.py,sha256=yAN0H3L1GXgVZJDrF5td0q9gbxlLH7WZ7JAKIu3NKTA,12133
  phoenix/server/api/types/TimeSeries.py,sha256=QbLfxHnwYsMsirpq4tx9us6ha7YtAVzK4m8mAL3fMt0,5200
  phoenix/server/api/types/UMAPPoints.py,sha256=8l9RJXi308qty4MdHb2pBbiU6ZuLbrRRxXNbPhXoxKI,1639
  phoenix/server/api/types/ValidationResult.py,sha256=pHwdYk4J7SJ5xhlWWHg_6qWkfk4rjOx-bSkGHvkDE3Q,142
@@ -127,14 +128,14 @@ phoenix/server/static/apple-touch-icon-76x76.png,sha256=CT_xT12I0u2i0WU8JzBZBuOQ
  phoenix/server/static/apple-touch-icon.png,sha256=fOfpjqGpWYbJ0eAurKsyoZP1EAs6ZVooBJ_SGk2ZkDs,3801
  phoenix/server/static/favicon.ico,sha256=bY0vvCKRftemZfPShwZtE93DiiQdaYaozkPGwNFr6H8,34494
  phoenix/server/static/index.css,sha256=KKGpx4iwF91VGRm0YN-4cn8oC-oIqC6HecoPf0x3ZM8,1885
- phoenix/server/static/index.js,sha256=73BthyJKaIMt5o5VThZoX7kGJclvLdghL9Dn80wEdUE,3144672
+ phoenix/server/static/index.js,sha256=qGACFG1xujCW8-ybbf5J29rlhb2sdeuCwawuCqKUgyA,3149880
  phoenix/server/static/modernizr.js,sha256=mvK-XtkNqjOral-QvzoqsyOMECXIMu5BQwSVN_wcU9c,2564
  phoenix/server/templates/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- phoenix/server/templates/index.html,sha256=DlfcGoq1V5C2QkJWqP1j4Nu6_kPfsOzOrtzYF3ogghE,1900
+ phoenix/server/templates/index.html,sha256=lO2wGA5XsftPg03rw_VcyaYf_4vegtlWbIT5ms4fA_c,1982
  phoenix/session/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  phoenix/session/client.py,sha256=KlWa2tg-QgKObzVVQcWFcOd-y2C0meih8joKC1yDmZA,5397
- phoenix/session/data_extractor.py,sha256=V4ntT2FcmbV_-zobcbPR51HKWaDyMnXHj4Pmu7MZ0OQ,1664
- phoenix/session/evaluation.py,sha256=m_vufbzeDBnoG2O_TWRj48eljA0UC_rPlqjVWluw2J4,5443
+ phoenix/session/data_extractor.py,sha256=_oWYn2KRXffmcdWmTM5pDmIACAUi06WblIx-jjp0ssM,1648
+ phoenix/session/evaluation.py,sha256=YCv1XkWHi7vM_W5V7rorrrAxadv78wuMPeCVJvf5-oE,5444
  phoenix/session/session.py,sha256=R83kPeiqLxEJTD6cTjJqqMjMV-iQzQFKw6rMXnbIxnc,21800
  phoenix/trace/__init__.py,sha256=4d_MqzUIFmlY9WWcFeTONJ4xL5mPGoWZaPM2TJ0ZDBQ,266
  phoenix/trace/errors.py,sha256=wB1z8qdPckngdfU-TORToekvg3344oNFAA83_hC2yFY,180
@@ -143,20 +144,20 @@ phoenix/trace/exporter.py,sha256=O-9Arn-S_B9Me-jy4Qa84y6lvxKNoa8pczrsamPl3_Q,487
  phoenix/trace/fixtures.py,sha256=HOjuYOB_xtR7JUeLz7WpEroiGj4E5_SxVLSjBYUy8RQ,7055
  phoenix/trace/otel.py,sha256=mpuj_eOdBUPAcslpxk8XZflb9kkzpMJ8X9acJS8ACPA,15322
  phoenix/trace/schemas.py,sha256=zlAY8v-PLgEDqTgbmDxY3NJlCAmzj_3ZCJFebQOBz3M,6028
- phoenix/trace/span_evaluations.py,sha256=t3EL_3kTULlwMO67ZpUqJvTNr5_y_LRHfFMxdDhxMB8,12958
+ phoenix/trace/span_evaluations.py,sha256=T67grfU71iANMM0g6dp2OQ_ZjtkON2viZz95_U9FLQA,12954
  phoenix/trace/span_json_decoder.py,sha256=IAFakPRqSMYxTPKYFMiXYxm7U-FipdN8_xbvapDS0Qc,3131
- phoenix/trace/span_json_encoder.py,sha256=hIDd1I6xm01kaNmeKjHOHyxUGI3uTg5J_Os1kXtAb6g,1755
+ phoenix/trace/span_json_encoder.py,sha256=rBuBe7pH8wbcKmXmPWVyeAG92WEpHN015kGiMom4dNU,2025
  phoenix/trace/trace_dataset.py,sha256=RpHIfZLbMmULOIb-fKXJkQLhIdC0sJlAOTjlyJppMYA,13776
- phoenix/trace/tracer.py,sha256=JDKlyvjy6AsQmaA60ycJ1hKXoUQU61jqPx3nvYr8xUc,3647
+ phoenix/trace/tracer.py,sha256=9V3eFwKd-eAyMZ9ZzX8HyrGZEVlWx0Ni-_v1LG3J1kY,3640
  phoenix/trace/utils.py,sha256=7LurVGXn245cjj4MJsc7v6jq4DSJkpK6YGBfIaSywuw,1307
  phoenix/trace/dsl/__init__.py,sha256=WIQIjJg362XD3s50OsPJJ0xbDsGp41bSv7vDllLrPuA,144
- phoenix/trace/dsl/filter.py,sha256=fRPUIsucxXGJcbtei86ApSIjEP5PmdsONmYvT7EBu6w,14240
- phoenix/trace/dsl/helpers.py,sha256=U71HmCecJQ_zHq0g2mFKbxrRuiJDkopskWMpFoBNP-Y,1722
+ phoenix/trace/dsl/filter.py,sha256=QPSupyOCQhVb3xmBL07OKkL8uXbVmN7cRwcBtpniiBU,14232
+ phoenix/trace/dsl/helpers.py,sha256=CP6WaJpP7_WdI1Yoip7tDWcYDXZRg6xgNBRS2gojjMc,1728
  phoenix/trace/dsl/missing.py,sha256=BWPOHr2_tBkPDgVeq8GVXXVbNbJiBelu4NtwHBg6mTE,1435
- phoenix/trace/dsl/query.py,sha256=XoFwKEALzGqUERy7B5fgD-n0s87zN6jRVrZgW6-jqRo,14819
+ phoenix/trace/dsl/query.py,sha256=k0guhWBEo6L7ZJH5FJs2-iGSnWXdUUqu09gd-8M4CGg,14783
  phoenix/trace/langchain/__init__.py,sha256=aTKMFmEOgjx_6dnyplalgYi7PQnetablwrwpcUZqcGE,764
  phoenix/trace/langchain/instrumentor.py,sha256=rmqdaEh2rwyZnaddns4RC2WQnj7TbCnJjETgoiwWvzs,1163
- phoenix/trace/langchain/tracer.py,sha256=fSjtPc5VxaZK63x3ob3DAW7eM2kfOZWgbSQ9dmhhtsw,1424
+ phoenix/trace/langchain/tracer.py,sha256=gq5RDgFexw-4qpwf701hGsZGcT6XdPjRIbI5aZdBI2I,1425
  phoenix/trace/llama_index/__init__.py,sha256=4fpR5702Qh2t5TaXIx584EkA-BveCPftXPOKvI0Oi3I,105
  phoenix/trace/llama_index/callback.py,sha256=YKCZZtFwAwasZt9qNSJO6dj97Jt2eN_PBakUYWrCleE,3574
  phoenix/trace/openai/__init__.py,sha256=J3G0uqCxGdksUpaQVHds_Egv2drvh8UEqoLjiQAOveg,79
@@ -167,8 +168,8 @@ phoenix/trace/v1/evaluation_pb2.pyi,sha256=cCbbx06gwQmaH14s3J1X25TtaARh-k1abbxQd
  phoenix/utilities/__init__.py,sha256=3TVirVnjIGyaCFuJCqeZO4tjlzQ_chZgYM0itIwsEpE,656
  phoenix/utilities/error_handling.py,sha256=7b5rpGFj9EWZ8yrZK1IHvxB89suWk3lggDayUQcvZds,1946
  phoenix/utilities/logging.py,sha256=lDXd6EGaamBNcQxL4vP1au9-i_SXe0OraUDiJOcszSw,222
- arize_phoenix-3.4.0.dist-info/METADATA,sha256=GILlx8yqFVDq1YOx3KHlqPMfYKGMUnE7ndwRhIE_nG4,28853
- arize_phoenix-3.4.0.dist-info/WHEEL,sha256=TJPnKdtrSue7xZ_AVGkp9YXcvDrobsjBds1du3Nx6dc,87
- arize_phoenix-3.4.0.dist-info/licenses/IP_NOTICE,sha256=JBqyyCYYxGDfzQ0TtsQgjts41IJoa-hiwDrBjCb9gHM,469
- arize_phoenix-3.4.0.dist-info/licenses/LICENSE,sha256=HFkW9REuMOkvKRACuwLPT0hRydHb3zNg-fdFt94td18,3794
- arize_phoenix-3.4.0.dist-info/RECORD,,
+ arize_phoenix-3.5.0.dist-info/METADATA,sha256=ZFtnh2MB6uIyrkrz0hp12LqL2iTsQwxTUOPJMyGSXd8,29052
+ arize_phoenix-3.5.0.dist-info/WHEEL,sha256=TJPnKdtrSue7xZ_AVGkp9YXcvDrobsjBds1du3Nx6dc,87
+ arize_phoenix-3.5.0.dist-info/licenses/IP_NOTICE,sha256=JBqyyCYYxGDfzQ0TtsQgjts41IJoa-hiwDrBjCb9gHM,469
+ arize_phoenix-3.5.0.dist-info/licenses/LICENSE,sha256=HFkW9REuMOkvKRACuwLPT0hRydHb3zNg-fdFt94td18,3794
+ arize_phoenix-3.5.0.dist-info/RECORD,,
phoenix/core/evals.py CHANGED
@@ -33,15 +33,15 @@ class Evals:
  self._trace_evaluations_by_name: DefaultDict[
  EvaluationName, Dict[TraceID, pb.Evaluation]
  ] = defaultdict(dict)
- self._evaluations_by_trace_id: DefaultDict[
- TraceID, Dict[EvaluationName, pb.Evaluation]
- ] = defaultdict(dict)
- self._span_evaluations_by_name: DefaultDict[
- EvaluationName, Dict[SpanID, pb.Evaluation]
- ] = defaultdict(dict)
- self._evaluations_by_span_id: DefaultDict[
- SpanID, Dict[EvaluationName, pb.Evaluation]
- ] = defaultdict(dict)
+ self._evaluations_by_trace_id: DefaultDict[TraceID, Dict[EvaluationName, pb.Evaluation]] = (
+ defaultdict(dict)
+ )
+ self._span_evaluations_by_name: DefaultDict[EvaluationName, Dict[SpanID, pb.Evaluation]] = (
+ defaultdict(dict)
+ )
+ self._evaluations_by_span_id: DefaultDict[SpanID, Dict[EvaluationName, pb.Evaluation]] = (
+ defaultdict(dict)
+ )
  self._span_evaluation_labels: DefaultDict[EvaluationName, Set[str]] = defaultdict(set)
  self._document_evaluations_by_span_id: DefaultDict[
  SpanID, DefaultDict[EvaluationName, Dict[DocumentPosition, pb.Evaluation]]
phoenix/core/model.py CHANGED
@@ -12,36 +12,36 @@ def _get_embedding_dimensions(
  embedding_dimensions: List[EmbeddingDimension] = []
  embedding_features: EmbeddingFeatures = {}

- primary_embedding_features: Optional[
- EmbeddingFeatures
- ] = primary_dataset.schema.embedding_feature_column_names
+ primary_embedding_features: Optional[EmbeddingFeatures] = (
+ primary_dataset.schema.embedding_feature_column_names
+ )
  if primary_embedding_features is not None:
  embedding_features.update(primary_embedding_features)
- primary_prompt_column_names: Optional[
- EmbeddingColumnNames
- ] = primary_dataset.schema.prompt_column_names
+ primary_prompt_column_names: Optional[EmbeddingColumnNames] = (
+ primary_dataset.schema.prompt_column_names
+ )
  if primary_prompt_column_names is not None:
  embedding_features.update({"prompt": primary_prompt_column_names})
- primary_response_column_names: Optional[
- Union[str, EmbeddingColumnNames]
- ] = primary_dataset.schema.response_column_names
+ primary_response_column_names: Optional[Union[str, EmbeddingColumnNames]] = (
+ primary_dataset.schema.response_column_names
+ )
  if isinstance(primary_response_column_names, EmbeddingColumnNames):
  embedding_features.update({"response": primary_response_column_names})

  if reference_dataset is not None:
- reference_embedding_features: Optional[
- EmbeddingFeatures
- ] = reference_dataset.schema.embedding_feature_column_names
+ reference_embedding_features: Optional[EmbeddingFeatures] = (
+ reference_dataset.schema.embedding_feature_column_names
+ )
  if reference_embedding_features is not None:
  embedding_features.update(reference_embedding_features)
- reference_prompt_column_names: Optional[
- EmbeddingColumnNames
- ] = reference_dataset.schema.prompt_column_names
+ reference_prompt_column_names: Optional[EmbeddingColumnNames] = (
+ reference_dataset.schema.prompt_column_names
+ )
  if reference_prompt_column_names is not None:
  embedding_features.update({"prompt": reference_prompt_column_names})
- reference_response_column_names: Optional[
- Union[str, EmbeddingColumnNames]
- ] = reference_dataset.schema.response_column_names
+ reference_response_column_names: Optional[Union[str, EmbeddingColumnNames]] = (
+ reference_dataset.schema.response_column_names
+ )
  if isinstance(reference_response_column_names, EmbeddingColumnNames):
  embedding_features.update({"response": reference_response_column_names})

phoenix/core/model_schema.py CHANGED
@@ -52,8 +52,7 @@ from phoenix.config import GENERATED_DATASET_NAME_PREFIX
  from phoenix.datetime_utils import floor_to_minute


- class DimensionRole(IntEnum):
- ...
+ class DimensionRole(IntEnum): ...


  @unique
@@ -151,8 +150,7 @@ class CompositeDimensionSpec(SchemaSpec, ABC):
  ...

  @abstractmethod
- def __iter__(self) -> Iterator[str]:
- ...
+ def __iter__(self) -> Iterator[str]: ...


  @dataclass(frozen=True)
@@ -311,12 +309,10 @@ class Column:
  object.__setattr__(self, "name", _rand_str())

  @overload
- def __call__(self, data: pd.DataFrame) -> "pd.Series[Any]":
- ...
+ def __call__(self, data: pd.DataFrame) -> "pd.Series[Any]": ...

  @overload
- def __call__(self, data: "pd.Series[Any]") -> Any:
- ...
+ def __call__(self, data: "pd.Series[Any]") -> Any: ...

  def __call__(self, data: DataFrameOrSeries) -> Any:
  """Extracts a value from series, or a series from a dataframe. If
@@ -569,8 +565,7 @@ class ModelData(ObjectProxy, ABC): # type: ignore

  @property
  @abstractmethod
- def null_value(self) -> Any:
- ...
+ def null_value(self) -> Any: ...

  def __getitem__(self, key: Any) -> Any:
  if _is_column_key(key):
@@ -614,12 +609,10 @@ class Event(ModelData):
  return np.nan

  @overload
- def __getitem__(self, key: ColumnKey) -> Any:
- ...
+ def __getitem__(self, key: ColumnKey) -> Any: ...

  @overload
- def __getitem__(self, key: Any) -> Any:
- ...
+ def __getitem__(self, key: Any) -> Any: ...

  def __getitem__(self, key: Any) -> Any:
  return super().__getitem__(key)
@@ -668,12 +661,10 @@ class Events(ModelData):
  )

  @overload
- def __getitem__(self, key: ColumnKey) -> "pd.Series[Any]":
- ...
+ def __getitem__(self, key: ColumnKey) -> "pd.Series[Any]": ...

  @overload
- def __getitem__(self, key: List[RowId]) -> "Events":
- ...
+ def __getitem__(self, key: List[RowId]) -> "Events": ...

  def __getitem__(self, key: Any) -> Any:
  if isinstance(key, list):
@@ -728,12 +719,10 @@ class Dataset(Events):
  return pd.Index(self[PREDICTION_ID])

  @overload
- def __getitem__(self, key: ColumnKey) -> "pd.Series[Any]":
- ...
+ def __getitem__(self, key: ColumnKey) -> "pd.Series[Any]": ...

  @overload
- def __getitem__(self, key: List[RowId]) -> Events:
- ...
+ def __getitem__(self, key: List[RowId]) -> Events: ...

  def __getitem__(self, key: Any) -> Any:
  if isinstance(key, list):
@@ -1030,32 +1019,25 @@ class Model:
  return ans

  @overload
- def __getitem__(self, key: Type[Dataset]) -> Iterator[Dataset]:
- ...
+ def __getitem__(self, key: Type[Dataset]) -> Iterator[Dataset]: ...

  @overload
- def __getitem__(self, key: DatasetRole) -> Dataset:
- ...
+ def __getitem__(self, key: DatasetRole) -> Dataset: ...

  @overload
- def __getitem__(self, key: ColumnKey) -> Dimension:
- ...
+ def __getitem__(self, key: ColumnKey) -> Dimension: ...

  @overload
- def __getitem__(self, key: MultiDimensionKey) -> Iterator[Dimension]:
- ...
+ def __getitem__(self, key: MultiDimensionKey) -> Iterator[Dimension]: ...

  @overload
- def __getitem__(self, key: Type[ScalarDimension]) -> Iterator[ScalarDimension]:
- ...
+ def __getitem__(self, key: Type[ScalarDimension]) -> Iterator[ScalarDimension]: ...

  @overload
- def __getitem__(self, key: Type[EmbeddingDimension]) -> Iterator[EmbeddingDimension]:
- ...
+ def __getitem__(self, key: Type[EmbeddingDimension]) -> Iterator[EmbeddingDimension]: ...

  @overload
- def __getitem__(self, key: Type[Dimension]) -> Iterator[Dimension]:
- ...
+ def __getitem__(self, key: Type[Dimension]) -> Iterator[Dimension]: ...

  @overload
  def __getitem__(
@@ -1064,8 +1046,7 @@ class Model:
  MultiDimensionKey,
  Union[Type[ScalarDimension], Type[EmbeddingDimension]],
  ],
- ) -> Iterator[Dimension]:
- ...
+ ) -> Iterator[Dimension]: ...

  def __getitem__(self, key: Any) -> Any:
  if key is Dataset:
@@ -1124,8 +1105,7 @@ class Model:
  obj: DimensionRole,
  cls: Type[Dimension] = ScalarDimension,
  **kwargs: Any,
- ) -> Dimension:
- ...
+ ) -> Dimension: ...

  @overload
  def _new_dimension(
@@ -1133,16 +1113,14 @@ class Model:
  obj: Name,
  cls: Type[Dimension] = ScalarDimension,
  **kwargs: Any,
- ) -> Dimension:
- ...
+ ) -> Dimension: ...

  @overload
  def _new_dimension(
  self,
  obj: Dimension,
  **kwargs: Any,
- ) -> Dimension:
- ...
+ ) -> Dimension: ...

  def _new_dimension(
  self, obj: Any, cls: Type[Dimension] = ScalarDimension, **kwargs: Any
phoenix/experimental/evals/functions/executor.py CHANGED
@@ -21,8 +21,7 @@ _unset = Unset()


  class Executor(Protocol):
- def run(self, inputs: Sequence[Any]) -> List[Any]:
- ...
+ def run(self, inputs: Sequence[Any]) -> List[Any]: ...


  class AsyncExecutor(Executor):
phoenix/experimental/evals/functions/processing.py CHANGED
@@ -1,7 +1,22 @@
- from typing import List
+ """
+ Token processing functions for supported models. This module is being deprecated.
+ """
+
+ import logging
+ import sys
+ from typing import Any, List

  from ..models import BaseEvalModel

+ logger = logging.getLogger(__name__)
+
+ _DEPRECATION_WARNING = (
+ "The processing module is being deprecated. For advanced token processing, please use the "
+ "encoding approach recommended by the model provider. For example, OpenAI models can use the "
+ "`tiktoken` library to encode and decode text. For other models, please refer to the model "
+ "provider's documentation."
+ )
+

  def truncate_text_by_model(model: BaseEvalModel, text: str, token_buffer: int = 0) -> str:
  """Truncates text using a give model token limit.
@@ -42,3 +57,20 @@ def concatenate_and_truncate_chunks(
  str: _description_
  """
  return truncate_text_by_model(model=model, text=" ".join(chunks), token_buffer=token_buffer)
+
+
+ class _DEPRECATED_MODULE:
+ __all__ = ("truncate_text_by_model", "concatenate_and_truncate_chunks")
+
+ def __getattr__(self, name: str) -> Any:
+ if name == "truncate_text_by_model":
+ logger.warning(_DEPRECATION_WARNING)
+ return truncate_text_by_model
+ if name == "concatenate_and_truncate_chunks":
+ logger.warning(_DEPRECATION_WARNING)
+ return concatenate_and_truncate_chunks
+ raise AttributeError(f"module {__name__} has no attribute {name}")
+
+
+ # See e.g. https://stackoverflow.com/a/7668273
+ sys.modules[__name__] = _DEPRECATED_MODULE() # type: ignore
phoenix/experimental/evals/models/base.py CHANGED
@@ -158,17 +158,13 @@ class BaseEvalModel(ABC):
  raise ImportError(msg)

  @abstractmethod
- def get_tokens_from_text(self, text: str) -> List[int]:
- ...
+ def get_tokens_from_text(self, text: str) -> List[int]: ...

  @abstractmethod
- def get_text_from_tokens(self, tokens: List[int]) -> str:
- ...
+ def get_text_from_tokens(self, tokens: List[int]) -> str: ...

  @abstractproperty
- def max_context_size(self) -> int:
- ...
+ def max_context_size(self) -> int: ...

  @abstractproperty
- def encoder(self) -> "Encoding":
- ...
+ def encoder(self) -> "Encoding": ...
phoenix/experimental/evals/models/litellm.py CHANGED
@@ -67,7 +67,7 @@ class LiteLLMModel(BaseEvalModel):
  self._litellm = litellm
  env_info = validate_environment(self._litellm.utils.get_llm_provider(self.model))

- if not env_info["keys_in_environment"]:
+ if not env_info["keys_in_environment"] and env_info["missing_keys"]:
  raise RuntimeError(
  f"Missing environment variable(s): '{str(env_info['missing_keys'])}', for "
  f"model: {self.model}. \nFor additional information about the right "