opik-optimizer 0.7.8__py3-none-any.whl → 0.8.1__py3-none-any.whl

This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (31)
  1. opik_optimizer/__init__.py +2 -0
  2. opik_optimizer/base_optimizer.py +6 -4
  3. opik_optimizer/data/hotpot-500.json +501 -1001
  4. opik_optimizer/datasets/__init__.py +27 -0
  5. opik_optimizer/datasets/ai2_arc.py +44 -0
  6. opik_optimizer/datasets/cnn_dailymail.py +40 -0
  7. opik_optimizer/datasets/election_questions.py +36 -0
  8. opik_optimizer/datasets/gsm8k.py +40 -0
  9. opik_optimizer/datasets/halu_eval.py +43 -0
  10. opik_optimizer/datasets/hotpot_qa.py +68 -0
  11. opik_optimizer/datasets/medhallu.py +39 -0
  12. opik_optimizer/datasets/rag_hallucinations.py +41 -0
  13. opik_optimizer/datasets/ragbench.py +40 -0
  14. opik_optimizer/datasets/tiny_test.py +57 -0
  15. opik_optimizer/datasets/truthful_qa.py +107 -0
  16. opik_optimizer/demo/datasets.py +53 -607
  17. opik_optimizer/evolutionary_optimizer/evolutionary_optimizer.py +3 -1
  18. opik_optimizer/few_shot_bayesian_optimizer/few_shot_bayesian_optimizer.py +90 -19
  19. opik_optimizer/logging_config.py +1 -1
  20. opik_optimizer/meta_prompt_optimizer.py +60 -14
  21. opik_optimizer/mipro_optimizer/mipro_optimizer.py +151 -13
  22. opik_optimizer/optimization_result.py +11 -0
  23. opik_optimizer/task_evaluator.py +6 -1
  24. opik_optimizer/utils.py +0 -52
  25. opik_optimizer-0.8.1.dist-info/METADATA +196 -0
  26. opik_optimizer-0.8.1.dist-info/RECORD +45 -0
  27. opik_optimizer-0.7.8.dist-info/METADATA +0 -174
  28. opik_optimizer-0.7.8.dist-info/RECORD +0 -33
  29. {opik_optimizer-0.7.8.dist-info → opik_optimizer-0.8.1.dist-info}/WHEEL +0 -0
  30. {opik_optimizer-0.7.8.dist-info → opik_optimizer-0.8.1.dist-info}/licenses/LICENSE +0 -0
  31. {opik_optimizer-0.7.8.dist-info → opik_optimizer-0.8.1.dist-info}/top_level.txt +0 -0
opik_optimizer/__init__.py

@@ -23,6 +23,7 @@ from .optimization_config.mappers import (
 )
 
 from opik.evaluation.models.litellm import warning_filters
+from . import datasets
 
 warning_filters.add_warning_filters()
 
@@ -42,4 +43,5 @@ __all__ = [
     "from_llm_response_text",
     "OptimizationResult",
     "setup_logging",
+    "datasets",
 ]
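
Version 0.8.1 introduces a dedicated opik_optimizer.datasets subpackage (see the new files under opik_optimizer/datasets/ in the list above, while opik_optimizer/demo/datasets.py shrinks accordingly at +53 −607) and re-exports it from the package root via the two additions shown here. A minimal sketch of what the re-export enables; the introspection calls below are illustrative, and the loader functions defined inside each dataset module (gsm8k, hotpot_qa, tiny_test, ...) are not shown because their names do not appear in this hunk.

# Minimal sketch, assuming only what the two hunks above show.
import opik_optimizer

# `from . import datasets` plus the new "datasets" entry in __all__ make the
# subpackage reachable straight from the package root.
print(opik_optimizer.datasets)

# Lists whichever loaders the 0.8.1 dataset modules actually define.
print(dir(opik_optimizer.datasets))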
opik_optimizer/base_optimizer.py

@@ -4,15 +4,15 @@ import logging
 import time
 
 import litellm
+from . import _throttle
 from opik.rest_api.core import ApiError
 
 from pydantic import BaseModel
-from ._throttle import RateLimiter, rate_limited
 from .cache_config import initialize_cache
 from opik.evaluation.models.litellm import opik_monitor as opik_litellm_monitor
 from .optimization_config.configs import TaskConfig, MetricConfig
 
-limiter = RateLimiter(max_calls_per_second=8)
+_limiter = _throttle.get_rate_limiter_for_current_opik_installation()
 
 # Don't use unsupported params:
 litellm.drop_params = True
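
The module-level limiter moves from a hard-coded RateLimiter(max_calls_per_second=8) to a factory in the shared _throttle module, so the call ceiling can be chosen per Opik installation instead of being fixed inside each optimizer. The sketch below shows the kind of limiter/decorator pair the removed `RateLimiter, rate_limited` import implies; the names mirror those imports, but the bodies are illustrative rather than the package's actual implementation.

# Sketch of the throttling pattern, assuming the names from the removed import.
import threading
import time
from functools import wraps


class RateLimiter:
    """Allow at most `max_calls_per_second` calls across all threads."""

    def __init__(self, max_calls_per_second: float) -> None:
        self.min_interval = 1.0 / max_calls_per_second
        self._lock = threading.Lock()
        self._last_call = 0.0

    def wait(self) -> None:
        # Block until enough time has passed since the previous call.
        with self._lock:
            elapsed = time.monotonic() - self._last_call
            if elapsed < self.min_interval:
                time.sleep(self.min_interval - elapsed)
            self._last_call = time.monotonic()


def rate_limited(limiter: RateLimiter):
    """Decorator that makes the wrapped function respect `limiter`."""

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            limiter.wait()
            return func(*args, **kwargs)

        return wrapper

    return decorator


# In 0.8.1 the limiter itself comes from the _throttle factory, sized for the
# current Opik installation, rather than being hard-coded to 8 calls/s here.
_limiter = RateLimiter(max_calls_per_second=8)


@rate_limited(_limiter)
def call_model() -> None:
    ...  # an LLM call that should be throttled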
@@ -32,19 +32,21 @@ class OptimizationRound(BaseModel):
 
 
 class BaseOptimizer:
-    def __init__(self, model: str, project_name: Optional[str] = None, **model_kwargs):
+    def __init__(self, model: str, project_name: Optional[str] = None, verbose: int = 1, **model_kwargs):
         """
         Base class for optimizers.
 
         Args:
             model: LiteLLM model name
             project_name: Opik project name
+            verbose: Controls internal logging/progress bars (0=off, 1=on).
             model_kwargs: additional args for model (eg, temperature)
         """
         self.model = model
         self.reasoning_model = model
         self.model_kwargs = model_kwargs
         self.project_name = project_name
+        self.verbose = verbose
         self._history = []
         self.experiment_config = None
         self.llm_call_counter = 0
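
BaseOptimizer.__init__ gains a `verbose` flag that is stored on the instance, so any optimizer that calls super().__init__() can gate its own logging and progress bars on self.verbose. A hypothetical subclass illustrating the pattern; MyOptimizer, its run() method, and its logging behaviour are assumptions for illustration, not part of the package.

# Hypothetical subclass, assuming only the BaseOptimizer signature shown above.
import logging
from typing import Optional

from opik_optimizer.base_optimizer import BaseOptimizer

logger = logging.getLogger(__name__)


class MyOptimizer(BaseOptimizer):
    def __init__(
        self,
        model: str,
        project_name: Optional[str] = None,
        verbose: int = 1,
        **model_kwargs,
    ) -> None:
        # Forward verbose so the base class stores it as self.verbose.
        super().__init__(
            model=model, project_name=project_name, verbose=verbose, **model_kwargs
        )

    def run(self) -> None:
        if self.verbose >= 1:
            logger.info("Optimizing with model %s", self.model)
        # ... the optimizer's real work would go here ...


# verbose=0 silences the optimizer's own progress output.
optimizer = MyOptimizer(model="openai/gpt-4o-mini", verbose=0)
optimizer.run()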
@@ -141,7 +143,7 @@ class BaseOptimizer:
         """
         self._history.append(round_data)
 
-
+
     def update_optimization(self, optimization, status: str) -> None:
         """
         Update the optimization status