opik-optimizer 2.0.1__py3-none-any.whl → 2.1.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
@@ -16,8 +16,15 @@ from .gepa_optimizer import GepaOptimizer
 from .logging_config import setup_logging
 from .meta_prompt_optimizer import MetaPromptOptimizer
 from .mipro_optimizer import MiproOptimizer
+from .hierarchical_reflective_optimizer import HierarchicalReflectiveOptimizer
 from .optimization_config.configs import TaskConfig
 from .optimization_result import OptimizationResult
+from .parameter_optimizer import (
+    ParameterOptimizer,
+    ParameterSearchSpace,
+    ParameterSpec,
+    ParameterType,
+)
 
 __version__ = importlib.metadata.version("opik_optimizer")
 
@@ -34,9 +41,14 @@ __all__ = [
     "MetaPromptOptimizer",
     "MiproOptimizer",
     "EvolutionaryOptimizer",
+    "HierarchicalReflectiveOptimizer",
+    "ParameterOptimizer",
     "OptimizationResult",
     "OptimizableAgent",
     "setup_logging",
     "datasets",
     "TaskConfig",
+    "ParameterSearchSpace",
+    "ParameterSpec",
+    "ParameterType",
 ]
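
The two hunks above appear to touch the package root __init__.py, so the new optimizer and parameter-space names become importable directly from the package. A minimal sketch, assuming opik-optimizer 2.1.1 is installed:

# Names re-exported at the package root per the __all__ hunk above.
from opik_optimizer import (
    HierarchicalReflectiveOptimizer,
    ParameterOptimizer,
    ParameterSearchSpace,
    ParameterSpec,
    ParameterType,
)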
@@ -470,6 +470,39 @@ class BaseOptimizer(ABC):
             f"{self.__class__.__name__} does not implement optimize_mcp yet."
         )
 
+    def optimize_parameter(
+        self,
+        prompt: "chat_prompt.ChatPrompt",
+        dataset: Dataset,
+        metric: Callable,
+        parameter_space: Any,
+        experiment_config: dict | None = None,
+        n_trials: int | None = None,
+        n_samples: int | None = None,
+        agent_class: type[OptimizableAgent] | None = None,
+        **kwargs: Any,
+    ) -> optimization_result.OptimizationResult:
+        """
+        Optimize LLM call parameters such as temperature or top_k.
+
+        Args:
+            prompt: The chat prompt to evaluate with tuned parameters
+            dataset: Dataset providing evaluation examples
+            metric: Objective function to maximize
+            parameter_space: Definition of the search space for tunable parameters
+            experiment_config: Optional experiment metadata
+            n_trials: Number of trials to run (optimizer specific default if None)
+            n_samples: Number of dataset samples to evaluate per trial (None for all)
+            agent_class: Optional custom agent class to execute evaluations
+            **kwargs: Additional optimizer specific settings
+
+        Returns:
+            OptimizationResult: Structured result describing the best parameters found
+        """
+        raise NotImplementedError(
+            f"{self.__class__.__name__} does not implement optimize_parameter yet."
+        )
+
     def get_history(self) -> list[OptimizationRound]:
         """
         Get the optimization history.
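
For orientation, a hypothetical usage sketch that exercises only the optimize_parameter signature added above. It assumes ParameterOptimizer overrides this base method and takes a model argument like the package's other optimizers; the dataset name, accuracy_metric, and the dict-shaped parameter_space payload are all illustrative (the base annotation is Any, so the concrete ParameterSearchSpace type may be expected instead):

from opik import Opik
from opik_optimizer import ChatPrompt, ParameterOptimizer

# Fetch or create an evaluation dataset; the name is hypothetical.
dataset = Opik().get_or_create_dataset("my-eval-dataset")

def accuracy_metric(dataset_item, llm_output):
    # Illustrative objective: 1.0 on an exact match with the reference answer.
    return float(llm_output.strip() == dataset_item["answer"])

prompt = ChatPrompt(
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "{question}"},
    ]
)

optimizer = ParameterOptimizer(model="openai/gpt-4o-mini")
result = optimizer.optimize_parameter(
    prompt=prompt,
    dataset=dataset,
    metric=accuracy_metric,
    parameter_space={"temperature": {"min": 0.0, "max": 1.0}},  # illustrative shape
    n_trials=20,   # an optimizer-specific default would apply if None
    n_samples=50,  # evaluate 50 dataset items per trial
)
print(result)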
@@ -0,0 +1,5 @@
+from .hierarchical_reflective_optimizer import HierarchicalReflectiveOptimizer
+
+__all__ = [
+    "HierarchicalReflectiveOptimizer",
+]