opik-optimizer 1.1.0__tar.gz → 2.0.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/PKG-INFO +97 -10
  2. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/README.md +94 -8
  3. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/pyproject.toml +12 -2
  4. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/__init__.py +2 -0
  5. opik_optimizer-2.0.1/src/opik_optimizer/base_optimizer.py +572 -0
  6. opik_optimizer-2.0.1/src/opik_optimizer/evolutionary_optimizer/evaluation_ops.py +136 -0
  7. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/evolutionary_optimizer/evolutionary_optimizer.py +179 -39
  8. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/evolutionary_optimizer/llm_support.py +3 -1
  9. opik_optimizer-2.0.1/src/opik_optimizer/evolutionary_optimizer/mcp.py +249 -0
  10. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/evolutionary_optimizer/mutation_ops.py +17 -3
  11. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/evolutionary_optimizer/population_ops.py +5 -0
  12. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/evolutionary_optimizer/prompts.py +47 -0
  13. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/evolutionary_optimizer/reporting.py +12 -0
  14. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/few_shot_bayesian_optimizer/few_shot_bayesian_optimizer.py +65 -59
  15. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/gepa_optimizer/adapter.py +5 -3
  16. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/gepa_optimizer/gepa_optimizer.py +163 -66
  17. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/mcp_utils/mcp_workflow.py +57 -3
  18. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/meta_prompt_optimizer/meta_prompt_optimizer.py +75 -69
  19. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/mipro_optimizer/_lm.py +10 -3
  20. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/mipro_optimizer/_mipro_optimizer_v2.py +1 -1
  21. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/mipro_optimizer/mipro_optimizer.py +96 -21
  22. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/optimizable_agent.py +5 -0
  23. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/optimization_result.py +1 -0
  24. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/utils/core.py +56 -14
  25. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer.egg-info/PKG-INFO +97 -10
  26. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer.egg-info/SOURCES.txt +4 -2
  27. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer.egg-info/requires.txt +2 -1
  28. opik_optimizer-2.0.1/tests/test_setup.py +5 -0
  29. opik_optimizer-1.1.0/src/opik_optimizer/base_optimizer.py +0 -215
  30. opik_optimizer-1.1.0/src/opik_optimizer/evolutionary_optimizer/evaluation_ops.py +0 -73
  31. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/LICENSE +0 -0
  32. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/setup.cfg +0 -0
  33. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/setup.py +0 -0
  34. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/_throttle.py +0 -0
  35. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/cache_config.py +0 -0
  36. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/data/context7_eval.jsonl +0 -0
  37. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/data/hotpot-500.json +0 -0
  38. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/datasets/__init__.py +0 -0
  39. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/datasets/ai2_arc.py +0 -0
  40. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/datasets/cnn_dailymail.py +0 -0
  41. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/datasets/context7_eval.py +0 -0
  42. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/datasets/election_questions.py +0 -0
  43. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/datasets/gsm8k.py +0 -0
  44. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/datasets/halu_eval.py +0 -0
  45. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/datasets/hotpot_qa.py +0 -0
  46. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/datasets/medhallu.py +0 -0
  47. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/datasets/rag_hallucinations.py +0 -0
  48. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/datasets/ragbench.py +0 -0
  49. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/datasets/tiny_test.py +0 -0
  50. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/datasets/truthful_qa.py +0 -0
  51. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/demo/__init__.py +0 -0
  52. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/demo/cache.py +0 -0
  53. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/demo/datasets.py +0 -0
  54. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/evolutionary_optimizer/__init__.py +0 -0
  55. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/evolutionary_optimizer/crossover_ops.py +0 -0
  56. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/evolutionary_optimizer/helpers.py +0 -0
  57. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/evolutionary_optimizer/style_ops.py +0 -0
  58. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/few_shot_bayesian_optimizer/__init__.py +0 -0
  59. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/few_shot_bayesian_optimizer/reporting.py +0 -0
  60. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/gepa_optimizer/__init__.py +0 -0
  61. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/gepa_optimizer/reporting.py +0 -0
  62. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/logging_config.py +0 -0
  63. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/mcp_utils/__init__.py +0 -0
  64. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/mcp_utils/mcp.py +0 -0
  65. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/mcp_utils/mcp_second_pass.py +0 -0
  66. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/mcp_utils/mcp_simulator.py +0 -0
  67. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/meta_prompt_optimizer/__init__.py +0 -0
  68. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/meta_prompt_optimizer/reporting.py +0 -0
  69. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/mipro_optimizer/__init__.py +0 -0
  70. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/mipro_optimizer/utils.py +0 -0
  71. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/optimization_config/__init__.py +0 -0
  72. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/optimization_config/chat_prompt.py +0 -0
  73. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/optimization_config/configs.py +0 -0
  74. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/optimization_config/mappers.py +0 -0
  75. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/py.typed +0 -0
  76. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/reporting_utils.py +0 -0
  77. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/task_evaluator.py +0 -0
  78. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/utils/__init__.py +0 -0
  79. {opik_optimizer-1.1.0/src/opik_optimizer → opik_optimizer-2.0.1/src/opik_optimizer/utils}/colbert.py +0 -0
  80. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/utils/dataset_utils.py +0 -0
  81. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/utils/prompt_segments.py +0 -0
  82. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer.egg-info/dependency_links.txt +0 -0
  83. {opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer.egg-info/top_level.txt +0 -0

{opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: opik_optimizer
- Version: 1.1.0
+ Version: 2.0.1
  Summary: Agent optimization with Opik
  Home-page: https://github.com/comet-ml/opik
  Author: Comet ML
@@ -15,6 +15,7 @@ Requires-Dist: datasets
  Requires-Dist: deap>=1.4.3
  Requires-Dist: diskcache
  Requires-Dist: dspy<3
+ Requires-Dist: gepa>=0.0.7
  Requires-Dist: ujson
  Requires-Dist: hf_xet
  Requires-Dist: litellm
@@ -30,7 +31,7 @@ Provides-Extra: dev
  Requires-Dist: pytest; extra == "dev"
  Requires-Dist: pytest-cov; extra == "dev"
  Requires-Dist: langgraph; extra == "dev"
- Requires-Dist: gepa>=0.0.7; extra == "dev"
+ Requires-Dist: pre-commit; extra == "dev"
  Dynamic: author
  Dynamic: home-page
  Dynamic: license-file
@@ -43,13 +44,24 @@ Dynamic: requires-python
  [![Downloads](https://static.pepy.tech/badge/opik-optimizer)](https://pepy.tech/project/opik-optimizer)
  [![License](https://img.shields.io/github/license/comet-ml/opik)](https://github.com/comet-ml/opik/blob/main/LICENSE)

- The Opik Agent Optimizer refines your prompts to achieve better performance from your Large Language Models (LLMs). It supports a variety of optimization algorithms, including:
+ The Opik Agent Optimizer refines your prompts to achieve better performance from your Large Language Models (LLMs). It supports a variety of optimization algorithms, all with a **standardized API** for consistent usage and chaining:

- * EvolutionaryOptimizer
- * FewShotBayesianOptimizer
- * MetaPromptOptimizer
- * MiproOptimizer
- * GepaOptimizer
+ * **EvolutionaryOptimizer** - Uses genetic algorithms for prompt evolution
+ * **FewShotBayesianOptimizer** - Uses few-shot learning with Bayesian optimization
+ * **MetaPromptOptimizer** - Employs meta-prompting techniques for optimization
+ * **MiproOptimizer** - Implements MIPRO (Multi-Input Prompt Optimization) algorithm
+ * **GepaOptimizer** - Leverages GEPA (Genetic-Pareto) optimization approach
+
+ ## 🎯 Key Features
+
+ - **Standardized API**: All optimizers follow the same interface for `optimize_prompt()` and `optimize_mcp()` methods
+ - **Optimizer Chaining**: Results from one optimizer can be used as input for another
+ - **MCP Support**: Built-in support for Model Context Protocol tool calling
+ - **Consistent Results**: All optimizers return standardized `OptimizationResult` objects
+ - **Counter Tracking**: Built-in LLM and tool call counters for monitoring usage
+ - **Type Safety**: Full type hints and validation for robust development
+ - **Backward Compatibility**: All original parameters preserved through kwargs extraction
+ - **Deprecation Warnings**: Clear warnings for deprecated parameters with migration guidance

  Opik Optimizer is a component of the [Opik platform](https://github.com/comet-ml/opik), an open-source LLM evaluation platform by Comet.
  For more information about the broader Opik ecosystem, visit our [Website](https://www.comet.com/site/products/opik/) or [Documentation](https://www.comet.com/docs/opik/).
@@ -101,6 +113,40 @@ You'll typically need:
  * An [Opik Metric](https://www.comet.com/docs/opik/evaluation/metrics/overview/) (or a custom evaluation function).
  * A starting prompt (template string).

+ ## Standardized API
+
+ All optimizers follow the same interface, making it easy to switch between algorithms or chain them together:
+
+ ```python
+ # All optimizers have the same signature
+ def optimize_prompt(
+     self,
+     prompt: ChatPrompt,
+     dataset: Dataset,
+     metric: Callable,
+     experiment_config: dict | None = None,
+     n_samples: int | None = None,
+     auto_continue: bool = False,
+     agent_class: type[OptimizableAgent] | None = None,
+     **kwargs: Any,
+ ) -> OptimizationResult
+
+ # All optimizers return the same result type
+ result = optimizer.optimize_prompt(
+     prompt=prompt,
+     dataset=dataset,
+     metric=metric,
+     n_samples=100
+ )
+
+ # Results can be chained
+ chained_result = another_optimizer.optimize_prompt(
+     prompt=ChatPrompt.from_result(result),  # Use previous result
+     dataset=dataset,
+     metric=metric
+ )
+ ```
+
  ## Example

  Here's a brief example of how to use the `FewShotBayesianOptimizer`. We'll use a sample dataset provided by Opik.
@@ -146,8 +192,7 @@ result = optimizer.optimize_prompt(
      prompt=prompt,
      dataset=hot_pot_dataset,
      metric=levenshtein_ratio,
-     n_trials=10,  # Number of optimization trials
-     n_samples=150,  # Number of dataset samples for evaluation per trial
+     n_samples=150  # Number of dataset samples for evaluation per trial
  )

  # Display the best prompt and its score
@@ -155,6 +200,8 @@ result.display()
  ```
  The `result` object contains the optimized prompt, evaluation scores, and other details from the optimization process. If `project_name` is provided and Opik is configured, results will also be logged to your Comet workspace.

+ The optimizer automatically logs run metadata—including optimizer version, tool schemas, prompt messages, and the models used—so you get consistent experiment context without any additional arguments. If you still need custom tags (for example identifying the dataset or task), pass an `experiment_config` dictionary and your fields will be merged on top of the defaults.
+
  ## Tool Optimization (MCP) - Beta

  The Opik Agent Optimizer supports **true tool optimization** for MCP (Model Context Protocol) tools. This feature is currently in **Beta** and supported by the **MetaPrompt Optimizer**.
@@ -215,6 +262,46 @@ optimizer = MetaPromptOptimizer(model="gpt-4")

  For comprehensive documentation on tool optimization, see the [Tool Optimization Guide](https://www.comet.com/docs/opik/agent_optimization/algorithms/tool_optimization).

+ ## Deprecation Warnings
+
+ The following parameters are deprecated and will be removed in future versions:
+
+ ### Constructor Parameters
+
+ - **`project_name`** in optimizer constructors: Set `project_name` in the `ChatPrompt` instead
+ - **`num_threads`** in optimizer constructors: Use `n_threads` instead
+
+ ### Example Migration
+
+ ```python
+ # ❌ Deprecated
+ optimizer = FewShotBayesianOptimizer(
+     model="gpt-4o-mini",
+     project_name="my-project",  # Deprecated
+     num_threads=16,  # Deprecated
+ )
+
+ # ✅ Correct
+ optimizer = FewShotBayesianOptimizer(
+     model="gpt-4o-mini",
+     n_threads=16,  # Use n_threads instead
+ )
+
+ prompt = ChatPrompt(
+     project_name="my-project",  # Set here instead
+     messages=[...]
+ )
+ ```
+
+ ### Suppressing Deprecation Warnings
+
+ To suppress deprecation warnings during development:
+
+ ```python
+ import warnings
+ warnings.filterwarnings("ignore", category=DeprecationWarning)
+ ```
+
  ### MCP Integration (Beta)

  The optimizer includes utilities for MCP tool integration:
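
The `experiment_config` behaviour described in the new README text above (custom fields merged on top of auto-logged defaults) is exercised through `optimize_prompt()`. Below is a minimal sketch, not taken from the package itself: it assumes the `optimizer`, `prompt`, `hot_pot_dataset`, and `levenshtein_ratio` objects from the README example are already defined, and the tag keys are arbitrary.

```python
# Illustrative sketch (not part of the diff above). Assumes `optimizer`,
# `prompt`, `hot_pot_dataset`, and `levenshtein_ratio` exist as in the
# README example; the experiment_config keys are arbitrary custom tags.
result = optimizer.optimize_prompt(
    prompt=prompt,
    dataset=hot_pot_dataset,
    metric=levenshtein_ratio,
    n_samples=150,
    experiment_config={
        "dataset": "hotpot",           # custom tag, merged on top of the defaults
        "task": "question-answering",  # custom tag
    },
)
result.display()
```
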
{opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/README.md

@@ -5,13 +5,24 @@
  [![Downloads](https://static.pepy.tech/badge/opik-optimizer)](https://pepy.tech/project/opik-optimizer)
  [![License](https://img.shields.io/github/license/comet-ml/opik)](https://github.com/comet-ml/opik/blob/main/LICENSE)

- The Opik Agent Optimizer refines your prompts to achieve better performance from your Large Language Models (LLMs). It supports a variety of optimization algorithms, including:
+ The Opik Agent Optimizer refines your prompts to achieve better performance from your Large Language Models (LLMs). It supports a variety of optimization algorithms, all with a **standardized API** for consistent usage and chaining:

- * EvolutionaryOptimizer
- * FewShotBayesianOptimizer
- * MetaPromptOptimizer
- * MiproOptimizer
- * GepaOptimizer
+ * **EvolutionaryOptimizer** - Uses genetic algorithms for prompt evolution
+ * **FewShotBayesianOptimizer** - Uses few-shot learning with Bayesian optimization
+ * **MetaPromptOptimizer** - Employs meta-prompting techniques for optimization
+ * **MiproOptimizer** - Implements MIPRO (Multi-Input Prompt Optimization) algorithm
+ * **GepaOptimizer** - Leverages GEPA (Genetic-Pareto) optimization approach
+
+ ## 🎯 Key Features
+
+ - **Standardized API**: All optimizers follow the same interface for `optimize_prompt()` and `optimize_mcp()` methods
+ - **Optimizer Chaining**: Results from one optimizer can be used as input for another
+ - **MCP Support**: Built-in support for Model Context Protocol tool calling
+ - **Consistent Results**: All optimizers return standardized `OptimizationResult` objects
+ - **Counter Tracking**: Built-in LLM and tool call counters for monitoring usage
+ - **Type Safety**: Full type hints and validation for robust development
+ - **Backward Compatibility**: All original parameters preserved through kwargs extraction
+ - **Deprecation Warnings**: Clear warnings for deprecated parameters with migration guidance

  Opik Optimizer is a component of the [Opik platform](https://github.com/comet-ml/opik), an open-source LLM evaluation platform by Comet.
  For more information about the broader Opik ecosystem, visit our [Website](https://www.comet.com/site/products/opik/) or [Documentation](https://www.comet.com/docs/opik/).
@@ -63,6 +74,40 @@ You'll typically need:
  * An [Opik Metric](https://www.comet.com/docs/opik/evaluation/metrics/overview/) (or a custom evaluation function).
  * A starting prompt (template string).

+ ## Standardized API
+
+ All optimizers follow the same interface, making it easy to switch between algorithms or chain them together:
+
+ ```python
+ # All optimizers have the same signature
+ def optimize_prompt(
+     self,
+     prompt: ChatPrompt,
+     dataset: Dataset,
+     metric: Callable,
+     experiment_config: dict | None = None,
+     n_samples: int | None = None,
+     auto_continue: bool = False,
+     agent_class: type[OptimizableAgent] | None = None,
+     **kwargs: Any,
+ ) -> OptimizationResult
+
+ # All optimizers return the same result type
+ result = optimizer.optimize_prompt(
+     prompt=prompt,
+     dataset=dataset,
+     metric=metric,
+     n_samples=100
+ )
+
+ # Results can be chained
+ chained_result = another_optimizer.optimize_prompt(
+     prompt=ChatPrompt.from_result(result),  # Use previous result
+     dataset=dataset,
+     metric=metric
+ )
+ ```
+
  ## Example

  Here's a brief example of how to use the `FewShotBayesianOptimizer`. We'll use a sample dataset provided by Opik.
@@ -108,8 +153,7 @@ result = optimizer.optimize_prompt(
      prompt=prompt,
      dataset=hot_pot_dataset,
      metric=levenshtein_ratio,
-     n_trials=10,  # Number of optimization trials
-     n_samples=150,  # Number of dataset samples for evaluation per trial
+     n_samples=150  # Number of dataset samples for evaluation per trial
  )

  # Display the best prompt and its score
@@ -117,6 +161,8 @@ result.display()
  ```
  The `result` object contains the optimized prompt, evaluation scores, and other details from the optimization process. If `project_name` is provided and Opik is configured, results will also be logged to your Comet workspace.

+ The optimizer automatically logs run metadata—including optimizer version, tool schemas, prompt messages, and the models used—so you get consistent experiment context without any additional arguments. If you still need custom tags (for example identifying the dataset or task), pass an `experiment_config` dictionary and your fields will be merged on top of the defaults.
+
  ## Tool Optimization (MCP) - Beta

  The Opik Agent Optimizer supports **true tool optimization** for MCP (Model Context Protocol) tools. This feature is currently in **Beta** and supported by the **MetaPrompt Optimizer**.
@@ -177,6 +223,46 @@ optimizer = MetaPromptOptimizer(model="gpt-4")

  For comprehensive documentation on tool optimization, see the [Tool Optimization Guide](https://www.comet.com/docs/opik/agent_optimization/algorithms/tool_optimization).

+ ## Deprecation Warnings
+
+ The following parameters are deprecated and will be removed in future versions:
+
+ ### Constructor Parameters
+
+ - **`project_name`** in optimizer constructors: Set `project_name` in the `ChatPrompt` instead
+ - **`num_threads`** in optimizer constructors: Use `n_threads` instead
+
+ ### Example Migration
+
+ ```python
+ # ❌ Deprecated
+ optimizer = FewShotBayesianOptimizer(
+     model="gpt-4o-mini",
+     project_name="my-project",  # Deprecated
+     num_threads=16,  # Deprecated
+ )
+
+ # ✅ Correct
+ optimizer = FewShotBayesianOptimizer(
+     model="gpt-4o-mini",
+     n_threads=16,  # Use n_threads instead
+ )
+
+ prompt = ChatPrompt(
+     project_name="my-project",  # Set here instead
+     messages=[...]
+ )
+ ```
+
+ ### Suppressing Deprecation Warnings
+
+ To suppress deprecation warnings during development:
+
+ ```python
+ import warnings
+ warnings.filterwarnings("ignore", category=DeprecationWarning)
+ ```
+
  ### MCP Integration (Beta)

  The optimizer includes utilities for MCP tool integration:
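
The optimizer-chaining pattern shown in the README diff can also span different optimizer classes. A minimal sketch follows; only `optimize_prompt()`, `ChatPrompt.from_result()`, `n_threads`, and the constructor calls shown above come from the diff, while the top-level `ChatPrompt` import and the reuse of the example dataset and metric are assumptions.

```python
# Sketch of cross-optimizer chaining, assuming the README example's
# `prompt`, `hot_pot_dataset`, and `levenshtein_ratio` are in scope and
# that ChatPrompt is importable from the package root (assumption).
from opik_optimizer import ChatPrompt, FewShotBayesianOptimizer, MetaPromptOptimizer

first = FewShotBayesianOptimizer(model="gpt-4o-mini", n_threads=16)
second = MetaPromptOptimizer(model="gpt-4")

# First pass: optimize the starting prompt.
result = first.optimize_prompt(
    prompt=prompt,
    dataset=hot_pot_dataset,
    metric=levenshtein_ratio,
    n_samples=150,
)

# Second pass: seed another optimizer with the previous result.
chained = second.optimize_prompt(
    prompt=ChatPrompt.from_result(result),
    dataset=hot_pot_dataset,
    metric=levenshtein_ratio,
)
chained.display()
```
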
{opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/pyproject.toml

@@ -1,6 +1,6 @@
  [project]
  name = "opik_optimizer"
- version = "1.1.0"
+ version = "2.0.1"
  description = "Agent optimization with Opik"
  authors = [
      {name = "Comet ML", email = "support@comet.com"}
@@ -13,6 +13,7 @@ dependencies = [
      "deap>=1.4.3",
      "diskcache",
      "dspy<3",
+     "gepa>=0.0.7",
      "ujson",
      "hf_xet",
      "litellm",
@@ -32,7 +33,7 @@ dev = [
      "pytest-cov",
      # "google-adk",
      "langgraph",
-     "gepa>=0.0.7",
+     "pre-commit",
  ]

  [tool.setuptools.packages.find]
@@ -62,3 +63,12 @@ filterwarnings = [
      "ignore::pytest.PytestConfigWarning",
      "ignore::RuntimeWarning:litellm.integrations.opik.opik",
  ]
+
+ [tool.coverage.run]
+ branch = true
+ source = ["opik_optimizer"]
+ omit = ["opik_optimizer/tests/*"]
+
+ [tool.coverage.report]
+ show_missing = true
+ skip_covered = true
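
Note that `gepa>=0.0.7` moves from the `dev` extra into the core dependency list, so the GEPA-based optimizer no longer requires installing the dev extra. A minimal sketch, with the caveat that the constructor argument is an assumption modeled on the other optimizers' examples:

```python
# After `pip install opik-optimizer==2.0.1`, gepa is installed as a core
# dependency, so GepaOptimizer is usable without the dev extra.
# The `model` argument mirrors the other optimizers' examples (assumption).
from opik_optimizer import GepaOptimizer

optimizer = GepaOptimizer(model="gpt-4o-mini")
```
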
{opik_optimizer-1.1.0 → opik_optimizer-2.0.1}/src/opik_optimizer/__init__.py

@@ -15,6 +15,7 @@ from .few_shot_bayesian_optimizer import FewShotBayesianOptimizer
  from .gepa_optimizer import GepaOptimizer
  from .logging_config import setup_logging
  from .meta_prompt_optimizer import MetaPromptOptimizer
+ from .mipro_optimizer import MiproOptimizer
  from .optimization_config.configs import TaskConfig
  from .optimization_result import OptimizationResult

@@ -31,6 +32,7 @@ __all__ = [
      "FewShotBayesianOptimizer",
      "GepaOptimizer",
      "MetaPromptOptimizer",
+     "MiproOptimizer",
      "EvolutionaryOptimizer",
      "OptimizationResult",
      "OptimizableAgent",