opik-optimizer 0.7.1__py3-none-any.whl → 0.7.3__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- opik_optimizer/__init__.py +14 -36
- opik_optimizer/_throttle.py +30 -30
- opik_optimizer/base_optimizer.py +1 -82
- opik_optimizer/demo/cache.py +1 -0
- opik_optimizer/few_shot_bayesian_optimizer/few_shot_bayesian_optimizer.py +12 -10
- opik_optimizer/meta_prompt_optimizer.py +3 -0
- opik_optimizer/mipro_optimizer/_lm.py +2 -2
- opik_optimizer-0.7.3.dist-info/METADATA +173 -0
- {opik_optimizer-0.7.1.dist-info → opik_optimizer-0.7.3.dist-info}/RECORD +12 -12
- opik_optimizer-0.7.1.dist-info/METADATA +0 -35
- {opik_optimizer-0.7.1.dist-info → opik_optimizer-0.7.3.dist-info}/WHEEL +0 -0
- {opik_optimizer-0.7.1.dist-info → opik_optimizer-0.7.3.dist-info}/licenses/LICENSE +0 -0
- {opik_optimizer-0.7.1.dist-info → opik_optimizer-0.7.3.dist-info}/top_level.txt +0 -0
opik_optimizer/__init__.py
CHANGED
@@ -7,42 +7,20 @@ __version__ = importlib.metadata.version("opik_optimizer")
 # Using WARNING as a sensible default to avoid flooding users with INFO/DEBUG
 setup_logging(level=logging.WARNING)
 
-    … (14 removed lines not rendered in this view)
-        return MetaPromptOptimizer
-    elif name == "FewShotBayesianOptimizer":
-        from .few_shot_bayesian_optimizer import FewShotBayesianOptimizer
-
-        return FewShotBayesianOptimizer
-    elif name in ["MetricConfig", "OptimizationConfig", "TaskConfig"]:
-        from .optimization_config.configs import (
-            MetricConfig,
-            OptimizationConfig,
-            TaskConfig,
-        )
-
-        return locals()[name]
-    elif name in ["from_dataset_field", "from_llm_response_text"]:
-        from .optimization_config.mappers import (
-            from_dataset_field,
-            from_llm_response_text,
-        )
-
-        return locals()[name]
-    raise AttributeError(f"module 'opik_optimizer' has no attribute '{name}'")
-
+# Regular imports
+from .mipro_optimizer import MiproOptimizer
+from .base_optimizer import BaseOptimizer
+from .meta_prompt_optimizer import MetaPromptOptimizer
+from .few_shot_bayesian_optimizer import FewShotBayesianOptimizer
+from .optimization_config.configs import (
+    MetricConfig,
+    OptimizationConfig,
+    TaskConfig,
+)
+from .optimization_config.mappers import (
+    from_dataset_field,
+    from_llm_response_text,
+)
 
 from opik.evaluation.models.litellm import warning_filters
 
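For orientation, a minimal sketch of what the switch to eager imports means for callers, assuming opik-optimizer 0.7.3 is installed: the public names resolve as ordinary module attributes instead of going through the removed lazy `__getattr__` hook.

```python
# Assumes opik-optimizer 0.7.3; every name below is now a plain top-level import.
from opik_optimizer import (
    BaseOptimizer,
    FewShotBayesianOptimizer,
    MetaPromptOptimizer,
    MiproOptimizer,
    MetricConfig,
    OptimizationConfig,
    TaskConfig,
    from_dataset_field,
    from_llm_response_text,
)

# Static tools (IDEs, type checkers) can now see these symbols directly.
print(MetricConfig, TaskConfig, from_dataset_field, from_llm_response_text)
```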
opik_optimizer/_throttle.py
CHANGED
@@ -1,43 +1,43 @@
-import
+import functools
+import pyrate_limiter
 import time
-import
-
+import opik.config
+
+from typing import Callable, Any
+
 
 class RateLimiter:
     """
-    Rate limiter that enforces a maximum number of calls across all threads.
+    Rate limiter that enforces a maximum number of calls across all threads using pyrate_limiter.
     """
-    def __init__(self, max_calls_per_second):
+    def __init__(self, max_calls_per_second: int):
         self.max_calls_per_second = max_calls_per_second
-
-
-        self.
+        rate = pyrate_limiter.Rate(max_calls_per_second, pyrate_limiter.Duration.SECOND)
+
+        self.limiter = pyrate_limiter.Limiter(rate, raise_when_fail=False)
+        self.bucket_key = "global_rate_limit"
 
-    def acquire(self):
-
-
-
-
-        with self.lock:
-            current_time = time.time()
-            time_since_last = current_time - self.last_call_time
-
-            # If we haven't waited long enough since the last call
-            if time_since_last < self.interval:
-                # Calculate how much longer we need to wait
-                sleep_time = self.interval - time_since_last
-                time.sleep(sleep_time)
-
-            # Update the last call time (after potential sleep)
-            self.last_call_time = time.time()
-
-def rate_limited(limiter):
+    def acquire(self) -> None:
+        while not self.limiter.try_acquire(self.bucket_key):
+            time.sleep(0.01)
+
+def rate_limited(limiter: RateLimiter) -> Callable[[Callable], Callable]:
     """Decorator to rate limit a function using the provided limiter"""
-
-
-
+
+    def decorator(func: Callable) -> Callable:
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs) -> Any:
             limiter.acquire()
             return func(*args, **kwargs)
         return wrapper
     return decorator
 
+
+def get_rate_limiter_for_current_opik_installation() -> RateLimiter:
+    opik_config = opik.config.OpikConfig()
+    max_calls_per_second = (
+        10
+        if opik_config.is_cloud_installation
+        else 50
+    )
+    return RateLimiter(max_calls_per_second=max_calls_per_second)
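A short usage sketch of the new module, assuming opik-optimizer 0.7.3 is installed; `fake_llm_call` is a hypothetical stand-in for a real LLM call:

```python
import time

from opik_optimizer._throttle import RateLimiter, rate_limited

# Five calls per second, enforced by the pyrate_limiter bucket shown in the diff.
limiter = RateLimiter(max_calls_per_second=5)


@rate_limited(limiter)
def fake_llm_call(i: int) -> int:
    # Hypothetical stand-in for e.g. a litellm.completion() call.
    return i


start = time.time()
for i in range(10):
    fake_llm_call(i)
# With a 5 calls/second budget, 10 calls should take on the order of 1-2 seconds.
print(f"elapsed: {time.time() - start:.2f}s")
```

Per the diff, the new `get_rate_limiter_for_current_opik_installation()` helper selects 10 calls/second for Opik cloud installations and 50 for self-hosted ones.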
opik_optimizer/base_optimizer.py
CHANGED
@@ -4,8 +4,6 @@ import logging
 import time
 
 import litellm
-from opik.evaluation import metrics
-from opik.opik_context import get_current_span_data
 from opik.rest_api.core import ApiError
 
 from pydantic import BaseModel
@@ -14,7 +12,7 @@ from .cache_config import initialize_cache
 from opik.evaluation.models.litellm import opik_monitor as opik_litellm_monitor
 from .optimization_config.configs import TaskConfig, MetricConfig
 
-limiter = RateLimiter(max_calls_per_second=
+limiter = RateLimiter(max_calls_per_second=8)
 
 # Don't use unsupported params:
 litellm.drop_params = True
@@ -143,85 +141,6 @@ class BaseOptimizer:
         """
         self._history.append(round_data)
 
-    @rate_limited(limiter)
-    def _call_model(
-        self,
-        prompt: str,
-        system_prompt: Optional[str] = None,
-        is_reasoning: bool = False,
-    ) -> str:
-        """Call the model to get suggestions based on the meta-prompt."""
-        model = self.reasoning_model if is_reasoning else self.model
-        messages = []
-
-        if system_prompt:
-            messages.append({"role": "system", "content": system_prompt})
-            logger.debug(f"Using custom system prompt: {system_prompt[:100]}...")
-        else:
-            messages.append(
-                {"role": "system", "content": "You are a helpful assistant."}
-            )
-
-        messages.append({"role": "user", "content": prompt})
-        logger.debug(f"Calling model {model} with prompt: {prompt[:100]}...")
-
-        api_params = self.model_kwargs.copy()
-        api_params.update(
-            {
-                "model": model,
-                "messages": messages,
-                # Ensure required params like 'temperature', 'max_tokens' are present
-                # Defaults added here for safety, though usually set in __init__ kwargs
-                "temperature": api_params.get("temperature", 0.3),
-                "max_tokens": api_params.get("max_tokens", 1000),
-            }
-        )
-
-        # Attempt to add Opik monitoring if available
-        try:
-            # Assuming opik_litellm_monitor is imported and configured elsewhere
-            api_params = opik_litellm_monitor.try_add_opik_monitoring_to_params(
-                api_params
-            )
-            logger.debug("Opik monitoring hooks added to LiteLLM params.")
-        except Exception as e:
-            logger.warning(f"Could not add Opik monitoring to LiteLLM params: {e}")
-
-        logger.debug(
-            f"Final API params (excluding messages): { {k:v for k,v in api_params.items() if k != 'messages'} }"
-        )
-
-        # Increment Counter
-        self.llm_call_counter += 1
-        logger.debug(f"LLM Call Count: {self.llm_call_counter}")
-
-        try:
-            response = litellm.completion(**api_params)
-            model_output = response.choices[0].message.content.strip()
-            logger.debug(f"Model response from {model_to_use}: {model_output[:100]}...")
-            return model_output
-        except litellm.exceptions.RateLimitError as e:
-            logger.error(f"LiteLLM Rate Limit Error for model {model_to_use}: {e}")
-            # Consider adding retry logic here with tenacity
-            raise
-        except litellm.exceptions.APIConnectionError as e:
-            logger.error(f"LiteLLM API Connection Error for model {model_to_use}: {e}")
-            # Consider adding retry logic here
-            raise
-        except litellm.exceptions.ContextWindowExceededError as e:
-            logger.error(
-                f"LiteLLM Context Window Exceeded Error for model {model_to_use}. Prompt length: {len(prompt)}. Details: {e}"
-            )
-            raise
-        except litellm.exceptions.APIError as e: # Catch broader API errors
-            logger.error(f"LiteLLM API Error for model {model_to_use}: {e}")
-            raise
-        except Exception as e:
-            # Catch any other unexpected errors
-            logger.error(
-                f"Unexpected error during model call to {model_to_use}: {type(e).__name__} - {e}"
-            )
-            raise
 
     def update_optimization(self, optimization, status: str) -> None:
         """
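The `_call_model` helper is removed from `BaseOptimizer`; only the hard-coded `limiter = RateLimiter(max_calls_per_second=8)` remains in this file. For readers who relied on the helper, here is a hedged, standalone sketch of the same pattern (a throttled litellm call with error logging); the function name and defaults are illustrative, not part of the 0.7.3 API.

```python
import logging

import litellm

from opik_optimizer._throttle import RateLimiter, rate_limited

logger = logging.getLogger(__name__)
limiter = RateLimiter(max_calls_per_second=8)  # same budget the module now hard-codes


@rate_limited(limiter)
def call_model(model: str, prompt: str, system_prompt: str = "You are a helpful assistant.") -> str:
    # Hypothetical helper mirroring the removed method's core flow.
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": prompt},
    ]
    try:
        response = litellm.completion(
            model=model, messages=messages, temperature=0.3, max_tokens=1000
        )
        return response.choices[0].message.content.strip()
    except litellm.exceptions.RateLimitError:
        logger.error("Rate limited by the provider; consider retrying with backoff.")
        raise
    except litellm.exceptions.APIError:
        logger.error("Provider API error during completion call.")
        raise
```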
opik_optimizer/demo/cache.py
CHANGED
@@ -9,6 +9,7 @@ import requests
 NAMED_CACHES = {
     "test": "https://drive.google.com/file/d/1RifNtpN-pl0DW49daRaAMJwW7MCsOh6y/view?usp=sharing",
     "test2": "https://drive.google.com/uc?id=1RifNtpN-pl0DW49daRaAMJwW7MCsOh6y&export=download",
+    "opik-workshop": "https://drive.google.com/file/d/1l0aK6KhDPs2bFsQTkfzvOvfacJlhdmHr/view?usp=sharing",
 }
 CACHE_DIR = os.path.expanduser("~/.litellm_cache")
 
opik_optimizer/few_shot_bayesian_optimizer/few_shot_bayesian_optimizer.py
CHANGED
@@ -1,8 +1,8 @@
 import random
-from typing import Any, Dict, List, Tuple, Union, Optional,
-import openai
+from typing import Any, Dict, List, Tuple, Union, Optional, Literal
 import opik
 import optuna
+import optuna.samplers
 import logging
 import json
 
@@ -14,18 +14,18 @@ from opik_optimizer import base_optimizer
 
 from . import prompt_parameter
 from . import prompt_templates
-from ..
+from .. import _throttle
 from .. import optimization_result, task_evaluator
 
 import litellm
 
 from opik.evaluation.models.litellm import opik_monitor as opik_litellm_monitor
 
-
+_limiter = _throttle.get_rate_limiter_for_current_opik_installation()
 
 logger = logging.getLogger(__name__)
 
-@rate_limited(
+@_throttle.rate_limited(_limiter)
 def _call_model(model, messages, seed, model_kwargs):
     model_kwargs = opik_litellm_monitor.try_add_opik_monitoring_to_params(model_kwargs)
 
@@ -59,7 +59,6 @@ class FewShotBayesianOptimizer(base_optimizer.BaseOptimizer):
         self.n_threads = n_threads
         self.n_initial_prompts = n_initial_prompts
         self.n_iterations = n_iterations
-
         self._opik_client = opik.Opik()
         logger.debug(f"Initialized FewShotBayesianOptimizer with model: {model}")
 
@@ -85,7 +84,7 @@ class FewShotBayesianOptimizer(base_optimizer.BaseOptimizer):
         split_idx = int(len(dataset) * train_ratio)
         return dataset[:split_idx], dataset[split_idx:]
 
-    def _optimize_prompt(
+    def _optimize_prompt(
         self,
         dataset: Union[str, Dataset],
         metric_config: MetricConfig,
@@ -171,8 +170,10 @@ class FewShotBayesianOptimizer(base_optimizer.BaseOptimizer):
             n_examples = trial.suggest_int(
                 "n_examples", self.min_examples, self.max_examples
             )
-
-
+            example_indices = [
+                trial.suggest_categorical(f"example_{i}", list(range(len(dataset_items))))
+                for i in range(n_examples)
+            ]
             trial.set_user_attr("example_indices", example_indices)
 
             instruction = task_config.instruction_prompt
@@ -238,7 +239,8 @@ class FewShotBayesianOptimizer(base_optimizer.BaseOptimizer):
         except Exception as e:
             logger.warning(f"Could not configure Optuna logging within optimizer: {e}")
 
-
+        sampler = optuna.samplers.TPESampler(seed=self.seed)
+        study = optuna.create_study(direction="maximize", sampler=sampler)
         study.optimize(optimization_objective, n_trials=n_trials)
         logger.info("Optuna study finished.")
 
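Taken together, the changes above seed Optuna's TPE sampler and sample the few-shot example indices per trial. A self-contained sketch of that pattern, with a toy objective and a hypothetical `dataset_items` stand-in:

```python
import optuna
import optuna.samplers

dataset_items = list(range(50))  # hypothetical stand-in for the dataset items
min_examples, max_examples, seed = 3, 8, 42


def objective(trial: optuna.Trial) -> float:
    # Number of few-shot examples for this trial, then one categorical choice per slot.
    n_examples = trial.suggest_int("n_examples", min_examples, max_examples)
    example_indices = [
        trial.suggest_categorical(f"example_{i}", list(range(len(dataset_items))))
        for i in range(n_examples)
    ]
    trial.set_user_attr("example_indices", example_indices)
    # Toy score; in the optimizer this would be the metric over the evaluated prompt.
    return sum(example_indices) / (len(example_indices) * len(dataset_items))


sampler = optuna.samplers.TPESampler(seed=seed)  # seeded for reproducible trials
study = optuna.create_study(direction="maximize", sampler=sampler)
study.optimize(objective, n_trials=10)
print(study.best_trial.user_attrs["example_indices"])
```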
opik_optimizer/meta_prompt_optimizer.py
CHANGED
@@ -16,6 +16,7 @@ from opik_optimizer import task_evaluator
 from opik.api_objects import opik_client
 from opik.evaluation.models.litellm import opik_monitor as opik_litellm_monitor
 from opik.environment import get_tqdm_for_current_environment
+from . import _throttle
 
 tqdm = get_tqdm_for_current_environment()
 
@@ -26,6 +27,7 @@ litellm.cache = Cache(type="disk", disk_cache_dir=disk_cache_dir)
 # Set up logging
 logger = logging.getLogger(__name__)  # Gets logger configured by setup_logging
 
+_rate_limiter = _throttle.get_rate_limiter_for_current_opik_installation()
 
 class MetaPromptOptimizer(BaseOptimizer):
     """Optimizer that uses meta-prompting to improve prompts based on examples and performance."""
@@ -176,6 +178,7 @@ class MetaPromptOptimizer(BaseOptimizer):
             optimization_id=optimization_id,
         )
 
+    @_throttle.rate_limited(_rate_limiter)
     def _call_model(
         self,
         prompt: str,
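The pattern here is a single module-level limiter shared by every `MetaPromptOptimizer` instance, attached to an instance method via the decorator. A minimal sketch of the same idea with a hypothetical class, not the real optimizer API:

```python
from opik_optimizer import _throttle

# One limiter per module, so all instances share the same call budget.
_rate_limiter = _throttle.get_rate_limiter_for_current_opik_installation()


class ThrottledClient:
    @_throttle.rate_limited(_rate_limiter)
    def call_model(self, prompt: str) -> str:
        # Every instance's calls draw from the shared _rate_limiter bucket.
        return f"echo: {prompt}"


a, b = ThrottledClient(), ThrottledClient()
print(a.call_model("hi"), b.call_model("there"))
```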
opik_optimizer/mipro_optimizer/_lm.py
CHANGED
@@ -22,11 +22,11 @@ from dspy.dsp.utils.settings import settings
 from dspy.utils.callback import BaseCallback, with_callbacks
 from dspy.clients.base_lm import BaseLM
 
-from .._throttle import RateLimiter, rate_limited
+from .._throttle import RateLimiter, rate_limited, get_rate_limiter_for_current_opik_installation
 
 logger = logging.getLogger(__name__)
 # Limit how fast an LLM can be called:
-limiter =
+limiter = get_rate_limiter_for_current_opik_installation()
 
 class LM(BaseLM):
     """
opik_optimizer-0.7.3.dist-info/METADATA
ADDED
@@ -0,0 +1,173 @@
+Metadata-Version: 2.4
+Name: opik_optimizer
+Version: 0.7.3
+Summary: Agent optimization with Opik
+Home-page: https://github.com/comet-ml/opik
+Author: Comet ML
+Author-email: support@comet.com
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.10
+Requires-Python: >=3.9,<3.13
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: opik>=1.7.17
+Requires-Dist: dspy<3,>=2.6.18
+Requires-Dist: litellm
+Requires-Dist: tqdm
+Requires-Dist: datasets
+Requires-Dist: optuna
+Requires-Dist: pydantic
+Requires-Dist: pandas
+Requires-Dist: hf_xet
+Requires-Dist: pyrate-limiter
+Provides-Extra: dev
+Requires-Dist: adalflow; extra == "dev"
+Requires-Dist: pytest; extra == "dev"
+Requires-Dist: pytest-conv; extra == "dev"
+Dynamic: author
+Dynamic: author-email
+Dynamic: classifier
+Dynamic: description
+Dynamic: description-content-type
+Dynamic: home-page
+Dynamic: license-file
+Dynamic: provides-extra
+Dynamic: requires-dist
+Dynamic: requires-python
+Dynamic: summary
+
+# Opik Optimizer
+
+The Opik Opitmizer can refine your prompts to get better performance
+from your LLMs. You can use a variety of algorithms, including:
+
+* FewShotBayesianOptimizer
+* MiproOptimizer
+* MetaPromptOptimizer
+
+## Quickstart
+
+
+[Open Quickstart Notebook in Colab](https://colab.research.google.com/github/comet-ml/opik/blob/main/sdks/opik_optimizer/notebooks/OpikOptimizerIntro.ipynb)
+
+
+## Setup
+
+1. Configure Opik:
+```bash
+# Install Comet ML CLI
+pip install opik
+
+# Configure your API key
+opik configure
+# When prompted, enter your Opik API key
+```
+
+2. Set up your environment variables:
+```bash
+# OpenAI API key for LLM access
+export OPENAI_API_KEY=your_openai_api_key
+```
+
+3. Install the package:
+```bash
+pip install opik-optimizer
+```
+
+You'll need:
+
+1. An LLM model name
+2. An Opik Dataset (or Opik Dataset name)
+3. An Opik Metric (possibly a custom one)
+4. A starting prompt (string)
+
+## Example
+
+We have prepared some sample datasets for testing:
+
+* "tiny-test"
+* "halu-eval-300"
+* "hotpot-300"
+
+You can see how to use those below:
+
+```python
+from opik.evaluation.metrics import LevenshteinRatio
+from opik_optimizer import FewShotBayesianOptimizer
+from opik_optimizer.demo import get_or_create_dataset
+
+from opik_optimizer import (
+    MetricConfig,
+    TaskConfig,
+    from_dataset_field,
+    from_llm_response_text,
+)
+
+hot_pot_dataset = get_or_create_dataset("hotpot-300")
+
+# For chat prompts instruction doesn't need to contain input parameters from dataset examples.
+prompt_instruction = """
+Answer the question.
+"""
+project_name = "optimize-few-shot-bayesian-hotpot"
+
+optimizer = FewShotBayesianOptimizer(
+    model="gpt-4o-mini",
+    project_name=project_name,
+    min_examples=3,
+    max_examples=8,
+    n_threads=16,
+    seed=42,
+)
+
+metric_config = MetricConfig(
+    metric=LevenshteinRatio(project_name=project_name),
+    inputs={
+        "output": from_llm_response_text(),
+        "reference": from_dataset_field(name="answer"),
+    },
+)
+
+task_config = TaskConfig(
+    instruction_prompt=prompt_instruction,
+    input_dataset_fields=["question"],
+    output_dataset_field="answer",
+    use_chat_prompt=True,
+)
+
+result = optimizer.optimize_prompt(
+    dataset=hot_pot_dataset,
+    metric_config=metric_config,
+    task_config=task_config,
+    n_trials=10,
+    n_samples=150,
+)
+
+result.display()
+```
+
+More examples can be found in the `scripts` folder.
+
+## Installation
+
+```bash
+pip install opik-optimizer
+```
+
+## Development
+
+To use the Opik Optimizer from source:
+
+```bash
+git clone git clone git@github.com:comet-ml/opik
+cd sdks/opik_optimizer
+pip install -e .
+```
+
+## Requirements
+
+- Python 3.10+ < 3.13
+- Opik API key
+- OpenAI API key (or other LLM provider)
{opik_optimizer-0.7.1.dist-info → opik_optimizer-0.7.3.dist-info}/RECORD
CHANGED
@@ -1,30 +1,30 @@
-opik_optimizer/__init__.py,sha256=
-opik_optimizer/_throttle.py,sha256=
-opik_optimizer/base_optimizer.py,sha256=
+opik_optimizer/__init__.py,sha256=x5QSFom-TtmmUNzqyYIJY2AujMJXbyhXs2oz44-4Af0,1121
+opik_optimizer/_throttle.py,sha256=ztub8qlwz4u0GVA2TIoLig0D1Cs0hJ7_o_SnT_C7Nmk,1360
+opik_optimizer/base_optimizer.py,sha256=f4gNX9j3Z3TGst8F0gm1nMHHpHKAlChmeCVAcTdTIR4,4883
 opik_optimizer/cache_config.py,sha256=EzF4RAzxhSG8vtMJANdiUpNHQ9HzL2CrCXp0iik0f4A,580
 opik_optimizer/logging_config.py,sha256=ELevhxtflYinTo-jVvyQYZbXG7FgAe_b5dPa9y5uLWw,2774
-opik_optimizer/meta_prompt_optimizer.py,sha256=
+opik_optimizer/meta_prompt_optimizer.py,sha256=AoFskD01Mu9VbH-Ys4CUoqZk3Gn3iFrOzF2yh-Hw7GM,46944
 opik_optimizer/optimization_result.py,sha256=9zdDV2MXeLYk7U8OqgMmSU-DdPV6qgYQWS2rtkO6Dzw,8693
 opik_optimizer/task_evaluator.py,sha256=MafDMaLeW0_yGPrumLvYF0HzQUKrnpAlM_0N_TPG8tw,3695
 opik_optimizer/utils.py,sha256=HivUsNzbt7BcuZeEvikdER1DaTPUFLJrpaVQ8raZYD8,3637
 opik_optimizer/demo/__init__.py,sha256=KSpFYhzN7fTmLEsIaciRHwxcJDeAiX5NDmYLdPsfpT8,150
-opik_optimizer/demo/cache.py,sha256=
+opik_optimizer/demo/cache.py,sha256=5WqK8rSiijzU6s4VHIjLuL1LR5i1yHtY-x5FZTduSus,3669
 opik_optimizer/demo/datasets.py,sha256=hD6JZAQotEDQb4nK7dbnurquILqQsrFRF7nUwon_iXE,22930
 opik_optimizer/few_shot_bayesian_optimizer/__init__.py,sha256=VuH7FOROyGcjMPryejtZC-5Y0QHlVTFLTGUDgNqRAFw,113
-opik_optimizer/few_shot_bayesian_optimizer/few_shot_bayesian_optimizer.py,sha256=
+opik_optimizer/few_shot_bayesian_optimizer/few_shot_bayesian_optimizer.py,sha256=HIoIVDW9v0p2Y9B736DncXjJPPYoDfphgeSqiB4MIVM,15235
 opik_optimizer/few_shot_bayesian_optimizer/prompt_parameter.py,sha256=EDsSIFAUOfiZKWLrOAaBDB7Exk7cmIs4ccI95kVa7JY,3118
 opik_optimizer/few_shot_bayesian_optimizer/prompt_templates.py,sha256=HmvD-UeT3aKiiet5cUtULXe6iFPEOo6hxyDE0pH2LnQ,2424
 opik_optimizer/integrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 opik_optimizer/mipro_optimizer/__init__.py,sha256=CF9TVXjOxTobDO1kAS8CD4eyLVzEozxjfgoKwIO6ZpU,44
-opik_optimizer/mipro_optimizer/_lm.py,sha256=
+opik_optimizer/mipro_optimizer/_lm.py,sha256=bcTy2Y5HjSaFQOATIpUaA86eIp3vKHaMuDI2_RvN2ww,16376
 opik_optimizer/mipro_optimizer/_mipro_optimizer_v2.py,sha256=r8FKaqvtZq_R7FwGnXqp1foCLk7M7r6M-CMvWbJtP5c,39512
 opik_optimizer/mipro_optimizer/mipro_optimizer.py,sha256=5QS7OKqOMKe4CD_8W2FMD_qJNmulkvxmOT_YtJ3BllM,14755
 opik_optimizer/mipro_optimizer/utils.py,sha256=4et1JA1QInX3h6Is-_RqzliFwJqkm6tlA0X5CryG60I,3142
 opik_optimizer/optimization_config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 opik_optimizer/optimization_config/configs.py,sha256=MYL9H2UAqeyGBlBGWbOZ-6Snto4ZMuXnypgvVuUSW1Y,1132
 opik_optimizer/optimization_config/mappers.py,sha256=RXgTMxPzTQ1AHGke6Zca6rTcfCI7IkCKhQYciaEGSAo,1698
-opik_optimizer-0.7.
-opik_optimizer-0.7.
-opik_optimizer-0.7.
-opik_optimizer-0.7.
-opik_optimizer-0.7.
+opik_optimizer-0.7.3.dist-info/licenses/LICENSE,sha256=dTRSwwCHdWeSjzodvnivYqcwi8x3Qfr21yv65QUWWBE,1062
+opik_optimizer-0.7.3.dist-info/METADATA,sha256=ng9ZwnuLM631y5mrK-DSi70r763Db51thVyE1QNuQlI,3826
+opik_optimizer-0.7.3.dist-info/WHEEL,sha256=0CuiUZ_p9E4cD6NyLD6UG80LBXYyiSYZOKDm5lp32xk,91
+opik_optimizer-0.7.3.dist-info/top_level.txt,sha256=ondOlpq6_yFckqpxoAHSfzZS2N-JfgmA-QQhOJfz7m0,15
+opik_optimizer-0.7.3.dist-info/RECORD,,
opik_optimizer-0.7.1.dist-info/METADATA
REMOVED
@@ -1,35 +0,0 @@
-Metadata-Version: 2.4
-Name: opik_optimizer
-Version: 0.7.1
-Summary: Agent optimization with Opik
-Home-page: https://github.com/comet-ml/opik
-Author: Comet ML
-Author-email: info@comet.ml
-Classifier: Development Status :: 3 - Alpha
-Classifier: Intended Audience :: Developers
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.10
-Requires-Python: >=3.9
-License-File: LICENSE
-Requires-Dist: opik>=1.7.17
-Requires-Dist: dspy<3,>=2.6.18
-Requires-Dist: litellm
-Requires-Dist: tqdm
-Requires-Dist: datasets
-Requires-Dist: optuna
-Requires-Dist: pydantic
-Requires-Dist: pandas
-Requires-Dist: hf_xet
-Provides-Extra: dev
-Requires-Dist: adalflow; extra == "dev"
-Requires-Dist: pytest; extra == "dev"
-Requires-Dist: pytest-conv; extra == "dev"
-Dynamic: author
-Dynamic: author-email
-Dynamic: classifier
-Dynamic: home-page
-Dynamic: license-file
-Dynamic: provides-extra
-Dynamic: requires-dist
-Dynamic: requires-python
-Dynamic: summary
{opik_optimizer-0.7.1.dist-info → opik_optimizer-0.7.3.dist-info}/WHEEL
File without changes
{opik_optimizer-0.7.1.dist-info → opik_optimizer-0.7.3.dist-info}/licenses/LICENSE
File without changes
{opik_optimizer-0.7.1.dist-info → opik_optimizer-0.7.3.dist-info}/top_level.txt
File without changes