opik-optimizer 0.7.8__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. opik_optimizer/__init__.py +2 -0
  2. opik_optimizer/base_optimizer.py +6 -4
  3. opik_optimizer/datasets/__init__.py +27 -0
  4. opik_optimizer/datasets/ai2_arc.py +44 -0
  5. opik_optimizer/datasets/cnn_dailymail.py +40 -0
  6. opik_optimizer/datasets/election_questions.py +36 -0
  7. opik_optimizer/datasets/gsm8k.py +40 -0
  8. opik_optimizer/datasets/halu_eval.py +43 -0
  9. opik_optimizer/datasets/hotpot_qa.py +67 -0
  10. opik_optimizer/datasets/medhallu.py +39 -0
  11. opik_optimizer/datasets/rag_hallucinations.py +41 -0
  12. opik_optimizer/datasets/ragbench.py +40 -0
  13. opik_optimizer/datasets/tiny_test.py +57 -0
  14. opik_optimizer/datasets/truthful_qa.py +107 -0
  15. opik_optimizer/demo/datasets.py +53 -607
  16. opik_optimizer/evolutionary_optimizer/evolutionary_optimizer.py +3 -1
  17. opik_optimizer/few_shot_bayesian_optimizer/few_shot_bayesian_optimizer.py +88 -17
  18. opik_optimizer/logging_config.py +1 -1
  19. opik_optimizer/meta_prompt_optimizer.py +57 -11
  20. opik_optimizer/mipro_optimizer/mipro_optimizer.py +151 -13
  21. opik_optimizer/optimization_result.py +11 -0
  22. opik_optimizer/task_evaluator.py +6 -1
  23. opik_optimizer/utils.py +0 -52
  24. opik_optimizer-0.8.0.dist-info/METADATA +196 -0
  25. opik_optimizer-0.8.0.dist-info/RECORD +45 -0
  26. opik_optimizer-0.7.8.dist-info/METADATA +0 -174
  27. opik_optimizer-0.7.8.dist-info/RECORD +0 -33
  28. {opik_optimizer-0.7.8.dist-info → opik_optimizer-0.8.0.dist-info}/WHEEL +0 -0
  29. {opik_optimizer-0.7.8.dist-info → opik_optimizer-0.8.0.dist-info}/licenses/LICENSE +0 -0
  30. {opik_optimizer-0.7.8.dist-info → opik_optimizer-0.8.0.dist-info}/top_level.txt +0 -0
opik_optimizer/optimization_result.py CHANGED
@@ -7,6 +7,15 @@ from pydantic import BaseModel, Field
from .base_optimizer import OptimizationRound # Adjust import as necessary
import rich

+ class OptimizationStep(BaseModel):
+     """Represents a single step or trial in an optimization process."""
+     step: int
+     score: Optional[float] = None
+     prompt: Optional[Union[str, List[Dict[str, str]]]] = None
+     parameters: Optional[Dict[str, Any]] = None
+     timestamp: Optional[str] = None
+     # Add other relevant details per step if needed
+

class OptimizationResult(pydantic.BaseModel):
    """Result of an optimization run."""
@@ -28,6 +37,8 @@ class OptimizationResult(pydantic.BaseModel):
    demonstrations: Optional[List[Dict[str, Any]]] = None
    optimizer: str = "Optimizer"
    tool_prompts: Optional[Dict[str, str]] = None
+     opik_metadata: Optional[Dict[str, Any]] = None
+     llm_calls: Optional[int] = None

    model_config = pydantic.ConfigDict(arbitrary_types_allowed=True)

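For orientation, here is a minimal sketch of the `OptimizationStep` model added above. The class is redefined locally because the diff does not show whether it is re-exported from the package root; the field names and types are copied from the added lines, the values are illustrative, and pydantic v2 is assumed (the package depends on `pydantic` and uses `ConfigDict`).

```python
from typing import Any, Dict, List, Optional, Union

from pydantic import BaseModel


class OptimizationStep(BaseModel):
    """Represents a single step or trial in an optimization process."""

    step: int
    score: Optional[float] = None
    prompt: Optional[Union[str, List[Dict[str, str]]]] = None
    parameters: Optional[Dict[str, Any]] = None
    timestamp: Optional[str] = None


# Illustrative values: record one trial of an optimization run.
trial = OptimizationStep(
    step=1,
    score=0.42,
    prompt="Answer the question based on the provided context.",
    parameters={"n_examples": 4},
    timestamp="2025-01-01T00:00:00Z",
)
print(trial.model_dump())  # pydantic v2 serialization
```

The second hunk similarly adds optional `opik_metadata` and `llm_calls` fields to `OptimizationResult`; both default to `None`, so code that constructs results without them keeps working.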
opik_optimizer/task_evaluator.py CHANGED
@@ -1,11 +1,12 @@
import opik
-
+ import logging
from typing import Any, Callable, Dict, List, Optional
from opik_optimizer.optimization_config.configs import MetricConfig
from opik.evaluation.metrics import score_result

from opik.evaluation import evaluator as opik_evaluator

+ logger = logging.getLogger(__name__)

def evaluate(
    dataset: opik.Dataset,
@@ -17,6 +18,7 @@ def evaluate(
    project_name: Optional[str] = None,
    n_samples: Optional[int] = None,
    experiment_config: Optional[Dict[str, Any]] = None,
+     verbose: int = 1,
) -> float:
    """
    Evaluate a task on a dataset.
@@ -31,6 +33,7 @@ def evaluate(
        num_threads: Number of threads to use for evaluation.
        experiment_config: The dictionary with parameters that describe experiment
        optimization_id: Optional optimization ID for the experiment.
+         verbose: Whether to print debug information.

    Returns:
        float: The average score of the evaluated task.
@@ -71,6 +74,7 @@ def evaluate(
            task_threads=num_threads,
            nb_samples=n_samples,
            experiment_config=experiment_config,
+             verbose=verbose,
        )
    else:
        result = opik_evaluator.evaluate(
@@ -83,6 +87,7 @@ def evaluate(
            task_threads=num_threads,
            nb_samples=n_samples,
            experiment_config=experiment_config,
+             verbose=verbose,
        )

    if not result.test_results:
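A caller-side sketch of the new `verbose` flag, hedged: only `dataset` and `verbose` are visible in the signature above, so every other argument is simply forwarded, and the wrapper name is hypothetical.

```python
import opik

from opik_optimizer import task_evaluator


def quiet_evaluate(dataset: opik.Dataset, **eval_kwargs) -> float:
    # verbose defaults to 1; per the docstring line added above, passing 0
    # suppresses the debug output printed during evaluation.
    return task_evaluator.evaluate(dataset=dataset, verbose=0, **eval_kwargs)
```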
opik_optimizer/utils.py CHANGED
@@ -8,9 +8,6 @@ from opik.api_objects.opik_client import Opik

from typing import List, Dict, Any, Optional, Callable, TYPE_CHECKING

- # Test dataset name for optimizer examples
- TEST_DATASET_NAME = "tiny-test-optimizer"
-
# Type hint for OptimizationResult without circular import
if TYPE_CHECKING:
    from .optimization_result import OptimizationResult
@@ -79,54 +76,5 @@ def get_random_seed() -> int:

    return random.randint(0, 2**32 - 1)

-
- def get_or_create_dataset(
-     dataset_name: str,
-     description: str,
-     data_loader: Callable[[], List[Dict[str, Any]]],
-     project_name: Optional[str] = None,
- ) -> opik.Dataset:
-     """
-     Get an existing dataset or create a new one if it doesn't exist.
-
-     Args:
-         dataset_name: Name of the dataset
-         description: Description of the dataset
-         data: Optional data to insert into the dataset
-         project_name: Optional project name
-
-     Returns:
-         opik.Dataset: The dataset object
-     """
-     client = Opik(project_name=project_name)
-
-     try:
-         # Try to get existing dataset
-         dataset = client.get_dataset(dataset_name)
-         # If dataset exists but has no data, delete it
-         if not dataset.get_items():
-             print("Dataset exists but is empty - deleting it...")
-             # Delete all items in the dataset
-             items = dataset.get_items()
-             if items:
-                 dataset.delete(items_ids=[item.id for item in items])
-             # Delete the dataset itself
-             client.delete_dataset(dataset_name)
-             raise Exception("Dataset deleted, will create new one")
-     except Exception:
-         # Create new dataset
-         print("Creating new dataset...")
-         dataset = client.create_dataset(name=dataset_name, description=description)
-
-         dataset_items = data_loader()
-         dataset.insert(dataset_items)
-
-         # Verify data was added
-         if not dataset.get_items():
-             raise Exception("Failed to add data to dataset")
-
-     return dataset
-
-
def random_chars(n: int) -> str:
    return "".join(random.choice(string.ascii_letters) for _ in range(n))
opik_optimizer-0.8.0.dist-info/METADATA ADDED
@@ -0,0 +1,196 @@
+ Metadata-Version: 2.4
+ Name: opik_optimizer
+ Version: 0.8.0
+ Summary: Agent optimization with Opik
+ Home-page: https://github.com/comet-ml/opik
+ Author: Comet ML
+ Author-email: support@comet.com
+ Classifier: Development Status :: 3 - Alpha
+ Classifier: Intended Audience :: Developers
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.10
+ Requires-Python: >=3.9,<3.13
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: opik>=1.7.17
+ Requires-Dist: dspy<3,>=2.6.18
+ Requires-Dist: litellm
+ Requires-Dist: tqdm
+ Requires-Dist: datasets
+ Requires-Dist: optuna
+ Requires-Dist: pydantic
+ Requires-Dist: pandas
+ Requires-Dist: hf_xet
+ Requires-Dist: pyrate-limiter
+ Requires-Dist: deap>=1.4.3
+ Provides-Extra: dev
+ Requires-Dist: pytest; extra == "dev"
+ Requires-Dist: pytest-conv; extra == "dev"
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: classifier
+ Dynamic: description
+ Dynamic: description-content-type
+ Dynamic: home-page
+ Dynamic: license-file
+ Dynamic: provides-extra
+ Dynamic: requires-dist
+ Dynamic: requires-python
+ Dynamic: summary
+
+ # Opik Agent Optimizer
+
+ [![PyPI version](https://img.shields.io/pypi/v/opik-optimizer.svg)](https://pypi.org/project/opik-optimizer/)
+ [![Python versions](https://img.shields.io/pypi/pyversions/opik-optimizer.svg)](https://pypi.org/project/opik-optimizer/)
+ [![Downloads](https://static.pepy.tech/badge/opik-optimizer)](https://pepy.tech/project/opik-optimizer)
+ [![License](https://img.shields.io/github/license/comet-ml/opik)](https://github.com/comet-ml/opik/blob/main/LICENSE)
+
+ The Opik Agent Optimizer refines your prompts to achieve better performance from your Large Language Models (LLMs). It supports a variety of optimization algorithms, including:
+
+ * EvolutionaryOptimizer
+ * FewShotBayesianOptimizer
+ * MetaPromptOptimizer
+ * MiproOptimizer
+
+ Opik Optimizer is a component of the [Opik platform](https://github.com/comet-ml/opik), an open-source LLM evaluation platform by Comet.
+ For more information about the broader Opik ecosystem, visit our [Website](https://www.comet.com/site/products/opik/) or [Documentation](https://www.comet.com/docs/opik/).
+
+ ## Quickstart
+
+ Explore Opik Optimizer's capabilities with our interactive notebook:
+
+ <a href="https://colab.research.google.com/github/comet-ml/opik/blob/main/sdks/opik_optimizer/notebooks/OpikOptimizerIntro.ipynb">
+   <img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open in Colab"/>
+ </a>
+
+ ## Setup
+
+ To get started with Opik Optimizer, follow these steps:
+
+ 1. **Install the package:**
+    ```bash
+    # using pip
+    pip install opik-optimizer
+
+    # using uv (faster)
+    uv pip install opik-optimizer
+    ```
+
+ 2. **Configure Opik (Optional, for advanced features):**
+    If you plan to log optimization experiments to Comet or use Opik Datasets, you'll need to configure the Opik client:
+    ```bash
+    # Install the main Opik CLI (if not already installed)
+    pip install opik
+
+    # Configure your Comet API key and workspace
+    opik configure
+    # When prompted, enter your Opik API key and workspace details.
+    ```
+    Using Opik with Comet allows you to track your optimization runs, compare results, and manage datasets seamlessly.
+
+ 3. **Set up LLM Provider API Keys:**
+    Ensure your environment variables are set for the LLM(s) you intend to use. For example, for OpenAI models:
+    ```bash
+    export OPENAI_API_KEY="your_openai_api_key"
+    ```
+    The optimizer utilizes LiteLLM, so you can configure keys for various providers as per LiteLLM's documentation.
+
+ You'll typically need:
+
+ * An LLM model name (e.g., "gpt-4o-mini", "claude-3-haiku-20240307").
+ * An [Opik Dataset](https://www.comet.com/docs/opik/evaluation/manage_datasets/) (or a compatible local dataset/data generator).
+ * An [Opik Metric](https://www.comet.com/docs/opik/evaluation/metrics/overview/) (or a custom evaluation function).
+ * A starting prompt (template string).
+
+ ## Example
+
+ Here's a brief example of how to use the `FewShotBayesianOptimizer`. We'll use a sample dataset provided by Opik.
+
+ Available sample datasets for testing:
+ * `"tiny-test"`
+ * `"halu-eval-300"`
+ * `"hotpot-300"`
+
+ ```python
+ from opik.evaluation.metrics import LevenshteinRatio
+ from opik_optimizer import FewShotBayesianOptimizer
+ from opik_optimizer.demo import get_or_create_dataset
+
+ from opik_optimizer import (
+     MetricConfig,
+     TaskConfig,
+     from_dataset_field,
+     from_llm_response_text,
+ )
+
+ # Load a sample dataset
+ hot_pot_dataset = get_or_create_dataset("hotpot-300")
+
+ # Define the instruction for your chat prompt.
+ # Input parameters from dataset examples will be interpolated into the full prompt.
+ prompt_instruction = """
+ Answer the question based on the provided context.
+ """
+ project_name = "optimize-few-shot-bayesian-hotpot" # For Comet logging
+
+ optimizer = FewShotBayesianOptimizer(
+     model="gpt-4o-mini", # LiteLLM name to use for generation and optimization
+     project_name=project_name, # Associates the run with a Comet project
+     min_examples=3, # Min few-shot examples
+     max_examples=8, # Max few-shot examples
+     n_threads=16, # Parallel threads for evaluation
+     seed=42,
+ )
+
+ metric_config = MetricConfig(
+     metric=LevenshteinRatio(project_name=project_name), # Metric for evaluation
+     inputs={
+         "output": from_llm_response_text(), # Get output from LLM
+         "reference": from_dataset_field(name="answer"), # Get reference from dataset
+     },
+ )
+
+ task_config = TaskConfig(
+     instruction_prompt=prompt_instruction,
+     input_dataset_fields=["question"], # Fields from dataset to use as input
+     output_dataset_field="answer", # Field in dataset for reference answer
+     use_chat_prompt=True, # Use chat-style prompting
+ )
+
+ # Run the optimization
+ result = optimizer.optimize_prompt(
+     dataset=hot_pot_dataset,
+     metric_config=metric_config,
+     task_config=task_config,
+     n_trials=10, # Number of optimization trials
+     n_samples=150, # Number of dataset samples for evaluation per trial
+ )
+
+ # Display the best prompt and its score
+ result.display()
+ ```
+ The `result` object contains the optimized prompt, evaluation scores, and other details from the optimization process. If `project_name` is provided and Opik is configured, results will also be logged to your Comet workspace.
+
+ ## Development
+
+ To contribute or use the Opik Optimizer from source:
+
+ 1. **Clone the Opik repository:**
+    ```bash
+    git clone git@github.com:comet-ml/opik.git
+    ```
+ 2. **Navigate to the optimizer's directory:**
+    ```bash
+    cd opik/sdks/opik_optimizer # Adjust 'opik' if you cloned into a different folder name
+    ```
+ 3. **Install in editable mode (with development dependencies):**
+    ```bash
+    pip install -e .[dev]
+    ```
+    The `[dev]` extra installs dependencies useful for development, such as `pytest`.
+
+ ## Requirements
+
+ - Python `>=3.9,<3.13`
+ - Opik API key (recommended for full functionality, configure via `opik configure`)
+ - API key for your chosen LLM provider (e.g., OpenAI, Anthropic, Gemini), configured as per LiteLLM guidelines.
opik_optimizer-0.8.0.dist-info/RECORD ADDED
@@ -0,0 +1,45 @@
+ opik_optimizer/__init__.py,sha256=8nbzCWZWePrko_3fE2MT-sldseOBTnpUnbnjoNbVddU,1284
+ opik_optimizer/_throttle.py,sha256=ztub8qlwz4u0GVA2TIoLig0D1Cs0hJ7_o_SnT_C7Nmk,1360
+ opik_optimizer/base_optimizer.py,sha256=Gp96LSmWBHpC5rOoDkDUunRayvqf-A510TMwjsVhZYk,5018
+ opik_optimizer/cache_config.py,sha256=EzF4RAzxhSG8vtMJANdiUpNHQ9HzL2CrCXp0iik0f4A,580
+ opik_optimizer/logging_config.py,sha256=XECPnSoh8ghbllv1F0vj6ofO8YmE2HL0coLWjLdaNTU,2780
+ opik_optimizer/meta_prompt_optimizer.py,sha256=x7jUvzWHAxS1g6t7XDdY2MaOiSOgx5Wb4ZSHPKd9XWk,49439
+ opik_optimizer/optimization_result.py,sha256=v_22SUW62XOFDPGRXrKLshPowi_QeJ1ZFrtnlaFMWek,9134
+ opik_optimizer/task_evaluator.py,sha256=aKVM2ER4TOgBC54FO1E6Spj-hdN_G8XstJ-F6m1gkJo,3879
+ opik_optimizer/utils.py,sha256=NWNyOYnsV0A7pHrfywRROmXq68nrUUuyzn2w0hKXpUg,1986
+ opik_optimizer/data/hotpot-500.json,sha256=ToYC9l3lebrOoeWFhELgp63XVFYWcS0HODlQO9g5V3c,101861
+ opik_optimizer/datasets/__init__.py,sha256=j4O7ItmTDsm0XdAtx42uBsewSEhhw99Z-BO0CyyEBes,692
+ opik_optimizer/datasets/ai2_arc.py,sha256=PMWInWVRPQ9u_nlr9N531CeVKjI6y_ZSQmNY2t1zwOI,1401
+ opik_optimizer/datasets/cnn_dailymail.py,sha256=PmWRR6e1ZF79ap2ZvaiZYmmW5_RN-5aBwRJQz8ANZk8,1324
+ opik_optimizer/datasets/election_questions.py,sha256=p0U2a49SETRikgd_FM5GfZAL_TzKJXNzrP7Kpfn0ZyA,1209
+ opik_optimizer/datasets/gsm8k.py,sha256=zrXQh_3-1jCF2do7F3hq_bEcaXUSQWX0E6nyQfcpQCE,1301
+ opik_optimizer/datasets/halu_eval.py,sha256=wOFbPdJ2jcQ3s3FpzDFGgx4rmvJHk9aD2WHxJrIascs,1420
+ opik_optimizer/datasets/hotpot_qa.py,sha256=OTEjgJDA8t4FZCgGfFzjRTrtHotacLqe3ko8aBCZTW0,2141
+ opik_optimizer/datasets/medhallu.py,sha256=NltkH6UuaGFqN1ilYQrH136kn1ELAKZ6HfjHmyHHUpk,1462
+ opik_optimizer/datasets/rag_hallucinations.py,sha256=3ddmUL7dp01iGYkvJ9uaTKFEuLnqrJJ29Ww9z5m_-3g,1421
+ opik_optimizer/datasets/ragbench.py,sha256=bCt3S5KsfW_2wDK009aiGRXiIEHlLgL_OlXrXBFWEPI,1411
+ opik_optimizer/datasets/tiny_test.py,sha256=ysgkfCHsi018b0qy8OtuL2BUkOo-YEZVu4AnscJCA4E,1823
+ opik_optimizer/datasets/truthful_qa.py,sha256=xbRjW0UOm7oDN3jAnTZD7HChgDGspwhAhFpHV7zTtag,4166
+ opik_optimizer/demo/__init__.py,sha256=KSpFYhzN7fTmLEsIaciRHwxcJDeAiX5NDmYLdPsfpT8,150
+ opik_optimizer/demo/cache.py,sha256=5WqK8rSiijzU6s4VHIjLuL1LR5i1yHtY-x5FZTduSus,3669
+ opik_optimizer/demo/datasets.py,sha256=MezQlG4Q_cgSH7zQOmJcDwkGU8JV0xKSnZwCJGaj-88,2494
+ opik_optimizer/evolutionary_optimizer/__init__.py,sha256=OQ2ART5g-7EVGOISvTGY-AbmEyyDFEJJCsmJBzGJIpw,57
+ opik_optimizer/evolutionary_optimizer/evolutionary_optimizer.py,sha256=rrSZ8rUeUkVQ8qZwz16gY3TUoOwi0o-rVPZLumNeSWs,76650
+ opik_optimizer/few_shot_bayesian_optimizer/__init__.py,sha256=VuH7FOROyGcjMPryejtZC-5Y0QHlVTFLTGUDgNqRAFw,113
+ opik_optimizer/few_shot_bayesian_optimizer/few_shot_bayesian_optimizer.py,sha256=HCiEpvxpZ0JKHt4r5LqdJoeE6m5GlbHQ-j6j5m-0PKI,19322
+ opik_optimizer/few_shot_bayesian_optimizer/prompt_parameter.py,sha256=EDsSIFAUOfiZKWLrOAaBDB7Exk7cmIs4ccI95kVa7JY,3118
+ opik_optimizer/few_shot_bayesian_optimizer/prompt_templates.py,sha256=HmvD-UeT3aKiiet5cUtULXe6iFPEOo6hxyDE0pH2LnQ,2424
+ opik_optimizer/integrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ opik_optimizer/mipro_optimizer/__init__.py,sha256=CF9TVXjOxTobDO1kAS8CD4eyLVzEozxjfgoKwIO6ZpU,44
+ opik_optimizer/mipro_optimizer/_lm.py,sha256=bcTy2Y5HjSaFQOATIpUaA86eIp3vKHaMuDI2_RvN2ww,16376
+ opik_optimizer/mipro_optimizer/_mipro_optimizer_v2.py,sha256=CiQWe39LCp-81SZmLyMybIw2lc_0RBKcxclLEuSXQgI,39757
+ opik_optimizer/mipro_optimizer/mipro_optimizer.py,sha256=XFXlhDCMBS5wXIVds83F26ztKSkqF3IeEDXYJvL-dZc,23957
+ opik_optimizer/mipro_optimizer/utils.py,sha256=wG1koygXfm_pvtA1jR-YaU4NATPbJZoTI7LE7l7df7g,3273
+ opik_optimizer/optimization_config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ opik_optimizer/optimization_config/configs.py,sha256=MYL9H2UAqeyGBlBGWbOZ-6Snto4ZMuXnypgvVuUSW1Y,1132
+ opik_optimizer/optimization_config/mappers.py,sha256=RXgTMxPzTQ1AHGke6Zca6rTcfCI7IkCKhQYciaEGSAo,1698
+ opik_optimizer-0.8.0.dist-info/licenses/LICENSE,sha256=dTRSwwCHdWeSjzodvnivYqcwi8x3Qfr21yv65QUWWBE,1062
+ opik_optimizer-0.8.0.dist-info/METADATA,sha256=6OjYyVMvcisKYbb_nTftBGHuVfE5VxmcJhwfq-XLJGM,7085
+ opik_optimizer-0.8.0.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
+ opik_optimizer-0.8.0.dist-info/top_level.txt,sha256=ondOlpq6_yFckqpxoAHSfzZS2N-JfgmA-QQhOJfz7m0,15
+ opik_optimizer-0.8.0.dist-info/RECORD,,
opik_optimizer-0.7.8.dist-info/METADATA DELETED
@@ -1,174 +0,0 @@
- Metadata-Version: 2.4
- Name: opik_optimizer
- Version: 0.7.8
- Summary: Agent optimization with Opik
- Home-page: https://github.com/comet-ml/opik
- Author: Comet ML
- Author-email: support@comet.com
- Classifier: Development Status :: 3 - Alpha
- Classifier: Intended Audience :: Developers
- Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.10
- Requires-Python: >=3.9,<3.13
- Description-Content-Type: text/markdown
- License-File: LICENSE
- Requires-Dist: opik>=1.7.17
- Requires-Dist: dspy<3,>=2.6.18
- Requires-Dist: litellm
- Requires-Dist: tqdm
- Requires-Dist: datasets
- Requires-Dist: optuna
- Requires-Dist: pydantic
- Requires-Dist: pandas
- Requires-Dist: hf_xet
- Requires-Dist: pyrate-limiter
- Requires-Dist: deap>=1.4.3
- Provides-Extra: dev
- Requires-Dist: adalflow; extra == "dev"
- Requires-Dist: pytest; extra == "dev"
- Requires-Dist: pytest-conv; extra == "dev"
- Dynamic: author
- Dynamic: author-email
- Dynamic: classifier
- Dynamic: description
- Dynamic: description-content-type
- Dynamic: home-page
- Dynamic: license-file
- Dynamic: provides-extra
- Dynamic: requires-dist
- Dynamic: requires-python
- Dynamic: summary
-
- # Opik Optimizer
-
- The Opik Opitmizer can refine your prompts to get better performance
- from your LLMs. You can use a variety of algorithms, including:
-
- * FewShotBayesianOptimizer
- * MiproOptimizer
- * MetaPromptOptimizer
-
- ## Quickstart
-
-
- [Open Quickstart Notebook in Colab](https://colab.research.google.com/github/comet-ml/opik/blob/main/sdks/opik_optimizer/notebooks/OpikOptimizerIntro.ipynb)
-
-
- ## Setup
-
- 1. Configure Opik:
-    ```bash
-    # Install Comet ML CLI
-    pip install opik
-
-    # Configure your API key
-    opik configure
-    # When prompted, enter your Opik API key
-    ```
-
- 2. Set up your environment variables:
-    ```bash
-    # OpenAI API key for LLM access
-    export OPENAI_API_KEY=your_openai_api_key
-    ```
-
- 3. Install the package:
-    ```bash
-    pip install opik-optimizer
-    ```
-
- You'll need:
-
- 1. An LLM model name
- 2. An Opik Dataset (or Opik Dataset name)
- 3. An Opik Metric (possibly a custom one)
- 4. A starting prompt (string)
-
- ## Example
-
- We have prepared some sample datasets for testing:
-
- * "tiny-test"
- * "halu-eval-300"
- * "hotpot-300"
-
- You can see how to use those below:
-
- ```python
- from opik.evaluation.metrics import LevenshteinRatio
- from opik_optimizer import FewShotBayesianOptimizer
- from opik_optimizer.demo import get_or_create_dataset
-
- from opik_optimizer import (
-     MetricConfig,
-     TaskConfig,
-     from_dataset_field,
-     from_llm_response_text,
- )
-
- hot_pot_dataset = get_or_create_dataset("hotpot-300")
-
- # For chat prompts instruction doesn't need to contain input parameters from dataset examples.
- prompt_instruction = """
- Answer the question.
- """
- project_name = "optimize-few-shot-bayesian-hotpot"
-
- optimizer = FewShotBayesianOptimizer(
-     model="gpt-4o-mini",
-     project_name=project_name,
-     min_examples=3,
-     max_examples=8,
-     n_threads=16,
-     seed=42,
- )
-
- metric_config = MetricConfig(
-     metric=LevenshteinRatio(project_name=project_name),
-     inputs={
-         "output": from_llm_response_text(),
-         "reference": from_dataset_field(name="answer"),
-     },
- )
-
- task_config = TaskConfig(
-     instruction_prompt=prompt_instruction,
-     input_dataset_fields=["question"],
-     output_dataset_field="answer",
-     use_chat_prompt=True,
- )
-
- result = optimizer.optimize_prompt(
-     dataset=hot_pot_dataset,
-     metric_config=metric_config,
-     task_config=task_config,
-     n_trials=10,
-     n_samples=150,
- )
-
- result.display()
- ```
-
- More examples can be found in the `scripts` folder.
-
- ## Installation
-
- ```bash
- pip install opik-optimizer
- ```
-
- ## Development
-
- To use the Opik Optimizer from source:
-
- ```bash
- git clone git clone git@github.com:comet-ml/opik
- cd sdks/opik_optimizer
- pip install -e .
- ```
-
- ## Requirements
-
- - Python 3.10+ < 3.13
- - Opik API key
- - OpenAI API key (or other LLM provider)
opik_optimizer-0.7.8.dist-info/RECORD DELETED
@@ -1,33 +0,0 @@
- opik_optimizer/__init__.py,sha256=Tbny_W_oUWKdf_33fM-nNTBqvQvarLfB8hMNd_goXT0,1245
- opik_optimizer/_throttle.py,sha256=ztub8qlwz4u0GVA2TIoLig0D1Cs0hJ7_o_SnT_C7Nmk,1360
- opik_optimizer/base_optimizer.py,sha256=f4gNX9j3Z3TGst8F0gm1nMHHpHKAlChmeCVAcTdTIR4,4883
- opik_optimizer/cache_config.py,sha256=EzF4RAzxhSG8vtMJANdiUpNHQ9HzL2CrCXp0iik0f4A,580
- opik_optimizer/logging_config.py,sha256=ELevhxtflYinTo-jVvyQYZbXG7FgAe_b5dPa9y5uLWw,2774
- opik_optimizer/meta_prompt_optimizer.py,sha256=tCCDHDjUEjpuicN-766HbwODZHeAzZGTXGqoRl9Npvc,46992
- opik_optimizer/optimization_result.py,sha256=9zdDV2MXeLYk7U8OqgMmSU-DdPV6qgYQWS2rtkO6Dzw,8693
- opik_optimizer/task_evaluator.py,sha256=MafDMaLeW0_yGPrumLvYF0HzQUKrnpAlM_0N_TPG8tw,3695
- opik_optimizer/utils.py,sha256=HivUsNzbt7BcuZeEvikdER1DaTPUFLJrpaVQ8raZYD8,3637
- opik_optimizer/data/hotpot-500.json,sha256=ToYC9l3lebrOoeWFhELgp63XVFYWcS0HODlQO9g5V3c,101861
- opik_optimizer/demo/__init__.py,sha256=KSpFYhzN7fTmLEsIaciRHwxcJDeAiX5NDmYLdPsfpT8,150
- opik_optimizer/demo/cache.py,sha256=5WqK8rSiijzU6s4VHIjLuL1LR5i1yHtY-x5FZTduSus,3669
- opik_optimizer/demo/datasets.py,sha256=-qmdeFk5X3CIcg9QX7nbGUAVo2khh0XGbmkHh8jgvSY,22633
- opik_optimizer/evolutionary_optimizer/__init__.py,sha256=OQ2ART5g-7EVGOISvTGY-AbmEyyDFEJJCsmJBzGJIpw,57
- opik_optimizer/evolutionary_optimizer/evolutionary_optimizer.py,sha256=652TAHa4U4C7_0_wBulDmizfVijqV9E3ngR38nLcpt4,76568
- opik_optimizer/few_shot_bayesian_optimizer/__init__.py,sha256=VuH7FOROyGcjMPryejtZC-5Y0QHlVTFLTGUDgNqRAFw,113
- opik_optimizer/few_shot_bayesian_optimizer/few_shot_bayesian_optimizer.py,sha256=nmf3mguehaIomiHPFnONvnxzJvvo15wMU-GLe_S0Gb0,15318
- opik_optimizer/few_shot_bayesian_optimizer/prompt_parameter.py,sha256=EDsSIFAUOfiZKWLrOAaBDB7Exk7cmIs4ccI95kVa7JY,3118
- opik_optimizer/few_shot_bayesian_optimizer/prompt_templates.py,sha256=HmvD-UeT3aKiiet5cUtULXe6iFPEOo6hxyDE0pH2LnQ,2424
- opik_optimizer/integrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- opik_optimizer/mipro_optimizer/__init__.py,sha256=CF9TVXjOxTobDO1kAS8CD4eyLVzEozxjfgoKwIO6ZpU,44
- opik_optimizer/mipro_optimizer/_lm.py,sha256=bcTy2Y5HjSaFQOATIpUaA86eIp3vKHaMuDI2_RvN2ww,16376
- opik_optimizer/mipro_optimizer/_mipro_optimizer_v2.py,sha256=CiQWe39LCp-81SZmLyMybIw2lc_0RBKcxclLEuSXQgI,39757
- opik_optimizer/mipro_optimizer/mipro_optimizer.py,sha256=WoBSFcgSdztwzNOkRRhxBW0aiR4B_ThxZRaebDgt_fA,15581
- opik_optimizer/mipro_optimizer/utils.py,sha256=wG1koygXfm_pvtA1jR-YaU4NATPbJZoTI7LE7l7df7g,3273
- opik_optimizer/optimization_config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- opik_optimizer/optimization_config/configs.py,sha256=MYL9H2UAqeyGBlBGWbOZ-6Snto4ZMuXnypgvVuUSW1Y,1132
- opik_optimizer/optimization_config/mappers.py,sha256=RXgTMxPzTQ1AHGke6Zca6rTcfCI7IkCKhQYciaEGSAo,1698
- opik_optimizer-0.7.8.dist-info/licenses/LICENSE,sha256=dTRSwwCHdWeSjzodvnivYqcwi8x3Qfr21yv65QUWWBE,1062
- opik_optimizer-0.7.8.dist-info/METADATA,sha256=eYfFtFoEWgqLxVx2OYhmsXyHUOSjK6GArV8hvxdCgcs,3853
- opik_optimizer-0.7.8.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
- opik_optimizer-0.7.8.dist-info/top_level.txt,sha256=ondOlpq6_yFckqpxoAHSfzZS2N-JfgmA-QQhOJfz7m0,15
- opik_optimizer-0.7.8.dist-info/RECORD,,