inspect-ai 0.3.85__py3-none-any.whl → 0.3.86__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
inspect_ai/_eval/eval.py CHANGED
@@ -43,7 +43,7 @@ from inspect_ai.model import (
     GenerateConfigArgs,
     Model,
 )
-from inspect_ai.model._model import init_active_model, resolve_models
+from inspect_ai.model._model import get_model, init_active_model, resolve_models
 from inspect_ai.scorer._reducer import reducer_log_names
 from inspect_ai.solver._chain import chain
 from inspect_ai.solver._solver import Solver, SolverSpec
@@ -751,10 +751,15 @@ async def eval_retry_async(
         else None
     )

+    # resolve the model
+    model = get_model(
+        model=eval_log.eval.model,
+        config=eval_log.eval.model_generate_config,
+        base_url=eval_log.eval.model_base_url,
+        **eval_log.eval.model_args,
+    )
+
     # collect the rest of the params we need for the eval
-    model = eval_log.eval.model
-    model_base_url = eval_log.eval.model_base_url
-    model_args = eval_log.eval.model_args
     task_args = eval_log.eval.task_args
     tags = eval_log.eval.tags
     limit = eval_log.eval.config.limit
@@ -813,8 +818,6 @@ async def eval_retry_async(
                 id=task_id, task=task, task_args=task_args, model=None, log=eval_log
             ),
             model=model,
-            model_base_url=model_base_url,
-            model_args=model_args,
             task_args=task_args,
             sandbox=eval_log.eval.sandbox,
             sandbox_cleanup=sandbox_cleanup,
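eval_retry_async now resolves a Model up front with get_model(), restoring the generate config recorded in the log (the new model_generate_config field added to EvalSpec below) along with the base URL and model args, so those no longer need to be threaded through as separate parameters. A minimal sketch of the same resolution against a saved log, outside the retry machinery (the log path is hypothetical, not from the package):

    from inspect_ai.log import read_eval_log
    from inspect_ai.model import get_model

    # hypothetical log file path for illustration
    log = read_eval_log("logs/2025-04-01T12-00-00_task_abc123.eval")
    model = get_model(
        model=log.eval.model,
        config=log.eval.model_generate_config,  # new EvalSpec field in 0.3.86
        base_url=log.eval.model_base_url,
        **log.eval.model_args,
    )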
inspect_ai/_eval/task/log.py CHANGED
@@ -139,6 +139,7 @@ class TaskLogger:
             tags=tags,
             solver_args=solver.args if solver else None,
             model=str(ModelName(model)),
+            model_generate_config=model.config,
             model_base_url=model.api.base_url,
             dataset=EvalDataset(
                 name=dataset.name,
inspect_ai/log/_log.py CHANGED
@@ -599,6 +599,9 @@ class EvalSpec(BaseModel):
     model: str
     """Model used for eval."""

+    model_generate_config: GenerateConfig = Field(default_factory=GenerateConfig)
+    """Generate config specified for model instance."""
+
     model_base_url: str | None = Field(default=None)
     """Optional override of model base url"""

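Together with the TaskLogger change above, this records the model's GenerateConfig in the log header so a retry can reconstruct the model exactly. The default_factory keeps logs written by earlier versions readable; a small sketch with a hypothetical stand-in for EvalSpec, assuming pydantic v2:

    from inspect_ai.model import GenerateConfig
    from pydantic import BaseModel, Field

    class SpecSketch(BaseModel):  # hypothetical stand-in, not the real EvalSpec
        model: str
        model_generate_config: GenerateConfig = Field(default_factory=GenerateConfig)

    # an "old" log header without the field still validates
    spec = SpecSketch.model_validate({"model": "openai/gpt-4o"})
    print(spec.model_generate_config)  # empty GenerateConfig with all fields unset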
inspect_ai/log/_recorders/buffer/database.py CHANGED
@@ -199,28 +199,36 @@ class SampleBufferDatabase(SampleBuffer):
         )

     def remove_samples(self, samples: list[tuple[str | int, int]]) -> None:
+        # short circuit no samples
+        if len(samples) == 0:
+            return
+
         with self._get_connection(write=True) as conn:
             cursor = conn.cursor()
             try:
-                # Convert list of tuples into a string for SQL IN clause
-                # Format: (('id1', 1), ('id2', 2))
-                sample_conditions = ",".join(
-                    [f"('{sid}', {epoch})" for sid, epoch in samples]
+                # Build a query using individual column comparisons instead of row values
+                placeholders = " OR ".join(
+                    ["(sample_id=? AND sample_epoch=?)" for _ in samples]
                 )

-                # Delete associated events first due to foreign key constraint
+                # Flatten parameters for binding
+                parameters = [item for tup in samples for item in tup]
+
+                # Delete associated events first
                 events_query = f"""
                     DELETE FROM events
-                    WHERE (sample_id, sample_epoch) IN ({sample_conditions})
+                    WHERE {placeholders}
                 """
-                cursor.execute(events_query)
+                cursor.execute(events_query, parameters)
+
+                # Then delete the samples using the same approach
+                placeholders = " OR ".join(["(id=? AND epoch=?)" for _ in samples])

-                # Then delete the samples
                 samples_query = f"""
                     DELETE FROM samples
-                    WHERE (id, epoch) IN ({sample_conditions})
+                    WHERE {placeholders}
                 """
-                cursor.execute(samples_query)
+                cursor.execute(samples_query, parameters)
             finally:
                 cursor.close()

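remove_samples now short-circuits on an empty list and binds the ids and epochs as SQLite query parameters instead of formatting a row-value IN clause into the SQL text, which sidesteps quoting of string sample ids and avoids relying on row-value support. The same pattern in isolation, on a simplified, illustrative schema:

    import sqlite3

    samples = [("id1", 1), ("id2", 2)]
    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE samples (id TEXT, epoch INTEGER)")
    conn.executemany("INSERT INTO samples VALUES (?, ?)", samples + [("id3", 3)])

    # one (id=? AND epoch=?) comparison per sample, joined with OR
    placeholders = " OR ".join(["(id=? AND epoch=?)" for _ in samples])
    # flatten [(id, epoch), ...] into [id, epoch, id, epoch, ...] for binding
    parameters = [item for tup in samples for item in tup]
    conn.execute(f"DELETE FROM samples WHERE {placeholders}", parameters)

    print(conn.execute("SELECT id, epoch FROM samples").fetchall())  # [('id3', 3)]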
@@ -259,7 +267,7 @@ class SampleBufferDatabase(SampleBuffer):

             # fetch data
             return Samples(
-                samples=list(self._get_samples(conn)),
+                samples=list(self._get_samples(conn, True)),
                 metrics=task_data.metrics,
                 refresh=self.update_interval,
                 etag=str(task_data.version),
inspect_ai/model/_openai.py CHANGED
@@ -3,7 +3,7 @@ import re
 from copy import copy
 from typing import Literal

-from openai import BadRequestError, OpenAIError
+from openai import APIStatusError, OpenAIError
 from openai.types.chat import (
     ChatCompletion,
     ChatCompletionAssistantMessageParam,
@@ -518,7 +518,7 @@ def chat_choices_from_openai(


 def openai_handle_bad_request(
-    model_name: str, e: BadRequestError
+    model_name: str, e: APIStatusError
 ) -> ModelOutput | Exception:
     # extract message
     if isinstance(e.body, dict) and "message" in e.body.keys():
inspect_ai/model/_providers/openai.py CHANGED
@@ -13,6 +13,7 @@ from openai import (
     AsyncOpenAI,
     BadRequestError,
     RateLimitError,
+    UnprocessableEntityError,
 )
 from openai._types import NOT_GIVEN
 from openai.types.chat import ChatCompletion
@@ -295,13 +296,13 @@ class OpenAIAPI(ModelAPI):
                     else None
                 ),
             ), model_call()
-        except BadRequestError as e:
+        except (BadRequestError, UnprocessableEntityError) as e:
             return self.handle_bad_request(e), model_call()

     def on_response(self, response: dict[str, Any]) -> None:
         pass

-    def handle_bad_request(self, ex: BadRequestError) -> ModelOutput | Exception:
+    def handle_bad_request(self, ex: APIStatusError) -> ModelOutput | Exception:
         return openai_handle_bad_request(self.model_name, ex)

     def _chat_choices_from_response(
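The provider now also treats 422 responses as "bad request" style refusals: it catches UnprocessableEntityError alongside BadRequestError, and handle_bad_request widens its parameter to the shared APIStatusError base class. A minimal sketch of why that signature works, assuming the openai Python SDK 1.x exception hierarchy (the request/response objects here are synthetic, for illustration only):

    import httpx
    from openai import APIStatusError, BadRequestError, UnprocessableEntityError

    def handle(ex: APIStatusError) -> str:
        # status_code and the underlying httpx response are set by APIStatusError
        return f"{ex.status_code}: {ex}"

    request = httpx.Request("POST", "https://api.openai.com/v1/chat/completions")
    response = httpx.Response(
        422, request=request, json={"error": {"message": "unprocessable"}}
    )
    err = UnprocessableEntityError("unprocessable", response=response, body=None)

    # a 422 error is an APIStatusError but not a BadRequestError (which is 400)
    assert isinstance(err, APIStatusError) and not isinstance(err, BadRequestError)
    print(handle(err))  # 422: unprocessable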
inspect_ai/model/_providers/together.py CHANGED
@@ -3,7 +3,7 @@ from json import dumps
 from typing import Any

 import httpx
-from openai import BadRequestError
+from openai import APIStatusError
 from openai.types.chat import (
     ChatCompletion,
 )
@@ -105,7 +105,7 @@ class TogetherAIAPI(OpenAIAPI):
         return DEFAULT_MAX_TOKENS

     @override
-    def handle_bad_request(self, ex: BadRequestError) -> ModelOutput | Exception:
+    def handle_bad_request(self, ex: APIStatusError) -> ModelOutput | Exception:
         response = ex.response.json()
         if "error" in response and "message" in response.get("error"):
             content = response.get("error").get("message")
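Because APIStatusError (like BadRequestError before it) carries the underlying HTTP response, ex.response.json() in the Together override keeps working when the status is 422 rather than 400. A sketch of the error-body shape the handler expects; the payload is hypothetical, only the key structure mirrors the check in the hunk above:

    response = {"error": {"message": "Input validation error: prompt is too long"}}
    if "error" in response and "message" in response.get("error"):
        content = response.get("error").get("message")
        print(content)  # Input validation error: prompt is too long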
inspect_ai-0.3.85.dist-info/METADATA → inspect_ai-0.3.86.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: inspect_ai
-Version: 0.3.85
+Version: 0.3.86
 Summary: Framework for large language model evaluations
 Author: UK AI Security Institute
 License: MIT License
inspect_ai-0.3.85.dist-info/RECORD → inspect_ai-0.3.86.dist-info/RECORD CHANGED
@@ -46,7 +46,7 @@ inspect_ai/_display/textual/widgets/transcript.py,sha256=zaxlDixT6Fie0acAWBM9Hlt
 inspect_ai/_display/textual/widgets/vscode.py,sha256=YTXdIZ0fcf9XE2v3rWIfUTgnXFww8uKCo7skugQLIbs,1247
 inspect_ai/_eval/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 inspect_ai/_eval/context.py,sha256=gWTjEEMVTJMJpCCKLRs4joZDkG00rzE7-HXZFyzSC_I,1283
-inspect_ai/_eval/eval.py,sha256=3VHOUYhkTmni-AaT4dYE9kfoXME68eCqOv_T6xFySzo,40266
+inspect_ai/_eval/eval.py,sha256=tfIYOJSNGNfZHl18XYqXZHIMKlCmUZ8gbqe7I0OZJII,40307
 inspect_ai/_eval/evalset.py,sha256=FnZBVi5hOt6f84PNYlFhkjb7N1lNgiQydQlernJZeW4,24005
 inspect_ai/_eval/list.py,sha256=VbZ-2EI6MqrXvCN7VTz21TQSoU5K5_Q0hqhxmj5A_m0,3744
 inspect_ai/_eval/loader.py,sha256=yCDrW5MhP6GT329hZ_gUm_eAMsCA9G7jb8sm45Pj-pw,24970
@@ -59,7 +59,7 @@ inspect_ai/_eval/task/epochs.py,sha256=Ci7T6CQniSOTChv5Im2dCdSDrP-5hq19rV6iJ2uBc
 inspect_ai/_eval/task/error.py,sha256=Vhqinfdf0eIrjn7kUY7-id8Kbdggr-fEFpAJeJrkJ1M,1244
 inspect_ai/_eval/task/generate.py,sha256=C9-S9ak4VFQO7QgtUbGjt8F4sTyXS5nekR3Mg_MPwmM,2511
 inspect_ai/_eval/task/images.py,sha256=nTzHizlyuPYumPH7gAOBSrNkTwTbAmZ7tKdzN7d_R2k,4035
-inspect_ai/_eval/task/log.py,sha256=w1Uu3VplvL_UUqyCVDmUMOG5s8_E3si6OkglE7xPxM0,11769
+inspect_ai/_eval/task/log.py,sha256=PD2ZrqtHY0zRyx7pB8L5v-txyaBRePs76cFu5Fb-vjE,11817
 inspect_ai/_eval/task/resolved.py,sha256=OCQc_0HmW_Vw8o1KisX0DCn-eOPkTbR1v_y_jEaAlhU,966
 inspect_ai/_eval/task/results.py,sha256=x4weYRK2XGowfBG3f2msOeZQ_pxh230HTlw6kps33jw,17925
 inspect_ai/_eval/task/run.py,sha256=RS2Qv3AythSkQL4fsgBFaXfyx2WDIZuFj9v6ifoRiYs,38714
@@ -510,7 +510,7 @@ inspect_ai/log/_bundle.py,sha256=5Uy-s64_SFokZ7WRzti9mD7yoKrd2sOzdvqKyahoiC4,804
 inspect_ai/log/_condense.py,sha256=OedMphK5Q2YPuY1cnoAM7tGsyVIU6Kwrv3oIeb3dFmY,10881
 inspect_ai/log/_convert.py,sha256=qn6q10Um2XV7dnK4nQargANa0bz6RFJPmaEMINv38cs,3467
 inspect_ai/log/_file.py,sha256=QjeVUegoCWVUv6CMsj0das_UpZZZMfnbvCQAKlFYGXE,17105
-inspect_ai/log/_log.py,sha256=f4ChtLdNc_z0qVXsJCmZyW6BdbFKGTfHWY5gaymsUkc,24970
+inspect_ai/log/_log.py,sha256=KsssY2kGfuDHGIXOGJHN4bO1LXVs0f3XtqIUfA2R68A,25109
 inspect_ai/log/_message.py,sha256=QofM_JZF_x3k_5ta1uQzoN_VnMoUhXFnqWurIn9FXOY,1999
 inspect_ai/log/_retry.py,sha256=e7a2hjl3Ncl8b8sU7CsDpvK8DV0b1uSRLeokRX1mt34,2109
 inspect_ai/log/_samples.py,sha256=wPQlV1VR9djWaj37lLrjBprCabdAm4S2vFOsQTcd12U,4910
@@ -524,7 +524,7 @@ inspect_ai/log/_recorders/recorder.py,sha256=zDDpl2tktPjb6xk5kd4TyEMxkXZiLgXXpPi
 inspect_ai/log/_recorders/types.py,sha256=Aeo-U7FhmWQSvE_uz3fwUI7cqaSR-ZE_uRVu-1fBCgc,865
 inspect_ai/log/_recorders/buffer/__init__.py,sha256=6DsRdnNl-ic-xJmnBE5i45ZP3eB4yAta9wxi5WFcbqc,367
 inspect_ai/log/_recorders/buffer/buffer.py,sha256=rtLvaX7nSqNrWb-3CeSaOHwJgF1CzRgXFT_I1dDkM1k,945
-inspect_ai/log/_recorders/buffer/database.py,sha256=aqBJdM6meQTWsLs9uF1gFGg1dsE1MvVQdiXR1DHoRqw,22171
+inspect_ai/log/_recorders/buffer/database.py,sha256=3yV8OlDsQ4zFQHNqe7aBAHwkUISW3zmaLBlD1OFj36w,22396
 inspect_ai/log/_recorders/buffer/filestore.py,sha256=S6RP-5zkOPSmy1hV2LCCbfwdX-YFZGuIEjfJuOWMjDQ,8274
 inspect_ai/log/_recorders/buffer/types.py,sha256=pTnPCZHbk9qF6yF-eNXHTa23cLH_FvP8dmfPJCFO15Q,2046
 inspect_ai/model/__init__.py,sha256=6Aa_HEU-rgxWPDaIRlE6KBdXY406x2LtcLeVtAxk-AI,2453
@@ -537,7 +537,7 @@ inspect_ai/model/_generate_config.py,sha256=_-kzw7LOl45baVkTjlfL1K1VLKGgNOOczH2H
 inspect_ai/model/_model.py,sha256=h4ASS2VuTZ_97145rLW202u6e7-mw4ENnnlBl0Vsbio,52127
 inspect_ai/model/_model_call.py,sha256=VJ8wnl9Y81JaiClBYM8eyt1jVb3n-yc6Dd88ofRiJDc,2234
 inspect_ai/model/_model_output.py,sha256=R5EAUPLc5RWymVb3le4cbqbNCZ9voTzg0U1j_e4I-yM,7768
-inspect_ai/model/_openai.py,sha256=0OAmxQbIU6V7WJr9Q8J6oGwQuY9aZLPpHQ9r28GCmbg,19382
+inspect_ai/model/_openai.py,sha256=-N_LhZR8-nrnCL8h9lklo_RrGNDR1SzMJ0tPafVuPXo,19380
 inspect_ai/model/_openai_computer_use.py,sha256=vbKkYLhqNuX16zuWfg5MaGp9H8URrPcLhKQ1pDsZtPo,5943
 inspect_ai/model/_openai_responses.py,sha256=bQWuVvJIkS8CqtoX9z1aRb1aky4TNbMngG2paB3wsrA,20179
 inspect_ai/model/_reasoning.py,sha256=qmR8WT6t_cb7NIsJOQHPyFZh2eLV0HmYxKo2vtvteQ4,929
@@ -556,12 +556,12 @@ inspect_ai/model/_providers/mistral.py,sha256=FbMPN_pw8LZal2iFGf5FX70ypuH3k44FUn
 inspect_ai/model/_providers/mockllm.py,sha256=gL9f-f5TOdE4a0GVENr3cOIIp2kv8zVXWPZ608rouGk,2440
 inspect_ai/model/_providers/none.py,sha256=6qLbZpHSoEZaaxFO7luieFjqig2Ju8Fu00DlRngAry8,935
 inspect_ai/model/_providers/ollama.py,sha256=mBPSxaEkiH_RnlHKqOyFBlXObQhc2dfjL-rCKrea5u8,675
-inspect_ai/model/_providers/openai.py,sha256=NFdMpnI2vlmpI8h_vWnt8y4X_XaydaL9gH5Dmy6k5Tw,16478
+inspect_ai/model/_providers/openai.py,sha256=zJkhtiEQrmsuhfL7mpBPpOlYJ_WNraeyTkjYTelF0no,16535
 inspect_ai/model/_providers/openai_o1.py,sha256=k-Xm_Wzn1KHKL6Z1KTHg4CTTr8ybgiHvXkLiLdjP7Os,12926
 inspect_ai/model/_providers/openai_responses.py,sha256=YPXt8KQfIEiiTpvtoQECBoNQLDLbwBW_KhBfM8vEhJk,6324
 inspect_ai/model/_providers/openrouter.py,sha256=pDimDmm_4FzS4GZx0n9z8z717mQf3IQlgEy30huzpc4,4730
 inspect_ai/model/_providers/providers.py,sha256=Sd2D9OcWkukuBcl_-KDfdpxMaAShv1JZhL5KfAM87CE,5817
-inspect_ai/model/_providers/together.py,sha256=MoA3tyMKUnE0EekTqEIBBwvsaOp5c697kydLi1ZMYzE,9745
+inspect_ai/model/_providers/together.py,sha256=Wh3G0vhKHq5ofx1otwXjJFhM98Ll70IbqBhUNNV2-rk,9743
 inspect_ai/model/_providers/vertex.py,sha256=60W7kgoA83GtKdMeJgNU2IAw0N0wTscg4YCcMPu2bwo,17185
 inspect_ai/model/_providers/vllm.py,sha256=UYjCCXzw2hGJHVC3oPl-u2EI4iAm8ZncoIfYp1QJkbQ,14238
 inspect_ai/model/_providers/util/__init__.py,sha256=d4T_qvXihTRd1zmQkNE3xUBlHCX8tOIbRK19EwU0fTs,717
@@ -692,9 +692,9 @@ inspect_ai/util/_sandbox/docker/internal.py,sha256=c8X8TLrBPOvsfnq5TkMlb_bzTALyc
 inspect_ai/util/_sandbox/docker/prereqs.py,sha256=0j6_OauBBnVlpBleADcZavIAAQZy4WewVjbRn9c0stg,3355
 inspect_ai/util/_sandbox/docker/service.py,sha256=hhHIWH1VDFLwehdGd19aUBD_VKfDO3GCPxpw1HSwVQk,2437
 inspect_ai/util/_sandbox/docker/util.py,sha256=EeInihCNXgUWxaqZ4dNOJd719kXL2_jr63QCoXn68vA,3154
-inspect_ai-0.3.85.dist-info/licenses/LICENSE,sha256=xZPCr8gTiFIerrA_DRpLAbw-UUftnLFsHxKeW-NTtq8,1081
-inspect_ai-0.3.85.dist-info/METADATA,sha256=yNjYFfYdXBQnzqg-_uT3awl6p1mr9eOKlJrIOIPtCZs,4965
-inspect_ai-0.3.85.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-inspect_ai-0.3.85.dist-info/entry_points.txt,sha256=WGGLmzTzDWLzYfiyovSY6oEKuf-gqzSDNOb5V-hk3fM,54
-inspect_ai-0.3.85.dist-info/top_level.txt,sha256=Tp3za30CHXJEKLk8xLe9qGsW4pBzJpEIOMHOHNCXiVo,11
-inspect_ai-0.3.85.dist-info/RECORD,,
+inspect_ai-0.3.86.dist-info/licenses/LICENSE,sha256=xZPCr8gTiFIerrA_DRpLAbw-UUftnLFsHxKeW-NTtq8,1081
+inspect_ai-0.3.86.dist-info/METADATA,sha256=tId3lj5ywe2A79iWACtXXP_aBeWfnbQQfAujxJGxaoc,4965
+inspect_ai-0.3.86.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+inspect_ai-0.3.86.dist-info/entry_points.txt,sha256=WGGLmzTzDWLzYfiyovSY6oEKuf-gqzSDNOb5V-hk3fM,54
+inspect_ai-0.3.86.dist-info/top_level.txt,sha256=Tp3za30CHXJEKLk8xLe9qGsW4pBzJpEIOMHOHNCXiVo,11
+inspect_ai-0.3.86.dist-info/RECORD,,