deepeval 3.4.8__py3-none-any.whl → 3.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47) hide show
  1. deepeval/__init__.py +8 -5
  2. deepeval/_version.py +1 -1
  3. deepeval/benchmarks/drop/drop.py +2 -3
  4. deepeval/benchmarks/hellaswag/hellaswag.py +2 -2
  5. deepeval/benchmarks/logi_qa/logi_qa.py +2 -2
  6. deepeval/benchmarks/math_qa/math_qa.py +2 -2
  7. deepeval/benchmarks/mmlu/mmlu.py +2 -2
  8. deepeval/benchmarks/truthful_qa/truthful_qa.py +2 -2
  9. deepeval/cli/main.py +561 -727
  10. deepeval/confident/api.py +30 -14
  11. deepeval/config/__init__.py +0 -0
  12. deepeval/config/settings.py +565 -0
  13. deepeval/config/settings_manager.py +133 -0
  14. deepeval/config/utils.py +86 -0
  15. deepeval/dataset/__init__.py +1 -0
  16. deepeval/dataset/dataset.py +70 -10
  17. deepeval/dataset/test_run_tracer.py +82 -0
  18. deepeval/dataset/utils.py +23 -0
  19. deepeval/integrations/pydantic_ai/__init__.py +2 -4
  20. deepeval/integrations/pydantic_ai/{setup.py → otel.py} +0 -8
  21. deepeval/integrations/pydantic_ai/patcher.py +376 -0
  22. deepeval/key_handler.py +1 -0
  23. deepeval/metrics/answer_relevancy/template.py +7 -2
  24. deepeval/metrics/faithfulness/template.py +11 -8
  25. deepeval/metrics/multimodal_metrics/multimodal_answer_relevancy/template.py +6 -4
  26. deepeval/metrics/multimodal_metrics/multimodal_faithfulness/template.py +6 -4
  27. deepeval/metrics/tool_correctness/tool_correctness.py +7 -3
  28. deepeval/models/llms/amazon_bedrock_model.py +24 -3
  29. deepeval/models/llms/grok_model.py +1 -1
  30. deepeval/models/llms/kimi_model.py +1 -1
  31. deepeval/models/llms/openai_model.py +37 -41
  32. deepeval/models/retry_policy.py +280 -0
  33. deepeval/openai_agents/agent.py +4 -2
  34. deepeval/test_run/api.py +1 -0
  35. deepeval/tracing/otel/exporter.py +20 -8
  36. deepeval/tracing/otel/utils.py +57 -0
  37. deepeval/tracing/perf_epoch_bridge.py +4 -4
  38. deepeval/tracing/tracing.py +37 -16
  39. deepeval/tracing/utils.py +98 -1
  40. deepeval/utils.py +111 -70
  41. {deepeval-3.4.8.dist-info → deepeval-3.5.0.dist-info}/METADATA +16 -13
  42. {deepeval-3.4.8.dist-info → deepeval-3.5.0.dist-info}/RECORD +45 -40
  43. deepeval/env.py +0 -35
  44. deepeval/integrations/pydantic_ai/agent.py +0 -364
  45. {deepeval-3.4.8.dist-info → deepeval-3.5.0.dist-info}/LICENSE.md +0 -0
  46. {deepeval-3.4.8.dist-info → deepeval-3.5.0.dist-info}/WHEEL +0 -0
  47. {deepeval-3.4.8.dist-info → deepeval-3.5.0.dist-info}/entry_points.txt +0 -0
deepeval/cli/main.py CHANGED
@@ -18,6 +18,7 @@ General behavior for all `set-*` / `unset-*` commands:
18
18
  import os
19
19
  from typing import Optional
20
20
  from rich import print
21
+ from rich.markup import escape
21
22
  import webbrowser
22
23
  import threading
23
24
  import random
@@ -25,6 +26,7 @@ import string
25
26
  import socket
26
27
  import typer
27
28
  from enum import Enum
29
+ from pydantic import SecretStr
28
30
  from deepeval.key_handler import (
29
31
  KEY_FILE_HANDLER,
30
32
  KeyValues,
@@ -34,6 +36,7 @@ from deepeval.key_handler import (
34
36
  from deepeval.telemetry import capture_login_event, capture_view_event
35
37
  from deepeval.cli.test import app as test_app
36
38
  from deepeval.cli.server import start_server
39
+ from deepeval.config.settings import get_settings
37
40
  from deepeval.utils import delete_file_if_exists, open_browser
38
41
  from deepeval.test_run.test_run import (
39
42
  LATEST_TEST_RUN_FILE_PATH,
@@ -43,8 +46,6 @@ from deepeval.cli.utils import (
43
46
  render_login_message,
44
47
  upload_and_open_link,
45
48
  PROD,
46
- clear_evaluation_model_keys,
47
- clear_embedding_model_keys,
48
49
  resolve_save_target,
49
50
  save_environ_to_store,
50
51
  unset_environ_in_store,
@@ -78,10 +79,18 @@ def find_available_port():
78
79
 
79
80
 
80
81
  def is_openai_configured() -> bool:
81
- api_key = os.getenv("OPENAI_API_KEY") or KEY_FILE_HANDLER.fetch_data(
82
- ModelKeyValues.OPENAI_API_KEY
83
- )
84
- return bool(api_key)
82
+ s = get_settings()
83
+ v = s.OPENAI_API_KEY
84
+ if isinstance(v, SecretStr):
85
+ try:
86
+ if v.get_secret_value().strip():
87
+ return True
88
+ except Exception:
89
+ pass
90
+ elif v and str(v).strip():
91
+ return True
92
+ env = os.getenv("OPENAI_API_KEY")
93
+ return bool(env and env.strip())
85
94
 
86
95
 
87
96
  @app.command(name="set-confident-region")
@@ -99,25 +108,26 @@ def set_confident_region_command(
99
108
  """Set the Confident AI data region."""
100
109
  # Add flag emojis based on region
101
110
  flag = "🇺🇸" if region == Regions.US else "🇪🇺"
102
- KEY_FILE_HANDLER.write_key(KeyValues.CONFIDENT_REGION, region.value)
103
- save_target = resolve_save_target(save)
104
- if save_target:
105
- handled, path = save_environ_to_store(
106
- save_target,
107
- {
108
- KeyValues.CONFIDENT_REGION: region.value,
109
- },
111
+
112
+ settings = get_settings()
113
+ with settings.edit(save=save) as edit_ctx:
114
+ settings.CONFIDENT_REGION = region.value
115
+
116
+ handled, path, _ = edit_ctx.result
117
+
118
+ if not handled and save is not None:
119
+ # invalid --save format (unsupported)
120
+ print("Unsupported --save option. Use --save=dotenv[:path].")
121
+ elif path:
122
+ # persisted to a file
123
+ print(
124
+ f"Saved environment variables to {path} (ensure it's git-ignored)."
110
125
  )
111
- if handled:
112
- print(
113
- f"Saved environment variables to {path} (ensure it's git-ignored)."
114
- )
115
- else:
116
- print("Unsupported --save option. Use --save=dotenv[:path].")
117
126
  else:
127
+ # updated in-memory & process env only
118
128
  print(
119
- "Tip: persist these settings to a dotenv file with --save=dotenv[:path] (default .env.local) "
120
- "or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
129
+ "Settings updated for this session. To persist, use --save=dotenv[:path] "
130
+ "(default .env.local) or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
121
131
  )
122
132
 
123
133
  print(
@@ -181,17 +191,25 @@ def login(
181
191
  )
182
192
  key = api_key.strip()
183
193
 
184
- save_target = resolve_save_target(save) or "dotenv:.env.local"
185
- handled, path = save_environ_to_store(
186
- save_target,
187
- {KeyValues.API_KEY: key, CONFIDENT_API_KEY_ENV_VAR: key},
188
- )
189
- if handled:
190
- print(
191
- f"Saved environment variables to {path} (ensure it's git-ignored)."
192
- )
193
- else:
194
- print("Unsupported --save option. Use --save=dotenv[:path].")
194
+ settings = get_settings()
195
+ save = save or settings.DEEPEVAL_DEFAULT_SAVE or "dotenv:.env.local"
196
+ with settings.edit(save=save) as edit_ctx:
197
+ settings.API_KEY = key
198
+ settings.CONFIDENT_API_KEY = key
199
+
200
+ handled, path, updated = edit_ctx.result
201
+
202
+ if updated:
203
+ if not handled and save is not None:
204
+ # invalid --save format (unsupported)
205
+ print(
206
+ "Unsupported --save option. Use --save=dotenv[:path]."
207
+ )
208
+ elif path:
209
+ # persisted to a file
210
+ print(
211
+ f"Saved environment variables to {path} (ensure it's git-ignored)."
212
+ )
195
213
 
196
214
  completed = True
197
215
  print(
@@ -216,7 +234,7 @@ def logout(
216
234
  save: Optional[str] = typer.Option(
217
235
  None,
218
236
  "--save",
219
- help="Where to remove the saved key from. Use format dotenv[:path]. If omitted, logout removes from .env.local. JSON keystore is always cleared.",
237
+ help="Where to remove the saved key from. Use format dotenv[:path]. If omitted, uses DEEPEVAL_DEFAULT_SAVE or .env.local. The JSON keystore is always cleared.",
220
238
  )
221
239
  ):
222
240
  """
@@ -224,30 +242,24 @@ def logout(
224
242
 
225
243
  Behavior:
226
244
  - Always clears the Confident API key from the JSON keystore and process env.
227
- - Also removes credentials from a dotenv file; defaults to .env.local.
245
+ - Also removes credentials from a dotenv file; defaults to DEEPEVAL_DEFAULT_SAVE if set, otherwise .env.local.
228
246
  Override the target with --save=dotenv[:path].
229
247
  """
230
- set_confident_api_key(None)
231
-
232
- # Remove from dotenv file (both names)
233
- save_target = resolve_save_target(save) or "dotenv:.env.local"
234
- if save_target:
235
- handled, path = unset_environ_in_store(
236
- save_target,
237
- [
238
- KeyValues.API_KEY,
239
- CONFIDENT_API_KEY_ENV_VAR,
240
- ],
241
- )
242
- if handled:
243
- print(f"Removed Confident AI key(s) from {path}.")
244
- else:
248
+ settings = get_settings()
249
+ save = save or settings.DEEPEVAL_DEFAULT_SAVE or "dotenv:.env.local"
250
+ with settings.edit(save=save) as edit_ctx:
251
+ settings.API_KEY = None
252
+ settings.CONFIDENT_API_KEY = None
253
+
254
+ handled, path, updated = edit_ctx.result
255
+
256
+ if updated:
257
+ if not handled and save is not None:
258
+ # invalid --save format (unsupported)
245
259
  print("Unsupported --save option. Use --save=dotenv[:path].")
246
- else:
247
- print(
248
- "Tip: remove keys from a dotenv file with --save=dotenv[:path] (default .env.local) "
249
- "or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
250
- )
260
+ elif path:
261
+ # persisted to a file
262
+ print(f"Removed Confident AI key(s) from {path}.")
251
263
 
252
264
  delete_file_if_exists(LATEST_TEST_RUN_FILE_PATH)
253
265
 
@@ -271,8 +283,22 @@ def view():
271
283
 
272
284
 
273
285
  @app.command(name="enable-grpc-logging")
274
- def enable_grpc_logging():
275
- os.environ["DEEPEVAL_GRPC_LOGGING"] = "1"
286
+ def enable_grpc_logging(save: Optional[str] = None):
287
+ """
288
+ Enable verbose gRPC logging for the current process.
289
+ Pass --save=dotenv[:path] to persist it (optional).
290
+ """
291
+ settings = get_settings()
292
+ with settings.edit(save=save) as edit_ctx:
293
+ settings.DEEPEVAL_GRPC_LOGGING = True
294
+
295
+ handled, path, _ = edit_ctx.result
296
+
297
+ if not handled and save is not None:
298
+ # invalid --save format (unsupported)
299
+ print("Unsupported --save option. Use --save=dotenv[:path].")
300
+ else:
301
+ print("gRPC logging enabled.")
276
302
 
277
303
 
278
304
  #############################################
@@ -312,7 +338,7 @@ def set_openai_env(
312
338
  What this does:
313
339
  - Sets the active provider flag to `USE_OPENAI_MODEL`.
314
340
  - Persists the selected model name and any cost overrides in the JSON store.
315
- - secrets are ever written to `.deepeval/.deepeval` (JSON).
341
+ - secrets are never written to `.deepeval/.deepeval` (JSON).
316
342
 
317
343
  Pricing rules:
318
344
  - If `model` is a known OpenAI model, you may omit costs (built‑in pricing is used).
@@ -322,8 +348,9 @@ def set_openai_env(
322
348
  Secrets & saving:
323
349
  - Set your `OPENAI_API_KEY` via environment or a dotenv file.
324
350
  - Pass `--save=dotenv[:path]` to write configuration to a dotenv file
325
- (default: `.env.local`). Supported secrets, such as `OPENAI_API_KEY`, are
326
- persisted there if present in your environment.
351
+ (default: `.env.local`). This command does not set or persist OPENAI_API_KEY. Set it
352
+ via your environment or a dotenv file (e.g., add OPENAI_API_KEY=... to .env.local)
353
+ before running this command, or manage it with whatever command you use for secrets.
327
354
 
328
355
  Args:
329
356
  model: OpenAI model name, such as `gpt-4o-mini`.
@@ -338,61 +365,34 @@ def set_openai_env(
338
365
  --cost_per_output_token 0.0015 \\
339
366
  --save dotenv:.env.local
340
367
  """
341
-
342
- clear_evaluation_model_keys()
343
- KEY_FILE_HANDLER.write_key(ModelKeyValues.OPENAI_MODEL_NAME, model)
344
- if cost_per_input_token is not None:
345
- KEY_FILE_HANDLER.write_key(
346
- ModelKeyValues.OPENAI_COST_PER_INPUT_TOKEN,
347
- str(cost_per_input_token),
348
- )
349
- if cost_per_output_token is not None:
350
- KEY_FILE_HANDLER.write_key(
351
- ModelKeyValues.OPENAI_COST_PER_OUTPUT_TOKEN,
352
- str(cost_per_output_token),
353
- )
354
-
355
- save_target = resolve_save_target(save)
356
- switch_model_provider(ModelKeyValues.USE_OPENAI_MODEL, save_target)
357
- if save_target:
358
- handled, path = save_environ_to_store(
359
- save_target,
360
- {
361
- ModelKeyValues.OPENAI_MODEL_NAME: model,
362
- **(
363
- {
364
- ModelKeyValues.OPENAI_COST_PER_INPUT_TOKEN: str(
365
- cost_per_input_token
366
- )
367
- }
368
- if cost_per_input_token is not None
369
- else {}
370
- ),
371
- **(
372
- {
373
- ModelKeyValues.OPENAI_COST_PER_OUTPUT_TOKEN: str(
374
- cost_per_output_token
375
- )
376
- }
377
- if cost_per_output_token is not None
378
- else {}
379
- ),
380
- },
368
+ settings = get_settings()
369
+ with settings.edit(save=save) as edit_ctx:
370
+ edit_ctx.switch_model_provider(ModelKeyValues.USE_OPENAI_MODEL)
371
+ settings.OPENAI_MODEL_NAME = model
372
+ if cost_per_input_token is not None:
373
+ settings.OPENAI_COST_PER_INPUT_TOKEN = cost_per_input_token
374
+ if cost_per_output_token is not None:
375
+ settings.OPENAI_COST_PER_OUTPUT_TOKEN = cost_per_output_token
376
+
377
+ handled, path, _ = edit_ctx.result
378
+
379
+ if not handled and save is not None:
380
+ # invalid --save format (unsupported)
381
+ print("Unsupported --save option. Use --save=dotenv[:path].")
382
+ elif path:
383
+ # persisted to a file
384
+ print(
385
+ f"Saved environment variables to {path} (ensure it's git-ignored)."
381
386
  )
382
- if handled:
383
- print(
384
- f"Saved environment variables to {path} (ensure it's git-ignored)."
385
- )
386
- else:
387
- print("Unsupported --save option. Use --save=dotenv[:path].")
388
-
389
387
  else:
388
+ # updated in-memory & process env only
390
389
  print(
391
390
  "Tip: persist these settings to a dotenv file with --save=dotenv[:path] (default .env.local) "
392
391
  "or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
393
392
  )
393
+
394
394
  print(
395
- f":raising_hands: Congratulations! You're now using OpenAI's `{model}` for all evals that require an LLM."
395
+ f":raising_hands: Congratulations! You're now using OpenAI's `{escape(model)}` for all evals that require an LLM."
396
396
  )
397
397
 
398
398
 
@@ -421,26 +421,21 @@ def unset_openai_env(
421
421
  deepeval unset-openai --save dotenv:.env.local
422
422
  """
423
423
 
424
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.USE_OPENAI_MODEL)
425
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.OPENAI_MODEL_NAME)
426
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.OPENAI_COST_PER_INPUT_TOKEN)
427
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.OPENAI_COST_PER_OUTPUT_TOKEN)
428
-
429
- save_target = resolve_save_target(save)
430
- if save_target:
431
- handled, path = unset_environ_in_store(
432
- save_target,
433
- [
434
- ModelKeyValues.USE_OPENAI_MODEL,
435
- ModelKeyValues.OPENAI_MODEL_NAME,
436
- ModelKeyValues.OPENAI_COST_PER_INPUT_TOKEN,
437
- ModelKeyValues.OPENAI_COST_PER_OUTPUT_TOKEN,
438
- ],
439
- )
440
- if handled:
441
- print(f"Removed OpenAI environment variables from {path}.")
442
- else:
443
- print("Unsupported --save option. Use --save=dotenv[:path].")
424
+ settings = get_settings()
425
+ with settings.edit(save=save) as edit_ctx:
426
+ settings.OPENAI_MODEL_NAME = None
427
+ settings.OPENAI_COST_PER_INPUT_TOKEN = None
428
+ settings.OPENAI_COST_PER_OUTPUT_TOKEN = None
429
+ settings.USE_OPENAI_MODEL = None
430
+
431
+ handled, path, _ = edit_ctx.result
432
+
433
+ if not handled and save is not None:
434
+ # invalid --save format (unsupported)
435
+ print("Unsupported --save option. Use --save=dotenv[:path].")
436
+ elif path:
437
+ # persisted to a file
438
+ print(f"Removed OpenAI environment variables from {path}.")
444
439
 
445
440
  if is_openai_configured():
446
441
  print(
@@ -462,7 +457,7 @@ def set_azure_openai_env(
462
457
  azure_openai_api_key: str = typer.Option(
463
458
  ...,
464
459
  "--openai-api-key",
465
- help="Azure OpenAI API key (NOT persisted; set in .env[.local])",
460
+ help="Azure OpenAI API key",
466
461
  ),
467
462
  azure_openai_endpoint: str = typer.Option(
468
463
  ..., "--openai-endpoint", help="Azure OpenAI endpoint"
@@ -486,61 +481,36 @@ def set_azure_openai_env(
486
481
  "Usage: --save=dotenv[:path] (default: .env.local)",
487
482
  ),
488
483
  ):
489
-
490
- clear_evaluation_model_keys()
491
-
492
- KEY_FILE_HANDLER.write_key(
493
- ModelKeyValues.AZURE_MODEL_NAME, openai_model_name
494
- )
495
- KEY_FILE_HANDLER.write_key(
496
- ModelKeyValues.AZURE_OPENAI_ENDPOINT, azure_openai_endpoint
497
- )
498
- KEY_FILE_HANDLER.write_key(
499
- ModelKeyValues.OPENAI_API_VERSION, openai_api_version
500
- )
501
- KEY_FILE_HANDLER.write_key(
502
- ModelKeyValues.AZURE_DEPLOYMENT_NAME, azure_deployment_name
503
- )
504
-
505
- if azure_model_version is not None:
506
- KEY_FILE_HANDLER.write_key(
507
- ModelKeyValues.AZURE_MODEL_VERSION, azure_model_version
508
- )
509
-
510
- save_target = resolve_save_target(save)
511
- switch_model_provider(ModelKeyValues.USE_AZURE_OPENAI, save_target)
512
-
513
- if save_target:
514
- handled, path = save_environ_to_store(
515
- save_target,
516
- {
517
- ModelKeyValues.AZURE_OPENAI_API_KEY: azure_openai_api_key,
518
- ModelKeyValues.AZURE_OPENAI_ENDPOINT: azure_openai_endpoint,
519
- ModelKeyValues.OPENAI_API_VERSION: openai_api_version,
520
- ModelKeyValues.AZURE_DEPLOYMENT_NAME: azure_deployment_name,
521
- ModelKeyValues.AZURE_MODEL_NAME: openai_model_name,
522
- **(
523
- {ModelKeyValues.AZURE_MODEL_VERSION: azure_model_version}
524
- if azure_model_version
525
- else {}
526
- ),
527
- },
484
+ settings = get_settings()
485
+ with settings.edit(save=save) as edit_ctx:
486
+ edit_ctx.switch_model_provider(ModelKeyValues.USE_AZURE_OPENAI)
487
+ settings.AZURE_OPENAI_API_KEY = azure_openai_api_key
488
+ settings.AZURE_OPENAI_ENDPOINT = azure_openai_endpoint
489
+ settings.OPENAI_API_VERSION = openai_api_version
490
+ settings.AZURE_DEPLOYMENT_NAME = azure_deployment_name
491
+ settings.AZURE_MODEL_NAME = openai_model_name
492
+ if azure_model_version is not None:
493
+ settings.AZURE_MODEL_VERSION = azure_model_version
494
+
495
+ handled, path, _ = edit_ctx.result
496
+
497
+ if not handled and save is not None:
498
+ # invalid --save format (unsupported)
499
+ print("Unsupported --save option. Use --save=dotenv[:path].")
500
+ elif path:
501
+ # persisted to a file
502
+ print(
503
+ f"Saved environment variables to {path} (ensure it's git-ignored)."
528
504
  )
529
- if handled:
530
- print(
531
- f"Saved environment variables to {path} (ensure it's git-ignored)."
532
- )
533
- else:
534
- print("Unsupported --save option. Use --save=dotenv[:path].")
535
-
536
505
  else:
506
+ # updated in-memory & process env only
537
507
  print(
538
- "Tip: persist these settings to a dotenv file with --save=dotenv[:path] (default .env.local) "
539
- "or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
508
+ "Settings updated for this session. To persist, use --save=dotenv[:path] "
509
+ "(default .env.local) or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
540
510
  )
541
511
 
542
512
  print(
543
- ":raising_hands: Congratulations! You're now using Azure OpenAI for all evals that require an LLM."
513
+ f":raising_hands: Congratulations! You're now using Azure OpenAI's `{escape(openai_model_name)}` for all evals that require an LLM."
544
514
  )
545
515
 
546
516
 
@@ -558,34 +528,30 @@ def set_azure_openai_embedding_env(
558
528
  "Usage: --save=dotenv[:path] (default: .env.local)",
559
529
  ),
560
530
  ):
561
- clear_embedding_model_keys()
562
- KEY_FILE_HANDLER.write_key(
563
- EmbeddingKeyValues.AZURE_EMBEDDING_DEPLOYMENT_NAME,
564
- azure_embedding_deployment_name,
565
- )
566
-
567
- save_target = resolve_save_target(save)
568
- switch_model_provider(
569
- EmbeddingKeyValues.USE_AZURE_OPENAI_EMBEDDING, save_target
570
- )
571
- if save_target:
572
- handled, path = save_environ_to_store(
573
- save_target,
574
- {
575
- EmbeddingKeyValues.AZURE_EMBEDDING_DEPLOYMENT_NAME: azure_embedding_deployment_name,
576
- },
531
+ settings = get_settings()
532
+ with settings.edit(save=save) as edit_ctx:
533
+ edit_ctx.switch_model_provider(
534
+ EmbeddingKeyValues.USE_AZURE_OPENAI_EMBEDDING
535
+ )
536
+ settings.AZURE_EMBEDDING_DEPLOYMENT_NAME = (
537
+ azure_embedding_deployment_name
577
538
  )
578
- if handled:
579
- print(
580
- f"Saved environment variables to {path} (ensure it's git-ignored)."
581
- )
582
- else:
583
- print("Unsupported --save option. Use --save=dotenv[:path].")
584
539
 
540
+ handled, path, _ = edit_ctx.result
541
+
542
+ if not handled and save is not None:
543
+ # invalid --save format (unsupported)
544
+ print("Unsupported --save option. Use --save=dotenv[:path].")
545
+ elif path:
546
+ # persisted to a file
547
+ print(
548
+ f"Saved environment variables to {path} (ensure it's git-ignored)."
549
+ )
585
550
  else:
551
+ # updated in-memory & process env only
586
552
  print(
587
- "Tip: persist these settings to a dotenv file with --save=dotenv[:path] (default .env.local) "
588
- "or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
553
+ "Settings updated for this session. To persist, use --save=dotenv[:path] "
554
+ "(default .env.local) or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
589
555
  )
590
556
 
591
557
  print(
@@ -602,32 +568,24 @@ def unset_azure_openai_env(
602
568
  "Usage: --save=dotenv[:path] (default: .env.local)",
603
569
  )
604
570
  ):
605
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.AZURE_OPENAI_API_KEY)
606
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.AZURE_OPENAI_ENDPOINT)
607
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.OPENAI_API_VERSION)
608
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.AZURE_DEPLOYMENT_NAME)
609
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.AZURE_MODEL_NAME)
610
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.AZURE_MODEL_VERSION)
611
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.USE_AZURE_OPENAI)
612
-
613
- save_target = resolve_save_target(save)
614
- if save_target:
615
- handled, path = unset_environ_in_store(
616
- save_target,
617
- [
618
- ModelKeyValues.AZURE_OPENAI_API_KEY,
619
- ModelKeyValues.AZURE_OPENAI_ENDPOINT,
620
- ModelKeyValues.OPENAI_API_VERSION,
621
- ModelKeyValues.AZURE_DEPLOYMENT_NAME,
622
- ModelKeyValues.AZURE_MODEL_NAME,
623
- ModelKeyValues.AZURE_MODEL_VERSION,
624
- ModelKeyValues.USE_AZURE_OPENAI,
625
- ],
626
- )
627
- if handled:
628
- print(f"Removed Azure OpenAI environment variables from {path}.")
629
- else:
630
- print("Unsupported --save option. Use --save=dotenv[:path].")
571
+ settings = get_settings()
572
+ with settings.edit(save=save) as edit_ctx:
573
+ settings.AZURE_OPENAI_API_KEY = None
574
+ settings.AZURE_OPENAI_ENDPOINT = None
575
+ settings.OPENAI_API_VERSION = None
576
+ settings.AZURE_DEPLOYMENT_NAME = None
577
+ settings.AZURE_MODEL_NAME = None
578
+ settings.AZURE_MODEL_VERSION = None
579
+ settings.USE_AZURE_OPENAI = None
580
+
581
+ handled, path, _ = edit_ctx.result
582
+
583
+ if not handled and save is not None:
584
+ # invalid --save format (unsupported)
585
+ print("Unsupported --save option. Use --save=dotenv[:path].")
586
+ elif path:
587
+ # persisted to a file
588
+ print(f"Removed Azure OpenAI environment variables from {path}.")
631
589
 
632
590
  if is_openai_configured():
633
591
  print(
@@ -635,7 +593,7 @@ def unset_azure_openai_env(
635
593
  )
636
594
  else:
637
595
  print(
638
- "Azure OpenAI configuration removed. No model is currently configured, but you can set one with the CLI or add credentials to .env[.local]."
596
+ "Azure OpenAI has been unset. No active provider is configured. Set one with the CLI, or add credentials to .env[.local]."
639
597
  )
640
598
 
641
599
 
@@ -648,26 +606,21 @@ def unset_azure_openai_embedding_env(
648
606
  "Usage: --save=dotenv[:path] (default: .env.local)",
649
607
  ),
650
608
  ):
651
- KEY_FILE_HANDLER.remove_key(
652
- EmbeddingKeyValues.AZURE_EMBEDDING_DEPLOYMENT_NAME
653
- )
654
- KEY_FILE_HANDLER.remove_key(EmbeddingKeyValues.USE_AZURE_OPENAI_EMBEDDING)
655
-
656
- save_target = resolve_save_target(save)
657
- if save_target:
658
- handled, path = unset_environ_in_store(
659
- save_target,
660
- [
661
- EmbeddingKeyValues.AZURE_EMBEDDING_DEPLOYMENT_NAME,
662
- EmbeddingKeyValues.USE_AZURE_OPENAI_EMBEDDING,
663
- ],
609
+ settings = get_settings()
610
+ with settings.edit(save=save) as edit_ctx:
611
+ settings.AZURE_EMBEDDING_DEPLOYMENT_NAME = None
612
+ settings.USE_AZURE_OPENAI_EMBEDDING = None
613
+
614
+ handled, path, _ = edit_ctx.result
615
+
616
+ if not handled and save is not None:
617
+ # invalid --save format (unsupported)
618
+ print("Unsupported --save option. Use --save=dotenv[:path].")
619
+ elif path:
620
+ # persisted to a file
621
+ print(
622
+ f"Removed Azure OpenAI embedding environment variables from {path}."
664
623
  )
665
- if handled:
666
- print(
667
- f"Removed Azure OpenAI embedding environment variables from {path}."
668
- )
669
- else:
670
- print("Unsupported --save option. Use --save=dotenv[:path].")
671
624
 
672
625
  if is_openai_configured():
673
626
  print(
@@ -700,36 +653,32 @@ def set_ollama_model_env(
700
653
  "Usage: --save=dotenv[:path] (default: .env.local)",
701
654
  ),
702
655
  ):
703
- clear_evaluation_model_keys()
704
- KEY_FILE_HANDLER.write_key(ModelKeyValues.LOCAL_MODEL_NAME, model_name)
705
- KEY_FILE_HANDLER.write_key(ModelKeyValues.LOCAL_MODEL_BASE_URL, base_url)
706
-
707
- save_target = resolve_save_target(save)
708
- switch_model_provider(ModelKeyValues.USE_LOCAL_MODEL, save_target)
709
- if save_target:
710
- handled, path = save_environ_to_store(
711
- save_target,
712
- {
713
- ModelKeyValues.LOCAL_MODEL_NAME: model_name,
714
- ModelKeyValues.LOCAL_MODEL_BASE_URL: base_url,
715
- ModelKeyValues.LOCAL_MODEL_API_KEY: "ollama",
716
- },
656
+ settings = get_settings()
657
+ with settings.edit(save=save) as edit_ctx:
658
+ edit_ctx.switch_model_provider(ModelKeyValues.USE_LOCAL_MODEL)
659
+ settings.LOCAL_MODEL_API_KEY = "ollama"
660
+ settings.LOCAL_MODEL_NAME = model_name
661
+ settings.LOCAL_MODEL_BASE_URL = base_url
662
+
663
+ handled, path, _ = edit_ctx.result
664
+
665
+ if not handled and save is not None:
666
+ # invalid --save format (unsupported)
667
+ print("Unsupported --save option. Use --save=dotenv[:path].")
668
+ elif path:
669
+ # persisted to a file
670
+ print(
671
+ f"Saved environment variables to {path} (ensure it's git-ignored)."
717
672
  )
718
- if handled:
719
- print(
720
- f"Saved environment variables to {path} (ensure it's git-ignored)."
721
- )
722
- else:
723
- print("Unsupported --save option. Use --save=dotenv[:path].")
724
-
725
673
  else:
674
+ # updated in-memory & process env only
726
675
  print(
727
- "Tip: persist these settings to a dotenv file with --save=dotenv[:path] (default .env.local) "
728
- "or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
676
+ "Settings updated for this session. To persist, use --save=dotenv[:path] "
677
+ "(default .env.local) or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
729
678
  )
730
679
 
731
680
  print(
732
- ":raising_hands: Congratulations! You're now using a local Ollama model for all evals that require an LLM."
681
+ f":raising_hands: Congratulations! You're now using a local Ollama model `{escape(model_name)}` for all evals that require an LLM."
733
682
  )
734
683
 
735
684
 
@@ -742,26 +691,21 @@ def unset_ollama_model_env(
742
691
  "Usage: --save=dotenv[:path] (default: .env.local)",
743
692
  ),
744
693
  ):
745
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.LOCAL_MODEL_NAME)
746
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.LOCAL_MODEL_BASE_URL)
747
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.USE_LOCAL_MODEL)
748
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.LOCAL_MODEL_API_KEY)
749
-
750
- save_target = resolve_save_target(save)
751
- if save_target:
752
- handled, path = unset_environ_in_store(
753
- save_target,
754
- [
755
- ModelKeyValues.LOCAL_MODEL_NAME,
756
- ModelKeyValues.LOCAL_MODEL_BASE_URL,
757
- ModelKeyValues.USE_LOCAL_MODEL,
758
- ModelKeyValues.LOCAL_MODEL_API_KEY,
759
- ],
760
- )
761
- if handled:
762
- print(f"Removed Ollama environment variables from {path}.")
763
- else:
764
- print("Unsupported --save option. Use --save=dotenv[:path].")
694
+ settings = get_settings()
695
+ with settings.edit(save=save) as edit_ctx:
696
+ settings.LOCAL_MODEL_API_KEY = None
697
+ settings.LOCAL_MODEL_NAME = None
698
+ settings.LOCAL_MODEL_BASE_URL = None
699
+ settings.USE_LOCAL_MODEL = None
700
+
701
+ handled, path, _ = edit_ctx.result
702
+
703
+ if not handled and save is not None:
704
+ # invalid --save format (unsupported)
705
+ print("Unsupported --save option. Use --save=dotenv[:path].")
706
+ elif path:
707
+ # persisted to a file
708
+ print(f"Removed local Ollama environment variables from {path}.")
765
709
 
766
710
  if is_openai_configured():
767
711
  print(
@@ -769,7 +713,7 @@ def unset_ollama_model_env(
769
713
  )
770
714
  else:
771
715
  print(
772
- "local Ollama model configuration removed. No model is currently configured, but you can set one with the CLI or add credentials to .env[.local]."
716
+ "The local Ollama model configuration has been removed. No model is currently configured, but you can set one with the CLI or add credentials to .env[.local]."
773
717
  )
774
718
 
775
719
 
@@ -791,40 +735,32 @@ def set_ollama_embeddings_env(
791
735
  "Usage: --save=dotenv[:path] (default: .env.local)",
792
736
  ),
793
737
  ):
794
- clear_embedding_model_keys()
795
- KEY_FILE_HANDLER.write_key(
796
- EmbeddingKeyValues.LOCAL_EMBEDDING_MODEL_NAME, model_name
797
- )
798
- KEY_FILE_HANDLER.write_key(
799
- EmbeddingKeyValues.LOCAL_EMBEDDING_BASE_URL, base_url
800
- )
801
-
802
- save_target = resolve_save_target(save)
803
- switch_model_provider(EmbeddingKeyValues.USE_LOCAL_EMBEDDINGS, save_target)
804
- if save_target:
805
- handled, path = save_environ_to_store(
806
- save_target,
807
- {
808
- EmbeddingKeyValues.LOCAL_EMBEDDING_MODEL_NAME: model_name,
809
- EmbeddingKeyValues.LOCAL_EMBEDDING_BASE_URL: base_url,
810
- EmbeddingKeyValues.LOCAL_EMBEDDING_API_KEY: "ollama",
811
- },
738
+ settings = get_settings()
739
+ with settings.edit(save=save) as edit_ctx:
740
+ edit_ctx.switch_model_provider(EmbeddingKeyValues.USE_LOCAL_EMBEDDINGS)
741
+ settings.LOCAL_EMBEDDING_API_KEY = "ollama"
742
+ settings.LOCAL_EMBEDDING_MODEL_NAME = model_name
743
+ settings.LOCAL_EMBEDDING_BASE_URL = base_url
744
+
745
+ handled, path, _ = edit_ctx.result
746
+
747
+ if not handled and save is not None:
748
+ # invalid --save format (unsupported)
749
+ print("Unsupported --save option. Use --save=dotenv[:path].")
750
+ elif path:
751
+ # persisted to a file
752
+ print(
753
+ f"Saved environment variables to {path} (ensure it's git-ignored)."
812
754
  )
813
- if handled:
814
- print(
815
- f"Saved environment variables to {path} (ensure it's git-ignored)."
816
- )
817
- else:
818
- print("Unsupported --save option. Use --save=dotenv[:path].")
819
-
820
755
  else:
756
+ # updated in-memory & process env only
821
757
  print(
822
- "Tip: persist these settings to a dotenv file with --save=dotenv[:path] (default .env.local) "
823
- "or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
758
+ "Settings updated for this session. To persist, use --save=dotenv[:path] "
759
+ "(default .env.local) or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
824
760
  )
825
761
 
826
762
  print(
827
- ":raising_hands: Congratulations! You're now using Ollama embeddings for all evals that require text embeddings."
763
+ f":raising_hands: Congratulations! You're now using the Ollama embedding model `{escape(model_name)}` for all evals that require text embeddings."
828
764
  )
829
765
 
830
766
 
@@ -837,32 +773,28 @@ def unset_ollama_embeddings_env(
837
773
  "Usage: --save=dotenv[:path] (default: .env.local)",
838
774
  ),
839
775
  ):
840
- KEY_FILE_HANDLER.remove_key(EmbeddingKeyValues.LOCAL_EMBEDDING_MODEL_NAME)
841
- KEY_FILE_HANDLER.remove_key(EmbeddingKeyValues.LOCAL_EMBEDDING_BASE_URL)
842
- KEY_FILE_HANDLER.remove_key(EmbeddingKeyValues.LOCAL_EMBEDDING_API_KEY)
843
- KEY_FILE_HANDLER.remove_key(EmbeddingKeyValues.USE_LOCAL_EMBEDDINGS)
844
-
845
- save_target = resolve_save_target(save)
846
- if save_target:
847
- handled, path = unset_environ_in_store(
848
- save_target,
849
- [
850
- EmbeddingKeyValues.LOCAL_EMBEDDING_MODEL_NAME,
851
- EmbeddingKeyValues.LOCAL_EMBEDDING_BASE_URL,
852
- EmbeddingKeyValues.LOCAL_EMBEDDING_API_KEY,
853
- EmbeddingKeyValues.USE_LOCAL_EMBEDDINGS,
854
- ],
776
+
777
+ settings = get_settings()
778
+ with settings.edit(save=save) as edit_ctx:
779
+ settings.LOCAL_EMBEDDING_API_KEY = None
780
+ settings.LOCAL_EMBEDDING_MODEL_NAME = None
781
+ settings.LOCAL_EMBEDDING_BASE_URL = None
782
+ settings.USE_LOCAL_EMBEDDINGS = None
783
+
784
+ handled, path, _ = edit_ctx.result
785
+
786
+ if not handled and save is not None:
787
+ # invalid --save format (unsupported)
788
+ print("Unsupported --save option. Use --save=dotenv[:path].")
789
+ elif path:
790
+ # persisted to a file
791
+ print(
792
+ f"Removed local Ollama embedding environment variables from {path}."
855
793
  )
856
- if handled:
857
- print(
858
- f"Removed Ollama embedding environment variables from {path}."
859
- )
860
- else:
861
- print("Unsupported --save option. Use --save=dotenv[:path].")
862
794
 
863
795
  if is_openai_configured():
864
796
  print(
865
- ":raised_hands: Regular OpenAI will still be used by default because OPENAI_API_KEY is set."
797
+ ":raised_hands: Regular OpenAI embeddings will still be used by default because OPENAI_API_KEY is set."
866
798
  )
867
799
  else:
868
800
  print(
@@ -886,9 +818,9 @@ def set_local_model_env(
886
818
  api_key: Optional[str] = typer.Option(
887
819
  None,
888
820
  "--api-key",
889
- help="API key for the local model (if required) (NOT persisted; set in .env[.local])",
821
+ help="API key for the local model. Persisted to dotenv if --save is used; never written to the legacy JSON keystore.",
890
822
  ),
891
- format: Optional[str] = typer.Option(
823
+ model_format: Optional[str] = typer.Option(
892
824
  "json",
893
825
  "--format",
894
826
  help="Format of the response from the local model (default: json)",
@@ -900,49 +832,35 @@ def set_local_model_env(
900
832
  "Usage: --save=dotenv[:path] (default: .env.local)",
901
833
  ),
902
834
  ):
903
- clear_evaluation_model_keys()
904
- KEY_FILE_HANDLER.write_key(ModelKeyValues.LOCAL_MODEL_NAME, model_name)
905
- KEY_FILE_HANDLER.write_key(ModelKeyValues.LOCAL_MODEL_BASE_URL, base_url)
906
-
907
- if format:
908
- KEY_FILE_HANDLER.write_key(ModelKeyValues.LOCAL_MODEL_FORMAT, format)
909
-
910
- save_target = resolve_save_target(save)
911
- switch_model_provider(ModelKeyValues.USE_LOCAL_MODEL, save_target)
912
- if save_target:
913
- handled, path = save_environ_to_store(
914
- save_target,
915
- {
916
- ModelKeyValues.LOCAL_MODEL_NAME: model_name,
917
- ModelKeyValues.LOCAL_MODEL_BASE_URL: base_url,
918
- **(
919
- {ModelKeyValues.LOCAL_MODEL_API_KEY: api_key}
920
- if api_key
921
- else {}
922
- ),
923
- **(
924
- {ModelKeyValues.LOCAL_MODEL_FORMAT: format}
925
- if format
926
- else {}
927
- ),
928
- },
835
+ settings = get_settings()
836
+ with settings.edit(save=save) as edit_ctx:
837
+ edit_ctx.switch_model_provider(ModelKeyValues.USE_LOCAL_MODEL)
838
+ settings.LOCAL_MODEL_NAME = model_name
839
+ settings.LOCAL_MODEL_BASE_URL = base_url
840
+ if model_format:
841
+ settings.LOCAL_MODEL_FORMAT = model_format
842
+ if api_key:
843
+ settings.LOCAL_MODEL_API_KEY = api_key
844
+
845
+ handled, path, _ = edit_ctx.result
846
+
847
+ if not handled and save is not None:
848
+ # invalid --save format (unsupported)
849
+ print("Unsupported --save option. Use --save=dotenv[:path].")
850
+ elif path:
851
+ # persisted to a file
852
+ print(
853
+ f"Saved environment variables to {path} (ensure it's git-ignored)."
929
854
  )
930
-
931
- if handled:
932
- print(
933
- f"Saved environment variables to {path} (ensure it's git-ignored)."
934
- )
935
- else:
936
- print("Unsupported --save option. Use --save=dotenv[:path].")
937
-
938
855
  else:
856
+ # updated in-memory & process env only
939
857
  print(
940
- "Tip: persist these settings to a dotenv file with --save=dotenv[:path] (default .env.local) "
941
- "or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
858
+ "Settings updated for this session. To persist, use --save=dotenv[:path] "
859
+ "(default .env.local) or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
942
860
  )
943
861
 
944
862
  print(
945
- ":raising_hands: Congratulations! You're now using a local model for all evals that require an LLM."
863
+ f":raising_hands: Congratulations! You're now using a local model `{escape(model_name)}` for all evals that require an LLM."
946
864
  )
947
865
 
948
866
 
@@ -955,28 +873,23 @@ def unset_local_model_env(
955
873
  "Usage: --save=dotenv[:path] (default: .env.local)",
956
874
  ),
957
875
  ):
958
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.LOCAL_MODEL_NAME)
959
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.LOCAL_MODEL_BASE_URL)
960
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.LOCAL_MODEL_API_KEY)
961
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.LOCAL_MODEL_FORMAT)
962
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.USE_LOCAL_MODEL)
963
-
964
- save_target = resolve_save_target(save)
965
- if save_target:
966
- handled, path = unset_environ_in_store(
967
- save_target,
968
- [
969
- ModelKeyValues.LOCAL_MODEL_NAME,
970
- ModelKeyValues.LOCAL_MODEL_BASE_URL,
971
- ModelKeyValues.USE_LOCAL_MODEL,
972
- ModelKeyValues.LOCAL_MODEL_API_KEY,
973
- ModelKeyValues.LOCAL_MODEL_FORMAT,
974
- ],
975
- )
976
- if handled:
977
- print(f"Removed local model environment variables from {path}.")
978
- else:
979
- print("Unsupported --save option. Use --save=dotenv[:path].")
876
+ settings = get_settings()
877
+ with settings.edit(save=save) as edit_ctx:
878
+ settings.LOCAL_MODEL_API_KEY = None
879
+ settings.LOCAL_MODEL_NAME = None
880
+ settings.LOCAL_MODEL_BASE_URL = None
881
+ settings.LOCAL_MODEL_FORMAT = None
882
+ settings.USE_LOCAL_MODEL = None
883
+
884
+ handled, path, _ = edit_ctx.result
885
+
886
+ if not handled and save is not None:
887
+ # invalid --save format (unsupported)
888
+ print("Unsupported --save option. Use --save=dotenv[:path].")
889
+ elif path:
890
+ # persisted to a file
891
+ print(f"Removed local model environment variables from {path}.")
892
+
980
893
  if is_openai_configured():
981
894
  print(
982
895
  ":raised_hands: OpenAI will still be used by default because OPENAI_API_KEY is set."
@@ -1000,7 +913,7 @@ def set_grok_model_env(
1000
913
  api_key: str = typer.Option(
1001
914
  ...,
1002
915
  "--api-key",
1003
- help="API key for the Grok model (NOT persisted; set in .env[.local])",
916
+ help="API key for the Grok model. Persisted to dotenv if --save is used; never written to the legacy JSON keystore.",
1004
917
  ),
1005
918
  temperature: float = typer.Option(
1006
919
  0, "--temperature", help="Temperature for the Grok model"
@@ -1012,35 +925,32 @@ def set_grok_model_env(
1012
925
  "Usage: --save=dotenv[:path] (default: .env.local)",
1013
926
  ),
1014
927
  ):
1015
- clear_evaluation_model_keys()
1016
- KEY_FILE_HANDLER.write_key(ModelKeyValues.GROK_MODEL_NAME, model_name)
1017
- KEY_FILE_HANDLER.write_key(ModelKeyValues.TEMPERATURE, str(temperature))
1018
-
1019
- save_target = resolve_save_target(save)
1020
- switch_model_provider(ModelKeyValues.USE_GROK_MODEL, save_target)
1021
- if save_target:
1022
- handled, path = save_environ_to_store(
1023
- save_target,
1024
- {
1025
- ModelKeyValues.GROK_MODEL_NAME: model_name,
1026
- ModelKeyValues.GROK_API_KEY: api_key,
1027
- ModelKeyValues.TEMPERATURE: str(temperature),
1028
- },
928
+ settings = get_settings()
929
+ with settings.edit(save=save) as edit_ctx:
930
+ edit_ctx.switch_model_provider(ModelKeyValues.USE_GROK_MODEL)
931
+ settings.GROK_API_KEY = api_key
932
+ settings.GROK_MODEL_NAME = model_name
933
+ settings.TEMPERATURE = temperature
934
+
935
+ handled, path, _ = edit_ctx.result
936
+
937
+ if not handled and save is not None:
938
+ # invalid --save format (unsupported)
939
+ print("Unsupported --save option. Use --save=dotenv[:path].")
940
+ elif path:
941
+ # persisted to a file
942
+ print(
943
+ f"Saved environment variables to {path} (ensure it's git-ignored)."
1029
944
  )
1030
-
1031
- if handled:
1032
- print(
1033
- f"Saved environment variables to {path} (ensure it's git-ignored)."
1034
- )
1035
- else:
1036
- print("Unsupported --save option. Use --save=dotenv[:path].")
1037
945
  else:
946
+ # updated in-memory & process env only
1038
947
  print(
1039
- "Tip: persist these settings to a dotenv file with --save=dotenv[:path] (default .env.local) "
1040
- "or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
948
+ "Settings updated for this session. To persist, use --save=dotenv[:path] "
949
+ "(default .env.local) or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
1041
950
  )
951
+
1042
952
  print(
1043
- ":raising_hands: Congratulations! You're now using a Grok model for all evals that require an LLM."
953
+ f":raising_hands: Congratulations! You're now using a Grok's `{escape(model_name)}` for all evals that require an LLM."
1044
954
  )
1045
955
 
1046
956
 
@@ -1053,26 +963,21 @@ def unset_grok_model_env(
1053
963
  "Usage: --save=dotenv[:path] (default: .env.local)",
1054
964
  ),
1055
965
  ):
1056
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.GROK_MODEL_NAME)
1057
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.GROK_API_KEY)
1058
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.TEMPERATURE)
1059
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.USE_GROK_MODEL)
1060
-
1061
- save_target = resolve_save_target(save)
1062
- if save_target:
1063
- handled, path = unset_environ_in_store(
1064
- save_target,
1065
- [
1066
- ModelKeyValues.GROK_MODEL_NAME,
1067
- ModelKeyValues.GROK_API_KEY,
1068
- ModelKeyValues.TEMPERATURE,
1069
- ModelKeyValues.USE_GROK_MODEL,
1070
- ],
1071
- )
1072
- if handled:
1073
- print(f"Removed Grok model environment variables from {path}.")
1074
- else:
1075
- print("Unsupported --save option. Use --save=dotenv[:path].")
966
+ settings = get_settings()
967
+ with settings.edit(save=save) as edit_ctx:
968
+ settings.GROK_API_KEY = None
969
+ settings.GROK_MODEL_NAME = None
970
+ settings.TEMPERATURE = None
971
+ settings.USE_GROK_MODEL = None
972
+
973
+ handled, path, _ = edit_ctx.result
974
+
975
+ if not handled and save is not None:
976
+ # invalid --save format (unsupported)
977
+ print("Unsupported --save option. Use --save=dotenv[:path].")
978
+ elif path:
979
+ # persisted to a file
980
+ print(f"Removed Grok model environment variables from {path}.")
1076
981
 
1077
982
  if is_openai_configured():
1078
983
  print(
@@ -1097,7 +1002,7 @@ def set_moonshot_model_env(
1097
1002
  api_key: str = typer.Option(
1098
1003
  ...,
1099
1004
  "--api-key",
1100
- help="API key for the Moonshot model (NOT persisted; set in .env[.local])",
1005
+ help="API key for the Moonshot model. Persisted to dotenv if --save is used; never written to the legacy JSON keystore.",
1101
1006
  ),
1102
1007
  temperature: float = typer.Option(
1103
1008
  0, "--temperature", help="Temperature for the Moonshot model"
@@ -1109,36 +1014,32 @@ def set_moonshot_model_env(
1109
1014
  "Usage: --save=dotenv[:path] (default: .env.local)",
1110
1015
  ),
1111
1016
  ):
1112
- clear_evaluation_model_keys()
1113
- KEY_FILE_HANDLER.write_key(ModelKeyValues.MOONSHOT_MODEL_NAME, model_name)
1114
- KEY_FILE_HANDLER.write_key(ModelKeyValues.TEMPERATURE, str(temperature))
1115
-
1116
- save_target = resolve_save_target(save)
1117
- switch_model_provider(ModelKeyValues.USE_MOONSHOT_MODEL, save_target)
1118
- if save_target:
1119
- handled, path = save_environ_to_store(
1120
- save_target,
1121
- {
1122
- ModelKeyValues.MOONSHOT_MODEL_NAME: model_name,
1123
- ModelKeyValues.MOONSHOT_API_KEY: api_key,
1124
- ModelKeyValues.TEMPERATURE: str(temperature),
1125
- },
1017
+ settings = get_settings()
1018
+ with settings.edit(save=save) as edit_ctx:
1019
+ edit_ctx.switch_model_provider(ModelKeyValues.USE_MOONSHOT_MODEL)
1020
+ settings.MOONSHOT_API_KEY = api_key
1021
+ settings.MOONSHOT_MODEL_NAME = model_name
1022
+ settings.TEMPERATURE = temperature
1023
+
1024
+ handled, path, _ = edit_ctx.result
1025
+
1026
+ if not handled and save is not None:
1027
+ # invalid --save format (unsupported)
1028
+ print("Unsupported --save option. Use --save=dotenv[:path].")
1029
+ elif path:
1030
+ # persisted to a file
1031
+ print(
1032
+ f"Saved environment variables to {path} (ensure it's git-ignored)."
1126
1033
  )
1127
-
1128
- if handled:
1129
- print(
1130
- f"Saved environment variables to {path} (ensure it's git-ignored)."
1131
- )
1132
- else:
1133
- print("Unsupported --save option. Use --save=dotenv[:path].")
1134
-
1135
1034
  else:
1035
+ # updated in-memory & process env only
1136
1036
  print(
1137
- "Tip: persist these settings to a dotenv file with --save=dotenv[:path] (default .env.local) "
1138
- "or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
1037
+ "Settings updated for this session. To persist, use --save=dotenv[:path] "
1038
+ "(default .env.local) or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
1139
1039
  )
1040
+
1140
1041
  print(
1141
- ":raising_hands: Congratulations! You're now using a Moonshot model for all evals that require an LLM."
1042
+ f":raising_hands: Congratulations! You're now using Moonshot's `{escape(model_name)}` for all evals that require an LLM."
1142
1043
  )
1143
1044
 
1144
1045
 
@@ -1151,26 +1052,21 @@ def unset_moonshot_model_env(
1151
1052
  "Usage: --save=dotenv[:path] (default: .env.local)",
1152
1053
  ),
1153
1054
  ):
1154
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.MOONSHOT_MODEL_NAME)
1155
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.MOONSHOT_API_KEY)
1156
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.TEMPERATURE)
1157
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.USE_MOONSHOT_MODEL)
1158
-
1159
- save_target = resolve_save_target(save)
1160
- if save_target:
1161
- handled, path = unset_environ_in_store(
1162
- save_target,
1163
- [
1164
- ModelKeyValues.MOONSHOT_MODEL_NAME,
1165
- ModelKeyValues.MOONSHOT_API_KEY,
1166
- ModelKeyValues.TEMPERATURE,
1167
- ModelKeyValues.USE_MOONSHOT_MODEL,
1168
- ],
1169
- )
1170
- if handled:
1171
- print(f"Removed Moonshot model environment variables from {path}.")
1172
- else:
1173
- print("Unsupported --save option. Use --save=dotenv[:path].")
1055
+ settings = get_settings()
1056
+ with settings.edit(save=save) as edit_ctx:
1057
+ settings.MOONSHOT_API_KEY = None
1058
+ settings.MOONSHOT_MODEL_NAME = None
1059
+ settings.TEMPERATURE = None
1060
+ settings.USE_MOONSHOT_MODEL = None
1061
+
1062
+ handled, path, _ = edit_ctx.result
1063
+
1064
+ if not handled and save is not None:
1065
+ # invalid --save format (unsupported)
1066
+ print("Unsupported --save option. Use --save=dotenv[:path].")
1067
+ elif path:
1068
+ # persisted to a file
1069
+ print(f"Removed Moonshot model environment variables from {path}.")
1174
1070
 
1175
1071
  if is_openai_configured():
1176
1072
  print(
@@ -1195,7 +1091,7 @@ def set_deepseek_model_env(
1195
1091
  api_key: str = typer.Option(
1196
1092
  ...,
1197
1093
  "--api-key",
1198
- help="API key for the DeepSeek model (NOT persisted; set in .env[.local])",
1094
+ help="API key for the DeepSeek model. Persisted to dotenv if --save is used; never written to the legacy JSON keystore.",
1199
1095
  ),
1200
1096
  temperature: float = typer.Option(
1201
1097
  0, "--temperature", help="Temperature for the DeepSeek model"
@@ -1207,36 +1103,32 @@ def set_deepseek_model_env(
1207
1103
  "Usage: --save=dotenv[:path] (default: .env.local)",
1208
1104
  ),
1209
1105
  ):
1210
- clear_evaluation_model_keys()
1211
- KEY_FILE_HANDLER.write_key(ModelKeyValues.DEEPSEEK_MODEL_NAME, model_name)
1212
- KEY_FILE_HANDLER.write_key(ModelKeyValues.TEMPERATURE, str(temperature))
1213
-
1214
- save_target = resolve_save_target(save)
1215
- switch_model_provider(ModelKeyValues.USE_DEEPSEEK_MODEL, save_target)
1216
- if save_target:
1217
- handled, path = save_environ_to_store(
1218
- save_target,
1219
- {
1220
- ModelKeyValues.DEEPSEEK_MODEL_NAME: model_name,
1221
- ModelKeyValues.DEEPSEEK_API_KEY: api_key,
1222
- ModelKeyValues.TEMPERATURE: str(temperature),
1223
- },
1106
+ settings = get_settings()
1107
+ with settings.edit(save=save) as edit_ctx:
1108
+ edit_ctx.switch_model_provider(ModelKeyValues.USE_DEEPSEEK_MODEL)
1109
+ settings.DEEPSEEK_API_KEY = api_key
1110
+ settings.DEEPSEEK_MODEL_NAME = model_name
1111
+ settings.TEMPERATURE = temperature
1112
+
1113
+ handled, path, _ = edit_ctx.result
1114
+
1115
+ if not handled and save is not None:
1116
+ # invalid --save format (unsupported)
1117
+ print("Unsupported --save option. Use --save=dotenv[:path].")
1118
+ elif path:
1119
+ # persisted to a file
1120
+ print(
1121
+ f"Saved environment variables to {path} (ensure it's git-ignored)."
1224
1122
  )
1225
-
1226
- if handled:
1227
- print(
1228
- f"Saved environment variables to {path} (ensure it's git-ignored)."
1229
- )
1230
- else:
1231
- print("Unsupported --save option. Use --save=dotenv[:path].")
1232
-
1233
1123
  else:
1124
+ # updated in-memory & process env only
1234
1125
  print(
1235
- "Tip: persist these settings to a dotenv file with --save=dotenv[:path] (default .env.local) "
1236
- "or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
1126
+ "Settings updated for this session. To persist, use --save=dotenv[:path] "
1127
+ "(default .env.local) or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
1237
1128
  )
1129
+
1238
1130
  print(
1239
- ":raising_hands: Congratulations! You're now using a DeepSeek model for all evals that require an LLM."
1131
+ f":raising_hands: Congratulations! You're now using DeepSeek's `{escape(model_name)}` for all evals that require an LLM."
1240
1132
  )
1241
1133
 
1242
1134
 
@@ -1249,26 +1141,21 @@ def unset_deepseek_model_env(
1249
1141
  "Usage: --save=dotenv[:path] (default: .env.local)",
1250
1142
  ),
1251
1143
  ):
1252
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.DEEPSEEK_MODEL_NAME)
1253
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.DEEPSEEK_API_KEY)
1254
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.TEMPERATURE)
1255
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.USE_DEEPSEEK_MODEL)
1256
-
1257
- save_target = resolve_save_target(save)
1258
- if save_target:
1259
- handled, path = unset_environ_in_store(
1260
- save_target,
1261
- [
1262
- ModelKeyValues.DEEPSEEK_MODEL_NAME,
1263
- ModelKeyValues.DEEPSEEK_API_KEY,
1264
- ModelKeyValues.TEMPERATURE,
1265
- ModelKeyValues.USE_DEEPSEEK_MODEL,
1266
- ],
1267
- )
1268
- if handled:
1269
- print(f"Removed DeepSeek model environment variables from {path}.")
1270
- else:
1271
- print("Unsupported --save option. Use --save=dotenv[:path].")
1144
+ settings = get_settings()
1145
+ with settings.edit(save=save) as edit_ctx:
1146
+ settings.DEEPSEEK_API_KEY = None
1147
+ settings.DEEPSEEK_MODEL_NAME = None
1148
+ settings.TEMPERATURE = None
1149
+ settings.USE_DEEPSEEK_MODEL = None
1150
+
1151
+ handled, path, _ = edit_ctx.result
1152
+
1153
+ if not handled and save is not None:
1154
+ # invalid --save format (unsupported)
1155
+ print("Unsupported --save option. Use --save=dotenv[:path].")
1156
+ elif path:
1157
+ # persisted to a file
1158
+ print(f"Removed DeepSeek model environment variables from {path}.")
1272
1159
 
1273
1160
  if is_openai_configured():
1274
1161
  print(
@@ -1276,7 +1163,7 @@ def unset_deepseek_model_env(
1276
1163
  )
1277
1164
  else:
1278
1165
  print(
1279
- "The Deepseek model configuration has been removed. No model is currently configured, but you can set one with the CLI or add credentials to .env[.local]."
1166
+ "The DeepSeek model configuration has been removed. No model is currently configured, but you can set one with the CLI or add credentials to .env[.local]."
1280
1167
  )
1281
1168
 
1282
1169
 
@@ -1296,7 +1183,7 @@ def set_local_embeddings_env(
1296
1183
  api_key: Optional[str] = typer.Option(
1297
1184
  None,
1298
1185
  "--api-key",
1299
- help="API key for the local embeddings (if required) (NOT persisted; set in .env[.local])",
1186
+ help="API key for the local embeddings. Persisted to dotenv if --save is used; never written to the legacy JSON keystore.",
1300
1187
  ),
1301
1188
  save: Optional[str] = typer.Option(
1302
1189
  None,
@@ -1305,42 +1192,33 @@ def set_local_embeddings_env(
1305
1192
  "Usage: --save=dotenv[:path] (default: .env.local)",
1306
1193
  ),
1307
1194
  ):
1308
- clear_embedding_model_keys()
1309
- KEY_FILE_HANDLER.write_key(
1310
- EmbeddingKeyValues.LOCAL_EMBEDDING_MODEL_NAME, model_name
1311
- )
1312
- KEY_FILE_HANDLER.write_key(
1313
- EmbeddingKeyValues.LOCAL_EMBEDDING_BASE_URL, base_url
1314
- )
1315
-
1316
- save_target = resolve_save_target(save)
1317
- switch_model_provider(EmbeddingKeyValues.USE_LOCAL_EMBEDDINGS, save_target)
1318
- if save_target:
1319
- handled, path = save_environ_to_store(
1320
- save_target,
1321
- {
1322
- EmbeddingKeyValues.LOCAL_EMBEDDING_MODEL_NAME: model_name,
1323
- EmbeddingKeyValues.LOCAL_EMBEDDING_BASE_URL: base_url,
1324
- **(
1325
- {EmbeddingKeyValues.LOCAL_EMBEDDING_API_KEY: api_key}
1326
- if api_key
1327
- else {}
1328
- ),
1329
- },
1195
+ settings = get_settings()
1196
+ with settings.edit(save=save) as edit_ctx:
1197
+ edit_ctx.switch_model_provider(EmbeddingKeyValues.USE_LOCAL_EMBEDDINGS)
1198
+ settings.LOCAL_EMBEDDING_MODEL_NAME = model_name
1199
+ settings.LOCAL_EMBEDDING_BASE_URL = base_url
1200
+ if api_key:
1201
+ settings.LOCAL_EMBEDDING_API_KEY = api_key
1202
+
1203
+ handled, path, _ = edit_ctx.result
1204
+
1205
+ if not handled and save is not None:
1206
+ # invalid --save format (unsupported)
1207
+ print("Unsupported --save option. Use --save=dotenv[:path].")
1208
+ elif path:
1209
+ # persisted to a file
1210
+ print(
1211
+ f"Saved environment variables to {path} (ensure it's git-ignored)."
1330
1212
  )
1331
- if handled:
1332
- print(
1333
- f"Saved environment variables to {path} (ensure it's git-ignored)."
1334
- )
1335
- else:
1336
- print("Unsupported --save option. Use --save=dotenv[:path].")
1337
1213
  else:
1214
+ # updated in-memory & process env only
1338
1215
  print(
1339
- "Tip: persist these settings to a dotenv file with --save=dotenv[:path] (default .env.local) "
1340
- "or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
1216
+ "Settings updated for this session. To persist, use --save=dotenv[:path] "
1217
+ "(default .env.local) or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
1341
1218
  )
1219
+
1342
1220
  print(
1343
- ":raising_hands: Congratulations! You're now using local embeddings for all evals that require text embeddings."
1221
+ f":raising_hands: Congratulations! You're now using the local embedding model `{escape(model_name)}` for all evals that require text embeddings."
1344
1222
  )
1345
1223
 
1346
1224
 
@@ -1353,26 +1231,21 @@ def unset_local_embeddings_env(
1353
1231
  "Usage: --save=dotenv[:path] (default: .env.local)",
1354
1232
  ),
1355
1233
  ):
1356
- KEY_FILE_HANDLER.remove_key(EmbeddingKeyValues.LOCAL_EMBEDDING_MODEL_NAME)
1357
- KEY_FILE_HANDLER.remove_key(EmbeddingKeyValues.LOCAL_EMBEDDING_BASE_URL)
1358
- KEY_FILE_HANDLER.remove_key(EmbeddingKeyValues.LOCAL_EMBEDDING_API_KEY)
1359
- KEY_FILE_HANDLER.remove_key(EmbeddingKeyValues.USE_LOCAL_EMBEDDINGS)
1360
-
1361
- save_target = resolve_save_target(save)
1362
- if save_target:
1363
- handled, path = unset_environ_in_store(
1364
- save_target,
1365
- [
1366
- EmbeddingKeyValues.LOCAL_EMBEDDING_MODEL_NAME,
1367
- EmbeddingKeyValues.LOCAL_EMBEDDING_BASE_URL,
1368
- EmbeddingKeyValues.LOCAL_EMBEDDING_API_KEY,
1369
- EmbeddingKeyValues.USE_LOCAL_EMBEDDINGS,
1370
- ],
1371
- )
1372
- if handled:
1373
- print(f"Removed local embedding environment variables from {path}.")
1374
- else:
1375
- print("Unsupported --save option. Use --save=dotenv[:path].")
1234
+ settings = get_settings()
1235
+ with settings.edit(save=save) as edit_ctx:
1236
+ settings.LOCAL_EMBEDDING_API_KEY = None
1237
+ settings.LOCAL_EMBEDDING_MODEL_NAME = None
1238
+ settings.LOCAL_EMBEDDING_BASE_URL = None
1239
+ settings.USE_LOCAL_EMBEDDINGS = None
1240
+
1241
+ handled, path, _ = edit_ctx.result
1242
+
1243
+ if not handled and save is not None:
1244
+ # invalid --save format (unsupported)
1245
+ print("Unsupported --save option. Use --save=dotenv[:path].")
1246
+ elif path:
1247
+ # persisted to a file
1248
+ print(f"Removed local embedding environment variables from {path}.")
1376
1249
 
1377
1250
  if is_openai_configured():
1378
1251
  print(
@@ -1397,7 +1270,7 @@ def set_gemini_model_env(
1397
1270
  google_api_key: Optional[str] = typer.Option(
1398
1271
  None,
1399
1272
  "--google-api-key",
1400
- help="Google API Key for Gemini (NOT persisted; set in .env[.local])",
1273
+ help="Google API Key for Gemini",
1401
1274
  ),
1402
1275
  google_cloud_project: Optional[str] = typer.Option(
1403
1276
  None, "--project-id", help="Google Cloud project ID"
@@ -1412,7 +1285,6 @@ def set_gemini_model_env(
1412
1285
  "Usage: --save=dotenv[:path] (default: .env.local)",
1413
1286
  ),
1414
1287
  ):
1415
- clear_evaluation_model_keys()
1416
1288
  if not google_api_key and not (
1417
1289
  google_cloud_project and google_cloud_location
1418
1290
  ):
@@ -1421,71 +1293,52 @@ def set_gemini_model_env(
1421
1293
  err=True,
1422
1294
  )
1423
1295
  raise typer.Exit(code=1)
1424
- if model_name is not None:
1425
- KEY_FILE_HANDLER.write_key(ModelKeyValues.GEMINI_MODEL_NAME, model_name)
1426
1296
 
1427
- if google_api_key is None:
1428
- KEY_FILE_HANDLER.write_key(
1429
- ModelKeyValues.GOOGLE_GENAI_USE_VERTEXAI, "YES"
1430
- )
1297
+ settings = get_settings()
1298
+ with settings.edit(save=save) as edit_ctx:
1299
+ edit_ctx.switch_model_provider(ModelKeyValues.USE_GEMINI_MODEL)
1431
1300
 
1432
- if google_cloud_project is not None:
1433
- KEY_FILE_HANDLER.write_key(
1434
- ModelKeyValues.GOOGLE_CLOUD_PROJECT, google_cloud_project
1301
+ if google_api_key is not None:
1302
+ settings.GOOGLE_API_KEY = google_api_key
1303
+ settings.GOOGLE_GENAI_USE_VERTEXAI = False
1304
+ else:
1305
+ settings.GOOGLE_GENAI_USE_VERTEXAI = True
1306
+ if google_cloud_project:
1307
+ settings.GOOGLE_CLOUD_PROJECT = google_cloud_project
1308
+ if google_cloud_location:
1309
+ settings.GOOGLE_CLOUD_LOCATION = google_cloud_location
1310
+ if model_name:
1311
+ settings.GEMINI_MODEL_NAME = model_name
1312
+
1313
+ handled, path, _ = edit_ctx.result
1314
+
1315
+ if not handled and save is not None:
1316
+ # invalid --save format (unsupported)
1317
+ print("Unsupported --save option. Use --save=dotenv[:path].")
1318
+ elif path:
1319
+ # persisted to a file
1320
+ print(
1321
+ f"Saved environment variables to {path} (ensure it's git-ignored)."
1435
1322
  )
1436
- if google_cloud_location is not None:
1437
- KEY_FILE_HANDLER.write_key(
1438
- ModelKeyValues.GOOGLE_CLOUD_LOCATION, google_cloud_location
1323
+ else:
1324
+ # updated in-memory & process env only
1325
+ print(
1326
+ "Settings updated for this session. To persist, use --save=dotenv[:path] "
1327
+ "(default .env.local) or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
1439
1328
  )
1440
1329
 
1441
- save_target = resolve_save_target(save)
1442
- switch_model_provider(ModelKeyValues.USE_GEMINI_MODEL, save_target)
1443
- if save_target:
1444
- handled, path = save_environ_to_store(
1445
- save_target,
1446
- {
1447
- **(
1448
- {ModelKeyValues.GOOGLE_API_KEY: google_api_key}
1449
- if google_api_key
1450
- else {ModelKeyValues.GOOGLE_GENAI_USE_VERTEXAI: "YES"}
1451
- ),
1452
- **(
1453
- {ModelKeyValues.GEMINI_MODEL_NAME: model_name}
1454
- if model_name
1455
- else {}
1456
- ),
1457
- **(
1458
- {ModelKeyValues.GOOGLE_CLOUD_PROJECT: google_cloud_project}
1459
- if google_cloud_project
1460
- else {}
1461
- ),
1462
- **(
1463
- {
1464
- ModelKeyValues.GOOGLE_CLOUD_LOCATION: google_cloud_location
1465
- }
1466
- if google_cloud_location
1467
- else {}
1468
- ),
1469
- },
1330
+ _model_name = (
1331
+ model_name if model_name is not None else settings.GEMINI_MODEL_NAME
1332
+ )
1333
+ if _model_name is not None:
1334
+ print(
1335
+ f":raising_hands: Congratulations! You're now using Gemini's `{escape(_model_name)}` for all evals that require an LLM."
1470
1336
  )
1471
-
1472
- if handled:
1473
- print(
1474
- f"Saved environment variables to {path} (ensure it's git-ignored)."
1475
- )
1476
- else:
1477
- print("Unsupported --save option. Use --save=dotenv[:path].")
1478
-
1479
1337
  else:
1480
1338
  print(
1481
- "Tip: persist these settings to a dotenv file with --save=dotenv[:path] (default .env.local) "
1482
- "or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
1339
+ f":raising_hands: Congratulations! You're now using Gemini's model for all evals that require an LLM."
1483
1340
  )
1484
1341
 
1485
- print(
1486
- ":raising_hands: Congratulations! You're now using a Gemini model for all evals that require an LLM."
1487
- )
1488
-
1489
1342
 
1490
1343
  @app.command(name="unset-gemini")
1491
1344
  def unset_gemini_model_env(
@@ -1496,30 +1349,23 @@ def unset_gemini_model_env(
1496
1349
  "Usage: --save=dotenv[:path] (default: .env.local)",
1497
1350
  ),
1498
1351
  ):
1499
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.USE_GEMINI_MODEL)
1500
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.GEMINI_MODEL_NAME)
1501
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.GOOGLE_API_KEY)
1502
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.GOOGLE_CLOUD_PROJECT)
1503
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.GOOGLE_CLOUD_LOCATION)
1504
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.GOOGLE_GENAI_USE_VERTEXAI)
1505
-
1506
- save_target = resolve_save_target(save)
1507
- if save_target:
1508
- handled, path = unset_environ_in_store(
1509
- save_target,
1510
- [
1511
- ModelKeyValues.USE_GEMINI_MODEL,
1512
- ModelKeyValues.GEMINI_MODEL_NAME,
1513
- ModelKeyValues.GOOGLE_API_KEY,
1514
- ModelKeyValues.GOOGLE_CLOUD_PROJECT,
1515
- ModelKeyValues.GOOGLE_CLOUD_LOCATION,
1516
- ModelKeyValues.GOOGLE_GENAI_USE_VERTEXAI,
1517
- ],
1518
- )
1519
- if handled:
1520
- print(f"Removed Gemini environment variables from {path}.")
1521
- else:
1522
- print("Unsupported --save option. Use --save=dotenv[:path].")
1352
+ settings = get_settings()
1353
+ with settings.edit(save=save) as edit_ctx:
1354
+ settings.GOOGLE_API_KEY = None
1355
+ settings.GOOGLE_GENAI_USE_VERTEXAI = None
1356
+ settings.GOOGLE_CLOUD_PROJECT = None
1357
+ settings.GOOGLE_CLOUD_LOCATION = None
1358
+ settings.GEMINI_MODEL_NAME = None
1359
+ settings.USE_GEMINI_MODEL = None
1360
+
1361
+ handled, path, _ = edit_ctx.result
1362
+
1363
+ if not handled and save is not None:
1364
+ # invalid --save format (unsupported)
1365
+ print("Unsupported --save option. Use --save=dotenv[:path].")
1366
+ elif path:
1367
+ # persisted to a file
1368
+ print(f"Removed Gemini model environment variables from {path}.")
1523
1369
 
1524
1370
  if is_openai_configured():
1525
1371
  print(
@@ -1537,7 +1383,7 @@ def set_litellm_model_env(
1537
1383
  api_key: Optional[str] = typer.Option(
1538
1384
  None,
1539
1385
  "--api-key",
1540
- help="API key for the model (if required) (NOT persisted; set in .env[.local])",
1386
+ help="API key for the model. Persisted to dotenv if --save is used; never written to the legacy JSON keystore.",
1541
1387
  ),
1542
1388
  api_base: Optional[str] = typer.Option(
1543
1389
  None, "--api-base", help="Base URL for the model API (if required)"
@@ -1549,42 +1395,34 @@ def set_litellm_model_env(
1549
1395
  "Usage: --save=dotenv[:path] (default: .env.local)",
1550
1396
  ),
1551
1397
  ):
1552
- clear_evaluation_model_keys()
1553
- KEY_FILE_HANDLER.write_key(ModelKeyValues.LITELLM_MODEL_NAME, model_name)
1554
-
1555
- if api_base:
1556
- KEY_FILE_HANDLER.write_key(ModelKeyValues.LITELLM_API_BASE, api_base)
1557
-
1558
- save_target = resolve_save_target(save)
1559
- switch_model_provider(ModelKeyValues.USE_LITELLM, save_target)
1560
- if save_target:
1561
- handled, path = save_environ_to_store(
1562
- save_target,
1563
- {
1564
- ModelKeyValues.LITELLM_MODEL_NAME: model_name,
1565
- **(
1566
- {ModelKeyValues.LITELLM_API_KEY: api_key} if api_key else {}
1567
- ),
1568
- **(
1569
- {ModelKeyValues.LITELLM_API_BASE: api_base}
1570
- if api_base
1571
- else {}
1572
- ),
1573
- },
1398
+ settings = get_settings()
1399
+ with settings.edit(save=save) as edit_ctx:
1400
+ edit_ctx.switch_model_provider(ModelKeyValues.USE_LITELLM)
1401
+ settings.LITELLM_MODEL_NAME = model_name
1402
+ if api_key is not None:
1403
+ settings.LITELLM_API_KEY = api_key
1404
+ if api_base is not None:
1405
+ settings.LITELLM_API_BASE = api_base
1406
+
1407
+ handled, path, _ = edit_ctx.result
1408
+
1409
+ if not handled and save is not None:
1410
+ # invalid --save format (unsupported)
1411
+ print("Unsupported --save option. Use --save=dotenv[:path].")
1412
+ elif path:
1413
+ # persisted to a file
1414
+ print(
1415
+ f"Saved environment variables to {path} (ensure it's git-ignored)."
1574
1416
  )
1575
- if handled:
1576
- print(
1577
- f"Saved environment variables to {path} (ensure it's git-ignored)."
1578
- )
1579
- else:
1580
- print("Unsupported --save option. Use --save=dotenv[:path].")
1581
1417
  else:
1418
+ # updated in-memory & process env only
1582
1419
  print(
1583
- "Tip: persist these settings to a dotenv file with --save=dotenv[:path] (default .env.local) "
1584
- "or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
1420
+ "Settings updated for this session. To persist, use --save=dotenv[:path] "
1421
+ "(default .env.local) or set DEEPEVAL_DEFAULT_SAVE=dotenv:.env.local"
1585
1422
  )
1423
+
1586
1424
  print(
1587
- ":raising_hands: Congratulations! You're now using a LiteLLM model for all evals that require an LLM."
1425
+ f":raising_hands: Congratulations! You're now using LiteLLM's `{escape(model_name)}` for all evals that require an LLM."
1588
1426
  )
1589
1427
 
1590
1428
 
@@ -1597,26 +1435,22 @@ def unset_litellm_model_env(
1597
1435
  "Usage: --save=dotenv[:path] (default: .env.local)",
1598
1436
  ),
1599
1437
  ):
1600
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.LITELLM_MODEL_NAME)
1601
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.LITELLM_API_KEY)
1602
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.LITELLM_API_BASE)
1603
- KEY_FILE_HANDLER.remove_key(ModelKeyValues.USE_LITELLM)
1604
-
1605
- save_target = resolve_save_target(save)
1606
- if save_target:
1607
- handled, path = unset_environ_in_store(
1608
- save_target,
1609
- [
1610
- ModelKeyValues.LITELLM_MODEL_NAME,
1611
- ModelKeyValues.LITELLM_API_KEY,
1612
- ModelKeyValues.LITELLM_API_BASE,
1613
- ModelKeyValues.USE_LITELLM,
1614
- ],
1615
- )
1616
- if handled:
1617
- print(f"Removed LiteLLM environment variables from {path}.")
1618
- else:
1619
- print("Unsupported --save option. Use --save=dotenv[:path].")
1438
+ settings = get_settings()
1439
+ with settings.edit(save=save) as edit_ctx:
1440
+ settings.LITELLM_API_KEY = None
1441
+ settings.LITELLM_MODEL_NAME = None
1442
+ settings.LITELLM_API_BASE = None
1443
+ settings.USE_LITELLM = None
1444
+
1445
+ handled, path, _ = edit_ctx.result
1446
+
1447
+ if not handled and save is not None:
1448
+ # invalid --save format (unsupported)
1449
+ print("Unsupported --save option. Use --save=dotenv[:path].")
1450
+ elif path:
1451
+ # persisted to a file
1452
+ print(f"Removed LiteLLM model environment variables from {path}.")
1453
+
1620
1454
  if is_openai_configured():
1621
1455
  print(
1622
1456
  ":raised_hands: OpenAI will still be used by default because OPENAI_API_KEY is set."