omniopt2 8701.tar.gz → 8712.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of omniopt2 might be problematic.

Files changed (40)
  1. {omniopt2-8701 → omniopt2-8712}/.omniopt.py +40 -20
  2. {omniopt2-8701 → omniopt2-8712}/PKG-INFO +1 -1
  3. {omniopt2-8701 → omniopt2-8712}/omniopt2.egg-info/PKG-INFO +1 -1
  4. {omniopt2-8701 → omniopt2-8712}/pyproject.toml +1 -1
  5. {omniopt2-8701 → omniopt2-8712}/.colorfunctions.sh +0 -0
  6. {omniopt2-8701 → omniopt2-8712}/.dockerignore +0 -0
  7. {omniopt2-8701 → omniopt2-8712}/.general.sh +0 -0
  8. {omniopt2-8701 → omniopt2-8712}/.gitignore +0 -0
  9. {omniopt2-8701 → omniopt2-8712}/.helpers.py +0 -0
  10. {omniopt2-8701 → omniopt2-8712}/.omniopt_plot_cpu_ram_usage.py +0 -0
  11. {omniopt2-8701 → omniopt2-8712}/.omniopt_plot_general.py +0 -0
  12. {omniopt2-8701 → omniopt2-8712}/.omniopt_plot_gpu_usage.py +0 -0
  13. {omniopt2-8701 → omniopt2-8712}/.omniopt_plot_kde.py +0 -0
  14. {omniopt2-8701 → omniopt2-8712}/.omniopt_plot_scatter.py +0 -0
  15. {omniopt2-8701 → omniopt2-8712}/.omniopt_plot_scatter_generation_method.py +0 -0
  16. {omniopt2-8701 → omniopt2-8712}/.omniopt_plot_scatter_hex.py +0 -0
  17. {omniopt2-8701 → omniopt2-8712}/.omniopt_plot_time_and_exit_code.py +0 -0
  18. {omniopt2-8701 → omniopt2-8712}/.omniopt_plot_trial_index_result.py +0 -0
  19. {omniopt2-8701 → omniopt2-8712}/.omniopt_plot_worker.py +0 -0
  20. {omniopt2-8701 → omniopt2-8712}/.random_generator.py +0 -0
  21. {omniopt2-8701 → omniopt2-8712}/.shellscript_functions +0 -0
  22. {omniopt2-8701 → omniopt2-8712}/.tests/pylint.rc +0 -0
  23. {omniopt2-8701 → omniopt2-8712}/.tpe.py +0 -0
  24. {omniopt2-8701 → omniopt2-8712}/LICENSE +0 -0
  25. {omniopt2-8701 → omniopt2-8712}/MANIFEST.in +0 -0
  26. {omniopt2-8701 → omniopt2-8712}/README.md +0 -0
  27. {omniopt2-8701 → omniopt2-8712}/apt-dependencies.txt +0 -0
  28. {omniopt2-8701 → omniopt2-8712}/omniopt +0 -0
  29. {omniopt2-8701 → omniopt2-8712}/omniopt2.egg-info/SOURCES.txt +0 -0
  30. {omniopt2-8701 → omniopt2-8712}/omniopt2.egg-info/dependency_links.txt +0 -0
  31. {omniopt2-8701 → omniopt2-8712}/omniopt2.egg-info/requires.txt +0 -0
  32. {omniopt2-8701 → omniopt2-8712}/omniopt2.egg-info/top_level.txt +0 -0
  33. {omniopt2-8701 → omniopt2-8712}/omniopt_docker +0 -0
  34. {omniopt2-8701 → omniopt2-8712}/omniopt_evaluate +0 -0
  35. {omniopt2-8701 → omniopt2-8712}/omniopt_plot +0 -0
  36. {omniopt2-8701 → omniopt2-8712}/omniopt_share +0 -0
  37. {omniopt2-8701 → omniopt2-8712}/requirements.txt +0 -0
  38. {omniopt2-8701 → omniopt2-8712}/setup.cfg +0 -0
  39. {omniopt2-8701 → omniopt2-8712}/setup.py +0 -0
  40. {omniopt2-8701 → omniopt2-8712}/test_requirements.txt +0 -0
{omniopt2-8701 → omniopt2-8712}/.omniopt.py

@@ -32,6 +32,8 @@ import psutil
 FORCE_EXIT: bool = False
 
 last_msg_progressbar = ""
+last_msg_raw = None
+last_lock = threading.Lock()
 
 def force_exit(signal_number: Any, frame: Any) -> Any:
     global FORCE_EXIT
@@ -545,6 +547,18 @@ logfile_worker_creation_logs: str = f'{log_uuid_dir}_worker_creation_logs'
 logfile_trial_index_to_param_logs: str = f'{log_uuid_dir}_trial_index_to_param_logs'
 LOGFILE_DEBUG_GET_NEXT_TRIALS: Union[str, None] = None
 
+def error_without_print(text: str) -> None:
+    print_debug(text)
+
+    if get_current_run_folder():
+        try:
+            with open(get_current_run_folder("oo_errors.txt"), mode="a", encoding="utf-8") as myfile:
+                myfile.write(text + "\n\n")
+        except (OSError, FileNotFoundError) as e:
+            helpers.print_color("red", f"Error: {e}. This may mean that the {get_current_run_folder()} was deleted during the run. Could not write '{text} to {get_current_run_folder()}/oo_errors.txt'")
+            sys.exit(99)
+
+
 def print_red(text: str) -> None:
     helpers.print_color("red", text)
 
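The error_without_print helper added here routes messages to the debug log and appends them to oo_errors.txt inside the current run folder instead of printing them to the console. As a rough, standalone sketch of that append-to-run-folder pattern (the run_folder argument and the plain stderr fallback are stand-ins, not the package's own helpers):

```python
import os
import sys


def log_error_to_file(run_folder: str, text: str) -> None:
    """Append an error message to oo_errors.txt inside the run folder."""
    if not run_folder:
        return
    try:
        with open(os.path.join(run_folder, "oo_errors.txt"), mode="a", encoding="utf-8") as fh:
            fh.write(text + "\n\n")
    except OSError as exc:
        # The run folder may have been deleted while the run was still active.
        print(f"Could not write to {run_folder}/oo_errors.txt: {exc}", file=sys.stderr)
        sys.exit(99)
```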
@@ -2827,15 +2841,20 @@ def print_debug_get_next_trials(got: int, requested: int, _line: int) -> None:
     log_message_to_file(LOGFILE_DEBUG_GET_NEXT_TRIALS, msg, 0, "")
 
 def print_debug_progressbar(msg: str) -> None:
-    global last_msg_progressbar
+    global last_msg_progressbar, last_msg_raw
 
-    if msg != last_msg_progressbar:
-        time_str = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-        msg = f"{time_str} ({worker_generator_uuid}): {msg}"
+    try:
+        with last_lock:
+            if msg != last_msg_raw:
+                time_str = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+                full_msg = f"{time_str} ({worker_generator_uuid}): {msg}"
 
-        _debug_progressbar(msg)
+                _debug_progressbar(full_msg)
 
-        last_msg_progressbar = msg
+                last_msg_raw = msg
+                last_msg_progressbar = full_msg
+    except Exception as e:
+        print(f"Error in print_debug_progressbar: {e}", flush=True)
 
 def get_process_info(pid: Any) -> str:
     try:
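The reworked print_debug_progressbar deduplicates on the raw message (before the timestamp is prepended) and serializes the check-and-update with last_lock so concurrent workers cannot interleave it. A self-contained sketch of that pattern, with a hypothetical emit() standing in for the package's _debug_progressbar:

```python
import datetime
import threading
from typing import Optional

_last_raw: Optional[str] = None
_lock = threading.Lock()


def emit(line: str) -> None:
    # Stand-in for the real progress-bar logger.
    print(line, flush=True)


def debug_progressbar(msg: str) -> None:
    global _last_raw
    with _lock:
        if msg != _last_raw:
            stamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            emit(f"{stamp}: {msg}")
            _last_raw = msg


debug_progressbar("generating arm 1")
debug_progressbar("generating arm 1")  # suppressed: same raw message as last time
debug_progressbar("generating arm 2")
```

Comparing against the raw message rather than the timestamped one is what keeps identical messages from being re-emitted merely because the timestamp changed; the previous version compared the incoming message against the already-timestamped string, so the deduplication effectively never triggered.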
@@ -5009,7 +5028,8 @@ def show_pareto_or_error_msg(path_to_calculate: str, res_names: list = arg_resul
         try:
             show_pareto_frontier_data(path_to_calculate, res_names, disable_sixel_and_table)
         except Exception as e:
-            print_red(f"show_pareto_frontier_data() failed with exception '{e}'")
+            inner_tb = ''.join(traceback.format_exception(type(e), e, e.__traceback__))
+            print_red(f"show_pareto_frontier_data() failed with exception '{e}':\n{inner_tb}")
     else:
         print_debug(f"show_pareto_frontier_data will NOT be executed because len(arg_result_names) is {len(arg_result_names)}")
     return None
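The enriched message uses traceback.format_exception to turn the caught exception into a full traceback string instead of reporting only its repr. A short illustration of that call in isolation (the flaky() function is made up for the example):

```python
import traceback


def flaky() -> None:
    raise ValueError("boom")


try:
    flaky()
except Exception as e:
    inner_tb = ''.join(traceback.format_exception(type(e), e, e.__traceback__))
    print(f"flaky() failed with exception '{e}':\n{inner_tb}")
```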
@@ -7496,7 +7516,7 @@ def finish_previous_jobs(new_msgs: List[str] = []) -> None:
 
     jobs_copy = global_vars["jobs"][:]
 
-    finishing_jobs_start_time = time.time()
+    #finishing_jobs_start_time = time.time()
 
     with ThreadPoolExecutor() as finish_job_executor:
         futures = [finish_job_executor.submit(_finish_previous_jobs_helper_check_and_process, (job, trial_index)) for job, trial_index in jobs_copy]
@@ -7507,11 +7527,11 @@ def finish_previous_jobs(new_msgs: List[str] = []) -> None:
         except Exception as e:
             print_red(f"⚠ Exception in parallel job handling: {e}")
 
-    finishing_jobs_end_time = time.time()
+    #finishing_jobs_end_time = time.time()
 
-    finishing_jobs_runtime = finishing_jobs_end_time - finishing_jobs_start_time
+    #finishing_jobs_runtime = finishing_jobs_end_time - finishing_jobs_start_time
 
-    print_debug(f"Finishing jobs took {finishing_jobs_runtime} second(s)")
+    #print_debug(f"Finishing jobs took {finishing_jobs_runtime} second(s)")
 
     if this_jobs_finished > 0:
         save_results_csv()
@@ -7688,7 +7708,7 @@ def get_ax_client_trial(trial_index: int) -> Optional[ax.core.trial.Trial]:
     try:
         return ax_client.get_trial(trial_index)
     except KeyError as e:
-        print_red(f"get_ax_client_trial: trial_index {trial_index} failed, error: {e}")
+        error_without_print(f"get_ax_client_trial: trial_index {trial_index} failed, error: {e}")
         return None
 
 def orchestrator_start_trial(parameters: Union[dict, str], trial_index: int) -> None:
@@ -7833,7 +7853,7 @@ def execute_evaluation(_params: list) -> Optional[int]:
     _trial = get_ax_client_trial(trial_index)
 
     if _trial is None:
-        print_red(f"execute_evaluation: _trial was not in execute_evaluation for params {_params}")
+        error_without_print(f"execute_evaluation: _trial was not in execute_evaluation for params {_params}")
         return None
 
     def mark_trial_stage(stage: str, error_msg: str) -> None:
@@ -8123,13 +8143,13 @@ def get_batched_arms(nr_of_jobs_to_get: int) -> list:
 
         print_debug(f"get_batched_arms: Attempt {attempts + 1}: requesting 1 more arm")
 
-        t0 = time.time()
+        #t0 = time.time()
         pending_observations = get_pending_observation_features(experiment=ax_client.experiment)
-        dt = time.time() - t0
-        print_debug(f"got pending observations: {pending_observations} (took {dt:.2f} seconds)")
+        #dt = time.time() - t0
+        #print_debug(f"got pending observations: {pending_observations} (took {dt:.2f} seconds)")
 
         try:
-            print_debug("getting global_gs.gen() with n=1")
+            #print_debug("getting global_gs.gen() with n=1")
             batched_generator_run: Any = global_gs.gen(
                 experiment=ax_client.experiment,
                 n=1,
@@ -8144,12 +8164,12 @@ def get_batched_arms(nr_of_jobs_to_get: int) -> list:
             depth = 0
             path = "batched_generator_run"
             while isinstance(batched_generator_run, (list, tuple)) and len(batched_generator_run) == 1:
-                print_debug(f"Depth {depth}, path {path}, type {type(batched_generator_run).__name__}, length {len(batched_generator_run)}: {batched_generator_run}")
+                #print_debug(f"Depth {depth}, path {path}, type {type(batched_generator_run).__name__}, length {len(batched_generator_run)}: {batched_generator_run}")
                 batched_generator_run = batched_generator_run[0]
                 path += "[0]"
                 depth += 1
 
-            print_debug(f"Final flat object at depth {depth}, path {path}: {batched_generator_run} (type {type(batched_generator_run).__name__})")
+            #print_debug(f"Final flat object at depth {depth}, path {path}: {batched_generator_run} (type {type(batched_generator_run).__name__})")
 
             new_arms = getattr(batched_generator_run, "arms", [])
             if not new_arms:
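The commented-out debug prints surrounded a small loop that unwraps singleton lists or tuples around the generator run until a flat object remains. The same unwrapping pattern in isolation, applied to a dummy nested value:

```python
from typing import Any


def unwrap_singletons(value: Any) -> Any:
    # Peel off nested single-element lists/tuples, e.g. [[("x",)]] -> "x".
    while isinstance(value, (list, tuple)) and len(value) == 1:
        value = value[0]
    return value


print(unwrap_singletons([[("generator_run",)]]))  # -> generator_run
```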
@@ -9428,7 +9448,7 @@ def finalize_jobs() -> None:
     handle_slurm_execution()
 
 def go_through_jobs_that_are_not_completed_yet() -> None:
-    print_debug(f"Waiting for jobs to finish (currently, len(global_vars['jobs']) = {len(global_vars['jobs'])}")
+    #print_debug(f"Waiting for jobs to finish (currently, len(global_vars['jobs']) = {len(global_vars['jobs'])}")
 
     nr_jobs_left = len(global_vars['jobs'])
     if nr_jobs_left == 1:
{omniopt2-8701 → omniopt2-8712}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: omniopt2
-Version: 8701
+Version: 8712
 Summary: Automatic highly parallelized hyperparameter optimizer based on Ax/Botorch
 Home-page: https://scads.ai/transfer-2/verfuegbare-software-dienste-en/omniopt/
 Author: Norman Koch
{omniopt2-8701 → omniopt2-8712}/omniopt2.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: omniopt2
-Version: 8701
+Version: 8712
 Summary: Automatic highly parallelized hyperparameter optimizer based on Ax/Botorch
 Home-page: https://scads.ai/transfer-2/verfuegbare-software-dienste-en/omniopt/
 Author: Norman Koch
{omniopt2-8701 → omniopt2-8712}/pyproject.toml

@@ -5,7 +5,7 @@ authors = [
     {email = "norman.koch@tu-dresden.de"},
     {name = "Norman Koch"}
 ]
-version = "8701"
+version = "8712"
 
 readme = "README.md"
 dynamic = ["dependencies"]