omniopt2 8753__py3-none-any.whl → 8762__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of omniopt2 might be problematic.
Files changed (37)
  1. .omniopt.py +56 -46
  2. omniopt +0 -5
  3. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/.omniopt.py +56 -46
  4. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/omniopt +0 -5
  5. {omniopt2-8753.dist-info → omniopt2-8762.dist-info}/METADATA +1 -1
  6. {omniopt2-8753.dist-info → omniopt2-8762.dist-info}/RECORD +37 -37
  7. omniopt2.egg-info/PKG-INFO +1 -1
  8. pyproject.toml +1 -1
  9. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/.colorfunctions.sh +0 -0
  10. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/.general.sh +0 -0
  11. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/.helpers.py +0 -0
  12. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/.omniopt_plot_cpu_ram_usage.py +0 -0
  13. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/.omniopt_plot_general.py +0 -0
  14. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/.omniopt_plot_gpu_usage.py +0 -0
  15. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/.omniopt_plot_kde.py +0 -0
  16. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/.omniopt_plot_scatter.py +0 -0
  17. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/.omniopt_plot_scatter_generation_method.py +0 -0
  18. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/.omniopt_plot_scatter_hex.py +0 -0
  19. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/.omniopt_plot_time_and_exit_code.py +0 -0
  20. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/.omniopt_plot_trial_index_result.py +0 -0
  21. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/.omniopt_plot_worker.py +0 -0
  22. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/.random_generator.py +0 -0
  23. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/.shellscript_functions +0 -0
  24. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/.tpe.py +0 -0
  25. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/LICENSE +0 -0
  26. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/apt-dependencies.txt +0 -0
  27. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/omniopt_docker +0 -0
  28. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/omniopt_evaluate +0 -0
  29. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/omniopt_plot +0 -0
  30. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/omniopt_share +0 -0
  31. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/pylint.rc +0 -0
  32. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/requirements.txt +0 -0
  33. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/setup.py +0 -0
  34. {omniopt2-8753.data → omniopt2-8762.data}/data/bin/test_requirements.txt +0 -0
  35. {omniopt2-8753.dist-info → omniopt2-8762.dist-info}/WHEEL +0 -0
  36. {omniopt2-8753.dist-info → omniopt2-8762.dist-info}/licenses/LICENSE +0 -0
  37. {omniopt2-8753.dist-info → omniopt2-8762.dist-info}/top_level.txt +0 -0
.omniopt.py CHANGED
@@ -8089,7 +8089,7 @@ def break_run_search(_name: str, _max_eval: Optional[int]) -> bool:
 
  return _ret
 
- def _calculate_nr_of_jobs_to_get(simulated_jobs: int, currently_running_jobs: int) -> int:
+ def calculate_nr_of_jobs_to_get(simulated_jobs: int, currently_running_jobs: int) -> int:
  """Calculates the number of jobs to retrieve."""
  return min(
  max_eval + simulated_jobs - count_done_jobs(),
@@ -8102,7 +8102,7 @@ def remove_extra_spaces(text: str) -> str:
  raise ValueError("Input must be a string")
  return re.sub(r'\s+', ' ', text).strip()
 
- def _get_trials_message(nr_of_jobs_to_get: int, full_nr_of_jobs_to_get: int, trial_durations: List[float]) -> str:
+ def get_trials_message(nr_of_jobs_to_get: int, full_nr_of_jobs_to_get: int, trial_durations: List[float]) -> str:
  """Generates the appropriate message for the number of trials being retrieved."""
  ret = ""
  if full_nr_of_jobs_to_get > 1:
@@ -8270,7 +8270,7 @@ def generate_trials(n: int, recursion: bool) -> Tuple[Dict[int, Any], bool]:
  retries += 1
  continue
 
- progressbar_description(_get_trials_message(cnt + 1, n, trial_durations))
+ progressbar_description(get_trials_message(cnt + 1, n, trial_durations))
 
  try:
  result = create_and_handle_trial(arm)
@@ -8292,7 +8292,7 @@ def generate_trials(n: int, recursion: bool) -> Tuple[Dict[int, Any], bool]:
  return finalized
 
  except Exception as e:
- return _handle_generation_failure(e, n, recursion)
+ return handle_generation_failure(e, n, recursion)
 
  class TrialRejected(Exception):
  pass
@@ -8357,7 +8357,7 @@ def finalize_generation(trials_dict: Dict[int, Any], cnt: int, requested: int, s
 
  return trials_dict, False
 
- def _handle_generation_failure(
+ def handle_generation_failure(
  e: Exception,
  requested: int,
  recursion: bool
@@ -8373,7 +8373,7 @@ def _handle_generation_failure(
  )):
  msg = str(e)
  if msg not in error_8_saved:
- _print_exhaustion_warning(e, recursion)
+ print_exhaustion_warning(e, recursion)
  error_8_saved.append(msg)
 
  if not recursion and args.revert_to_random_when_seemingly_exhausted:
@@ -8381,11 +8381,11 @@ def _handle_generation_failure(
  set_global_gs_to_random()
  return fetch_next_trials(requested, True)
 
- print_red(f"_handle_generation_failure: General Exception: {e}")
+ print_red(f"handle_generation_failure: General Exception: {e}")
 
  return {}, True
 
- def _print_exhaustion_warning(e: Exception, recursion: bool) -> None:
+ def print_exhaustion_warning(e: Exception, recursion: bool) -> None:
  if not recursion and args.revert_to_random_when_seemingly_exhausted:
  print_yellow(f"\n⚠Error 8: {e} From now (done jobs: {count_done_jobs()}) on, random points will be generated.")
  else:
@@ -9235,20 +9235,20 @@ def create_and_execute_next_runs(next_nr_steps: int, phase: Optional[str], _max_
  done_optimizing: bool = False
 
  try:
- done_optimizing, trial_index_to_param = _create_and_execute_next_runs_run_loop(_max_eval, phase)
- _create_and_execute_next_runs_finish(done_optimizing)
+ done_optimizing, trial_index_to_param = create_and_execute_next_runs_run_loop(_max_eval, phase)
+ create_and_execute_next_runs_finish(done_optimizing)
  except Exception as e:
  stacktrace = traceback.format_exc()
  print_debug(f"Warning: create_and_execute_next_runs encountered an exception: {e}\n{stacktrace}")
  return handle_exceptions_create_and_execute_next_runs(e)
 
- return _create_and_execute_next_runs_return_value(trial_index_to_param)
+ return create_and_execute_next_runs_return_value(trial_index_to_param)
 
- def _create_and_execute_next_runs_run_loop(_max_eval: Optional[int], phase: Optional[str]) -> Tuple[bool, Optional[Dict]]:
+ def create_and_execute_next_runs_run_loop(_max_eval: Optional[int], phase: Optional[str]) -> Tuple[bool, Optional[Dict]]:
  done_optimizing = False
  trial_index_to_param: Optional[Dict] = None
 
- nr_of_jobs_to_get = _calculate_nr_of_jobs_to_get(get_nr_of_imported_jobs(), len(global_vars["jobs"]))
+ nr_of_jobs_to_get = calculate_nr_of_jobs_to_get(get_nr_of_imported_jobs(), len(global_vars["jobs"]))
 
  __max_eval = _max_eval if _max_eval is not None else 0
  new_nr_of_jobs_to_get = min(__max_eval - (submitted_jobs() - failed_jobs()), nr_of_jobs_to_get)
@@ -9286,13 +9286,13 @@ def _create_and_execute_next_runs_run_loop(_max_eval: Optional[int], phase: Opti
 
  return done_optimizing, trial_index_to_param
 
- def _create_and_execute_next_runs_finish(done_optimizing: bool) -> None:
+ def create_and_execute_next_runs_finish(done_optimizing: bool) -> None:
  finish_previous_jobs(["finishing jobs"])
 
  if done_optimizing:
  end_program(False, 0)
 
- def _create_and_execute_next_runs_return_value(trial_index_to_param: Optional[Dict]) -> int:
+ def create_and_execute_next_runs_return_value(trial_index_to_param: Optional[Dict]) -> int:
  try:
  if trial_index_to_param:
  res = len(trial_index_to_param.keys())
@@ -9623,28 +9623,28 @@ def parse_parameters() -> Any:
  def create_pareto_front_table(idxs: List[int], metric_x: str, metric_y: str) -> Table:
  table = Table(title=f"Pareto-Front for {metric_y}/{metric_x}:", show_lines=True)
 
- rows = _pareto_front_table_read_csv()
+ rows = pareto_front_table_read_csv()
  if not rows:
  table.add_column("No data found")
  return table
 
- filtered_rows = _pareto_front_table_filter_rows(rows, idxs)
+ filtered_rows = pareto_front_table_filter_rows(rows, idxs)
  if not filtered_rows:
  table.add_column("No matching entries")
  return table
 
- param_cols, result_cols = _pareto_front_table_get_columns(filtered_rows[0])
+ param_cols, result_cols = pareto_front_table_get_columns(filtered_rows[0])
 
- _pareto_front_table_add_headers(table, param_cols, result_cols)
- _pareto_front_table_add_rows(table, filtered_rows, param_cols, result_cols)
+ pareto_front_table_add_headers(table, param_cols, result_cols)
+ pareto_front_table_add_rows(table, filtered_rows, param_cols, result_cols)
 
  return table
 
- def _pareto_front_table_read_csv() -> List[Dict[str, str]]:
+ def pareto_front_table_read_csv() -> List[Dict[str, str]]:
  with open(RESULT_CSV_FILE, mode="r", encoding="utf-8", newline="") as f:
  return list(csv.DictReader(f))
 
- def _pareto_front_table_filter_rows(rows: List[Dict[str, str]], idxs: List[int]) -> List[Dict[str, str]]:
+ def pareto_front_table_filter_rows(rows: List[Dict[str, str]], idxs: List[int]) -> List[Dict[str, str]]:
  result = []
  for row in rows:
  try:
@@ -9656,7 +9656,7 @@ def _pareto_front_table_filter_rows(rows: List[Dict[str, str]], idxs: List[int])
  result.append(row)
  return result
 
- def _pareto_front_table_get_columns(first_row: Dict[str, str]) -> Tuple[List[str], List[str]]:
+ def pareto_front_table_get_columns(first_row: Dict[str, str]) -> Tuple[List[str], List[str]]:
  all_columns = list(first_row.keys())
  ignored_cols = set(special_col_names) - {"trial_index"}
 
@@ -9664,13 +9664,13 @@ def _pareto_front_table_get_columns(first_row: Dict[str, str]) -> Tuple[List[str
  result_cols = [col for col in arg_result_names if col in all_columns]
  return param_cols, result_cols
 
- def _pareto_front_table_add_headers(table: Table, param_cols: List[str], result_cols: List[str]) -> None:
+ def pareto_front_table_add_headers(table: Table, param_cols: List[str], result_cols: List[str]) -> None:
  for col in param_cols:
  table.add_column(col, justify="center")
  for col in result_cols:
  table.add_column(Text(f"{col}", style="cyan"), justify="center")
 
- def _pareto_front_table_add_rows(table: Table, rows: List[Dict[str, str]], param_cols: List[str], result_cols: List[str]) -> None:
+ def pareto_front_table_add_rows(table: Table, rows: List[Dict[str, str]], param_cols: List[str], result_cols: List[str]) -> None:
  for row in rows:
  values = [str(helpers.to_int_when_possible(row[col])) for col in param_cols]
  result_values = [Text(str(helpers.to_int_when_possible(row[col])), style="cyan") for col in result_cols]
@@ -9731,11 +9731,11 @@ def plot_pareto_frontier_sixel(data: Any, x_metric: str, y_metric: str) -> None:
 
  plt.close(fig)
 
- def _pareto_front_general_validate_shapes(x: np.ndarray, y: np.ndarray) -> None:
+ def pareto_front_general_validate_shapes(x: np.ndarray, y: np.ndarray) -> None:
  if x.shape != y.shape:
  raise ValueError("Input arrays x and y must have the same shape.")
 
- def _pareto_front_general_compare(
+ def pareto_front_general_compare(
  xi: float, yi: float, xj: float, yj: float,
  x_minimize: bool, y_minimize: bool
  ) -> bool:
@@ -9746,7 +9746,7 @@ def _pareto_front_general_compare(
 
  return bool(x_better_eq and y_better_eq and (x_strictly_better or y_strictly_better))
 
- def _pareto_front_general_find_dominated(
+ def pareto_front_general_find_dominated(
  x: np.ndarray, y: np.ndarray, x_minimize: bool, y_minimize: bool
  ) -> np.ndarray:
  num_points = len(x)
@@ -9757,7 +9757,7 @@ def _pareto_front_general_find_dominated(
  if i == j:
  continue
 
- if _pareto_front_general_compare(x[i], y[i], x[j], y[j], x_minimize, y_minimize):
+ if pareto_front_general_compare(x[i], y[i], x[j], y[j], x_minimize, y_minimize):
  is_dominated[i] = True
  break
 
@@ -9770,14 +9770,14 @@ def pareto_front_general(
  y_minimize: bool = True
  ) -> np.ndarray:
  try:
- _pareto_front_general_validate_shapes(x, y)
- is_dominated = _pareto_front_general_find_dominated(x, y, x_minimize, y_minimize)
+ pareto_front_general_validate_shapes(x, y)
+ is_dominated = pareto_front_general_find_dominated(x, y, x_minimize, y_minimize)
  return np.where(~is_dominated)[0]
  except Exception as e:
  print("Error in pareto_front_general:", str(e))
  return np.array([], dtype=int)
 
- def _pareto_front_aggregate_data(path_to_calculate: str) -> Optional[Dict[Tuple[int, str], Dict[str, Dict[str, float]]]]:
+ def pareto_front_aggregate_data(path_to_calculate: str) -> Optional[Dict[Tuple[int, str], Dict[str, Dict[str, float]]]]:
  results_csv_file = f"{path_to_calculate}/{RESULTS_CSV_FILENAME}"
  result_names_file = f"{path_to_calculate}/result_names.txt"
 
@@ -9805,7 +9805,7 @@ def _pareto_front_aggregate_data(path_to_calculate: str) -> Optional[Dict[Tuple[
 
  return records
 
- def _pareto_front_filter_complete_points(
+ def pareto_front_filter_complete_points(
  path_to_calculate: str,
  records: Dict[Tuple[int, str], Dict[str, Dict[str, float]]],
  primary_name: str,
@@ -9822,7 +9822,7 @@ def _pareto_front_filter_complete_points(
  raise ValueError(f"No full data points with both objectives found in {path_to_calculate}.")
  return points
 
- def _pareto_front_transform_objectives(
+ def pareto_front_transform_objectives(
  points: List[Tuple[Any, float, float]],
  primary_name: str,
  secondary_name: str
@@ -9845,7 +9845,7 @@ def _pareto_front_transform_objectives(
 
  return x, y
 
- def _pareto_front_select_pareto_points(
+ def pareto_front_select_pareto_points(
  x: np.ndarray,
  y: np.ndarray,
  x_minimize: bool,
@@ -9859,7 +9859,7 @@ def _pareto_front_select_pareto_points(
  selected_points = [points[i] for i in sorted_indices]
  return selected_points
 
- def _pareto_front_build_return_structure(
+ def pareto_front_build_return_structure(
  path_to_calculate: str,
  selected_points: List[Tuple[Any, float, float]],
  records: Dict[Tuple[int, str], Dict[str, Dict[str, float]]],
@@ -9890,7 +9890,7 @@ def _pareto_front_build_return_structure(
  for (trial_index, arm_name), _, _ in selected_points:
  row = csv_rows.get(trial_index, {})
  if row == {} or row is None or row['arm_name'] != arm_name:
- print_debug(f"_pareto_front_build_return_structure: trial_index '{trial_index}' could not be found and row returned as None")
+ print_debug(f"pareto_front_build_return_structure: trial_index '{trial_index}' could not be found and row returned as None")
  continue
 
  idxs.append(int(row["trial_index"]))
@@ -9934,15 +9934,15 @@ def get_pareto_frontier_points(
  absolute_metrics: List[str],
  num_points: int
  ) -> Optional[dict]:
- records = _pareto_front_aggregate_data(path_to_calculate)
+ records = pareto_front_aggregate_data(path_to_calculate)
 
  if records is None:
  return None
 
- points = _pareto_front_filter_complete_points(path_to_calculate, records, primary_objective, secondary_objective)
- x, y = _pareto_front_transform_objectives(points, primary_objective, secondary_objective)
- selected_points = _pareto_front_select_pareto_points(x, y, x_minimize, y_minimize, points, num_points)
- result = _pareto_front_build_return_structure(path_to_calculate, selected_points, records, absolute_metrics, primary_objective, secondary_objective)
+ points = pareto_front_filter_complete_points(path_to_calculate, records, primary_objective, secondary_objective)
+ x, y = pareto_front_transform_objectives(points, primary_objective, secondary_objective)
+ selected_points = pareto_front_select_pareto_points(x, y, x_minimize, y_minimize, points, num_points)
+ result = pareto_front_build_return_structure(path_to_calculate, selected_points, records, absolute_metrics, primary_objective, secondary_objective)
 
  return result
 
@@ -9954,7 +9954,7 @@ def save_experiment_state() -> None:
  state_path = get_current_run_folder("experiment_state.json")
  save_ax_client_to_json_file(state_path)
  except Exception as e:
- print(f"Error saving experiment state: {e}")
+ print_debug(f"Error saving experiment state: {e}")
 
  def wait_for_state_file(state_path: str, min_size: int = 5, max_wait_seconds: int = 60) -> bool:
  try:
@@ -10209,7 +10209,7 @@ def get_pareto_front_data(path_to_calculate: str, res_names: list) -> dict:
  def show_pareto_frontier_data(path_to_calculate: str, res_names: list, disable_sixel_and_table: bool = False) -> None:
  if len(res_names) <= 1:
  print_debug(f"--result_names (has {len(res_names)} entries) must be at least 2.")
- return
+ return None
 
  pareto_front_data: dict = get_pareto_front_data(path_to_calculate, res_names)
 
@@ -10230,8 +10230,16 @@ def show_pareto_frontier_data(path_to_calculate: str, res_names: list, disable_s
  else:
  print(f"Not showing Pareto-front-sixel for {path_to_calculate}")
 
- if len(calculated_frontier[metric_x][metric_y]["idxs"]):
- pareto_points[metric_x][metric_y] = sorted(calculated_frontier[metric_x][metric_y]["idxs"])
+ if calculated_frontier is None:
+ print_debug("ERROR: calculated_frontier is None")
+ return None
+
+ try:
+ if len(calculated_frontier[metric_x][metric_y]["idxs"]):
+ pareto_points[metric_x][metric_y] = sorted(calculated_frontier[metric_x][metric_y]["idxs"])
+ except AttributeError:
+ print_debug(f"ERROR: calculated_frontier structure invalid for ({metric_x}, {metric_y})")
+ return None
 
  rich_table = pareto_front_as_rich_table(
  calculated_frontier[metric_x][metric_y]["idxs"],
@@ -10256,6 +10264,8 @@ def show_pareto_frontier_data(path_to_calculate: str, res_names: list, disable_s
 
  live_share_after_pareto()
 
+ return None
+
  def show_available_hardware_and_generation_strategy_string(gpu_string: str, gpu_color: str) -> None:
  cpu_count = os.cpu_count()
 
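The Pareto-related hunks above (around lines 9731-9790) only drop the leading underscore from the helper names; the dominance logic itself is unchanged. For orientation, here is a minimal self-contained sketch of that logic. Only the return expression and the loop shape are visible in the diff; the intermediate variable definitions and the names dominated_by / pareto_indices are illustrative assumptions, not the packaged code.

# Sketch of the dominance test used by pareto_front_general_compare and the
# O(n^2) scan in pareto_front_general_find_dominated (names below are assumed).
import numpy as np

def dominated_by(xi: float, yi: float, xj: float, yj: float,
                 x_minimize: bool = True, y_minimize: bool = True) -> bool:
    # True when point j is at least as good as point i in both objectives
    # and strictly better in at least one of them.
    x_better_eq = (xj <= xi) if x_minimize else (xj >= xi)
    y_better_eq = (yj <= yi) if y_minimize else (yj >= yi)
    x_strictly_better = (xj < xi) if x_minimize else (xj > xi)
    y_strictly_better = (yj < yi) if y_minimize else (yj > yi)
    return bool(x_better_eq and y_better_eq and (x_strictly_better or y_strictly_better))

def pareto_indices(x: np.ndarray, y: np.ndarray) -> np.ndarray:
    # Keep every point that no other point dominates (brute-force pairwise scan).
    is_dominated = np.zeros(len(x), dtype=bool)
    for i in range(len(x)):
        for j in range(len(x)):
            if i != j and dominated_by(x[i], y[i], x[j], y[j]):
                is_dominated[i] = True
                break
    return np.where(~is_dominated)[0]

# Example: when minimizing both objectives, (3, 3) is dominated by (2, 2).
print(pareto_indices(np.array([1.0, 2.0, 3.0, 4.0]), np.array([4.0, 2.0, 3.0, 1.0])))  # [0 1 3]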
omniopt CHANGED
@@ -1924,34 +1924,29 @@ EOF
  exit_code_lines=$(grep -i "exit-code:*" "$LOG_PATH" 2>/dev/null)
  exit_code_lines_lines=$?
  if [ $exit_code_lines_lines -ne 0 ] || [ -z "$exit_code_lines" ]; then
- echo "WARN: grep failed or no exit-code line found."
  exit_code_lines=""
  fi
 
  exit_code_sed=$(echo "$exit_code_lines" | sed -e 's#Exit-Code:*[[:space:]]*##i' -e 's#,.*##')
  exit_code_sed_sed=$?
  if [ $exit_code_sed_sed -ne 0 ] || [ -z "$exit_code_sed" ]; then
- echo "WARN: sed failed or no data after sed."
  exit_code_sed=""
  fi
 
  exit_code_tail=$(echo "$exit_code_sed" | tail -n1)
  exit_code_tail_tail=$?
  if [ $exit_code_tail_tail -ne 0 ] || [ -z "$exit_code_tail" ]; then
- echo "WARN: tail failed or no data after tail."
  exit_code_tail=""
  fi
 
  exit_code_only_digits=$(echo "$exit_code_tail" | grep -o '[0-9]\+')
  if [ -z "$exit_code_only_digits" ]; then
- echo "WARN: No valid exit code found, setting it to 3"
  exit_code_only_digits=3
  fi
 
  exit_code="$exit_code_only_digits"
 
  if ! [[ "$exit_code" =~ ^[0-9]+$ ]]; then
- echo "WARN: exit_code invalid ('$exit_code'), setting to 3"
  exit_code=3
  fi
 
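The omniopt change above removes only the WARN echo lines; the exit-code parsing itself is untouched. Condensed into a standalone sketch for readability (the variable names, commands, and the fallback value 3 are taken from the diff; the $LOG_PATH argument handling and the surrounding scaffolding are assumptions):

#!/usr/bin/env bash
# Sketch: pull the last "Exit-Code: N" entry out of a job log and fall back to 3
# when nothing usable is found.
LOG_PATH="$1"

exit_code_lines=$(grep -i "exit-code:*" "$LOG_PATH" 2>/dev/null) || exit_code_lines=""
exit_code_sed=$(echo "$exit_code_lines" | sed -e 's#Exit-Code:*[[:space:]]*##i' -e 's#,.*##')
exit_code_tail=$(echo "$exit_code_sed" | tail -n1)
exit_code_only_digits=$(echo "$exit_code_tail" | grep -o '[0-9]\+')

# No digits found at all: fall back to the sentinel exit code 3.
if [ -z "$exit_code_only_digits" ]; then
    exit_code_only_digits=3
fi

exit_code="$exit_code_only_digits"

# Final sanity check: anything non-numeric also falls back to 3.
if ! [[ "$exit_code" =~ ^[0-9]+$ ]]; then
    exit_code=3
fi

echo "$exit_code"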
{omniopt2-8753.dist-info → omniopt2-8762.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: omniopt2
- Version: 8753
+ Version: 8762
  Summary: Automatic highly parallelized hyperparameter optimizer based on Ax/Botorch
  Home-page: https://scads.ai/transfer-2/verfuegbare-software-dienste-en/omniopt/
  Author: Norman Koch
{omniopt2-8753.dist-info → omniopt2-8762.dist-info}/RECORD CHANGED
@@ -3,7 +3,7 @@
  .general.sh,sha256=uyGMN8xNToQ0v50KoiYxm6jRmgf0orroOaodM_Nuq30,2107
  .gitignore,sha256=3A7BlH7vV-liv6jRnjoLhPFCBWO_b-mxjTPXiNIQdQs,3802
  .helpers.py,sha256=4YPKu3HBjV9SWUhOGlnmBPLtvn2ofbUPqwQrDIPegmM,30848
- .omniopt.py,sha256=nHtcaTPKJnW9GSG4wuuZrf0bsJ7e0mOyzpoo9KARmpo,448824
+ .omniopt.py,sha256=PJX6eMEkRBUffU5W43KhvZlrMorGd-Kxd3R1xTeeiuA,449143
  .omniopt_plot_cpu_ram_usage.py,sha256=DbOAmdrbcZtsMnHJgHfeRngjtv6zX5J0axyua_dYezc,3932
  .omniopt_plot_general.py,sha256=3iy-bPef8I5rTB3KRz-TuleMdgKDmVZ6c8LuNQhNwu0,6810
  .omniopt_plot_gpu_usage.py,sha256=ojxVicwSoiyl7f3c-6lLuT2EpyPcSJKEcbp75LgDY2k,5107
@@ -21,51 +21,51 @@ LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
  MANIFEST.in,sha256=L3QBXyL1TtyWxbAPsfjZdCfpeBrRuI5MSgdnvwdQ22Q,24
  README.md,sha256=WJxay4TRM8wfK_k5C01SlizLUHgKqlMwa_PQwAxj-As,3221
  apt-dependencies.txt,sha256=X5tBB8ZLW9XaFtRh8B7C2pIkSoxNNawqioDr0QZAtuM,149
- omniopt,sha256=uZ8TOePdCdnwu2Oy96rS4aUrY2CyC2QoBZznSgSjal0,51841
+ omniopt,sha256=MgQAN6Gcn21TszmFuY87XsRUwO4uMmbz4fW5QpGSH-I,51544
  omniopt_docker,sha256=XyEGZ71l-tUlAYUluVF39fGO3NLDRa8UyrwxmAsXWNI,3542
  omniopt_evaluate,sha256=9oBh0_ikCuIz_aJQZrN0j39NDiIDYoSvEFmSVIoFjJE,23842
  omniopt_plot,sha256=ze6Dz2k0w5jyUnfatAA7dk59nzQjKC_G36tDvJGWsfA,13696
  omniopt_share,sha256=2KZcTjN_7vcxNjb4sLLiWgUc38pl2-6-Qwmgq6_nTb0,13856
- pyproject.toml,sha256=ZCjXkbHUQM5DBvDcwCfREIE49DQW300qkFeMXN9r_pI,397
+ pyproject.toml,sha256=0Hs03791zlDN-3fk6LENApaGrxls5BE9DC7qH81x858,397
  requirements.txt,sha256=iL8Nj0cyoHnj0Uk1jzVUr1DnaosD-nXZYVGXnSW2B8A,299
  setup.cfg,sha256=HEc8uu6NpfxG5_AVh5SvXOpEFMNKPPPxgMIAH144vT4,38
  test_requirements.txt,sha256=jpyZzAwbWR_qnoRqWvpBB5MUjIX9jVwynX2D-B-r8aA,487
  .tests/pylint.rc,sha256=w77rayiyvpPxZYrmZ446S0_rLgBEbsR9q6E_W_skQHs,22467
- omniopt2-8753.data/data/bin/.colorfunctions.sh,sha256=xxc08V3Fh_0Je20fkJMRO14u9VCSvMyOiMaDfioEyCY,1098
- omniopt2-8753.data/data/bin/.general.sh,sha256=uyGMN8xNToQ0v50KoiYxm6jRmgf0orroOaodM_Nuq30,2107
- omniopt2-8753.data/data/bin/.helpers.py,sha256=4YPKu3HBjV9SWUhOGlnmBPLtvn2ofbUPqwQrDIPegmM,30848
- omniopt2-8753.data/data/bin/.omniopt.py,sha256=nHtcaTPKJnW9GSG4wuuZrf0bsJ7e0mOyzpoo9KARmpo,448824
- omniopt2-8753.data/data/bin/.omniopt_plot_cpu_ram_usage.py,sha256=DbOAmdrbcZtsMnHJgHfeRngjtv6zX5J0axyua_dYezc,3932
- omniopt2-8753.data/data/bin/.omniopt_plot_general.py,sha256=3iy-bPef8I5rTB3KRz-TuleMdgKDmVZ6c8LuNQhNwu0,6810
- omniopt2-8753.data/data/bin/.omniopt_plot_gpu_usage.py,sha256=ojxVicwSoiyl7f3c-6lLuT2EpyPcSJKEcbp75LgDY2k,5107
- omniopt2-8753.data/data/bin/.omniopt_plot_kde.py,sha256=uRLWr72TDKvj3AqJ0O0AvkKZ1ok1O1QpXnbfQQdo0nA,6873
- omniopt2-8753.data/data/bin/.omniopt_plot_scatter.py,sha256=b0_CIqgyi6PztaUVJRL9X9XBTaOonh-yDH2hRxMGkH0,8403
- omniopt2-8753.data/data/bin/.omniopt_plot_scatter_generation_method.py,sha256=rgKY_w1E516c9UucVaEvaKd8tCnoUq9xg-RrYSDzYEQ,4289
- omniopt2-8753.data/data/bin/.omniopt_plot_scatter_hex.py,sha256=UKjw40c1eumgEcf0xqB-_SakX5PB6HD3u4VwBxbsgQo,10279
- omniopt2-8753.data/data/bin/.omniopt_plot_time_and_exit_code.py,sha256=WUyl2uI59wsC1eSX_5uJHOrqcF-s5cUDIEu8u3IFMLU,6462
- omniopt2-8753.data/data/bin/.omniopt_plot_trial_index_result.py,sha256=Xlo_dYTQNnxPhYakmgMM58CCcW9gUoXZ3gLap9RScTY,4578
- omniopt2-8753.data/data/bin/.omniopt_plot_worker.py,sha256=VuluQq4W6KRR5RU08dxmDSFk5mbfDRkRJQFwwcLgAGw,4524
- omniopt2-8753.data/data/bin/.random_generator.py,sha256=ezBBUXpez_QaGdpCglMcJ0KZPdQP0XdX5gnLzO1xhwU,2987
- omniopt2-8753.data/data/bin/.shellscript_functions,sha256=DxcCtTZBRxFp6P6EesUM4JcxqeCSsZOb7rLXbmBdQt8,13628
- omniopt2-8753.data/data/bin/.tpe.py,sha256=93yjviAtxPRYUCpSIShW_H3QQ5JPNcnXNjF0g8zc7Wc,6766
- omniopt2-8753.data/data/bin/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
- omniopt2-8753.data/data/bin/apt-dependencies.txt,sha256=X5tBB8ZLW9XaFtRh8B7C2pIkSoxNNawqioDr0QZAtuM,149
- omniopt2-8753.data/data/bin/omniopt,sha256=uZ8TOePdCdnwu2Oy96rS4aUrY2CyC2QoBZznSgSjal0,51841
- omniopt2-8753.data/data/bin/omniopt_docker,sha256=XyEGZ71l-tUlAYUluVF39fGO3NLDRa8UyrwxmAsXWNI,3542
- omniopt2-8753.data/data/bin/omniopt_evaluate,sha256=9oBh0_ikCuIz_aJQZrN0j39NDiIDYoSvEFmSVIoFjJE,23842
- omniopt2-8753.data/data/bin/omniopt_plot,sha256=ze6Dz2k0w5jyUnfatAA7dk59nzQjKC_G36tDvJGWsfA,13696
- omniopt2-8753.data/data/bin/omniopt_share,sha256=2KZcTjN_7vcxNjb4sLLiWgUc38pl2-6-Qwmgq6_nTb0,13856
- omniopt2-8753.data/data/bin/pylint.rc,sha256=w77rayiyvpPxZYrmZ446S0_rLgBEbsR9q6E_W_skQHs,22467
- omniopt2-8753.data/data/bin/requirements.txt,sha256=iL8Nj0cyoHnj0Uk1jzVUr1DnaosD-nXZYVGXnSW2B8A,299
- omniopt2-8753.data/data/bin/setup.py,sha256=zg7k4jeHXEdaGWGeB1mPC14fFj-hqz1vNimnU0JcMyk,4632
- omniopt2-8753.data/data/bin/test_requirements.txt,sha256=jpyZzAwbWR_qnoRqWvpBB5MUjIX9jVwynX2D-B-r8aA,487
- omniopt2-8753.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
- omniopt2.egg-info/PKG-INFO,sha256=-6Jcuf_ETMVMsQd2MaZd4FkqBXmry3-JdycbB_BMAp4,5725
+ omniopt2-8762.data/data/bin/.colorfunctions.sh,sha256=xxc08V3Fh_0Je20fkJMRO14u9VCSvMyOiMaDfioEyCY,1098
+ omniopt2-8762.data/data/bin/.general.sh,sha256=uyGMN8xNToQ0v50KoiYxm6jRmgf0orroOaodM_Nuq30,2107
+ omniopt2-8762.data/data/bin/.helpers.py,sha256=4YPKu3HBjV9SWUhOGlnmBPLtvn2ofbUPqwQrDIPegmM,30848
+ omniopt2-8762.data/data/bin/.omniopt.py,sha256=PJX6eMEkRBUffU5W43KhvZlrMorGd-Kxd3R1xTeeiuA,449143
+ omniopt2-8762.data/data/bin/.omniopt_plot_cpu_ram_usage.py,sha256=DbOAmdrbcZtsMnHJgHfeRngjtv6zX5J0axyua_dYezc,3932
+ omniopt2-8762.data/data/bin/.omniopt_plot_general.py,sha256=3iy-bPef8I5rTB3KRz-TuleMdgKDmVZ6c8LuNQhNwu0,6810
+ omniopt2-8762.data/data/bin/.omniopt_plot_gpu_usage.py,sha256=ojxVicwSoiyl7f3c-6lLuT2EpyPcSJKEcbp75LgDY2k,5107
+ omniopt2-8762.data/data/bin/.omniopt_plot_kde.py,sha256=uRLWr72TDKvj3AqJ0O0AvkKZ1ok1O1QpXnbfQQdo0nA,6873
+ omniopt2-8762.data/data/bin/.omniopt_plot_scatter.py,sha256=b0_CIqgyi6PztaUVJRL9X9XBTaOonh-yDH2hRxMGkH0,8403
+ omniopt2-8762.data/data/bin/.omniopt_plot_scatter_generation_method.py,sha256=rgKY_w1E516c9UucVaEvaKd8tCnoUq9xg-RrYSDzYEQ,4289
+ omniopt2-8762.data/data/bin/.omniopt_plot_scatter_hex.py,sha256=UKjw40c1eumgEcf0xqB-_SakX5PB6HD3u4VwBxbsgQo,10279
+ omniopt2-8762.data/data/bin/.omniopt_plot_time_and_exit_code.py,sha256=WUyl2uI59wsC1eSX_5uJHOrqcF-s5cUDIEu8u3IFMLU,6462
+ omniopt2-8762.data/data/bin/.omniopt_plot_trial_index_result.py,sha256=Xlo_dYTQNnxPhYakmgMM58CCcW9gUoXZ3gLap9RScTY,4578
+ omniopt2-8762.data/data/bin/.omniopt_plot_worker.py,sha256=VuluQq4W6KRR5RU08dxmDSFk5mbfDRkRJQFwwcLgAGw,4524
+ omniopt2-8762.data/data/bin/.random_generator.py,sha256=ezBBUXpez_QaGdpCglMcJ0KZPdQP0XdX5gnLzO1xhwU,2987
+ omniopt2-8762.data/data/bin/.shellscript_functions,sha256=DxcCtTZBRxFp6P6EesUM4JcxqeCSsZOb7rLXbmBdQt8,13628
+ omniopt2-8762.data/data/bin/.tpe.py,sha256=93yjviAtxPRYUCpSIShW_H3QQ5JPNcnXNjF0g8zc7Wc,6766
+ omniopt2-8762.data/data/bin/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+ omniopt2-8762.data/data/bin/apt-dependencies.txt,sha256=X5tBB8ZLW9XaFtRh8B7C2pIkSoxNNawqioDr0QZAtuM,149
+ omniopt2-8762.data/data/bin/omniopt,sha256=MgQAN6Gcn21TszmFuY87XsRUwO4uMmbz4fW5QpGSH-I,51544
+ omniopt2-8762.data/data/bin/omniopt_docker,sha256=XyEGZ71l-tUlAYUluVF39fGO3NLDRa8UyrwxmAsXWNI,3542
+ omniopt2-8762.data/data/bin/omniopt_evaluate,sha256=9oBh0_ikCuIz_aJQZrN0j39NDiIDYoSvEFmSVIoFjJE,23842
+ omniopt2-8762.data/data/bin/omniopt_plot,sha256=ze6Dz2k0w5jyUnfatAA7dk59nzQjKC_G36tDvJGWsfA,13696
+ omniopt2-8762.data/data/bin/omniopt_share,sha256=2KZcTjN_7vcxNjb4sLLiWgUc38pl2-6-Qwmgq6_nTb0,13856
+ omniopt2-8762.data/data/bin/pylint.rc,sha256=w77rayiyvpPxZYrmZ446S0_rLgBEbsR9q6E_W_skQHs,22467
+ omniopt2-8762.data/data/bin/requirements.txt,sha256=iL8Nj0cyoHnj0Uk1jzVUr1DnaosD-nXZYVGXnSW2B8A,299
+ omniopt2-8762.data/data/bin/setup.py,sha256=zg7k4jeHXEdaGWGeB1mPC14fFj-hqz1vNimnU0JcMyk,4632
+ omniopt2-8762.data/data/bin/test_requirements.txt,sha256=jpyZzAwbWR_qnoRqWvpBB5MUjIX9jVwynX2D-B-r8aA,487
+ omniopt2-8762.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+ omniopt2.egg-info/PKG-INFO,sha256=BQ4o-T2duRAU1QtUSAJrEK4YvkgNb-eONCFIvwNWiZ4,5725
  omniopt2.egg-info/SOURCES.txt,sha256=2mSVIhM8Ncyl5_s8ZFBlb6qjRDbTCQmV9FwVVjp5ROE,795
  omniopt2.egg-info/dependency_links.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
  omniopt2.egg-info/requires.txt,sha256=0gjAtya3hgDAlLXSjgndq_waff89WU6_cQ6jlLyFn2w,786
  omniopt2.egg-info/top_level.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
- omniopt2-8753.dist-info/METADATA,sha256=-6Jcuf_ETMVMsQd2MaZd4FkqBXmry3-JdycbB_BMAp4,5725
- omniopt2-8753.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- omniopt2-8753.dist-info/top_level.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
- omniopt2-8753.dist-info/RECORD,,
+ omniopt2-8762.dist-info/METADATA,sha256=BQ4o-T2duRAU1QtUSAJrEK4YvkgNb-eONCFIvwNWiZ4,5725
+ omniopt2-8762.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ omniopt2-8762.dist-info/top_level.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+ omniopt2-8762.dist-info/RECORD,,
omniopt2.egg-info/PKG-INFO CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: omniopt2
- Version: 8753
+ Version: 8762
  Summary: Automatic highly parallelized hyperparameter optimizer based on Ax/Botorch
  Home-page: https://scads.ai/transfer-2/verfuegbare-software-dienste-en/omniopt/
  Author: Norman Koch
pyproject.toml CHANGED
@@ -5,7 +5,7 @@ authors = [
  {email = "norman.koch@tu-dresden.de"},
  {name = "Norman Koch"}
  ]
- version = "8753"
+ version = "8762"
 
  readme = "README.md"
  dynamic = ["dependencies"]