omniopt2 6964__py3-none-any.whl → 6965__py3-none-any.whl

This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
Files changed (34)
  1. .omniopt.py +19 -28
  2. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/.omniopt.py +19 -28
  3. {omniopt2-6964.dist-info → omniopt2-6965.dist-info}/METADATA +1 -1
  4. {omniopt2-6964.dist-info → omniopt2-6965.dist-info}/RECORD +34 -34
  5. omniopt2.egg-info/PKG-INFO +1 -1
  6. pyproject.toml +1 -1
  7. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/.colorfunctions.sh +0 -0
  8. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/.general.sh +0 -0
  9. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/.helpers.py +0 -0
  10. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/.omniopt_plot_cpu_ram_usage.py +0 -0
  11. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/.omniopt_plot_general.py +0 -0
  12. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/.omniopt_plot_gpu_usage.py +0 -0
  13. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/.omniopt_plot_kde.py +0 -0
  14. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/.omniopt_plot_scatter.py +0 -0
  15. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/.omniopt_plot_scatter_generation_method.py +0 -0
  16. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/.omniopt_plot_scatter_hex.py +0 -0
  17. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/.omniopt_plot_time_and_exit_code.py +0 -0
  18. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/.omniopt_plot_trial_index_result.py +0 -0
  19. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/.omniopt_plot_worker.py +0 -0
  20. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/.random_generator.py +0 -0
  21. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/.shellscript_functions +0 -0
  22. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/LICENSE +0 -0
  23. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/apt-dependencies.txt +0 -0
  24. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/omniopt +0 -0
  25. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/omniopt_docker +0 -0
  26. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/omniopt_evaluate +0 -0
  27. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/omniopt_plot +0 -0
  28. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/omniopt_share +0 -0
  29. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/requirements.txt +0 -0
  30. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/setup.py +0 -0
  31. {omniopt2-6964.data → omniopt2-6965.data}/data/bin/test_requirements.txt +0 -0
  32. {omniopt2-6964.dist-info → omniopt2-6965.dist-info}/WHEEL +0 -0
  33. {omniopt2-6964.dist-info → omniopt2-6965.dist-info}/licenses/LICENSE +0 -0
  34. {omniopt2-6964.dist-info → omniopt2-6965.dist-info}/top_level.txt +0 -0
.omniopt.py CHANGED
@@ -240,7 +240,7 @@ with console.status("[bold green]Importing rich_argparse...") as status:
 try:
     from rich_argparse import RichHelpFormatter
 except ModuleNotFoundError:
-    RichHelpFormatter = argparse.HelpFormatter # type: ignore
+    RichHelpFormatter = argparse.HelpFormatter
 
 @beartype
 def makedirs(p: str) -> bool:
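The changed line only drops a "# type: ignore" comment; the surrounding code is the usual optional-dependency fallback, using the rich_argparse formatter when it is installed and argparse's built-in formatter otherwise. A minimal standalone sketch of that pattern (not the project's code; the parser and flag below are invented for illustration):

    import argparse

    try:
        # Optional dependency: nicer, colorized --help output.
        from rich_argparse import RichHelpFormatter
    except ModuleNotFoundError:
        # Fallback: --help still works, just without rich styling.
        RichHelpFormatter = argparse.HelpFormatter

    # Hypothetical parser, only to show where the formatter class is plugged in.
    parser = argparse.ArgumentParser(description="demo", formatter_class=RichHelpFormatter)
    parser.add_argument("--example-flag", action="store_true")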
@@ -1879,7 +1879,6 @@ def get_line_info() -> Any:
 
 frame_info = stack[1]
 
-# fallbacks bei Problemen mit Encoding oder Zugriffsfehlern
 try:
     filename = str(frame_info.filename)
 except Exception as e:
@@ -1900,7 +1899,6 @@ def get_line_info() -> Any:
     return (filename, ":", lineno, ":", function)
 
 except Exception as e:
-    # finaler Fallback, wenn gar nichts geht
     return ("<exception in get_line_info>", ":", -1, ":", str(e))
 
 @beartype
@@ -6186,7 +6184,7 @@ def finish_job_core(job: Any, trial_index: int, this_jobs_finished: int) -> int:
 try:
     _finish_job_core_helper_mark_success(_trial, result)
 
-    if len(arg_result_names) > 1 and count_done_jobs() > 1 and job_calculate_pareto_front(get_current_run_folder(), True):
+    if len(arg_result_names) > 1 and count_done_jobs() > 1 and not job_calculate_pareto_front(get_current_run_folder(), True):
         print_red("job_calculate_pareto_front post job failed")
 except Exception as e:
     print(f"ERROR in line {get_line_info()}: {e}")
@@ -7163,7 +7161,7 @@ def plot_times_vs_jobs_sixel(
 fig, _ax = plt.subplots()
 
 iterations = list(range(1, len(times) + 1))
-sizes = [max(20, min(200, jc * 10)) for jc in job_counts] # Punktgröße je nach Jobanzahl, skaliert
+sizes = [max(20, min(200, jc * 10)) for jc in job_counts]
 
 scatter = _ax.scatter(iterations, times, s=sizes, c=job_counts, cmap='viridis', alpha=0.7, edgecolors='black')
 
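The sizes expression, unchanged apart from its dropped German comment, clamps each scatter-marker size to the range 20 to 200 while scaling it with the job count (10 points per job). A tiny illustration with made-up job counts:

    # Made-up job counts, purely to show how max(20, min(200, jc * 10)) behaves.
    job_counts = [1, 5, 30]
    sizes = [max(20, min(200, jc * 10)) for jc in job_counts]
    print(sizes)  # [20, 50, 200]: small counts are floored at 20, large ones capped at 200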
@@ -8325,13 +8323,11 @@ def _pareto_front_aggregate_data(path_to_calculate: str) -> Optional[Dict[Tuple[
 if not os.path.exists(results_csv_file) or not os.path.exists(result_names_file):
     return None
 
-# Lade die Ergebnisnamen
 with open(result_names_file, mode="r", encoding="utf-8") as f:
     result_names = [line.strip() for line in f if line.strip()]
 
 records: dict = defaultdict(lambda: {'means': {}})
 
-# Lese die CSV-Datei
 with open(results_csv_file, encoding="utf-8", mode="r", newline='') as csvfile:
     reader = csv.DictReader(csvfile)
     for row in reader:
@@ -8344,7 +8340,7 @@ def _pareto_front_aggregate_data(path_to_calculate: str) -> Optional[Dict[Tuple[
 try:
     records[key]['means'][metric] = float(row[metric])
 except ValueError:
-    continue # Wenn der Wert nicht konvertierbar ist
+    continue
 
 return records
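For context, _pareto_front_aggregate_data collects per-trial metric means into a nested defaultdict and skips cells that cannot be parsed as floats. A self-contained sketch of that aggregation, assuming a (trial_index, arm_name) key and using invented metric names and rows:

    import csv
    import io
    from collections import defaultdict

    # Invented stand-in for results.csv; "loss" and "runtime" are made-up result names.
    csv_text = "trial_index,arm_name,loss,runtime\n0,0_0,0.5,12\n1,1_0,not_a_number,9\n"
    result_names = ["loss", "runtime"]

    records: dict = defaultdict(lambda: {'means': {}})
    for row in csv.DictReader(io.StringIO(csv_text)):
        key = (int(row["trial_index"]), row["arm_name"])  # assumed key shape
        for metric in result_names:
            try:
                records[key]['means'][metric] = float(row[metric])
            except ValueError:
                continue  # non-numeric cell: this metric is simply left out for this record

    print(dict(records))
    # {(0, '0_0'): {'means': {'loss': 0.5, 'runtime': 12.0}}, (1, '1_0'): {'means': {'runtime': 9.0}}}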
@@ -8416,11 +8412,9 @@ def _pareto_front_build_return_structure(
 results_csv_file = f"{path_to_calculate}/results.csv"
 result_names_file = f"{path_to_calculate}/result_names.txt"
 
-# Lade die Ergebnisnamen
 with open(result_names_file, mode="r", encoding="utf-8") as f:
     result_names = [line.strip() for line in f if line.strip()]
 
-# CSV komplett in dict laden (trial_index als int -> row dict)
 csv_rows = {}
 with open(results_csv_file, mode="r", encoding="utf-8", newline='') as csvfile:
     reader = csv.DictReader(csvfile)
@@ -8428,7 +8422,6 @@ def _pareto_front_build_return_structure(
     trial_index = int(row['trial_index'])
     csv_rows[trial_index] = row
 
-# Statische Spalten, die keine Parameter sind
 ignored_columns = {'trial_index', 'arm_name', 'trial_status', 'generation_node'}
 ignored_columns.update(result_names)
 
@@ -8439,11 +8432,10 @@ def _pareto_front_build_return_structure(
 for (trial_index, arm_name), _, _ in selected_points:
     row = csv_rows.get(trial_index)
     if row is None or row['arm_name'] != arm_name:
-        continue # Sicherheitshalber prüfen
+        continue
 
     idxs.append(int(row["trial_index"]))
 
-    # Parameter extrahieren
     param_dict = {}
     for key, value in row.items():
         if key not in ignored_columns:
@@ -8453,7 +8445,7 @@ def _pareto_front_build_return_structure(
 try:
     param_dict[key] = float(value)
 except ValueError:
-    param_dict[key] = value # z.B. choice_param als String
+    param_dict[key] = value
 
 param_dicts.append(param_dict)
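The last two hunks above keep the try/except float() coercion while dropping its German comments: values that parse become floats, and anything that does not (for example a categorical/choice parameter) is kept as the raw string. A small standalone sketch with a made-up CSV row:

    # Hypothetical row from results.csv; "lr" is numeric, "optimizer" is a choice parameter.
    row = {"lr": "0.01", "optimizer": "adam", "trial_index": "0"}
    ignored_columns = {"trial_index"}

    param_dict = {}
    for key, value in row.items():
        if key not in ignored_columns:
            try:
                param_dict[key] = float(value)   # numeric parameters become floats
            except ValueError:
                param_dict[key] = value          # e.g. a choice parameter stays a string

    print(param_dict)  # {'lr': 0.01, 'optimizer': 'adam'}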
@@ -8679,7 +8671,7 @@ def show_pareto_frontier_data(path_to_calculate: str, res_names: list, disable_s
 
 pareto_front_data: dict = get_pareto_front_data(path_to_calculate, res_names)
 
-pareto_points = {}
+pareto_points: dict = {}
 
 for metric_x in pareto_front_data.keys():
     if metric_x not in pareto_points:
@@ -9029,7 +9021,7 @@ def post_job_calculate_pareto_front() -> None:
 
 for _path_to_calculate in _paths_to_calculate:
     for path_to_calculate in found_paths:
-        if job_calculate_pareto_front(path_to_calculate):
+        if not job_calculate_pareto_front(path_to_calculate):
            failure = True
 
 if failure:
@@ -9041,9 +9033,8 @@ def post_job_calculate_pareto_front() -> None:
 def job_calculate_pareto_front(path_to_calculate: str, disable_sixel_and_table: bool = False) -> bool:
     pf_start_time = time.time()
 
-    # Returns true if it fails
     if not path_to_calculate:
-        return True
+        return False
 
     global CURRENT_RUN_FOLDER
     global RESULT_CSV_FILE
@@ -9051,41 +9042,41 @@ def job_calculate_pareto_front(path_to_calculate: str, disable_sixel_and_table:
 
 if not path_to_calculate:
     print_red("Can only calculate pareto front of previous job when --calculate_pareto_front_of_job is set")
-    return True
+    return False
 
 if not os.path.exists(path_to_calculate):
     print_red(f"Path '{path_to_calculate}' does not exist")
-    return True
+    return False
 
 ax_client_json = f"{path_to_calculate}/state_files/ax_client.experiment.json"
 
 if not os.path.exists(ax_client_json):
     print_red(f"Path '{ax_client_json}' not found")
-    return True
+    return False
 
 checkpoint_file: str = f"{path_to_calculate}/state_files/checkpoint.json"
 if not os.path.exists(checkpoint_file):
     print_red(f"The checkpoint file '{checkpoint_file}' does not exist")
-    return True
+    return False
 
 RESULT_CSV_FILE = f"{path_to_calculate}/results.csv"
 if not os.path.exists(RESULT_CSV_FILE):
     print_red(f"{RESULT_CSV_FILE} not found")
-    return True
+    return False
 
 res_names = []
 
 res_names_file = f"{path_to_calculate}/result_names.txt"
 if not os.path.exists(res_names_file):
     print_red(f"File '{res_names_file}' does not exist")
-    return True
+    return False
 
 try:
     with open(res_names_file, "r", encoding="utf-8") as file:
         lines = file.readlines()
 except Exception as e:
     print_red(f"Error reading file '{res_names_file}': {e}")
-    return True
+    return False
 
 for line in lines:
     entry = line.strip()
@@ -9094,7 +9085,7 @@ def job_calculate_pareto_front(path_to_calculate: str, disable_sixel_and_table:
 
 if len(res_names) < 2:
     print_red(f"Error: There are less than 2 result names (is: {len(res_names)}, {', '.join(res_names)}) in {path_to_calculate}. Cannot continue calculating the pareto front.")
-    return True
+    return False
 
 load_username_to_args(path_to_calculate)
 
@@ -9105,7 +9096,7 @@ def job_calculate_pareto_front(path_to_calculate: str, disable_sixel_and_table:
 experiment_parameters = load_experiment_parameters_from_checkpoint_file(checkpoint_file, False)
 
 if experiment_parameters is None:
-    return True
+    return False
 
 show_pareto_or_error_msg(path_to_calculate, res_names, disable_sixel_and_table)
 
@@ -9113,7 +9104,7 @@ def job_calculate_pareto_front(path_to_calculate: str, disable_sixel_and_table:
 
 print_debug(f"Calculating the pareto-front took {pf_end_time - pf_start_time} seconds")
 
-return False
+return True
 
 @beartype
 def set_arg_states_from_continue() -> None:
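Taken together, the remaining hunks invert the boolean convention of job_calculate_pareto_front: the old code returned True on failure (hence the removed "# Returns true if it fails" comment), the new code returns True on success, and both call sites now negate the result. A simplified sketch of the new caller pattern (the stub below is not the real function, which does far more than shown):

    def job_calculate_pareto_front(path_to_calculate: str, disable_sixel_and_table: bool = False) -> bool:
        # Stub for illustration: False now signals failure, True signals success.
        if not path_to_calculate:
            return False
        return True

    # Old call sites treated a True return as failure:
    #     if job_calculate_pareto_front(path): failure = True
    # New call sites negate, because True now means success:
    if not job_calculate_pareto_front("runs/example_run"):  # "runs/example_run" is a made-up path
        print("job_calculate_pareto_front post job failed")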
{omniopt2-6964.data → omniopt2-6965.data}/data/bin/.omniopt.py CHANGED
The hunks for this bundled copy are identical to those shown for .omniopt.py above (+19 -28).
{omniopt2-6964.dist-info → omniopt2-6965.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: omniopt2
-Version: 6964
+Version: 6965
 Summary: Automatic highly parallelized hyperparameter optimizer based on Ax/Botorch
 Home-page: https://scads.ai/transfer-2/verfuegbare-software-dienste-en/omniopt/
 Author: Norman Koch
{omniopt2-6964.dist-info → omniopt2-6965.dist-info}/RECORD CHANGED
@@ -3,7 +3,7 @@
 .general.sh,sha256=uyGMN8xNToQ0v50KoiYxm6jRmgf0orroOaodM_Nuq30,2107
 .gitignore,sha256=OMaFWOR6wxjAlI85rF3euQcjQFFAl1F34abZkltKnaU,3714
 .helpers.py,sha256=G4TLhtj9u1GdAB9ACnjscsqz-Eu85ew1QAAU93ctcRQ,30432
-.omniopt.py,sha256=lWIf-18E0aOuIkpOS1I3I_9TbVoxADpcMnjp-XD-iL8,372679
+.omniopt.py,sha256=562OQjW2VtgnA3_ZZsBvO-3OYymrU_XUtTTGIGxqe5k,372160
 .omniopt_plot_cpu_ram_usage.py,sha256=DbOAmdrbcZtsMnHJgHfeRngjtv6zX5J0axyua_dYezc,3932
 .omniopt_plot_general.py,sha256=ZERZJkvVOoJhi7SszmTF1Iln-_08_0Aki48u3LHUW-k,6809
 .omniopt_plot_gpu_usage.py,sha256=bQmjc1Xq_9rlJhiEvy93Q5hXGs4ZKRVg3v7uaMrv9Go,5108
@@ -25,43 +25,43 @@ omniopt_docker,sha256=LWVUeyvmA5AKqAHiH9jBUkR5uZ6AHMnSy0eET7mK6E4,3602
 omniopt_evaluate,sha256=9oBh0_ikCuIz_aJQZrN0j39NDiIDYoSvEFmSVIoFjJE,23842
 omniopt_plot,sha256=Z8ZR10p-ZRSgMeVPO-wVCJ8lk-LQtntjZ9Bk9RifCIs,13360
 omniopt_share,sha256=7g5I7YdoWcA6_GDwWwq0xPf23qiVc_VDrm9ySLH7SH0,14051
-pyproject.toml,sha256=T9zkntHYD5IdiBclRB6l-BCvbfoWjM4-bKKo7pc5ccA,397
+pyproject.toml,sha256=Gj_VXJvHAAPG7y5FrRExNChjen8aphooIEezjpA9C80,397
 requirements.txt,sha256=4-CwlGCOMAexOgLozRy9_5HwpQVsZFALR_uv69xGsgE,308
 setup.cfg,sha256=HEc8uu6NpfxG5_AVh5SvXOpEFMNKPPPxgMIAH144vT4,38
 test_requirements.txt,sha256=dnCbKmKalrVzNZ_-iQWf1xCxcnDsdGuhbDAr9XlGm-U,477
-omniopt2-6964.data/data/bin/.colorfunctions.sh,sha256=CDlgjwrsrHR_E6c-Qak5wZlotArXm-nf9sVvXePzGZA,1083
-omniopt2-6964.data/data/bin/.general.sh,sha256=uyGMN8xNToQ0v50KoiYxm6jRmgf0orroOaodM_Nuq30,2107
-omniopt2-6964.data/data/bin/.helpers.py,sha256=G4TLhtj9u1GdAB9ACnjscsqz-Eu85ew1QAAU93ctcRQ,30432
-omniopt2-6964.data/data/bin/.omniopt.py,sha256=lWIf-18E0aOuIkpOS1I3I_9TbVoxADpcMnjp-XD-iL8,372679
-omniopt2-6964.data/data/bin/.omniopt_plot_cpu_ram_usage.py,sha256=DbOAmdrbcZtsMnHJgHfeRngjtv6zX5J0axyua_dYezc,3932
-omniopt2-6964.data/data/bin/.omniopt_plot_general.py,sha256=ZERZJkvVOoJhi7SszmTF1Iln-_08_0Aki48u3LHUW-k,6809
-omniopt2-6964.data/data/bin/.omniopt_plot_gpu_usage.py,sha256=bQmjc1Xq_9rlJhiEvy93Q5hXGs4ZKRVg3v7uaMrv9Go,5108
-omniopt2-6964.data/data/bin/.omniopt_plot_kde.py,sha256=uRLWr72TDKvj3AqJ0O0AvkKZ1ok1O1QpXnbfQQdo0nA,6873
-omniopt2-6964.data/data/bin/.omniopt_plot_scatter.py,sha256=VFkly4lbmXeegWoyiu1fLO82JThPJa_iYVZIAPGPP64,8670
-omniopt2-6964.data/data/bin/.omniopt_plot_scatter_generation_method.py,sha256=rgKY_w1E516c9UucVaEvaKd8tCnoUq9xg-RrYSDzYEQ,4289
-omniopt2-6964.data/data/bin/.omniopt_plot_scatter_hex.py,sha256=6014iELQcS83WAAwOqVIUMZQewNP4jntlTQncTY3NTA,10527
-omniopt2-6964.data/data/bin/.omniopt_plot_time_and_exit_code.py,sha256=PFDJt2dDCWSsZntDsDa5imfRLF0h5lCExH8sFp8ptVs,6465
-omniopt2-6964.data/data/bin/.omniopt_plot_trial_index_result.py,sha256=5DmqZAQO_PFmzdap-TIhSMAshRXpOHQacnHAtjwnzN4,4629
-omniopt2-6964.data/data/bin/.omniopt_plot_worker.py,sha256=bD-byr3NTnJ0qF4M_CogdXnhw7sVrzs1eFoqTtDjmfE,4590
-omniopt2-6964.data/data/bin/.random_generator.py,sha256=ezBBUXpez_QaGdpCglMcJ0KZPdQP0XdX5gnLzO1xhwU,2987
-omniopt2-6964.data/data/bin/.shellscript_functions,sha256=7IjirQJpC7TCRPuRj1dO2W6A8h5cHt0dVzj6eH_2EiQ,14664
-omniopt2-6964.data/data/bin/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-omniopt2-6964.data/data/bin/apt-dependencies.txt,sha256=X5tBB8ZLW9XaFtRh8B7C2pIkSoxNNawqioDr0QZAtuM,149
-omniopt2-6964.data/data/bin/omniopt,sha256=RIi0yZALrZGefiejun1fkDJ0LX95WymXd2xZFlglMsQ,48072
-omniopt2-6964.data/data/bin/omniopt_docker,sha256=LWVUeyvmA5AKqAHiH9jBUkR5uZ6AHMnSy0eET7mK6E4,3602
-omniopt2-6964.data/data/bin/omniopt_evaluate,sha256=9oBh0_ikCuIz_aJQZrN0j39NDiIDYoSvEFmSVIoFjJE,23842
-omniopt2-6964.data/data/bin/omniopt_plot,sha256=Z8ZR10p-ZRSgMeVPO-wVCJ8lk-LQtntjZ9Bk9RifCIs,13360
-omniopt2-6964.data/data/bin/omniopt_share,sha256=7g5I7YdoWcA6_GDwWwq0xPf23qiVc_VDrm9ySLH7SH0,14051
-omniopt2-6964.data/data/bin/requirements.txt,sha256=4-CwlGCOMAexOgLozRy9_5HwpQVsZFALR_uv69xGsgE,308
-omniopt2-6964.data/data/bin/setup.py,sha256=g3uEqJHXhggXwgLYoxOjsXg9Z6IV1ubh-Og59AZ264Q,4648
-omniopt2-6964.data/data/bin/test_requirements.txt,sha256=dnCbKmKalrVzNZ_-iQWf1xCxcnDsdGuhbDAr9XlGm-U,477
-omniopt2-6964.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-omniopt2.egg-info/PKG-INFO,sha256=ps0xrSlQVX5ME6bCzBPVZBl-v0Znd7nK55mKRsGEx1k,5286
+omniopt2-6965.data/data/bin/.colorfunctions.sh,sha256=CDlgjwrsrHR_E6c-Qak5wZlotArXm-nf9sVvXePzGZA,1083
+omniopt2-6965.data/data/bin/.general.sh,sha256=uyGMN8xNToQ0v50KoiYxm6jRmgf0orroOaodM_Nuq30,2107
+omniopt2-6965.data/data/bin/.helpers.py,sha256=G4TLhtj9u1GdAB9ACnjscsqz-Eu85ew1QAAU93ctcRQ,30432
+omniopt2-6965.data/data/bin/.omniopt.py,sha256=562OQjW2VtgnA3_ZZsBvO-3OYymrU_XUtTTGIGxqe5k,372160
+omniopt2-6965.data/data/bin/.omniopt_plot_cpu_ram_usage.py,sha256=DbOAmdrbcZtsMnHJgHfeRngjtv6zX5J0axyua_dYezc,3932
+omniopt2-6965.data/data/bin/.omniopt_plot_general.py,sha256=ZERZJkvVOoJhi7SszmTF1Iln-_08_0Aki48u3LHUW-k,6809
+omniopt2-6965.data/data/bin/.omniopt_plot_gpu_usage.py,sha256=bQmjc1Xq_9rlJhiEvy93Q5hXGs4ZKRVg3v7uaMrv9Go,5108
+omniopt2-6965.data/data/bin/.omniopt_plot_kde.py,sha256=uRLWr72TDKvj3AqJ0O0AvkKZ1ok1O1QpXnbfQQdo0nA,6873
+omniopt2-6965.data/data/bin/.omniopt_plot_scatter.py,sha256=VFkly4lbmXeegWoyiu1fLO82JThPJa_iYVZIAPGPP64,8670
+omniopt2-6965.data/data/bin/.omniopt_plot_scatter_generation_method.py,sha256=rgKY_w1E516c9UucVaEvaKd8tCnoUq9xg-RrYSDzYEQ,4289
+omniopt2-6965.data/data/bin/.omniopt_plot_scatter_hex.py,sha256=6014iELQcS83WAAwOqVIUMZQewNP4jntlTQncTY3NTA,10527
+omniopt2-6965.data/data/bin/.omniopt_plot_time_and_exit_code.py,sha256=PFDJt2dDCWSsZntDsDa5imfRLF0h5lCExH8sFp8ptVs,6465
+omniopt2-6965.data/data/bin/.omniopt_plot_trial_index_result.py,sha256=5DmqZAQO_PFmzdap-TIhSMAshRXpOHQacnHAtjwnzN4,4629
+omniopt2-6965.data/data/bin/.omniopt_plot_worker.py,sha256=bD-byr3NTnJ0qF4M_CogdXnhw7sVrzs1eFoqTtDjmfE,4590
+omniopt2-6965.data/data/bin/.random_generator.py,sha256=ezBBUXpez_QaGdpCglMcJ0KZPdQP0XdX5gnLzO1xhwU,2987
+omniopt2-6965.data/data/bin/.shellscript_functions,sha256=7IjirQJpC7TCRPuRj1dO2W6A8h5cHt0dVzj6eH_2EiQ,14664
+omniopt2-6965.data/data/bin/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+omniopt2-6965.data/data/bin/apt-dependencies.txt,sha256=X5tBB8ZLW9XaFtRh8B7C2pIkSoxNNawqioDr0QZAtuM,149
+omniopt2-6965.data/data/bin/omniopt,sha256=RIi0yZALrZGefiejun1fkDJ0LX95WymXd2xZFlglMsQ,48072
+omniopt2-6965.data/data/bin/omniopt_docker,sha256=LWVUeyvmA5AKqAHiH9jBUkR5uZ6AHMnSy0eET7mK6E4,3602
+omniopt2-6965.data/data/bin/omniopt_evaluate,sha256=9oBh0_ikCuIz_aJQZrN0j39NDiIDYoSvEFmSVIoFjJE,23842
+omniopt2-6965.data/data/bin/omniopt_plot,sha256=Z8ZR10p-ZRSgMeVPO-wVCJ8lk-LQtntjZ9Bk9RifCIs,13360
+omniopt2-6965.data/data/bin/omniopt_share,sha256=7g5I7YdoWcA6_GDwWwq0xPf23qiVc_VDrm9ySLH7SH0,14051
+omniopt2-6965.data/data/bin/requirements.txt,sha256=4-CwlGCOMAexOgLozRy9_5HwpQVsZFALR_uv69xGsgE,308
+omniopt2-6965.data/data/bin/setup.py,sha256=g3uEqJHXhggXwgLYoxOjsXg9Z6IV1ubh-Og59AZ264Q,4648
+omniopt2-6965.data/data/bin/test_requirements.txt,sha256=dnCbKmKalrVzNZ_-iQWf1xCxcnDsdGuhbDAr9XlGm-U,477
+omniopt2-6965.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+omniopt2.egg-info/PKG-INFO,sha256=-fT2Ivfev1qaA-IY_XpAdk-WwrNzq3J7G4RaukIcKjY,5286
 omniopt2.egg-info/SOURCES.txt,sha256=kXBlYs2_3BE6tKUH1egGFa_9X5w8EQ5pm9dKgPEvdhY,770
 omniopt2.egg-info/dependency_links.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 omniopt2.egg-info/requires.txt,sha256=4Yz6l6MaP7IW0SiKxbYKVvF0qAVAdyUo6_EgbiMII8E,785
 omniopt2.egg-info/top_level.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-omniopt2-6964.dist-info/METADATA,sha256=ps0xrSlQVX5ME6bCzBPVZBl-v0Znd7nK55mKRsGEx1k,5286
-omniopt2-6964.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-omniopt2-6964.dist-info/top_level.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-omniopt2-6964.dist-info/RECORD,,
+omniopt2-6965.dist-info/METADATA,sha256=-fT2Ivfev1qaA-IY_XpAdk-WwrNzq3J7G4RaukIcKjY,5286
+omniopt2-6965.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+omniopt2-6965.dist-info/top_level.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+omniopt2-6965.dist-info/RECORD,,
omniopt2.egg-info/PKG-INFO CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: omniopt2
-Version: 6964
+Version: 6965
 Summary: Automatic highly parallelized hyperparameter optimizer based on Ax/Botorch
 Home-page: https://scads.ai/transfer-2/verfuegbare-software-dienste-en/omniopt/
 Author: Norman Koch
pyproject.toml CHANGED
@@ -5,7 +5,7 @@ authors = [
     {email = "norman.koch@tu-dresden.de"},
     {name = "Norman Koch"}
 ]
-version = "6964"
+version = "6965"
 
 readme = "README.md"
 dynamic = ["dependencies"]