omniopt2 6964-py3-none-any.whl → 6966-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. .omniopt.py +19 -34
  2. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/.omniopt.py +19 -34
  3. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/requirements.txt +0 -1
  4. {omniopt2-6964.dist-info → omniopt2-6966.dist-info}/METADATA +1 -2
  5. {omniopt2-6964.dist-info → omniopt2-6966.dist-info}/RECORD +36 -36
  6. omniopt2.egg-info/PKG-INFO +1 -2
  7. omniopt2.egg-info/requires.txt +0 -1
  8. pyproject.toml +1 -1
  9. requirements.txt +0 -1
  10. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/.colorfunctions.sh +0 -0
  11. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/.general.sh +0 -0
  12. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/.helpers.py +0 -0
  13. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/.omniopt_plot_cpu_ram_usage.py +0 -0
  14. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/.omniopt_plot_general.py +0 -0
  15. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/.omniopt_plot_gpu_usage.py +0 -0
  16. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/.omniopt_plot_kde.py +0 -0
  17. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/.omniopt_plot_scatter.py +0 -0
  18. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/.omniopt_plot_scatter_generation_method.py +0 -0
  19. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/.omniopt_plot_scatter_hex.py +0 -0
  20. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/.omniopt_plot_time_and_exit_code.py +0 -0
  21. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/.omniopt_plot_trial_index_result.py +0 -0
  22. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/.omniopt_plot_worker.py +0 -0
  23. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/.random_generator.py +0 -0
  24. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/.shellscript_functions +0 -0
  25. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/LICENSE +0 -0
  26. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/apt-dependencies.txt +0 -0
  27. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/omniopt +0 -0
  28. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/omniopt_docker +0 -0
  29. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/omniopt_evaluate +0 -0
  30. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/omniopt_plot +0 -0
  31. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/omniopt_share +0 -0
  32. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/setup.py +0 -0
  33. {omniopt2-6964.data → omniopt2-6966.data}/data/bin/test_requirements.txt +0 -0
  34. {omniopt2-6964.dist-info → omniopt2-6966.dist-info}/WHEEL +0 -0
  35. {omniopt2-6964.dist-info → omniopt2-6966.dist-info}/licenses/LICENSE +0 -0
  36. {omniopt2-6964.dist-info → omniopt2-6966.dist-info}/top_level.txt +0 -0
.omniopt.py CHANGED
@@ -236,12 +236,6 @@ except KeyboardInterrupt:
 def fool_linter(*fool_linter_args: Any) -> Any:
     return fool_linter_args
 
-with console.status("[bold green]Importing rich_argparse...") as status:
-    try:
-        from rich_argparse import RichHelpFormatter
-    except ModuleNotFoundError:
-        RichHelpFormatter = argparse.HelpFormatter  # type: ignore
-
 @beartype
 def makedirs(p: str) -> bool:
     if not os.path.exists(p):
@@ -591,7 +585,7 @@ class ConfigLoader:
             prog="omniopt",
             description='A hyperparameter optimizer for slurm-based HPC-systems',
             epilog=f"Example:\n\n{oo_call} --partition=alpha --experiment_name=neural_network ...",
-            formatter_class=RichHelpFormatter
+            formatter_class=argparse.HelpFormatter
         )
 
         self.parser.add_argument('--config_yaml', help='YAML configuration file', type=str, default=None)
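Taken together, these first two hunks drop the optional rich-argparse dependency: 6964 imported RichHelpFormatter with a fallback to argparse.HelpFormatter when the module was missing, while 6966 passes argparse.HelpFormatter directly. A minimal sketch of the pattern that was removed (the parser arguments shown are illustrative, not the full omniopt option set):

import argparse

try:
    # optional dependency in omniopt2 <= 6964 (removed in 6966)
    from rich_argparse import RichHelpFormatter
except ModuleNotFoundError:
    # fall back to plain argparse help formatting
    RichHelpFormatter = argparse.HelpFormatter  # type: ignore

parser = argparse.ArgumentParser(
    prog="omniopt",
    description="A hyperparameter optimizer for slurm-based HPC-systems",
    formatter_class=RichHelpFormatter,  # 6966 passes argparse.HelpFormatter here unconditionally
)
parser.add_argument("--config_yaml", type=str, default=None, help="YAML configuration file")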
@@ -1879,7 +1873,6 @@ def get_line_info() -> Any:
 
     frame_info = stack[1]
 
-    # fallbacks bei Problemen mit Encoding oder Zugriffsfehlern
     try:
         filename = str(frame_info.filename)
     except Exception as e:
@@ -1900,7 +1893,6 @@ def get_line_info() -> Any:
         return (filename, ":", lineno, ":", function)
 
     except Exception as e:
-        # finaler Fallback, wenn gar nichts geht
         return ("<exception in get_line_info>", ":", -1, ":", str(e))
 
 @beartype
@@ -6186,7 +6178,7 @@ def finish_job_core(job: Any, trial_index: int, this_jobs_finished: int) -> int:
     try:
         _finish_job_core_helper_mark_success(_trial, result)
 
-        if len(arg_result_names) > 1 and count_done_jobs() > 1 and job_calculate_pareto_front(get_current_run_folder(), True):
+        if len(arg_result_names) > 1 and count_done_jobs() > 1 and not job_calculate_pareto_front(get_current_run_folder(), True):
             print_red("job_calculate_pareto_front post job failed")
     except Exception as e:
         print(f"ERROR in line {get_line_info()}: {e}")
@@ -7163,7 +7155,7 @@ def plot_times_vs_jobs_sixel(
     fig, _ax = plt.subplots()
 
     iterations = list(range(1, len(times) + 1))
-    sizes = [max(20, min(200, jc * 10)) for jc in job_counts] # Punktgröße je nach Jobanzahl, skaliert
+    sizes = [max(20, min(200, jc * 10)) for jc in job_counts]
 
     scatter = _ax.scatter(iterations, times, s=sizes, c=job_counts, cmap='viridis', alpha=0.7, edgecolors='black')
 
@@ -8325,13 +8317,11 @@ def _pareto_front_aggregate_data(path_to_calculate: str) -> Optional[Dict[Tuple[
     if not os.path.exists(results_csv_file) or not os.path.exists(result_names_file):
         return None
 
-    # Lade die Ergebnisnamen
     with open(result_names_file, mode="r", encoding="utf-8") as f:
         result_names = [line.strip() for line in f if line.strip()]
 
     records: dict = defaultdict(lambda: {'means': {}})
 
-    # Lese die CSV-Datei
     with open(results_csv_file, encoding="utf-8", mode="r", newline='') as csvfile:
         reader = csv.DictReader(csvfile)
         for row in reader:
@@ -8344,7 +8334,7 @@ def _pareto_front_aggregate_data(path_to_calculate: str) -> Optional[Dict[Tuple[
                 try:
                     records[key]['means'][metric] = float(row[metric])
                 except ValueError:
-                    continue # Wenn der Wert nicht konvertierbar ist
+                    continue
 
     return records
 
@@ -8416,11 +8406,9 @@ def _pareto_front_build_return_structure(
     results_csv_file = f"{path_to_calculate}/results.csv"
     result_names_file = f"{path_to_calculate}/result_names.txt"
 
-    # Lade die Ergebnisnamen
     with open(result_names_file, mode="r", encoding="utf-8") as f:
         result_names = [line.strip() for line in f if line.strip()]
 
-    # CSV komplett in dict laden (trial_index als int -> row dict)
     csv_rows = {}
     with open(results_csv_file, mode="r", encoding="utf-8", newline='') as csvfile:
         reader = csv.DictReader(csvfile)
@@ -8428,7 +8416,6 @@ def _pareto_front_build_return_structure(
             trial_index = int(row['trial_index'])
             csv_rows[trial_index] = row
 
-    # Statische Spalten, die keine Parameter sind
     ignored_columns = {'trial_index', 'arm_name', 'trial_status', 'generation_node'}
     ignored_columns.update(result_names)
 
@@ -8439,11 +8426,10 @@ def _pareto_front_build_return_structure(
     for (trial_index, arm_name), _, _ in selected_points:
         row = csv_rows.get(trial_index)
         if row is None or row['arm_name'] != arm_name:
-            continue # Sicherheitshalber prüfen
+            continue
 
         idxs.append(int(row["trial_index"]))
 
-        # Parameter extrahieren
         param_dict = {}
         for key, value in row.items():
             if key not in ignored_columns:
@@ -8453,7 +8439,7 @@ def _pareto_front_build_return_structure(
                 try:
                     param_dict[key] = float(value)
                 except ValueError:
-                    param_dict[key] = value # z.B. choice_param als String
+                    param_dict[key] = value
 
         param_dicts.append(param_dict)
 
@@ -8679,7 +8665,7 @@ def show_pareto_frontier_data(path_to_calculate: str, res_names: list, disable_s
 
     pareto_front_data: dict = get_pareto_front_data(path_to_calculate, res_names)
 
-    pareto_points = {}
+    pareto_points: dict = {}
 
     for metric_x in pareto_front_data.keys():
         if metric_x not in pareto_points:
@@ -9029,7 +9015,7 @@ def post_job_calculate_pareto_front() -> None:
 
     for _path_to_calculate in _paths_to_calculate:
         for path_to_calculate in found_paths:
-            if job_calculate_pareto_front(path_to_calculate):
+            if not job_calculate_pareto_front(path_to_calculate):
                 failure = True
 
     if failure:
@@ -9041,9 +9027,8 @@ def post_job_calculate_pareto_front() -> None:
 def job_calculate_pareto_front(path_to_calculate: str, disable_sixel_and_table: bool = False) -> bool:
     pf_start_time = time.time()
 
-    # Returns true if it fails
     if not path_to_calculate:
-        return True
+        return False
 
     global CURRENT_RUN_FOLDER
     global RESULT_CSV_FILE
@@ -9051,41 +9036,41 @@ def job_calculate_pareto_front(path_to_calculate: str, disable_sixel_and_table:
 
     if not path_to_calculate:
         print_red("Can only calculate pareto front of previous job when --calculate_pareto_front_of_job is set")
-        return True
+        return False
 
     if not os.path.exists(path_to_calculate):
         print_red(f"Path '{path_to_calculate}' does not exist")
-        return True
+        return False
 
     ax_client_json = f"{path_to_calculate}/state_files/ax_client.experiment.json"
 
     if not os.path.exists(ax_client_json):
         print_red(f"Path '{ax_client_json}' not found")
-        return True
+        return False
 
     checkpoint_file: str = f"{path_to_calculate}/state_files/checkpoint.json"
     if not os.path.exists(checkpoint_file):
         print_red(f"The checkpoint file '{checkpoint_file}' does not exist")
-        return True
+        return False
 
     RESULT_CSV_FILE = f"{path_to_calculate}/results.csv"
     if not os.path.exists(RESULT_CSV_FILE):
         print_red(f"{RESULT_CSV_FILE} not found")
-        return True
+        return False
 
     res_names = []
 
     res_names_file = f"{path_to_calculate}/result_names.txt"
     if not os.path.exists(res_names_file):
         print_red(f"File '{res_names_file}' does not exist")
-        return True
+        return False
 
     try:
         with open(res_names_file, "r", encoding="utf-8") as file:
             lines = file.readlines()
     except Exception as e:
         print_red(f"Error reading file '{res_names_file}': {e}")
-        return True
+        return False
 
     for line in lines:
         entry = line.strip()
@@ -9094,7 +9079,7 @@ def job_calculate_pareto_front(path_to_calculate: str, disable_sixel_and_table:
 
     if len(res_names) < 2:
         print_red(f"Error: There are less than 2 result names (is: {len(res_names)}, {', '.join(res_names)}) in {path_to_calculate}. Cannot continue calculating the pareto front.")
-        return True
+        return False
 
     load_username_to_args(path_to_calculate)
 
@@ -9105,7 +9090,7 @@ def job_calculate_pareto_front(path_to_calculate: str, disable_sixel_and_table:
     experiment_parameters = load_experiment_parameters_from_checkpoint_file(checkpoint_file, False)
 
     if experiment_parameters is None:
-        return True
+        return False
 
     show_pareto_or_error_msg(path_to_calculate, res_names, disable_sixel_and_table)
 
@@ -9113,7 +9098,7 @@ def job_calculate_pareto_front(path_to_calculate: str, disable_sixel_and_table:
 
     print_debug(f"Calculating the pareto-front took {pf_end_time - pf_start_time} seconds")
 
-    return False
+    return True
 
 @beartype
 def set_arg_states_from_continue() -> None:
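The remaining hunks invert the boolean convention of job_calculate_pareto_front: 6964 returned True on failure, whereas 6966 returns True on success and both call sites negate the result. A minimal self-contained sketch of the new convention (print_red and the run-folder path here are simplified stand-ins for the real helpers):

def print_red(msg: str) -> None:
    # stand-in for omniopt2's red console output helper
    print(msg)

def job_calculate_pareto_front(path_to_calculate: str) -> bool:
    # 6966 convention: True means the pareto front was calculated successfully
    if not path_to_calculate:
        return False
    # ... checks for results.csv, result_names.txt, checkpoint.json, then plotting ...
    return True

# call sites (finish_job_core, post_job_calculate_pareto_front) now negate the result
if not job_calculate_pareto_front("runs/example_experiment/0"):
    print_red("job_calculate_pareto_front post job failed")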
{omniopt2-6964.data → omniopt2-6966.data}/data/bin/.omniopt.py CHANGED
Same changes as .omniopt.py above; the copy shipped under data/bin is identical to the top-level .omniopt.py (same sha256 in RECORD).
{omniopt2-6964.data → omniopt2-6966.data}/data/bin/requirements.txt CHANGED
@@ -7,7 +7,6 @@ ax-platform
 art
 tzlocal
 Rich
-rich-argparse
 sixel
 scikit-learn
 submitit
{omniopt2-6964.dist-info → omniopt2-6966.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: omniopt2
-Version: 6964
+Version: 6966
 Summary: Automatic highly parallelized hyperparameter optimizer based on Ax/Botorch
 Home-page: https://scads.ai/transfer-2/verfuegbare-software-dienste-en/omniopt/
 Author: Norman Koch
@@ -17,7 +17,6 @@ Requires-Dist: ax-platform
 Requires-Dist: art
 Requires-Dist: tzlocal
 Requires-Dist: Rich
-Requires-Dist: rich-argparse
 Requires-Dist: sixel
 Requires-Dist: scikit-learn
 Requires-Dist: submitit
{omniopt2-6964.dist-info → omniopt2-6966.dist-info}/RECORD CHANGED
@@ -3,7 +3,7 @@
 .general.sh,sha256=uyGMN8xNToQ0v50KoiYxm6jRmgf0orroOaodM_Nuq30,2107
 .gitignore,sha256=OMaFWOR6wxjAlI85rF3euQcjQFFAl1F34abZkltKnaU,3714
 .helpers.py,sha256=G4TLhtj9u1GdAB9ACnjscsqz-Eu85ew1QAAU93ctcRQ,30432
-.omniopt.py,sha256=lWIf-18E0aOuIkpOS1I3I_9TbVoxADpcMnjp-XD-iL8,372679
+.omniopt.py,sha256=ypaKqygeLDnNTUoN2r7h6Bx1-waHvsWPg3MaFK2hPU0,371947
 .omniopt_plot_cpu_ram_usage.py,sha256=DbOAmdrbcZtsMnHJgHfeRngjtv6zX5J0axyua_dYezc,3932
 .omniopt_plot_general.py,sha256=ZERZJkvVOoJhi7SszmTF1Iln-_08_0Aki48u3LHUW-k,6809
 .omniopt_plot_gpu_usage.py,sha256=bQmjc1Xq_9rlJhiEvy93Q5hXGs4ZKRVg3v7uaMrv9Go,5108
@@ -25,43 +25,43 @@ omniopt_docker,sha256=LWVUeyvmA5AKqAHiH9jBUkR5uZ6AHMnSy0eET7mK6E4,3602
 omniopt_evaluate,sha256=9oBh0_ikCuIz_aJQZrN0j39NDiIDYoSvEFmSVIoFjJE,23842
 omniopt_plot,sha256=Z8ZR10p-ZRSgMeVPO-wVCJ8lk-LQtntjZ9Bk9RifCIs,13360
 omniopt_share,sha256=7g5I7YdoWcA6_GDwWwq0xPf23qiVc_VDrm9ySLH7SH0,14051
-pyproject.toml,sha256=T9zkntHYD5IdiBclRB6l-BCvbfoWjM4-bKKo7pc5ccA,397
-requirements.txt,sha256=4-CwlGCOMAexOgLozRy9_5HwpQVsZFALR_uv69xGsgE,308
+pyproject.toml,sha256=B20UBNvsONBBfSOecgp3Z9uyxVQTq6zyCfHlL1t73U4,397
+requirements.txt,sha256=1Rl2GOnCEuznk9kSFjTjfRwF9FCz_rOrQtfsNZV6F5g,294
 setup.cfg,sha256=HEc8uu6NpfxG5_AVh5SvXOpEFMNKPPPxgMIAH144vT4,38
 test_requirements.txt,sha256=dnCbKmKalrVzNZ_-iQWf1xCxcnDsdGuhbDAr9XlGm-U,477
-omniopt2-6964.data/data/bin/.colorfunctions.sh,sha256=CDlgjwrsrHR_E6c-Qak5wZlotArXm-nf9sVvXePzGZA,1083
-omniopt2-6964.data/data/bin/.general.sh,sha256=uyGMN8xNToQ0v50KoiYxm6jRmgf0orroOaodM_Nuq30,2107
-omniopt2-6964.data/data/bin/.helpers.py,sha256=G4TLhtj9u1GdAB9ACnjscsqz-Eu85ew1QAAU93ctcRQ,30432
-omniopt2-6964.data/data/bin/.omniopt.py,sha256=lWIf-18E0aOuIkpOS1I3I_9TbVoxADpcMnjp-XD-iL8,372679
-omniopt2-6964.data/data/bin/.omniopt_plot_cpu_ram_usage.py,sha256=DbOAmdrbcZtsMnHJgHfeRngjtv6zX5J0axyua_dYezc,3932
-omniopt2-6964.data/data/bin/.omniopt_plot_general.py,sha256=ZERZJkvVOoJhi7SszmTF1Iln-_08_0Aki48u3LHUW-k,6809
-omniopt2-6964.data/data/bin/.omniopt_plot_gpu_usage.py,sha256=bQmjc1Xq_9rlJhiEvy93Q5hXGs4ZKRVg3v7uaMrv9Go,5108
-omniopt2-6964.data/data/bin/.omniopt_plot_kde.py,sha256=uRLWr72TDKvj3AqJ0O0AvkKZ1ok1O1QpXnbfQQdo0nA,6873
-omniopt2-6964.data/data/bin/.omniopt_plot_scatter.py,sha256=VFkly4lbmXeegWoyiu1fLO82JThPJa_iYVZIAPGPP64,8670
-omniopt2-6964.data/data/bin/.omniopt_plot_scatter_generation_method.py,sha256=rgKY_w1E516c9UucVaEvaKd8tCnoUq9xg-RrYSDzYEQ,4289
-omniopt2-6964.data/data/bin/.omniopt_plot_scatter_hex.py,sha256=6014iELQcS83WAAwOqVIUMZQewNP4jntlTQncTY3NTA,10527
-omniopt2-6964.data/data/bin/.omniopt_plot_time_and_exit_code.py,sha256=PFDJt2dDCWSsZntDsDa5imfRLF0h5lCExH8sFp8ptVs,6465
-omniopt2-6964.data/data/bin/.omniopt_plot_trial_index_result.py,sha256=5DmqZAQO_PFmzdap-TIhSMAshRXpOHQacnHAtjwnzN4,4629
-omniopt2-6964.data/data/bin/.omniopt_plot_worker.py,sha256=bD-byr3NTnJ0qF4M_CogdXnhw7sVrzs1eFoqTtDjmfE,4590
-omniopt2-6964.data/data/bin/.random_generator.py,sha256=ezBBUXpez_QaGdpCglMcJ0KZPdQP0XdX5gnLzO1xhwU,2987
-omniopt2-6964.data/data/bin/.shellscript_functions,sha256=7IjirQJpC7TCRPuRj1dO2W6A8h5cHt0dVzj6eH_2EiQ,14664
-omniopt2-6964.data/data/bin/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-omniopt2-6964.data/data/bin/apt-dependencies.txt,sha256=X5tBB8ZLW9XaFtRh8B7C2pIkSoxNNawqioDr0QZAtuM,149
-omniopt2-6964.data/data/bin/omniopt,sha256=RIi0yZALrZGefiejun1fkDJ0LX95WymXd2xZFlglMsQ,48072
-omniopt2-6964.data/data/bin/omniopt_docker,sha256=LWVUeyvmA5AKqAHiH9jBUkR5uZ6AHMnSy0eET7mK6E4,3602
-omniopt2-6964.data/data/bin/omniopt_evaluate,sha256=9oBh0_ikCuIz_aJQZrN0j39NDiIDYoSvEFmSVIoFjJE,23842
-omniopt2-6964.data/data/bin/omniopt_plot,sha256=Z8ZR10p-ZRSgMeVPO-wVCJ8lk-LQtntjZ9Bk9RifCIs,13360
-omniopt2-6964.data/data/bin/omniopt_share,sha256=7g5I7YdoWcA6_GDwWwq0xPf23qiVc_VDrm9ySLH7SH0,14051
-omniopt2-6964.data/data/bin/requirements.txt,sha256=4-CwlGCOMAexOgLozRy9_5HwpQVsZFALR_uv69xGsgE,308
-omniopt2-6964.data/data/bin/setup.py,sha256=g3uEqJHXhggXwgLYoxOjsXg9Z6IV1ubh-Og59AZ264Q,4648
-omniopt2-6964.data/data/bin/test_requirements.txt,sha256=dnCbKmKalrVzNZ_-iQWf1xCxcnDsdGuhbDAr9XlGm-U,477
-omniopt2-6964.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-omniopt2.egg-info/PKG-INFO,sha256=ps0xrSlQVX5ME6bCzBPVZBl-v0Znd7nK55mKRsGEx1k,5286
+omniopt2-6966.data/data/bin/.colorfunctions.sh,sha256=CDlgjwrsrHR_E6c-Qak5wZlotArXm-nf9sVvXePzGZA,1083
+omniopt2-6966.data/data/bin/.general.sh,sha256=uyGMN8xNToQ0v50KoiYxm6jRmgf0orroOaodM_Nuq30,2107
+omniopt2-6966.data/data/bin/.helpers.py,sha256=G4TLhtj9u1GdAB9ACnjscsqz-Eu85ew1QAAU93ctcRQ,30432
+omniopt2-6966.data/data/bin/.omniopt.py,sha256=ypaKqygeLDnNTUoN2r7h6Bx1-waHvsWPg3MaFK2hPU0,371947
+omniopt2-6966.data/data/bin/.omniopt_plot_cpu_ram_usage.py,sha256=DbOAmdrbcZtsMnHJgHfeRngjtv6zX5J0axyua_dYezc,3932
+omniopt2-6966.data/data/bin/.omniopt_plot_general.py,sha256=ZERZJkvVOoJhi7SszmTF1Iln-_08_0Aki48u3LHUW-k,6809
+omniopt2-6966.data/data/bin/.omniopt_plot_gpu_usage.py,sha256=bQmjc1Xq_9rlJhiEvy93Q5hXGs4ZKRVg3v7uaMrv9Go,5108
+omniopt2-6966.data/data/bin/.omniopt_plot_kde.py,sha256=uRLWr72TDKvj3AqJ0O0AvkKZ1ok1O1QpXnbfQQdo0nA,6873
+omniopt2-6966.data/data/bin/.omniopt_plot_scatter.py,sha256=VFkly4lbmXeegWoyiu1fLO82JThPJa_iYVZIAPGPP64,8670
+omniopt2-6966.data/data/bin/.omniopt_plot_scatter_generation_method.py,sha256=rgKY_w1E516c9UucVaEvaKd8tCnoUq9xg-RrYSDzYEQ,4289
+omniopt2-6966.data/data/bin/.omniopt_plot_scatter_hex.py,sha256=6014iELQcS83WAAwOqVIUMZQewNP4jntlTQncTY3NTA,10527
+omniopt2-6966.data/data/bin/.omniopt_plot_time_and_exit_code.py,sha256=PFDJt2dDCWSsZntDsDa5imfRLF0h5lCExH8sFp8ptVs,6465
+omniopt2-6966.data/data/bin/.omniopt_plot_trial_index_result.py,sha256=5DmqZAQO_PFmzdap-TIhSMAshRXpOHQacnHAtjwnzN4,4629
+omniopt2-6966.data/data/bin/.omniopt_plot_worker.py,sha256=bD-byr3NTnJ0qF4M_CogdXnhw7sVrzs1eFoqTtDjmfE,4590
+omniopt2-6966.data/data/bin/.random_generator.py,sha256=ezBBUXpez_QaGdpCglMcJ0KZPdQP0XdX5gnLzO1xhwU,2987
+omniopt2-6966.data/data/bin/.shellscript_functions,sha256=7IjirQJpC7TCRPuRj1dO2W6A8h5cHt0dVzj6eH_2EiQ,14664
+omniopt2-6966.data/data/bin/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+omniopt2-6966.data/data/bin/apt-dependencies.txt,sha256=X5tBB8ZLW9XaFtRh8B7C2pIkSoxNNawqioDr0QZAtuM,149
+omniopt2-6966.data/data/bin/omniopt,sha256=RIi0yZALrZGefiejun1fkDJ0LX95WymXd2xZFlglMsQ,48072
+omniopt2-6966.data/data/bin/omniopt_docker,sha256=LWVUeyvmA5AKqAHiH9jBUkR5uZ6AHMnSy0eET7mK6E4,3602
+omniopt2-6966.data/data/bin/omniopt_evaluate,sha256=9oBh0_ikCuIz_aJQZrN0j39NDiIDYoSvEFmSVIoFjJE,23842
+omniopt2-6966.data/data/bin/omniopt_plot,sha256=Z8ZR10p-ZRSgMeVPO-wVCJ8lk-LQtntjZ9Bk9RifCIs,13360
+omniopt2-6966.data/data/bin/omniopt_share,sha256=7g5I7YdoWcA6_GDwWwq0xPf23qiVc_VDrm9ySLH7SH0,14051
+omniopt2-6966.data/data/bin/requirements.txt,sha256=1Rl2GOnCEuznk9kSFjTjfRwF9FCz_rOrQtfsNZV6F5g,294
+omniopt2-6966.data/data/bin/setup.py,sha256=g3uEqJHXhggXwgLYoxOjsXg9Z6IV1ubh-Og59AZ264Q,4648
+omniopt2-6966.data/data/bin/test_requirements.txt,sha256=dnCbKmKalrVzNZ_-iQWf1xCxcnDsdGuhbDAr9XlGm-U,477
+omniopt2-6966.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+omniopt2.egg-info/PKG-INFO,sha256=CLra0wXG2sLzBHgTXTzO-CCfLCsBDfZVK9OTbpNYI5s,5257
 omniopt2.egg-info/SOURCES.txt,sha256=kXBlYs2_3BE6tKUH1egGFa_9X5w8EQ5pm9dKgPEvdhY,770
 omniopt2.egg-info/dependency_links.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-omniopt2.egg-info/requires.txt,sha256=4Yz6l6MaP7IW0SiKxbYKVvF0qAVAdyUo6_EgbiMII8E,785
+omniopt2.egg-info/requires.txt,sha256=bsUfS9yXREtW8RMtt_zWVuaDBlgTkUQYelzWBx1nG_4,771
 omniopt2.egg-info/top_level.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-omniopt2-6964.dist-info/METADATA,sha256=ps0xrSlQVX5ME6bCzBPVZBl-v0Znd7nK55mKRsGEx1k,5286
-omniopt2-6964.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-omniopt2-6964.dist-info/top_level.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-omniopt2-6964.dist-info/RECORD,,
+omniopt2-6966.dist-info/METADATA,sha256=CLra0wXG2sLzBHgTXTzO-CCfLCsBDfZVK9OTbpNYI5s,5257
+omniopt2-6966.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+omniopt2-6966.dist-info/top_level.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+omniopt2-6966.dist-info/RECORD,,
omniopt2.egg-info/PKG-INFO CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: omniopt2
-Version: 6964
+Version: 6966
 Summary: Automatic highly parallelized hyperparameter optimizer based on Ax/Botorch
 Home-page: https://scads.ai/transfer-2/verfuegbare-software-dienste-en/omniopt/
 Author: Norman Koch
@@ -17,7 +17,6 @@ Requires-Dist: ax-platform
 Requires-Dist: art
 Requires-Dist: tzlocal
 Requires-Dist: Rich
-Requires-Dist: rich-argparse
 Requires-Dist: sixel
 Requires-Dist: scikit-learn
 Requires-Dist: submitit
omniopt2.egg-info/requires.txt CHANGED
@@ -7,7 +7,6 @@ ax-platform
 art
 tzlocal
 Rich
-rich-argparse
 sixel
 scikit-learn
 submitit
pyproject.toml CHANGED
@@ -5,7 +5,7 @@ authors = [
     {email = "norman.koch@tu-dresden.de"},
     {name = "Norman Koch"}
 ]
-version = "6964"
+version = "6966"
 
 readme = "README.md"
 dynamic = ["dependencies"]
requirements.txt CHANGED
@@ -7,7 +7,6 @@ ax-platform
 art
 tzlocal
 Rich
-rich-argparse
 sixel
 scikit-learn
 submitit