bmtool 0.7.0.6.4__py3-none-any.whl → 0.7.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
bmtool/SLURM.py CHANGED
@@ -1,12 +1,12 @@
- import time
- import os
- import subprocess
+ import copy
  import json
- import requests
+ import os
  import shutil
+ import subprocess
  import time
- import copy
+

  import numpy as np
+ import requests


  def check_job_status(job_id):
@@ -20,17 +20,17 @@ def check_job_status(job_id):
          str: The state of the job.
      """
      try:
-         result = subprocess.run(['scontrol', 'show', 'job', job_id], capture_output=True, text=True)
+         result = subprocess.run(["scontrol", "show", "job", job_id], capture_output=True, text=True)
          if result.returncode != 0:
              # this check is not needed if check_interval is less than 5 min (~300 seconds)
-             if 'slurm_load_jobs error: Invalid job id specified' in result.stderr:
-                 return 'COMPLETED' # Treat invalid job ID as completed because scontrol expires and removed job info when done.
-             #raise Exception(f"Error checking job status: {result.stderr}")
+             if "slurm_load_jobs error: Invalid job id specified" in result.stderr:
+                 return "COMPLETED" # Treat invalid job ID as completed because scontrol expires and removed job info when done.
+             # raise Exception(f"Error checking job status: {result.stderr}")

          job_state = None
-         for line in result.stdout.split('\n'):
-             if 'JobState=' in line:
-                 job_state = line.strip().split('JobState=')[1].split()[0]
+         for line in result.stdout.split("\n"):
+             if "JobState=" in line:
+                 job_state = line.strip().split("JobState=")[1].split()[0]
                  break

          if job_state is None:
@@ -39,7 +39,7 @@ def check_job_status(job_id):
          return job_state
      except Exception as e:
          print(f"Exception while checking job status: {e}", flush=True)
-         return 'UNKNOWN'
+         return "UNKNOWN"


  def submit_job(script_path):
@@ -55,29 +55,27 @@ def submit_job(script_path):
      Raises:
          Exception: If there is an error in submitting the job.
      """
-     result = subprocess.run(['sbatch', script_path], capture_output=True, text=True)
+     result = subprocess.run(["sbatch", script_path], capture_output=True, text=True)
      if result.returncode != 0:
          raise Exception(f"Error submitting job: {result.stderr}")
      job_id = result.stdout.strip().split()[-1]
      return job_id


- def send_teams_message(webhook,message):
+ def send_teams_message(webhook, message):
      """Sends a message to a teams channel or chat

      Args:
          webhook (str): A microsoft teams webhook
          message (str): A message to send in the chat/channel
      """
-     message = {
-         "text": f"{message}"
-     }
+     message = {"text": f"{message}"}

      # Send POST request to trigger the flow
      response = requests.post(
          webhook,
          json=message, # Using 'json' instead of 'data' for automatic serialization
-         headers={'Content-Type': 'application/json'}
+         headers={"Content-Type": "application/json"},
      )


@@ -100,17 +98,20 @@ class seedSweep:
          Args:
              new_value: The new value for the parameter.
          """
-         with open(self.json_file_path, 'r') as f:
+         with open(self.json_file_path, "r") as f:
              data = json.load(f)
-
+

          data[self.param_name] = new_value
-
-         with open(self.json_file_path, 'w') as f:
+
+         with open(self.json_file_path, "w") as f:
              json.dump(data, f, indent=4)
-
-         print(f"JSON file '{self.json_file_path}' modified successfully with {self.param_name}={new_value}.", flush=True)
-
-     def change_json_file_path(self,new_json_file_path):
+
+         print(
+             f"JSON file '{self.json_file_path}' modified successfully with {self.param_name}={new_value}.",
+             flush=True,
+         )
+
+     def change_json_file_path(self, new_json_file_path):
          self.json_file_path = new_json_file_path


@@ -120,6 +121,7 @@ class multiSeedSweep(seedSweep):
      MultSeedSweeps are centered around some base JSON cell file. When that base JSON is updated, the other JSONs
      change according to their ratio with the base JSON.
      """
+
      def __init__(self, base_json_file_path, param_name, syn_dict, base_ratio=1):
          """
          Initializes the multipleSeedSweep instance.
@@ -144,22 +146,39 @@ class multiSeedSweep(seedSweep):
          self.edit_json(new_value)
          base_ratio = self.base_ratio

-         json_file_path = self.syn_dict_for_multi['json_file_path']
-         new_ratio = self.syn_dict_for_multi['ratio'] / base_ratio
-
-         with open(json_file_path, 'r') as f:
+         json_file_path = self.syn_dict_for_multi["json_file_path"]
+         new_ratio = self.syn_dict_for_multi["ratio"] / base_ratio
+
+         with open(json_file_path, "r") as f:
              data = json.load(f)
          altered_value = new_ratio * new_value
          data[self.param_name] = altered_value
-
-         with open(json_file_path, 'w') as f:
+
+         with open(json_file_path, "w") as f:
              json.dump(data, f, indent=4)
-
-         print(f"JSON file '{json_file_path}' modified successfully with {self.param_name}={altered_value}.", flush=True)
+
+         print(
+             f"JSON file '{json_file_path}' modified successfully with {self.param_name}={altered_value}.",
+             flush=True,
+         )


  class SimulationBlock:
-     def __init__(self, block_name, time, partition, nodes, ntasks, mem, simulation_cases, output_base_dir,account=None,additional_commands=None,status_list = ['COMPLETED', 'FAILED', 'CANCELLED'],component_path=None):
+     def __init__(
+         self,
+         block_name,
+         time,
+         partition,
+         nodes,
+         ntasks,
+         mem,
+         simulation_cases,
+         output_base_dir,
+         account=None,
+         additional_commands=None,
+         status_list=["COMPLETED", "FAILED", "CANCELLED"],
+         component_path=None,
+     ):
          """
          Initializes the SimulationBlock instance.

@@ -172,10 +191,10 @@ class SimulationBlock:
              mem (int) : Number of gigabytes (per node)
              simulation_cases (dict): Dictionary of simulation cases with their commands.
              output_base_dir (str): Base directory for the output files.
-             account (str) : account to charge on HPC
+             account (str) : account to charge on HPC
              additional commands (list): commands to run before bmtk model starts useful for loading modules
-             status_list (list): List of things to check before running next block.
-                 Adding RUNNING runs blocks faster but uses MUCH more resources and is only recommended on large HPC
+             status_list (list): List of things to check before running next block.
+                 Adding RUNNING runs blocks faster but uses MUCH more resources and is only recommended on large HPC
          """
          self.block_name = block_name
          self.time = time
@@ -185,7 +204,7 @@ class SimulationBlock:
          self.mem = mem
          self.simulation_cases = simulation_cases
          self.output_base_dir = output_base_dir
-         self.account = account
+         self.account = account
          self.additional_commands = additional_commands if additional_commands is not None else []
          self.status_list = status_list
          self.job_ids = []
@@ -198,24 +217,33 @@ class SimulationBlock:
          Args:
              case_name (str): Name of the simulation case.
              command (str): Command to run the simulation.
-
+
          Returns:
              str: Path to the batch script file.
          """
-         block_output_dir = os.path.join(self.output_base_dir, self.block_name) # Create block-specific output folder
-         case_output_dir = os.path.join(block_output_dir, case_name) # Create case-specific output folder
+         block_output_dir = os.path.join(
+             self.output_base_dir, self.block_name
+         ) # Create block-specific output folder
+         case_output_dir = os.path.join(
+             block_output_dir, case_name
+         ) # Create case-specific output folder
          os.makedirs(case_output_dir, exist_ok=True)

-         batch_script_path = os.path.join(block_output_dir, f'{case_name}_script.sh')
+         batch_script_path = os.path.join(block_output_dir, f"{case_name}_script.sh")
          additional_commands_str = "\n".join(self.additional_commands)
          # Conditional account linegit
          account_line = f"#SBATCH --account={self.account}\n" if self.account else ""
-         env_var_component_path = f"export COMPONENT_PATH={self.component_path}" if self.component_path else ""
-         mem_per_cpu = int(np.ceil(int(self.mem)/int(self.ntasks))) # do ceil cause more mem is always better then less
+         env_var_component_path = (
+             f"export COMPONENT_PATH={self.component_path}" if self.component_path else ""
+         )
+         mem_per_cpu = int(
+             np.ceil(int(self.mem) / int(self.ntasks))
+         ) # do ceil cause more mem is always better then less

          # Write the batch script to the file
-         with open(batch_script_path, 'w') as script_file:
-             script_file.write(f"""#!/bin/bash
+         with open(batch_script_path, "w") as script_file:
+             script_file.write(
+                 f"""#!/bin/bash
  #SBATCH --job-name={self.block_name}_{case_name}
  #SBATCH --output={block_output_dir}/%x_%j.out
  #SBATCH --error={block_output_dir}/%x_%j.err
@@ -235,9 +263,10 @@ class SimulationBlock:
  export OUTPUT_DIR={case_output_dir}

  {command}
- """)
+ """
+             )

-         #print(f"Batch script created: {batch_script_path}", flush=True)
+         # print(f"Batch script created: {batch_script_path}", flush=True)

          return batch_script_path

@@ -247,7 +276,7 @@ export OUTPUT_DIR={case_output_dir}
          """
          for case_name, command in self.simulation_cases.items():
              script_path = self.create_batch_script(case_name, command)
-             result = subprocess.run(['sbatch', script_path], capture_output=True, text=True)
+             result = subprocess.run(["sbatch", script_path], capture_output=True, text=True)
              if result.returncode == 0:
                  job_id = result.stdout.strip().split()[-1]
                  self.job_ids.append(job_id)
@@ -267,7 +296,7 @@ export OUTPUT_DIR={case_output_dir}
              if status not in self.status_list:
                  return False
          return True
-
+
      def check_block_completed(self):
          """checks if all the jobs in the block have been completed by slurm

@@ -276,8 +305,10 @@ export OUTPUT_DIR={case_output_dir}
          """
          for job_id in self.job_ids:
              status = check_job_status(job_id)
-             #print(f"status of job is {status}")
-             if status != 'COMPLETED': # can add PENDING here for debugging NOT FOR ACTUALLY USING IT
+             # print(f"status of job is {status}")
+             if (
+                 status != "COMPLETED"
+             ): # can add PENDING here for debugging NOT FOR ACTUALLY USING IT
                  return False
          return True

@@ -289,10 +320,10 @@ export OUTPUT_DIR={case_output_dir}
          """
          for job_id in self.job_ids:
              status = check_job_status(job_id)
-             if status != 'RUNNING': #
+             if status != "RUNNING": #
                  return False
          return True
-
+
      def check_block_submited(self):
          """checks if a job is running

@@ -301,7 +332,7 @@ export OUTPUT_DIR={case_output_dir}
          """
          for job_id in self.job_ids:
              status = check_job_status(job_id)
-             if status != 'PENDING': #
+             if status != "PENDING": #
                  return False
          return True

@@ -310,7 +341,9 @@ def get_relative_path(endpoint, absolute_path):
      """Convert absolute path to relative path for Globus transfer."""
      try:
          # Get the directories at the mount point
-         result = subprocess.run(["globus", "ls", f"{endpoint}:/"], capture_output=True, text=True, check=True)
+         result = subprocess.run(
+             ["globus", "ls", f"{endpoint}:/"], capture_output=True, text=True, check=True
+         )
          dirs = set(result.stdout.splitlines()) # Convert to a set for quicker lookup

          # Split the absolute path into parts
@@ -318,34 +351,34 @@ def get_relative_path(endpoint, absolute_path):

          # Find the first matching directory in the list
          for i, part in enumerate(path_parts):
-             if part+"/" in dirs:
+             if part + "/" in dirs:
                  # The mount point is everything up to and including this directory
                  mount_point = "/" + "/".join(path_parts[:i])
                  relative_path = absolute_path.replace(mount_point, "", 1).lstrip("/")
                  return relative_path
-
+
          print("Error: Could not determine relative path.")
          return None
      except subprocess.CalledProcessError as e:
          print(f"Error retrieving directories from Globus: {e}")
          return None

+
  def globus_transfer(source_endpoint, dest_endpoint, source_path, dest_path):
      """
-     Transfers file using custom globus transfer function.
+     Transfers file using custom globus transfer function.
      For more info see https://github.com/GregGlickert/transfer-files/blob/main/globus_transfer.sh
-     work in progress still... kinda forgot about this
+     work in progress still... kinda forgot about this
      """
      relative_source_path = get_relative_path(source_endpoint, source_path)
      if relative_source_path is None:
          print("Transfer aborted: Could not determine relative source path.")
          return
-
+
      command = f"globus transfer {source_endpoint}:{relative_source_path} {dest_endpoint}:{dest_path} --label 'bmtool slurm transfer'"
      os.system(command)


-
  class BlockRunner:
      """
      Class to handle submitting multiple blocks sequentially.
@@ -357,9 +390,17 @@ class BlockRunner:
          webhook (str): a microsoft webhook for teams. When used will send teams messages to the hook!
      """

-     def __init__(self, blocks, json_editor=None,json_file_path=None, param_name=None,
-                  param_values=None, check_interval=60,syn_dict = None,
-                  webhook=None):
+     def __init__(
+         self,
+         blocks,
+         json_editor=None,
+         json_file_path=None,
+         param_name=None,
+         param_values=None,
+         check_interval=60,
+         syn_dict=None,
+         webhook=None,
+     ):
          self.blocks = blocks
          self.json_editor = json_editor
          self.param_values = param_values
@@ -377,12 +418,14 @@ class BlockRunner:
          for i, block in enumerate(self.blocks):
              print(block.output_base_dir)
              # Update JSON file with new parameter value
-             if self.json_file_path == None and self.param_values == None:
+             if self.json_file_path is None and self.param_values is None:
                  source_dir = block.component_path
                  destination_dir = f"{source_dir}{i+1}"
                  block.component_path = destination_dir
-                 shutil.copytree(source_dir, destination_dir,dirs_exist_ok = True) # create new components folder
-                 print(f"skipping json editing for block {block.block_name}",flush=True)
+                 shutil.copytree(
+                     source_dir, destination_dir, dirs_exist_ok=True
+                 ) # create new components folder
+                 print(f"skipping json editing for block {block.block_name}", flush=True)
              else:
                  if len(self.blocks) != len(self.param_values):
                      raise Exception("Number of blocks needs to each number of params given")
@@ -392,31 +435,34 @@ class BlockRunner:
                  destination_dir = f"{source_dir}{i+1}"
                  block.component_path = destination_dir

-                 shutil.copytree(source_dir, destination_dir,dirs_exist_ok = True) # create new components folder
-                 json_file_path = os.path.join(destination_dir,self.json_file_path)
-
-                 if self.syn_dict == None:
-                     json_editor = seedSweep(json_file_path , self.param_name)
+                 shutil.copytree(
+                     source_dir, destination_dir, dirs_exist_ok=True
+                 ) # create new components folder
+                 json_file_path = os.path.join(destination_dir, self.json_file_path)
+
+                 if self.syn_dict is None:
+                     json_editor = seedSweep(json_file_path, self.param_name)
                      json_editor.edit_json(new_value)
                  else:
                      # need to keep the orignal around
                      syn_dict_temp = copy.deepcopy(self.syn_dict)
-                     json_to_be_ratioed = syn_dict_temp['json_file_path']
-                     corrected_ratio_path = os.path.join(destination_dir,json_to_be_ratioed)
-                     syn_dict_temp['json_file_path'] = corrected_ratio_path
-                     json_editor = multiSeedSweep(json_file_path ,self.param_name,
-                                                  syn_dict=syn_dict_temp,base_ratio=1)
-                     json_editor.edit_all_jsons(new_value)
+                     json_to_be_ratioed = syn_dict_temp["json_file_path"]
+                     corrected_ratio_path = os.path.join(destination_dir, json_to_be_ratioed)
+                     syn_dict_temp["json_file_path"] = corrected_ratio_path
+                     json_editor = multiSeedSweep(
+                         json_file_path, self.param_name, syn_dict=syn_dict_temp, base_ratio=1
+                     )
+                     json_editor.edit_all_jsons(new_value)

              # Submit the block
              print(f"Submitting block: {block.block_name}", flush=True)
              block.submit_block()
              if self.webhook:
                  message = f"SIMULATION UPDATE: Block {i} has been submitted! There are {(len(self.blocks)-1)-i} left to be submitted"
-                 send_teams_message(self.webhook,message)
+                 send_teams_message(self.webhook, message)

              # Wait for the block to complete
-             if i == len(self.blocks) - 1:
+             if i == len(self.blocks) - 1:
                  while not block.check_block_status():
                      print(f"Waiting for the last block {i} to complete...")
                      time.sleep(self.check_interval)
@@ -424,12 +470,12 @@ class BlockRunner:
              while not block.check_block_status():
                  print(f"Waiting for block {i} to complete...")
                  time.sleep(self.check_interval)
-
+
              print(f"Block {block.block_name} completed.", flush=True)
-         print("All blocks are done!",flush=True)
+         print("All blocks are done!", flush=True)
          if self.webhook:
              message = "SIMULATION UPDATE: Simulation are Done!"
-             send_teams_message(self.webhook,message)
+             send_teams_message(self.webhook, message)

      def submit_blocks_parallel(self):
          """
@@ -437,47 +483,54 @@ class BlockRunner:
          Also the json_file_path should be the path after the components dir
          """
          for i, block in enumerate(self.blocks):
-             if self.param_values == None:
+             if self.param_values is None:
                  source_dir = block.component_path
                  destination_dir = f"{source_dir}{i+1}"
                  block.component_path = destination_dir
-                 shutil.copytree(source_dir, destination_dir,dirs_exist_ok = True) # create new components folder
-                 print(f"skipping json editing for block {block.block_name}",flush=True)
+                 shutil.copytree(
+                     source_dir, destination_dir, dirs_exist_ok=True
+                 ) # create new components folder
+                 print(f"skipping json editing for block {block.block_name}", flush=True)
              else:
-                 if block.component_path == None:
-                     raise Exception("Unable to use parallel submitter without defining the component path")
+                 if block.component_path is None:
+                     raise Exception(
+                         "Unable to use parallel submitter without defining the component path"
+                     )
                  new_value = self.param_values[i]
-
+
                  source_dir = block.component_path
                  destination_dir = f"{source_dir}{i+1}"
                  block.component_path = destination_dir

-                 shutil.copytree(source_dir, destination_dir,dirs_exist_ok = True) # create new components folder
-                 json_file_path = os.path.join(destination_dir,self.json_file_path)
-
-                 if self.syn_dict == None:
-                     json_editor = seedSweep(json_file_path , self.param_name)
+                 shutil.copytree(
+                     source_dir, destination_dir, dirs_exist_ok=True
+                 ) # create new components folder
+                 json_file_path = os.path.join(destination_dir, self.json_file_path)
+
+                 if self.syn_dict is None:
+                     json_editor = seedSweep(json_file_path, self.param_name)
                      json_editor.edit_json(new_value)
                  else:
                      # need to keep the orignal around
                      syn_dict_temp = copy.deepcopy(self.syn_dict)
-                     json_to_be_ratioed = syn_dict_temp['json_file_path']
-                     corrected_ratio_path = os.path.join(destination_dir,json_to_be_ratioed)
-                     syn_dict_temp['json_file_path'] = corrected_ratio_path
-                     json_editor = multiSeedSweep(json_file_path ,self.param_name,
-                                                  syn_dict_temp,base_ratio=1)
-                     json_editor.edit_all_jsons(new_value)
-             # submit block with new component path
+                     json_to_be_ratioed = syn_dict_temp["json_file_path"]
+                     corrected_ratio_path = os.path.join(destination_dir, json_to_be_ratioed)
+                     syn_dict_temp["json_file_path"] = corrected_ratio_path
+                     json_editor = multiSeedSweep(
+                         json_file_path, self.param_name, syn_dict_temp, base_ratio=1
+                     )
+                     json_editor.edit_all_jsons(new_value)
+             # submit block with new component path
              print(f"Submitting block: {block.block_name}", flush=True)
              block.submit_block()
              if i == len(self.blocks) - 1:
-                 print("\nEverything has been submitted. You can close out of this or keep this script running to get a message when everything is finished\n")
+                 print(
+                     "\nEverything has been submitted. You can close out of this or keep this script running to get a message when everything is finished\n"
+                 )
                  while not block.check_block_status():
                      print(f"Waiting for the last block {i} to complete...")
                      time.sleep(self.check_interval)
-
+
          if self.webhook:
              message = "SIMULATION UPDATE: Simulations are Done!"
-             send_teams_message(self.webhook,message)
-
-
+             send_teams_message(self.webhook, message)
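
Note: every hunk in bmtool/SLURM.py above is stylistic. Imports are regrouped, quoting and line wrapping are normalized to Black conventions, and "== None" checks become "is None"; no signatures or behavior change between 0.7.0.6.4 and 0.7.1. For orientation, here is a minimal sketch of how the classes touched by this diff fit together. It is not taken from the package documentation, and every block name, path, partition, and parameter value below is a hypothetical placeholder.

    # Hypothetical parameter-sweep setup; all names, paths, and SLURM
    # settings are example placeholders, not values from this release.
    from bmtool.SLURM import BlockRunner, SimulationBlock

    simulation_cases = {
        # case name -> command run inside the generated sbatch script
        "baseline": "mpirun nrniv -mpi -python run_network.py simulation_config.json",
    }

    blocks = [
        SimulationBlock(
            block_name=f"block{i}",
            time="04:00:00",  # SLURM walltime
            partition="batch",  # cluster-specific partition name
            nodes=1,
            ntasks=24,
            mem=96,  # GB per node; the script derives mem-per-cpu from this
            simulation_cases=simulation_cases,
            output_base_dir="Run-Storage/sweep",
            component_path="components",  # copied to components1, components2, ...
        )
        for i in range(2)
    ]

    # BlockRunner edits param_name inside json_file_path (relative to each
    # copied components folder) to each value in param_values, submits the
    # blocks, then polls scontrol every check_interval seconds.
    runner = BlockRunner(
        blocks,
        json_file_path="synaptic_models/AMPA_ExcToExc.json",  # hypothetical file
        param_name="initW",  # hypothetical parameter
        param_values=[0.5, 1.0],
        check_interval=60,
    )
    runner.submit_blocks_parallel()
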
bmtool/__init__.py CHANGED
@@ -3,4 +3,4 @@ __version__ = "0.0.1"
  import warnings

  with warnings.catch_warnings():
-     warnings.filterwarnings("ignore",category=FutureWarning)
+     warnings.filterwarnings("ignore", category=FutureWarning)
bmtool/__main__.py CHANGED
@@ -1,27 +1,28 @@
  import argparse
- from argparse import RawTextHelpFormatter,SUPPRESS
  import sys
+ from argparse import SUPPRESS, RawTextHelpFormatter

  use_description = """
  Build, plot or debug BMTK models easily.

  python -m bmtool.build
- python -m bmtool.plot
+ python -m bmtool.plot
  python -m bmtool.debug
  python -m bmtool.util

  """

- if __name__ == '__main__':
-
-     parser = argparse.ArgumentParser(description=use_description, formatter_class=RawTextHelpFormatter,usage=SUPPRESS)
+ if __name__ == "__main__":
+     parser = argparse.ArgumentParser(
+         description=use_description, formatter_class=RawTextHelpFormatter, usage=SUPPRESS
+     )
      options = None
      try:
          if not len(sys.argv) > 1:
              raise
-         if sys.argv[1] in ['-h','--h','-help','--help','help']:
+         if sys.argv[1] in ["-h", "--h", "-help", "--help", "help"]:
              raise
          options = parser.parse_args()
      except:
          parser.print_help()
-         sys.exit(0)
+         sys.exit(0)
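
The bmtool/__main__.py hunk is likewise formatting-only: the entry point still prints its help text and exits cleanly when invoked with no arguments or with any help flag. A quick sanity check of that behavior, as a sketch assuming bmtool is installed in the active environment:

    # Running the bare entry point should print usage and exit 0
    # (parser.print_help() followed by sys.exit(0) in the except branch).
    import subprocess
    import sys

    result = subprocess.run(
        [sys.executable, "-m", "bmtool"], capture_output=True, text=True
    )
    assert result.returncode == 0
    assert "Build, plot or debug BMTK models" in result.stdout
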