oh-my-batch 0.1.0.dev3__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
oh_my_batch/__init__.py CHANGED
@@ -0,0 +1,4 @@
1
+ if __name__ == '__main__':
2
+ import fire
3
+ from .cli import OhMyBatch
4
+ fire.Fire(OhMyBatch)
oh_my_batch/batch.py CHANGED
@@ -13,7 +13,7 @@ class BatchMaker:
13
13
  self._script_bottom = []
14
14
  self._command = []
15
15
 
16
- def add_work_dir(self, *dir: str):
16
+ def add_work_dirs(self, *dir: str):
17
17
  """
18
18
  Add working directories
19
19
 
@@ -22,39 +22,55 @@ class BatchMaker:
22
22
  self._work_dirs.extend(expand_globs(dir))
23
23
  return self
24
24
 
25
- def add_header_file(self, file: str, encoding='utf-8'):
25
+ def add_header_files(self, *file: str, encoding='utf-8'):
26
26
  """
27
27
  Add script header from files
28
28
 
29
29
  :param file: File path
30
30
  :param encoding: File encoding
31
31
  """
32
- with open(file, 'r', encoding=encoding) as f:
33
- self._script_header.append(f.read())
32
+ self._script_header.extend(load_files(*file, encoding=encoding))
34
33
  return self
35
34
 
36
- def add_bottom_file(self, file: str, encoding='utf-8'):
35
+ def add_headers(self, *header: str):
36
+ """
37
+ Add script header
38
+
39
+ :param header: Header lines
40
+ """
41
+ self._script_header.extend(header)
42
+ return self
43
+
44
+ def add_bottom_files(self, *file: str, encoding='utf-8'):
37
45
  """
38
46
  Add script bottom from files
39
47
 
40
48
  :param file: File path
41
49
  :param encoding: File encoding
42
50
  """
43
- with open(file, 'r', encoding=encoding) as f:
44
- self._script_bottom.append(f.read())
51
+ self._script_bottom.extend(load_files(*file, encoding=encoding))
52
+ return self
53
+
54
+ def add_bottoms(self, *bottom: str):
55
+ """
56
+ Add script bottom
45
57
 
46
- def add_command_file(self, file: str, encoding='utf-8'):
58
+ :param bottom: Bottom lines
59
+ """
60
+ self._script_bottom.extend(bottom)
61
+ return self
62
+
63
+ def add_cmd_files(self, *file: str, encoding='utf-8'):
47
64
  """
48
65
  Add commands from files to run under every working directory
49
66
 
50
67
  :param file: File path
51
68
  :param encoding: File encoding
52
69
  """
53
- with open(file, 'r', encoding=encoding) as f:
54
- self._command.append(f.read())
70
+ self._command.extend(load_files(*file, encoding=encoding))
55
71
  return self
56
72
 
57
- def add_command(self, *cmd: str):
73
+ def add_cmds(self, *cmd: str):
58
74
  """
59
75
  add commands to run under every working directory
60
76
 
@@ -68,10 +84,10 @@ class BatchMaker:
68
84
  Make batch script files from the previous setup
69
85
 
70
86
  :param path: Path to save batch script files, use {i} to represent index
71
- :param concurrency: Number of concurrent commands to run
87
+ :param concurrency: Number of scripts to make
72
88
  """
73
89
  # inject pre-defined functions
74
- self.add_header_file(get_asset('functions.sh'))
90
+ self.add_header_files(get_asset('functions.sh'))
75
91
 
76
92
  header = '\n'.join(self._script_header)
77
93
  bottom = '\n'.join(self._script_bottom)
@@ -80,10 +96,10 @@ class BatchMaker:
80
96
  work_dirs_arr = "\n".join(shlex.quote(w) for w in work_dirs)
81
97
  body.extend([
82
98
  '[ -n "$PBS_O_WORKDIR" ] && cd $PBS_O_WORKDIR # fix PBS',
83
- f'work_dirs=({work_dirs_arr})',
99
+ f'WORK_DIRS=({work_dirs_arr})',
84
100
  '',
85
- 'for work_dir in "${work_dirs[@]}"; do',
86
- 'pushd $work_dir',
101
+ 'for WORK_DIR in "${WORK_DIRS[@]}"; do',
102
+ 'pushd $WORK_DIR',
87
103
  *self._command,
88
104
  'popd',
89
105
  'done'
@@ -94,3 +110,17 @@ class BatchMaker:
94
110
  with open(out_path, 'w', encoding=encoding) as f:
95
111
  f.write(script)
96
112
  os.chmod(out_path, mode_translate(str(mode)))
113
+
114
+
115
+ def load_files(*file, encoding='utf-8', raise_invalid=False):
116
+ """
117
+ Load files from paths
118
+
119
+ :param file: List of file paths
120
+ :return: List of file contents
121
+ """
122
+ result = []
123
+ for file in expand_globs(file, raise_invalid=raise_invalid):
124
+ with open(file, 'r', encoding=encoding) as f:
125
+ result.append(f.read())
126
+ return result
oh_my_batch/combo.py CHANGED
@@ -117,7 +117,6 @@ class ComboMaker:
117
117
  :param args: Values
118
118
  :param broadcast: If True, values are broadcasted, otherwise they are producted when making combos
119
119
  """
120
-
121
120
  if key == 'i':
122
121
  raise ValueError("Variable name 'i' is reserved")
123
122
 
oh_my_batch/job.py CHANGED
@@ -1,5 +1,4 @@
1
1
  from typing import List
2
- from enum import Enum
3
2
 
4
3
  import logging
5
4
  import json
@@ -7,11 +6,12 @@ import time
7
6
  import os
8
7
  import re
9
8
 
10
- from .util import expand_globs, shell_run, parse_csv
9
+ from .util import expand_globs, shell_run, parse_csv, ensure_dir, log_cp
11
10
 
12
11
 
13
12
  logger = logging.getLogger(__name__)
14
13
 
14
+
15
15
  class JobState:
16
16
  NULL = 0
17
17
  PENDING = 1
@@ -59,7 +59,7 @@ class BaseJobManager:
59
59
  recover_scripts = set(j['script'] for j in jobs)
60
60
  logger.info('Scripts in recovery files: %s', recover_scripts)
61
61
 
62
- scripts = set(os.path.normpath(s) for s in expand_globs(script))
62
+ scripts = set(norm_path(s) for s in expand_globs(script, raise_invalid=True))
63
63
  logger.info('Scripts to submit: %s', scripts)
64
64
 
65
65
  for script_file in scripts:
@@ -70,6 +70,7 @@ class BaseJobManager:
70
70
  while True:
71
71
  self._update_jobs(jobs, max_tries, opts)
72
72
  if recovery:
73
+ ensure_dir(recovery)
73
74
  with open(recovery, 'w', encoding='utf-8') as f:
74
75
  json.dump(jobs, f, indent=2)
75
76
 
@@ -101,20 +102,18 @@ class Slurm(BaseJobManager):
101
102
  job_ids = [j['id'] for j in jobs if j['id']]
102
103
  if job_ids:
103
104
  query_cmd = f'{self._sacct_bin} -X -P --format=JobID,JobName,State -j {",".join(job_ids)}'
104
- user = os.environ.get('USER')
105
- if user:
106
- query_cmd += f' -u {user}'
107
-
108
105
  cp = shell_run(query_cmd)
109
106
  if cp.returncode != 0:
110
- logger.error('Failed to query job status: %s', cp.stderr.decode('utf-8'))
107
+ logger.error('Failed to query job status: %s', log_cp(cp))
111
108
  return jobs
112
- logger.info('Job status: %s', cp.stdout.decode('utf-8'))
109
+ logger.info('Job status:\n%s', cp.stdout.decode('utf-8'))
113
110
  new_state = parse_csv(cp.stdout.decode('utf-8'))
114
111
  else:
115
112
  new_state = []
116
113
 
117
114
  for job in jobs:
115
+ if not job['id']:
116
+ continue
118
117
  for row in new_state:
119
118
  if job['id'] == row['JobID']:
120
119
  job['state'] = self._map_state(row['State'])
@@ -122,8 +121,7 @@ class Slurm(BaseJobManager):
122
121
  logger.warning('Unknown job %s state: %s',row['JobID'], row['State'])
123
122
  break
124
123
  else:
125
- if job['id']:
126
- logger.error('Job %s not found in sacct output', job['id'])
124
+ logger.error('Job %s not found in sacct output', job['id'])
127
125
 
128
126
  # check if there are jobs to be (re)submitted
129
127
  for job in jobs:
@@ -135,7 +133,7 @@ class Slurm(BaseJobManager):
135
133
  cp = shell_run(submit_cmd)
136
134
  if cp.returncode != 0:
137
135
  job['state'] = JobState.FAILED
138
- logger.error('Failed to submit job: %s', cp.stderr.decode('utf-8'))
136
+ logger.error('Failed to submit job: %s', log_cp(cp))
139
137
  else:
140
138
  job['id'] = self._parse_job_id(cp.stdout.decode('utf-8'))
141
139
  assert job['id'], 'Failed to parse job id'
@@ -169,3 +167,7 @@ def should_submit(job: dict, max_tries: int):
169
167
  if job['tries'] >= max_tries:
170
168
  return False
171
169
  return state != JobState.COMPLETED
170
+
171
+
172
+ def norm_path(path: str):
173
+ return os.path.normpath(os.path.abspath(path))
oh_my_batch/util.py CHANGED
@@ -19,7 +19,7 @@ def expand_globs(patterns: Iterable[str], raise_invalid=False) -> List[str]:
19
19
  """
20
20
  paths = []
21
21
  for pattern in patterns:
22
- result = glob.glob(pattern, recursive=True) if '*' in pattern else [pattern]
22
+ result = glob.glob(pattern, recursive=True)
23
23
  if raise_invalid and len(result) == 0:
24
24
  raise FileNotFoundError(f'No file found for {pattern}')
25
25
  for p in result:
@@ -83,4 +83,18 @@ def parse_csv(text: str, delimiter="|"):
83
83
  Parse CSV text to list of dictionaries
84
84
  """
85
85
  reader = csv.DictReader(text.splitlines(), delimiter=delimiter)
86
- return list(reader)
86
+ return list(reader)
87
+
88
+
89
+ def log_cp(cp):
90
+ """
91
+ Log child process
92
+ """
93
+ log = f'Command: {cp.args}\nReturn code: {cp.returncode}'
94
+
95
+ out = cp.stdout.decode('utf-8').strip()
96
+ if out:
97
+ log += f'\nSTDOUT:\n{out}'
98
+ err = cp.stderr.decode('utf-8').strip()
99
+ if err:
100
+ log += f'\nSTDERR:\n{err}'
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: oh-my-batch
3
- Version: 0.1.0.dev3
3
+ Version: 0.2.0
4
4
  Summary:
5
5
  License: GPL
6
6
  Author: weihong.xu
@@ -17,6 +17,11 @@ Requires-Dist: fire (>=0.7.0,<0.8.0)
17
17
  Description-Content-Type: text/markdown
18
18
 
19
19
  # oh-my-batch
20
+
21
+ [![PyPI version](https://badge.fury.io/py/oh-my-batch.svg)](https://badge.fury.io/py/oh-my-batch)
22
+ [![PyPI - Downloads](https://img.shields.io/pypi/dm/oh-my-batch)](https://pypi.org/project/oh-my-batch/)
23
+ [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/oh-my-batch)](https://pypi.org/project/oh-my-batch/)
24
+
20
25
  A simple tool to manipulate batch tasks designed for scientific computing community.
21
26
 
22
27
  ## Features
@@ -41,7 +46,6 @@ for example, different temperatures 300K, 400K, 500K, against each data file.
41
46
  In this case, you can use `omb combo` command to generate a series of input files for you.
42
47
 
43
48
  ```bash
44
- #! /bin/bash
45
49
  # prepare fake data files
46
50
  mkdir -p tmp/
47
51
  touch tmp/1.data tmp/2.data tmp/3.data
@@ -87,7 +91,6 @@ You want to package them into 2 batch scripts to submit to a job scheduler.
87
91
  You can use `omb batch` to generate batch scripts for you like this:
88
92
 
89
93
  ```bash
90
- #! /bin/bash
91
94
  cat > tmp/lammps_header.sh <<EOF
92
95
  #!/bin/bash
93
96
  #SBATCH -J lmp
@@ -96,9 +99,9 @@ cat > tmp/lammps_header.sh <<EOF
96
99
  EOF
97
100
 
98
101
  omb batch \
99
- add_work_dir tmp/tasks/* - \
100
- add_header_file tmp/lammps_header.sh - \
101
- add_command "checkpoint lmp.done ./run.sh" - \
102
+ add_work_dirs tmp/tasks/* - \
103
+ add_header_files tmp/lammps_header.sh - \
104
+ add_cmds "checkpoint lmp.done ./run.sh" - \
102
105
  make tmp/lmp-{i}.slurm --concurrency 2
103
106
  ```
104
107
 
@@ -112,19 +115,16 @@ You can run the above script by `./examples/omb-batch.sh`,
112
115
  ### Track the state of job in job schedular
113
116
 
114
117
  Let's continue the above example, now you have submitted the batch scripts to the job scheduler.
115
-
116
- You can use `omb job` to track the state of the jobs.
118
+ In this case, you can use `omb job` to track the state of the jobs.
117
119
 
118
120
  ```bash
119
-
120
- omb job slurm \
121
- submit tmp/*.slurm --max_tries 3 --wait --recovery lammps-jobs.json
121
+ omb job slurm submit tmp/*.slurm --max_tries 3 --wait --recovery lammps-jobs.json
122
122
  ```
123
123
 
124
124
  The above command will submit the batch scripts to the job scheduler,
125
125
  and wait for the jobs to finish. If the job fails, it will retry for at most 3 times.
126
126
 
127
- The `--recovery` option will save the job information to `lammps-jobs.json` file,
128
- if `omb job` is interrupted, you can run the exact same command to recover the job status,
129
- so that you don't need to resubmit the jobs that are already submitted.
127
+ The `--recovery` option will save the job information to `lammps-jobs.json` file.
128
+ If `omb job` is interrupted, you can rerun the exact same command to recover the job status,
129
+ so that you don't need to resubmit the jobs that are still running or completed.
130
130
 
@@ -0,0 +1,14 @@
1
+ oh_my_batch/__init__.py,sha256=BsRNxZbqDWfaIZJGxzIDqCubRWztMGFDceW08TECuFs,98
2
+ oh_my_batch/__main__.py,sha256=sWyFZMwWNvhkanwZSJRGfBBDoIevhC028dTSB67i6yI,61
3
+ oh_my_batch/assets/__init__.py,sha256=Exub46UbQaz2V2eXpQeiVfnThQpXaNeuyjlGY6gBSZc,130
4
+ oh_my_batch/assets/functions.sh,sha256=eORxFefV-XrWbG-2I6u-c8uf1XxOQ31LaeVHBumwzJ4,708
5
+ oh_my_batch/batch.py,sha256=6qnaXEVyA493heGzzbCrdZXCcnYk8zgl7WP0rmo7KlU,3690
6
+ oh_my_batch/cli.py,sha256=uelW9ms1N30DipJOcsiuG5K-5VN8O6yu1RNEqex00GY,475
7
+ oh_my_batch/combo.py,sha256=R_WTO4v-LWHIQ0O46bIgeRlL_RGrFcf8305S9auqeQk,7679
8
+ oh_my_batch/job.py,sha256=8kZnWtvpr1rAl4tc9I_Vlhi-T0o3rh4RQZZgMNBCGho,5800
9
+ oh_my_batch/util.py,sha256=okg_kY8dJouyJ2BYCXRl7bxDUAtNH6GLh2UjXRnkoW0,2385
10
+ oh_my_batch-0.2.0.dist-info/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
11
+ oh_my_batch-0.2.0.dist-info/METADATA,sha256=_qeLyk6LEg2--NU9NqgMoV2WU96125A0eiIOdJFY760,4759
12
+ oh_my_batch-0.2.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
13
+ oh_my_batch-0.2.0.dist-info/entry_points.txt,sha256=ZY2GutSoNjjSyJ4qO2pTeseKUFgoTYdvmgkuZZkwi68,77
14
+ oh_my_batch-0.2.0.dist-info/RECORD,,
@@ -1,14 +0,0 @@
1
- oh_my_batch/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
- oh_my_batch/__main__.py,sha256=sWyFZMwWNvhkanwZSJRGfBBDoIevhC028dTSB67i6yI,61
3
- oh_my_batch/assets/__init__.py,sha256=Exub46UbQaz2V2eXpQeiVfnThQpXaNeuyjlGY6gBSZc,130
4
- oh_my_batch/assets/functions.sh,sha256=eORxFefV-XrWbG-2I6u-c8uf1XxOQ31LaeVHBumwzJ4,708
5
- oh_my_batch/batch.py,sha256=e73N-xwxMvgxnWwFMp33PQD1Dy-T-ATjANlwtPRHPQM,3016
6
- oh_my_batch/cli.py,sha256=uelW9ms1N30DipJOcsiuG5K-5VN8O6yu1RNEqex00GY,475
7
- oh_my_batch/combo.py,sha256=AHFD5CLoczqtjcfl2Rb4A2ucoQU40-cWtDOYjtP-yY4,7680
8
- oh_my_batch/job.py,sha256=_fETBYpuSd_hNHKnXSwYcSU3OXtU7PO-P2QMfhE-Wfs,5788
9
- oh_my_batch/util.py,sha256=H8B4zVNH5xRp-NG_uypgvtmz2YSpXy_6LK5ROv6SYrc,2116
10
- oh_my_batch-0.1.0.dev3.dist-info/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
11
- oh_my_batch-0.1.0.dev3.dist-info/METADATA,sha256=sFfJrqAmymhuKZIv4-ydeDX6Y3oby3NWVfiPjQPUAUM,4453
12
- oh_my_batch-0.1.0.dev3.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
13
- oh_my_batch-0.1.0.dev3.dist-info/entry_points.txt,sha256=ZY2GutSoNjjSyJ4qO2pTeseKUFgoTYdvmgkuZZkwi68,77
14
- oh_my_batch-0.1.0.dev3.dist-info/RECORD,,