resubmit 0.0.3__py3-none-any.whl → 0.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,197 @@
1
+ import re
2
+ from typing import Any, Dict, List, Tuple, Union, Optional, Iterable
3
+ import pandas as pd
4
+ from itertools import product
5
+ import logging
6
+
7
+
8
+ def _normalize_regex_spec(val: Any) -> Tuple[re.Pattern, bool]:
9
+ """Return (compiled_pattern, exclude_flag) for a given regex spec.
10
+
11
+ Raises ValueError for unsupported types.
12
+ """
13
+ if hasattr(val, "search") and callable(val.search):
14
+ return val, False
15
+ if isinstance(val, tuple) and len(val) >= 1:
16
+ pat = val[0]
17
+ exclude = bool(val[1]) if len(val) > 1 else False
18
+ return pat, exclude
19
+ if isinstance(val, dict):
20
+ pat = val["pattern"]
21
+ exclude = bool(val.get("exclude", False))
22
+ return pat, exclude
23
+ if isinstance(val, str):
24
+ if val.startswith("!re:"):
25
+ return re.compile(val[4:]), True
26
+ elif val.startswith("re:"):
27
+ return re.compile(val[3:]), False
28
+ raise ValueError(f"Unsupported regex spec: {val!r}")
29
+
30
+
31
+ def ensure_unique_combinations(
32
+ df: pd.DataFrame, cols: Union[str, List[str]], raise_on_conflict: bool = True
33
+ ) -> Tuple[bool, Optional[pd.DataFrame]]:
34
+ """Check that combinations of columns `cols` are unique across `df`.
35
+
36
+ Returns (is_unique, duplicates_df) where `duplicates_df` is None when unique.
37
+ If `raise_on_conflict` is True, raises `ValueError` when duplicates are found.
38
+ """
39
+ if isinstance(cols, str):
40
+ cols = [cols]
41
+ # Stringify to avoid dtype mismatch effects
42
+ key_series = df[cols].astype(str).agg("||".join, axis=1)
43
+ nunique = key_series.nunique()
44
+ if nunique == len(df):
45
+ return True, None
46
+
47
+ duplicates = df[key_series.duplicated(keep=False)]
48
+ if raise_on_conflict:
49
+ raise ValueError(
50
+ f"Found {len(duplicates)} rows with non-unique combinations for cols={cols}."
51
+ )
52
+ return False, duplicates
53
+
54
+
55
def _classify_param_key(key: str) -> Tuple[str, str]:
    """Return (kind, base_name) for a raw parameter key.

    `kind` is 'regex', 'callable' or 'unique' when the key carries a
    '__<kind>' or '_<kind>' suffix, and 'static' otherwise.
    """
    for kind in ("regex", "callable", "unique"):
        # Check the double-underscore form first so 'x__regex' maps to
        # base 'x', not 'x_'.
        for suffix in (f"__{kind}", f"_{kind}"):
            if key.endswith(suffix):
                return kind, key[: -len(suffix)]
    return "static", key


def create_jobs_dataframe(params: Dict[str, Any]) -> pd.DataFrame:
    """Create a job DataFrame from a parameter map.

    Rules:
    - Iterable parameter values (lists, tuples, ...) contribute to a Cartesian
      product across all such parameters. Scalars — including strings and
      bytes, which are technically iterable but represent a single value
      here — are treated as a single choice.
    - Keys ending in '__callable' (or '_callable') name callables evaluated
      AFTER the initial DataFrame is created; the callable is called as
      `col_values = fn(df)` and the result is used as the column values
      (must be same length as `df`).
    - Keys ending in '__regex' (or '_regex') are regex specs (see
      `_normalize_regex_spec`) applied LAST as filters on the stringified
      value of the base column; specs may include or exclude matching rows.
    - Keys ending in '__unique' (or '_unique') enforce uniqueness on the base
      column: a truthy value raises on duplicates, a falsy value only warns.

    Returns a filtered DataFrame with the applied callables and regex filters.
    """
    # Separate static values (used for product), callables, regex specs
    # and uniqueness constraints.
    static_items: Dict[str, Any] = {}
    callables: Dict[str, Any] = {}
    regex_specs: Dict[str, Any] = {}
    unique_items: Dict[str, Any] = {}
    buckets = {
        "static": static_items,
        "callable": callables,
        "regex": regex_specs,
        "unique": unique_items,
    }
    for key, val in params.items():
        kind, base = _classify_param_key(key)
        buckets[kind][base] = val

    if not static_items:
        # Start from a single empty row so callables can still compute columns.
        df = pd.DataFrame([{}])
    else:
        # Wrap scalars (and strings/bytes, which would otherwise be iterated
        # character by character) so every static entry is a list of choices.
        choices = [
            [v] if isinstance(v, (str, bytes)) or not isinstance(v, Iterable) else list(v)
            for v in static_items.values()
        ]
        df = pd.DataFrame(list(product(*choices)), columns=list(static_items.keys()))

    # Apply callables (they must accept the dataframe and return a list-like).
    for name, fn in callables.items():
        vals = fn(df)
        if len(vals) != len(df):
            raise ValueError(
                f"Callable for param {name!r} returned length {len(vals)} != {len(df)}"
            )
        df[name] = vals

    # Apply regex specs last as row filters.
    if regex_specs:
        mask = pd.Series([True] * len(df), index=df.index)
        for name, spec in regex_specs.items():
            pat, exclude = _normalize_regex_spec(spec)
            matches = df[name].astype(str).apply(lambda s: bool(pat.search(s)))
            if exclude:
                mask &= ~matches
            else:
                mask &= matches
        df = df[mask].reset_index(drop=True)

    # Enforce uniqueness constraints: truthy -> raise, falsy -> warn only.
    for name, strict in unique_items.items():
        is_unique, duplicates = ensure_unique_combinations(
            df,
            name,
            raise_on_conflict=strict,
        )
        if not is_unique:
            logging.warning(f"Non-unique values found for column {name!r}:\n{duplicates}")

    return df
142
+
143
+
144
def submit_jobs(
    jobs_args: Dict[str, Any],
    func: Any,
    *,
    timeout_min: int,
    cpus_per_task: int = 16,
    mem_gb: int = 64,
    num_gpus: int = 1,
    folder: str = "logs/%j",
    block: bool = False,
    prompt: bool = True,
    local_run: bool = False,
    slurm_additional_parameters: Dict | None = None,
) -> Any:
    """
    Submit one job per row of the parameter grid described by `jobs_args`.

    A dataframe is created from the Cartesian product of parameter lists,
    with support for callables and regex filtering (see
    `create_jobs_dataframe`):
    1. use `__unique` postfix in keys to enforce uniqueness.
    2. use `__callable` postfix in keys to define callables for column values.
    3. use `__regex` postfix in keys to define regex filters for columns.

    Args:
        jobs_args: Mapping from parameter name (optionally suffixed, see
            above) to a list of values or a spec for that parameter.
        func: Function to be submitted for each job.
        timeout_min: Job timeout in minutes.
        cpus_per_task: Number of CPUs per task.
        mem_gb: Memory in GB.
        num_gpus: Number of GPUs.
        folder: Folder for logs.
        block: Whether to block until jobs complete.
        prompt: Whether to prompt for confirmation before submission.
        local_run: If True, runs the function locally instead of submitting.
        slurm_additional_parameters: Additional Slurm parameters as a dict. If not provided, defaults to {"gpus": num_gpus}.
    Returns:
        The result of `_submit_jobs` from `.__submit`.
    """

    jobs_df = create_jobs_dataframe(jobs_args)
    # Each DataFrame row becomes one kwargs dict for `func`.
    records = jobs_df.to_dict(orient="records")
    # Imported lazily so building the grid never requires submitit.
    from .__submit import _submit_jobs

    return _submit_jobs(
        records,
        func,
        timeout_min=timeout_min,
        cpus_per_task=cpus_per_task,
        mem_gb=mem_gb,
        num_gpus=num_gpus,
        folder=folder,
        block=block,
        prompt=prompt,
        local_run=local_run,
        slurm_additional_parameters=slurm_additional_parameters,
    )
resubmit/__init__.py CHANGED
@@ -1,6 +1,6 @@
1
1
  """resubmit: small helpers around submitit for reproducible cluster submissions."""
2
2
 
3
- from .submit import submit_jobs
4
- from .debug import maybe_attach_debugger
3
+ from .__debug import maybe_attach_debugger
4
+ from .__bookkeeping import submit_jobs
5
5
 
6
6
  __all__ = ["submit_jobs", "maybe_attach_debugger"]
@@ -1,23 +1,21 @@
1
1
  """Core submission utilities wrapping submitit."""
2
+
2
3
  from typing import Any, Callable, Iterable, List, Optional, Dict
3
4
 
4
5
 
5
- def submit_jobs(
6
+ def _submit_jobs(
6
7
  jobs_args: Iterable[dict],
7
8
  func: Callable[[List[dict]], Any],
8
9
  *,
9
10
  timeout_min: int,
10
- cpus_per_task: int = 16,
11
- mem_gb: int = 64,
12
- num_gpus: int = 1,
13
- account: Optional[str] = None,
14
- folder: str = "logs/%j",
15
- block: bool = False,
16
- prompt: bool = True,
17
- local_run: bool = False,
11
+ cpus_per_task: int,
12
+ mem_gb: int,
13
+ num_gpus: int,
14
+ folder: str,
15
+ block: bool,
16
+ prompt: bool,
17
+ local_run: bool,
18
18
  slurm_additional_parameters: Optional[Dict] = None,
19
- constraint: Optional[str] = None,
20
- reservation: Optional[str] = None,
21
19
  ):
22
20
  """Submit jobs described by `jobs_args` where each entry is a dict of kwargs for `func`.
23
21
 
@@ -46,6 +44,7 @@ def submit_jobs(
46
44
  return
47
45
 
48
46
  import submitit
47
+
49
48
  print("submitting jobs")
50
49
  executor = submitit.AutoExecutor(folder=folder)
51
50
 
@@ -56,14 +55,6 @@ def submit_jobs(
56
55
  slurm_additional_parameters = dict(slurm_additional_parameters)
57
56
  slurm_additional_parameters.setdefault("gpus", num_gpus)
58
57
 
59
- # Allow explicit overrides similar to `account`.
60
- if account is not None:
61
- slurm_additional_parameters["account"] = account
62
- if reservation is not None:
63
- slurm_additional_parameters["reservation"] = reservation
64
- if constraint is not None:
65
- slurm_additional_parameters["constraint"] = constraint
66
-
67
58
  print("Slurm additional parameters:", slurm_additional_parameters)
68
59
 
69
60
  executor.update_parameters(
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: resubmit
3
- Version: 0.0.3
3
+ Version: 0.0.5
4
4
  Summary: Small wrapper around submitit to simplify cluster submissions
5
5
  Author: Amir Mehrpanah
6
6
  License: MIT
@@ -0,0 +1,9 @@
1
+ resubmit/__bookkeeping.py,sha256=VX2cCD82nibVM4Tf7peeqylyUXRVwH7ZWLGj1CcfzRU,7219
2
+ resubmit/__debug.py,sha256=8RINyz7eSAiT47d018wR0R3B_u4PllQJCiLy0zTSQDE,887
3
+ resubmit/__init__.py,sha256=FLKq6KZeI973gBXzdnSkvK1aEdxF--5V2T82fxyzv0U,219
4
+ resubmit/__submit.py,sha256=17XhT9e7_p-svBIODlUjKv8r9aryb0tdH3sCFKlJ5g0,2386
5
+ resubmit-0.0.5.dist-info/licenses/LICENSE,sha256=v2spsd7N1pKFFh2G8wGP_45iwe5S0DYiJzG4im8Rupc,1066
6
+ resubmit-0.0.5.dist-info/METADATA,sha256=tJyMalMf59bo41uiZnu0YfdVmj7Ms2J1VwffXM94qes,2976
7
+ resubmit-0.0.5.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
8
+ resubmit-0.0.5.dist-info/top_level.txt,sha256=BfCexfX-VhUZuNi8sI88i0HF_e3ppausQ76hxPeXjYc,9
9
+ resubmit-0.0.5.dist-info/RECORD,,
@@ -1,8 +0,0 @@
1
- resubmit/__init__.py,sha256=E9oorHt7ntQZgLeV9GtB79jHJCCC9WR5_skifBMHnGQ,210
2
- resubmit/debug.py,sha256=8RINyz7eSAiT47d018wR0R3B_u4PllQJCiLy0zTSQDE,887
3
- resubmit/submit.py,sha256=iyL-VTDmL_2YvdOAbfeJTH9m4FVb7lLXQLnOIoHqIZI,2874
4
- resubmit-0.0.3.dist-info/licenses/LICENSE,sha256=v2spsd7N1pKFFh2G8wGP_45iwe5S0DYiJzG4im8Rupc,1066
5
- resubmit-0.0.3.dist-info/METADATA,sha256=lVUYWXWkdtlco8409sxgQAzjvTp60_lM_FOrKRrj72I,2976
6
- resubmit-0.0.3.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
7
- resubmit-0.0.3.dist-info/top_level.txt,sha256=BfCexfX-VhUZuNi8sI88i0HF_e3ppausQ76hxPeXjYc,9
8
- resubmit-0.0.3.dist-info/RECORD,,
File without changes