pyantz 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (104) hide show
  1. pyantz/__init__.py +0 -0
  2. pyantz/infrastructure/__init__.py +4 -0
  3. pyantz/infrastructure/config/__init__.py +0 -0
  4. pyantz/infrastructure/config/base.py +332 -0
  5. pyantz/infrastructure/config/get_functions.py +132 -0
  6. pyantz/infrastructure/config/local_submitter.py +20 -0
  7. pyantz/infrastructure/config/submitters/__init__.py +0 -0
  8. pyantz/infrastructure/config/submitters/slurm_submitter.py +50 -0
  9. pyantz/infrastructure/core/__init__.py +0 -0
  10. pyantz/infrastructure/core/job.py +42 -0
  11. pyantz/infrastructure/core/manager.py +20 -0
  12. pyantz/infrastructure/core/mutable_job.py +44 -0
  13. pyantz/infrastructure/core/pipeline.py +198 -0
  14. pyantz/infrastructure/core/status.py +14 -0
  15. pyantz/infrastructure/core/submitter_job.py +67 -0
  16. pyantz/infrastructure/core/variables.py +358 -0
  17. pyantz/infrastructure/distributed_queue/__init__.py +9 -0
  18. pyantz/infrastructure/distributed_queue/relational/__init__.py +0 -0
  19. pyantz/infrastructure/distributed_queue/relational/queue_orm.py +61 -0
  20. pyantz/infrastructure/distributed_queue/relational/sqlite_queue.py +256 -0
  21. pyantz/infrastructure/hpc/__init__.py +0 -0
  22. pyantz/infrastructure/hpc/slurm/__init__.py +6 -0
  23. pyantz/infrastructure/hpc/slurm/restful/__init__.py +1 -0
  24. pyantz/infrastructure/hpc/slurm/restful/models/__init__.py +4 -0
  25. pyantz/infrastructure/hpc/slurm/restful/models/account.py +19 -0
  26. pyantz/infrastructure/hpc/slurm/restful/models/account_short.py +12 -0
  27. pyantz/infrastructure/hpc/slurm/restful/models/accounting.py +18 -0
  28. pyantz/infrastructure/hpc/slurm/restful/models/accounting_allocated.py +11 -0
  29. pyantz/infrastructure/hpc/slurm/restful/models/accounts_add_cond.py +15 -0
  30. pyantz/infrastructure/hpc/slurm/restful/models/acct_gather_energy.py +18 -0
  31. pyantz/infrastructure/hpc/slurm/restful/models/assoc.py +31 -0
  32. pyantz/infrastructure/hpc/slurm/restful/models/assoc_default.py +11 -0
  33. pyantz/infrastructure/hpc/slurm/restful/models/assoc_max.py +17 -0
  34. pyantz/infrastructure/hpc/slurm/restful/models/assoc_max_jobs.py +17 -0
  35. pyantz/infrastructure/hpc/slurm/restful/models/assoc_max_jobs_per.py +16 -0
  36. pyantz/infrastructure/hpc/slurm/restful/models/assoc_max_per.py +13 -0
  37. pyantz/infrastructure/hpc/slurm/restful/models/assoc_max_per_account.py +13 -0
  38. pyantz/infrastructure/hpc/slurm/restful/models/assoc_max_tres.py +19 -0
  39. pyantz/infrastructure/hpc/slurm/restful/models/assoc_max_tres_group.py +14 -0
  40. pyantz/infrastructure/hpc/slurm/restful/models/assoc_max_tres_minutes.py +15 -0
  41. pyantz/infrastructure/hpc/slurm/restful/models/assoc_max_tres_per.py +14 -0
  42. pyantz/infrastructure/hpc/slurm/restful/models/assoc_min.py +13 -0
  43. pyantz/infrastructure/hpc/slurm/restful/models/assoc_rec_set.py +36 -0
  44. pyantz/infrastructure/hpc/slurm/restful/models/assoc_shares_obj_wrap.py +28 -0
  45. pyantz/infrastructure/hpc/slurm/restful/models/assoc_shares_obj_wrap_fairshare.py +14 -0
  46. pyantz/infrastructure/hpc/slurm/restful/models/assoc_shares_obj_wrap_tres.py +16 -0
  47. pyantz/infrastructure/hpc/slurm/restful/models/assoc_short.py +15 -0
  48. pyantz/infrastructure/hpc/slurm/restful/models/bf_exit_fields.py +16 -0
  49. pyantz/infrastructure/hpc/slurm/restful/models/cluster_rec.py +22 -0
  50. pyantz/infrastructure/hpc/slurm/restful/models/cluster_rec_associations.py +13 -0
  51. pyantz/infrastructure/hpc/slurm/restful/models/cluster_rec_controller.py +12 -0
  52. pyantz/infrastructure/hpc/slurm/restful/models/controller_ping.py +16 -0
  53. pyantz/infrastructure/hpc/slurm/restful/models/coord.py +12 -0
  54. pyantz/infrastructure/hpc/slurm/restful/models/cron_entry.py +21 -0
  55. pyantz/infrastructure/hpc/slurm/restful/models/cron_entry_line.py +12 -0
  56. pyantz/infrastructure/hpc/slurm/restful/models/float64_no_val_struct.py +13 -0
  57. pyantz/infrastructure/hpc/slurm/restful/models/instance.py +18 -0
  58. pyantz/infrastructure/hpc/slurm/restful/models/instance_time.py +12 -0
  59. pyantz/infrastructure/hpc/slurm/restful/models/qos_limits_min_tres_per.py +13 -0
  60. pyantz/infrastructure/hpc/slurm/restful/models/shares_float128_tres.py +13 -0
  61. pyantz/infrastructure/hpc/slurm/restful/models/shares_uint64_tres.py +14 -0
  62. pyantz/infrastructure/hpc/slurm/restful/models/tres.py +14 -0
  63. pyantz/infrastructure/hpc/slurm/restful/models/uint32_no_val_struct.py +13 -0
  64. pyantz/infrastructure/hpc/slurm/restful/models/uint64_no_val_struct.py +13 -0
  65. pyantz/infrastructure/log/__init__.py +5 -0
  66. pyantz/infrastructure/log/multiproc_logging.py +56 -0
  67. pyantz/infrastructure/submitters/__init__.py +0 -0
  68. pyantz/infrastructure/submitters/local.py +151 -0
  69. pyantz/infrastructure/submitters/slurm/__init__.py +1 -0
  70. pyantz/infrastructure/submitters/slurm/basic_slurm.py +137 -0
  71. pyantz/jobs/__init__.py +51 -0
  72. pyantz/jobs/analysis/__init__.py +0 -0
  73. pyantz/jobs/analysis/count_dataframe.py +0 -0
  74. pyantz/jobs/analysis/filter_dataframe.py +62 -0
  75. pyantz/jobs/analysis/filter_parquet.py +57 -0
  76. pyantz/jobs/branch/__init__.py +0 -0
  77. pyantz/jobs/branch/compare.py +103 -0
  78. pyantz/jobs/branch/create_pipelines_from_matrix.py +108 -0
  79. pyantz/jobs/branch/explode_pipeline.py +57 -0
  80. pyantz/jobs/branch/if_then.py +77 -0
  81. pyantz/jobs/branch/parallel_pipelines.py +67 -0
  82. pyantz/jobs/dispatch/__init__.py +0 -0
  83. pyantz/jobs/dispatch/run_command.py +64 -0
  84. pyantz/jobs/file/__init__.py +1 -0
  85. pyantz/jobs/file/copy.py +102 -0
  86. pyantz/jobs/file/delete.py +54 -0
  87. pyantz/jobs/file/edit_json.py +118 -0
  88. pyantz/jobs/file/make_dirs.py +37 -0
  89. pyantz/jobs/nop.py +18 -0
  90. pyantz/jobs/restart_pipeline.py +76 -0
  91. pyantz/jobs/run_script.py +71 -0
  92. pyantz/jobs/setup/__init__.py +1 -0
  93. pyantz/jobs/variables/__init__.py +0 -0
  94. pyantz/jobs/variables/assert_variable.py +45 -0
  95. pyantz/jobs/variables/assign_environment_variable.py +44 -0
  96. pyantz/jobs/variables/change_variable.py +71 -0
  97. pyantz/jobs/variables/set_variable_from_function.py +62 -0
  98. pyantz/py.typed +0 -0
  99. pyantz/run.py +59 -0
  100. pyantz-0.1.0.dist-info/METADATA +98 -0
  101. pyantz-0.1.0.dist-info/RECORD +104 -0
  102. pyantz-0.1.0.dist-info/WHEEL +4 -0
  103. pyantz-0.1.0.dist-info/entry_points.txt +2 -0
  104. pyantz-0.1.0.dist-info/licenses/LICENSE.txt +19 -0
pyantz/__init__.py ADDED
File without changes
@@ -0,0 +1,4 @@
1
+ """
2
+ Infrastructure provides the backbones of this runner. This module contains the code
3
+ to run all the various components to setup and execute the jobs in the configuration
4
+ """
File without changes
@@ -0,0 +1,332 @@
1
+ """
2
+ This is the base level of the configuration for the core components
3
+ """
4
+
5
+ from __future__ import annotations
6
+
7
+ import logging
8
+ import uuid
9
+ from collections.abc import Mapping
10
+ from typing import Any, Callable, Literal, TypeAlias, Union
11
+
12
+ from pydantic import (
13
+ BaseModel,
14
+ BeforeValidator,
15
+ ConfigDict,
16
+ Field,
17
+ JsonValue,
18
+ field_serializer,
19
+ model_validator,
20
+ validate_call,
21
+ )
22
+ from typing_extensions import Annotated
23
+
24
+ from pyantz.infrastructure.config.get_functions import (
25
+ get_function_by_name_strongly_typed,
26
+ get_params_model,
27
+ set_job_type,
28
+ set_params_model,
29
+ )
30
+ from pyantz.infrastructure.core.status import Status
31
+ from pyantz.infrastructure.core.variables import is_variable
32
+
33
+ from .local_submitter import LocalSubmitterConfig
34
+ from .submitters.slurm_submitter import SlurmBasicSubmitter
35
+
36
# Scalar values allowed in the pipeline variable scope and as leaf parameters.
PrimitiveType: TypeAlias = str | int | float | bool | None
# Any of the configuration models defined in this module (forward refs,
# resolved via `from __future__ import annotations`).
AntzConfig: TypeAlias = Union[
    "Config", "PipelineConfig", "JobConfig", "SubmitterJobConfig", "MutableJobConfig"
]
# Job parameters: a mapping whose values may be nested configs, lists of
# configs, or arbitrary JSON values; None means "no parameters".
ParametersType: TypeAlias = (
    Mapping[str, AntzConfig | list[AntzConfig] | JsonValue] | None
)
# Callback used to submit a new scoped pipeline (Config) for execution.
SubmitFunctionType: TypeAlias = Callable[["Config"], None]
# Signature of a plain ("simple") job: parameters + logger -> Status.
JobFunctionType: TypeAlias = Callable[
    ["ParametersType", logging.Logger],
    Status,
]
# Signature of a submitter job: additionally receives the submit callback,
# the variable scope, and the enclosing pipeline config.
SubmitterJobFunctionType: TypeAlias = Callable[
    [
        "ParametersType",
        SubmitFunctionType,
        Mapping[str, PrimitiveType],
        "PipelineConfig",
        logging.Logger,
    ],
    Status,
]
# Signature of a mutable job: returns a Status plus the (possibly updated)
# variable scope.
MutableJobFunctionType: TypeAlias = Callable[
    ["ParametersType", Mapping[str, PrimitiveType], logging.Logger],
    tuple[
        Status,
        Mapping[str, PrimitiveType],
    ],
]
65
+
66
+
67
class _AbstractJobConfig(BaseModel, frozen=True):
    """Shared base for the job configuration models.

    Holds the function handle, its parameters, a human-readable name, and a
    unique id, plus the serialization and validation logic common to every
    job kind.
    """

    # The callable the job will run; concrete subclasses narrow this to the
    # specific job-function signature they accept.
    function: Callable[..., Any]
    parameters: ParametersType
    name: str = "some job"
    id: uuid.UUID = Field(default_factory=uuid.uuid4, validate_default=True)

    @field_serializer("function")
    def serialize_function(self, func: JobFunctionType, info):
        """To serialize function, store the import path to the func
        instead of its handle as a str

        Decorated jobs carry ``__wrapped__``; recurse to the innermost
        function so the stored path names the user's original callable.
        """
        if hasattr(func, "__wrapped__"):
            return self.serialize_function(func.__wrapped__, info)
        return func.__module__ + "." + func.__name__

    @model_validator(mode="after")
    def check_parameters_match(self: "_AbstractJobConfig") -> "_AbstractJobConfig":
        """Checks that the config parameters match the expected parameters for the function

        Functions registered without a params model are accepted as-is.

        Raises:
            ValueError: if the params model is malformed, parameters are
                missing or not a mapping, or validation against the model
                fails.
        """

        params_model = get_params_model(self.function)
        if params_model is None:
            # No declared model -> nothing to validate against.
            # (The original had this None-check duplicated back to back.)
            return self
        if not isinstance(params_model, type) or not issubclass(
            params_model, BaseModel
        ):
            raise ValueError(
                f"Invalid parameters mode for function {self.function.__name__}"
            )

        # If the parameters are None or not a mapping, error in validation
        if self.parameters is None:
            raise ValueError(
                f"Parameters cannot be None for function {self.function.__name__}"
            )
        if not isinstance(self.parameters, Mapping):
            raise ValueError(
                f"Parameters must be a mapping for function {self.function.__name__}"
            )

        if any(isinstance(field, BaseModel) for field in self.parameters.values()):
            # cannot check jobs or pipelines
            return self
        if any(
            is_variable(field)
            for field in self.parameters.values()
            if isinstance(field, PrimitiveType)
        ):
            # Variables are resolved later, at run time; skip static checking.
            return self
        if not params_model.model_validate(self.parameters):
            raise ValueError(
                f"Parameters do not match expected parameters for function {self.function.__name__}"
            )
        return self
125
+
126
+
127
class MutableJobConfig(_AbstractJobConfig, frozen=True):
    """Configuration of a mutable job.

    Mutable jobs receive a copy of the outer scope's variables and return an
    updated mapping along with their status, allowing them to edit the
    variables of the enclosing pipeline scope.
    (The previous docstring was copy-pasted from SubmitterJobConfig and
    described submitter jobs instead.)
    """

    type: Literal["mutable_job"]
    # The function string in the config is resolved to a callable and checked
    # to be tagged as a "mutable" job before validation.
    function: Annotated[
        MutableJobFunctionType,
        BeforeValidator(get_function_by_name_strongly_typed("mutable")),
    ]
140
+
141
+
142
class SubmitterJobConfig(_AbstractJobConfig, frozen=True):
    """Configuration of a submitter job, with different function param types
    These jobs gain access to the submit function and can submit
    entirely new pipelines of execution

    However, they must ALWAYS BE FINAL
    """

    type: Literal["submitter_job"]
    # The function string in the config is resolved to a callable and checked
    # to be tagged as a "submitter" job before validation.
    function: Annotated[
        SubmitterJobFunctionType,
        BeforeValidator(get_function_by_name_strongly_typed("submitter")),
    ]
155
+
156
+
157
class JobConfig(_AbstractJobConfig, frozen=True):
    """Configuration of a job

    A plain job: the function receives only its parameters and a logger and
    returns a Status. Accepts either "job" or "simple_job" as its type tag.
    """

    type: Literal["job"] | Literal["simple_job"]
    # The function string in the config is resolved to a callable and checked
    # to be tagged as a "simple" job before validation.
    function: Annotated[
        JobFunctionType,
        BeforeValidator(get_function_by_name_strongly_typed("simple")),
    ]
165
+
166
+
167
class PipelineConfig(BaseModel, frozen=True):
    """Configuration of a pipeline, which is a series of jobs or sub-pipelines"""

    type: Literal["pipeline"]
    name: str = "pipeline"
    # presumably the index of the next stage to run — confirm in core/pipeline.py
    curr_stage: int = 0
    id: uuid.UUID = Field(default_factory=uuid.uuid4, validate_default=True)
    status: int = Status.READY
    # Restart bookkeeping: the allowed budget and the restarts consumed so far.
    max_allowed_restarts: int = 0
    curr_restarts: int = 0
    # Each stage is one job config; the "type" field discriminates which of
    # the three job models pydantic instantiates.
    stages: list[
        Annotated[
            Union[MutableJobConfig, SubmitterJobConfig, JobConfig],
            Field(discriminator="type"),
        ]
    ]
184
+
185
+
186
class LoggingConfig(BaseModel, frozen=True):
    """The configuration of logging

    Fields:
        type: one of "off", "file", "console", "remote"
        level: a stdlib ``logging`` level threshold
        directory: presumably where "file" logging writes — confirm against
            the log module
    """

    type: Literal["off", "file", "console", "remote"] = (
        "console"  # default to logging to screen
    )
    level: int = logging.CRITICAL  # default to only logging on crashes
    directory: str | None = "./log"
194
+
195
+
196
class Config(BaseModel, frozen=True):
    """The global configuration to submit to runner"""

    # Variable scope shared by the stages of the pipeline.
    variables: Mapping[str, PrimitiveType]
    # The root pipeline to execute under that scope.
    config: PipelineConfig
201
+
202
+
203
class InitialConfig(BaseModel, frozen=True):
    """The configuration of both the jobs and the submitters"""

    # The scoped pipeline (variables + pipeline) to run.
    analysis_config: Config
    # Which submitter backend executes the work; "type" discriminates.
    submitter_config: LocalSubmitterConfig | SlurmBasicSubmitter = Field(
        discriminator="type"
    )
    logging_config: LoggingConfig = LoggingConfig()
211
+
212
+
213
def mutable_job(
    params_model: type[BaseModel] | None,
) -> Callable[
    [
        Callable[
            ["ParametersType", Mapping[str, PrimitiveType], logging.Logger],
            tuple[Status, Mapping[str, PrimitiveType]],
        ]
    ],
    MutableJobFunctionType,
]:
    """Decorator factory for mutable jobs.

    Attaches ``params_model`` to the decorated function so that the
    configuration parser can validate job parameters against it.
    """

    def mark_mutable_job(
        fn: Callable[
            ["ParametersType", Mapping[str, PrimitiveType], logging.Logger],
            tuple[Status, Mapping[str, PrimitiveType]],
        ],
    ) -> MutableJobFunctionType:
        """Tag *fn* as a mutable job and wrap it so that:
        1. it accepts variable args if a user incorrectly marks the job, and
        2. pydantic can type-check calls to it.
        """

        @validate_call(config=ConfigDict(arbitrary_types_allowed=True))
        def _mutable_job(
            params: ParametersType,
            variables: Mapping[str, PrimitiveType],
            logger: logging.Logger,
        ) -> tuple[Status, Mapping[str, PrimitiveType]]:
            return fn(params, variables, logger)

        # Tag the wrapper with its job type and params model, and keep a
        # handle on the original function for serialization.
        set_params_model(_mutable_job, params_model)
        set_job_type(_mutable_job, "mutable")
        _mutable_job.__wrapped__ = fn
        return _mutable_job

    return mark_mutable_job
253
+
254
+
255
def submitter_job(
    params_model: type[BaseModel] | None,
) -> Callable[[SubmitterJobFunctionType], SubmitterJobFunctionType]:
    """Decorator factory for submitter jobs.

    Attaches ``params_model`` to the decorated function so that the
    configuration parser can validate job parameters against it.
    """

    def mark_submitter_job(fn: SubmitterJobFunctionType) -> SubmitterJobFunctionType:
        """Tag *fn* as a submitter job and wrap it so that:
        1. it accepts variable args if a user incorrectly marks the job, and
        2. pydantic can type-check calls to it.
        """

        @validate_call(config=ConfigDict(arbitrary_types_allowed=True))
        def _submitter_job(
            params: ParametersType,
            submitter: SubmitFunctionType,
            variables: Mapping[str, PrimitiveType],
            pipeline_config: PipelineConfig,
            logger: logging.Logger,
        ) -> Status:
            return fn(params, submitter, variables, pipeline_config, logger)

        # Tag the wrapper with its job type and params model, and keep a
        # handle on the original function for serialization.
        set_params_model(_submitter_job, params_model)
        set_job_type(_submitter_job, "submitter")
        _submitter_job.__wrapped__ = fn
        return _submitter_job

    return mark_submitter_job
284
+
285
+
286
def simple_job(
    params_model: type[BaseModel] | None,
) -> Callable[[Callable[[ParametersType, logging.Logger], Status]], JobFunctionType]:
    """Decorator factory for simple jobs.

    Attaches ``params_model`` to the decorated function so that the
    configuration parser can validate job parameters against it.
    """

    def mark_simple_job(
        fn: Callable[[ParametersType, logging.Logger], Status],
    ) -> JobFunctionType:
        """Tag *fn* as a simple job and wrap it so that:
        1. it accepts variable args if a user incorrectly marks the job, and
        2. pydantic can type-check calls to it.
        """

        @validate_call(config=ConfigDict(arbitrary_types_allowed=True))
        def _simple_job(
            params: ParametersType,
            logger: logging.Logger,
        ) -> Status:
            return fn(params, logger)

        # Tag the wrapper with its job type and params model, and keep a
        # handle on the original function for serialization.
        set_params_model(_simple_job, params_model)
        set_job_type(_simple_job, "simple")
        _simple_job.__wrapped__ = fn
        return _simple_job

    return mark_simple_job
314
+
315
+
316
# Public API of the configuration base module.
__all__ = [
    "simple_job",
    "submitter_job",
    "mutable_job",
    "InitialConfig",
    "Config",
    "LoggingConfig",
    "PipelineConfig",
    "JobConfig",
    "SubmitterJobConfig",
    "MutableJobConfig",
    "ParametersType",
    "PrimitiveType",
    "SubmitFunctionType",
    "SubmitterJobFunctionType",  # was missing although its sibling aliases are exported
    "MutableJobFunctionType",
    "JobFunctionType",
]
@@ -0,0 +1,132 @@
1
+ """Functions to dynamically import and tag functions from a configuration"""
2
+
3
+ import importlib
4
+ from typing import Any, Callable
5
+
6
+ from pydantic import BaseModel
7
+
8
# Attribute names used to tag callables with pyantz metadata. These are an
# implementation detail: use get_job_type / get_params_model to inspect them.
_PYANTZ_JOB_TYPE_FIELD: str = "__pyantz_job_type__"
_PYANTZ_PARAMS_MODEL_FIELD: str = "__pyantz_param_model__"


def set_params_model(
    fn: Callable[..., Any], params_model: type[BaseModel] | None
) -> Callable[..., Any]:
    """Attach *params_model* to *fn* and return *fn* unchanged."""
    setattr(fn, _PYANTZ_PARAMS_MODEL_FIELD, params_model)
    return fn


def set_job_type(fn: Callable[..., Any], job_type: str) -> Callable[..., Any]:
    """Tag *fn* with its pyantz job type and return *fn* unchanged."""
    setattr(fn, _PYANTZ_JOB_TYPE_FIELD, job_type)
    return fn


def get_params_model(fn: Callable[..., Any]) -> type[BaseModel] | None:
    """Return the params model attached to *fn* for type checking on
    instantiation of the job, or None when *fn* was never tagged.
    """
    return getattr(fn, _PYANTZ_PARAMS_MODEL_FIELD, None)


def get_job_type(fn: Callable[..., Any] | None) -> str | None:
    """For a provided callable, return what type of job it is

    This API is guaranteed to be stable; our implementation of how
    to mark functions is not. SO **USE THIS** to check

    :param fn: any function which may or may not be marked
    :type fn: Callable[..., Any]
    :return: if the function is marked, return the mark type; else None
    :rtype: str | None
    """
    if fn is None:
        return fn
    return getattr(fn, _PYANTZ_JOB_TYPE_FIELD, None)
51
+
52
+
53
def get_function_by_name_strongly_typed(
    func_type_name: str | tuple[str, ...], strict: bool | None = None
) -> Callable[[Any], Callable[..., Any] | None]:
    """Returns a function Calls get_function_by_name and checks that the function type is correct

    Uses strict rules for internal functions; otherwise uses non-strict
    can be overriden with the strict argument
    If strict is True,
        requires that the function is wrapped in the correct wrapper from job_decorators.py
    if strict is false,
        if the function is not wrapped in any of those wrappers, will skip checking

    Args:
        func_type_name: the name of the wrapper in job_decorators
        strict: overrides the default behavior if provided, see notes above
    """
    # strict for PyAntz jobs because we should at least be consistent!
    if strict is None:
        candidate_names = (
            (func_type_name,) if isinstance(func_type_name, str) else func_type_name
        )
        strict = all(name.startswith("pyantz") for name in candidate_names)

    def typed_get_function_by_name(
        func_name_or_any: Any,
    ) -> Callable[..., Any] | None:
        """Resolve the name to a callable, then enforce the job-type tag."""
        handle = get_function_by_name(func_name_or_any)
        marked_type = get_job_type(handle)
        if marked_type is None:
            # Untagged function: rejected under strict rules, accepted otherwise.
            return None if strict else handle
        if isinstance(func_type_name, str):
            return handle if marked_type == func_type_name else None
        return handle if marked_type in func_type_name else None

    return typed_get_function_by_name
94
+
95
+
96
def get_function_by_name(func_name_or_any: Any) -> Callable[..., Any] | None:
    """Links to the function described by config

    Args:
        func_name_or_any: expected to be a dotted path like
            "package.module.func"; any non-string yields None

    Returns:
        Callable[..., Any] | None:
            the resolved callable, or None when the input is not a string,
            has no module component, the module cannot be imported, the
            attribute is missing, or the attribute is not callable
    """

    if not isinstance(func_name_or_any, str):
        return None

    name: str = func_name_or_any

    components = name.split(".")
    if len(components) < 2:
        # A bare name has no module part; previously this produced
        # mod_name == "" and importlib.import_module("") raised ValueError,
        # which escaped the ModuleNotFoundError handler below.
        return None
    func_name = components[-1]
    mod_name = ".".join(components[:-1])

    try:
        mod = importlib.import_module(mod_name)
    except ImportError:
        # Covers ModuleNotFoundError and any other import failure.
        return None

    func = getattr(mod, func_name, None)

    if not callable(func):
        return None

    return func
@@ -0,0 +1,20 @@
1
+ """Configuration for the Local Submitter
2
+
3
+ num_concurrent_jobs controls how many processes to spawn for the manager
4
+ """
5
+
6
+ from typing import Literal
7
+
8
+ from pydantic import BaseModel
9
+
10
+
11
class LocalSubmitterConfig(BaseModel, frozen=True):
    """
    The configuration of the local submitter

    num_concurrent_jobs (int): number of processes to run jobs
    """

    type: Literal["local"]  # discriminator value selecting this submitter
    name: str = "local submitter"
    num_concurrent_jobs: int = 1  # how many worker processes to spawn
File without changes
@@ -0,0 +1,50 @@
1
+ """Configuration of the slurm submitter"""
2
+
3
+ from typing import Literal
4
+ import enum
5
+
6
+ from pydantic import BaseModel, DirectoryPath, Field
7
+
8
+
9
class RetryPolicy(enum.StrEnum):
    """Retry policies are how a slurm submitter should try to resubmit jobs that failed

    naive: just resubmit with no changes
    exclude: exclude the node that failed previously
    include: only submit to nodes with previously successful jobs
    current: only submit to the current node
    """

    NAIVE = "naive"
    EXCLUDE = "exclude"
    INCLUDE = "include"
    CURRENT = "current"
22
+
23
+
24
class SlurmBasicSubmitter(BaseModel, frozen=True):
    """Configuration of the basic slurm submitter (basic_slurm)

    Fields:
        - type (str): always set to slurm basic
        - name (str): whatever name you want for the submitters
        - max_submit_retries (int): when submitting, how many times to retry a failed submission
            useful if you think the failure is from random node issues
            defaults to 0
        - retry policy: see RetryPolicy enum docstring
            how a submitter should resubmit failed jobs
            defaults to NAIVE
        - submit_wait_time (int): how long after submitting to wait for job fail/success
            in seconds
            defaults to 3
        - slurm_command (str): the command to use to submit. for now, must be sbatch
            in the future, could come up with an srun solution
        - working_directory: an existing directory the submitter works in
        - grid_cmd_args: extra command-line arguments for the slurm command;
            defaults to an empty list
    """

    type: Literal["slurm_basic"]
    name: str = "basic slurm submitter"
    max_submit_retries: int = 0
    retry_policy: RetryPolicy = RetryPolicy.NAIVE
    submit_wait_time: int = 3
    slurm_command: Literal["sbatch"] = "sbatch"
    working_directory: DirectoryPath
    # Previously declared as Field(..., default_factory=lambda: []), which
    # mixed the "required" Ellipsis marker with a default factory and needed
    # a type: ignore; a plain default_factory expresses the same default.
    grid_cmd_args: list[str] = Field(default_factory=list)
File without changes
@@ -0,0 +1,42 @@
1
+ """A job is the basic unit of execution in this module
2
+
3
+ Each job performs one user-assigned task and returns its state.
4
+ """
5
+
6
+ # pylint: disable=duplicate-code
7
+
8
+ import logging
9
+ from collections.abc import Mapping
10
+
11
+ from pyantz.infrastructure.config.base import JobConfig, PrimitiveType
12
+ from pyantz.infrastructure.core.status import Status
13
+ from pyantz.infrastructure.core.variables import resolve_variables
14
+
15
+
16
def run_job(
    config: JobConfig,
    variables: Mapping[str, PrimitiveType],
    logger: logging.Logger,
) -> Status:
    """Run a job, which is the smallest atomic task of antz

    Resolves variables in the job's parameters, invokes the configured
    function, and maps a non-Status return or a raised exception to
    ``Status.ERROR``.
    """
    function = config.function
    logger.debug("Running job %s, with func handle: %s", config.id, str(function))

    resolved_params = resolve_variables(config.parameters, variables)
    logger.debug("Running function with parameters %s", str(resolved_params))

    try:
        result = function(resolved_params, logger)
    except Exception as exc:  # pylint: disable=broad-exception-caught
        logger.warning("Unexpected error", exc_info=exc)
        outcome = Status.ERROR
    else:
        if isinstance(result, Status):
            outcome = result
        else:
            logger.warning(
                "Return of function was not an ANTZ status, this is an automatic error"
            )
            outcome = Status.ERROR  # bad return type is an error

    logger.debug("Finished job %s with status %s", config.id, str(outcome))
    return outcome
@@ -0,0 +1,20 @@
1
+ """Run a general config, which is a pipeline with a scope (variables)"""
2
+
3
+ import logging
4
+ from typing import Callable
5
+
6
+ from pyantz.infrastructure.config.base import Config
7
+ from pyantz.infrastructure.core.pipeline import run_pipeline
8
+
9
+
10
def run_manager(
    config: Config, submit_fn: Callable[[Config], None], logger: logging.Logger
) -> None:
    """Run the configuration

    Args:
        config: the scoped pipeline (variables + pipeline config) to run
        submit_fn: callback used to submit new Configs for execution
        logger: logger for progress/debug output
    """
    # Pipeline ids are UUIDs (see base.PipelineConfig), so %s is required;
    # the previous %d directive raised a formatting error in logging.
    logger.debug("Manager starting up pipeline with id %s", config.config.id)
    run_pipeline(
        config=config.config,
        variables=config.variables,
        submit_fn=submit_fn,
        logger=logger,
    )
@@ -0,0 +1,44 @@
1
+ """Mutable jobs allow the function to edit the variables of the outer scope"""
2
+
3
+ # pylint: disable=duplicate-code
4
+
5
+ import logging
6
+ from collections.abc import Mapping
7
+ from copy import deepcopy
8
+
9
+ from pyantz.infrastructure.config.base import MutableJobConfig, PrimitiveType
10
+ from pyantz.infrastructure.core.status import Status
11
+ from pyantz.infrastructure.core.variables import resolve_variables
12
+
13
+
14
def run_mutable_job(
    config: MutableJobConfig,
    variables: Mapping[str, PrimitiveType],
    logger: logging.Logger,
) -> tuple[Status, Mapping[str, PrimitiveType]]:
    """Run a job, which is the smallest atomic task of antz

    The job function receives a deep copy of the variables; its returned
    mapping replaces the scope only on a successful, well-typed return. On
    any error the original variables are returned unchanged.
    """
    function = config.function
    logger.debug("Running job %s, with func handle: %s", config.id, str(function))

    resolved_params = resolve_variables(config.parameters, variables)
    logger.debug("Running function with parameters %s", str(resolved_params))

    updated_vars: Mapping[str, PrimitiveType] = variables
    try:
        returned_status, returned_vars = function(
            resolved_params, deepcopy(variables), logger
        )
    except Exception as exc:  # pylint: disable=broad-exception-caught
        logger.warning("Unexpected error", exc_info=exc)
        outcome = Status.ERROR
    else:
        if isinstance(returned_status, Status):
            outcome = returned_status
            updated_vars = returned_vars
        else:
            logger.warning(
                "Return of function was not an ANTZ status, this is an automatic error"
            )
            outcome = Status.ERROR  # bad return type is an error

    logger.debug("Finished job %s with status %s", config.id, str(outcome))

    if outcome == Status.ERROR:
        return outcome, variables
    return outcome, updated_vars