scalable-pypeline 1.2.2__py2.py3-none-any.whl → 2.0.1__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pypeline/__init__.py +1 -1
- pypeline/barrier.py +34 -0
- pypeline/composition.py +348 -0
- pypeline/constants.py +51 -84
- pypeline/dramatiq.py +470 -0
- pypeline/extensions.py +9 -8
- pypeline/flask/__init__.py +3 -5
- pypeline/flask/api/pipelines.py +109 -148
- pypeline/flask/api/schedules.py +14 -39
- pypeline/flask/decorators.py +18 -53
- pypeline/flask/flask_pypeline.py +156 -0
- pypeline/middleware.py +61 -0
- pypeline/pipeline_config_schema.py +104 -91
- pypeline/pypeline_yaml.py +458 -0
- pypeline/schedule_config_schema.py +35 -120
- pypeline/utils/config_utils.py +52 -310
- pypeline/utils/module_utils.py +35 -71
- pypeline/utils/pipeline_utils.py +161 -0
- scalable_pypeline-2.0.1.dist-info/METADATA +217 -0
- scalable_pypeline-2.0.1.dist-info/RECORD +27 -0
- scalable_pypeline-2.0.1.dist-info/entry_points.txt +3 -0
- tests/fixtures/__init__.py +0 -1
- pypeline/celery.py +0 -206
- pypeline/celery_beat.py +0 -254
- pypeline/flask/api/utils.py +0 -35
- pypeline/flask/flask_sermos.py +0 -156
- pypeline/generators.py +0 -196
- pypeline/logging_config.py +0 -171
- pypeline/pipeline/__init__.py +0 -0
- pypeline/pipeline/chained_task.py +0 -70
- pypeline/pipeline/generator.py +0 -254
- pypeline/sermos_yaml.py +0 -442
- pypeline/utils/graph_utils.py +0 -144
- pypeline/utils/task_utils.py +0 -552
- scalable_pypeline-1.2.2.dist-info/METADATA +0 -163
- scalable_pypeline-1.2.2.dist-info/RECORD +0 -33
- scalable_pypeline-1.2.2.dist-info/entry_points.txt +0 -2
- tests/fixtures/s3_fixtures.py +0 -52
- {scalable_pypeline-1.2.2.dist-info → scalable_pypeline-2.0.1.dist-info}/LICENSE +0 -0
- {scalable_pypeline-1.2.2.dist-info → scalable_pypeline-2.0.1.dist-info}/WHEEL +0 -0
- {scalable_pypeline-1.2.2.dist-info → scalable_pypeline-2.0.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,458 @@
|
|
1
|
+
""" Definition of the `pypeline.yaml` file.
|
2
|
+
|
3
|
+
If using, a basic file may look like::
|
4
|
+
serviceConfig:
|
5
|
+
- name: pypeline-worker
|
6
|
+
registeredTasks:
|
7
|
+
- handler: pypeline_demo_client.workers.demo_worker.demo_worker_task
|
8
|
+
- handler: pypeline_demo_client.workers.demo_worker.demo_model_task
|
9
|
+
|
10
|
+
pipelines:
|
11
|
+
demo-pipeline:
|
12
|
+
name: demo-pipeline
|
13
|
+
description: Demo Pipeline.
|
14
|
+
schemaVersion: 1
|
15
|
+
config:
|
16
|
+
dagAdjacency:
|
17
|
+
node_a:
|
18
|
+
- node_b
|
19
|
+
- node_c
|
20
|
+
metadata:
|
21
|
+
maxRetry: 3
|
22
|
+
maxTtl: 60
|
23
|
+
queue: default-task-queue
|
24
|
+
taskDefinitions:
|
25
|
+
node_a:
|
26
|
+
handler: pypeline_demo_client.workers.demo_pipeline.demo_pipeline_node_a
|
27
|
+
node_b:
|
28
|
+
handler: pypeline_demo_client.workers.demo_pipeline.demo_pipeline_node_b
|
29
|
+
queue: node-b-queue
|
30
|
+
node_c:
|
31
|
+
handler: pypeline_demo_client.workers.demo_pipeline.demo_pipeline_node_c
|
32
|
+
|
33
|
+
scheduledTasks:
|
34
|
+
demo-model-task:
|
35
|
+
name: Demo Model Task
|
36
|
+
enabled: true
|
37
|
+
config:
|
38
|
+
task: pypeline_demo_client.workers.demo_worker.demo_model_task
|
39
|
+
queue: default-task-queue
|
40
|
+
schedule:
|
41
|
+
minute: '*'
|
42
|
+
hour: '*'
|
43
|
+
dayOfWeek: '*'
|
44
|
+
dayOfMonth: '*'
|
45
|
+
monthOfYear: '*'
|
46
|
+
schemaVersion: 1
|
47
|
+
|
48
|
+
"""
|
49
|
+
import re
|
50
|
+
import os
|
51
|
+
import logging
|
52
|
+
import pkg_resources
|
53
|
+
import yaml
|
54
|
+
from yaml.loader import SafeLoader
|
55
|
+
from marshmallow import Schema, fields, pre_load, EXCLUDE, INCLUDE, validates_schema
|
56
|
+
from marshmallow.exceptions import ValidationError
|
57
|
+
from pypeline.utils.module_utils import PypelineModuleLoader, normalized_pkg_name
|
58
|
+
from pypeline.constants import PYPELINE_YAML_PATH, PYPELINE_CLIENT_PKG_NAME
|
59
|
+
from pypeline.pipeline_config_schema import BasePipelineSchema
|
60
|
+
from pypeline.schedule_config_schema import BaseScheduleSchema
|
61
|
+
|
62
|
+
logger = logging.getLogger(__name__)
|
63
|
+
|
64
|
+
|
65
|
+
class InvalidPackagePath(Exception):
    """Raised when a package path / resource filename cannot be resolved."""
|
67
|
+
|
68
|
+
|
69
|
+
class InvalidPypelineConfig(Exception):
    """Raised when a pypeline.yaml file fails YAML or schema validation."""
|
71
|
+
|
72
|
+
|
73
|
+
class MissingPypelineConfig(Exception):
    """Raised when no pypeline.yaml file exists at the expected path."""
|
75
|
+
|
76
|
+
|
77
|
+
class ExcludeUnknownSchema(Schema):
    """Schema mixin that silently drops unknown keys during load."""

    class Meta:
        unknown = EXCLUDE
|
80
|
+
|
81
|
+
|
82
|
+
class NameSchema(Schema):
    """Validated name string field.

    The `name` value may contain only alphanumeric characters plus `_`
    and `-`; anything else is rejected before the schema loads.
    """

    name = fields.String(
        required=True,
        description="Name for service or image. Must include "
        "only alphanumeric characters along with `_` and `-`.",
        example="my-service-name",
    )

    @pre_load
    def validate_characters(self, item, **kwargs):
        """Ensure name field conforms to allowed characters.

        Raises:
            ValidationError: if `name` contains disallowed characters.
                (BUG FIX: previously raised a bare ValueError, which escapes
                marshmallow's error handling and bypasses the
                `ValidationError` branch in `parse_config_file`.)
        """
        valid_chars = r"^[\w\d\-\_]+$"
        # Use .get() so a missing `name` is reported by the field's own
        # required=True validation instead of a KeyError here.
        name = item.get("name")
        if name is not None and not re.match(valid_chars, name):
            raise ValidationError(
                f"Invalid name: {item['name']}. Only alphanumeric characters "
                "allowed along with `-` and `_`."
            )
        return item
|
102
|
+
|
103
|
+
|
104
|
+
class PypelineRegisteredTaskDetailConfigSchema(Schema):
    """One registered-task entry: a worker handler's import path plus
    optional arbitrary event data forwarded to the task at run time.
    """

    handler = fields.String(
        example="pypeline_customer_client.workers.worker_group.useful_worker",
        description="Full path to the Method handles work / pipeline tasks.",
        required=True,
    )

    event = fields.Raw(
        description="Arbitrary user data, passed through `event` arg in task.",
        unknown=INCLUDE,
        required=False,
    )
|
116
|
+
|
117
|
+
|
118
|
+
class PypelineCeleryWorkerConfigSchema(Schema):
    """Attributes for a celery worker. This worker will run all of the
    pipelines and scheduled tasks.
    """

    # Declared required=False so this class can act as a mixin; the custom
    # `_required` metadata flag marks the field as effectively required,
    # which PypelineYamlSchema.validate_schema enforces per worker.
    registeredTasks = fields.List(
        fields.Nested(PypelineRegisteredTaskDetailConfigSchema, required=True),
        description="List of task handlers to register for to your Pypeline app.",
        _required=True,
        required=False,
    )
|
129
|
+
|
130
|
+
|
131
|
+
class PypelineServiceConfigSchema(
    ExcludeUnknownSchema, PypelineCeleryWorkerConfigSchema, NameSchema
):
    """Base service config object definition for workers.

    Pure mixin composition: unknown-key exclusion + worker fields + the
    validated `name` field. Adds no members of its own.
    """
|
137
|
+
|
138
|
+
|
139
|
+
class PypelineYamlSchema(ExcludeUnknownSchema):
    """The primary `pypeline.yaml` file schema. This defines all available
    properties in a valid Pypeline configuration file.
    """

    serviceConfig = fields.List(
        fields.Nested(
            PypelineServiceConfigSchema,
            required=True,
            description="Core service configuration.",
        ),
        description="List of workers for Pypeline to manage.",
        required=True,
    )

    pipelines = fields.Dict(
        keys=fields.String(),
        values=fields.Nested(BasePipelineSchema),
        description="List of pipelines",
        required=False,
    )

    scheduledTasks = fields.Dict(
        keys=fields.String(),
        values=fields.Nested(BaseScheduleSchema),
        description="List of scheduled tasks",
        required=False,
    )

    def validate_errors(self, schema: Schema, value: dict):
        """Run Marshmallow validate() and raise if any errors.

        Arguments:
            schema: Schema *class* (not instance) to validate against.
            value: dict to validate.

        Raises:
            ValidationError: carrying the schema's error dict.
        """
        errors = schema().validate(value)
        if errors:  # non-empty error dict => invalid
            raise ValidationError(errors)

    @validates_schema
    def validate_schema(self, data, **kwargs):
        """Additional validation.

        Nested fields that are not required are not validated by Marshmallow
        by default. Do a single level down of validation for now.
        """
        # Validate nested serviceConfig entries explicitly.
        key_schema_pairs = (("serviceConfig", PypelineServiceConfigSchema),)
        for key, nested_schema in key_schema_pairs:
            val = data.get(key, None)
            if val is None:
                continue
            if isinstance(val, list):  # was `type(val) == list` (idiom fix)
                for v in val:
                    self.validate_errors(nested_schema, v)
            else:
                self.validate_errors(nested_schema, val)

        # Validate the services. Worker schema fields are listed as not
        # required so they can act as mixins for a generic service object,
        # but they ARE required, so enforce here using the custom metadata
        # property `_required` (defaulting to the field's `required`).
        # Instantiate the schema once instead of once per field per service.
        worker_fields = PypelineCeleryWorkerConfigSchema().fields
        for service in data.get("serviceConfig"):
            for field_name, field_obj in worker_fields.items():
                if field_obj.metadata.get("_required", field_obj.required):
                    if field_name not in service:
                        raise ValidationError(
                            f"`{field_name}` missing in worker definition."
                        )

        # Validate pipelines against their versioned schema. (Dict keys are
        # unique by construction; the explicit check is kept for parity.)
        if "pipelines" in data:
            pipeline_ids = set()
            for pipeline_id, pipeline_data in data["pipelines"].items():
                if pipeline_id in pipeline_ids:
                    raise ValidationError("All pipeline ids must be unique!")
                pipeline_ids.add(pipeline_id)
                PipelineSchema = BasePipelineSchema.get_by_version(
                    pipeline_data["schemaVersion"]
                )
                self.validate_errors(PipelineSchema, pipeline_data)

        # Validate scheduled tasks against their versioned schema.
        if "scheduledTasks" in data:
            task_ids = set()
            for task_id, task_data in data["scheduledTasks"].items():
                if task_id in task_ids:
                    raise ValidationError("All schedule ids must be unique!")
                task_ids.add(task_id)
                TaskSchema = BaseScheduleSchema.get_by_version(
                    task_data["schemaVersion"]
                )
                self.validate_errors(TaskSchema, task_data)
|
237
|
+
|
238
|
+
|
239
|
+
class YamlPatternConstructor:
    """Registers a pattern resolver + constructor with PyYAML.

    Typical/default usage is parsing environment variables in a yaml file
    (pattern ``${VAR:default}``), but any pattern may be supplied.

    See: https://pyyaml.org/wiki/PyYAMLDocumentation
    """

    def __init__(self, env_var_pattern: str = None, add_constructor: bool = True):
        # Fall back to the default ${VAR:default} pattern when none given.
        if env_var_pattern is None:
            env_var_pattern = r"^\$\{(.*)\}$"
        self.env_var_pattern = env_var_pattern
        self.path_matcher = re.compile(self.env_var_pattern)

        if add_constructor:
            self.add_constructor()

    def _path_constructor(self, loader, node):
        """Extract the matched value, expand env variable, and replace
        the match.

        TODO: Would need to update this (specifically the parsing) if any
        pattern other than our default (or a highly compatible variation)
        is provided.
        """
        # A node value that does not match the pattern is not parsed for
        # ENV variables and is returned as-is (None signals "no match").
        match = re.match(self.env_var_pattern, node.value)
        if match is None:
            return None

        # match.group(1) is the text inside ${...}. The first `:` splits
        # the variable name from an optional default value; the default may
        # itself contain colons (e.g. a URL), so keep the remainder intact.
        var_name, sep, default = match.group(1).partition(":")

        value = os.environ.get(var_name, None)
        if value is not None:
            return value
        if sep:  # a `:default` portion was present
            return default
        return "unset"  # Return 'unset' if not in environ nor default

    def add_constructor(self):
        """Initialize PyYaml with ability to resolve/load environment
        variables defined in a yaml template when they exist in
        the environment.

        Add to SafeLoader in addition to standard Loader.
        """
        # Implicitly tag any scalar matching self.path_matcher with
        # `!env_var`, so templates need no explicit tags on each value.
        yaml.add_implicit_resolver("!env_var", self.path_matcher)
        yaml.add_implicit_resolver("!env_var", self.path_matcher, Loader=SafeLoader)

        # Constructor for `!env_var`: converts a YAML representation node
        # into a native Python object via _path_constructor.
        yaml.add_constructor("!env_var", self._path_constructor)
        yaml.add_constructor("!env_var", self._path_constructor, Loader=SafeLoader)
|
312
|
+
|
313
|
+
|
314
|
+
def parse_config_file(pypeline_yaml: str):
    """Parse the `pypeline.yaml` file when it's been loaded.

    Arguments:
        pypeline_yaml (required): String of loaded pypeline.yaml file.

    Returns:
        dict: the validated configuration.

    Raises:
        InvalidPypelineConfig: on YAML parse or schema validation failure.
    """
    YamlPatternConstructor()  # Add our env variable parser
    schema = PypelineYamlSchema()
    try:
        # YAML problems surface first, then schema problems.
        raw_config = yaml.safe_load(pypeline_yaml)
        pypeline_config = schema.load(raw_config)
    except ValidationError as e:
        msg = "Invalid Pypeline configuration due to {}".format(e.messages)
        logger.error(msg)
        raise InvalidPypelineConfig(msg)
    except Exception as e:
        msg = (
            "Invalid Pypeline configuration, likely due to invalid "
            "YAML formatting ..."
        )
        logger.exception("{} {}".format(msg, e))
        raise InvalidPypelineConfig(msg)
    return pypeline_config
|
339
|
+
|
340
|
+
|
341
|
+
def _get_pkg_name(pkg_name: str) -> str:
    """Retrieve the normalized package name.

    Falls back to the PYPELINE_CLIENT_PKG_NAME environment-derived constant
    when no explicit name is given; returns None when neither is set.
    """
    resolved = pkg_name if pkg_name is not None else PYPELINE_CLIENT_PKG_NAME
    if resolved is None:
        return None
    return normalized_pkg_name(resolved)
|
348
|
+
|
349
|
+
|
350
|
+
def load_pypeline_config(
    pkg_name: str = None, pypeline_yaml_filename: str = None, as_dict: bool = True
):
    """Load and parse the `pypeline.yaml` file. Issue usable exceptions for
    known error modes so bootstrapping can handle appropriately.

    Arguments:
        pkg_name (required): Directory name for your Python
            package. e.g. my_package_name . If none provided, will check
            environment for `PYPELINE_CLIENT_PKG_NAME`. If not found,
            will exit.
        pypeline_yaml_filename (optional): Relative path to find your
            `pypeline.yaml` configuration file. Defaults to `pypeline.yaml`
            which should be found inside your `pkg_name`
        as_dict (optional): If true (default), return the loaded pypeline
            configuration as a dictionary. If false, return the loaded
            string value of the yaml file.

    Raises:
        InvalidPackagePath: pkg_name / filename could not be resolved.
        InvalidPypelineConfig: file exists but fails validation.
        MissingPypelineConfig: file not found at the resolved path.
    """
    if pypeline_yaml_filename is None:
        pypeline_yaml_filename = PYPELINE_YAML_PATH

    logger.info(
        f"Loading `pypeline.yaml` from package `{pkg_name}` "
        f"and file location `{pypeline_yaml_filename}` ..."
    )
    pypeline_config = None

    pkg_name = _get_pkg_name(pkg_name)

    if pkg_name is None:  # Nothing to retrieve at this point
        logger.warning("Unable to retrieve pypeline.yaml configuration ...")
        return pypeline_config

    try:
        pypeline_config_path = pkg_resources.resource_filename(
            pkg_name, pypeline_yaml_filename
        )
    except Exception as e:
        msg = (
            "Either pkg_name ({}) or pypeline_yaml_filename ({}) is "
            "invalid ...".format(pkg_name, pypeline_yaml_filename)
        )
        logger.error("{} ... {}".format(msg, e))
        raise InvalidPackagePath(e)

    try:
        with open(pypeline_config_path, "r") as f:
            pypeline_yaml = f.read()
        pypeline_config = parse_config_file(pypeline_yaml)
    except InvalidPypelineConfig:
        # Already logged/wrapped by parse_config_file; propagate untouched.
        raise
    except FileNotFoundError:
        msg = "Pypeline config file could not be found at path {} ...".format(
            pypeline_config_path
        )
        raise MissingPypelineConfig(msg)
    except Exception:
        # Bare `raise` preserves the original traceback (was `raise e`).
        raise
    if as_dict:
        return pypeline_config
    return yaml.safe_dump(pypeline_config)
|
411
|
+
|
412
|
+
|
413
|
+
def load_client_config_and_version(
    pkg_name: str = None, pypeline_yaml_filename: str = None
):
    """Load and parse the `pypeline.yaml` file and a client package's version.

    Arguments:
        pkg_name (required): Directory name for your Python
            package. e.g. my_package_name . If none provided, will check
            environment for `PYPELINE_CLIENT_PKG_NAME`. If not found,
            will exit.
        pypeline_yaml_filename (optional): Relative path to find your
            `pypeline.yaml` configuration file. Defaults to `pypeline.yaml`
            which should be found inside your `pkg_name`

    Returns:
        tuple: (pypeline_config or None, client_version or None) — all
        known failure modes are logged and swallowed, never raised.

    For this to work properly, the provided package must be installed in the
    same environment as this Pypeline package and it must have a `__version__`
    variable inside its `__init__.py` file, e.g. `__version__ = '0.0.0'`
    """
    pypeline_config = None
    client_version = None

    pkg_name = _get_pkg_name(pkg_name)

    try:
        loader = PypelineModuleLoader()
        pkg = loader.get_module(pkg_name + ".__init__")
        client_version = getattr(pkg, "__version__", "0.0.0")
        pypeline_config = load_pypeline_config(pkg_name, pypeline_yaml_filename)
    except (MissingPypelineConfig, InvalidPypelineConfig, InvalidPackagePath) as e:
        # Known config failure modes share identical handling; merged from
        # three duplicate except blocks.
        logger.error(e)
    except Exception as e:
        logger.error(
            "Unable to load client's pkg __version__ or "
            "{} config file for package: {} ... {}".format(
                pypeline_yaml_filename, pkg_name, e
            )
        )

    return pypeline_config, client_version
|
@@ -1,30 +1,20 @@
|
|
1
1
|
""" Schemas for Schedule Configuration
|
2
2
|
"""
|
3
3
|
import re
|
4
|
-
|
4
|
+
|
5
|
+
# from celery.schedules import crontab_parser
|
5
6
|
from croniter import croniter
|
6
|
-
from marshmallow.validate import OneOf
|
7
7
|
from marshmallow.exceptions import ValidationError
|
8
8
|
from marshmallow import Schema, fields, EXCLUDE, pre_load, validates_schema
|
9
9
|
|
10
10
|
|
11
11
|
class ExcludeUnknownSchema(Schema):
|
12
|
-
"""
|
12
|
+
"""Remove unknown keys from loaded dictionary"""
|
13
13
|
|
14
|
-
# TODO this seems to be just ignoring and letting through vs excluding...
|
15
|
-
"""
|
16
14
|
class Meta:
|
17
15
|
unknown = EXCLUDE
|
18
16
|
|
19
17
|
|
20
|
-
class IntervalScheduleSchema(Schema):
|
21
|
-
every = fields.Integer(required=True)
|
22
|
-
period = fields.String(
|
23
|
-
required=True,
|
24
|
-
validate=OneOf(['microseconds', 'seconds', 'minutes', 'hours',
|
25
|
-
'days']))
|
26
|
-
|
27
|
-
|
28
18
|
class CrontabScheduleSchema(Schema):
|
29
19
|
minute = fields.String(required=True)
|
30
20
|
hour = fields.String(required=True)
|
@@ -34,14 +24,19 @@ class CrontabScheduleSchema(Schema):
|
|
34
24
|
|
35
25
|
@validates_schema
|
36
26
|
def validate_values(self, data, **kwargs):
|
37
|
-
if
|
38
|
-
data[
|
39
|
-
|
27
|
+
if (
|
28
|
+
data["minute"] is None
|
29
|
+
or data["hour"] is None
|
30
|
+
or data["dayOfWeek"] is None
|
31
|
+
or data["dayOfMonth"] is None
|
32
|
+
or data["monthOfYear"] is None
|
33
|
+
):
|
40
34
|
raise ValidationError("Empty crontab value")
|
41
35
|
|
42
|
-
test_cron_expression =
|
43
|
-
f"{data['minute']} {data['hour']} {data['dayOfMonth']} "
|
36
|
+
test_cron_expression = (
|
37
|
+
f"{data['minute']} {data['hour']} {data['dayOfMonth']} "
|
44
38
|
f"{data['monthOfYear']} {data['dayOfWeek']}"
|
39
|
+
)
|
45
40
|
|
46
41
|
if not croniter.is_valid(test_cron_expression):
|
47
42
|
return ValidationError("Invalid crontab value")
|
@@ -52,121 +47,40 @@ class Schedule(fields.Dict):
|
|
52
47
|
return value
|
53
48
|
|
54
49
|
def _deserialize(self, value, attr, data, **kwargs):
|
55
|
-
|
56
|
-
schema = CrontabScheduleSchema()
|
57
|
-
else:
|
58
|
-
schema = IntervalScheduleSchema()
|
50
|
+
schema = CrontabScheduleSchema()
|
59
51
|
return schema.load(value)
|
60
52
|
|
61
53
|
|
62
54
|
class ScheduleConfigSchemaV1(ExcludeUnknownSchema):
    """Definition of a single schedule entry"""

    queue = fields.String(
        example="my-default-queue",
        description="Name of queue on which to place task.",
        required=True,
    )

    task = fields.String(
        example="my_app.module.method",
        description="Path to task to invoke.",
        required=True,
    )

    schedule = Schedule(required=True)
|
100
69
|
|
101
|
-
@pre_load
|
102
|
-
def validate_string_fields(self, item, **kwargs):
|
103
|
-
""" Ensure string fields with no OneOf validation conform to patterns
|
104
|
-
"""
|
105
|
-
if item is None:
|
106
|
-
raise ValidationError("NoneType provided, check input.")
|
107
|
-
|
108
|
-
validation_map = {
|
109
|
-
'name': r'^[\w\d\-\_\.\s]+$',
|
110
|
-
'queue': r'^[\w\d\-\_\.]+$',
|
111
|
-
'task': r'^[\w\d\-\_\.]+$',
|
112
|
-
'exchange': r'^[\w\d\-\_\.]+$',
|
113
|
-
'routing_key': r'^[\w\d\-\_\.]+$'
|
114
|
-
}
|
115
|
-
for field in validation_map:
|
116
|
-
if item.get(field, None) is None:
|
117
|
-
continue
|
118
|
-
if not bool(re.match(validation_map[field], item[field])):
|
119
|
-
raise ValidationError(
|
120
|
-
f"Invalid {field}: `{item[field]}``. Must match pattern: "
|
121
|
-
f"{validation_map[field]}")
|
122
|
-
|
123
|
-
if 'scheduleType' not in item:
|
124
|
-
raise ValidationError('Missing required field scheduleType')
|
125
|
-
|
126
|
-
if item['scheduleType'] == 'crontab':
|
127
|
-
cron_validation_map = {
|
128
|
-
'minute': crontab_parser(60),
|
129
|
-
'hour': crontab_parser(24),
|
130
|
-
'dayOfWeek': crontab_parser(7),
|
131
|
-
'dayOfMonth': crontab_parser(31, 1),
|
132
|
-
'monthOfYear': crontab_parser(12, 1)
|
133
|
-
}
|
134
|
-
|
135
|
-
for field in cron_validation_map:
|
136
|
-
try:
|
137
|
-
cron_validation_map[field].parse(item['schedule'][field])
|
138
|
-
except:
|
139
|
-
raise ValidationError(
|
140
|
-
f"Invalid {field}: `{item['schedule'][field]}`. Must "
|
141
|
-
"be valid crontab pattern.")
|
142
|
-
|
143
|
-
return item
|
144
|
-
|
145
70
|
|
146
71
|
class BaseScheduleSchema(ExcludeUnknownSchema):
|
147
72
|
__schema_version__ = 0
|
148
73
|
|
149
|
-
name = fields.String(
|
150
|
-
|
151
|
-
|
74
|
+
name = fields.String(
|
75
|
+
required=True,
|
76
|
+
description="Name of schedule entry.",
|
77
|
+
example="My Scheduled Task",
|
78
|
+
)
|
152
79
|
schemaVersion = fields.Integer(required=True)
|
153
80
|
config = fields.Dict(required=True)
|
154
|
-
enabled = fields.Boolean(
|
155
|
-
|
156
|
-
|
157
|
-
# TODO Figure out where that wonky timestamp format is coming from and
|
158
|
-
# update this and in celery_beat.py.
|
159
|
-
lastRunAt = fields.DateTime(allow_none=True,
|
160
|
-
missing=None,
|
161
|
-
description="Timestamp of last run time.",
|
162
|
-
example="Tue, 18 Aug 2020 01:36:06 GMT",
|
163
|
-
data_key='lastRunAt')
|
164
|
-
totalRunCount = fields.Integer(
|
165
|
-
allow_none=True,
|
166
|
-
missing=0,
|
167
|
-
description="Count of number of executions.",
|
168
|
-
example=12345,
|
169
|
-
data_key='totalRunCount')
|
81
|
+
enabled = fields.Boolean(
|
82
|
+
required=True, description="Whether entry is enabled.", example=True
|
83
|
+
)
|
170
84
|
|
171
85
|
@classmethod
|
172
86
|
def get_by_version(cls, version):
|
@@ -189,7 +103,7 @@ class BaseScheduleSchema(ExcludeUnknownSchema):
|
|
189
103
|
|
190
104
|
@validates_schema
|
191
105
|
def validate_scheduled_tasks(self, data, **kwargs):
|
192
|
-
schema_version = data[
|
106
|
+
schema_version = data["schemaVersion"]
|
193
107
|
TaskSchema = BaseScheduleSchema.get_by_version(schema_version)
|
194
108
|
schema = TaskSchema()
|
195
109
|
schema.load(data)
|
@@ -201,7 +115,8 @@ class ScheduleSchemaV1(BaseScheduleSchema):
|
|
201
115
|
config = fields.Nested(
|
202
116
|
ScheduleConfigSchemaV1,
|
203
117
|
required=True,
|
204
|
-
description="Configuration information for this schedule."
|
118
|
+
description="Configuration information for this schedule.",
|
119
|
+
)
|
205
120
|
|
206
121
|
def validate_scheduled_tasks(self, data, **kwargs):
|
207
122
|
# We need to add this function to avoid infinite recursion since
|