scalable-pypeline 1.1.5__py2.py3-none-any.whl → 1.2.1__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pypeline/__init__.py +1 -1
- pypeline/celery.py +22 -86
- pypeline/constants.py +3 -3
- pypeline/flask/api/pipelines.py +16 -3
- pypeline/flask/api/utils.py +2 -3
- pypeline/pipeline/__init__.py +0 -0
- pypeline/pipeline/chained_task.py +70 -0
- pypeline/pipeline/generator.py +254 -0
- pypeline/pipeline_config_schema.py +46 -12
- pypeline/sermos_yaml.py +8 -303
- pypeline/utils/task_utils.py +22 -273
- {scalable_pypeline-1.1.5.dist-info → scalable_pypeline-1.2.1.dist-info}/METADATA +7 -5
- {scalable_pypeline-1.1.5.dist-info → scalable_pypeline-1.2.1.dist-info}/RECORD +17 -14
- {scalable_pypeline-1.1.5.dist-info → scalable_pypeline-1.2.1.dist-info}/LICENSE +0 -0
- {scalable_pypeline-1.1.5.dist-info → scalable_pypeline-1.2.1.dist-info}/WHEEL +0 -0
- {scalable_pypeline-1.1.5.dist-info → scalable_pypeline-1.2.1.dist-info}/entry_points.txt +0 -0
- {scalable_pypeline-1.1.5.dist-info → scalable_pypeline-1.2.1.dist-info}/top_level.txt +0 -0
pypeline/utils/task_utils.py
CHANGED
@@ -2,21 +2,20 @@
 """
 import os
 import logging
+import typing
 import uuid
 from typing import List, Any, Union
 from networkx.classes.digraph import DiGraph
-from celery import
+from celery import signature
 
 from pypeline.constants import DEFAULT_TASK_TTL, \
-
-
-
-
-from pypeline.utils.graph_utils import get_execution_graph, get_chainable_tasks
-from pypeline.utils.config_utils import retrieve_latest_pipeline_config, \
-    load_json_config_from_redis, set_json_config_to_redis
+    PIPELINE_RUN_WRAPPER_CACHE_KEY, DEFAULT_RESULT_TTL, \
+    PIPELINE_RESULT_CACHE_KEY
+from pypeline.utils.graph_utils import get_execution_graph
+from pypeline.utils.config_utils import load_json_config_from_redis, set_json_config_to_redis
 from pypeline.pipeline_config_schema import PipelineConfigValidator
 
+
 logger = logging.getLogger(__name__)
 WORKER_NAME = os.environ.get('WORKER_NAME', None)
 
@@ -37,8 +36,7 @@ def get_service_config_for_worker(sermos_config: dict,
 
     service_config = sermos_config.get('serviceConfig', [])
    for service in service_config:
-        if service[
-                'name'] == worker_name:
+        if service['name'] == worker_name:
            return service
 
    raise ValueError('Could not find a service config for worker '
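For context on the hunk above: get_service_config_for_worker scans the serviceConfig list for an entry whose 'name' matches the current worker. A minimal sketch of the assumed structure; only the 'name' lookup appears in this diff, so every other field here is hypothetical:

# Hypothetical minimal sermos_config illustrating the lookup above.
sermos_config = {
    'serviceConfig': [
        {'name': 'pipeline-worker', 'queue': 'default-queue'},  # fields beyond
    ]                                                           # 'name' are assumed
}

worker_name = 'pipeline-worker'
service = next(
    (s for s in sermos_config.get('serviceConfig', [])
     if s['name'] == worker_name),
    None)  # the real helper raises ValueError instead of returning None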
@@ -81,31 +79,31 @@ def get_task_signature(task_path: str,
     # If we still have None or 'default', set the default queue!
     if max_ttl in (None, 'default'):
         max_ttl = DEFAULT_TASK_TTL
-
+    task_id = str(uuid.uuid4())
     kwargs = {
         'event': {
             'access_key': access_key,
             'pipeline_id': pipeline_id,
-            'execution_id': execution_id
+            'execution_id': execution_id,
+            'task_id': task_id
         }
     }
     if custom_event_data is not None:
         kwargs['event'] = {**kwargs['event'], **custom_event_data}
 
-    # TODO where do we inject the 'event' data from sermos yaml schema?
-
     sig = signature(
         task_path,
         args=(),
         kwargs=kwargs,
         immutable=immutable,
-        task_id=
+        task_id=task_id,
         options={
             'queue': queue,
             'expires': 86400,  # Expire after 1 day. TODO make tunable.
             'soft_time_limit': max_ttl,
             'time_limit': max_ttl + 10,  # Add 10s buffer for cleanup
-        })
+        }
+    )
     return sig
 
 
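The practical effect of this hunk: 1.2.1 mints the Celery task id before building the signature and mirrors it into the event payload, so the id a task runs under can be correlated with the pipeline's cached status. A hedged usage sketch; the task path and ids are hypothetical, and only the four arguments the package itself uses elsewhere are passed:

from pypeline.utils.task_utils import get_task_signature

# Hypothetical task path and pipeline/execution ids, for illustration only.
sig = get_task_signature(task_path='my_app.tasks.extract',
                         queue='default-queue',
                         pipeline_id='demo-pipeline',
                         execution_id='7f3a9a1c-0000-0000-0000-000000000000')

# As of 1.2.1 the event payload carries the pre-minted task id, matching
# the id Celery will assign when the signature is applied.
task_id = sig.kwargs['event']['task_id']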
@@ -127,6 +125,7 @@ class PipelineRunWrapper:
     """
     pipeline_id: str = None
     pipeline_config: dict = None  # Pipeline configuration in dictionary format
+    celery_task_status: dict = None  # This tracks the state of tasks within the pipeline
     dag_config: dict = None
     execution_id: str = None
     current_event: dict = None  # For single task when from_event(). NOT cached.
@@ -187,7 +186,7 @@ class PipelineRunWrapper:
         during the .load() stage.
         """
         return ('pipeline_config', 'max_ttl', 'max_retry', 'retry_count',
-                'chain_payload', 'pipeline_id')
+                'chain_payload', 'pipeline_id', 'celery_task_status')
 
     def _load_from_cache(self, is_retry=False):
         """ Attempt to load this PipelineRunWrapper from cache.
@@ -215,6 +214,9 @@ class PipelineRunWrapper:
 
         return
 
+    def get_task_celery_status(self, task_id: type[uuid.uuid4()]) -> typing.Union[dict, None]:
+        return next(filter(lambda task: task["task_id"] == task_id, self.celery_task_status), None)
+
     def save_to_cache(self):
         """ Save current state of PipelineRunWrapper to cache, json serialized.
             Re-set the key's TTL
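The new get_task_celery_status helper implies that celery_task_status is a list of per-task dicts keyed by 'task_id' (despite the dict annotation on the class attribute). A standalone illustration of the lookup pattern; any field beyond 'task_id' is an assumption:

# Assumed shape: a list of dicts, each carrying the 'task_id' from the
# event payload; 'status' is a hypothetical field for illustration.
celery_task_status = [
    {'task_id': 'uuid-a', 'status': 'SUCCESS'},
    {'task_id': 'uuid-b', 'status': 'PENDING'},
]

def get_task_celery_status(task_id):
    # next(..., None) yields the first matching dict, or None when absent
    return next(filter(lambda task: task['task_id'] == task_id,
                       celery_task_status), None)

assert get_task_celery_status('uuid-b')['status'] == 'PENDING'
assert get_task_celery_status('missing') is None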
@@ -277,7 +279,8 @@ class PipelineRunWrapper:
         # object purely for convenience and to provide logical defaults.
         for key in self._cachable_keys:
             if key in ('pipeline_config', 'pipeline_id', 'max_retry',
-                       '
+                       'max_retry', 'max_ttl', 'retry_count',
+                       'chain_payload', 'celery_task_status'):
                 continue
             setattr(self, key, self.pipeline_config[key])
 
@@ -453,259 +456,6 @@ class PipelineResult:
     }
 
 
-class PipelineGenerator(object):
-    """ Allows an API endpoint to generate a functional pipeline based on the
-    requested pipeline id. Allows API to then issue the tasks asynchronously
-    to initiate the pipeline. Thereafter, celery will monitor status and
-    handle success/failure modes so the API web worker can return
-    immediately.
-
-    The primary purpose is to unpack the pipeline config, create the
-    requisite cached entities to track pipeline progress, and apply the
-    chained pipeline tasks asynchronously so Celery can take over.
-
-    Usage:
-        gen = PipelineGenerator(pipeline_id)
-        chain = gen.generate_chain()
-        chain.on_error(custom_error_task.s())  # Optional add error handling
-        chain.delay()
-    """
-    def __init__(self,
-                 pipeline_id: str,
-                 access_key: str = None,
-                 execution_id: str = None,
-                 queue: str = None,
-                 default_task_ttl: int = None,
-                 regulator_queue: str = None,
-                 regulator_task: str = None,
-                 success_queue: str = None,
-                 success_task: str = None,
-                 retry_task: str = None,
-                 add_retry: bool = True,
-                 default_max_retry: int = None,
-                 chain_payload: dict = None):
-        super().__init__()
-        self.pipeline_id = pipeline_id
-        self.access_key = access_key
-
-        pipeline_config_api_resp = retrieve_latest_pipeline_config(
-            pipeline_id=self.pipeline_id, access_key=self.access_key)
-
-        if pipeline_config_api_resp is None:
-            raise ValueError("Unable to load Pipeline Configuration for "
-                             f"pipeline id: {self.pipeline_id} ...")
-
-        # The only part of the API response used for any 'pipeline config'
-        # is the `config` key. The API nests it under `config` to preserve
-        # ability to add additional detail at a later date.
-        self.pipeline_config = pipeline_config_api_resp.get('config', {})
-        schema_version = pipeline_config_api_resp.get('schemaVersion')
-        PipelineConfigValidator(config_dict=self.pipeline_config,
-                                schema_version=schema_version)
-
-        self.execution_id = execution_id  # UUID string
-        self.good_to_go = False  # Indicates initialization/loading success
-        self.loading_message = None  # Allows access to success/error messages
-        self.is_retry = False if self.execution_id is None else True
-        self.add_retry = add_retry
-        self.retry_task = retry_task\
-            if retry_task is not None else DEFAULT_RETRY_TASK
-
-        self.default_max_retry = default_max_retry \
-            if default_max_retry is not None else \
-            self.pipeline_config['metadata'].get('maxRetry', DEFAULT_MAX_RETRY)
-
-        # Queue on which to place tasks by default and default TTL per task
-        # These can be overridden in PipelineConfig.config['taskDefinitions']
-        self.queue = queue \
-            if queue is not None \
-            else self.pipeline_config['metadata']['queue']
-        self.default_task_ttl = default_task_ttl \
-            if default_task_ttl is not None else \
-            self.pipeline_config['metadata'].get('maxTtl', DEFAULT_TASK_TTL)
-
-        # See docstring in self._get_regulator()
-        self.regulator_queue = regulator_queue \
-            if regulator_queue is not None \
-            else self.pipeline_config['metadata']['queue']
-        self.regulator_task = regulator_task\
-            if regulator_task is not None else DEFAULT_REGULATOR_TASK
-
-        # See docstring in self._get_success_task()
-        self.success_queue = success_queue \
-            if success_queue is not None \
-            else self.pipeline_config['metadata']['queue']
-        self.success_task = success_task\
-            if success_task is not None else DEFAULT_SUCCESS_TASK
-
-        # Optional data to pass to each step in chain
-        self.chain_payload = chain_payload\
-            if chain_payload is not None else {}
-
-        self.pipeline_wrapper = None  # Allows access to the PipelineRunWrapper
-        self.chain = None  # Must be intentionally built with generate_chain()
-
-        try:
-            # Generate our wrapper for this pipeline_id / execution_id
-            self.pipeline_wrapper = PipelineRunWrapper(
-                pipeline_id=self.pipeline_id,
-                pipeline_config=self.pipeline_config,
-                execution_id=self.execution_id,
-                max_ttl=self.default_task_ttl,
-                max_retry=self.default_max_retry,
-                chain_payload=self.chain_payload)
-
-            # Loads pipeline config from remote or cache if it's already there
-            # `is_retry` will be True for any PipelineGenerator instantiated
-            # with an execution_id. This flag helps the wrapper increment the
-            # retry count and determine if this should be deadlettered.
-            # This step also saves the valid/initialized run wrapper to cache.
-            self.pipeline_wrapper.load(is_retry=self.is_retry)
-
-            # Set all variables that were established from the run wrapper
-            # initialization. Notably, default_task_ttl can be overloaded
-            # if the pipeline config has an explicit maxTtl set in metadata.
-            self.good_to_go = self.pipeline_wrapper.good_to_go
-            self.loading_message = self.pipeline_wrapper.loading_message
-            self.execution_id = self.pipeline_wrapper.execution_id
-
-        except Exception as e:
-            fail_msg = "Failed to load Pipeline for id {} ... {}".format(
-                self.pipeline_id, e)
-            self.loading_message = fail_msg
-            logger.error(fail_msg)
-            raise e
-
-    def _get_regulator(self):
-        """ Create a chain regulator celery task signature.
-
-        For a chain(), if each element is a group() then celery does not
-        properly adhere to the chain elements occurring sequentially. If you
-        insert a task that is not a group() in between, though, then the
-        chain operates as expected.
-        """
-        return signature(self.regulator_task,
-                         queue=self.regulator_queue,
-                         immutable=True)
-
-    def _get_success_task(self):
-        """ A final 'success' task that's added to the end of every pipeline.
-
-        This stores the 'success' state in the cached result. Users can
-        set other values by using TaskRunner().save_result()
-        """
-        return get_task_signature(task_path=self.success_task,
-                                  queue=self.success_queue,
-                                  pipeline_id=self.pipeline_id,
-                                  execution_id=self.execution_id)
-
-    def _get_retry_task(self):
-        """ The retry task will re-invoke a chain.
-        """
-        return get_task_signature(task_path=self.retry_task,
-                                  queue=self.queue,
-                                  access_key=self.access_key,
-                                  pipeline_id=self.pipeline_id,
-                                  execution_id=self.execution_id,
-                                  max_ttl=DEFAULT_RETRY_TASK_MAX_TTL,
-                                  custom_event_data={
-                                      'queue': self.queue,
-                                      'default_task_ttl':
-                                      self.default_task_ttl,
-                                      'add_retry': self.add_retry,
-                                      'chain_payload': self.chain_payload
-                                  })
-
-    def _get_signature(self, node):
-        """ Create a celery task signature based on a graph node.
-        """
-        metadata = self.pipeline_config['metadata']
-        node_config = self.pipeline_config['taskDefinitions'][node]
-
-        # Node config takes precedence, pipeline metadata as default
-        queue = node_config.get('queue', metadata['queue'])
-        max_ttl = node_config.get('maxTtl', metadata.get('maxTtl', None))
-
-        # Ensures task signatures include requisite information to retrieve
-        # PipelineRunWrapper from cache using the pipeline id, and execution id.
-        # We set immutable=True to ensure each client task can be defined
-        # with this specific signature (event)
-        # http://docs.celeryproject.org/en/master/userguide/canvas.html#immutability
-        return get_task_signature(task_path=node_config.get('handler'),
-                                  queue=queue,
-                                  access_key=self.access_key,
-                                  pipeline_id=self.pipeline_id,
-                                  execution_id=self.execution_id,
-                                  max_ttl=max_ttl,
-                                  immutable=True,
-                                  task_config=node_config)
-
-    def generate_chain(self):
-        """ Generate the full pipeline chain.
-        """
-        logger.debug(f'Starting Pipeline {self.pipeline_id}')
-
-        if not self.good_to_go:
-            logger.info("Chain deemed to be not good to go.")
-            if self.loading_message is None:
-                self.loading_message = CHAIN_FAILURE_MSG
-            return None
-
-        try:
-            # Create the task chain such that all concurrent tasks are grouped
-            # and all high level node groups are run serially
-            G = self.pipeline_wrapper.execution_graph
-
-            total_tasks = 0
-            pipeline_chain = []
-            chainable_tasks = get_chainable_tasks(G, None, [])
-
-            # Current chord+chain solution based on
-            # https://stackoverflow.com/questions/15123772/celery-chaining-groups-and-subtasks-out-of-order-execution
-            # Look also at last comment from Nov 7, 2017 here
-            # https://github.com/celery/celery/issues/3597
-            # Big outstanding bug in Celery related to failures in chords that
-            # results in really nasty log output. See
-            # https://github.com/celery/celery/issues/4834
-            for i, node_group in enumerate(chainable_tasks):
-                total_tasks += len(node_group)
-                this_group = []
-                for node in node_group:
-                    node_signature = self._get_signature(node)
-                    this_group.append(node_signature)
-
-                if len(this_group) <= 1:
-                    this_group.append(self._get_regulator())
-
-                the_chord = chord(header=this_group,
-                                  body=self._get_regulator())
-
-                pipeline_chain.append(the_chord)
-
-            # Add a 'finished/success' task to the end of all pipelines
-            pipeline_chain.append(
-                chord(header=self._get_success_task(),
-                      body=self._get_regulator()))
-
-            the_chain = chain(*pipeline_chain)
-
-            # Add retry
-            if self.add_retry:
-                the_chain.link_error(self._get_retry_task())
-
-            self.loading_message = CHAIN_SUCCESS_MSG
-
-            self.chain = the_chain
-        except Exception as e:
-            self.loading_message = CHAIN_FAILURE_MSG + " {}".format(e)
-            logger.exception(e)
-            the_chain = None
-
-        self.chain = the_chain
-
-        return the_chain
-
-
 class TaskRunner:
     """ Run tasks in Sermos
     """
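The removed class (superseded in 1.2.1 by pypeline/pipeline/generator.py and chained_task.py, per the file list above) documents a chord-plus-regulator workaround: chaining bare group()s does not reliably execute layers in order, so each layer of parallel tasks is wrapped in a chord whose body is a no-op "regulator". A minimal sketch of that pattern, with hypothetical task paths that are not part of pypeline:

from celery import chain, chord, signature

def regulator():
    # A fresh no-op signature per call, mirroring _get_regulator();
    # 'my_app.tasks.noop' is a hypothetical registered task.
    return signature('my_app.tasks.noop', immutable=True)

# Layer 1: two tasks run in parallel; the regulator body fires once both finish.
layer_1 = chord(header=[signature('my_app.tasks.step_a', immutable=True),
                        signature('my_app.tasks.step_b', immutable=True)],
                body=regulator())

# Layer 2 starts only after layer 1's chord completes.
layer_2 = chord(header=[signature('my_app.tasks.step_c', immutable=True)],
                body=regulator())

pipeline = chain(layer_1, layer_2)
# pipeline.delay() would run step_a/step_b concurrently, then step_c.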
@@ -737,7 +487,6 @@ class TaskRunner:
                 If not specified, system default is used.
         """
         try:
-            # TODO consider whether to add access key/deployment id here
            worker = get_task_signature(task_path=task_path,
                                        queue=queue,
                                        max_ttl=max_ttl,
@@ -789,7 +538,7 @@ class TaskRunner:
                 grouping_key:
                 task_payload_list[idx:idx + max_per_task]
             }
-
+
             worker = get_task_signature(
                 task_path=task_path,
                 queue=queue,
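The slice in this hunk batches payloads so each task signature receives at most max_per_task entries under its grouping key. The arithmetic in isolation, with hypothetical payload values:

# Hypothetical payloads demonstrating the idx:idx + max_per_task slicing.
task_payload_list = ['p0', 'p1', 'p2', 'p3', 'p4', 'p5', 'p6']
max_per_task = 3

batches = [task_payload_list[idx:idx + max_per_task]
           for idx in range(0, len(task_payload_list), max_per_task)]
assert batches == [['p0', 'p1', 'p2'], ['p3', 'p4', 'p5'], ['p6']]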
{scalable_pypeline-1.1.5.dist-info → scalable_pypeline-1.2.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: scalable-pypeline
-Version: 1.1.5
+Version: 1.2.1
 Summary: PypeLine - Python pipelines for the Real World
 Home-page: https://gitlab.com/bravos2/pypeline
 Author: Bravos Power Corporation
@@ -27,19 +27,21 @@ Requires-Dist: awscli (>=1.11) ; extra == 'dev'
 Requires-Dist: pylint (>=2.5.3) ; extra == 'dev'
 Requires-Dist: pip-licenses ; extra == 'dev'
 Provides-Extra: flask
-Requires-Dist:
-Requires-Dist:
+Requires-Dist: Werkzeug (==2.0.3) ; extra == 'flask'
+Requires-Dist: Flask (<2,>=1.1.2) ; extra == 'flask'
+Requires-Dist: flask-smorest (<0.29,>=0.23.0) ; extra == 'flask'
+Requires-Dist: Jinja2 (==3.0.3) ; extra == 'flask'
 Provides-Extra: test
 Requires-Dist: pytest-cov (<3,>=2.6.1) ; extra == 'test'
 Requires-Dist: tox (<4,>=3.14.1) ; extra == 'test'
 Requires-Dist: mock (<2,>=1) ; extra == 'test'
-Requires-Dist: moto (
+Requires-Dist: moto (>=1.3.16) ; extra == 'test'
 Requires-Dist: responses (<0.11,>=0.10.16) ; extra == 'test'
 Requires-Dist: fakeredis (<3,>=2.10.3) ; extra == 'test'
 Requires-Dist: importlib-metadata (<5,>=4.12) ; extra == 'test'
 Provides-Extra: web
 Requires-Dist: gunicorn ; extra == 'web'
-Requires-Dist: gevent ; extra == 'web'
+Requires-Dist: gevent (<22,>=21.12.0) ; extra == 'web'
 Provides-Extra: workers
 Requires-Dist: celery[redis] (<6,>=5.1.2) ; extra == 'workers'
 Requires-Dist: networkx (>=2.4) ; extra == 'workers'
{scalable_pypeline-1.1.5.dist-info → scalable_pypeline-1.2.1.dist-info}/RECORD
CHANGED
@@ -1,30 +1,33 @@
-pypeline/__init__.py,sha256=
-pypeline/celery.py,sha256=
+pypeline/__init__.py,sha256=kmfHpHo2YhSrigutlRQs8qXIR8nEm-D-c8lScQQN64M,22
+pypeline/celery.py,sha256=rbMCQQqevhdcyFYJSyoQT2b_NAbL3Inc9S789AtzN_w,9038
 pypeline/celery_beat.py,sha256=KNmEpZEFOadVTwtRJtgX0AirSSNq65PFLJer4Hsq-xw,10759
-pypeline/constants.py,sha256=
+pypeline/constants.py,sha256=VxCIBXpEsXiLxDN3PV2pxNd3bx6Gfpk3g1nTI3XMxUI,4185
 pypeline/extensions.py,sha256=_tv62NtJDrwQUM9mablHUmYyBf8TXlIZmf1LM1rlHtc,599
 pypeline/generators.py,sha256=kRcJCohOMMljar6_nAaICTdNX1cNGyfkSvtl52Pd_hc,6776
 pypeline/logging_config.py,sha256=QbUbSqLtxUOlqLkY0sWxVe9VGFKjghZ-MrlduZmtgLw,5503
-pypeline/pipeline_config_schema.py,sha256=
+pypeline/pipeline_config_schema.py,sha256=wmUCpCDOXPj5-qTNd6oHC1XszfQfXZLr9_DYmjHzK3o,8691
 pypeline/schedule_config_schema.py,sha256=rGN-tGC9fj-Nc_CVasBUQAOzn2OvYyj0V_S6Az3sq6I,7385
-pypeline/sermos_yaml.py,sha256=
+pypeline/sermos_yaml.py,sha256=A0rFabU0ib9-IY64HZuYvB7sL06sTl84myTYmNSLK3g,17336
 pypeline/flask/__init__.py,sha256=_kh1kQcpuU7yNYH36AtMsaCMYru5l43lYsRBe0lTgKE,523
 pypeline/flask/decorators.py,sha256=zH9OB6DqxLUDSDBMs1Nd3Pt3qYdsAOoD8RL2MliRyRs,2298
 pypeline/flask/flask_sermos.py,sha256=D-mTlJENm8MCYgNmCFwHeZb43XoUBsTO8ER1YqnSL3M,5791
 pypeline/flask/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pypeline/flask/api/pipelines.py,sha256=
+pypeline/flask/api/pipelines.py,sha256=l12HnLC6GQfF6cd5gEnQjIgzO7xajZtidJ04JQMIwn8,8821
 pypeline/flask/api/schedules.py,sha256=AHiGYzZL__1sq5KZho75VwPWCn9Pz_ooNM179uKuQ7Q,2314
-pypeline/flask/api/utils.py,sha256=
+pypeline/flask/api/utils.py,sha256=bkUBZIbJHaG9nYahHI2J0NqoqoSlQ6j4aArY9q2ggqE,1302
+pypeline/pipeline/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pypeline/pipeline/chained_task.py,sha256=G7pakmdUZK29E9X3_I5I2cTxTcEuRlXi_BeDAMJt8Qg,2961
+pypeline/pipeline/generator.py,sha256=Nt65IIs5jc2XCQqWUaOoJIdpaEvQvxJgQSnj9DuYX3s,11509
 pypeline/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pypeline/utils/config_utils.py,sha256=SwpYy6Xk2apDK2GEb61ctHJh7q8IUo3bHOegE_xsDDk,13029
 pypeline/utils/graph_utils.py,sha256=prs0ATCYCxWGWx8s9_gg3PtwJA1DORIIwUlMfKRjHJM,4642
 pypeline/utils/module_utils.py,sha256=X4O2TdBvvoboK6PxzK18UuKbod9l2BfTIrALk_dI0tM,4166
-pypeline/utils/task_utils.py,sha256=
+pypeline/utils/task_utils.py,sha256=p66d4xcuPuBsqtcnaXgqfNXU3-ZSe4lvN11MkJ5_8XY,22222
 tests/fixtures/__init__.py,sha256=vHbv5BMJXGb6XX764sChg5Ax7fixPuijiYNBuxgVTUQ,41
 tests/fixtures/s3_fixtures.py,sha256=jbsp0WeIibLtjdV1nPSkEuJf1n6e9O7LO-kNFkMqylo,1694
-scalable_pypeline-1.1.
-scalable_pypeline-1.1.
-scalable_pypeline-1.1.
-scalable_pypeline-1.1.
-scalable_pypeline-1.1.
-scalable_pypeline-1.1.
+scalable_pypeline-1.2.1.dist-info/LICENSE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
+scalable_pypeline-1.2.1.dist-info/METADATA,sha256=kczf4M1kyCRZFFbeV4jDKltmHCnmRAMwCjyXxKN8EoY,6174
+scalable_pypeline-1.2.1.dist-info/WHEEL,sha256=bb2Ot9scclHKMOLDEHY6B2sicWOgugjFKaJsT7vwMQo,110
+scalable_pypeline-1.2.1.dist-info/entry_points.txt,sha256=ZDh7vdDaHrZD0RwUCiZidXg5-d2fBOYcEo7E6CL4g0U,56
+scalable_pypeline-1.2.1.dist-info/top_level.txt,sha256=C7dpkEOc_-nnsAQb28BfQknjD6XHRyS9ZrvVeoIbV7s,15
+scalable_pypeline-1.2.1.dist-info/RECORD,,
{scalable_pypeline-1.1.5.dist-info → scalable_pypeline-1.2.1.dist-info}/LICENSE
File without changes
{scalable_pypeline-1.1.5.dist-info → scalable_pypeline-1.2.1.dist-info}/WHEEL
File without changes
{scalable_pypeline-1.1.5.dist-info → scalable_pypeline-1.2.1.dist-info}/entry_points.txt
File without changes
{scalable_pypeline-1.1.5.dist-info → scalable_pypeline-1.2.1.dist-info}/top_level.txt
File without changes