scalable-pypeline 2.0.10__py2.py3-none-any.whl → 2.1.0__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pypeline/__init__.py +1 -1
- pypeline/barrier.py +3 -0
- pypeline/dramatiq.py +26 -154
- pypeline/flask/api/pipelines.py +60 -4
- pypeline/flask/api/schedules.py +1 -3
- pypeline/pipeline_config_schema.py +91 -3
- pypeline/pipeline_settings_schema.py +334 -0
- pypeline/pipelines/__init__.py +0 -0
- pypeline/pipelines/composition/__init__.py +0 -0
- pypeline/pipelines/composition/pypeline_composition.py +188 -0
- pypeline/pipelines/factory.py +107 -0
- pypeline/pipelines/middleware/__init__.py +0 -0
- pypeline/pipelines/middleware/pypeline_middleware.py +188 -0
- pypeline/utils/dramatiq_utils.py +126 -0
- pypeline/utils/module_utils.py +27 -2
- pypeline/utils/pipeline_utils.py +22 -37
- pypeline/utils/schema_utils.py +24 -0
- {scalable_pypeline-2.0.10.dist-info → scalable_pypeline-2.1.0.dist-info}/METADATA +1 -1
- scalable_pypeline-2.1.0.dist-info/RECORD +36 -0
- scalable_pypeline-2.0.10.dist-info/RECORD +0 -27
- /pypeline/{composition.py → pipelines/composition/parallel_pipeline_composition.py} +0 -0
- /pypeline/{middleware.py → pipelines/middleware/parallel_pipeline_middleware.py} +0 -0
- {scalable_pypeline-2.0.10.dist-info → scalable_pypeline-2.1.0.dist-info}/LICENSE +0 -0
- {scalable_pypeline-2.0.10.dist-info → scalable_pypeline-2.1.0.dist-info}/WHEEL +0 -0
- {scalable_pypeline-2.0.10.dist-info → scalable_pypeline-2.1.0.dist-info}/entry_points.txt +0 -0
- {scalable_pypeline-2.0.10.dist-info → scalable_pypeline-2.1.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,188 @@
|
|
1
|
+
from copy import copy
|
2
|
+
|
3
|
+
import networkx as nx
|
4
|
+
from dramatiq import Middleware
|
5
|
+
|
6
|
+
from pypeline.barrier import LockingParallelBarrier
|
7
|
+
from pypeline.constants import PARALLEL_PIPELINE_CALLBACK_BARRIER_TTL
|
8
|
+
from pypeline.utils.module_utils import get_callable
|
9
|
+
from pypeline.utils.pipeline_utils import get_execution_graph
|
10
|
+
from pypeline.utils.dramatiq_utils import register_lazy_actor
|
11
|
+
|
12
|
+
|
13
|
+
class PypelineMiddleware(Middleware):
    """Dramatiq middleware that advances a pypeline DAG after each task.

    After a message is processed successfully, this middleware decrements the
    finished task's Redis-backed barrier, then inspects the pipeline's DAG to
    decide which child tasks have all ancestors complete and enqueues them.
    State (pipeline config, execution ids, scenarios) travels on
    ``message.options``.
    """

    def __init__(self, redis_url):
        # Redis connection string used for every LockingParallelBarrier below.
        self.redis_url = redis_url

    def after_process_message(self, broker, message, *, result=None, exception=None):
        """Enqueue any child tasks whose ancestors have all finished.

        No-ops when the message failed (``exception`` set) or is not part of a
        pypeline pipeline (no ``pipeline`` key in its options).
        """

        if exception is not None:
            return

        if "pipeline" not in message.options:
            return

        pipeline = message.options["pipeline"]
        pipeline_config = pipeline["config"]
        task_replacements = message.options["task_replacements"]
        execution_id = message.options["execution_id"]
        task_definitions = pipeline_config["taskDefinitions"]
        task_name = message.options["task_name"]
        # Barrier keys are namespaced per execution; the lock is shared per
        # root execution so concurrent workers serialize barrier updates.
        task_key = f"{execution_id}-{task_name}"

        # Signal to other jobs that current job is finished
        locking_parallel_barrier = LockingParallelBarrier(
            self.redis_url,
            task_key=task_key,
            lock_key=f"{message.options['root_execution_id']}-lock",
        )
        try:
            locking_parallel_barrier.acquire_lock(
                timeout=PARALLEL_PIPELINE_CALLBACK_BARRIER_TTL
            )
            _ = locking_parallel_barrier.decrement_task_count()
        finally:
            locking_parallel_barrier.release_lock()

        graph = get_execution_graph(pipeline_config)
        children_tasks = pipeline_config["dagAdjacency"].get(task_name, [])

        messages = []
        for child in children_tasks:
            # Sorted for deterministic iteration order across workers.
            child_ancestors = sorted(nx.ancestors(graph, child))

            ancestor_tasks_complete = True

            for ancestor in child_ancestors:
                # First check the ancestor's barrier under this execution id.
                task_key = f"{execution_id}-{ancestor}"

                locking_parallel_barrier = LockingParallelBarrier(
                    self.redis_url,
                    task_key=task_key,
                    lock_key=f"{message.options['root_execution_id']}-lock",
                )
                try:
                    locking_parallel_barrier.acquire_lock(
                        timeout=PARALLEL_PIPELINE_CALLBACK_BARRIER_TTL
                    )

                    if locking_parallel_barrier.task_exists():
                        remaining_tasks = locking_parallel_barrier.get_task_count()
                    else:
                        # None: no barrier under this execution id — fall back
                        # to the root execution's barrier below.
                        remaining_tasks = None
                finally:
                    locking_parallel_barrier.release_lock()

                if not remaining_tasks:
                    # Fall back to the root execution's barrier for this
                    # ancestor (e.g. the task ran outside this scenario).
                    task_key = f"{message.options['root_execution_id']}-{ancestor}"

                    locking_parallel_barrier = LockingParallelBarrier(
                        self.redis_url,
                        task_key=task_key,
                        lock_key=f"{message.options['root_execution_id']}-lock",
                    )
                    try:
                        locking_parallel_barrier.acquire_lock(
                            timeout=PARALLEL_PIPELINE_CALLBACK_BARRIER_TTL
                        )

                        if locking_parallel_barrier.task_exists():
                            remaining_tasks = locking_parallel_barrier.get_task_count()
                        else:
                            # Neither execution nor root barrier exists —
                            # pipeline state is inconsistent; abort loudly.
                            raise Exception(
                                f"Parent task {ancestor} barrier lock not found"
                            )
                    finally:
                        locking_parallel_barrier.release_lock()
                if remaining_tasks >= 1:
                    # At least one ancestor still running; child not ready.
                    ancestor_tasks_complete = False
                    break

            # If the child's ancestor tasks aren't complete move onto the next child to check
            # NOTE(review): the comment above says "move onto the next child"
            # but this `break` exits the children loop entirely, skipping the
            # remaining children — confirm whether `continue` was intended.
            if not ancestor_tasks_complete:
                break

            if message.options["root_execution_id"] == message.options["execution_id"]:
                # Only the root execution fans out scenario runs.
                for scenario in message.options["scenarios"]:
                    child_predecessors = list(graph.predecessors(child))
                    # Enqueue the child under a scenario when the scenario
                    # starts at this child (current task feeds it but is not
                    # itself part of the scenario's task set).
                    if (
                        child in scenario["tasksToRunInScenario"]
                        and task_name in child_predecessors
                        and task_name not in scenario["tasksToRunInScenario"]
                    ):
                        task_key = f"{scenario['execution_id']}-{child}"
                        locking_parallel_barrier = LockingParallelBarrier(
                            self.redis_url,
                            task_key=task_key,
                            lock_key=f"{message.options['root_execution_id']}-lock",
                        )
                        locking_parallel_barrier.set_task_count(1)
                        # Pick the handler variant selected for this task
                        # (index 0 when no replacement was requested).
                        handler = task_definitions[child]["handlers"][
                            task_replacements.get(child, 0)
                        ]

                        lazy_actor = register_lazy_actor(
                            broker,
                            get_callable(handler),
                            pipeline_config["metadata"],
                        )
                        scenario_message = lazy_actor.message()
                        scenario_message.options["pipeline"] = pipeline
                        scenario_message.options["task_replacements"] = (
                            task_replacements
                        )
                        scenario_message.options["execution_id"] = scenario[
                            "execution_id"
                        ]

                        scenario_message.options["task_name"] = child
                        scenario_message.options["root_execution_id"] = message.options[
                            "root_execution_id"
                        ]
                        scenario_message.options["scenarios"] = message.options[
                            "scenarios"
                        ]
                        if "settings" in message.kwargs:
                            # Shallow-copy so the scenario's execution_id does
                            # not leak into the parent message's settings.
                            scenario_message.kwargs["settings"] = copy(
                                message.kwargs["settings"]
                            )
                            scenario_message.kwargs["settings"]["execution_id"] = (
                                scenario["execution_id"]
                            )
                        messages.append(scenario_message)
            # Always enqueue the child under the current execution id as well.
            task_key = f"{execution_id}-{child}"
            locking_parallel_barrier = LockingParallelBarrier(
                self.redis_url,
                task_key=task_key,
                lock_key=f"{message.options['root_execution_id']}-lock",
            )
            locking_parallel_barrier.set_task_count(1)
            handler = task_definitions[child]["handlers"][
                task_replacements.get(child, 0)
            ]
            lazy_actor = register_lazy_actor(
                broker,
                get_callable(handler),
                pipeline_config["metadata"],
            )

            child_message = lazy_actor.message()
            child_message.options["pipeline"] = pipeline
            child_message.options["task_replacements"] = task_replacements
            child_message.options["execution_id"] = execution_id
            child_message.options["task_name"] = child
            child_message.options["root_execution_id"] = message.options[
                "root_execution_id"
            ]
            child_message.options["scenarios"] = message.options["scenarios"]
            if "settings" in message.kwargs:
                # NOTE(review): unlike the scenario branch above, this shares
                # the settings dict with the parent message (no copy) before
                # overwriting execution_id — confirm this aliasing is intended.
                child_message.kwargs["settings"] = message.kwargs["settings"]
                child_message.kwargs["settings"]["execution_id"] = message.options[
                    "execution_id"
                ]

            messages.append(child_message)

        # Enqueue all ready children only after the full scan completes.
        for new_message in messages:
            broker.enqueue(new_message)
|
@@ -0,0 +1,126 @@
|
|
1
|
+
import os.path
|
2
|
+
import sys
|
3
|
+
import typing
|
4
|
+
from typing import Optional, Callable, Union, Awaitable
|
5
|
+
from functools import wraps
|
6
|
+
from typing import TYPE_CHECKING, TypeVar
|
7
|
+
from dramatiq import Broker, actor as register_actor
|
8
|
+
|
9
|
+
from pypeline.constants import (
|
10
|
+
DEFAULT_TASK_MAX_RETRY,
|
11
|
+
DEFAULT_TASK_MIN_BACKOFF,
|
12
|
+
MS_IN_SECONDS,
|
13
|
+
DEFAULT_TASK_MAX_BACKOFF,
|
14
|
+
DEFAULT_TASK_TTL,
|
15
|
+
DEFAULT_RESULT_TTL,
|
16
|
+
)
|
17
|
+
|
18
|
+
if TYPE_CHECKING:
|
19
|
+
from typing_extensions import ParamSpec
|
20
|
+
|
21
|
+
P = ParamSpec("P")
|
22
|
+
else:
|
23
|
+
P = TypeVar("P")
|
24
|
+
|
25
|
+
R = TypeVar("R")
|
26
|
+
|
27
|
+
|
28
|
+
def guess_code_directory(broker):
    """Infer the project directory from an actor registered on *broker*.

    Picks an arbitrary registered actor, takes the top-level package of the
    module defining its callable, and returns that package's directory.
    """
    some_actor = next(iter(broker.actors.values()))
    top_package, _, _ = some_actor.fn.__module__.partition(".")
    return os.path.dirname(sys.modules[top_package].__file__)
|
33
|
+
|
34
|
+
|
35
|
+
def list_managed_actors(broker, queues):
    """Return the broker's actors, optionally filtered by queue name.

    An empty *queues* collection means "no filter": every registered actor
    is returned.
    """
    wanted = set(queues)
    actors = broker.actors.values()
    if not wanted:
        return actors
    return [actor for actor in actors if actor.queue_name in wanted]
|
42
|
+
|
43
|
+
|
44
|
+
def register_lazy_actor(
    broker: Broker,
    fn: Optional[Callable[P, Union[Awaitable[R], R]]] = None,
    pipeline_meta: Optional[typing.Dict] = None,
    **kwargs,
) -> "LazyActor":
    """Build a :class:`LazyActor` around *fn*, register it on *broker*, return it.

    Actor options (queue, retries, backoff, time limit, result storage) are
    derived from *pipeline_meta* with pypeline defaults as fallback.

    Args:
        broker: Dramatiq broker to register the actor on.
        fn: Callable the actor will execute.
        pipeline_meta: Pipeline metadata dict supplying actor options;
            treated as empty when omitted.
        **kwargs: Extra actor options forwarded to ``dramatiq.actor``.

    Returns:
        The registered ``LazyActor`` instance (the original annotation said
        ``Type["LazyActor"]``, but an instance is what is returned).
    """
    # Fresh dict per call — avoids the shared mutable-default-argument pitfall
    # of the previous `pipeline_meta: typing.Dict = {}` signature.
    if pipeline_meta is None:
        pipeline_meta = {}
    kwargs["queue_name"] = pipeline_meta.get("queue", "default")
    kwargs["max_retries"] = pipeline_meta.get("maxRetry", DEFAULT_TASK_MAX_RETRY)
    # Convert from seconds to milliseconds
    kwargs["min_backoff"] = (
        pipeline_meta.get("retryBackoff", DEFAULT_TASK_MIN_BACKOFF) * MS_IN_SECONDS
    )
    kwargs["max_backoff"] = (
        pipeline_meta.get("retryBackoffMax", DEFAULT_TASK_MAX_BACKOFF) * MS_IN_SECONDS
    )
    kwargs["time_limit"] = pipeline_meta.get("maxTtl", DEFAULT_TASK_TTL) * MS_IN_SECONDS
    # Always store results for registered pipeline actors
    kwargs["store_results"] = pipeline_meta.get("store_results", True)
    if kwargs["store_results"]:
        kwargs["result_ttl"] = (
            pipeline_meta.get("result_ttl", DEFAULT_RESULT_TTL) * MS_IN_SECONDS
        )
    lazy_actor: LazyActor = LazyActor(fn, kwargs)
    lazy_actor.register(broker)
    return lazy_actor
|
69
|
+
|
70
|
+
|
71
|
+
def ensure_return_value(default_value=None):
    """Decorator factory: substitute *default_value* when the wrapped
    function returns ``None``.

    Any non-None result passes through untouched.
    """

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Run the wrapped callable, then swap in the fallback if needed.
            outcome = func(*args, **kwargs)
            return default_value if outcome is None else outcome

        return wrapper

    return decorator
|
86
|
+
|
87
|
+
|
88
|
+
class LazyActor(object):
    """Deferred wrapper around a dramatiq actor.

    Holds a plain callable plus its actor options until :meth:`register` is
    called, at which point the real dramatiq actor is created on the broker.
    Attribute access and the messaging helpers then proxy to that actor.
    """

    def __init__(self, fn, kw):
        self.fn = fn
        self.kw = kw
        self.actor = None

    def __call__(self, *a, **kw):
        # Calling the wrapper invokes the raw function, not the actor.
        return self.fn(*a, **kw)

    def __repr__(self):
        return "<{} {}.{}>".format(
            self.__class__.__name__,
            self.fn.__module__,
            self.fn.__name__,
        )

    def __getattr__(self, name):
        # Before registration there is no actor to proxy to.
        if not self.actor:
            raise AttributeError(name)
        return getattr(self.actor, name)

    def register(self, broker):
        # Wrap fn so the actor always produces a result value; actor name is
        # the callable's fully qualified dotted path.
        wrapped = ensure_return_value(default_value=True)(self.fn)
        self.actor = register_actor(
            actor_name=f"{self.fn.__module__}.{self.fn.__name__}",
            broker=broker,
            **self.kw,
        )(wrapped)

    # Regular actor API below: simple delegation to the registered actor.
    def send(self, *a, **kw):
        return self.actor.send(*a, **kw)

    def message(self, *a, **kw):
        return self.actor.message(*a, **kw)

    def send_with_options(self, *a, **kw):
        return self.actor.send_with_options(*a, **kw)
|
pypeline/utils/module_utils.py
CHANGED
@@ -1,11 +1,10 @@
|
|
1
1
|
""" Utilities for loading modules/callables based on strings.
|
2
2
|
"""
|
3
|
-
|
3
|
+
|
4
4
|
import re
|
5
5
|
import logging
|
6
6
|
import importlib
|
7
7
|
from typing import Callable
|
8
|
-
from pypeline.constants import API_ACCESS_KEY, PYPELINE_CLIENT_PKG_NAME
|
9
8
|
|
10
9
|
logger = logging.getLogger(__name__)
|
11
10
|
|
@@ -81,3 +80,29 @@ def match_prefix_suffix(string: str, prefix_p: str, suffix_p: str) -> bool:
|
|
81
80
|
if match_prefix(string, prefix_p) and match_suffix(string, suffix_p):
|
82
81
|
return True
|
83
82
|
return False
|
83
|
+
|
84
|
+
|
85
|
+
def get_module(resource_dot_path: str):
    """Import and return the module portion of a dotted resource path.

    e.g. ``package.subdir.feature_file.MyCallable`` imports and returns
    ``package.subdir.feature_file``.
    """
    parts = resource_dot_path.split(".")
    return importlib.import_module(".".join(parts[:-1]))
|
92
|
+
|
93
|
+
|
94
|
+
def get_callable_name(resource_dot_path: str) -> str:
    """Return the final component (the callable's name) of a dotted path.

    e.g. ``package.subdir.feature_file.MyCallable`` -> ``MyCallable``.
    """
    return resource_dot_path.rpartition(".")[2]
|
100
|
+
|
101
|
+
|
102
|
+
def get_callable(resource_dot_path: str) -> Callable:
    """Retrieve the actual handler callable named by a config string.

    e.g. ``package.subdir.feature_file.MyCallable`` imports the module part
    and returns the named attribute from it.
    """
    module_path, _, attr_name = resource_dot_path.rpartition(".")
    return getattr(importlib.import_module(module_path), attr_name)
|
pypeline/utils/pipeline_utils.py
CHANGED
@@ -1,10 +1,7 @@
|
|
1
1
|
import logging
|
2
2
|
import typing
|
3
3
|
import networkx as nx
|
4
|
-
|
5
|
-
from pypeline.composition import parallel_pipeline
|
6
|
-
from pypeline.dramatiq import LazyActor, get_callable, register_lazy_actor
|
7
|
-
from pypeline.utils.config_utils import retrieve_latest_pipeline_config
|
4
|
+
|
8
5
|
|
9
6
|
T = typing.TypeVar("T") # T can be any type
|
10
7
|
|
@@ -124,36 +121,24 @@ def topological_sort_with_parallelism(
|
|
124
121
|
return topological_sort_with_parallelism(graph, executable_nodes)
|
125
122
|
|
126
123
|
|
127
|
-
def
|
128
|
-
|
129
|
-
|
130
|
-
|
131
|
-
|
132
|
-
|
133
|
-
|
134
|
-
|
135
|
-
|
136
|
-
|
137
|
-
|
138
|
-
|
139
|
-
|
140
|
-
|
141
|
-
|
142
|
-
|
143
|
-
|
144
|
-
|
145
|
-
|
146
|
-
|
147
|
-
|
148
|
-
if args and not kwargs:
|
149
|
-
message_group.append(registered_actors[task].message(*args))
|
150
|
-
elif kwargs and not args:
|
151
|
-
message_group.append(registered_actors[task].message(**kwargs))
|
152
|
-
elif args and kwargs:
|
153
|
-
message_group.append(registered_actors[task].message(*args, **kwargs))
|
154
|
-
else:
|
155
|
-
message_group.append(registered_actors[task].message())
|
156
|
-
messages.append(message_group)
|
157
|
-
p = parallel_pipeline(messages)
|
158
|
-
|
159
|
-
return p
|
124
|
+
def plt_execution_tree(G):
    """Render a directed execution graph with matplotlib.

    Imports are function-local so networkx/matplotlib are only required when
    a caller actually asks for a plot. Blocks on ``plt.show()``.
    """
    import networkx as nx
    import matplotlib.pyplot as plt

    plt.figure(figsize=(8, 6))
    # Spring layout computes the node positions.
    layout = nx.spring_layout(G)
    nx.draw(
        G,
        layout,
        with_labels=True,
        node_color="lightblue",
        node_size=3000,
        edge_color="gray",
        arrowsize=20,
        font_size=12,
    )
    plt.title("Directed Graph Visualization")
    plt.show()
|
@@ -0,0 +1,24 @@
|
|
1
|
+
def get_clean_validation_messages(validation_error):
    """
    Extract and format clean validation error messages.

    Args:
        validation_error (ValidationError): The Marshmallow ValidationError instance.

    Returns:
        str: A formatted string with all validation error messages, one per
        line, with nested field paths joined by dots.
    """

    def flatten(error_map, prefix=""):
        # Walk the (possibly nested) error dict, building dotted field paths.
        lines = []
        for field, detail in error_map.items():
            path = f"{prefix}.{field}" if prefix else field
            if isinstance(detail, dict):
                lines.extend(flatten(detail, path))
            else:
                lines.append(f"{path}: {', '.join(detail)}")
        return lines

    return "\n".join(flatten(validation_error.messages))
|
@@ -0,0 +1,36 @@
|
|
1
|
+
pypeline/__init__.py,sha256=Xybt2skBZamGMNlLuOX1IG-h4uIxqUDGAO8MIGWrJac,22
|
2
|
+
pypeline/barrier.py,sha256=oO964l9qOCOibweOHyNivmAvufdXOke9nz2tdgclouo,1172
|
3
|
+
pypeline/constants.py,sha256=coiF8dMP25qIwoNYSnS7oy7hCd4-5yqPFmdPsN93Q1A,2892
|
4
|
+
pypeline/dramatiq.py,sha256=LWsl0o0t5FdxewIl87ARZKrNK0ENoYJEJAEVDSNFa40,12272
|
5
|
+
pypeline/extensions.py,sha256=BzOTnXhNxap3N7uIUUh_hO6dDwx08Vc_RJDE93_K0Lo,610
|
6
|
+
pypeline/pipeline_config_schema.py,sha256=hK2_egtg-YFx_XJDs_NyrOTGKkel7W83X-G0sic52sM,10592
|
7
|
+
pypeline/pipeline_settings_schema.py,sha256=7zMXtBT_V4qom_j6JInRVPq0X1f1vQOgH33RNBaLo-o,12136
|
8
|
+
pypeline/pypeline_yaml.py,sha256=Og08sUKwOjq7JYPnkg-NIcGbHravYCkC5Arz22rZEtA,16981
|
9
|
+
pypeline/schedule_config_schema.py,sha256=vtZV-5wpGcAiYcXxdBPRkrjsbR6x_9E-1PC2elrKKbE,3611
|
10
|
+
pypeline/flask/__init__.py,sha256=AdljRh0lMiS8ExgDmgzObwVs8jW7hqQuf83Ml8kn8GQ,491
|
11
|
+
pypeline/flask/decorators.py,sha256=ki6jkjZwbDbCWuj7ET7N-ncZwrASp4Fy7257WIYiAAQ,1102
|
12
|
+
pypeline/flask/flask_pypeline.py,sha256=Uqyu3PnSP3DoVZUJPqV9chjT4xdRgvcL3OMXxkbdTEg,5490
|
13
|
+
pypeline/flask/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
14
|
+
pypeline/flask/api/pipelines.py,sha256=8Y5dkIVb32dMc0jBI7lB2sQgsAIe7WYmPn-G9tlUY5o,10161
|
15
|
+
pypeline/flask/api/schedules.py,sha256=8PKCMdPucaer8opchNlI5aDssK2UqT79hHpeg5BMtTA,1210
|
16
|
+
pypeline/pipelines/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
17
|
+
pypeline/pipelines/factory.py,sha256=4HNGUJzYtgBOWP7fStXF0M61CYNAid9l9PGru9HyhXA,4115
|
18
|
+
pypeline/pipelines/composition/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
19
|
+
pypeline/pipelines/composition/parallel_pipeline_composition.py,sha256=pTw9Xb9h4JnV4siFc3JStm5lB-i9djUADo3Kh5K3s7g,12976
|
20
|
+
pypeline/pipelines/composition/pypeline_composition.py,sha256=ieTuQZ8zxTtvmPEkrWFbItjGtvO3JUotXcR-Jim2mss,7204
|
21
|
+
pypeline/pipelines/middleware/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
22
|
+
pypeline/pipelines/middleware/parallel_pipeline_middleware.py,sha256=kTp6niYoe2nXIiN6EGRfdpxrJyioo0GPxDkfefbGlEk,2821
|
23
|
+
pypeline/pipelines/middleware/pypeline_middleware.py,sha256=kvt5A9OxDwpIo0PsH11Im62tH6VquUc6OFoZDw2Gxsk,8036
|
24
|
+
pypeline/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
25
|
+
pypeline/utils/config_utils.py,sha256=rAIATyoW7kGETZ_Z2DqiXtGd7bJp5uPfcLtfNPOYsNs,2167
|
26
|
+
pypeline/utils/dramatiq_utils.py,sha256=5GDcOvKY-8S8r---wb6Q8QAywhbKVJ-qILjcYNHei8Y,3658
|
27
|
+
pypeline/utils/module_utils.py,sha256=-yEJIukDCoXnmlZVXB6Dww25tH6GdPE5SoFqv6pfdVU,3682
|
28
|
+
pypeline/utils/pipeline_utils.py,sha256=kGP1QwCJikGC5QNRtzRXCDVewyRMpWIqERTNnxGLlSY,4795
|
29
|
+
pypeline/utils/schema_utils.py,sha256=Fgl0y9Cuo_TZeEx_S3gaSVnLjn6467LTkjb2ek7Ms98,851
|
30
|
+
tests/fixtures/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
31
|
+
scalable_pypeline-2.1.0.dist-info/LICENSE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
|
32
|
+
scalable_pypeline-2.1.0.dist-info/METADATA,sha256=Oulf-ivIRF8QmuY4zeDajhjwgH_Ae7WMz_7BYO76HC0,5926
|
33
|
+
scalable_pypeline-2.1.0.dist-info/WHEEL,sha256=bb2Ot9scclHKMOLDEHY6B2sicWOgugjFKaJsT7vwMQo,110
|
34
|
+
scalable_pypeline-2.1.0.dist-info/entry_points.txt,sha256=uWs10ODfHSBKo2Cx_QaUjPHQTpZ3e77j9VlAdRRmMyg,119
|
35
|
+
scalable_pypeline-2.1.0.dist-info/top_level.txt,sha256=C7dpkEOc_-nnsAQb28BfQknjD6XHRyS9ZrvVeoIbV7s,15
|
36
|
+
scalable_pypeline-2.1.0.dist-info/RECORD,,
|
@@ -1,27 +0,0 @@
|
|
1
|
-
pypeline/__init__.py,sha256=QvVarwQu86KS14HXwAYbCqU1tjzA5eNjQxH2V34_iIU,23
|
2
|
-
pypeline/barrier.py,sha256=dLDaprH5NB-C7MQjZqPpBBhMjmO0VV_kTonlgweznHc,1096
|
3
|
-
pypeline/composition.py,sha256=pTw9Xb9h4JnV4siFc3JStm5lB-i9djUADo3Kh5K3s7g,12976
|
4
|
-
pypeline/constants.py,sha256=coiF8dMP25qIwoNYSnS7oy7hCd4-5yqPFmdPsN93Q1A,2892
|
5
|
-
pypeline/dramatiq.py,sha256=Y909HoNhH5Berd61N6nHrpE1dTU-zmvimH91SldP-SI,15912
|
6
|
-
pypeline/extensions.py,sha256=BzOTnXhNxap3N7uIUUh_hO6dDwx08Vc_RJDE93_K0Lo,610
|
7
|
-
pypeline/middleware.py,sha256=kTp6niYoe2nXIiN6EGRfdpxrJyioo0GPxDkfefbGlEk,2821
|
8
|
-
pypeline/pipeline_config_schema.py,sha256=DQ_RMucnA0AyrndlW6lkb0orGromcO6C9GgLHyG6lJ0,8013
|
9
|
-
pypeline/pypeline_yaml.py,sha256=Og08sUKwOjq7JYPnkg-NIcGbHravYCkC5Arz22rZEtA,16981
|
10
|
-
pypeline/schedule_config_schema.py,sha256=vtZV-5wpGcAiYcXxdBPRkrjsbR6x_9E-1PC2elrKKbE,3611
|
11
|
-
pypeline/flask/__init__.py,sha256=AdljRh0lMiS8ExgDmgzObwVs8jW7hqQuf83Ml8kn8GQ,491
|
12
|
-
pypeline/flask/decorators.py,sha256=ki6jkjZwbDbCWuj7ET7N-ncZwrASp4Fy7257WIYiAAQ,1102
|
13
|
-
pypeline/flask/flask_pypeline.py,sha256=Uqyu3PnSP3DoVZUJPqV9chjT4xdRgvcL3OMXxkbdTEg,5490
|
14
|
-
pypeline/flask/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
15
|
-
pypeline/flask/api/pipelines.py,sha256=RzRmSL5Zaia7ftXmXyDAC0ZAkPoFsvyefzHfIyWhRqk,8118
|
16
|
-
pypeline/flask/api/schedules.py,sha256=31lwoFlGv-S-2ahGUCnD5YbmKws8yddj6_PEzzdBi9s,1321
|
17
|
-
pypeline/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
18
|
-
pypeline/utils/config_utils.py,sha256=rAIATyoW7kGETZ_Z2DqiXtGd7bJp5uPfcLtfNPOYsNs,2167
|
19
|
-
pypeline/utils/module_utils.py,sha256=boEP9IYr4p_ick7HlVUfIxOYHQlEmo7dgvDBCQc-C28,2914
|
20
|
-
pypeline/utils/pipeline_utils.py,sha256=tt71hLEFgPieokJZlC1rP2dmCTctrOPt7K1rGlbnT4o,5967
|
21
|
-
tests/fixtures/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
22
|
-
scalable_pypeline-2.0.10.dist-info/LICENSE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
|
23
|
-
scalable_pypeline-2.0.10.dist-info/METADATA,sha256=mFn_XkdRsU4d5oT48HAQNeP9wEUifs2-aWR43-3SG-4,5927
|
24
|
-
scalable_pypeline-2.0.10.dist-info/WHEEL,sha256=bb2Ot9scclHKMOLDEHY6B2sicWOgugjFKaJsT7vwMQo,110
|
25
|
-
scalable_pypeline-2.0.10.dist-info/entry_points.txt,sha256=uWs10ODfHSBKo2Cx_QaUjPHQTpZ3e77j9VlAdRRmMyg,119
|
26
|
-
scalable_pypeline-2.0.10.dist-info/top_level.txt,sha256=C7dpkEOc_-nnsAQb28BfQknjD6XHRyS9ZrvVeoIbV7s,15
|
27
|
-
scalable_pypeline-2.0.10.dist-info/RECORD,,
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|