scalable-pypeline 2.1.26__tar.gz → 2.1.28__tar.gz

This diff shows the content of publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.

Potentially problematic release: this version of scalable-pypeline might be problematic.
Files changed (49)
  1. {scalable-pypeline-2.1.26/scalable_pypeline.egg-info → scalable-pypeline-2.1.28}/PKG-INFO +1 -1
  2. scalable-pypeline-2.1.28/pypeline/__init__.py +1 -0
  3. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/pipeline_settings_schema.py +1 -0
  4. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/pipelines/middleware/pypeline_middleware.py +40 -40
  5. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28/scalable_pypeline.egg-info}/PKG-INFO +1 -1
  6. scalable-pypeline-2.1.26/pypeline/__init__.py +0 -1
  7. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/LICENSE +0 -0
  8. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/MANIFEST.in +0 -0
  9. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/README.md +0 -0
  10. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/barrier.py +0 -0
  11. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/constants.py +0 -0
  12. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/dramatiq.py +0 -0
  13. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/executable_job_config_schema.py +0 -0
  14. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/extensions.py +0 -0
  15. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/flask/__init__.py +0 -0
  16. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/flask/api/__init__.py +0 -0
  17. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/flask/api/pipelines.py +0 -0
  18. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/flask/api/schedules.py +0 -0
  19. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/flask/decorators.py +0 -0
  20. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/flask/flask_pypeline.py +0 -0
  21. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/pipeline_config_schema.py +0 -0
  22. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/pipelines/__init__.py +0 -0
  23. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/pipelines/composition/__init__.py +0 -0
  24. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/pipelines/composition/parallel_pipeline_composition.py +0 -0
  25. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/pipelines/composition/pypeline_composition.py +0 -0
  26. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/pipelines/factory.py +0 -0
  27. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/pipelines/middleware/__init__.py +0 -0
  28. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/pipelines/middleware/get_active_worker_id_middleware.py +0 -0
  29. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/pipelines/middleware/graceful_shutdown_middleware.py +0 -0
  30. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/pipelines/middleware/parallel_pipeline_middleware.py +0 -0
  31. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/pypeline_yaml.py +0 -0
  32. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/schedule_config_schema.py +0 -0
  33. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/utils/__init__.py +0 -0
  34. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/utils/config_utils.py +0 -0
  35. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/utils/dramatiq_utils.py +0 -0
  36. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/utils/executable_job_util.py +0 -0
  37. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/utils/graceful_shutdown_util.py +0 -0
  38. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/utils/module_utils.py +0 -0
  39. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/utils/pipeline_utils.py +0 -0
  40. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/utils/schema_utils.py +0 -0
  41. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/requirements.txt +0 -0
  42. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/scalable_pypeline.egg-info/SOURCES.txt +0 -0
  43. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/scalable_pypeline.egg-info/dependency_links.txt +0 -0
  44. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/scalable_pypeline.egg-info/entry_points.txt +0 -0
  45. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/scalable_pypeline.egg-info/requires.txt +0 -0
  46. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/scalable_pypeline.egg-info/top_level.txt +0 -0
  47. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/setup.cfg +0 -0
  48. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/setup.py +0 -0
  49. {scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/tests/fixtures/__init__.py +0 -0
{scalable-pypeline-2.1.26/scalable_pypeline.egg-info → scalable-pypeline-2.1.28}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: scalable-pypeline
-Version: 2.1.26
+Version: 2.1.28
 Summary: PypeLine - Python pipelines for the Real World
 Home-page: https://gitlab.com/bravos2/pypeline
 Author: Bravos Power Corporation
scalable-pypeline-2.1.28/pypeline/__init__.py
@@ -0,0 +1 @@
+__version__ = "2.1.28"
{scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/pipeline_settings_schema.py
@@ -465,6 +465,7 @@ class PipelineScenarioSchema(Schema):
             "produce alternative calculations and or results."
         },
     )
+    execution_id = fields.String(required=False, metadata={"description":"Execution id for a known scenario"})


 class PipelineScenariosSchema(Schema):
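
For reference, the new execution_id attribute is a plain optional marshmallow string field. A minimal standalone sketch of the same pattern follows; only the field name and description come from the diff above, the surrounding schema name and usage are illustrative:

    from marshmallow import Schema, fields

    class ScenarioSketch(Schema):
        # Same shape as the field added to PipelineScenarioSchema in this release
        execution_id = fields.String(
            required=False,
            metadata={"description": "Execution id for a known scenario"},
        )

    print(ScenarioSketch().load({"execution_id": "abc-123"}))  # {'execution_id': 'abc-123'}
    print(ScenarioSketch().load({}))                           # {} (the field is optional)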
{scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28}/pypeline/pipelines/middleware/pypeline_middleware.py
@@ -46,74 +46,74 @@ class PypelineMiddleware(Middleware):

         graph = get_execution_graph(pipeline_config)
         children_tasks = pipeline_config["dagAdjacency"].get(task_name, [])
-
         messages = []
         for child in children_tasks:
             child_ancestors = sorted(graph.predecessors(child))
+            child_ancestors_complete = {a: False for a in child_ancestors}

-            ancestor_tasks_complete = True
+            for scenario in message.options["scenarios"]:
+                if scenario["execution_id"] == execution_id:
+                    tasks_to_run_in_scenario = scenario["tasksToRunInScenario"]

             for ancestor in child_ancestors:
-                ancestor_task_key = f"{execution_id}-{ancestor}"
-
-                locking_parallel_barrier = LockingParallelBarrier(
-                    self.redis_url,
-                    task_key=ancestor_task_key,
-                    lock_key=f"{message.options['base_case_execution_id']}-lock",
-                )
-                try:
-                    locking_parallel_barrier.acquire_lock(
-                        timeout=PARALLEL_PIPELINE_CALLBACK_BARRIER_TTL
+                if ancestor in tasks_to_run_in_scenario:
+                    current_scenario_ancestor_task_key = f"{execution_id}-{ancestor}"
+                    locking_parallel_barrier = LockingParallelBarrier(
+                        self.redis_url,
+                        task_key=current_scenario_ancestor_task_key,
+                        lock_key=f"{message.options['base_case_execution_id']}-lock",
                     )
-
-                    if locking_parallel_barrier.task_exists():
-                        remaining_tasks = locking_parallel_barrier.get_task_count()
-                    else:
-                        remaining_tasks = None
-                finally:
-                    locking_parallel_barrier.release_lock()
-
-                # If the lock didn't exist for the current tasks execution id then it would indicate
-                # that this is the start of a new scenario. Therefore we need to find the ancestor
-                # that is executed in the base case execution id and make sure it has completed
-                if remaining_tasks is None:
-                    ancestor_task_key = (
+                    try:
+                        locking_parallel_barrier.acquire_lock(
+                            timeout=PARALLEL_PIPELINE_CALLBACK_BARRIER_TTL
+                        )
+                        if not locking_parallel_barrier.task_exists():
+                            child_ancestors_complete[ancestor] = False
+                        elif locking_parallel_barrier.get_task_count() <= 0:
+                            child_ancestors_complete[ancestor] = True
+                    finally:
+                        locking_parallel_barrier.release_lock()
+                else:
+                    base_scenario_ancestor_task_key = (
                         f"{message.options['base_case_execution_id']}-{ancestor}"
                     )
-
                     locking_parallel_barrier = LockingParallelBarrier(
                         self.redis_url,
-                        task_key=ancestor_task_key,
+                        task_key=base_scenario_ancestor_task_key,
                         lock_key=f"{message.options['base_case_execution_id']}-lock",
                     )
                     try:
                         locking_parallel_barrier.acquire_lock(
                             timeout=PARALLEL_PIPELINE_CALLBACK_BARRIER_TTL
                         )
-
-                        if locking_parallel_barrier.task_exists():
-                            remaining_tasks = locking_parallel_barrier.get_task_count()
+                        if not locking_parallel_barrier.task_exists():
+                            child_ancestors_complete[ancestor] = False
+                        elif locking_parallel_barrier.get_task_count() <= 0:
+                            child_ancestors_complete[ancestor] = True
                     finally:
                         locking_parallel_barrier.release_lock()
-                if remaining_tasks is None or remaining_tasks >= 1:
-                    ancestor_tasks_complete = False
-                    break

-            # If the child's ancestor tasks aren't complete move onto the next child to check
-            if not ancestor_tasks_complete:
-                break
+            if any(complete is False for complete in child_ancestors_complete.values()):
+                continue

-            # Handle situation where base case kicks off new scenario
             if (
                 message.options["base_case_execution_id"]
                 == message.options["execution_id"]
             ):
                 for scenario in message.options["scenarios"]:
-                    child_predecessors = list(graph.predecessors(child))
+                    child_ancestors = list(graph.predecessors(child))
+                    child_has_other_ancestors_in_scenario = False
+
+                    for ancestor in child_ancestors:
+                        if ancestor in scenario["tasksToRunInScenario"]:
+                            child_has_other_ancestors_in_scenario = True
+                            break
+
                     if (
                         child in scenario["tasksToRunInScenario"]
-                        and task_name in child_predecessors
+                        and task_name in child_ancestors
                         and task_name not in scenario["tasksToRunInScenario"]
+                        and not child_has_other_ancestors_in_scenario
                     ):
                         task_key = f"{scenario['execution_id']}-{child}"
                         locking_parallel_barrier = LockingParallelBarrier(
@@ -160,7 +160,7 @@ class PypelineMiddleware(Middleware):
                         )
                         messages.append(scenario_message)

-            # Kick off child task for current scenario
+            # If we've made it here all ancestors of this child are complete, and it's time to run.
             task_key = f"{execution_id}-{child}"
             locking_parallel_barrier = LockingParallelBarrier(
                 self.redis_url,
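
Taken together, the middleware changes replace the single ancestor_tasks_complete flag with per-ancestor bookkeeping: every ancestor of a child task starts as incomplete, is marked complete only when its barrier exists with no remaining tasks, and the child is released only when every entry is complete. A simplified, self-contained sketch of that gating pattern follows; the in-memory barrier stub is hypothetical and stands in for the Redis-backed LockingParallelBarrier, with method names taken from the diff above:

    class InMemoryBarrierStub:
        """Hypothetical stand-in for LockingParallelBarrier; tracks task counts in a dict."""

        def __init__(self, counts, task_key, lock_key=None):
            self._counts = counts
            self._task_key = task_key

        def acquire_lock(self, timeout=None):
            return True  # the real class locks via Redis; a no-op is enough for the sketch

        def release_lock(self):
            return True

        def task_exists(self):
            return self._task_key in self._counts

        def get_task_count(self):
            return self._counts[self._task_key]


    def child_is_ready(execution_id, child_ancestors, counts):
        """Mirror of the new gating logic: default every ancestor to incomplete and
        flip it to complete only when its barrier exists with a count of zero."""
        child_ancestors_complete = {a: False for a in child_ancestors}
        for ancestor in child_ancestors:
            barrier = InMemoryBarrierStub(counts, task_key=f"{execution_id}-{ancestor}")
            try:
                barrier.acquire_lock(timeout=60)
                if barrier.task_exists() and barrier.get_task_count() <= 0:
                    child_ancestors_complete[ancestor] = True
            finally:
                barrier.release_lock()
        # Equivalent of the middleware's `continue` guard: run the child only when nothing is pending.
        return not any(complete is False for complete in child_ancestors_complete.values())


    # Ancestor "b" still has one outstanding task, so the child is held back.
    print(child_is_ready("exec-1", ["a", "b"], {"exec-1-a": 0, "exec-1-b": 1}))  # False
    print(child_is_ready("exec-1", ["a", "b"], {"exec-1-a": 0, "exec-1-b": 0}))  # True

In the released code the barrier key is additionally chosen per scenario (the current execution id when the ancestor runs in the scenario, otherwise the base case execution id), which is the if/else branch the diff introduces.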
{scalable-pypeline-2.1.26 → scalable-pypeline-2.1.28/scalable_pypeline.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: scalable-pypeline
-Version: 2.1.26
+Version: 2.1.28
 Summary: PypeLine - Python pipelines for the Real World
 Home-page: https://gitlab.com/bravos2/pypeline
 Author: Bravos Power Corporation
scalable-pypeline-2.1.26/pypeline/__init__.py
@@ -1 +0,0 @@
-__version__ = "2.1.26"