dkist-processing-ops 1.0.0rc2__py3-none-any.whl → 1.0.0rc3__py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dkist-processing-ops might be problematic.
- dkist_processing_ops/_version.py +1 -1
- dkist_processing_ops/tasks/wait.py +0 -8
- dkist_processing_ops/workflows/scale.py +2 -109
- {dkist_processing_ops-1.0.0rc2.dist-info → dkist_processing_ops-1.0.0rc3.dist-info}/METADATA +2 -2
- dkist_processing_ops-1.0.0rc3.dist-info/RECORD +13 -0
- dkist_processing_ops-1.0.0rc2.dist-info/RECORD +0 -13
- {dkist_processing_ops-1.0.0rc2.dist-info → dkist_processing_ops-1.0.0rc3.dist-info}/LICENSE.rst +0 -0
- {dkist_processing_ops-1.0.0rc2.dist-info → dkist_processing_ops-1.0.0rc3.dist-info}/WHEEL +0 -0
- {dkist_processing_ops-1.0.0rc2.dist-info → dkist_processing_ops-1.0.0rc3.dist-info}/top_level.txt +0 -0
dkist_processing_ops/_version.py
CHANGED
dkist_processing_ops/tasks/wait.py
CHANGED
@@ -11,14 +11,6 @@ SLEEP_TIME = 300
 
 
 class WaitTaskBase(TaskBase):
-    def __init__(
-        self,
-        recipe_run_id: int = 0,
-        workflow_name: str = "ops",
-        workflow_version: str = "ops_ver",
-    ):
-        super().__init__(recipe_run_id, workflow_name, workflow_version)
-
     def run(self) -> None:
         sleep(SLEEP_TIME)
 
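The only change in wait.py is the removal of a pass-through constructor whose body simply forwarded its arguments to TaskBase, so the task's behaviour is unchanged. A minimal, self-contained sketch of the resulting class follows; the TaskBase stand-in below is illustrative, not the real dkist-processing-core class, and its signature is inferred from the removed super().__init__ call:

from time import sleep

SLEEP_TIME = 300


class TaskBase:
    # Stand-in for dkist_processing_core's TaskBase; signature inferred from the
    # super().__init__(recipe_run_id, workflow_name, workflow_version) call removed in rc3.
    def __init__(self, recipe_run_id: int, workflow_name: str, workflow_version: str):
        self.recipe_run_id = recipe_run_id
        self.workflow_name = workflow_name
        self.workflow_version = workflow_version


class WaitTaskBase(TaskBase):
    # rc3 drops the pass-through __init__, and with it the defaults it carried
    # (recipe_run_id=0, workflow_name="ops", workflow_version="ops_ver"); unless
    # TaskBase supplies its own defaults, callers now pass these arguments explicitly.
    def run(self) -> None:
        sleep(SLEEP_TIME)


task = WaitTaskBase(recipe_run_id=0, workflow_name="ops", workflow_version="ops_ver")

One practical consequence: any code that relied on constructing WaitTaskBase() with no arguments now depends on whatever defaults TaskBase itself provides.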
dkist_processing_ops/workflows/scale.py
CHANGED
@@ -72,67 +72,13 @@ ALL_WAIT_TASKS = [
 ]
 
 
-#
-# Default Resource Queue
-#
-
-
 def add_parallel_nodes(count: int, workflow: Workflow, resource_queue: ResourceQueue):
     """Add the 'count' number of nodes to run in parallel to a workflow"""
     for task in ALL_WAIT_TASKS[:count]:
         workflow.add_node(task=task, upstreams=None, resource_queue=resource_queue)
 
 
-single_default = Workflow(
-    input_data="ops",
-    output_data="scale",
-    category="default",
-    detail="1",
-    workflow_package=__package__,
-)
-add_parallel_nodes(count=1, workflow=single_default, resource_queue=ResourceQueue.DEFAULT)
-
-
-two_default = Workflow(
-    input_data="ops",
-    output_data="scale",
-    category="default",
-    detail="2",
-    workflow_package=__package__,
-)
-add_parallel_nodes(count=2, workflow=two_default, resource_queue=ResourceQueue.DEFAULT)
-
-
-four_default = Workflow(
-    input_data="ops",
-    output_data="scale",
-    category="default",
-    detail="4",
-    workflow_package=__package__,
-)
-add_parallel_nodes(count=4, workflow=four_default, resource_queue=ResourceQueue.DEFAULT)
-
-
-eight_default = Workflow(
-    input_data="ops",
-    output_data="scale",
-    category="default",
-    detail="8",
-    workflow_package=__package__,
-)
-add_parallel_nodes(count=8, workflow=eight_default, resource_queue=ResourceQueue.DEFAULT)
-
-
-sixteen_default = Workflow(
-    input_data="ops",
-    output_data="scale",
-    category="default",
-    detail="16",
-    workflow_package=__package__,
-)
-add_parallel_nodes(count=16, workflow=sixteen_default, resource_queue=ResourceQueue.DEFAULT)
-
-
+# Default resource queue
 thirty_two_default = Workflow(
     input_data="ops",
     output_data="scale",
@@ -142,60 +88,7 @@ thirty_two_default = Workflow(
 )
 add_parallel_nodes(count=32, workflow=thirty_two_default, resource_queue=ResourceQueue.DEFAULT)
 
-#
-# High Mem Resource Queue
-#
-
-single_high_mem = Workflow(
-    input_data="ops",
-    output_data="scale",
-    category="high_mem",
-    detail="1",
-    workflow_package=__package__,
-)
-add_parallel_nodes(count=1, workflow=single_high_mem, resource_queue=ResourceQueue.HIGH_MEMORY)
-
-
-two_high_mem = Workflow(
-    input_data="ops",
-    output_data="scale",
-    category="high_mem",
-    detail="2",
-    workflow_package=__package__,
-)
-add_parallel_nodes(count=2, workflow=two_high_mem, resource_queue=ResourceQueue.HIGH_MEMORY)
-
-
-four_high_mem = Workflow(
-    input_data="ops",
-    output_data="scale",
-    category="high_mem",
-    detail="4",
-    workflow_package=__package__,
-)
-add_parallel_nodes(count=4, workflow=four_high_mem, resource_queue=ResourceQueue.HIGH_MEMORY)
-
-
-eight_high_mem = Workflow(
-    input_data="ops",
-    output_data="scale",
-    category="high_mem",
-    detail="8",
-    workflow_package=__package__,
-)
-add_parallel_nodes(count=8, workflow=eight_high_mem, resource_queue=ResourceQueue.HIGH_MEMORY)
-
-
-sixteen_high_mem = Workflow(
-    input_data="ops",
-    output_data="scale",
-    category="high_mem",
-    detail="16",
-    workflow_package=__package__,
-)
-add_parallel_nodes(count=16, workflow=sixteen_high_mem, resource_queue=ResourceQueue.HIGH_MEMORY)
-
-
+# High memory resource queue
 thirty_two_high_mem = Workflow(
     input_data="ops",
     output_data="scale",
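scale.py now keeps only the 32-node workflow per resource queue and drops the 1/2/4/8/16-node variants, replacing the old banner comments with single-line ones. The surviving pattern is small enough to sketch end to end. Everything below is illustrative: Workflow, ResourceQueue, and the wait-task placeholders stand in for the real objects from dkist-processing-core and tasks/wait.py; only the add_parallel_nodes logic mirrors the diff.

from dataclasses import dataclass, field
from enum import Enum
from typing import Any


class ResourceQueue(Enum):
    # Stand-in; the real enum comes from dkist-processing-core.
    DEFAULT = "default"
    HIGH_MEMORY = "high_mem"


@dataclass
class Workflow:
    # Stand-in exposing the keyword arguments used in scale.py.
    input_data: str
    output_data: str
    category: str
    detail: str
    workflow_package: str
    nodes: list = field(default_factory=list)

    def add_node(self, task: Any, upstreams: Any, resource_queue: ResourceQueue) -> None:
        self.nodes.append((task, upstreams, resource_queue))


# Placeholders for the 32 wait-task classes collected in ALL_WAIT_TASKS.
ALL_WAIT_TASKS = [f"WaitTask{i:02d}" for i in range(32)]


def add_parallel_nodes(count: int, workflow: Workflow, resource_queue: ResourceQueue) -> None:
    """Add the 'count' number of nodes to run in parallel to a workflow (same logic as scale.py)."""
    for task in ALL_WAIT_TASKS[:count]:
        workflow.add_node(task=task, upstreams=None, resource_queue=resource_queue)


# Default resource queue: the only default-queue scaling workflow left after rc3.
thirty_two_default = Workflow(
    input_data="ops",
    output_data="scale",
    category="default",
    detail="32",
    workflow_package="dkist_processing_ops.workflows",  # the real module passes __package__
)
add_parallel_nodes(count=32, workflow=thirty_two_default, resource_queue=ResourceQueue.DEFAULT)
assert len(thirty_two_default.nodes) == 32

The high-memory counterpart is built the same way with category="high_mem" and ResourceQueue.HIGH_MEMORY.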
{dkist_processing_ops-1.0.0rc2.dist-info → dkist_processing_ops-1.0.0rc3.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dkist-processing-ops
-Version: 1.0.0rc2
+Version: 1.0.0rc3
 Summary: Automated Processing smoke test and operations workflows
 Author-email: NSO / AURA <dkistdc@nso.edu>
 License: BSD 3-Clause
@@ -11,7 +11,7 @@ Classifier: Programming Language :: Python :: 3.11
 Requires-Python: >=3.11
 Description-Content-Type: text/x-rst
 License-File: LICENSE.rst
-Requires-Dist: dkist-processing-core ==3.0.
+Requires-Dist: dkist-processing-core ==3.0.2rc1
 Requires-Dist: dkist-service-configuration ==1.1.0
 Provides-Extra: test
 Requires-Dist: pytest ; extra == 'test'
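To confirm which pre-release an environment actually resolved, the standard-library importlib.metadata is enough; the package names come from the METADATA above, and the expected values assume the rc3 wheel and its pinned core dependency are installed:

from importlib.metadata import version

print(version("dkist-processing-ops"))   # expected: 1.0.0rc3
print(version("dkist-processing-core"))  # expected: 3.0.2rc1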
dkist_processing_ops-1.0.0rc3.dist-info/RECORD
ADDED
@@ -0,0 +1,13 @@
+dkist_processing_ops/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+dkist_processing_ops/_version.py,sha256=ycNGyiWSL_U4dVtUSTwe_m70MXdXM3_AE0nLDQP53_Q,414
+dkist_processing_ops/tasks/__init__.py,sha256=P81O9cg4dlBMqBTaWitdsAte68RsMtDlhV30JSZfXUY,107
+dkist_processing_ops/tasks/wait.py,sha256=I_Jz93ZdZajbQCoRXIuk3CoHJXz3cBfHP6iA8ZABDi8,1667
+dkist_processing_ops/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+dkist_processing_ops/tests/test_workflows.py,sha256=Ch_8BlGeQyPJU_9hB_GOncwW-SoZwpRUVKMOEz0RQZk,285
+dkist_processing_ops/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+dkist_processing_ops/workflows/scale.py,sha256=YRsYg0cNG6d-cx5fd_dqTkM812yrUqwXlLMQdHm6NaA,3103
+dkist_processing_ops-1.0.0rc3.dist-info/LICENSE.rst,sha256=LJjTmkf2-q1phdZSySMpiyPxgLOy6zYHOr3R1Bb1__8,327
+dkist_processing_ops-1.0.0rc3.dist-info/METADATA,sha256=uYM_EoHhIN8mmacXkEJhzlC1izN2bacTjR3X_coYOJI,1502
+dkist_processing_ops-1.0.0rc3.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+dkist_processing_ops-1.0.0rc3.dist-info/top_level.txt,sha256=o_SNho1HKt6wvCSUhm9qzX9FS2iopnqYuMos1CCD9cI,21
+dkist_processing_ops-1.0.0rc3.dist-info/RECORD,,
dkist_processing_ops-1.0.0rc2.dist-info/RECORD
DELETED
@@ -1,13 +0,0 @@
-dkist_processing_ops/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-dkist_processing_ops/_version.py,sha256=IdMJXAd0sDZTSxeQ-DJMdjldEZLvVOgK-kckvUiJhwU,414
-dkist_processing_ops/tasks/__init__.py,sha256=P81O9cg4dlBMqBTaWitdsAte68RsMtDlhV30JSZfXUY,107
-dkist_processing_ops/tasks/wait.py,sha256=sf9aqTNl4YIPFFptuqCgGapMMoVG3qIXxSO5PT7pa9M,1891
-dkist_processing_ops/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-dkist_processing_ops/tests/test_workflows.py,sha256=Ch_8BlGeQyPJU_9hB_GOncwW-SoZwpRUVKMOEz0RQZk,285
-dkist_processing_ops/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-dkist_processing_ops/workflows/scale.py,sha256=t0NreEfFR-UnD_EtaUh5dtxojgMAByu_JqBvU1arO7w,5560
-dkist_processing_ops-1.0.0rc2.dist-info/LICENSE.rst,sha256=LJjTmkf2-q1phdZSySMpiyPxgLOy6zYHOr3R1Bb1__8,327
-dkist_processing_ops-1.0.0rc2.dist-info/METADATA,sha256=OF2fFu3CxEGZZ6NRO2GxFVNVHNWi22DG9RCeckn7UCk,1499
-dkist_processing_ops-1.0.0rc2.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
-dkist_processing_ops-1.0.0rc2.dist-info/top_level.txt,sha256=o_SNho1HKt6wvCSUhm9qzX9FS2iopnqYuMos1CCD9cI,21
-dkist_processing_ops-1.0.0rc2.dist-info/RECORD,,
{dkist_processing_ops-1.0.0rc2.dist-info → dkist_processing_ops-1.0.0rc3.dist-info}/LICENSE.rst
RENAMED
File without changes
{dkist_processing_ops-1.0.0rc2.dist-info → dkist_processing_ops-1.0.0rc3.dist-info}/WHEEL
RENAMED
File without changes
{dkist_processing_ops-1.0.0rc2.dist-info → dkist_processing_ops-1.0.0rc3.dist-info}/top_level.txt
RENAMED
File without changes