dkist-processing-ops 1.0.0rc2.tar.gz → 1.0.0rc4.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

The registry flags this release of dkist-processing-ops as potentially problematic.

Files changed (26)
  1. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/PKG-INFO +2 -2
  2. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/bitbucket-pipelines.yml +1 -0
  3. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/dkist_processing_ops/_version.py +1 -1
  4. dkist-processing-ops-1.0.0rc4/dkist_processing_ops/dags/scale.py +78 -0
  5. dkist-processing-ops-1.0.0rc4/dkist_processing_ops/tasks/wait.py +15 -0
  6. dkist-processing-ops-1.0.0rc4/dkist_processing_ops/workflows/smoke.py +28 -0
  7. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/dkist_processing_ops.egg-info/PKG-INFO +2 -2
  8. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/dkist_processing_ops.egg-info/SOURCES.txt +2 -1
  9. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/dkist_processing_ops.egg-info/requires.txt +1 -1
  10. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/pyproject.toml +1 -1
  11. dkist-processing-ops-1.0.0rc2/dkist_processing_ops/tasks/wait.py +0 -151
  12. dkist-processing-ops-1.0.0rc2/dkist_processing_ops/workflows/scale.py +0 -206
  13. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/.gitignore +0 -0
  14. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/.pre-commit-config.yaml +0 -0
  15. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/LICENSE.rst +0 -0
  16. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/MANIFEST.in +0 -0
  17. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/README.rst +0 -0
  18. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/dkist_processing_ops/__init__.py +0 -0
  19. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/dkist_processing_ops/tasks/__init__.py +0 -0
  20. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/dkist_processing_ops/tests/__init__.py +0 -0
  21. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/dkist_processing_ops/tests/test_workflows.py +0 -0
  22. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/dkist_processing_ops/workflows/__init__.py +0 -0
  23. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/dkist_processing_ops.egg-info/dependency_links.txt +0 -0
  24. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/dkist_processing_ops.egg-info/not-zip-safe +0 -0
  25. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/dkist_processing_ops.egg-info/top_level.txt +0 -0
  26. {dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/setup.cfg +0 -0

{dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: dkist-processing-ops
- Version: 1.0.0rc2
+ Version: 1.0.0rc4
  Summary: Automated Processing smoke test and operations workflows
  Author-email: NSO / AURA <dkistdc@nso.edu>
  License: BSD 3-Clause
@@ -11,7 +11,7 @@ Classifier: Programming Language :: Python :: 3.11
  Requires-Python: >=3.11
  Description-Content-Type: text/x-rst
  License-File: LICENSE.rst
- Requires-Dist: dkist-processing-core==3.0.1
+ Requires-Dist: dkist-processing-common==6.1.0
  Requires-Dist: dkist-service-configuration==1.1.0
  Provides-Extra: test
  Requires-Dist: pytest; extra == "test"

{dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/bitbucket-pipelines.yml
@@ -50,6 +50,7 @@ definitions:
  - export BUILD_VERSION="${BITBUCKET_TAG:1}"
  - export ARTIFACT_FOLDER="${BITBUCKET_REPO_SLUG}_${BUILD_VERSION}/"
  - python -c "from dkist_processing_core.build_utils import export_dags; import dkist_processing_ops.workflows as workflow_package; export_dags(workflow_package, '${ARTIFACT_FOLDER}')"
+ - python -c "from dkist_processing_ops.dags.scale import export_scale_dags; export_scale_dags('${ARTIFACT_FOLDER}')"
  - export SOURCE_PATH="workflow_${BUILD_VERSION}.gz"
  - tar --exclude="bitbucket-pipelines.yml" -cvzf ${SOURCE_PATH} ${ARTIFACT_FOLDER}
  - export TARGET_PATH="generic-packages/dkist-processing-ops/${BUILD_VERSION}/"
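
The added export step invokes `export_scale_dags` from the new `dkist_processing_ops/dags/scale.py` module (diffed below), dropping the generated scaling DAG files into the same artifact folder as the workflow exports. A minimal sketch of running that step by hand, assuming the package is installed; the folder name and BUILD_VERSION value are assumptions for illustration:

# Minimal sketch of the new export step outside the pipeline.
# "artifact_folder/" and the BUILD_VERSION value are assumptions.
import os

os.environ["BUILD_VERSION"] = "1.0.0rc4"  # read by _export_ops_dag at call time

from dkist_processing_ops.dags.scale import export_scale_dags

# Writes one generated DAG file per (queue, scale) pair, named
# f"{dag_name}_{BUILD_VERSION}.py" by _export_ops_dag:
#   ops_scale_default_16_1.0.0rc4.py
#   ops_scale_default_32_1.0.0rc4.py
#   ops_scale_high_memory_16_1.0.0rc4.py
#   ops_scale_high_memory_32_1.0.0rc4.py
paths = export_scale_dags("artifact_folder/")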

{dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/dkist_processing_ops/_version.py
@@ -12,5 +12,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE
 
- __version__ = version = '1.0.0rc2'
+ __version__ = version = '1.0.0rc4'
  __version_tuple__ = version_tuple = (1, 0, 0)

dkist-processing-ops-1.0.0rc4/dkist_processing_ops/dags/scale.py
@@ -0,0 +1,78 @@
+ """
+ DAG to use up workers to support scaling
+ """
+ from os import environ
+ from pathlib import Path
+
+ from dkist_processing_core.build_utils import export_dags
+
+
+ def export_scale_dags(path: Path | str) -> list[Path]:
+     """Export all the ops dags"""
+     result = []
+     dag_prefix = "ops_scale"
+     scales = [16, 32]
+     queues = ["default", "high_memory"]
+     sleep_duration_seconds = 60
+     for queue in queues:
+         for scale in scales:
+             dag_name = f"{dag_prefix}_{queue}_{scale}"
+             dag_body = _scale_dag(
+                 dag_name=dag_name,
+                 sleep_duration_seconds=sleep_duration_seconds,
+                 queue=queue,
+                 concurrent_task_count=scale,
+             )
+             dag_path = _export_ops_dag(dag_name=dag_name, dag_body=dag_body, path=path)
+             result.append(dag_path)
+     return result
+
+
+ def _export_ops_dag(dag_name: str, dag_body: str, path: Path | str | None = None) -> Path:
+     """Write a file representation of the scaling DAG."""
+     path = path or "dags/"
+     path = Path(path)
+     path.mkdir(exist_ok=True)
+     version = environ.get("BUILD_VERSION", "dev")
+     dag_name = f"{dag_name}_{version}"
+     workflow_py = path / f"{dag_name}.py"
+     with workflow_py.open(mode="w") as f:
+         f.write(dag_body)
+     return workflow_py
+
+
+ def _scale_dag(
+     dag_name: str,
+     sleep_duration_seconds: int = 60,
+     queue: str | None = None,
+     concurrent_task_count: int = 16,
+ ) -> str:
+     queue = queue or "default"
+
+     imports = f"""# Scale {concurrent_task_count} DAG on queue {queue}
+ from datetime import timedelta
+ import pendulum
+ from airflow import DAG
+ from airflow.operators.bash import BashOperator
+ """
+     dag = f"""with DAG(
+     dag_id="{dag_name}",
+     start_date=pendulum.today("UTC").add(days=-2),
+     schedule=None,
+     catchup=False,
+     tags=["ops", "scale"],
+ ) as d:"""
+     tasks = []
+     for idx in range(concurrent_task_count):
+         task = f"""    t{idx} = BashOperator(
+         task_id="t{idx}",
+         bash_command=f"sleep {sleep_duration_seconds}",
+         retries=0,
+         retry_delay=timedelta(seconds=60),
+         owner="DKIST Data Center",
+         queue="{queue}",
+     )"""
+         tasks.append(task)
+     parts = [imports, dag] + tasks
+     body = "\n".join(parts)
+     return body
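
For reference, this is what one rendered module coming out of `_scale_dag` looks like (shown for `ops_scale_default_16`, truncated to the first two of sixteen tasks; whitespace is approximate because the extracted diff dropped indentation). Note that the nested f-string leaves a literal `f` prefix on the rendered `bash_command` value, which is harmless since the braces were already interpolated:

# Scale 16 DAG on queue default
from datetime import timedelta
import pendulum
from airflow import DAG
from airflow.operators.bash import BashOperator

with DAG(
    dag_id="ops_scale_default_16",
    start_date=pendulum.today("UTC").add(days=-2),
    schedule=None,
    catchup=False,
    tags=["ops", "scale"],
) as d:
    t0 = BashOperator(
        task_id="t0",
        bash_command=f"sleep 60",
        retries=0,
        retry_delay=timedelta(seconds=60),
        owner="DKIST Data Center",
        queue="default",
    )
    t1 = BashOperator(
        task_id="t1",
        bash_command=f"sleep 60",
        retries=0,
        retry_delay=timedelta(seconds=60),
        owner="DKIST Data Center",
        queue="default",
    )
    # ... t2 through t15 follow the same pattern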

dkist-processing-ops-1.0.0rc4/dkist_processing_ops/tasks/wait.py
@@ -0,0 +1,15 @@
+ """Task for parallelization testing which sleeps a configurable amount of time"""
+ from time import sleep
+
+ from dkist_processing_core import TaskBase
+
+
+ __all__ = ["WaitTask"]
+
+
+ SLEEP_TIME = 60
+
+
+ class WaitTask(TaskBase):
+     def run(self) -> None:
+         sleep(SLEEP_TIME)

dkist-processing-ops-1.0.0rc4/dkist_processing_ops/workflows/smoke.py
@@ -0,0 +1,28 @@
+ """Workflows to test task submission and spin up"""
+ from dkist_processing_common.tasks import TrialTeardown
+ from dkist_processing_core import ResourceQueue
+ from dkist_processing_core import Workflow
+
+ from dkist_processing_ops.tasks import WaitTask
+
+
+ smoke_default = Workflow(
+     input_data="ops",
+     output_data="common",
+     category="smoke",
+     detail="default",
+     workflow_package=__package__,
+ )
+ smoke_default.add_node(task=WaitTask, upstreams=None, resource_queue=ResourceQueue.DEFAULT)
+ smoke_default.add_node(task=TrialTeardown, upstreams=WaitTask)
+
+
+ smoke_high_mem = Workflow(
+     input_data="ops",
+     output_data="common",
+     category="smoke",
+     detail="high-mem",
+     workflow_package=__package__,
+ )
+ smoke_high_mem.add_node(task=WaitTask, upstreams=None, resource_queue=ResourceQueue.HIGH_MEMORY)
+ smoke_high_mem.add_node(task=TrialTeardown, upstreams=WaitTask)
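
These two Workflow objects are what the `export_dags` call in the build pipeline above turns into Airflow DAG files. A minimal sketch of running that export by hand, mirroring the pipeline's python -c invocation; the target folder is an assumption:

# Minimal sketch mirroring the pipeline's workflow export step.
# "exported_dags/" is an illustrative target folder.
import dkist_processing_ops.workflows as workflow_package
from dkist_processing_core.build_utils import export_dags

export_dags(workflow_package, "exported_dags/")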

{dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/dkist_processing_ops.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: dkist-processing-ops
- Version: 1.0.0rc2
+ Version: 1.0.0rc4
  Summary: Automated Processing smoke test and operations workflows
  Author-email: NSO / AURA <dkistdc@nso.edu>
  License: BSD 3-Clause
@@ -11,7 +11,7 @@ Classifier: Programming Language :: Python :: 3.11
  Requires-Python: >=3.11
  Description-Content-Type: text/x-rst
  License-File: LICENSE.rst
- Requires-Dist: dkist-processing-core==3.0.1
+ Requires-Dist: dkist-processing-common==6.1.0
  Requires-Dist: dkist-service-configuration==1.1.0
  Provides-Extra: test
  Requires-Dist: pytest; extra == "test"

{dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/dkist_processing_ops.egg-info/SOURCES.txt
@@ -13,9 +13,10 @@ dkist_processing_ops.egg-info/dependency_links.txt
  dkist_processing_ops.egg-info/not-zip-safe
  dkist_processing_ops.egg-info/requires.txt
  dkist_processing_ops.egg-info/top_level.txt
+ dkist_processing_ops/dags/scale.py
  dkist_processing_ops/tasks/__init__.py
  dkist_processing_ops/tasks/wait.py
  dkist_processing_ops/tests/__init__.py
  dkist_processing_ops/tests/test_workflows.py
  dkist_processing_ops/workflows/__init__.py
- dkist_processing_ops/workflows/scale.py
+ dkist_processing_ops/workflows/smoke.py

{dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/dkist_processing_ops.egg-info/requires.txt
@@ -1,4 +1,4 @@
- dkist-processing-core==3.0.1
+ dkist-processing-common==6.1.0
  dkist-service-configuration==1.1.0
 
  [test]

{dkist-processing-ops-1.0.0rc2 → dkist-processing-ops-1.0.0rc4}/pyproject.toml
@@ -23,7 +23,7 @@ authors = [
  ]
 
  dependencies = [
-     "dkist-processing-core==3.0.1",
+     "dkist-processing-common==6.1.0",
      "dkist-service-configuration==1.1.0",
  ]
  dynamic = ["version"]

dkist-processing-ops-1.0.0rc2/dkist_processing_ops/tasks/wait.py
@@ -1,151 +0,0 @@
- """Task for parallelization testing which sleeps a configurable amount of time"""
- from time import sleep
-
- from dkist_processing_core import TaskBase
-
-
- __all__ = [f"WaitTask{i}" for i in range(32)]
-
-
- SLEEP_TIME = 300
-
-
- class WaitTaskBase(TaskBase):
-     def __init__(
-         self,
-         recipe_run_id: int = 0,
-         workflow_name: str = "ops",
-         workflow_version: str = "ops_ver",
-     ):
-         super().__init__(recipe_run_id, workflow_name, workflow_version)
-
-     def run(self) -> None:
-         sleep(SLEEP_TIME)
-
-
- class WaitTask0(WaitTaskBase):
-     pass
-
-
- class WaitTask1(WaitTaskBase):
-     pass
-
-
- class WaitTask2(WaitTaskBase):
-     pass
-
-
- class WaitTask3(WaitTaskBase):
-     pass
-
-
- class WaitTask4(WaitTaskBase):
-     pass
-
-
- class WaitTask5(WaitTaskBase):
-     pass
-
-
- class WaitTask6(WaitTaskBase):
-     pass
-
-
- class WaitTask7(WaitTaskBase):
-     pass
-
-
- class WaitTask8(WaitTaskBase):
-     pass
-
-
- class WaitTask9(WaitTaskBase):
-     pass
-
-
- class WaitTask10(WaitTaskBase):
-     pass
-
-
- class WaitTask11(WaitTaskBase):
-     pass
-
-
- class WaitTask12(WaitTaskBase):
-     pass
-
-
- class WaitTask13(WaitTaskBase):
-     pass
-
-
- class WaitTask14(WaitTaskBase):
-     pass
-
-
- class WaitTask15(WaitTaskBase):
-     pass
-
-
- class WaitTask16(WaitTaskBase):
-     pass
-
-
- class WaitTask17(WaitTaskBase):
-     pass
-
-
- class WaitTask18(WaitTaskBase):
-     pass
-
-
- class WaitTask19(WaitTaskBase):
-     pass
-
-
- class WaitTask20(WaitTaskBase):
-     pass
-
-
- class WaitTask21(WaitTaskBase):
-     pass
-
-
- class WaitTask22(WaitTaskBase):
-     pass
-
-
- class WaitTask23(WaitTaskBase):
-     pass
-
-
- class WaitTask24(WaitTaskBase):
-     pass
-
-
- class WaitTask25(WaitTaskBase):
-     pass
-
-
- class WaitTask26(WaitTaskBase):
-     pass
-
-
- class WaitTask27(WaitTaskBase):
-     pass
-
-
- class WaitTask28(WaitTaskBase):
-     pass
-
-
- class WaitTask29(WaitTaskBase):
-     pass
-
-
- class WaitTask30(WaitTaskBase):
-     pass
-
-
- class WaitTask31(WaitTaskBase):
-     pass

dkist-processing-ops-1.0.0rc2/dkist_processing_ops/workflows/scale.py
@@ -1,206 +0,0 @@
- """Workflows to test parallel scaling."""
- from dkist_processing_core import ResourceQueue
- from dkist_processing_core import Workflow
-
- from dkist_processing_ops.tasks import WaitTask0
- from dkist_processing_ops.tasks import WaitTask1
- from dkist_processing_ops.tasks import WaitTask10
- from dkist_processing_ops.tasks import WaitTask11
- from dkist_processing_ops.tasks import WaitTask12
- from dkist_processing_ops.tasks import WaitTask13
- from dkist_processing_ops.tasks import WaitTask14
- from dkist_processing_ops.tasks import WaitTask15
- from dkist_processing_ops.tasks import WaitTask16
- from dkist_processing_ops.tasks import WaitTask17
- from dkist_processing_ops.tasks import WaitTask18
- from dkist_processing_ops.tasks import WaitTask19
- from dkist_processing_ops.tasks import WaitTask2
- from dkist_processing_ops.tasks import WaitTask20
- from dkist_processing_ops.tasks import WaitTask21
- from dkist_processing_ops.tasks import WaitTask22
- from dkist_processing_ops.tasks import WaitTask23
- from dkist_processing_ops.tasks import WaitTask24
- from dkist_processing_ops.tasks import WaitTask25
- from dkist_processing_ops.tasks import WaitTask26
- from dkist_processing_ops.tasks import WaitTask27
- from dkist_processing_ops.tasks import WaitTask28
- from dkist_processing_ops.tasks import WaitTask29
- from dkist_processing_ops.tasks import WaitTask3
- from dkist_processing_ops.tasks import WaitTask30
- from dkist_processing_ops.tasks import WaitTask31
- from dkist_processing_ops.tasks import WaitTask4
- from dkist_processing_ops.tasks import WaitTask5
- from dkist_processing_ops.tasks import WaitTask6
- from dkist_processing_ops.tasks import WaitTask7
- from dkist_processing_ops.tasks import WaitTask8
- from dkist_processing_ops.tasks import WaitTask9
-
-
- ALL_WAIT_TASKS = [
-     WaitTask0,
-     WaitTask1,
-     WaitTask2,
-     WaitTask3,
-     WaitTask4,
-     WaitTask5,
-     WaitTask6,
-     WaitTask7,
-     WaitTask8,
-     WaitTask9,
-     WaitTask10,
-     WaitTask11,
-     WaitTask12,
-     WaitTask13,
-     WaitTask14,
-     WaitTask15,
-     WaitTask16,
-     WaitTask17,
-     WaitTask18,
-     WaitTask19,
-     WaitTask20,
-     WaitTask21,
-     WaitTask22,
-     WaitTask23,
-     WaitTask24,
-     WaitTask25,
-     WaitTask26,
-     WaitTask27,
-     WaitTask28,
-     WaitTask29,
-     WaitTask30,
-     WaitTask31,
- ]
-
-
- #
- # Default Resource Queue
- #
-
-
- def add_parallel_nodes(count: int, workflow: Workflow, resource_queue: ResourceQueue):
-     """Add the 'count' number of nodes to run in parallel to a workflow"""
-     for task in ALL_WAIT_TASKS[:count]:
-         workflow.add_node(task=task, upstreams=None, resource_queue=resource_queue)
-
-
- single_default = Workflow(
-     input_data="ops",
-     output_data="scale",
-     category="default",
-     detail="1",
-     workflow_package=__package__,
- )
- add_parallel_nodes(count=1, workflow=single_default, resource_queue=ResourceQueue.DEFAULT)
-
-
- two_default = Workflow(
-     input_data="ops",
-     output_data="scale",
-     category="default",
-     detail="2",
-     workflow_package=__package__,
- )
- add_parallel_nodes(count=2, workflow=two_default, resource_queue=ResourceQueue.DEFAULT)
-
-
- four_default = Workflow(
-     input_data="ops",
-     output_data="scale",
-     category="default",
-     detail="4",
-     workflow_package=__package__,
- )
- add_parallel_nodes(count=4, workflow=four_default, resource_queue=ResourceQueue.DEFAULT)
-
-
- eight_default = Workflow(
-     input_data="ops",
-     output_data="scale",
-     category="default",
-     detail="8",
-     workflow_package=__package__,
- )
- add_parallel_nodes(count=8, workflow=eight_default, resource_queue=ResourceQueue.DEFAULT)
-
-
- sixteen_default = Workflow(
-     input_data="ops",
-     output_data="scale",
-     category="default",
-     detail="16",
-     workflow_package=__package__,
- )
- add_parallel_nodes(count=16, workflow=sixteen_default, resource_queue=ResourceQueue.DEFAULT)
-
-
- thirty_two_default = Workflow(
-     input_data="ops",
-     output_data="scale",
-     category="default",
-     detail="32",
-     workflow_package=__package__,
- )
- add_parallel_nodes(count=32, workflow=thirty_two_default, resource_queue=ResourceQueue.DEFAULT)
-
- #
- # High Mem Resource Queue
- #
-
- single_high_mem = Workflow(
-     input_data="ops",
-     output_data="scale",
-     category="high_mem",
-     detail="1",
-     workflow_package=__package__,
- )
- add_parallel_nodes(count=1, workflow=single_high_mem, resource_queue=ResourceQueue.HIGH_MEMORY)
-
-
- two_high_mem = Workflow(
-     input_data="ops",
-     output_data="scale",
-     category="high_mem",
-     detail="2",
-     workflow_package=__package__,
- )
- add_parallel_nodes(count=2, workflow=two_high_mem, resource_queue=ResourceQueue.HIGH_MEMORY)
-
-
- four_high_mem = Workflow(
-     input_data="ops",
-     output_data="scale",
-     category="high_mem",
-     detail="4",
-     workflow_package=__package__,
- )
- add_parallel_nodes(count=4, workflow=four_high_mem, resource_queue=ResourceQueue.HIGH_MEMORY)
-
-
- eight_high_mem = Workflow(
-     input_data="ops",
-     output_data="scale",
-     category="high_mem",
-     detail="8",
-     workflow_package=__package__,
- )
- add_parallel_nodes(count=8, workflow=eight_high_mem, resource_queue=ResourceQueue.HIGH_MEMORY)
-
-
- sixteen_high_mem = Workflow(
-     input_data="ops",
-     output_data="scale",
-     category="high_mem",
-     detail="16",
-     workflow_package=__package__,
- )
- add_parallel_nodes(count=16, workflow=sixteen_high_mem, resource_queue=ResourceQueue.HIGH_MEMORY)
-
-
- thirty_two_high_mem = Workflow(
-     input_data="ops",
-     output_data="scale",
-     category="high_mem",
-     detail="32",
-     workflow_package=__package__,
- )
- add_parallel_nodes(count=32, workflow=thirty_two_high_mem, resource_queue=ResourceQueue.HIGH_MEMORY)