dkist-processing-ops 1.0.0rc3__tar.gz → 1.0.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of dkist-processing-ops has been flagged as potentially problematic; consult the registry's advisory page for more details.

Files changed (26) hide show
  1. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/PKG-INFO +2 -2
  2. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/bitbucket-pipelines.yml +1 -0
  3. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/dkist_processing_ops/_version.py +2 -2
  4. dkist-processing-ops-1.0.1/dkist_processing_ops/dags/scale.py +77 -0
  5. dkist-processing-ops-1.0.1/dkist_processing_ops/tasks/wait.py +15 -0
  6. dkist-processing-ops-1.0.1/dkist_processing_ops/workflows/smoke.py +28 -0
  7. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/dkist_processing_ops.egg-info/PKG-INFO +2 -2
  8. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/dkist_processing_ops.egg-info/SOURCES.txt +2 -1
  9. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/dkist_processing_ops.egg-info/requires.txt +1 -1
  10. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/pyproject.toml +1 -1
  11. dkist-processing-ops-1.0.0rc3/dkist_processing_ops/tasks/wait.py +0 -143
  12. dkist-processing-ops-1.0.0rc3/dkist_processing_ops/workflows/scale.py +0 -99
  13. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/.gitignore +0 -0
  14. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/.pre-commit-config.yaml +0 -0
  15. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/LICENSE.rst +0 -0
  16. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/MANIFEST.in +0 -0
  17. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/README.rst +0 -0
  18. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/dkist_processing_ops/__init__.py +0 -0
  19. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/dkist_processing_ops/tasks/__init__.py +0 -0
  20. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/dkist_processing_ops/tests/__init__.py +0 -0
  21. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/dkist_processing_ops/tests/test_workflows.py +0 -0
  22. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/dkist_processing_ops/workflows/__init__.py +0 -0
  23. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/dkist_processing_ops.egg-info/dependency_links.txt +0 -0
  24. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/dkist_processing_ops.egg-info/not-zip-safe +0 -0
  25. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/dkist_processing_ops.egg-info/top_level.txt +0 -0
  26. {dkist-processing-ops-1.0.0rc3 → dkist-processing-ops-1.0.1}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dkist-processing-ops
3
- Version: 1.0.0rc3
3
+ Version: 1.0.1
4
4
  Summary: Automated Processing smoke test and operations workflows
5
5
  Author-email: NSO / AURA <dkistdc@nso.edu>
6
6
  License: BSD 3-Clause
@@ -11,7 +11,7 @@ Classifier: Programming Language :: Python :: 3.11
11
11
  Requires-Python: >=3.11
12
12
  Description-Content-Type: text/x-rst
13
13
  License-File: LICENSE.rst
14
- Requires-Dist: dkist-processing-core==3.0.2rc1
14
+ Requires-Dist: dkist-processing-common==6.1.0
15
15
  Requires-Dist: dkist-service-configuration==1.1.0
16
16
  Provides-Extra: test
17
17
  Requires-Dist: pytest; extra == "test"
@@ -50,6 +50,7 @@ definitions:
50
50
  - export BUILD_VERSION="${BITBUCKET_TAG:1}"
51
51
  - export ARTIFACT_FOLDER="${BITBUCKET_REPO_SLUG}_${BUILD_VERSION}/"
52
52
  - python -c "from dkist_processing_core.build_utils import export_dags; import dkist_processing_ops.workflows as workflow_package; export_dags(workflow_package, '${ARTIFACT_FOLDER}')"
53
+ - python -c "from dkist_processing_ops.dags.scale import export_scale_dags; export_scale_dags('${ARTIFACT_FOLDER}')"
53
54
  - export SOURCE_PATH="workflow_${BUILD_VERSION}.gz"
54
55
  - tar --exclude="bitbucket-pipelines.yml" -cvzf ${SOURCE_PATH} ${ARTIFACT_FOLDER}
55
56
  - export TARGET_PATH="generic-packages/dkist-processing-ops/${BUILD_VERSION}/"
@@ -12,5 +12,5 @@ __version__: str
12
12
  __version_tuple__: VERSION_TUPLE
13
13
  version_tuple: VERSION_TUPLE
14
14
 
15
- __version__ = version = '1.0.0rc3'
16
- __version_tuple__ = version_tuple = (1, 0, 0)
15
+ __version__ = version = '1.0.1'
16
+ __version_tuple__ = version_tuple = (1, 0, 1)
@@ -0,0 +1,77 @@
1
+ """
2
+ DAG to use up workers to support scaling
3
+ """
4
+ from os import environ
5
+ from pathlib import Path
6
+
7
+ from dkist_processing_core.build_utils import export_dags
8
+
9
+
10
def export_scale_dags(
    path: Path | str,
    scales: tuple[int, ...] = (16, 32),
    queues: tuple[str, ...] = ("default", "high_memory"),
    sleep_duration_seconds: int = 60,
) -> list[Path]:
    """
    Export all the ops scaling DAGs.

    One DAG module is written per (queue, scale) combination; each generated
    DAG runs ``scale`` concurrent sleep tasks on its queue.

    Parameters
    ----------
    path
        Directory the generated DAG files are written into.
    scales
        Concurrent task counts to generate DAGs for.
    queues
        Worker queues to generate DAGs for.
    sleep_duration_seconds
        How long each generated task sleeps.

    Returns
    -------
    Paths of the DAG files that were written.
    """
    dag_prefix = "ops_scale"
    # BUILD_VERSION is injected by the CI pipeline; fall back to "dev" locally.
    version = environ.get("BUILD_VERSION", "dev")
    result = []
    for queue in queues:
        for scale in scales:
            dag_name = f"{dag_prefix}_{queue}_{scale}_{version}"
            dag_body = _scale_dag(
                dag_name=dag_name,
                sleep_duration_seconds=sleep_duration_seconds,
                queue=queue,
                concurrent_task_count=scale,
            )
            result.append(_export_ops_dag(dag_name=dag_name, dag_body=dag_body, path=path))
    return result
30
+
31
+
32
+ def _export_ops_dag(dag_name: str, dag_body: str, path: Path | str | None = None) -> Path:
33
+ """Write a file representation of the scaling DAG."""
34
+ path = path or "dags/"
35
+ path = Path(path)
36
+ path.mkdir(exist_ok=True)
37
+ workflow_py = path / f"{dag_name}.py"
38
+ with workflow_py.open(mode="w") as f:
39
+ f.write(dag_body)
40
+ return workflow_py
41
+
42
+
43
+ def _scale_dag(
44
+ dag_name: str,
45
+ sleep_duration_seconds: int = 60,
46
+ queue: str | None = None,
47
+ concurrent_task_count: int = 16,
48
+ ) -> str:
49
+ queue = queue or "default"
50
+
51
+ imports = f"""# Scale {concurrent_task_count} DAG on queue {queue}
52
+ from datetime import timedelta
53
+ import pendulum
54
+ from airflow import DAG
55
+ from airflow.operators.bash import BashOperator
56
+ """
57
+ dag = f"""with DAG(
58
+ dag_id="{dag_name}",
59
+ start_date=pendulum.today("UTC").add(days=-2),
60
+ schedule=None,
61
+ catchup=False,
62
+ tags=["ops", "scale"],
63
+ ) as d:"""
64
+ tasks = []
65
+ for idx in range(concurrent_task_count):
66
+ task = f""" t{idx} = BashOperator(
67
+ task_id="t{idx}",
68
+ bash_command=f"sleep {sleep_duration_seconds}",
69
+ retries=0,
70
+ retry_delay=timedelta(seconds=60),
71
+ owner="DKIST Data Center",
72
+ queue="{queue}",
73
+ )"""
74
+ tasks.append(task)
75
+ parts = [imports, dag] + tasks
76
+ body = "\n".join(parts)
77
+ return body
@@ -0,0 +1,15 @@
1
+ """Task for parallelization testing which sleeps a configurable amount of time"""
2
+ from time import sleep
3
+
4
+ from dkist_processing_core import TaskBase
5
+
6
+
7
+ __all__ = ["WaitTask"]
8
+
9
+
10
# Fixed sleep duration (seconds) for the smoke-test wait task.
SLEEP_TIME = 60


class WaitTask(TaskBase):
    """Smoke-test task that occupies a worker slot by sleeping."""

    def run(self) -> None:
        """Block for SLEEP_TIME seconds; no other side effects."""
        sleep(SLEEP_TIME)
@@ -0,0 +1,28 @@
1
+ """Workflows to test task submission and spin up"""
2
+ from dkist_processing_common.tasks import TrialTeardown
3
+ from dkist_processing_core import ResourceQueue
4
+ from dkist_processing_core import Workflow
5
+
6
+ from dkist_processing_ops.tasks import WaitTask
7
+
8
+
9
+ smoke_default = Workflow(
10
+ input_data="ops",
11
+ output_data="common",
12
+ category="smoke",
13
+ detail="default",
14
+ workflow_package=__package__,
15
+ )
16
+ smoke_default.add_node(task=WaitTask, upstreams=None, resource_queue=ResourceQueue.DEFAULT)
17
+ smoke_default.add_node(task=TrialTeardown, upstreams=WaitTask)
18
+
19
+
20
+ smoke_high_mem = Workflow(
21
+ input_data="ops",
22
+ output_data="common",
23
+ category="smoke",
24
+ detail="high-mem",
25
+ workflow_package=__package__,
26
+ )
27
+ smoke_high_mem.add_node(task=WaitTask, upstreams=None, resource_queue=ResourceQueue.HIGH_MEMORY)
28
+ smoke_high_mem.add_node(task=TrialTeardown, upstreams=WaitTask)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dkist-processing-ops
3
- Version: 1.0.0rc3
3
+ Version: 1.0.1
4
4
  Summary: Automated Processing smoke test and operations workflows
5
5
  Author-email: NSO / AURA <dkistdc@nso.edu>
6
6
  License: BSD 3-Clause
@@ -11,7 +11,7 @@ Classifier: Programming Language :: Python :: 3.11
11
11
  Requires-Python: >=3.11
12
12
  Description-Content-Type: text/x-rst
13
13
  License-File: LICENSE.rst
14
- Requires-Dist: dkist-processing-core==3.0.2rc1
14
+ Requires-Dist: dkist-processing-common==6.1.0
15
15
  Requires-Dist: dkist-service-configuration==1.1.0
16
16
  Provides-Extra: test
17
17
  Requires-Dist: pytest; extra == "test"
@@ -13,9 +13,10 @@ dkist_processing_ops.egg-info/dependency_links.txt
13
13
  dkist_processing_ops.egg-info/not-zip-safe
14
14
  dkist_processing_ops.egg-info/requires.txt
15
15
  dkist_processing_ops.egg-info/top_level.txt
16
+ dkist_processing_ops/dags/scale.py
16
17
  dkist_processing_ops/tasks/__init__.py
17
18
  dkist_processing_ops/tasks/wait.py
18
19
  dkist_processing_ops/tests/__init__.py
19
20
  dkist_processing_ops/tests/test_workflows.py
20
21
  dkist_processing_ops/workflows/__init__.py
21
- dkist_processing_ops/workflows/scale.py
22
+ dkist_processing_ops/workflows/smoke.py
@@ -1,4 +1,4 @@
1
- dkist-processing-core==3.0.2rc1
1
+ dkist-processing-common==6.1.0
2
2
  dkist-service-configuration==1.1.0
3
3
 
4
4
  [test]
@@ -23,7 +23,7 @@ authors = [
23
23
  ]
24
24
 
25
25
  dependencies = [
26
- "dkist-processing-core==3.0.2rc1",
26
+ "dkist-processing-common==6.1.0",
27
27
  "dkist-service-configuration==1.1.0",
28
28
  ]
29
29
  dynamic = ["version"]
@@ -1,143 +0,0 @@
1
- """Task for parallelization testing which sleeps a configurable amount of time"""
2
- from time import sleep
3
-
4
- from dkist_processing_core import TaskBase
5
-
6
-
7
- __all__ = [f"WaitTask{i}" for i in range(32)]
8
-
9
-
10
- SLEEP_TIME = 300
11
-
12
-
13
- class WaitTaskBase(TaskBase):
14
- def run(self) -> None:
15
- sleep(SLEEP_TIME)
16
-
17
-
18
- class WaitTask0(WaitTaskBase):
19
- pass
20
-
21
-
22
- class WaitTask1(WaitTaskBase):
23
- pass
24
-
25
-
26
- class WaitTask2(WaitTaskBase):
27
- pass
28
-
29
-
30
- class WaitTask3(WaitTaskBase):
31
- pass
32
-
33
-
34
- class WaitTask4(WaitTaskBase):
35
- pass
36
-
37
-
38
- class WaitTask5(WaitTaskBase):
39
- pass
40
-
41
-
42
- class WaitTask6(WaitTaskBase):
43
- pass
44
-
45
-
46
- class WaitTask7(WaitTaskBase):
47
- pass
48
-
49
-
50
- class WaitTask8(WaitTaskBase):
51
- pass
52
-
53
-
54
- class WaitTask9(WaitTaskBase):
55
- pass
56
-
57
-
58
- class WaitTask10(WaitTaskBase):
59
- pass
60
-
61
-
62
- class WaitTask11(WaitTaskBase):
63
- pass
64
-
65
-
66
- class WaitTask12(WaitTaskBase):
67
- pass
68
-
69
-
70
- class WaitTask13(WaitTaskBase):
71
- pass
72
-
73
-
74
- class WaitTask14(WaitTaskBase):
75
- pass
76
-
77
-
78
- class WaitTask15(WaitTaskBase):
79
- pass
80
-
81
-
82
- class WaitTask16(WaitTaskBase):
83
- pass
84
-
85
-
86
- class WaitTask17(WaitTaskBase):
87
- pass
88
-
89
-
90
- class WaitTask18(WaitTaskBase):
91
- pass
92
-
93
-
94
- class WaitTask19(WaitTaskBase):
95
- pass
96
-
97
-
98
- class WaitTask20(WaitTaskBase):
99
- pass
100
-
101
-
102
- class WaitTask21(WaitTaskBase):
103
- pass
104
-
105
-
106
- class WaitTask22(WaitTaskBase):
107
- pass
108
-
109
-
110
- class WaitTask23(WaitTaskBase):
111
- pass
112
-
113
-
114
- class WaitTask24(WaitTaskBase):
115
- pass
116
-
117
-
118
- class WaitTask25(WaitTaskBase):
119
- pass
120
-
121
-
122
- class WaitTask26(WaitTaskBase):
123
- pass
124
-
125
-
126
- class WaitTask27(WaitTaskBase):
127
- pass
128
-
129
-
130
- class WaitTask28(WaitTaskBase):
131
- pass
132
-
133
-
134
- class WaitTask29(WaitTaskBase):
135
- pass
136
-
137
-
138
- class WaitTask30(WaitTaskBase):
139
- pass
140
-
141
-
142
- class WaitTask31(WaitTaskBase):
143
- pass
@@ -1,99 +0,0 @@
1
- """Workflows to test parallel scaling."""
2
- from dkist_processing_core import ResourceQueue
3
- from dkist_processing_core import Workflow
4
-
5
- from dkist_processing_ops.tasks import WaitTask0
6
- from dkist_processing_ops.tasks import WaitTask1
7
- from dkist_processing_ops.tasks import WaitTask10
8
- from dkist_processing_ops.tasks import WaitTask11
9
- from dkist_processing_ops.tasks import WaitTask12
10
- from dkist_processing_ops.tasks import WaitTask13
11
- from dkist_processing_ops.tasks import WaitTask14
12
- from dkist_processing_ops.tasks import WaitTask15
13
- from dkist_processing_ops.tasks import WaitTask16
14
- from dkist_processing_ops.tasks import WaitTask17
15
- from dkist_processing_ops.tasks import WaitTask18
16
- from dkist_processing_ops.tasks import WaitTask19
17
- from dkist_processing_ops.tasks import WaitTask2
18
- from dkist_processing_ops.tasks import WaitTask20
19
- from dkist_processing_ops.tasks import WaitTask21
20
- from dkist_processing_ops.tasks import WaitTask22
21
- from dkist_processing_ops.tasks import WaitTask23
22
- from dkist_processing_ops.tasks import WaitTask24
23
- from dkist_processing_ops.tasks import WaitTask25
24
- from dkist_processing_ops.tasks import WaitTask26
25
- from dkist_processing_ops.tasks import WaitTask27
26
- from dkist_processing_ops.tasks import WaitTask28
27
- from dkist_processing_ops.tasks import WaitTask29
28
- from dkist_processing_ops.tasks import WaitTask3
29
- from dkist_processing_ops.tasks import WaitTask30
30
- from dkist_processing_ops.tasks import WaitTask31
31
- from dkist_processing_ops.tasks import WaitTask4
32
- from dkist_processing_ops.tasks import WaitTask5
33
- from dkist_processing_ops.tasks import WaitTask6
34
- from dkist_processing_ops.tasks import WaitTask7
35
- from dkist_processing_ops.tasks import WaitTask8
36
- from dkist_processing_ops.tasks import WaitTask9
37
-
38
-
39
- ALL_WAIT_TASKS = [
40
- WaitTask0,
41
- WaitTask1,
42
- WaitTask2,
43
- WaitTask3,
44
- WaitTask4,
45
- WaitTask5,
46
- WaitTask6,
47
- WaitTask7,
48
- WaitTask8,
49
- WaitTask9,
50
- WaitTask10,
51
- WaitTask11,
52
- WaitTask12,
53
- WaitTask13,
54
- WaitTask14,
55
- WaitTask15,
56
- WaitTask16,
57
- WaitTask17,
58
- WaitTask18,
59
- WaitTask19,
60
- WaitTask20,
61
- WaitTask21,
62
- WaitTask22,
63
- WaitTask23,
64
- WaitTask24,
65
- WaitTask25,
66
- WaitTask26,
67
- WaitTask27,
68
- WaitTask28,
69
- WaitTask29,
70
- WaitTask30,
71
- WaitTask31,
72
- ]
73
-
74
-
75
- def add_parallel_nodes(count: int, workflow: Workflow, resource_queue: ResourceQueue):
76
- """Add the 'count' number of nodes to run in parallel to a workflow"""
77
- for task in ALL_WAIT_TASKS[:count]:
78
- workflow.add_node(task=task, upstreams=None, resource_queue=resource_queue)
79
-
80
-
81
- # Default resource queue
82
- thirty_two_default = Workflow(
83
- input_data="ops",
84
- output_data="scale",
85
- category="default",
86
- detail="32",
87
- workflow_package=__package__,
88
- )
89
- add_parallel_nodes(count=32, workflow=thirty_two_default, resource_queue=ResourceQueue.DEFAULT)
90
-
91
- # High memory resource queue
92
- thirty_two_high_mem = Workflow(
93
- input_data="ops",
94
- output_data="scale",
95
- category="high_mem",
96
- detail="32",
97
- workflow_package=__package__,
98
- )
99
- add_parallel_nodes(count=32, workflow=thirty_two_high_mem, resource_queue=ResourceQueue.HIGH_MEMORY)