dkist-processing-ops-1.0.0.tar.gz
This diff represents the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their respective public registries.
Potentially problematic release.
This version of dkist-processing-ops might be problematic.
- dkist-processing-ops-1.0.0/.gitignore +161 -0
- dkist-processing-ops-1.0.0/.pre-commit-config.yaml +20 -0
- dkist-processing-ops-1.0.0/LICENSE.rst +8 -0
- dkist-processing-ops-1.0.0/MANIFEST.in +19 -0
- dkist-processing-ops-1.0.0/PKG-INFO +41 -0
- dkist-processing-ops-1.0.0/README.rst +21 -0
- dkist-processing-ops-1.0.0/bitbucket-pipelines.yml +84 -0
- dkist-processing-ops-1.0.0/dkist_processing_ops/__init__.py +0 -0
- dkist-processing-ops-1.0.0/dkist_processing_ops/_version.py +16 -0
- dkist-processing-ops-1.0.0/dkist_processing_ops/dags/scale.py +78 -0
- dkist-processing-ops-1.0.0/dkist_processing_ops/tasks/__init__.py +2 -0
- dkist-processing-ops-1.0.0/dkist_processing_ops/tasks/wait.py +15 -0
- dkist-processing-ops-1.0.0/dkist_processing_ops/tests/__init__.py +0 -0
- dkist-processing-ops-1.0.0/dkist_processing_ops/tests/test_workflows.py +9 -0
- dkist-processing-ops-1.0.0/dkist_processing_ops/workflows/__init__.py +0 -0
- dkist-processing-ops-1.0.0/dkist_processing_ops/workflows/smoke.py +28 -0
- dkist-processing-ops-1.0.0/dkist_processing_ops.egg-info/PKG-INFO +41 -0
- dkist-processing-ops-1.0.0/dkist_processing_ops.egg-info/SOURCES.txt +22 -0
- dkist-processing-ops-1.0.0/dkist_processing_ops.egg-info/dependency_links.txt +1 -0
- dkist-processing-ops-1.0.0/dkist_processing_ops.egg-info/not-zip-safe +1 -0
- dkist-processing-ops-1.0.0/dkist_processing_ops.egg-info/requires.txt +7 -0
- dkist-processing-ops-1.0.0/dkist_processing_ops.egg-info/top_level.txt +2 -0
- dkist-processing-ops-1.0.0/pyproject.toml +80 -0
- dkist-processing-ops-1.0.0/setup.cfg +4 -0
dkist-processing-ops-1.0.0/.gitignore
@@ -0,0 +1,161 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+dkist_quality/_version.py
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
dkist-processing-ops-1.0.0/.pre-commit-config.yaml
@@ -0,0 +1,20 @@
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+repos:
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v2.4.0
+    hooks:
+      - id: trailing-whitespace
+      - id: end-of-file-fixer
+      - id: check-yaml
+      - id: check-added-large-files
+      - id: debug-statements
+  - repo: https://github.com/psf/black
+    rev: 22.3.0
+    hooks:
+      - id: black
+        args: [ "-l 100" ]
+  - repo: https://github.com/asottile/reorder_python_imports
+    rev: v2.3.5
+    hooks:
+      - id: reorder-python-imports
dkist-processing-ops-1.0.0/LICENSE.rst
@@ -0,0 +1,8 @@
+License
+-------
+
+This project is Copyright (c) NSO / AURA and licensed under
+the terms of the BSD 3-Clause license. This package is based upon
+the `Openastronomy packaging guide <https://github.com/OpenAstronomy/packaging-guide>`_
+which is licensed under the BSD 3-clause licence. See the licenses folder for
+more information.
dkist-processing-ops-1.0.0/MANIFEST.in
@@ -0,0 +1,19 @@
+# Include standard files
+include README.rst
+include CHANGES.rst
+include setup.cfg
+include LICENSE.rst
+include pyproject.toml
+
+# Exclude specific files
+# All files which are tracked by git and not explicitly excluded here are included by setuptools_scm
+
+# Prune folders
+prune build
+prune docs/_build
+prune docs/api
+global-exclude *.pyc *.o
+
+# This subpackage is only used in development checkouts
+# and should not be included in built tarballs
+prune dkist_quality/_dev
dkist-processing-ops-1.0.0/PKG-INFO
@@ -0,0 +1,41 @@
+Metadata-Version: 2.1
+Name: dkist-processing-ops
+Version: 1.0.0
+Summary: Automated Processing smoke test and operations workflows
+Author-email: NSO / AURA <dkistdc@nso.edu>
+License: BSD 3-Clause
+Project-URL: repository, https://bitbucket.org/dkistdc/dkist-processing-ops
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.11
+Requires-Python: >=3.11
+Description-Content-Type: text/x-rst
+License-File: LICENSE.rst
+Requires-Dist: dkist-processing-common==6.1.0
+Requires-Dist: dkist-service-configuration==1.1.0
+Provides-Extra: test
+Requires-Dist: pytest; extra == "test"
+Requires-Dist: pytest-cov; extra == "test"
+Requires-Dist: pytest-xdist; extra == "test"
+
+dkist-processing-ops
+--------------------
+|codecov|
+
+This repository works in concert with `dkist-processing-core <https://pypi.org/project/dkist-processing-core/>`_ and
+`dkist-processing-common <https://pypi.org/project/dkist-processing-common/>`_ to provide workflows for the
+operational management and smoke testing of the `Automated Processing <https://nso.atlassian.net/wiki/spaces/DPD/pages/3671451/04+-+Automated+Processing>`_ stack.
+
+
+Developer Setup
+~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+    pip install -e .[test]
+    pip install pre-commit
+    pre-commit install
+
+
+.. |codecov| image:: https://codecov.io/bb/dkistdc/dkist-processing-ops/branch/main/graph/badge.svg
+    :target: https://codecov.io/bb/dkistdc/dkist-processing-ops
dkist-processing-ops-1.0.0/README.rst
@@ -0,0 +1,21 @@
+dkist-processing-ops
+--------------------
+|codecov|
+
+This repository works in concert with `dkist-processing-core <https://pypi.org/project/dkist-processing-core/>`_ and
+`dkist-processing-common <https://pypi.org/project/dkist-processing-common/>`_ to provide workflows for the
+operational management and smoke testing of the `Automated Processing <https://nso.atlassian.net/wiki/spaces/DPD/pages/3671451/04+-+Automated+Processing>`_ stack.
+
+
+Developer Setup
+~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+    pip install -e .[test]
+    pip install pre-commit
+    pre-commit install
+
+
+.. |codecov| image:: https://codecov.io/bb/dkistdc/dkist-processing-ops/branch/main/graph/badge.svg
+    :target: https://codecov.io/bb/dkistdc/dkist-processing-ops
dkist-processing-ops-1.0.0/bitbucket-pipelines.yml
@@ -0,0 +1,84 @@
+#Build Configuration for docker deployment to artifactory
+image: python:3.11
+
+definitions:
+  services:
+    redis:
+      image: redis
+  steps:
+    - step: &lint
+        caches:
+          - pip
+        name: Lint
+        script:
+          - pip install -U pip
+          - pip install pre-commit
+          - pre-commit install
+          - pre-commit run --all-files
+    - step: &scan
+        caches:
+          - pip
+        name: Scan
+        script:
+          - pip install -U pip
+          - pip install .
+          - pip freeze | grep -v @ > requirements.txt
+          - cat requirements.txt
+          - echo $SNYK_VERSION
+          - curl -L -o snyk https://github.com/snyk/snyk/releases/download/$SNYK_VERSION/snyk-linux
+          - chmod 755 snyk
+          - ./snyk -d auth $SNYK_TOKEN
+          - echo $SNYK_CLI_COMMAND
+          - $SNYK_CLI_COMMAND
+    - step: &test
+        caches:
+          - pip
+        name: Test
+        script:
+          - pip install -U pip
+          - pip install .[test]
+          - pytest -v -n auto -m "not development" --pyargs dkist_processing_ops --cov dkist_processing_ops --cov-report xml:coverage.xml
+        services:
+          - redis
+    - step: &push_workflow
+        caches:
+          - pip
+        name: Push Workflow
+        script:
+          - pip install -U pip
+          - pip install .
+          - export BUILD_VERSION="${BITBUCKET_TAG:1}"
+          - export ARTIFACT_FOLDER="${BITBUCKET_REPO_SLUG}_${BUILD_VERSION}/"
+          - python -c "from dkist_processing_core.build_utils import export_dags; import dkist_processing_ops.workflows as workflow_package; export_dags(workflow_package, '${ARTIFACT_FOLDER}')"
+          - python -c "from dkist_processing_ops.dags.scale import export_scale_dags; export_scale_dags('${ARTIFACT_FOLDER}')"
+          - export SOURCE_PATH="workflow_${BUILD_VERSION}.gz"
+          - tar --exclude="bitbucket-pipelines.yml" -cvzf ${SOURCE_PATH} ${ARTIFACT_FOLDER}
+          - export TARGET_PATH="generic-packages/dkist-processing-ops/${BUILD_VERSION}/"
+          - curl -fL https://getcli.jfrog.io | sh
+          - ./jfrog rt u --url $ARTIFACTORY_URL --user $ARTIFACTORY_USER --password $ARTIFACTORY_PASSWORD ${SOURCE_PATH} ${TARGET_PATH}
+    - step: &push_code
+        caches:
+          - pip
+        name: Push Code
+        script:
+          - pip install -U pip
+          - pip install twine build
+          - python -m build --outdir wheelhouse .
+          - python -m twine upload --skip-existing wheelhouse/*
+
+
+
+pipelines:
+  default:
+    - step: *lint
+    - parallel:
+        - step: *scan
+        - step: *test
+  tags:
+    'v*':
+      - parallel:
+          - step: *lint
+          - step: *scan
+          - step: *test
+      - step: *push_workflow
+      - step: *push_code
dkist-processing-ops-1.0.0/dkist_processing_ops/__init__.py
File without changes
dkist-processing-ops-1.0.0/dkist_processing_ops/_version.py
@@ -0,0 +1,16 @@
+# file generated by setuptools_scm
+# don't change, don't track in version control
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+    from typing import Tuple, Union
+    VERSION_TUPLE = Tuple[Union[int, str], ...]
+else:
+    VERSION_TUPLE = object
+
+version: str
+__version__: str
+__version_tuple__: VERSION_TUPLE
+version_tuple: VERSION_TUPLE
+
+__version__ = version = '1.0.0'
+__version_tuple__ = version_tuple = (1, 0, 0)
dkist-processing-ops-1.0.0/dkist_processing_ops/dags/scale.py
@@ -0,0 +1,78 @@
+"""
+DAG to use up workers to support scaling
+"""
+from os import environ
+from pathlib import Path
+
+from dkist_processing_core.build_utils import export_dags
+
+
+def export_scale_dags(path: Path | str) -> list[Path]:
+    """Export all the ops dags"""
+    result = []
+    dag_prefix = "ops_scale"
+    scales = [16, 32]
+    queues = ["default", "high_memory"]
+    sleep_duration_seconds = 60
+    for queue in queues:
+        for scale in scales:
+            dag_name = f"{dag_prefix}_{queue}_{scale}"
+            dag_body = _scale_dag(
+                dag_name=dag_name,
+                sleep_duration_seconds=sleep_duration_seconds,
+                queue=queue,
+                concurrent_task_count=scale,
+            )
+            dag_path = _export_ops_dag(dag_name=dag_name, dag_body=dag_body, path=path)
+            result.append(dag_path)
+    return result
+
+
+def _export_ops_dag(dag_name: str, dag_body: str, path: Path | str | None = None) -> Path:
+    """Write a file representation of the scaling DAG."""
+    path = path or "dags/"
+    path = Path(path)
+    path.mkdir(exist_ok=True)
+    version = environ.get("BUILD_VERSION", "dev")
+    dag_name = f"{dag_name}_{version}"
+    workflow_py = path / f"{dag_name}.py"
+    with workflow_py.open(mode="w") as f:
+        f.write(dag_body)
+    return workflow_py
+
+
+def _scale_dag(
+    dag_name: str,
+    sleep_duration_seconds: int = 60,
+    queue: str | None = None,
+    concurrent_task_count: int = 16,
+) -> str:
+    queue = queue or "default"
+
+    imports = f"""# Scale {concurrent_task_count} DAG on queue {queue}
+from datetime import timedelta
+import pendulum
+from airflow import DAG
+from airflow.operators.bash import BashOperator
+"""
+    dag = f"""with DAG(
+    dag_id="{dag_name}",
+    start_date=pendulum.today("UTC").add(days=-2),
+    schedule=None,
+    catchup=False,
+    tags=["ops", "scale"],
+) as d:"""
+    tasks = []
+    for idx in range(concurrent_task_count):
+        task = f"""    t{idx} = BashOperator(
+        task_id="t{idx}",
+        bash_command=f"sleep {sleep_duration_seconds}",
+        retries=0,
+        retry_delay=timedelta(seconds=60),
+        owner="DKIST Data Center",
+        queue="{queue}",
+    )"""
+        tasks.append(task)
+    parts = [imports, dag] + tasks
+    body = "\n".join(parts)
+    return body
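For orientation, here is a sketch of the module text these templates emit, assuming a hypothetical call like _scale_dag(dag_name="ops_scale_default_2", queue="default", concurrent_task_count=2) with only two tasks for brevity (the exported DAGs use 16 or 32, and the tasks have no upstream links, so they all occupy worker slots at once). Note that the dag_id stays unversioned since only the exported filename gains the BUILD_VERSION suffix, and the literal f-prefix on bash_command survives templating verbatim, which is harmless because interpolation has already happened:

# Sketch of generated output (hypothetical 2-task call; real exports use 16 or 32 tasks).
# Scale 2 DAG on queue default
from datetime import timedelta
import pendulum
from airflow import DAG
from airflow.operators.bash import BashOperator

with DAG(
    dag_id="ops_scale_default_2",
    start_date=pendulum.today("UTC").add(days=-2),
    schedule=None,
    catchup=False,
    tags=["ops", "scale"],
) as d:
    # Independent sleep tasks; with no dependencies between them they run concurrently.
    t0 = BashOperator(
        task_id="t0",
        bash_command=f"sleep 60",
        retries=0,
        retry_delay=timedelta(seconds=60),
        owner="DKIST Data Center",
        queue="default",
    )
    t1 = BashOperator(
        task_id="t1",
        bash_command=f"sleep 60",
        retries=0,
        retry_delay=timedelta(seconds=60),
        owner="DKIST Data Center",
        queue="default",
    )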
dkist-processing-ops-1.0.0/dkist_processing_ops/tasks/wait.py
@@ -0,0 +1,15 @@
+"""Task for parallelization testing which sleeps a configurable amount of time"""
+from time import sleep
+
+from dkist_processing_core import TaskBase
+
+
+__all__ = ["WaitTask"]
+
+
+SLEEP_TIME = 60
+
+
+class WaitTask(TaskBase):
+    def run(self) -> None:
+        sleep(SLEEP_TIME)
dkist-processing-ops-1.0.0/dkist_processing_ops/tests/__init__.py
File without changes
dkist-processing-ops-1.0.0/dkist_processing_ops/tests/test_workflows.py
@@ -0,0 +1,9 @@
+"""Test integrity of workflows."""
+from dkist_processing_core.build_utils import validate_workflows
+
+from dkist_processing_ops import workflows
+
+
+def test_workflow_integrity():
+    """Validate workflow to ensure acyclic-ness and export compilation"""
+    validate_workflows(workflows)
dkist-processing-ops-1.0.0/dkist_processing_ops/workflows/__init__.py
File without changes
dkist-processing-ops-1.0.0/dkist_processing_ops/workflows/smoke.py
@@ -0,0 +1,28 @@
+"""Workflows to test task submission and spin up"""
+from dkist_processing_common.tasks import TrialTeardown
+from dkist_processing_core import ResourceQueue
+from dkist_processing_core import Workflow
+
+from dkist_processing_ops.tasks import WaitTask
+
+
+smoke_default = Workflow(
+    input_data="ops",
+    output_data="common",
+    category="smoke",
+    detail="default",
+    workflow_package=__package__,
+)
+smoke_default.add_node(task=WaitTask, upstreams=None, resource_queue=ResourceQueue.DEFAULT)
+smoke_default.add_node(task=TrialTeardown, upstreams=WaitTask)
+
+
+smoke_high_mem = Workflow(
+    input_data="ops",
+    output_data="common",
+    category="smoke",
+    detail="high-mem",
+    workflow_package=__package__,
+)
+smoke_high_mem.add_node(task=WaitTask, upstreams=None, resource_queue=ResourceQueue.HIGH_MEMORY)
+smoke_high_mem.add_node(task=TrialTeardown, upstreams=WaitTask)
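These Workflow objects are what the push_workflow pipeline step compiles into Airflow DAG files. A minimal sketch of running the same export locally, lifted from the pipeline's own python -c invocation (the "dags/" output folder name here is an arbitrary local choice):

# Minimal sketch of the export performed by the push_workflow step;
# "dags/" is an arbitrary local output folder, not a path mandated by the package.
from dkist_processing_core.build_utils import export_dags

import dkist_processing_ops.workflows as workflow_package

export_dags(workflow_package, "dags/")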
dkist-processing-ops-1.0.0/dkist_processing_ops.egg-info/PKG-INFO
@@ -0,0 +1,41 @@
+Metadata-Version: 2.1
+Name: dkist-processing-ops
+Version: 1.0.0
+Summary: Automated Processing smoke test and operations workflows
+Author-email: NSO / AURA <dkistdc@nso.edu>
+License: BSD 3-Clause
+Project-URL: repository, https://bitbucket.org/dkistdc/dkist-processing-ops
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.11
+Requires-Python: >=3.11
+Description-Content-Type: text/x-rst
+License-File: LICENSE.rst
+Requires-Dist: dkist-processing-common==6.1.0
+Requires-Dist: dkist-service-configuration==1.1.0
+Provides-Extra: test
+Requires-Dist: pytest; extra == "test"
+Requires-Dist: pytest-cov; extra == "test"
+Requires-Dist: pytest-xdist; extra == "test"
+
+dkist-processing-ops
+--------------------
+|codecov|
+
+This repository works in concert with `dkist-processing-core <https://pypi.org/project/dkist-processing-core/>`_ and
+`dkist-processing-common <https://pypi.org/project/dkist-processing-common/>`_ to provide workflows for the
+operational management and smoke testing of the `Automated Processing <https://nso.atlassian.net/wiki/spaces/DPD/pages/3671451/04+-+Automated+Processing>`_ stack.
+
+
+Developer Setup
+~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+    pip install -e .[test]
+    pip install pre-commit
+    pre-commit install
+
+
+.. |codecov| image:: https://codecov.io/bb/dkistdc/dkist-processing-ops/branch/main/graph/badge.svg
+    :target: https://codecov.io/bb/dkistdc/dkist-processing-ops
dkist-processing-ops-1.0.0/dkist_processing_ops.egg-info/SOURCES.txt
@@ -0,0 +1,22 @@
+.gitignore
+.pre-commit-config.yaml
+LICENSE.rst
+MANIFEST.in
+README.rst
+bitbucket-pipelines.yml
+pyproject.toml
+dkist_processing_ops/__init__.py
+dkist_processing_ops/_version.py
+dkist_processing_ops.egg-info/PKG-INFO
+dkist_processing_ops.egg-info/SOURCES.txt
+dkist_processing_ops.egg-info/dependency_links.txt
+dkist_processing_ops.egg-info/not-zip-safe
+dkist_processing_ops.egg-info/requires.txt
+dkist_processing_ops.egg-info/top_level.txt
+dkist_processing_ops/dags/scale.py
+dkist_processing_ops/tasks/__init__.py
+dkist_processing_ops/tasks/wait.py
+dkist_processing_ops/tests/__init__.py
+dkist_processing_ops/tests/test_workflows.py
+dkist_processing_ops/workflows/__init__.py
+dkist_processing_ops/workflows/smoke.py
dkist-processing-ops-1.0.0/dkist_processing_ops.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
dkist-processing-ops-1.0.0/dkist_processing_ops.egg-info/not-zip-safe
@@ -0,0 +1 @@
+
dkist-processing-ops-1.0.0/pyproject.toml
@@ -0,0 +1,80 @@
+[build-system]
+
+requires = [
+    "setuptools>=62.1",
+    "setuptools_scm[toml]>=6.2",
+]
+
+build-backend = 'setuptools.build_meta'
+
+[project]
+name = "dkist-processing-ops"
+description = "Automated Processing smoke test and operations workflows"
+readme = "README.rst"
+requires-python = ">=3.11"
+classifiers = [
+    "License :: OSI Approved :: BSD License",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.11",
+]
+license = { text = "BSD 3-Clause" }
+authors = [
+    { name = "NSO / AURA", email = "dkistdc@nso.edu" },
+]
+
+dependencies = [
+    "dkist-processing-common==6.1.0",
+    "dkist-service-configuration==1.1.0",
+]
+dynamic = ["version"]
+
+# tox is not required to run the tests, but simplifies IDE integration
+# Pygments is solely to support README.rst rendering
+[project.optional-dependencies]
+test = [
+    "pytest",
+    "pytest-cov",
+    "pytest-xdist",
+]
+
+[project.urls]
+repository = "https://bitbucket.org/dkistdc/dkist-processing-ops"
+
+[tool.setuptools]
+zip-safe = false
+include-package-data = true
+
+[tool.setuptools.packages.find]
+
+[tool.setuptools.package-data]
+# include .ttf files from any directory
+"*" = ["*.ttf"]
+
+[tool.setuptools_scm]
+write_to = "dkist_processing_ops/_version.py"
+
+[tool.pytest.ini_options]
+testpaths = [
+    "dkist_processing_ops",
+]
+markers = [
+    "development: For tests that can only be run while developing with a sidecar proxy (as opposed to in bitbucket pipelines)"
+]
+
+[tool.coverage.run]
+omit = [
+    "dkist_processing_ops/__init*",
+    "dkist_processing_ops/conftest.py",
+    "dkist_processing_ops/*setup_package*",
+    "dkist_processing_ops/tests/*",
+    "dkist_processing_ops/*/tests/*",
+    "dkist_processing_ops/extern/*",
+    "dkist_processing_ops/version*",
+    "*/dkist_processing_ops/__init*",
+    "*/dkist_processing_ops/conftest.py",
+    "*/dkist_processing_ops/*setup_package*",
+    "*/dkist_processing_ops/tests/*",
+    "*/dkist_processing_ops/*/tests/*",
+    "*/dkist_processing_ops/extern/*",
+    "*/dkist_processing_ops/version*",
+]
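The development marker declared above is what the Test pipeline step filters on with -m "not development". A hypothetical test carrying that marker, shown purely for illustration, would look like:

# Hypothetical example only: a test gated by the "development" marker
# declared in [tool.pytest.ini_options]; CI excludes it via pytest -m "not development".
import pytest


@pytest.mark.development
def test_needs_sidecar_proxy():
    """Runs only in a development environment where the sidecar proxy is available."""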