lsst-ctrl-execute 28.2025.500__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63) hide show
  1. lsst_ctrl_execute-28.2025.500/.github/workflows/build.yaml +115 -0
  2. lsst_ctrl_execute-28.2025.500/.github/workflows/formatting.yaml +11 -0
  3. lsst_ctrl_execute-28.2025.500/.github/workflows/lint.yaml +11 -0
  4. lsst_ctrl_execute-28.2025.500/.github/workflows/rebase_checker.yaml +8 -0
  5. lsst_ctrl_execute-28.2025.500/.gitignore +234 -0
  6. lsst_ctrl_execute-28.2025.500/.pre-commit-config.yaml +27 -0
  7. lsst_ctrl_execute-28.2025.500/PKG-INFO +20 -0
  8. lsst_ctrl_execute-28.2025.500/SConstruct +4 -0
  9. lsst_ctrl_execute-28.2025.500/etc/configs/gordon_config.py +7 -0
  10. lsst_ctrl_execute-28.2025.500/etc/configs/lsst_config.py +5 -0
  11. lsst_ctrl_execute-28.2025.500/etc/scripts/generateDag.py +220 -0
  12. lsst_ctrl_execute-28.2025.500/pyproject.toml +52 -0
  13. lsst_ctrl_execute-28.2025.500/python/lsst/__init__.py +3 -0
  14. lsst_ctrl_execute-28.2025.500/python/lsst/ctrl/__init__.py +3 -0
  15. lsst_ctrl_execute-28.2025.500/python/lsst/ctrl/execute/__init__.py +27 -0
  16. lsst_ctrl_execute-28.2025.500/python/lsst/ctrl/execute/allocationConfig.py +69 -0
  17. lsst_ctrl_execute-28.2025.500/python/lsst/ctrl/execute/allocator.py +474 -0
  18. lsst_ctrl_execute-28.2025.500/python/lsst/ctrl/execute/allocatorParser.py +230 -0
  19. lsst_ctrl_execute-28.2025.500/python/lsst/ctrl/execute/condorConfig.py +75 -0
  20. lsst_ctrl_execute-28.2025.500/python/lsst/ctrl/execute/condorInfoConfig.py +65 -0
  21. lsst_ctrl_execute-28.2025.500/python/lsst/ctrl/execute/envString.py +50 -0
  22. lsst_ctrl_execute-28.2025.500/python/lsst/ctrl/execute/findPackageFile.py +107 -0
  23. lsst_ctrl_execute-28.2025.500/python/lsst/ctrl/execute/libexec/allocateNodes.py +93 -0
  24. lsst_ctrl_execute-28.2025.500/python/lsst/ctrl/execute/libexec/dagIdInfo.py +60 -0
  25. lsst_ctrl_execute-28.2025.500/python/lsst/ctrl/execute/libexec/qdelete.py +48 -0
  26. lsst_ctrl_execute-28.2025.500/python/lsst/ctrl/execute/libexec/qstatus.py +59 -0
  27. lsst_ctrl_execute-28.2025.500/python/lsst/ctrl/execute/namedClassFactory.py +57 -0
  28. lsst_ctrl_execute-28.2025.500/python/lsst/ctrl/execute/pbsPlugin.py +124 -0
  29. lsst_ctrl_execute-28.2025.500/python/lsst/ctrl/execute/qCommand.py +66 -0
  30. lsst_ctrl_execute-28.2025.500/python/lsst/ctrl/execute/seqFile.py +64 -0
  31. lsst_ctrl_execute-28.2025.500/python/lsst/ctrl/execute/slurmPlugin.py +488 -0
  32. lsst_ctrl_execute-28.2025.500/python/lsst/ctrl/execute/templateWriter.py +50 -0
  33. lsst_ctrl_execute-28.2025.500/setup.cfg +13 -0
  34. lsst_ctrl_execute-28.2025.500/tests/README +7 -0
  35. lsst_ctrl_execute-28.2025.500/tests/SConscript +4 -0
  36. lsst_ctrl_execute-28.2025.500/tests/test_allocationConfig.py +61 -0
  37. lsst_ctrl_execute-28.2025.500/tests/test_allocatorParser.py +72 -0
  38. lsst_ctrl_execute-28.2025.500/tests/test_condorConfig.py +123 -0
  39. lsst_ctrl_execute-28.2025.500/tests/test_condorInfoConfig.py +52 -0
  40. lsst_ctrl_execute-28.2025.500/tests/test_dagIdInfo.py +69 -0
  41. lsst_ctrl_execute-28.2025.500/tests/test_findPackageFile.py +44 -0
  42. lsst_ctrl_execute-28.2025.500/tests/test_seqFile.py +49 -0
  43. lsst_ctrl_execute-28.2025.500/tests/test_templateWriter.py +55 -0
  44. lsst_ctrl_execute-28.2025.500/tests/testfiles/allocator-info1.py +6 -0
  45. lsst_ctrl_execute-28.2025.500/tests/testfiles/config_allocation.py +6 -0
  46. lsst_ctrl_execute-28.2025.500/tests/testfiles/config_allocation_slurm.py +5 -0
  47. lsst_ctrl_execute-28.2025.500/tests/testfiles/config_asserts.py +8 -0
  48. lsst_ctrl_execute-28.2025.500/tests/testfiles/config_condor.py +8 -0
  49. lsst_ctrl_execute-28.2025.500/tests/testfiles/config_condorInfo.py +5 -0
  50. lsst_ctrl_execute-28.2025.500/tests/testfiles/config_condor_getenv.py +9 -0
  51. lsst_ctrl_execute-28.2025.500/tests/testfiles/config_condor_setups.py +9 -0
  52. lsst_ctrl_execute-28.2025.500/tests/testfiles/config_condor_slurm.py +9 -0
  53. lsst_ctrl_execute-28.2025.500/tests/testfiles/config_pegasus.py +11 -0
  54. lsst_ctrl_execute-28.2025.500/tests/testfiles/generic.pbs.template +86 -0
  55. lsst_ctrl_execute-28.2025.500/tests/testfiles/generic.pbs.txt +86 -0
  56. lsst_ctrl_execute-28.2025.500/tests/testfiles/generic.slurm.template +6 -0
  57. lsst_ctrl_execute-28.2025.500/tests/testfiles/generic.slurm.txt +6 -0
  58. lsst_ctrl_execute-28.2025.500/tests/testfiles/glidein_condor_config.template +46 -0
  59. lsst_ctrl_execute-28.2025.500/tests/testfiles/glidein_condor_config.txt +46 -0
  60. lsst_ctrl_execute-28.2025.500/tests/testfiles/templateWriter.template +4 -0
  61. lsst_ctrl_execute-28.2025.500/tests/testfiles/templateWriter.txt +4 -0
  62. lsst_ctrl_execute-28.2025.500/tests/testfiles/test.diamond.dag +141 -0
  63. lsst_ctrl_execute-28.2025.500/ups/ctrl_execute.table +7 -0
@@ -0,0 +1,115 @@
1
+ name: build_and_test
2
+
3
+ on:
4
+ push:
5
+ branches:
6
+ - main
7
+ - "u/**"
8
+ tags:
9
+ - "*"
10
+ pull_request:
11
+
12
+ jobs:
13
+ build_and_test:
14
+ runs-on: ubuntu-latest
15
+ strategy:
16
+ matrix:
17
+ python-version: ["3.11", "3.12"]
18
+
19
+ steps:
20
+ - uses: actions/checkout@v4
21
+ with:
22
+ # Need to clone everything for the git tags.
23
+ fetch-depth: 0
24
+
25
+ - name: Install uv
26
+ uses: astral-sh/setup-uv@v5
27
+ with:
28
+ version: "0.5.x"
29
+ enable-cache: true
30
+ python-version: ${{ matrix.python-version }}
31
+
32
+ - name: Install dependencies
33
+ run: |
34
+ uv sync
35
+
36
+ - name: Run tests
37
+ run: >-
38
+ uv run pytest -r a -v -n 3
39
+ --cov=lsst.ctrl.execute --cov-report=xml --cov-report=term --cov-branch
40
+ --junitxml=junit.xml -o junit_family=legacy
41
+
42
+ - name: Upload coverage to codecov
43
+ uses: codecov/codecov-action@v4
44
+ with:
45
+ files: ./coverage.xml
46
+ token: ${{ secrets.CODECOV_TOKEN }}
47
+
48
+ - name: Upload test results to Codecov
49
+ if: ${{ !cancelled() }}
50
+ uses: codecov/test-results-action@v1
51
+ with:
52
+ token: ${{ secrets.CODECOV_TOKEN }}
53
+
54
+ check-changes:
55
+ outputs:
56
+ skip: ${{ steps.check.outputs.skip }}
57
+ runs-on: ubuntu-latest
58
+ if: startsWith(github.ref, 'refs/tags/')
59
+ steps:
60
+ - uses: actions/checkout@v4
61
+ with:
62
+ fetch-depth: 0
63
+ - name: Check if weekly changed
64
+ id: check
65
+ run: |
66
+ # Get SHA hashes for most recent weekly tags
67
+ weekly_sha=$(git tag -l 'w.*' | while read tag; do
68
+ git rev-list -n 1 "${tag}"
69
+ done)
70
+
71
+ echo "Weekly tag SHA ${weekly_sha}"
72
+ # Extract the current tag and its SHA
73
+ current_tag=${GITHUB_REF#refs/tags/}
74
+ echo "Current tag: ${current_tag}"
75
+ current_sha=$(git rev-list -1 "${current_tag}") || echo "no_value"
76
+ echo "Current sha: ${current_sha}"
77
+ # Count occurrences of the current SHA in the weekly SHA list
78
+ n=$(echo "${weekly_sha}" | grep -c "${current_sha}") || echo "0"
79
+ echo "Current tag ${current_tag} (${current_sha}) SHA found ${n} time(s)"
80
+
81
+ # Determine whether to skip the upload based on the count
82
+ if [ "${n}" -gt 1 ]; then
83
+ echo "Skip upload"
84
+ echo "skip=true" >> "${GITHUB_OUTPUT}"
85
+ else
86
+ echo "Enable upload"
87
+ echo "skip=false" >> "${GITHUB_OUTPUT}"
88
+ fi
89
+
90
+ pypi:
91
+ runs-on: ubuntu-latest
92
+ needs: [build_and_test, check-changes]
93
+ permissions:
94
+ id-token: write
95
+ if: "${{ ! startsWith(github.ref, 'refs/tags/w.') || needs.check-changes.outputs.skip == 'false' }}"
96
+
97
+ steps:
98
+ - uses: actions/checkout@v4
99
+ with:
100
+ # Need to clone everything to embed the version.
101
+ fetch-depth: 0
102
+
103
+ - name: Install uv
104
+ uses: astral-sh/setup-uv@v5
105
+ with:
106
+ version: "0.5.x"
107
+ enable-cache: true
108
+ python-version: "3.11"
109
+
110
+ - name: Build and create distribution
111
+ run: |
112
+ uv build
113
+
114
+ - name: Upload
115
+ uses: pypa/gh-action-pypi-publish@release/v1
@@ -0,0 +1,11 @@
1
+ name: Check Python formatting
2
+
3
+ on:
4
+ push:
5
+ branches:
6
+ - main
7
+ pull_request:
8
+
9
+ jobs:
10
+ call-workflow:
11
+ uses: lsst/rubin_workflows/.github/workflows/formatting.yaml@main
@@ -0,0 +1,11 @@
1
+ name: lint
2
+
3
+ on:
4
+ push:
5
+ branches:
6
+ - main
7
+ pull_request:
8
+
9
+ jobs:
10
+ call-workflow:
11
+ uses: lsst/rubin_workflows/.github/workflows/lint.yaml@main
@@ -0,0 +1,8 @@
1
+ ---
2
+ name: Check that 'main' is not merged into the development branch
3
+
4
+ on: pull_request
5
+
6
+ jobs:
7
+ call-workflow:
8
+ uses: lsst/rubin_workflows/.github/workflows/rebase_checker.yaml@main
@@ -0,0 +1,234 @@
1
+ # General
2
+ .DS_Store
3
+ .AppleDouble
4
+ .LSOverride
5
+
6
+ # Icon must end with two \r
7
+ Icon
8
+
9
+ # Thumbnails
10
+ ._*
11
+
12
+ # Files that might appear in the root of a volume
13
+ .DocumentRevisions-V100
14
+ .fseventsd
15
+ .Spotlight-V100
16
+ .TemporaryItems
17
+ .Trashes
18
+ .VolumeIcon.icns
19
+ .com.apple.timemachine.donotpresent
20
+
21
+ # Directories potentially created on remote AFP share
22
+ .AppleDB
23
+ .AppleDesktop
24
+ Network Trash Folder
25
+ Temporary Items
26
+ .apdisk
27
+ *~
28
+
29
+ # temporary files which can be created if a process still has a handle open of a deleted file
30
+ .fuse_hidden*
31
+
32
+ # KDE directory preferences
33
+ .directory
34
+
35
+ # Linux trash folder which might appear on any partition or disk
36
+ .Trash-*
37
+
38
+ # .nfs files are created when an open file is removed but is still being accessed
39
+ .nfs*
40
+
41
+ # Byte-compiled / optimized / DLL files
42
+ __pycache__/
43
+ *.py[cod]
44
+ *$py.class
45
+
46
+ # C extensions
47
+ *.so
48
+
49
+ # Distribution / packaging
50
+ .Python
51
+ build/
52
+ develop-eggs/
53
+ dist/
54
+ downloads/
55
+ eggs/
56
+ .eggs/
57
+ lib/
58
+ lib64/
59
+ parts/
60
+ sdist/
61
+ var/
62
+ wheels/
63
+ share/python-wheels/
64
+ *.egg-info/
65
+ .installed.cfg
66
+ *.egg
67
+ MANIFEST
68
+
69
+ # PyInstaller
70
+ # Usually these files are written by a python script from a template
71
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
72
+ *.manifest
73
+ *.spec
74
+
75
+ # Installer logs
76
+ pip-log.txt
77
+ pip-delete-this-directory.txt
78
+
79
+ # Unit test / coverage reports
80
+ htmlcov/
81
+ .tox/
82
+ .nox/
83
+ .coverage
84
+ .coverage.*
85
+ .cache
86
+ nosetests.xml
87
+ coverage.xml
88
+ *.cover
89
+ *.py,cover
90
+ .hypothesis/
91
+ .pytest_cache/
92
+ cover/
93
+
94
+ # Translations
95
+ *.mo
96
+ *.pot
97
+
98
+ # Django stuff:
99
+ *.log
100
+ local_settings.py
101
+ db.sqlite3
102
+ db.sqlite3-journal
103
+
104
+ # Flask stuff:
105
+ instance/
106
+ .webassets-cache
107
+
108
+ # Scrapy stuff:
109
+ .scrapy
110
+
111
+ # Sphinx documentation
112
+ docs/_build/
113
+
114
+ # PyBuilder
115
+ .pybuilder/
116
+ target/
117
+
118
+ # Jupyter Notebook
119
+ .ipynb_checkpoints
120
+
121
+ # IPython
122
+ profile_default/
123
+ ipython_config.py
124
+
125
+ # pyenv
126
+ # For a library or package, you might want to ignore these files since the code is
127
+ # intended to run in multiple environments; otherwise, check them in:
128
+ # .python-version
129
+
130
+ # pipenv
131
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
132
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
133
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
134
+ # install all needed dependencies.
135
+ #Pipfile.lock
136
+
137
+ # UV
138
+ # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
139
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
140
+ # commonly ignored for libraries.
141
+ uv.lock
142
+
143
+ # poetry
144
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
145
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
146
+ # commonly ignored for libraries.
147
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
148
+ #poetry.lock
149
+
150
+ # pdm
151
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
152
+ #pdm.lock
153
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
154
+ # in version control.
155
+ # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
156
+ .pdm.toml
157
+ .pdm-python
158
+ .pdm-build/
159
+
160
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
161
+ __pypackages__/
162
+
163
+ # Celery stuff
164
+ celerybeat-schedule
165
+ celerybeat.pid
166
+
167
+ # SageMath parsed files
168
+ *.sage.py
169
+
170
+ # Environments
171
+ .env
172
+ .env.*
173
+ .envrc
174
+ .venv
175
+ env/
176
+ venv/
177
+ ENV/
178
+ env.bak/
179
+ venv.bak/
180
+
181
+ # Spyder project settings
182
+ .spyderproject
183
+ .spyproject
184
+
185
+ # Rope project settings
186
+ .ropeproject
187
+
188
+ # mkdocs documentation
189
+ /site
190
+
191
+ # mypy
192
+ .mypy_cache/
193
+ .dmypy.json
194
+ dmypy.json
195
+
196
+ # Pyre type checker
197
+ .pyre/
198
+
199
+ # pytype static type analyzer
200
+ .pytype/
201
+
202
+ # Cython debug symbols
203
+ cython_debug/
204
+
205
+ # PyCharm
206
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
207
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
208
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
209
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
210
+ #.idea/
211
+
212
+ # PyPI configuration file
213
+ .pypirc
214
+
215
+ # Local Ignores
216
+
217
+ # Do not commit the local version file
218
+ **/version.py
219
+
220
+ # VS Code
221
+ .vscode/
222
+
223
+ # Ruff
224
+ # Ruff usually installs a .gitignore file in its directory.
225
+ .ruff_cache/
226
+
227
+ # Scons
228
+ .sconsign.dblite
229
+ .sconf_temp
230
+ tests/.tests/
231
+ bin/*
232
+ python/*.dist-info
233
+ config.log
234
+ _build.*
@@ -0,0 +1,27 @@
1
+ repos:
2
+ - repo: https://github.com/pre-commit/pre-commit-hooks
3
+ rev: v4.4.0
4
+ hooks:
5
+ - id: check-yaml
6
+ args:
7
+ - "--unsafe"
8
+ - id: end-of-file-fixer
9
+ - id: trailing-whitespace
10
+ - repo: https://github.com/psf/black
11
+ rev: 24.8.0
12
+ hooks:
13
+ - id: black
14
+ # It is recommended to specify the latest version of Python
15
+ # supported by your project here, or alternatively use
16
+ # pre-commit's default_language_version, see
17
+ # https://pre-commit.com/#top_level-default_language_version
18
+ language_version: python3.10
19
+ - repo: https://github.com/pycqa/isort
20
+ rev: 5.12.0
21
+ hooks:
22
+ - id: isort
23
+ name: isort (python)
24
+ - repo: https://github.com/PyCQA/flake8
25
+ rev: 6.1.0
26
+ hooks:
27
+ - id: flake8
@@ -0,0 +1,20 @@
1
+ Metadata-Version: 2.4
2
+ Name: lsst-ctrl-execute
3
+ Version: 28.2025.500
4
+ Summary: Utilities for executing and managing workloads.
5
+ Project-URL: Homepage, https://github.com/lsst/ctrl_execute
6
+ Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
7
+ License: BSD 3-Clause License
8
+ Keywords: lsst
9
+ Classifier: Intended Audience :: Science/Research
10
+ Classifier: License :: OSI Approved :: BSD License
11
+ Classifier: Operating System :: OS Independent
12
+ Classifier: Programming Language :: Python :: 3
13
+ Classifier: Programming Language :: Python :: 3.11
14
+ Classifier: Programming Language :: Python :: 3.12
15
+ Classifier: Topic :: Scientific/Engineering :: Astronomy
16
+ Requires-Python: <3.13.0,>=3.11.0
17
+ Requires-Dist: lsst-ctrl-bps-htcondor>=28.2024.5000; sys_platform == 'linux'
18
+ Requires-Dist: lsst-pex-config
19
+ Requires-Dist: lsst-resources
20
+ Requires-Dist: lsst-utils
@@ -0,0 +1,4 @@
1
+ # -*- python -*-
2
+ from lsst.sconsUtils import scripts
3
+
4
+ scripts.BasicSConstruct("ctrl_execute", disableCc=True, noCfgFile=True)
@@ -0,0 +1,7 @@
1
+ # flake8: noqa
2
+ config.platform.localScratch = "$HOME/condor_logs"
3
+ config.platform.defaultRoot = "/oasis/scratch/ux453102/temp_project/lsst"
4
+ config.platform.dataDirectory = (
5
+ "/oasis/scratch/ux453102/temp_project/lsst/stripe82/dr7/runs"
6
+ )
7
+ config.platform.fileSystemDomain = "sdsc.edu"
@@ -0,0 +1,5 @@
1
+ # flake8: noqa
2
+ config.platform.localScratch = "$HOME/condor_logs"
3
+ config.platform.defaultRoot = "/lsst/DC3root"
4
+ config.platform.dataDirectory = "/lsst7/stripe82/dr7/runs"
5
+ config.platform.fileSystemDomain = "ncsa.illinois.edu"
@@ -0,0 +1,220 @@
1
+ #!/usr/bin/env python
2
+
3
+ #
4
+ # LSST Data Management System
5
+ # Copyright 2008, 2009, 2010 LSST Corporation.
6
+ #
7
+ # This product includes software developed by the
8
+ # LSST Project (http://www.lsst.org/).
9
+ #
10
+ # This program is free software: you can redistribute it and/or modify
11
+ # it under the terms of the GNU General Public License as published by
12
+ # the Free Software Foundation, either version 3 of the License, or
13
+ # (at your option) any later version.
14
+ #
15
+ # This program is distributed in the hope that it will be useful,
16
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
17
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18
+ # GNU General Public License for more details.
19
+ #
20
+ # You should have received a copy of the LSST License Statement and
21
+ # the GNU General Public License along with this program. If not,
22
+ # see <http://www.lsstcorp.org/LegalNotices/>.
23
+ #
24
+
25
+ import argparse
26
+ import sys
27
+ from shlex import split as cmd_split
28
+
29
+
30
def _line_to_args(line):
    """Split one line of an argparse ``@``-file into individual arguments.

    This function is installed on an ``ArgumentParser`` *instance* as
    ``convert_arg_line_to_args``.  Because it is stored on the instance
    (not the class), argparse invokes it as a plain function with the
    line as its only argument — there is no implicit ``self`` binding,
    so the original ``(self, line)`` signature raised ``TypeError``.

    ``#`` comments and shell-style quoting are handled by ``shlex``.
    Whitespace-only tokens are dropped.

    Parameters
    ----------
    line : `str`
        One raw line read from the ``@``-prefixed argument file.

    Yields
    ------
    arg : `str`
        The next argument parsed from ``line``.
    """
    # NOTE: ``cmd_split`` *is* ``shlex.split`` (imported with "as"), so the
    # original ``cmd_split.split(...)`` call raised AttributeError.
    for arg in cmd_split(line, comments=True, posix=True):
        # shlex can yield empty tokens for stray whitespace; skip them.
        if not arg.strip():
            continue
        yield arg
35
+
36
+
37
def makeArgumentParser(description, inRootsRequired=True, addRegistryOption=True):
    """Build the command-line parser for generateDag.py.

    Parameters
    ----------
    description : `str`
        Text shown at the top of the parser's help output.
    inRootsRequired : `bool`, optional
        Unused by this parser; retained for interface compatibility.
    addRegistryOption : `bool`, optional
        Unused by this parser; retained for interface compatibility.

    Returns
    -------
    parser : `argparse.ArgumentParser`
        A parser that accepts ``@file`` argument files plus the options
        listed below.
    """
    parser = argparse.ArgumentParser(
        description=description,
        fromfile_prefix_chars="@",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=" \n" "ly.",
    )
    # Lines read from @-files are tokenized by our own splitter.
    parser.convert_arg_line_to_args = _line_to_args

    # (short flag, long flag, dest, help) for every supported option;
    # registration order matches the original one-call-per-option form.
    option_table = (
        ("-s", "--source", "source", "Source site for file transfer."),
        ("-w", "--workerdir", "workerdir", "workers directory"),
        ("-t", "--template", "template", "template file"),
        ("-p", "--prescript", "prescript", "pre shell script"),
        ("-r", "--runid", "runid", "runid of production"),
        ("-i", "--idsPerJob", "idsPerJob", "number of ids to run per job"),
    )
    for short_flag, long_flag, dest_name, help_text in option_table:
        parser.add_argument(short_flag, long_flag, dest=dest_name, help=help_text)

    return parser
63
+
64
+
65
def writeVarsInfo(output, count, myDataTotal, visit, runid):
    """Write the DAGMan VARS lines for worker job ``A<count>``.

    Parameters
    ----------
    output : file-like
        Open writable stream for the DAG file.
    count : `str`
        Worker number (already converted to a string by the caller).
    myDataTotal : `str`
        The combined data-id string for this worker (``var1``).
    visit : `str`
        Visit/group identifier for this worker.
    runid : `str`
        Run identifier for the production.
    """
    prefix = "VARS A" + count
    for var_name, var_value in (
        ("var1", myDataTotal),
        ("var2", count),
        ("visit", visit),
        ("runid", runid),
        ("workerid", count),
    ):
        output.write(prefix + " " + var_name + '="' + var_value + '" \n')
71
+
72
+
73
def writeMapInfo(output, count, newDataTotal, myDataTotal):
    """Record the worker-id to data-id mapping for worker ``count``.

    Two lines are emitted: first the sanitized/joined form
    (``newDataTotal``), then the raw combined form (``myDataTotal``).
    """
    for payload in (newDataTotal, myDataTotal):
        output.write(count + " " + payload + "\n")
76
+
77
+
78
def writeDagFile(
    pipeline, templateFile, infile, workerdir, prescriptFile, runid, idsPerJob
):
    """Write Condor DAG submission files.

    Creates three files named after ``pipeline``:

    - ``<pipeline>.diamond.dag`` — the DAG: a pre job ``A``, one worker
      job ``A<n>`` per group of ``idsPerJob`` input lines, and a post
      job ``B`` (diamond shape ``A -> A<n> -> B``).
    - ``<pipeline>.mapping`` — worker-id to data-id mapping.
    - ``<pipeline>.config`` — DAGMan throttling configuration.

    Parameters
    ----------
    pipeline : `str`
        Base name used to construct the output file names.
    templateFile : `str`
        Worker submit file, referenced relative to ``workerdir``.
    infile : `str`
        Path to the input file listing one data id per line.
    workerdir : `str`
        Directory containing the worker submit/pre/post files.
    prescriptFile : `str` or `None`
        Optional PRE script attached to job ``A``; skipped when `None`.
    runid : `str`
        Run identifier written into each worker's VARS.
    idsPerJob : `int`
        Number of input lines grouped into a single worker job.
    """

    print("Writing DAG file ")
    print("idsPerJob")
    print(idsPerJob)

    listSize = idsPerJob

    outname = pipeline + ".diamond.dag"
    mapname = pipeline + ".mapping"
    configname = pipeline + ".config"

    print(outname)

    # Context managers guarantee every file is closed even if writing
    # fails partway through (the original leaked all four handles on
    # error, e.g. when ``infile`` did not exist).
    with open(mapname, "w") as mapObj, open(outname, "w") as outObj, open(
        configname, "w"
    ) as configObj:
        # DAGMan submission-throttling knobs.
        configObj.write("DAGMAN_MAX_SUBMITS_PER_INTERVAL=1000\n")
        configObj.write("DAGMAN_SUBMIT_DELAY=0\n")
        configObj.write("DAGMAN_USER_LOG_SCAN_INTERVAL=5\n")

        outObj.write("CONFIG %s\n" % configname)
        outObj.write("JOB A " + workerdir + "/" + pipeline + ".pre\n")
        outObj.write("JOB B " + workerdir + "/" + pipeline + ".post\n")
        outObj.write(" \n")

        print("prescriptFile = ", prescriptFile)
        if prescriptFile is not None:
            outObj.write("SCRIPT PRE A " + prescriptFile + "\n")

        #
        # The input file could be quite large, so stream it line by line
        # rather than reading it all into memory.
        #
        count = 0
        acount = 0
        myDataTotal = ""
        myDataList = []
        newDataTotal = ""
        newDataList = []
        with open(infile, "r") as fileObj:
            for aline in fileObj:
                acount += 1
                myData = aline.rstrip()

                # ``newData`` is intended to be a filesystem-safe form of
                # the data id (e.g. "visit=1 raft=2,2" ->
                # "visit-1:raft-2_2"); currently it is an unmodified copy.
                newData = myData
                visit = str(count // 100)

                myDataList.append(myData)
                newDataList.append(newData)

                # Example of the VARS written per worker:
                #   VARS A1 var1="visit=887136081 raft=2,2 sensor=0,1"
                #   VARS A1 var2="visit-887136081:raft-2_2:sensor-0_1"
                if acount == listSize:
                    # A full group: emit one worker job for it.
                    count += 1
                    outObj.write(
                        "JOB A" + str(count) + " " + workerdir + "/" + templateFile + "\n"
                    )
                    myDataTotal = " X ".join(myDataList)
                    newDataTotal = "_".join(newDataList)
                    writeVarsInfo(outObj, str(count), myDataTotal, visit, runid)
                    writeMapInfo(mapObj, str(count), newDataTotal, myDataTotal)
                    # Diamond dependencies: A -> A<count> -> B
                    outObj.write("PARENT A CHILD A" + str(count) + " \n")
                    outObj.write("PARENT A" + str(count) + " CHILD B \n")

                    acount = 0
                    myDataTotal = ""
                    newDataTotal = ""
                    myDataList = []
                    newDataList = []
                    outObj.write("\n")

        # If acount != 0 there are leftover ids that did not fill a
        # complete group; create one final worker for them.
        if acount != 0:
            count += 1
            outObj.write("JOB A" + str(count) + " " + workerdir + "/" + templateFile + "\n")
            myDataTotal = " X ".join(myDataList)
            newDataTotal = "_".join(newDataList)
            writeVarsInfo(outObj, str(count), myDataTotal, visit, runid)
            writeMapInfo(mapObj, str(count), newDataTotal, myDataTotal)
            outObj.write("PARENT A CHILD A" + str(count) + " \n")
            outObj.write("PARENT A" + str(count) + " CHILD B \n")
            outObj.write("\n")
190
+
191
+
192
def main():
    """Command-line entry point: parse arguments and emit the DAG files."""
    print("Starting generateDag.py")
    args_parser = makeArgumentParser(
        description="generateDag.py write a Condor DAG for job submission"
        "by reading input list and writing the attribute as an argument."
    )
    print("Created parser")
    parsed = args_parser.parse_args()
    print("Parsed Arguments")
    print(parsed)
    print(parsed.idsPerJob)

    # The base name for all generated files is fixed.
    writeDagFile(
        "Workflow",
        parsed.template,
        parsed.source,
        parsed.workerdir,
        parsed.prescript,
        parsed.runid,
        int(parsed.idsPerJob),
    )

    sys.exit(0)
217
+
218
+
219
+ if __name__ == "__main__":
220
+ main()