atex 0.9__tar.gz → 0.11__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (105) hide show
  1. atex-0.11/.github/workflows/self-tests.yaml +54 -0
  2. atex-0.9/README.md → atex-0.11/DEVEL.md +25 -60
  3. atex-0.11/PKG-INFO +86 -0
  4. atex-0.11/README.md +70 -0
  5. atex-0.11/atex/aggregator/__init__.py +62 -0
  6. atex-0.11/atex/aggregator/json.py +279 -0
  7. {atex-0.9 → atex-0.11}/atex/cli/__init__.py +14 -1
  8. {atex-0.9 → atex-0.11}/atex/cli/fmf.py +7 -7
  9. {atex-0.9 → atex-0.11}/atex/cli/libvirt.py +3 -2
  10. {atex-0.9 → atex-0.11}/atex/cli/testingfarm.py +74 -3
  11. {atex-0.9 → atex-0.11}/atex/connection/podman.py +2 -4
  12. {atex-0.9 → atex-0.11}/atex/connection/ssh.py +7 -14
  13. {atex-0.9 → atex-0.11}/atex/executor/executor.py +21 -20
  14. {atex-0.9 → atex-0.11}/atex/executor/scripts.py +5 -3
  15. {atex-0.9 → atex-0.11}/atex/executor/testcontrol.py +1 -1
  16. atex-0.11/atex/orchestrator/__init__.py +76 -0
  17. atex-0.9/atex/orchestrator/orchestrator.py → atex-0.11/atex/orchestrator/adhoc.py +246 -108
  18. atex-0.11/atex/orchestrator/contest.py +94 -0
  19. {atex-0.9/atex/provision → atex-0.11/atex/provisioner}/__init__.py +48 -52
  20. {atex-0.9/atex/provision → atex-0.11/atex/provisioner}/libvirt/libvirt.py +34 -15
  21. {atex-0.9/atex/provision → atex-0.11/atex/provisioner}/libvirt/locking.py +3 -1
  22. atex-0.11/atex/provisioner/podman/__init__.py +2 -0
  23. atex-0.11/atex/provisioner/podman/podman.py +169 -0
  24. {atex-0.9/atex/provision → atex-0.11/atex/provisioner}/testingfarm/api.py +56 -48
  25. {atex-0.9/atex/provision → atex-0.11/atex/provisioner}/testingfarm/testingfarm.py +43 -45
  26. atex-0.11/atex/util/log.py +71 -0
  27. atex-0.11/atex/util/subprocess.py +86 -0
  28. {atex-0.9 → atex-0.11}/atex/util/threads.py +7 -0
  29. {atex-0.9 → atex-0.11}/pyproject.toml +10 -5
  30. atex-0.11/tests/executor/conftest.py +37 -0
  31. atex-0.11/tests/executor/fmf_tree/file_in_fmfdir +1 -0
  32. atex-0.11/tests/executor/fmf_tree/main.fmf +23 -0
  33. atex-0.11/tests/executor/test_cwd.py +25 -0
  34. atex-0.11/tests/executor/test_env.py +63 -0
  35. {atex-0.9/tests/provision → atex-0.11/tests/provisioner}/shared.py +33 -37
  36. atex-0.11/tests/provisioner/test_podman.py +84 -0
  37. {atex-0.9/tests/provision → atex-0.11/tests/provisioner}/test_testingfarm.py +12 -12
  38. {atex-0.9 → atex-0.11}/utils/finished_excludes.py +14 -7
  39. atex-0.9/DEVEL.md +0 -43
  40. atex-0.9/PKG-INFO +0 -178
  41. atex-0.9/TODO +0 -302
  42. atex-0.9/aggrtest-combined.py +0 -74
  43. atex-0.9/aggrtest.py +0 -41
  44. atex-0.9/atex/orchestrator/__init__.py +0 -3
  45. atex-0.9/atex/orchestrator/aggregator.py +0 -111
  46. atex-0.9/atex/provision/podman/__init__.py +0 -1
  47. atex-0.9/atex/provision/podman/podman.py +0 -274
  48. atex-0.9/atex/util/log.py +0 -76
  49. atex-0.9/atex/util/subprocess.py +0 -52
  50. atex-0.9/contest.py +0 -153
  51. atex-0.9/orch.py +0 -40
  52. atex-0.9/prov.py +0 -39
  53. atex-0.9/reporter.py +0 -26
  54. atex-0.9/runtest.py +0 -74
  55. atex-0.9/ssh.py +0 -86
  56. atex-0.9/tests/provision/test_podman.py +0 -86
  57. atex-0.9/tf.py +0 -31
  58. atex-0.9/tmt_tests/plans/reserve.fmf +0 -5
  59. atex-0.9/tmt_tests/reserve/main.fmf +0 -11
  60. atex-0.9/tmt_tests/reserve/test.sh +0 -115
  61. {atex-0.9 → atex-0.11}/.editorconfig +0 -0
  62. {atex-0.9 → atex-0.11}/.gitignore +0 -0
  63. {atex-0.9 → atex-0.11}/COPYING.txt +0 -0
  64. {atex-0.9 → atex-0.11}/atex/__init__.py +0 -0
  65. {atex-0.9 → atex-0.11}/atex/connection/__init__.py +0 -0
  66. {atex-0.9 → atex-0.11}/atex/executor/README.md +0 -0
  67. {atex-0.9 → atex-0.11}/atex/executor/RESULTS.md +0 -0
  68. {atex-0.9 → atex-0.11}/atex/executor/TEST_CONTROL.md +0 -0
  69. {atex-0.9 → atex-0.11}/atex/executor/__init__.py +0 -0
  70. {atex-0.9 → atex-0.11}/atex/executor/duration.py +0 -0
  71. {atex-0.9 → atex-0.11}/atex/executor/reporter.py +0 -0
  72. {atex-0.9 → atex-0.11}/atex/fmf.py +0 -0
  73. {atex-0.9/atex/provision → atex-0.11/atex/provisioner}/libvirt/VM_PROVISION +0 -0
  74. {atex-0.9/atex/provision → atex-0.11/atex/provisioner}/libvirt/__init__.py +0 -0
  75. {atex-0.9/atex/provision → atex-0.11/atex/provisioner}/libvirt/setup-libvirt.sh +0 -0
  76. {atex-0.9/atex/provision → atex-0.11/atex/provisioner}/testingfarm/__init__.py +0 -0
  77. {atex-0.9 → atex-0.11}/atex/util/README.md +0 -0
  78. {atex-0.9 → atex-0.11}/atex/util/__init__.py +0 -0
  79. {atex-0.9 → atex-0.11}/atex/util/dedent.py +0 -0
  80. {atex-0.9 → atex-0.11}/atex/util/libvirt.py +0 -0
  81. {atex-0.9 → atex-0.11}/atex/util/named_mapping.py +0 -0
  82. {atex-0.9 → atex-0.11}/atex/util/path.py +0 -0
  83. {atex-0.9 → atex-0.11}/atex/util/ssh_keygen.py +0 -0
  84. {atex-0.9 → atex-0.11}/tests/conftest.py +0 -0
  85. {atex-0.9/tests/fmf → atex-0.11/tests/executor}/fmf_tree/.fmf/version +0 -0
  86. {atex-0.9/tmt_tests → atex-0.11/tests/fmf/fmf_tree}/.fmf/version +0 -0
  87. {atex-0.9 → atex-0.11}/tests/fmf/fmf_tree/adjusted.fmf +0 -0
  88. {atex-0.9 → atex-0.11}/tests/fmf/fmf_tree/disabled.fmf +0 -0
  89. {atex-0.9 → atex-0.11}/tests/fmf/fmf_tree/environment.fmf +0 -0
  90. {atex-0.9 → atex-0.11}/tests/fmf/fmf_tree/filters.fmf +0 -0
  91. {atex-0.9 → atex-0.11}/tests/fmf/fmf_tree/inherit/child/main.fmf +0 -0
  92. {atex-0.9 → atex-0.11}/tests/fmf/fmf_tree/inherit/main.fmf +0 -0
  93. {atex-0.9 → atex-0.11}/tests/fmf/fmf_tree/listlike.fmf +0 -0
  94. {atex-0.9 → atex-0.11}/tests/fmf/fmf_tree/manual.fmf +0 -0
  95. {atex-0.9 → atex-0.11}/tests/fmf/fmf_tree/nontest.fmf +0 -0
  96. {atex-0.9 → atex-0.11}/tests/fmf/fmf_tree/plans/filtered.fmf +0 -0
  97. {atex-0.9 → atex-0.11}/tests/fmf/fmf_tree/plans/listlike.fmf +0 -0
  98. {atex-0.9 → atex-0.11}/tests/fmf/fmf_tree/plans/scripts.fmf +0 -0
  99. {atex-0.9 → atex-0.11}/tests/fmf/fmf_tree/plans/with_env.fmf +0 -0
  100. {atex-0.9 → atex-0.11}/tests/fmf/fmf_tree/simple/main.fmf +0 -0
  101. {atex-0.9 → atex-0.11}/tests/fmf/fmf_tree/story.fmf +0 -0
  102. {atex-0.9 → atex-0.11}/tests/fmf/fmf_tree/virtual.fmf +0 -0
  103. {atex-0.9 → atex-0.11}/tests/fmf/test_fmf.py +0 -0
  104. {atex-0.9 → atex-0.11}/tests/testutil/__init__.py +0 -0
  105. {atex-0.9 → atex-0.11}/tests/testutil/timeout.py +0 -0
@@ -0,0 +1,54 @@
1
+ name: Sanity self-tests
2
+
3
+ on:
4
+ pull_request:
5
+ workflow_dispatch:
6
+
7
+ jobs:
8
+ provisioner-testing-farm:
9
+ runs-on: ubuntu-latest
10
+ environment:
11
+ name: SelfTests
12
+ steps:
13
+ - uses: actions/checkout@v4
14
+ - run: sudo apt-get install -y python3-pytest python3-pytest-xdist
15
+ - run: pip install .
16
+ - name: Run tests
17
+ run: |
18
+ pytest -s -n 50 tests/provisioner/test_testingfarm.py
19
+ env:
20
+ TESTING_FARM_COMPOSE: CentOS-Stream-10
21
+ TESTING_FARM_API_TOKEN: ${{ secrets.TESTING_FARM_API_TOKEN }}
22
+ provisioner-podman:
23
+ runs-on: ubuntu-latest
24
+ environment:
25
+ name: SelfTests
26
+ steps:
27
+ - uses: actions/checkout@v4
28
+ - run: sudo apt-get install -y python3-pytest podman
29
+ - run: pip install .
30
+ - name: Run tests
31
+ run: |
32
+ pytest -s tests/provisioner/test_podman.py
33
+ executor:
34
+ runs-on: ubuntu-latest
35
+ environment:
36
+ name: SelfTests
37
+ steps:
38
+ - uses: actions/checkout@v4
39
+ - run: sudo apt-get install -y python3-pytest
40
+ - run: pip install .
41
+ - name: Run tests
42
+ run: |
43
+ pytest -s tests/executor
44
+ fmf:
45
+ runs-on: ubuntu-latest
46
+ environment:
47
+ name: SelfTests
48
+ steps:
49
+ - uses: actions/checkout@v4
50
+ - run: sudo apt-get install -y python3-pytest
51
+ - run: pip install .
52
+ - name: Run tests
53
+ run: |
54
+ pytest -s tests/fmf
@@ -1,58 +1,34 @@
1
- # ATEX = Ad-hoc Test EXecutor
1
+ # Misc development notes
2
2
 
3
- A collections of Python APIs to provision operating systems, collect
4
- and execute [FMF](https://github.com/teemtee/fmf/)-style tests, gather
5
- and organize their results and generate reports from those results.
3
+ ## Contributing
6
4
 
7
- The name comes from a (fairly unique to FMF/TMT ecosystem) approach that
8
- allows provisioning a pool of systems and scheduling tests on them as one would
9
- on an ad-hoc pool of thread/process workers - once a worker becomes free,
10
- it receives a test to run.
11
- This is in contrast to splitting a large list of N tests onto M workers
12
- like N/M, which yields significant time penalties due to tests having
13
- very varies runtimes.
5
+ TODO - coding style
14
6
 
15
- Above all, this project is meant to be a toolbox, not a silver-plate solution.
16
- Use its Python APIs to build a CLI tool for your specific use case.
17
- The CLI tool provided here is just for demonstration / testing, not for serious
18
- use - we want to avoid huge modular CLIs for Every Possible Scenario. That's
19
- the job of the Python API. Any CLI should be simple by nature.
7
+ ## Executor and test results
20
8
 
21
- ---
9
+ TODO: mention that tests output their own JSON + uploaded files
10
+ to some temporary dir, which is then ingested by an Aggregator
11
+ to (potentially) a very different JSON format - the JSON here
12
+ is literally just a format, not a specific kind of data - like
13
+ "INI" doesn't always mean "Midnight Commander config", but a generic
14
+ format useful for many things
22
15
 
23
- THIS PROJECT IS HEAVILY WIP, THINGS WILL MOVE AROUND, CHANGE AND OTHERWISE
24
- BREAK. DO NOT USE IT (for now).
16
+ TODO: also, test -> results+files --> Aggregator --> more files
17
+ where results+files can have many different keys/values, but
18
+ Aggregators typically only look for a few specific ones (ie. 'note')
25
19
 
26
- ---
20
+ ## Release workflow
27
21
 
28
- ## License
22
+ NEVER commit these to git, they are ONLY for the PyPI release.
29
23
 
30
- Unless specified otherwise, any content within this repository is distributed
31
- under the GNU GPLv3 license, see the [COPYING.txt](COPYING.txt) file for more.
32
-
33
- ## Testing this project
34
-
35
- There are some limited sanity tests provided via `pytest`, although:
36
-
37
- * Some require additional variables (ie. Testing Farm) and will ERROR
38
- without them.
39
- * Some take a long time (ie. Testing Farm) due to system provisioning
40
- taking a long time, so install `pytest-xdist` and run with a large `-n`.
41
-
42
- Currently, the recommended approach is to split the execution:
43
-
44
- ```
45
- # synchronously, because podman CLI has concurrency issues
46
- pytest tests/provision/test_podman.py
47
-
48
- # in parallel, because provisioning takes a long time
49
- export TESTING_FARM_API_TOKEN=...
50
- export TESTING_FARM_COMPOSE=...
51
- pytest -n 20 tests/provision/test_podman.py
52
-
53
- # fast enough for synchronous execution
54
- pytest tests/fmf
55
- ```
24
+ 1. Increase `version = ` in `pyproject.toml`
25
+ 1. Tag a new version in the `atex-reserve` repo, push the tag
26
+ 1. Point to that tag from `atex/provisioner/testingfarm/api.py`,
27
+ `DEFAULT_RESERVE_TEST`
28
+ 1. ...
29
+ 1. `python3 -m build`
30
+ 1. `pip install -U twine`
31
+ 1. `python3 -m twine upload dist/*`
56
32
 
57
33
  ## Parallelism and cleanup
58
34
 
@@ -146,17 +122,6 @@ Also note that `.reserve()` and `.abort()` could be also called by a context
146
122
  manager as `__enter__` and `__exit__`, ie. by a non-threaded caller (running
147
123
  everything in the main thread).
148
124
 
125
+ ## Upcoming API breakages
149
126
 
150
- ## Unsorted notes
151
-
152
- TODO: codestyle from contest
153
-
154
- ```
155
- - this is not tmt, the goal is to make a python toolbox *for* making runcontest
156
- style tools easily, not to replace those tools with tmt-style CLI syntax
157
-
158
- - the whole point is to make usecase-targeted easy-to-use tools that don't
159
- intimidate users with 1 KB long command line, and runcontest is a nice example
160
-
161
- - TL;DR - use a modular pythonic approach, not a gluetool-style long CLI
162
- ```
127
+ - rename `FMFTests` argument `plan_name` to `plan`
atex-0.11/PKG-INFO ADDED
@@ -0,0 +1,86 @@
1
+ Metadata-Version: 2.4
2
+ Name: atex
3
+ Version: 0.11
4
+ Summary: Ad-hoc Test EXecutor
5
+ Project-URL: Homepage, https://github.com/RHSecurityCompliance/atex
6
+ License-Expression: GPL-3.0-or-later
7
+ License-File: COPYING.txt
8
+ Classifier: Operating System :: POSIX :: Linux
9
+ Classifier: Programming Language :: Python :: 3
10
+ Classifier: Topic :: Software Development :: Testing
11
+ Requires-Python: >=3.11
12
+ Requires-Dist: fmf>=1.6
13
+ Requires-Dist: pyyaml
14
+ Requires-Dist: urllib3<3,>=2
15
+ Description-Content-Type: text/markdown
16
+
17
+ # ATEX = Ad-hoc Test EXecutor
18
+
19
+ A collection of Python APIs to provision operating systems, collect
20
+ and execute [FMF](https://github.com/teemtee/fmf/)-style tests, gather
21
+ and organize their results and generate reports from those results.
22
+
23
+ The name comes from a (fairly unique to FMF/TMT ecosystem) approach that
24
+ allows provisioning a pool of systems and scheduling tests on them as one would
25
+ on an ad-hoc pool of thread/process workers - once a worker becomes free,
26
+ it receives a test to run.
27
+ This is in contrast to splitting a large list of N tests onto M workers
28
+ like N/M, which yields significant time penalties due to tests having
29
+ very varied runtimes.
30
+
31
+ Above all, this project is meant to be a toolbox, not a silver-plate solution.
32
+ Use its Python APIs to build a CLI tool for your specific use case.
33
+ The CLI tool provided here is just for demonstration / testing, not for serious
34
+ use - we want to avoid huge modular CLIs for Every Possible Scenario. That's
35
+ the job of the Python API. Any CLI should be simple by nature.
36
+
37
+ ---
38
+
39
+ ## License
40
+
41
+ Unless specified otherwise, any content within this repository is distributed
42
+ under the GNU GPLv3 license, see the [COPYING.txt](COPYING.txt) file for more.
43
+
44
+ ## Environment variables
45
+
46
+ - `ATEX_DEBUG_TEST`
47
+ - Set to `1` to print out detailed runner-related trace within the test output
48
+ stream (as if it was printed out by the test).
49
+
50
+ ## Testing this project
51
+
52
+ There are some limited sanity tests provided via `pytest`, although:
53
+
54
+ - Some require additional variables (ie. Testing Farm) and will ERROR
55
+ without them.
56
+ - Some take a long time (ie. Testing Farm) due to system provisioning
57
+ taking a long time, so install `pytest-xdist` and run with a large `-n`.
58
+
59
+ Currently, the recommended approach is to split the execution:
60
+
61
+ ```
62
+ # synchronously, because podman CLI has concurrency issues
63
+ pytest tests/provision/test_podman.py
64
+
65
+ # in parallel, because provisioning takes a long time
66
+ export TESTING_FARM_API_TOKEN=...
67
+ export TESTING_FARM_COMPOSE=...
68
+ pytest -n 20 tests/provision/test_testingfarm.py
69
+
70
+ # fast enough for synchronous execution
71
+ pytest tests/fmf
72
+ ```
73
+
74
+ ## Unsorted notes
75
+
76
+ TODO: codestyle from contest
77
+
78
+ ```
79
+ - this is not tmt, the goal is to make a python toolbox *for* making runcontest
80
+ style tools easily, not to replace those tools with tmt-style CLI syntax
81
+
82
+ - the whole point is to make usecase-targeted easy-to-use tools that don't
83
+ intimidate users with 1 KB long command line, and runcontest is a nice example
84
+
85
+ - TL;DR - use a modular pythonic approach, not a gluetool-style long CLI
86
+ ```
atex-0.11/README.md ADDED
@@ -0,0 +1,70 @@
1
+ # ATEX = Ad-hoc Test EXecutor
2
+
3
+ A collection of Python APIs to provision operating systems, collect
4
+ and execute [FMF](https://github.com/teemtee/fmf/)-style tests, gather
5
+ and organize their results and generate reports from those results.
6
+
7
+ The name comes from a (fairly unique to FMF/TMT ecosystem) approach that
8
+ allows provisioning a pool of systems and scheduling tests on them as one would
9
+ on an ad-hoc pool of thread/process workers - once a worker becomes free,
10
+ it receives a test to run.
11
+ This is in contrast to splitting a large list of N tests onto M workers
12
+ like N/M, which yields significant time penalties due to tests having
13
+ very varied runtimes.
14
+
15
+ Above all, this project is meant to be a toolbox, not a silver-plate solution.
16
+ Use its Python APIs to build a CLI tool for your specific use case.
17
+ The CLI tool provided here is just for demonstration / testing, not for serious
18
+ use - we want to avoid huge modular CLIs for Every Possible Scenario. That's
19
+ the job of the Python API. Any CLI should be simple by nature.
20
+
21
+ ---
22
+
23
+ ## License
24
+
25
+ Unless specified otherwise, any content within this repository is distributed
26
+ under the GNU GPLv3 license, see the [COPYING.txt](COPYING.txt) file for more.
27
+
28
+ ## Environment variables
29
+
30
+ - `ATEX_DEBUG_TEST`
31
+ - Set to `1` to print out detailed runner-related trace within the test output
32
+ stream (as if it was printed out by the test).
33
+
34
+ ## Testing this project
35
+
36
+ There are some limited sanity tests provided via `pytest`, although:
37
+
38
+ - Some require additional variables (ie. Testing Farm) and will ERROR
39
+ without them.
40
+ - Some take a long time (ie. Testing Farm) due to system provisioning
41
+ taking a long time, so install `pytest-xdist` and run with a large `-n`.
42
+
43
+ Currently, the recommended approach is to split the execution:
44
+
45
+ ```
46
+ # synchronously, because podman CLI has concurrency issues
47
+ pytest tests/provision/test_podman.py
48
+
49
+ # in parallel, because provisioning takes a long time
50
+ export TESTING_FARM_API_TOKEN=...
51
+ export TESTING_FARM_COMPOSE=...
52
+ pytest -n 20 tests/provision/test_testingfarm.py
53
+
54
+ # fast enough for synchronous execution
55
+ pytest tests/fmf
56
+ ```
57
+
58
+ ## Unsorted notes
59
+
60
+ TODO: codestyle from contest
61
+
62
+ ```
63
+ - this is not tmt, the goal is to make a python toolbox *for* making runcontest
64
+ style tools easily, not to replace those tools with tmt-style CLI syntax
65
+
66
+ - the whole point is to make usecase-targeted easy-to-use tools that don't
67
+ intimidate users with 1 KB long command line, and runcontest is a nice example
68
+
69
+ - TL;DR - use a modular pythonic approach, not a gluetool-style long CLI
70
+ ```
@@ -0,0 +1,62 @@
1
+ import importlib as _importlib
2
+ import pkgutil as _pkgutil
3
+
4
+
5
+ class Aggregator:
6
+ """
7
+ TODO: generic description, not JSON-specific
8
+ """
9
+
10
+ def ingest(self, platform, test_name, test_results, test_files):
11
+ """
12
+ Process 'test_results' (string/Path) for as results reported by a test
13
+ ran by Executor, along with 'test_files' as files uploaded by that test,
14
+ aggregating them under 'platform' (string) as 'test_name' (string).
15
+
16
+ This is DESTRUCTIVE, the input results/files are consumed in the
17
+ process.
18
+ """
19
+ raise NotImplementedError(f"'ingest' not implemented for {self.__class__.__name__}")
20
+
21
+ def start(self):
22
+ """
23
+ Start the Aggregator instance, opening any files / allocating resources
24
+ as necessary.
25
+ """
26
+ raise NotImplementedError(f"'start' not implemented for {self.__class__.__name__}")
27
+
28
+ def stop(self):
29
+ """
30
+ Stop the Aggregator instance, freeing all allocated resources.
31
+ """
32
+ raise NotImplementedError(f"'stop' not implemented for {self.__class__.__name__}")
33
+
34
+ def __enter__(self):
35
+ try:
36
+ self.start()
37
+ return self
38
+ except Exception:
39
+ self.stop()
40
+ raise
41
+
42
+ def __exit__(self, exc_type, exc_value, traceback):
43
+ self.stop()
44
+
45
+
46
+ _submodules = [
47
+ info.name for info in _pkgutil.iter_modules(__spec__.submodule_search_locations)
48
+ ]
49
+
50
+ __all__ = [*_submodules, Aggregator.__name__] # noqa: PLE0604
51
+
52
+
53
+ def __dir__():
54
+ return __all__
55
+
56
+
57
+ # lazily import submodules
58
+ def __getattr__(attr):
59
+ if attr in _submodules:
60
+ return _importlib.import_module(f".{attr}", __name__)
61
+ else:
62
+ raise AttributeError(f"module '{__name__}' has no attribute '{attr}'")
@@ -0,0 +1,279 @@
1
+ import abc
2
+ import gzip
3
+ import lzma
4
+ import json
5
+ import shutil
6
+ import threading
7
+ from pathlib import Path
8
+
9
+ from . import Aggregator
10
+
11
+
12
+ def _verbatim_move(src, dst):
13
+ def copy_without_symlinks(src, dst):
14
+ return shutil.copy2(src, dst, follow_symlinks=False)
15
+ shutil.move(src, dst, copy_function=copy_without_symlinks)
16
+
17
+
18
+ class JSONAggregator(Aggregator):
19
+ """
20
+ Collects reported results in a line-JSON output file and uploaded files
21
+ (logs) from multiple test runs under a shared directory.
22
+
23
+ Note that the aggregated JSON file *does not* use the test-based JSON format
24
+ described by executor/RESULTS.md - both use JSON, but are very different.
25
+
26
+ This aggregated format uses a top-level array (on each line) with a fixed
27
+ field order:
28
+
29
+ platform, status, test name, subtest name, files, note
30
+
31
+ All these are strings except 'files', which is another (nested) array
32
+ of strings.
33
+
34
+ If 'testout' is present in an input test result, it is prepended to
35
+ the list of 'files'.
36
+ If a field is missing in the source result, it is translated to a null
37
+ value.
38
+ """
39
+
40
+ def __init__(self, target, files):
41
+ """
42
+ 'target' is a string/Path to a .json file for all ingested
43
+ results to be aggregated (written) to.
44
+
45
+ 'files' is a string/Path of the top-level parent for all
46
+ per-platform / per-test files uploaded by tests.
47
+ """
48
+ self.lock = threading.RLock()
49
+ self.target = Path(target)
50
+ self.files = Path(files)
51
+ self.target_fobj = None
52
+
53
+ def start(self):
54
+ if self.target.exists():
55
+ raise FileExistsError(f"{self.target} already exists")
56
+ self.target_fobj = open(self.target, "w")
57
+
58
+ if self.files.exists():
59
+ raise FileExistsError(f"{self.files} already exists")
60
+ self.files.mkdir()
61
+
62
+ def stop(self):
63
+ if self.target_fobj:
64
+ self.target_fobj.close()
65
+ self.target_fobj = None
66
+
67
+ def _get_test_files_path(self, platform, test_name):
68
+ """
69
+ Return a directory path to where uploaded files should be stored
70
+ for a particular 'platform' and 'test_name'.
71
+ """
72
+ platform_files = self.files / platform
73
+ platform_files.mkdir(exist_ok=True)
74
+ test_files = platform_files / test_name.lstrip("/")
75
+ return test_files
76
+
77
+ @staticmethod
78
+ def _modify_file_list(test_files):
79
+ return test_files
80
+
81
+ @staticmethod
82
+ def _move_test_files(test_files, target_dir):
83
+ """
84
+ Move (or otherwise process) 'test_files' as directory of files uploaded
85
+ by the test, into the pre-computed 'target_dir' location (inside
86
+ a hierarchy of all files from all tests).
87
+ """
88
+ _verbatim_move(test_files, target_dir)
89
+
90
+ def _gen_test_results(self, input_fobj, platform, test_name):
91
+ """
92
+ Yield complete output JSON objects, one for each input result.
93
+ """
94
+ # 'testout' , 'files' and others are standard fields in the
95
+ # test control interface, see RESULTS.md for the Executor
96
+ for raw_line in input_fobj:
97
+ result_line = json.loads(raw_line)
98
+
99
+ file_names = []
100
+ # process the file specified by the 'testout' key
101
+ if "testout" in result_line:
102
+ file_names.append(result_line["testout"])
103
+ # process any additional files in the 'files' key
104
+ if "files" in result_line:
105
+ file_names += (f["name"] for f in result_line["files"])
106
+
107
+ file_names = self._modify_file_list(file_names)
108
+
109
+ output_line = (
110
+ platform,
111
+ result_line["status"],
112
+ test_name,
113
+ result_line.get("name"), # subtest
114
+ file_names,
115
+ result_line.get("note"),
116
+ )
117
+ yield json.dumps(output_line, indent=None)
118
+
119
+ def ingest(self, platform, test_name, test_results, test_files):
120
+ target_test_files = self._get_test_files_path(platform, test_name)
121
+ if target_test_files.exists():
122
+ raise FileExistsError(f"{target_test_files} already exists for {test_name}")
123
+
124
+ # parse the results separately, before writing any aggregated output,
125
+ # to ensure that either ALL results from the test are ingested, or none
126
+ # at all (ie. if one of the result lines contains JSON errors)
127
+ with open(test_results) as test_results_fobj:
128
+ output_results = self._gen_test_results(test_results_fobj, platform, test_name)
129
+ output_json = "\n".join(output_results) + "\n"
130
+
131
+ with self.lock:
132
+ self.target_fobj.write(output_json)
133
+ self.target_fobj.flush()
134
+
135
+ # clean up the source test_results (Aggregator should 'mv', not 'cp')
136
+ Path(test_results).unlink()
137
+
138
+ # if the test_files dir is not empty
139
+ if any(test_files.iterdir()):
140
+ self._move_test_files(test_files, target_test_files)
141
+
142
+
143
+ class CompressedJSONAggregator(JSONAggregator, abc.ABC):
144
+ compress_files = False
145
+ suffix = ""
146
+ exclude = ()
147
+
148
+ @abc.abstractmethod
149
+ def compressed_open(self, *args, **kwargs):
150
+ pass
151
+
152
+ def start(self):
153
+ if self.target.exists():
154
+ raise FileExistsError(f"{self.target_file} already exists")
155
+ self.target_fobj = self.compressed_open(self.target, "wt", newline="\n")
156
+
157
+ if self.files.exists():
158
+ raise FileExistsError(f"{self.storage_dir} already exists")
159
+ self.files.mkdir()
160
+
161
+ def _modify_file_list(self, test_files):
162
+ if self.compress_files and self.suffix:
163
+ return [
164
+ (name if name in self.exclude else f"{name}{self.suffix}")
165
+ for name in test_files
166
+ ]
167
+ else:
168
+ return super()._modify_file_list(test_files)
169
+
170
+ def _move_test_files(self, test_files, target_dir):
171
+ if not self.compress_files:
172
+ super()._move_test_files(test_files, target_dir)
173
+ return
174
+
175
+ for root, _, files in test_files.walk(top_down=False):
176
+ for file_name in files:
177
+ src_path = root / file_name
178
+ dst_path = target_dir / src_path.relative_to(test_files)
179
+
180
+ dst_path.parent.mkdir(parents=True, exist_ok=True)
181
+
182
+ # skip dirs, symlinks, device files, etc.
183
+ if not src_path.is_file(follow_symlinks=False) or file_name in self.exclude:
184
+ _verbatim_move(src_path, dst_path)
185
+ continue
186
+
187
+ if self.suffix:
188
+ dst_path = dst_path.with_name(f"{dst_path.name}{self.suffix}")
189
+
190
+ with open(src_path, "rb") as plain_fobj:
191
+ with self.compressed_open(dst_path, "wb") as compress_fobj:
192
+ shutil.copyfileobj(plain_fobj, compress_fobj, 1048576)
193
+
194
+ src_path.unlink()
195
+
196
+ # we're walking bottom-up, so the local root should be empty now
197
+ root.rmdir()
198
+
199
+
200
+ class GzipJSONAggregator(CompressedJSONAggregator):
201
+ """
202
+ Identical to JSONAggregator, but transparently Gzips either or both of
203
+ the output line-JSON file with results and the uploaded files.
204
+ """
205
+ def compressed_open(self, *args, **kwargs):
206
+ return gzip.open(*args, compresslevel=self.level, **kwargs)
207
+
208
+ def __init__(
209
+ self, target, files, *, compress_level=9,
210
+ compress_files=True, compress_files_suffix=".gz", compress_files_exclude=None,
211
+ ):
212
+ """
213
+ 'target' is a string/Path to a .json.gz file for all ingested
214
+ results to be aggregated (written) to.
215
+
216
+ 'files' is a string/Path of the top-level parent for all
217
+ per-platform / per-test files uploaded by tests.
218
+
219
+ 'compress_level' specifies how much effort should be spent compressing,
220
+ (1 = fast, 9 = slow).
221
+
222
+ If 'compress_files' is True, compress also any files uploaded by tests.
223
+
224
+ The 'compress_files_suffix' is appended to any processed test-uploaded
225
+ files, and the respective 'files' results array is modified with the
226
+ new file names (as if the test uploaded compressed files already).
227
+ Set to "" (empty string) to use original file names and just compress
228
+ them transparently in-place.
229
+
230
+ 'compress_files_exclude' is a tuple/list of strings (input 'files'
231
+ names) to skip when compressing. Their names also won't be modified.
232
+ """
233
+ super().__init__(target, files)
234
+ self.level = compress_level
235
+ self.compress_files = compress_files
236
+ self.suffix = compress_files_suffix
237
+ self.exclude = compress_files_exclude or ()
238
+
239
+
240
+ class LZMAJSONAggregator(CompressedJSONAggregator):
241
+ """
242
+ Identical to JSONAggregator, but transparently compresses (via LZMA/XZ)
243
+ either or both of the output line-JSON file with results and the uploaded
244
+ files.
245
+ """
246
+ def compressed_open(self, *args, **kwargs):
247
+ return lzma.open(*args, preset=self.preset, **kwargs)
248
+
249
+ def __init__(
250
+ self, target, files, *, compress_preset=9,
251
+ compress_files=True, compress_files_suffix=".xz", compress_files_exclude=None,
252
+ ):
253
+ """
254
+ 'target' is a string/Path to a .json.xz file for all ingested
255
+ results to be aggregated (written) to.
256
+
257
+ 'files' is a string/Path of the top-level parent for all
258
+ per-platform / per-test files uploaded by tests.
259
+
260
+ 'compress_preset' specifies how much effort should be spent compressing,
261
+ (1 = fast, 9 = slow). Optionally ORed with lzma.PRESET_EXTREME to spend
262
+ even more CPU time compressing.
263
+
264
+ If 'compress_files' is True, compress also any files uploaded by tests.
265
+
266
+ The 'compress_files_suffix' is appended to any processed test-uploaded
267
+ files, and the respective 'files' results array is modified with the
268
+ new file names (as if the test uploaded compressed files already).
269
+ Set to "" (empty string) to use original file names and just compress
270
+ them transparently in-place.
271
+
272
+ 'compress_files_exclude' is a tuple/list of strings (input 'files'
273
+ names) to skip when compressing. Their names also won't be modified.
274
+ """
275
+ super().__init__(target, files)
276
+ self.preset = compress_preset
277
+ self.compress_files = compress_files
278
+ self.suffix = compress_files_suffix
279
+ self.exclude = compress_files_exclude or ()
@@ -27,12 +27,21 @@ import pkgutil
27
27
  import argparse
28
28
  import logging
29
29
 
30
+ from .. import util
31
+
30
32
 
31
33
  def setup_logging(level):
34
+ if level <= util.EXTRADEBUG:
35
+ fmt = "%(asctime)s %(name)s: %(filename)s:%(lineno)s: %(funcName)s(): %(message)s"
36
+ # also print urllib3 headers
37
+ import http.client # noqa: PLC0415
38
+ http.client.HTTPConnection.debuglevel = 5
39
+ else:
40
+ fmt = "%(asctime)s %(name)s: %(message)s"
32
41
  logging.basicConfig(
33
42
  level=level,
34
43
  stream=sys.stderr,
35
- format="%(asctime)s %(name)s: %(message)s",
44
+ format=fmt,
36
45
  datefmt="%Y-%m-%d %H:%M:%S",
37
46
  )
38
47
 
@@ -53,6 +62,10 @@ def main():
53
62
  "--debug", "-d", action="store_const", dest="loglevel", const=logging.DEBUG,
54
63
  help="enable extra debugging (logging.DEBUG)",
55
64
  )
65
+ log_grp.add_argument(
66
+ "--extra-debug", "-D", action="store_const", dest="loglevel", const=util.EXTRADEBUG,
67
+ help="enable extra debugging (atex.util.EXTRADEBUG)",
68
+ )
56
69
  log_grp.add_argument(
57
70
  "--quiet", "-q", action="store_const", dest="loglevel", const=logging.WARNING,
58
71
  help="be quiet during normal operation (logging.WARNING)",