ewokstxs-1.0.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ewokstxs-1.0.0/LICENSE.md +20 -0
- ewokstxs-1.0.0/PKG-INFO +52 -0
- ewokstxs-1.0.0/README.md +11 -0
- ewokstxs-1.0.0/pyproject.toml +78 -0
- ewokstxs-1.0.0/setup.cfg +4 -0
- ewokstxs-1.0.0/src/ewokstxs/__init__.py +0 -0
- ewokstxs-1.0.0/src/ewokstxs/__main__.py +109 -0
- ewokstxs-1.0.0/src/ewokstxs/tasks/__init__.py +0 -0
- ewokstxs-1.0.0/src/ewokstxs/tasks/txs.py +152 -0
- ewokstxs-1.0.0/src/ewokstxs/tasks/utils.py +282 -0
- ewokstxs-1.0.0/src/ewokstxs/tests/__init__.py +0 -0
- ewokstxs-1.0.0/src/ewokstxs/tests/test_TxsTask.py +85 -0
- ewokstxs-1.0.0/src/ewokstxs.egg-info/PKG-INFO +52 -0
- ewokstxs-1.0.0/src/ewokstxs.egg-info/SOURCES.txt +16 -0
- ewokstxs-1.0.0/src/ewokstxs.egg-info/dependency_links.txt +1 -0
- ewokstxs-1.0.0/src/ewokstxs.egg-info/entry_points.txt +2 -0
- ewokstxs-1.0.0/src/ewokstxs.egg-info/requires.txt +25 -0
- ewokstxs-1.0.0/src/ewokstxs.egg-info/top_level.txt +1 -0
ewokstxs-1.0.0/LICENSE.md
ADDED
@@ -0,0 +1,20 @@
# MIT License

**Copyright (c) 2023 European Synchrotron Radiation Facility**

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
ewokstxs-1.0.0/PKG-INFO
ADDED
@@ -0,0 +1,52 @@
Metadata-Version: 2.4
Name: ewokstxs
Version: 1.0.0
Summary: Data processing workflows for ID09
Author-email: ESRF <dau-pydev@esrf.fr>
License-Expression: MIT
Project-URL: Homepage, https://gitlab.esrf.fr/workflow/ewoksapps/ewokstxs/
Project-URL: Documentation, https://ewokstxs.readthedocs.io/
Project-URL: Repository, https://gitlab.esrf.fr/workflow/ewoksapps/ewokstxs/
Project-URL: Issues, https://gitlab.esrf.fr/workflow/ewoksapps/ewokstxs/issues
Project-URL: Changelog, https://gitlab.esrf.fr/workflow/ewoksapps/ewokstxs/-/blob/main/CHANGELOG.md
Keywords: ewoks
Classifier: Intended Audience :: Science/Research
Classifier: Programming Language :: Python :: 3
Requires-Python: >=3.10
Description-Content-Type: text/markdown
License-File: LICENSE.md
Requires-Dist: blissdata>=2.3.0
Requires-Dist: ewoks
Requires-Dist: ewokscore
Requires-Dist: ewoksdata[online]
Requires-Dist: ewoksutils
Requires-Dist: pytxs
Provides-Extra: test
Requires-Dist: pytest>=7; extra == "test"
Requires-Dist: pyqt5; extra == "test"
Provides-Extra: dev
Requires-Dist: ewokstxs[test]; extra == "dev"
Requires-Dist: bandit; extra == "dev"
Requires-Dist: black>=26; extra == "dev"
Requires-Dist: flake8>=4; extra == "dev"
Requires-Dist: isort; extra == "dev"
Requires-Dist: mypy; extra == "dev"
Provides-Extra: doc
Requires-Dist: ewokstxs[test]; extra == "doc"
Requires-Dist: sphinx>=4.5; extra == "doc"
Requires-Dist: pydata_sphinx_theme; extra == "doc"
Requires-Dist: ewokssphinx; extra == "doc"
Requires-Dist: sphinx-copybutton; extra == "doc"
Dynamic: license-file

# ewokstxs

Ewoks tasks for the [txs package](https://gitlab.esrf.fr/levantin/txs).

## Resources

- [Package on pypi.org](https://pypi.org/project/ewokstxs/)
- [Documentation](https://ewokstxs.readthedocs.io/)
- [Contributing](CONTRIBUTING.md)
- [Changelog](CHANGELOG.md)
ewokstxs-1.0.0/README.md
ADDED
@@ -0,0 +1,11 @@
# ewokstxs

Ewoks tasks for the [txs package](https://gitlab.esrf.fr/levantin/txs).

## Resources

- [Package on pypi.org](https://pypi.org/project/ewokstxs/)
- [Documentation](https://ewokstxs.readthedocs.io/)
- [Contributing](CONTRIBUTING.md)
- [Changelog](CHANGELOG.md)
ewokstxs-1.0.0/pyproject.toml
ADDED
@@ -0,0 +1,78 @@
[build-system]
requires = ["setuptools>=77.0.3"]
build-backend = "setuptools.build_meta"

[project]
name = "ewokstxs"
version = "1.0.0"
keywords = ['ewoks']
authors = [{name = "ESRF", email = "dau-pydev@esrf.fr"}]
description = "Data processing workflows for ID09"
readme = "README.md"
license = "MIT"
license-files = ["LICENSE.md"]
classifiers = [
    "Intended Audience :: Science/Research",
    "Programming Language :: Python :: 3",
]
requires-python = ">=3.10"
dependencies = [
    "blissdata >= 2.3.0",
    "ewoks",
    "ewokscore",
    "ewoksdata[online]",
    "ewoksutils",
    "pytxs",
]

[project.urls]
Homepage = "https://gitlab.esrf.fr/workflow/ewoksapps/ewokstxs/"
Documentation = "https://ewokstxs.readthedocs.io/"
Repository = "https://gitlab.esrf.fr/workflow/ewoksapps/ewokstxs/"
Issues = "https://gitlab.esrf.fr/workflow/ewoksapps/ewokstxs/issues"
Changelog = "https://gitlab.esrf.fr/workflow/ewoksapps/ewokstxs/-/blob/main/CHANGELOG.md"

[project.optional-dependencies]
test = [
    "pytest >=7",
    "pyqt5",
]
dev = [
    "ewokstxs[test]",
    "bandit",
    "black >=26",
    "flake8 >=4",
    "isort",
    "mypy",
]
doc = [
    "ewokstxs[test]",
    "sphinx >=4.5",
    "pydata_sphinx_theme",
    "ewokssphinx",
    "sphinx-copybutton",
]

[tool.setuptools]
package-dir = { "" = "src" }

[tool.setuptools.packages.find]
where = ["src"]

[project.entry-points."ewoks.tasks.class"]
"ewokstxs.tasks.*" = "ewokstxs"

[tool.bandit.assert_used]
skips = ["*/test_*.py"]

[tool.coverage.run]
omit = ['*/tests/*']

[tool.isort]
profile = "black"

[tool.mypy]
python_version = "3.10"
files = "src"
ignore_missing_imports = true
explicit_package_bases = true
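Note: the `[project.entry-points."ewoks.tasks.class"]` table above is what lets ewoks resolve task identifiers such as `ewokstxs.tasks.txs.TxsTask` from the installed package. A minimal sketch to check the registration from Python (assuming Python >= 3.10 and that ewokstxs is installed in the current environment; the expected value is inferred from the table above, not from this diff):

# Minimal sketch, not part of the package: list the "ewoks.tasks.class"
# entry points visible to Python and look for the pattern registered by
# ewokstxs in pyproject.toml.
from importlib.metadata import entry_points

for ep in entry_points(group="ewoks.tasks.class"):
    print(ep.name, "->", ep.value)
# Expected to include: ewokstxs.tasks.* -> ewokstxs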
ewokstxs-1.0.0/setup.cfg
ADDED
File without changes
ewokstxs-1.0.0/src/ewokstxs/__main__.py
ADDED
@@ -0,0 +1,109 @@
import argparse
import logging
from pprint import pprint

from ewoks import execute_graph
from ewoksutils.task_utils import task_inputs


def main():
    """Run TxsTask with the provided arguments"""
    parser = argparse.ArgumentParser(
        description="Run TxsTask with the given arguments",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        "filename",
        type=str,
        help="Path of the HDF5 file where the scan is saved",
    )
    parser.add_argument(
        "scan",
        type=int,
        help="Scan number",
    )
    parser.add_argument(
        "output_filename",
        type=str,
        help="Path of the HDF5 file where to save the integrated results",
    )
    parser.add_argument(
        "-e",
        "--energy",
        required=True,
        type=float,
        help="X-ray photon energy (eV)",
    )
    parser.add_argument(
        "-d",
        "--distance",
        required=True,
        type=float,
        help="Sample-to-detector distance (m)",
    )

    # optional

    parser.add_argument(
        "-c",
        "--center",
        nargs=2,
        type=float,
        default=(960.0, 960.0),
        help="Coordinates of the image center (hor, ver) (pixel)",
    )
    parser.add_argument(
        "--detector",
        type=str,
        default="rayonix",
        help="Detector name",
    )
    parser.add_argument(
        "-b",
        "--binning",
        nargs=2,
        type=int,
        default=(2, 2),
        help="Detector binning (hor, ver)",
    )
    parser.add_argument(
        "-p",
        "--pixel",
        nargs=2,
        type=float,
        default=None,
        help="Pixel size (hor, ver) (m)",
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="count",
        default=0,
        help="Increase verbosity level (-v: INFO, -vv:DEBUG)",
    )

    inputs = {k: v for k, v in vars(parser.parse_args()).items() if k != "verbose"}
    inputs["scan_key"] = None

    print("Inputs:")
    pprint(inputs)
    result = execute_graph(
        {
            "graph": {"id": "txs"},
            "nodes": [
                {
                    "id": "txs_task",
                    "task_type": "class",
                    "task_identifier": "ewokstxs.tasks.txs.TxsTask",
                },
            ],
        },
        inputs=task_inputs(id="txs_task", inputs=inputs),
    )
    print("Result:")
    pprint(result)


if __name__ == "__main__":
    logging.basicConfig(level=logging.WARNING)
    main()
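Since the sdist ships this `__main__` module, the task can presumably be launched with `python -m ewokstxs`; a hypothetical invocation with placeholder paths (energy and distance values mirror the test further down) would be `python -m ewokstxs /data/raw/dataset.h5 3 /data/processed/output.h5 -e 18000 -d 0.3`.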
ewokstxs-1.0.0/src/ewokstxs/tasks/__init__.py
ADDED
File without changes
ewokstxs-1.0.0/src/ewokstxs/tasks/txs.py
ADDED
@@ -0,0 +1,152 @@
from __future__ import annotations

import os.path
from collections.abc import Generator
from typing import Any

import numpy as np
import txs
from ewokscore import Task
from pyFAI.containers import Integrate1dResult
from silx.io.url import DataUrl

from .utils import TxsResultsWriter, detector_frames


def txs_integration_results(
    scan_key: str | None,
    filename: str | None,
    scan_number: int,
    energy: float,
    distance: float,
    center: tuple[float, float],
    detector: str,
    binning: int | tuple[int, int] | None,
    pixel: float | tuple[float, float] | None = None,
    integrate1d_options: dict[str, Any] | None = None,
) -> Generator[tuple[float, Integrate1dResult]]:
    """Generator of txs's azimuthal integration results

    At least one of scan_key and filename must not be None or an empty string

    :param scan_key: blissdata scan unique identifier
    :param filename: Path of the HDF5 file where the scan is saved
    :param scan_number: Scan number
    :param energy: X-ray photon energy (eV)
    :param distance: Sample-to-detector distance (m)
    :param center: Coordinates of the image center (hor, ver) (pixel)
    :param detector: Detector name
    :param binning: Detector binning (hor, ver). Giving one value will set both hor and ver.
    :param pixel: Pixel size (hor, ver) (m)
    :param integrate1d_options: Extra arguments to pass to integrate1d
    """
    if not scan_key and not filename:
        raise ValueError(
            "Both scan_key and filename are None or an empty string. At least one must be defined"
        )

    if pixel:
        ai = txs.get_ai(energy, distance, center, binning=binning, pixel=pixel)
    else:
        ai = txs.get_ai(energy, distance, center, binning=binning, detector=detector)

    if integrate1d_options is None:
        integrate1d_options = {}
    for timestamp, image in detector_frames(scan_key, filename, scan_number, detector):
        yield timestamp, txs.azav.integrate1d(image, ai, **integrate1d_options)


class TxsTask(  # type: ignore[call-arg]
    Task,
    input_names=[
        "scan_key",
        "filename",
        "scan",
        "energy",
        "distance",
        "center",
        "detector",
        "binning",
        "output_filename",
    ],
    optional_input_names=["pixel", "integrate1d_options"],
    output_names=["nxdata_url"],
):
    """txs integration task which saves the results to a HDF5 file"""

    def run(self):
        scan_group_name = f"/{self.inputs.scan}.1"

        with TxsResultsWriter(
            scan_nxentry_url=DataUrl(
                file_path=os.path.abspath(self.inputs.filename),
                data_path=scan_group_name,
            ),
            results_nxentry_url=DataUrl(
                file_path=os.path.abspath(self.inputs.output_filename),
                data_path=scan_group_name,
            ),
        ) as writer:
            for timestamp, result in txs_integration_results(
                self.inputs.scan_key,
                self.inputs.filename,
                self.inputs.scan,
                self.inputs.energy,
                self.inputs.distance,
                self.inputs.center,
                self.inputs.detector,
                self.inputs.binning,
                self.get_input_value("pixel", None),
                self.get_input_value("integrate1d_options", None),
            ):
                writer.add_result(result, timestamp)

        nxdata_url = writer.nxdata_url
        self.outputs.nxdata_url = nxdata_url.path() if nxdata_url else None


class TxsTaskWithoutSaving(  # type: ignore[call-arg]
    Task,
    input_names=[
        "scan_key",
        "filename",
        "scan",
        "energy",
        "distance",
        "center",
        "detector",
        "binning",
    ],
    optional_input_names=["pixel", "integrate1d_options"],
    output_names=[
        "radial",
        "radial_units",
        "intensity",
        "intensity_error",
    ],
):
    """txs integration task which returns the results"""

    def run(self):
        results = []
        for _, result in txs_integration_results(
            self.inputs.scan_key,
            self.inputs.filename,
            self.inputs.scan,
            self.inputs.energy,
            self.inputs.distance,
            self.inputs.center,
            self.inputs.detector,
            self.inputs.binning,
            self.get_input_value("pixel", None),
            self.get_input_value("integrate1d_options", None),
        ):
            results.append(result)

        if not results:
            raise RuntimeError("No data was processed")

        self.outputs.radial = results[0].radial
        self.outputs.radial_units = results[0].unit.name
        self.outputs.intensity = np.array([res.intensity for res in results])
        self.outputs.intensity_error = np.array([res.sigma for res in results])
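For comparison with `__main__.py`, `TxsTaskWithoutSaving` can be driven through the same single-node ewoks graph; the sketch below mirrors that pattern. The paths, scan number and geometry values are placeholders, and reading `result["intensity"]` assumes `execute_graph` returns the merged node outputs, as the test below does for `nxdata_url`:

# Minimal sketch, not part of the package: run TxsTaskWithoutSaving through a
# single-node ewoks graph so the integrated patterns are returned in memory
# instead of written to HDF5. All input values below are placeholders.
from ewoks import execute_graph
from ewoksutils.task_utils import task_inputs

inputs = {
    "scan_key": None,  # no live blissdata scan: read frames from the file
    "filename": "/data/raw/dataset.h5",
    "scan": 3,
    "energy": 18000.0,  # eV
    "distance": 0.3,  # m
    "center": (960.0, 960.0),  # pixel
    "detector": "rayonix",
    "binning": (2, 2),
}
result = execute_graph(
    {
        "graph": {"id": "txs_no_save"},
        "nodes": [
            {
                "id": "txs_task",
                "task_type": "class",
                "task_identifier": "ewokstxs.tasks.txs.TxsTaskWithoutSaving",
            },
        ],
    },
    inputs=task_inputs(id="txs_task", inputs=inputs),
)
print(result["radial_units"], result["intensity"].shape)  # (nframes, npt_rad)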
ewokstxs-1.0.0/src/ewokstxs/tasks/utils.py
ADDED
@@ -0,0 +1,282 @@
from __future__ import annotations

import logging
import os.path
from collections.abc import Generator

import h5py
import numpy as np
import txs
from blissdata.beacon.data import BeaconData
from blissdata.exceptions import ScanNotFoundError, ScanValidationError
from blissdata.redis_engine.store import DataStore
from ewoksdata.data import nexus
from ewoksdata.data.bliss import iter_bliss_scan_data, iter_bliss_scan_data_from_memory
from pyFAI.containers import Integrate1dResult
from silx.io import h5py_utils
from silx.io.dictdump import dicttonx
from silx.io.url import DataUrl

_logger = logging.getLogger(__name__)


RETRY_OPTIONS = {"retry_timeout": 2 * 60, "retry_period": 1}
"""Retry options used both for reading and writing"""


@h5py_utils.retry()
def _open_h5_file(filename: str, mode: str = "r"):
    """Open a HDF5 file and retry if it fails"""
    return h5py.File(filename, mode)


class TxsResultsWriter:
    """Writes txs results to a HDF5 group.

    :param scan_nxentry_url: URL of the HDF5 group of the scan
    :param results_nxentry_url: URL of the HDF5 group where to write the results
    """

    FLUSH_PERIOD = 3.0
    """Period at which to write to the HDF5 file (seconds)"""

    def __init__(
        self,
        scan_nxentry_url: DataUrl,
        results_nxentry_url: DataUrl,
    ):
        self._scan_nxentry_url = scan_nxentry_url
        self._results_nxentry_url = results_nxentry_url

        self._results_buffer: list[Integrate1dResult] = []
        self._results_timestamp: list[float] = []
        self._chunk_length: int | None = None
        self._nxdata_url: DataUrl | None = None

    def __del__(self):
        self.flush(all=True)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.flush(all=True)

    @property
    def nxdata_url(self) -> DataUrl | None:
        """URL of the HDF5 group storing the NXdata, None if not yet created"""
        return self._nxdata_url

    def add_result(self, result: Integrate1dResult, timestamp: float):
        """Append data from txs result to the HDF5 datasets"""
        self._results_buffer.append(result)
        self._results_timestamp.append(timestamp)

        if timestamp - self._results_timestamp[0] < self.FLUSH_PERIOD:
            return

        if (
            self._chunk_length is None
            or len(self._results_buffer) >= self._chunk_length
        ):
            self.flush()

    def _create_result_nxentry(self, result: Integrate1dResult) -> DataUrl:
        # Create the output result group and create folders and HDF5 file if needed
        nexus.create_url(self._results_nxentry_url, **RETRY_OPTIONS)
        result_filename = self._results_nxentry_url.file_path()

        # Use a relative path for the external links
        scan_filename = os.path.relpath(
            self._scan_nxentry_url.file_path(),
            os.path.dirname(result_filename),
        )
        scan_group_name = self._scan_nxentry_url.data_path()

        radial_name, radial_unit = result.unit.name.split("_")

        with _open_h5_file(result_filename, mode="a", **RETRY_OPTIONS) as h5file:
            dicttonx(
                {
                    # Add links to some of the scan entries
                    "instrument": h5py.ExternalLink(
                        scan_filename, f"{scan_group_name}/instrument"
                    ),
                    "measurement": h5py.ExternalLink(
                        scan_filename, f"{scan_group_name}/measurement"
                    ),
                    "sample": h5py.ExternalLink(
                        scan_filename, f"{scan_group_name}/sample"
                    ),
                    "title": h5py.ExternalLink(
                        scan_filename, f"{scan_group_name}/title"
                    ),
                    # NXProcess group storing txs results
                    "@default": "integrate",
                    "integrate": {
                        "@NX_class": "NXprocess",
                        # NXData group storing integrated patterns
                        "@default": "integrated",
                        "integrated": {
                            "@NX_class": "NXdata",
                            "@signal": "intensity",
                            "@axes": [".", radial_name],
                            radial_name: result.radial,
                            f"{radial_name}@units": radial_unit,
                        },
                        "program": "txs",
                        "version": txs.__version__,
                    },
                },
                h5file,
                self._results_nxentry_url.data_path(),
                update_mode="modify",
            )

            nxdata_path = (
                f"{self._results_nxentry_url.data_path()}/integrate/integrated"
            )

            # Create intensity and errors datasets
            npt_rad = len(result.radial)

            nxdata_group = h5file[nxdata_path]

            intensity_dataset = nxdata_group.create_dataset(
                "intensity",
                dtype=result.intensity.dtype,
                shape=(0, npt_rad),
                chunks=(self._chunk_length, npt_rad),
                maxshape=(None, npt_rad),
                compression="gzip",
            )
            intensity_dataset.attrs["interpretation"] = "spectrum"

            if result.sigma is not None:
                nxdata_group.create_dataset(
                    "intensity_errors",
                    dtype=result.sigma.dtype,
                    shape=(0, npt_rad),
                    chunks=(self._chunk_length, npt_rad),
                    maxshape=(None, npt_rad),
                    compression="gzip",
                )

        return DataUrl(file_path=result_filename, data_path=nxdata_path)

    def _guess_chunk_length(self) -> int:
        if len(self._results_timestamp) > 1:
            # expects regular framerate
            average_frame_rate = 1.0 / np.mean(np.diff(self._results_timestamp))
            chunk_length = int(average_frame_rate * self.FLUSH_PERIOD)
        else:
            chunk_length = len(self._results_timestamp)
        return max(min(chunk_length, 32), 1)

    def flush(self, all: bool = False) -> None:
        if len(self._results_buffer) == 0:
            return

        if self._chunk_length is None:
            self._chunk_length = self._guess_chunk_length()

        if all:
            write_length = len(self._results_buffer)
        else:
            write_length = (
                len(self._results_buffer) // self._chunk_length * self._chunk_length
            )
        if write_length == 0:
            return

        if self._nxdata_url is None:
            # Creates hdf5 entry lazily from first received result
            self._nxdata_url = self._create_result_nxentry(self._results_buffer[0])

        with _open_h5_file(
            self._nxdata_url.file_path(), mode="a", **RETRY_OPTIONS
        ) as h5file:
            nxdata_group = h5file[self._nxdata_url.data_path()]
            intensity_dataset = nxdata_group["intensity"]

            write_offset = len(intensity_dataset)
            dataset_length = write_offset + write_length

            intensity_dataset.resize(dataset_length, axis=0)
            intensity_dataset[write_offset:dataset_length] = [
                result.intensity for result in self._results_buffer[:write_length]
            ]

            intensity_errors_group = nxdata_group.get("intensity_errors")
            if intensity_errors_group is not None:
                intensity_errors_group.resize(dataset_length, axis=0)
                intensity_errors_group[write_offset:dataset_length] = [
                    result.sigma for result in self._results_buffer[:write_length]
                ]

            self._results_buffer = self._results_buffer[write_length:]

            if all:
                nxdata_group["points"] = np.arange(dataset_length)

                axes = nxdata_group.attrs["axes"]
                axes[0] = "points"
                nxdata_group.attrs["axes"] = axes


def detector_frames(
    scan_key: str | None,
    filename: str | None,
    scan_number: int,
    detector: str,
) -> Generator[tuple[float, np.ndarray]]:
    """Generator of detector frames from a scan retrieved either through redis or from file

    At least one of scan_key and filename must not be None or empty.

    :scan_key: blissdata key of the scan
    :filename: Path of the HDF5 raw data file to read
    :scan_number: Number of the scan to read from the file
    :detector: name of the detector
    """
    if not scan_key and not filename:
        raise ValueError("At least one of scan_key and filename must not be None or empty")

    if scan_key:  # Try using redis first
        # TODO rework
        try:
            data_store = DataStore(BeaconData().get_redis_data_db())
        except Exception:
            _logger.info("Cannot connect to beacon host or redis")
            _logger.debug("Backtrace", exc_info=True)
        else:
            try:
                _ = data_store.load_scan(scan_key)
            except (ScanNotFoundError, ScanValidationError):
                _logger.info(f"Cannot retrieve scan from redis: {scan_key}")
                _logger.debug("Backtrace", exc_info=True)
            else:
                _logger.info("Retrieve frames through redis")
                for data in iter_bliss_scan_data_from_memory(
                    scan_key,
                    lima_names=[detector],
                    counter_names=["elapsed_time"],
                    **RETRY_OPTIONS,
                ):
                    yield data["elapsed_time"], data[detector]
                return

        if not filename:
            raise RuntimeError("Cannot connect to redis or retrieve the scan")

    _logger.info("Read frames from file")
    for data in iter_bliss_scan_data(
        filename,
        scan_number,
        lima_names=[detector],
        # One counter is needed here to make sure to iterate over the right number of frames
        counter_names=["elapsed_time"],
        **RETRY_OPTIONS,
    ):
        _logger.info(f"Read image for {data['elapsed_time']}")
        yield data["elapsed_time"], data[detector]
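`detector_frames` can also be used on its own to iterate over the frames of an already saved scan; a minimal sketch with a placeholder path, scan number and detector name:

# Minimal sketch, not part of the package: read detector frames from file only
# (scan_key=None skips the blissdata/Redis path). All values are placeholders.
from ewokstxs.tasks.utils import detector_frames

for elapsed_time, frame in detector_frames(
    scan_key=None,
    filename="/data/raw/dataset.h5",
    scan_number=3,
    detector="rayonix",
):
    print(f"t={elapsed_time:.1f}s, frame shape: {frame.shape}")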
ewokstxs-1.0.0/src/ewokstxs/tests/__init__.py
ADDED
File without changes
ewokstxs-1.0.0/src/ewokstxs/tests/test_TxsTask.py
ADDED
@@ -0,0 +1,85 @@
import logging

import h5py
import numpy as np
import pytest
from ewoks import execute_graph
from silx.io.dictdump import dicttonx
from silx.io.url import DataUrl


@pytest.fixture
def h5_scan_file(tmp_path):
    """Create a temporary HDF5 file with a minimal structure for testing."""

    file_path = tmp_path / "dataset.h5"

    h5_file_content = {
        "@NX_class": "NXentry",
        "start_time": "2023-07-04T23:10:56.747201+02:00",
        "end_time": "2023-07-04T23:13:55.143782+02:00",
        "title": "test_scan_command",
        "instrument": {
            "@NX_class": "NXinstrument",
            "rayonix": {
                "@NX_class": "NXdetector",
                ">data": "image",
                "image": np.random.randint(
                    low=10, high=1000, size=(3, 1920, 1920), dtype=np.uint16
                ),
            },
        },
        "measurement": {
            "@NX_class": "NXcollection",
            "elapsed_time": [0.0, 1.0, 2.0],
            ">rayonix": "../instrument/rayonix/data",
        },
        "sample": {
            "@NX_class": "NXsample",
            "name": "Test",
        },
    }
    with h5py.File(file_path, "w") as h5_file:
        dicttonx(h5_file_content, h5_file, h5path="1.1")

    return file_path


def test_task(tmp_path, h5_scan_file):
    workflow = {
        "graph": {"id": "txs"},
        "nodes": [
            {
                "id": "txs",
                "task_type": "class",
                "task_identifier": "ewokstxs.tasks.txs.TxsTask",
            },
        ],
    }

    output_filename = str(tmp_path.absolute() / "output.h5")
    scan_number = 1

    parameters = {
        "scan_key": None,
        "filename": str(h5_scan_file),
        "scan": scan_number,
        "energy": 18000,
        "distance": 0.3,
        "center": (960.0, 960.0),
        "detector": "rayonix",
        "binning": (2, 2),
        "output_filename": output_filename,
    }

    inputs = [
        {"id": "txs", "name": name, "value": value}
        for name, value in parameters.items()
    ]

    logging.debug(f"workflow parameters: {parameters}")
    result = execute_graph(workflow, inputs=inputs)
    logging.debug(f"workflow result: {result}")
    assert DataUrl(result["nxdata_url"]) == DataUrl(
        file_path=output_filename, data_path=f"/{scan_number}.1/integrate/integrated"
    )
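The `test` extra declared in `pyproject.toml` (pytest>=7, pyqt5) covers this suite, so it can presumably be run with `python -m pytest src/ewokstxs/tests` from a checkout with that extra installed.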
ewokstxs-1.0.0/src/ewokstxs.egg-info/PKG-INFO
ADDED
@@ -0,0 +1,52 @@
Metadata-Version: 2.4
Name: ewokstxs
Version: 1.0.0
Summary: Data processing workflows for ID09
Author-email: ESRF <dau-pydev@esrf.fr>
License-Expression: MIT
Project-URL: Homepage, https://gitlab.esrf.fr/workflow/ewoksapps/ewokstxs/
Project-URL: Documentation, https://ewokstxs.readthedocs.io/
Project-URL: Repository, https://gitlab.esrf.fr/workflow/ewoksapps/ewokstxs/
Project-URL: Issues, https://gitlab.esrf.fr/workflow/ewoksapps/ewokstxs/issues
Project-URL: Changelog, https://gitlab.esrf.fr/workflow/ewoksapps/ewokstxs/-/blob/main/CHANGELOG.md
Keywords: ewoks
Classifier: Intended Audience :: Science/Research
Classifier: Programming Language :: Python :: 3
Requires-Python: >=3.10
Description-Content-Type: text/markdown
License-File: LICENSE.md
Requires-Dist: blissdata>=2.3.0
Requires-Dist: ewoks
Requires-Dist: ewokscore
Requires-Dist: ewoksdata[online]
Requires-Dist: ewoksutils
Requires-Dist: pytxs
Provides-Extra: test
Requires-Dist: pytest>=7; extra == "test"
Requires-Dist: pyqt5; extra == "test"
Provides-Extra: dev
Requires-Dist: ewokstxs[test]; extra == "dev"
Requires-Dist: bandit; extra == "dev"
Requires-Dist: black>=26; extra == "dev"
Requires-Dist: flake8>=4; extra == "dev"
Requires-Dist: isort; extra == "dev"
Requires-Dist: mypy; extra == "dev"
Provides-Extra: doc
Requires-Dist: ewokstxs[test]; extra == "doc"
Requires-Dist: sphinx>=4.5; extra == "doc"
Requires-Dist: pydata_sphinx_theme; extra == "doc"
Requires-Dist: ewokssphinx; extra == "doc"
Requires-Dist: sphinx-copybutton; extra == "doc"
Dynamic: license-file

# ewokstxs

Ewoks tasks for the [txs package](https://gitlab.esrf.fr/levantin/txs).

## Resources

- [Package on pypi.org](https://pypi.org/project/ewokstxs/)
- [Documentation](https://ewokstxs.readthedocs.io/)
- [Contributing](CONTRIBUTING.md)
- [Changelog](CHANGELOG.md)
ewokstxs-1.0.0/src/ewokstxs.egg-info/SOURCES.txt
ADDED
@@ -0,0 +1,16 @@
LICENSE.md
README.md
pyproject.toml
src/ewokstxs/__init__.py
src/ewokstxs/__main__.py
src/ewokstxs.egg-info/PKG-INFO
src/ewokstxs.egg-info/SOURCES.txt
src/ewokstxs.egg-info/dependency_links.txt
src/ewokstxs.egg-info/entry_points.txt
src/ewokstxs.egg-info/requires.txt
src/ewokstxs.egg-info/top_level.txt
src/ewokstxs/tasks/__init__.py
src/ewokstxs/tasks/txs.py
src/ewokstxs/tasks/utils.py
src/ewokstxs/tests/__init__.py
src/ewokstxs/tests/test_TxsTask.py
ewokstxs-1.0.0/src/ewokstxs.egg-info/dependency_links.txt
ADDED
@@ -0,0 +1 @@

ewokstxs-1.0.0/src/ewokstxs.egg-info/requires.txt
ADDED
@@ -0,0 +1,25 @@
blissdata>=2.3.0
ewoks
ewokscore
ewoksdata[online]
ewoksutils
pytxs

[dev]
ewokstxs[test]
bandit
black>=26
flake8>=4
isort
mypy

[doc]
ewokstxs[test]
sphinx>=4.5
pydata_sphinx_theme
ewokssphinx
sphinx-copybutton

[test]
pytest>=7
pyqt5
ewokstxs-1.0.0/src/ewokstxs.egg-info/top_level.txt
ADDED
@@ -0,0 +1 @@
ewokstxs