dkist-processing-ops 1.6.15__tar.gz → 1.6.17__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dkist-processing-ops might be problematic. See the package registry page for more details.

Files changed (27)
  1. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/PKG-INFO +1 -1
  2. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/dkist_processing_ops/_version.py +2 -2
  3. dkist_processing_ops-1.6.17/dkist_processing_ops/tasks/read_memory_leak.py +103 -0
  4. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/dkist_processing_ops.egg-info/PKG-INFO +1 -1
  5. dkist_processing_ops-1.6.15/dkist_processing_ops/tasks/read_memory_leak.py +0 -92
  6. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/.gitignore +0 -0
  7. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/.pre-commit-config.yaml +0 -0
  8. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/LICENSE.rst +0 -0
  9. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/MANIFEST.in +0 -0
  10. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/README.rst +0 -0
  11. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/bitbucket-pipelines.yml +0 -0
  12. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/dkist_processing_ops/__init__.py +0 -0
  13. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/dkist_processing_ops/dags/scale.py +0 -0
  14. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/dkist_processing_ops/tasks/__init__.py +0 -0
  15. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/dkist_processing_ops/tasks/wait.py +0 -0
  16. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/dkist_processing_ops/tests/__init__.py +0 -0
  17. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/dkist_processing_ops/tests/test_workflows.py +0 -0
  18. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/dkist_processing_ops/workflows/__init__.py +0 -0
  19. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/dkist_processing_ops/workflows/memory_leak.py +0 -0
  20. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/dkist_processing_ops/workflows/smoke.py +0 -0
  21. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/dkist_processing_ops.egg-info/SOURCES.txt +0 -0
  22. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/dkist_processing_ops.egg-info/dependency_links.txt +0 -0
  23. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/dkist_processing_ops.egg-info/not-zip-safe +0 -0
  24. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/dkist_processing_ops.egg-info/requires.txt +0 -0
  25. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/dkist_processing_ops.egg-info/top_level.txt +0 -0
  26. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/pyproject.toml +0 -0
  27. {dkist_processing_ops-1.6.15 → dkist_processing_ops-1.6.17}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dkist-processing-ops
3
- Version: 1.6.15
3
+ Version: 1.6.17
4
4
  Summary: Automated Processing smoke test and operations workflows
5
5
  Author-email: NSO / AURA <dkistdc@nso.edu>
6
6
  License: BSD 3-Clause
@@ -12,5 +12,5 @@ __version__: str
12
12
  __version_tuple__: VERSION_TUPLE
13
13
  version_tuple: VERSION_TUPLE
14
14
 
15
- __version__ = version = '1.6.15'
16
- __version_tuple__ = version_tuple = (1, 6, 15)
15
+ __version__ = version = '1.6.17'
16
+ __version_tuple__ = version_tuple = (1, 6, 17)
@@ -0,0 +1,103 @@
1
+ import gc
2
+ import logging
3
+ from abc import ABC
4
+ from pathlib import Path
5
+
6
+ import numpy as np
7
+ from astropy.io import fits
8
+ from dkist_processing_common.codecs.fits import fits_hdu_decoder
9
+ from dkist_processing_common.codecs.path import path_decoder
10
+ from dkist_processing_common.models.tags import Tag
11
+ from dkist_processing_common.tasks import WorkflowTaskBase
12
+
13
+
14
+ # def generated_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
15
+ # data = np.random.rand(4096, 4096)
16
+ # hdu = fits.CompImageHDU(data)
17
+ # return hdu
18
+
19
+
20
+ def fits_bytes_decoder(path: Path) -> bytes:
21
+ with open(path, "rb") as f:
22
+ return f.read()
23
+
24
+
25
+ # def fits_garbage_collect_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
26
+ # hdul = fits.open(path)
27
+ # hdu = fits.CompImageHDU(header=hdul[1].header, data=hdul[1].data)
28
+ # hdul.close()
29
+ # del hdul
30
+ # gc.collect()
31
+ # return hdu
32
+
33
+
34
+ class FitsDataRead(WorkflowTaskBase, ABC):
35
+ @property
36
+ def run_type(self):
37
+ return self.metadata_store_recipe_run_configuration().get("run_type", "file_read")
38
+
39
+ def run(self) -> None:
40
+ if self.run_type == "bytes_read":
41
+ bytes_objects = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_bytes_decoder)
42
+ for i, byte_object in enumerate(bytes_objects):
43
+ pass
44
+
45
+ if self.run_type == "bytes_task":
46
+ filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
47
+ for filepath in filepaths:
48
+ with open(filepath, "rb") as f:
49
+ byte_object = f.read()
50
+
51
+ if self.run_type == "file_read":
52
+ hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_hdu_decoder)
53
+ for hdu in hdus:
54
+ h = hdu.header
55
+ d = hdu.data
56
+
57
+ if self.run_type == "file_task":
58
+ filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
59
+ for filepath in filepaths:
60
+ hdu = fits.open(filepath)[1]
61
+ h = hdu.header
62
+ d = hdu.data
63
+
64
+ # if self.run_type == "garbage_collect_read":
65
+ # hdus = self.read(
66
+ # tags=[Tag.input(), Tag.frame()], decoder=fits_garbage_collect_hdu_decoder
67
+ # )
68
+ # for i, hdu in enumerate(hdus):
69
+ # h = hdu.header
70
+ # d = hdu.data
71
+ # if i % 50 == 0:
72
+ # cur, peak = tracemalloc.get_traced_memory()
73
+ # logging.info(
74
+ # f"After {i} files current memory usage is {cur / 10 ** 6}MB; Peak was {peak / 10 ** 6}MB"
75
+ # )
76
+ #
77
+ # if self.run_type == "garbage_collect_task":
78
+ # filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
79
+ # for i, filepath in enumerate(filepaths):
80
+ # hdu = fits_garbage_collect_hdu_decoder(filepath)
81
+ # h = hdu.header
82
+ # d = hdu.data
83
+ # if i % 50 == 0:
84
+ # cur, peak = tracemalloc.get_traced_memory()
85
+ # logging.info(
86
+ # f"After {i} files current memory usage is {cur / 10 ** 6}MB; Peak was {peak / 10 ** 6}MB"
87
+ # )
88
+ #
89
+ # if self.run_type == "generated_read":
90
+ # hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=generated_hdu_decoder)
91
+ # for hdu in hdus:
92
+ # h = hdu.header
93
+ # d = hdu.data
94
+ #
95
+
96
+ #
97
+ # if self.run_type == "generated_task":
98
+ # filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
99
+ # for filepath in filepaths:
100
+ # data = np.random.rand(4096, 4096)
101
+ # hdu = fits.CompImageHDU(data)
102
+ # h = hdu.header
103
+ # d = hdu.data
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dkist-processing-ops
3
- Version: 1.6.15
3
+ Version: 1.6.17
4
4
  Summary: Automated Processing smoke test and operations workflows
5
5
  Author-email: NSO / AURA <dkistdc@nso.edu>
6
6
  License: BSD 3-Clause
@@ -1,92 +0,0 @@
1
- import gc
2
- import logging
3
- import tracemalloc
4
- from abc import ABC
5
- from pathlib import Path
6
-
7
- import numpy as np
8
- from astropy.io import fits
9
- from dkist_processing_common.codecs.fits import fits_hdu_decoder
10
- from dkist_processing_common.codecs.path import path_decoder
11
- from dkist_processing_common.models.tags import Tag
12
- from dkist_processing_common.tasks import WorkflowTaskBase
13
-
14
-
15
- def generated_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
16
- data = np.random.rand(4096, 4096)
17
- hdu = fits.CompImageHDU(data)
18
- return hdu
19
-
20
-
21
- def fits_garbage_collect_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
22
- hdul = fits.open(path)
23
- hdu = fits.CompImageHDU(header=hdul[1].header, data=hdul[1].data)
24
- hdul.close()
25
- del hdul
26
- gc.collect()
27
- return hdu
28
-
29
-
30
- class FitsDataRead(WorkflowTaskBase, ABC):
31
- @property
32
- def run_type(self):
33
- return self.metadata_store_recipe_run_configuration().get("run_type", "file_read")
34
-
35
- def run(self) -> None:
36
- tracemalloc.start()
37
- if self.run_type == "garbage_collect_read":
38
- hdus = self.read(
39
- tags=[Tag.input(), Tag.frame()], decoder=fits_garbage_collect_hdu_decoder
40
- )
41
- for i, hdu in enumerate(hdus):
42
- h = hdu.header
43
- d = hdu.data
44
- if i % 50 == 0:
45
- cur, peak = tracemalloc.get_traced_memory()
46
- logging.info(
47
- f"After {i} files current memory usage is {cur / 10 ** 6}MB; Peak was {peak / 10 ** 6}MB"
48
- )
49
-
50
- if self.run_type == "garbage_collect_task":
51
- filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
52
- for i, filepath in enumerate(filepaths):
53
- hdu = fits_garbage_collect_hdu_decoder(filepath)
54
- h = hdu.header
55
- d = hdu.data
56
- if i % 50 == 0:
57
- cur, peak = tracemalloc.get_traced_memory()
58
- logging.info(
59
- f"After {i} files current memory usage is {cur / 10 ** 6}MB; Peak was {peak / 10 ** 6}MB"
60
- )
61
-
62
- if self.run_type == "file_read":
63
- hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_hdu_decoder)
64
- for i, hdu in enumerate(hdus):
65
- h = hdu.header
66
- d = hdu.data
67
- if i % 50 == 0:
68
- cur, peak = tracemalloc.get_traced_memory()
69
- logging.info(
70
- f"After {i} files current memory usage is {cur / 10 ** 6}MB; Peak was {peak / 10 ** 6}MB"
71
- )
72
- #
73
- # if self.run_type == "generated_read":
74
- # hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=generated_hdu_decoder)
75
- # for hdu in hdus:
76
- # h = hdu.header
77
- # d = hdu.data
78
- #
79
- # if self.run_type == "file_task":
80
- # filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
81
- # for filepath in filepaths:
82
- # hdu = fits.open(filepath)[1]
83
- # h = hdu.header
84
- # d = hdu.data
85
- #
86
- # if self.run_type == "generated_task":
87
- # filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
88
- # for filepath in filepaths:
89
- # data = np.random.rand(4096, 4096)
90
- # hdu = fits.CompImageHDU(data)
91
- # h = hdu.header
92
- # d = hdu.data