dkist-processing-ops 1.6.13__tar.gz → 1.6.15__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dkist-processing-ops might be problematic. See the details below.

Files changed (27)
  1. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/PKG-INFO +1 -1
  2. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/dkist_processing_ops/_version.py +2 -2
  3. dkist_processing_ops-1.6.15/dkist_processing_ops/tasks/read_memory_leak.py +92 -0
  4. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/dkist_processing_ops.egg-info/PKG-INFO +1 -1
  5. dkist_processing_ops-1.6.13/dkist_processing_ops/tasks/read_memory_leak.py +0 -49
  6. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/.gitignore +0 -0
  7. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/.pre-commit-config.yaml +0 -0
  8. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/LICENSE.rst +0 -0
  9. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/MANIFEST.in +0 -0
  10. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/README.rst +0 -0
  11. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/bitbucket-pipelines.yml +0 -0
  12. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/dkist_processing_ops/__init__.py +0 -0
  13. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/dkist_processing_ops/dags/scale.py +0 -0
  14. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/dkist_processing_ops/tasks/__init__.py +0 -0
  15. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/dkist_processing_ops/tasks/wait.py +0 -0
  16. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/dkist_processing_ops/tests/__init__.py +0 -0
  17. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/dkist_processing_ops/tests/test_workflows.py +0 -0
  18. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/dkist_processing_ops/workflows/__init__.py +0 -0
  19. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/dkist_processing_ops/workflows/memory_leak.py +0 -0
  20. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/dkist_processing_ops/workflows/smoke.py +0 -0
  21. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/dkist_processing_ops.egg-info/SOURCES.txt +0 -0
  22. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/dkist_processing_ops.egg-info/dependency_links.txt +0 -0
  23. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/dkist_processing_ops.egg-info/not-zip-safe +0 -0
  24. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/dkist_processing_ops.egg-info/requires.txt +0 -0
  25. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/dkist_processing_ops.egg-info/top_level.txt +0 -0
  26. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/pyproject.toml +0 -0
  27. {dkist_processing_ops-1.6.13 → dkist_processing_ops-1.6.15}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dkist-processing-ops
3
- Version: 1.6.13
3
+ Version: 1.6.15
4
4
  Summary: Automated Processing smoke test and operations workflows
5
5
  Author-email: NSO / AURA <dkistdc@nso.edu>
6
6
  License: BSD 3-Clause
@@ -12,5 +12,5 @@ __version__: str
12
12
  __version_tuple__: VERSION_TUPLE
13
13
  version_tuple: VERSION_TUPLE
14
14
 
15
- __version__ = version = '1.6.13'
16
- __version_tuple__ = version_tuple = (1, 6, 13)
15
+ __version__ = version = '1.6.15'
16
+ __version_tuple__ = version_tuple = (1, 6, 15)
@@ -0,0 +1,92 @@
1
+ import gc
2
+ import logging
3
+ import tracemalloc
4
+ from abc import ABC
5
+ from pathlib import Path
6
+
7
+ import numpy as np
8
+ from astropy.io import fits
9
+ from dkist_processing_common.codecs.fits import fits_hdu_decoder
10
+ from dkist_processing_common.codecs.path import path_decoder
11
+ from dkist_processing_common.models.tags import Tag
12
+ from dkist_processing_common.tasks import WorkflowTaskBase
13
+
14
+
15
+ def generated_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
16
+ data = np.random.rand(4096, 4096)
17
+ hdu = fits.CompImageHDU(data)
18
+ return hdu
19
+
20
+
21
+ def fits_garbage_collect_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
22
+ hdul = fits.open(path)
23
+ hdu = fits.CompImageHDU(header=hdul[1].header, data=hdul[1].data)
24
+ hdul.close()
25
+ del hdul
26
+ gc.collect()
27
+ return hdu
28
+
29
+
30
+ class FitsDataRead(WorkflowTaskBase, ABC):
31
+ @property
32
+ def run_type(self):
33
+ return self.metadata_store_recipe_run_configuration().get("run_type", "file_read")
34
+
35
+ def run(self) -> None:
36
+ tracemalloc.start()
37
+ if self.run_type == "garbage_collect_read":
38
+ hdus = self.read(
39
+ tags=[Tag.input(), Tag.frame()], decoder=fits_garbage_collect_hdu_decoder
40
+ )
41
+ for i, hdu in enumerate(hdus):
42
+ h = hdu.header
43
+ d = hdu.data
44
+ if i % 50 == 0:
45
+ cur, peak = tracemalloc.get_traced_memory()
46
+ logging.info(
47
+ f"After {i} files current memory usage is {cur / 10 ** 6}MB; Peak was {peak / 10 ** 6}MB"
48
+ )
49
+
50
+ if self.run_type == "garbage_collect_task":
51
+ filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
52
+ for i, filepath in enumerate(filepaths):
53
+ hdu = fits_garbage_collect_hdu_decoder(filepath)
54
+ h = hdu.header
55
+ d = hdu.data
56
+ if i % 50 == 0:
57
+ cur, peak = tracemalloc.get_traced_memory()
58
+ logging.info(
59
+ f"After {i} files current memory usage is {cur / 10 ** 6}MB; Peak was {peak / 10 ** 6}MB"
60
+ )
61
+
62
+ if self.run_type == "file_read":
63
+ hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_hdu_decoder)
64
+ for i, hdu in enumerate(hdus):
65
+ h = hdu.header
66
+ d = hdu.data
67
+ if i % 50 == 0:
68
+ cur, peak = tracemalloc.get_traced_memory()
69
+ logging.info(
70
+ f"After {i} files current memory usage is {cur / 10 ** 6}MB; Peak was {peak / 10 ** 6}MB"
71
+ )
72
+ #
73
+ # if self.run_type == "generated_read":
74
+ # hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=generated_hdu_decoder)
75
+ # for hdu in hdus:
76
+ # h = hdu.header
77
+ # d = hdu.data
78
+ #
79
+ # if self.run_type == "file_task":
80
+ # filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
81
+ # for filepath in filepaths:
82
+ # hdu = fits.open(filepath)[1]
83
+ # h = hdu.header
84
+ # d = hdu.data
85
+ #
86
+ # if self.run_type == "generated_task":
87
+ # filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
88
+ # for filepath in filepaths:
89
+ # data = np.random.rand(4096, 4096)
90
+ # hdu = fits.CompImageHDU(data)
91
+ # h = hdu.header
92
+ # d = hdu.data
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dkist-processing-ops
3
- Version: 1.6.13
3
+ Version: 1.6.15
4
4
  Summary: Automated Processing smoke test and operations workflows
5
5
  Author-email: NSO / AURA <dkistdc@nso.edu>
6
6
  License: BSD 3-Clause
@@ -1,49 +0,0 @@
1
- from abc import ABC
2
- from pathlib import Path
3
-
4
- import numpy as np
5
- from astropy.io import fits
6
- from dkist_processing_common.codecs.fits import fits_hdu_decoder
7
- from dkist_processing_common.codecs.path import path_decoder
8
- from dkist_processing_common.models.tags import Tag
9
- from dkist_processing_common.tasks import WorkflowTaskBase
10
-
11
-
12
- def generated_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
13
- data = np.random.rand(4096, 4096)
14
- hdu = fits.CompImageHDU(data)
15
- return hdu
16
-
17
-
18
- class FitsDataRead(WorkflowTaskBase, ABC):
19
- @property
20
- def run_type(self):
21
- return self.metadata_store_recipe_run_configuration().get("run_type", "file_read")
22
-
23
- def run(self) -> None:
24
- if self.run_type == "file_read":
25
- hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_hdu_decoder)
26
- for hdu in hdus:
27
- h = hdu.header
28
- d = hdu.data
29
-
30
- if self.run_type == "generated_read":
31
- hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=generated_hdu_decoder)
32
- for hdu in hdus:
33
- h = hdu.header
34
- d = hdu.data
35
-
36
- if self.run_type == "file_task":
37
- filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
38
- for filepath in filepaths:
39
- hdu = fits.open(filepath)[1]
40
- h = hdu.header
41
- d = hdu.data
42
-
43
- if self.run_type == "generated_task":
44
- filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
45
- for filepath in filepaths:
46
- data = np.random.rand(4096, 4096)
47
- hdu = fits.CompImageHDU(data)
48
- h = hdu.header
49
- d = hdu.data