dkist-processing-ops 1.6.17__tar.gz → 1.6.18__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dkist-processing-ops might be problematic.

Files changed (27)
  1. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/PKG-INFO +1 -1
  2. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/dkist_processing_ops/_version.py +2 -2
  3. dkist_processing_ops-1.6.18/dkist_processing_ops/tasks/read_memory_leak.py +47 -0
  4. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/dkist_processing_ops.egg-info/PKG-INFO +1 -1
  5. dkist_processing_ops-1.6.17/dkist_processing_ops/tasks/read_memory_leak.py +0 -103
  6. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/.gitignore +0 -0
  7. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/.pre-commit-config.yaml +0 -0
  8. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/LICENSE.rst +0 -0
  9. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/MANIFEST.in +0 -0
  10. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/README.rst +0 -0
  11. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/bitbucket-pipelines.yml +0 -0
  12. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/dkist_processing_ops/__init__.py +0 -0
  13. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/dkist_processing_ops/dags/scale.py +0 -0
  14. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/dkist_processing_ops/tasks/__init__.py +0 -0
  15. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/dkist_processing_ops/tasks/wait.py +0 -0
  16. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/dkist_processing_ops/tests/__init__.py +0 -0
  17. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/dkist_processing_ops/tests/test_workflows.py +0 -0
  18. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/dkist_processing_ops/workflows/__init__.py +0 -0
  19. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/dkist_processing_ops/workflows/memory_leak.py +0 -0
  20. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/dkist_processing_ops/workflows/smoke.py +0 -0
  21. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/dkist_processing_ops.egg-info/SOURCES.txt +0 -0
  22. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/dkist_processing_ops.egg-info/dependency_links.txt +0 -0
  23. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/dkist_processing_ops.egg-info/not-zip-safe +0 -0
  24. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/dkist_processing_ops.egg-info/requires.txt +0 -0
  25. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/dkist_processing_ops.egg-info/top_level.txt +0 -0
  26. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/pyproject.toml +0 -0
  27. {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/setup.cfg +0 -0

{dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dkist-processing-ops
-Version: 1.6.17
+Version: 1.6.18
 Summary: Automated Processing smoke test and operations workflows
 Author-email: NSO / AURA <dkistdc@nso.edu>
 License: BSD 3-Clause

{dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/dkist_processing_ops/_version.py
@@ -12,5 +12,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '1.6.17'
-__version_tuple__ = version_tuple = (1, 6, 17)
+__version__ = version = '1.6.18'
+__version_tuple__ = version_tuple = (1, 6, 18)

dkist_processing_ops-1.6.18/dkist_processing_ops/tasks/read_memory_leak.py
@@ -0,0 +1,47 @@
+import gc
+import logging
+from abc import ABC
+from pathlib import Path
+
+import numpy as np
+from astropy.io import fits
+from dkist_processing_common.codecs.fits import fits_hdu_decoder
+from dkist_processing_common.codecs.path import path_decoder
+from dkist_processing_common.models.tags import Tag
+from dkist_processing_common.tasks import WorkflowTaskBase
+
+
+def fits_bytes_decoder(path: Path) -> bytes:
+    with open(path, "rb") as f:
+        return f.read()
+
+
+class FitsDataRead(WorkflowTaskBase, ABC):
+    @property
+    def run_type(self):
+        return self.metadata_store_recipe_run_configuration().get("run_type", "file_read")
+
+    def run(self) -> None:
+        if self.run_type == "bytes_read":
+            bytes_objects = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_bytes_decoder)
+            for i, byte_object in enumerate(bytes_objects):
+                pass
+
+        if self.run_type == "bytes_task":
+            filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
+            for filepath in filepaths:
+                with open(filepath, "rb") as f:
+                    byte_object = f.read()
+
+        if self.run_type == "file_read":
+            hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_hdu_decoder)
+            for hdu in hdus:
+                h = hdu.header
+                d = hdu.data
+
+        if self.run_type == "file_task":
+            filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
+            for filepath in filepaths:
+                hdu = fits.open(filepath)[1]
+                h = hdu.header
+                d = hdu.data
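
The retained task boils the experiment down to four read strategies: framework-mediated byte reads, direct byte reads, framework-mediated HDU reads via fits_hdu_decoder, and direct fits.open() calls. A minimal standalone sketch of the same comparison outside the WorkflowTaskBase framework follows; the function names, the input_frames directory, and the use of tracemalloc for measurement are assumptions here, not part of the released task, and only the two read patterns are taken from it:

import tracemalloc
from collections.abc import Callable
from pathlib import Path

from astropy.io import fits


def read_bytes(path: Path) -> None:
    # Mirrors the "bytes_*" modes: raw file I/O, no FITS parsing.
    with open(path, "rb") as f:
        f.read()


def read_hdu(path: Path) -> None:
    # Mirrors the "file_*" modes: open the file, take extension 1 (the task
    # assumes the image lives there, per fits.open(filepath)[1]), and touch
    # header and data so astropy's lazily loaded data array is realized.
    # Like the task's file_task branch, the HDUList is deliberately not closed.
    hdu = fits.open(path)[1]
    _ = hdu.header
    _ = hdu.data


def measure(name: str, reader: Callable[[Path], None], paths: list[Path]) -> None:
    # Run one read strategy over all files and report allocation totals.
    tracemalloc.start()
    for path in paths:
        reader(path)
    current, peak = tracemalloc.get_traced_memory()
    tracemalloc.stop()
    print(f"{name}: current={current / 10**6:.1f}MB peak={peak / 10**6:.1f}MB")


if __name__ == "__main__":
    paths = sorted(Path("input_frames").glob("*.fits"))  # hypothetical input set
    measure("bytes_read", read_bytes, paths)
    measure("file_read", read_hdu, paths)

Touching hdu.data is the important part: astropy defers loading the data section, so a loop that only opens files would never exercise the allocations being hunted.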

{dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.18}/dkist_processing_ops.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dkist-processing-ops
-Version: 1.6.17
+Version: 1.6.18
 Summary: Automated Processing smoke test and operations workflows
 Author-email: NSO / AURA <dkistdc@nso.edu>
 License: BSD 3-Clause

dkist_processing_ops-1.6.17/dkist_processing_ops/tasks/read_memory_leak.py
@@ -1,103 +0,0 @@
-import gc
-import logging
-from abc import ABC
-from pathlib import Path
-
-import numpy as np
-from astropy.io import fits
-from dkist_processing_common.codecs.fits import fits_hdu_decoder
-from dkist_processing_common.codecs.path import path_decoder
-from dkist_processing_common.models.tags import Tag
-from dkist_processing_common.tasks import WorkflowTaskBase
-
-
-# def generated_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
-#     data = np.random.rand(4096, 4096)
-#     hdu = fits.CompImageHDU(data)
-#     return hdu
-
-
-def fits_bytes_decoder(path: Path) -> bytes:
-    with open(path, "rb") as f:
-        return f.read()
-
-
-# def fits_garbage_collect_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
-#     hdul = fits.open(path)
-#     hdu = fits.CompImageHDU(header=hdul[1].header, data=hdul[1].data)
-#     hdul.close()
-#     del hdul
-#     gc.collect()
-#     return hdu
-
-
-class FitsDataRead(WorkflowTaskBase, ABC):
-    @property
-    def run_type(self):
-        return self.metadata_store_recipe_run_configuration().get("run_type", "file_read")
-
-    def run(self) -> None:
-        if self.run_type == "bytes_read":
-            bytes_objects = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_bytes_decoder)
-            for i, byte_object in enumerate(bytes_objects):
-                pass
-
-        if self.run_type == "bytes_task":
-            filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
-            for filepath in filepaths:
-                with open(filepath, "rb") as f:
-                    byte_object = f.read()
-
-        if self.run_type == "file_read":
-            hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_hdu_decoder)
-            for hdu in hdus:
-                h = hdu.header
-                d = hdu.data
-
-        if self.run_type == "file_task":
-            filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
-            for filepath in filepaths:
-                hdu = fits.open(filepath)[1]
-                h = hdu.header
-                d = hdu.data
-
-        # if self.run_type == "garbage_collect_read":
-        #     hdus = self.read(
-        #         tags=[Tag.input(), Tag.frame()], decoder=fits_garbage_collect_hdu_decoder
-        #     )
-        #     for i, hdu in enumerate(hdus):
-        #         h = hdu.header
-        #         d = hdu.data
-        #         if i % 50 == 0:
-        #             cur, peak = tracemalloc.get_traced_memory()
-        #             logging.info(
-        #                 f"After {i} files current memory usage is {cur / 10 ** 6}MB; Peak was {peak / 10 ** 6}MB"
-        #             )
-        #
-        # if self.run_type == "garbage_collect_task":
-        #     filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
-        #     for i, filepath in enumerate(filepaths):
-        #         hdu = fits_garbage_collect_hdu_decoder(filepath)
-        #         h = hdu.header
-        #         d = hdu.data
-        #         if i % 50 == 0:
-        #             cur, peak = tracemalloc.get_traced_memory()
-        #             logging.info(
-        #                 f"After {i} files current memory usage is {cur / 10 ** 6}MB; Peak was {peak / 10 ** 6}MB"
-        #             )
-        #
-        # if self.run_type == "generated_read":
-        #     hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=generated_hdu_decoder)
-        #     for hdu in hdus:
-        #         h = hdu.header
-        #         d = hdu.data
-        #
-
-        #
-        # if self.run_type == "generated_task":
-        #     filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
-        #     for filepath in filepaths:
-        #         data = np.random.rand(4096, 4096)
-        #         hdu = fits.CompImageHDU(data)
-        #         h = hdu.header
-        #         d = hdu.data
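
The deleted module kept the earlier instrumentation around as commented-out experiments: decoders generating synthetic 4096×4096 frames, periodic tracemalloc polling (tracemalloc was referenced without ever being imported, harmless only because those blocks were commented out), and a decoder that aggressively detaches from the open file. A self-contained rendering of that last pattern is sketched below; the function name is hypothetical, while the body follows the commented-out fits_garbage_collect_hdu_decoder:

import gc
from pathlib import Path

from astropy.io import fits


def gc_hdu_decoder(path: Path) -> fits.CompImageHDU:
    # Copy extension 1's header and data into a fresh HDU while the file is
    # still open, so the copy is fully realized in memory.
    hdul = fits.open(path)
    hdu = fits.CompImageHDU(header=hdul[1].header, data=hdul[1].data)
    # Close and drop the HDUList, then force a collection so nothing keeps
    # the file handle (or a lazily loaded buffer) alive past the return.
    hdul.close()
    del hdul
    gc.collect()
    return hdu

The copy is the crux of the idea: returning hdul[1] directly would hand back an HDU whose data may still be lazily tied to the file being closed.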