dkist-processing-ops 1.6.16.tar.gz → 1.6.18.tar.gz

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.

Files changed (27)
  1. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/PKG-INFO +1 -1
  2. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/dkist_processing_ops/_version.py +2 -2
  3. dkist_processing_ops-1.6.18/dkist_processing_ops/tasks/read_memory_leak.py +47 -0
  4. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/dkist_processing_ops.egg-info/PKG-INFO +1 -1
  5. dkist_processing_ops-1.6.16/dkist_processing_ops/tasks/read_memory_leak.py +0 -95
  6. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/.gitignore +0 -0
  7. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/.pre-commit-config.yaml +0 -0
  8. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/LICENSE.rst +0 -0
  9. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/MANIFEST.in +0 -0
  10. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/README.rst +0 -0
  11. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/bitbucket-pipelines.yml +0 -0
  12. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/dkist_processing_ops/__init__.py +0 -0
  13. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/dkist_processing_ops/dags/scale.py +0 -0
  14. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/dkist_processing_ops/tasks/__init__.py +0 -0
  15. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/dkist_processing_ops/tasks/wait.py +0 -0
  16. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/dkist_processing_ops/tests/__init__.py +0 -0
  17. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/dkist_processing_ops/tests/test_workflows.py +0 -0
  18. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/dkist_processing_ops/workflows/__init__.py +0 -0
  19. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/dkist_processing_ops/workflows/memory_leak.py +0 -0
  20. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/dkist_processing_ops/workflows/smoke.py +0 -0
  21. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/dkist_processing_ops.egg-info/SOURCES.txt +0 -0
  22. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/dkist_processing_ops.egg-info/dependency_links.txt +0 -0
  23. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/dkist_processing_ops.egg-info/not-zip-safe +0 -0
  24. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/dkist_processing_ops.egg-info/requires.txt +0 -0
  25. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/dkist_processing_ops.egg-info/top_level.txt +0 -0
  26. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/pyproject.toml +0 -0
  27. {dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/setup.cfg +0 -0
{dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dkist-processing-ops
-Version: 1.6.16
+Version: 1.6.18
 Summary: Automated Processing smoke test and operations workflows
 Author-email: NSO / AURA <dkistdc@nso.edu>
 License: BSD 3-Clause
{dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/dkist_processing_ops/_version.py

@@ -12,5 +12,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE

-__version__ = version = '1.6.16'
-__version_tuple__ = version_tuple = (1, 6, 16)
+__version__ = version = '1.6.18'
+__version_tuple__ = version_tuple = (1, 6, 18)
dkist_processing_ops-1.6.18/dkist_processing_ops/tasks/read_memory_leak.py

@@ -0,0 +1,47 @@
+import gc
+import logging
+from abc import ABC
+from pathlib import Path
+
+import numpy as np
+from astropy.io import fits
+from dkist_processing_common.codecs.fits import fits_hdu_decoder
+from dkist_processing_common.codecs.path import path_decoder
+from dkist_processing_common.models.tags import Tag
+from dkist_processing_common.tasks import WorkflowTaskBase
+
+
+def fits_bytes_decoder(path: Path) -> bytes:
+    with open(path, "rb") as f:
+        return f.read()
+
+
+class FitsDataRead(WorkflowTaskBase, ABC):
+    @property
+    def run_type(self):
+        return self.metadata_store_recipe_run_configuration().get("run_type", "file_read")
+
+    def run(self) -> None:
+        if self.run_type == "bytes_read":
+            bytes_objects = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_bytes_decoder)
+            for i, byte_object in enumerate(bytes_objects):
+                pass
+
+        if self.run_type == "bytes_task":
+            filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
+            for filepath in filepaths:
+                with open(filepath, "rb") as f:
+                    byte_object = f.read()
+
+        if self.run_type == "file_read":
+            hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_hdu_decoder)
+            for hdu in hdus:
+                h = hdu.header
+                d = hdu.data
+
+        if self.run_type == "file_task":
+            filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
+            for filepath in filepaths:
+                hdu = fits.open(filepath)[1]
+                h = hdu.header
+                d = hdu.data
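
The four run_type branches above exercise the same tagged frames through different read strategies: framework-decoded raw bytes, task-side raw bytes, framework-decoded HDUs, and task-side HDUs. This lets a memory leak be attributed to either the decoder layer or the FITS parse itself. A minimal standalone sketch of the same measurement, assuming a hypothetical directory of FITS frames in place of the tagged read and using tracemalloc as the removed 1.6.16 experiments did (paths and the /tmp/frames location are illustrative, not part of the package):

import tracemalloc
from pathlib import Path

from astropy.io import fits

# Hypothetical stand-in for self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
paths = sorted(Path("/tmp/frames").glob("*.fits"))

def read_bytes(path: Path) -> bytes:
    # Mirrors fits_bytes_decoder: raw bytes only, no FITS parsing
    with open(path, "rb") as f:
        return f.read()

def read_hdu(path: Path):
    # Mirrors the file_task branch: parse extension 1 and touch header and data
    hdu = fits.open(path)[1]
    return hdu.header, hdu.data

tracemalloc.start()
for strategy in (read_bytes, read_hdu):
    tracemalloc.reset_peak()
    for i, path in enumerate(paths):
        strategy(path)
        if i % 50 == 0:
            cur, peak = tracemalloc.get_traced_memory()
            print(f"{strategy.__name__}: {i} files, {cur / 10**6:.1f}MB now, {peak / 10**6:.1f}MB peak")

If memory grows under read_hdu but stays flat under read_bytes, the leak sits in the FITS parse/mmap path rather than in file transport.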
{dkist_processing_ops-1.6.16 → dkist_processing_ops-1.6.18}/dkist_processing_ops.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dkist-processing-ops
-Version: 1.6.16
+Version: 1.6.18
 Summary: Automated Processing smoke test and operations workflows
 Author-email: NSO / AURA <dkistdc@nso.edu>
 License: BSD 3-Clause
dkist_processing_ops-1.6.16/dkist_processing_ops/tasks/read_memory_leak.py

@@ -1,95 +0,0 @@
-import gc
-import logging
-from abc import ABC
-from pathlib import Path
-
-import numpy as np
-from astropy.io import fits
-from dkist_processing_common.codecs.fits import fits_hdu_decoder
-from dkist_processing_common.codecs.path import path_decoder
-from dkist_processing_common.models.tags import Tag
-from dkist_processing_common.tasks import WorkflowTaskBase
-
-
-# def generated_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
-#     data = np.random.rand(4096, 4096)
-#     hdu = fits.CompImageHDU(data)
-#     return hdu
-
-
-def fits_bytes_decoder(path: Path) -> bytes:
-    with open(path, "rb") as f:
-        return f.read()
-
-
-# def fits_garbage_collect_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
-#     hdul = fits.open(path)
-#     hdu = fits.CompImageHDU(header=hdul[1].header, data=hdul[1].data)
-#     hdul.close()
-#     del hdul
-#     gc.collect()
-#     return hdu
-
-
-class FitsDataRead(WorkflowTaskBase, ABC):
-    @property
-    def run_type(self):
-        return self.metadata_store_recipe_run_configuration().get("run_type", "file_read")
-
-    def run(self) -> None:
-        if self.run_type == "bytes_read":
-            bytes_objects = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_bytes_decoder)
-            for i, byte_object in enumerate(bytes_objects):
-                pass
-
-        if self.run_type == "file_read":
-            hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_hdu_decoder)
-            for i, hdu in enumerate(hdus):
-                h = hdu.header
-                d = hdu.data
-
-        # if self.run_type == "garbage_collect_read":
-        #     hdus = self.read(
-        #         tags=[Tag.input(), Tag.frame()], decoder=fits_garbage_collect_hdu_decoder
-        #     )
-        #     for i, hdu in enumerate(hdus):
-        #         h = hdu.header
-        #         d = hdu.data
-        #         if i % 50 == 0:
-        #             cur, peak = tracemalloc.get_traced_memory()
-        #             logging.info(
-        #                 f"After {i} files current memory usage is {cur / 10 ** 6}MB; Peak was {peak / 10 ** 6}MB"
-        #             )
-        #
-        # if self.run_type == "garbage_collect_task":
-        #     filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
-        #     for i, filepath in enumerate(filepaths):
-        #         hdu = fits_garbage_collect_hdu_decoder(filepath)
-        #         h = hdu.header
-        #         d = hdu.data
-        #         if i % 50 == 0:
-        #             cur, peak = tracemalloc.get_traced_memory()
-        #             logging.info(
-        #                 f"After {i} files current memory usage is {cur / 10 ** 6}MB; Peak was {peak / 10 ** 6}MB"
-        #             )
-        #
-        # if self.run_type == "generated_read":
-        #     hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=generated_hdu_decoder)
-        #     for hdu in hdus:
-        #         h = hdu.header
-        #         d = hdu.data
-        #
-        # if self.run_type == "file_task":
-        #     filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
-        #     for filepath in filepaths:
-        #         hdu = fits.open(filepath)[1]
-        #         h = hdu.header
-        #         d = hdu.data
-        #
-        # if self.run_type == "generated_task":
-        #     filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
-        #     for filepath in filepaths:
-        #         data = np.random.rand(4096, 4096)
-        #         hdu = fits.CompImageHDU(data)
-        #         h = hdu.header
-        #         d = hdu.data
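
Of the experiments removed above, the commented-out fits_garbage_collect_hdu_decoder documents the pattern astropy users typically reach for when file handles or memory maps appear to leak: copy the needed HDU out of the HDUList, close it, and force collection before returning. A runnable sketch of that removed experiment, assuming (as the original did) that the image of interest is a compressed HDU in extension 1:

import gc
from pathlib import Path

from astropy.io import fits

def fits_garbage_collect_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
    hdul = fits.open(path)
    # Rebuild the HDU so it no longer references the open file's buffers
    hdu = fits.CompImageHDU(header=hdul[1].header, data=hdul[1].data)
    hdul.close()
    del hdul
    gc.collect()
    return hdu

The 1.6.18 module drops this and the generated-data variants, keeping only the four plain strategies, so the comparison it runs is between unmodified astropy reads rather than mitigation attempts.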