dkist-processing-ops 1.6.13__py3-none-any.whl → 1.6.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dkist-processing-ops might be problematic.

dkist_processing_ops/_version.py

@@ -12,5 +12,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE

-__version__ = version = '1.6.13'
-__version_tuple__ = version_tuple = (1, 6, 13)
+__version__ = version = '1.6.14'
+__version_tuple__ = version_tuple = (1, 6, 14)
dkist_processing_ops/tasks/read_memory_leak.py

@@ -1,3 +1,4 @@
+import gc
 from abc import ABC
 from pathlib import Path

@@ -15,35 +16,59 @@ def generated_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
     return hdu


+def fits_garbage_collect_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
+    hdul = fits.open(path)
+    hdu = fits.CompImageHDU(header=hdul[1].header, data=hdul[1].data)
+    hdul.close()
+    del hdul
+    gc.collect()
+    return hdu
+
+
 class FitsDataRead(WorkflowTaskBase, ABC):
     @property
     def run_type(self):
         return self.metadata_store_recipe_run_configuration().get("run_type", "file_read")

     def run(self) -> None:
-        if self.run_type == "file_read":
-            hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_hdu_decoder)
-            for hdu in hdus:
-                h = hdu.header
-                d = hdu.data
-
-        if self.run_type == "generated_read":
-            hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=generated_hdu_decoder)
+        if self.run_type == "garbage_collect_read":
+            hdus = self.read(
+                tags=[Tag.input(), Tag.frame()], decoder=fits_garbage_collect_hdu_decoder
+            )
             for hdu in hdus:
                 h = hdu.header
                 d = hdu.data

-        if self.run_type == "file_task":
+        if self.run_type == "garbage_collect_task":
             filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
             for filepath in filepaths:
-                hdu = fits.open(filepath)[1]
+                hdu = fits_garbage_collect_hdu_decoder(filepath)
                 h = hdu.header
                 d = hdu.data

-        if self.run_type == "generated_task":
-            filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
-            for filepath in filepaths:
-                data = np.random.rand(4096, 4096)
-                hdu = fits.CompImageHDU(data)
+        if self.run_type == "file_read":
+            hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_hdu_decoder)
+            for hdu in hdus:
                 h = hdu.header
                 d = hdu.data
+        #
+        # if self.run_type == "generated_read":
+        #     hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=generated_hdu_decoder)
+        #     for hdu in hdus:
+        #         h = hdu.header
+        #         d = hdu.data
+        #
+        # if self.run_type == "file_task":
+        #     filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
+        #     for filepath in filepaths:
+        #         hdu = fits.open(filepath)[1]
+        #         h = hdu.header
+        #         d = hdu.data
+        #
+        # if self.run_type == "generated_task":
+        #     filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
+        #     for filepath in filepaths:
+        #         data = np.random.rand(4096, 4096)
+        #         hdu = fits.CompImageHDU(data)
+        #         h = hdu.header
+        #         d = hdu.data
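
The decoder added above copies the header and data of HDU 1 into a new CompImageHDU, closes the HDUList, and forces a gc.collect() so astropy's lazily loaded buffers are freed before the next frame is read. The following standalone sketch exercises that same pattern outside the task framework; the temporary file name and the synthetic 64x64 image are illustrative assumptions, not part of the package.

import gc

import numpy as np
from astropy.io import fits

# Illustrative setup only: write a small compressed-image FITS file to decode.
fits.HDUList(
    [fits.PrimaryHDU(), fits.CompImageHDU(data=np.random.rand(64, 64).astype(np.float32))]
).writeto("example_compressed.fits", overwrite=True)


def fits_garbage_collect_hdu_decoder(path) -> fits.CompImageHDU:
    """Mirror of the decoder added in this release: copy HDU 1 out, then free the file."""
    hdul = fits.open(path)
    # Copying header and data into a fresh HDU leaves nothing referencing the open file.
    hdu = fits.CompImageHDU(header=hdul[1].header, data=hdul[1].data)
    hdul.close()
    del hdul
    gc.collect()  # reclaim the lazily loaded buffers immediately
    return hdu


hdu = fits_garbage_collect_hdu_decoder("example_compressed.fits")
print(hdu.data.shape)  # (64, 64)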
dkist_processing_ops-1.6.14.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dkist-processing-ops
-Version: 1.6.13
+Version: 1.6.14
 Summary: Automated Processing smoke test and operations workflows
 Author-email: NSO / AURA <dkistdc@nso.edu>
 License: BSD 3-Clause
dkist_processing_ops-1.6.14.dist-info/RECORD

@@ -1,16 +1,16 @@
 dkist_processing_ops/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-dkist_processing_ops/_version.py,sha256=F6yOQF2i7n8OMZVy5diTWAk2RoBPwyUW2P5AsaXwnHo,413
+dkist_processing_ops/_version.py,sha256=ENoeph2R-6hTwDBnYv-6HGbXQQBxCuxRZdUsEOyJGbk,413
 dkist_processing_ops/dags/scale.py,sha256=We5TYjNhkJ-5ykfbrOMgjTpXdzOCkIeyKyA-40sU9r0,2312
 dkist_processing_ops/tasks/__init__.py,sha256=P81O9cg4dlBMqBTaWitdsAte68RsMtDlhV30JSZfXUY,107
-dkist_processing_ops/tasks/read_memory_leak.py,sha256=vlw-ao2GYObw3tCJGZDXmVbF5TPwEAQiOVRuukhimxk,1752
+dkist_processing_ops/tasks/read_memory_leak.py,sha256=WuwS7Jb-wUBqGsI6Qn8DvGvDHZDsj6jVyRp_vKf2QV4,2657
 dkist_processing_ops/tasks/wait.py,sha256=uObka-nH1dKPcGBDsp3t2RCtTV2F1kksM0V-lRewFuY,273
 dkist_processing_ops/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dkist_processing_ops/tests/test_workflows.py,sha256=Ch_8BlGeQyPJU_9hB_GOncwW-SoZwpRUVKMOEz0RQZk,285
 dkist_processing_ops/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dkist_processing_ops/workflows/memory_leak.py,sha256=lYXAYyJVjXif3Os9xPDp-bPTG_je6HOw1uvRJ4WMUi4,758
 dkist_processing_ops/workflows/smoke.py,sha256=ofXu0_iYF6L3zQy-BOVvS5VdzKhmXs1gyugqMNkd-GM,878
-dkist_processing_ops-1.6.13.dist-info/LICENSE.rst,sha256=LJjTmkf2-q1phdZSySMpiyPxgLOy6zYHOr3R1Bb1__8,327
-dkist_processing_ops-1.6.13.dist-info/METADATA,sha256=3QbaFLhReEeqB_QEdo9j9dfQXA69FEAqOng4BnQAXl4,1500
-dkist_processing_ops-1.6.13.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
-dkist_processing_ops-1.6.13.dist-info/top_level.txt,sha256=o_SNho1HKt6wvCSUhm9qzX9FS2iopnqYuMos1CCD9cI,21
-dkist_processing_ops-1.6.13.dist-info/RECORD,,
+dkist_processing_ops-1.6.14.dist-info/LICENSE.rst,sha256=LJjTmkf2-q1phdZSySMpiyPxgLOy6zYHOr3R1Bb1__8,327
+dkist_processing_ops-1.6.14.dist-info/METADATA,sha256=KUW80lrBGyblfl5r9uSPaWF3ohu26KT2rSlQRV7qleA,1500
+dkist_processing_ops-1.6.14.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
+dkist_processing_ops-1.6.14.dist-info/top_level.txt,sha256=o_SNho1HKt6wvCSUhm9qzX9FS2iopnqYuMos1CCD9cI,21
+dkist_processing_ops-1.6.14.dist-info/RECORD,,
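
For reference, each RECORD line has the form path,sha256=<digest>,<size>, where the digest is the unpadded URL-safe base64 encoding of the file's SHA-256 hash. A minimal sketch of checking one entry from the 1.6.14 RECORD against a locally unpacked copy of the wheel (the local path is an assumption, not something the package provides):

import base64
import hashlib
from pathlib import Path


def record_digest(path: Path) -> str:
    """Compute the unpadded URL-safe base64 SHA-256 digest used in wheel RECORD files."""
    raw = hashlib.sha256(path.read_bytes()).digest()
    return base64.urlsafe_b64encode(raw).rstrip(b"=").decode("ascii")


# Assumed location of the file after unpacking the 1.6.14 wheel locally.
expected = "ENoeph2R-6hTwDBnYv-6HGbXQQBxCuxRZdUsEOyJGbk"  # value from the RECORD diff above
print(record_digest(Path("dkist_processing_ops/_version.py")) == expected)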