dkist-processing-ops 1.6.15__py3-none-any.whl → 1.6.16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dkist-processing-ops might be problematic. See the advisory on the package registry page for more details.

@@ -12,5 +12,5 @@ __version__: str
12
12
  __version_tuple__: VERSION_TUPLE
13
13
  version_tuple: VERSION_TUPLE
14
14
 
15
- __version__ = version = '1.6.15'
16
- __version_tuple__ = version_tuple = (1, 6, 15)
15
+ __version__ = version = '1.6.16'
16
+ __version_tuple__ = version_tuple = (1, 6, 16)
@@ -1,6 +1,5 @@
1
1
  import gc
2
2
  import logging
3
- import tracemalloc
4
3
  from abc import ABC
5
4
  from pathlib import Path
6
5
 
@@ -12,19 +11,24 @@ from dkist_processing_common.models.tags import Tag
12
11
  from dkist_processing_common.tasks import WorkflowTaskBase
13
12
 
14
13
 
15
- def generated_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
16
- data = np.random.rand(4096, 4096)
17
- hdu = fits.CompImageHDU(data)
18
- return hdu
14
+ # def generated_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
15
+ # data = np.random.rand(4096, 4096)
16
+ # hdu = fits.CompImageHDU(data)
17
+ # return hdu
19
18
 
20
19
 
21
- def fits_garbage_collect_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
22
- hdul = fits.open(path)
23
- hdu = fits.CompImageHDU(header=hdul[1].header, data=hdul[1].data)
24
- hdul.close()
25
- del hdul
26
- gc.collect()
27
- return hdu
20
+ def fits_bytes_decoder(path: Path) -> bytes:
21
+ with open(path, "rb") as f:
22
+ return f.read()
23
+
24
+
25
+ # def fits_garbage_collect_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
26
+ # hdul = fits.open(path)
27
+ # hdu = fits.CompImageHDU(header=hdul[1].header, data=hdul[1].data)
28
+ # hdul.close()
29
+ # del hdul
30
+ # gc.collect()
31
+ # return hdu
28
32
 
29
33
 
30
34
  class FitsDataRead(WorkflowTaskBase, ABC):
@@ -33,42 +37,41 @@ class FitsDataRead(WorkflowTaskBase, ABC):
33
37
  return self.metadata_store_recipe_run_configuration().get("run_type", "file_read")
34
38
 
35
39
  def run(self) -> None:
36
- tracemalloc.start()
37
- if self.run_type == "garbage_collect_read":
38
- hdus = self.read(
39
- tags=[Tag.input(), Tag.frame()], decoder=fits_garbage_collect_hdu_decoder
40
- )
41
- for i, hdu in enumerate(hdus):
42
- h = hdu.header
43
- d = hdu.data
44
- if i % 50 == 0:
45
- cur, peak = tracemalloc.get_traced_memory()
46
- logging.info(
47
- f"After {i} files current memory usage is {cur / 10 ** 6}MB; Peak was {peak / 10 ** 6}MB"
48
- )
49
-
50
- if self.run_type == "garbage_collect_task":
51
- filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
52
- for i, filepath in enumerate(filepaths):
53
- hdu = fits_garbage_collect_hdu_decoder(filepath)
54
- h = hdu.header
55
- d = hdu.data
56
- if i % 50 == 0:
57
- cur, peak = tracemalloc.get_traced_memory()
58
- logging.info(
59
- f"After {i} files current memory usage is {cur / 10 ** 6}MB; Peak was {peak / 10 ** 6}MB"
60
- )
40
+ if self.run_type == "bytes_read":
41
+ bytes_objects = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_bytes_decoder)
42
+ for i, byte_object in enumerate(bytes_objects):
43
+ pass
61
44
 
62
45
  if self.run_type == "file_read":
63
46
  hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_hdu_decoder)
64
47
  for i, hdu in enumerate(hdus):
65
48
  h = hdu.header
66
49
  d = hdu.data
67
- if i % 50 == 0:
68
- cur, peak = tracemalloc.get_traced_memory()
69
- logging.info(
70
- f"After {i} files current memory usage is {cur / 10 ** 6}MB; Peak was {peak / 10 ** 6}MB"
71
- )
50
+
51
+ # if self.run_type == "garbage_collect_read":
52
+ # hdus = self.read(
53
+ # tags=[Tag.input(), Tag.frame()], decoder=fits_garbage_collect_hdu_decoder
54
+ # )
55
+ # for i, hdu in enumerate(hdus):
56
+ # h = hdu.header
57
+ # d = hdu.data
58
+ # if i % 50 == 0:
59
+ # cur, peak = tracemalloc.get_traced_memory()
60
+ # logging.info(
61
+ # f"After {i} files current memory usage is {cur / 10 ** 6}MB; Peak was {peak / 10 ** 6}MB"
62
+ # )
63
+ #
64
+ # if self.run_type == "garbage_collect_task":
65
+ # filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
66
+ # for i, filepath in enumerate(filepaths):
67
+ # hdu = fits_garbage_collect_hdu_decoder(filepath)
68
+ # h = hdu.header
69
+ # d = hdu.data
70
+ # if i % 50 == 0:
71
+ # cur, peak = tracemalloc.get_traced_memory()
72
+ # logging.info(
73
+ # f"After {i} files current memory usage is {cur / 10 ** 6}MB; Peak was {peak / 10 ** 6}MB"
74
+ # )
72
75
  #
73
76
  # if self.run_type == "generated_read":
74
77
  # hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=generated_hdu_decoder)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dkist-processing-ops
3
- Version: 1.6.15
3
+ Version: 1.6.16
4
4
  Summary: Automated Processing smoke test and operations workflows
5
5
  Author-email: NSO / AURA <dkistdc@nso.edu>
6
6
  License: BSD 3-Clause
@@ -1,16 +1,16 @@
1
1
  dkist_processing_ops/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
- dkist_processing_ops/_version.py,sha256=G6p4cF-LZDRxAX98giKjw_I7RSw9Ya1xCzihpDrYDsA,413
2
+ dkist_processing_ops/_version.py,sha256=Krs1A6GFKeSDM3HlmJuBHtdakqIKMggtBrGf0aTD85E,413
3
3
  dkist_processing_ops/dags/scale.py,sha256=We5TYjNhkJ-5ykfbrOMgjTpXdzOCkIeyKyA-40sU9r0,2312
4
4
  dkist_processing_ops/tasks/__init__.py,sha256=P81O9cg4dlBMqBTaWitdsAte68RsMtDlhV30JSZfXUY,107
5
- dkist_processing_ops/tasks/read_memory_leak.py,sha256=HQzzQLn_45EauauLCuBYeXZYav5KiXekgKhEzkbSwGM,3559
5
+ dkist_processing_ops/tasks/read_memory_leak.py,sha256=yQeWMjUm0YrgyRtC4XGuExAVDUgCjNetDB45M0jz4Ug,3649
6
6
  dkist_processing_ops/tasks/wait.py,sha256=uObka-nH1dKPcGBDsp3t2RCtTV2F1kksM0V-lRewFuY,273
7
7
  dkist_processing_ops/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
8
8
  dkist_processing_ops/tests/test_workflows.py,sha256=Ch_8BlGeQyPJU_9hB_GOncwW-SoZwpRUVKMOEz0RQZk,285
9
9
  dkist_processing_ops/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
10
  dkist_processing_ops/workflows/memory_leak.py,sha256=lYXAYyJVjXif3Os9xPDp-bPTG_je6HOw1uvRJ4WMUi4,758
11
11
  dkist_processing_ops/workflows/smoke.py,sha256=ofXu0_iYF6L3zQy-BOVvS5VdzKhmXs1gyugqMNkd-GM,878
12
- dkist_processing_ops-1.6.15.dist-info/LICENSE.rst,sha256=LJjTmkf2-q1phdZSySMpiyPxgLOy6zYHOr3R1Bb1__8,327
13
- dkist_processing_ops-1.6.15.dist-info/METADATA,sha256=Q1K-2rhN2vtaVdNEfFLb_UrzFw7p60mzgzgMZaOkCtA,1500
14
- dkist_processing_ops-1.6.15.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
15
- dkist_processing_ops-1.6.15.dist-info/top_level.txt,sha256=o_SNho1HKt6wvCSUhm9qzX9FS2iopnqYuMos1CCD9cI,21
16
- dkist_processing_ops-1.6.15.dist-info/RECORD,,
12
+ dkist_processing_ops-1.6.16.dist-info/LICENSE.rst,sha256=LJjTmkf2-q1phdZSySMpiyPxgLOy6zYHOr3R1Bb1__8,327
13
+ dkist_processing_ops-1.6.16.dist-info/METADATA,sha256=r2cyD9NBVw7w2D4O7RrOg4maweWbprBmSqMZKxEoYFw,1500
14
+ dkist_processing_ops-1.6.16.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
15
+ dkist_processing_ops-1.6.16.dist-info/top_level.txt,sha256=o_SNho1HKt6wvCSUhm9qzX9FS2iopnqYuMos1CCD9cI,21
16
+ dkist_processing_ops-1.6.16.dist-info/RECORD,,