dkist-processing-ops 1.6.17__tar.gz → 1.6.19__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dkist-processing-ops might be problematic.
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/PKG-INFO +2 -2
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/dkist_processing_ops/_version.py +2 -2
- dkist_processing_ops-1.6.19/dkist_processing_ops/tasks/read_memory_leak.py +47 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/dkist_processing_ops.egg-info/PKG-INFO +2 -2
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/dkist_processing_ops.egg-info/requires.txt +1 -1
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/pyproject.toml +1 -1
- dkist_processing_ops-1.6.17/dkist_processing_ops/tasks/read_memory_leak.py +0 -103
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/.gitignore +0 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/.pre-commit-config.yaml +0 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/LICENSE.rst +0 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/MANIFEST.in +0 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/README.rst +0 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/bitbucket-pipelines.yml +0 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/dkist_processing_ops/__init__.py +0 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/dkist_processing_ops/dags/scale.py +0 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/dkist_processing_ops/tasks/__init__.py +0 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/dkist_processing_ops/tasks/wait.py +0 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/dkist_processing_ops/tests/__init__.py +0 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/dkist_processing_ops/tests/test_workflows.py +0 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/dkist_processing_ops/workflows/__init__.py +0 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/dkist_processing_ops/workflows/memory_leak.py +0 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/dkist_processing_ops/workflows/smoke.py +0 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/dkist_processing_ops.egg-info/SOURCES.txt +0 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/dkist_processing_ops.egg-info/dependency_links.txt +0 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/dkist_processing_ops.egg-info/not-zip-safe +0 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/dkist_processing_ops.egg-info/top_level.txt +0 -0
- {dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/setup.cfg +0 -0
{dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/PKG-INFO
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dkist-processing-ops
-Version: 1.6.17
+Version: 1.6.19
 Summary: Automated Processing smoke test and operations workflows
 Author-email: NSO / AURA <dkistdc@nso.edu>
 License: BSD 3-Clause
@@ -11,7 +11,7 @@ Classifier: Programming Language :: Python :: 3.11
 Requires-Python: >=3.11
 Description-Content-Type: text/x-rst
 License-File: LICENSE.rst
-Requires-Dist: dkist-processing-common==10.
+Requires-Dist: dkist-processing-common==10.4.0
 Requires-Dist: dkist-service-configuration==2.3.0
 Provides-Extra: test
 Requires-Dist: pytest; extra == "test"
dkist_processing_ops-1.6.19/dkist_processing_ops/tasks/read_memory_leak.py
ADDED

@@ -0,0 +1,47 @@
+import gc
+import logging
+from abc import ABC
+from pathlib import Path
+
+import numpy as np
+from astropy.io import fits
+from dkist_processing_common.codecs.fits import fits_hdu_decoder
+from dkist_processing_common.codecs.path import path_decoder
+from dkist_processing_common.models.tags import Tag
+from dkist_processing_common.tasks import WorkflowTaskBase
+
+
+def fits_bytes_decoder(path: Path) -> bytes:
+    with open(path, "rb") as f:
+        return f.read()
+
+
+class FitsDataRead(WorkflowTaskBase, ABC):
+    @property
+    def run_type(self):
+        return self.metadata_store_recipe_run_configuration().get("run_type", "file_read")
+
+    def run(self) -> None:
+        if self.run_type == "bytes_read":
+            bytes_objects = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_bytes_decoder)
+            for i, byte_object in enumerate(bytes_objects):
+                pass
+
+        if self.run_type == "bytes_task":
+            filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
+            for filepath in filepaths:
+                with open(filepath, "rb") as f:
+                    byte_object = f.read()
+
+        if self.run_type == "file_read":
+            hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_hdu_decoder)
+            for hdu in hdus:
+                h = hdu.header
+                d = hdu.data
+
+        if self.run_type == "file_task":
+            filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
+            for filepath in filepaths:
+                hdu = fits.open(filepath)[1]
+                h = hdu.header
+                d = hdu.data
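
For orientation, the new 1.6.19 module keeps only four read strategies, selected through the recipe-run configuration key run_type (bytes_read, bytes_task, file_read, file_task); each iterates the frames tagged as input and touches the header and data so that any per-frame memory growth becomes visible. The sketch below reproduces the file_task-style loop outside the workflow framework, with standard-library tracemalloc reporting similar to the logging in the removed 1.6.17 file; the directory path and glob pattern are hypothetical placeholders, not part of the package.

import tracemalloc
from pathlib import Path

from astropy.io import fits

tracemalloc.start()

# Hypothetical local directory of FITS frames to read in sequence.
frame_paths = sorted(Path("/tmp/frames").glob("*.fits"))
for i, filepath in enumerate(frame_paths):
    with fits.open(filepath) as hdul:
        h = hdul[1].header  # force the header to be parsed
        d = hdul[1].data  # force the (possibly compressed) data to be loaded
    if i % 50 == 0:
        cur, peak = tracemalloc.get_traced_memory()
        print(f"After {i} files: current {cur / 10**6:.1f} MB, peak {peak / 10**6:.1f} MB")

tracemalloc.stop()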
{dkist_processing_ops-1.6.17 → dkist_processing_ops-1.6.19}/dkist_processing_ops.egg-info/PKG-INFO
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dkist-processing-ops
-Version: 1.6.17
+Version: 1.6.19
 Summary: Automated Processing smoke test and operations workflows
 Author-email: NSO / AURA <dkistdc@nso.edu>
 License: BSD 3-Clause
@@ -11,7 +11,7 @@ Classifier: Programming Language :: Python :: 3.11
 Requires-Python: >=3.11
 Description-Content-Type: text/x-rst
 License-File: LICENSE.rst
-Requires-Dist: dkist-processing-common==10.
+Requires-Dist: dkist-processing-common==10.4.0
 Requires-Dist: dkist-service-configuration==2.3.0
 Provides-Extra: test
 Requires-Dist: pytest; extra == "test"
dkist_processing_ops-1.6.17/dkist_processing_ops/tasks/read_memory_leak.py
REMOVED

@@ -1,103 +0,0 @@
-import gc
-import logging
-from abc import ABC
-from pathlib import Path
-
-import numpy as np
-from astropy.io import fits
-from dkist_processing_common.codecs.fits import fits_hdu_decoder
-from dkist_processing_common.codecs.path import path_decoder
-from dkist_processing_common.models.tags import Tag
-from dkist_processing_common.tasks import WorkflowTaskBase
-
-
-# def generated_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
-#     data = np.random.rand(4096, 4096)
-#     hdu = fits.CompImageHDU(data)
-#     return hdu
-
-
-def fits_bytes_decoder(path: Path) -> bytes:
-    with open(path, "rb") as f:
-        return f.read()
-
-
-# def fits_garbage_collect_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
-#     hdul = fits.open(path)
-#     hdu = fits.CompImageHDU(header=hdul[1].header, data=hdul[1].data)
-#     hdul.close()
-#     del hdul
-#     gc.collect()
-#     return hdu
-
-
-class FitsDataRead(WorkflowTaskBase, ABC):
-    @property
-    def run_type(self):
-        return self.metadata_store_recipe_run_configuration().get("run_type", "file_read")
-
-    def run(self) -> None:
-        if self.run_type == "bytes_read":
-            bytes_objects = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_bytes_decoder)
-            for i, byte_object in enumerate(bytes_objects):
-                pass
-
-        if self.run_type == "bytes_task":
-            filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
-            for filepath in filepaths:
-                with open(filepath, "rb") as f:
-                    byte_object = f.read()
-
-        if self.run_type == "file_read":
-            hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=fits_hdu_decoder)
-            for hdu in hdus:
-                h = hdu.header
-                d = hdu.data
-
-        if self.run_type == "file_task":
-            filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
-            for filepath in filepaths:
-                hdu = fits.open(filepath)[1]
-                h = hdu.header
-                d = hdu.data
-
-        # if self.run_type == "garbage_collect_read":
-        #     hdus = self.read(
-        #         tags=[Tag.input(), Tag.frame()], decoder=fits_garbage_collect_hdu_decoder
-        #     )
-        #     for i, hdu in enumerate(hdus):
-        #         h = hdu.header
-        #         d = hdu.data
-        #         if i % 50 == 0:
-        #             cur, peak = tracemalloc.get_traced_memory()
-        #             logging.info(
-        #                 f"After {i} files current memory usage is {cur / 10 ** 6}MB; Peak was {peak / 10 ** 6}MB"
-        #             )
-        #
-        # if self.run_type == "garbage_collect_task":
-        #     filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
-        #     for i, filepath in enumerate(filepaths):
-        #         hdu = fits_garbage_collect_hdu_decoder(filepath)
-        #         h = hdu.header
-        #         d = hdu.data
-        #         if i % 50 == 0:
-        #             cur, peak = tracemalloc.get_traced_memory()
-        #             logging.info(
-        #                 f"After {i} files current memory usage is {cur / 10 ** 6}MB; Peak was {peak / 10 ** 6}MB"
-        #             )
-        #
-        # if self.run_type == "generated_read":
-        #     hdus = self.read(tags=[Tag.input(), Tag.frame()], decoder=generated_hdu_decoder)
-        #     for hdu in hdus:
-        #         h = hdu.header
-        #         d = hdu.data
-        #
-
-        #
-        # if self.run_type == "generated_task":
-        #     filepaths = self.read(tags=[Tag.input(), Tag.frame()], decoder=path_decoder)
-        #     for filepath in filepaths:
-        #         data = np.random.rand(4096, 4096)
-        #         hdu = fits.CompImageHDU(data)
-        #         h = hdu.header
-        #         d = hdu.data
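
The removed 1.6.17 module above also carried commented-out experiments (a decoder that generates random 4096x4096 CompImageHDUs, and a decoder that copies the HDU out of the HDUList and forces garbage collection) with tracemalloc-based memory logging. Purely as an illustrative sketch, a self-contained transcription of that commented garbage-collecting decoder might look as follows, assuming the image of interest lives in HDU index 1:

import gc
from pathlib import Path

from astropy.io import fits


def fits_garbage_collect_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
    # Copy the header and data out of the HDUList, then close it and collect,
    # so no open-file reference survives from one frame to the next.
    hdul = fits.open(path)
    hdu = fits.CompImageHDU(header=hdul[1].header, data=hdul[1].data)
    hdul.close()
    del hdul
    gc.collect()
    return hdu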
All remaining files listed above (from .gitignore through setup.cfg) were renamed from dkist_processing_ops-1.6.17 to dkist_processing_ops-1.6.19 without content changes.