setiastrosuitepro 1.6.4-py3-none-any.whl → 1.6.12-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of setiastrosuitepro might be problematic.
- setiastro/images/abeicon.svg +16 -0
- setiastro/images/acv_icon.png +0 -0
- setiastro/images/colorwheel.svg +97 -0
- setiastro/images/cosmic.svg +40 -0
- setiastro/images/cosmicsat.svg +24 -0
- setiastro/images/first_quarter.png +0 -0
- setiastro/images/full_moon.png +0 -0
- setiastro/images/graxpert.svg +19 -0
- setiastro/images/last_quarter.png +0 -0
- setiastro/images/linearfit.svg +32 -0
- setiastro/images/new_moon.png +0 -0
- setiastro/images/pixelmath.svg +42 -0
- setiastro/images/waning_crescent_1.png +0 -0
- setiastro/images/waning_crescent_2.png +0 -0
- setiastro/images/waning_crescent_3.png +0 -0
- setiastro/images/waning_crescent_4.png +0 -0
- setiastro/images/waning_crescent_5.png +0 -0
- setiastro/images/waning_gibbous_1.png +0 -0
- setiastro/images/waning_gibbous_2.png +0 -0
- setiastro/images/waning_gibbous_3.png +0 -0
- setiastro/images/waning_gibbous_4.png +0 -0
- setiastro/images/waning_gibbous_5.png +0 -0
- setiastro/images/waxing_crescent_1.png +0 -0
- setiastro/images/waxing_crescent_2.png +0 -0
- setiastro/images/waxing_crescent_3.png +0 -0
- setiastro/images/waxing_crescent_4.png +0 -0
- setiastro/images/waxing_crescent_5.png +0 -0
- setiastro/images/waxing_gibbous_1.png +0 -0
- setiastro/images/waxing_gibbous_2.png +0 -0
- setiastro/images/waxing_gibbous_3.png +0 -0
- setiastro/images/waxing_gibbous_4.png +0 -0
- setiastro/images/waxing_gibbous_5.png +0 -0
- setiastro/qml/ResourceMonitor.qml +84 -82
- setiastro/saspro/__main__.py +20 -1
- setiastro/saspro/_generated/build_info.py +2 -2
- setiastro/saspro/abe.py +37 -4
- setiastro/saspro/aberration_ai.py +237 -21
- setiastro/saspro/acv_exporter.py +379 -0
- setiastro/saspro/add_stars.py +33 -6
- setiastro/saspro/backgroundneutral.py +108 -40
- setiastro/saspro/blemish_blaster.py +4 -1
- setiastro/saspro/blink_comparator_pro.py +74 -24
- setiastro/saspro/clahe.py +4 -1
- setiastro/saspro/continuum_subtract.py +4 -1
- setiastro/saspro/convo.py +13 -7
- setiastro/saspro/cosmicclarity.py +129 -18
- setiastro/saspro/crop_dialog_pro.py +123 -7
- setiastro/saspro/curve_editor_pro.py +109 -42
- setiastro/saspro/doc_manager.py +245 -15
- setiastro/saspro/exoplanet_detector.py +120 -28
- setiastro/saspro/frequency_separation.py +1158 -204
- setiastro/saspro/ghs_dialog_pro.py +81 -16
- setiastro/saspro/graxpert.py +1 -0
- setiastro/saspro/gui/main_window.py +429 -228
- setiastro/saspro/gui/mixins/dock_mixin.py +245 -24
- setiastro/saspro/gui/mixins/menu_mixin.py +27 -1
- setiastro/saspro/gui/mixins/theme_mixin.py +160 -14
- setiastro/saspro/gui/mixins/toolbar_mixin.py +384 -18
- setiastro/saspro/gui/mixins/update_mixin.py +138 -36
- setiastro/saspro/gui/mixins/view_mixin.py +42 -0
- setiastro/saspro/halobgon.py +4 -0
- setiastro/saspro/histogram.py +5 -1
- setiastro/saspro/image_combine.py +4 -0
- setiastro/saspro/image_peeker_pro.py +4 -0
- setiastro/saspro/imageops/starbasedwhitebalance.py +23 -52
- setiastro/saspro/imageops/stretch.py +582 -62
- setiastro/saspro/isophote.py +4 -0
- setiastro/saspro/layers.py +13 -9
- setiastro/saspro/layers_dock.py +183 -3
- setiastro/saspro/legacy/image_manager.py +154 -20
- setiastro/saspro/legacy/numba_utils.py +67 -47
- setiastro/saspro/legacy/xisf.py +240 -98
- setiastro/saspro/live_stacking.py +180 -79
- setiastro/saspro/luminancerecombine.py +228 -27
- setiastro/saspro/mask_creation.py +174 -15
- setiastro/saspro/mfdeconv.py +113 -35
- setiastro/saspro/mfdeconvcudnn.py +119 -70
- setiastro/saspro/mfdeconvsport.py +112 -35
- setiastro/saspro/morphology.py +4 -0
- setiastro/saspro/multiscale_decomp.py +51 -12
- setiastro/saspro/numba_utils.py +72 -57
- setiastro/saspro/ops/commands.py +18 -18
- setiastro/saspro/ops/script_editor.py +10 -2
- setiastro/saspro/ops/scripts.py +122 -0
- setiastro/saspro/perfect_palette_picker.py +37 -3
- setiastro/saspro/plate_solver.py +84 -49
- setiastro/saspro/psf_viewer.py +119 -37
- setiastro/saspro/resources.py +67 -0
- setiastro/saspro/rgbalign.py +4 -0
- setiastro/saspro/selective_color.py +4 -1
- setiastro/saspro/sfcc.py +364 -152
- setiastro/saspro/shortcuts.py +160 -29
- setiastro/saspro/signature_insert.py +692 -33
- setiastro/saspro/stacking_suite.py +1331 -484
- setiastro/saspro/star_alignment.py +247 -123
- setiastro/saspro/star_spikes.py +4 -0
- setiastro/saspro/star_stretch.py +38 -3
- setiastro/saspro/stat_stretch.py +743 -128
- setiastro/saspro/subwindow.py +786 -360
- setiastro/saspro/supernovaasteroidhunter.py +1 -1
- setiastro/saspro/wavescale_hdr.py +4 -1
- setiastro/saspro/wavescalede.py +4 -1
- setiastro/saspro/whitebalance.py +84 -12
- setiastro/saspro/widgets/common_utilities.py +28 -21
- setiastro/saspro/widgets/resource_monitor.py +109 -59
- setiastro/saspro/widgets/spinboxes.py +10 -13
- setiastro/saspro/wimi.py +27 -656
- setiastro/saspro/wims.py +13 -3
- setiastro/saspro/xisf.py +101 -11
- {setiastrosuitepro-1.6.4.dist-info → setiastrosuitepro-1.6.12.dist-info}/METADATA +2 -1
- {setiastrosuitepro-1.6.4.dist-info → setiastrosuitepro-1.6.12.dist-info}/RECORD +115 -82
- {setiastrosuitepro-1.6.4.dist-info → setiastrosuitepro-1.6.12.dist-info}/WHEEL +0 -0
- {setiastrosuitepro-1.6.4.dist-info → setiastrosuitepro-1.6.12.dist-info}/entry_points.txt +0 -0
- {setiastrosuitepro-1.6.4.dist-info → setiastrosuitepro-1.6.12.dist-info}/licenses/LICENSE +0 -0
- {setiastrosuitepro-1.6.4.dist-info → setiastrosuitepro-1.6.12.dist-info}/licenses/license.txt +0 -0
setiastro/saspro/legacy/xisf.py
CHANGED
@@ -1,4 +1,3 @@
-#legacy.xisf.py
 # coding: utf-8
 
 """
@@ -35,7 +34,14 @@ import sys
 from datetime import datetime
 import ast
 
-__version__ = "1.0.
+__version__ = "1.0.1"
+
+def _is_attached_or_inline_property(p_dict):
+    return "location" in p_dict  # location implies inline/embedded/attachment
+
+def _make_lazy(p_dict):
+    p_dict["_lazy"] = True
+    return p_dict
 
 class XISF:
     """Implements an baseline XISF Decoder and a simple baseline Encoder.
@@ -68,7 +74,7 @@ class XISF:
 
     Usage example:
     ```
-    from xisf import XISF
+    from setiastro.saspro.xisf import XISF
     import matplotlib.pyplot as plt
     xisf = XISF("file.xisf")
     file_meta = xisf.get_file_metadata()
@@ -88,7 +94,7 @@ class XISF:
     If the file is not huge and it contains only an image (or you're interested just in one of the
     images inside the file), there is a convenience method for reading the data and the metadata:
     ```
-    from xisf import XISF
+    from setiastro.saspro.xisf import XISF
     import matplotlib.pyplot as plt
     im_data = XISF.read("file.xisf")
     plt.imshow(im_data)
@@ -341,7 +347,7 @@ class XISF:
     def _read_embedded_data_block(elem):
         assert elem["location"][0] == "embedded"
         data_elem = ET.fromstring(elem["value"])
-        encoding, data =
+        encoding, data = data_elem.attrib["encoding"], data_elem.text
         return XISF._decode_inline_or_embedded_data(encoding, data, elem)
 
     @staticmethod
@@ -732,18 +738,23 @@ class XISF:
                 tp_str = p_dict.get("value", "")
                 if tp_str:
                     # Handle XISF TimePoint format: ISO 8601 with optional timezone
+                    # Examples: "2023-01-15T10:30:00Z", "2023-01-15T10:30:00.123456"
                     tp_str = tp_str.replace("Z", "+00:00")
                     if "." in tp_str and "+" not in tp_str.split(".")[-1] and "-" not in tp_str.split(".")[-1]:
+                        # Add UTC timezone if missing after fractional seconds
                         tp_str += "+00:00"
                     p_dict["datetime"] = datetime.fromisoformat(tp_str)
             except (ValueError, TypeError):
+                # Keep original string value if parsing fails
                 p_dict["datetime"] = None
         elif p_dict["type"] == "String":
-            p_dict["value"] = p_et.text
+            # NOTE: currently does: p_dict["value"] = p_et.text; then if location -> read block now
+            p_dict["value"] = p_et.text  # may be None
             if "location" in p_dict:
-                # Process location and compression attributes to find data block
                 self._process_location_compression(p_dict)
-
+                # LAZY: do NOT read block here
+                return _make_lazy(p_dict)
+            return p_dict
         elif p_dict["type"] == "Boolean":
             # Boolean valid values are "true" and "false"
             p_dict["value"] = p_dict["value"] == "true"
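Note on the change above: a block-backed String property is no longer decoded while the header is parsed; it is only flagged for later resolution. Roughly, the dictionary that comes back looks like this (a sketch; the property id, offset and size are placeholders, not real file data):

```python
# Illustrative shape of a block-backed String property under the new lazy scheme.
prop = {
    "id": "Observation:Description",           # placeholder id
    "type": "String",
    "value": None,                              # text not decoded yet
    "location": ("attachment", 123456, 789),    # (kind, byte offset, block size)
    "_lazy": True,                              # set by _make_lazy()
}
# The payload is read and decoded only when resolve_property(prop) is called.
```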
@@ -755,24 +766,105 @@ class XISF:
             p_dict["length"] = int(p_dict["length"])
             p_dict["dtype"] = self._parse_vector_dtype(p_dict["type"])
             self._process_location_compression(p_dict)
-
-
+            # LAZY: do NOT read block here
+            return _make_lazy(p_dict)
+
         elif "Matrix" in p_dict["type"]:
             p_dict["value"] = p_et.text
             p_dict["rows"] = int(p_dict["rows"])
             p_dict["columns"] = int(p_dict["columns"])
-            length = p_dict["rows"] * p_dict["columns"]
             p_dict["dtype"] = self._parse_vector_dtype(p_dict["type"])
             self._process_location_compression(p_dict)
-
-
-            p_dict["value"] = p_dict["value"].reshape((p_dict["rows"], p_dict["columns"]))
+            # LAZY: do NOT read block here
+            return _make_lazy(p_dict)
         else:
             print(f"Unsupported Property type {p_dict['type']}: {p_et}")
             p_dict = False
 
         return p_dict
 
+    def resolve_property(self, p_dict):
+        """
+        Resolve a lazy property (String/Vector/Matrix with a data block).
+        Mutates p_dict in place and returns decoded 'value'.
+        """
+        if not p_dict.get("_lazy"):
+            return p_dict.get("value")
+
+        raw = self._read_data_block(p_dict)
+
+        t = p_dict["type"]
+        if t == "String":
+            val = raw.decode("utf-8")
+        elif "Vector" in t:
+            val = np.frombuffer(raw, dtype=p_dict["dtype"], count=p_dict["length"])
+        elif "Matrix" in t:
+            length = p_dict["rows"] * p_dict["columns"]
+            val = np.frombuffer(raw, dtype=p_dict["dtype"], count=length).reshape((p_dict["rows"], p_dict["columns"]))
+        else:
+            # if something else ever gets marked lazy
+            val = raw
+
+        p_dict["value"] = val
+        p_dict["_lazy"] = False
+        return val
+
+    def can_partial_read_image(self, n=0):
+        meta = self._images_meta[n]
+        if meta["location"][0] != "attachment":
+            return False
+        if "compression" in meta:
+            return False
+        return True
+
+    def read_image_roi(self, n=0, x0=0, y0=0, x1=None, y1=None, channels=None, data_format="channels_last"):
+        meta = self._images_meta[n]
+        if meta["location"][0] != "attachment":
+            raise NotImplementedError("ROI read only supported for attachment blocks")
+        if "compression" in meta:
+            raise NotImplementedError("ROI read not supported for compressed image blocks")
+
+        w, h, chc = meta["geometry"]
+        dtype = meta["dtype"]
+        itemsize = dtype.itemsize
+
+        if x1 is None: x1 = w
+        if y1 is None: y1 = h
+        x0 = max(0, min(w, x0)); x1 = max(0, min(w, x1))
+        y0 = max(0, min(h, y0)); y1 = max(0, min(h, y1))
+        if x1 <= x0 or y1 <= y0:
+            raise ValueError("Empty ROI")
+
+        if channels is None:
+            channels = list(range(chc))
+        else:
+            channels = list(channels)
+
+        _, pos, _size = meta["location"]
+        roi_w = x1 - x0
+        roi_h = y1 - y0
+
+        out = np.empty((len(channels), roi_h, roi_w), dtype=dtype)
+
+        row_bytes = w * itemsize
+        roi_bytes = roi_w * itemsize
+        plane_bytes = h * row_bytes
+
+        with open(self._fname, "rb") as f:
+            for ci, c in enumerate(channels):
+                if c < 0 or c >= chc:
+                    raise IndexError(f"channel {c} out of range")
+                plane_base = pos + c * plane_bytes
+                for r, y in enumerate(range(y0, y1)):
+                    offset = plane_base + y * row_bytes + x0 * itemsize
+                    f.seek(offset)
+                    out[ci, r, :] = np.frombuffer(f.read(roi_bytes), dtype=dtype, count=roi_w)
+
+        if data_format == "channels_last":
+            return np.transpose(out, (1, 2, 0))
+        return out
+
+
     @staticmethod
     def _process_location_compression(p_dict):
         p_dict["location"] = XISF._parse_location(p_dict["location"])
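Taken together, the lazy properties and the new ROI readers in this hunk can be exercised roughly as follows. This is a minimal sketch: the file name and ROI coordinates are placeholders, and the metadata access path (get_images_metadata() and the "XISFProperties" key) is assumed to behave as in the upstream xisf package this module derives from.

```python
from setiastro.saspro.legacy.xisf import XISF

xisf = XISF("stack.xisf")  # placeholder file name

# Block-backed String/Vector/Matrix properties now come back marked "_lazy";
# resolve_property() reads and decodes the attached data block on demand.
im_meta = xisf.get_images_metadata()[0]
for prop in im_meta.get("XISFProperties", {}).values():
    if isinstance(prop, dict) and prop.get("_lazy"):
        value = xisf.resolve_property(prop)
        print(prop["id"], type(value))

# Uncompressed attachment images can be read region-by-region instead of whole.
if xisf.can_partial_read_image(0):
    roi = xisf.read_image_roi(0, x0=100, y0=100, x1=612, y1=612, channels=[0])
    print(roi.shape)  # (512, 512, 1) with the default channels_last layout
```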
@@ -781,109 +873,130 @@ class XISF:
 
     # Insert XISF properties in the XML tree
     @staticmethod
-    def _insert_property(parent, p_dict, max_inline_block_size):
-
+    def _insert_property(parent, p_dict, max_inline_block_size, codec=None, shuffle=False):
+        """Insert a property into the XML tree.
+
+        Args:
+            parent: Parent XML element
+            p_dict: Property dictionary with 'id', 'type', 'value', and optional 'format', 'comment'
+            max_inline_block_size: Maximum size for inline data blocks
+            codec: Compression codec (None, 'zlib', 'lz4', 'lz4hc', 'zstd')
+            shuffle: Enable byte shuffling for compression
+        """
         scalars = ["Int", "Byte", "Short", "Float", "Boolean", "TimePoint"]
+
+        # Build base attributes including optional format and comment
+        def _build_attrs(base_attrs):
+            attrs = dict(base_attrs)
+            if "format" in p_dict and p_dict["format"]:
+                attrs["format"] = str(p_dict["format"])
+            if "comment" in p_dict and p_dict["comment"]:
+                attrs["comment"] = str(p_dict["comment"])
+            return attrs
 
         if any(t in p_dict["type"] for t in scalars):
             # scalars and TimePoint
-
-            #
-
-
-
-
-
-
-
-            )
+            value_str = str(p_dict["value"])
+            # Boolean requires lowercase per XISF spec
+            if p_dict["type"] == "Boolean":
+                value_str = "true" if p_dict["value"] else "false"
+            attrs = _build_attrs({
+                "id": p_dict["id"],
+                "type": p_dict["type"],
+                "value": value_str,
+            })
+            ET.SubElement(parent, "Property", attrs)
         elif p_dict["type"] == "String":
             text = str(p_dict["value"])
-
+            data_bytes = text.encode("utf-8")
+            sz = len(data_bytes)
             if sz > max_inline_block_size:
-                # Attach string as data block
-
-
-
-
-
-
-
-
-
-
-
+                # Attach string as data block with optional compression
+                attrs = _build_attrs({
+                    "id": p_dict["id"],
+                    "type": p_dict["type"],
+                })
+                if codec:
+                    compressed, comp_str = XISF._compress_data_block(data_bytes, codec, shuffle, 1)
+                    attrs["location"] = XISF._to_location(("attachment", "", len(compressed)))
+                    attrs["compression"] = comp_str
+                    xml = ET.SubElement(parent, "Property", attrs)
+                    return {"xml": xml, "location": 0, "size": len(compressed), "data": compressed}
+                else:
+                    attrs["location"] = XISF._to_location(("attachment", "", sz))
+                    xml = ET.SubElement(parent, "Property", attrs)
+                    return {"xml": xml, "location": 0, "size": sz, "data": data_bytes}
             else:
                 # string directly as child (no 'location' attribute)
-
-
-                    "
-
-
-                    "type": p_dict["type"],
-                },
-                ).text = text
+                attrs = _build_attrs({
+                    "id": p_dict["id"],
+                    "type": p_dict["type"],
+                })
+                ET.SubElement(parent, "Property", attrs).text = text
         elif "Vector" in p_dict["type"]:
-            # TODO ignores compression
             data = p_dict["value"]
-
+            raw_bytes = data.tobytes()
+            sz = len(raw_bytes)
+            item_size = data.itemsize
             if sz > max_inline_block_size:
-                # Attach vector as data block
-
-
-                    "
-
-
-
-
-
-
-
+                # Attach vector as data block with optional compression
+                attrs = _build_attrs({
+                    "id": p_dict["id"],
+                    "type": p_dict["type"],
+                    "length": str(data.size),
+                })
+                if codec:
+                    compressed, comp_str = XISF._compress_data_block(raw_bytes, codec, shuffle, item_size)
+                    attrs["location"] = XISF._to_location(("attachment", "", len(compressed)))
+                    attrs["compression"] = comp_str
+                    xml = ET.SubElement(parent, "Property", attrs)
+                    return {"xml": xml, "location": 0, "size": len(compressed), "data": compressed}
+                else:
+                    attrs["location"] = XISF._to_location(("attachment", "", sz))
+                    xml = ET.SubElement(parent, "Property", attrs)
+                    return {"xml": xml, "location": 0, "size": sz, "data": data}
             else:
                 # Inline data block (assuming base64)
-
-
-                    "
-
-
-
-
-                    "location": XISF._to_location(("inline", "base64")),
-                },
-                ).text = str(base64.b64encode(data.tobytes()), "ascii")
+                attrs = _build_attrs({
+                    "id": p_dict["id"],
+                    "type": p_dict["type"],
+                    "length": str(data.size),
+                    "location": XISF._to_location(("inline", "base64")),
+                })
+                ET.SubElement(parent, "Property", attrs).text = str(base64.b64encode(data.tobytes()), "ascii")
         elif "Matrix" in p_dict["type"]:
-            # TODO ignores compression
             data = p_dict["value"]
-
+            raw_bytes = data.tobytes()
+            sz = len(raw_bytes)
+            item_size = data.itemsize
             if sz > max_inline_block_size:
-                # Attach
-
-
-                    "
-
-
-
-
-
-
-
-
-
+                # Attach matrix as data block with optional compression
+                attrs = _build_attrs({
+                    "id": p_dict["id"],
+                    "type": p_dict["type"],
+                    "rows": str(data.shape[0]),
+                    "columns": str(data.shape[1]),
+                })
+                if codec:
+                    compressed, comp_str = XISF._compress_data_block(raw_bytes, codec, shuffle, item_size)
+                    attrs["location"] = XISF._to_location(("attachment", "", len(compressed)))
+                    attrs["compression"] = comp_str
+                    xml = ET.SubElement(parent, "Property", attrs)
+                    return {"xml": xml, "location": 0, "size": len(compressed), "data": compressed}
+                else:
+                    attrs["location"] = XISF._to_location(("attachment", "", sz))
+                    xml = ET.SubElement(parent, "Property", attrs)
+                    return {"xml": xml, "location": 0, "size": sz, "data": data}
             else:
                 # Inline data block (assuming base64)
-
-
-                    "
-
-
-
-
-
-                    "location": XISF._to_location(("inline", "base64")),
-                },
-                ).text = str(base64.b64encode(data.tobytes()), "ascii")
+                attrs = _build_attrs({
+                    "id": p_dict["id"],
+                    "type": p_dict["type"],
+                    "rows": str(data.shape[0]),
+                    "columns": str(data.shape[1]),
+                    "location": XISF._to_location(("inline", "base64")),
+                })
+                ET.SubElement(parent, "Property", attrs).text = str(base64.b64encode(data.tobytes()), "ascii")
         else:
             print(f"Warning: skipping unsupported property {p_dict}")
 
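_insert_property is internal, but its new return contract for large block-backed properties is visible in the hunk above; the sketch below only illustrates that contract (the property id and sizes are hypothetical, and it assumes the zstd codec is available to _compress):

```python
import xml.etree.ElementTree as ET
import numpy as np
from setiastro.saspro.legacy.xisf import XISF

parent = ET.Element("Image")
prop = {
    "id": "Instrument:Sensor:Readouts",  # hypothetical property id
    "type": "F32Vector",
    "value": np.linspace(0.0, 1.0, 100_000, dtype=np.float32),  # ~400 kB raw
}

# With a codec, an oversized property becomes a compressed attachment and the
# helper returns a descriptor ({"xml", "location", "size", "data"}) that the
# encoder can use to write the payload and patch the final byte offset.
blk = XISF._insert_property(parent, prop, max_inline_block_size=3000,
                            codec="zstd", shuffle=True)
print(blk["size"], len(blk["data"]))  # compressed size and the bytes to attach
```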
@@ -1047,6 +1160,35 @@ class XISF:
 
         return data
 
+    @staticmethod
+    def _compress_data_block(data, codec, shuffle=False, itemsize=1):
+        """Compress a data block and return (compressed_bytes, compression_attr_string).
+
+        Args:
+            data: bytes or numpy array to compress
+            codec: 'zlib', 'lz4', 'lz4hc', or 'zstd'
+            shuffle: enable byte shuffling
+            itemsize: item size for byte shuffling (1 for strings, dtype.itemsize for arrays)
+
+        Returns:
+            tuple: (compressed_bytes, compression_attribute_string)
+        """
+        if hasattr(data, 'tobytes'):
+            raw_bytes = data.tobytes()
+        else:
+            raw_bytes = bytes(data)
+
+        uncompressed_size = len(raw_bytes)
+        compressed = XISF._compress(raw_bytes, codec, shuffle=shuffle, itemsize=itemsize if shuffle else None)
+
+        # Build compression attribute string: "codec:uncompressed_size" or "codec+sh:uncompressed_size:itemsize"
+        if shuffle and itemsize > 1:
+            comp_str = f"{codec}+sh:{uncompressed_size}:{itemsize}"
+        else:
+            comp_str = f"{codec}:{uncompressed_size}"
+
+        return compressed, comp_str
+
     # LZ4/zlib/zstd compression
     @staticmethod
     def _compress(data, codec, level=None, shuffle=False, itemsize=None):