setiastrosuitepro-1.6.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of setiastrosuitepro might be problematic.

Files changed (174)
  1. setiastro/__init__.py +2 -0
  2. setiastro/saspro/__init__.py +20 -0
  3. setiastro/saspro/__main__.py +784 -0
  4. setiastro/saspro/_generated/__init__.py +7 -0
  5. setiastro/saspro/_generated/build_info.py +2 -0
  6. setiastro/saspro/abe.py +1295 -0
  7. setiastro/saspro/abe_preset.py +196 -0
  8. setiastro/saspro/aberration_ai.py +694 -0
  9. setiastro/saspro/aberration_ai_preset.py +224 -0
  10. setiastro/saspro/accel_installer.py +218 -0
  11. setiastro/saspro/accel_workers.py +30 -0
  12. setiastro/saspro/add_stars.py +621 -0
  13. setiastro/saspro/astrobin_exporter.py +1007 -0
  14. setiastro/saspro/astrospike.py +153 -0
  15. setiastro/saspro/astrospike_python.py +1839 -0
  16. setiastro/saspro/autostretch.py +196 -0
  17. setiastro/saspro/backgroundneutral.py +560 -0
  18. setiastro/saspro/batch_convert.py +325 -0
  19. setiastro/saspro/batch_renamer.py +519 -0
  20. setiastro/saspro/blemish_blaster.py +488 -0
  21. setiastro/saspro/blink_comparator_pro.py +2923 -0
  22. setiastro/saspro/bundles.py +61 -0
  23. setiastro/saspro/bundles_dock.py +114 -0
  24. setiastro/saspro/cheat_sheet.py +168 -0
  25. setiastro/saspro/clahe.py +342 -0
  26. setiastro/saspro/comet_stacking.py +1377 -0
  27. setiastro/saspro/config.py +38 -0
  28. setiastro/saspro/config_bootstrap.py +40 -0
  29. setiastro/saspro/config_manager.py +316 -0
  30. setiastro/saspro/continuum_subtract.py +1617 -0
  31. setiastro/saspro/convo.py +1397 -0
  32. setiastro/saspro/convo_preset.py +414 -0
  33. setiastro/saspro/copyastro.py +187 -0
  34. setiastro/saspro/cosmicclarity.py +1564 -0
  35. setiastro/saspro/cosmicclarity_preset.py +407 -0
  36. setiastro/saspro/crop_dialog_pro.py +948 -0
  37. setiastro/saspro/crop_preset.py +189 -0
  38. setiastro/saspro/curve_editor_pro.py +2544 -0
  39. setiastro/saspro/curves_preset.py +375 -0
  40. setiastro/saspro/debayer.py +670 -0
  41. setiastro/saspro/debug_utils.py +29 -0
  42. setiastro/saspro/dnd_mime.py +35 -0
  43. setiastro/saspro/doc_manager.py +2634 -0
  44. setiastro/saspro/exoplanet_detector.py +2166 -0
  45. setiastro/saspro/file_utils.py +284 -0
  46. setiastro/saspro/fitsmodifier.py +744 -0
  47. setiastro/saspro/free_torch_memory.py +48 -0
  48. setiastro/saspro/frequency_separation.py +1343 -0
  49. setiastro/saspro/function_bundle.py +1594 -0
  50. setiastro/saspro/ghs_dialog_pro.py +660 -0
  51. setiastro/saspro/ghs_preset.py +284 -0
  52. setiastro/saspro/graxpert.py +634 -0
  53. setiastro/saspro/graxpert_preset.py +287 -0
  54. setiastro/saspro/gui/__init__.py +0 -0
  55. setiastro/saspro/gui/main_window.py +8494 -0
  56. setiastro/saspro/gui/mixins/__init__.py +33 -0
  57. setiastro/saspro/gui/mixins/dock_mixin.py +263 -0
  58. setiastro/saspro/gui/mixins/file_mixin.py +445 -0
  59. setiastro/saspro/gui/mixins/geometry_mixin.py +403 -0
  60. setiastro/saspro/gui/mixins/header_mixin.py +441 -0
  61. setiastro/saspro/gui/mixins/mask_mixin.py +421 -0
  62. setiastro/saspro/gui/mixins/menu_mixin.py +361 -0
  63. setiastro/saspro/gui/mixins/theme_mixin.py +367 -0
  64. setiastro/saspro/gui/mixins/toolbar_mixin.py +1324 -0
  65. setiastro/saspro/gui/mixins/update_mixin.py +309 -0
  66. setiastro/saspro/gui/mixins/view_mixin.py +435 -0
  67. setiastro/saspro/halobgon.py +462 -0
  68. setiastro/saspro/header_viewer.py +445 -0
  69. setiastro/saspro/headless_utils.py +88 -0
  70. setiastro/saspro/histogram.py +753 -0
  71. setiastro/saspro/history_explorer.py +939 -0
  72. setiastro/saspro/image_combine.py +414 -0
  73. setiastro/saspro/image_peeker_pro.py +1596 -0
  74. setiastro/saspro/imageops/__init__.py +37 -0
  75. setiastro/saspro/imageops/mdi_snap.py +292 -0
  76. setiastro/saspro/imageops/scnr.py +36 -0
  77. setiastro/saspro/imageops/starbasedwhitebalance.py +210 -0
  78. setiastro/saspro/imageops/stretch.py +244 -0
  79. setiastro/saspro/isophote.py +1179 -0
  80. setiastro/saspro/layers.py +208 -0
  81. setiastro/saspro/layers_dock.py +714 -0
  82. setiastro/saspro/lazy_imports.py +193 -0
  83. setiastro/saspro/legacy/__init__.py +2 -0
  84. setiastro/saspro/legacy/image_manager.py +2226 -0
  85. setiastro/saspro/legacy/numba_utils.py +3659 -0
  86. setiastro/saspro/legacy/xisf.py +1071 -0
  87. setiastro/saspro/linear_fit.py +534 -0
  88. setiastro/saspro/live_stacking.py +1830 -0
  89. setiastro/saspro/log_bus.py +5 -0
  90. setiastro/saspro/logging_config.py +460 -0
  91. setiastro/saspro/luminancerecombine.py +309 -0
  92. setiastro/saspro/main_helpers.py +201 -0
  93. setiastro/saspro/mask_creation.py +928 -0
  94. setiastro/saspro/masks_core.py +56 -0
  95. setiastro/saspro/mdi_widgets.py +353 -0
  96. setiastro/saspro/memory_utils.py +666 -0
  97. setiastro/saspro/metadata_patcher.py +75 -0
  98. setiastro/saspro/mfdeconv.py +3826 -0
  99. setiastro/saspro/mfdeconv_earlystop.py +71 -0
  100. setiastro/saspro/mfdeconvcudnn.py +3263 -0
  101. setiastro/saspro/mfdeconvsport.py +2382 -0
  102. setiastro/saspro/minorbodycatalog.py +567 -0
  103. setiastro/saspro/morphology.py +382 -0
  104. setiastro/saspro/multiscale_decomp.py +1290 -0
  105. setiastro/saspro/nbtorgb_stars.py +531 -0
  106. setiastro/saspro/numba_utils.py +3044 -0
  107. setiastro/saspro/numba_warmup.py +141 -0
  108. setiastro/saspro/ops/__init__.py +9 -0
  109. setiastro/saspro/ops/command_help_dialog.py +623 -0
  110. setiastro/saspro/ops/command_runner.py +217 -0
  111. setiastro/saspro/ops/commands.py +1594 -0
  112. setiastro/saspro/ops/script_editor.py +1102 -0
  113. setiastro/saspro/ops/scripts.py +1413 -0
  114. setiastro/saspro/ops/settings.py +560 -0
  115. setiastro/saspro/parallel_utils.py +554 -0
  116. setiastro/saspro/pedestal.py +121 -0
  117. setiastro/saspro/perfect_palette_picker.py +1053 -0
  118. setiastro/saspro/pipeline.py +110 -0
  119. setiastro/saspro/pixelmath.py +1600 -0
  120. setiastro/saspro/plate_solver.py +2435 -0
  121. setiastro/saspro/project_io.py +797 -0
  122. setiastro/saspro/psf_utils.py +136 -0
  123. setiastro/saspro/psf_viewer.py +549 -0
  124. setiastro/saspro/pyi_rthook_astroquery.py +95 -0
  125. setiastro/saspro/remove_green.py +314 -0
  126. setiastro/saspro/remove_stars.py +1625 -0
  127. setiastro/saspro/remove_stars_preset.py +404 -0
  128. setiastro/saspro/resources.py +472 -0
  129. setiastro/saspro/rgb_combination.py +207 -0
  130. setiastro/saspro/rgb_extract.py +19 -0
  131. setiastro/saspro/rgbalign.py +723 -0
  132. setiastro/saspro/runtime_imports.py +7 -0
  133. setiastro/saspro/runtime_torch.py +754 -0
  134. setiastro/saspro/save_options.py +72 -0
  135. setiastro/saspro/selective_color.py +1552 -0
  136. setiastro/saspro/sfcc.py +1425 -0
  137. setiastro/saspro/shortcuts.py +2807 -0
  138. setiastro/saspro/signature_insert.py +1099 -0
  139. setiastro/saspro/stacking_suite.py +17712 -0
  140. setiastro/saspro/star_alignment.py +7420 -0
  141. setiastro/saspro/star_alignment_preset.py +329 -0
  142. setiastro/saspro/star_metrics.py +49 -0
  143. setiastro/saspro/star_spikes.py +681 -0
  144. setiastro/saspro/star_stretch.py +470 -0
  145. setiastro/saspro/stat_stretch.py +502 -0
  146. setiastro/saspro/status_log_dock.py +78 -0
  147. setiastro/saspro/subwindow.py +3267 -0
  148. setiastro/saspro/supernovaasteroidhunter.py +1712 -0
  149. setiastro/saspro/swap_manager.py +99 -0
  150. setiastro/saspro/torch_backend.py +89 -0
  151. setiastro/saspro/torch_rejection.py +434 -0
  152. setiastro/saspro/view_bundle.py +1555 -0
  153. setiastro/saspro/wavescale_hdr.py +624 -0
  154. setiastro/saspro/wavescale_hdr_preset.py +100 -0
  155. setiastro/saspro/wavescalede.py +657 -0
  156. setiastro/saspro/wavescalede_preset.py +228 -0
  157. setiastro/saspro/wcs_update.py +374 -0
  158. setiastro/saspro/whitebalance.py +456 -0
  159. setiastro/saspro/widgets/__init__.py +48 -0
  160. setiastro/saspro/widgets/common_utilities.py +305 -0
  161. setiastro/saspro/widgets/graphics_views.py +122 -0
  162. setiastro/saspro/widgets/image_utils.py +518 -0
  163. setiastro/saspro/widgets/preview_dialogs.py +280 -0
  164. setiastro/saspro/widgets/spinboxes.py +275 -0
  165. setiastro/saspro/widgets/themed_buttons.py +13 -0
  166. setiastro/saspro/widgets/wavelet_utils.py +299 -0
  167. setiastro/saspro/window_shelf.py +185 -0
  168. setiastro/saspro/xisf.py +1123 -0
  169. setiastrosuitepro-1.6.0.dist-info/METADATA +266 -0
  170. setiastrosuitepro-1.6.0.dist-info/RECORD +174 -0
  171. setiastrosuitepro-1.6.0.dist-info/WHEEL +4 -0
  172. setiastrosuitepro-1.6.0.dist-info/entry_points.txt +6 -0
  173. setiastrosuitepro-1.6.0.dist-info/licenses/LICENSE +674 -0
  174. setiastrosuitepro-1.6.0.dist-info/licenses/license.txt +2580 -0
setiastro/saspro/xisf.py
@@ -0,0 +1,1123 @@
1
+ # coding: utf-8
2
+
3
+ """
4
+ XISF Encoder/Decoder (see https://pixinsight.com/xisf/).
5
+
6
+ This implementation is not endorsed nor related with PixInsight development team.
7
+
8
+ Copyright (C) 2021-2023 Sergio Díaz, sergiodiaz.eu
9
+
10
+ This program is free software: you can redistribute it and/or modify it
11
+ under the terms of the GNU General Public License as published by the
12
+ Free Software Foundation, version 3 of the License.
13
+
14
+ This program is distributed in the hope that it will be useful, but WITHOUT
15
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16
+ FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
17
+ more details.
18
+
19
+ You should have received a copy of the GNU General Public License along with
20
+ this program. If not, see <http://www.gnu.org/licenses/>.
21
+ """
22
+
23
+ from importlib.metadata import version
24
+
25
+
26
+ import platform
27
+ import xml.etree.ElementTree as ET
28
+ import numpy as np
29
+ import lz4.block # https://python-lz4.readthedocs.io/en/stable/lz4.block.html
30
+ import zlib # https://docs.python.org/3/library/zlib.html
31
+ import zstandard # https://python-zstandard.readthedocs.io/en/stable/
32
+ import base64
33
+ import sys
34
+ from datetime import datetime
35
+ import ast
36
+
37
+ __version__ = "1.0.0"
38
+
39
+ class XISF:
40
+ """Implements an baseline XISF Decoder and a simple baseline Encoder.
41
+ It parses metadata from Image and Metadata XISF core elements. Image data is returned as a numpy ndarray
42
+ (using the "channels-last" convention by default).
43
+
44
+ What's supported:
45
+ - Monolithic XISF files only
46
+ - XISF data blocks with attachment, inline or embedded block locations
47
+ - Planar pixel storage models, *however it assumes 2D images only* (with multiple channels)
48
+ - UInt8/16/32 and Float32/64 pixel sample formats
49
+ - Grayscale and RGB color spaces
50
+ - Decoding:
51
+ - multiple Image core elements from a monolithic XISF file
52
+ - Support all standard compression codecs defined in this specification for decompression
53
+ (zlib/lz4[hc]/zstd + byte shuffling)
54
+ - Encoding:
55
+ - Single image core element with an attached data block
56
+ - Support all standard compression codecs defined in this specification for compression
57
+ (zlib/lz4[hc]/zstd + byte shuffling)
58
+ - "Atomic" properties (scalar types, String, TimePoint), Vector and Matrix (e.g. astrometric
59
+ solutions)
60
+ - Metadata and FITSKeyword core elements
61
+
62
+ What's not supported (at least by now):
63
+ - Read pixel data in the normal pixel storage models
64
+ - Read pixel data in the planar pixel storage models other than 2D images
65
+ - Complex and Table properties
66
+ - Any other not explicitly supported core elements (Resolution, Thumbnail, ICCProfile, etc.)
67
+
68
+ Usage example:
69
+ ```
70
+ from setiastro.saspro.xisf import XISF
71
+ import matplotlib.pyplot as plt
72
+ xisf = XISF("file.xisf")
73
+ file_meta = xisf.get_file_metadata()
74
+ file_meta
75
+ ims_meta = xisf.get_images_metadata()
76
+ ims_meta
77
+ im_data = xisf.read_image(0)
78
+ plt.imshow(im_data)
79
+ plt.show()
80
+ XISF.write(
81
+ "output.xisf", im_data,
82
+ creator_app="My script v1.0", image_metadata=ims_meta[0], xisf_metadata=file_meta,
83
+ codec='lz4hc', shuffle=True
84
+ )
85
+ ```
86
+
87
+ If the file is not huge and it contains only one image (or you're interested just in one of the
88
+ images inside the file), there is a convenience method for reading the data and the metadata:
89
+ ```
90
+ from setiastro.saspro.xisf import XISF
91
+ import matplotlib.pyplot as plt
92
+ im_data = XISF.read("file.xisf")
93
+ plt.imshow(im_data)
94
+ plt.show()
95
+ ```
96
+
97
+ The XISF format specification is available at https://pixinsight.com/doc/docs/XISF-1.0-spec/XISF-1.0-spec.html
98
+ """
99
+
100
+ # Static attributes
101
+ _creator_app = f"Python {platform.python_version()}"
102
+ _creator_module = f"XISF Python Module v{__version__} github.com/sergio-dr/xisf"
103
+ _signature = b"XISF0100" # Monolithic
104
+ _headerlength_len = 4
105
+ _reserved_len = 4
106
+ _xml_ns = {"xisf": "http://www.pixinsight.com/xisf"}
107
+ _xisf_attrs = {
108
+ "xmlns": "http://www.pixinsight.com/xisf",
109
+ "xmlns:xsi": "http://www.w3.org/2001/XMLSchema-instance",
110
+ "version": "1.0",
111
+ "xsi:schemaLocation": "http://www.pixinsight.com/xisf http://pixinsight.com/xisf/xisf-1.0.xsd",
112
+ }
113
+ _compression_def_level = {
114
+ "zlib": 6, # 1..9, default: 6 as indicated in https://docs.python.org/3/library/zlib.html
115
+ "lz4": 0, # no other values, as indicated in https://python-lz4.readthedocs.io/en/stable/lz4.block.html
116
+ "lz4hc": 9, # 1..12, (4-9 recommended), default: 9 as indicated in https://python-lz4.readthedocs.io/en/stable/lz4.block.html
117
+ "zstd": 3, # 1..22, (3-9 recommended), default: 3 as indicated in https://facebook.github.io/zstd/zstd_manual.html
118
+ }
119
+ _block_alignment_size = 4096
120
+ _max_inline_block_size = 3072
121
+
122
+ def __init__(self, fname):
123
+ """Opens a XISF file and extract its metadata. To get the metadata and the images, see get_file_metadata(),
124
+ get_images_metadata() and read_image().
125
+ Args:
126
+ fname: filename
127
+
128
+ Returns:
129
+ XISF object.
130
+ """
131
+ self._fname = fname
132
+ self._headerlength = None
133
+ self._xisf_header = None
134
+ self._xisf_header_xml = None
135
+ self._images_meta = None
136
+ self._file_meta = None
137
+ ET.register_namespace("", self._xml_ns["xisf"])
138
+
139
+ self._read()
140
+
141
+ def _read(self):
142
+ with open(self._fname, "rb") as f:
143
+ # Check XISF signature
144
+ signature = f.read(len(self._signature))
145
+ if signature != self._signature:
146
+ raise ValueError("File doesn't have XISF signature")
147
+
148
+ # Get header length
149
+ self._headerlength = int.from_bytes(f.read(self._headerlength_len), byteorder="little")
150
+ # Equivalent:
151
+ # self._headerlength = np.fromfile(f, dtype=np.uint32, count=1)[0]
152
+
153
+ # Skip reserved field
154
+ _ = f.read(self._reserved_len)
155
+
156
+ # Get XISF (XML) Header
157
+ self._xisf_header = f.read(self._headerlength)
158
+ self._xisf_header_xml = ET.fromstring(self._xisf_header)
159
+ self._analyze_header()
160
+
161
+ def _analyze_header(self):
162
+ # Analyze header to get Data Blocks position and length
163
+ self._images_meta = []
164
+ for image in self._xisf_header_xml.findall("xisf:Image", self._xml_ns):
165
+ image_basic_meta = image.attrib
166
+
167
+ # Parse and replace geometry and location with tuples,
168
+ # parses and translates sampleFormat to numpy dtypes,
169
+ # and extend with metadata from children entities (FITSKeywords, XISFProperties)
170
+
171
+ # The same FITS keyword can appear multiple times, so we have to
172
+ # prepare a dict of lists. Each element in the list is a dict
173
+ # that hold the value and the comment associated with the keyword.
174
+ # Not as clear as I would like.
175
+ fits_keywords = {}
176
+ for a in image.findall("xisf:FITSKeyword", self._xml_ns):
177
+ fits_keywords.setdefault(a.attrib["name"], []).append(
178
+ {
179
+ "value": a.attrib["value"].strip("'").strip(" "),
180
+ "comment": a.attrib["comment"],
181
+ }
182
+ )
183
+
184
+ image_extended_meta = {
185
+ "geometry": self._parse_geometry(image.attrib["geometry"]),
186
+ "location": self._parse_location(image.attrib["location"]),
187
+ "dtype": self._parse_sampleFormat(image.attrib["sampleFormat"]),
188
+ "FITSKeywords": fits_keywords,
189
+ "XISFProperties": {
190
+ p.attrib["id"]: prop
191
+ for p in image.findall("xisf:Property", self._xml_ns)
192
+ if (prop := self._process_property(p))
193
+ },
194
+ }
195
+ # Also parses compression attribute if present, converting it to a tuple
196
+ if "compression" in image.attrib:
197
+ image_extended_meta["compression"] = self._parse_compression(
198
+ image.attrib["compression"]
199
+ )
200
+
201
+ # Merge basic and extended metadata in a dict
202
+ image_meta = {**image_basic_meta, **image_extended_meta}
203
+
204
+ # Append the image metadata to the list
205
+ self._images_meta.append(image_meta)
206
+
207
+ # Analyze header for file metadata
208
+ self._file_meta = {}
209
+ for p in self._xisf_header_xml.find("xisf:Metadata", self._xml_ns):
210
+ self._file_meta[p.attrib["id"]] = self._process_property(p)
211
+
212
+ # Parse additional XISF core elements: Resolution, ICCProfile, Thumbnail
213
+ self._parse_resolution_elements()
214
+ self._parse_icc_profiles()
215
+ self._parse_thumbnails()
216
+
217
+ def _parse_resolution_elements(self):
218
+ """Parse Resolution core elements and attach to image metadata."""
219
+ for i, image in enumerate(self._xisf_header_xml.findall("xisf:Image", self._xml_ns)):
220
+ res_elem = image.find("xisf:Resolution", self._xml_ns)
221
+ if res_elem is not None:
222
+ try:
223
+ res_data = {
224
+ "horizontal": float(res_elem.attrib.get("horizontal", 72.0)),
225
+ "vertical": float(res_elem.attrib.get("vertical", 72.0)),
226
+ "unit": res_elem.attrib.get("unit", "inch"), # "inch" or "cm"
227
+ }
228
+ if i < len(self._images_meta):
229
+ self._images_meta[i]["Resolution"] = res_data
230
+ except (ValueError, KeyError):
231
+ pass
232
+
233
+ def _parse_icc_profiles(self):
234
+ """Parse ICCProfile core elements and attach to image metadata."""
235
+ for i, image in enumerate(self._xisf_header_xml.findall("xisf:Image", self._xml_ns)):
236
+ icc_elem = image.find("xisf:ICCProfile", self._xml_ns)
237
+ if icc_elem is not None:
238
+ try:
239
+ icc_data = {"present": True}
240
+ if "location" in icc_elem.attrib:
241
+ loc = self._parse_location(icc_elem.attrib["location"])
242
+ icc_data["location"] = loc
243
+ # Read ICC profile binary data
244
+ if loc[0] == "attachment" and len(loc) >= 3:
245
+ icc_data["size"] = loc[2]
246
+ if i < len(self._images_meta):
247
+ self._images_meta[i]["ICCProfile"] = icc_data
248
+ except (ValueError, KeyError):
249
+ pass
250
+
251
+ def _parse_thumbnails(self):
252
+ """Parse Thumbnail core elements and attach to image metadata."""
253
+ for i, image in enumerate(self._xisf_header_xml.findall("xisf:Image", self._xml_ns)):
254
+ thumb_elem = image.find("xisf:Thumbnail", self._xml_ns)
255
+ if thumb_elem is not None:
256
+ try:
257
+ thumb_data = {
258
+ "present": True,
259
+ "geometry": self._parse_geometry(thumb_elem.attrib.get("geometry", "0:0:0")),
260
+ }
261
+ if "location" in thumb_elem.attrib:
262
+ thumb_data["location"] = self._parse_location(thumb_elem.attrib["location"])
263
+ if "sampleFormat" in thumb_elem.attrib:
264
+ thumb_data["dtype"] = self._parse_sampleFormat(thumb_elem.attrib["sampleFormat"])
265
+ if "colorSpace" in thumb_elem.attrib:
266
+ thumb_data["colorSpace"] = thumb_elem.attrib["colorSpace"]
267
+ if i < len(self._images_meta):
268
+ self._images_meta[i]["Thumbnail"] = thumb_data
269
+ except (ValueError, KeyError):
270
+ pass
271
+
272
+ def get_images_metadata(self):
273
+ """Provides the metadata of all image blocks contained in the XISF File, extracted from
274
+ the header (<Image> core elements). To get the actual image data, see read_image().
275
+
276
+ It outputs a dictionary m_i for each image, with the following structure:
277
+ ```
278
+ m_i = {
279
+ 'geometry': (width, height, channels), # only 2D images (with multiple channels) are supported
280
+ 'location': (pos, size), # used internally in read_image()
281
+ 'dtype': np.dtype('...'), # derived from sampleFormat argument
282
+ 'compression': (codec, uncompressed_size, item_size), # optional
283
+ 'key': 'value', # other <Image> attributes are simply copied
284
+ ...,
285
+ 'FITSKeywords': { <fits_keyword>: fits_keyword_values_list, ... },
286
+ 'XISFProperties': { <xisf_property_name>: property_dict, ... }
287
+ }
288
+
289
+ where:
290
+
291
+ fits_keyword_values_list = [ {'value': <value>, 'comment': <comment> }, ...]
292
+ property_dict = {'id': <xisf_property_name>, 'type': <xisf_type>, 'value': property_value, ...}
293
+ ```
294
+
295
+ Returns:
296
+ list [ m_0, m_1, ..., m_{n-1} ] where m_i is a dict as described above.
297
+
298
+ """
299
+ return self._images_meta
300
+
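As a quick illustration of the metadata structure documented above, here is a minimal sketch (not part of the packaged file); the file path and the EXPTIME keyword are placeholders:

```python
# Hypothetical usage sketch -- "file.xisf" and "EXPTIME" are placeholders.
from setiastro.saspro.xisf import XISF

m0 = XISF("file.xisf").get_images_metadata()[0]
print(m0["geometry"], m0["dtype"])          # (width, height, channels) and the numpy dtype
# Each FITS keyword maps to a list of {'value', 'comment'} dicts (keywords can repeat).
for entry in m0["FITSKeywords"].get("EXPTIME", []):
    print(entry["value"], entry["comment"])
```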
301
+ def get_file_metadata(self):
302
+ """Provides the metadata from the header of the XISF File (<Metadata> core elements).
303
+
304
+ Returns:
305
+ dictionary with one entry per property: { <xisf_property_name>: property_dict, ... }
306
+ where:
307
+ ```
308
+ property_dict = {'id': <xisf_property_name>, 'type': <xisf_type>, 'value': property_value, ...}
309
+ ```
310
+
311
+ """
312
+ return self._file_meta
313
+
314
+ def get_metadata_xml(self):
315
+ """Returns the complete XML header as a xml.etree.ElementTree.Element object.
316
+
317
+ Returns:
318
+ xml.etree.ElementTree.Element: complete XML XISF header
319
+ """
320
+ return self._xisf_header_xml
321
+
322
+ def _read_data_block(self, elem):
323
+ method = elem["location"][0]
324
+ if method == "inline":
325
+ return self._read_inline_data_block(elem)
326
+ elif method == "embedded":
327
+ return self._read_embedded_data_block(elem)
328
+ elif method == "attachment":
329
+ return self._read_attached_data_block(elem)
330
+ else:
331
+ raise NotImplementedError(f"Data block location type '{method}' not implemented: {elem}")
332
+
333
+ @staticmethod
334
+ def _read_inline_data_block(elem):
335
+ method, encoding = elem["location"]
336
+ assert method == "inline"
337
+ return XISF._decode_inline_or_embedded_data(encoding, elem["value"], elem)
338
+
339
+ @staticmethod
340
+ def _read_embedded_data_block(elem):
341
+ assert elem["location"][0] == "embedded"
342
+ data_elem = ET.fromstring(elem["value"])
343
+ encoding, data = data_elem.attrib["encoding"], data_elem.text
344
+ return XISF._decode_inline_or_embedded_data(encoding, data, elem)
345
+
346
+ @staticmethod
347
+ def _decode_inline_or_embedded_data(encoding, data, elem):
348
+ encodings = {"base64": base64.b64decode, "hex": base64.b16decode}
349
+ if encoding not in encodings:
350
+ raise NotImplementedError(
351
+ f"Data block encoding type '{encoding}' not implemented: {elem}"
352
+ )
353
+
354
+ data = encodings[encoding](data)
355
+ if "compression" in elem:
356
+ data = XISF._decompress(data, elem)
357
+
358
+ return data
359
+
360
+ def _read_attached_data_block(self, elem):
361
+ # Position and size of the Data Block containing the image data
362
+ method, pos, size = elem["location"]
363
+
364
+ assert method == "attachment"
365
+
366
+ with open(self._fname, "rb") as f:
367
+ f.seek(pos)
368
+ data = f.read(size)
369
+
370
+ if "compression" in elem:
371
+ data = XISF._decompress(data, elem)
372
+
373
+ return data
374
+
375
+ def read_image(self, n=0, data_format="channels_last"):
376
+ """Extracts an image from a XISF object.
377
+
378
+ Args:
379
+ n: index of the image to extract in the list returned by get_images_metadata()
380
+ data_format: channels axis can be 'channels_first' or 'channels_last' (as used in
381
+ keras/tensorflow, pyplot's imshow, etc.), 'channels_last' by default.
382
+
383
+ Returns:
384
+ Numpy ndarray with the image data, in the requested format (channels_first or channels_last).
385
+
386
+ """
387
+ try:
388
+ meta = self._images_meta[n]
389
+ except IndexError as e:
390
+ if self._xisf_header is None:
391
+ raise RuntimeError("No file loaded") from e
392
+ elif not self._images_meta:
393
+ raise ValueError("File does not contain image data") from e
394
+ else:
395
+ raise ValueError(
396
+ f"Requested image #{n}, valid range is [0..{len(self._images_meta) - 1}]"
397
+ ) from e
398
+
399
+ try:
400
+ # Assumes *two*-dimensional images (chc=channel count)
401
+ w, h, chc = meta["geometry"]
402
+ except ValueError as e:
403
+ raise NotImplementedError(
404
+ f"Assumed 2D channels (width, height, channels), found {meta['geometry']} geometry"
405
+ ) from e
406
+
407
+ data = self._read_data_block(meta)
408
+ im_data = np.frombuffer(data, dtype=meta["dtype"])
409
+ im_data = im_data.reshape((chc, h, w))
410
+ return np.transpose(im_data, (1, 2, 0)) if data_format == "channels_last" else im_data
411
+
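To make the two `data_format` layouts concrete, here is a minimal sketch (not part of the packaged file) that reads the same image both ways; the file path is a placeholder:

```python
# Hypothetical usage sketch -- "file.xisf" is a placeholder path.
from setiastro.saspro.xisf import XISF

xisf = XISF("file.xisf")
w, h, c = xisf.get_images_metadata()[0]["geometry"]        # geometry is (width, height, channels)

im_hwc = xisf.read_image(0)                                # default 'channels_last' -> (height, width, channels)
im_chw = xisf.read_image(0, data_format="channels_first")  # 'channels_first' -> (channels, height, width)
assert im_hwc.shape == (h, w, c) and im_chw.shape == (c, h, w)
```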
412
+ @staticmethod
413
+ def read(fname, n=0, image_metadata={}, xisf_metadata={}):
414
+ """Convenience method for reading a file containing a single image.
415
+
416
+ Args:
417
+ fname (string): filename
418
+ n (int, optional): index of the image to extract (in the list returned by get_images_metadata()). Defaults to 0.
419
+ image_metadata (dict, optional): dictionary that will be updated with the metadata of the image.
420
+ xisf_metadata (dict, optional): dictionary that will be updated with the metadata of the file.
421
+
422
+ Returns:
423
+ [np.ndarray]: Numpy ndarray with the image data, in the requested format (channels_first or channels_last).
424
+ """
425
+ xisf = XISF(fname)
426
+ xisf_metadata.update(xisf.get_file_metadata())
427
+ image_metadata.update(xisf.get_images_metadata()[n])
428
+ return xisf.read_image(n)
429
+
430
+ # if 'colorSpace' is not specified, im_data.shape[2] dictates if colorSpace is 'Gray' or 'RGB'
431
+ # For float sample formats, bounds="0:1" is assumed
432
+ @staticmethod
433
+ def write(
434
+ fname,
435
+ im_data,
436
+ creator_app=None,
437
+ image_metadata=None,
438
+ xisf_metadata=None,
439
+ codec=None,
440
+ shuffle=False,
441
+ level=None,
442
+ ):
443
+ """Writes an image (numpy array) to a XISF file. Compression may be requested but it only
444
+ will be used if it actually reduces the data size.
445
+
446
+ Args:
447
+ fname: filename (will overwrite if existing)
448
+ im_data: numpy ndarray with the image data
449
+ creator_app: string for XISF:CreatorApplication file property (defaults to the Python version if None provided)
450
+ image_metadata: dict with the same structure described for m_i in get_images_metadata().
451
+ Only 'FITSKeywords' and 'XISFProperties' keys are actually written, the rest are derived from im_data.
452
+ xisf_metadata: file metadata, dict with the same structure returned by get_file_metadata()
453
+ codec: compression codec ('zlib', 'lz4', 'lz4hc' or 'zstd'), or None to disable compression
454
+ shuffle: whether to apply byte-shuffling before compression (ignored if codec is None). Recommended
455
+ for 'lz4', 'lz4hc' and 'zstd' compression algorithms.
456
+ level: for zlib, 1..9 (default: 6); for lz4hc, 1..12 (default: 9); for zstd, 1..22 (default: 3).
457
+ Higher means more compression.
458
+ Returns:
459
+ bytes_written: the total number of bytes written into the output file.
460
+ codec: The codec actually used, i.e., None if compression did not reduce the data block size and
461
+ was therefore not applied.
462
+
463
+ """
464
+ if image_metadata is None:
465
+ image_metadata = {}
466
+
467
+ if xisf_metadata is None:
468
+ xisf_metadata = {}
469
+
470
+ # Data block alignment
471
+ blk_sz = xisf_metadata.get("XISF:BlockAlignmentSize", {"value": XISF._block_alignment_size})[
472
+ "value"
473
+ ]
474
+ # Maximum inline block size (larger will be attached instead)
475
+ max_inline_blk_sz = xisf_metadata.get(
476
+ "XISF:MaxInlineBlockSize", {"value": XISF._max_inline_block_size}
477
+ )["value"]
478
+
479
+ # Prepare basic image metadata
480
+ def _create_image_metadata(im_data, id):
481
+ image_attrs = {"id": id}
482
+ if im_data.shape[2] == 3 or im_data.shape[2] == 1:
483
+ data_format = "channels_last"
484
+ geometry = (im_data.shape[1], im_data.shape[0], im_data.shape[2])
485
+ channels = im_data.shape[2]
486
+ else:
487
+ data_format = "channels_first"
488
+ geometry = im_data.shape
489
+ channels = im_data.shape[0]
490
+ image_attrs["geometry"] = "%d:%d:%d" % geometry
491
+ image_attrs["colorSpace"] = "Gray" if channels == 1 else "RGB"
492
+ image_attrs["sampleFormat"] = XISF._get_sampleFormat(im_data.dtype)
493
+ if image_attrs["sampleFormat"].startswith("Float"):
494
+ image_attrs["bounds"] = "0:1" # Assumed
495
+ if sys.byteorder == "big" and image_attrs["sampleFormat"] != "UInt8":
496
+ image_attrs["byteOrder"] = "big"
497
+ return image_attrs, data_format
498
+
499
+ # Rearrange ndarray for data_format and serialize to bytes
500
+ def _prepare_image_data_block(im_data, data_format):
501
+ return np.transpose(im_data, (2, 0, 1)) if data_format == "channels_last" else im_data
502
+
503
+ # Serialize a data block, with optional compression (i.e., when codec is not None)
504
+ # Compression will be only applied if effectively reduces size
505
+ def _serialize_data_block(data, attr_dict, codec, level, shuffle):
506
+ data_block = data.tobytes()
507
+ uncompressed_size = data.nbytes
508
+ codec_str = codec
509
+
510
+ if codec is None:
511
+ data_size = uncompressed_size
512
+ else:
513
+ compressed_block = XISF._compress(data_block, codec, level, shuffle, data.itemsize)
514
+ compressed_size = len(compressed_block)
515
+
516
+ if compressed_size < uncompressed_size:
517
+ # The ideal situation, compressing actually reduces size
518
+ data_block, data_size = compressed_block, compressed_size
519
+
520
+ # Add 'compression' image attribute: (codec:uncompressed-size[:item-size])
521
+ if shuffle:
522
+ codec_str += "+sh"
523
+ attr_dict["compression"] = f"{codec_str}:{uncompressed_size}:{data.itemsize}"
524
+ else:
525
+ attr_dict["compression"] = f"{codec}:{uncompressed_size}"
526
+ else:
527
+ # If there's no gain in compressing, just discard the compressed block
528
+ # See https://pixinsight.com/forum.old/index.php?topic=10942.msg68043#msg68043
529
+ # (In fact, PixInsight will show garbage image data if the data block is
530
+ # compressed but the uncompressed size is smaller)
531
+ data_size = uncompressed_size
532
+ codec_str = None
533
+
534
+ return data_block, data_size, codec_str
535
+
536
+ # Overwrites/creates XISF metadata
537
+ def _update_xisf_metadata(creator_app, blk_sz, max_inline_blk_sz, codec, level):
538
+ # Create file metadata
539
+ xisf_metadata["XISF:CreationTime"] = {
540
+ "id": "XISF:CreationTime",
541
+ "type": "String",
542
+ "value": datetime.utcnow().isoformat(),
543
+ }
544
+ xisf_metadata["XISF:CreatorApplication"] = {
545
+ "id": "XISF:CreatorApplication",
546
+ "type": "String",
547
+ "value": creator_app if creator_app else XISF._creator_app,
548
+ }
549
+ xisf_metadata["XISF:CreatorModule"] = {
550
+ "id": "XISF:CreatorModule",
551
+ "type": "String",
552
+ "value": XISF._creator_module,
553
+ }
554
+ _OSes = {
555
+ "linux": "Linux",
556
+ "win32": "Windows",
557
+ "cygwin": "Windows",
558
+ "darwin": "macOS",
559
+ }
560
+ xisf_metadata["XISF:CreatorOS"] = {
561
+ "id": "XISF:CreatorOS",
562
+ "type": "String",
563
+ "value": _OSes[sys.platform],
564
+ }
565
+ xisf_metadata["XISF:BlockAlignmentSize"] = {
566
+ "id": "XISF:BlockAlignmentSize",
567
+ "type": "UInt16",
568
+ "value": blk_sz,
569
+ }
570
+ xisf_metadata["XISF:MaxInlineBlockSize"] = {
571
+ "id": "XISF:MaxInlineBlockSize",
572
+ "type": "UInt16",
573
+ "value": max_inline_blk_sz,
574
+ }
575
+ if codec is not None:
576
+ # Add XISF:CompressionCodecs and XISF:CompressionLevel to file metadata
577
+ xisf_metadata["XISF:CompressionCodecs"] = {
578
+ "id": "XISF:CompressionCodecs",
579
+ "type": "String",
580
+ "value": codec,
581
+ }
582
+ xisf_metadata["XISF:CompressionLevel"] = {
583
+ "id": "XISF:CompressionLevel",
584
+ "type": "Int",
585
+ "value": level if level else XISF._compression_def_level[codec],
586
+ }
587
+ else:
588
+ # Remove compression metadata if exists
589
+ try:
590
+ del xisf_metadata["XISF:CompressionCodecs"]
591
+ del xisf_metadata["XISF:CompressionLevel"]
592
+ except KeyError:
593
+ pass # Ignore if keys don't exist
594
+
595
+ def _compute_attached_positions(hdr_prov_sz, attached_blocks_locations):
596
+ # Computes aligned position nearest to the given one
597
+ _aligned_position = lambda pos: ((pos + blk_sz - 1) // blk_sz) * blk_sz
598
+
599
+ # Iterates data block positions until header size stabilizes
600
+ # (positions are represented as strings in the header so their
601
+ # values may impact header size, therefore changing data block
602
+ # positions in the file)
603
+ hdr_sz = hdr_prov_sz
604
+ prev_sum_len_positions = 0
605
+ while True:
606
+ # account for the size of the (provisional) header
607
+ pos = _aligned_position(hdr_sz)
608
+
609
+ # positions for data blocks of properties with attachment location
610
+ sum_len_positions = 0
611
+ for loc in attached_blocks_locations:
612
+ # Save the (possibly provisional) position
613
+ loc['position'] = pos
614
+ # Accumulate the size of the position string
615
+ sum_len_positions += len(str(pos))
616
+ # Fast forward position adding the size, honoring alignment
617
+ pos = _aligned_position(pos + loc['size'])
618
+
619
+ if sum_len_positions == prev_sum_len_positions:
620
+ break
621
+
622
+ prev_sum_len_positions = sum_len_positions
623
+ hdr_sz = hdr_prov_sz + sum_len_positions
624
+
625
+ # Update data blocks positions in XML Header
626
+ for b in attached_blocks_locations:
627
+ xml_elem, pos, sz = b["xml"], b["position"], b["size"]
628
+ xml_elem.attrib["location"] = XISF._to_location(("attachment", pos, sz))
629
+
630
+ # Zero padding (used for reserved fields and data block alignment)
631
+ def _zero_pad(length):
632
+ assert length >= 0
633
+ return (0).to_bytes(length, byteorder="little")
634
+
635
+ # __/ Prepare image and its metadata \__________
636
+ im_id = image_metadata.get("id", "image")
637
+ im_attrs, data_format = _create_image_metadata(im_data, im_id)
638
+ im_data = _prepare_image_data_block(im_data, data_format)
639
+ im_data_block, data_size, codec_str = _serialize_data_block(
640
+ im_data, im_attrs, codec, level, shuffle
641
+ )
642
+
643
+ # Assemble location attribute, *provisional* until we can compute the data block position
644
+ im_attrs["location"] = XISF._to_location(("attachment", "", data_size))
645
+
646
+ # __/ Build (provisional) XML Header \__________
647
+ # (for attached data blocks, the location is provisional)
648
+ # Convert metadata (dict) to XML Header
649
+ xisf_header_xml = ET.Element("xisf", XISF._xisf_attrs)
650
+
651
+ # Image
652
+ image_xml = ET.SubElement(xisf_header_xml, "Image", im_attrs)
653
+
654
+ # Image FITSKeywords
655
+ for kw_name, kw_values in image_metadata.get("FITSKeywords", {}).items():
656
+ XISF._insert_fitskeyword(image_xml, kw_name, kw_values)
657
+
658
+ # attached_blocks_locations will reference every element whose data block is to be attached
659
+ # = [{"xml": ElementTree, "position": int, "size": int, "data": ndarray or str}]
660
+ # (position key is actually a placeholder, it will be overwritten by
661
+ # _compute_attached_positions)
662
+ # The first element is the image (*provisional* location):
663
+ attached_blocks_locations = [
664
+ {
665
+ "xml": image_xml,
666
+ "position": 0,
667
+ "size": data_size,
668
+ "data": im_data_block,
669
+ }
670
+ ]
671
+
672
+ # Image XISFProperties
673
+ for p_dict in image_metadata.get("XISFProperties", {}).values():
674
+ if attached_block := XISF._insert_property(image_xml, p_dict, max_inline_blk_sz):
675
+ attached_blocks_locations.append(attached_block)
676
+
677
+ # File Metadata
678
+ metadata_xml = ET.SubElement(xisf_header_xml, "Metadata")
679
+ _update_xisf_metadata(creator_app, blk_sz, max_inline_blk_sz, codec, level)
680
+ for property_dict in xisf_metadata.values():
681
+ if attached_block := XISF._insert_property(
682
+ metadata_xml, property_dict, max_inline_blk_sz
683
+ ):
684
+ attached_blocks_locations.append(attached_block)
685
+
686
+ # Header provisional size (without attachment positions)
687
+ xisf_header = ET.tostring(xisf_header_xml, encoding="utf8")
688
+ header_provisional_sz = (
689
+ len(XISF._signature) + XISF._headerlength_len + len(xisf_header) + XISF._reserved_len
690
+ )
691
+
692
+ # Update location for every block in attached_blocks_locations
693
+ _compute_attached_positions(header_provisional_sz, attached_blocks_locations)
694
+
695
+ with open(fname, "wb") as f:
696
+ # Write XISF signature
697
+ f.write(XISF._signature)
698
+
699
+ xisf_header = ET.tostring(xisf_header_xml, encoding="utf8")
700
+ headerlength = len(xisf_header)
701
+ # Write header length
702
+ f.write(headerlength.to_bytes(XISF._headerlength_len, byteorder="little"))
703
+
704
+ # Write reserved field
705
+ reserved_field = _zero_pad(XISF._reserved_len)
706
+ f.write(reserved_field)
707
+
708
+ # Write header
709
+ f.write(xisf_header)
710
+
711
+ # Write data blocks
712
+ for b in attached_blocks_locations:
713
+ pos, data_block = b["position"], b["data"]
714
+ f.write(_zero_pad(pos - f.tell()))
715
+ assert f.tell() == pos
716
+ f.write(data_block)
717
+ bytes_written = f.tell()
718
+
719
+ return bytes_written, codec_str
720
+
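A minimal usage sketch for write() (not part of the packaged file), showing how the returned codec reveals whether compression was actually applied; the output path and the synthetic image are placeholders:

```python
# Hypothetical usage sketch -- write() falls back to an uncompressed block when
# compression does not reduce the data size, and then reports the codec as None.
import numpy as np
from setiastro.saspro.xisf import XISF

im = np.random.rand(256, 256, 3).astype(np.float32)   # synthetic channels-last RGB image in [0, 1)
bytes_written, used_codec = XISF.write(
    "output.xisf", im,
    creator_app="Example script",
    codec="lz4hc", shuffle=True,
)
print(bytes_written, used_codec)   # used_codec is e.g. 'lz4hc+sh', or None if compression did not help
```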
721
+ # __/ Auxiliary functions to handle XISF attributes \________
722
+
723
+ # Process property attributes and convert to dict
724
+ def _process_property(self, p_et):
725
+ p_dict = p_et.attrib.copy()
726
+
727
+ if p_dict["type"] == "TimePoint":
728
+ # Timepoint 'value' attribute already set (as str)
729
+ # Convert ISO 8601 string to datetime object
730
+ try:
731
+ tp_str = p_dict.get("value", "")
732
+ if tp_str:
733
+ # Handle XISF TimePoint format: ISO 8601 with optional timezone
734
+ # Examples: "2023-01-15T10:30:00Z", "2023-01-15T10:30:00.123456"
735
+ tp_str = tp_str.replace("Z", "+00:00")
736
+ if "." in tp_str and "+" not in tp_str.split(".")[-1] and "-" not in tp_str.split(".")[-1]:
737
+ # Add UTC timezone if missing after fractional seconds
738
+ tp_str += "+00:00"
739
+ p_dict["datetime"] = datetime.fromisoformat(tp_str)
740
+ except (ValueError, TypeError):
741
+ # Keep original string value if parsing fails
742
+ p_dict["datetime"] = None
743
+ elif p_dict["type"] == "String":
744
+ p_dict["value"] = p_et.text
745
+ if "location" in p_dict:
746
+ # Process location and compression attributes to find data block
747
+ self._process_location_compression(p_dict)
748
+ p_dict["value"] = self._read_data_block(p_dict).decode("utf-8")
749
+ elif p_dict["type"] == "Boolean":
750
+ # Boolean valid values are "true" and "false"
751
+ p_dict["value"] = p_dict["value"] == "true"
752
+ elif "value" in p_et.attrib:
753
+ # Scalars (Float64, UInt32, etc.) and Complex*
754
+ p_dict["value"] = ast.literal_eval(p_dict["value"])
755
+ elif "Vector" in p_dict["type"]:
756
+ p_dict["value"] = p_et.text
757
+ p_dict["length"] = int(p_dict["length"])
758
+ p_dict["dtype"] = self._parse_vector_dtype(p_dict["type"])
759
+ self._process_location_compression(p_dict)
760
+ raw_data = self._read_data_block(p_dict)
761
+ p_dict["value"] = np.frombuffer(raw_data, dtype=p_dict["dtype"], count=p_dict["length"])
762
+ elif "Matrix" in p_dict["type"]:
763
+ p_dict["value"] = p_et.text
764
+ p_dict["rows"] = int(p_dict["rows"])
765
+ p_dict["columns"] = int(p_dict["columns"])
766
+ length = p_dict["rows"] * p_dict["columns"]
767
+ p_dict["dtype"] = self._parse_vector_dtype(p_dict["type"])
768
+ self._process_location_compression(p_dict)
769
+ raw_data = self._read_data_block(p_dict)
770
+ p_dict["value"] = np.frombuffer(raw_data, dtype=p_dict["dtype"], count=length)
771
+ p_dict["value"] = p_dict["value"].reshape((p_dict["rows"], p_dict["columns"]))
772
+ else:
773
+ print(f"Unsupported Property type {p_dict['type']}: {p_et}")
774
+ p_dict = False
775
+
776
+ return p_dict
777
+
778
+ @staticmethod
779
+ def _process_location_compression(p_dict):
780
+ p_dict["location"] = XISF._parse_location(p_dict["location"])
781
+ if "compression" in p_dict:
782
+ p_dict["compression"] = XISF._parse_compression(p_dict["compression"])
783
+
784
+ # Insert XISF properties in the XML tree
785
+ @staticmethod
786
+ def _insert_property(parent, p_dict, max_inline_block_size, codec=None, shuffle=False):
787
+ """Insert a property into the XML tree.
788
+
789
+ Args:
790
+ parent: Parent XML element
791
+ p_dict: Property dictionary with 'id', 'type', 'value', and optional 'format', 'comment'
792
+ max_inline_block_size: Maximum size for inline data blocks
793
+ codec: Compression codec (None, 'zlib', 'lz4', 'lz4hc', 'zstd')
794
+ shuffle: Enable byte shuffling for compression
795
+ """
796
+ scalars = ["Int", "Byte", "Short", "Float", "Boolean", "TimePoint"]
797
+
798
+ # Build base attributes including optional format and comment
799
+ def _build_attrs(base_attrs):
800
+ attrs = dict(base_attrs)
801
+ if "format" in p_dict and p_dict["format"]:
802
+ attrs["format"] = str(p_dict["format"])
803
+ if "comment" in p_dict and p_dict["comment"]:
804
+ attrs["comment"] = str(p_dict["comment"])
805
+ return attrs
806
+
807
+ if any(t in p_dict["type"] for t in scalars):
808
+ # scalars and TimePoint
809
+ value_str = str(p_dict["value"])
810
+ # Boolean requires lowercase per XISF spec
811
+ if p_dict["type"] == "Boolean":
812
+ value_str = "true" if p_dict["value"] else "false"
813
+ attrs = _build_attrs({
814
+ "id": p_dict["id"],
815
+ "type": p_dict["type"],
816
+ "value": value_str,
817
+ })
818
+ ET.SubElement(parent, "Property", attrs)
819
+ elif p_dict["type"] == "String":
820
+ text = str(p_dict["value"])
821
+ data_bytes = text.encode("utf-8")
822
+ sz = len(data_bytes)
823
+ if sz > max_inline_block_size:
824
+ # Attach string as data block with optional compression
825
+ attrs = _build_attrs({
826
+ "id": p_dict["id"],
827
+ "type": p_dict["type"],
828
+ })
829
+ if codec:
830
+ compressed, comp_str = XISF._compress_data_block(data_bytes, codec, shuffle, 1)
831
+ attrs["location"] = XISF._to_location(("attachment", "", len(compressed)))
832
+ attrs["compression"] = comp_str
833
+ xml = ET.SubElement(parent, "Property", attrs)
834
+ return {"xml": xml, "location": 0, "size": len(compressed), "data": compressed}
835
+ else:
836
+ attrs["location"] = XISF._to_location(("attachment", "", sz))
837
+ xml = ET.SubElement(parent, "Property", attrs)
838
+ return {"xml": xml, "location": 0, "size": sz, "data": data_bytes}
839
+ else:
840
+ # string directly as child (no 'location' attribute)
841
+ attrs = _build_attrs({
842
+ "id": p_dict["id"],
843
+ "type": p_dict["type"],
844
+ })
845
+ ET.SubElement(parent, "Property", attrs).text = text
846
+ elif "Vector" in p_dict["type"]:
847
+ data = p_dict["value"]
848
+ raw_bytes = data.tobytes()
849
+ sz = len(raw_bytes)
850
+ item_size = data.itemsize
851
+ if sz > max_inline_block_size:
852
+ # Attach vector as data block with optional compression
853
+ attrs = _build_attrs({
854
+ "id": p_dict["id"],
855
+ "type": p_dict["type"],
856
+ "length": str(data.size),
857
+ })
858
+ if codec:
859
+ compressed, comp_str = XISF._compress_data_block(raw_bytes, codec, shuffle, item_size)
860
+ attrs["location"] = XISF._to_location(("attachment", "", len(compressed)))
861
+ attrs["compression"] = comp_str
862
+ xml = ET.SubElement(parent, "Property", attrs)
863
+ return {"xml": xml, "location": 0, "size": len(compressed), "data": compressed}
864
+ else:
865
+ attrs["location"] = XISF._to_location(("attachment", "", sz))
866
+ xml = ET.SubElement(parent, "Property", attrs)
867
+ return {"xml": xml, "location": 0, "size": sz, "data": data}
868
+ else:
869
+ # Inline data block (assuming base64)
870
+ attrs = _build_attrs({
871
+ "id": p_dict["id"],
872
+ "type": p_dict["type"],
873
+ "length": str(data.size),
874
+ "location": XISF._to_location(("inline", "base64")),
875
+ })
876
+ ET.SubElement(parent, "Property", attrs).text = str(base64.b64encode(data.tobytes()), "ascii")
877
+ elif "Matrix" in p_dict["type"]:
878
+ data = p_dict["value"]
879
+ raw_bytes = data.tobytes()
880
+ sz = len(raw_bytes)
881
+ item_size = data.itemsize
882
+ if sz > max_inline_block_size:
883
+ # Attach matrix as data block with optional compression
884
+ attrs = _build_attrs({
885
+ "id": p_dict["id"],
886
+ "type": p_dict["type"],
887
+ "rows": str(data.shape[0]),
888
+ "columns": str(data.shape[1]),
889
+ })
890
+ if codec:
891
+ compressed, comp_str = XISF._compress_data_block(raw_bytes, codec, shuffle, item_size)
892
+ attrs["location"] = XISF._to_location(("attachment", "", len(compressed)))
893
+ attrs["compression"] = comp_str
894
+ xml = ET.SubElement(parent, "Property", attrs)
895
+ return {"xml": xml, "location": 0, "size": len(compressed), "data": compressed}
896
+ else:
897
+ attrs["location"] = XISF._to_location(("attachment", "", sz))
898
+ xml = ET.SubElement(parent, "Property", attrs)
899
+ return {"xml": xml, "location": 0, "size": sz, "data": data}
900
+ else:
901
+ # Inline data block (assuming base64)
902
+ attrs = _build_attrs({
903
+ "id": p_dict["id"],
904
+ "type": p_dict["type"],
905
+ "rows": str(data.shape[0]),
906
+ "columns": str(data.shape[1]),
907
+ "location": XISF._to_location(("inline", "base64")),
908
+ })
909
+ ET.SubElement(parent, "Property", attrs).text = str(base64.b64encode(data.tobytes()), "ascii")
910
+ else:
911
+ print(f"Warning: skipping unsupported property {p_dict}")
912
+
913
+ return False
914
+
915
+ # Insert FITS Keywords in the XML tree
916
+ @staticmethod
917
+ def _insert_fitskeyword(image_xml, keyword_name, keyword_values):
918
+ for entry in keyword_values:
919
+ ET.SubElement(
920
+ image_xml,
921
+ "FITSKeyword",
922
+ {
923
+ "name": keyword_name,
924
+ "value": entry["value"],
925
+ "comment": entry["comment"],
926
+ },
927
+ )
928
+
929
+ # Returns image shape, e.g. (x, y, channels)
930
+ @staticmethod
931
+ def _parse_geometry(g):
932
+ return tuple(map(int, g.split(":")))
933
+
934
+ # Returns ("attachment", position, size), ("inline", encoding) or ("embedded")
935
+ @staticmethod
936
+ def _parse_location(l):
937
+ ll = l.split(":")
938
+ if ll[0] not in ["inline", "embedded", "attachment"]:
939
+ raise NotImplementedError(f"Data block location type '{ll[0]}' not implemented")
940
+ return (ll[0], int(ll[1]), int(ll[2])) if ll[0] == "attachment" else ll
941
+
942
+ # Serialize location tuple to string, as value for location attribute
943
+ @staticmethod
944
+ def _to_location(location_tuple):
945
+ return ":".join([str(e) for e in location_tuple])
946
+
947
+ # Returns (codec, uncompressed_size, item_size); item_size is None if not using byte shuffling
948
+ @staticmethod
949
+ def _parse_compression(c):
950
+ cl = c.split(":")
951
+ if len(cl) == 3:
952
+ # (codec+byteshuffling, uncompressed_size, shuffling_item_size)
953
+ return (cl[0], int(cl[1]), int(cl[2]))
954
+ else:
955
+ # (codec, uncompressed_size, None)
956
+ return (cl[0], int(cl[1]), None)
957
+
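For illustration only (these are private helpers of the class above), a short sketch of how XISF compression attribute strings map onto the tuple used internally:

```python
# Hypothetical illustration of the 'compression' attribute format.
from setiastro.saspro.xisf import XISF

print(XISF._parse_compression("zlib:1048576"))       # ('zlib', 1048576, None) -> no byte shuffling
print(XISF._parse_compression("zstd+sh:1048576:4"))  # ('zstd+sh', 1048576, 4) -> shuffled, 4-byte items
```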
958
+ # Return equivalent numpy dtype
959
+ @staticmethod
960
+ def _parse_sampleFormat(s):
961
+ # Translate alternate names to "canonical" type names
962
+ alternate_names = {
963
+ 'Byte': 'UInt8',
964
+ 'Short': 'Int16',
965
+ 'UShort': 'UInt16',
966
+ 'Int': 'Int32',
967
+ 'UInt': 'UInt32',
968
+ 'Float': 'Float32',
969
+ 'Double': 'Float64',
970
+ }
971
+ try:
972
+ s = alternate_names[s]
973
+ except KeyError:
974
+ pass
975
+
976
+ _dtypes = {
977
+ "UInt8": np.dtype("uint8"),
978
+ "UInt16": np.dtype("uint16"),
979
+ "UInt32": np.dtype("uint32"),
980
+ "Float32": np.dtype("float32"),
981
+ "Float64": np.dtype("float64"),
982
+ }
983
+ try:
984
+ return _dtypes[s]
985
+ except KeyError:
986
+ raise NotImplementedError(f"sampleFormat {s} not implemented")
987
+
988
+ # Return XISF data type from numpy dtype
989
+ @staticmethod
990
+ def _get_sampleFormat(dtype):
991
+ _sampleFormats = {
992
+ "uint8": "UInt8",
993
+ "uint16": "UInt16",
994
+ "uint32": "UInt32",
995
+ "float32": "Float32",
996
+ "float64": "Float64",
997
+ }
998
+ try:
999
+ return _sampleFormats[str(dtype)]
1000
+ except KeyError:
1001
+ raise NotImplementedError(f"sampleFormat for {dtype} not implemented")
1002
+
1003
+ @staticmethod
1004
+ def _parse_vector_dtype(type_name):
1005
+ # Translate alternate names to "canonical" type names
1006
+ alternate_names = {
1007
+ 'ByteArray': 'UI8Vector',
1008
+ 'IVector': 'I32Vector',
1009
+ 'UIVector': 'UI32Vector',
1010
+ 'Vector': 'F64Vector',
1011
+ }
1012
+ try:
1013
+ type_name = alternate_names[type_name]
1014
+ except KeyError:
1015
+ pass
1016
+
1017
+ type_prefix = type_name[:-6] # removes "Vector" and "Matrix" suffixes
1018
+ _dtypes = {
1019
+ "I8": np.dtype("int8"),
1020
+ "UI8": np.dtype("uint8"),
1021
+ "I16": np.dtype("int16"),
1022
+ "UI16": np.dtype("uint16"),
1023
+ "I32": np.dtype("int32"),
1024
+ "UI32": np.dtype("uint32"),
1025
+ "I64": np.dtype("int64"),
1026
+ "UI64": np.dtype("uint64"),
1027
+ "F32": np.dtype("float32"),
1028
+ "F64": np.dtype("float64"),
1029
+ "C32": np.dtype("csingle"),
1030
+ "C64": np.dtype("cdouble"),
1031
+ }
1032
+ try:
1033
+ return _dtypes[type_prefix]
1034
+ except KeyError:
1035
+ raise NotImplementedError(f"data type {type_name} not implemented")
1036
+
1037
+ # __/ Auxiliary functions for compression/shuffling \________
1038
+
1039
+ # Un-byteshuffling implementation based on numpy
1040
+ @staticmethod
1041
+ def _unshuffle(d, item_size):
1042
+ a = np.frombuffer(d, dtype=np.dtype("uint8"))
1043
+ a = a.reshape((item_size, -1))
1044
+ return np.transpose(a).tobytes()
1045
+
1046
+ # Byteshuffling implementation based on numpy
1047
+ @staticmethod
1048
+ def _shuffle(d, item_size):
1049
+ a = np.frombuffer(d, dtype=np.dtype("uint8"))
1050
+ a = a.reshape((-1, item_size))
1051
+ return np.transpose(a).tobytes()
1052
+
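A minimal sketch (not part of the packaged file) showing that the byte-shuffling helpers above round-trip exactly; shuffling groups the i-th byte of every item together, which usually improves lz4/zstd ratios on numeric data:

```python
# Hypothetical round-trip check for the private _shuffle/_unshuffle helpers.
import numpy as np
from setiastro.saspro.xisf import XISF

values = np.arange(8, dtype=np.float32)            # 8 items, 4 bytes each
raw = values.tobytes()
shuffled = XISF._shuffle(raw, values.itemsize)     # bytes regrouped by byte position
assert XISF._unshuffle(shuffled, values.itemsize) == raw
```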
1053
+ # LZ4/zlib/zstd decompression
1054
+ @staticmethod
1055
+ def _decompress(data, elem):
1056
+ # (codec, uncompressed-size, item-size); item-size is None if not using byte shuffling
1057
+ codec, uncompressed_size, item_size = elem["compression"]
1058
+
1059
+ if codec.startswith("lz4"):
1060
+ data = lz4.block.decompress(data, uncompressed_size=uncompressed_size)
1061
+ elif codec.startswith("zstd"):
1062
+ data = zstandard.decompress(data, max_output_size=uncompressed_size)
1063
+ elif codec.startswith("zlib"):
1064
+ data = zlib.decompress(data)
1065
+ else:
1066
+ raise NotImplementedError(f"Unimplemented compression codec {codec}")
1067
+
1068
+ if item_size: # using byte-shuffling
1069
+ data = XISF._unshuffle(data, item_size)
1070
+
1071
+ return data
1072
+
1073
+ @staticmethod
1074
+ def _compress_data_block(data, codec, shuffle=False, itemsize=1):
1075
+ """Compress a data block and return (compressed_bytes, compression_attr_string).
1076
+
1077
+ Args:
1078
+ data: bytes or numpy array to compress
1079
+ codec: 'zlib', 'lz4', 'lz4hc', or 'zstd'
1080
+ shuffle: enable byte shuffling
1081
+ itemsize: item size for byte shuffling (1 for strings, dtype.itemsize for arrays)
1082
+
1083
+ Returns:
1084
+ tuple: (compressed_bytes, compression_attribute_string)
1085
+ """
1086
+ if hasattr(data, 'tobytes'):
1087
+ raw_bytes = data.tobytes()
1088
+ else:
1089
+ raw_bytes = bytes(data)
1090
+
1091
+ uncompressed_size = len(raw_bytes)
1092
+ compressed = XISF._compress(raw_bytes, codec, shuffle=shuffle, itemsize=itemsize if shuffle else None)
1093
+
1094
+ # Build compression attribute string: "codec:uncompressed_size" or "codec+sh:uncompressed_size:itemsize"
1095
+ if shuffle and itemsize > 1:
1096
+ comp_str = f"{codec}+sh:{uncompressed_size}:{itemsize}"
1097
+ else:
1098
+ comp_str = f"{codec}:{uncompressed_size}"
1099
+
1100
+ return compressed, comp_str
1101
+
1102
+ # LZ4/zlib/zstd compression
1103
+ @staticmethod
1104
+ def _compress(data, codec, level=None, shuffle=False, itemsize=None):
1105
+ compressed = XISF._shuffle(data, itemsize) if shuffle else data
1106
+
1107
+ if codec == "lz4hc":
1108
+ level = level if level else XISF._compression_def_level["lz4hc"]
1109
+ compressed = lz4.block.compress(
1110
+ compressed, mode="high_compression", compression=level, store_size=False
1111
+ )
1112
+ elif codec == "lz4":
1113
+ compressed = lz4.block.compress(compressed, store_size=False)
1114
+ elif codec == "zstd":
1115
+ level = level if level else XISF._compression_def_level["zstd"]
1116
+ compressed = zstandard.compress(compressed, level=level)
1117
+ elif codec == "zlib":
1118
+ level = level if level else XISF._compression_def_level["zlib"]
1119
+ compressed = zlib.compress(compressed, level=level)
1120
+ else:
1121
+ raise NotImplementedError(f"Unimplemented compression codec {codec}")
1122
+
1123
+ return compressed