sigima 0.0.1.dev0__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (264)
  1. sigima/__init__.py +142 -2
  2. sigima/client/__init__.py +105 -0
  3. sigima/client/base.py +780 -0
  4. sigima/client/remote.py +469 -0
  5. sigima/client/stub.py +814 -0
  6. sigima/client/utils.py +90 -0
  7. sigima/config.py +444 -0
  8. sigima/data/logo/Sigima.svg +135 -0
  9. sigima/data/tests/annotations.json +798 -0
  10. sigima/data/tests/curve_fitting/exponential_fit.txt +511 -0
  11. sigima/data/tests/curve_fitting/gaussian_fit.txt +100 -0
  12. sigima/data/tests/curve_fitting/piecewiseexponential_fit.txt +1022 -0
  13. sigima/data/tests/curve_fitting/polynomial_fit.txt +100 -0
  14. sigima/data/tests/curve_fitting/twohalfgaussian_fit.txt +1000 -0
  15. sigima/data/tests/curve_formats/bandwidth.txt +201 -0
  16. sigima/data/tests/curve_formats/boxcar.npy +0 -0
  17. sigima/data/tests/curve_formats/datetime.txt +1001 -0
  18. sigima/data/tests/curve_formats/dynamic_parameters.txt +4000 -0
  19. sigima/data/tests/curve_formats/fw1e2.txt +301 -0
  20. sigima/data/tests/curve_formats/fwhm.txt +319 -0
  21. sigima/data/tests/curve_formats/multiple_curves.csv +29 -0
  22. sigima/data/tests/curve_formats/noised_saw.mat +0 -0
  23. sigima/data/tests/curve_formats/oscilloscope.csv +111 -0
  24. sigima/data/tests/curve_formats/other/other2/recursive2.txt +5 -0
  25. sigima/data/tests/curve_formats/other/recursive1.txt +5 -0
  26. sigima/data/tests/curve_formats/paracetamol.npy +0 -0
  27. sigima/data/tests/curve_formats/paracetamol.txt +1010 -0
  28. sigima/data/tests/curve_formats/paracetamol_dx_dy.csv +1000 -0
  29. sigima/data/tests/curve_formats/paracetamol_dy.csv +1001 -0
  30. sigima/data/tests/curve_formats/pulse1.npy +0 -0
  31. sigima/data/tests/curve_formats/pulse2.npy +0 -0
  32. sigima/data/tests/curve_formats/simple.txt +5 -0
  33. sigima/data/tests/curve_formats/spectrum.mca +2139 -0
  34. sigima/data/tests/curve_formats/square2.npy +0 -0
  35. sigima/data/tests/curve_formats/step.npy +0 -0
  36. sigima/data/tests/fabry-perot1.jpg +0 -0
  37. sigima/data/tests/fabry-perot2.jpg +0 -0
  38. sigima/data/tests/flower.npy +0 -0
  39. sigima/data/tests/image_formats/NF 180338201.scor-data +11003 -0
  40. sigima/data/tests/image_formats/binary_image.npy +0 -0
  41. sigima/data/tests/image_formats/binary_image.png +0 -0
  42. sigima/data/tests/image_formats/centroid_test.npy +0 -0
  43. sigima/data/tests/image_formats/coordinated_text/complex_image.txt +10011 -0
  44. sigima/data/tests/image_formats/coordinated_text/complex_ref_image.txt +10010 -0
  45. sigima/data/tests/image_formats/coordinated_text/image.txt +15 -0
  46. sigima/data/tests/image_formats/coordinated_text/image2.txt +14 -0
  47. sigima/data/tests/image_formats/coordinated_text/image_no_unit_no_label.txt +14 -0
  48. sigima/data/tests/image_formats/coordinated_text/image_with_nan.txt +15 -0
  49. sigima/data/tests/image_formats/coordinated_text/image_with_unit.txt +14 -0
  50. sigima/data/tests/image_formats/fiber.csv +480 -0
  51. sigima/data/tests/image_formats/fiber.jpg +0 -0
  52. sigima/data/tests/image_formats/fiber.png +0 -0
  53. sigima/data/tests/image_formats/fiber.txt +480 -0
  54. sigima/data/tests/image_formats/gaussian_spot_with_noise.npy +0 -0
  55. sigima/data/tests/image_formats/mr-brain.dcm +0 -0
  56. sigima/data/tests/image_formats/noised_gaussian.mat +0 -0
  57. sigima/data/tests/image_formats/sif_reader/nd_lum_image_no_glue.sif +0 -0
  58. sigima/data/tests/image_formats/sif_reader/raman1.sif +0 -0
  59. sigima/data/tests/image_formats/tiling.txt +10 -0
  60. sigima/data/tests/image_formats/uint16.tiff +0 -0
  61. sigima/data/tests/image_formats/uint8.tiff +0 -0
  62. sigima/data/tests/laser_beam/TEM00_z_13.jpg +0 -0
  63. sigima/data/tests/laser_beam/TEM00_z_18.jpg +0 -0
  64. sigima/data/tests/laser_beam/TEM00_z_23.jpg +0 -0
  65. sigima/data/tests/laser_beam/TEM00_z_30.jpg +0 -0
  66. sigima/data/tests/laser_beam/TEM00_z_35.jpg +0 -0
  67. sigima/data/tests/laser_beam/TEM00_z_40.jpg +0 -0
  68. sigima/data/tests/laser_beam/TEM00_z_45.jpg +0 -0
  69. sigima/data/tests/laser_beam/TEM00_z_50.jpg +0 -0
  70. sigima/data/tests/laser_beam/TEM00_z_55.jpg +0 -0
  71. sigima/data/tests/laser_beam/TEM00_z_60.jpg +0 -0
  72. sigima/data/tests/laser_beam/TEM00_z_65.jpg +0 -0
  73. sigima/data/tests/laser_beam/TEM00_z_70.jpg +0 -0
  74. sigima/data/tests/laser_beam/TEM00_z_75.jpg +0 -0
  75. sigima/data/tests/laser_beam/TEM00_z_80.jpg +0 -0
  76. sigima/enums.py +195 -0
  77. sigima/io/__init__.py +123 -0
  78. sigima/io/base.py +311 -0
  79. sigima/io/common/__init__.py +5 -0
  80. sigima/io/common/basename.py +164 -0
  81. sigima/io/common/converters.py +189 -0
  82. sigima/io/common/objmeta.py +181 -0
  83. sigima/io/common/textreader.py +58 -0
  84. sigima/io/convenience.py +157 -0
  85. sigima/io/enums.py +17 -0
  86. sigima/io/ftlab.py +395 -0
  87. sigima/io/image/__init__.py +9 -0
  88. sigima/io/image/base.py +177 -0
  89. sigima/io/image/formats.py +1016 -0
  90. sigima/io/image/funcs.py +414 -0
  91. sigima/io/signal/__init__.py +9 -0
  92. sigima/io/signal/base.py +129 -0
  93. sigima/io/signal/formats.py +290 -0
  94. sigima/io/signal/funcs.py +723 -0
  95. sigima/objects/__init__.py +260 -0
  96. sigima/objects/base.py +937 -0
  97. sigima/objects/image/__init__.py +88 -0
  98. sigima/objects/image/creation.py +556 -0
  99. sigima/objects/image/object.py +524 -0
  100. sigima/objects/image/roi.py +904 -0
  101. sigima/objects/scalar/__init__.py +57 -0
  102. sigima/objects/scalar/common.py +215 -0
  103. sigima/objects/scalar/geometry.py +502 -0
  104. sigima/objects/scalar/table.py +784 -0
  105. sigima/objects/shape.py +290 -0
  106. sigima/objects/signal/__init__.py +133 -0
  107. sigima/objects/signal/constants.py +27 -0
  108. sigima/objects/signal/creation.py +1428 -0
  109. sigima/objects/signal/object.py +444 -0
  110. sigima/objects/signal/roi.py +274 -0
  111. sigima/params.py +405 -0
  112. sigima/proc/__init__.py +96 -0
  113. sigima/proc/base.py +381 -0
  114. sigima/proc/decorator.py +330 -0
  115. sigima/proc/image/__init__.py +513 -0
  116. sigima/proc/image/arithmetic.py +335 -0
  117. sigima/proc/image/base.py +260 -0
  118. sigima/proc/image/detection.py +519 -0
  119. sigima/proc/image/edges.py +329 -0
  120. sigima/proc/image/exposure.py +406 -0
  121. sigima/proc/image/extraction.py +458 -0
  122. sigima/proc/image/filtering.py +219 -0
  123. sigima/proc/image/fourier.py +147 -0
  124. sigima/proc/image/geometry.py +661 -0
  125. sigima/proc/image/mathops.py +340 -0
  126. sigima/proc/image/measurement.py +195 -0
  127. sigima/proc/image/morphology.py +155 -0
  128. sigima/proc/image/noise.py +107 -0
  129. sigima/proc/image/preprocessing.py +182 -0
  130. sigima/proc/image/restoration.py +235 -0
  131. sigima/proc/image/threshold.py +217 -0
  132. sigima/proc/image/transformations.py +393 -0
  133. sigima/proc/signal/__init__.py +376 -0
  134. sigima/proc/signal/analysis.py +206 -0
  135. sigima/proc/signal/arithmetic.py +551 -0
  136. sigima/proc/signal/base.py +262 -0
  137. sigima/proc/signal/extraction.py +60 -0
  138. sigima/proc/signal/features.py +310 -0
  139. sigima/proc/signal/filtering.py +484 -0
  140. sigima/proc/signal/fitting.py +276 -0
  141. sigima/proc/signal/fourier.py +259 -0
  142. sigima/proc/signal/mathops.py +420 -0
  143. sigima/proc/signal/processing.py +580 -0
  144. sigima/proc/signal/stability.py +175 -0
  145. sigima/proc/title_formatting.py +227 -0
  146. sigima/proc/validation.py +272 -0
  147. sigima/tests/__init__.py +7 -0
  148. sigima/tests/common/__init__.py +0 -0
  149. sigima/tests/common/arithmeticparam_unit_test.py +26 -0
  150. sigima/tests/common/basename_unit_test.py +126 -0
  151. sigima/tests/common/client_unit_test.py +412 -0
  152. sigima/tests/common/converters_unit_test.py +77 -0
  153. sigima/tests/common/decorator_unit_test.py +176 -0
  154. sigima/tests/common/examples_unit_test.py +104 -0
  155. sigima/tests/common/kernel_normalization_unit_test.py +242 -0
  156. sigima/tests/common/roi_basic_unit_test.py +73 -0
  157. sigima/tests/common/roi_geometry_unit_test.py +171 -0
  158. sigima/tests/common/scalar_builder_unit_test.py +142 -0
  159. sigima/tests/common/scalar_unit_test.py +991 -0
  160. sigima/tests/common/shape_unit_test.py +183 -0
  161. sigima/tests/common/stat_unit_test.py +138 -0
  162. sigima/tests/common/title_formatting_unit_test.py +338 -0
  163. sigima/tests/common/tools_coordinates_unit_test.py +60 -0
  164. sigima/tests/common/transformations_unit_test.py +178 -0
  165. sigima/tests/common/validation_unit_test.py +205 -0
  166. sigima/tests/conftest.py +129 -0
  167. sigima/tests/data.py +998 -0
  168. sigima/tests/env.py +280 -0
  169. sigima/tests/guiutils.py +163 -0
  170. sigima/tests/helpers.py +532 -0
  171. sigima/tests/image/__init__.py +28 -0
  172. sigima/tests/image/binning_unit_test.py +128 -0
  173. sigima/tests/image/blob_detection_unit_test.py +312 -0
  174. sigima/tests/image/centroid_unit_test.py +170 -0
  175. sigima/tests/image/check_2d_array_unit_test.py +63 -0
  176. sigima/tests/image/contour_unit_test.py +172 -0
  177. sigima/tests/image/convolution_unit_test.py +178 -0
  178. sigima/tests/image/datatype_unit_test.py +67 -0
  179. sigima/tests/image/edges_unit_test.py +155 -0
  180. sigima/tests/image/enclosingcircle_unit_test.py +88 -0
  181. sigima/tests/image/exposure_unit_test.py +223 -0
  182. sigima/tests/image/fft2d_unit_test.py +189 -0
  183. sigima/tests/image/filtering_unit_test.py +166 -0
  184. sigima/tests/image/geometry_unit_test.py +654 -0
  185. sigima/tests/image/hough_circle_unit_test.py +147 -0
  186. sigima/tests/image/imageobj_unit_test.py +737 -0
  187. sigima/tests/image/morphology_unit_test.py +71 -0
  188. sigima/tests/image/noise_unit_test.py +57 -0
  189. sigima/tests/image/offset_correction_unit_test.py +72 -0
  190. sigima/tests/image/operation_unit_test.py +518 -0
  191. sigima/tests/image/peak2d_limits_unit_test.py +41 -0
  192. sigima/tests/image/peak2d_unit_test.py +133 -0
  193. sigima/tests/image/profile_unit_test.py +159 -0
  194. sigima/tests/image/projections_unit_test.py +121 -0
  195. sigima/tests/image/restoration_unit_test.py +141 -0
  196. sigima/tests/image/roi2dparam_unit_test.py +53 -0
  197. sigima/tests/image/roi_advanced_unit_test.py +588 -0
  198. sigima/tests/image/roi_grid_unit_test.py +279 -0
  199. sigima/tests/image/spectrum2d_unit_test.py +40 -0
  200. sigima/tests/image/threshold_unit_test.py +91 -0
  201. sigima/tests/io/__init__.py +0 -0
  202. sigima/tests/io/addnewformat_unit_test.py +125 -0
  203. sigima/tests/io/convenience_funcs_unit_test.py +470 -0
  204. sigima/tests/io/coordinated_text_format_unit_test.py +495 -0
  205. sigima/tests/io/datetime_csv_unit_test.py +198 -0
  206. sigima/tests/io/imageio_formats_test.py +41 -0
  207. sigima/tests/io/ioregistry_unit_test.py +69 -0
  208. sigima/tests/io/objmeta_unit_test.py +87 -0
  209. sigima/tests/io/readobj_unit_test.py +130 -0
  210. sigima/tests/io/readwriteobj_unit_test.py +67 -0
  211. sigima/tests/signal/__init__.py +0 -0
  212. sigima/tests/signal/analysis_unit_test.py +135 -0
  213. sigima/tests/signal/check_1d_arrays_unit_test.py +169 -0
  214. sigima/tests/signal/convolution_unit_test.py +404 -0
  215. sigima/tests/signal/datetime_unit_test.py +176 -0
  216. sigima/tests/signal/fft1d_unit_test.py +303 -0
  217. sigima/tests/signal/filters_unit_test.py +403 -0
  218. sigima/tests/signal/fitting_unit_test.py +929 -0
  219. sigima/tests/signal/fwhm_unit_test.py +111 -0
  220. sigima/tests/signal/noise_unit_test.py +128 -0
  221. sigima/tests/signal/offset_correction_unit_test.py +34 -0
  222. sigima/tests/signal/operation_unit_test.py +489 -0
  223. sigima/tests/signal/peakdetection_unit_test.py +145 -0
  224. sigima/tests/signal/processing_unit_test.py +657 -0
  225. sigima/tests/signal/pulse/__init__.py +112 -0
  226. sigima/tests/signal/pulse/crossing_times_unit_test.py +123 -0
  227. sigima/tests/signal/pulse/plateau_detection_unit_test.py +102 -0
  228. sigima/tests/signal/pulse/pulse_unit_test.py +1824 -0
  229. sigima/tests/signal/roi_advanced_unit_test.py +392 -0
  230. sigima/tests/signal/signalobj_unit_test.py +603 -0
  231. sigima/tests/signal/stability_unit_test.py +431 -0
  232. sigima/tests/signal/uncertainty_unit_test.py +611 -0
  233. sigima/tests/vistools.py +1030 -0
  234. sigima/tools/__init__.py +59 -0
  235. sigima/tools/checks.py +290 -0
  236. sigima/tools/coordinates.py +308 -0
  237. sigima/tools/datatypes.py +26 -0
  238. sigima/tools/image/__init__.py +97 -0
  239. sigima/tools/image/detection.py +451 -0
  240. sigima/tools/image/exposure.py +77 -0
  241. sigima/tools/image/extraction.py +48 -0
  242. sigima/tools/image/fourier.py +260 -0
  243. sigima/tools/image/geometry.py +190 -0
  244. sigima/tools/image/preprocessing.py +165 -0
  245. sigima/tools/signal/__init__.py +86 -0
  246. sigima/tools/signal/dynamic.py +254 -0
  247. sigima/tools/signal/features.py +135 -0
  248. sigima/tools/signal/filtering.py +171 -0
  249. sigima/tools/signal/fitting.py +1171 -0
  250. sigima/tools/signal/fourier.py +466 -0
  251. sigima/tools/signal/interpolation.py +70 -0
  252. sigima/tools/signal/peakdetection.py +126 -0
  253. sigima/tools/signal/pulse.py +1626 -0
  254. sigima/tools/signal/scaling.py +50 -0
  255. sigima/tools/signal/stability.py +258 -0
  256. sigima/tools/signal/windowing.py +90 -0
  257. sigima/worker.py +79 -0
  258. sigima-1.0.0.dist-info/METADATA +233 -0
  259. sigima-1.0.0.dist-info/RECORD +262 -0
  260. {sigima-0.0.1.dev0.dist-info → sigima-1.0.0.dist-info}/licenses/LICENSE +29 -29
  261. sigima-0.0.1.dev0.dist-info/METADATA +0 -60
  262. sigima-0.0.1.dev0.dist-info/RECORD +0 -6
  263. {sigima-0.0.1.dev0.dist-info → sigima-1.0.0.dist-info}/WHEEL +0 -0
  264. {sigima-0.0.1.dev0.dist-info → sigima-1.0.0.dist-info}/top_level.txt +0 -0
sigima/io/image/formats.py
@@ -0,0 +1,1016 @@
+ # Copyright (c) DataLab Platform Developers, BSD 3-Clause license, see LICENSE file.
+
+ """
+ Sigima I/O image formats
+ """
+
+ # pylint: disable=invalid-name # Allows short reference names like x, y, ...
+
+ from __future__ import annotations
+
+ import datetime
+ import os.path as osp
+ import re
+
+ import imageio.v3 as iio
+ import numpy as np
+ import pandas as pd
+ import scipy.io as sio
+ import skimage.io
+ from guidata.io import HDF5Reader, HDF5Writer
+
+ import sigima
+ from sigima.config import _, options
+ from sigima.io import ftlab
+ from sigima.io.base import FormatInfo
+ from sigima.io.common.converters import convert_array_to_valid_dtype
+ from sigima.io.enums import FileEncoding
+ from sigima.io.image import funcs
+ from sigima.io.image.base import (
+     ImageFormatBase,
+     MultipleImagesFormatBase,
+     SingleImageFormatBase,
+ )
+ from sigima.objects.image import ImageObj, create_image
+ from sigima.worker import CallbackWorkerProtocol
+
+
+ class HDF5ImageFormat(ImageFormatBase):
+     """Object representing HDF5 image file type"""
+
+     FORMAT_INFO = FormatInfo(
+         name="HDF5",
+         extensions="*.h5ima",
+         readable=True,
+         writeable=True,
+     )
+     GROUP_NAME = "image"
+
+     # pylint: disable=unused-argument
+     def read(
+         self, filename: str, worker: CallbackWorkerProtocol | None = None
+     ) -> list[ImageObj]:
+         """Read list of image objects from file
+
+         Args:
+             filename: File name
+             worker: Callback worker object
+
+         Returns:
+             List of image objects
+         """
+         reader = HDF5Reader(filename)
+         try:
+             with reader.group(self.GROUP_NAME):
+                 obj = ImageObj()
+                 obj.deserialize(reader)
+         except ValueError as exc:
+             raise ValueError("No valid image data found") from exc
+         except Exception as exc: # pylint: disable=broad-except
+             raise RuntimeError(
+                 f"Unexpected error reading HDF5 image from {filename}"
+             ) from exc
+         finally:
+             reader.close()
+         return [obj]
+
+     def write(self, filename: str, obj: ImageObj) -> None:
+         """Write data to file
+
+         Args:
+             filename: file name
+             obj: native object (signal or image)
+
+         Raises:
+             NotImplementedError: if format is not supported
+         """
+         assert isinstance(obj, ImageObj), "Object is not an image"
+         writer = HDF5Writer(filename)
+         with writer.group(self.GROUP_NAME):
+             obj.serialize(writer)
+         writer.close()
+
+
+ class ClassicsImageFormat(SingleImageFormatBase):
+     """Object representing classic image file types"""
+
+     FORMAT_INFO = FormatInfo(
+         name="BMP, JPEG, PNG, TIFF, JPEG2000",
+         extensions="*.bmp *.jpg *.jpeg *.png *.tif *.tiff *.jp2",
+         readable=True,
+         writeable=True,
+     )
+
+     @staticmethod
+     def read_data(filename: str) -> np.ndarray:
+         """Read data and return it
+
+         Args:
+             filename: File name
+
+         Returns:
+             Image array data
+         """
+         return skimage.io.imread(filename, as_gray=True)
+
+     @staticmethod
+     def write_data(filename: str, data: np.ndarray) -> None:
+         """Write data to file
+
+         Args:
+             filename: File name
+             data: Image array data
+         """
+         ext = osp.splitext(filename)[1].lower()
+         if ext in (".bmp", ".jpg", ".jpeg", ".png"):
+             if data.dtype is not np.uint8:
+                 data = data.astype(np.uint8)
+         if ext in (".jp2",):
+             if data.dtype not in (np.uint8, np.uint16):
+                 data = data.astype(np.uint16)
+         skimage.io.imsave(filename, data, check_contrast=False)
+
+
+ class NumPyImageFormat(SingleImageFormatBase):
+     """Object representing NumPy image file type"""
+
+     FORMAT_INFO = FormatInfo(
+         name="NumPy",
+         extensions="*.npy",
+         readable=True,
+         writeable=True,
+     ) # pylint: disable=duplicate-code
+
+     @staticmethod
+     def read_data(filename: str) -> np.ndarray:
+         """Read data and return it
+
+         Args:
+             filename: File name
+
+         Returns:
+             Image array data
+         """
+         return convert_array_to_valid_dtype(np.load(filename), ImageObj.VALID_DTYPES)
+
+     @staticmethod
+     def write_data(filename: str, data: np.ndarray) -> None:
+         """Write data to file
+
+         Args:
+             filename: File name
+             data: Image array data
+         """
+         np.save(filename, data)
+
+
+ class NotCoordinatedTextFileError(Exception):
+     """Exception raised when a file is not a coordinated text file"""
+
+
+ class CoordinatedTextFileReader:
+     """Utility class for reading text files with metadata and coordinates"""
+
+     @staticmethod
+     def read_images(filename: str) -> list[ImageObj]:
+         """Read list of image objects from coordinated text file.
+
+         Args:
+             filename: File name
+
+         Returns:
+             List of image objects
+         """
+         file_metadata = CoordinatedTextFileReader.read_metadata(filename)
+
+         # Validate metadata and raise on inconsistent or missing keys
+         CoordinatedTextFileReader.verify_metadata(filename, file_metadata)
+
+         dict_keys = file_metadata.keys()
+         allowed_column_header = {
+             "X",
+             "Y",
+             "Z",
+             "Zre",
+             "Zim",
+             "Z Error",
+             "Zre Error",
+             "Zim Error",
+         }
+         columns_header = [k for k in dict_keys if k in allowed_column_header]
+
+         # Remove keys that are in columns_header and construct metadata dictionary
+         metadata = {
+             key: value[0]
+             for key, value in file_metadata.items()
+             if key not in columns_header
+         }
+         metadata["source"] = filename
+
+         df = CoordinatedTextFileReader.read_data(filename, columns_header)
+
+         name = osp.basename(filename)
+
+         try:
+             # Check if coordinates are uniform or non-uniform
+             x_coords = np.sort(df["X"].unique())
+             y_coords = np.sort(df["Y"].unique())
+
+             # Check if we have a regular grid structure
+             expected_points = len(x_coords) * len(y_coords)
+             actual_points = len(df)
+
+             # Extract coordinate and data information
+             (zlabel, zunit) = file_metadata.get("Z", file_metadata.get("Zre", ("", "")))
+             (xlabel, xunit) = file_metadata.get("X", ("X", ""))
+             (ylabel, yunit) = file_metadata.get("Y", ("Y", ""))
+
+             if xlabel is None:
+                 xlabel = "X"
+             if ylabel is None:
+                 ylabel = "Y"
+             if zlabel is None:
+                 zlabel = "Z"
+
+             xunit = "" if xunit is None else str(xunit)
+             yunit = "" if yunit is None else str(yunit)
+             zunit = "" if zunit is None else str(zunit)
+
+             if expected_points == actual_points:
+                 # Regular grid - can use pivot to create 2D array
+                 data = df.pivot(index="Y", columns="X", values="Z").values
+                 data = convert_array_to_valid_dtype(data, ImageObj.VALID_DTYPES)
+
+                 # Check if coordinates are truly uniform (evenly spaced)
+                 x_uniform = len(x_coords) >= 2 and np.allclose(
+                     np.diff(x_coords), x_coords[1] - x_coords[0], rtol=1e-10
+                 )
+                 y_uniform = len(y_coords) >= 2 and np.allclose(
+                     np.diff(y_coords), y_coords[1] - y_coords[0], rtol=1e-10
+                 )
+
+                 image = create_image(
+                     name,
+                     metadata=metadata,
+                     data=data,
+                     units=(xunit, yunit, zunit),
+                     labels=(xlabel, ylabel, zlabel),
+                 )
+
+                 if x_uniform and y_uniform:
+                     # Set uniform coordinates
+                     dx = float(x_coords[1] - x_coords[0]) if len(x_coords) > 1 else 1.0
+                     dy = float(y_coords[1] - y_coords[0]) if len(y_coords) > 1 else 1.0
+                     x0 = float(x_coords[0]) if len(x_coords) > 0 else 0.0
+                     y0 = float(y_coords[0]) if len(y_coords) > 0 else 0.0
+                     image.set_uniform_coords(dx, dy, x0, y0)
+                 else:
+                     # Set non-uniform coordinates
+                     image.set_coords(x_coords.astype(float), y_coords.astype(float))
+             else:
+                 # Non-regular grid - cannot create proper 2D array from this data
+                 raise ValueError(
+                     f"File {filename} contains {actual_points} data points "
+                     f"but expected {expected_points} for a regular grid "
+                     f"({len(x_coords)}×{len(y_coords)}). "
+                     "Coordinated text files must contain data on a complete "
+                     "rectangular grid."
+                 )
+
+             images_list = [image]
+         except ValueError as exc:
+             raise ValueError(f"File {filename} wrong format.\n{exc}") from exc
+
+         if "Z Error" in df.columns:
+             # For error data, use the same coordinate structure as the main image
+             error_data = df.pivot(index="Y", columns="X", values="Z Error").values
+
+             image_error = create_image(
+                 name + " error",
+                 metadata={"source": filename},
+                 data=error_data,
+                 units=(
+                     file_metadata["X"][1],
+                     file_metadata["Y"][1],
+                     file_metadata.get(
+                         "Z Error",
+                         file_metadata.get(
+                             "Zre Error",
+                             file_metadata.get("Z", file_metadata.get("Zre", ("", ""))),
+                         ),
+                     )[1],
+                 ),
+                 labels=(
+                     file_metadata["X"][0],
+                     file_metadata["Y"][0],
+                     file_metadata.get("Z", file_metadata.get("Zre", ("", "")))[0]
+                     + " error",
+                 ),
+             )
+
+             # Apply the same coordinate system as the main image
+             if image.is_uniform_coords:
+                 image_error.set_uniform_coords(image.dx, image.dy, image.x0, image.y0)
+             else:
+                 image_error.set_coords(image.xcoords.copy(), image.ycoords.copy())
+
+             images_list.append(image_error)
+
+         return images_list
+
+     @staticmethod
+     def read_metadata(filename: str) -> dict[str, tuple | None]:
+         """Read metadata from file
+
+         Args:
+             filename: File name
+
+         Returns:
+             Metadata dictionary structured as {key: (value, unit)}
+             Available keys are:
+             - nx (value is int)
+             - ny (value is int)
+             - X (value represents axis label)
+             - Y (value represents axis label)
+             - Z (value represents axis label)
+             - Zre (value represents axis label)
+             - Zim (value represents axis label)
+             - Z Error (value is none)
+             - Zre Error (value is none)
+             - Zim Error (value is none)
+         """
+         metadata = {}
+
+         try:
+             with open(filename, encoding="utf-8") as f:
+                 for line in f:
+                     line = line.strip()
+                     if not line.startswith("#"):
+                         break
+
+                     # Remove leading '#' and strip whitespace
+                     content = line[1:].strip()
+
+                     # Parse specific patterns
+                     parsed = CoordinatedTextFileReader._parse_metadata_line(content)
+                     if parsed:
+                         key, value_unit = parsed
+                         metadata[key] = value_unit
+
+         except (ValueError, IOError) as exc:
+             raise ValueError(f"Could not read metadata from file {filename}") from exc
+
+         return metadata
+
+     @staticmethod
+     def _parse_metadata_line(line: str) -> tuple[str, tuple] | None:
+         """Parse a single metadata line into key-value-unit tuple.
+
+         Args:
+             line: Line to parse (without # prefix)
+
+         Returns:
+             Tuple of (key, (value, unit)) or None if not parseable
+         """
+         # Handle special patterns first
+         if match := re.match(r"Created by (.*)", line):
+             return "author", (match.group(1).strip(), None)
+
+         if match := re.match(
+             r"Created on (\d{4}-\d{2}-\d{2}) (\d{2}:\d{2}:\d{2}\.\d+)", line
+         ):
+             date_str, _time_str = match.groups()
+             return "creation_date", (date_str, None)
+             # Note: creation_time is lost in this simplified version
+
+         if match := re.match(r"Using matrislib ([\d\.a-zA-Z-]+)", line):
+             return "software_version", (f"matrislib {match.group(1)}", None)
+
+         # Handle error columns without colons
+         if line.startswith(("Z Error", "Zre Error", "Zim Error")):
+             if ":" not in line:
+                 line = line.replace("Error", "Error :", 1)
+
+         # Must contain colon for key-value pairs
+         if ":" not in line:
+             return None
+
+         # Remove Real(...) or Imaginary(...) wrappers
+         line = re.sub(r"(?:Real|Imaginary)\(([^\)]*)\)", r"\1", line)
+
+         # Split on first colon
+         key, rest = line.split(":", 1)
+         key = key.strip()
+         rest = rest.strip()
+
+         # Parse value and unit
+         value, unit = CoordinatedTextFileReader._parse_value_and_unit(rest)
+
+         return key, (value, unit)
+
+     @staticmethod
+     def _parse_value_and_unit(
+         text: str,
+     ) -> tuple[int | float | bool | str | None, str | None]:
+         """Parse value and unit from text like 'value (unit)' or just 'value'.
+
+         Intelligently converts values to appropriate types:
+         - Booleans: "true"/"false" (case-insensitive) → bool
+         - Integers: "123", "-456" → int
+         - Floats: "1.23", "-4.56", "1.2e-3" → float
+         - None: empty string → None
+         - Strings: everything else → str
+
+         Args:
+             text: Text to parse
+
+         Returns:
+             Tuple of (value, unit) where value can be int, float, bool, str, or None
+         """
+         text = text.strip()
+
+         # Extract unit in parentheses if present
+         unit = None
+         if text.endswith(")"):
+             if "(" in text:
+                 parts = text.rsplit("(", 1)
+                 text = parts[0].strip()
+                 unit = parts[1].rstrip(")").strip()
+                 if not unit:
+                     unit = None
+
+         # Parse value with intelligent type detection
+         if not text:
+             value = None
+         elif text.lower() in ("true", "false"):
+             # Boolean values
+             value = text.lower() == "true"
+         else:
+             # Try to parse as number
+             try:
+                 # Check if it looks like an integer (no decimal point or exponent)
+                 if "." not in text and "e" not in text.lower():
+                     value = int(text)
+                 else:
+                     # Parse as float
+                     value = float(text)
+             except ValueError:
+                 # Not a number, keep as string
+                 value = text
+
+         return value, unit
+
+     @staticmethod
+     def verify_metadata(filename: str, metadata: dict[str, tuple | None]) -> None:
+         """Verify metadata keys consistency.
+
+         Perform a set of sanity checks on the parsed metadata and raise an
+         appropriate exception on failure.
+
+         Args:
+             filename: Parsed filename used for error messages.
+             metadata: Metadata dictionary parsed from file header.
+
+         Raises:
+             NotCoordinatedTextFileError: When file is not a valid format.
+             ValueError: When required fields are missing or inconsistent.
+         """
+         # Check if this is a coordinated text file by looking for key indicators
+         has_format_indicators = "software_version" in metadata or (
+             "creation_date" in metadata
+             and any(col in metadata for col in ["X", "Y", "Z", "Zre", "Zim"])
+         )
+
+         if not has_format_indicators:
+             raise NotCoordinatedTextFileError(
+                 f"File {filename} does not appear to be a coordinated text format file "
+                 "(missing expected metadata structure)"
+             )
+
+         columns_header = [k for k in metadata.keys() if k not in ("nx", "ny")]
+
+         # Required columns check
+         if "X" not in columns_header or "Y" not in columns_header:
+             raise ValueError(
+                 f"File {filename}: Missing required X, Y columns in header"
+             )
+
+         # Z column validation
+         has_z = "Z" in columns_header
+         has_complex = "Zre" in columns_header or "Zim" in columns_header
+
+         if not (has_z or has_complex):
+             raise ValueError(
+                 f"File {filename}: Must contain either Z column or Zre/Zim columns"
+             )
+
+         if has_z and has_complex:
+             raise ValueError(
+                 f"File {filename}: Cannot contain both Z and Zre/Zim columns"
+             )
+
+         # Complex Z validation
+         if has_complex:
+             if ("Zre" in columns_header) ^ ("Zim" in columns_header):
+                 raise ValueError(
+                     f"File {filename}: Both Zre and Zim columns "
+                     f"must be present together"
+                 )
+
+         # Error column validation
+         has_z_error = "Z Error" in columns_header
+         has_complex_error = (
+             "Zre Error" in columns_header or "Zim Error" in columns_header
+         )
+
+         if has_z_error and has_complex_error:
+             raise ValueError(
+                 f"File {filename}: Cannot contain both Z Error and "
+                 f"Zre Error/Zim Error columns"
+             )
+
+         if has_complex_error:
+             if ("Zre Error" in columns_header) ^ ("Zim Error" in columns_header):
+                 raise ValueError(
+                     f"File {filename}: Both Zre Error and Zim Error columns "
+                     f"must be present together"
+                 )
+
+     @staticmethod
+     def _try_df_reading(filename: str, columns_header: list[str]) -> pd.DataFrame:
+         """Try to read the data file with various parsing options.
+
+         Args:
+             filename: File name
+             columns_header: List of column headers to use when reading the data.
+
+         Returns:
+             DataFrame containing the image data.
+
+         Raises:
+             ValueError: If the file cannot be read with any of the tried options.
+         """
+         # Define parsing configurations to try in order of preference
+         parsing_configs = [
+             (encoding, decimal, delimiter)
+             for encoding in FileEncoding
+             for decimal in (".", ",")
+             for delimiter in (r"\s+", ",", ";")
+         ]
+
+         last_error = None
+         for encoding, decimal, delimiter in parsing_configs:
+             try:
+                 df = pd.read_csv(
+                     filename,
+                     decimal=decimal,
+                     comment="#",
+                     delimiter=delimiter,
+                     encoding=encoding,
+                     names=columns_header,
+                 )
+                 # Drop entirely empty columns introduced by trailing delimiters
+                 df = df.dropna(axis=1, how="all")
+                 return df
+
+             except (ValueError, UnicodeDecodeError) as exc:
+                 last_error = exc
+                 continue
+
+         # If we get here, all parsing attempts failed
+         raise ValueError(
+             f"Could not read image data from file {filename}. Last error: {last_error}"
+         ) from last_error
+
+     @staticmethod
+     def read_data(filename: str, columns_header: list[str]) -> pd.DataFrame:
+         """Read data and return it.
+
+         Args:
+             filename: File name
+
+         Returns:
+             Image array data
+         """
+         # Try several parsing variants (encoding, decimal and delimiter).
+         df: pd.DataFrame | None = None
+
+         df = CoordinatedTextFileReader._try_df_reading(filename, columns_header)
+
+         # if Z is present, the image is Real
+
+         if "Zre" in df.columns:
+             df["Z"] = df["Zre"] + 1j * df["Zim"]
+             df = df.drop(columns=["Zre", "Zim"])
+         if "Zre Error" in df.columns:
+             df["Z Error"] = df["Zre Error"] + 1j * df["Zim Error"]
+             df = df.drop(columns=["Zre Error", "Zim Error"])
+
+         return df
+
+
+ class CoordinatedTextFileWriter:
+     """Utility class for writing text files with metadata and coordinates"""
+
+     @staticmethod
+     def write_image(filename: str, obj: ImageObj) -> None:
+         """Write image object to coordinated text file.
+
+         Args:
+             filename: File name to write to
+             obj: Image object to write
+
+         Raises:
+             ValueError: If image has invalid coordinate system
+         """
+         # Validate that we can write this image
+         if obj.data is None:
+             raise ValueError(
+                 "Cannot write image with no data to coordinated text format"
+             )
+
+         # Get coordinate information
+         if obj.is_uniform_coords:
+             # Generate coordinate arrays for uniform coordinates
+             ny, nx = obj.data.shape
+             x_coords = obj.x0 + np.arange(nx) * obj.dx
+             y_coords = obj.y0 + np.arange(ny) * obj.dy
+         else:
+             # Use non-uniform coordinates directly
+             x_coords = obj.xcoords
+             y_coords = obj.ycoords
+             if x_coords is None or y_coords is None:
+                 raise ValueError("Cannot write image with missing coordinate arrays")
+
+         # Create meshgrid for the data
+         X, Y = np.meshgrid(x_coords, y_coords)
+
+         # Flatten arrays for CSV output
+         x_flat = X.flatten()
+         y_flat = Y.flatten()
+         z_flat = obj.data.flatten()
+
+         # Write file
+         with open(filename, "w", encoding="utf-8") as f:
+             # Write metadata header
+             f.write(f"# Created by Sigima {sigima.__version__}\n")
+             timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
+             f.write(f"# Created on {timestamp}\n")
+             f.write(f"# nx: {obj.data.shape[1]}\n")
+             f.write(f"# ny: {obj.data.shape[0]}\n")
+
+             # Write axis information
+             f.write(f"# X: {obj.xlabel}")
+             if obj.xunit:
+                 f.write(f" ({obj.xunit})")
+             f.write("\n")
+
+             f.write(f"# Y: {obj.ylabel}")
+             if obj.yunit:
+                 f.write(f" ({obj.yunit})")
+             f.write("\n")
+
+             f.write(f"# Z: {obj.zlabel}")
+             if obj.zunit:
+                 f.write(f" ({obj.zunit})")
+             f.write("\n")
+
+             # Write additional metadata if present
+             if obj.metadata:
+                 for key, value in obj.metadata.items():
+                     if key not in ("source",): # Skip internal metadata
+                         f.write(f"# {key}: {value}\n")
+
+             # Write data columns
+             for x, y, z in zip(x_flat, y_flat, z_flat):
+                 f.write(f"{x}\t{y}\t{z}\n")
+
+
+ class TextImageFormat(SingleImageFormatBase):
+     """Object representing text image file type"""
+
+     FORMAT_INFO = FormatInfo(
+         name=_("Text files"),
+         extensions="*.txt *.csv *.asc",
+         readable=True,
+         writeable=True,
+     )
+
+     def read(
+         self, filename: str, worker: CallbackWorkerProtocol | None = None
+     ) -> list[ImageObj]:
+         """Read list of image objects from file
+
+         Args:
+             filename: File name
+             worker: Callback worker object
+
+         Returns:
+             List of image objects
+         """
+         # Try to read as coordinated text format first
+         # (for .txt/.csv files with metadata and coordinates)
+         if filename.lower().endswith((".txt", ".csv")):
+             try:
+                 return CoordinatedTextFileReader.read_images(filename)
+             except NotCoordinatedTextFileError:
+                 # Not a coordinated text file, continue with regular text processing
+                 pass
+
+         # Read as generic text file
+         obj = self.create_object(filename)
+         obj.data = self.read_data(filename)
+         unique_values = np.unique(obj.data)
+         if len(unique_values) == 2:
+             # Binary image: set LUT range to unique values
+             obj.zscalemin, obj.zscalemax = unique_values.tolist()
+         return [obj]
+
+     @staticmethod
+     def read_data(filename: str) -> np.ndarray:
+         """Read data and return it
+
+         Args:
+             filename: File name
+
+         Returns:
+             Image array data
+         """
+         for encoding in FileEncoding:
+             for decimal in (".", ","):
+                 for delimiter in (",", ";", r"\s+"):
+                     try:
+                         df = pd.read_csv(
+                             filename,
+                             decimal=decimal,
+                             delimiter=delimiter,
+                             encoding=encoding,
+                             header=None,
+                         )
+                         # Handle the extra column created with trailing delimiters.
+                         df = df.dropna(axis=1, how="all")
+                         data = df.to_numpy()
+                         return convert_array_to_valid_dtype(data, ImageObj.VALID_DTYPES)
+                     except ValueError:
+                         continue
+         raise ValueError(f"Could not read image data from file {filename}.")
+
+     @staticmethod
+     def write_data(filename: str, data: np.ndarray) -> None:
+         """Write data to file.
+
+         Args:
+             filename: File name.
+             data: Image array data.
+         """
+         if np.issubdtype(data.dtype, np.integer):
+             fmt = "%d"
+         elif np.issubdtype(data.dtype, np.floating) or np.issubdtype(
+             data.dtype, np.complexfloating
+         ):
+             fmt = "%.18e"
+         else:
+             raise NotImplementedError(
+                 f"Writing data of type {data.dtype} to text file is not supported."
+             )
+         ext = osp.splitext(filename)[1]
+         if ext.lower() in (".txt", ".asc", ""):
+             np.savetxt(filename, data, fmt=fmt)
+         elif ext.lower() == ".csv":
+             np.savetxt(filename, data, fmt=fmt, delimiter=",")
+         else:
+             raise ValueError(f"Unknown text file extension {ext}")
+
+     def write(self, filename: str, obj: ImageObj) -> None:
+         """Write data to file
+
+         Args:
+             filename: file name
+             obj: image object
+         """
+         if not isinstance(obj, ImageObj):
+             raise ValueError("Object is not an image")
+
+         # Check if object has non-uniform coordinates and filename is TXT or CSV
+         # If so, use coordinated text format
+         ext = osp.splitext(filename)[1].lower()
+         if ext in (".txt", ".csv") and not obj.is_uniform_coords:
+             try:
+                 CoordinatedTextFileWriter.write_image(filename, obj)
+                 return
+             except Exception: # pylint: disable=broad-except
+                 # Fall back to regular text format if writing fails
+                 pass
+
+         # Use default text format
+         super().write(filename, obj)
+
+
+ class MatImageFormat(SingleImageFormatBase):
+     """Object representing MAT-File image file type"""
+
+     FORMAT_INFO = FormatInfo(
+         name=_("MAT-Files"),
+         extensions="*.mat",
+         readable=True,
+         writeable=True,
+     ) # pylint: disable=duplicate-code
+
+     def read(
+         self, filename: str, worker: CallbackWorkerProtocol | None = None
+     ) -> list[ImageObj]:
+         """Read list of image objects from file
+
+         Args:
+             filename: File name
+             worker: Callback worker object
+
+         Returns:
+             List of image objects
+         """
+         mat = sio.loadmat(filename)
+         allimg: list[ImageObj] = []
+         for dname, data in mat.items():
+             if dname.startswith("__") or not isinstance(data, np.ndarray):
+                 continue
+             if len(data.shape) != 2:
+                 continue
+             obj = self.create_object(filename)
+             obj.data = data
+             if dname != "img":
+                 obj.title += f" ({dname})"
+             allimg.append(obj)
+         return allimg
+
+     @staticmethod
+     def read_data(filename: str) -> np.ndarray:
+         """Read data and return it
+
+         Args:
+             filename: File name
+
+         Returns:
+             Image array data
+         """
+         # This method is not used, as read() is overridden
+
+     @staticmethod
+     def write_data(filename: str, data: np.ndarray) -> None:
+         """Write data to file
+
+         Args:
+             filename: File name
+             data: Image array data
+         """
+         sio.savemat(filename, {"img": data})
+
+
+ class DICOMImageFormat(SingleImageFormatBase):
+     """Object representing DICOM image file type"""
+
+     FORMAT_INFO = FormatInfo(
+         name="DICOM",
+         extensions="*.dcm *.dicom",
+         readable=True,
+         writeable=False,
+         requires=["pydicom"],
+     )
+
+     @staticmethod
+     def read_data(filename: str) -> np.ndarray:
+         """Read data and return it
+
+         Args:
+             filename: File name
+
+         Returns:
+             Image array data
+         """
+         return funcs.imread_dicom(filename)
+
+
+ class AndorSIFImageFormat(MultipleImagesFormatBase):
+     """Object representing an Andor SIF image file type"""
+
+     FORMAT_INFO = FormatInfo(
+         name="Andor SIF",
+         extensions="*.sif",
+         readable=True,
+         writeable=False,
+     )
+
+     @staticmethod
+     def read_data(filename: str) -> np.ndarray:
+         """Read data and return it
+
+         Args:
+             filename: File name
+
+         Returns:
+             Image array data
+         """
+         return funcs.imread_sif(filename)
+
+
+ # Generate classes based on the information above:
+ def generate_imageio_format_classes(
+     imageio_formats: list[list[str, str]]
+     | list[tuple[str, str]]
+     | tuple[tuple[str, str]]
+     | tuple[list[str, str]]
+     | None = None,
+ ) -> None:
+     """Generate classes based on the information above"""
+     if imageio_formats is None:
+         imageio_formats = options.imageio_formats.get()
+
+     for extensions, name in imageio_formats:
+         class_dict = {
+             "FORMAT_INFO": FormatInfo(
+                 name=name, extensions=extensions, readable=True, writeable=False
+             ),
+             "read_data": staticmethod(
+                 lambda filename: iio.imread(filename, index=None)
+             ),
+         }
+         class_name = extensions.split()[0].split(".")[1].upper() + "ImageFormat"
+         globals()[class_name] = type(
+             class_name, (MultipleImagesFormatBase,), class_dict
+         )
+
+
+ generate_imageio_format_classes()
+
+
+ class SpiriconImageFormat(SingleImageFormatBase):
+     """Object representing Spiricon image file type"""
+
+     FORMAT_INFO = FormatInfo(
+         name="Spiricon",
+         extensions="*.scor-data",
+         readable=True,
+         writeable=False,
+     )
+
+     @staticmethod
+     def read_data(filename: str) -> np.ndarray:
+         """Read data and return it
+
+         Args:
+             filename: File name
+
+         Returns:
+             Image array data
+         """
+         return funcs.imread_scor(filename)
+
+
+ class XYZImageFormat(SingleImageFormatBase):
+     """Object representing Dürr NDT XYZ image file type"""
+
+     FORMAT_INFO = FormatInfo(
+         name="Dürr NDT",
+         extensions="*.xyz",
+         readable=True,
+         writeable=False,
+     )
+
+     @staticmethod
+     def read_data(filename: str) -> np.ndarray:
+         """Read data and return it
+
+         Args:
+             filename: File name
+
+         Returns:
+             Image array data
+         """
+         with open(filename, "rb") as fdesc:
+             cols = int(np.fromfile(fdesc, dtype=np.uint16, count=1)[0])
+             rows = int(np.fromfile(fdesc, dtype=np.uint16, count=1)[0])
+             arr = np.fromfile(fdesc, dtype=np.uint16, count=cols * rows)
+             arr = arr.reshape((rows, cols))
+         return np.fliplr(arr)
+
+
+ class FTLabImageFormat(SingleImageFormatBase):
+     """FT-Lab image file."""
+
+     FORMAT_INFO = FormatInfo(
+         name="FT-Lab",
+         extensions="*.ima",
+         readable=True,
+         writeable=False,
+     )
+
+     @staticmethod
+     def read_data(filename: str) -> np.ndarray:
+         """Read and return data.
+
+         Args:
+             filename: Path to FT-Lab file.
+
+         Returns:
+             Image data.
+         """
+         return ftlab.imread_ftlabima(filename)
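
Below is a minimal usage sketch for the format classes added in this file. It is not part of the package diff: it assumes the format classes can be instantiated without arguments, relies only on the read()/read_data()/write_data() signatures visible above, and the file names are placeholders.

    # Minimal sketch, based only on the signatures shown in this diff.
    from sigima.io.image.formats import NumPyImageFormat, TextImageFormat

    # TextImageFormat.read() returns a list of ImageObj instances; a coordinated
    # text file may also yield a second object carrying the error map.
    images = TextImageFormat().read("measurement.txt")  # placeholder file name
    for img in images:
        print(img.data.shape, img.data.dtype)

    # The static helpers operate directly on NumPy arrays.
    NumPyImageFormat.write_data("measurement_copy.npy", images[0].data)
    data = NumPyImageFormat.read_data("measurement_copy.npy")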