napari-tmidas 0.1.3__py3-none-any.whl → 0.1.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
napari_tmidas/_version.py CHANGED
@@ -17,5 +17,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE

- __version__ = version = '0.1.3'
- __version_tuple__ = version_tuple = (0, 1, 3)
+ __version__ = version = '0.1.4'
+ __version_tuple__ = version_tuple = (0, 1, 4)
napari_tmidas/napari.yaml CHANGED
@@ -24,6 +24,9 @@ contributions:
  - id: napari-tmidas.file_selector
    python_name: napari_tmidas._file_selector:napari_experimental_provide_dock_widget
    title: File selector
+ - id: napari-tmidas._file_conversion
+   python_name: napari_tmidas._file_conversion:napari_experimental_provide_dock_widget
+   title: File converter
  readers:
  - command: napari-tmidas.get_reader
    accepts_directories: false
@@ -44,3 +47,5 @@ contributions:
    display_name: File selector
  - command: napari-tmidas._label_inspection
    display_name: Label inspector
+ - command: napari-tmidas._file_conversion
+   display_name: File converter
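The `python_name` entries above point at a module-level `napari_experimental_provide_dock_widget` hook. A minimal, hypothetical sketch of that hook pattern follows; the real widget lives in `napari_tmidas/_file_conversion.py`, which is not shown in this diff, and the class below is a made-up placeholder:

    # Hypothetical illustration of an npe1 dock-widget provider; not the plugin's actual code.
    from napari_plugin_engine import napari_hook_implementation
    from qtpy.QtWidgets import QLabel, QVBoxLayout, QWidget


    class FileConverterWidget(QWidget):
        """Placeholder stand-in for the real file-conversion widget."""

        def __init__(self, napari_viewer=None, parent=None):
            super().__init__(parent)
            self.viewer = napari_viewer
            layout = QVBoxLayout(self)
            layout.addWidget(QLabel("File converter"))


    @napari_hook_implementation
    def napari_experimental_provide_dock_widget():
        # napari resolves the python_name from napari.yaml to this hook and
        # adds the returned widget class as a dockable panel.
        return FileConverterWidget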
napari_tmidas/processing_functions/basic.py CHANGED
@@ -8,53 +8,35 @@ from napari_tmidas._registry import BatchProcessingRegistry


  @BatchProcessingRegistry.register(
-     name="Min-Max Normalization",
-     suffix="_normalized",
-     description="Normalize image values to range [0, 1] using min-max scaling",
- )
- def normalize_image(image: np.ndarray) -> np.ndarray:
-     """
-     Simple min-max normalization
-     """
-     if image.min() == image.max():
-         return np.zeros_like(image, dtype=float)
-     return (image - image.min()) / (image.max() - image.min())
-
-
- @BatchProcessingRegistry.register(
-     name="Contrast Stretch",
-     suffix="_contrast",
-     description="Stretch the contrast by clipping percentiles and rescaling",
+     name="Gamma Correction",
+     suffix="_gamma",
+     description="Apply gamma correction to the image (>1: enhance bright regions, <1: enhance dark regions)",
      parameters={
-         "low_percentile": {
-             "type": float,
-             "default": 2.0,
-             "min": 0.0,
-             "max": 49.0,
-             "description": "Low percentile to clip",
-         },
-         "high_percentile": {
+         "gamma": {
              "type": float,
-             "default": 98.0,
-             "min": 51.0,
-             "max": 100.0,
-             "description": "High percentile to clip",
+             "default": 1.0,
+             "min": 0.1,
+             "max": 10.0,
+             "description": "Gamma correction factor",
          },
      },
  )
- def contrast_stretch(
-     image: np.ndarray,
-     low_percentile: float = 2.0,
-     high_percentile: float = 98.0,
- ) -> np.ndarray:
+ def gamma_correction(image: np.ndarray, gamma: float = 1.0) -> np.ndarray:
      """
-     Stretch contrast by clipping percentiles
+     Apply gamma correction to the image
      """
-     p_low = np.percentile(image, low_percentile)
-     p_high = np.percentile(image, high_percentile)
+     # Determine maximum value based on dtype
+     max_val = (
+         np.iinfo(image.dtype).max
+         if np.issubdtype(image.dtype, np.integer)
+         else 1.0
+     )
+
+     # Normalize image to [0, 1]
+     normalized = image.astype(np.float32) / max_val
+
+     # Apply gamma correction
+     corrected = np.power(normalized, gamma)

-     # Clip and normalize
-     image_clipped = np.clip(image, p_low, p_high)
-     if p_high == p_low:
-         return np.zeros_like(image, dtype=float)
-     return (image_clipped - p_low) / (p_high - p_low)
+     # Scale back to original range and dtype
+     return (corrected * max_val).clip(0, max_val).astype(image.dtype)
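A quick sketch of what the new `gamma_correction` entry computes, applied by hand to a made-up uint8 array (same arithmetic as the function body above):

    import numpy as np

    # A tiny 8-bit test image: dark, mid and bright pixels.
    image = np.array([[0, 64], [128, 255]], dtype=np.uint8)

    # gamma < 1 lifts dark/mid tones, gamma > 1 suppresses them, because the
    # image is normalized to [0, 1] before np.power is applied.
    max_val = np.iinfo(image.dtype).max                      # 255 for uint8
    corrected = np.power(image.astype(np.float32) / max_val, 0.5)
    result = (corrected * max_val).clip(0, max_val).astype(image.dtype)

    print(result)  # e.g. [[  0 127] [180 255]] -- mid-tones are lifted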
napari_tmidas/processing_functions/skimage_filters.py CHANGED
@@ -7,6 +7,7 @@ import numpy as np
  try:
      import skimage.exposure
      import skimage.filters
+     import skimage.morphology

      SKIMAGE_AVAILABLE = True
  except ImportError:
@@ -19,49 +20,38 @@ from napari_tmidas._registry import BatchProcessingRegistry

  if SKIMAGE_AVAILABLE:

-     @BatchProcessingRegistry.register(
-         name="Adaptive Histogram Equalization",
-         suffix="_clahe",
-         description="Enhance contrast using Contrast Limited Adaptive Histogram Equalization",
-         parameters={
-             "kernel_size": {
-                 "type": int,
-                 "default": 8,
-                 "min": 4,
-                 "max": 64,
-                 "description": "Size of local region for histogram equalization",
-             },
-             "clip_limit": {
-                 "type": float,
-                 "default": 0.01,
-                 "min": 0.001,
-                 "max": 0.1,
-                 "description": "Clipping limit for contrast enhancement",
-             },
-         },
-     )
-     def adaptive_hist_eq(
-         image: np.ndarray, kernel_size: int = 8, clip_limit: float = 0.01
-     ) -> np.ndarray:
-         """
-         Apply Contrast Limited Adaptive Histogram Equalization
-         """
-         # CLAHE expects image in [0, 1] range
-         img_norm = skimage.exposure.rescale_intensity(image, out_range=(0, 1))
-         return skimage.exposure.equalize_adapthist(
-             img_norm, kernel_size=kernel_size, clip_limit=clip_limit
-         )
-
-     @BatchProcessingRegistry.register(
-         name="Edge Detection",
-         suffix="_edges",
-         description="Detect edges using Sobel filter",
-     )
-     def edge_detection(image: np.ndarray) -> np.ndarray:
-         """
-         Detect edges using Sobel filter
-         """
-         return skimage.filters.sobel(image)
+ # @BatchProcessingRegistry.register(
+ # name="Adaptive Histogram Equalization",
+ # suffix="_clahe",
+ # description="Enhance contrast using Contrast Limited Adaptive Histogram Equalization",
+ # parameters={
+ # "kernel_size": {
+ # "type": int,
+ # "default": 8,
+ # "min": 4,
+ # "max": 64,
+ # "description": "Size of local region for histogram equalization",
+ # },
+ # "clip_limit": {
+ # "type": float,
+ # "default": 0.01,
+ # "min": 0.001,
+ # "max": 0.1,
+ # "description": "Clipping limit for contrast enhancement",
+ # },
+ # },
+ # )
+ # def adaptive_hist_eq(
+ # image: np.ndarray, kernel_size: int = 8, clip_limit: float = 0.01
+ # ) -> np.ndarray:
+ # """
+ # Apply Contrast Limited Adaptive Histogram Equalization
+ # """
+ # # CLAHE expects image in [0, 1] range
+ # img_norm = skimage.exposure.rescale_intensity(image, out_range=(0, 1))
+ # return skimage.exposure.equalize_adapthist(
+ # img_norm, kernel_size=kernel_size, clip_limit=clip_limit
+ # )

      # simple otsu thresholding
      @BatchProcessingRegistry.register(
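The trailing context above opens the unchanged registration that, per the `# simple otsu thresholding` comment, presumably wraps `skimage.filters.threshold_otsu`. A generic sketch of that call on made-up data (not code from this package):

    import numpy as np
    import skimage.filters

    # Toy bimodal image: dark background plus a bright square.
    image = np.zeros((8, 8), dtype=np.uint8)
    image[2:6, 2:6] = 200

    t = skimage.filters.threshold_otsu(image)  # scalar threshold between the two modes
    binary = image > t
    print(t, binary.sum())  # 16 foreground pixels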
@@ -110,4 +100,31 @@ if SKIMAGE_AVAILABLE:
          """
          Threshold image using a fixed threshold
          """
+         # convert to 8-bit
+         image = skimage.img_as_ubyte(image)
          return image > threshold
+
+     # remove small objects
+     @BatchProcessingRegistry.register(
+         name="Remove Small Objects",
+         suffix="_rm_small",
+         description="Remove small objects from the binary image",
+         parameters={
+             "min_size": {
+                 "type": int,
+                 "default": 100,
+                 "min": 1,
+                 "max": 100000,
+                 "description": "Remove labels smaller than: ",
+             },
+         },
+     )
+     def remove_small_objects(
+         image: np.ndarray, min_size: int = 100
+     ) -> np.ndarray:
+         """
+         Remove small objects from the binary image
+         """
+         return skimage.morphology.remove_small_objects(
+             image, min_size=min_size
+         )
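A minimal sketch of the new `Remove Small Objects` step on a toy binary mask (values are made up; `skimage.morphology.remove_small_objects` also accepts labeled integer arrays):

    import numpy as np
    import skimage.morphology

    # Binary mask with a 1-pixel speck and a 4-pixel blob.
    mask = np.array(
        [
            [True,  False, False, False],
            [False, False, True,  True],
            [False, False, True,  True],
        ]
    )

    # Connected components smaller than min_size pixels are dropped;
    # only the 2x2 blob survives.
    cleaned = skimage.morphology.remove_small_objects(mask, min_size=2)
    print(cleaned.astype(int))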
napari_tmidas-0.1.3.dist-info/METADATA → napari_tmidas-0.1.4.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: napari-tmidas
- Version: 0.1.3
+ Version: 0.1.4
  Summary: Tissue Microscopy Image Data Analysis Suite
  Author: Marco Meer
  Author-email: marco.meer@pm.me
@@ -79,10 +79,13 @@ The Tissue Microscopy Image Data Analysis Suite (short: T-MIDAS), is a collectio

  ## Installation

- First install Napari in a virtual environment following the latest [Napari installation instructions](https://github.com/Napari/napari?tab=readme-ov-file#installation).
+ First install Napari in a virtual environment:

+     mamba create -y -n napari-tmidas -c conda-forge python=3.11
+     mamba activate napari-tmidas
+     python -m pip install "napari[all]"

- After you have activated the environment, you can install `napari-tmidas` via [pip]:
+ Now you can install `napari-tmidas` via [pip]:

      pip install napari-tmidas

@@ -90,31 +93,46 @@ To install the latest development version:

      pip install git+https://github.com/macromeer/napari-tmidas.git

+ ### Dependencies
+ For the File converter, we need a few additional libraries to read proprietary microscopy formats and to write OME-Zarr:
+
+     pip install nd2 readlif tiffslide pylibCZIrw ome-zarr
+
+

  ## Usage
  You can find the installed plugin here:
-
+
  ![image](https://github.com/user-attachments/assets/504db09a-d66e-49eb-90cd-3237024d9d7a)

+
+ ### File converter
+
+ You might first want to batch convert your microscopy image data. Currently, this plugin supports `.nd2`, `.lif`, `.ndpi` and `.czi`. After launching the file converter, you can scan a folder of your choice for microscopy image data. It will also detect series images, which you can preview. Start by selecting an original image in the first column of the table; this allows you to preview or convert it.
+
+ ![image](https://github.com/user-attachments/assets/e377ca71-2f30-447d-825e-d2feebf7061b)
+
+
+
  ### File inspector

  1. After opening `Plugins > T-MIDAS > File selector`, enter the path to the folder containing the images to be processed (currently supports TIF, later also ZARR). You can also filter for filename suffix.
-
+
  ![image](https://github.com/user-attachments/assets/41ecb689-9abe-4371-83b5-9c5eb37069f9)

  2. As a result, a table appears with the found images.
-
+
  ![image](https://github.com/user-attachments/assets/8360942a-be8f-49ec-bc25-385ee43bd601)

  3. Next, select a processing function, set parameters if applicable and `Start Batch Processing`.
-
+
  ![image](https://github.com/user-attachments/assets/05929660-6672-4f76-89da-4f17749ccfad)

- 4. You can click on the images in the table to show them in the viewer. For example first click on one of the `Original Files`, and then the corresponding `Processed File` to see an overlay.
-
+ 4. You can click on the images in the table to show them in the viewer. For example first click on one of the `Original Files`, and then the corresponding `Processed File` to see an overlay.
+
  ![image](https://github.com/user-attachments/assets/cfe84828-c1cc-4196-9a53-5dfb82d5bfce)

- Note that whenever you click on an `Original File` or `Processed File` in the table, it will replace the one that is currently shown in the viewer. So naturally, you'd first select the original image, and then the processed image to correctly see the image pair that you want to inspect.
+ Note that whenever you click on an `Original File` or `Processed File` in the table, it will replace the one that is currently shown in the viewer. So naturally, you'd first select the original image, and then the processed image to correctly see the image pair that you want to inspect.

  ### Label inspector
  If you have already segmented a folder full of images and now you want to maybe inspect and edit each label image, you can use the `Plugins > T-MIDAS > Label inspector`, which automatically saves your changes to the existing label image once you click the `Save Changes and Continue` button (bottom right).
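The `### Dependencies` block in the hunk above adds readers for several vendor formats plus an OME-Zarr writer. A rough sketch of how two of them fit together, independent of the plugin (file names are placeholders, and the calls reflect the public `nd2` and `ome-zarr` APIs rather than code from `_file_conversion.py`):

    import nd2
    import zarr
    from ome_zarr.io import parse_url
    from ome_zarr.writer import write_image

    # Read an ND2 acquisition into a numpy array (axis order depends on the file;
    # a plain 3-D z-stack is assumed here).
    image = nd2.imread("example.nd2")

    # Write it back out as OME-Zarr.
    store = parse_url("example.ome.zarr", mode="w").store
    root = zarr.group(store=store)
    write_image(image=image, group=root, axes="zyx")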
@@ -122,6 +140,7 @@ If you have already segmented a folder full of images and now you want to maybe
  ![image](https://github.com/user-attachments/assets/0bf8c6ae-4212-449d-8183-e91b23ba740e)


+
  ## Contributing

  Contributions are very welcome. Tests can be run with [tox], please ensure
napari_tmidas-0.1.3.dist-info/RECORD → napari_tmidas-0.1.4.dist-info/RECORD CHANGED
@@ -1,25 +1,26 @@
  napari_tmidas/__init__.py,sha256=Z9mznblUlUsRyH3d4k8SxUo4iXLMwJXURbq41QzhPpo,459
- napari_tmidas/_file_selector.py,sha256=YzjS-XIqLD8826n50KXAc0GfQeXOhvDs41QP3-bTtCU,17471
- napari_tmidas/_label_inspection.py,sha256=0icowMfyNRnaxgTIOi8KHxLXKfpfqtShIl8iVR4wJjc,4819
+ napari_tmidas/_file_conversion.py,sha256=jkoP3x-4iePkdGfv3_5XQnXNpHhn9dxLlRCB98yVAJs,54395
+ napari_tmidas/_file_selector.py,sha256=XLbqeQ4fG86gLHgmPZzrcmMTir5gpneO32KumJY8ZbM,27369
+ napari_tmidas/_label_inspection.py,sha256=5p0heCX1xCQVYDGHe_R2gPwbZpl6sXIqLBwqbZLJKqo,6983
  napari_tmidas/_reader.py,sha256=A9_hdDxtVkVGmbOsbqgnARCSvpEh7GGPo7ylzmbnu8o,2485
  napari_tmidas/_registry.py,sha256=Oz9HFJh41MKRLeKxRuc7x7yzc-OrmoTdRFnfngFU_XE,2007
  napari_tmidas/_sample_data.py,sha256=khuv1jemz_fCjqNwEKMFf83Ju0EN4S89IKydsUMmUxw,645
- napari_tmidas/_version.py,sha256=NIzzV8ZM0W-CSLuEs1weG4zPrn_-8yr1AwwI1iuS6yo,511
+ napari_tmidas/_version.py,sha256=hcPkC9vIGgfrKK6ft7ysLT7iOCjpFmCBmyKLmXiaZ1g,511
  napari_tmidas/_widget.py,sha256=u9uf9WILAwZg_InhFyjWInY4ej1TV1a59dR8Fe3vNF8,4794
  napari_tmidas/_writer.py,sha256=wbVfHFjjHdybSg37VR4lVmL-kdCkDZsUPDJ66AVLaFQ,1941
- napari_tmidas/napari.yaml,sha256=Xmui3_7pxNxOkIFRWsZWuka56d6PXLQ2rl4XvMDl2aw,1839
+ napari_tmidas/napari.yaml,sha256=phcxk8yQb9i6bFP94UBvLdyzgLPe3_Ka1Hdn7RVhKjo,2079
  napari_tmidas/_tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  napari_tmidas/_tests/test_reader.py,sha256=gN_2StATLZYUL56X27ImJTVru_qSoFiY4vtgajcx3H0,975
  napari_tmidas/_tests/test_sample_data.py,sha256=D1HU_C3hWpO3mlSW_7Z94xaYHDtxz0XUrMjQoYop9Ag,104
  napari_tmidas/_tests/test_widget.py,sha256=I_d-Cra_CTcS0QdMItg_HMphvhj0XCx81JnFyCHk9lg,2204
  napari_tmidas/_tests/test_writer.py,sha256=4_MlZM9a5So74J16_4tIOJc6pwTOw9R0-oAE_YioIx4,122
  napari_tmidas/processing_functions/__init__.py,sha256=osXY9jSgDsrwFaS6ShPHP0wGRxMuX1mHRN9EDa9l41g,1891
- napari_tmidas/processing_functions/basic.py,sha256=g7tQ25UIxA26n6GBYcHlkSjUbv_lD-7x_Sd-ZvWbzUY,1711
+ napari_tmidas/processing_functions/basic.py,sha256=m_Q1LwwmQ8Nto2eM7SbMw2o1wolbTr9ZqCnYzxEZy7I,1182
  napari_tmidas/processing_functions/scipy_filters.py,sha256=kKpDAlQQ0ZNbkt77QUWi-Bwolk6MMDvtG_bZJV3MjOo,1612
- napari_tmidas/processing_functions/skimage_filters.py,sha256=RpBywSImAQc_L_0pysA3yAJlHHfZuqu6c3vokDv5p1I,3517
- napari_tmidas-0.1.3.dist-info/LICENSE,sha256=tSjiOqj57exmEIfP2YVPCEeQf0cH49S6HheQR8IiY3g,1485
- napari_tmidas-0.1.3.dist-info/METADATA,sha256=bXMq78n-Y-vWxw1W3zC9PHkG2-4KvIqxoUDXSNV72Yk,8222
- napari_tmidas-0.1.3.dist-info/WHEEL,sha256=beeZ86-EfXScwlR_HKu4SllMC9wUEj_8Z_4FJ3egI2w,91
- napari_tmidas-0.1.3.dist-info/entry_points.txt,sha256=fbVjzbJTm4aDMIBtel1Lyqvq-CwXY7wmCOo_zJ-jtRY,60
- napari_tmidas-0.1.3.dist-info/top_level.txt,sha256=63ybdxCZ4SeT13f_Ou4TsivGV_2Gtm_pJOXToAt30_E,14
- napari_tmidas-0.1.3.dist-info/RECORD,,
+ napari_tmidas/processing_functions/skimage_filters.py,sha256=IsfMJTtd9Vwb47UlTEcLlXYv2CX6uTARV1CZCHY0HBw,4094
+ napari_tmidas-0.1.4.dist-info/LICENSE,sha256=tSjiOqj57exmEIfP2YVPCEeQf0cH49S6HheQR8IiY3g,1485
+ napari_tmidas-0.1.4.dist-info/METADATA,sha256=tL1EIRM1NIDAxry5rPObWm2F2kl0j-eGyGuPiZCXyZE,8863
+ napari_tmidas-0.1.4.dist-info/WHEEL,sha256=beeZ86-EfXScwlR_HKu4SllMC9wUEj_8Z_4FJ3egI2w,91
+ napari_tmidas-0.1.4.dist-info/entry_points.txt,sha256=fbVjzbJTm4aDMIBtel1Lyqvq-CwXY7wmCOo_zJ-jtRY,60
+ napari_tmidas-0.1.4.dist-info/top_level.txt,sha256=63ybdxCZ4SeT13f_Ou4TsivGV_2Gtm_pJOXToAt30_E,14
+ napari_tmidas-0.1.4.dist-info/RECORD,,