anemoi-datasets 0.5.16__py3-none-any.whl → 0.5.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (155)
  1. anemoi/datasets/__init__.py +4 -1
  2. anemoi/datasets/__main__.py +12 -2
  3. anemoi/datasets/_version.py +9 -4
  4. anemoi/datasets/commands/cleanup.py +17 -2
  5. anemoi/datasets/commands/compare.py +18 -2
  6. anemoi/datasets/commands/copy.py +196 -14
  7. anemoi/datasets/commands/create.py +50 -7
  8. anemoi/datasets/commands/finalise-additions.py +17 -2
  9. anemoi/datasets/commands/finalise.py +17 -2
  10. anemoi/datasets/commands/init-additions.py +17 -2
  11. anemoi/datasets/commands/init.py +16 -2
  12. anemoi/datasets/commands/inspect.py +283 -62
  13. anemoi/datasets/commands/load-additions.py +16 -2
  14. anemoi/datasets/commands/load.py +16 -2
  15. anemoi/datasets/commands/patch.py +17 -2
  16. anemoi/datasets/commands/publish.py +17 -2
  17. anemoi/datasets/commands/scan.py +31 -3
  18. anemoi/datasets/compute/recentre.py +47 -11
  19. anemoi/datasets/create/__init__.py +612 -85
  20. anemoi/datasets/create/check.py +142 -20
  21. anemoi/datasets/create/chunks.py +64 -4
  22. anemoi/datasets/create/config.py +185 -21
  23. anemoi/datasets/create/filter.py +50 -0
  24. anemoi/datasets/create/filters/__init__.py +33 -0
  25. anemoi/datasets/create/filters/empty.py +37 -0
  26. anemoi/datasets/create/filters/legacy.py +93 -0
  27. anemoi/datasets/create/filters/noop.py +37 -0
  28. anemoi/datasets/create/filters/orog_to_z.py +58 -0
  29. anemoi/datasets/create/{functions/filters → filters}/pressure_level_relative_humidity_to_specific_humidity.py +33 -10
  30. anemoi/datasets/create/{functions/filters → filters}/pressure_level_specific_humidity_to_relative_humidity.py +32 -8
  31. anemoi/datasets/create/filters/rename.py +205 -0
  32. anemoi/datasets/create/{functions/filters → filters}/rotate_winds.py +43 -28
  33. anemoi/datasets/create/{functions/filters → filters}/single_level_dewpoint_to_relative_humidity.py +32 -9
  34. anemoi/datasets/create/{functions/filters → filters}/single_level_relative_humidity_to_dewpoint.py +33 -9
  35. anemoi/datasets/create/{functions/filters → filters}/single_level_relative_humidity_to_specific_humidity.py +55 -7
  36. anemoi/datasets/create/{functions/filters → filters}/single_level_specific_humidity_to_relative_humidity.py +98 -37
  37. anemoi/datasets/create/filters/speeddir_to_uv.py +95 -0
  38. anemoi/datasets/create/{functions/filters → filters}/sum.py +24 -27
  39. anemoi/datasets/create/filters/transform.py +53 -0
  40. anemoi/datasets/create/{functions/filters → filters}/unrotate_winds.py +27 -18
  41. anemoi/datasets/create/filters/uv_to_speeddir.py +94 -0
  42. anemoi/datasets/create/{functions/filters → filters}/wz_to_w.py +51 -33
  43. anemoi/datasets/create/input/__init__.py +76 -5
  44. anemoi/datasets/create/input/action.py +149 -13
  45. anemoi/datasets/create/input/concat.py +81 -10
  46. anemoi/datasets/create/input/context.py +39 -4
  47. anemoi/datasets/create/input/data_sources.py +72 -6
  48. anemoi/datasets/create/input/empty.py +21 -3
  49. anemoi/datasets/create/input/filter.py +60 -12
  50. anemoi/datasets/create/input/function.py +154 -37
  51. anemoi/datasets/create/input/join.py +86 -14
  52. anemoi/datasets/create/input/misc.py +67 -17
  53. anemoi/datasets/create/input/pipe.py +33 -6
  54. anemoi/datasets/create/input/repeated_dates.py +189 -41
  55. anemoi/datasets/create/input/result.py +202 -87
  56. anemoi/datasets/create/input/step.py +119 -22
  57. anemoi/datasets/create/input/template.py +100 -13
  58. anemoi/datasets/create/input/trace.py +62 -7
  59. anemoi/datasets/create/patch.py +52 -4
  60. anemoi/datasets/create/persistent.py +134 -17
  61. anemoi/datasets/create/size.py +15 -1
  62. anemoi/datasets/create/source.py +51 -0
  63. anemoi/datasets/create/sources/__init__.py +36 -0
  64. anemoi/datasets/create/{functions/sources → sources}/accumulations.py +296 -30
  65. anemoi/datasets/create/{functions/sources → sources}/constants.py +27 -2
  66. anemoi/datasets/create/{functions/sources → sources}/eccc_fstd.py +7 -3
  67. anemoi/datasets/create/sources/empty.py +37 -0
  68. anemoi/datasets/create/{functions/sources → sources}/forcings.py +25 -1
  69. anemoi/datasets/create/sources/grib.py +297 -0
  70. anemoi/datasets/create/{functions/sources → sources}/hindcasts.py +38 -4
  71. anemoi/datasets/create/sources/legacy.py +93 -0
  72. anemoi/datasets/create/{functions/sources → sources}/mars.py +168 -20
  73. anemoi/datasets/create/sources/netcdf.py +42 -0
  74. anemoi/datasets/create/sources/opendap.py +43 -0
  75. anemoi/datasets/create/{functions/sources/__init__.py → sources/patterns.py} +35 -4
  76. anemoi/datasets/create/sources/recentre.py +150 -0
  77. anemoi/datasets/create/{functions/sources → sources}/source.py +27 -5
  78. anemoi/datasets/create/{functions/sources → sources}/tendencies.py +64 -7
  79. anemoi/datasets/create/sources/xarray.py +92 -0
  80. anemoi/datasets/create/sources/xarray_kerchunk.py +36 -0
  81. anemoi/datasets/create/sources/xarray_support/README.md +1 -0
  82. anemoi/datasets/create/{functions/sources/xarray → sources/xarray_support}/__init__.py +109 -8
  83. anemoi/datasets/create/sources/xarray_support/coordinates.py +442 -0
  84. anemoi/datasets/create/{functions/sources/xarray → sources/xarray_support}/field.py +94 -16
  85. anemoi/datasets/create/{functions/sources/xarray → sources/xarray_support}/fieldlist.py +90 -25
  86. anemoi/datasets/create/sources/xarray_support/flavour.py +1036 -0
  87. anemoi/datasets/create/{functions/sources/xarray → sources/xarray_support}/grid.py +92 -31
  88. anemoi/datasets/create/sources/xarray_support/metadata.py +395 -0
  89. anemoi/datasets/create/sources/xarray_support/patch.py +91 -0
  90. anemoi/datasets/create/sources/xarray_support/time.py +391 -0
  91. anemoi/datasets/create/sources/xarray_support/variable.py +331 -0
  92. anemoi/datasets/create/sources/xarray_zarr.py +41 -0
  93. anemoi/datasets/create/{functions/sources → sources}/zenodo.py +34 -5
  94. anemoi/datasets/create/statistics/__init__.py +233 -44
  95. anemoi/datasets/create/statistics/summary.py +52 -6
  96. anemoi/datasets/create/testing.py +76 -0
  97. anemoi/datasets/create/{functions/filters/noop.py → typing.py} +6 -3
  98. anemoi/datasets/create/utils.py +97 -6
  99. anemoi/datasets/create/writer.py +26 -4
  100. anemoi/datasets/create/zarr.py +170 -23
  101. anemoi/datasets/data/__init__.py +51 -4
  102. anemoi/datasets/data/complement.py +191 -40
  103. anemoi/datasets/data/concat.py +141 -16
  104. anemoi/datasets/data/dataset.py +558 -62
  105. anemoi/datasets/data/debug.py +197 -26
  106. anemoi/datasets/data/ensemble.py +93 -8
  107. anemoi/datasets/data/fill_missing.py +165 -18
  108. anemoi/datasets/data/forwards.py +428 -56
  109. anemoi/datasets/data/grids.py +323 -97
  110. anemoi/datasets/data/indexing.py +112 -19
  111. anemoi/datasets/data/interpolate.py +92 -12
  112. anemoi/datasets/data/join.py +158 -19
  113. anemoi/datasets/data/masked.py +129 -15
  114. anemoi/datasets/data/merge.py +137 -23
  115. anemoi/datasets/data/misc.py +172 -16
  116. anemoi/datasets/data/missing.py +233 -29
  117. anemoi/datasets/data/rescale.py +111 -10
  118. anemoi/datasets/data/select.py +168 -26
  119. anemoi/datasets/data/statistics.py +67 -6
  120. anemoi/datasets/data/stores.py +149 -64
  121. anemoi/datasets/data/subset.py +159 -25
  122. anemoi/datasets/data/unchecked.py +168 -57
  123. anemoi/datasets/data/xy.py +168 -25
  124. anemoi/datasets/dates/__init__.py +191 -16
  125. anemoi/datasets/dates/groups.py +189 -47
  126. anemoi/datasets/grids.py +270 -31
  127. anemoi/datasets/testing.py +28 -1
  128. {anemoi_datasets-0.5.16.dist-info → anemoi_datasets-0.5.18.dist-info}/METADATA +9 -6
  129. anemoi_datasets-0.5.18.dist-info/RECORD +137 -0
  130. {anemoi_datasets-0.5.16.dist-info → anemoi_datasets-0.5.18.dist-info}/WHEEL +1 -1
  131. anemoi/datasets/create/functions/__init__.py +0 -66
  132. anemoi/datasets/create/functions/filters/__init__.py +0 -9
  133. anemoi/datasets/create/functions/filters/empty.py +0 -17
  134. anemoi/datasets/create/functions/filters/orog_to_z.py +0 -58
  135. anemoi/datasets/create/functions/filters/rename.py +0 -79
  136. anemoi/datasets/create/functions/filters/speeddir_to_uv.py +0 -78
  137. anemoi/datasets/create/functions/filters/uv_to_speeddir.py +0 -56
  138. anemoi/datasets/create/functions/sources/empty.py +0 -15
  139. anemoi/datasets/create/functions/sources/grib.py +0 -150
  140. anemoi/datasets/create/functions/sources/netcdf.py +0 -15
  141. anemoi/datasets/create/functions/sources/opendap.py +0 -15
  142. anemoi/datasets/create/functions/sources/recentre.py +0 -60
  143. anemoi/datasets/create/functions/sources/xarray/coordinates.py +0 -255
  144. anemoi/datasets/create/functions/sources/xarray/flavour.py +0 -472
  145. anemoi/datasets/create/functions/sources/xarray/metadata.py +0 -148
  146. anemoi/datasets/create/functions/sources/xarray/patch.py +0 -44
  147. anemoi/datasets/create/functions/sources/xarray/time.py +0 -177
  148. anemoi/datasets/create/functions/sources/xarray/variable.py +0 -188
  149. anemoi/datasets/create/functions/sources/xarray_kerchunk.py +0 -42
  150. anemoi/datasets/create/functions/sources/xarray_zarr.py +0 -15
  151. anemoi/datasets/utils/fields.py +0 -47
  152. anemoi_datasets-0.5.16.dist-info/RECORD +0 -129
  153. {anemoi_datasets-0.5.16.dist-info → anemoi_datasets-0.5.18.dist-info}/entry_points.txt +0 -0
  154. {anemoi_datasets-0.5.16.dist-info → anemoi_datasets-0.5.18.dist-info/licenses}/LICENSE +0 -0
  155. {anemoi_datasets-0.5.16.dist-info → anemoi_datasets-0.5.18.dist-info}/top_level.txt +0 -0
@@ -10,11 +10,21 @@
10
10
 
11
11
  import logging
12
12
  from functools import cached_property
13
+ from typing import Any
14
+ from typing import Dict
15
+ from typing import List
16
+ from typing import Optional
17
+ from typing import Tuple
18
+ from typing import Union
13
19
 
14
20
  import numpy as np
21
+ from numpy.typing import NDArray
15
22
 
16
23
  from ..grids import cropping_mask
17
24
  from .dataset import Dataset
25
+ from .dataset import FullIndex
26
+ from .dataset import Shape
27
+ from .dataset import TupleIndex
18
28
  from .debug import Node
19
29
  from .debug import debug_indexing
20
30
  from .forwards import Forwards
@@ -27,7 +37,18 @@ LOG = logging.getLogger(__name__)
27
37
 
28
38
 
29
39
  class Masked(Forwards):
30
- def __init__(self, forward, mask):
40
+ """A class to represent a masked dataset."""
41
+
42
+ def __init__(self, forward: Dataset, mask: NDArray[np.bool_]) -> None:
43
+ """Initialize the Masked class.
44
+
45
+ Parameters
46
+ ----------
47
+ forward : Dataset
48
+ The dataset to be masked.
49
+ mask : NDArray[np.bool_]
50
+ The mask array.
51
+ """
31
52
  super().__init__(forward)
32
53
  assert len(forward.shape) == 4, "Grids must be 1D for now"
33
54
  self.mask = mask
@@ -36,19 +57,34 @@ class Masked(Forwards):
36
57
  self.mask_name = f"{self.__class__.__name__.lower()}_mask"
37
58
 
38
59
  @cached_property
39
- def shape(self):
60
+ def shape(self) -> Shape:
61
+ """Get the shape of the masked dataset."""
40
62
  return self.forward.shape[:-1] + (np.count_nonzero(self.mask),)
41
63
 
42
64
  @cached_property
43
- def latitudes(self):
65
+ def latitudes(self) -> NDArray[Any]:
66
+ """Get the masked latitudes."""
44
67
  return self.forward.latitudes[self.mask]
45
68
 
46
69
  @cached_property
47
- def longitudes(self):
70
+ def longitudes(self) -> NDArray[Any]:
71
+ """Get the masked longitudes."""
48
72
  return self.forward.longitudes[self.mask]
49
73
 
50
74
  @debug_indexing
51
- def __getitem__(self, index):
75
+ def __getitem__(self, index: FullIndex) -> NDArray[Any]:
76
+ """Get the masked data at the specified index.
77
+
78
+ Parameters
79
+ ----------
80
+ index : FullIndex
81
+ The index to retrieve data from.
82
+
83
+ Returns
84
+ -------
85
+ NDArray[Any]
86
+ The masked data at the specified index.
87
+ """
52
88
  if isinstance(index, tuple):
53
89
  return self._get_tuple(index)
54
90
 
@@ -60,7 +96,19 @@ class Masked(Forwards):
60
96
 
61
97
  @debug_indexing
62
98
  @expand_list_indexing
63
- def _get_tuple(self, index):
99
+ def _get_tuple(self, index: TupleIndex) -> NDArray[Any]:
100
+ """Get the masked data for a tuple index.
101
+
102
+ Parameters
103
+ ----------
104
+ index : TupleIndex
105
+ The tuple index to retrieve data from.
106
+
107
+ Returns
108
+ -------
109
+ NDArray[Any]
110
+ The masked data for the tuple index.
111
+ """
64
112
  index, changes = index_to_slices(index, self.shape)
65
113
  index, previous = update_tuple(index, self.axis, slice(None))
66
114
  result = self.forward[index]
@@ -69,14 +117,35 @@ class Masked(Forwards):
69
117
  result = apply_index_to_slices_changes(result, changes)
70
118
  return result
71
119
 
72
- def collect_supporting_arrays(self, collected, *path):
120
+ def collect_supporting_arrays(self, collected: List[Tuple], *path: Any) -> None:
121
+ """Collect supporting arrays.
122
+
123
+ Parameters
124
+ ----------
125
+ collected : List[Tuple]
126
+ The list to collect supporting arrays into.
127
+ path : Any
128
+ Additional path arguments.
129
+ """
73
130
  super().collect_supporting_arrays(collected, *path)
74
131
  collected.append((path, self.mask_name, self.mask))
75
132
 
76
133
 
77
134
  class Thinning(Masked):
78
-
79
- def __init__(self, forward, thinning, method):
135
+ """A class to represent a thinned dataset."""
136
+
137
+ def __init__(self, forward: Dataset, thinning: Optional[int], method: str) -> None:
138
+ """Initialize the Thinning class.
139
+
140
+ Parameters
141
+ ----------
142
+ forward : Dataset
143
+ The dataset to be thinned.
144
+ thinning : Optional[int]
145
+ The thinning factor.
146
+ method : str
147
+ The thinning method.
148
+ """
80
149
  self.thinning = thinning
81
150
  self.method = method
82
151
 
@@ -105,20 +174,51 @@ class Thinning(Masked):
105
174
  super().__init__(forward, mask)
106
175
 
107
176
  def mutate(self) -> Dataset:
177
+ """Mutate the dataset.
178
+
179
+ Returns
180
+ -------
181
+ Dataset
182
+ The mutated dataset.
183
+ """
108
184
  if self.thinning is None:
109
185
  return self.forward.mutate()
110
186
  return super().mutate()
111
187
 
112
- def tree(self):
188
+ def tree(self) -> Node:
189
+ """Get the tree representation of the dataset.
190
+
191
+ Returns
192
+ -------
193
+ Node
194
+ The tree representation of the dataset.
195
+ """
113
196
  return Node(self, [self.forward.tree()], thinning=self.thinning, method=self.method)
114
197
 
115
- def subclass_metadata_specific(self):
198
+ def forwards_subclass_metadata_specific(self) -> Dict[str, Any]:
199
+ """Get the metadata specific to the Thinning subclass.
200
+
201
+ Returns
202
+ -------
203
+ Dict[str, Any]
204
+ The metadata specific to the Thinning subclass.
205
+ """
116
206
  return dict(thinning=self.thinning, method=self.method)
117
207
 
118
208
 
119
209
  class Cropping(Masked):
120
-
121
- def __init__(self, forward, area):
210
+ """A class to represent a cropped dataset."""
211
+
212
+ def __init__(self, forward: Dataset, area: Union[Dataset, Tuple[float, float, float, float]]) -> None:
213
+ """Initialize the Cropping class.
214
+
215
+ Parameters
216
+ ----------
217
+ forward : Dataset
218
+ The dataset to be cropped.
219
+ area : Union[Dataset, Tuple[float, float, float, float]]
220
+ The cropping area.
221
+ """
122
222
  from ..data import open_dataset
123
223
 
124
224
  area = area if isinstance(area, (list, tuple)) else open_dataset(area)
@@ -135,8 +235,22 @@ class Cropping(Masked):
135
235
 
136
236
  super().__init__(forward, mask)
137
237
 
138
- def tree(self):
238
+ def tree(self) -> Node:
239
+ """Get the tree representation of the dataset.
240
+
241
+ Returns
242
+ -------
243
+ Node
244
+ The tree representation of the dataset.
245
+ """
139
246
  return Node(self, [self.forward.tree()], area=self.area)
140
247
 
141
- def subclass_metadata_specific(self):
248
+ def forwards_subclass_metadata_specific(self) -> Dict[str, Any]:
249
+ """Get the metadata specific to the Cropping subclass.
250
+
251
+ Returns
252
+ -------
253
+ Dict[str, Any]
254
+ The metadata specific to the Cropping subclass.
255
+ """
142
256
  return dict(area=self.area)
@@ -8,12 +8,22 @@
8
8
  # nor does it submit to any jurisdiction.
9
9
 
10
10
 
11
+ import datetime
11
12
  import logging
12
13
  from functools import cached_property
14
+ from typing import Any
15
+ from typing import Dict
16
+ from typing import List
17
+ from typing import Set
18
+ from typing import Tuple
13
19
 
14
20
  import numpy as np
21
+ from numpy.typing import NDArray
15
22
 
16
23
  from . import MissingDateError
24
+ from .dataset import Dataset
25
+ from .dataset import FullIndex
26
+ from .dataset import TupleIndex
17
27
  from .debug import Node
18
28
  from .debug import debug_indexing
19
29
  from .forwards import Combined
@@ -28,14 +38,18 @@ LOG = logging.getLogger(__name__)
28
38
 
29
39
 
30
40
  class Merge(Combined):
31
-
32
- # d0 d2 d4 d6 ...
33
- # d1 d3 d5 d7 ...
34
-
35
- # gives
36
- # d0 d1 d2 d3 ...
37
-
38
- def __init__(self, datasets, allow_gaps_in_dates=False):
41
+ """A class to merge multiple datasets along the dates axis, handling gaps in dates if allowed."""
42
+
43
+ def __init__(self, datasets: List[Dataset], allow_gaps_in_dates: bool = False) -> None:
44
+ """Initialize the Merge object.
45
+
46
+ Parameters
47
+ ----------
48
+ datasets : List[Dataset]
49
+ List of datasets to merge.
50
+ allow_gaps_in_dates : bool, optional
51
+ Whether to allow gaps in dates. Defaults to False.
52
+ """
39
53
  super().__init__(datasets)
40
54
 
41
55
  self.allow_gaps_in_dates = allow_gaps_in_dates
@@ -91,23 +105,33 @@ class Merge(Combined):
91
105
 
92
106
  self._dates = np.array(_dates, dtype="datetime64[s]")
93
107
  self._indices = np.array(indices)
94
- self._frequency = frequency # .astype(object)
108
+ self._frequency = frequency.astype(object)
95
109
 
96
- def __len__(self):
110
+ def __len__(self) -> int:
111
+ """Get the number of dates in the merged dataset.
112
+
113
+ Returns
114
+ -------
115
+ int
116
+ Number of dates.
117
+ """
97
118
  return len(self._dates)
98
119
 
99
120
  @property
100
- def dates(self):
121
+ def dates(self) -> NDArray[np.datetime64]:
122
+ """Get the dates of the merged dataset."""
101
123
  return self._dates
102
124
 
103
125
  @property
104
- def frequency(self):
126
+ def frequency(self) -> datetime.timedelta:
127
+ """Get the frequency of the dates in the merged dataset."""
105
128
  return self._frequency
106
129
 
107
130
  @cached_property
108
- def missing(self):
131
+ def missing(self) -> Set[int]:
132
+ """Get the indices of missing dates in the merged dataset."""
109
133
  # TODO: optimize
110
- result = set()
134
+ result: Set[int] = set()
111
135
 
112
136
  for i, (dataset, row) in enumerate(self._indices):
113
137
  if dataset == self._missing_index:
@@ -119,26 +143,79 @@ class Merge(Combined):
119
143
 
120
144
  return result
121
145
 
122
- def check_same_lengths(self, d1, d2):
146
+ def check_same_lengths(self, d1: Dataset, d2: Dataset) -> None:
147
+ """Check if the lengths of two datasets are the same. (Disabled for merging).
148
+
149
+ Parameters
150
+ ----------
151
+ d1 : Dataset
152
+ First dataset.
153
+ d2 : Dataset
154
+ Second dataset.
155
+ """
123
156
  # Turned off because we are concatenating along the first axis
124
157
  pass
125
158
 
126
- def check_same_dates(self, d1, d2):
159
+ def check_same_dates(self, d1: Dataset, d2: Dataset) -> None:
160
+ """Check if the dates of two datasets are the same. (Disabled for merging).
161
+
162
+ Parameters
163
+ ----------
164
+ d1 : Dataset
165
+ First dataset.
166
+ d2 : Dataset
167
+ Second dataset.
168
+ """
127
169
  # Turned off because we are concatenating along the dates axis
128
170
  pass
129
171
 
130
- def check_compatibility(self, d1, d2):
172
+ def check_compatibility(self, d1: Dataset, d2: Dataset) -> None:
173
+ """Check if two datasets are compatible for merging.
174
+
175
+ Parameters
176
+ ----------
177
+ d1 : Dataset
178
+ First dataset.
179
+ d2 : Dataset
180
+ Second dataset.
181
+ """
131
182
  super().check_compatibility(d1, d2)
132
183
  self.check_same_sub_shapes(d1, d2, drop_axis=0)
133
184
 
134
- def tree(self):
185
+ def tree(self) -> Node:
186
+ """Get the tree representation of the merged dataset.
187
+
188
+ Returns
189
+ -------
190
+ Node
191
+ Tree representation of the merged dataset.
192
+ """
135
193
  return Node(self, [d.tree() for d in self.datasets], allow_gaps_in_dates=self.allow_gaps_in_dates)
136
194
 
137
- def metadata_specific(self):
195
+ def metadata_specific(self) -> Dict[str, Any]:
196
+ """Get the specific metadata for the merged dataset.
197
+
198
+ Returns
199
+ -------
200
+ Dict[str, Any]
201
+ Specific metadata.
202
+ """
138
203
  return {"allow_gaps_in_dates": self.allow_gaps_in_dates}
139
204
 
140
205
  @debug_indexing
141
- def __getitem__(self, n):
206
+ def __getitem__(self, n: FullIndex) -> NDArray[Any]:
207
+ """Get the item at the specified index.
208
+
209
+ Parameters
210
+ ----------
211
+ n : FullIndex
212
+ Index to retrieve.
213
+
214
+ Returns
215
+ -------
216
+ NDArray[Any]
217
+ Retrieved item.
218
+ """
142
219
  if isinstance(n, tuple):
143
220
  return self._get_tuple(n)
144
221
 
@@ -154,18 +231,55 @@ class Merge(Combined):
154
231
 
155
232
  @debug_indexing
156
233
  @expand_list_indexing
157
- def _get_tuple(self, index):
234
+ def _get_tuple(self, index: TupleIndex) -> NDArray[Any]:
235
+ """Get the item at the specified tuple index.
236
+
237
+ Parameters
238
+ ----------
239
+ index : TupleIndex
240
+ Tuple index to retrieve.
241
+
242
+ Returns
243
+ -------
244
+ NDArray[Any]
245
+ Retrieved item.
246
+ """
158
247
  index, changes = index_to_slices(index, self.shape)
159
248
  index, previous = update_tuple(index, 0, slice(None))
160
249
  result = self._get_slice(previous)
161
250
  return apply_index_to_slices_changes(result[index], changes)
162
251
 
163
- def _get_slice(self, s):
252
+ def _get_slice(self, s: slice) -> NDArray[Any]:
253
+ """Get the items in the specified slice.
254
+
255
+ Parameters
256
+ ----------
257
+ s : slice
258
+ Slice to retrieve.
259
+
260
+ Returns
261
+ -------
262
+ NDArray[Any]
263
+ Retrieved items.
264
+ """
164
265
  return np.stack([self[i] for i in range(*s.indices(self._len))])
165
266
 
166
267
 
167
- def merge_factory(args, kwargs):
268
+ def merge_factory(args: Tuple, kwargs: Dict[str, Any]) -> Dataset:
269
+ """Factory function to create a merged dataset.
270
+
271
+ Parameters
272
+ ----------
273
+ args : Tuple
274
+ Positional arguments.
275
+ kwargs : Dict[str, Any]
276
+ Keyword arguments.
168
277
 
278
+ Returns
279
+ -------
280
+ Dataset
281
+ Merged dataset.
282
+ """
169
283
  datasets = kwargs.pop("merge")
170
284
 
171
285
  assert isinstance(datasets, (list, tuple))