ssb-sgis 1.0.1__py3-none-any.whl → 1.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. sgis/__init__.py +107 -121
  2. sgis/exceptions.py +5 -3
  3. sgis/geopandas_tools/__init__.py +1 -0
  4. sgis/geopandas_tools/bounds.py +86 -47
  5. sgis/geopandas_tools/buffer_dissolve_explode.py +62 -39
  6. sgis/geopandas_tools/centerlines.py +53 -44
  7. sgis/geopandas_tools/cleaning.py +87 -104
  8. sgis/geopandas_tools/conversion.py +164 -107
  9. sgis/geopandas_tools/duplicates.py +33 -19
  10. sgis/geopandas_tools/general.py +84 -52
  11. sgis/geopandas_tools/geometry_types.py +24 -10
  12. sgis/geopandas_tools/neighbors.py +23 -11
  13. sgis/geopandas_tools/overlay.py +136 -53
  14. sgis/geopandas_tools/point_operations.py +11 -10
  15. sgis/geopandas_tools/polygon_operations.py +53 -61
  16. sgis/geopandas_tools/polygons_as_rings.py +121 -78
  17. sgis/geopandas_tools/sfilter.py +17 -17
  18. sgis/helpers.py +116 -58
  19. sgis/io/dapla_functions.py +32 -23
  20. sgis/io/opener.py +13 -6
  21. sgis/io/read_parquet.py +2 -2
  22. sgis/maps/examine.py +55 -28
  23. sgis/maps/explore.py +471 -112
  24. sgis/maps/httpserver.py +12 -12
  25. sgis/maps/legend.py +285 -134
  26. sgis/maps/map.py +248 -129
  27. sgis/maps/maps.py +123 -119
  28. sgis/maps/thematicmap.py +260 -94
  29. sgis/maps/tilesources.py +3 -8
  30. sgis/networkanalysis/_get_route.py +5 -4
  31. sgis/networkanalysis/_od_cost_matrix.py +44 -1
  32. sgis/networkanalysis/_points.py +10 -4
  33. sgis/networkanalysis/_service_area.py +5 -2
  34. sgis/networkanalysis/closing_network_holes.py +22 -64
  35. sgis/networkanalysis/cutting_lines.py +58 -46
  36. sgis/networkanalysis/directednetwork.py +16 -8
  37. sgis/networkanalysis/finding_isolated_networks.py +6 -5
  38. sgis/networkanalysis/network.py +15 -13
  39. sgis/networkanalysis/networkanalysis.py +79 -61
  40. sgis/networkanalysis/networkanalysisrules.py +21 -17
  41. sgis/networkanalysis/nodes.py +2 -3
  42. sgis/networkanalysis/traveling_salesman.py +6 -3
  43. sgis/parallel/parallel.py +372 -142
  44. sgis/raster/base.py +9 -3
  45. sgis/raster/cube.py +331 -213
  46. sgis/raster/cubebase.py +15 -29
  47. sgis/raster/image_collection.py +2560 -0
  48. sgis/raster/indices.py +17 -12
  49. sgis/raster/raster.py +356 -275
  50. sgis/raster/sentinel_config.py +104 -0
  51. sgis/raster/zonal.py +38 -14
  52. {ssb_sgis-1.0.1.dist-info → ssb_sgis-1.0.3.dist-info}/LICENSE +1 -1
  53. {ssb_sgis-1.0.1.dist-info → ssb_sgis-1.0.3.dist-info}/METADATA +87 -16
  54. ssb_sgis-1.0.3.dist-info/RECORD +61 -0
  55. {ssb_sgis-1.0.1.dist-info → ssb_sgis-1.0.3.dist-info}/WHEEL +1 -1
  56. sgis/raster/bands.py +0 -48
  57. sgis/raster/gradient.py +0 -78
  58. sgis/raster/methods_as_functions.py +0 -124
  59. sgis/raster/torchgeo.py +0 -150
  60. ssb_sgis-1.0.1.dist-info/RECORD +0 -63
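
Note on the largest change in this release: sgis/raster/image_collection.py (+2560) introduces an ImageCollection class, and DataCube.__init__ in sgis/raster/cube.py now warns "This class is deprecated in favor of ImageCollection". A related behavioral change is that the path filtering previously done in DataCube.from_root (contains, endswith, regex, raster_type.filename_regex) now lives in DataCube.from_paths as the contains/endswith/filename_regex/bands arguments. The sketch below reproduces that new filter chain outside of sgis, with hypothetical file names, to show what each argument matches; it is an illustration of the added logic, not the package's own API.

# Standalone reproduction of the filter chain added in DataCube.from_paths
# (paths, contains, endswith, filename_regex and bands are hypothetical examples).
import re
from pathlib import Path

paths = [
    "data/S2A_T32VNM_20230601_B02_10m.tif",
    "data/S2A_T32VNM_20230601_B03_10m.tif",
    "data/readme.txt",
]
contains = "T32VNM"  # substring that must occur in the path
endswith = ".tif"  # required file suffix
filename_regex = r"B\d{2}"  # matched against the file name only
bands = ["B02"]  # band ids; a path is kept if any id occurs in it

if contains:
    paths = [path for path in paths if contains in path]
if endswith:
    paths = [path for path in paths if path.endswith(endswith)]
if filename_regex:
    compiled = re.compile(filename_regex, re.VERBOSE)
    paths = [path for path in paths if re.search(compiled, Path(path).name)]
if bands:
    paths = [path for path in paths if any(band in str(path) for band in bands)]

print(paths)  # ['data/S2A_T32VNM_20230601_B02_10m.tif']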
sgis/raster/cube.py CHANGED
@@ -1,11 +1,16 @@
  import functools
  import itertools
  import multiprocessing
- import os
  import re
- from copy import copy, deepcopy
+ import warnings
+ from collections.abc import Callable
+ from collections.abc import Iterable
+ from collections.abc import Iterator
+ from copy import copy
+ from copy import deepcopy
  from pathlib import Path
- from typing import Any, Callable, Iterable, Optional, Sequence
+ from typing import Any
+ from typing import ClassVar

  import geopandas as gpd
  import numpy as np
@@ -13,42 +18,41 @@ import pandas as pd
  import pyproj
  import rasterio
  import shapely
- from geopandas import GeoDataFrame, GeoSeries
- from pandas import DataFrame, Series
- from pandas.api.types import is_dict_like, is_list_like
+ from geopandas import GeoDataFrame
+ from geopandas import GeoSeries
+ from pandas import DataFrame
+ from pandas import Series
+ from pandas.api.types import is_dict_like
+ from pandas.api.types import is_list_like
  from rasterio import merge as rasterio_merge

-
  try:
      import xarray as xr
-     from rioxarray.merge import merge_arrays
      from xarray import Dataset
  except ImportError:

      class Dataset:
-         pass
+         """Placeholder."""


- from rtree.index import Index, Property
+ from rtree.index import Index
+ from rtree.index import Property
  from shapely import Geometry
  from typing_extensions import Self  # TODO: import from typing when Python 3.11

- from ..geopandas_tools.bounds import get_total_bounds, make_grid
- from ..geopandas_tools.conversion import (
-     crs_to_string,
-     is_bbox_like,
-     to_bbox,
-     to_shapely,
- )
+ from ..geopandas_tools.bounds import make_grid
+ from ..geopandas_tools.conversion import is_bbox_like
+ from ..geopandas_tools.conversion import to_bbox
+ from ..geopandas_tools.conversion import to_shapely
  from ..geopandas_tools.general import get_common_crs
  from ..geopandas_tools.overlay import clean_overlay
- from ..helpers import dict_zip_intersection, get_all_files, get_numpy_func
+ from ..helpers import get_all_files
+ from ..helpers import get_numpy_func
  from ..io._is_dapla import is_dapla
  from ..io.opener import opener
  from ..parallel.parallel import Parallel
  from .raster import Raster

-
  try:
      from torchgeo.datasets.geo import RasterDataset
      from torchgeo.datasets.utils import BoundingBox
@@ -57,13 +61,15 @@ except ImportError:
      class BoundingBox:
          """Placeholder."""

-         def __init__(self, *args, **kwargs):
+         def __init__(self, *args, **kwargs) -> None:
+             """Placeholder."""
              raise ImportError("missing optional dependency 'torchgeo'")

      class RasterDataset:
          """Placeholder."""

-         def __init__(self, *args, **kwargs):
+         def __init__(self, *args, **kwargs) -> None:
+             """Placeholder."""
              raise ImportError("missing optional dependency 'torchgeo'")


@@ -76,7 +82,7 @@ except ImportError:
          """Placeholder."""

          class Tensor:
-             pass
+             """Placeholder to reference torch.Tensor."""


  try:
@@ -85,37 +91,36 @@ except ImportError:
      pass

  try:
-     from dapla import FileClient, write_pandas
+     from dapla import FileClient
+     from dapla import write_pandas
  except ImportError:
      pass

- from .bands import Sentinel2
- from .base import ALLOWED_KEYS, NESSECARY_META, get_index_mapper
- from .cubebase import _from_gdf_func, _method_as_func, _raster_from_path, _write_func
- from .indices import get_raster_pairs, index_calc_pair
- from .zonal import make_geometry_iterrows, prepare_zonal, zonal_func, zonal_post
-
-
- class DataCube:
-     """Experimental.
+ from .base import ALLOWED_KEYS
+ from .base import NESSECARY_META
+ from .base import get_index_mapper
+ from .cubebase import _from_gdf_func
+ from .cubebase import _method_as_func
+ from .cubebase import _raster_from_path
+ from .cubebase import _write_func
+ from .indices import get_raster_pairs
+ from .indices import index_calc_pair
+ from .zonal import _make_geometry_iterrows
+ from .zonal import _prepare_zonal
+ from .zonal import _zonal_func
+ from .zonal import _zonal_post

-     Examples
-     --------
+ TORCHGEO_RETURN_TYPE = dict[str, torch.Tensor | pyproj.CRS | BoundingBox]

-     >>> cube = sg.DataCube.from_root(...)
-     >>> clipped = cube.clip(mask).merge(by="date")
-     >>>
-     """

-     CUBE_DF_NAME = "cube_df.parquet"
+ class DataCube:
+     """Experimental."""

-     CANON_RASTER_TYPES = {
-         "Raster": Raster,
-         "Sentinel2": Sentinel2,
-     }
+     CUBE_DF_NAME: ClassVar[str] = "cube_df.parquet"

-     separate_files = True
-     is_image = True
+     separate_files: ClassVar[bool] = True
+     is_image: ClassVar[bool] = True
+     date_format: ClassVar[str | None] = None

      def __init__(
          self,
@@ -124,8 +129,22 @@ class DataCube:
          res: int | None = None,
          nodata: int | None = None,
          copy: bool = False,
-         parallelizer: Optional[Parallel] = None,
+         parallelizer: Parallel | None = None,
      ) -> None:
+         """Initialize a DataCube instance with optional Raster data.
+
+         Args:
+             data: Iterable of Raster objects or a single DataCube to copy data from.
+             crs: Coordinate reference system to be applied to the images.
+             res: Spatial resolution of the images, applied uniformly to all Rasters.
+             nodata: Nodata value to unify across all Rasters within the cube.
+             copy: If True, makes deep copies of Rasters provided.
+             parallelizer: sgis.Parallel instance to handle concurrent operations.
+         """
+         warnings.warn(
+             "This class is deprecated in favor of ImageCollection", stacklevel=1
+         )
+
          self._arrays = None
          self._res = res
          self.parallelizer = parallelizer
@@ -190,19 +209,39 @@ class DataCube:
          root: str | Path,
          *,
          res: int | None = None,
-         raster_type: Raster = Raster,
          check_for_df: bool = True,
          contains: str | None = None,
          endswith: str = ".tif",
-         regex: str | None = None,
-         parallelizer: Optional[Parallel] = None,
+         bands: str | list[str] | None = None,
+         filename_regex: str | None = None,
+         parallelizer: Parallel | None = None,
          file_system=None,
          **kwargs,
-     ):
-         kwargs = {
-             "raster_type": raster_type,
-             "res": res,
-         } | kwargs
+     ) -> "DataCube":
+         """Construct a DataCube by searching for files starting from a root directory.
+
+         Args:
+             root: Root directory path to search for raster image files.
+             res: Resolution to unify the data within the cube.
+             check_for_df: Check for a parquet file in the root directory
+                 that holds metadata for the files in the directory.
+             contains: Filter files containing specific substrings.
+             endswith: Filter files that end with specific substrings.
+             bands: One or more band ids to keep.
+             filename_regex: Regular expression to match file names
+                 and attributes (date, band, tile, resolution).
+             parallelizer: sgis.Parallel instance for concurrent file processing.
+             file_system: File system to use for file operations, used in GCS environment.
+             **kwargs: Additional keyword arguments to pass to 'from_path' method.
+
+         Returns:
+             An instance of DataCube containing the raster data from specified paths.
+         """
+         kwargs["res"] = res
+         kwargs["filename_regex"] = filename_regex
+         kwargs["contains"] = contains
+         kwargs["bands"] = bands
+         kwargs["endswith"] = endswith

          if is_dapla():
              if file_system is None:
@@ -217,35 +256,16 @@ class DataCube:

          dfs = [path for path in paths if path.endswith(cls.CUBE_DF_NAME)]

-         if contains:
-             paths = [path for path in paths if contains in path]
-         if endswith:
-             paths = [path for path in paths if path.endswith(endswith)]
-         if regex:
-             regex = re.compile(regex)
-             paths = [path for path in paths if re.search(regex, path)]
-         if raster_type.filename_regex is not None:
-             # regex = raster_type.filename_regex
-             # paths = [path for path in paths if re.search(regex, Path(path).name)]
-             regex = re.compile(raster_type.filename_regex, re.VERBOSE)
-             paths = [
-                 path
-                 for path in paths
-                 if re.match(regex, os.path.basename(path))
-                 or re.search(regex, os.path.basename(path))
-             ]
-
          if not check_for_df or not len(dfs):
              return cls.from_paths(
                  paths,
-                 # indexes=indexes,
                  parallelizer=parallelizer,
                  **kwargs,
              )

          folders_with_df: set[Path] = {Path(path).parent for path in dfs if path}

-         cubes: list[DataCube] = [cls.from_cube_df(df, **kwargs) for df in dfs]
+         cubes: list[DataCube] = [cls.from_cube_df(df, res=res) for df in dfs]

          paths_in_folders_without_df = [
              path for path in paths if Path(path).parent not in folders_with_df
@@ -268,20 +288,52 @@
          paths: Iterable[str | Path],
          *,
          res: int | None = None,
-         raster_type: Raster = Raster,
-         parallelizer: Optional[Parallel] = None,
+         parallelizer: Parallel | None = None,
          file_system=None,
+         contains: str | None = None,
+         bands: str | list[str] | None = None,
+         endswith: str = ".tif",
+         filename_regex: str | None = None,
          **kwargs,
-     ):
+     ) -> "DataCube":
+         """Create a DataCube from a list of file paths.
+
+         Args:
+             paths: Iterable of file paths to raster files.
+             res: Resolution to unify the data within the cube.
+             parallelizer: Joblib Parallel instance for concurrent file processing.
+             file_system: File system to use for file operations, used in Dapla environment.
+             contains: Filter files containing specific substrings.
+             endswith: Filter files that end with specific substrings.
+             bands: One or more band ids to keep.
+             filename_regex: Regular expression to match file names.
+             **kwargs: Additional keyword arguments to pass to the raster loading function.
+
+         Returns:
+             An instance of DataCube containing the raster data from specified paths.
+         """
          crs = kwargs.pop("crs", None)

+         if contains:
+             paths = [path for path in paths if contains in path]
+         if endswith:
+             paths = [path for path in paths if path.endswith(endswith)]
+         if filename_regex:
+             compiled = re.compile(filename_regex, re.VERBOSE)
+             paths = [path for path in paths if re.search(compiled, Path(path).name)]
+         if bands:
+             if isinstance(bands, str):
+                 bands = [bands]
+             paths = [path for path in paths if any(band in str(path) for band in bands)]
+
          if not paths:
              return cls(crs=crs, parallelizer=parallelizer, res=res)

-         kwargs = dict(raster_type=raster_type, res=res) | kwargs
+         kwargs["res"] = res
+         kwargs["filename_regex"] = filename_regex

          if file_system is None and is_dapla():
-             kwargs |= {"file_system": FileClient.get_gcs_file_system()}
+             kwargs["file_system"] = FileClient.get_gcs_file_system()

          if parallelizer is None:
              rasters: list[Raster] = [
@@ -302,17 +354,24 @@ class DataCube:
          gdf: GeoDataFrame | Iterable[GeoDataFrame],
          columns: str | Iterable[str],
          res: int,
-         parallelizer: Optional[Parallel] = None,
+         parallelizer: Parallel | None = None,
          tile_size: int | None = None,
          grid: GeoSeries | None = None,
-         raster_type: Raster = Raster,
          **kwargs,
-     ):
-         """
+     ) -> "DataCube":
+         """Create a DataCube from a GeoDataFrame or a set of them, tiling the spatial data as specified.

          Args:
-             grid: A grid.
-             **kwargs: Keyword arguments passed to Raster.from_gdf.
+             gdf: GeoDataFrame or an iterable of GeoDataFrames to rasterize.
+             columns: The column(s) in the GeoDataFrame that will be used as values for the rasterization.
+             res: Spatial resolution of the output rasters.
+             parallelizer: Joblib Parallel instance for concurrent processing.
+             tile_size: Size of each tile/grid cell in the output raster.
+             grid: Predefined grid to align the rasterization.
+             **kwargs: Additional keyword arguments passed to Raster.from_gdf.
+
+         Returns:
+             An instance of DataCube containing rasterized data from the GeoDataFrame(s).
          """
          if grid is None and tile_size is None:
              raise ValueError("Must specify either 'tile_size' or 'grid'.")
@@ -335,7 +394,6 @@ class DataCube:
              _from_gdf_func,
              columns=columns,
              res=res,
-             raster_type=raster_type,
              **kwargs,
          )

@@ -344,16 +402,16 @@

          rasters = []

-         if processes > 1:
+         if parallelizer.processes > 1:
              rasters = parallelizer.map(
                  clean_overlay, gdf, args=(grid,), kwargs=dict(keep_geom_type=True)
              )
-             with multiprocessing.get_context("spawn").Pool(processes) as p:
+             with multiprocessing.get_context("spawn").Pool(parallelizer.processes) as p:
                  for frame in gdf:
                      frame = frame.overlay(grid, keep_geom_type=True)
                      gdfs = to_gdf_list(frame)
                      rasters += p.map(partial_func, gdfs)
-         elif processes < 1:
+         elif parallelizer.processes < 1:
              raise ValueError("processes must be an integer 1 or greater.")
          else:
              for frame in gdf:
@@ -364,7 +422,18 @@ class DataCube:
          return cls(rasters, res=res)

      @classmethod
-     def from_cube_df(cls, df: DataFrame | str | Path, res: int | None = None):
+     def from_cube_df(
+         cls, df: DataFrame | str | Path, res: int | None = None
+     ) -> "DataCube":
+         """Construct a DataCube from a DataFrame or path containing metadata or paths of rasters.
+
+         Args:
+             df: DataFrame, path to a DataFrame, or string path pointing to cube data.
+             res: Optional resolution to standardize all rasters to this resolution.
+
+         Returns:
+             A DataCube instance containing the raster data described by the DataFrame.
+         """
          if isinstance(df, (str, Path)):
              df = read_geopandas(df) if is_dapla() else gpd.read_parquet(df)

@@ -381,28 +450,15 @@
          elif not isinstance(df, DataFrame):
              raise TypeError("df must be DataFrame or file path to a parquet file.")

-         try:
-             raster_types = [cls.CANON_RASTER_TYPES[x] for x in df["type"]]
-         except KeyError:
-             for x in df["type"]:
-                 try:
-                     cls.CANON_RASTER_TYPES[x]
-                 except KeyError:
-                     raise ValueError(
-                         f"Cannot convert raster type '{x}' to a Raster instance."
-                     )
-
          rasters: list[Raster] = [
-             raster_type.from_dict(meta)
-             for raster_type, (_, meta) in zip(
-                 raster_types, df[NESSECARY_META].iterrows()
-             )
+             Raster.from_dict(meta) for _, meta in (df[NESSECARY_META].iterrows())
          ]
          return cls(rasters)

      def to_gdf(
          self, column: str | None = None, ignore_index: bool = False, concat: bool = True
      ) -> GeoDataFrame:
+         """Convert DataCube to GeoDataFrame."""
          gdfs = self.run_raster_method("to_gdf", column=column, return_self=False)

          if concat:
@@ -410,6 +466,7 @@
          return gdfs

      def to_xarray(self) -> Dataset:
+         """Convert DataCube to an xarray.Dataset."""
          return xr.Dataset({i: r.to_xarray() for i, r in enumerate(self.data)})

      def zonal(
@@ -420,16 +477,17 @@ class DataCube:
          by_date: bool | None = None,
          dropna: bool = True,
      ) -> GeoDataFrame:
+         """Calculate zonal statistics within polygons."""
          idx_mapper, idx_name = get_index_mapper(polygons)
-         polygons, aggfunc, func_names = prepare_zonal(polygons, aggfunc)
-         poly_iter = make_geometry_iterrows(polygons)
+         polygons, aggfunc, func_names = _prepare_zonal(polygons, aggfunc)
+         poly_iter = _make_geometry_iterrows(polygons)

          if by_date is None:
              by_date: bool = all(r.date is not None for r in self)

          if not self.parallelizer:
              aggregated: list[DataFrame] = [
-                 zonal_func(
+                 _zonal_func(
                      poly,
                      cube=self,
                      array_func=array_func,
@@ -441,7 +499,7 @@
              ]
          else:
              aggregated: list[DataFrame] = self.parallelizer.map(
-                 zonal_func,
+                 _zonal_func,
                  poly_iter,
                  kwargs=dict(
                      cube=self,
@@ -452,7 +510,7 @@
                  ),
              )

-         return zonal_post(
+         return _zonal_post(
              aggregated,
              polygons=polygons,
              idx_mapper=idx_mapper,
@@ -461,6 +519,7 @@
          )

      def gradient(self, degrees: bool = False) -> Self:
+         """Get gradients in each image."""
          self.data = self.run_raster_method("gradient", degrees=degrees)
          return self

@@ -499,7 +558,21 @@ class DataCube:
          self.data = data
          return self

+     def sample(self, n: int, copy: bool = True, **kwargs) -> Self:
+         """Take n samples of the cube."""
+         if self.crs is None:
+             self._crs = get_common_crs(self.data)
+
+         cube = self.copy() if copy else self
+
+         cube.data = list(pd.Series(cube.data).sample(n))
+
+         cube.data = cube.run_raster_method("load", **kwargs)
+
+         return cube
+
      def load(self, copy: bool = True, **kwargs) -> Self:
+         """Load all images as arrays into a DataCube copy."""
          if self.crs is None:
              self._crs = get_common_crs(self.data)

@@ -509,13 +582,17 @@

          return cube

-     def intersects(self, other, copy: bool = True) -> Self:
+     def intersection(self, other: Any, copy: bool = True) -> Self:
+         """Select the images that intersect 'other'."""
          other = to_shapely(other)
          cube = self.copy() if copy else self
          cube = cube[cube.boxes.intersects(other)]
          return cube

-     def sfilter(self, other, copy: bool = True) -> Self:
+     def sfilter(
+         self, other: GeoDataFrame | GeoSeries | Geometry | tuple, copy: bool = True
+     ) -> Self:
+         """Spatially filter images by bounding box or geometry object."""
          other = to_shapely(other)
          cube = self.copy() if copy else self
          cube.data = [raster for raster in self if raster.unary_union.intersects(other)]
@@ -524,6 +601,7 @@
      def clip(
          self, mask: GeoDataFrame | GeoSeries | Geometry, copy: bool = True, **kwargs
      ) -> Self:
+         """Clip the images by bounding box or geometry object."""
          if self.crs is None:
              self._crs = get_common_crs(self.data)

@@ -541,16 +619,24 @@ class DataCube:
          cube.data = cube.run_raster_method("clip", mask=mask, **kwargs)
          return cube

-     def clipmerge(self, mask, **kwargs) -> Self:
-         return clipmerge(self, mask, **kwargs)
+     def clipmerge(self, mask: GeoDataFrame | GeoSeries | Geometry, **kwargs) -> Self:
+         """Clip the images and merge to one image."""
+         return _clipmerge(self, mask, **kwargs)

      def merge_by_bounds(self, by: str | list[str] | None = None, **kwargs) -> Self:
-         return merge_by_bounds(self, by=by, **kwargs)
+         """Merge images with the same bounding box."""
+         return _merge_by_bounds(self, by=by, **kwargs)

      def merge(self, by: str | list[str] | None = None, **kwargs) -> Self:
-         return merge(self, by=by, **kwargs)
+         """Merge all images to one."""
+         return _merge(self, by=by, **kwargs)

      def explode(self) -> Self:
+         """Convert from 3D to 2D arrays.
+
+         Make multi-banded arrays (3d) into multiple single-banded arrays (2d).
+         """
+
          def explode_one_raster(raster: Raster) -> list[Raster]:
              property_values = {key: getattr(raster, key) for key in raster.properties}

@@ -561,7 +647,7 @@
                  ).items()
                  if key in ALLOWED_KEYS and key not in ["array", "indexes"]
              }
-             if raster.array is None:
+             if raster.values is None:
                  return [
                      raster.__class__.from_dict({"indexes": i} | all_meta)
                      for i in raster.indexes_as_tuple()
@@ -581,7 +667,8 @@
          )
          return self

-     def dissolve_bands(self, aggfunc, copy: bool = True) -> Self:
+     def dissolve_bands(self, aggfunc: Callable | str, copy: bool = True) -> Self:
+         """Aggregate values in 3D arrays to a single value in a 2D array."""
          self._check_for_array()
          if not callable(aggfunc) and not isinstance(aggfunc, str):
              raise TypeError("Can only supply a single aggfunc")
@@ -605,6 +692,9 @@
          its array values have been recalculated.

          Args:
+             root: Directory path where the images will be written to.
+             file_format: File extension.
+             **kwargs: Keyword arguments passed to rasterio.open.

          """
          self._check_for_array()
@@ -617,11 +707,17 @@ class DataCube:
          ]

          if self.parallelizer:
-             self.parallelizer.starmap(_write_func, zip(self, paths), kwargs=kwargs)
+             self.parallelizer.starmap(
+                 _write_func, zip(self, paths, strict=False), kwargs=kwargs
+             )
          else:
-             [_write_func(raster, path, **kwargs) for raster, path in zip(self, paths)]
+             [
+                 _write_func(raster, path, **kwargs)
+                 for raster, path in zip(self, paths, strict=False)
+             ]

      def write_df(self, folder: str) -> None:
+         """Write metadata DataFrame."""
          df = pd.DataFrame(self.meta)

          folder = Path(folder)
@@ -638,9 +734,10 @@
          index_func: Callable,
          band_name1: str,
          band_name2: str,
-         copy=True,
+         copy: bool = True,
          **kwargs,
      ) -> Self:
+         """Calculate an index based on a function."""
          cube = self.copy() if copy else self

          raster_pairs: list[tuple[Raster, Raster]] = get_raster_pairs(
@@ -658,28 +755,34 @@

          return cube.__class__(rasters)

-     def reproject_match(self) -> Self:
-         pass
+     # def reproject_match(self) -> Self:
+     #     pass

-     def to_crs(self, crs, copy: bool = True) -> Self:
+     def to_crs(self, crs: Any, copy: bool = True) -> Self:
+         """Reproject the coordinates of each image."""
          cube = self.copy() if copy else self
          cube.data = [r.to_crs(crs) for r in cube]
          cube._warped_crs = crs
          return cube

-     def set_crs(self, crs, allow_override: bool = False, copy: bool = True) -> Self:
+     def set_crs(
+         self, crs: Any, allow_override: bool = False, copy: bool = True
+     ) -> Self:
+         """Set the CRS of each image."""
          cube = self.copy() if copy else self
          cube.data = [r.set_crs(crs, allow_override=allow_override) for r in cube]
          cube._warped_crs = crs
          return cube

      def min(self) -> Series:
+         """Get minimum array values for each image."""
          return Series(
              self.run_raster_method("min"),
              name="min",
          )

      def max(self) -> Series:
+         """Get maximum array values for each image."""
          return Series(
              self.run_raster_method("max"),
              name="max",
@@ -693,14 +796,14 @@ class DataCube:
          return Series(data, name=attribute)

      def run_raster_method(
-         self, method: str, *args, copy: bool = True, return_self=False, **kwargs
+         self, method: str, *args, copy: bool = True, return_self: bool = False, **kwargs
      ) -> Self:
          """Run a Raster method for each raster in the cube."""
          if not all(hasattr(r, method) for r in self):
              raise AttributeError(f"Raster has no method {method!r}.")

          method_as_func = functools.partial(
-             _method_as_func, method=method, *args, **kwargs
+             _method_as_func, *args, method=method, **kwargs
          )

          cube = self.copy() if copy else self
@@ -709,6 +812,7 @@

      @property
      def meta(self) -> list[dict]:
+         """Get metadata property of each raster."""
          return [raster.meta for raster in self]

      # @property
@@ -723,6 +827,7 @@

      @property
      def data(self) -> list[Raster]:
+         """The Rasters as a list."""
          return self._data

      @data.setter
@@ -738,10 +843,10 @@
          self._data = list(data)

          for i, raster in enumerate(self._data):
-             if raster.date and raster.date_format:
+             if raster.date:
                  try:
-                     mint, maxt = disambiguate_timestamp(raster.date, raster.date_format)
-                 except NameError:
+                     mint, maxt = disambiguate_timestamp(raster.date, self.date_format)
+                 except (NameError, TypeError):
                      mint, maxt = 0, 1
              else:
                  mint, maxt = 0, 1
@@ -751,7 +856,8 @@ class DataCube:

      @property
      def arrays(self) -> list[np.ndarray]:
-         return [raster.array for raster in self]
+         """The arrays of the images as a list."""
+         return [raster.values for raster in self]

      @arrays.setter
      def arrays(self, new_arrays: list[np.ndarray]):
@@ -762,17 +868,14 @@
          if not all(isinstance(arr, np.ndarray) for arr in new_arrays):
              raise ValueError("Must be list of numpy ndarrays")

-         self.data = [raster.update(array=arr) for raster, arr in zip(self, new_arrays)]
-
-     @property
-     def raster_type(self) -> Series:
-         return Series(
-             [r.__class__ for r in self],
-             name="raster_type",
-         )
+         self.data = [
+             raster.update(array=arr)
+             for raster, arr in zip(self, new_arrays, strict=False)
+         ]

      @property
      def band(self) -> Series:
+         """Get the 'band' attribute of the rasters."""
          return Series(
              [r.band for r in self],
              name="band",
@@ -780,6 +883,7 @@

      @property
      def dtype(self) -> Series:
+         """Get the 'dtype' attribute of the rasters."""
          return Series(
              [r.dtype for r in self],
              name="dtype",
@@ -787,22 +891,27 @@

      @property
      def nodata(self) -> int | None:
+         """No data value."""
          return self._nodata

      @property
      def path(self) -> Series:
+         """Get the 'path' attribute of the rasters."""
          return self.raster_attribute("path")

      @property
      def name(self) -> Series:
+         """Get the 'name' attribute of the rasters."""
          return self.raster_attribute("name")

      @property
      def date(self) -> Series:
+         """Get the 'date' attribute of the rasters."""
          return self.raster_attribute("date")

      @property
      def indexes(self) -> Series:
+         """Get the 'indexes' attribute of the rasters."""
          return self.raster_attribute("indexes")

      # @property
@@ -811,38 +920,46 @@ class DataCube:

      @property
      def area(self) -> Series:
+         """Get the 'area' attribute of the rasters."""
          return self.raster_attribute("area")

      @property
      def length(self) -> Series:
+         """Get the 'length' attribute of the rasters."""
          return self.raster_attribute("length")

      @property
      def height(self) -> Series:
+         """Get the 'height' attribute of the rasters."""
          return self.raster_attribute("height")

      @property
      def width(self) -> Series:
+         """Get the 'width' attribute of the rasters."""
          return self.raster_attribute("width")

      @property
      def shape(self) -> Series:
+         """Get the 'shape' attribute of the rasters."""
          return self.raster_attribute("shape")

      @property
      def count(self) -> Series:
+         """Get the 'count' attribute of the rasters."""
          return self.raster_attribute("count")

      @property
      def res(self) -> int:
+         """Spatial resolution of the images."""
          return self._res

      @res.setter
-     def res(self, value):
+     def res(self, value) -> None:
          self._res = value

      @property
      def crs(self) -> pyproj.CRS:
+         """Coordinate reference system of the images."""
          crs = self._warped_crs if hasattr(self, "_warped_crs") else self._crs
          if crs is not None:
              return crs
@@ -853,10 +970,12 @@ class DataCube:

      @property
      def unary_union(self) -> Geometry:
+         """Box polygon of the combined bounds of each image."""
          return shapely.unary_union([shapely.box(*r.bounds) for r in self])

      @property
      def centroid(self) -> GeoSeries:
+         """Get the 'centroid' attribute of the rasters."""
          return GeoSeries(
              [r.centroid for r in self],
              name="centroid",
@@ -865,11 +984,12 @@

      @property
      def tile(self) -> Series:
+         """Get the 'tile' attribute of the rasters."""
          return self.raster_attribute("tile")

      @property
      def boxes(self) -> GeoSeries:
-         """GeoSeries of each raster's bounds as polygon."""
+         """Get the 'bounds' attribute of the rasters."""
          return GeoSeries(
              [shapely.box(*r.bounds) if r.bounds is not None else None for r in self],
              name="boxes",
@@ -878,6 +998,7 @@

      @property
      def total_bounds(self) -> tuple[float, float, float, float]:
+         """Combined minimum and maximum longitude and latitude."""
          return tuple(x for x in self.boxes.total_bounds)

      @property
@@ -889,7 +1010,7 @@
          """
          return BoundingBox(*self.index.bounds)

-     def copy(self, deep=True) -> Self:
+     def copy(self, deep: bool = True) -> Self:
          """Returns a (deep) copy of the class instance and its rasters.

          Args:
@@ -899,19 +1020,32 @@ class DataCube:
          copied.data = [raster.copy() for raster in copied]
          return copied

-     def _check_for_array(self, text="") -> None:
+     def _check_for_array(self, text: str = "") -> None:
          mess = "Arrays are not loaded. " + text
-         if all(raster.array is None for raster in self):
+         if all(raster.values is None for raster in self):
              raise ValueError(mess)

      def __getitem__(
-         self, item: slice | int | Series | Sequence | Callable | Geometry | BoundingBox
-     ) -> Self | Raster:
-         """
-
-         Examples
-         --------
-         >>> cube = sg.DataCube.from_root(testdata, endswith=".tif", crs=25833).load()
+         self,
+         item: (
+             str
+             | slice
+             | int
+             | Series
+             | list
+             | tuple
+             | Callable
+             | Geometry
+             | BoundingBox
+         ),
+     ) -> Self | Raster | TORCHGEO_RETURN_TYPE:
+         """Select one or more of the Rasters based on indexing or spatial or boolean predicates.
+
+         Examples:
+         ------------
+         >>> import sgis as sg
+         >>> root = 'https://media.githubusercontent.com/media/statisticsnorway/ssb-sgis/main/tests/testdata/raster'
+         >>> cube = sg.DataCube.from_root(root, filename_regex=sg.raster.SENTINEL2_FILENAME_REGEX, crs=25833).load()

          List slicing:

@@ -929,6 +1063,14 @@

          """
          copy = self.copy()
+         if isinstance(item, str) and copy.path is not None:
+             copy.data = [raster for raster in copy if item in raster.path]
+             if len(copy) == 1:
+                 return copy[0]
+             elif not len(copy):
+                 return Raster()
+             return copy
+
          if isinstance(item, slice):
              copy.data = copy.data[item]
              return copy
@@ -937,7 +1079,7 @@
          elif callable(item):
              item = item(copy)
          elif isinstance(item, BoundingBox):
-             return cube_to_torch(self, item)
+             return cube_to_torchgeo(self, item)

          elif isinstance(item, (GeoDataFrame, GeoSeries, Geometry)) or is_bbox_like(
              item
@@ -956,7 +1098,8 @@

          return copy

-     def __setattr__(self, attr, value):
+     def __setattr__(self, attr: str, value: Any) -> None:
+         """Set an attribute of the cube."""
          if (
              attr in ["data", "_data"]
              or not is_list_like(value)
@@ -970,13 +1113,16 @@ class DataCube:
          )
          return super().__setattr__(attr, value)

-     def __iter__(self):
+     def __iter__(self) -> Iterator[Raster]:
+         """Iterate over the Rasters."""
          return iter(self.data)

      def __len__(self) -> int:
+         """Number of Rasters."""
          return len(self.data)

      def __repr__(self) -> str:
+         """String representation."""
          return f"{self.__class__.__name__}({len(self)})"

      # def __mul__(self, scalar) -> Self:
@@ -998,26 +1144,35 @@
      #     return self.map(_pow, scalar=scalar)


- def concat_cubes(cube_list: list[DataCube], res: int | None = None) -> DataCube:
-     if not all(isinstance(cube, DataCube) for cube in cube_list):
-         raise TypeError
+ def concat_cubes(cubes: list[DataCube], res: int | None = None) -> DataCube:
+     """Concatenate cubes to one.
+
+     Args:
+         cubes: A sequence of DataCubes.
+         res: Spatial resolution.
+
+     Returns:
+         The cubes combined to one.
+     """
+     if not all(isinstance(cube, DataCube) for cube in cubes):
+         raise TypeError("cubes must be of type DataCube.")

      return DataCube(
-         list(itertools.chain.from_iterable([cube.data for cube in cube_list])), res=res
+         list(itertools.chain.from_iterable([cube.data for cube in cubes])), res=res
      )


- def clipmerge(cube: DataCube, mask, **kwargs) -> DataCube:
-     return merge(cube, bounds=mask, **kwargs)
+ def _clipmerge(cube: DataCube, mask: Any, **kwargs) -> DataCube:
+     return _merge(cube, bounds=mask, **kwargs)


- def merge(
+ def _merge(
      cube: DataCube,
-     by=None,
-     bounds=None,
+     by: str | list[str] | None = None,
+     bounds: Any | None = None,
      **kwargs,
  ) -> DataCube:
-     if not all(r.array is None for r in cube):
+     if not all(r.values is None for r in cube):
          raise ValueError("Arrays can't be loaded when calling merge.")

      bounds = to_bbox(bounds) if bounds is not None else bounds
@@ -1053,10 +1208,10 @@ def merge(
      )


- def merge_by_bounds(
+ def _merge_by_bounds(
      cube: DataCube,
-     by=None,
-     bounds=None,
+     by: str | list[str] | None = None,
+     bounds: Any | None = None,
      **kwargs,
  ) -> DataCube:
      if isinstance(by, str):
@@ -1064,9 +1219,9 @@
      elif by is None:
          by = ["tile"]
      else:
-         by = by + ["tile"]
+         by = list(by) + ["tile"]

-     return merge(
+     return _merge(
          cube,
          by=by,
          bounds=bounds,
@@ -1074,65 +1229,27 @@
      )


- def _merge(cube, **kwargs) -> DataCube:
+ def _merge(cube: DataCube, **kwargs) -> DataCube:
+     by = kwargs.pop("by")
      if cube.crs is None:
          cube._crs = get_common_crs(cube.data)

      indexes = cube[0].indexes_as_tuple()

-     datasets = [load_raster(raster.path) for raster in cube]
+     datasets = [_load_raster(raster.path) for raster in cube]
      array, transform = rasterio_merge.merge(datasets, indexes=indexes, **kwargs)
      cube.data = [Raster.from_array(array, crs=cube.crs, transform=transform)]

      return cube

-     if all(arr is None for arr in cube.arrays):
-         datasets = [load_raster(raster.path) for raster in cube]
-         array, transform = rasterio_merge.merge(datasets, indexes=indexes, **kwargs)
-         cube.data = [Raster.from_array(array, crs=cube.crs, transform=transform)]
-         return cube
-
-     bounds = kwargs.pop("bounds", None)
-
-     if bounds:
-         xarrays = [
-             r.to_xarray().transpose("y", "x")
-             for r in cube.explode()
-             if r.intersects(bounds)
-         ]
-     else:
-         xarrays = [r.to_xarray().transpose("y", "x") for r in cube.explode()]
-
-     if len(xarrays) > 1:
-         merged = merge_arrays(
-             xarrays,
-             bounds=bounds,
-             res=cube.res,
-             nodata=cube.nodata,
-             **kwargs,
-         )
-     else:
-         try:
-             merged = xarrays[0]
-         except IndexError:
-             cube.data = []
-             return cube
-
-     array = merged.to_numpy()
-
-     raster = cube[0].__class__
-     out_bounds = bounds or cube.total_bounds
-     cube.data = [raster.from_array(array, bounds=out_bounds, crs=cube.crs)]
-
-     return cube
-

- def load_raster(path):
+ def _load_raster(path: str | Path) -> rasterio.io.DatasetReader:
      with opener(path) as file:
          return rasterio.open(file)


  def numpy_to_torch(array: np.ndarray) -> torch.Tensor:
+     """Convert numpy array to a pytorch tensor."""
      # fix numpy dtypes which are not supported by pytorch tensors
      if array.dtype == np.uint16:
          array = array.astype(np.int32)
@@ -1142,7 +1259,8 @@ def numpy_to_torch(array: np.ndarray) -> torch.Tensor:
      return torch.tensor(array)


- def cube_to_torch(cube: DataCube, query: BoundingBox):
+ def cube_to_torchgeo(cube: DataCube, query: BoundingBox) -> TORCHGEO_RETURN_TYPE:
+     """Convert a DataCube to the type of dict returned from torchgeo datasets __getitem__."""
      bbox = shapely.box(*to_bbox(query))
      if cube.separate_files:
          cube = cube.sfilter(bbox).explode().load()