dea-tools 0.3.6.dev45__tar.gz → 0.3.7.dev2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. {dea_tools-0.3.6.dev45/dea_tools.egg-info → dea_tools-0.3.7.dev2}/PKG-INFO +1 -1
  2. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/datahandling.py +60 -26
  3. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2/dea_tools.egg-info}/PKG-INFO +1 -1
  4. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools.egg-info/SOURCES.txt +0 -1
  5. dea_tools-0.3.6.dev45/dea_tools/climate.py +0 -330
  6. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/.gitignore +0 -0
  7. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/LICENSE +0 -0
  8. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/MANIFEST.in +0 -0
  9. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/README.rst +0 -0
  10. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/__init__.py +0 -0
  11. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/__main__.py +0 -0
  12. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/app/__init__.py +0 -0
  13. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/app/animations.py +0 -0
  14. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/app/changefilmstrips.py +0 -0
  15. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/app/crophealth.py +0 -0
  16. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/app/deacoastlines.py +0 -0
  17. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/app/geomedian.py +0 -0
  18. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/app/imageexport.py +0 -0
  19. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/app/miningrehab.py +0 -0
  20. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/app/wetlandsinsighttool.py +0 -0
  21. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/app/widgetconstructors.py +0 -0
  22. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/bandindices.py +0 -0
  23. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/bom.py +0 -0
  24. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/classification.py +0 -0
  25. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/coastal.py +0 -0
  26. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/dask.py +0 -0
  27. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/landcover.py +0 -0
  28. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/maps.py +0 -0
  29. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/plotting.py +0 -0
  30. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/pyfes_model.py +0 -0
  31. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/spatial.py +0 -0
  32. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/temporal.py +0 -0
  33. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/validation.py +0 -0
  34. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/waterbodies.py +0 -0
  35. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/wetlands.py +0 -0
  36. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools/wit_app.py +0 -0
  37. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools.egg-info/dependency_links.txt +0 -0
  38. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools.egg-info/requires.txt +0 -0
  39. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/dea_tools.egg-info/top_level.txt +0 -0
  40. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/index.rst +0 -0
  41. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/mock_imports.txt +0 -0
  42. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/pyproject.toml +0 -0
  43. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/setup.cfg +0 -0
  44. {dea_tools-0.3.6.dev45 → dea_tools-0.3.7.dev2}/setup.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: dea-tools
3
- Version: 0.3.6.dev45
3
+ Version: 0.3.7.dev2
4
4
  Summary: Functions and algorithms for analysing Digital Earth Australia data.
5
5
  Home-page: https://github.com/GeoscienceAustralia/dea-notebooks
6
6
  Author: Geoscience Australia
@@ -17,7 +17,7 @@ here: https://gis.stackexchange.com/questions/tagged/open-data-cube).
17
17
  If you would like to report an issue with this script, you can file one
18
18
  on GitHub (https://github.com/GeoscienceAustralia/dea-notebooks/issues/new).
19
19
 
20
- Last modified: February 2025
20
+ Last modified: April 2025
21
21
  """
22
22
 
23
23
  import datetime
@@ -95,6 +95,16 @@ def _common_bands(dc, products):
95
95
  return [band for band in bands if band in common]
96
96
 
97
97
 
98
+ def _contiguity_fuser(dst: np.ndarray, src: np.ndarray) -> None:
99
+ """
100
+ Ensure contiguity data is properly combined by replacing
101
+ pixels in `dst` that are either 0 (non-contiguous) or 255
102
+ (nodata) with the corresponding value from `src`, propagating
103
+ 1 (valid contiguous data) if it exists.
104
+ """
105
+ np.copyto(dst, src, where=np.isin(dst, (255, 0)))
106
+
107
+
98
108
  def load_ard(
99
109
  dc,
100
110
  products=None,
@@ -108,6 +118,7 @@ def load_ard(
108
118
  ls7_slc_off=True,
109
119
  dtype="auto",
110
120
  predicate=None,
121
+ verbose=True,
111
122
  **kwargs,
112
123
  ):
113
124
  """
@@ -228,6 +239,8 @@ def load_ard(
228
239
  from `dc.find_datasets`), and return a boolean. For example,
229
240
  a predicate function could be used to return True for only
230
241
  datasets acquired in January: `dataset.time.begin.month == 1`
242
+ verbose : bool, optional
243
+ If True, print progress statements during loading
231
244
  **kwargs :
232
245
  A set of keyword arguments to `dc.load` that define the
233
246
  spatiotemporal query and load parameters used to extract data.
@@ -386,6 +399,12 @@ def load_ard(
386
399
  else pq_band
387
400
  )
388
401
 
402
+ # Use custom fuse function to ensure contiguity is combined correctly
403
+ # when grouping data by solar day. Without this, contiguity data from
404
+ # neighbouring images is pasted semi-randomly over each other,
405
+ # producing artefacts in the output.
406
+ kwargs["fuse_func"] = {contiguity_band: _contiguity_fuser}
407
+
389
408
  # If `measurements` are specified but do not include PQ or
390
409
  # contiguity variables, add these to `measurements`
391
410
  if pq_band not in measurements:
@@ -409,27 +428,30 @@ def load_ard(
409
428
 
410
429
  # If predicate is specified, use this function to filter the list
411
430
  # of datasets prior to load
412
- if predicate:
413
- print(
414
- "The 'predicate' parameter will be deprecated in future "
415
- "versions of this function as this functionality has now "
416
- "been added to Datacube itself. Please use "
417
- "`dataset_predicate=...` instead."
418
- )
419
- query["dataset_predicate"] = predicate
431
+ if verbose:
432
+ if predicate:
433
+ print(
434
+ "The 'predicate' parameter will be deprecated in future "
435
+ "versions of this function as this functionality has now "
436
+ "been added to Datacube itself. Please use "
437
+ "`dataset_predicate=...` instead."
438
+ )
439
+ query["dataset_predicate"] = predicate
420
440
 
421
441
  # Extract list of datasets for each product using query params
422
442
  dataset_list = []
423
443
 
424
444
  # Get list of datasets for each product
425
- print("Finding datasets")
445
+ if verbose:
446
+ print("Finding datasets")
426
447
  for product in products:
427
448
  # Obtain list of datasets for product
428
- print(
429
- f" {product} (ignoring SLC-off observations)"
430
- if not ls7_slc_off and product == "ga_ls7e_ard_3"
431
- else f" {product}"
432
- )
449
+ if verbose:
450
+ print(
451
+ f" {product} (ignoring SLC-off observations)"
452
+ if not ls7_slc_off and product == "ga_ls7e_ard_3"
453
+ else f" {product}"
454
+ )
433
455
  datasets = dc.find_datasets(product=product, **query)
434
456
 
435
457
  # Remove Landsat 7 SLC-off observations if ls7_slc_off=False
@@ -477,7 +499,8 @@ def load_ard(
477
499
  # completely to save processing time
478
500
  if min_gooddata > 0.0:
479
501
  # Compute good data for each observation as % of total pixels
480
- print(f"Counting good quality pixels for each time step using {cloud_mask}")
502
+ if verbose:
503
+ print(f"Counting good quality pixels for each time step using {cloud_mask}")
481
504
  data_perc = pq_mask.sum(axis=[1, 2], dtype="int32") / (
482
505
  pq_mask.shape[1] * pq_mask.shape[2]
483
506
  )
@@ -488,15 +511,20 @@ def load_ard(
488
511
  ds = ds.sel(time=keep)
489
512
  pq_mask = pq_mask.sel(time=keep)
490
513
 
491
- print(
492
- f"Filtering to {len(ds.time)} out of {total_obs} "
493
- f"time steps with at least {min_gooddata:.1%} "
494
- f"good quality pixels"
495
- )
514
+ if verbose:
515
+ print(
516
+ f"Filtering to {len(ds.time)} out of {total_obs} "
517
+ f"time steps with at least {min_gooddata:.1%} "
518
+ f"good quality pixels"
519
+ )
496
520
 
497
521
  # Morphological filtering on cloud masks
498
522
  if (mask_filters is not None) & (mask_pixel_quality != False):
499
- print(f"Applying morphological filters to pixel quality mask: {mask_filters}")
523
+ if verbose:
524
+ print(
525
+ f"Applying morphological filters to pixel quality mask: {mask_filters}"
526
+ )
527
+
500
528
  pq_mask = ~mask_cleanup(~pq_mask, mask_filters=mask_filters)
501
529
 
502
530
  warnings.warn(
@@ -519,12 +547,16 @@ def load_ard(
519
547
 
520
548
  # Add pixel quality mask to combined mask
521
549
  if mask_pixel_quality:
522
- print(f"Applying {cloud_mask} pixel quality/cloud mask")
550
+ if verbose:
551
+ print(f"Applying {cloud_mask} pixel quality/cloud mask")
552
+
523
553
  mask = pq_mask
524
554
 
525
555
  # Add contiguity mask to combined mask
526
556
  if mask_contiguity:
527
- print(f"Applying contiguity mask ({contiguity_band})")
557
+ if verbose:
558
+ print(f"Applying contiguity mask ({contiguity_band})")
559
+
528
560
  cont_mask = ds[contiguity_band] == 1
529
561
 
530
562
  # If mask already has data if mask_pixel_quality == True,
@@ -567,10 +599,12 @@ def load_ard(
567
599
  # If user supplied `dask_chunks`, return data as a dask array
568
600
  # without actually loading it into memory
569
601
  if dask_chunks is not None:
570
- print(f"Returning {len(ds.time)} time steps as a dask array")
602
+ if verbose:
603
+ print(f"Returning {len(ds.time)} time steps as a dask array")
571
604
  return ds
572
605
  else:
573
- print(f"Loading {len(ds.time)} time steps")
606
+ if verbose:
607
+ print(f"Loading {len(ds.time)} time steps")
574
608
  return ds.compute()
575
609
 
576
610
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: dea-tools
3
- Version: 0.3.6.dev45
3
+ Version: 0.3.7.dev2
4
4
  Summary: Functions and algorithms for analysing Digital Earth Australia data.
5
5
  Home-page: https://github.com/GeoscienceAustralia/dea-notebooks
6
6
  Author: Geoscience Australia
@@ -11,7 +11,6 @@ dea_tools/__main__.py
11
11
  dea_tools/bandindices.py
12
12
  dea_tools/bom.py
13
13
  dea_tools/classification.py
14
- dea_tools/climate.py
15
14
  dea_tools/coastal.py
16
15
  dea_tools/dask.py
17
16
  dea_tools/datahandling.py
@@ -1,330 +0,0 @@
1
- # dea_climate.py
2
- '''
3
- Retrieving and manipulating gridded climate data.
4
-
5
- Adapted from scripts by Andrew Cherry and Brian Killough.
6
-
7
- License: The code in this notebook is licensed under the Apache License,
8
- Version 2.0 (https://www.apache.org/licenses/LICENSE-2.0). Digital Earth
9
- Australia data is licensed under the Creative Commons by Attribution 4.0
10
- license (https://creativecommons.org/licenses/by/4.0/).
11
-
12
- Contact: If you need assistance, please post a question on the Open Data
13
- Cube Discord chat (https://discord.com/invite/4hhBQVas5U) or on the GIS Stack
14
- Exchange (https://gis.stackexchange.com/questions/ask?tags=open-data-cube)
15
- using the `open-data-cube` tag (you can view previously asked questions
16
- here: https://gis.stackexchange.com/questions/tagged/open-data-cube).
17
-
18
- If you would like to report an issue with this script, you can file one on
19
- GitHub https://github.com/digitalearthafrica/deafrica-sandbox-notebooks/issues
20
-
21
- Last modified: October 2020
22
- '''
23
-
24
- import os
25
- import datetime
26
- import numpy as np
27
- from dateutil.parser import parse
28
- import boto3
29
- import botocore
30
- import xarray as xr
31
- import warnings
32
-
33
- ERA5_VARS = [
34
- "air_pressure_at_mean_sea_level",
35
- "air_temperature_at_2_metres",
36
- "air_temperature_at_2_metres_1hour_Maximum",
37
- "air_temperature_at_2_metres_1hour_Minimum",
38
- "dew_point_temperature_at_2_metres",
39
- "eastward_wind_at_100_metres",
40
- "eastward_wind_at_10_metres",
41
- "integral_wrt_time_of_surface_direct_downwelling_shortwave_flux_in_air_1hour_Accumulation",
42
- "lwe_thickness_of_surface_snow_amount",
43
- "northward_wind_at_100_metres",
44
- "northward_wind_at_10_metres",
45
- "precipitation_amount_1hour_Accumulation",
46
- "sea_surface_temperature",
47
- "sea_surface_wave_from_direction",
48
- "sea_surface_wave_mean_period",
49
- "significant_height_of_wind_and_swell_waves",
50
- "snow_density",
51
- "surface_air_pressure",
52
- ]
53
-
54
-
55
- def get_era5_daily(var,
56
- date_from_arg,
57
- date_to_arg=None,
58
- reduce_func=None,
59
- cache_dir='era5',
60
- resample='1D'):
61
- """
62
- Download and return a variable from the European Centre for Medium
63
- Range Weather Forecasts (ECMWF) global climate reanalysis product
64
- (ERA5) for a defined time window.
65
-
66
- Parameters
67
- ----------
68
- var : string
69
- Name of the ERA5 climate variable to download, e.g
70
- "air_temperature_at_2_metres"
71
-
72
- date_from_arg: string or datetime object
73
- Starting date of the time window.
74
-
75
- date_to_arg: string or datetime object
76
- End date of the time window. If not supplied, set to be the same
77
- as starting date.
78
-
79
- reduce_func: numpy function
80
- lets you specify a function to apply to each day's worth of data.
81
- The default is np.mean, which computes daily average. To get a
82
- sum, use np.sum.
83
-
84
- cache_dir: string
85
- Path to save downloaded ERA5 data. The path will be created if
86
- not already exists.
87
- The default is 'era5'.
88
-
89
- resample: string
90
- Temporal resampling frequency to be used for xarray's resample
91
- function. The default is '1D', which is daily. Since ERA5 data
92
- is provided as one file per month, maximum resampling period is
93
- '1M'.
94
-
95
- Returns
96
- -------
97
- A lazy-loaded xarray dataset containing an ERA5 variable for the
98
- selected time window.
99
-
100
- """
101
-
102
- # Massage input data
103
- assert var in ERA5_VARS, "var must be one of [{}] (got {})".format(
104
- ','.join(ERA5_VARS), var)
105
- if not os.path.exists(cache_dir):
106
- os.mkdir(cache_dir)
107
- if reduce_func is None:
108
- reduce_func = np.mean
109
- if type(date_from_arg) == str:
110
- date_from_arg = parse(date_from_arg)
111
- if type(date_to_arg) == str:
112
- date_to_arg = parse(date_to_arg)
113
- if date_to_arg is None:
114
- date_to_arg = date_from_arg
115
-
116
- # Make sure our dates are in the correct order
117
- from_date = min(date_from_arg, date_to_arg)
118
- to_date = max(date_from_arg, date_to_arg)
119
-
120
- # Download ERA5 files to local cache if they don't already exist
121
- client = None # Boto client (if needed)
122
- local_files = [] # Will hold list of local filenames
123
- Y, M = from_date.year, from_date.month # Loop vars
124
- loop_end = to_date.year * 12 + to_date.month # Loop sentinel
125
- while Y * 12 + M <= loop_end:
126
- local_file = os.path.join(
127
- cache_dir, "{Y:04}_{M:02}_{var}.nc".format(Y=Y, M=M, var=var))
128
- data_key = "{Y:04}/{M:02}/data/{var}.nc".format(Y=Y, M=M, var=var)
129
- if not os.path.isfile(
130
- local_file
131
- ): # check if file already exists (TODO: move to temp, catch failed download)
132
- if client is None:
133
- client = boto3.client('s3',
134
- config=botocore.client.Config(
135
- signature_version=botocore.UNSIGNED))
136
- client.download_file('era5-pds', data_key, local_file)
137
- local_files.append(local_file)
138
- if M == 12:
139
- Y += 1
140
- M = 1
141
- else:
142
- M += 1
143
-
144
- # Load and merge the locally-cached ERA5 data from the list of filenames
145
- date_slice = slice(str(from_date.date()), str(to_date.date(
146
- ))) # I do this to INCLUDE the whole end date, not just 00:00
147
-
148
- def prepro(ds):
149
- if 'time0' in ds.dims:
150
- ds = ds.rename({"time0": "time"})
151
- if 'time1' in ds.dims:
152
- ds = ds.rename({
153
- "time1": "time"
154
- }) # This should INTENTIONALLY error if both times are defined
155
- ds = ds[[var]]
156
- output = ds.sel(time=date_slice).resample(
157
- time=resample).reduce(reduce_func)
158
- output.attrs = ds.attrs
159
- for v in output.data_vars:
160
- output[v].attrs = ds[v].attrs
161
- return output
162
-
163
- return xr.open_mfdataset(local_files,
164
- combine='by_coords',
165
- compat='equals',
166
- preprocess=prepro,
167
- parallel=True)
168
-
169
-
170
- def era5_area_crop(ds, lat, lon):
171
- """
172
- Crop a dataset containing European Centre for Medium Range Weather
173
- Forecasts (ECMWF) global climate reanalysis product (ERA5) variables
174
- to a location.
175
-
176
- The output spatial grid will either include input grid points within
177
- lat/lon boundaries or the nearest point if none is within the search
178
- location.
179
-
180
- Parameters
181
- ----------
182
- ds : xarray dataset
183
- A dataset containing ERA5 variables of interest.
184
-
185
- lat: tuple or list
186
- Latitude range for query.
187
-
188
- lon: tuple or list
189
- Longitude range for query.
190
-
191
- Returns
192
- -------
193
- An xarray dataset containing ERA5 variables for the selected
194
- location.
195
-
196
- """
197
-
198
- # Handle single value lat/lon args by wrapping them in lists
199
- try:
200
- min(lat)
201
- except TypeError:
202
- lat = [lat]
203
-
204
- try:
205
- min(lon)
206
- except TypeError:
207
- lon = [lon]
208
-
209
- if min(lon) < 0:
210
- # re-order along longitude to go from -180 to 180
211
- ds = ds.assign_coords({"lon": (((ds.lon + 180) % 360) - 180)})
212
- ds = ds.reindex({ "lon": np.sort(ds.lon)})
213
-
214
- # Issue warnings if args outside range.
215
- if min(lat) < ds.lat.min() or max(lat) > ds.lat.max():
216
- warnings.warn("Lats must be in range {} .. {}. Got: {}".format(
217
- ds.lat.min().values,
218
- ds.lat.max().values, lat))
219
- if min(lon) < ds.lon.min() or max(lon) > ds.lon.max():
220
- warnings.warn("Lons must be in range {} .. {}. Got: {}".format(
221
- ds.lon.min().values,
222
- ds.lon.max().values, lon))
223
-
224
- # Find existing coords between min&max
225
- lats = ds.lat[np.logical_and(
226
- ds.lat >= min(lat), ds.lat <= max(lat))].values
227
-
228
- # If there was nothing between, just plan to grab closest
229
- if len(lats) == 0:
230
- lats = np.unique(ds.lat.sel(lat=np.array(lat), method="nearest"))
231
- lons = ds.lon[np.logical_and(
232
- ds.lon >= min(lon), ds.lon <= max(lon))].values
233
- if len(lons) == 0:
234
- lons = np.unique(ds.lon.sel(lon=np.array(lon), method="nearest"))
235
-
236
- # crop and keep attrs
237
- output = ds.sel(lat=lats, lon=lons)
238
- output.attrs = ds.attrs
239
- for var in output.data_vars:
240
- output[var].attrs = ds[var].attrs
241
- return output
242
-
243
-
244
- def era5_area_nearest(ds, lat, lon):
245
- """
246
- Crop a dataset containing European Centre for Medium
247
- Range Weather Forecasts (ECMWF) global climate reanalysis product
248
- (ERA5) variables to a location.
249
-
250
- The output spatial grid is snapped to the nearest input grid points.
251
-
252
- Parameters
253
- ----------
254
- ds : xarray dataset
255
- A dataset containing ERA5 variables of interest.
256
-
257
- lat: tuple or list
258
- Latitude range for query.
259
-
260
- lon: tuple or list
261
- Longitude range for query.
262
-
263
- Returns
264
- -------
265
- An xarray dataset containing ERA5 variables for the selected location.
266
-
267
- """
268
-
269
- if min(lon) < 0:
270
- # re-order along longitude to go from -180 to 180
271
- ds = ds.assign_coords({"lon": (((ds.lon + 180) % 360) - 180)})
272
- ds = ds.reindex({ "lon": np.sort(ds.lon)})
273
-
274
- # find the nearest lat lon boundary points
275
- test = ds.sel(lat=list(lat), lon=list(lon), method='nearest')
276
-
277
- # define the lat/lon grid
278
- lat_range = slice(test.lat.max().values, test.lat.min().values)
279
- lon_range = slice(test.lon.min().values, test.lon.max().values)
280
-
281
- # crop and keep attrs
282
- output = ds.sel(lat=lat_range, lon=lon_range)
283
- output.attrs = ds.attrs
284
-
285
- for var in output.data_vars:
286
- output[var].attrs = ds[var].attrs
287
- return output
288
-
289
-
290
- def load_era5(var, lat, lon, time, grid='nearest', **kwargs):
291
- """
292
- Returns a European Centre for Medium Range Weather Forecasts (ECMWF)
293
- global climate reanalysis product (ERA5) variable for a selected
294
- location and time window.
295
-
296
- Parameters
297
- ----------
298
- var : string
299
- Name of the ERA5 climate variable to download, e.g
300
- "air_temperature_at_2_metres"
301
-
302
- lat: tuple or list
303
- Latitude range for query.
304
-
305
- lon: tuple or list
306
- Longitude range for query.
307
-
308
- time: tuple or list
309
- Time range for query.
310
-
311
- grid: string
312
- Option for output spatial gridding.
313
- The default is 'nearest', for which output spatial grid is
314
- snapped to the nearest ERA5 input grid points.
315
- Alternatively, output spatial grid will either include input
316
- grid points within lat/lon boundaries or the nearest point if
317
- none is within the search location.
318
-
319
- Returns
320
- -------
321
- An xarray dataset containing the variable for the selected location
322
- and time window.
323
-
324
- """
325
-
326
- ds = get_era5_daily(var, time[0], time[1], **kwargs)
327
- if grid == 'nearest':
328
- return era5_area_nearest(ds, lat, lon).compute()
329
- else:
330
- return era5_area_crop(ds, lat, lon).compute()
File without changes
File without changes