smashbox-1.0-py2.py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- smashbox/.spyproject/config/backups/codestyle.ini.bak +8 -0
- smashbox/.spyproject/config/backups/encoding.ini.bak +6 -0
- smashbox/.spyproject/config/backups/vcs.ini.bak +7 -0
- smashbox/.spyproject/config/backups/workspace.ini.bak +12 -0
- smashbox/.spyproject/config/codestyle.ini +8 -0
- smashbox/.spyproject/config/defaults/defaults-codestyle-0.2.0.ini +5 -0
- smashbox/.spyproject/config/defaults/defaults-encoding-0.2.0.ini +3 -0
- smashbox/.spyproject/config/defaults/defaults-vcs-0.2.0.ini +4 -0
- smashbox/.spyproject/config/defaults/defaults-workspace-0.2.0.ini +6 -0
- smashbox/.spyproject/config/encoding.ini +6 -0
- smashbox/.spyproject/config/vcs.ini +7 -0
- smashbox/.spyproject/config/workspace.ini +12 -0
- smashbox/__init__.py +8 -0
- smashbox/asset/flwdir/flowdir_fr_1000m.tif +0 -0
- smashbox/asset/outlets/.Rhistory +0 -0
- smashbox/asset/outlets/db_bnbv_fr.csv +142704 -0
- smashbox/asset/outlets/db_bnbv_light.csv +42084 -0
- smashbox/asset/outlets/db_sites.csv +8700 -0
- smashbox/asset/outlets/db_stations.csv +2916 -0
- smashbox/asset/outlets/db_stations_example.csv +19 -0
- smashbox/asset/outlets/edit_database.py +185 -0
- smashbox/asset/outlets/readme.txt +5 -0
- smashbox/asset/params/ci.tif +0 -0
- smashbox/asset/params/cp.tif +0 -0
- smashbox/asset/params/ct.tif +0 -0
- smashbox/asset/params/kexc.tif +0 -0
- smashbox/asset/params/kmlt.tif +0 -0
- smashbox/asset/params/llr.tif +0 -0
- smashbox/asset/setup/setup_rhax_gr4_dt3600.yaml +15 -0
- smashbox/asset/setup/setup_rhax_gr4_dt900.yaml +15 -0
- smashbox/asset/setup/setup_rhax_gr5_dt3600.yaml +15 -0
- smashbox/asset/setup/setup_rhax_gr5_dt900.yaml +15 -0
- smashbox/init/README.md +3 -0
- smashbox/init/__init__.py +3 -0
- smashbox/init/multimodel_statistics.py +405 -0
- smashbox/init/param.py +799 -0
- smashbox/init/smashbox.py +186 -0
- smashbox/model/__init__.py +1 -0
- smashbox/model/atmos_data_connector.py +518 -0
- smashbox/model/mesh.py +185 -0
- smashbox/model/model.py +829 -0
- smashbox/model/setup.py +109 -0
- smashbox/plot/__init__.py +1 -0
- smashbox/plot/myplot.py +1133 -0
- smashbox/plot/plot.py +1662 -0
- smashbox/read_inputdata/__init__.py +1 -0
- smashbox/read_inputdata/read_data.py +1229 -0
- smashbox/read_inputdata/smashmodel.py +395 -0
- smashbox/stats/__init__.py +1 -0
- smashbox/stats/mystats.py +1632 -0
- smashbox/stats/stats.py +2022 -0
- smashbox/test.py +532 -0
- smashbox/test_average_stats.py +122 -0
- smashbox/test_mesh.r +8 -0
- smashbox/test_mesh_from_graffas.py +69 -0
- smashbox/tools/__init__.py +1 -0
- smashbox/tools/geo_toolbox.py +1028 -0
- smashbox/tools/tools.py +461 -0
- smashbox/tutorial_R.r +182 -0
- smashbox/tutorial_R_graffas.r +88 -0
- smashbox/tutorial_R_graffas_local.r +33 -0
- smashbox/tutorial_python.py +102 -0
- smashbox/tutorial_readme.py +261 -0
- smashbox/tutorial_report.py +58 -0
- smashbox/tutorials/Python_tutorial.md +124 -0
- smashbox/tutorials/R_Graffas_tutorial.md +153 -0
- smashbox/tutorials/R_tutorial.md +121 -0
- smashbox/tutorials/__init__.py +6 -0
- smashbox/tutorials/generate_doc.md +7 -0
- smashbox-1.0.dist-info/METADATA +998 -0
- smashbox-1.0.dist-info/RECORD +73 -0
- smashbox-1.0.dist-info/WHEEL +5 -0
- smashbox-1.0.dist-info/licenses/LICENSE +100 -0
smashbox/model/atmos_data_connector.py
ADDED
@@ -0,0 +1,518 @@

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 15 10:39:36 2025

@author: maxime
"""

import numpy as np
import datetime
import rasterio
from rasterio.transform import from_origin
from rasterio.io import MemoryFile
from rasterio.enums import Resampling
from rasterio.warp import reproject
from rasterio.windows import from_bounds

from tqdm import tqdm

from smashbox.tools import geo_toolbox


class atmos_data_connector:
    """Class atmos_data_connector. This class handles an external atmos data
    (precipitation and potential evapotranspiration) provider. Precipitation and
    evapotranspiration can be provided to Smash as a numpy.ndarray. This class has
    some functions to read and crop the array."""

    def __init__(
        self,
        input_prcp: np.ndarray | None = None,
        input_pet: np.ndarray | None = None,
        input_dt: float = 3600.0,
        input_res: tuple | list = (1000.0, 1000.0),
        input_start_time: str = "2050-01-01 01:00",
        input_bbox: dict | None = None,
        input_epsg: int = 2154,
    ):
        """

        :param input_prcp: Matrix of shape (nbx, nby, nbts) of the precipitation,
            defaults to None
        :type input_prcp: np.ndarray | None, optional
        :param input_pet: Matrix of shape (nbx, nby, nbts) of the evapotranspiration,
            defaults to None
        :type input_pet: np.ndarray | None, optional
        :param input_dt: Time step of the input atmos data in seconds, defaults to 3600.0
        :type input_dt: float, optional
        :param input_res: Resolution of the input data in meters, tuple or list of the
            corresponding resolution in the x and y direction, defaults to (1000.0, 1000.0)
        :type input_res: tuple | list, optional
        :param input_start_time: The custom start time of the input atmos data chronicle,
            defaults to "2050-01-01 01:00"
        :type input_start_time: str, optional
        :param input_bbox: The bbox of the input atmos data, defaults to None. If None,
            the bbox of the Smash model is used and the bottom-left corner of the matrix
            is positioned according to the Smash bbox.
        :type input_bbox: dict | None, optional
        :param input_epsg: The EPSG code of the input bbox coordinates, defaults to 2154
        :type input_epsg: int, optional

        """

        self.input_prcp = input_prcp
        """Input precipitation matrix. np.ndarray of shape (nbx, nby, nbts)."""
        self.input_pet = input_pet
        """Input evapotranspiration matrix. np.ndarray of shape (nbx, nby, nbts)."""
        self.input_res = input_res
        """Input resolution of the atmos data matrix. tuple | list of the corresponding
        resolution in the x and y direction (meters)."""
        self.input_dt = input_dt
        """Input time-step in seconds of the atmos data."""
        self.input_bbox = input_bbox
        """Input bounding box of the atmos data matrix. Dictionary
        {'left':, 'right':, 'top':, 'bottom':}."""
        self.input_epsg = input_epsg
        """Input EPSG code of the corresponding coordinates of the bounding box."""
        self.input_start_time = None
        """Custom start-time date of the chronicle."""
        self.input_ntimestep = None
        """Number of time-steps of the chronicle."""

        self.smash_prcp = None
        """Cropped Smash precipitation matrix that will be copied into the Smash model."""
        self.smash_pet = None
        """Cropped PET matrix that will be copied into the Smash model."""
        self.smash_start_time = None
        """Future Smash setup start_time."""
        self.smash_end_time = None
        """Future Smash setup end_time."""

        if input_prcp is not None:
            self.get_ntimestep(input_data=input_prcp)
            self.set_setup_datetime(input_dt=input_dt, input_start_time=input_start_time)

        if input_pet is not None:
            if input_pet.shape != input_prcp.shape:
                raise ValueError(
                    "The shape of input_pet and input_prcp must be the same."
                )

    def get_ntimestep(self, input_data: np.ndarray | None = None):
        """
        Compute the number of time-steps of the input atmos data.
        """

        if input_data is not None:

            if len(input_data.shape) != 3:
                raise ValueError(
                    "</> Input atmos data matrix 'input_data' must be an"
                    " array of shape (nrow, ncol, n_time_step)."
                )

            self.input_ntimestep = input_data.shape[2]

    def set_setup_datetime(
        self, input_dt: float = 3600.0, input_start_time: str = "2050-01-01 01:00"
    ):
        """
        Compute the start-time and end-time of the future Smash simulation.

        :param input_dt: The input time-step of the atmos data in seconds,
            defaults to 3600.0
        :type input_dt: float, optional
        :param input_start_time: The custom input start time, defaults to "2050-01-01 01:00"
        :type input_start_time: str, optional

        """

        if self.input_ntimestep is None:
            raise ValueError(
                "</> self.input_ntimestep is None. Use self.get_ntimestep() first."
            )

        end_time = datetime.datetime.fromisoformat(input_start_time) + datetime.timedelta(
            seconds=input_dt * self.input_ntimestep
        )

        self.smash_start_time = input_start_time
        self.smash_end_time = end_time.strftime("%Y-%m-%d %H:%M")

    def change_setup(self, mysetup):
        """
        Change the properties of the setup.mysetup() class.
        :param mysetup: Class setup.mysetup()
        :type mysetup: setup.mysetup

        """
        mysetup.set_setup("start_time", self.smash_start_time)
        mysetup.set_setup("end_time", self.smash_end_time)
        mysetup.set_setup("read_qobs", False)

        if self.input_prcp is not None:
            mysetup.set_setup("read_prcp", False)

        if self.input_pet is not None:
            mysetup.set_setup("read_pet", False)

    def read_input_atmos_data(
        self,
        output_bbox: dict = None,
        output_epsg: int = 2154,
        output_res: tuple | list = (1000.0, 1000.0),
        output_shape: tuple | list = (-99, -99),
        resampling_method: str = "home_made_with_scipy_zoom",
    ):
        """
        Read, crop and resample the input atmos data if required. Three methods are
        provided through the resampling_method option.
        :param output_bbox: The output bounding box of the matrix, defaults to None
        :type output_bbox: dict, optional
        :param output_epsg: The output EPSG code of the coordinates, defaults to 2154
        :type output_epsg: int, optional
        :param output_res: The output resolution of the atmos data matrix,
            defaults to (1000.0, 1000.0)
        :type output_res: tuple | list, optional
        :param output_shape: The output shape of the atmos data matrix.
            If output_shape and output_bbox are provided and the input atmos data matrix
            matches these characteristics, the resampling is skipped (much faster).
            Defaults to (-99, -99)
        :type output_shape: tuple | list, optional
        :param resampling_method: The resampling method to use; choices are rasterio_1,
            rasterio_2 and home_made_with_scipy_zoom. Defaults to "home_made_with_scipy_zoom"
        :type resampling_method: str, optional

        """

        if resampling_method == "rasterio_1":
            if self.input_prcp is not None:
                array = self.read_input_data(
                    self.input_prcp,
                    output_bbox=output_bbox,
                    output_epsg=output_epsg,
                    output_res=output_res,
                    output_shape=output_shape,
                )
                self.smash_prcp = array

            if self.input_pet is not None:
                array = self.read_input_data(
                    self.input_pet,
                    output_bbox=output_bbox,
                    output_epsg=output_epsg,
                    output_res=output_res,
                    output_shape=output_shape,
                )
                self.smash_pet = array

        elif resampling_method == "rasterio_2":
            if self.input_prcp is not None:
                array = self.read_input_data2(
                    self.input_prcp,
                    output_bbox=output_bbox,
                    output_epsg=output_epsg,
                    output_res=output_res,
                    output_shape=output_shape,
                )

                self.smash_prcp = array

            if self.input_pet is not None:
                array = self.read_input_data2(
                    self.input_pet,
                    output_bbox=output_bbox,
                    output_epsg=output_epsg,
                    output_res=output_res,
                    output_shape=output_shape,
                )
                self.smash_pet = array

        elif resampling_method == "home_made_with_scipy_zoom":
            if self.input_prcp is not None:
                array = self.read_input_data3(
                    self.input_prcp,
                    output_bbox=output_bbox,
                    output_epsg=output_epsg,
                    output_res=output_res,
                    output_shape=output_shape,
                )
                self.smash_prcp = array

            if self.input_pet is not None:
                array = self.read_input_data3(
                    self.input_pet,
                    output_bbox=output_bbox,
                    output_epsg=output_epsg,
                    output_res=output_res,
                    output_shape=output_shape,
                )
                self.smash_pet = array

    def read_input_data(
        self,
        input_data: np.ndarray | None,
        output_bbox: dict = None,
        output_epsg: int = 2154,
        output_res: tuple | list = (1000.0, 1000.0),
        output_shape: tuple | list = (-99, -99),
    ):
        """
        Read, crop and resample an input matrix. Uses a rasterio MemoryFile and the
        rasterio reproject function. Slowest method.
        :param input_data: Input data as a matrix np.ndarray with shape (nbx, nby, nbts)
        :type input_data: np.ndarray | None
        :param output_bbox: The output bounding box of the matrix, defaults to None
        :type output_bbox: dict, optional
        :param output_epsg: The output EPSG code of the coordinates, defaults to 2154
        :type output_epsg: int, optional
        :param output_res: The output resolution of the atmos data matrix,
            defaults to (1000.0, 1000.0)
        :type output_res: tuple | list, optional
        :param output_shape: The output shape of the atmos data matrix.
            If output_shape and output_bbox are provided and the input atmos data matrix
            matches these characteristics, the resampling is skipped (much faster).
            Defaults to (-99, -99)
        :type output_shape: tuple | list, optional
        :return: Array with shape and resolution corresponding to the Smash model
        :rtype: np.ndarray

        """

        height, width, ntimestep = input_data.shape
        input_crs = f"EPSG:{self.input_epsg}"
        output_crs = f"EPSG:{output_epsg}"

        # Create the transform (origin at the top-left corner, hence top and left)
        if self.input_bbox is None:
            print(
                "</> Warning: no bbox or crs was provided with the graffas"
                " prcp. We suppose the domain of the Graffas prcp equal to the"
                " domain of the Smash mesh."
            )
            self.input_bbox = output_bbox

        if (
            sorted(self.input_bbox) == sorted(output_bbox)
            and input_data.shape[0:2] == output_shape
        ):
            return input_data

        transform = from_origin(
            self.input_bbox["left"],
            self.input_bbox["top"],
            self.input_res[0],
            self.input_res[1],
        )

        with MemoryFile() as memfile:
            with memfile.open(
                driver="GTiff",
                height=height,
                width=width,
                count=self.input_ntimestep,
                dtype=input_data.dtype,
                transform=transform,
                crs=input_crs,
            ) as dataset:
                print("</> Writing input data matrix in memory")
                for t in tqdm(range(self.input_ntimestep)):
                    dataset.write(input_data[:, :, t], t + 1)

                new_width = int(
                    (output_bbox["right"] - output_bbox["left"]) / output_res[0]
                )
                new_height = int(
                    (output_bbox["top"] - output_bbox["bottom"]) / output_res[1]
                )
                new_transform = from_origin(
                    output_bbox["left"], output_bbox["top"], output_res[0], output_res[1]
                )

                # Create a target array for the reprojected data
                new_array = np.empty(
                    (self.input_ntimestep, new_height, new_width), dtype=np.float32
                )

                print("</> Reproject and resample input data matrix.")
                for t in tqdm(range(self.input_ntimestep)):
                    reproject(
                        source=rasterio.band(dataset, t + 1),
                        destination=new_array[t],
                        src_transform=transform,
                        src_crs=input_crs,
                        dst_transform=new_transform,
                        dst_crs=output_crs,
                        resampling=Resampling.nearest,
                    )

        return np.transpose(new_array, axes=(1, 2, 0))
        # self.smash_prcp = np.transpose(new_array, axes=(1, 2, 0))

    def read_input_data2(
        self,
        input_data: np.ndarray | None,
        output_bbox: dict = None,
        output_epsg: int = 2154,
        output_res: tuple | list = (1000.0, 1000.0),
        output_shape: tuple | list = (-99, -99),
    ):
        """
        Read, crop and resample an input matrix. Uses a rasterio MemoryFile and the
        rasterio read function for resampling.
        :param input_data: Input data as a matrix np.ndarray with shape (nbx, nby, nbts)
        :type input_data: np.ndarray | None
        :param output_bbox: The output bounding box of the matrix, defaults to None
        :type output_bbox: dict, optional
        :param output_epsg: The output EPSG code of the coordinates, defaults to 2154
        :type output_epsg: int, optional
        :param output_res: The output resolution of the atmos data matrix,
            defaults to (1000.0, 1000.0)
        :type output_res: tuple | list, optional
        :param output_shape: The output shape of the atmos data matrix.
            If output_shape and output_bbox are provided and the input atmos data matrix
            matches these characteristics, the resampling is skipped (much faster).
            Defaults to (-99, -99)
        :type output_shape: tuple | list, optional
        :return: Array with shape and resolution corresponding to the Smash model
        :rtype: np.ndarray

        """

        if input_data is None:
            raise ValueError(
                "</> self.input_data is None. Use grafas.grafas(input_data=np.array()) or"
                " sb.model.graffas_connector(input_data=np.array()) "
                "to upload precipitation."
            )

        height, width, ntimestep = input_data.shape
        crs = f"EPSG:{self.input_epsg}"

        if self.input_bbox is None:
            print(
                "</> Warning: no bbox or crs was provided with the graffas"
                " prcp. We suppose the domain of the Graffas prcp equal to"
                " the domain of the Smash mesh."
            )
            self.input_bbox = output_bbox

        if (
            sorted(self.input_bbox) == sorted(output_bbox)
            and input_data.shape[0:2] == output_shape
        ):
            return input_data

        transform = from_origin(
            self.input_bbox["left"],
            self.input_bbox["top"],
            self.input_res[0],
            self.input_res[1],
        )

        with MemoryFile() as memfile:
            with memfile.open(
                driver="GTiff",
                height=height,
                width=width,
                count=self.input_ntimestep,
                dtype=input_data.dtype,
                transform=transform,
                crs=crs,
            ) as dataset:
                for t in tqdm(range(self.input_ntimestep)):
                    dataset.write(input_data[:, :, t], t + 1)

                x_scale_factor = dataset.res[0] / output_res[0]
                y_scale_factor = dataset.res[1] / output_res[1]

                # resampling first to avoid spatial shifting of the parameters
                prcp_resampled = dataset.read(
                    out_shape=(
                        dataset.count,
                        int(dataset.height * y_scale_factor),
                        int(dataset.width * x_scale_factor),
                    ),
                    resampling=Resampling.nearest,
                )

                # scale image transform
                scaled_transform = dataset.transform * dataset.transform.scale(
                    (dataset.width / prcp_resampled.shape[-1]),
                    (dataset.height / prcp_resampled.shape[-2]),
                )

                # Get a window that corresponds to the smaller raster's bounds
                window = from_bounds(**output_bbox, transform=scaled_transform)

                prcp_cropped = np.transpose(
                    prcp_resampled[
                        :,
                        int(window.row_off) : int(window.row_off + window.height),
                        int(window.col_off) : int(window.col_off + window.width),
                    ],
                    axes=(1, 2, 0),
                )
        return prcp_cropped

    def read_input_data3(
        self,
        input_data: np.ndarray | None,
        output_bbox: dict = None,
        output_epsg: int = 2154,
        output_res: tuple | list = (1000.0, 1000.0),
        output_shape: tuple | list = (-99, -99),
    ):
        """
        Read, crop and resample an input matrix. Uses home-made functions to crop the
        array and the scipy zoom method for resampling.
        :param input_data: Input data as a matrix np.ndarray with shape (nbx, nby, nbts)
        :type input_data: np.ndarray | None
        :param output_bbox: The output bounding box of the matrix, defaults to None
        :type output_bbox: dict, optional
        :param output_epsg: The output EPSG code of the coordinates, defaults to 2154
        :type output_epsg: int, optional
        :param output_res: The output resolution of the atmos data matrix,
            defaults to (1000.0, 1000.0)
        :type output_res: tuple | list, optional
        :param output_shape: The output shape of the atmos data matrix.
            If output_shape and output_bbox are provided and the input atmos data matrix
            matches these characteristics, the resampling is skipped (much faster).
            Defaults to (-99, -99)
        :type output_shape: tuple | list, optional
        :return: Array with shape and resolution corresponding to the Smash model
        :rtype: np.ndarray

        """
        if self.input_bbox is None:
            print(
                "</> Warning: no bbox or crs was provided with the graffas"
                " prcp. We suppose the domain of the Graffas prcp equal to the"
                " domain of the Smash mesh."
            )
            self.input_bbox = output_bbox

        if (
            sorted(self.input_bbox) == sorted(output_bbox)
            and input_data.shape[0:2] == output_shape
        ):
            return input_data

        print("</> use 'home_made_with_scipy_zoom' to crop and resample the input array")
        # Create a target array for the reprojected data
        new_array = np.empty((*output_shape, self.input_ntimestep), dtype=np.float32)

        for t in tqdm(range(self.input_ntimestep)):
            new_array[:, :, t] = geo_toolbox.crop_array(
                input_data[:, :, t],
                bbox_in=self.input_bbox,
                res_in={"dx": self.input_res[0], "dy": self.input_res[1]},
                bbox_out=output_bbox,
                res_out={"dx": output_res[0], "dy": output_res[1]},
                order=0,
                cval=-99.0,
                grid_mode=True,
            )

        return new_array
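For orientation, here is a minimal, hypothetical usage sketch of the connector listed above. It is not part of the wheel: the array sizes, bounding boxes and grid resolution are made-up values chosen so that the 1000 m grid matches the input shape, and it assumes the package-internal geo_toolbox.crop_array behaves as the call in read_input_data3 suggests.

# Illustrative only, not shipped in the package.
import numpy as np
from smashbox.model.atmos_data_connector import atmos_data_connector

# (nrow, ncol, n_time_step) precipitation cube on an assumed 1000 m grid
prcp = np.random.rand(100, 120, 24).astype(np.float32)

connector = atmos_data_connector(
    input_prcp=prcp,
    input_dt=3600.0,
    input_res=(1000.0, 1000.0),
    input_start_time="2050-01-01 01:00",
    input_bbox={"left": 800000.0, "right": 920000.0,
                "top": 6500000.0, "bottom": 6400000.0},
    input_epsg=2154,
)

# Crop onto an assumed 50 km x 50 km Smash domain; smash_prcp, smash_start_time
# and smash_end_time are then ready to be copied into the Smash model.
connector.read_input_atmos_data(
    output_bbox={"left": 850000.0, "right": 900000.0,
                 "top": 6480000.0, "bottom": 6430000.0},
    output_res=(1000.0, 1000.0),
    output_shape=(50, 50),
    resampling_method="home_made_with_scipy_zoom",
)
print(connector.smash_prcp.shape, connector.smash_start_time, connector.smash_end_time)
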
smashbox/model/mesh.py
ADDED
@@ -0,0 +1,185 @@

import smash
import pandas as pd
import geopandas as gpd
import os
import numpy as np
from smashbox.init.param import smashboxparam


class mesh:
    """Class mesh(). This class provides functions and attributes to generate, load,
    write and store the mesh used by Smash."""

    def __init__(self):
        self.mesh = None
        self.catchment_polygon = None

    def write_mesh(self, filename: os.PathLike | None = None):
        """
        Write the mesh of the Smash model using the hdf5 format.
        :param filename: Path to the file where to write the mesh, defaults to None
        :type filename: os.PathLike | None, optional

        """
        if filename is not None:
            smash.io.save_mesh(self.mesh, path=filename)
        else:
            raise ValueError(f"Output filename '{filename}' is None.")

    def load_mesh(self, filename: os.PathLike | None = None):
        """
        Read a mesh for Smash stored with the hdf5 format.
        :param filename: path to the hdf5 file
        :type filename: os.PathLike | None, optional

        """

        if os.path.exists(filename):
            self.mesh = smash.io.read_mesh(filename)
        else:
            raise ValueError(f"{filename} does not exist.")

    def generate_mesh(
        self,
        param: smashboxparam | None = None,
        query: str | None = None,
        max_depth: float = 1.0,
        area_error_th: None | float = None,
    ):
        """
        :param param: Class param.smashboxparam(), stores the main smashbox parameters
        :type param: param.smashboxparam()
        :param query: Any pandas dataframe query as a string: '(SURF>20) & (SURF<100)'.
            This query must be built using the fields (column names) of the outlet database.
            https://pandas.pydata.org/docs/user_guide/indexing.html#the-query-method
        :type query: str
        :param max_depth: The maximum depth accepted by the algorithm to find the catchment
            outlet. A **max_depth** of 1 means that the algorithm will search, among the
            combinations in
            (``row - 1``, ``row``, ``row + 1``; ``col - 1``, ``col``, ``col + 1``),
            the coordinates that minimize the relative error between the given catchment
            area and the modeled catchment area calculated from the flow directions file.
        :type max_depth: int, default 1
        :param area_error_th: Tolerated error during the positioning of the outlets. If the
            error `(Ssim-Sobs)/Sobs > area_error_th`, the outlet will be excluded.
        :type area_error_th: float

        """

        if param.bbox is not None:
            bbox = [
                param.bbox["left"],
                param.bbox["right"],
                param.bbox["bottom"],
                param.bbox["top"],
            ]
        else:
            bbox = None

        if not os.path.exists(param.outlets_database):
            param.outlets_database = os.path.join(
                param.asset_dir, "outlets", param.outlets_database
            )

        if os.path.exists(param.outlets_database):
            stations_calage = pd.read_csv(param.outlets_database)
        else:
            raise ValueError(f"</> Error: file {param.outlets_database} not found")

        if param.outletsID is not None:
            if len(param.outletsID) > 0:
                stations_calage = stations_calage.loc[
                    stations_calage[param.outlets_database_fields["id"]].isin(
                        param.outletsID
                    )
                ]

            if query is not None:
                stations_calage = stations_calage.query(query)

            # selection by area
            # if max_surf is not None:
            #     stations_calage = stations_calage[
            #         stations_calage[param.outlets_database_fields["area"]] <= max_surf
            #     ]

            # if min_surf is not None:
            #     stations_calage = stations_calage[
            #         stations_calage[param.outlets_database_fields["area"]] >= min_surf
            #     ]

            if bbox is not None:
                stations_calage = stations_calage[
                    (
                        stations_calage[param.outlets_database_fields["coord_x"]]
                        >= param.bbox["left"]
                    )
                    & (
                        stations_calage[param.outlets_database_fields["coord_x"]]
                        <= param.bbox["right"]
                    )
                    & (
                        stations_calage[param.outlets_database_fields["coord_y"]]
                        >= param.bbox["bottom"]
                    )
                    & (
                        stations_calage[param.outlets_database_fields["coord_y"]]
                        <= param.bbox["top"]
                    )
                ]

            if len(stations_calage) == 0:
                print(
                    f"</> Error: outlets {param.outletsID} not found in"
                    f" {param.outlets_database}"
                )
                raise ValueError(
                    f"</> Error: outlets {param.outletsID} not found in"
                    f" {param.outlets_database}"
                )

            columns = {
                "coord_x": param.outlets_database_fields["coord_x"],
                "coord_y": param.outlets_database_fields["coord_y"],
                "area": param.outlets_database_fields["area"],
                "id": param.outlets_database_fields["id"],
            }

            self.mesh = smash.factory.generate_mesh(
                flwdir_path=param.flowdir,
                bbox=bbox,
                x=np.array(stations_calage[columns["coord_x"]][:]),
                y=np.array(stations_calage[columns["coord_y"]][:]),
                area=np.array(
                    stations_calage[columns["area"]][:] * 1e6
                ),  # Convert km² to m²
                code=np.array(stations_calage[columns["id"]][:]),
                epsg=param.epsg,
                shp_path=param.outlets_shapefile,
                max_depth=max_depth,
                area_error_th=area_error_th,
            )

            if param.outlets_shapefile is not None:
                print("</> Outlets shapefile detected. Loading outlets ...")
                catchment_polygon = gpd.read_file(param.outlets_shapefile)
                self.catchment_polygon = catchment_polygon.loc[
                    catchment_polygon.code.isin(
                        # np.array(stations_calage[columns["id"]][:])
                        self.mesh["code"]
                    )
                ]
                del catchment_polygon

        else:
            stations_calage = pd.DataFrame(None)

            if bbox is None:
                raise ValueError(
                    "Bbox is None. If no outlets are provided, the bbox must be defined."
                )

            self.mesh = smash.factory.generate_mesh(
                flwdir_path=param.flowdir, bbox=bbox, epsg=param.epsg, max_depth=max_depth
            )
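Likewise, a minimal, hypothetical sketch of driving the mesh class listed above. It is not part of the wheel: it assumes smashboxparam can be constructed with no arguments and that its flowdir, outlets_database, outlets_database_fields, outletsID, epsg and bbox attributes have been filled in beforehand; the 'SURF' column name is taken from the query example in the docstring.

# Illustrative only, not shipped in the package.
from smashbox.init.param import smashboxparam
from smashbox.model.mesh import mesh

# Assumed to already carry flowdir, outlets_database, outlets_database_fields,
# outletsID, epsg and bbox (see smashbox/init/param.py in the manifest above).
p = smashboxparam()

m = mesh()
# Keep only mid-sized catchments from the outlet database, then build the Smash mesh.
m.generate_mesh(param=p, query="(SURF>20) & (SURF<100)", max_depth=1.0)
m.write_mesh("mesh_subset.hdf5")

# Reload the stored mesh later.
m2 = mesh()
m2.load_mesh("mesh_subset.hdf5")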