smashbox 1.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73) hide show
  1. smashbox/.spyproject/config/backups/codestyle.ini.bak +8 -0
  2. smashbox/.spyproject/config/backups/encoding.ini.bak +6 -0
  3. smashbox/.spyproject/config/backups/vcs.ini.bak +7 -0
  4. smashbox/.spyproject/config/backups/workspace.ini.bak +12 -0
  5. smashbox/.spyproject/config/codestyle.ini +8 -0
  6. smashbox/.spyproject/config/defaults/defaults-codestyle-0.2.0.ini +5 -0
  7. smashbox/.spyproject/config/defaults/defaults-encoding-0.2.0.ini +3 -0
  8. smashbox/.spyproject/config/defaults/defaults-vcs-0.2.0.ini +4 -0
  9. smashbox/.spyproject/config/defaults/defaults-workspace-0.2.0.ini +6 -0
  10. smashbox/.spyproject/config/encoding.ini +6 -0
  11. smashbox/.spyproject/config/vcs.ini +7 -0
  12. smashbox/.spyproject/config/workspace.ini +12 -0
  13. smashbox/__init__.py +8 -0
  14. smashbox/asset/flwdir/flowdir_fr_1000m.tif +0 -0
  15. smashbox/asset/outlets/.Rhistory +0 -0
  16. smashbox/asset/outlets/db_bnbv_fr.csv +142704 -0
  17. smashbox/asset/outlets/db_bnbv_light.csv +42084 -0
  18. smashbox/asset/outlets/db_sites.csv +8700 -0
  19. smashbox/asset/outlets/db_stations.csv +2916 -0
  20. smashbox/asset/outlets/db_stations_example.csv +19 -0
  21. smashbox/asset/outlets/edit_database.py +185 -0
  22. smashbox/asset/outlets/readme.txt +5 -0
  23. smashbox/asset/params/ci.tif +0 -0
  24. smashbox/asset/params/cp.tif +0 -0
  25. smashbox/asset/params/ct.tif +0 -0
  26. smashbox/asset/params/kexc.tif +0 -0
  27. smashbox/asset/params/kmlt.tif +0 -0
  28. smashbox/asset/params/llr.tif +0 -0
  29. smashbox/asset/setup/setup_rhax_gr4_dt3600.yaml +15 -0
  30. smashbox/asset/setup/setup_rhax_gr4_dt900.yaml +15 -0
  31. smashbox/asset/setup/setup_rhax_gr5_dt3600.yaml +15 -0
  32. smashbox/asset/setup/setup_rhax_gr5_dt900.yaml +15 -0
  33. smashbox/init/README.md +3 -0
  34. smashbox/init/__init__.py +3 -0
  35. smashbox/init/multimodel_statistics.py +405 -0
  36. smashbox/init/param.py +799 -0
  37. smashbox/init/smashbox.py +186 -0
  38. smashbox/model/__init__.py +1 -0
  39. smashbox/model/atmos_data_connector.py +518 -0
  40. smashbox/model/mesh.py +185 -0
  41. smashbox/model/model.py +829 -0
  42. smashbox/model/setup.py +109 -0
  43. smashbox/plot/__init__.py +1 -0
  44. smashbox/plot/myplot.py +1133 -0
  45. smashbox/plot/plot.py +1662 -0
  46. smashbox/read_inputdata/__init__.py +1 -0
  47. smashbox/read_inputdata/read_data.py +1229 -0
  48. smashbox/read_inputdata/smashmodel.py +395 -0
  49. smashbox/stats/__init__.py +1 -0
  50. smashbox/stats/mystats.py +1632 -0
  51. smashbox/stats/stats.py +2022 -0
  52. smashbox/test.py +532 -0
  53. smashbox/test_average_stats.py +122 -0
  54. smashbox/test_mesh.r +8 -0
  55. smashbox/test_mesh_from_graffas.py +69 -0
  56. smashbox/tools/__init__.py +1 -0
  57. smashbox/tools/geo_toolbox.py +1028 -0
  58. smashbox/tools/tools.py +461 -0
  59. smashbox/tutorial_R.r +182 -0
  60. smashbox/tutorial_R_graffas.r +88 -0
  61. smashbox/tutorial_R_graffas_local.r +33 -0
  62. smashbox/tutorial_python.py +102 -0
  63. smashbox/tutorial_readme.py +261 -0
  64. smashbox/tutorial_report.py +58 -0
  65. smashbox/tutorials/Python_tutorial.md +124 -0
  66. smashbox/tutorials/R_Graffas_tutorial.md +153 -0
  67. smashbox/tutorials/R_tutorial.md +121 -0
  68. smashbox/tutorials/__init__.py +6 -0
  69. smashbox/tutorials/generate_doc.md +7 -0
  70. smashbox-1.0.dist-info/METADATA +998 -0
  71. smashbox-1.0.dist-info/RECORD +73 -0
  72. smashbox-1.0.dist-info/WHEEL +5 -0
  73. smashbox-1.0.dist-info/licenses/LICENSE +100 -0
@@ -0,0 +1,461 @@
1
+ #!/usr/bin/env python3
2
+ # -*- coding: utf-8 -*-
3
+ """
4
+ Created on Tue Jul 15 16:40:32 2025
5
+
6
+ @author: maxime
7
+ """
8
+
9
+ import os
10
+ import glob
11
+ import numpy as np
12
+ import pandas as pd
13
+ import numbers
14
+ from tqdm import tqdm
15
+
16
+ from rich import print
17
+ from rich.tree import Tree
18
+ import inspect
19
+
20
+ from typing import get_args
21
+
22
+
23
class Dict2Struct:
    """Lightweight namespace: exposes the given keyword entries as attributes."""

    def __init__(self, **entries):
        # Bind every entry directly onto the instance.
        for key, value in entries.items():
            setattr(self, key, value)
26
+
27
+
28
def with_reticulate():
    """Return True when running inside an R/reticulate session.

    Detection relies on environment variables set by R
    (``R_SESSION_INITIALIZED``) or by reticulate itself
    (``RETICULATE_PYTHON``).
    """
    markers = ("R_SESSION_INITIALIZED", "RETICULATE_PYTHON")
    return any(name in os.environ for name in markers)
31
+
32
+
33
def build_object_tree(obj, name="root"):
    """
    Build a ``rich.tree.Tree`` describing ``obj`` for console display.

    Public attributes are listed with their value or type; methods and
    classes are labelled; attributes whose type lives in the ``smashbox``
    package are expanded recursively into sub-trees.

    :param obj: any object to inspect
    :param name: label of the root node, defaults to "root"
    :return: a ``rich.tree.Tree`` instance (not printed here)
    """
    tree = Tree(f"[bold cyan]Object {name}[/] ({type(obj).__name__})")

    for attr in dir(obj):
        # Skip private and dunder attributes (startswith("_") covers both;
        # the old extra dunder test was redundant).
        if attr.startswith("_"):
            continue
        try:
            val = getattr(obj, attr)
        except Exception:
            # Some properties raise on access; ignore them.
            continue

        if inspect.ismethod(val) or inspect.isfunction(val):
            tree.add(f"[green]Method:[/] {attr}()")
        elif inspect.isclass(val):
            tree.add(f"[blue]Class:[/] {attr}")
            # Bug fix: the sub-tree used to be built and then discarded;
            # attach it so class contents actually appear in the output.
            tree.add(build_object_tree(val, name=attr))
        elif isinstance(val, dict):
            # Label normalized to "Attribute:" (was the inconsistent "Attribut:").
            tree.add(f"[red]Attribute:[/] {attr} = {val.keys()}")
        elif isinstance(
            val,
            (int, float, str, bool, type(None), list, set, tuple),
        ):
            tree.add(f"[red]Attribute:[/] {attr} = {repr(val)}")
        elif isinstance(val, (pd.DataFrame, np.ndarray)):
            tree.add(f"[red]Attribute:[/] {attr} = {type(val)}")
        elif "<class 'smashbox" in str(type(val)):
            # Nested smashbox object: recurse and attach the sub-tree.
            tree.add(build_object_tree(val, name=attr))
        else:
            tree.add(f"[red]Unknown attribute type:[/] {attr} = {type(val)}")

    return tree
79
+
80
+
81
+ def autocast_args(func):
82
+ """
83
+ Decrorator function. Usgae @autocast_args previous the function definition
84
+ The goal of this decorator is to test input args type and auto cast them if possible.
85
+ If the type of the arg is not good and cannot be auto-casted, the function will throw
86
+ an execption.
87
+ :param func: Function on which to apply the decorator
88
+ :type func: Any function
89
+ """
90
+
91
+ sig = inspect.signature(func)
92
+ annotations = func.__annotations__
93
+
94
+ def wrapper(*args, **kwargs):
95
+
96
+ bound = sig.bind(*args, **kwargs)
97
+ bound.apply_defaults()
98
+
99
+ for name, value in bound.arguments.items():
100
+ if name in annotations:
101
+
102
+ target_type = annotations[name]
103
+
104
+ args_ = get_args(target_type)
105
+
106
+ if target_type is None and len(args_) == 0:
107
+ args_ = (type(None),)
108
+ target_type = type(None)
109
+
110
+ if not type(value) in args_:
111
+
112
+ if len(args_) > 1 and type(None) in args_:
113
+
114
+ converted = False
115
+ for t in args_:
116
+
117
+ if t is not type(None):
118
+
119
+ if value is not None:
120
+ try:
121
+ print(
122
+ f"</> Warning: Arg '{name}' of type {type(value)} is being"
123
+ f" converted to {t}"
124
+ )
125
+ bound.arguments[name] = t(value)
126
+ converted = True
127
+ except:
128
+ pass
129
+
130
+ if converted:
131
+ break
132
+
133
+ if not converted:
134
+ raise TypeError(
135
+ f"</> Error: Arg '{name}' must be a type of "
136
+ f" {args_}, got {value}"
137
+ f" ({type(value).__name__})"
138
+ )
139
+
140
+ else:
141
+ if not isinstance(value, target_type):
142
+ try:
143
+ print(
144
+ f"</> Warning: Arg '{name}' of type {type(value)} is being"
145
+ f" converted to {target_type}"
146
+ )
147
+ bound.arguments[name] = target_type(value)
148
+ except Exception:
149
+ raise TypeError(
150
+ f"</> Error: Arg '{name}' must be a type of "
151
+ f" {target_type.__name__}, got {value}"
152
+ f" ({type(value).__name__})"
153
+ )
154
+
155
+ return func(*bound.args, **bound.kwargs)
156
+
157
+ return wrapper
158
+
159
+
160
def infograffas_bbox_extractor(info_graffas):
    """
    Extract the bbox from an ``info_graffas`` object and adapt it to the
    Smash convention (cell centres shifted outward by half a cell).

    Parameter:
    ----------
    info_graffas: Dictionary describing the Graffas domain. At least: a
        dictionary `domain` with keys 'left', 'bottom', 'right', 'top'
        giving the bounding coordinates, and a key 'resolution_sim' giving
        the domain resolution in metres.

    Return
    ------
    bbox, a dictionary of the bbox adapted to Smash.
    """
    domain = info_graffas["domain"]
    half_cell = info_graffas["resolution_sim"] / 2

    return {
        "left": domain["left"] - half_cell,
        "bottom": domain["bottom"] - half_cell,
        "right": domain["right"] + half_cell,
        "top": domain["top"] + half_cell,
    }
184
+
185
+
186
def GraffasVector2SmashArray(vdata, coordinates, resolution):
    """
    Scatter Graffas vector data onto a regular Smash (x, y, time) grid.

    Each row j of ``vdata`` is a time series attached to the point
    ``(coordinates["X"][j], coordinates["Y"][j])``; the point coordinates are
    converted to grid indices using ``resolution``. Cells with no data stay 0.

    :param vdata: array of shape (n_points, n_timesteps)
    :param coordinates: mapping with "X" and "Y" coordinate arrays (metres)
    :param resolution: grid cell size in the same unit as the coordinates
    :return: np.ndarray of shape (nx, ny, n_timesteps)
    """
    coord_x = ((coordinates["X"] - min(coordinates["X"])) / resolution).astype(int)
    coord_y = ((coordinates["Y"] - min(coordinates["Y"])) / resolution).astype(int)

    graffas_prcp = np.zeros(shape=(max(coord_x) + 1, max(coord_y) + 1, vdata.shape[1]))

    # Vectorized scatter: one fancy-indexed assignment replaces the former
    # per-timestep Python double loop (identical result when each (x, y)
    # pair is unique; with duplicate points the last row wins, as before).
    graffas_prcp[coord_x, coord_y, :] = vdata

    return graffas_prcp
200
+
201
+
202
def array_isin(arr1: np.ndarray = None, arr2: np.ndarray = None):
    """
    Return the positions in ``arr1`` of the elements listed in ``arr2``.

    Prints a warning for every element of ``arr2`` absent from ``arr1``.

    :param arr1: array to search in (e.g. mesh outlet names)
    :param arr2: array of requested elements (e.g. outlet names)
    :return: list of integer positions of ``arr2`` elements inside ``arr1``
    :raises ValueError: when no element of ``arr2`` is found in ``arr1``
        (including when either array is None)
    """
    pos = []

    # Bug fix: the membership loop used to run before the None guard,
    # crashing with a TypeError when arr2 was None.
    if arr1 is not None and arr2 is not None:
        for i in range(len(arr2)):
            if not np.any(np.isin(arr1, arr2[i]) == 1):
                print(f"</> Outlet name `{arr2[i]}` does not exist in the mesh.")

        pos = list(np.where(np.isin(arr1, arr2))[0])

    if len(pos) == 0:
        raise ValueError("Invalid outlets name.")

    return pos
218
+
219
+
220
+ def check_asset_path(asset_dir: str = "", path: None | os.PathLike = None):
221
+
222
+ if path is None:
223
+ return path
224
+
225
+ if not os.path.exists(path):
226
+
227
+ mypath = os.path.join(asset_dir)
228
+ matched_file = sorted(glob.glob(f"{mypath}/*{path}*"))
229
+
230
+ if len(matched_file) == 0:
231
+ raise ValueError(
232
+ f"'{path}' is not a valid {asset_dir} filename."
233
+ "Choice are: {os.listdir(mypath)}"
234
+ )
235
+ else:
236
+ path = matched_file[0]
237
+
238
+ return path
239
+
240
+
241
def print_tree(root_path, prefix=""):
    """Print the directory tree rooted at ``root_path`` to stdout.

    Entries are listed in sorted order with box-drawing connectors
    (``├──`` / ``└──``); directories are descended recursively, extending
    ``prefix`` so nested entries line up under their parent.

    :param root_path: directory to display
    :param prefix: indentation carried through recursive calls (callers
        normally leave the default)
    """

    files = sorted(os.listdir(root_path))

    for index, name in enumerate(files):

        path = os.path.join(root_path, name)
        # Last entry gets the closing corner, others the tee connector.
        connector = "└── " if index == len(files) - 1 else "├── "

        print(prefix + connector + name)

        if os.path.isdir(path):
            # Continue the vertical guide only while siblings remain below.
            extension = " " if index == len(files) - 1 else "│ "
            print_tree(path, prefix + extension)
255
+
256
+
257
@autocast_args
def time_resample_prcp_array(
    array: np.ndarray | None,
    input_dt: int | float = 3600.0,
    output_dt: float = 3600.0,
    t_axis: int = 2,
):
    """
    Resample a precipitation array from one time-step to another.

    Aggregation (``input_dt`` < ``output_dt``) sums consecutive chunks of
    time-steps; disaggregation (``input_dt`` > ``output_dt``) repeats each
    step and divides by the chunk size so cumulative amounts are preserved.
    When both time-steps are equal the array is returned unchanged.

    :param array: the precipitation values, time along axis ``t_axis``
        (typically shaped (nbx, nby, nbts))
    :type array: np.ndarray | None
    :param input_dt: time-step of ``array`` in seconds, defaults to 3600.0
    :type input_dt: int | float, optional
    :param output_dt: requested time-step in seconds, defaults to 3600.0
    :type output_dt: float, optional
    :param t_axis: the array axis holding time, defaults to 2
    :type t_axis: int, optional
    :return: The resampled array
    :rtype: np.ndarray
    """

    # Aggregation: sum blocks of `chunk_size` consecutive time-steps.
    if pd.Timedelta(seconds=input_dt) < pd.Timedelta(seconds=output_dt):

        if not (output_dt % input_dt == 0):
            # Message fixed: aggregation requires output_dt to be an exact
            # multiple of input_dt (the old text had it backwards).
            raise ValueError("output_dt must be a multiple of input_dt.")

        print(
            f"</> Resampling array with time-step `{input_dt}s`"
            f" to time-step `{output_dt}s`"
        )
        chunk_size = int(output_dt / input_dt)

        array_trans = np.moveaxis(array, t_axis, 0)  # time axis first

        new_shape = (array_trans.shape[0] // chunk_size, chunk_size) + array_trans.shape[
            1:
        ]

        # Drop trailing steps that do not fill a complete chunk, then
        # split time into (n_chunks, chunk_size) for the summation.
        array_trans_reshaped = array_trans[
            0 : chunk_size * (array_trans.shape[0] // chunk_size)
        ].reshape(new_shape)

        del array_trans

        array_trans_reshaped_sum = np.sum(array_trans_reshaped, axis=1)

        del array_trans_reshaped

        array = np.moveaxis(array_trans_reshaped_sum, 0, t_axis)

        # Bug fix: this used to be `del array_trans_reshaped_mean`, a name
        # that never existed, raising NameError whenever this branch ran.
        del array_trans_reshaped_sum

    # Disaggregation: repeat each step and divide to keep totals.
    if pd.Timedelta(seconds=input_dt) > pd.Timedelta(seconds=output_dt):

        if not (input_dt % output_dt == 0):
            # Message fixed symmetrically with the aggregation branch.
            raise ValueError("input_dt must be a multiple of output_dt.")

        array = np.moveaxis(
            array,
            t_axis,
            len(array.shape) - 1,
        )

        chunk_size = int(input_dt / output_dt)

        # Repeat along the (now last) time axis; the hard-coded `axis=2`
        # is generalized to the actual last axis (identical for 3-D input).
        array = np.repeat(array, chunk_size, axis=len(array.shape) - 1) / chunk_size

        array = np.moveaxis(array, len(array.shape) - 1, t_axis)

    return array
336
+
337
+
338
@autocast_args
def read_hourly_qobs(qobs_directory, code):
    """Read hourly observed discharge series for a list of catchments.

    For each catchment code, the first CSV matching ``*<code>*.csv`` under
    ``qobs_directory`` (searched recursively) is read. Each file is expected
    to hold a single column whose header is the series start date, one value
    per hourly step. Values are placed into a fixed hourly grid running from
    1900-01-01 00:00 to today (rounded to 24 h); cells with no data keep the
    sentinel value -99.0.

    :param qobs_directory: root directory searched recursively for the files
    :param code: iterable of catchment codes (one row of output per code)
    :return: np.ndarray of shape (len(code), n_hourly_steps), -99.0 where missing
    :raises ValueError: when a matched file's header is not a parseable date
    """
    start_time = pd.Timestamp("1900-01-01 00:00")
    end_time = pd.Timestamp.today().round(freq="24h")
    dt = 3600.0  # hourly step, in seconds

    # Number of hourly steps in the global [start_time, end_time] grid.
    npdt = int((end_time - start_time).total_seconds() / dt) + 1

    # -99.0 is the missing-data sentinel expected downstream.
    qobs = np.zeros(shape=(len(code), npdt)) - 99.0

    miss = []

    for i, c in enumerate(code):
        f = glob.glob(f"{qobs_directory}/**/*{c}*.csv", recursive=True)

        if f:
            # Only the first match is used when several files share the code.
            dat = pd.read_csv(f[0])
            try:
                # The column header carries the series start timestamp.
                file_start_time = pd.Timestamp(dat.columns[0])
            except Exception:
                raise ValueError(
                    f"Column header '{dat.columns[0]}' in the observed discharge file for catchment '{c}' "
                    f"is not a valid date"
                ) from None

            file_end_time = file_start_time + pd.Timedelta(seconds=dt * (len(dat) - 1))
            # Offsets (in steps) of the global window relative to the file start.
            # NOTE(review): the trailing `+1` shifts both offsets by one step —
            # looks like it may misalign the series by one hour; confirm intent.
            start_diff = int((start_time - file_start_time).total_seconds() / dt) + 1
            end_diff = int((end_time - file_start_time).total_seconds() / dt) + 1

            # % Check if observed discharge file contains data for corresponding simulation period
            if start_diff > dat.index.max() or end_diff < 0:
                print(
                    f"</> The provided observed discharge file for catchment '{c}' does not contain data for the "
                    f"selected simulation period ['{start_time}', '{end_time}']. The file covers the period "
                    f"['{file_start_time}', '{file_end_time}']",
                )
            else:
                # Clamp the overlap to both the file and the global grid,
                # then copy the overlapping slice into row i.
                ind_start_dat = max(0, start_diff)
                ind_end_dat = min(dat.index.max(), end_diff)
                ind_start_arr = max(0, -start_diff)
                ind_end_arr = ind_start_arr + ind_end_dat - ind_start_dat

                qobs[i, ind_start_arr:ind_end_arr] = dat.iloc[
                    ind_start_dat:ind_end_dat, 0
                ]
        else:
            miss.append(c)

    if miss:
        print(f"</> Missing {len(miss)} observed discharge file(s): {miss}")

    return qobs
390
+
391
+
392
@autocast_args
def read_object_as_dict(instance, recursion_counter: int = 0):
    """
    Create a dictionary from a custom python object.

    Walks the public attributes of ``instance``: arrays, lists, tuples,
    dicts, numbers and strings are stored directly (lists and string-like
    arrays are normalized to unicode numpy arrays); methods are skipped;
    any other attribute is assumed to be a nested object and converted
    recursively.

    Parameters
    ----------
    instance : object
        A custom python object.
    recursion_counter : int, optional
        Depth of the current recursive call; recursion stops beyond 100.

    Return
    ------
    key_data: dict
        A dictionary containing all keys and attributes of the object.
    """
    key_data = {}

    # Guard against runaway recursion (e.g. cyclic object graphs).
    # Bug fix: the counter used to be reset to 0 on entry, which silently
    # defeated the depth limit for every nested call.
    if recursion_counter > 100:
        print("recursion counter exceed the limit of 100... return")
        return key_data

    for attr in dir(instance):
        # Skip private attributes and known f90wrap helpers.
        if attr.startswith("_") or attr in ("from_handle", "copy"):
            continue

        try:
            value = getattr(instance, attr)

            if isinstance(value, (np.ndarray, list, tuple)):
                if isinstance(value, list):
                    value = np.array(value).astype("U")

                # Normalize object/string arrays to plain unicode dtype.
                # NOTE(review): a tuple reaching here has no `.dtype` and
                # falls into the except below, as in the original code.
                if value.dtype == "object" or value.dtype.char == "U":
                    value = value.astype("U")

                key_data.update({attr: value})

            elif isinstance(value, dict):
                key_data.update({attr: value})

            elif isinstance(value, numbers.Number):
                key_data.update({attr: value})

            elif isinstance(value, str):
                key_data.update({attr: value})

            elif inspect.isroutine(value):
                # Skip bound/plain methods. Bug fix: the old test
                # `type(value) == "method"` compared a type to a string and
                # could never be true, so methods were needlessly recursed.
                continue

            else:
                # Nested object: convert recursively, one level deeper.
                depp_key_data = read_object_as_dict(
                    value, recursion_counter=recursion_counter + 1
                )

                if depp_key_data:
                    key_data.update({attr: depp_key_data})

        except Exception:
            # Attribute could not be read or converted; report and move on.
            print(f"unknown type for attribute : {attr}")

    return key_data
smashbox/tutorial_R.r ADDED
@@ -0,0 +1,182 @@
1
# Import the reticulate module to be able to import python modules
library(reticulate)
use_virtualenv("~/python_venv/smashbox") # use a specific python environment


# Import the smashbox package (must be installed in the python environment)
smashbox <- import("smashbox")

# Bounding box of the study area (presumably Lambert-93 metres — TODO confirm CRS)
bbox=list('left'= 875000.0,
          'bottom'= 6228000.0,
          'right'= 1001000.0,
          'top'= 6320000.0)

# Main initialisation of the module
sb <- smashbox$SmashBox()
sb$myparam$set_param("bbox", bbox)
sb$myparam$set_param("outlets_database", "db_sites")

# Create a new model instance named "rex" (accessed below as sb$rex)
sb$newmodel("rex")

# NOTE(review): this first update_setup call is immediately superseded by the
# fuller call below, which sets the same start/end times — confirm it is needed.
sb$rex$mysetup$update_setup(
  list(
    "start_time"= "2014-01-01 00:00",
    "end_time"= "2014-02-01 00:00"
  )
)

# Point the model at the local forcing/observation data and simulation period
sb$rex$mysetup$update_setup(
  list(
    "pet_directory"= "/home/maxime/DATA/ETP-SFR-FRA-INTERA_L93",
    "prcp_directory"= "/home/maxime/DATA/PLUIE",
    "qobs_directory"= "/home/maxime/DATA/QOBS_SITE_60M",
    "start_time"= "2014-01-01 00:00",
    "end_time"= "2014-02-01 00:00"
  )
)

# Build the mesh, keeping only outlets matching the database query
# (the query string values are French database labels — do not translate)
sb$rex$generate_mesh(query="(SURF>40) & (INFLUENCE=='Influence nulle ou faible')", area_error_th=0.2)

# Mesh diagnostics plots
sb$rex$myplot$plot_mesh(fig_settings=list("figname"= "output/mesh.png"))
sb$rex$myplot$plot_catchment_surface_error(fig_settings=list("figname"= "output/mesh_surface_error.png"))
sb$rex$myplot$plot_catchment_surface_consistency(
  fig_settings=list("figname"= "output/mesh_surface_consistency.png")
)

# Run the forward model, keeping the gridded discharge in memory
sb$rex$forward_run(return_options=list("q_domain"= TRUE), warmup=100)


# Compute outlet, spatial and misfit statistics
sb$rex$mystats$foutlets_stats()
sb$rex$mystats$fspatial_stats()
sb$rex$mystats$fmisfit_stats()


# Parameter maps
sb$rex$myplot$multiplot_parameters(
  fig_settings=list("figname"= "output/muliplot_parameters.png", "xsize"=15,"ysize"=15)
)
sb$rex$myplot$plot_parameters(fig_settings=list("figname"= "output/cp_parameters.png"))

# Discharge statistics plots
sb$rex$myplot$plot_outlet_stats(
  fig_settings=list("figname"= "output/outlets_max_discharges.png")
)
sb$rex$myplot$plot_spatial_stats(
  fig_settings=list("figname"= "output/spatial_max_discharges.png")
)


# Hydrographs at the outlets
sb$rex$myplot$plot_hydrograph(fig_settings=list("figname"= "output/discharges_hydrogram.png"))


# Misfit (model/observation agreement) plots
sb$rex$myplot$multiplot_misfit(
  fig_settings=list("figname"= "output/multiplot_nse_misfit.png", "xsize"= 15, "ysize"= 15)
)
sb$rex$myplot$plot_misfit(
  misfit="nnse", fig_settings=list("figname"= "output/nnse_misfit.png")
)
sb$rex$myplot$plot_misfit_map(fig_settings=list("figname"= "output/map_nse_misfit.png"))
81
# Graffas Tutorial

library(reticulate)
use_virtualenv("/nas/RHAX/UTILISATEURS/maxime.jay-allemand/smashbox/smashbox_env_21/") # use a specific python environment

# Import the smashbox package (must be installed in the python environment)
smashbox <- import("smashbox")

path_to_graffas_rainfall="/nas/RHAX/UTILISATEURS/kevin.tartour/2025_Graffas/SPATIAL/Gapeau/DataOutput/simul/RRsim_Gapeau_res_1000_twin1.Rdata"
# load() pulls RRsim/XYsim (and possibly `res`) into the global environment
GraffasRainfall=load(path_to_graffas_rainfall)

# Rainfall transformation: vector (point, time) -> 3-D grid (x, y, time)

# Method 1: explicit double loop
rsb=1000.
# NOTE(review): `rsb` is defined here but `res` is used further down for the
# bbox — confirm `res` comes from the loaded .Rdata, otherwise this is a typo.
coord_x=(XYsim$X-min(XYsim$X))/1000 +1 # column index
coord_y=(XYsim$Y-min(XYsim$Y))/1000 +1 # row index

thisarray=rep(NaN,max(coord_x)*max(coord_y)*dim(RRsim)[2])
graffas_prcp=array(data=thisarray, dim=c(max(coord_x),max(coord_y),dim(RRsim)[2]))

for (t in 1:dim(RRsim)[2]) {
  for (j in 1:length(XYsim$X)) {
    c_x=coord_x[j]
    c_y=coord_y[j]
    graffas_prcp[c_x,c_y,t]=RRsim[j,t]
  }
}


# Method 2: vectorized fill
nx <- length(unique(XYsim$X))
ny <- length(unique(XYsim$Y))
nt <- ncol(RRsim)

xi <- match(XYsim$X, sort(unique(XYsim$X)))
yi <- match(XYsim$Y, sort(unique(XYsim$Y)))

# Create the empty 3-D array
RRsim_3D <- array(NA, dim = c(nx, ny, nt))

# Repeat the (xi, yi) positions for every time-step
xi_rep <- rep(xi, times = nt)
yi_rep <- rep(yi, times = nt)
ti_rep <- rep(1:nt, each = length(xi))

# Vectorized fill
RRsim_3D[cbind(xi_rep, yi_rep, ti_rep)] <- as.vector(RRsim)




bbox=list('left'= min(XYsim$X),
          'bottom'= min(XYsim$Y),
          'right'= min(XYsim$X)+res*dim(graffas_prcp)[1],
          'top'= min(XYsim$Y)+res*dim(graffas_prcp)[2])

# Main initialisation of the module
sb <- smashbox$SmashBox()
sb$myparam$set_param("bbox", bbox)
sb$myparam$set_param("outlets_database", "db_sites")
sb$myparam$set_param(
  "outlets_database_fields",
  list(
    "coord_x"= "X_L93",
    "coord_y"= "Y_L93",
    "area"= "SURF",
    "id"= "CODE_SITE"
  )
)
# Bug fix: removed the trailing comma, which passed an empty third argument
# to set_param and made R error on the call.
sb$myparam$set_param(
  "outlets_shapefile",
  "/nas/RHAX/DONNEES/SIG/BASSINS/CONTOUR/SITES_L93/Hydro_Bassins.shp"
)

sb$newmodel("gapeau")

sb$gapeau$generate_mesh()

# Bug fix: the figname was missing its leading "/" (every other output path
# in this script is absolute under /nas/...).
sb$gapeau$myplot$plot_mesh(fig_settings=list("figname"="/nas/RHAX/UTILISATEURS/maxime.jay-allemand/smashbox/output/gapeau_mesh.png"))

# NOTE(review): `(1:365*24*5)` parses as (1:365)*120 in R, selecting every
# 120th time-step — if the first 365*24*5 steps were intended, the index
# should read (1:(365*24*5)). Left unchanged pending confirmation.
sb$gapeau$atmos_data_connector(input_prcp=graffas_prcp[,,(1:365*24*5)])

sb$gapeau$atmos_data_connector(input_prcp=RRsim_3D[,,(1:365*24*5)])

sb$gapeau$model()

sb$gapeau$myplot$multiplot_parameters(
  fig_settings=list("figname"= "/nas/RHAX/UTILISATEURS/maxime.jay-allemand/smashbox/output/gapeau_parameters.png")
)

sb$gapeau$forward_run(warmup=365,return_options=list("q_domain"= TRUE))

sb$gapeau$mystats$foutlets_stats()
sb$gapeau$mystats$fspatial_stats()

sb$gapeau$mystats$fquantile_stats(
  chunk_size=2, estimate_method="MLE", ncpu=as.integer(6), fit="gumbel"
)