tilupy 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
tilupy/benchmark.py ADDED
@@ -0,0 +1,1563 @@
1
+ # -*- coding: utf-8 -*-
2
+
3
+ from typing import Callable
4
+
5
+ import math as math
6
+ import numpy as np
7
+ import matplotlib
8
+ import matplotlib.pyplot as plt
9
+
10
+ import tilupy.read
11
+ import tilupy.notations as notations
12
+ from tilupy.analytic_sol import Coussot_shape
13
+
14
+
15
+ class Benchmark:
16
+ """Benchmark of simulation results.
17
+
18
+ This class groups together all the methods for processing and analyzing
19
+ the results of various simulation models, allowing, among other things,
20
+ the comparison of results between models.
21
+
22
+ Attributes
23
+ ----------
24
+ _loaded_results : dict[tilupy.read.Results]
25
+ Dictionary of :class:`tilupy.read.Results` objects for each model loaded with
26
+ :meth:`load_numerical_result`.
27
+ _models_tim : dict[list]
28
+ Dictionary of recorded time steps for each model loaded with
29
+ :meth:`load_numerical_result`.
30
+ _simu_z : numpy.ndarray
31
+ Topographic elevation shared by the loaded simulations.
32
+ """
33
+ def __init__(self):
34
+ self._loaded_results = {}
35
+ self._models_tim = {}
36
+ self._simu_z = None
37
+
38
+
39
+ def load_numerical_result(self,
40
+ model: str,
41
+ **kwargs,
42
+ ) -> None:
43
+ """Load numerical simulation results using :func:`tilupy.read.get_results` for a given model and
44
+ store the :class:`tilupy.read.Results` object in :attr:`_loaded_results`.
45
+
46
+ Parameters
47
+ ----------
48
+ model : str
49
+ Name of the model to load. Must be one of the :data:`tilupy.read.ALLOWED_MODELS`.
50
+ **kwargs
51
+ Keyword arguments passed to the result reader for the specific model.
52
+
53
+ Raises
54
+ ------
55
+ ValueError
56
+ If the shape of the stored :attr:`_simu_z` differs from that of the newly loaded topography.
57
+ UserWarning
58
+ If the stored :attr:`_simu_z` values differ from the newly loaded ones.
59
+ ValueError
60
+ If the provided model is not in the allowed list of :data:`tilupy.read.ALLOWED_MODELS`.
61
+ """
62
+ if model in tilupy.read.ALLOWED_MODELS:
63
+ if model not in self._loaded_results:
64
+ self._loaded_results[model] = tilupy.read.get_results(model, **kwargs)
65
+ self._models_tim[model] = self._loaded_results[model].tim
66
+
67
+ if self._simu_z is None:
68
+ self._simu_z = self._loaded_results[model].z
69
+
70
+ if self._simu_z.shape != self._loaded_results[model].z.shape:
71
+ raise ValueError("Simulations loaded doesn't have same size.")
72
+
73
+ try:
74
+ if not np.allclose(self._simu_z, self._loaded_results[model].z, rtol=0.1, atol=0.1):
75
+ mean_error = np.mean(self._simu_z - self._loaded_results[model].z)
76
+ self._simu_z = self._loaded_results[model].z
77
+ raise UserWarning(f"Stored elevation values different from loaded ones; average error: {mean_error}")
78
+ except UserWarning as w:
79
+ print(f"[WARNING] {w}")
80
+
81
+ else:
82
+ raise ValueError(f" -> No correct model selected, choose between:\n {tilupy.read.ALLOWED_MODELS}")
83
+
84
+
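+ # Minimal usage sketch for loading results. The model name "shaltop" and the
+ # reader keyword "folder_base" are placeholders: the model must belong to
+ # tilupy.read.ALLOWED_MODELS and the keyword arguments are simply forwarded
+ # to tilupy.read.get_results.
+ #     bench = Benchmark()
+ #     bench.load_numerical_result("shaltop", folder_base="path/to/simulation")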
85
+ def compute_analytic_solution(self,
86
+ output: str,
87
+ solution: Callable,
88
+ model: str,
89
+ T: float | list[float],
90
+ **kwargs
91
+ ) -> tilupy.read.TemporalResults1D:
92
+ """
93
+ Compute the analytic solution for the given time steps using the provided model.
94
+
95
+ Parameters
96
+ ----------
97
+ output : str
98
+ Wanted output for the analytical solution. Can be "h" or "u".
99
+ solution : Callable
100
+ Callable object representing the analytic solution model (a subclass of :class:`tilupy.analytic_sol.Depth_result`).
101
+ model : str
102
+ Wanted model to compute the analytical solution from. Must be loaded first with
103
+ :meth:`load_numerical_result`.
104
+ T : float | list[float]
105
+ Time or list of times at which to compute the analytic solution.
106
+ **kwargs
107
+ Keyword arguments passed to the analytic solution for the specific model.
108
+
109
+ Raises
110
+ ------
111
+ ValueError
112
+ If no analytic solution is available for the requested output.
113
+ """
114
+ solution = solution(**kwargs)
115
+
116
+ if isinstance(T, float) or isinstance(T, int):
117
+ T = [T]
118
+
119
+ if output not in ['h', 'u']:
120
+ raise ValueError(" -> Available output: 'h', 'u'.")
121
+
122
+ if output == 'h':
123
+ solution.compute_h(self._loaded_results[model].x, T)
124
+
125
+ if solution.h is not None:
126
+ return tilupy.read.TemporalResults1D(name=output,
127
+ d=solution.h[:].T,
128
+ t=T,
129
+ coords=self._loaded_results[model].x,
130
+ coords_name='x')
131
+ else:
132
+ raise ValueError("No analytic solution for fluid height.")
133
+
134
+ if output == 'u':
135
+ solution.compute_u(self._loaded_results[model].x, T)
136
+
137
+ if solution.u is not None:
138
+ return tilupy.read.TemporalResults1D(name=output,
139
+ d=solution.u[:].T,
140
+ t=T,
141
+ coords=self._loaded_results[model].x,
142
+ coords_name='x')
143
+ else:
144
+ raise ValueError("No analytic solution for fluid velocity.")
145
+
146
+
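+ # Sketch of computing an analytic profile on a loaded model's grid. The
+ # Depth_result subclass and its keyword arguments are placeholders, chosen
+ # among the solutions available in tilupy.analytic_sol.
+ #     as_h = bench.compute_analytic_solution(output="h",
+ #                                            solution=SomeDepthResult,
+ #                                            model="shaltop",
+ #                                            T=[0.0, 5.0, 10.0],
+ #                                            **solution_params)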
147
+ def show_output(self,
148
+ output: str,
149
+ model: str,
150
+ time_steps: float | list[float] = None,
151
+ show_plot: bool = True,
152
+ **plot_kwargs,
153
+ ) -> matplotlib.axes._axes.Axes:
154
+ """Plot output for a given model using :meth:`tilupy.read.Results.plot`.
155
+
156
+ Parameters
157
+ ----------
158
+ output : str
159
+ Wanted output, must be in :data:`tilupy.read.DATA_NAMES`.
160
+ model : str
161
+ Wanted model to show the data field. Must be loaded with :meth:`load_numerical_result`
162
+ time_steps : float | list[float]
163
+ Value or list of time steps at which to display the output.
164
+ If None, displays the output for all recorded time steps in the model's results.
165
+ By default None.
166
+ show_plot : bool, optional
167
+ If True, show the plot, by default True.
168
+
169
+ Returns
170
+ -------
171
+ matplotlib.axes._axes.Axes
172
+ The created plot.
173
+
174
+ Raises
175
+ ------
176
+ ValueError
177
+ If model is not loaded.
178
+ """
179
+ if model not in self._loaded_results.keys():
180
+ raise ValueError(" -> First load model using load_numerical_result.")
181
+
182
+ plot_kwargs = {} if plot_kwargs is None else plot_kwargs
183
+
184
+ ax = self._loaded_results[model].plot(output=output,
185
+ time_steps=time_steps,
186
+ display_plot=show_plot,
187
+ **plot_kwargs)
188
+ if show_plot:
189
+ plt.show()
190
+
191
+ return ax
192
+
193
+
194
+ def show_output_profile(self,
195
+ output: str,
196
+ model: str,
197
+ extraction_method: str = "axis",
198
+ extraction_params: dict = None,
199
+ time_steps: float | list[float] = None,
200
+ show_plot: bool = True,
201
+ **plot_kwargs
202
+ ) -> matplotlib.axes._axes.Axes:
203
+ """Plot profile from 2D temporal or static output for a given model using
204
+ :meth:`tilupy.read.Results.plot_profile`.
205
+
206
+ Parameters
207
+ ----------
208
+ output : str
209
+ Wanted output, must be in :data:`tilupy.read.TEMPORAL_DATA_2D` or :data:`tilupy.read.STATIC_DATA_2D`.
210
+ model : str
211
+ Wanted model to show the output profile. Must be loaded with :meth:`load_numerical_result`
212
+ extraction_method : str, optional
213
+ Wanted profile extraction method. See :func:`tilupy.utils.get_profile`. By default "axis".
214
+ extraction_params : dict, optional
215
+ Profile extraction parameters. See :func:`tilupy.utils.get_profile`. By default None.
216
+ time_steps : float | list[float], optional
217
+ Value or list of time steps required to extract and display profiles.
218
+ If None displays the profiles for all recorded time steps in the model's result.
219
+ Only available for models. By default None.
220
+ show_plot : bool, optional
221
+ If True, show the plot, by default True.
222
+
223
+ Returns
224
+ -------
225
+ matplotlib.axes._axes.Axes
226
+ The created plot.
227
+
228
+ Raises
229
+ ------
230
+ ValueError
231
+ If model is not loaded.
232
+ """
233
+ if model not in self._loaded_results.keys():
234
+ raise ValueError(" -> First load model using load_numerical_result")
235
+
236
+ plot_kwargs = {} if plot_kwargs is None else plot_kwargs
237
+
238
+ ax = self._loaded_results[model].plot_profile(output=output,
239
+ extraction_method = extraction_method,
240
+ extraction_params = extraction_params,
241
+ time_steps=time_steps,
242
+ display_plot=show_plot,
243
+ **plot_kwargs)
244
+ if show_plot:
245
+ plt.show()
246
+
247
+ return ax
248
+
249
+
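+ # Plotting sketches, assuming "h" is available in the loaded results and that
+ # the extraction parameters follow tilupy.utils.get_profile:
+ #     bench.show_output("h", model="shaltop", time_steps=[0.0, 10.0])
+ #     bench.show_output_profile("h", model="shaltop",
+ #                               extraction_method="axis",
+ #                               extraction_params=params,
+ #                               time_steps=10.0)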
250
+ def show_comparison_temporal1D(self,
251
+ output: str,
252
+ models: list[str],
253
+ profile_extraction_args: dict = None,
254
+ analytic_solution: dict = None,
255
+ time_steps: float | list[float] = None,
256
+ axes: matplotlib.axes._axes.Axes = None,
257
+ rows_nb: int = None,
258
+ cols_nb: int = None,
259
+ figsize: tuple[float] = None,
260
+ colors: list = None,
261
+ linestyles: list[str] = None,
262
+ plot_kwargs: dict = None,
263
+ as_kwargs: dict = None,
264
+ show_plot: bool = True,
265
+ ) -> matplotlib.axes._axes.Axes:
266
+ """Plot multiple temporal 1D data in the same graph.
267
+
268
+ Parameters
269
+ ----------
270
+ output : str
271
+ Wanted output, must be in :data:`tilupy.read.TEMPORAL_DATA_1D`. If a :data:`tilupy.read.TEMPORAL_DATA_2D` output is requested,
272
+ :data:`profile_extraction_args` must be provided.
273
+ models : list[str]
274
+ Wanted models to show the temporal 1D data. Must be loaded with :meth:`load_numerical_result`.
275
+ profile_extraction_args : dict, optional
276
+ Arguments for profile extraction. See :func:`tilupy.utils.get_profile`. By default None.
277
+ analytic_solution : dict, optional
278
+ Arguments for plotting an analytic solution. See :meth:`compute_analytic_solution`.
279
+ By default None.
280
+ time_steps : float | list[float], optional
281
+ Value or list of time steps required to extract and display profiles.
282
+ If None displays the profiles for all recorded time steps in the model's result.
283
+ Only available for models. By default None.
284
+ axes : matplotlib.axes._axes.Axes, optional
285
+ Existing matplotlib axes, by default None.
286
+ rows_nb : int, optional
287
+ Number of rows when plotting multiple time steps. If None automatically computed.
288
+ By default None.
289
+ cols_nb : int, optional
290
+ Number of columns when plotting multiple time steps. If None, use 3 columns.
291
+ By default None.
292
+ figsize : tuple[float], optional
293
+ Size of the plotted figure (width, height), by default None.
294
+ colors : list, optional
295
+ List of colors for each model plotted. If None use "black". By default None.
296
+ linestyles : list[str], optional
297
+ List of linestyles for each model plotted. If None use regular straight line. By default None.
298
+ plot_kwargs : dict, optional
299
+ Additional arguments for the models' lines, by default None.
300
+ as_kwargs : dict, optional
301
+ Additional arguments for the analytical solution's line, by default None.
302
+ show_plot : bool, optional
303
+ If True, show the plot, by default True.
304
+
305
+ Returns
306
+ -------
307
+ matplotlib.axes._axes.Axes
308
+ The created plot.
309
+
310
+ Raises
311
+ ------
312
+ ValueError
313
+ If one of the models is not loaded.
314
+ """
315
+ for model in models:
316
+ if model not in self._loaded_results.keys():
317
+ raise ValueError(" -> First load model using load_numerical_result.")
318
+
319
+ plot_kwargs = {} if plot_kwargs is None else plot_kwargs
320
+ as_kwargs = {} if as_kwargs is None else as_kwargs
321
+ profile_extraction_args = {} if profile_extraction_args is None else profile_extraction_args
322
+ if "extraction_method" not in profile_extraction_args:
323
+ profile_extraction_args["extraction_method"] = "axis"
324
+
325
+ profile_models = {}
326
+ for model in models:
327
+ if output in tilupy.read.TEMPORAL_DATA_2D:
328
+ prof, data = self._loaded_results[model].get_profile(output=output,
329
+ **profile_extraction_args)
330
+ profile_models[model] = [prof, data]
331
+ elif output in tilupy.read.TEMPORAL_DATA_1D:
332
+ prof = self._loaded_results[model].get_output(output)
333
+ profile_models[model] = [prof, None]
334
+
335
+ if time_steps is None:
336
+ time_steps = self._models_tim[list(self._loaded_results.keys())[0]]
337
+
338
+ if analytic_solution is not None:
339
+ as_profile = self.compute_analytic_solution(output=output,
340
+ model=list(self._loaded_results.keys())[0],
341
+ T=time_steps,
342
+ **analytic_solution)
343
+
344
+ if isinstance(time_steps, float) or isinstance(time_steps, int):
345
+ time_steps = np.array([time_steps])
346
+ elif isinstance(time_steps, list):
347
+ time_steps = np.array(time_steps)
348
+
349
+ if isinstance(profile_models[model][0], tilupy.read.TemporalResults):
350
+ for model in models:
351
+ profile_models[model][0] = profile_models[model][0].extract_from_time_step(time_steps)
352
+
353
+ if axes is None:
354
+ if cols_nb is None:
355
+ cols_nb = len(time_steps) if len(time_steps) < 3 else 3
356
+
357
+ if rows_nb is None:
358
+ rows_nb = len(time_steps) // cols_nb
359
+ if len(time_steps) % cols_nb != 0:
360
+ rows_nb += 1
361
+
362
+ fig, axes = plt.subplots(nrows=rows_nb,
363
+ ncols=cols_nb,
364
+ figsize=figsize,
365
+ layout="constrained",
366
+ sharex=True,
367
+ sharey=True)
368
+ if isinstance(axes, plt.Axes):
369
+ axes = np.array([axes])
370
+ else:
371
+ axes = axes.flatten()
372
+
373
+ for T in range(len(time_steps)) :
374
+ # Plot models
375
+ for i in range(len(models)):
376
+ if colors is not None and len(colors) > i:
377
+ color = colors[i]
378
+ else:
379
+ color = "black"
380
+ if linestyles is not None and len(linestyles) > i:
381
+ linestyle = linestyles[i]
382
+ else:
383
+ linestyle = None
384
+
385
+ if "alpha" not in plot_kwargs:
386
+ plot_kwargs["alpha"] = 0.8
387
+ if "linewidth" not in plot_kwargs:
388
+ plot_kwargs["linewidth"] = 1.5
389
+
390
+ axes[T].plot(profile_models[models[i]][0].coords, profile_models[models[i]][0].d[:, T], color=color, linestyle=linestyle, label=models[i], **plot_kwargs)
391
+
392
+ # Plot analytic solution
393
+ if analytic_solution is not None:
394
+ if "color" not in as_kwargs:
395
+ as_kwargs["color"] = "red"
396
+ if "alpha" not in as_kwargs:
397
+ as_kwargs["alpha"] = 0.9
398
+ if "linewidth" not in as_kwargs:
399
+ as_kwargs["linewidth"] = 1
400
+
401
+ axes[T].plot(as_profile.coords, as_profile.d[:, T], label=f"{str(analytic_solution['solution']).split('.')[-1][:-2]}", **as_kwargs)
402
+
403
+ # Formatting fig
404
+ axes[T].set_xlim(left=min(profile_models[models[0]][0].coords), right=max(profile_models[models[0]][0].coords))
405
+ axes[T].grid(True, alpha=0.3)
406
+
407
+ if len(time_steps) == 1:
408
+ if profile_extraction_args["extraction_method"] == "axis" and output in tilupy.read.TEMPORAL_DATA_2D:
409
+ inv_axis = ""
410
+ if profile_models[models[0]][0].coords_name == 'x':
411
+ inv_axis = "Y"
412
+ else:
413
+ inv_axis = "X"
414
+ axes[T].set_title(f"{inv_axis}={profile_models[models[0]][1]}m | t={time_steps[T]}s")
415
+ else:
416
+ axes[T].set_title(f"t={time_steps[T]}s")
417
+ else:
418
+ axes[T].set_title(f"t={time_steps[T]}s", loc="left")
419
+ if profile_extraction_args["extraction_method"] == "axis" and output in tilupy.read.TEMPORAL_DATA_2D:
420
+ inv_axis = ""
421
+ if profile_models[models[0]][0].coords_name == 'x':
422
+ inv_axis = "Y"
423
+ else:
424
+ inv_axis = "X"
425
+ axes[T].set_title(f"{inv_axis}={profile_models[models[0]][1]}m", loc="right")
426
+
427
+ axes[T].set_xlabel(notations.get_label(profile_models[models[0]][0].coords_name))
428
+ axes[T].set_ylabel(notations.get_label(output))
429
+ axes[T].legend(loc='upper right')
430
+
431
+ for i in range(len(time_steps), len(axes)):
432
+ fig.delaxes(axes[i])
433
+
434
+ if show_plot:
435
+ plt.show()
436
+
437
+ return axes
438
+
439
+
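+ # Comparison sketch for two loaded models against an analytic solution. Model
+ # names, the Depth_result subclass and its parameters are placeholders.
+ #     bench.show_comparison_temporal1D(
+ #         output="h",
+ #         models=["shaltop", "other_model"],
+ #         profile_extraction_args={"extraction_method": "axis"},
+ #         analytic_solution={"solution": SomeDepthResult, **solution_params},
+ #         time_steps=[5.0, 10.0, 20.0],
+ #         colors=["black", "blue"])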
440
+ def get_avrg_result(self,
441
+ output: str
442
+ ) -> tilupy.read.TemporalResults | tilupy.read.StaticResults:
443
+ """Get average result computed with all loaded model (:meth:`load_numerical_result`).
444
+
445
+ Parameters
446
+ ----------
447
+ output : str
448
+ Wanted average output.
449
+
450
+ Returns
451
+ -------
452
+ tilupy.read.TemporalResults | tilupy.read.StaticResults
453
+ Average result output.
454
+
455
+ Raises
456
+ ------
457
+ ValueError
458
+ If a StaticResults0D output is requested.
459
+ """
460
+ output_list = []
461
+ for model in self._loaded_results:
462
+ output_list.append(self._loaded_results[model].get_output(output))
463
+
464
+ if isinstance(output_list[0], tilupy.read.TemporalResults2D):
465
+ data_list = [output_list[i].d for i in range(len(output_list))]
466
+ time_list = [output_list[i].t for i in range(len(output_list))]
467
+ x_list = [output_list[i].x for i in range(len(output_list))]
468
+ y_list = [output_list[i].y for i in range(len(output_list))]
469
+ z_list = [output_list[i].z for i in range(len(output_list))]
470
+
471
+ mean_data = np.mean(np.stack(data_list), axis=0)
472
+ mean_time = np.mean(np.stack(time_list), axis=0)
473
+ mean_x = np.mean(np.stack(x_list), axis=0)
474
+ mean_y = np.mean(np.stack(y_list), axis=0)
475
+ mean_z = np.mean(np.stack(z_list), axis=0)
476
+
477
+ mean_result = tilupy.read.TemporalResults2D(name=output_list[0].name,
478
+ d=mean_data,
479
+ t=mean_time,
480
+ x=mean_x,
481
+ y=mean_y,
482
+ z=mean_z)
483
+
484
+ return mean_result
485
+
486
+ elif isinstance(output_list[0], tilupy.read.TemporalResults1D):
487
+ data_list = [output_list[i].d for i in range(len(output_list))]
488
+ time_list = [output_list[i].t for i in range(len(output_list))]
489
+ coords_list = [output_list[i].coords for i in range(len(output_list))]
490
+
491
+ mean_data = np.mean(np.stack(data_list), axis=0)
492
+ mean_time = np.mean(np.stack(time_list), axis=0)
493
+ mean_coords = np.mean(np.stack(coords_list), axis=0)
494
+
495
+ mean_result = tilupy.read.TemporalResults1D(name=output_list[0].name,
496
+ d=mean_data,
497
+ t=mean_time,
498
+ coords=mean_coords,
499
+ coords_name=output_list[0].coords_name)
500
+
501
+ return mean_result
502
+
503
+ elif isinstance(output_list[0], tilupy.read.TemporalResults0D):
504
+ data_list = [output_list[i].d for i in range(len(output_list))]
505
+ time_list = [output_list[i].t for i in range(len(output_list))]
506
+
507
+ mean_data = np.mean(np.stack(data_list), axis=0)
508
+ mean_time = np.mean(np.stack(time_list), axis=0)
509
+
510
+ mean_result = tilupy.read.TemporalResults0D(name=output_list[0].name,
511
+ d=mean_data,
512
+ t=mean_time,
513
+ scalar_names=output_list[0].scalar_names)
514
+
515
+ return mean_result
516
+
517
+ elif isinstance(output_list[0], tilupy.read.StaticResults2D):
518
+ data_list = [output_list[i].d for i in range(len(output_list))]
519
+ x_list = [output_list[i].x for i in range(len(output_list))]
520
+ y_list = [output_list[i].y for i in range(len(output_list))]
521
+ z_list = [output_list[i].z for i in range(len(output_list))]
522
+
523
+ mean_data = np.mean(np.stack(data_list), axis=0)
524
+ mean_x = np.mean(np.stack(x_list), axis=0)
525
+ mean_y = np.mean(np.stack(y_list), axis=0)
526
+ mean_z = np.mean(np.stack(z_list), axis=0)
527
+
528
+ mean_result = tilupy.read.StaticResults2D(name=output_list[0].name,
529
+ d=mean_data,
530
+ x=mean_x,
531
+ y=mean_y,
532
+ z=mean_z)
533
+
534
+ return mean_result
535
+
536
+ elif isinstance(output_list[0], tilupy.read.StaticResults1D):
537
+ data_list = [output_list[i].d for i in range(len(output_list))]
538
+ coords_list = [output_list[i].coords for i in range(len(output_list))]
539
+
540
+ mean_data = np.mean(np.stack(data_list), axis=0)
541
+ mean_coords = np.mean(np.stack(coords_list), axis=0)
542
+
543
+ mean_result = tilupy.read.StaticResults1D(name=output_list[0].name,
544
+ d=mean_data,
545
+ coords=mean_coords,
546
+ coords_name=output_list[0].coords_name)
547
+
548
+ return mean_result
549
+
550
+ else:
551
+ raise ValueError("Not available for StaticResults0D.")
552
+
553
+
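+ # Averaging sketch, assuming "h_max" is available in every loaded result:
+ #     mean_hmax = bench.get_avrg_result("h_max")
+ #     print(mean_hmax.d.shape)   # averaged data array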
554
+ def compute_area(self,
555
+ flow_threshold: float = None
556
+ ) -> tuple[dict, dict]:
557
+ """Compute area at each recorded time steps, computed with 'h', for each model loaded
558
+ using :meth:`load_numerical_result`.
559
+
560
+ Parameters
561
+ ----------
562
+ flow_threshold : float, optional
563
+ Flow threshold to extract flow area, if None use 1% of initial maximal
564
+ flow height. By default None.
565
+
566
+ Returns
567
+ -------
568
+ tuple[dict, dict]
569
+ area_surf: dict
570
+ 2D area for each model: area_surf[model] = TemporalResults2D.
571
+ area_num: dict
572
+ Area value for each model: area_num[model] = TemporalResults0D.
573
+ """
574
+ output_list = []
575
+ for model in self._loaded_results:
576
+ output_list.append(self._loaded_results[model].get_output("h"))
577
+
578
+ height_list = [output_list[i].d for i in range(len(output_list))]
579
+
580
+ if flow_threshold is None:
581
+ flow_threshold = np.max(height_list[0]) * 0.01
582
+
583
+ for h in height_list:
584
+ h[h<flow_threshold] = 0
585
+ h[h>=flow_threshold] = 1
586
+
587
+ area_num = {}
588
+ area_surf = {}
589
+ model_name = list(self._loaded_results.keys())
590
+ for i in range(len(height_list)):
591
+ surface_list = []
592
+ dx = self._loaded_results[model_name[i]].dx
593
+ dy = self._loaded_results[model_name[i]].dy
594
+ cell_surface = dx*dy
595
+
596
+ for t in range(height_list[i].shape[2]):
597
+ nb_cell = np.sum(height_list[i][:, :, t] == 1)
598
+ surface_list.append(nb_cell*cell_surface)
599
+
600
+ area_num[model_name[i]] = tilupy.read.TemporalResults0D(name='s',
601
+ d=np.array(surface_list),
602
+ t=output_list[i].t,
603
+ scalar_names="Surface")
604
+
605
+ area_surf[model_name[i]] = tilupy.read.TemporalResults2D(name='s',
606
+ d=height_list[i],
607
+ t=output_list[i].t,
608
+ x=output_list[i].x,
609
+ y=output_list[i].y,
610
+ z=output_list[i].z)
611
+
612
+ return area_surf, area_num
613
+
614
+
615
+ def compute_impacted_area(self,
616
+ flow_threshold: float = None,
617
+ ) -> tuple[dict, dict]:
618
+ """Compute impacted area, computed with 'h_max', for each model loaded using
619
+ :meth:`load_numerical_result`.
620
+
621
+ Parameters
622
+ ----------
623
+ flow_threshold : float, optional
624
+ Flow threshold to extract flow area, if None use 1% of initial maximal
625
+ flow height. By default None.
626
+
627
+ Returns
628
+ -------
629
+ tuple[dict, dict]
630
+ area_surf: dict
631
+ 2D area for each model: area_surf[model] = StaticResults2D.
632
+ area_num: dict
633
+ Area value for each model: area_num[model] = area_val.
634
+ """
635
+ output_list = []
636
+ for model in self._loaded_results:
637
+ output_list.append(self._loaded_results[model].get_output("h_max"))
638
+
639
+ height_list = [output_list[i].d for i in range(len(output_list))]
640
+
641
+ if flow_threshold is None:
642
+ flow_threshold = np.max(height_list[0]) * 0.01
643
+
644
+ for h in height_list:
645
+ h[h<flow_threshold] = 0
646
+ h[h>=flow_threshold] = 1
647
+
648
+ area_num = {}
649
+ area_surf = {}
650
+ model_name = list(self._loaded_results.keys())
651
+ for i in range(len(height_list)):
652
+ dx = self._loaded_results[model_name[i]].dx
653
+ dy = self._loaded_results[model_name[i]].dy
654
+ cell_surface = dx*dy
655
+
656
+ nb_cell = np.sum(height_list[i][:, :] == 1)
657
+ surface = (nb_cell*cell_surface)
658
+
659
+ area_num[model_name[i]] = surface
660
+
661
+ area_surf[model_name[i]] = tilupy.read.StaticResults2D(name='s',
662
+ d=height_list[i],
663
+ x=output_list[i].x,
664
+ y=output_list[i].y,
665
+ z=output_list[i].z)
666
+
667
+ return area_surf, area_num
668
+
669
+
670
+ def compute_impacted_area_rms_from_avrg(self,
671
+ flow_threshold: float = None
672
+ ) -> tuple[dict, np.ndarray]:
673
+ """Compute impacted area, computed with 'h_max', RMS with average result for each model loaded
674
+ using :meth:`load_numerical_result`.
675
+
676
+ Parameters
677
+ ----------
678
+ flow_threshold : float, optional
679
+ Flow threshold to extract flow area, if None use 1% of initial maximal
680
+ flow height. By default None.
681
+
682
+ Returns
683
+ -------
684
+ tuple[dict, np.ndarray]
685
+ area_rms: dict
686
+ RMS value for each model: area_rms[model] = rms_value.
687
+ avrg_area: numpy.ndarray
688
+ Average area surface.
689
+ """
690
+ area_surf, area_num = self.compute_impacted_area(flow_threshold=flow_threshold)
691
+
692
+ mean_area = []
693
+ for model in area_surf:
694
+ mean_area.append(np.nan_to_num(area_surf[model].d))
695
+
696
+ avrg_area = np.mean(np.array(mean_area), axis=0)
697
+
698
+ area_rms = {}
699
+ for model in area_num:
700
+ rms = (np.sqrt(np.sum((area_surf[model].d - avrg_area)**2)) /
701
+ np.sqrt(np.sum((avrg_area)**2)))
702
+ area_rms[model] = rms
703
+
704
+ return area_rms, avrg_area
705
+
706
+
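+ # The RMS used in the *_rms_from_avrg methods is the normalised residual
+ # between a model field A and the model-averaged field A_avrg:
+ #     rms = sqrt(sum((A - A_avrg)**2)) / sqrt(sum(A_avrg**2))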
707
+ def compute_rms_from_avrg(self,
708
+ output: str,
709
+ ) -> dict:
710
+ """Compute RMS with average result for each model loaded using :meth:`load_numerical_result`.
711
+
712
+ Parameters
713
+ ----------
714
+ output : str
715
+ Wanted output to compute the RMS.
716
+
717
+ Returns
718
+ -------
719
+ dict
720
+ RMS value for each model: output_rms[model] = rms_value.
721
+ """
722
+ avrg_result = self.get_avrg_result(output)
723
+
724
+ extracted_data = {}
725
+ for model in self._loaded_results:
726
+ data = self._loaded_results[model].get_output(output)
727
+ extracted_data[model] = data.d
728
+ avrg_height = avrg_result.d
729
+
730
+ output_rms = {}
731
+ for model in self._loaded_results:
732
+ rms = (np.sqrt(np.sum((extracted_data[model] - avrg_height)**2)) /
733
+ np.sqrt(np.sum((avrg_height)**2)))
734
+ output_rms[model] = rms
735
+
736
+ return output_rms
737
+
738
+
739
+ def compute_dist_centermass(self,
740
+ flow_threshold: float = None
741
+ ) -> tuple[dict, dict]:
742
+ """Compute the distance between the initial position of the center of mass and the final furthest point
743
+ for each model loaded using :meth:`load_numerical_result`.
744
+
745
+ Parameters
746
+ ----------
747
+ flow_threshold : float, optional
748
+ Flow threshold to extract flow area, if None use 1% of initial maximal
749
+ flow height. By default None.
750
+
751
+ Returns
752
+ -------
753
+ tuple[dict, dict]
754
+ init_coord_centermass : dict
755
+ Coordinates of the initial center of mass for each model:
756
+ init_coord_centermass[model] = [X, Y, Z].
757
+ model_max_dist : dict
758
+ Furthest impacted point and its distance for each model: model_max_dist[model] = [X, Y, dist].
759
+ """
760
+ init_coord_centermass = {}
761
+ for model in self._loaded_results:
762
+ centermass = self._loaded_results[model].get_output("centermass")
763
+ init_coord_centermass[model] = [centermass.d[0, 0],
764
+ centermass.d[1, 0],
765
+ centermass.d[2, 0]]
766
+
767
+ area_surf, _ = self.compute_impacted_area(flow_threshold=flow_threshold)
768
+
769
+ model_max_dist = {}
770
+ for model in self._loaded_results:
771
+ model_max_dist[model] = [None, None, 0.0]
772
+
773
+ for model in self._loaded_results:
774
+ for i in range(len(area_surf[model].x)):
775
+ for j in range(len(area_surf[model].y)):
776
+ if area_surf[model].d[j, i] == 1:
777
+ distance = np.sqrt((area_surf[model].x[i] - init_coord_centermass[model][0])**2 +
778
+ (area_surf[model].y[j] - init_coord_centermass[model][1])**2 +
779
+ (area_surf[model].z[j,i] - init_coord_centermass[model][2])**2)
780
+ if distance > model_max_dist[model][2]:
781
+ model_max_dist[model][0] = area_surf[model].x[i]
782
+ model_max_dist[model][1] = area_surf[model].y[j]
783
+ model_max_dist[model][2] = distance
784
+
785
+ return init_coord_centermass, model_max_dist
786
+
787
+
788
+ def compute_average_velocity(self,
789
+ distance: float = None,
790
+ look_up_direction: str = "right",
791
+ flow_threshold: float = None,
792
+ **extration_profile_params
793
+ ) -> tuple[dict, dict, float, dict]:
794
+ """Compute average velocity for each model loaded using :meth:`load_numerical_result`.
795
+
796
+ Parameters
797
+ ----------
798
+ distance : float, optional
799
+ Distance used to calculate the average speed, if None use half of the smallest distance travelled among the models.
800
+ By default None.
801
+ look_up_direction : str, optional
802
+ Direction to look for the flow front, must be "right" or "left",
803
+ by default "right".
804
+ flow_threshold : float, optional
805
+ Flow threshold when extracting front position from profile, if None use
806
+ 1% of initial maximal flow height. By default None.
807
+
808
+ Returns
809
+ -------
810
+ tuple[dict, dict, float, dict]
811
+ model_avrg_vel: dict
812
+ Average velocity value for each model: model_avrg_vel[model] = avrg_velocity.
813
+ model_time: dict
814
+ Time for each model to reach the distance: model_time[model] = time.
815
+ distance: float
816
+ Distance used to compute the average velocity.
817
+ model_pos: dict
818
+ Front position of the profile of each model: model_pos[model] = [init_front_pos, final_front_pos,
819
+ maximal_distance].
820
+ """
821
+ def get_front_index(profile):
822
+ idx = np.where(profile <= flow_threshold)[0]
823
+ return (idx[0] - 1) if len(idx) else len(profile) - 1
824
+
825
+ def get_back_index(profile):
826
+ idx = np.where(profile <= flow_threshold)[0]
827
+ return (idx[-1] + 1) if len(idx) else 0
828
+
829
+ extration_profile_params = {} if extration_profile_params is None else extration_profile_params
830
+
831
+ if look_up_direction not in ["right", "left"]:
832
+ raise ValueError("Invalid look-up direction: 'right' or 'left'")
833
+
834
+ # Extract profile
835
+ model_profile = {}
836
+ for model in self._loaded_results:
837
+ profile, _ = self._loaded_results[model].get_profile(output="h",
838
+ **extration_profile_params)
839
+ model_profile[model] = profile
840
+
841
+ if flow_threshold is None:
842
+ flow_threshold = np.max(profile.d[:, 0])*0.01
843
+
844
+ # Find initial and final position
845
+ model_pos = {}
846
+ for model in self._loaded_results:
847
+ max_index_init = np.argmax(model_profile[model].d[:, 0])
848
+ max_index_final = np.argmax(model_profile[model].d[:, -1])
849
+
850
+ idx_r_init = get_front_index(model_profile[model].d[max_index_init:, 0]) + max_index_init
851
+ idx_l_init = get_back_index(model_profile[model].d[:max_index_init, 0])
852
+ idx_r_final = get_front_index(model_profile[model].d[max_index_final:, -1]) + max_index_final
853
+ idx_l_final = get_back_index(model_profile[model].d[:max_index_final, -1])
854
+
855
+ if look_up_direction == "right":
856
+ init_pos = model_profile[model].coords[idx_r_init]
857
+ final_pos = model_profile[model].coords[idx_r_final]
858
+ else:
859
+ init_pos = model_profile[model].coords[idx_l_init]
860
+ final_pos = model_profile[model].coords[idx_l_final]
861
+
862
+ model_pos[model] = (init_pos, final_pos, abs(final_pos-init_pos))
863
+
864
+ # Find minimal distance
865
+ min_distance = 1e10
866
+ for model in model_pos:
867
+ min_distance = model_pos[model][2] if model_pos[model][2] < min_distance else min_distance
868
+
869
+ if distance is not None and distance > min_distance:
870
+ raise ValueError(f"The requested distance is greater than the minimum distance: {min_distance}")
871
+
872
+ if distance is None:
873
+ distance = min_distance/2
874
+
875
+
876
+ # Find the time for each model to exceed the distance
877
+ model_time = {}
878
+ for model in model_profile:
879
+ for t in range(1, len(model_profile[model].t)):
880
+ max_index = np.argmax(model_profile[model].d[:, t])
881
+
882
+ idx_r = get_front_index(model_profile[model].d[max_index:, t]) + max_index
883
+ idx_l = get_back_index(model_profile[model].d[:max_index, t])
884
+
885
+ if look_up_direction == "right":
886
+ pos = model_profile[model].coords[idx_r]
887
+ else:
888
+ pos = model_profile[model].coords[idx_l]
889
+
890
+ if abs(pos - model_pos[model][0]) >= distance:
891
+ model_time[model] = model_profile[model].t[t]
892
+ break
893
+
894
+
895
+ # Compute the avrg velocity for each model
896
+ model_avrg_vel = {}
897
+ for model in model_time:
898
+ model_avrg_vel[model] = distance / model_time[model]
899
+
900
+ return model_avrg_vel, model_time, distance, model_pos
901
+
902
+
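+ # Front-velocity sketch; the profile extraction keywords are forwarded to
+ # tilupy.read.Results.get_profile and are placeholders here:
+ #     avrg_vel, times, dist, front_pos = bench.compute_average_velocity(
+ #         look_up_direction="right",
+ #         flow_threshold=0.05,
+ #         extraction_method="axis")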
903
+ def compute_rms_from_coussot(self,
904
+ coussot_params: dict,
905
+ look_up_direction: str = "right",
906
+ flow_threshold: float = None,
907
+ **extration_profile_params
908
+ ) -> tuple[dict, dict, dict]:
909
+ """Compute RMS with Coussot's front shape for each model loaded using :meth:`load_numerical_result`.
910
+
911
+ Parameters
912
+ ----------
913
+ coussot_params : dict
914
+ Arguments for generating Coussot's solution. See :class:`tilupy.analytic_sol.Coussot_shape`.
915
+ look_up_direction : str, optional
916
+ Direction to look for the flow front, must be "right" or "left",
917
+ by default "right".
918
+ flow_threshold : float, optional
919
+ Flow threshold when extracting front position from profile, if None use
920
+ 1% of initial maximal flow height. By default None.
921
+
922
+ Returns
923
+ -------
924
+ tuple[dict, dict, dict]
925
+ output_rms: dict
926
+ RMS value for each model: output_rms[model] = rms_value.
927
+ model_front_pos: dict
928
+ Position of Coussot's profile for each model: model_front_pos[model] = pos_value.
929
+ model_coussot: dict
930
+ Coussot's profile for each model: model_coussot[model] = StaticResults1D.
931
+ """
932
+ def get_front_index(profile):
933
+ idx = np.where(profile <= flow_threshold)[0]
934
+ return (idx[0] - 1) if len(idx) else len(profile) - 1
935
+
936
+ def get_back_index(profile):
937
+ idx = np.where(profile <= flow_threshold)[0]
938
+ return (idx[-1] + 1) if len(idx) else 0
939
+
940
+ output_rms = {}
941
+ model_front_pos = {}
942
+ model_coussot = {}
943
+ for model in self._loaded_results:
944
+ # Extract profile
945
+ prof, _ = self._loaded_results[model].get_profile(output="h",
946
+ **extration_profile_params)
947
+ if flow_threshold is None:
948
+ flow_threshold = np.max(prof.d[:, 0])*0.01
949
+ max_index = np.argmax(prof.d[:, -1])
950
+ # Create Coussot profile
951
+ front_shape = Coussot_shape(**coussot_params, h_final=np.max(prof.d[:, -1]))
952
+ front_shape.compute_rheological_test_front_morpho()
953
+
954
+ if look_up_direction == "right":
955
+ front_shape.change_orientation_flow()
956
+ idx = get_front_index(prof.d[max_index:, -1]) + max_index
957
+ front_pos = prof.coords[idx]
958
+ else:
959
+ idx = get_back_index(prof.d[:max_index, -1])
960
+ front_pos = prof.coords[idx]
961
+ front_shape.translate_front(front_pos)
962
+ front_shape.interpolate_on_d()
963
+
964
+ coussot_pos = front_shape.d.copy()
965
+
966
+ # Find best position for Coussot's profile
967
+ max_dx = (prof.coords[1] - prof.coords[0]) * 5
968
+ max_index = np.argmax(prof.d[:, -1])
969
+
970
+ dx_range = np.linspace(-max_dx, max_dx, 5)
971
+ best_dx = 0.0
972
+ min_rms = 1e30
973
+ # x_index = [None, None]
974
+
975
+ for dx in dx_range:
976
+ temp_pos = coussot_pos + dx
977
+
978
+ # Extract front value of profile
979
+ if look_up_direction == "right":
980
+ x_index_max = np.argmin(np.abs(prof.coords[max_index:] - np.max(temp_pos))) + max_index
981
+ x_index_min = np.argmin(np.abs(prof.coords[max_index:] - np.min(temp_pos))) + max_index
982
+ else:
983
+ x_index_max = np.argmin(np.abs(prof.coords[:max_index] - np.max(temp_pos)))
984
+ x_index_min = np.argmin(np.abs(prof.coords[:max_index] - np.min(temp_pos)))
985
+
986
+ reduc_idx = np.linspace(0, len(temp_pos) - 1, len(prof.coords[x_index_min:x_index_max]), dtype=int)
987
+
988
+ rms = np.sqrt(np.mean(((prof.d[x_index_min:x_index_max, -1])
989
+ - (front_shape.h[reduc_idx]))**2))
990
+
991
+ rms /= np.sqrt(np.mean(((front_shape.h[reduc_idx]))**2))
992
+
993
+ if rms <= min_rms:
994
+ min_rms = rms
995
+ best_dx = dx
996
+ # x_index = [x_index_min, x_index_max]
997
+
998
+ output_rms[model] = min_rms
999
+ front_shape.translate_front(best_dx)
1000
+ model_front_pos[model] = front_pos + best_dx
1001
+
1002
+ model_coussot[model] = tilupy.read.StaticResults1D(name="h",
1003
+ d=front_shape.h,
1004
+ coords=front_shape.d,
1005
+ coords_name="x")
1006
+
1007
+ # prof.plot()
1008
+ # plt.plot(front_shape.d[reduc_idx], front_shape.h[reduc_idx], color='red')
1009
+ # plt.show()
1010
+
1011
+ # plt.plot([0, np.max(front_shape.h[reduc_idx])], [0, np.max(front_shape.h[reduc_idx])], color='red')
1012
+ # plt.plot(prof.d[x_index[0]:x_index[1], -1], front_shape.h[reduc_idx])
1013
+ # plt.xlabel("Solution numérique")
1014
+ # plt.ylabel("Solution de Coussot")
1015
+ # plt.show()
1016
+
1017
+ return output_rms, model_front_pos, model_coussot
1018
+
1019
+
1020
+ def generate_simulation_comparison_csv(self,
1021
+ save: bool = False,
1022
+ folder_out: str = None,
1023
+ file_name: str = None,
1024
+ fmt: str = "csv",
1025
+ available_profile: bool = False,
1026
+ extration_profile_params: dict = None,
1027
+ flow_threshold: float = 1e-1,
1028
+ profile_direction: str = "right",
1029
+ avrg_velocity_distance: float = None,
1030
+ coussot_criteria: dict = None,
1031
+ ) -> None:
1032
+ """Generate a csv file summarizing comparison criteria between flow models.
1033
+
1034
+ Generates a file grouping comparison criteria between numerical flow models:
1035
+
1036
+ - Criteria integrated throughout the simulation:
1037
+
1038
+ - Flow Area: flow area value and difference with mean result (:meth:`compute_impacted_area`).
1039
+ - Impacted Zone: RMS of impacted area versus mean result (:meth:`compute_impacted_area_rms_from_avrg`).
1040
+ - Maximal Height: RMS of flow maximal height versus mean result (:meth:`compute_rms_from_avrg`).
1041
+ - Maximal Momentum: RMS of flow maximal momentum versus mean result (:meth:`compute_rms_from_avrg`).
1042
+ - Average Velocity (if profile): time for flow to reach a given distance
1043
+ and average velocity calculated from these values (:meth:`compute_average_velocity`).
1044
+
1045
+ - Criteria for the final time step of the simulation:
1046
+
1047
+ - Final Height: RMS of flow final height versus mean result (:meth:`compute_rms_from_avrg`).
1048
+ - Maximal Extension: distance from the initial center of mass and the final furthest point of the flow (:meth:`compute_dist_centermass`).
1049
+ - Flow Front Position (if profile): maximum distance traveled by the flow and comparison with the average result (:meth:`compute_average_velocity`).
1050
+ - Front Shape (optional): RMS of the front shape versus Coussot's theoretical front shape (:meth:`compute_rms_from_coussot`).
1051
+
1052
+ - Numerical criteria:
1053
+
1054
+ - Volume: value of the volume at the final time step (compared to the initial volume) and RMS versus the initial volume value.
1055
+
1056
+ Parameters
1057
+ ----------
1058
+ save : bool, optional
1059
+ If True, save the resulting table at :data:`folder_out`. By default False.
1060
+ folder_out : str, optional
1061
+ Path to the folder where the file is saved, if None an "xlsx_results"
1062
+ folder is created next to this module. By default None.
1063
+ file_name : str, optional
1064
+ Name of the folder, if None use "results_[models]". By default None.
1065
+ fmt : str, optional
1066
+ Saving format of the table. Can be "csv" or "xlsx". By default "csv".
1067
+ available_profile: bool, optional
1068
+ If True, calculate criteria requiring a profile: avrg velocity and final front
1069
+ position. By default False.
1070
+ flow_threshold : float, optional
1071
+ Flow threshold when extracting front position from profile, by default 1e-1.
1072
+ profile_direction : str, optional
1073
+ Direction to look for the flow front, must be "right" or "left",
1074
+ by default "right".
1075
+ avrg_velocity_distance : float, optional
1076
+ Distance used to calculate the average speed, if None use half of the smallest distance travelled among the models.
1077
+ By default None.
1078
+ coussot_criteria : dict, optional
1079
+ If None, ignore the Coussot criteria. Otherwise, dict of arguments used to generate a
1080
+ Coussot profile, by default None.
1081
+
1082
+ Raises
1083
+ ------
1084
+ ValueError
1085
+ If the file already exists.
1086
+ """
1087
+ import pandas as pd
1088
+ import openpyxl
1089
+ import os
1090
+
1091
+ if save:
1092
+ if folder_out is None:
1093
+ folder_out = os.path.join(os.path.dirname(__file__), "xlsx_results")
1094
+
1095
+ os.makedirs(folder_out, exist_ok=True)
1096
+
1097
+ if file_name is None:
1098
+ file_name = "results"
1099
+ for model in self._loaded_results:
1100
+ file_name += "_" + model
1101
+
1102
+ if not file_name.endswith("." + fmt):
1103
+ file_name = file_name + "." + fmt
1104
+ saving_path = os.path.join(folder_out, file_name)
1105
+
1106
+ if os.path.exists(saving_path):
1107
+ raise ValueError(f"Existing file: {saving_path}")
1108
+
1109
+ # Create table columns
1110
+ cols = ["", ""]
1111
+ for model in self._loaded_results:
1112
+ cols.append(model)
1113
+
1114
+ table_content = []
1115
+
1116
+ if extration_profile_params is None and available_profile:
1117
+ extration_profile_params = {}
1118
+
1119
+ # --------------------------------------------------------------------------------------------
1120
+ # Criteria integrated throughout the simulation
1121
+ # --------------------------------------------------------------------------------------------
1122
+ table_content.append(["Criteria integrated throughout the simulation"])
1123
+
1124
+ # --------------------------------------- Flow area ------------------------------------------
1125
+ line = ["Flow Area", "Total Area [m2]"]
1126
+ _, area_num = self.compute_impacted_area(flow_threshold=flow_threshold)
1127
+
1128
+ mean_list = []
1129
+ for model in self._loaded_results:
1130
+ mean_list.append(area_num[model])
1131
+ line.append(area_num[model])
1132
+ table_content.append(line)
1133
+
1134
+ line = ["", "Compared to avrg"]
1135
+ mean_area = np.mean(np.array(mean_list))
1136
+ for model in self._loaded_results:
1137
+ line.append((area_num[model] - mean_area)/mean_area)
1138
+ table_content.append(line)
1139
+
1140
+ line = ["Impacted Zone", "RMS (avrg)"]
1141
+ rms_area, _ = self.compute_impacted_area_rms_from_avrg(flow_threshold=flow_threshold)
1142
+
1143
+ for model in self._loaded_results:
1144
+ line.append(rms_area[model])
1145
+ table_content.append(line)
1146
+
1147
+ # ------------------------------------- Maximal height ---------------------------------------
1148
+ line = ["Maximal Height", "RMS (avrg)"]
1149
+ rms_height = self.compute_rms_from_avrg(output="h_max")
1150
+
1151
+ for model in self._loaded_results:
1152
+ line.append(rms_height[model])
1153
+ table_content.append(line)
1154
+
1155
+ # ------------------------------------- Maximal momentum ---------------------------------------
1156
+ line = ["Maximal Momentum", "RMS (avrg)"]
1157
+ rms_hu = self.compute_rms_from_avrg(output="hu_max")
1158
+
1159
+ for model in self._loaded_results:
1160
+ line.append(rms_hu[model])
1161
+ table_content.append(line)
1162
+
1163
+ # ----------------------------------- Average flow speed -------------------------------------
1164
+ if available_profile:
1165
+ avrg_vel, time, dist, model_pos = self.compute_average_velocity(distance=avrg_velocity_distance,
1166
+ look_up_direction=profile_direction,
1167
+ flow_threshold=flow_threshold,
1168
+ **extration_profile_params)
1169
+ line = ["Velocity", f"Time [s] to complete d={dist}m"]
1170
+
1171
+ for model in self._loaded_results:
1172
+ line.append(time[model])
1173
+ table_content.append(line)
1174
+
1175
+ line = ["", "Average velocity [m/s]"]
1176
+
1177
+ for model in self._loaded_results:
1178
+ line.append(avrg_vel[model])
1179
+ table_content.append(line)
1180
+
1181
+
1182
+ # --------------------------------------------------------------------------------------------
1183
+ # Criteria for the final time step of the simulation
1184
+ # --------------------------------------------------------------------------------------------
1185
+ table_content.append(["Criteria for the final time step of the simulation"])
1186
+
1187
+ # --------------------------------------- Final height ---------------------------------------
1188
+ line = ["Final Thickness Repartition", "RMS (avrg)"]
1189
+ rms_height = self.compute_rms_from_avrg(output="h_final")
1190
+
1191
+ for model in self._loaded_results:
1192
+ line.append(rms_height[model])
1193
+ table_content.append(line)
1194
+
1195
+ # -------------------------------- Position of the flow front --------------------------------
1196
+ line = ["Maximal Extension", "Distance [m]"]
1197
+ _, model_max_dist = self.compute_dist_centermass(flow_threshold=flow_threshold)
1198
+
1199
+ for model in self._loaded_results:
1200
+ line.append(model_max_dist[model][2])
1201
+ table_content.append(line)
1202
+
1203
+ line = ["", "Compared to avrg"]
1204
+
1205
+ list_pos = []
1206
+ for model in self._loaded_results:
1207
+ list_pos.append(model_max_dist[model][2])
1208
+ mean_dist = np.mean(np.array(list_pos))
1209
+
1210
+ for model in self._loaded_results:
1211
+ line.append((model_max_dist[model][2]-mean_dist)/mean_dist)
1212
+ table_content.append(line)
1213
+
1214
+ if available_profile:
1215
+ line = ["Flow Front (Profile)", "Distance [m]"]
1216
+
1217
+ for model in self._loaded_results:
1218
+ line.append(model_pos[model][2])
1219
+ table_content.append(line)
1220
+
1221
+ line = ["", "Compared to avrg"]
1222
+
1223
+ list_pos = []
1224
+ for model in self._loaded_results:
1225
+ list_pos.append(model_pos[model][2])
1226
+ mean_dist = np.mean(np.array(list_pos))
1227
+
1228
+ for model in self._loaded_results:
1229
+ line.append((model_pos[model][2]-mean_dist)/mean_dist)
1230
+ table_content.append(line)
1231
+
1232
+ # ---------------------------------- Front shape comparison ----------------------------------
1233
+ if coussot_criteria is not None:
1234
+ line = ["Front Shape", "RMS (Coussot)"]
1235
+ model_rms, _, _ = self.compute_rms_from_coussot(look_up_direction=profile_direction,
1236
+ flow_threshold=flow_threshold,
1237
+ coussot_params=coussot_criteria)
1238
+
1239
+ for model in self._loaded_results:
1240
+ line.append(model_rms[model])
1241
+ table_content.append(line)
1242
+
1243
+
1244
+ # --------------------------------------------------------------------------------------------
1245
+ # Numerical criteria
1246
+ # --------------------------------------------------------------------------------------------
1247
+ table_content.append(["Numerical criteria"])
1248
+
1249
+ # ------------------------------------------- Volume -----------------------------------------
1250
+ model_volume = {}
1251
+ for model in self._loaded_results:
1252
+ volume = self._loaded_results[model].get_output("volume")
1253
+ model_volume[model] = volume
1254
+
1255
+ line = ["Volume Variation", "RMS of Total Volume Variation"]
1256
+
1257
+ for model in self._loaded_results:
1258
+ rms_vinit = 0
1259
+ for v in model_volume[model].d:
1260
+ rms_vinit += np.sqrt((v - model_volume[model].d[0])**2) / model_volume[model].d[0]
1261
+ line.append(rms_vinit)
1262
+ table_content.append(line)
1263
+
1264
+ line = ["", "(Vf - Vi) / Vi"]
1265
+
1266
+ for model in self._loaded_results:
1267
+ line.append((model_volume[model].d[-1] - model_volume[model].d[0]) /
1268
+ model_volume[model].d[0])
1269
+ table_content.append(line)
1270
+
1271
+
1272
+ # --------------------------------------------------------------------------------------------
1273
+ # Saving table
1274
+ # --------------------------------------------------------------------------------------------
1275
+ df = pd.DataFrame(table_content, columns=cols)
1276
+ if save:
1277
+ if fmt == "csv":
1278
+ df.to_csv(saving_path, index=False, encoding="utf-8")
1279
+
1280
+ if fmt == "xlsx":
1281
+ df.to_excel(saving_path, index=False, engine="openpyxl")
1282
+
1283
+ # --------------------------------------- Reformat table -------------------------------------
1284
+ wb = openpyxl.load_workbook(saving_path)
1285
+ ws = wb.active
1286
+
1287
+ # Add border style
1288
+ bordure_fine = openpyxl.styles.Border(left=openpyxl.styles.Side(style="thin"),
1289
+ right=openpyxl.styles.Side(style="thin"),
1290
+ top=openpyxl.styles.Side(style="thin"),
1291
+ bottom=openpyxl.styles.Side(style="thin"))
1292
+
1293
+ for row in ws.iter_rows(min_row=1, max_row=ws.max_row, max_col=ws.max_column):
1294
+ for cell in row:
1295
+ cell.border = bordure_fine
1296
+ wb.save(saving_path)
1297
+
1298
+ print(f"Saved in: {saving_path}")
1299
+
1300
+ return df
1301
+
1302
+
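+ # Report sketch: write the comparison table for the loaded models to an xlsx
+ # file (output folder and threshold values are placeholders):
+ #     df = bench.generate_simulation_comparison_csv(
+ #         save=True,
+ #         folder_out="path/to/output",
+ #         fmt="xlsx",
+ #         available_profile=True,
+ #         extration_profile_params={"extraction_method": "axis"},
+ #         flow_threshold=0.05)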
1303
+ def generate_analytical_comparison_csv(self,
1304
+ analytic_solution: dict,
1305
+ save: bool = False,
1306
+ folder_out: str = None,
1307
+ file_name: str = None,
1308
+ fmt: str = "csv",
1309
+ compute_as_u: bool = True,
1310
+ extration_profile_params: dict = None,
1311
+ flow_threshold: float = None,
1312
+ ) -> None:
1313
+ """Generate a csv file summarizing comparison criteria between flow models and analytic solution.
1314
+
1315
+ Generates a file grouping comparison criteria between numerical flow models and analytic solution:
1316
+
1317
+ - Criteria integrated throughout the simulation:
1318
+
1319
+ - Total Height: RMS of flow height versus analytical solution.
1320
+ - Total Momentum: RMS of flow h*u versus analytical solution.
1321
+ - Total Front Position: RMS of flow front versus analytical solution.
1322
+
1323
+ - Criteria for the final time step of the simulation:
1324
+
1325
+ - Final Height: RMS of flow final height versus analytical solution.
1326
+ - Flow Front Position: maximum distance traveled by the flow and comparison with the analytic solution.
1327
+
1328
+ - Numerical criteria:
1329
+
1330
+ - Volume: value of the volume at the final time step (compared to the initial volume) and RMS versus the initial volume value.
1331
+
1332
+ Parameters
1333
+ ----------
1334
+ analytic_solution: dict
1335
+ Argument for the analytic solution. See :meth:`compute_analytic_solution`.
1336
+ save : bool, optional
1337
+ If True, save the resulting table at :data:`folder_out`. By default False.
1338
+ folder_out : str, optional
1339
+ Path to the folder where the file is saved, if None an "xlsx_results"
1340
+ folder is created next to this module. By default None.
1341
+ file_name : str, optional
1342
+ Name of the folder, if None use "AS_comparison_[models]". By default None.
1343
+ fmt : str, optional
1344
+ Saving format of the table. Can be "csv" or "xlsx". By default "csv".
1345
+ compute_as_u: bool, optional
1346
+ If True, compute analytic solution for flow velocity. Can be disabled. By default True.
1347
+ extration_profile_params: dict, optional
1348
+ Argument for extracting profile. See :meth:`tilupy.read.Results.get_profile`. By default None.
1349
+ flow_threshold : float, optional
1350
+ Flow threshold when extracting front position from profile, by default None.
1351
+
1352
+ Raises
1353
+ ------
1354
+ ValueError
1355
+ If the file already exists.
1356
+ """
1357
+ import pandas as pd
1358
+ import openpyxl
1359
+ import os
1360
+
1361
+ if save:
1362
+ if folder_out is None:
1363
+ folder_out = os.path.join(os.path.dirname(__file__), "xlsx_results")
1364
+
1365
+ os.makedirs(folder_out, exist_ok=True)
1366
+
1367
+ if file_name is None:
1368
+ file_name = "AS_compare"
1369
+ for model in self._loaded_results:
1370
+ file_name += "_" + model
1371
+
1372
+ if not file_name.endswith("." + fmt):
1373
+ file_name = file_name + "." + fmt
1374
+ saving_path = os.path.join(folder_out, file_name)
1375
+
1376
+ if os.path.exists(saving_path):
1377
+ raise ValueError(f"Existing file: {saving_path}")
1378
+
1379
+ extration_profile_params = {} if extration_profile_params is None else extration_profile_params
1380
+
1381
+ # Create table columns
1382
+ cols = ["", ""]
1383
+ for model in self._loaded_results:
1384
+ cols.append(model)
1385
+
1386
+ table_content = []
1387
+
1388
+ # --------------------------------------------------------------------------------------------
1389
+ # Compute AS at each time step for each model
1390
+ # --------------------------------------------------------------------------------------------
1391
+ model_AS_h = {}
1392
+ model_AS_u = {}
1393
+ for model in self._loaded_results:
1394
+ as_h_profile = self.compute_analytic_solution(output="h",
1395
+ T=self._models_tim[model],
1396
+ model=model,
1397
+ **analytic_solution)
1398
+ model_AS_h[model] = as_h_profile
1399
+ if compute_as_u:
1400
+ as_u_profile = self.compute_analytic_solution(output="u",
1401
+ T=self._models_tim[model],
1402
+ model=model,
1403
+ **analytic_solution)
1404
+
1405
+ model_AS_u[model] = np.nan_to_num(as_u_profile.d, nan=0)
1406
+
1407
+ if flow_threshold is None:
1408
+ flow_threshold = np.max(model_AS_h[model].d) * 0.01
1409
+
1410
+ # --------------------------------------------------------------------------------------------
1411
+ # Criteria integrated throughout the simulation
1412
+ # --------------------------------------------------------------------------------------------
1413
+ table_content.append(["Criteria integrated throughout the simulation"])
1414
+
1415
+ # ---------------------------------- Height difference ---------------------------------------
1416
+ model_h = {}
1417
+ for model in self._loaded_results:
1418
+ profile, _ = self._loaded_results[model].get_profile(output="h",
1419
+ **extration_profile_params)
1420
+ model_h[model] = profile
1421
+
1422
+ line = ["Total Height Difference", "RMS (AS)"]
1423
+ for model in self._loaded_results:
1424
+ line.append((np.sqrt(np.sum((model_h[model].d - model_AS_h[model].d)**2)) /
1425
+ np.sqrt(np.sum((model_AS_h[model].d)**2))))
1426
+
1427
+ table_content.append(line)
1428
+
1429
+ # ------------------------------------ HU difference -----------------------------------------
1430
+ if compute_as_u:
1431
+ model_u = {}
1432
+ for model in self._loaded_results:
1433
+ profile, _ = self._loaded_results[model].get_profile(output="u",
1434
+ **extration_profile_params)
1435
+ model_u[model] = profile
1436
+
1437
+ line = ["Total Momentum Difference", "RMS (AS)"]
1438
+ for model in self._loaded_results:
1439
+ hu = model_h[model].d * model_u[model].d
1440
+ as_hu = model_AS_h[model].d * model_AS_u[model]
1441
+ line.append((np.sqrt(np.sum((hu - as_hu)**2)) /
1442
+ np.sqrt(np.sum((as_hu)**2))))
1443
+
1444
+ table_content.append(line)
1445
+
1446
+ # ----------------------------------- Front position ---------------------------------------
1447
+ model_position = {}
1448
+ model_as_position = {}
1449
+ for model in self._loaded_results:
1450
+ list_position = []
1451
+ list_as_position = []
1452
+ for t in range(len(model_h[model].t)):
1453
+ # Model
1454
+ max_index = np.argmax(model_h[model].d[:, t])
1455
+ idx = np.where(model_h[model].d[max_index:, t] <= flow_threshold)[0]
1456
+ idx = (idx[0] - 1) if len(idx) else len(model_h[model].d[max_index:, t]) - 1
1457
+ idx += max_index
1458
+ list_position.append(model_h[model].coords[idx])
1459
+ # AS
1460
+ max_index = np.argmax(model_AS_h[model].d[:, t])
1461
+ idx = np.where(model_AS_h[model].d[max_index:, t] <= flow_threshold)[0]
1462
+ idx = (idx[0] - 1) if len(idx) else len(model_AS_h[model].d[max_index:, t]) - 1
1463
+ idx += max_index
1464
+ list_as_position.append(model_AS_h[model].coords[idx])
1465
+ model_position[model] = np.array(list_position)
1466
+ model_as_position[model] = np.array(list_as_position)
1467
+
1468
+ line = ["Total Front Position Difference", "RMS (AS)"]
1469
+
1470
+ for model in self._loaded_results:
1471
+ line.append((np.sqrt(np.sum((model_position[model] - model_as_position[model])**2)) /
1472
+ np.sqrt(np.sum((model_as_position[model])**2))))
1473
+
1474
+ table_content.append(line)
1475
+
1476
+ # --------------------------------------------------------------------------------------------
1477
+ # Criteria for the final time step of the simulation
1478
+ # --------------------------------------------------------------------------------------------
1479
+ table_content.append(["Criteria for the final time step of the simulation"])
1480
+
1481
+ # --------------------------------------- Final height ---------------------------------------
1482
+ line = ["Final Thickness Repartition", "RMS (AS)"]
1483
+ for model in self._loaded_results:
1484
+ line.append((np.sqrt(np.sum((model_h[model].d[:, -1] - model_AS_h[model].d[:, -1])**2)) /
1485
+ np.sqrt(np.sum((model_AS_h[model].d[:, -1])**2))))
1486
+
1487
+ table_content.append(line)
1488
+
1489
+ # ---------------------------- Front position at final time step -----------------------------
1490
+ line = ["Final Front Position", "Distance [m]"]
1491
+
1492
+ for model in self._loaded_results:
1493
+ line.append(model_position[model][-1] - model_position[model][0])
1494
+
1495
+ table_content.append(line)
1496
+
1497
+ line = ["", "Compared to AS"]
1498
+
1499
+ # model_as_position[model] differs from one model to another
1500
+ for model in self._loaded_results:
1501
+ line.append((model_position[model][-1] - model_as_position[model][-1]) /
1502
+ model_as_position[model][-1])
1503
+
1504
+ table_content.append(line)
1505
+
1506
+ # --------------------------------------------------------------------------------------------
1507
+ # Numerical criteria
1508
+ # --------------------------------------------------------------------------------------------
1509
+ table_content.append(["Numerical criteria"])
1510
+
1511
+ # ------------------------------------------- Volume -----------------------------------------
1512
+ model_volume = {}
1513
+ for model in self._loaded_results:
1514
+ volume = self._loaded_results[model].get_output("volume")
1515
+ model_volume[model] = volume
1516
+
1517
+ line = ["Volume Variation", "RMS of Total Volume Variation"]
1518
+
1519
+ for model in self._loaded_results:
1520
+ rms_vinit = 0
1521
+ for v in model_volume[model].d:
1522
+ rms_vinit += np.sqrt((v - model_volume[model].d[0])**2) / model_volume[model].d[0]
1523
+ line.append(rms_vinit)
1524
+ table_content.append(line)
1525
+
1526
+ line = ["", "(Vf - Vi) / Vi"]
1527
+
1528
+ for model in self._loaded_results:
1529
+ line.append((model_volume[model].d[-1] - model_volume[model].d[0]) /
1530
+ model_volume[model].d[0])
1531
+ table_content.append(line)
1532
+
1533
+
1534
+ # --------------------------------------------------------------------------------------------
1535
+ # Saving table
1536
+ # --------------------------------------------------------------------------------------------
1537
+ df = pd.DataFrame(table_content, columns=cols)
1538
+ if save:
1539
+ if fmt == "csv":
1540
+ df.to_csv(saving_path, index=False, encoding="utf-8")
1541
+
1542
+ if fmt == "xlsx":
1543
+ df.to_excel(saving_path, index=False, engine="openpyxl")
1544
+
1545
+ # --------------------------------------- Reformat table -------------------------------------
1546
+ wb = openpyxl.load_workbook(saving_path)
1547
+ ws = wb.active
1548
+
1549
+ # Add border style
1550
+ bordure_fine = openpyxl.styles.Border(left=openpyxl.styles.Side(style="thin"),
1551
+ right=openpyxl.styles.Side(style="thin"),
1552
+ top=openpyxl.styles.Side(style="thin"),
1553
+ bottom=openpyxl.styles.Side(style="thin"))
1554
+
1555
+ for row in ws.iter_rows(min_row=1, max_row=ws.max_row, max_col=ws.max_column):
1556
+ for cell in row:
1557
+ cell.border = bordure_fine
1558
+ wb.save(saving_path)
1559
+
1560
+ print(f"Saved in: {saving_path}")
1561
+
1562
+ return df
1563
+
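+ # Analytic-comparison sketch; the Depth_result subclass and its parameters are
+ # placeholders (see compute_analytic_solution):
+ #     df = bench.generate_analytical_comparison_csv(
+ #         analytic_solution={"solution": SomeDepthResult, **solution_params},
+ #         save=True,
+ #         fmt="csv",
+ #         extration_profile_params={"extraction_method": "axis"})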