wolfhece 2.2.8-py3-none-any.whl → 2.2.10-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. wolfhece/PyDraw.py +94 -24
  2. wolfhece/PyGui.py +1 -0
  3. wolfhece/PyVertex.py +127 -19
  4. wolfhece/PyVertexvectors.py +73 -21
  5. wolfhece/__init__.py +5 -2
  6. wolfhece/apps/version.py +1 -1
  7. wolfhece/hydrology/Internal_variables.py +283 -0
  8. wolfhece/hydrology/Models_characteristics.py +223 -0
  9. wolfhece/hydrology/Optimisation.py +324 -14
  10. wolfhece/hydrology/SubBasin.py +112 -28
  11. wolfhece/hydrology/cst_exchanges.py +1 -0
  12. wolfhece/hydrometry/kiwis.py +8 -3
  13. wolfhece/lagrangian/particle_system_ui.py +1 -1
  14. wolfhece/lazviewer/processing/estimate_normals/estimate_normals.cp311-win_amd64.pyd +0 -0
  15. wolfhece/lazviewer/vfuncsdir/vfuncs.cp311-win_amd64.pyd +0 -0
  16. wolfhece/lazviewer/viewer/viewer.exe +0 -0
  17. wolfhece/lazviewer/viewer/viewer_310.exe +0 -0
  18. wolfhece/libs/WolfDll.dll +0 -0
  19. wolfhece/libs/get_infos.cp311-win_amd64.pyd +0 -0
  20. wolfhece/libs/verify_wolf.cp311-win_amd64.pyd +0 -0
  21. wolfhece/libs/wolfogl.cp311-win_amd64.pyd +0 -0
  22. wolfhece/pydike.py +1 -1
  23. wolfhece/pyviews.py +1 -1
  24. wolfhece/wolf_array.py +28 -6
  25. wolfhece-2.2.10.dist-info/METADATA +90 -0
  26. {wolfhece-2.2.8.dist-info → wolfhece-2.2.10.dist-info}/RECORD +33 -21
  27. {wolfhece-2.2.8.dist-info → wolfhece-2.2.10.dist-info}/WHEEL +1 -1
  28. {wolfhece-2.2.8.dist-info → wolfhece-2.3.0.dist-info}/METADATA +3 -3
  29. wolfhece-2.3.0.dist-info/WHEEL +5 -0
  30. wolfhece-2.3.0.dist-info/entry_points.txt +17 -0
  31. wolfhece-2.3.0.dist-info/top_level.txt +1 -0
  32. {wolfhece-2.2.8.dist-info → wolfhece-2.2.10.dist-info}/entry_points.txt +0 -0
  33. {wolfhece-2.2.8.dist-info → wolfhece-2.2.10.dist-info}/top_level.txt +0 -0
@@ -53,6 +53,9 @@ from .matplotlib_fig import Matplotlib_Figure as MplFig
  from .PyPalette import wolfpalette
 
  class Triangulation(Element_To_Draw):
+ """ Triangulation based on a listof vertices
+ and triangles enumerated by their vertex indices """
+
  def __init__(self, fn='', pts=[], tri=[], idx: str = '', plotted: bool = True, mapviewer=None, need_for_wx: bool = False) -> None:
  super().__init__(idx, plotted, mapviewer, need_for_wx)
 
@@ -74,20 +77,24 @@ class Triangulation(Element_To_Draw):
  self.filename=fn
  self.read(fn)
  else:
- self.valid_format()
+ self.validate_format()
  pass
 
- def valid_format(self):
+ def validate_format(self):
+ """ Force the format of the data """
+
  if isinstance(self.pts,list):
  self.pts = np.asarray(self.pts)
  if isinstance(self.tri,list):
  self.tri = np.asarray(self.tri)
 
  def as_polydata(self) -> pv.PolyData:
+ """ Convert the triangulation to a PyVista PolyData object """
 
  return pv.PolyData(np.asarray(self.pts),np.column_stack([[3]*self.nb_tri,self.tri]), self.nb_tri)
 
- def from_polydata(self,poly:pv.PolyData):
+ def from_polydata(self, poly:pv.PolyData):
+ """ Convert a PyVista PolyData object to the triangulation format """
 
  self.pts = np.asarray(poly.points.copy())
  self.tri = np.asarray(poly.faces.reshape([int(len(poly.faces)/4),4])[:,1:4])
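Note: these Triangulation / zone / Zones hunks appear to belong to the wolfhece/PyVertexvectors.py entry in the file list above. As a standalone sketch (not taken from the package), this is the face layout that as_polydata() hands to PyVista and that from_polydata() unpacks:

    import numpy as np
    import pyvista as pv

    pts = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [1., 1., 0.]])
    tri = np.array([[0, 1, 2], [1, 3, 2]])

    # each face is prefixed by its vertex count (3), as in Triangulation.as_polydata()
    faces = np.column_stack([[3] * len(tri), tri])
    poly = pv.PolyData(pts, faces)

    # and unpacked again, as in Triangulation.from_polydata()
    tri_back = poly.faces.reshape([len(poly.faces) // 4, 4])[:, 1:4]
    assert np.array_equal(tri_back, tri)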
@@ -95,7 +102,8 @@ class Triangulation(Element_To_Draw):
  self.nb_pts = len(self.pts)
  self.nb_tri = len(self.tri)
 
- def clip_surface(self,other,invert=True,subdivide=0):
+ def clip_surface(self, other:"Triangulation", invert=True, subdivide=0):
+ """ Clip the triangulation with another one """
 
  if subdivide==0:
  mypoly = self.as_polydata()
@@ -104,14 +112,12 @@ class Triangulation(Element_To_Draw):
  mypoly = self.as_polydata().subdivide(subdivide)
  mycrop = other.as_polydata().subdivide(subdivide)
 
- res = mypoly.clip_surface(mycrop,invert=invert)
+ res = mypoly.clip_surface(mycrop, invert=invert)
 
  if len(res.faces)>0:
  self.from_polydata(res)
- else:
- return None
 
- def get_mask(self,eps=1e-10):
+ def get_mask(self, eps:float= 1e-10):
  """
  Teste si la surface de tous les triangles est positive
 
@@ -122,7 +128,7 @@ class Triangulation(Element_To_Draw):
  v1 = [self.pts[curtri[1]][:2] - self.pts[curtri[0]][:2] for curtri in self.tri]
  v2 = [self.pts[curtri[2]][:2] - self.pts[curtri[0]][:2] for curtri in self.tri]
  self.areas = np.cross(v2,v1,)/2
- return self.areas<=eps
+ return self.areas <= eps
 
  # invalid_tri = np.sort(np.where(self.areas<=eps)[0])
  # for curinv in invalid_tri[::-1]:
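For context, a small self-contained sketch (not from the package) of the signed-area test that get_mask() vectorises over all triangles; triangles whose signed area is <= eps end up masked:

    import numpy as np

    pts = np.array([[0., 0.], [1., 0.], [0., 1.], [1., 1.]])
    tri = np.array([[0, 1, 2], [2, 1, 0]])     # second triangle uses the reversed orientation

    v1 = [pts[t[1]] - pts[t[0]] for t in tri]
    v2 = [pts[t[2]] - pts[t[0]] for t in tri]
    areas = np.cross(v2, v1) / 2               # signed areas: array([-0.5,  0.5])
    mask = areas <= 1e-10                      # array([ True, False]): first triangle is flagged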
@@ -131,8 +137,11 @@ class Triangulation(Element_To_Draw):
  # self.nb_tri = len(self.tri)
 
  def import_from_gltf(self, fn=''):
+ """ Import a GLTF file and convert it to the triangulation format """
 
- if fn =='':
+ wx_exists = wx.GetApp() is not None
+
+ if fn =='' and wx_exists:
  dlg=wx.FileDialog(None,_('Choose filename'),wildcard='binary gltf2 (*.glb)|*.glb|gltf2 (*.gltf)|*.gltf|All (*.*)|*.*',style=wx.FD_OPEN)
  ret=dlg.ShowModal()
  if ret==wx.ID_CANCEL:
@@ -141,6 +150,8 @@ class Triangulation(Element_To_Draw):
 
  fn=dlg.GetPath()
  dlg.Destroy()
+ else:
+ fn = str(fn)
 
  gltf = pygltflib.GLTF2().load(fn)
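With the wx.GetApp() guard above, import_from_gltf() only opens a file dialog when a wx application is running. A hedged sketch of headless use, assuming a Triangulation can be created without a viewer and that the GLB file exists (both the import path and the file name are illustrative):

    from wolfhece.PyVertexvectors import Triangulation   # assumed module path

    tri = Triangulation(plotted=False, need_for_wx=False)
    tri.import_from_gltf('surface.glb')                   # fn is passed through str(fn); no dialog
    print(tri.nb_pts, tri.nb_tri)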
 
@@ -216,6 +227,7 @@ class Triangulation(Element_To_Draw):
  self.nb_tri = len(self.tri)
 
  def export_to_gltf(self,fn=''):
+ """ Export the triangulation to a GLTF file """
 
  #on force les types de variables
  triangles = np.asarray(self.tri).astype(np.uint32)
@@ -302,7 +314,7 @@ class Triangulation(Element_To_Draw):
  return
 
  if fn!='':
- self.filename=fn
+ self.filename = fn
 
  triangles = np.asarray(self.tri).astype(np.uint32)
  points = np.asarray(self.pts) #.astype(np.float64)
@@ -343,11 +355,13 @@ class Triangulation(Element_To_Draw):
  buf = np.frombuffer(f.read(4 * self.nb_tri * 3), dtype=np.uint32)
  self.tri = np.array(buf.copy(), dtype=np.uint32).reshape([self.nb_tri,3]).astype(np.int32)
 
- self.valid_format()
+ self.validate_format()
  self.find_minmax(True)
  self.reset_plot()
 
  def reset_plot(self):
+ """ Reset the OpenGL plot """
+
  try:
  if self.id_list!=-99999:
  glDeleteLists(self.id_list)
@@ -357,6 +371,7 @@ class Triangulation(Element_To_Draw):
  self.id_list = -99999
 
  def plot(self, sx=None, sy=None, xmin=None, ymin=None, xmax=None, ymax=None, size=None ):
+ """ Plot the triangulation in OpenGL """
 
  if self.id_list == -99999:
  try:
@@ -387,7 +402,25 @@ class Triangulation(Element_To_Draw):
  else:
  glCallList(self.id_list)
 
+ def plot_matplotlib(self, ax:Axes, color='black', alpha=1., lw=1.5, **kwargs):
+ """ Plot the triangulation in Matplotlib
+ """
+
+ if self.nb_tri>0:
+ for curtri in self.tri:
+ x = [self.pts[curtri[0]][0], self.pts[curtri[1]][0], self.pts[curtri[2]][0], self.pts[curtri[0]][0]]
+ y = [self.pts[curtri[0]][1], self.pts[curtri[1]][1], self.pts[curtri[2]][1], self.pts[curtri[0]][1]]
+ ax.plot(x, y, color=color, alpha=alpha, lw=lw, **kwargs)
+ else:
+ logging.warning('No triangles to plot')
+
+
  def find_minmax(self,force):
+ """ Find the min and max of the triangulation
+
+ :param force: force the min and max to be calculated
+ """
+
  if force:
  if self.nb_pts>0:
  self.xmin=np.min(self.pts[:,0])
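The new plot_matplotlib() draws each triangle as a closed polyline on a Matplotlib Axes. A minimal sketch, assuming the same module path as above and filling the instance through from_polydata(), which sets pts, tri and the counters:

    import matplotlib.pyplot as plt
    import numpy as np
    import pyvista as pv
    from wolfhece.PyVertexvectors import Triangulation   # assumed module path

    poly = pv.PolyData(np.array([[0., 0., 0.], [1., 0., 0.], [0.5, 1., 0.]]),
                       np.array([3, 0, 1, 2]))
    tri = Triangulation(plotted=False, need_for_wx=False)
    tri.from_polydata(poly)

    fig, ax = plt.subplots()
    tri.plot_matplotlib(ax, color='red', alpha=0.8, lw=1.0)
    ax.set_aspect('equal')
    plt.show()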
@@ -396,6 +429,8 @@ class Triangulation(Element_To_Draw):
  self.ymax=np.max(self.pts[:,1])
 
  def import_dxf(self,fn):
+ """ Import a DXF file and convert it to the triangulation format """
+
  import ezdxf
 
  if not path.exists(fn):
@@ -431,7 +466,7 @@ class Triangulation(Element_To_Draw):
  self.pts = xyz_u
  self.nb_pts = len(self.pts)
  self.nb_tri = len(self.tri)
- self.valid_format()
+ self.validate_format()
 
  def set_cache(self):
  """ Set the cache for the vertices """
@@ -4038,7 +4073,7 @@ class zone:
  self.add_vector(mypl,0)
  self.add_vector(mypr,2)
 
- def createmultibin(self, nb=None, nb2=0) -> Triangulation:
+ def create_multibin(self, nb:int = None, nb2:int = 0) -> Triangulation:
  """
  Création d'une triangulation sur base des vecteurs
  Tient compte de l'ordre
@@ -4075,7 +4110,11 @@ class zone:
  nb=int(dlg.GetValue())
  dlg.Destroy()
  else:
- logging.warning( _('Bad parameter nb'))
+ try:
+ nb=int(nb)
+ except:
+ logging.warning( _('Bad parameter nb'))
+ return None
 
  # redécoupage des polylines
  s = np.linspace(0.,1.,num=nb,endpoint=True)
@@ -4095,10 +4134,16 @@ class zone:
  ret=dlg.ShowModal()
  if ret==wx.ID_CANCEL:
  dlg.Destroy()
- return
+ return None
 
  nb2=int(dlg.GetValue())
  dlg.Destroy()
+ else:
+ try:
+ nb2=int(nb2)
+ except:
+ logging.warning( _('Bad parameter nb2'))
+ return None
 
  if nb2>0:
  finalls = []
@@ -4187,9 +4232,12 @@ class zone:
 
  return interp
 
- def create_constrainedDelaunay(self, nb=None) -> Triangulation:
+ def create_constrainedDelaunay(self, nb:int = None) -> Triangulation:
  """
- Création d'une triangulation Delaunay contrainte sur base des vecteurs
+ Création d'une triangulation Delaunay contrainte sur base des vecteurs de la zone.
+
+ Il est nécessaire de définir au moins un polygone définissant la zone de triangulation.
+ Les autres vecteurs seront utilisés comme contraintes de triangulation.
 
  Utilisation de la librairie "triangle" (https://www.cs.cmu.edu/~quake/triangle.delaunay.html)
 
@@ -4223,14 +4271,18 @@ class zone:
  nb=int(dlg.GetValue())
  dlg.Destroy()
  else:
- logging.warning( _('Bad parameter nb'))
+ try:
+ nb=int(nb)
+ except:
+ logging.warning( _('Bad parameter nb'))
+ return None
 
  if nb==0:
  # no decimation
  newls = myls
  else:
  # redécoupage des polylines
- s = np.linspace(0.,1.,num=nb,endpoint=True)
+ s = np.linspace(0., 1., num=nb, endpoint=True)
 
  newls = [LineString([curls.interpolate(curs,True) for curs in s]) for curls in myls if curls.length>0.]
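create_constrainedDelaunay() delegates the triangulation itself to the "triangle" library named in the docstring. A standalone sketch of a constrained Delaunay run with that library, independent of the wolfhece wrapper (the geometry is illustrative):

    import numpy as np
    import triangle

    domain = {
        'vertices': np.array([[0., 0.], [2., 0.], [2., 2.], [0., 2.]]),
        'segments': np.array([[0, 1], [1, 2], [2, 3], [3, 0]]),   # constraint edges (boundary polygon)
    }
    result = triangle.triangulate(domain, 'p')   # 'p': triangulate the planar straight-line graph
    print(result['triangles'])                   # vertex indices of the generated triangles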
 
@@ -7690,7 +7742,7 @@ class Zones(wx.Frame, Element_To_Draw):
  dlg.Destroy()
  return
 
- mytri = myzone.createmultibin()
+ mytri = myzone.create_multibin()
  self.mapviewer.add_object('triangulation',newobj=mytri)
  self.mapviewer.Refresh()
 
wolfhece/__init__.py CHANGED
@@ -1,11 +1,14 @@
  from . import _add_path
+ from .PyTranslate import _
 
  try:
- from osgeo import gdal, osr
+ from osgeo import gdal, osr, ogr
  gdal.UseExceptions()
+ ogr.UseExceptions()
+ osr.UseExceptions()
  except ImportError as e:
  print(e)
- raise Exception(_('Error importing GDAL library'))
+ raise Exception(_('Error importing GDAL library\nPlease ensure GDAL is installed and the Python bindings are available\n\ngdal wheels can be found at https://github.com/cgohlke/geospatial-wheels'))
 
  from .apps.version import WolfVersion
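wolfhece/__init__.py now turns on exceptions for all three GDAL modules, so failures raise instead of returning None or error codes. A small sketch of the effect (the raster name is illustrative):

    from osgeo import gdal, ogr, osr

    gdal.UseExceptions()
    ogr.UseExceptions()
    osr.UseExceptions()

    try:
        gdal.Open('missing_raster.tif')    # raises RuntimeError instead of returning None
    except RuntimeError as err:
        print(f'GDAL failure surfaced as an exception: {err}')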
 
wolfhece/apps/version.py CHANGED
@@ -5,7 +5,7 @@ class WolfVersion():
 
  self.major = 2
  self.minor = 2
- self.patch = 8
+ self.patch = 10
 
  def __str__(self):
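The bumped version can be read back at runtime; a tiny sketch (the exact string depends on WolfVersion.__str__, which is only partially shown here). The unlabeled hunk below is the new wolfhece/hydrology/Internal_variables.py module (+283 lines in the file list above):

    from wolfhece.apps.version import WolfVersion

    print(WolfVersion())   # expected to report 2.2.10 after this upgrade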
 
@@ -0,0 +1,283 @@
+ from __future__ import annotations
+ from typing import Optional
+ from os.path import exists, join
+ import numpy as np
+ import logging
+ from datetime import datetime as date
+ from datetime import timezone
+ from dataclasses import dataclass
+ from . import read as rd
+ from ..PyParams import Wolf_Param
+
+ ALL_VAR = 0
+ IV_VAR = 1
+ FRAC_VAR = 2
+ FINAL_OUT_VAR = 3
+ OUT_VAR = 4
+ DEFAULT_VAR = 5
+
+
+ @dataclass(frozen=True)
+ class Internal_Variable:
+ """
+ Class for managing internal variables in hydrological models.
+ """
+ name:str
+ file:str
+ type_of_var:int
+ linked_param:int
+
+
+ def get_time_serie(self, directory, prefix_file:str="",
+ interval:Optional[tuple[date, date]]=None) -> tuple[np.ndarray, np.ndarray]:
+ """
+ Get the time series of the internal variable.
+
+ :param interval: Optional interval for the time series.
+ :return: Time series of the internal variable.
+ """
+ filename, full_path = self.get_filename(directory, prefix=prefix_file)
+ if filename is None:
+ return None, None
+ time, cur_iv = rd.read_hydro_file(directory, fileName=filename)
+ # select the interval if needed
+ if interval is not None:
+ # Check if the datetime in interva are in UTC timezone
+ if interval[0].tzinfo == timezone.utc or interval[1].tzinfo == timezone.utc:
+ interval = (interval[0].replace(tzinfo=timezone.utc),
+ interval[1].replace(tzinfo=timezone.utc))
+ t_start = interval[0].timestamp()
+ t_end = interval[1].timestamp()
+ time = time[(time >= t_start) & (time <= t_end)]
+ cur_iv = cur_iv[(time >= t_start) & (time <= t_end)]
+ else:
+ logging.error("Interval is not in UTC timezone!")
+
+ return time, cur_iv
+
+
+ def get_filename(self, directory:str, prefix:str="")->tuple[str, str]:
+ """
+ Get the filename of the internal variable.
+
+ :param directory: Directory where the file is located.
+ :param prefix: Prefix for the filename.
+ :return: Tuple containing the name of the file only and the full path of the internal variable.
+ """
+ filename = "".join([prefix,"_", self.file, ".dat"])
+ full_path = join(directory, filename)
+ # check if the file exists
+ if not exists(full_path):
+ logging.error(f"File {full_path} not found!")
+ return None, None
+
+ return filename, full_path
+
+
+ # @dataclass(frozen=True)
+ class Param_to_Activate:
+ """
+ Class for managing parameters to activate in hydrological models.
+ """
+ key: str
+ group: str
+ file: str
+ all_variables: list[Internal_Variable]
+
+ def __init__(self, key:str="", group:str="", file:str="", all_variables:list[Internal_Variable]=[]):
+ """
+ Initialize the Params_to_Activate class with parameters for different models.
+ """
+ self.key = key
+ self.group = group
+ self.file = file
+ self.all_variables = all_variables
+
+ def add_param_info(self, key:str, group:str, file:str):
+ """
+ Add parameter information to the class.
+ """
+ self.key = key
+ self.group = group
+ self.file = file
+
+ def check_param_file(self, directory:str):
+ """
+ Define the working directory for the parameters.
+ """
+ cur_file = join(directory, self.file)
+ # check if the file exists
+ if not exists(cur_file):
+ logging.error(f"File {cur_file} not found!")
+
+ def add_variable(self, variable:Internal_Variable|list[Internal_Variable]):
+ """
+ Add one or a list of internal variable(s) to the list of variables.
+ """
+ if isinstance(variable, list):
+ self.all_variables += variable
+ else:
+ self.all_variables.append(variable)
+
+ def get_variables_names(self) -> list[str]:
+ """
+ Get the names of the internal variables.
+ """
+ return [var.name for var in self.all_variables]
+
+ def get_variables_files(self) -> list[str]:
+ """
+ Get the files of the internal variables.
+ """
+ return [var.file for var in self.all_variables]
+
+ def activate(self, directory:str, prefix_file:str="", type_of_var:int=ALL_VAR):
+ """
+ Activate the parameters for the internal variables.
+ """
+ if self.key is None or self.group is None:
+ return
+
+ to_activate = False
+ if type_of_var == ALL_VAR:
+ to_activate = True
+ else:
+ for var in self.all_variables:
+ if var.type_of_var == type_of_var or type_of_var == ALL_VAR:
+ to_activate = True
+ break
+
+ if to_activate:
+ new_prefix = self._build_prefix(prefix_file)
+ filename = ".".join([new_prefix,"param"])
+ param_filename = join(directory, filename)
+ param_file = Wolf_Param(to_read=True, filename=param_filename,toShow=False, init_GUI=False)
+ param_file.change_param(self.group, self.key, 1)
+ param_file.SavetoFile(None)
+ param_file.Reload(None)
+ else:
+ self.deactivate(directory, prefix_file)
+
+ def deactivate(self, directory:str, prefix_file:str=""):
+ """
+ Deactivate the parameters for the internal variables.
+ """
+ new_prefix = self._build_prefix(prefix_file)
+ filename = ".".join([new_prefix,"param"])
+ param_filename = join(directory, filename)
+ param_file = Wolf_Param(to_read=True, filename=param_filename,toShow=False, init_GUI=False)
+ param_file.change_param(self.group, self.key, 0)
+ param_file.SavetoFile(None)
+ param_file.Reload(None)
+
+ def get_iv_timeseries(self, directory:str, prefix_file:str="", interval:Optional[tuple[date, date]]=None, type_of_var:int=ALL_VAR) -> dict[str, np.ndarray]:
+ """
+ Get the time series of the internal variables.
+
+ :param directory: Directory where the file is located.
+ :param prefix_file: Prefix for the filename.
+ :param interval: Optional interval for the time series.
+ :return: List of tuples containing the time and internal variable data.
+ """
+
+ new_prefix = self._build_prefix(prefix_file)
+
+ all_timeseries = {var.name:
+ var.get_time_serie(directory, new_prefix, interval)[1]
+ for var in self.all_variables
+ if var.type_of_var == type_of_var or type_of_var == ALL_VAR}
+
+ return all_timeseries
+
+
+ def get_linked_params(self) -> dict[str, int]:
+ """
+ Get the linked parameters of the internal variables.
+
+ :return: Dictionary of linked parameters.
+ """
+ return {var.name: var.linked_param for var in self.all_variables if var.linked_param is not None}
+
+ def _build_prefix(self, prefix_file:str) -> str:
+ """
+ Build the prefix for the filename.
+
+ :param prefix_file: Prefix for the filename.
+ :return: Prefix for the filename.
+ """
+ if self.file == "":
+ return prefix_file
+ else:
+ return "_".join([prefix_file, self.file])
+
+
+
+ class Group_to_Activate:
+ """
+ Class for managing groups of parameters to activate in hydrological models.
+ """
+ name: str
+ all_params: list[Param_to_Activate]
+
+ def __init__(self, name:str="", all_params:list[Param_to_Activate]=[]):
+ """
+ Initialize the Group_to_Activate class with parameters for different models.
+ """
+ self.name = name
+ self.all_params = all_params
+
+ def get_keys(self) -> list[str]:
+ """
+ Get the keys of the parameters.
+ """
+ return [param.key for param in self.all_params]
+
+ def get_files_per_keys(self) -> list[str]:
+ """
+ Get the files of the parameters.
+ """
+ return [param.get_variables_files() for param in self.all_params]
+
+ def activate_all(self, directory:str, prefix_file:str="", type_of_var:int=ALL_VAR):
+ """
+ Activate all parameters in the group.
+ """
+ for param in self.all_params:
+ param.activate(directory, prefix_file, type_of_var)
+
+ def deactivate_all(self, directory:str, prefix_file:str=""):
+ """
+ Deactivate all parameters in the group.
+ """
+ for param in self.all_params:
+ param.deactivate(directory, prefix_file)
+
+ def get_all_iv_timeseries(self, directory:str, prefix_file:str="",
+ interval:Optional[tuple[date, date]]=None,
+ type_of_var:int=ALL_VAR) -> dict[str, np.ndarray]:
+ """
+ Get the time series of all internal variables in the group.
+
+ :param directory: Directory where the file is located.
+ :param prefix_file: Prefix for the filename.
+ :param interval: Optional interval for the time series.
+ :return: List of tuples containing the time and internal variable data.
+ """
+ all_timeseries = {}
+ for param in self.all_params:
+ all_timeseries.update(param.get_iv_timeseries(directory, prefix_file,
+ interval, type_of_var))
+
+ return all_timeseries
+
+ def get_all_linked_params(self) -> dict[str, int]:
+ """
+ Get the linked parameters of the internal variables.
+
+ :return: Dictionary of linked parameters.
+ """
+ all_linked_params = {}
+ for param in self.all_params:
+ all_linked_params.update(param.get_linked_params())
+
+ return all_linked_params
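The new module stacks three helpers: Internal_Variable describes one result file, Param_to_Activate toggles the matching flag in a .param file and groups its variables, and Group_to_Activate drives a set of such flags. A hedged usage sketch; every name, directory and prefix below is a placeholder, and the simulation folder is assumed to already contain the .param and result files:

    from datetime import datetime, timezone
    from wolfhece.hydrology.Internal_variables import (
        IV_VAR, Internal_Variable, Param_to_Activate, Group_to_Activate)

    iv = Internal_Variable(name='Soil moisture', file='soil_moisture',
                           type_of_var=IV_VAR, linked_param=3)
    flag = Param_to_Activate(key='Write internal variables', group='Outputs',
                             file='vhm', all_variables=[iv])
    group = Group_to_Activate(name='VHM internal variables', all_params=[flag])

    # switch the flags on in <prefix>_vhm.param, then collect the written series
    group.activate_all(directory='Simul', prefix_file='subbasin_1', type_of_var=IV_VAR)
    series = group.get_all_iv_timeseries(
        directory='Simul', prefix_file='subbasin_1',
        interval=(datetime(2021, 7, 1, tzinfo=timezone.utc),
                  datetime(2021, 7, 31, tzinfo=timezone.utc)),
        type_of_var=IV_VAR)
    print(series.keys(), group.get_all_linked_params())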