wolfhece 1.8.8__py3-none-any.whl → 1.8.9__py3-none-any.whl

This diff covers the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
@@ -27,7 +27,7 @@ class RetentionBasin():
  surfaceDrainedHydro:float # TO BUILD !!!!!!!!!!!!!!
  filledVolume:np.ndarray

- def __init__(self, _dateBegin, _dateEnd, _deltaT, _time=[], _id='J1', _name='Default name', _type='', _dictRB={}, _directDictRB={}, _tz=0, _outletNames=[], withRBDict=True, _workingDir=""):
+ def __init__(self, _dateBegin, _dateEnd, _deltaT, _time=None, _id='J1', _name='Default name', _type='', _dictRB={}, _directDictRB={}, _tz=0, _outletNames=[], withRBDict=True, _workingDir=""):
  print('Creation of a RetentionBasin!')
  self.iD = _id
  self.name = _name
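Replacing the mutable default `_time=[]` with `_time=None` avoids Python's shared-default-argument pitfall, where a single list object is reused across every call. A minimal sketch of that pitfall, independent of the wolfhece classes:

```python
# Illustrative only, not wolfhece code: why a mutable default like _time=[] is risky.
def append_bad(value, bucket=[]):      # one list object shared by every call
    bucket.append(value)
    return bucket

def append_good(value, bucket=None):   # fresh list per call unless one is supplied
    if bucket is None:
        bucket = []
    bucket.append(value)
    return bucket

print(append_bad(1), append_bad(2))    # [1, 2] [1, 2]  -> surprising sharing
print(append_good(1), append_good(2))  # [1] [2]
```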
@@ -65,12 +65,12 @@ class RetentionBasin():

  # inlets and outlets
  self.intletsObj = {}
- self.inlets = []
- self.inletsRaw = []
+ self.inlets = None
+ self.inletsRaw = None

  self.directFluxObj = {}
- self.directFluxInRB = []
- self.directFluxInRB_Raw = []
+ self.directFluxInRB = None
+ self.directFluxInRB_Raw = None

  self.downstreamObj = {}
  self._outFlow = {}
  if(_outletNames != []):
@@ -1213,7 +1213,7 @@ class RetentionBasin():
  sys.exit()
  # Save time array if it does not exist yet
  # Otherwise, check the consistency of the array with the time array of the object
- if(self.time==[]):
+ if(self.time is None):
  self.time=timeArray
  elif(self.time.all()!=timeArray.all()):
  print("ERROR: the dates read are not consitent with the dates already recored in this subbasin!")
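The `self.time==[]` tests are replaced by `self.time is None` throughout both classes. Once `self.time` holds a NumPy array, comparing it to an empty list is an element-wise operation whose result is not a usable "is it initialised?" check, whereas `is None` is unambiguous. A small illustration, assuming NumPy arrays as in the surrounding code:

```python
import numpy as np

time = None                    # sentinel for "no time array yet"
new_time = np.arange(5)

# (time == []) on a NumPy array is an element-wise comparison, not a truth value,
# so it cannot reliably detect the "not yet set" case.
if time is None:
    time = new_time            # unambiguous initialisation check
elif not np.array_equal(time, new_time):
    print("ERROR: inconsistent time arrays")
```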
@@ -132,7 +132,7 @@ class SubBasin:
  self.isActivated = True
  ## Time array containing all the timestamps
  # @var time timestamps array of dimension equal to rain and evap (or 1 element more than myHydro so far (VHM but not UH)).
- self.time = []
+ self.time = None

  self.dateBegin = _dateBegin # Must be in GMT+0 !!!
  self.dateEnd = _dateEnd # Must be in GMT+0 !!!
@@ -159,8 +159,8 @@ class SubBasin:
  self.myHydro = [] # [m^3/s] Hydro of the subbasin only -> UPDATE: [mm/h]!!!!!

  self.intletsObj = {}
- self.inlets = []
- self.inletsRaw = []
+ self.inlets = None
+ self.inletsRaw = None
  self.downstreamObj = {}

  ## @var outFlow
@@ -171,15 +171,15 @@ class SubBasin:
  # self.outFlowRaw = [] # [m^3/s]
  # Hyeto
  self.myHyetoDict = {}
- self.myRain = [] # [mm/h] Caution in the difference of units in rain !!!!!!
+ self.myRain = None # [mm/h] Caution in the difference of units in rain !!!!!!
  self.rain = [] # [m^3/h] Caution in the difference of units in rain !!!!!!
  # Evapotranspiration
- self.myEvap = [] # [mm/h]
- self.evap = [] # [mm/h]
+ self.myEvap = None # [mm/h]
+ self.evap = None # [mm/h]
  # Temperature
- self.myTemp = []
+ self.myTemp = None
  # Outflow converted in hystograph
- self.hydrograph = [] # //
+ self.hydrograph = None # //
  # self.hystograph = []

  # Main subbasin characteristics
@@ -216,7 +216,7 @@ class SubBasin:
  self.init_timeDelay()
  if(readHydro):
  timeTest, self.myHydro = self.get_hydro(self.iDSorted, _workingDir, tzDelta=datetime.timedelta(hours=self.tz))
- if(self.time==[]):
+ if(self.time is None):
  self.time = timeTest
  else:
  if not(np.array_equal(timeTest,self.time)):
@@ -333,7 +333,7 @@ class SubBasin:
  sys.exit()
  # Save time array if it does not exist yet
  # Otherwise, check the consistency of the array with the time array of the object
- if(self.time==[]):
+ if(self.time is None):
  self.time=timeArray
  elif(self.time!=timeArray):
  print("ERROR: the dates read are not consitent with the dates already recored in this subbasin!")
@@ -456,7 +456,7 @@ class SubBasin:
  sys.exit()
  # Save time array if it does not exist yet
  # Otherwise, check the consistency of the array with the time array of the object
- if(self.time==[]):
+ if(self.time is None):
  self.time=timeArray
  elif((self.time!=timeArray).all()):
  print("ERROR: the dates read are not consitent with the dates already recored in this subbasin!")
@@ -583,7 +583,7 @@ class SubBasin:
  sys.exit()
  # Save time array if it does not exist yet
  # Otherwise, check the consistency of the array with the time array of the object
- if(self.time==[]):
+ if(self.time is None):
  self.time=timeArray
  elif((self.time!=timeArray).all()):
  print("ERROR: the dates read are not consitent with the dates already recored in this subbasin!")
@@ -710,7 +710,7 @@ class SubBasin:
  sys.exit()
  # Save time array if it does not exist yet
  # Otherwise, check the consistency of the array with the time array of the object
- if(self.time==[]):
+ if(self.time is None):
  self.time=timeArray
  elif((self.time!=timeArray).all()):
  print("ERROR: the dates read are not consitent with the dates already recored in this subbasin!")
@@ -810,7 +810,7 @@ class SubBasin:
  sys.exit()
  # Save time array if it does not exist yet
  # Otherwise, check the consistency of the array with the time array of the object
- if(self.time==[]):
+ if(self.time is None):
  self.time=timeArray
  elif(self.time!=timeArray):
  print("ERROR: the dates read are not consitent with the dates already recored in this subbasin!")
@@ -883,7 +883,7 @@ class SubBasin:
  # sys.exit()
  # Save time array if it does not exist yet
  # Otherwise, check the consistency of the array with the time array of the object
- if(self.time==[]):
+ if(self.time is None):
  self.time=timeArray
  elif(self.time!=timeArray):
  print("ERROR: the dates read are not consitent with the dates already recored in this subbasin!")
@@ -1154,7 +1154,7 @@ class SubBasin:
  print("Date timestamp expected = ", self.dateEnd)
  sys.exit()
  # Save the time if it does not exist yet
- if(self.time==[]):
+ if(self.time is None):
  self.time=time
  print("Time didn't exist before, therefore it is save now according to rain data time serie!")

@@ -1239,7 +1239,7 @@ class SubBasin:
  print("Date expected = ", self.dateEnd)
  sys.exit()
  # Save the time if it does not exist yet
- if(self.time==[]):
+ if(self.time is None):
  self.time=time
  print("Time didn't exist before, therefore it is saved now according to rain data time serie!")
  elif not(np.array_equal(time,self.time)):
@@ -1311,7 +1311,7 @@ class SubBasin:
  print("Delta t read = ", diffTimeInSeconds)
  print("Delta t expected = ", self.deltaT)
  sys.exit()
- if(self.time==[]):
+ if(self.time is None):
  self.time = time
  elif not(np.array_equal(time,self.time)):
  print('Time arrays are not the same! Please check your answers.')
@@ -1363,8 +1363,8 @@ class SubBasin:
  else:
  tzDelta = datetime.timedelta(seconds=tzPlot*3600.0)
  timeDelayDelta = datetime.timedelta(seconds=self.timeDelay)
- beginDate = datetime.datetime.fromtimestamp(self.time[0], tz=datetime.timezone.utc)+tzDelta
- endDate = datetime.datetime.fromtimestamp(self.time[-1], tz=datetime.timezone.utc)+tzDelta
+ beginDate = datetime.datetime.fromtimestamp(self.time[0], tz=datetime.timezone.utc)+tzDelta-timeDelayDelta
+ endDate = datetime.datetime.fromtimestamp(self.time[-1], tz=datetime.timezone.utc)+tzDelta-timeDelayDelta
  dt = self.time[1]-self.time[0]
  time_delta = datetime.timedelta(seconds=dt)
  if(rangeData==[]):
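The plotted date range now also subtracts `timeDelayDelta`, shifting the x-axis back by the sub-basin's time delay. The timestamp arithmetic itself is plain standard-library `datetime`; a standalone sketch with made-up values:

```python
import datetime

t0 = 1_690_000_000                                  # example epoch timestamp [s]
tz_delta = datetime.timedelta(seconds=2 * 3600.0)   # display time-zone offset
time_delay = datetime.timedelta(seconds=1800.0)     # illustrative sub-basin delay

begin = datetime.datetime.fromtimestamp(t0, tz=datetime.timezone.utc) + tz_delta - time_delay
print(begin.isoformat())
```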
@@ -2370,9 +2370,9 @@ class SubBasin:
  tmpSum = np.zeros(len(myOutFlow)-lastElement)
  y = np.zeros((len(myOutFlow)-lastElement,nbElements))

- y[:,0] = tmpCumul[:-1]
- y[:,1] = tmpCumul2[:-1]
- y[:,2] = tmpCumul[:-1] - tmpCumul2[:-1]
+ y[:,0] = tmpCumul[:]
+ y[:,1] = tmpCumul2[:]
+ y[:,2] = tmpCumul[:] - tmpCumul2[:]


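The columns of `y` are now filled from the full `tmpCumul` arrays instead of dropping their last element. How those arrays are built is not visible in this hunk, so the sketch below only illustrates the generic off-by-one that a `[:-1]` slice introduces when the target already has the full length:

```python
import numpy as np

flow = np.array([1.0, 2.0, 3.0, 4.0])
cumul = np.cumsum(flow)            # same length as flow: [1., 3., 6., 10.]

y = np.zeros((len(flow), 2))
y[:, 0] = cumul                    # lengths match
# y[:, 0] = cumul[:-1]             # would raise: 3 values cannot fill 4 slots
```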
@@ -2964,9 +2964,54 @@ class SubBasin:
  logging.error("=================")

  return gOutFlow
+

-
- # FIXME Finish the function below
+ @property
+ def cumul_rain(self) -> np.array:
+ return np.cumsum(self.myRain)
+
+
+
+ def evaluate_Nash(self, measure,
+ intervals:list[tuple[datetime.datetime]]=[]) -> list[float]:
+ ns = []
+
+ if intervals == []:
+ ns.append( datt.evaluate_Nash(self.outFlow, self.time,
+ measures=measure.get_myHydro(), tMeasures=measure.time,
+ dateBegin=self.dateBegin, dateEnd=self.dateEnd) )
+ return tuple(ns)
+
+ # for el in intervals:
+ # ns.append( datt.evaluate_Nash(self.outFlow, self.time,
+ # measures=measure.get_myHydro(), tMeasures=measure.time,
+ # dateBegin=el[0], dateEnd=el[1]) )
+ ns = [ datt.evaluate_Nash(self.outFlow, self.time,
+ measures=measure.get_myHydro(), tMeasures=measure.time,
+ dateBegin=el[0], dateEnd=el[1])
+ for el in intervals ]
+
+ return tuple(ns)
+
+
+ def get_peak(self, intervals:list[tuple[datetime.datetime]]=[]) -> list[float]:
+
+ peak_s = []
+ for element in intervals:
+ # We conisder the indice to form complete intervals
+ simul_i = math.ceil((element[0]-self.dateBegin).total_seconds()/self.deltaT)
+ simul_f = math.floor((element[1]-self.dateBegin).total_seconds()/self.deltaT)
+ # meas_i = math.floor((element[0]-measure.dateBegin).total_seconds/measure.deltaT)
+ # meas_f = math.floor((element[1]-measure.dateBegin).total_seconds/measure.deltaT)
+ if simul_i<0 or simul_f>len(self.time):
+ continue
+ peak_s.append(self.outFlow[simul_i:simul_f+1].max())
+
+ return peak_s
+
+
+
+ # FIXME Finish the function below -> returning all possible or desired internal variables
  # def get_InterVar(self, typeVar:int):

  # if typeVar==
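The new `get_peak` turns each datetime interval into array indices with `ceil`/`floor`, so only complete time steps inside the interval are considered before taking the maximum outflow. A standalone sketch of that index arithmetic, with made-up values:

```python
import datetime
import math
import numpy as np

date_begin = datetime.datetime(2021, 7, 1)
delta_t = 3600.0                                     # time step [s]
out_flow = np.random.default_rng(0).random(240)      # illustrative hourly series

start = datetime.datetime(2021, 7, 3, 5, 30)
end = datetime.datetime(2021, 7, 4, 18, 10)

i0 = math.ceil((start - date_begin).total_seconds() / delta_t)   # first full step inside
i1 = math.floor((end - date_begin).total_seconds() / delta_t)    # last full step inside
if i0 >= 0 and i1 < len(out_flow):
    print(out_flow[i0:i1 + 1].max())                 # peak discharge over the interval
```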
@@ -461,7 +461,7 @@ def cumul_fromCst(cst, dateBegin, dateEnd, dt, isFlow=False):
  def evaluate_Nash(simul, tSimul, measures, tMeasures, dateBegin, dateEnd, mask=[]):
  """
  Function evaluating the Nash-Suttcliff coeff
- Caution: So far, if a measure is 0, it is not considered the N-S evaluation.
+ Caution: So far, if a measure is 0, it is not considered in the N-S evaluation.
  """
  # Nash–Sutcliffe model efficiency coefficient
  print("TO CORRECT -> Check 'Hello !' !!!!!")
@@ -538,10 +538,12 @@ def evaluate_Nash(simul, tSimul, measures, tMeasures, dateBegin, dateEnd, mask=[
  counter = 0
  mask = [False for i in range(nb_el_measures)]
  for i in range(nb_el_measures):
- if(measures[i]!=0.0):
- meanMeasures += measures[i]
+ if(measures[first_el_measures+i]!=0.0):
+ meanMeasures += measures[first_el_measures+i]
  mask[i]= True
  counter += 1
+ if counter == 0:
+ return 0.0
  meanMeasures = meanMeasures/counter

  for i in range(nb_el_measures):
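Two behavioural fixes here: the measurements are now indexed from `first_el_measures`, so the mean is taken over the evaluation window rather than from the start of the measurement series, and a `counter == 0` guard returns 0.0 instead of dividing by zero when every measurement in the window is zero. A compact, hedged restatement of a Nash-Sutcliffe efficiency restricted to non-zero observations (not the wolfhece routine itself):

```python
import numpy as np

def nash_sutcliffe(sim: np.ndarray, obs: np.ndarray) -> float:
    """Illustrative NSE over non-zero observations only."""
    mask = obs != 0.0
    if not mask.any():
        return 0.0                                   # same guard as the patched code path
    o, s = obs[mask], sim[mask]
    return 1.0 - np.sum((s - o) ** 2) / np.sum((o - o.mean()) ** 2)

print(nash_sutcliffe(np.array([1.1, 2.0, 2.9]), np.array([1.0, 2.0, 3.0])))
```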
@@ -249,7 +249,6 @@ def plot_hydro(nbElements:int, y, rain=[], x_title="Dates", y_titles=_("Discharg
  title = y_labels


-
  # Plot hydro
  if(nbElements==1):
  if(len(np.shape(y))==1):
@@ -550,7 +549,7 @@ def plot_hydro(nbElements:int, y, rain=[], x_title="Dates", y_titles=_("Discharg
  ax3[i].set_ylabel(y_labelAddPlot[i],color='orange')
  xdatePlotGen = check_drange_bug(xdatePlotGen,y1)
  ax3[i].plot_date(xdatePlotGen,y1, '-', color='orange')
- ax3[i].set_xlim(rangeData[0]-time_deltaGen,rangeData[1])
+ ax3[i].set_xlim(rangeData[0],rangeData[1]-time_deltaGen)
  if(deltaMajorTicks>0):
  majorTicks = HourLocator(interval=math.floor(deltaMajorTicks/3600))
  ax3[i].xaxis.set_major_locator(majorTicks)
@@ -765,4 +764,66 @@ def plot_piechart(data:list, legend:list=[], colors:list=None, figSize:list = [1
  if toShow:
  plt.show()

- return fig
+ return fig
+
+
+ def bar_Nash_n_other(all_data:list[dict], all_colors:list[dict], nb_x, nb_data, nb_lines,
+ y_titles:list[str]=[], x_titles:list[str]=[], nameModel:list[str]=[], line_names:list[str]=[],
+ hatchs:list[str]=["/", ".", "*", "x", "|", "-", "+", "o", "\\"],
+ writeFile:str="", toShow:bool=False):
+
+ assert len(hatchs) < 10
+
+ nb_models = nb_data
+ nb_stations = nb_lines
+ nb_intervals = nb_x
+ type_of_data = y_titles
+ type_of_model = nameModel
+
+ if line_names == []:
+ sorted_keys = all_data[0].keys()
+ else:
+ assert len(line_names) == len(all_data[0].keys())
+ sorted_keys = line_names
+
+ all_names = x_titles
+
+ fig, ax = plt.subplots(nb_stations*2, nb_data)
+
+ x = np.arange(nb_intervals)
+ step = 1.0/nb_intervals
+ sub_x = np.arange(0, 1, step)
+
+
+ for i_int in range(nb_intervals):
+ for i_data in range(nb_data):
+ i_station = 0
+ for k in sorted_keys:
+ cur_ax = ax[i_station*2, i_data]
+ ax[i_station*2+1, i_data].set_axis_off()
+ cur_d = all_data[i_data][k]
+ cur_c = all_colors[i_data][k]
+ for i_model in range(nb_models):
+ # y = [cur_d[i_model]]
+ cur_ax.bar(x + sub_x[i_model], cur_d[i_model], color = cur_c[i_model], width = step, hatch = hatchs[i_model])
+ cur_ax.set_xticks(x)
+ if i_station == len(sorted_keys)-1:
+ cur_ax.set_xticklabels(labels=all_names, rotation=10)
+ else:
+ cur_ax.set_xticklabels(labels=[], rotation=10)
+
+ if i_station == 0:
+ cur_ax.set_ylabel(type_of_data[i_data])
+ # cur_ax.legend(labels=type_of_model)
+ cur_ax.set_ylim([-1, 1])
+ cur_ax.set_title(" ".join(["", k]))
+ i_station += 1
+
+ ax_legend = [plt.bar([0], np.nan, color="w", hatch=hatchs[i], label=type_of_model[i], edgecolor="k") for i in range(len(type_of_model))]
+ # fig.legend(ax_legend, labels=type_of_model)
+ # plt.legend()
+ fig.legend(handlelength=3, handleheight=2, borderpad=2)
+
+
+ if toShow :
+ plt.show()
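The new `bar_Nash_n_other` helper draws, for each station, a row of grouped bars (one hatched bar per model, one group per interval) and hides a spacer row beneath it. A minimal, self-contained matplotlib sketch of the same grouped-bar-with-hatch layout (names and data are illustrative, not the wolfhece signature):

```python
import numpy as np
import matplotlib.pyplot as plt

intervals = ["2020", "2021", "2022"]            # x groups (e.g. evaluation periods)
models = {"model A": [0.80, 0.55, 0.70],        # one hatched series per model
          "model B": [0.45, 0.60, 0.90]}
hatchs = ["/", "."]

x = np.arange(len(intervals))
width = 1.0 / (len(models) + 1)

fig, ax = plt.subplots()
for i, (name, values) in enumerate(models.items()):
    ax.bar(x + i * width, values, width=width, hatch=hatchs[i], label=name, edgecolor="k")
ax.set_xticks(x + width / 2)
ax.set_xticklabels(intervals, rotation=10)
ax.set_ylim(-1, 1)
ax.set_ylabel("Nash-Sutcliffe")
ax.legend()
# plt.show()
```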
@@ -202,7 +202,7 @@ def is_relative_path(path:str):



- def relative_2_absolute(fileName:str, prefix:str="", applyCWD:bool=True):
+ def relative_2_absolute(fileName:str, prefix:str="", applyCWD:bool=True)-> tuple[bool, str] :

  info = 0

@@ -211,14 +211,14 @@ def relative_2_absolute(fileName:str, prefix:str="", applyCWD:bool=True):
  # prefix = os.path.dirname(__file__)
  prefix = os.getcwd()
  else:
- logging.error("ERROR : the path is relative but no prefix is given")
+ logging.error("The path is relative but no prefix is given")
  info = -1
  return info

  if is_relative_path(fileName):
  finalName = os.path.join(prefix, fileName)
  else:
- logging.error("This path is not initially a relative path!")
+ logging.warning("This path is not initially a relative path!")

  info = 1
  finalName = fileName
@@ -226,7 +226,7 @@ def relative_2_absolute(fileName:str, prefix:str="", applyCWD:bool=True):
  return info, finalName


- def check_path(fileName:str, prefix:str="", applyCWD:bool=True):
+ def check_path(fileName:str, prefix:str="", applyCWD:bool=True) -> tuple[bool, str] :

  info, finalName = relative_2_absolute(fileName, prefix, applyCWD)
  if info<0:
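`relative_2_absolute` and `check_path` now advertise an `(info, finalName)` pair in their signatures; in the code shown, `info` is an integer status rather than a bool (-1 when the path is relative but no prefix is available, 1 when it was already absolute). A hedged restatement of that resolution logic using only `os.path` (not the wolfhece functions):

```python
import os

def to_absolute(file_name: str, prefix: str = "", apply_cwd: bool = True) -> tuple[int, str]:
    """Illustrative sketch of the relative-to-absolute logic, not the wolfhece code."""
    if not prefix and apply_cwd:
        prefix = os.getcwd()
    if os.path.isabs(file_name):
        return 1, file_name                     # already absolute, returned unchanged
    if not prefix:
        return -1, file_name                    # relative path, nothing to resolve against
    return 0, os.path.join(prefix, file_name)

print(to_absolute("data/run1.txt"))
```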
wolfhece/libs/WolfDll.dll CHANGED
Binary file
@@ -684,106 +684,110 @@ class prev_parameters_simul:
  self.translx = float(lines[1])
  self.transly = float(lines[2])

- with open(fn + '.par') as f:
- lines = f.read().splitlines()
+ if exists(fn + '.par'):

- # Lecture des PARAMETRES GLOBAUX
- # Durée de la simulation et résultats
- self.npas = np.int64(float(lines[0])) # nbre de pas de simulation à réaliser
- self.dur = float(lines[1]) # durée souhaitée d'un pas de temps
- self.freq = float(lines[2]) # fréquence de sortie des résultats
- self.ntypefreq = int(lines[3]) # type de fréquence de sortie des résultats (en temps ou en pas)
- self.ntypewrite = int(lines[4]) # format d'écriture des résultats (1 = texte, 2 = binaire, 3=csr)
- self.ntyperead = int(lines[5]) # format de lecture des données (1 = texte, 2 = binaire, 3 = binaire par blocs)
- self.nun_seul_resu = int(lines[6]) # ecriture d'un seul résu ou pas
- # maillage fin
- self.dxfin = float(lines[7]) # dx du maillage le + fin = maillage sur lequel sont données
- self.dyfin = float(lines[8]) # dy les caract de topo, frot,...
- self.nxfin = int(lines[9]) # nbre de noeuds selon x du maillage le + fin
- self.nyfin = int(lines[10]) # y
- self.xminfin = float(lines[11]) # coordonnées absolues inf droites de la matrice des données
- self.yminfin = float(lines[12]) # (maillage le plus fin : dxfin et dyfin)
- # conditions limites
- self.impfgen = int(lines[13]) # nbre de cl fortes
- self.impfbxgen = int(lines[14]) # nbre de cl faibles sur les bords x
- self.impfbygen = int(lines[15]) # nbre de cl faibles sur les bords y
- # stabilité et schéma
- self.ponderation = float(lines[16]) # indicateur du type de schéma r-k
- self.vncsouhaite = float(lines[17]) # nbre de courant souhaité
- self.mult_dt = float(lines[18]) # facteur mult du pas de temps pour vérif a posteriori
- self.noptpas = int(lines[19]) # =1 si optimisation du pas de temps
- self.nmacc = int(lines[20]) # mac cormack ou non
- # limiteurs
- self.ntyplimit = int(lines[21]) # 0 si pas de limiteur, 1 si barth jesperson, 2 si venkatakrishnan
- # 3 si superbee, 4 si van leer, 5 si van albada, 6 si minmod
- self.vkvenka = float(lines[22]) # k de venkatakrishnan et des limiteurs modifiés
- # constantes de calcul
- self.vminhdiv = float(lines[23]) # hauteur min de division
- self.vminh = float(lines[24]) # hauteur d'eau min sur 1 maille
- self.vminh2 = float(lines[25]) # hauteur d'eau min sur 1 maille pour la calculer
- self.nepsrel = int(lines[26]) # epsilon relatif pour la dtm de q nul sur les bords
- # paramètres de calcul
- self.nderdec = int(lines[27]) # =2 si dérivées centrées, 1 sinon
- self.npentecentree = int(lines[28]) # pente centrée ou non
- self.vlatitude = float(lines[29]) # latitude pour le calcul de la force de coriolis
- # options
- self.mailonly = int(lines[30]) # 1 si uniquement maillage
- self.nremaillage = int(lines[31]) # =1 si remaillage
- self.ntronc = int(lines[32]) # troncature des variables
- self.nsmooth = int(lines[33]) # =1 si smoothing arithmétique, =2 si smoothing géométrique
- self.nclinst = int(lines[34]) # cl instationnaires ou pas
- # nbre de blocs
- self.nblocks = int(lines[35]) # nombre de blocs
-
- # allocation des espaces mémoire pour le stockage des param de blocs
- self.my_param_blocks:list[prev_parameters_blocks] = []
-
- # lecture des parametres propres aux blocs
- decal = 36
- for nbblocks in range(self.nblocks):
- curparambl = prev_parameters_blocks(lines[decal:])
- self.my_param_blocks.append(curparambl)
- decal += 23
-
- # allocation des matrices contenant les cl générales
- self.clf.set_header()
- self.clfbx.set_header()
- self.clfby.set_header()
-
- self.clf.read_file(lines[decal:decal + self.impfgen], 'strongbc')
- decal += self.impfgen
- self.clfbx.read_file(lines[decal:decal + self.impfbxgen], 'x')
- decal += self.impfbxgen
- self.clfby.read_file(lines[decal:decal + self.impfbygen], 'y')
- decal += self.impfbygen
-
- # lecture des paramètres debug globaux
- self.vdebug = []
- for i in range(60):
- self.vdebug.append(float(lines[decal + i]))
-
- decal += 60
-
- # lecture des paramètres debug par blocs
- for nbblocks in range(self.nblocks):
+ with open(fn + '.par') as f:
+ lines = f.read().splitlines()
+
+ # Lecture des PARAMETRES GLOBAUX
+ # Durée de la simulation et résultats
+ self.npas = np.int64(float(lines[0])) # nbre de pas de simulation à réaliser
+ self.dur = float(lines[1]) # durée souhaitée d'un pas de temps
+ self.freq = float(lines[2]) # fréquence de sortie des résultats
+ self.ntypefreq = int(lines[3]) # type de fréquence de sortie des résultats (en temps ou en pas)
+ self.ntypewrite = int(lines[4]) # format d'écriture des résultats (1 = texte, 2 = binaire, 3=csr)
+ self.ntyperead = int(lines[5]) # format de lecture des données (1 = texte, 2 = binaire, 3 = binaire par blocs)
+ self.nun_seul_resu = int(lines[6]) # ecriture d'un seul résu ou pas
+ # maillage fin
+ self.dxfin = float(lines[7]) # dx du maillage le + fin = maillage sur lequel sont données
+ self.dyfin = float(lines[8]) # dy les caract de topo, frot,...
+ self.nxfin = int(lines[9]) # nbre de noeuds selon x du maillage le + fin
+ self.nyfin = int(lines[10]) # y
+ self.xminfin = float(lines[11]) # coordonnées absolues inf droites de la matrice des données
+ self.yminfin = float(lines[12]) # (maillage le plus fin : dxfin et dyfin)
+ # conditions limites
+ self.impfgen = int(lines[13]) # nbre de cl fortes
+ self.impfbxgen = int(lines[14]) # nbre de cl faibles sur les bords x
+ self.impfbygen = int(lines[15]) # nbre de cl faibles sur les bords y
+ # stabilité et schéma
+ self.ponderation = float(lines[16]) # indicateur du type de schéma r-k
+ self.vncsouhaite = float(lines[17]) # nbre de courant souhaité
+ self.mult_dt = float(lines[18]) # facteur mult du pas de temps pour vérif a posteriori
+ self.noptpas = int(lines[19]) # =1 si optimisation du pas de temps
+ self.nmacc = int(lines[20]) # mac cormack ou non
+ # limiteurs
+ self.ntyplimit = int(lines[21]) # 0 si pas de limiteur, 1 si barth jesperson, 2 si venkatakrishnan
+ # 3 si superbee, 4 si van leer, 5 si van albada, 6 si minmod
+ self.vkvenka = float(lines[22]) # k de venkatakrishnan et des limiteurs modifiés
+ # constantes de calcul
+ self.vminhdiv = float(lines[23]) # hauteur min de division
+ self.vminh = float(lines[24]) # hauteur d'eau min sur 1 maille
+ self.vminh2 = float(lines[25]) # hauteur d'eau min sur 1 maille pour la calculer
+ self.nepsrel = int(lines[26]) # epsilon relatif pour la dtm de q nul sur les bords
+ # paramètres de calcul
+ self.nderdec = int(lines[27]) # =2 si dérivées centrées, 1 sinon
+ self.npentecentree = int(lines[28]) # pente centrée ou non
+ self.vlatitude = float(lines[29]) # latitude pour le calcul de la force de coriolis
+ # options
+ self.mailonly = int(lines[30]) # 1 si uniquement maillage
+ self.nremaillage = int(lines[31]) # =1 si remaillage
+ self.ntronc = int(lines[32]) # troncature des variables
+ self.nsmooth = int(lines[33]) # =1 si smoothing arithmétique, =2 si smoothing géométrique
+ self.nclinst = int(lines[34]) # cl instationnaires ou pas
+ # nbre de blocs
+ self.nblocks = int(lines[35]) # nombre de blocs
+
+ # allocation des espaces mémoire pour le stockage des param de blocs
+ self.my_param_blocks:list[prev_parameters_blocks] = []
+
+ # lecture des parametres propres aux blocs
+ decal = 36
+ for nbblocks in range(self.nblocks):
+ curparambl = prev_parameters_blocks(lines[decal:])
+ self.my_param_blocks.append(curparambl)
+ decal += 23
+
+ # allocation des matrices contenant les cl générales
+ self.clf.set_header()
+ self.clfbx.set_header()
+ self.clfby.set_header()
+
+ self.clf.read_file(lines[decal:decal + self.impfgen], 'strongbc')
+ decal += self.impfgen
+ self.clfbx.read_file(lines[decal:decal + self.impfbxgen], 'x')
+ decal += self.impfbxgen
+ self.clfby.read_file(lines[decal:decal + self.impfbygen], 'y')
+ decal += self.impfbygen
+
+ # lecture des paramètres debug globaux
+ self.vdebug = []
  for i in range(60):
- self.my_param_blocks[nbblocks].vdebug.append(float(lines[decal + i]))
+ self.vdebug.append(float(lines[decal + i]))
+
  decal += 60

- # lecture index des blocs calculés
- if self.vdebug[0]>0:
- for idx in range(int(self.vdebug[0])):
- idx_block = int(lines[decal])-1
- self.my_param_blocks[idx_block].computed = True
- decal+=1
-
- # lecture des noms de chaque bloc
- try:
- for idx in range(self.nblocks):
- self.my_param_blocks[idx].name = lines[decal]
- decal+=1
- except :
- pass
+ # lecture des paramètres debug par blocs
+ for nbblocks in range(self.nblocks):
+ for i in range(60):
+ self.my_param_blocks[nbblocks].vdebug.append(float(lines[decal + i]))
+ decal += 60
+
+ # lecture index des blocs calculés
+ if self.vdebug[0]>0:
+ for idx in range(int(self.vdebug[0])):
+ idx_block = int(lines[decal])-1
+ self.my_param_blocks[idx_block].computed = True
+ decal+=1
+
+ # lecture des noms de chaque bloc
+ try:
+ for idx in range(self.nblocks):
+ self.my_param_blocks[idx].name = lines[decal]
+ decal+=1
+ except :
+ pass
+ else:
+ logging.warning(_('.par file not found !'))

  def write_file(self, fn=''):
  """Ecriture du fichier de paramètres"""
@@ -805,9 +809,6 @@ class prev_parameters_simul:
  if Path(fn+".par").exists():
  shutil.copyfile(fn + '.par', fnback)

- # with open(fn + '.par', 'r') as f:
- # mylines = f.read().splitlines()
-
  with open(fn + '.par', 'w') as f:
  # for i in range(14):
  # f.write(mylines[i] + '\n')
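Together with the large read-side hunk above, reading the `.par` file is now guarded by an existence check and logs a warning instead of raising `FileNotFoundError`. A minimal sketch of that guard, assuming `exists` is `os.path.exists` as used in the surrounding module:

```python
import logging
from os.path import exists

def read_par_lines(fn: str):
    """Illustrative guard around an optional parameter file, not the wolfhece reader."""
    if not exists(fn + '.par'):
        logging.warning('.par file not found !')
        return None
    with open(fn + '.par') as f:
        return f.read().splitlines()
```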
@@ -945,7 +946,7 @@ class prev_parameters_simul:
  # right.
  assert i >= 1 and i <= self.nxfin, f"1 <= i:{i} <= {self.nxfin+1}"
  assert j >= 1 and j <= self.nyfin, f"1 <= j:{j} <= {self.nyfin+1}"
- self.clfby.add(i,j,ntype,value,orient)
+ self.clfby.add(i,j,ntype,value,orient='y')
  self.impfbygen += 1

  def to_yaml(self):
@@ -1286,6 +1287,9 @@ class bloc_file():
  if self.filename is None:
  return

+ if not exists(self.filename):
+ return
+
  ox = self.parent.myparam.xminfin
  oy = self.parent.myparam.yminfin
  tx = self.parent.myparam.translx