wolfhece 1.8.7__py3-none-any.whl → 1.8.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,141 @@
+ import ftplib
+ import os
+ import re
+
+ """
+ MIT license: 2017 - Jwely
+
+ Example usage:
+ ``` python
+ import ftplib
+ ftp = ftplib.FTP(mysite, username, password)
+ download_ftp_tree(ftp, remote_dir, local_dir)
+ ```
+
+ The code above will look for a directory called "remote_dir" on the ftp host and will then duplicate that
+ directory and its entire contents into "local_dir".
+
+ *** Note that if wget is an option, I recommend using that instead ***
+
+ """
+
+
+ def _is_ftp_dir(ftp_handle, name, guess_by_extension=True):
+     """ simply determines if an item listed on the ftp server is a valid directory or not """
+
+     # if the name has a "." in the fourth-to-last position, it's probably a file extension.
+     # this is MUCH faster than trying to set every file to a working directory, and will work 99% of the time.
+     if guess_by_extension is True:
+         if len(name) >= 4:
+             if name[-4] == '.':
+                 return False
+
+     original_cwd = ftp_handle.pwd()   # remember the current working directory
+     try:
+         ftp_handle.cwd(name)          # try to set directory to new name
+         ftp_handle.cwd(original_cwd)  # set it back to what it was
+         return True
+
+     except ftplib.error_perm as e:
+         print(e)
+         return False
+
+     except Exception as e:
+         print(e)
+         return False
+
+
+ def _make_parent_dir(fpath):
+     """ ensures the parent directory of a filepath exists """
+     dirname = os.path.dirname(fpath)
+     while not os.path.exists(dirname):
+         try:
+             os.makedirs(dirname)
+             print("created {0}".format(dirname))
+         except OSError as e:
+             print(e)
+             _make_parent_dir(dirname)
+
+
+ def _download_ftp_file(ftp_handle, name, dest, overwrite):
+     """ downloads a single file from an ftp server """
+     #_make_parent_dir(dest.lstrip("/"))
+     if not os.path.exists(dest) or overwrite is True:
+         try:
+             with open(dest, 'wb') as f:
+                 ftp_handle.retrbinary("RETR {0}".format(name), f.write)
+             print("downloaded: {0}".format(dest))
+         except FileNotFoundError:
+             print("FAILED: {0}".format(dest))
+     else:
+         print("already exists: {0}".format(dest))
+
+
+ def _file_name_match_pattern(pattern, name):
+     """ returns True if the filename matches the pattern """
+     if pattern is None:
+         return True
+     else:
+         return bool(re.match(pattern, name))
+
+
+ def _mirror_ftp_dir(ftp_handle, name, overwrite, guess_by_extension, pattern):
+     """ replicates a directory on an ftp server recursively """
+
+     ftp_handle.cwd(name)
+     files = []
+     ftp_handle.dir(files.append)
+     files = [curf.split()[8] for curf in files]
+     for item in files:
+         if _is_ftp_dir(ftp_handle, item, guess_by_extension):
+
+             original_directory = os.getcwd()  # remember working directory before function is executed
+             os.makedirs(item, exist_ok=True)
+             os.chdir(item)  # change working directory to ftp mirror directory
+             _mirror_ftp_dir(ftp_handle, item, overwrite, guess_by_extension, pattern)
+             os.chdir(original_directory)
+             ftp_handle.cwd('..')
+         else:
+             if _file_name_match_pattern(pattern, item):
+                 _download_ftp_file(ftp_handle, item, item, overwrite)
+             else:
+                 # quietly skip the file
+                 pass
+
+
+ def download_ftp_tree(ftp_handle, path, destination, pattern=None, overwrite=False, guess_by_extension=True):
+     """
+     Downloads an entire directory tree from an ftp server to the local destination
+     :param ftp_handle: an authenticated ftplib.FTP instance
+     :param path: the folder on the ftp server to download
+     :param destination: the local directory to store the copied folder
+     :param pattern: Python regex pattern; only files that match this pattern will be downloaded
+     :param overwrite: set to True to force re-download of all files, even if they appear to exist already
+     :param guess_by_extension: it takes a while to explicitly check whether every item is a directory or a file.
+         If this flag is set to True, any name with a three-character extension ".???" is assumed to be
+         a file and not a directory. Set it to False if some folders may have a "." in the fourth-to-last
+         position of their names.
+     """
+     path = path.lstrip("/")
+     original_directory = os.getcwd()  # remember working directory before function is executed
+     os.chdir(destination)  # change working directory to ftp mirror directory
+
+     _mirror_ftp_dir(
+         ftp_handle,
+         path,
+         pattern=pattern,
+         overwrite=overwrite,
+         guess_by_extension=guess_by_extension)
+
+     os.chdir(original_directory)  # reset working directory to what it was before function exec
+
+
+ if __name__ == "__main__":
+     # Example usage mirroring all jpg files in an FTP directory tree.
+     mysite = "some_ftp_site"
+     username = "anonymous"
+     password = None
+     remote_dir = ""
+     local_dir = ""
+     pattern = r".*\.jpg$"
+     ftp = ftplib.FTP(mysite, username, password)
+     download_ftp_tree(ftp, remote_dir, local_dir, pattern=pattern, overwrite=False, guess_by_extension=True)
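
A note on the `guess_by_extension` heuristic above: `_is_ftp_dir` only treats a name as a file when a dot sits exactly fourth from the end, so anything without a three-character extension still costs a `cwd()` round-trip, and a directory whose name happens to match is misclassified. A minimal sketch with hypothetical names:

```python
# Illustration of the _is_ftp_dir extension heuristic (names are hypothetical)
for name in ("photo.jpg", "archive.tar.gz", "backup.old", "README"):
    looks_like_file = len(name) >= 4 and name[-4] == '.'
    print(name, "->", "assumed file" if looks_like_file else "needs cwd() probe")
# photo.jpg      -> assumed file
# archive.tar.gz -> needs cwd() probe (the two-letter ".gz" extension is missed)
# backup.old     -> assumed file (even if it is actually a directory)
# README         -> needs cwd() probe
```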
@@ -31,6 +31,7 @@ from .read import *
from ..wolf_array import *
from ..PyParams import *
from ..PyVertex import cloud_vertices, getIfromRGB,wolfvertex
+ from .PyWatershed import Watershed, Node_Watershed


# %% Classes
@@ -50,6 +51,9 @@ class Catchment:

    time_delays_F:np.ndarray # array pointed to the array time_delays in Fortran and composed the time delays of each subbasin
    _version:float # version of the wolfHydro python code. Useful for identifying the file versions to read and how to interpret them
+     charact_watrshd:Watershed # Watershed object containing the most useful properties of the arrays in the Characteristic maps
+     subBasinCloud:cloud_vertices # cloud of points containing the true coordinates (used in simulation) of all subbasin outlets
+     iP_Cloud:cloud_vertices # cloud of points containing the given coordinates (from the param files) of all subbasin outlets


    def __init__(self, _name, _workingDir, _plotAllSub, _plotNothing, _initWithResults=True, _catchmentFileName="", _rbFileName="", _tz=0, version=cst.VERSION_WOLFHYDRO):
@@ -62,7 +66,7 @@ class Catchment:
        if(self.plotNothing == True):
            self.plotAllSub = False
        self.tz = _tz # time zone in GMT+0
-         self.time = [] #
+         self.time = None #
        self.deltaT = 0.0 # Time step of the simulation
        self.dateBegin = None # Object datetime of the beginning date of the simulation
        self.dateEnd = None # Object datetime of the end date of the simulation
@@ -77,6 +81,10 @@ class Catchment:
        self.subBasinCloud.myprop.color=getIfromRGB((255,131,250))
        self.subBasinCloud.myprop.filled=True

+         self.iP_Cloud=cloud_vertices()
+         self.iP_Cloud.myprop.color=getIfromRGB((255,131,250))
+         self.iP_Cloud.myprop.filled=True
+
        self.retentionBasinDict = {}
        self.topologyDict = {}
        self.dictIdConversion = {}
@@ -183,6 +191,8 @@ class Catchment:
        # self.topo_wolf_array = WolfArray(self.workingDir + "Characteristic_maps/Drainage_basin.b2")
        self.time_wolf_array = WolfArray(os.path.join(self.workingDir,"Characteristic_maps/Drainage_basin.time"))
        self.conv_wolf_array = WolfArray(os.path.join(self.workingDir,"Characteristic_maps/Drainage_basin.cnv"))
+         self.charact_watrshd = Watershed(self.workingDir)
+         self.set_eff_outlet_coord()

        # time array:
        self.get_time()
@@ -465,15 +475,19 @@ class Catchment:
            else:
                print("ERROR: Impossible to identify the position of the interior points! Please check your param file!")
                sys.exit()
+
+             mysubxy=wolfvertex(x,y)
+             self.iP_Cloud.add_vertex(mysubxy)

            idSorted = self.catchmentDict['dictIdConversion'][counter]
+             cur_outlet:wolfvertex = self.subBasinCloud.myvertices[idSorted-1]["vertex"]
            # self.subBasinDict[counter] = SubBasin(counter, self.time, self.workingDir, self.hyeto, x, y, idSorted)
            self.subBasinDict[counter] = SubBasin(self.dateBegin, self.dateEnd, self.deltaT, self.myModel, self.workingDir,
-                                                  _iD_interiorPoint=counter, _idSorted=idSorted, _hyeto=self.hyeto, _x=x, _y=y, _tz=self.tz, version=self._version)
+                                                  _iD_interiorPoint=counter, _idSorted=idSorted, _hyeto=self.hyeto, _x=cur_outlet.x, _y=cur_outlet.y, _tz=self.tz, version=self._version)
            self.catchmentDict['ss'+str(counter)] = self.subBasinDict[counter]

            mysubxy=wolfvertex(x,y)
-             self.subBasinCloud.add_vertex(mysubxy)
+             self.iP_Cloud.add_vertex(mysubxy)

            counter += 1
        tmpNameParam = 'Outlet Coordinates'
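
For readers of this hunk: the parameter-file coordinates (`x`, `y`) now land in `iP_Cloud`, while each `SubBasin` receives the effective outlet coordinates resolved from the watershed (filled by `set_eff_outlet_coord`, added further down in this diff). A sketch of the lookup, assuming `catch` is a loaded `Catchment` and `idSorted` a valid sorted index:

```python
# Hypothetical read-back of an effective outlet; mirrors the lookup above
vert = catch.subBasinCloud.myvertices[idSorted - 1]["vertex"]
print(vert.x, vert.y)  # coordinates actually used by the simulation
```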
@@ -494,12 +508,13 @@ class Catchment:
        # x = float(self.paramsInput.myparams[tmpNameParam]['X']['value'])
        # y = float(self.paramsInput.myparams[tmpNameParam]['Y']['value'])
        idSorted = self.catchmentDict['dictIdConversion'][counter]
+         cur_outlet:wolfvertex = self.subBasinCloud.myvertices[idSorted-1]["vertex"]
        # self.subBasinDict[counter] = SubBasin(counter, self.time, self.workingDir, self.hyeto, x, y, idSorted)
        self.subBasinDict[counter] = SubBasin(self.dateBegin, self.dateEnd, self.deltaT, self.myModel, self.workingDir,
                                              _iD_interiorPoint=counter, _idSorted=idSorted, _hyeto=self.hyeto, _x=x, _y=y, _tz=self.tz, version=self._version)

        mysubxy=wolfvertex(x,y)
-         self.subBasinCloud.add_vertex(mysubxy)
+         self.iP_Cloud.add_vertex(mysubxy)

        self.catchmentDict['ss'+str(counter)] = self.subBasinDict[counter]
        # This following line must be present to create the outFlow dictionary of the last element
@@ -1803,6 +1818,26 @@ class Catchment:
        if(show):
            plt.show()

+
+     def get_all_cumulRain(self, selection_by_iD=[]) -> tuple[np.array, list[np.array]]:
+         '''
+         Returns the time array and the list of cumulative rainfall arrays of the
+         selected subbasins (all subbasins if no selection is given).
+         '''
+         list_rain = []
+
+         if(selection_by_iD==[]):
+             for iBasin in range(1,len(self.subBasinDict)+1):
+                 curBasin:SubBasin = self.subBasinDict[iBasin]
+                 list_rain.append(curBasin.cumul_rain)
+         else:
+             for iBasin in self.subBasinDict:
+                 if iBasin in selection_by_iD:
+                     curBasin:SubBasin = self.subBasinDict[iBasin]
+                     list_rain.append(curBasin.cumul_rain)
+
+
+         return self.time, list_rain
+

    def read_measuring_stations_SPW(self, fileNameIn=""):
        """
@@ -2336,6 +2371,48 @@ class Catchment:
        return self._version


+
+     def get_sub_Nash(self, measure:SubBasin,
+                      selection_by_iD,
+                      intervals:list[tuple[datetime.datetime]]=[]):
+
+         # for element in selection_by_iD:
+         #     junctionKey = self.get_key_catchmentDict(name=element)
+         #     if junctionKey in self.subBasinDict:
+         #         curSub:SubBasin = self.subBasinDict[junctionKey]
+         #         ns = curSub.evaluate_Nash(measure=measure, intervals=intervals)
+         junctionKey = self.get_key_catchmentDict(name=selection_by_iD)
+         if junctionKey in self.catchmentDict:
+             curSub:SubBasin = self.catchmentDict[junctionKey]
+             ns = curSub.evaluate_Nash(measure=measure, intervals=intervals)
+
+         return ns
+
+
+
+     def get_sub_peak(self, selection_by_iD,
+                      intervals:list[tuple[datetime.datetime]]=[]):
+
+         junctionKey = self.get_key_catchmentDict(name=selection_by_iD)
+         if junctionKey in self.catchmentDict:
+             curSub:SubBasin = self.catchmentDict[junctionKey]
+             ns = curSub.get_peak(intervals=intervals)
+
+         return ns
+
+
+     def set_eff_outlet_coord(self):
+
+         all_nodes = [self.charact_watrshd.find_rivers(whichsub=ii+1)[0][0] for ii in range(self.nbSubBasin)]
+         for el in all_nodes:
+             el:Node_Watershed
+             mysubxy=wolfvertex(el.x,el.y)
+             self.subBasinCloud.add_vertex(mysubxy)
+
+         return 0
+
+
+
    def make_nd_array(self, c_pointer, shape, dtype=np.float64, order='C', own_data=True,readonly=False):
        arr_size = np.prod(shape[:]) * np.dtype(dtype).itemsize

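
`get_sub_Nash` delegates to `SubBasin.evaluate_Nash`, which is not shown in this diff; for reference, the textbook Nash-Sutcliffe efficiency it presumably computes is sketched below (1.0 is a perfect fit, values below 0 are worse than predicting the mean of the observations, matching the colour thresholds used later in `plot_Nash_and_peak`):

```python
import numpy as np

def nash_sutcliffe(sim: np.ndarray, obs: np.ndarray) -> float:
    # 1 - (sum of squared errors) / (variance of the observations)
    return 1.0 - np.sum((sim - obs) ** 2) / np.sum((obs - obs.mean()) ** 2)
```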
@@ -1368,9 +1368,29 @@ class Comparison:
    def plot_all_diff_cumulRain_with_lagtime(self, interval, selection_by_iD=[], writeDir=""):

        for idCatch in self.myCatchments:
-             curCatch = self.myCatchments[idCatch]['Object']
+             curCatch:Catchment = self.myCatchments[idCatch]['Object']
            graph_title = curCatch.name + " :"
-             curCatch.plot_all_diff_cumulRain_with_lagtime(interval, selection_by_iD=selection_by_iD, graph_title=graph_title, show=False, writeDir=writeDir)
+             curCatch.plot_all_diff_cumulRain_with_lagtime(interval, lagTime=0.0, selection_by_iD=selection_by_iD, graph_title=graph_title, show=False, writeDir=writeDir)
+
+
+
+     # FIXME : TODO complete this function, which plots a comparison of the cumulated volumes
+     # def plot_all_cumulRain(self, selection_by_iD:list=[], writeDir:str="", show:bool=True):
+
+     #     times = []
+     #     for idCatch in self.myCatchments:
+     #         curCatch:Catchment = self.myCatchments[idCatch]['Object']
+     #         graph_title = curCatch.name + " :"
+     #         cur_t, curVol = curCatch.get_all_cumulRain(selection_by_iD)
+
+     #         for
+     #         plt.figure()
+     #         plt.title(graph_title)
+     #         for i in range(len(curVol)):
+     #             plt.plot(cur_t, curVol[i])
+     #         plt.savefig(os.path.join(writeDir, graph_title))
+
+     #     if show : plt.show()



@@ -1533,4 +1553,91 @@ class Comparison:

        backupAddData = self.plotDict[stationKey]["Add Data"]
        # self.plotDict[stationKey]["Add Data"] = {}
-         del self.plotDict[stationKey]["Add Data"]
+         del self.plotDict[stationKey]["Add Data"]
+
+
+
+     def plot_Nash_and_peak(self, stationKey:list[str, int], measures:list[SubBasin], intervals:list=[]):
+         assert len(stationKey) == len(measures)
+
+         all_ns = {stationKey[i]: [ self.myCatchments[el]["Object"].get_sub_Nash(measures[i], stationKey[i], intervals)
+                                    for el in self.myCatchments ]
+                   for i in range(len(stationKey))
+                   }
+         print(all_ns)
+
+         all_peaks = [ [ self.myCatchments[el]["Object"].get_sub_peak(stationKey[i], intervals)
+                         for el in self.myCatchments ]
+                       for i in range(len(stationKey))
+                     ]
+
+         print(all_peaks)
+
+         meas_peak = [ measures[i].get_peak(intervals)
+                       for i in range(len(stationKey)) ]
+
+         # The following lines take the relative peak difference between simulation and measurement -> display 0.0 if the measurement is 0.0
+         isZero = np.array(meas_peak)==0
+         notZero = np.array(meas_peak)!=0
+         peak_prop = {stationKey[i]: [ list( (np.array(meas_peak[i])-np.array(el))/(np.array(meas_peak[i])+isZero[i]) *notZero[i] )
+                                       for el in all_peaks[i] ]
+                      for i in range(len(stationKey))
+                      }
+
+         print(meas_peak)
+         print(peak_prop)
+
+         all_data = [all_ns, peak_prop]
+
+         # Define all colors
+         colors_Nash = {}
+         for key, value in all_ns.items():
+             colors_Nash[key] = []
+             for i_model in range(len(value)):
+                 colors_Nash[key].append([])
+                 for j in range(len(value[i_model])):
+                     curNS = value[i_model][j]
+                     if curNS<0.0:
+                         colors_Nash[key][i_model].append("r")
+                         continue
+                     elif curNS<0.4:
+                         colors_Nash[key][i_model].append("tab:orange")
+                         continue
+                     elif curNS<0.6:
+                         colors_Nash[key][i_model].append("tab:olive")
+                         continue
+                     elif curNS<0.8:
+                         colors_Nash[key][i_model].append("tab:green")
+                         continue
+                     else:
+                         colors_Nash[key][i_model].append("g")
+
+
+         colors_peaks = {}
+         for key, value in peak_prop.items():
+             colors_peaks[key] = []
+             for i_model in range(len(value)):
+                 colors_peaks[key].append([])
+                 for j in range(len(value[i_model])):
+                     curP = value[i_model][j]
+                     if curP<0.0:
+                         colors_peaks[key][i_model].append("r")
+                     else:
+                         colors_peaks[key][i_model].append("b")
+
+         all_colors = [colors_Nash, colors_peaks]
+
+         ## Sort all stations in a particular order
+         sorted_keys = list(all_data[0].keys())
+
+         ## Str of dates
+         all_names = ["-".join([cdate[0].strftime("%d/%m/%Y"), cdate[1].strftime("%d/%m/%Y")]) for cdate in intervals]
+
+
+         ## Plot
+         nb_stations = len(stationKey)
+         type_of_model = [self.myCatchments[el]["Title"] for el in self.myCatchments]
+         type_of_data = ["Nash", r"$ \frac{Q^{s}_{max}-Q^{m}_{max}}{Q^{m}_{max}} $ "]
+
+         ph.bar_Nash_n_other(all_data, all_colors, nb_x=len(intervals), nb_data=len(type_of_model), nb_lines=nb_stations,
+                             y_titles=type_of_data, x_titles=all_names, nameModel=type_of_model, line_names=sorted_keys, toShow=True)
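
The `isZero`/`notZero` pair in `plot_Nash_and_peak` guards the relative peak error against division by zero. A standalone check with hypothetical peak values:

```python
import numpy as np

meas = np.array([120.0, 0.0, 85.0])  # hypothetical measured peaks
sim  = np.array([110.0, 5.0, 90.0])  # hypothetical simulated peaks

isZero  = meas == 0
notZero = meas != 0
# adding isZero to the denominator avoids 0/0; multiplying by notZero
# forces the result to 0.0 wherever the measurement itself is 0.0
rel_err = (meas - sim) / (meas + isZero) * notZero
print(rel_err)  # [ 0.08333333 -0.         -0.05882353]
```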
@@ -412,6 +412,8 @@ class Optimisation(wx.Frame):
        self.enable_MenuBar("Param files")
        self.enable_MenuBar("Launch")
        self.enable_MenuBar("Tools")
+         if self.debugDLL:
+             self.enable_MenuBar("Debug")

    def apply_optim(self, event, idLauncher=0):

@@ -572,14 +574,13 @@ class Optimisation(wx.Frame):
        optimFileBin = os.path.join(self.workingDir, nameTMP+".rpt.dat")

        isOk, optimFileBin = check_path(optimFileBin)
-         if isOk:
+         if isOk>0:
            optimFile = optimFileBin
-
            allParams = read_bin(self.workingDir, nameTMP+".rpt.dat", uniform_format=8)
            matrixData = np.array(allParams[-1]).astype("float")
        else:
            isOk, optimFileTxt = check_path(optimFileTxt)
-             if isOk:
+             if isOk>0:
                optimFile = optimFileTxt
                try:
                    with open(optimFile, newline = '') as fileID:
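
The `if isOk:` → `if isOk>0:` changes in this file make sense if `check_path` returns a signed status code (an assumption based on this diff): any non-zero integer, including a negative error code, is truthy in Python.

```python
# Assumed contract: check_path returns (status_code, resolved_path)
isOk = -1          # hypothetical "file not found" status
print(bool(isOk))  # True  -> "if isOk:" would wrongly take the success branch
print(isOk > 0)    # False -> "if isOk > 0:" behaves as intended
```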
@@ -1210,6 +1211,7 @@ class Optimisation(wx.Frame):
        self.optiParam.SavetoFile(None)
        self.optiParam.Reload(None)
        self.update_myParams(idLauncher)
+         # Prepare the paramPy dictionary before calibration
        self.prepare_calibration_timeDelay(stationOut=stationOut)
        # Reload the useful modules
        self.reload_hydro(idCompar=0, fromStation=stationOut, lastLevel=previousLevel, updateAll=True)
@@ -1465,7 +1467,7 @@ class Optimisation(wx.Frame):


    ## Update the dictionnaries of myParams if any changes is identified
-     # TO DO : Generalised for all type of changes and all the necessary tests -> So far just update the junction name
+     # TODO : Generalise for all types of changes and all the necessary tests -> So far just update the junction name
    def update_myParams(self, idLauncher=0):
        curCatch:Catchment

@@ -1528,6 +1530,13 @@ class Optimisation(wx.Frame):
        refCatch.plot_landuses(onlySub=False, show=True)


+     ## Apply the best parameters of an optimisation, which implies that :
+     #     - the ".rpt" file with the results of an optimisation should be present
+     #     - the optimal parameters will be replaced in their respective param files
+     #     - the timeDelays will then be updated either with :
+     #         - the Python parameters themselves
+     #         - an estimation from the runoff model
+     # Once all the optimal parameters are applied, a new simulation is launched to generate the "best" hydrograph
    def generate_semiDist_optim_simul(self, event, idOpti=1,idLauncher:int=0):

        curCatch:Catchment = self.myCases[idLauncher].refCatchment
@@ -1559,13 +1568,22 @@ class Optimisation(wx.Frame):
            curCatch.define_station_out(stationOut)
            # Activate all the useful subs and write it in the param file
            curCatch.activate_usefulSubs(blockJunction=doneList, onlyItself=onlyOwnSub)
+             # Rename the result file
+             self.optiParam.change_param("Optimizer", "fname", stationOut)
+             self.optiParam.SavetoFile(None)
+             self.optiParam.Reload(None)
+             #
            self.update_myParams(idLauncher)
+             # Preparing the dictionaries of Parameters to be updated -> not just useful for calibration here !
+             self.prepare_calibration_timeDelay(stationOut=stationOut)
            # Fill the param files with their best values
            self.apply_optim(None)
            # Simulation with the best parameters
            self.compute_distributed_hydro_model()
            # Update myHydro of all effective subbasins to get the best configuration upstream
            curCatch.read_hydro_eff_subBasin()
+             # Update timeDelays according to the time wolf_array
+             self.apply_timeDelay_dist(idOpti=idOpti, idLauncher=idLauncher, junctionKey=stationOut)
            # Update the outflows
            curCatch.update_hydro(idCompar=0)
            # All upstream elements of a reference will be fixed
@@ -1603,6 +1621,10 @@ class Optimisation(wx.Frame):
            curCatch.define_station_out(stationOut)
            # Activate all the useful subs and write it in the param file
            curCatch.activate_usefulSubs(blockJunction=doneList, onlyItself=onlyOwnSub)
+             # Rename the result file
+             self.optiParam.change_param("Optimizer", "fname", stationOut)
+             self.optiParam.SavetoFile(None)
+             self.optiParam.Reload(None)
            #
            self.update_myParams(idLauncher)
            # TO DO -> adapt all the debug_info files
@@ -1649,7 +1671,7 @@ class Optimisation(wx.Frame):
        elif format==".dat":
            optimFile = os.path.join(self.workingDir, nameTMP+".rpt.dat")
            isOk, optimFile = check_path(optimFile)
-             if isOk:
+             if isOk>0:
                allData = read_bin(self.workingDir, nameTMP+".rpt.dat", uniform_format=8)
                allData = np.array(allData).astype("float")
                matrixParam = allData[:-1,:-1]
@@ -1667,6 +1689,10 @@ class Optimisation(wx.Frame):

        myModelDict = cste.modelParamsDict[myModel]["Parameters"]

+         if self.curParams_vec is None \
+                 or len(self.curParams_vec) != self.nbParams:
+             self.curParams_vec = np.empty((self.nbParams,), dtype=ct.c_double, order='F')
+

        for i in range(self.nbParams):
            myType = self.myParams[i+1]["type"]
            if(int(myType)>0):
@@ -1684,9 +1710,7 @@ class Optimisation(wx.Frame):
                tmpWolf.OnClose(None)
                tmpWolf = None
            else:
-                 if self.curParams_vec is None:
-                     self.curParams_vec = np.empty((self.nbParams,), dtype=ct.c_double, order='F')
-
+
                self.curParams_vec[i] = params[i]
                self.update_timeDelay(i+1)
                refCatch.save_timeDelays([self.myParams[i+1]["junction_name"]])
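
Taken together, the last two hunks hoist the allocation of `curParams_vec` out of the per-parameter `else` branch, so the buffer is guaranteed to exist and to match `nbParams` before the loop runs. The array itself is a Fortran-ordered buffer of C doubles, presumably so it can be handed to the native Fortran/DLL side; a minimal sketch of that allocation:

```python
import ctypes as ct
import numpy as np

nbParams = 4  # hypothetical size
# contiguous column-major buffer of C doubles, ready for a native call
curParams_vec = np.empty((nbParams,), dtype=ct.c_double, order='F')
curParams_vec[0] = 0.5
```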
@@ -14,8 +14,8 @@ if not '_' in __builtins__:
    _=gettext.gettext

class Outlet:
-
-     def __init__(self, _retentionBasinDict, _workingDir="", time=[]):
+
+     def __init__(self, _retentionBasinDict, _workingDir="", time=None):
        print("Run Outlet")
        self.myType = ''
        self.myRbDict = _retentionBasinDict
@@ -45,7 +45,7 @@ class Outlet:

        if(fileRef!=""):
            self.myRefInterp = self.read_ref(fileRef, tz=tz)
-             if self.time!=[]:
+             if self.time is not None:
                # FIXME : if dt(reference) < dt(simulation) => use an average over the whole interval !!!!!! --> TO DO !!!!
                self.myRef = np.zeros(np.shape(self.time))
                # Check the indices useful for simulation and put the other
@@ -117,7 +117,7 @@ class Outlet:
        # return q

    def compute_forcedDam(self, time, index=-1):
-         if self.time==[] or index<0:
+         if self.time is None or index<0:
            if(time<min(self.myRefInterp.x) or time>max(self.myRefInterp.x)):
                q=0.0
            else:
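
The `time=[]` → `time=None` changes in `Outlet` follow the standard Python guidance on mutable default arguments, and `is None` / `is not None` make the "not provided" test explicit. A minimal illustration of the pitfall being avoided:

```python
def bad(values=[]):      # one shared list for every call
    values.append(1)
    return len(values)

def good(values=None):   # sentinel, fresh list per call
    if values is None:
        values = []
    values.append(1)
    return len(values)

print(bad(), bad())    # 1 2  -> state leaks between calls
print(good(), good())  # 1 1
```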