wolfhece 2.2.37__py3-none-any.whl → 2.2.39__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- wolfhece/Coordinates_operations.py +5 -0
- wolfhece/GraphNotebook.py +72 -1
- wolfhece/GraphProfile.py +1 -1
- wolfhece/MulticriteriAnalysis.py +1579 -0
- wolfhece/PandasGrid.py +62 -1
- wolfhece/PyCrosssections.py +194 -43
- wolfhece/PyDraw.py +891 -73
- wolfhece/PyGui.py +913 -72
- wolfhece/PyGuiHydrology.py +528 -74
- wolfhece/PyPalette.py +26 -4
- wolfhece/PyParams.py +33 -0
- wolfhece/PyPictures.py +2 -2
- wolfhece/PyVertex.py +32 -0
- wolfhece/PyVertexvectors.py +147 -75
- wolfhece/PyWMS.py +52 -36
- wolfhece/acceptability/acceptability.py +15 -8
- wolfhece/acceptability/acceptability_gui.py +507 -360
- wolfhece/acceptability/func.py +80 -183
- wolfhece/apps/version.py +1 -1
- wolfhece/compare_series.py +480 -0
- wolfhece/drawing_obj.py +12 -1
- wolfhece/hydrology/Catchment.py +228 -162
- wolfhece/hydrology/Internal_variables.py +43 -2
- wolfhece/hydrology/Models_characteristics.py +69 -67
- wolfhece/hydrology/Optimisation.py +893 -182
- wolfhece/hydrology/PyWatershed.py +267 -165
- wolfhece/hydrology/SubBasin.py +185 -140
- wolfhece/hydrology/climate_data.py +334 -0
- wolfhece/hydrology/constant.py +11 -0
- wolfhece/hydrology/cst_exchanges.py +76 -1
- wolfhece/hydrology/forcedexchanges.py +413 -49
- wolfhece/hydrology/hyetograms.py +2095 -0
- wolfhece/hydrology/read.py +65 -5
- wolfhece/hydrometry/kiwis.py +42 -26
- wolfhece/hydrometry/kiwis_gui.py +7 -2
- wolfhece/insyde_be/INBE_func.py +746 -0
- wolfhece/insyde_be/INBE_gui.py +1776 -0
- wolfhece/insyde_be/__init__.py +3 -0
- wolfhece/interpolating_raster.py +366 -0
- wolfhece/irm_alaro.py +1457 -0
- wolfhece/irm_qdf.py +889 -57
- wolfhece/lifewatch.py +6 -3
- wolfhece/picc.py +124 -8
- wolfhece/pyLandUseFlanders.py +146 -0
- wolfhece/pydownloader.py +2 -1
- wolfhece/pywalous.py +225 -31
- wolfhece/toolshydrology_dll.py +149 -0
- wolfhece/wolf_array.py +63 -25
- {wolfhece-2.2.37.dist-info → wolfhece-2.2.39.dist-info}/METADATA +3 -1
- {wolfhece-2.2.37.dist-info → wolfhece-2.2.39.dist-info}/RECORD +53 -42
- {wolfhece-2.2.37.dist-info → wolfhece-2.2.39.dist-info}/WHEEL +0 -0
- {wolfhece-2.2.37.dist-info → wolfhece-2.2.39.dist-info}/entry_points.txt +0 -0
- {wolfhece-2.2.37.dist-info → wolfhece-2.2.39.dist-info}/top_level.txt +0 -0
wolfhece/hydrology/Catchment.py
CHANGED
@@ -193,10 +193,7 @@ class Catchment:
             sys.exit()

         # Get the number of subbasins
-        try:
-            self.nbSubBasin = int(self.paramsInput.myparams['Semi distributed model']['How many?'][key_Param.VALUE]) + 1 # +1 because the outlet is also counted
-        except:
-            self.nbSubBasin = int(self.paramsInput.myparams_default['Semi distributed model']['How many?'][key_Param.VALUE]) + 1 # +1 because the outlet is also counted
+        self.nbSubBasin = int(self.paramsInput[('Semi distributed model', 'How many?')]) + 1 # +1 because the outlet is also counted

         # Fill the dictionary containing the id of the sorted subbasin returned by the Fortran code
         self.init_dictIdConversion(self.workingDir)
@@ -217,7 +214,7 @@ class Catchment:
         # TO DO: Check how the rain is read for the first time

         # Get the hydrology model used (1-linear reservoir, 2-VHM, 3-Unit Hydrograph)
-        self.myModel =
+        self.myModel = self.paramsInput[('Model Type', 'Type of hydrological model')]

         # Save the stations SPW characteristics in a dictionnary
         self.read_measuring_stations_SPW()
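The two hunks above (and `get_eff_subBasin` further down) replace reads of `paramsInput.myparams[...][key_Param.VALUE]`, with a fallback to `myparams_default`, by tuple-style indexing on the `Wolf_Param` container. A minimal sketch of that access pattern, assuming `Wolf_Param` is exposed by `wolfhece.PyParams` and with a purely illustrative parameter file name:

```python
from wolfhece.PyParams import Wolf_Param  # import path assumed

# Load an existing .param file the same way other hunks in this diff do:
# create the object without reading, then call ReadFile() on a path.
params = Wolf_Param(to_read=False, toShow=False)
params.ReadFile("simul_gen.param")  # illustrative file name

# 2.2.39 style: index the container with a (group, key) tuple.
nb_sub = int(params[('Semi distributed model', 'How many?')]) + 1  # +1 for the outlet
model_type = params[('Model Type', 'Type of hydrological model')]
```

The tuple lookup replaces the explicit `myparams` / `myparams_default` fallback that the removed `try/except` block handled by hand.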
@@ -225,123 +222,128 @@ class Catchment:



+        try:

+            # Construction of the Catchment
+            # ------------------------------
+
+            # 1) 1st Iteration: Object creation
+
+            # Iterate through the Input params dictionnary
+            self.create_ObjectsInCatchment()
+
+            self.charact_watrshd.set_names_subbasins([(cur.iDSorted, cur.name) for cur in self.subBasinDict.values()])
+
+            # self.add_hyetoToDict()
+
+
+            # 2) 2nd Iteration: Link between objects
+            self.link_objects() # This procedure also creates the first layer of the topo tree by identifying the source ss-basins
+
+            """
+            The topo tree is organised by level:
+            - The first level contains should only contain subbasins which don't have any input flows.
+            Therefore, they already contains all the information to build their hydrograph
+            - The second and upper levels can contain either RB or subbasins with input flows.
+
+            """
+
+            # 3) 3rd Iteration: Complete the tree
+            self.complete_topoDict()
+            if(not(self.plotNothing)):
+                flowchart = graphviz.Digraph("Test")
+                flowchart.format = 'png'
+                self.draw_flowChart(flowchart)
+                # flowchart.view()
+                flowchart.save(directory=self.workingDir)
+                flowchart.render(os.path.join(self.workingDir,"Topology"), view=False)
+                # Hello! To DO !!! the following lines problem
+                # topologyImage = Image.open(BytesIO(flowchart.pipe(format="png")))
+                # # topologyImage = Image.open(os.path.join(self.working
+                # # Dir,"Test.png"))
+
+                # # self.topoMapViewer = WolfMapViewer(None, "Topology", treewidth=0)
+                # ratio = topologyImage.width/topologyImage.height
+                # # self.topoMapViewer = WolfMapViewer(None, "Topology",w=topologyImage.width,h=topologyImage.height,treewidth=0)
+                # self.topoMapViewer = WolfMapViewer(None, "Topology",w=4000*ratio,treewidth=0)
+                # self.topoMapViewer.add_object('Other', newobj=genericImagetexture('Other','Topology',self.topoMapViewer,xmin=0, xmax=topologyImage.width,
+                # ymin=0, ymax=topologyImage.height, width=topologyImage.width, height=topologyImage.height,imageObj=topologyImage),
+                # ToCheck=False, id='Topology')
+                # self.topoMapViewer.Autoscale()
+                # self.topoMapViewer.OnPaint(None)
+                # self.topoMapViewer.Show()
+                pass
+
+            # isNotCorrect = True
+            # while(isNotCorrect):
+            # print("Is this Flowchart ok?")
+            # print("Y-Yes, N-No")
+            # answer = input("Your answer:")
+            # if(answer=="N" or answer=="No"):
+            # print("The postprocess was stopped by the user!")
+            # sys.exit()
+            # elif(answer=="Y" or answer=="Yes"):
+            # isNotCorrect = False
+            # else:
+            # print("ERROR: Please enter the correct answer!")
+
+            # r = wx.MessageDialog(None, "Is this Flowchart ok?", "Topology verification", wx.YES_NO | wx.NO_DEFAULT | wx.ICON_QUESTION).ShowModal()
+
+            # if r != wx.ID_YES:
+            # print("The postprocess was stopped by the user!")
+            # sys.exit()
+
+            # Definition of junctionOut by default the last junction/element in the topology tree
+            self.junctionOut = self.get_lastJunctionKey()

-
-
-
-        # 1) 1st Iteration: Object creation
-
-        # Iterate through the Input params dictionnary
-        self.create_ObjectsInCatchment()
-
-        self.charact_watrshd.set_names_subbasins([(cur.iDSorted, cur.name) for cur in self.subBasinDict.values()])
-
-        # self.add_hyetoToDict()
-
-
-        # 2) 2nd Iteration: Link between objects
-        self.link_objects() # This procedure also creates the first layer of the topo tree by identifying the source ss-basins
+            # Associate objects linked to timeDelay in all RB objects
+            tmp = self.find_all_timeDelayObj()

-
-
-
-        Therefore, they already contains all the information to build their hydrograph
-        - The second and upper levels can contain either RB or subbasins with input flows.
+            # ===============================
+            # Computation of the hydrographs
+            self.construct_hydro()

-
+            # Reading of the rain for each subbasin
+            try:
+                self.add_rainToAllObjects()
+            except:
+                print("ERROR! The rain couldn't be created. This might be induced by the lack of the dbf file!")
+                pass

-
-
-        if(not(self.plotNothing)):
-            flowchart = graphviz.Digraph("Test")
-            flowchart.format = 'png'
-            self.draw_flowChart(flowchart)
-            # flowchart.view()
-            flowchart.save(directory=self.workingDir)
-            flowchart.render(os.path.join(self.workingDir,"Topology"), view=False)
-            # Hello! To DO !!! the following lines problem
-            # topologyImage = Image.open(BytesIO(flowchart.pipe(format="png")))
-            # # topologyImage = Image.open(os.path.join(self.working
-            # # Dir,"Test.png"))
-
-            # # self.topoMapViewer = WolfMapViewer(None, "Topology", treewidth=0)
-            # ratio = topologyImage.width/topologyImage.height
-            # # self.topoMapViewer = WolfMapViewer(None, "Topology",w=topologyImage.width,h=topologyImage.height,treewidth=0)
-            # self.topoMapViewer = WolfMapViewer(None, "Topology",w=4000*ratio,treewidth=0)
-            # self.topoMapViewer.add_object('Other', newobj=genericImagetexture('Other','Topology',self.topoMapViewer,xmin=0, xmax=topologyImage.width,
-            # ymin=0, ymax=topologyImage.height, width=topologyImage.width, height=topologyImage.height,imageObj=topologyImage),
-            # ToCheck=False, id='Topology')
-            # self.topoMapViewer.Autoscale()
-            # self.topoMapViewer.OnPaint(None)
-            # self.topoMapViewer.Show()
-            pass
-
-        # isNotCorrect = True
-        # while(isNotCorrect):
-        # print("Is this Flowchart ok?")
-        # print("Y-Yes, N-No")
-        # answer = input("Your answer:")
-        # if(answer=="N" or answer=="No"):
-        # print("The postprocess was stopped by the user!")
-        # sys.exit()
-        # elif(answer=="Y" or answer=="Yes"):
-        # isNotCorrect = False
-        # else:
-        # print("ERROR: Please enter the correct answer!")
-
-        # r = wx.MessageDialog(None, "Is this Flowchart ok?", "Topology verification", wx.YES_NO | wx.NO_DEFAULT | wx.ICON_QUESTION).ShowModal()
-
-        # if r != wx.ID_YES:
-        # print("The postprocess was stopped by the user!")
-        # sys.exit()
-
-        # Definition of junctionOut by default the last junction/element in the topology tree
-        self.junctionOut = self.get_lastJunctionKey()
-
-        # Associate objects linked to timeDelay in all RB objects
-        tmp = self.find_all_timeDelayObj()
-
-        # ===============================
-        # Computation of the hydrographs
-        self.construct_hydro()
-
-        # Reading of the rain for each subbasin
-        try:
-            self.add_rainToAllObjects()
-        except:
-            print("ERROR! The rain couldn't be created. This might be induced by the lack of the dbf file!")
-            pass
+            # Contruct the effective subbasins
+            self.get_eff_subBasin()

-
-
+            # Construct the surface drained by anthropogenic modules
+            self.construct_surfaceDrainedHydro_RB()

-
-
+            # Read the landuses of all subbasins
+            self.read_all_landuses()

-
-
+            # ==============================
+            # Save in excel file all the hydrographs

-
-
+            # Hello! To uncomment!!!!
+            # self.save_ExcelFile()
+            # self.save_ExcelFile_noLagTime()
+            # self.save_ExcelFile_V2()
+            # self.save_characteristics()

-
-
-
-
-
+            # Plot the of the subbasin or RB with level above 1 in the topo tree
+            self.plotNothing = True
+            if(not(self.plotNothing)):
+                self.plot_intersection()
+            # Plot all the subbasin hydrographs and hyetograph
+            if(self.plotAllSub):
+                self.plot_allSub()

-
-        self.plotNothing = True
-        if(not(self.plotNothing)):
-            self.plot_intersection()
-        # Plot all the subbasin hydrographs and hyetograph
-        if(self.plotAllSub):
-            self.plot_allSub()
+            # self.charact_watrshd.impose_sorted_index_subbasins([cur.iDSorted for cur in self.subBasinDict.values()])

-
+            self._fill_cloud_retentionbasin()

-
+        except:
+            logging.error(_("An error occured during the creation of the Catchment object."))
+            logging.info(_("If you are in a preprocessing step, it could be a normal issue."))
+            return

     def get_subBasin(self, id_sorted_or_name:int | str) -> SubBasin:
         """
@@ -461,6 +463,10 @@ class Catchment:
         """
         fileNameInteriorPoints = os.path.join(workingDir,'simul_sorted_interior_points.txt')

+        if not os.path.exists(fileNameInteriorPoints):
+            logging.error("The file simul_sorted_interior_points.txt is not present in the working directory!")
+            return
+
         # !!! Add the case the file is not present.
         with open(fileNameInteriorPoints) as fileID:
             data_reader = csv.reader(fileID, delimiter='\t')
@@ -1415,21 +1421,21 @@ class Catchment:
         # Extract the column names according to their sorted subbasin indices
         col_time = ["Time [s]"]
         col_subs = [cur_sub.name for cur_sub in sorted(self.subBasinDict.values(), key=lambda sub: sub.iDSorted)]
-        col_anth = [" : ".join([cur_anth.name, name])
-                    for cur_anth in self.retentionBasinDict.values()
+        col_anth = [" : ".join([cur_anth.name, name])
+                    for cur_anth in self.retentionBasinDict.values()
                     for name in cur_anth.get_outFlow_names()]
-
-
+
+
         all_data = np.concatenate((time, np.array(data_subs).T, np.array(data_anth).T), axis=1)
         all_columns = "\t".join(col_time+col_subs+col_anth)
-
+
         # Save the data in a text file
         np.savetxt(cur_file, all_data, delimiter='\t', newline="\n", header=all_columns, fmt=format)


     def save_own_hydro_for_2D(self, fileName:str="HydrosSub_2_simul2D.txt", directory:str="", format:str='%1.5e'):
         """
-        Saves subbasins' hydrographs from their own drained surface only (not taking into account the
+        Saves subbasins' hydrographs from their own drained surface only (not taking into account the
         surface drained by the inlets or upstream elements) to a text file that can be read and used in a 2D model.

         Args:
@@ -1451,11 +1457,11 @@ class Catchment:
         # Extract the column names according to their sorted subbasin indices
         col_time = ["Time [s]"]
         col_subs = [cur_sub.name for cur_sub in sorted(self.subBasinDict.values(), key=lambda sub: sub.iDSorted)]
-
-
+
+
         all_data = np.concatenate((time, np.array(data_subs).T), axis=1)
         all_columns = "\t".join(col_time+col_subs)
-
+
         # Save the data in a text file
         np.savetxt(cur_file, all_data, delimiter='\t', newline="\n", header=all_columns, fmt=format)

@@ -1525,34 +1531,40 @@ class Catchment:

         # Read the DBF file to save all the "Ordered To Nb" in the dictionnary
         fileName = os.path.join(self.workingDir, "Whole_basin/Rain_basin_geom.vec.dbf")
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+        if self.type_of_rain == cst.source_municipality_unit_hyeto:
+            try:
+                if(os.path.exists(fileName)):
+                    dbfDict = self.read_dbfFile(fileName)
+                    self.hyetoDict['Ordered To Nb'] = {}
+                    self.hyetoDict['Hyetos'] = {}
+
+                    for i in range(len(dbfDict.records)):
+                        iDsorted = i + 1
+                        iDHyeto = dbfDict.records[i]['data']
+                        self.hyetoDict['Ordered To Nb'][iDsorted] = iDHyeto
+
+                    # Read all the .hyeto file to save the time and rain arrays
+                    beginFileName = os.path.join(self.workingDir,"Whole_basin/")
+                    endFileName = "rain.hyeto"
+                    for element in self.hyetoDict['Ordered To Nb']:
+                        nbToRead = self.hyetoDict['Ordered To Nb'][element]
+                        fileName = os.path.join(beginFileName, nbToRead+ endFileName)
+                        isOk, fileName = check_path(fileName, applyCWD=True)
+                        if isOk<0:
+                            # print("WARNING: could not find any dbf file! ")
+                            time_mod.sleep(.5)
+                            return
+                        [time, rain] = self.get_hyeto(fileName)
+                        self.hyetoDict['Hyetos'][element] = {}
+                        self.hyetoDict['Hyetos'][element]['time'] = time
+                        self.hyetoDict['Hyetos'][element]['rain'] = rain
+            except:
+                print("WARNING: problem in some dbf file! ")
+                time_mod.sleep(.5)
+        else:
+            print("WARNING: could not find any dbf file! ")
+            time_mod.sleep(.5)


     def add_rainToAllObjects(self):
@@ -1568,6 +1580,11 @@ class Catchment:
             indexName = txt + str(i)
             for element in self.topologyDict[indexName]:
                 timeTest = self.topologyDict[indexName][element].add_rain(self.workingDir, tzDelta=datetime.timedelta(hours=self.tz))
+
+                if timeTest is None:
+                    print(f"WARNING: No rain data found for {element} in {indexName}.")
+                    continue
+
                 if not(np.array_equal(timeTest,self.time)):
                     print("ERROR: the time arrays are different!")
                     if type(self.topologyDict[indexName][element]) == SubBasin:
@@ -1711,7 +1728,22 @@ class Catchment:
                 downName += ' [sub'+sortNodeID+']'
                 flowchart.edge(nodeName, downName)

+    def save_flow_chart(self, filename="Topology", directory="") -> str:
+        """ This procedure saves the flowchart representing the topo tree
+
+        :param filename: Name of the file to save the flowchart as.
+        :param directory: Directory where the flowchart will be saved. If not provided, it will be saved in the working directory.
+        """
+
+        flowchart = graphviz.Digraph("Test")
+        flowchart.format = 'png'
+        self.draw_flowChart(flowchart)
+        # flowchart.view()
+        flowchart.save(directory=self.workingDir)
+        flowchart.render(os.path.join(self.workingDir,"Topology"), view=False)

+        logging.info("Flowchart saved as: " + os.path.join(self.workingDir,"Topology.png"))
+        return os.path.join(self.workingDir,"Topology.png")

     def make_stat_distributionOfslope(self):
         """ This procedure plot the stat distribution of slopes.
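A short usage sketch for the new `save_flow_chart` helper added above; `my_catchment` is a hypothetical, already constructed `Catchment` object:

```python
# Render the topology flowchart with Graphviz and get back the path of the PNG.
png_path = my_catchment.save_flow_chart()
print("Topology flowchart written to:", png_path)  # <workingDir>/Topology.png
```

Note that, as added in this hunk, the body still renders a file named `Topology` inside `self.workingDir`; the `filename` and `directory` arguments are accepted but not yet used.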
@@ -1927,7 +1959,7 @@ class Catchment:
         # paramsInput.ApplytoMemory(None)
         paramsInput.SavetoFile(None)

-
+
     def _correct_Umax_from_old_model(self, adapt_with_rain:bool=True, k_opt:float=0.0, U_max_opt=0.0):
         fileName = "simul_soil.param"
         which="Umax"
@@ -1940,7 +1972,7 @@ class Catchment:

         paramsInput = Wolf_Param(to_read=False,toShow=False)
         paramsInput.ReadFile(fileToModif)
-
+
         if U_max_opt>0.0:
             maxRain = U_max_opt
         elif adapt_with_rain:
@@ -1963,7 +1995,7 @@ class Catchment:
                 continue
             else:
                 k = k_opt
-
+
             U_max = maxRain/k

             paramsInput.change_param("Distributed production model parameters", which, U_max)
@@ -2065,16 +2097,16 @@ class Catchment:

     def get_eff_subBasin(self):

-        nbIP =
+        nbIP = self.paramsInput[("Semi distributed model", "How many?")]
         try:
-            allSub = int(self.paramsInput
+            allSub = int(self.paramsInput[("Semi distributed model", "Compute all?")])
         except:
             allSub = 1

         if(allSub==0 or allSub==-1):
             for i in range(1,nbIP+1):
-                isActive = self.paramsInput
-                if(isActive==
+                isActive = self.paramsInput[(f"Interior point {i}", "Active")]
+                if(isActive==1):
                     self.myEffSortSubBasins.append(self.dictIdConversion[i])
                     self.myEffSubBasins.append(i)
         else:
@@ -2597,14 +2629,20 @@ class Catchment:

     def set_eff_outlet_coord(self):

-
-
-
-
-
-
-
-
+        try:
+            all_nodes = [self.charact_watrshd.find_rivers(whichsub=ii+1) for ii in range(self.nbSubBasin)]
+            if len(all_nodes) == 0:
+                logging.warning("No effective outlet coordinates found in the watershed. Please check the watershed data.")
+                return -1
+            all_nodes = [sublist[0][0] for sublist in all_nodes if len(sublist[0]) > 0]
+            for el in all_nodes:
+                el:Node_Watershed
+                mysubxy=wolfvertex(el.x,el.y)
+                self.subBasinCloud.add_vertex(mysubxy)
+            return 0
+        except Exception as e:
+            logging.error(f"Error in setting effective outlet coordinates: {e}")
+            return -1

     def update_charact_watrshd(self):
         if self.charact_watrshd.to_update_times:
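The rewritten `set_eff_outlet_coord` above now returns a status code instead of leaving failures unhandled: `0` when the outlet vertices were added to `subBasinCloud`, `-1` when no river nodes are found or an exception is raised. A minimal calling sketch, with `my_catchment` again a hypothetical, already built instance:

```python
status = my_catchment.set_eff_outlet_coord()
if status == -1:
    # The warning/error has already been logged by the method itself.
    print("Effective outlet coordinates could not be set; check the log output.")
```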
@@ -2934,7 +2972,35 @@ class Catchment:
             paramsInput.change_param(group, key, values[index])
         paramsInput.SavetoFile(None)
         paramsInput.Reload(None)
-
+
+
+    def get_outflow(self, station_out:str="") -> np.ndarray:
+        """
+        Set the initial conditions for the outflow of the outlet defined by junctionOut.
+
+        Args:
+            keys (list[str]): List of sub-basin names.
+            values (np.ndarray): Corresponding outflow values for each sub-basin.
+        """
+        if station_out == "":
+            key_module = self.junctionOut
+        else:
+            key_module = self.get_key_catchmentDict(station_out)
+            if station_out is None:
+                logging.error("ERROR : The station name given in 'get_outflow' is not correct ! " + station_out)
+                return None
+        cur_module = self.catchmentDict[key_module]
+        return cur_module.get_outFlow()
+
+
+    @property
+    def type_of_rain(self) -> int:
+        rain = self.paramsInput.get_param("Atmospheric data", "Type of rainfall")
+        if rain == cst.source_custom:
+            rain = self.paramsInput.get_param("Custom inputs", "Rain data")
+
+        return rain
+

     def make_nd_array(self, c_pointer, shape, dtype=np.float64, order='C', own_data=True,readonly=False):
         arr_size = np.prod(shape[:]) * np.dtype(dtype).itemsize
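The last Catchment.py hunk adds a `get_outflow` accessor and a `type_of_rain` property. A hedged usage sketch (`my_catchment` is a hypothetical instance and the station name is purely illustrative):

```python
# Outflow hydrograph at the default outlet (self.junctionOut) ...
q_outlet = my_catchment.get_outflow()  # np.ndarray

# ... or at a named station, resolved through get_key_catchmentDict().
q_station = my_catchment.get_outflow(station_out="Some station")  # illustrative name

# Rainfall source read from the parameter file; compared elsewhere in this diff
# against constants such as cst.source_municipality_unit_hyeto and cst.source_custom.
rain_type = my_catchment.type_of_rain
```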
wolfhece/hydrology/Internal_variables.py
CHANGED
@@ -26,6 +26,7 @@ class Internal_Variable:
     file:str
     type_of_var:int
     linked_param:int
+    id:int


     def get_time_serie(self, directory, prefix_file:str="",
@@ -135,6 +136,7 @@ class Param_to_Activate:
         """
         Activate the parameters for the internal variables.
         """
+        # If the key is None, it means that it either always written or it is just not an internal variable but directly an exit
         if self.key is None or self.group is None:
             return

@@ -155,13 +157,16 @@ class Param_to_Activate:
         param_file.change_param(self.group, self.key, 1)
         param_file.SavetoFile(None)
         param_file.Reload(None)
-
-        self.deactivate(directory, prefix_file)
+

     def deactivate(self, directory:str, prefix_file:str=""):
         """
         Deactivate the parameters for the internal variables.
         """
+        # If the key is None, it means that it either always written or it is just not an internal variable but directly an exit
+        if self.key is None or self.group is None:
+            return
+
         new_prefix = self._build_prefix(prefix_file)
         filename = ".".join([new_prefix,"param"])
         param_filename = join(directory, filename)
@@ -281,3 +286,39 @@ class Group_to_Activate:
             all_linked_params.update(param.get_linked_params())

         return all_linked_params
+
+    def get_all_variables_names_from_ids(self, ids:list[int],
+                                         type_of_var:int=ALL_VAR) -> tuple[list[str], list[int]]:
+        """
+        Get the names of the internal variables from their IDs.
+
+        :param ids: List of IDs of the internal variables.
+        :return: List of names of the internal variables.
+        """
+        all_names = []
+        kept_indices = []
+        for param in self.all_params:
+            for var in param.all_variables:
+                if var.id in ids and (var.type_of_var == type_of_var or type_of_var == ALL_VAR):
+                    all_names.append(var.name)
+                    kept_indices.append(ids.index(var.id))
+
+        return all_names, kept_indices
+
+    def get_dict_from_matrix_and_ids(self, matrix:np.ndarray, ids:list[int],
+                                     type_of_var:int=ALL_VAR) -> dict[str, np.ndarray]:
+        """
+        Get a dictionary from a matrix and a list of IDs.
+
+        :param matrix: Matrix containing the data.
+        :param ids: List of IDs corresponding to the data.
+        :return: Dictionary with IDs as keys and data as values.
+        """
+        # Check the dimensions of the matrix and the length of the IDs
+        if matrix.shape[1] != len(ids):
+            logging.error("Matrix length does not match IDs length!")
+            return {}
+        # Extract the the names of the internal variables from the IDs
+        names, kept_indices = self.get_all_variables_names_from_ids(ids, type_of_var=type_of_var)
+
+        return {key: matrix[:,i] for key, i in zip(names, kept_indices)}