NREL-erad 0.0.0a0__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. erad/__init__.py +1 -0
  2. erad/constants.py +20 -20
  3. erad/cypher_queries/load_data_v1.cypher +211 -211
  4. erad/data/World_Earthquakes_1960_2016.csv +23410 -23410
  5. erad/db/assets/critical_infras.py +170 -170
  6. erad/db/assets/distribution_lines.py +101 -101
  7. erad/db/credential_model.py +20 -20
  8. erad/db/disaster_input_model.py +23 -23
  9. erad/db/inject_earthquake.py +52 -52
  10. erad/db/inject_flooding.py +53 -53
  11. erad/db/neo4j_.py +162 -162
  12. erad/db/utils.py +13 -13
  13. erad/exceptions.py +68 -68
  14. erad/metrics/check_microgrid.py +208 -208
  15. erad/metrics/metric.py +178 -178
  16. erad/programs/backup.py +61 -61
  17. erad/programs/microgrid.py +44 -44
  18. erad/scenarios/abstract_scenario.py +102 -102
  19. erad/scenarios/common.py +92 -92
  20. erad/scenarios/earthquake_scenario.py +161 -161
  21. erad/scenarios/fire_scenario.py +160 -160
  22. erad/scenarios/flood_scenario.py +493 -493
  23. erad/scenarios/flows.csv +671 -0
  24. erad/scenarios/utilities.py +75 -75
  25. erad/scenarios/wind_scenario.py +89 -89
  26. erad/utils/ditto_utils.py +252 -252
  27. erad/utils/hifld_utils.py +147 -147
  28. erad/utils/opendss_utils.py +357 -357
  29. erad/utils/overpass.py +76 -76
  30. erad/utils/util.py +178 -178
  31. erad/visualization/plot_graph.py +218 -218
  32. {NREL_erad-0.0.0a0.dist-info → nrel_erad-1.0.0.dist-info}/METADATA +65 -61
  33. nrel_erad-1.0.0.dist-info/RECORD +42 -0
  34. {NREL_erad-0.0.0a0.dist-info → nrel_erad-1.0.0.dist-info}/WHEEL +1 -2
  35. {NREL_erad-0.0.0a0.dist-info → nrel_erad-1.0.0.dist-info/licenses}/LICENSE.txt +28 -28
  36. NREL_erad-0.0.0a0.dist-info/RECORD +0 -42
  37. NREL_erad-0.0.0a0.dist-info/top_level.txt +0 -1
erad/scenarios/flood_scenario.py
@@ -1,494 +1,494 @@
+
+ from click import format_filename
+ import matplotlib.pyplot as plt
+ from erad.scenarios.utilities import ProbabilityFunctionBuilder, GeoUtilities
+ from erad.constants import ELEVATION_RASTER_FILE, DATA_FOLDER, FLOOD_HISTORIC_SHP_PATH
+ from shapely.geometry import MultiPolygon, Point, LineString, Polygon
+ from erad.scenarios.abstract_scenario import BaseScenario
+ from datetime import datetime, timedelta
+ from scipy.optimize import curve_fit
+ from scipy.spatial import Delaunay
+ import geopandas as gpd
+ import pandas as pd
+ import numpy as np
+ import stateplane
+ import xmltodict
+ import itertools
+ import requests
+ import rasterio
+ import tarfile
+ import time
+ import sys
+ import os
+
+ from erad.scenarios.common import AssetTypes, asset_list
+
+ #plt.ion()
+ class FlooadScenario(BaseScenario, GeoUtilities):
+     """Flood scenario class. Extends BaseScenario and GeoUtilities.
+
+     Attributes:
+         poly (MultiPolygon): Multipolygon enclosing the affected region
+         probability_model (dict): Dictionary mapping asset types to probability functions
+         timestamp (datetime): Scenario occurrence time
+         kwargs (dict): Additional parameters relevant for a particular scenario type
+     """
+
+     fragility_curves = {
+         # Electrical Grid Risk Assessment Against Flooding in Barcelona and Bristol Cities
+         AssetTypes.substation.name: ProbabilityFunctionBuilder("norm", [1.5, 0.2]),
+         AssetTypes.solar_panels.name: ProbabilityFunctionBuilder("norm", [0.04, .01]),
+         # AssetTypes.buried_lines.name: ProbabilityFunctionBuilder("norm", [80, 10]),
+         # estimated
+         AssetTypes.wind_turbines.name: ProbabilityFunctionBuilder("lognorm", [0.8, 3, 2]),
+         AssetTypes.battery_storage.name: ProbabilityFunctionBuilder("norm", [0.04, .01]),
+         # estimated
+         AssetTypes.transmission_poles.name: ProbabilityFunctionBuilder("lognorm", [0.8, 5, 3]),
+         # Tsunami Fragility Functions for Road and Utility Pole Assets Using Field Survey and Remotely Sensed Data from the 2018 Sulawesi Tsunami, Palu, Indonesia
+         AssetTypes.distribution_poles.name: ProbabilityFunctionBuilder("lognorm", [1, 0.2, 1]),
+         AssetTypes.transmission_overhead_lines.name: ProbabilityFunctionBuilder("norm", [0.04, .01]),
+         AssetTypes.distribution_overhead_lines.name: ProbabilityFunctionBuilder("norm", [0.04, .01]),
+     }
+
+
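+     # Builds the scenario either from the live NWS AHPS feed
+     # (kwargs['type'] == 'live') or from previously saved flow, level,
+     # and gauge CSV files passed through kwargs.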
+     def __init__(self, poly: MultiPolygon, probability_model: dict, timestamp: datetime, **kwargs) -> None:
+         super(FlooadScenario, self).__init__(poly, probability_model, timestamp, **kwargs)
+         self.kwargs = kwargs
+         self.samples = 100
+         self.use_api = True
+
+         # self.map_elevation()
+         if 'type' in kwargs and kwargs['type'] == 'live':
+             self.flows = pd.DataFrame()
+             self.levels = pd.DataFrame()
+             self.real_time()
+         else:
+             from shapely import wkt
+             self.flows = pd.read_csv(kwargs['file_flow'], index_col=0, parse_dates=True)
+             self.levels = pd.read_csv(kwargs['file_levels'], index_col=0, parse_dates=True)
+             df = pd.read_csv(kwargs['file_gaugues'])
+             df['geometry'] = df['geometry'].apply(wkt.loads)
+             crs = {'init': 'epsg:4326'}
+             self.gauges = gpd.GeoDataFrame(df).set_geometry('geometry')
+             # self.gauges = gpd.read_file(kwargs['file_gaugues'])
+             pass
+         # self.plot = DynamicUpdate(self.X, self.Y, self.Z, self.levels)
+         return
+
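+     # Placeholder constructor for historical flood zones looked up by
+     # DFIRM code; the shapefile is filtered, but the model itself has not
+     # been implemented yet.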
+     @classmethod
+     def from_historical_flood_by_code(cls, flood_code: str, probability_function: dict = None):
+         data_file = os.path.join(DATA_FOLDER, FLOOD_HISTORIC_SHP_PATH)
+         assert os.path.exists(data_file), f"The data file {data_file} was not found"
+         flood_data = gpd.read_file(data_file)
+         flood_data = flood_data[flood_data['DFIRM_ID'] == flood_code]
+         raise NotImplementedError("Model has not been implemented")
+
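+     # Downloads current gauge observations, interpolates gaps, resamples
+     # the flow and level series to 15-minute steps, and caches them to CSV.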
+     def real_time(self):
+         self.gauges = self.get_flow_measurements(0)
+         self.gauges.to_csv("gauges.csv")
+         flows, levels = self.gauges_in_polygon()
+         for c in flows.columns:
+             flows[c] = flows[c].interpolate(method='polynomial', order=2)
+             levels[c] = levels[c].interpolate(method='polynomial', order=2)
+         flows = flows.ffill(axis=0)
+         levels = levels.ffill(axis=0)
+         flows = flows.bfill(axis=0)
+         levels = levels.bfill(axis=0)
+         self.flows = flows.resample("15T").interpolate()
+         self.levels = levels.resample("15T").interpolate()
+         self.flows.to_csv("flows.csv")
+         self.levels.to_csv("levels.csv")
+
+     @property
+     def valid_timepoints(self):
+         return list(self.flows.index)
+
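+     # Queries the AHPS hydrograph service for every gauge and merges the
+     # observed flow ('secondary') and stage ('primary') series into two
+     # frames indexed by timestamp.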
+     def gauges_in_polygon(self):
+         all_flows = pd.DataFrame()
+         all_levels = pd.DataFrame()
+         for idx, gauge in self.gauges.iterrows():
+             flow = pd.DataFrame()
+             level = pd.DataFrame()
+             gauge_id = gauge['GaugeLID']
+             url = f'https://water.weather.gov/ahps2/hydrograph_to_xml.php?gage={gauge_id}&output=xml'
+             response = requests.get(url)
+             #print(response.text)
+             data = xmltodict.parse(response.text)
+             times_series_data = data['site']['observed']['datum']
+             time_points = []
+             flow_points = []
+             level_points = []
+             for data_point in times_series_data:
+                 time_points.append(data_point['valid']['#text'])
+                 flow_points.append(float(data_point['secondary']['#text']))
+                 level_points.append(float(data_point['primary']['#text']))
+             index = pd.to_datetime(time_points)
+             flow.index = index
+             flow[gauge_id] = flow_points
+             level.index = index
+             level[gauge_id] = level_points
+             print(flow)
+             print(level)
+             all_flows = flow.merge(all_flows, right_index=True, left_index=True, how="outer")
+             all_levels = level.merge(all_levels, right_index=True, left_index=True, how="outer")
+         return all_flows, all_levels
+
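+     # Downloads the national AHPS gauge shapefile (observed or N-day
+     # forecast), extracts it, and keeps only gauges inside the scenario
+     # polygon.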
+     def get_flow_measurements(self, forecast_day: int = 0):
+         if forecast_day == 0:
+             forecast_tag = "obs"
+         else:
+             # Forecast layers are tagged by hour with zero padding
+             # (fcst_f024, fcst_f048, ...), so pad to three digits.
+             forecast_tag = f"fcst_f{forecast_day*24:03d}"
+
+         url = f"https://water.weather.gov/ahps/download.php?data=tgz_{forecast_tag}"
+         r = requests.get(url, allow_redirects=True)
+         date = str(datetime.now()).replace(":", "_").replace(".", "_").replace(" ", "_")
+         shape_file_path = os.path.join(DATA_FOLDER, f'flood_shapefile_{date}.tgz')
+         open(shape_file_path, 'wb').write(r.content)
+         file_path = self.extract(shape_file_path, forecast_tag)
+         data = gpd.read_file(file_path)
+         if not data.empty:
+             water_info = []
+             for idx, row in data.iterrows():
+                 point = Point(row['Latitude'], row['Longitude'])
+                 if self.multipolygon.contains(point):
+                     water_info.append(row)
+             return gpd.GeoDataFrame(water_info)
+         else:
+             raise Exception("No water measurement found in selected area.")
+
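+     # Unpacks the downloaded tarball, recursing into any nested archives,
+     # and returns the path of the national shapefile for the requested tag.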
+     def extract(self, tar_url, ext):
+         extract_path = os.path.join(DATA_FOLDER, "flood_shape_file")
+         tar = tarfile.open(tar_url, 'r')
+         for item in tar:
+             tar.extract(item, extract_path)
+             if item.name.find(".tgz") != -1 or item.name.find(".tar") != -1:
+                 self.extract(item.name, "./" + item.name[:item.name.rfind('/')])
+         return os.path.join(extract_path, f'national_shapefile_{ext}.shp')
+
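+     # Convenience constructor for a scenario driven by live AHPS data.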
+     @classmethod
+     def from_live_data(
+         cls,
+         poly: MultiPolygon,
+         probability_function: dict,
+         startrime: datetime,
+         duration: timedelta,
+         timestep: timedelta
+     ):
+         kwargs = {
+             'startrime': startrime,
+             'duration': duration,
+             'timestep': timestep,
+             'type': 'live',
+         }
+
+         return cls(poly, probability_function, startrime, **kwargs)
+
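+     # Samples the elevation raster at a point. The value is inverted
+     # against a 255 ceiling, which suggests the raster is an 8-bit
+     # grayscale image rather than true elevation data.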
+     def get_elevation_by_latlong(self, lat, lon):
+         coords = ((lat, lon), (lat, lon))
+         vals = self.raster.sample(coords)
+         for val in vals:
+             elevation = val[0]
+         return 255 - elevation
+
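+     # Builds an elevation grid over the polygon's bounding box, projects
+     # it to state-plane coordinates, and estimates the enclosed volume.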
+     def map_elevation(self):
+         y_min, x_min, y_max, x_max = self.multipolygon.bounds
+         ys = np.linspace(y_min, y_max, self.samples, endpoint=True)
+         xs = np.linspace(x_min, x_max, self.samples, endpoint=True)
+         self.X, self.Y = np.meshgrid(xs, ys)
+
+         if self.use_api:
+             self.Z = self.create_elevation_profile_using_api()
+         else:
+             self.Z = self.create_elevation_profile_using_rasterfile()
+
+         r, c = self.X.shape
+         for i in range(r):
+             for j in range(c):
+                 x_sp, y_sp = stateplane.from_lonlat(self.X[i, j], self.Y[i, j])
+                 self.X[i, j] = x_sp
+                 self.Y[i, j] = y_sp
+
+         self.min_elevation = np.min(self.Z)
+         self.max_elevation = np.max(self.Z)
+
+         pts = np.array([self.X.flatten(), self.Y.flatten(), self.Z.flatten()]).T
+         self.volume = self.calc_polyhedron_volume(pts)
+
+         return self.X, self.Y, self.Z
+
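+     # Fills the elevation grid by sampling the bundled raster cell by cell.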
+     def create_elevation_profile_using_rasterfile(self):
+         raster_file = os.path.join(DATA_FOLDER, ELEVATION_RASTER_FILE)
+         self.raster = rasterio.open(raster_file)
+         r, c = self.X.shape
+         self.Z = np.zeros(self.X.shape)
+         for i in range(r):
+             for j in range(c):
+                 self.Z[i, j] = self.get_elevation_by_latlong(self.X[i, j], self.Y[i, j])
+         return self.Z
+
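+     # Looks up a single point's elevation from the AirMap elevation API
+     # and converts meters to feet.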
+     def create_elevation_using_api(self, lat, lon):
+         response = requests.get(
+             f"https://api.airmap.com/elevation/v1/ele?points={lat},{lon}"
+         )
+         response = response.json()
+         if response['status'] == 'success':
+             # print(response, '--')
+             return response['data'][0] * 3.28084
+         else:
+             print(response)
+
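+     # Batch version of the elevation lookup: requests points in chunks of
+     # 1,000 coordinate values (500 points) and reshapes the result onto
+     # the sampling grid.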
+     def create_elevation_profile_using_api(self, X=None, Y=None):
+         coords = []
+         inputs_passed = True
+         if X is None or Y is None:
+             inputs_passed = False
+             X = self.X.flatten()
+             Y = self.Y.flatten()
+
+         for x, y in zip(X, Y):
+             coords.extend([str(y), str(x)])
+
+         offset = 1000
+         Z_coords = []
+         for i in range(int(len(coords)/offset) + 1):
+             filt_coords = coords[i * offset: (i + 1) * offset]
+             filt_coords = ",".join(filt_coords)
+             response = requests.get(
+                 f"https://api.airmap.com/elevation/v1/ele?points={filt_coords}"
+             )
+             response = response.json()
+             if response['status'] == 'success':
+                 Z_coords.extend(response['data'])
+                 # print(response)
+             else:
+                 print(response)
+
+         Z = np.array(Z_coords)
+         if inputs_passed:
+             return Z
+         else:
+             Z = np.reshape(Z, self.X.shape) * 3.28084
+             return Z
+
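+     # Volume under the sampled surface via Delaunay tetrahedralization;
+     # each tetrahedron contributes |(a-d) . ((b-d) x (c-d))| / 6.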
+     def calc_polyhedron_volume(self, pts):
+
+         def tetrahedron_volume(a, b, c, d):
+             return np.abs(np.einsum('ij,ij->i', a-d, np.cross(b-d, c-d))) / 6
+
+         dt = Delaunay(pts)
+         tets = dt.points[dt.simplices]
+         self.polyhedron_volume = np.sum(tetrahedron_volume(tets[:, 0], tets[:, 1],
+                                                            tets[:, 2], tets[:, 3]))
+
+         return self.polyhedron_volume
+
+     @property
+     def area(self) -> float:
+         """Method to calculate area of the affected region."""
+         raise NotImplementedError("Method needs to be defined in derived classes")
+
+     @property
+     def polygon(self) -> MultiPolygon:
+         """Method to return the polygon for the affected region."""
+         raise NotImplementedError("Method needs to be defined in derived classes")
+
+     @property
+     def boundary(self) -> LineString:
+         """Method to return the boundary of the affected region."""
+         raise NotImplementedError("Method needs to be defined in derived classes")
+
+     @property
+     def centroid(self) -> Point:
+         """Method to return the centroid of the affected region."""
+         return self.multipolygon.centroid
+
+     def increment_time(self) -> dict:
+         """Method to increment simulation time for time-evolving scenarios."""
+         raise NotImplementedError("Method needs to be defined in derived classes")
+
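+     # Core of the scenario: fit a 2-D polynomial water surface through the
+     # gauge water elevations, evaluate it at each asset location to get a
+     # submergence depth, and map that depth through the fragility curves.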
+     def calculate_survival_probability(self, assets: dict, timestamp: datetime) -> dict:
+         """Method to calculate survival probability of asset types.
+
+         Args:
+             assets (dict): The dictionary of all assets and their corresponding asset types
+             timestamp (datetime): Time point at which to evaluate gauge water levels
+         """
+         print('Calculating survival probability ...')
+         water_elevations = []
+         coords = [
+             [], [], []
+         ]
+         z = []
+         for idx, row in self.gauges.iterrows():
+             gauge = row['GaugeLID']
+             level = self.levels[gauge][timestamp]
+
+             lat = row['Latitude']
+             lon = row['Longitude']
+             x_i, y_i = stateplane.from_lonlat(lon, lat)
+             coords[0].append(x_i)
+             coords[1].append(y_i)
+
+             if self.use_api:
+                 # elevation = self.create_elevation_using_api(lat, lon)
+                 # TODO: Don't know the difference between level and elevation or
+                 # how they relate
+                 elevation = row['Elevation_ft']
+                 water_elevation = float(level) + elevation
+             else:
+                 elevation = self.get_elevation_by_latlong(lat, lon)
+                 water_elevation = self.get_elevation_by_latlong(lat, lon) + float(level)
+
+             self.gauges["elevation"] = elevation
+             z.append(water_elevation)  # water_elevation, flow
+             coords[2].append(f"Gauge: {gauge}\nElevation: {elevation}\nWater level: {level}")
+             water_elevations.append(water_elevation)
+         self.gauges["water_level"] = water_elevations
+
+         m = self.polyfit2d(np.array(coords[0]), np.array(coords[1]), np.array(z))
+         # self.z_ = np.zeros(self.X.shape)
+
+         for asset_type, asset_dict in assets.items():
+             Xs = []
+             Ys = []
+             for asset, asset_data in asset_dict.items():
+                 Xs.append(asset_data['coordinates'][1])
+                 Ys.append(asset_data['coordinates'][0])
+                 x_i, y_i = stateplane.from_lonlat(asset_data['coordinates'][1], asset_data['coordinates'][0])
+                 z_i = self.polyval2d(x_i, y_i, m)
+                 assets[asset_type][asset]['asset_water_level_ft'] = z_i.tolist()  # list(self.polyval2d(x_i, y_i, m))
+
+             # if self.use_api:
+             #     asset_elevations_ft = self.create_elevation_profile_using_api(Xs, Ys)
+             # else:
+             #     asset_elevations_ft = self.get_elevation_by_latlong(lat, lon)
+
+             i = 0
+             for asset, asset_data in asset_dict.items():
+                 h = asset_data['asset_water_level_ft'] - asset_data['elevation_ft']
+                 assets[asset_type][asset]['submerge_depth_ft'] = h
+                 if asset_type in self.probability_model:
+                     probability_function = self.probability_model[asset_type]
+                     failure_probability = probability_function.probability(h)
+                     assets[asset_type][asset]["survival_probability"] = 1 - failure_probability
+                 else:
+                     assets[asset_type][asset]["survival_probability"] = 1
+                 i += 1
+
+         return assets
+
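+     # Least-squares fit and evaluation of a 2-D polynomial surface
+     # z = sum_ij a_ij * x**i * y**j for i, j in 0..order.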
+     def function_to_fit(self, data, a, b, c):
+         x = data[0]
+         y = data[1]
+         return (a * x) + (y * b) + c
+
+     def polyfit2d(self, x, y, z, order=3):
+         ncols = (order + 1)**2
+         G = np.zeros((x.size, ncols))
+         ij = itertools.product(range(order+1), range(order+1))
+         for k, (i, j) in enumerate(ij):
+             G[:, k] = x**i * y**j
+         m, _, _, _ = np.linalg.lstsq(G, z)
+         return m
+
+     def polyval2d(self, x, y, m):
+         order = int(np.sqrt(len(m))) - 1
+         ij = itertools.product(range(order+1), range(order+1))
+         z = np.zeros_like(x)
+         for a, (i, j) in zip(m, ij):
+             z += a * x**i * y**j
+         return z
+
+
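+ # Debug/visualization helper: renders the terrain surface, flood water
+ # surface, gauge locations, and flow time series on a live matplotlib figure.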
+ class DynamicUpdate():
+     # Suppose we know the x range
+     min_x = 0
+     max_x = 10
+
+     def __init__(self, X, Y, Z, flows):
+         self.X = X
+         self.Y = Y
+         self.Z = Z
+         self.flows = flows
+         self.min_elevation = np.min(Z)
+         self.max_elevation = np.max(Z)
+
+         ncontours = 15
+         step_size = (self.max_elevation - self.min_elevation) / ncontours
+         self.levels = np.arange(self.min_elevation, self.max_elevation, step_size)
+         self.fig = plt.figure()
+         self.ax1 = self.fig.add_subplot(121, projection='3d')
+         self.ax2 = self.fig.add_subplot(222)
+         self.ax3 = self.fig.add_subplot(224)
+
+         self.ax1.plot_surface(X, Y, Z, rstride=1, cstride=1, color='0.99', antialiased=True, edgecolor='0.5')
+
+         self.ax2.contour(X, Y, Z, zdir='z', cmap='coolwarm', levels=self.levels)
+         self.flows.plot(ax=self.ax3)
+         self.fig.canvas.draw()
+         self.fig.canvas.flush_events()
+         plt.show()
+         self.fig.savefig("topology_0.png")
+
+     def update(self, water_elevations, scatter_points, water_surface, timestamp):
+         self.ax1.clear()
+         self.ax2.clear()
+         self.ax3.clear()
+
+         X = self.X
+         Y = self.Y
+         Z = self.Z
+
+         w = np.where(water_surface > Z, water_surface, np.nan)
+         w.flat[0] = np.nan
+         self.ax1.plot_surface(X, Y, w, rstride=1, cstride=1, color='c', antialiased=True, alpha=0.5, shade=False)
+         self.ax1.plot_surface(X, Y, Z, rstride=1, cstride=1, color='0.99', antialiased=True, edgecolor='0.5')
+
+         self.ax2.contour(X, Y, Z, zdir='z', cmap='coolwarm', levels=self.levels)
+         self.ax2.scatter(scatter_points[0], scatter_points[1], color='red')
+
+         for x, y, t in zip(scatter_points[0], scatter_points[1], scatter_points[2]):
+             self.ax2.text(x, y, t, fontsize=8)
+             print(t)
+
+         self.flows.plot(ax=self.ax3)
+         self.ax3.axvline(timestamp, color="r")
+
+         self.fig.canvas.draw()
+         self.fig.canvas.flush_events()
+         # self.fig.savefig(f"topology_{int(water_elevation)}.png")
+
+
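+ # Example usage: build the asset list for a small lat/lon bounding box,
+ # construct a flood scenario from previously saved flow/level/gauge CSVs,
+ # and evaluate asset survival probabilities at the first valid time point.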
+ if __name__ == '__main__':
+
+     from erad.scenarios.common import asset_list
+
+     assets, multiploygon = asset_list(38.46, -122.95, 38.53, -122.80)
+     flood_1 = FlooadScenario(
+         multiploygon,
+         None,
+         None,
+         file_flow=r'C:\Users\alatif\Documents\GitHub\erad\erad\scenarios\flows.csv',
+         file_levels=r'C:\Users\alatif\Documents\GitHub\erad\erad\scenarios\levels.csv',
+         file_gaugues=r'C:\Users\alatif\Documents\GitHub\erad\erad\scenarios\gauges.csv',
+     )
+     timestamp = flood_1.valid_timepoints[0]
+     assets = flood_1.calculate_survival_probability(assets, timestamp)
+     print(assets)
+     # flood_1 = FlooadScenario.from_live_data(multiploygon, None, None, None, None)
+     # timestamp = flood_1.valid_timepoints[-1]
+     # for timestamp in flood_1.valid_timepoints:
+     #     assets = flood_1.calculate_survival_probability(assets, timestamp)
+     #     print(assets)
+     #     time.sleep(0.01)