NREL-erad 0.0.0a0__py3-none-any.whl → 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. erad/__init__.py +1 -0
  2. erad/constants.py +80 -11
  3. erad/default_fragility_curves/__init__.py +15 -0
  4. erad/default_fragility_curves/default_fire_boundary_dist.py +94 -0
  5. erad/default_fragility_curves/default_flood_depth.py +108 -0
  6. erad/default_fragility_curves/default_flood_velocity.py +101 -0
  7. erad/default_fragility_curves/default_fragility_curves.py +23 -0
  8. erad/default_fragility_curves/default_peak_ground_acceleration.py +163 -0
  9. erad/default_fragility_curves/default_peak_ground_velocity.py +94 -0
  10. erad/default_fragility_curves/default_wind_speed.py +94 -0
  11. erad/enums.py +40 -0
  12. erad/gdm_mapping.py +83 -0
  13. erad/models/__init__.py +1 -0
  14. erad/models/asset.py +300 -0
  15. erad/models/asset_mapping.py +20 -0
  16. erad/models/edit_store.py +22 -0
  17. erad/models/fragility_curve.py +116 -0
  18. erad/models/hazard/__init__.py +5 -0
  19. erad/models/hazard/base_models.py +12 -0
  20. erad/models/hazard/common.py +26 -0
  21. erad/models/hazard/earthquake.py +93 -0
  22. erad/models/hazard/flood.py +83 -0
  23. erad/models/hazard/wild_fire.py +121 -0
  24. erad/models/hazard/wind.py +143 -0
  25. erad/models/probability.py +76 -0
  26. erad/probability_builder.py +38 -0
  27. erad/quantities.py +31 -0
  28. erad/runner.py +125 -0
  29. erad/systems/__init__.py +2 -0
  30. erad/systems/asset_system.py +462 -0
  31. erad/systems/hazard_system.py +122 -0
  32. nrel_erad-0.1.1.dist-info/METADATA +61 -0
  33. nrel_erad-0.1.1.dist-info/RECORD +36 -0
  34. {NREL_erad-0.0.0a0.dist-info → nrel_erad-0.1.1.dist-info}/WHEEL +1 -1
  35. NREL_erad-0.0.0a0.dist-info/METADATA +0 -61
  36. NREL_erad-0.0.0a0.dist-info/RECORD +0 -42
  37. erad/cypher_queries/load_data_v1.cypher +0 -212
  38. erad/data/World_Earthquakes_1960_2016.csv +0 -23410
  39. erad/db/__init__.py +0 -0
  40. erad/db/assets/__init__.py +0 -0
  41. erad/db/assets/critical_infras.py +0 -171
  42. erad/db/assets/distribution_lines.py +0 -101
  43. erad/db/credential_model.py +0 -20
  44. erad/db/disaster_input_model.py +0 -23
  45. erad/db/inject_earthquake.py +0 -52
  46. erad/db/inject_flooding.py +0 -53
  47. erad/db/neo4j_.py +0 -162
  48. erad/db/utils.py +0 -14
  49. erad/exceptions.py +0 -68
  50. erad/metrics/__init__.py +0 -0
  51. erad/metrics/check_microgrid.py +0 -208
  52. erad/metrics/metric.py +0 -178
  53. erad/programs/__init__.py +0 -0
  54. erad/programs/backup.py +0 -62
  55. erad/programs/microgrid.py +0 -45
  56. erad/scenarios/__init__.py +0 -0
  57. erad/scenarios/abstract_scenario.py +0 -103
  58. erad/scenarios/common.py +0 -93
  59. erad/scenarios/earthquake_scenario.py +0 -161
  60. erad/scenarios/fire_scenario.py +0 -160
  61. erad/scenarios/flood_scenario.py +0 -494
  62. erad/scenarios/utilities.py +0 -76
  63. erad/scenarios/wind_scenario.py +0 -89
  64. erad/utils/__init__.py +0 -0
  65. erad/utils/ditto_utils.py +0 -252
  66. erad/utils/hifld_utils.py +0 -147
  67. erad/utils/opendss_utils.py +0 -357
  68. erad/utils/overpass.py +0 -76
  69. erad/utils/util.py +0 -178
  70. erad/visualization/__init__.py +0 -0
  71. erad/visualization/plot_graph.py +0 -218
  72. {NREL_erad-0.0.0a0.dist-info → nrel_erad-0.1.1.dist-info/licenses}/LICENSE.txt +0 -0
  73. {NREL_erad-0.0.0a0.dist-info → nrel_erad-0.1.1.dist-info}/top_level.txt +0 -0
erad/scenarios/flood_scenario.py DELETED
@@ -1,494 +0,0 @@
-
- from click import format_filename
- import matplotlib.pyplot as plt
- from erad.scenarios.utilities import ProbabilityFunctionBuilder, GeoUtilities
- from erad.constants import ELEVATION_RASTER_FILE, DATA_FOLDER, FLOOD_HISTORIC_SHP_PATH
- from shapely.geometry import MultiPolygon, Point, LineString, Polygon
- from erad.scenarios.abstract_scenario import BaseScenario
- from datetime import datetime, timedelta
- from scipy.optimize import curve_fit
- from scipy.spatial import Delaunay
- import geopandas as gpd
- import pandas as pd
- import numpy as np
- import stateplane
- import xmltodict
- import itertools
- import requests
- import rasterio
- import tarfile
- import time
- import sys
- import os
-
- from erad.scenarios.common import AssetTypes, asset_list
- from erad.scenarios.utilities import ProbabilityFunctionBuilder
-
- #plt.ion()
- class FlooadScenario(BaseScenario, GeoUtilities):
-     """Base class for FlooadScenario. Extends BaseScenario and GeoUtilities
-
-     Attributes:
-         origin (Point): Earthquake origin point
-         probability_model (dict): Dictionary mapping asset types to probability funcitons
-         timestamp (datetime): Scenario occurance time
-         kwargs (dict): Additional parameters relevant for a particular scenario type
-     """
-
-     fragility_curves = {
-         # Electrical Grid Risk Assessment Against Flooding in Barcelona and Bristol Cities
-         AssetTypes.substation.name : ProbabilityFunctionBuilder("norm", [1.5, 0.2]),
-         AssetTypes.solar_panels.name : ProbabilityFunctionBuilder("norm", [0.04, .01]),
-         # AssetTypes.buried_lines.name : ProbabilityFunctionBuilder("norm", [80, 10]),
-         # estimated
-         AssetTypes.wind_turbines.name : ProbabilityFunctionBuilder("lognorm", [0.8, 3, 2]),
-         AssetTypes.battery_storage.name : ProbabilityFunctionBuilder("norm", [0.04, .01]),
-         # estimated
-         AssetTypes.transmission_poles.name : ProbabilityFunctionBuilder("lognorm", [0.8, 5, 3]),
-         # Tsunami Fragility Functions for Road and Utility Pole Assets Using Field Survey and Remotely Sensed Data from the 2018 Sulawesi Tsunami, Palu, Indonesia
-         AssetTypes.distribution_poles.name : ProbabilityFunctionBuilder("lognorm", [1, 0.2, 1]),
-         AssetTypes.transmission_overhead_lines.name : ProbabilityFunctionBuilder("norm", [0.04, .01]),
-         AssetTypes.distribution_overhead_lines.name : ProbabilityFunctionBuilder("norm", [0.04, .01]),
-     }
-
-
-     def __init__(self, poly : MultiPolygon , probability_model : dict, timestamp : datetime, **kwargs) -> None:
-         super(FlooadScenario, self).__init__(poly, probability_model, timestamp, **kwargs)
-         self.kwargs = kwargs
-         self.samples = 100
-         self.use_api = True
-
-         # self.map_elevation()
-         if 'type' in kwargs and kwargs['type'] == 'live':
-             self.flows = pd.DataFrame()
-             self.levels = pd.DataFrame()
-             self.real_time()
-         else:
-             from shapely import wkt
-             self.flows = pd.read_csv(kwargs['file_flow'],index_col=0, parse_dates=True)
-             self.levels = pd.read_csv(kwargs['file_levels'], index_col=0, parse_dates=True)
-             df = pd.read_csv(kwargs['file_gaugues'])
-             df['geometry'] = df['geometry'].apply(wkt.loads)
-             crs = {'init': 'epsg:4326'}
-             self.gauges = gpd.GeoDataFrame(df).set_geometry('geometry')
-             #self.gauges = gpd.read_file(kwargs['file_gaugues'])
-             pass
-         # self.plot = DynamicUpdate(self.X, self.Y, self.Z, self.levels)
-         return
-
-     @classmethod
-     def from_historical_flood_by_code(cls, flood_code : str, probability_function : dict = None):
-         data_file = os.path.join(DATA_FOLDER, FLOOD_HISTORIC_SHP_PATH)
-         assert os.path.exists(data_file), f"The data file {data_file} not found"
-         flood_data = gpd.read_file(data_file)
-         flood_data = flood_data[flood_data['DFIRM_ID'] == flood_code]
-         raise NotImplementedError("Model has not been implemented")
-
-     def real_time(self):
-         self.gauges = self.get_flow_measurements(0)
-         self.gauges.to_csv("gauges.csv")
-         flows, levels = self.gauges_in_polygon()
-         for c in flows.columns:
-             flows[c] = flows[c].interpolate(method='polynomial', order=2)
-             levels[c] = levels[c].interpolate(method='polynomial', order=2)
-         flows = flows.ffill(axis = 0)
-         levels = levels.ffill(axis = 0)
-         flows = flows.bfill(axis = 0)
-         levels = levels.bfill(axis = 0)
-         self.flows = flows.resample("15T").interpolate()
-         self.levels = levels.resample("15T").interpolate()
-         self.flows.to_csv("flows.csv")
-         self.levels.to_csv("levels.csv")
-
-     @property
-     def valid_timepoints(self):
-         return list(self.flows.index)
-
-     def gauges_in_polygon(self):
-         all_flows = pd.DataFrame()
-         all_levels = pd.DataFrame()
-         for idx, gauge in self.gauges.iterrows():
-             flow = pd.DataFrame()
-             level = pd.DataFrame()
-             gauge_id = gauge['GaugeLID']
-             url = f'https://water.weather.gov/ahps2/hydrograph_to_xml.php?gage={gauge_id}&output=xml'
-             response = requests.get(url)
-             #print(response.text)
-             data = xmltodict.parse(response.text)
-             times_series_data = data['site']['observed']['datum']
-             time_points = []
-             flow_points = []
-             level_points =[]
-             for data_point in times_series_data:
-                 time_points.append(data_point['valid']['#text'])
-                 flow_points.append(float(data_point['secondary']['#text']))
-                 level_points.append(float(data_point['primary']['#text']))
-             index =pd.to_datetime(time_points)
-             flow.index =index
-             flow[gauge_id] = flow_points
-             level.index =index
-             level[gauge_id] = level_points
-             print(flow)
-             print(level)
-             all_flows = flow.merge(all_flows, right_index=True, left_index=True, how="outer")
-             all_levels = level.merge(all_levels, right_index=True, left_index=True, how="outer")
-         return all_flows, all_levels
-
-     def get_flow_measurements(self, forecast_day: int = 0):
-         if forecast_day == 0:
-             forecast_tag = "obs"
-         else:
-             if forecast_day == 1:
-                 forecast_tag = "fcst_f024"
-             else:
-                 forecast_tag = f"fcst_f{forecast_day*24}"
-
-         url = f"https://water.weather.gov/ahps/download.php?data=tgz_{forecast_tag}"
-         r = requests.get(url, allow_redirects=True)
-         date = str(datetime.now()).replace(":", "_").replace(".", "_").replace(" ", "_")
-         shape_file_path = os.path.join(DATA_FOLDER, f'flood_shapefile_{date}.tgz')
-         open(shape_file_path, 'wb').write(r.content)
-         file_path = self.extract(shape_file_path, forecast_tag)
-         data = gpd.read_file(file_path)
-         if not data.empty:
-             water_info = []
-             for idx, row in data.iterrows():
-                 point = Point(row['Latitude'], row['Longitude'])
-                 if self.multipolygon.contains(point):
-                     water_info.append(row)
-             return gpd.GeoDataFrame(water_info)
-         else:
-             raise Exception("No water measurement found in selected area.")
-
-     def extract(self, tar_url, ext):
-         extract_path = os.path.join(DATA_FOLDER, "flood_shape_file")
-         tar = tarfile.open(tar_url, 'r')
-         for item in tar:
-             tar.extract(item, extract_path)
-             if item.name.find(".tgz") != -1 or item.name.find(".tar") != -1:
-                 self.extract(item.name, "./" + item.name[:item.name.rfind('/')])
-         return os.path.join(extract_path, f'national_shapefile_{ext}.shp')
-
-     @classmethod
-     def from_live_data(
-         cls,
-         poly : MultiPolygon,
-         probability_function : dict,
-         startrime: datetime,
-         duration: timedelta,
-         timestep: timedelta
-     ):
-         kwargs = {
-             'startrime' : startrime,
-             'duration' : duration,
-             'timestep' : timestep,
-             'type' : 'live',
-         }
-
-         return cls(poly, probability_function, startrime, **kwargs)
-
-     def get_elevation_by_latlong(self, lat, lon):
-         coords = ((lat,lon), (lat,lon))
-         vals = self.raster.sample(coords)
-         for val in vals:
-             elevation=val[0]
-         return 255-elevation
-
-     def map_elevation(self):
-         y_min, x_min, y_max, x_max = self.multipolygon.bounds
-         ys = np.linspace(y_min, y_max, self.samples, endpoint=True)
-         xs = np.linspace(x_min, x_max, self.samples, endpoint=True)
-         self.X, self.Y = np.meshgrid(xs, ys)
-
-         if self.use_api:
-             self.Z = self.create_elevation_profile_using_api()
-         else:
-             self.Z = self.create_elevation_profile_using_rasterfile()
-
-         r, c = self.X.shape
-         for i in range(r):
-             for j in range(c):
-                 x_sp, y_sp = stateplane.from_lonlat(self.X[i, j], self.Y[i, j])
-                 self.X[i, j] =x_sp
-                 self.Y[i, j] =y_sp
-
-         self.min_elevation = np.min(self.Z)
-         self.max_elevation = np.max(self.Z)
-
-         pts = np.array([self.X.flatten(), self.Y.flatten(), self.Z.flatten()]).T
-         self.volume = self.calc_polyhedron_volume(pts)
-
-         return self.X, self.Y, self.Z
-
-     def create_elevation_profile_using_rasterfile(self):
-         raster_file = os.path.join(DATA_FOLDER, ELEVATION_RASTER_FILE)
-         self.raster = rasterio.open(raster_file)
-         r, c = self.X.shape
-         self.Z = np.zeros(self.X.shape)
-         for i in range(r):
-             for j in range(c):
-                 self.Z[i, j] = self.get_elevation_by_latlong(self.X[i, j], self.Y[i, j])
-         return self.Z
-
-     def create_elevation_using_api(self, lat, lon):
-         response = requests.get(
-             f"https://api.airmap.com/elevation/v1/ele?points={lat},{lon}"
-         )
-         response = response.json()
-         if response['status'] == 'success':
-             # print(response, '--')
-             return response['data'][0] * 3.28084
-
-         else:
-             print(response)
-
-     def create_elevation_profile_using_api(self, X=None, Y=None):
-         coords = []
-         inputs_passed = True
-         if X is None or Y is None:
-             inputs_passed = False
-             X = self.X.flatten()
-             Y = self.Y.flatten()
-
-         for x, y in zip(X, Y):
-             coords.extend([str(y), str(x)])
-
-         offset = 1000
-         Z_coords = []
-         for i in range(int(len(coords)/offset) + 1):
-
-             filt_coords = coords[i * offset: (i + 1) * offset]
-             filt_coords = ",".join(filt_coords)
-             response = requests.get(
-                 f"https://api.airmap.com/elevation/v1/ele?points={filt_coords}"
-             )
-             response = response.json()
-             if response['status'] == 'success':
-                 Z_coords.extend(response['data'])
-                 # print(response)
-             else:
-                 print(response)
-
-         Z = np.array(Z_coords)
-         if inputs_passed:
-             return Z
-         else:
-             Z = np.reshape(Z, self.X.shape) * 3.28084
-             return (Z)
-
-     def calc_polyhedron_volume(self, pts):
-
-         def tetrahedron_volume(a, b, c, d):
-             return np.abs(np.einsum('ij,ij->i', a-d, np.cross(b-d, c-d))) / 6
-
-         dt = Delaunay(pts)
-         tets = dt.points[dt.simplices]
-         self.polyhedron_volume = np.sum(tetrahedron_volume(tets[:, 0], tets[:, 1],
-                                                            tets[:, 2], tets[:, 3]))
-
-         return self.polyhedron_volume
-
-     @property
-     def area(self) -> float:
-         """Method to calculate area of affected region."""
-         raise NotImplementedError("Method needs to be defined in derived classes")
-
-     @property
-     def polygon(self) -> MultiPolygon:
-         """Method to return polygon for the affected region."""
-         raise NotImplementedError("Method needs to be defined in derived classes")
-
-     @property
-     def boundary(self) -> LineString:
-         """Method to return boundary for the affected region."""
-         raise NotImplementedError("Method needs to be defined in derived classes")
-
-     @property
-     def centroid(self) -> Point:
-         """Method to return the centroid of the affected region."""
-         return self.multipolygon.centroid
-
-     def increment_time(self) -> dict:
-         """Method to increment simulation time for time evolviong scenarios."""
-         raise NotImplementedError("Method needs to be defined in derived classes")
-
-     def calculate_survival_probability(self, assets : dict, timestamp: datetime) -> dict:
-         """Method to calculate survival probaility of asset types.
-
-         Args:
-             assets (dict): The dictionary of all assets and their corresponding asset types
-         """
-         print('Calculating survival probaiblity ...')
-         water_elevations = []
-         coords = [
-             [],[],[]
-         ]
-         z = []
-         for idx, row in self.gauges.iterrows():
-
-             gauge = row['GaugeLID']
-             level = self.levels[gauge][timestamp]
-
-             lat = row['Latitude']
-             lon = row['Longitude']
-             x_i, y_i = stateplane.from_lonlat(lon, lat)
-             coords[0].append(x_i)
-             coords[1].append(y_i)
-
-
-             if self.use_api:
-                 # elevation = self.create_elevation_using_api(lat, lon)
-                 # TODO: Don't know the different between level and elevation or
-                 # how they relate
-                 elevation = row['Elevation_ft']
-                 water_elevation = float(level) + elevation
-             else:
-                 elevation = self.get_elevation_by_latlong(lat, lon)
-                 water_elevation = self.get_elevation_by_latlong(lat, lon) + float(level)
-
-             self.gauges["elevation"] = elevation
-             z.append(water_elevation) #water_elevation, flow
-             coords[2].append(f"Gauge: {gauge}\nElevation: {elevation}\nWater level: {level}")
-             water_elevations.append(water_elevation)
-         self.gauges["water_level"] = water_elevations
-
-         m = self.polyfit2d(np.array(coords[0]), np.array(coords[1]), np.array(z))
-         # self.z_ = np.zeros(self.X.shape)
-
-         for asset_type, asset_dict in assets.items():
-             Xs = []
-             Ys = []
-             for asset, asset_data in asset_dict.items():
-                 Xs.append(asset_data['coordinates'][1])
-                 Ys.append(asset_data['coordinates'][0])
-                 x_i, y_i = stateplane.from_lonlat(asset_data['coordinates'][1], asset_data['coordinates'][0])
-                 z_i = self.polyval2d(x_i, y_i, m)
-                 assets[asset_type][asset]['asset_water_level_ft'] = z_i.tolist() #list(self.polyval2d(x_i, y_i, m))
-
-             # if self.use_api:
-             #     asset_elevations_ft = self.create_elevation_profile_using_api(Xs, Ys)
-             # else:
-             #     asset_elevations_ft = self.get_elevation_by_latlong(lat, lon)
-
-             i = 0
-             for asset, asset_data in asset_dict.items():
-                 h = asset_data['asset_water_level_ft'] - asset_data['elevation_ft']
-                 assets[asset_type][asset]['submerge_depth_ft'] = h
-                 if asset_type in self.probability_model:
-                     probability_function = self.probability_model[asset_type]
-                     failure_probability = probability_function.probability(h)
-                     assets[asset_type][asset]["survival_probability"] = 1 - failure_probability
-                 else:
-                     assets[asset_type][asset]["survival_probability"] = 1
-                 i+=1
-
-         return assets
-
-     def function_to_fit(self, data, a, b, c):
-         x = data[0]
-         y = data[1]
-         return (a * x) + (y * b) + c
-
-     def polyfit2d(self, x, y, z, order=3):
-         ncols = (order + 1)**2
-         G = np.zeros((x.size, ncols))
-         ij = itertools.product(range(order+1), range(order+1))
-         for k, (i,j) in enumerate(ij):
-             G[:,k] = x**i * y**j
-         m, _, _, _ = np.linalg.lstsq(G, z)
-         return m
-
-     def polyval2d(self, x, y, m):
-         order = int(np.sqrt(len(m))) - 1
-         ij = itertools.product(range(order+1), range(order+1))
-         z = np.zeros_like(x)
-         for a, (i,j) in zip(m, ij):
-             z += a * x**i * y**j
-         return z
-
-
- class DynamicUpdate():
-     #Suppose we know the x range
-     min_x = 0
-     max_x = 10
-
-     def __init__(self, X, Y, Z, flows):
-         self.X = X
-         self.Y = Y
-         self.Z = Z
-         self.flows = flows
-         self.min_elevation = np.min(Z)
-         self.max_elevation = np.max(Z)
-
-         ncontours = 15
-         step_size = (self.max_elevation - self.min_elevation) / ncontours
-         self.levels = np.arange(self.min_elevation, self.max_elevation, step_size)
-         self.fig = plt.figure()
-         self.ax1 = self.fig.add_subplot(121, projection='3d')
-         self.ax2 = self.fig.add_subplot(222)
-         self.ax3 = self.fig.add_subplot(224)
-
-         self.ax1.plot_surface(X, Y, Z, rstride=1, cstride=1, color='0.99', antialiased=True, edgecolor='0.5')
-
-         self.ax2.contour(X, Y, Z, zdir='z', cmap='coolwarm', levels=self.levels)
-         self.flows.plot(ax = self.ax3)
-         self.fig.canvas.draw()
-         self.fig.canvas.flush_events()
-         plt.show()
-         self.fig.savefig(f"topology_0.png")
-
-
-     def update(self, water_elevations, scatter_points, water_surface, timestamp):
-         self.ax1.clear()
-         self.ax2.clear()
-         self.ax3.clear()
-
-         X = self.X
-         Y = self.Y
-         Z = self.Z
-
-         w = np.where(water_surface > Z, water_surface, np.nan)
-         w.flat[0] = np.nan
-         self.ax1.plot_surface(X, Y, w, rstride=1, cstride=1, color='c', antialiased=True, alpha=0.5, shade=False)
-         self.ax1.plot_surface(X, Y, Z, rstride=1, cstride=1, color='0.99', antialiased=True, edgecolor='0.5')
-
-         self.ax2.contour(X, Y, Z, zdir='z', cmap='coolwarm', levels=self.levels)
-         self.ax2.scatter(scatter_points[0], scatter_points[1], color='red')
-
-         for x, y, t in zip(scatter_points[0], scatter_points[1], scatter_points[2]):
-             self.ax2.text(x, y, t, fontsize=8)
-             print(t)
-
-         self.flows.plot(ax = self.ax3)
-         self.ax3.axvline(timestamp, color="r")
-
-         self.fig.canvas.draw()
-         self.fig.canvas.flush_events()
-         #self.fig.savefig(f"topology_{int(water_elevation)}.png")
-
-
- if __name__ == '__main__':
-
-
-
-     from erad.scenarios.common import asset_list
-
-     assets, multiploygon = asset_list(38.46, -122.95, 38.53, -122.80)
-     flood_1 = FlooadScenario(
-         multiploygon,
-         None,
-         None,
-         file_flow=r'C:\Users\alatif\Documents\GitHub\erad\erad\scenarios\flows.csv',
-         file_levels=r'C:\Users\alatif\Documents\GitHub\erad\erad\scenarios\levels.csv',
-         file_gaugues=r'C:\Users\alatif\Documents\GitHub\erad\erad\scenarios\gauges.csv',
-     )
-     timestamp = flood_1.valid_timepoints[0]
-     assets = flood_1.calculate_survival_probability(assets, timestamp)
-     print(assets)
-     # flood_1 = FlooadScenario.from_live_data(multiploygon, None, None, None, None)
-     # timestamp = flood_1.valid_timepoints[-1]
-     # for timestamp in flood_1.valid_timepoints:
-     #     assets = flood_1.calculate_survival_probability(assets, timestamp)
-     #     print(assets)
-     #     time.sleep(0.01)
-
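For orientation, the deleted FlooadScenario estimates the water surface at each asset by fitting a least-squares polynomial through the gauge water elevations (polyfit2d) and evaluating it at the asset's projected coordinates (polyval2d). The standalone sketch below reproduces that fit outside the package; the gauge offsets and water elevations are made-up illustrative values, order=1 is chosen to match the four sample points, and rcond=None is added to the lstsq call.

# Standalone sketch of the 2-D polynomial surface fit used by the deleted
# FlooadScenario to interpolate water elevation between gauges.
import itertools
import numpy as np

def polyfit2d(x, y, z, order=1):
    """Least-squares fit of z = sum(m[k] * x**i * y**j) for i, j <= order."""
    ncols = (order + 1) ** 2
    G = np.zeros((x.size, ncols))
    for k, (i, j) in enumerate(itertools.product(range(order + 1), range(order + 1))):
        G[:, k] = x**i * y**j
    m, _, _, _ = np.linalg.lstsq(G, z, rcond=None)
    return m

def polyval2d(x, y, m):
    """Evaluate the fitted surface at (x, y)."""
    order = int(np.sqrt(len(m))) - 1
    z = np.zeros_like(x, dtype=float)
    for a, (i, j) in zip(m, itertools.product(range(order + 1), range(order + 1))):
        z += a * x**i * y**j
    return z

# Hypothetical local easting/northing offsets (ft) and water elevations (ft) at four gauges.
gx = np.array([0.0, 5200.0, 10400.0, 3100.0])
gy = np.array([0.0, 8700.0, 2500.0, 12100.0])
gz = np.array([102.0, 98.5, 101.2, 100.4])

m = polyfit2d(gx, gy, gz, order=1)
asset_water_level = polyval2d(np.array([4000.0]), np.array([6000.0]), m)
print(float(asset_water_level[0]))  # estimated water surface elevation at the asset location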
erad/scenarios/utilities.py DELETED
@@ -1,76 +0,0 @@
- from shapely.geometry import MultiPolygon, Point, LineString
- from shapely.ops import nearest_points
- import matplotlib.pyplot as plt
- import scipy.stats as stats
- import geopy.distance
- import numpy as np
- import stateplane
-
-
- class GeoUtilities:
-
-     @property
-     def identify_stateplane_projection(self) -> str:
-         """ Automatically identifies stateplane projection ID """
-         x = self.centroid.x
-         y = self.centroid.y
-         return stateplane.identify(x, y)
-
-     def in_polygon(self, point : Point) -> bool:
-         return self.multipolygon.contains(point)
-
-     def distance_from_boundary(self, point : Point) -> float:
-         """ Calculates distance of a point to polygon boundary. Correct calculations require conversion to cartesian coordinates"""
-         if self.multipolygon.contains(point):
-             p1, p2 = nearest_points(self.boundary, point)
-         else:
-             p1, p2 = nearest_points(self.multipolygon, point)
-         coords_1 = (p1.y, p1.x)
-         coords_2 = (p2.y, p2.x)
-         return geopy.distance.geodesic(coords_1, coords_2).km
-
-     def distance_from_centroid(self, point : Point):
-         """ Calculates distance of a point to polygon centroid. Correct calculations require conversion to cartesian coordinates """
-         coords_1 = (self.centroid.y, self.centroid.x)
-         coords_2 = (point.y, point.x)
-         return geopy.distance.geodesic(coords_1, coords_2).km
-
-
-
- class ProbabilityFunctionBuilder:
-     """Class containing utility fuctions for sceario definations."""
-
-
-     def __init__(self, dist, params):
-         """Constructor for BaseScenario class.
-
-         Args:
-             dist (str): Name of teh distribution. Should follow Scipy naming convention
-             params (list): A list of parameters for the chosen distribution function. See Scipy.stats documentation
-         """
-
-         self.dist = getattr(stats, dist)
-         self.params = params
-         return
-
-     def sample(self):
-         """Sample the distribution """
-         return self.dist.rvs(*self.params, size=1)[0]
-
-     def plot_cdf(self, x:np.linspace, ax =None, label="") -> None:
-         """Plot the cumalative distribution fuction"""
-         cdf = self.dist.cdf
-         if ax is None:
-             plt.plot(x,cdf(x, *self.params), label=label)
-         else:
-             ax.plot(x,cdf(x, *self.params), label=label)
-
-
-     def probability(self, value: float) -> float:
-         """Calculates survival probability of a given asset.
-
-         Args:
-             value (float): value for vetor of interest. Will change with scenarions
-         """
-         cdf = self.dist.cdf
-         return cdf(value, *self.params)
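For orientation, the deleted scenario classes built their fragility_curves dictionaries from the ProbabilityFunctionBuilder above: a scipy.stats distribution name plus its parameter list, with the CDF evaluated at a hazard intensity read as the failure probability. A minimal sketch of that usage, reusing the substation entry from the deleted flood fragility curves and an arbitrary illustrative depth value:

# Minimal sketch (not part of the package) of how ProbabilityFunctionBuilder was used.
import scipy.stats as stats

class ProbabilityFunctionBuilder:
    def __init__(self, dist: str, params: list):
        self.dist = getattr(stats, dist)  # e.g. stats.norm, stats.lognorm
        self.params = params

    def probability(self, value: float) -> float:
        # CDF at the hazard intensity -> probability of failure at that intensity.
        return self.dist.cdf(value, *self.params)

# Distribution and parameters taken from the deleted flood curve for substations.
substation_curve = ProbabilityFunctionBuilder("norm", [1.5, 0.2])
depth = 1.6  # hypothetical submergence depth, in the same units as the curve parameters
failure = substation_curve.probability(depth)
print(f"failure probability: {failure:.3f}, survival probability: {1 - failure:.3f}")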
erad/scenarios/wind_scenario.py DELETED
@@ -1,89 +0,0 @@
- from erad.constants import FIRE_HISTORIC_GEODATAFRAME_PATH, DATA_FOLDER
- from shapely.geometry import MultiPolygon, Point, LineString
- from erad.scenarios.utilities import ProbabilityFunctionBuilder
- from erad.scenarios.abstract_scenario import BaseScenario
- from erad.exceptions import FeatureNotImplementedError
- from erad.scenarios.utilities import GeoUtilities
- import matplotlib.pyplot as plt
- from datetime import datetime
- import geopandas as gpd
- import numpy as np
- import random
- import pyproj
- import os
-
- from erad.scenarios.common import AssetTypes
- from erad.scenarios.utilities import ProbabilityFunctionBuilder
-
-
- class WindScenario(BaseScenario, GeoUtilities):
-     """Base class for FireScenario. Extends BaseScenario and GeoUtilities
-
-     Attributes:
-         multipolygon (MultiPolygon): MultiPolygon enclosing wild fire regions
-         probability_model (dict): Dictionary mapping asset types to probability funcitons
-         timestamp (datetime): Scenario occurance time
-     """
-
-     fragility_curves = {
-         #Extending energy system modelling to include extreme weather risks and application to hurricane events in Puerto Rico
-         AssetTypes.substation.name : ProbabilityFunctionBuilder("lognorm", [0.8, 10, 5]),
-         AssetTypes.solar_panels.name : ProbabilityFunctionBuilder("lognorm", [0.8, 10, 5]),
-         AssetTypes.buried_lines.name : ProbabilityFunctionBuilder("lognorm", [0.8, 10, 5]),
-         AssetTypes.wind_turbines.name : ProbabilityFunctionBuilder("normal", [0.8, 10, 5]),
-         #AssetTypes.battery_storage.name : ProbabilityFunctionBuilder("lognorm", [0.8, 10, 5]),
-         #AssetTypes.transmission_poles.name : ProbabilityFunctionBuilder("lognorm", [0.8, 10, 5]),
-         AssetTypes.distribution_poles.name : ProbabilityFunctionBuilder("lognorm", [0.8, 10, 5]),
-         # AssetTypes.transmission_overhead_lines.name : ProbabilityFunctionBuilder("lognorm", [0.8, 10, 5]),
-         AssetTypes.distribution_overhead_lines.name : ProbabilityFunctionBuilder("beta", [0.8, 10, 5]),
-     }
-
-     def __init__(self, multipolygon : MultiPolygon , probability_model : dict, timestamp : datetime) -> None:
-         """Constructor for FireScenario.
-
-         Args:
-             multipolygon (MultiPolygon): MultiPolygon enclosing wild fire regions
-             probability_model (dict): Dictionary mapping asset types to probability funcitons
-             timestamp (datetime): Scenario occurance time
-         """
-
-         super(WindScenario, self).__init__(multipolygon, probability_model, timestamp)
-         return
-
-     @property
-     def area(self) -> float:
-         """Method to calculate area of affected region."""
-         geod = pyproj.Geod(ellps="WGS84")
-         area = abs(geod.geometry_area_perimeter(self.polygon)[0])
-         return area
-
-     @property
-     def polygon(self) -> MultiPolygon:
-         """Method to return polygon for the affected region."""
-         return self.multipolygon
-
-     @property
-     def boundary(self) -> LineString:
-         """Method to return boundary for the affected region."""
-         return self.multipolygon.boundary
-
-     @property
-     def centroid(self) -> Point:
-         """Method to return the centroid of the affected region."""
-         return self.polygon.centroid
-
-     def increment_time(self):
-         """Method to increment simulation time for time evolviong scenarios."""
-         raise FeatureNotImplementedError()
-
-     def calculate_survival_probability(self, assets : dict, timestamp : datetime, plot: bool) -> dict:
-         """Method to calculate survival probaility of asset types.
-
-         Args:
-             assets (dict): The dictionary of all assets and their corresponding asset types
-             plot (bool): Set to true to plot the fire survival model
-         """
-         return assets
-
-
-
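For orientation, WindScenario.area above computes a geodesic (ellipsoidal) area with pyproj rather than a planar one. A small standalone sketch of that call, using the bounding box from the deleted flood example (38.46, -122.95, 38.53, -122.80) as an illustrative polygon:

# Sketch of the geodesic area calculation used by WindScenario.area; coordinates are lon/lat.
import pyproj
from shapely.geometry import Polygon

geod = pyproj.Geod(ellps="WGS84")
poly = Polygon([(-122.95, 38.46), (-122.80, 38.46), (-122.80, 38.53), (-122.95, 38.53)])
area_m2, _perimeter_m = geod.geometry_area_perimeter(poly)
print(abs(area_m2) / 1e6, "km^2")  # area is signed by ring orientation, hence abs()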
erad/utils/__init__.py DELETED
File without changes