NREL-erad 0.0.0a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. NREL_erad-0.0.0a0.dist-info/LICENSE.txt +29 -0
  2. NREL_erad-0.0.0a0.dist-info/METADATA +61 -0
  3. NREL_erad-0.0.0a0.dist-info/RECORD +42 -0
  4. NREL_erad-0.0.0a0.dist-info/WHEEL +5 -0
  5. NREL_erad-0.0.0a0.dist-info/top_level.txt +1 -0
  6. erad/__init__.py +0 -0
  7. erad/constants.py +20 -0
  8. erad/cypher_queries/load_data_v1.cypher +212 -0
  9. erad/data/World_Earthquakes_1960_2016.csv +23410 -0
  10. erad/db/__init__.py +0 -0
  11. erad/db/assets/__init__.py +0 -0
  12. erad/db/assets/critical_infras.py +171 -0
  13. erad/db/assets/distribution_lines.py +101 -0
  14. erad/db/credential_model.py +20 -0
  15. erad/db/disaster_input_model.py +23 -0
  16. erad/db/inject_earthquake.py +52 -0
  17. erad/db/inject_flooding.py +53 -0
  18. erad/db/neo4j_.py +162 -0
  19. erad/db/utils.py +14 -0
  20. erad/exceptions.py +68 -0
  21. erad/metrics/__init__.py +0 -0
  22. erad/metrics/check_microgrid.py +208 -0
  23. erad/metrics/metric.py +178 -0
  24. erad/programs/__init__.py +0 -0
  25. erad/programs/backup.py +62 -0
  26. erad/programs/microgrid.py +45 -0
  27. erad/scenarios/__init__.py +0 -0
  28. erad/scenarios/abstract_scenario.py +103 -0
  29. erad/scenarios/common.py +93 -0
  30. erad/scenarios/earthquake_scenario.py +161 -0
  31. erad/scenarios/fire_scenario.py +160 -0
  32. erad/scenarios/flood_scenario.py +494 -0
  33. erad/scenarios/utilities.py +76 -0
  34. erad/scenarios/wind_scenario.py +89 -0
  35. erad/utils/__init__.py +0 -0
  36. erad/utils/ditto_utils.py +252 -0
  37. erad/utils/hifld_utils.py +147 -0
  38. erad/utils/opendss_utils.py +357 -0
  39. erad/utils/overpass.py +76 -0
  40. erad/utils/util.py +178 -0
  41. erad/visualization/__init__.py +0 -0
  42. erad/visualization/plot_graph.py +218 -0
erad/utils/opendss_utils.py ADDED
@@ -0,0 +1,357 @@
+ """Module for extracting assets from an OpenDSS model.
+
+ Examples:
+
+     >>> from erad.utils.opendss_utils import extract_export_opendss_model
+     >>> extract_export_opendss_model(
+     ...     <path_to_opendss_master_file>,
+     ...     <output_folder>
+     ... )
+ """
+
+ # standard imports
+ from pathlib import Path
+ import logging
+ from typing import List
+
+ # third-party imports
+ import opendssdirect as dss
+ import pandas as pd
+ import networkx as nx
+ from shapely.geometry import MultiPoint
+ import stateplane
+
+ # internal imports
+ from erad.utils.util import path_validation, setup_logging
+ from erad.exceptions import OpenDSSCommandError, MultiStatePlaneError
+
+
+ logger = logging.getLogger(__name__)
+
+
+ def get_transformers(dss_instance: dss) -> List:
+     """Function to return list of transformers in opendss model.
+
+     Args:
+         dss_instance (dss): OpenDSS instance with models preloaded
+
+     Returns:
+         List: List of transformer metadata object
+     """
+
+     transformer_container = []
+     flag = dss_instance.Transformers.First()
+     while flag > 0:
+         trans_name = dss_instance.CktElement.Name().lower()
+         buses = dss_instance.CktElement.BusNames()
+         bus1, bus2 = buses[0].split(".")[0], buses[1].split(".")[0]
+
+         transformer_container.append(
+             {
+                 "name": trans_name,
+                 "type": "Transformer",
+                 "source": bus1,
+                 "target": bus2,
+                 "kva": dss_instance.Transformers.kVA(),
+                 "num_phase": dss_instance.CktElement.NumPhases(),
+             }
+         )
+         flag = dss_instance.Transformers.Next()
+     return transformer_container
+
+
+ def get_line_sections(dss_instance: dss) -> List:
+     """Function to return list of all line segments in opendss model.
+
+     Args:
+         dss_instance (dss): OpenDSS instance with models preloaded
+
+     Returns:
+         List: List of line segment metadata object
+     """
+     # Maps OpenDSS length unit codes (0=none, 1=mi, 2=kft, 3=km,
+     # 4=m, 5=ft, 6=in, 7=cm) to kilometers.
+     UNIT_MAPPER = {
+         0: 0,
+         1: 1.60934,
+         2: 0.3048,
+         3: 1,
+         4: 0.001,
+         5: 0.0003048,
+         6: 0.0000254,
+         7: 0.00001,
+     }
+
+     sections_container = []
+     flag = dss_instance.Lines.First()
+     while flag > 0:
+         section_name = dss_instance.CktElement.Name().lower()
+         buses = dss_instance.CktElement.BusNames()
+         bus1, bus2 = buses[0].split(".")[0], buses[1].split(".")[0]
+
+         sections_container.append(
+             {
+                 "name": section_name,
+                 "type": "LineSegment",
+                 "source": bus1,
+                 "target": bus2,
+                 "length_km": UNIT_MAPPER[dss_instance.Lines.Units()]
+                 * dss_instance.Lines.Length(),
+                 "ampacity": dss_instance.Lines.NormAmps(),
+                 "num_phase": dss_instance.CktElement.NumPhases(),
+             }
+         )
+
+         flag = dss_instance.Lines.Next()
+     return sections_container
+
+
+ def get_buses(dss_instance: dss) -> List:
+     """Function to return list of all buses in opendss model.
+
+     Args:
+         dss_instance (dss): OpenDSS instance with models preloaded
+
+     Returns:
+         List: List of bus metadata object
+     """
+     buses_container = []
+     for bus in dss_instance.Circuit.AllBusNames():
+         dss_instance.Circuit.SetActiveBus(bus)
+         buses_container.append(
+             {
+                 "name": bus,
+                 "type": "Bus",
+                 "kv": dss_instance.Bus.kVBase(),
+                 "longitude": dss_instance.Bus.X(),
+                 "latitude": dss_instance.Bus.Y(),
+             }
+         )
+     return buses_container
+
+
+ def get_capacitors(dss_instance: dss) -> List:
+     """Function to return list of all capacitors in opendss model.
+
+     Args:
+         dss_instance (dss): OpenDSS instance with models preloaded
+
+     Returns:
+         List: List of capacitor metadata object
+     """
+
+     capacitors_container = []
+     flag = dss_instance.Capacitors.First()
+     while flag > 0:
+         capacitor_name = dss_instance.CktElement.Name().lower()
+         buses = dss_instance.CktElement.BusNames()
+         bus1 = buses[0].split(".")[0]
+
+         capacitors_container.append(
+             {
+                 "name": capacitor_name,
+                 "type": "Capacitor",
+                 "source": bus1,
+                 "kv": dss_instance.Capacitors.kV(),
+                 "kvar": dss_instance.Capacitors.kvar(),
+             }
+         )
+
+         flag = dss_instance.Capacitors.Next()
+     return capacitors_container
+
+
+ def get_pvsystems(dss_instance: dss) -> List:
+     """Function to return list of all pv systems in opendss model.
+
+     Args:
+         dss_instance (dss): OpenDSS instance with models preloaded
+
+     Returns:
+         List: List of PVsystem metadata object
+     """
+
+     pvs_container = []
+     flag = dss_instance.PVsystems.First()
+     while flag > 0:
+         pv_name = dss_instance.CktElement.Name().lower()
+         buses = dss_instance.CktElement.BusNames()
+         bus1 = buses[0].split(".")[0]
+
+         pvs_container.append(
+             {
+                 "name": pv_name,
+                 "type": "PVSystem",
+                 "source": bus1,
+                 "rated_power": dss_instance.PVsystems.Pmpp(),
+             }
+         )
+
+         flag = dss_instance.PVsystems.Next()
+     return pvs_container
+
+
+ def get_loads(dss_instance: dss) -> List:
+     """Function to return list of all loads in opendss model.
+
+     Args:
+         dss_instance (dss): OpenDSS instance with models preloaded
+
+     Returns:
+         List: List of load metadata object
+     """
+
+     loads_container = []
+     flag = dss_instance.Loads.First()
+     while flag > 0:
+         load_name = dss_instance.CktElement.Name().lower()
+         buses = dss_instance.CktElement.BusNames()
+         bus1 = buses[0].split(".")[0]
+
+         loads_container.append(
+             {
+                 "name": load_name,
+                 "type": "Load",
+                 "source": bus1,
+                 "kw": dss_instance.Loads.kW(),
+                 "kvar": dss_instance.Loads.kvar(),
+             }
+         )
+
+         flag = dss_instance.Loads.Next()
+     return loads_container
+
+
+ def execute_dss_command(dss_instance: dss, dss_command: str) -> None:
+     """Pass the valid dss command to be executed.
+
+     Args:
+         dss_instance (dss): OpenDSS instance with models preloaded
+         dss_command (str): DSS command string to be executed
+
+     Raises:
+         OpenDSSCommandError: Raised if the OpenDSS command execution
+             runs into an error
+     """
+     error = dss_instance.run_command(dss_command)
+     if error:
+         logger.error(f"Error executing command {dss_command} >> {error}")
+         raise OpenDSSCommandError(
+             f"Error executing command {dss_command} >> {error}"
+         )
+     logger.info(f"Successfully executed the command, {dss_command}")
+
+
+ def get_bounding_box(master_file: str, buffer: float = 1000) -> List:
+     """Creates a bounding box coordinate for covering region of opendss model.
+
+     Args:
+         master_file (str): Path to master dss file
+         buffer (float): Buffer distance around distribution model in meters
+
+     Raises:
+         MultiStatePlaneError: Raised if the opendss model lies in multiple
+             state plane coordinate systems
+
+     Returns:
+         List: List of bounding box coordinates (lower_left, upper_right)
+     """
+
+     # Get bounding box for opendss network
+     # Do a basic check on the path
+     master_file = Path(master_file)
+     path_validation(master_file)
+     logger.debug(f"Attempting to read case file >> {master_file}")
+
+     # Clear memory and compile dss file
+     dss.run_command("Clear")
+     dss.Basic.ClearAll()
+     execute_dss_command(dss, f"Redirect {master_file}")
+
+     # Get all the points
+     points = []
+     for bus in dss.Circuit.AllBusNames():
+         dss.Circuit.SetActiveBus(bus)
+         points.append([dss.Bus.X(), dss.Bus.Y()])
+
+     # Create a multipoint to get bounds
+     multi_points = MultiPoint(points)
+     bounds = multi_points.bounds
+
+     # Get EPSG value for converting into coordinate reference system
+     if stateplane.identify(bounds[0], bounds[1]) != stateplane.identify(
+         bounds[2], bounds[3]
+     ):
+         raise MultiStatePlaneError(
+             "The region uses multiple state plane coordinate systems"
+         )
+
+     epsg_value = stateplane.identify(bounds[0], bounds[1])
+
+     # Project all the WGS84 coordinates into state plane
+     # coordinates; this makes sure distance is in meters
+     transformed_points = [
+         stateplane.from_lonlat(*point, epsg_value) for point in points
+     ]
+
+     # Create a buffered multipoint from the transformed coordinates
+     transformed_multipoint = MultiPoint(transformed_points).buffer(buffer)
+
+     # Get the bounds and convert back to WGS84 format
+     transformed_bounds = transformed_multipoint.bounds
+     bounds_wgs84 = stateplane.to_lonlat(
+         transformed_bounds[0], transformed_bounds[1], epsg_value
+     ) + stateplane.to_lonlat(
+         transformed_bounds[2], transformed_bounds[3], epsg_value
+     )
+
+     return bounds_wgs84
+
+
+ def extract_export_opendss_model(
+     master_file: str, output_folder_path: str
+ ) -> None:
+     """Extracts the opendss model and exports the assets into CSV files.
+
+     Args:
+         master_file (str): Path to opendss master file
+         output_folder_path (str): Folder path for exporting the models to
+     """
+
+     # Do a basic check on the path
+     master_file = Path(master_file)
+     path_validation(master_file)
+     logger.debug(f"Attempting to read case file >> {master_file}")
+
+     # Clear memory and compile dss file
+     dss.run_command("Clear")
+     dss.Basic.ClearAll()
+     execute_dss_command(dss, f"Redirect {master_file}")
+
+     # Extract the assets from the compiled model
+     transformers = get_transformers(dss)
+     line_sections = get_line_sections(dss)
+     buses = get_buses(dss)
+     capacitors = get_capacitors(dss)
+     pv_systems = get_pvsystems(dss)
+     loads = get_loads(dss)
+
+     output_folder_path = Path(output_folder_path)
+     output_folder_path.mkdir(exist_ok=True)
+     transformers_df = pd.DataFrame(transformers)
+     transformers_df.to_csv(output_folder_path / "transformers.csv")
+
+     line_sections_df = pd.DataFrame(line_sections)
+     line_sections_df.to_csv(output_folder_path / "line_sections.csv")
+
+     buses_df = pd.DataFrame(buses)
+     buses_df.to_csv(output_folder_path / "buses.csv")
+
+     capacitors_df = pd.DataFrame(capacitors)
+     capacitors_df.to_csv(output_folder_path / "capacitors.csv")
+
+     pv_systems_df = pd.DataFrame(pv_systems)
+     pv_systems_df.to_csv(output_folder_path / "pv_systems.csv")
+
+     loads_df = pd.DataFrame(loads)
+     loads_df.to_csv(output_folder_path / "loads.csv")
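A minimal sketch of driving this module end to end, assuming hypothetical input and output paths (only the two function names come from the file above):

    from erad.utils.opendss_utils import (
        extract_export_opendss_model,
        get_bounding_box,
    )

    # Hypothetical paths; point these at your own feeder model.
    MASTER_FILE = "feeder/Master.dss"
    OUTPUT_FOLDER = "exports/feeder"

    # Writes transformers.csv, line_sections.csv, buses.csv,
    # capacitors.csv, pv_systems.csv and loads.csv into OUTPUT_FOLDER.
    extract_export_opendss_model(MASTER_FILE, OUTPUT_FOLDER)

    # Bounding box (min_lon, min_lat, max_lon, max_lat) with a 1 km buffer.
    print(get_bounding_box(MASTER_FILE, buffer=1000))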
erad/utils/overpass.py ADDED
@@ -0,0 +1,76 @@
+ import json
+ from typing import List
+
+ import overpass
+ import polars
+
+ # OpenStreetMap tags used to identify sites of each type
+ GROCERY_TAGS = [
+     'shop=supermarket', 'shop=grocery', 'shop=convenience',
+     'shop=market', 'shop=healthfood', 'shop=organic'
+ ]
+
+ HOSPITAL_TAGS = [
+     'amenity=hospital', 'healthcare=hospital', 'building=hospital',
+     'amenity=clinic', 'healthcare=centre', 'healthcare=pharmacy',
+ ]
+
+
+ def export_json(json_parcels, out_path):
+     """Write the GeoJSON FeatureCollection dict to a JSON file."""
+     with open(out_path, 'w') as fp:
+         json.dump(json_parcels, fp)
+
+
+ def export_csv(json_parcels, out_path: str):
+     """Flatten the GeoJSON FeatureCollection into a CSV of point sites."""
+     csv_data = {
+         "id": [],
+         "longitude": [],
+         "latitude": [],
+     }
+
+     # Collect every property key seen across the features
+     for feature in json_parcels['features']:
+         for key in feature['properties']:
+             if key not in csv_data:
+                 csv_data[key] = []
+
+     for feature in json_parcels['features']:
+         csv_data["id"].append(feature["id"])
+         csv_data["longitude"].append(feature["geometry"]["coordinates"][0])
+         csv_data["latitude"].append(feature["geometry"]["coordinates"][1])
+
+         for key in csv_data:
+             if key not in ["id", "longitude", "latitude"]:
+                 csv_data[key].append(feature['properties'].get(key, None))
+
+     df = polars.from_dict(csv_data)
+     df.write_csv(out_path)
+
+
+ def get_sites(polygon: str, tags: List[str]) -> dict:
+     # Bounding box example: "33.6812,-118.5966,34.3407,-118.1390"
+     # interpreted by Overpass as (south, west, north, east)
+     json_parcels = {
+         "type": "FeatureCollection",
+         "features": []
+     }
+
+     # Create an instance of the Overpass API
+     api = overpass.API()
+
+     for tag in tags:
+
+         query = f"""
+         node[{tag}]({polygon});
+         out;
+         """
+
+         # Send the query and retrieve the data
+         response = api.Get(query)
+
+         json_parcels['features'].extend(response['features'])
+         print(f'Number of parcels extracted for tag {tag}: ', len(response['features']))
+
+     print('Number of parcels extracted: ', len(json_parcels['features']))
+     return json_parcels
+
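A usage sketch for these helpers; the bounding box is the example string from the comment above, and the output file names are hypothetical:

    from erad.utils.overpass import get_sites, export_csv, export_json, GROCERY_TAGS

    # Bounding box (south, west, north, east) over part of the Los Angeles area.
    BBOX = "33.6812,-118.5966,34.3407,-118.1390"

    parcels = get_sites(BBOX, GROCERY_TAGS)
    export_json(parcels, "grocery_sites.json")
    export_csv(parcels, "grocery_sites.csv")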
erad/utils/util.py ADDED
@@ -0,0 +1,178 @@
+ """Utility functions that can be used in various parts of the code."""
+
+ # standard libraries
+ import functools
+ import json
+ from pathlib import Path
+ import logging
+ import logging.config
+ import time
+ from typing import Union
+
+ # third-party libraries
+ import yaml
+ import geojson
+
+ # internal imports
+ from erad.exceptions import (
+     FeatureNotImplementedError,
+     PathDoesNotExist,
+     NotAFileError,
+     InvalidFileTypePassed,
+ )
+
+ logger = logging.getLogger(__name__)
+
+
+ def timeit(func):
+     """Decorator for timing execution of a function."""
+
+     @functools.wraps(func)
+     def wrapper(*args, **kwargs):
+         time_start = time.perf_counter()
+         logger.debug(f"Timing for {func} started")
+         ret_val = func(*args, **kwargs)
+         time_elapsed = time.perf_counter() - time_start
+         logger.debug(
+             f"Time taken to execute the function {func} "
+             f"with args {args}, kwargs {kwargs} is {time_elapsed} seconds"
+         )
+         return ret_val
+
+     return wrapper
+
+
+ def setup_logging(filename: Union[str, None] = None) -> None:
+     """Creates log directory and sets up logging via logging.yaml.
+
+     Args:
+         filename (str): Path to logging.yaml file. If not provided,
+             expects the logging.yaml file in the root of the repo.
+     """
+
+     if filename is None:
+         filename = Path(__file__).parents[2] / "logging.yaml"
+
+     logging.config.dictConfig(read_file(filename))
+
+
+ def path_validation(
+     file_path: str,
+     check_for_file: bool = False,
+     check_for_file_type: Union[str, None] = None,
+ ) -> None:
+     """Utility function for validating the path.
+
+     Args:
+         file_path (str): Path to be validated
+         check_for_file (bool): Checks for existence of file
+         check_for_file_type (Union[str, None]): Check if file is of
+             this type
+
+     Raises:
+         PathDoesNotExist: Raises if path does not exist
+         NotAFileError: Raises if file is not present
+         InvalidFileTypePassed: Raises if invalid file type is passed
+     """
+
+     file_path = Path(file_path)
+     if not file_path.exists():
+         logger.error(f"{file_path} does not exist!")
+         raise PathDoesNotExist(file_path)
+
+     if check_for_file and file_path.is_dir():
+         logger.error(f"Expected file but got folder : {file_path}")
+         raise NotAFileError(file_path)
+
+     if check_for_file_type and file_path.suffix != check_for_file_type:
+         raise InvalidFileTypePassed(file_path, check_for_file_type)
+
+     logger.debug(f"{file_path} validated successfully!")
+
+
+ @timeit
+ def read_file(file_path: str) -> dict:
+     """Utility function to read a file into a python dict.
+
+     Supports json, yaml and geojson.
+
+     Args:
+         file_path (str): Path to a file to be read.
+
+     Raises:
+         FeatureNotImplementedError: Raises if invalid file type is passed.
+
+     Returns:
+         dict: Python dict containing content of file.
+     """
+
+     file_path = Path(file_path)
+     logger.debug(f"Attempting to read {file_path}")
+
+     path_validation(file_path, check_for_file=True)
+
+     # Handle JSON file read
+     if file_path.suffix == ".json":
+         with open(file_path, "r") as f:
+             content = json.load(f)
+
+     # Handle YAML file read
+     elif file_path.suffix == ".yaml":
+         with open(file_path, "r") as f:
+             content = yaml.safe_load(f)
+
+     # Handle geojson file read
+     elif file_path.suffix == ".geojson":
+         with open(file_path, "r") as f:
+             content = geojson.load(f)
+
+     else:
+         logger.error(
+             f"Could not read {file_path}, this feature is not yet implemented"
+         )
+         raise FeatureNotImplementedError(
+             f"File of type {file_path.suffix} "
+             "is not yet implemented for reading"
+         )
+
+     logger.debug(f"{file_path} read successfully")
+     return content
+
+
+ def write_file(content: dict, file_path: str, **kwargs) -> None:
+     """Utility function to write a python dict to a file.
+
+     Supports json, yaml and geojson.
+
+     Args:
+         content (dict): Python dict content
+         file_path (str): Path to the file to be written
+         kwargs (dict): Keyword arguments passed to the
+             relevant writer.
+
+     Raises:
+         FeatureNotImplementedError: Raises if invalid file type is passed.
+     """
+     file_path = Path(file_path)
+     path_validation(file_path.parent)
+
+     # Handle JSON file write
+     if file_path.suffix == ".json":
+         with open(file_path, "w") as f:
+             json.dump(content, f, **kwargs)
+
+     # Handle YAML file write
+     elif file_path.suffix == ".yaml":
+         with open(file_path, "w") as f:
+             yaml.safe_dump(content, f, **kwargs)
+
+     # Handle geojson file write
+     elif file_path.suffix == ".geojson":
+         with open(file_path, "w") as f:
+             geojson.dump(content, f, **kwargs)
+
+     else:
+         raise FeatureNotImplementedError(
+             f"File of type {file_path.suffix} "
+             "is not yet implemented for writing"
+         )
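A short round-trip sketch for these helpers, assuming a config.yaml exists in the working directory (the file names are hypothetical):

    from erad.utils.util import read_file, write_file, timeit

    # Read a YAML config into a dict and write it back out as JSON;
    # extra kwargs such as indent are forwarded to json.dump.
    config = read_file("config.yaml")
    write_file(config, "config_copy.json", indent=2)

    # The timing decorator logs elapsed time at DEBUG level.
    @timeit
    def slow_sum(n: int) -> int:
        return sum(range(n))

    slow_sum(10_000_000)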