NREL-erad 0.0.0a0__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. erad/__init__.py +1 -0
  2. erad/constants.py +20 -20
  3. erad/cypher_queries/load_data_v1.cypher +211 -211
  4. erad/data/World_Earthquakes_1960_2016.csv +23410 -23410
  5. erad/db/assets/critical_infras.py +170 -170
  6. erad/db/assets/distribution_lines.py +101 -101
  7. erad/db/credential_model.py +20 -20
  8. erad/db/disaster_input_model.py +23 -23
  9. erad/db/inject_earthquake.py +52 -52
  10. erad/db/inject_flooding.py +53 -53
  11. erad/db/neo4j_.py +162 -162
  12. erad/db/utils.py +13 -13
  13. erad/exceptions.py +68 -68
  14. erad/metrics/check_microgrid.py +208 -208
  15. erad/metrics/metric.py +178 -178
  16. erad/programs/backup.py +61 -61
  17. erad/programs/microgrid.py +44 -44
  18. erad/scenarios/abstract_scenario.py +102 -102
  19. erad/scenarios/common.py +92 -92
  20. erad/scenarios/earthquake_scenario.py +161 -161
  21. erad/scenarios/fire_scenario.py +160 -160
  22. erad/scenarios/flood_scenario.py +493 -493
  23. erad/scenarios/flows.csv +671 -0
  24. erad/scenarios/utilities.py +75 -75
  25. erad/scenarios/wind_scenario.py +89 -89
  26. erad/utils/ditto_utils.py +252 -252
  27. erad/utils/hifld_utils.py +147 -147
  28. erad/utils/opendss_utils.py +357 -357
  29. erad/utils/overpass.py +76 -76
  30. erad/utils/util.py +178 -178
  31. erad/visualization/plot_graph.py +218 -218
  32. {NREL_erad-0.0.0a0.dist-info → nrel_erad-1.0.0.dist-info}/METADATA +65 -61
  33. nrel_erad-1.0.0.dist-info/RECORD +42 -0
  34. {NREL_erad-0.0.0a0.dist-info → nrel_erad-1.0.0.dist-info}/WHEEL +1 -2
  35. {NREL_erad-0.0.0a0.dist-info → nrel_erad-1.0.0.dist-info/licenses}/LICENSE.txt +28 -28
  36. NREL_erad-0.0.0a0.dist-info/RECORD +0 -42
  37. NREL_erad-0.0.0a0.dist-info/top_level.txt +0 -1
erad/exceptions.py CHANGED
@@ -1,68 +1,68 @@
""" Module for managing exceptions raised by ERAD package."""

# standard libraries
from pathlib import Path


class ERADBaseException(Exception):
    """All exceptions should derive from this."""


class FeatureNotImplementedError(ERADBaseException):
    """Exception raised because the specific feature requested has not been implemented."""


class PathDoesNotExist(ERADBaseException):
    """Exception raised because the expected file/folder path does not exist."""

    def __init__(self, path):
        self.message = (
            f"Expected path {path} does not exist. Please check your file path!"
        )
        super().__init__(self.message)


class NotAFileError(ERADBaseException):
    """Exception raised because a file is expected but a folder path is provided."""

    def __init__(self, path):
        self.message = f"Expected file path {path} is not a file!"
        super().__init__(self.message)


class EmptyEnvironmentVariable(ERADBaseException):
    """Exception raised because a required environment variable is empty."""


class DatabaseMissingInfo(ERADBaseException):
    """Exception raised because information required to connect to the database is missing."""


class InvalidFileTypePassed(ERADBaseException):
    """Exception raised because an invalid file type is passed."""

    def __init__(self, path, valid_type):
        self.message = f"Invalid file type of {Path(path).suffix} is passed! Please pass a valid file type of {valid_type}"
        super().__init__(self.message)


class SMARTDSInvalidInput(ERADBaseException):
    """Exception raised because invalid input is provided for SMART DS data download."""


class EmptyScenarioPolygon(ERADBaseException):
    """Exception raised because no polygons are found."""


class OpenDSSCommandError(ERADBaseException):
    """Exception raised because an OpenDSS command execution ran into an error."""


class MultiStatePlaneError(ERADBaseException):
    """Exception raised because the coordinates span more than one state plane
    coordinate system."""


class DittoException(ERADBaseException):
    """Exception raised because the application ran into an issue using Ditto."""
erad/metrics/check_microgrid.py CHANGED
@@ -1,208 +1,208 @@
""" This module contains functions and utilities to check for the
possibility of microgrid formation.
"""

from typing import List, Dict
import math
import json

from neo4j import GraphDatabase
import networkx as nx
import matplotlib.pyplot as plt


def create_directed_graph(
    driver: GraphDatabase.driver,
):
    """Creates a directed graph representation of the power network.

    For now we read all the relationships and nodes. We will need to
    filter this by feeder to avoid running into memory issues for larger
    graphs in the future.

    Args:
        driver (GraphDatabase.driver): Instance of `GraphDatabase.driver`
    """

    # Get the buses, customers, PVs, energy storage and line sections
    power_network_query = """
        MATCH (sourceNode:Bus)-[relationship:CONNECTS_TO]-(targetNode:Bus)
        return relationship{.*} , sourceNode {.*}, targetNode{.*}
    """

    # Getting relations between customers and buses
    customer_bus_network_query = """
        MATCH (sourceNode:Bus)-[relationship:CONSUMES_POWER_FROM]-(targetNode:Load)
        return relationship{.*} , sourceNode {.*}, targetNode{.*}
    """

    # Getting relations between critical infrastructures and buses
    critical_infra_bus_network_query = """
        MATCH (sourceNode:Bus)-[relationship:GETS_POWER_FROM]-(targetNode)
        return relationship{.*} , sourceNode {.*}, targetNode{.*}
    """

    # Getting relations between PVs and buses
    pv_bus_network_query = """
        MATCH (sourceNode:Bus)-[relationship:INJECTS_ACTIVE_POWER_TO]-(targetNode:Solar)
        return relationship{.*} , sourceNode {.*}, targetNode{.*}
    """

    # Getting relations between energy storage and buses
    es_bus_network_query = """
        MATCH (sourceNode:Bus)-[relationship:INJECTS_POWER]-(targetNode:EnergyStorage)
        return relationship{.*} , sourceNode {.*}, targetNode{.*}
    """

    relations = []
    for query in [
        power_network_query,
        customer_bus_network_query,
        pv_bus_network_query,
        es_bus_network_query,
        critical_infra_bus_network_query,
    ]:
        with driver.session() as session:
            result = session.read_transaction(lambda tx: tx.run(query).data())
            relations.extend(result)

    graph = nx.Graph()
    for rel in relations:
        # Unpack the relationship data
        relationship = rel["relationship"]
        source_node = rel["sourceNode"]
        target_node = rel["targetNode"]

        # Add nodes if not already present in the graph
        for node in [source_node, target_node]:
            if not graph.has_node(node["name"]):
                graph.add_node(node["name"], **node)

        # Add relationship
        graph.add_edge(source_node["name"], target_node["name"], **relationship)

    return graph.to_directed()


def node_connected_to_substation(
    substation_nodes: List[str],
    driver: GraphDatabase.driver
):
    """ Gives a list of nodes still connected to the substation. """
    directed_graph = create_directed_graph(driver)
    edges_to_be_removed = []

    for edge in directed_graph.edges():
        edge_data = directed_graph.get_edge_data(*edge)
        if "survive" in edge_data and int(edge_data["survive"]) == 0:
            edges_to_be_removed.append(edge)

    if edges_to_be_removed:
        directed_graph.remove_edges_from(edges_to_be_removed)
        wcc = nx.weakly_connected_components(directed_graph)

        for _, weak_component in enumerate(wcc):
            wcc_graph = directed_graph.subgraph(weak_component)
            nodes = wcc_graph.nodes()
            for sub_node in substation_nodes:
                if sub_node in nodes:
                    return nodes
    else:
        nodes = []
        for edge in directed_graph.edges():
            nodes.extend(edge)
        return nodes
    return []


def check_for_microgrid(driver: GraphDatabase.driver, output_json_path: str):
    """Checks for possibility of microgrid in each subgraph.

    Args:
        driver (GraphDatabase.driver): Instance of `GraphDatabase.driver`
        output_json_path (str): JSON file path for exporting the metric.
    """

    directed_graph = create_directed_graph(driver)
    node_data = {item[0]: item[1] for item in directed_graph.nodes(data=True)}

    edges_to_be_removed = []
    subgraphs = {}

    for edge in directed_graph.edges():
        edge_data = directed_graph.get_edge_data(*edge)
        if "survive" in edge_data and int(edge_data["survive"]) == 0:
            edges_to_be_removed.append(edge)

    if edges_to_be_removed:
        directed_graph.remove_edges_from(edges_to_be_removed)
    wcc = nx.weakly_connected_components(directed_graph)

    for id, weak_component in enumerate(wcc):
        # Create a networkx representation to solve a multiple-source,
        # multiple-sink max flow problem
        # https://faculty.math.illinois.edu/~mlavrov/docs/482-fall-2019/lecture27.pdf
        source_capacity, sink_capacity = 0, 0
        wcc_graph = directed_graph.subgraph(weak_component)
        wcc_graph = nx.DiGraph(wcc_graph)

        for new_node in ["infinity_source", "infinity_sink"]:
            if not wcc_graph.has_node(new_node):
                wcc_graph.add_node(new_node)

        sinks, sources = [], []
        for node in wcc_graph.nodes():
            # Connect all generation sources to the infinity source
            if "pv" in node or "es_" in node or node_data.get(node, {}).get('backup', None) == 1:
                wcc_graph.add_edge(node, "infinity_source", capacity=1e9)
                wcc_graph.add_edge("infinity_source", node, capacity=1e9)
                sources.append(node)

                cap_ = None
                if 'kw' in node_data[node]:
                    cap_ = node_data[node]["kw"]
                elif 'capacity' in node_data[node]:
                    cap_ = node_data[node]["capacity"]
                elif 'backup_capacity_kw' in node_data[node]:
                    cap_ = node_data[node]["backup_capacity_kw"]
                else:
                    raise Exception('Not a valid source!')
                source_capacity += cap_

            # Connect all loads to the infinity sink
            elif "load" in node or node_data.get(node, {}).get('survive', None) is not None:
                wcc_graph.add_edge(node, "infinity_sink", capacity=1e9)
                wcc_graph.add_edge("infinity_sink", node, capacity=1e9)
                sinks.append(node)
                sink_capacity += math.sqrt(
                    node_data[node].get('kW', 0) ** 2
                    + node_data[node].get('kvar', 0) ** 2
                ) * float(node_data[node].get("critical_load_factor", 0))

        # if id == 2:
        #     breakpoint()
        flow_value, _ = nx.maximum_flow(
            wcc_graph, "infinity_source", "infinity_sink", capacity="kva"
        )

        subgraphs[f"weak_component_{id}"] = {
            "length": len(weak_component),
            "max_flow": flow_value,
            "sources": sources,
            "sinks": sinks,
            "source_capacity": source_capacity,
            "sink_capacity": sink_capacity,
        }

    if output_json_path:
        with open(output_json_path, "w") as fpointer:
            json.dump(subgraphs, fpointer)

    return subgraphs
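`check_for_microgrid` turns the multi-source, multi-sink supply question on each weakly connected component into a standard single-pair max-flow problem by wiring every generation node to an artificial `infinity_source` and every load to an artificial `infinity_sink` (the reduction described in the lecture notes linked in the code). A self-contained sketch of that pattern, with toy node names and capacities that are not taken from the package:

```python
import networkx as nx

# Toy feeder: two sources (a PV and an energy storage unit) supply two loads
# through a single bus. Capacities are illustrative kW values.
graph = nx.DiGraph()
graph.add_edge("pv_1", "bus_1", capacity=50)
graph.add_edge("es_1", "bus_1", capacity=30)
graph.add_edge("bus_1", "load_1", capacity=40)
graph.add_edge("bus_1", "load_2", capacity=60)

# Artificial super-source and super-sink reduce the multi-source,
# multi-sink problem to an ordinary single-pair max-flow computation.
for source in ("pv_1", "es_1"):
    graph.add_edge("infinity_source", source, capacity=float("inf"))
for sink in ("load_1", "load_2"):
    graph.add_edge(sink, "infinity_sink", capacity=float("inf"))

flow_value, _ = nx.maximum_flow(graph, "infinity_source", "infinity_sink")
print(flow_value)  # 80, limited by the 50 + 30 kW of available source capacity
```

A minimal sketch of running the metric end to end follows; the Neo4j URI, credentials, output file name, and substation bus name are hypothetical placeholders:

```python
from neo4j import GraphDatabase

from erad.metrics.check_microgrid import check_for_microgrid, node_connected_to_substation

# Hypothetical connection details; point these at your own Neo4j database.
driver = GraphDatabase.driver("bolt://localhost:7687", auth=("neo4j", "password"))

try:
    # Max-flow based microgrid metric for every weakly connected component.
    subgraphs = check_for_microgrid(driver, output_json_path="microgrid_metric.json")

    # Nodes still energized from the substation after the hazard scenario.
    energized = node_connected_to_substation(["substation_bus"], driver)
    print(f"{len(subgraphs)} islands, {len(energized)} nodes still connected")
finally:
    driver.close()
```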