NREL-erad 0.0.0a0__py3-none-any.whl → 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- erad/__init__.py +1 -0
- erad/constants.py +20 -20
- erad/cypher_queries/load_data_v1.cypher +211 -211
- erad/data/World_Earthquakes_1960_2016.csv +23410 -23410
- erad/db/assets/critical_infras.py +170 -170
- erad/db/assets/distribution_lines.py +101 -101
- erad/db/credential_model.py +20 -20
- erad/db/disaster_input_model.py +23 -23
- erad/db/inject_earthquake.py +52 -52
- erad/db/inject_flooding.py +53 -53
- erad/db/neo4j_.py +162 -162
- erad/db/utils.py +13 -13
- erad/exceptions.py +68 -68
- erad/metrics/check_microgrid.py +208 -208
- erad/metrics/metric.py +178 -178
- erad/programs/backup.py +61 -61
- erad/programs/microgrid.py +44 -44
- erad/scenarios/abstract_scenario.py +102 -102
- erad/scenarios/common.py +92 -92
- erad/scenarios/earthquake_scenario.py +161 -161
- erad/scenarios/fire_scenario.py +160 -160
- erad/scenarios/flood_scenario.py +493 -493
- erad/scenarios/flows.csv +671 -0
- erad/scenarios/utilities.py +75 -75
- erad/scenarios/wind_scenario.py +89 -89
- erad/utils/ditto_utils.py +252 -252
- erad/utils/hifld_utils.py +147 -147
- erad/utils/opendss_utils.py +357 -357
- erad/utils/overpass.py +76 -76
- erad/utils/util.py +178 -178
- erad/visualization/plot_graph.py +218 -218
- {NREL_erad-0.0.0a0.dist-info → nrel_erad-1.0.0.dist-info}/METADATA +65 -61
- nrel_erad-1.0.0.dist-info/RECORD +42 -0
- {NREL_erad-0.0.0a0.dist-info → nrel_erad-1.0.0.dist-info}/WHEEL +1 -2
- {NREL_erad-0.0.0a0.dist-info → nrel_erad-1.0.0.dist-info/licenses}/LICENSE.txt +28 -28
- NREL_erad-0.0.0a0.dist-info/RECORD +0 -42
- NREL_erad-0.0.0a0.dist-info/top_level.txt +0 -1
erad/db/assets/critical_infras.py
CHANGED
@@ -1,171 +1,171 @@
""" This module contains functions to update
survival probability and survive property
based on disaster event. """

# manage python imports
from datetime import datetime
import random

from neo4j import GraphDatabase

from erad.db.utils import _run_read_query
from erad.metrics.check_microgrid import node_connected_to_substation


def _update_critical_infra_based_on_grid_access_fast(
    critical_infras,
    driver: GraphDatabase.driver
):
    """ A faster function to update survive attribute
    based on whether critical infra has access to grid power."""
    # Let's get substation
    cypher_query = f"""
        MATCH (n:Substation)
        WHERE n.survive is NULL OR n.survive<>0
        RETURN n.name
    """
    substations = _run_read_query(driver, cypher_query)
    substations = [item["n.name"] for item in substations]

    nodes = node_connected_to_substation(substations, driver)

    # Get all critical infra and check if they are in above nodes
    for cri_infra in critical_infras:
        cypher_query = f"""
            MATCH (c:{cri_infra})
            RETURN c.longitude, c.latitude, c.name, c.backup
        """
        infras = _run_read_query(driver, cypher_query)

        for infra in infras:

            cypher_write_query = f"""
                MATCH (c:{cri_infra})
                WHERE c.name = $cname
                SET c.survive = $survive
                SET c.survival_probability = $s_prob
            """

            with driver.session() as session:
                session.write_transaction(
                    lambda tx: tx.run(
                        cypher_write_query,
                        cname=infra['c.name'],
                        s_prob=1 if infra['c.name'] in nodes or int(infra['c.backup']) == 1 else 0,
                        survive=1 if infra['c.name'] in nodes or int(infra['c.backup']) == 1 else 0
                    )
                )


def _update_critical_infra_based_on_grid_access(
    critical_infras,
    driver: GraphDatabase.driver
):
    """ A function to update survive attribute
    based on whether critical infra has access to grid power."""

    # Let's get substation
    cypher_query = f"""
        MATCH (n:Substation)
        WHERE n.survive is NULL OR n.survive<>0
        RETURN n.name
    """
    substations = _run_read_query(driver, cypher_query)
    substations = [item["n.name"] for item in substations]

    for cri_infra in critical_infras:
        cypher_query = f"""
            MATCH (c:{cri_infra})
            RETURN c.longitude, c.latitude, c.name, c.backup
        """
        infras = _run_read_query(driver, cypher_query)

        for infra in infras:
            connected = []
            for substation in substations:

                _name = "{name:" + f'"{infra["c.name"]}"' + "}"
                substation_name = "{name:" + f'"{substation}"' + "}"
                cypher_query = f"""
                    MATCH
                    (g:{cri_infra} {_name})-[:GETS_POWER_FROM]-(b:Bus),
                    (s:Substation {substation_name}),
                    p=shortestPath((b)-[:CONNECTED_TO*]-(s))
                    WHERE all(r in relationships(p) WHERE r.survive is NULL OR r.survive<>0 )
                    RETURN length(p)
                """

                if not infra['c.backup']:
                    path = _run_read_query(driver, cypher_query)
                    connected.append(1 if path else 0)
                else:
                    connected.append(1)

            cypher_write_query = f"""
                MATCH (c:{cri_infra})
                WHERE c.name = $cname
                SET c.survive = $survive
                SET c.survival_probability = $s_prob
            """

            with driver.session() as session:
                session.write_transaction(
                    lambda tx: tx.run(
                        cypher_write_query,
                        cname=infra['c.name'],
                        s_prob=int(any(connected)),
                        survive=int(any(connected))
                    )
                )


def _update_critical_infra(
    scenario,
    critical_infras,
    driver: GraphDatabase.driver,
    timestamp: datetime
):

    critical_infras_items = {}
    assets = {}

    for infra in critical_infras:
        cypher_query = f"""
            MATCH (c:{infra})
            RETURN c.longitude, c.latitude, c.name
        """
        critical_infras_items[infra] = _run_read_query(driver, cypher_query)

    for c_infra, data in critical_infras_items.items():
        assets[c_infra] = {
            item["c.name"]: {
                "coordinates": [item["c.longitude"], item["c.latitude"]]
            } for item in data
            if all([item["c.longitude"], item["c.latitude"]])
        }

    survival_prob = scenario.calculate_survival_probability(assets, datetime.now())

    # update the survival probability
    for infra in critical_infras:
        cypher_query = f"""
            MATCH (c:{infra})
            WHERE c.name = $cname
            SET c.survive = $survive
            SET c.survival_probability = $s_prob
        """

        with driver.session() as session:
            for cname, cdict in survival_prob[infra].items():
                session.write_transaction(
                    lambda tx: tx.run(
                        cypher_query,
                        cname=cname,
                        s_prob=cdict["survival_probability"],
                        survive=int(
                            random.random() < cdict["survival_probability"]
                        ),
                    )
                )
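These helpers live in erad.db.assets.critical_infras and expect an open Neo4j driver plus a list of node labels for the critical infrastructure of interest. A minimal usage sketch follows; the connection URI, credentials, and labels such as Hospital are placeholders, not values taken from the package.

# Sketch only: URI, credentials, and node labels below are hypothetical.
from neo4j import GraphDatabase

from erad.db.assets.critical_infras import (
    _update_critical_infra_based_on_grid_access_fast,
)

driver = GraphDatabase.driver("bolt://localhost:7687", auth=("neo4j", "password"))
try:
    # Marks each matching node as survive=1 if it is still connected to an
    # energized substation or has backup power, and survive=0 otherwise.
    _update_critical_infra_based_on_grid_access_fast(
        critical_infras=["Hospital", "Grocery"],
        driver=driver,
    )
finally:
    driver.close()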
erad/db/assets/distribution_lines.py
CHANGED
@@ -1,101 +1,101 @@
""" This module contains a function to get the distribution
line assets and update it's survival probability and survive
attribute based on event scenario.
"""

# Manage python imports
from datetime import datetime
import random
import json
import random

random.seed(20)

from neo4j import GraphDatabase

from erad.db.utils import _run_read_query


def _create_assets(lines):
    """ Takes the list of lines and convert into
    asset dictionary. """

    return {
        line["r.name"]: {
            "coordinates": [line["r.latitude"], line["r.longitude"]],
            "heights_ft": float(line["r.height_m"]) * 3.28084,
            "elevation_ft": random.randint(50, 400)
        }
        for line in lines
        if all([line["r.longitude"], line["r.latitude"]])
    }


def _update_distribution_lines_survival(
    survival_probability,
    driver: GraphDatabase.driver
):
    """ Takes survival probabilty and update survival
    probability and survive property. """

    cypher_query = """
        MATCH (b1:Bus)-[r:CONNECTS_TO {name: $rname}]-(b2:Bus)
        SET r.survive = $survive
        SET r.survival_probability = $s_prob
    """
    with driver.session() as session:
        for rname, rdict in survival_probability.items():
            session.write_transaction(
                lambda tx: tx.run(
                    cypher_query,
                    rname=rname,
                    s_prob=rdict.get("survival_probability", 1),
                    survive=int(random.random() < rdict.get("survival_probability", 1)),
                )
            )


def _update_distribution_overhead_lines(scenario, driver: GraphDatabase.driver, timestamp: datetime):
    """Get overhead lines and update the survival probability."""

    # Cypher query to get overhead line segments
    cypher_query = """
        MATCH (b1:Bus)-[r:CONNECTS_TO]-(b2:Bus)
        WHERE r.ampacity IS NOT NULL AND r.type = 'overhead'
        RETURN r.longitude, r.latitude, r.name, r.type, r.height_m
    """
    overhead_lines = _run_read_query(driver, cypher_query)

    if overhead_lines:
        assets = {"distribution_overhead_lines": _create_assets(overhead_lines)}

        survival_prob = scenario.calculate_survival_probability(assets, timestamp)

        # with open('surv.json', "w") as fp:
        #     json.dump(survival_prob, fp)
        _update_distribution_lines_survival(
            survival_prob['distribution_overhead_lines'],
            driver
        )


def _update_distribution_underground_lines(scenario, driver: GraphDatabase.driver, timestamp: datetime):
    """Get overhead lines and update the survival probability."""

    # Cypher query to get overhead line segments
    cypher_query = """
        MATCH (b1:Bus)-[r:CONNECTS_TO]-(b2:Bus)
        WHERE r.ampacity IS NOT NULL AND r.type = 'underground'
        RETURN r.longitude, r.latitude, r.name, r.type, r.height_m
    """
    underground_lines = _run_read_query(driver, cypher_query)

    if underground_lines:
        assets = {"buried_lines": _create_assets(underground_lines)}

        survival_prob = scenario.calculate_survival_probability(assets, timestamp)
        _update_distribution_lines_survival(
            survival_prob['buried_lines'],
            driver
        )
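The two updaters above only need a scenario object exposing calculate_survival_probability(assets, timestamp) and an open driver. A self-contained sketch, with a stub scenario standing in for the real classes in erad.scenarios and placeholder connection details:

# Sketch only: StubScenario and the connection details are placeholders; in the
# package, scenario objects come from erad.scenarios (earthquake, flood, fire, wind).
from datetime import datetime
from neo4j import GraphDatabase

from erad.db.assets.distribution_lines import (
    _update_distribution_overhead_lines,
    _update_distribution_underground_lines,
)


class StubScenario:
    """Duck-typed stand-in exposing the one method the updaters call."""

    def calculate_survival_probability(self, assets, timestamp):
        # Give every asset a survival probability of 1 (nothing fails).
        return {
            category: {name: {"survival_probability": 1.0} for name in items}
            for category, items in assets.items()
        }


driver = GraphDatabase.driver("bolt://localhost:7687", auth=("neo4j", "password"))
try:
    now = datetime.now()
    _update_distribution_overhead_lines(StubScenario(), driver, now)
    _update_distribution_underground_lines(StubScenario(), driver, now)
finally:
    driver.close()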
erad/db/credential_model.py
CHANGED
@@ -1,20 +1,20 @@
""" Module for storing pydantic model for neo4j credential. """

from pydantic import BaseModel, validator

from erad.exceptions import DatabaseMissingInfo


class Neo4jConnectionModel(BaseModel):
    neo4j_url: str
    neo4j_username: str
    neo4j_password: str

    @validator("neo4j_url", "neo4j_username", "neo4j_password")
    def check_string(cls, value):
        if not all(value):
            raise DatabaseMissingInfo(
                "Credential and/or connection URL missing \
                to connect to Neo4J database!"
            )
        return value
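Constructing the model is plain pydantic; a short sketch with placeholder values (the check_string validator is intended to reject missing connection details by raising DatabaseMissingInfo):

# Sketch only: placeholder connection values.
from erad.db.credential_model import Neo4jConnectionModel

creds = Neo4jConnectionModel(
    neo4j_url="neo4j://localhost:7687",
    neo4j_username="neo4j",
    neo4j_password="secret",
)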
erad/db/disaster_input_model.py
CHANGED
@@ -1,23 +1,23 @@
""" Module for storing pydantic model for injecting scenarios. """

from typing import Dict, Optional, List
import datetime

from pydantic import BaseModel, validator, confloat


class PointEarthquake(BaseModel):
    longitude: confloat(ge=-180, le=180)
    latitude: confloat(ge=-90, le=90)
    probability_model: Optional[Dict]
    timestamp: datetime.datetime
    magnitude: confloat(ge=0, le=10)
    depth: confloat(ge=0)

class PolygonFlooding(BaseModel):
    polygon: List
    probability_model: Optional[Dict]
    timestamp: datetime.datetime
    file_flow: str
    file_levels: str
    file_gaugues: str
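Both scenario-input models are ordinary pydantic models; a sketch with illustrative values (the 1989 Loma Prieta figures below are only an example, not package data):

# Sketch only: illustrative values.
import datetime

from erad.db.disaster_input_model import PointEarthquake

quake = PointEarthquake(
    longitude=-121.88,
    latitude=37.04,
    probability_model=None,
    timestamp=datetime.datetime(1989, 10, 17, 17, 4),
    magnitude=6.9,
    depth=19.0,
)
# The confloat bounds reject out-of-range inputs, e.g. magnitude=12 fails validation.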