NREL-erad 0.1.0__py3-none-any.whl → 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- erad/__init__.py +1 -1
- erad/constants.py +20 -68
- erad/cypher_queries/load_data_v1.cypher +212 -0
- erad/data/World_Earthquakes_1960_2016.csv +23410 -0
- erad/db/__init__.py +0 -0
- erad/db/assets/__init__.py +0 -0
- erad/db/assets/critical_infras.py +171 -0
- erad/db/assets/distribution_lines.py +101 -0
- erad/db/credential_model.py +20 -0
- erad/db/disaster_input_model.py +23 -0
- erad/db/inject_earthquake.py +52 -0
- erad/db/inject_flooding.py +53 -0
- erad/db/neo4j_.py +162 -0
- erad/db/utils.py +14 -0
- erad/exceptions.py +68 -0
- erad/metrics/__init__.py +0 -0
- erad/metrics/check_microgrid.py +208 -0
- erad/metrics/metric.py +178 -0
- erad/programs/__init__.py +0 -0
- erad/programs/backup.py +62 -0
- erad/programs/microgrid.py +45 -0
- erad/scenarios/__init__.py +0 -0
- erad/scenarios/abstract_scenario.py +103 -0
- erad/scenarios/common.py +93 -0
- erad/scenarios/earthquake_scenario.py +161 -0
- erad/scenarios/fire_scenario.py +160 -0
- erad/scenarios/flood_scenario.py +494 -0
- erad/scenarios/flows.csv +671 -0
- erad/scenarios/utilities.py +76 -0
- erad/scenarios/wind_scenario.py +89 -0
- erad/utils/__init__.py +0 -0
- erad/utils/ditto_utils.py +252 -0
- erad/utils/hifld_utils.py +147 -0
- erad/utils/opendss_utils.py +357 -0
- erad/utils/overpass.py +76 -0
- erad/utils/util.py +178 -0
- erad/visualization/__init__.py +0 -0
- erad/visualization/plot_graph.py +218 -0
- {nrel_erad-0.1.0.dist-info → nrel_erad-1.0.0.dist-info}/METADATA +39 -29
- nrel_erad-1.0.0.dist-info/RECORD +42 -0
- {nrel_erad-0.1.0.dist-info → nrel_erad-1.0.0.dist-info}/WHEEL +1 -2
- {nrel_erad-0.1.0.dist-info → nrel_erad-1.0.0.dist-info}/licenses/LICENSE.txt +28 -28
- erad/default_fragility_curves/__init__.py +0 -15
- erad/default_fragility_curves/default_fire_boundary_dist.py +0 -94
- erad/default_fragility_curves/default_flood_depth.py +0 -108
- erad/default_fragility_curves/default_flood_velocity.py +0 -101
- erad/default_fragility_curves/default_fragility_curves.py +0 -23
- erad/default_fragility_curves/default_peak_ground_acceleration.py +0 -163
- erad/default_fragility_curves/default_peak_ground_velocity.py +0 -94
- erad/default_fragility_curves/default_wind_speed.py +0 -94
- erad/enums.py +0 -40
- erad/gdm_mapping.py +0 -83
- erad/models/__init__.py +0 -1
- erad/models/asset.py +0 -287
- erad/models/asset_mapping.py +0 -20
- erad/models/fragility_curve.py +0 -116
- erad/models/hazard/__init__.py +0 -5
- erad/models/hazard/base_models.py +0 -12
- erad/models/hazard/common.py +0 -26
- erad/models/hazard/earthquake.py +0 -93
- erad/models/hazard/flood.py +0 -83
- erad/models/hazard/wild_fire.py +0 -121
- erad/models/hazard/wind.py +0 -143
- erad/models/probability.py +0 -73
- erad/probability_builder.py +0 -35
- erad/quantities.py +0 -25
- erad/runner.py +0 -122
- erad/systems/__init__.py +0 -2
- erad/systems/asset_system.py +0 -414
- erad/systems/hazard_system.py +0 -122
- nrel_erad-0.1.0.dist-info/RECORD +0 -35
- nrel_erad-0.1.0.dist-info/top_level.txt +0 -1
erad/db/__init__.py
ADDED
File without changes
|
File without changes
|
@@ -0,0 +1,171 @@
|
|
1
|
+
""" This module contains functions to update
|
2
|
+
survival probability and survive property
|
3
|
+
based on disaster event. """
|
4
|
+
|
5
|
+
# manage python imports
|
6
|
+
from datetime import datetime
|
7
|
+
import random
|
8
|
+
|
9
|
+
from neo4j import GraphDatabase
|
10
|
+
|
11
|
+
from erad.db.utils import _run_read_query
|
12
|
+
from erad.metrics.check_microgrid import node_connected_to_substation
|
13
|
+
|
14
|
+
|
15
|
+
def _update_critical_infra_based_on_grid_access_fast(
    critical_infras,
    driver: GraphDatabase.driver
):
    """A faster function to update the `survive` and `survival_probability`
    attributes based on whether a critical infrastructure has access to
    grid power.

    An infrastructure survives if it is electrically connected to a
    surviving substation or if it has backup power.

    Args:
        critical_infras (list): Node labels (e.g. ["Hospital"]) to update.
        driver (GraphDatabase.driver): Neo4J driver instance.
    """
    # Substations that are untouched by the event (survive is NULL)
    # or have survived it.
    cypher_query = """
        MATCH (n:Substation)
        WHERE n.survive is NULL OR n.survive<>0
        RETURN n.name
    """
    substations = _run_read_query(driver, cypher_query)
    substations = [item["n.name"] for item in substations]

    # All bus nodes still connected to at least one surviving substation.
    nodes = node_connected_to_substation(substations, driver)

    # Get all critical infra and check if they are in above nodes.
    for cri_infra in critical_infras:
        cypher_query = f"""
            MATCH (c:{cri_infra})
            RETURN c.longitude, c.latitude, c.name, c.backup
        """
        infras = _run_read_query(driver, cypher_query)

        # The write query depends only on the label, so build it once
        # per label rather than once per record.
        cypher_write_query = f"""
            MATCH (c:{cri_infra})
            WHERE c.name = $cname
            SET c.survive = $survive
            SET c.survival_probability = $s_prob
        """

        # One session per label instead of one per record.
        with driver.session() as session:
            for infra in infras:
                # Compute the survive flag once (was previously duplicated
                # for s_prob and survive).
                survive = (
                    1
                    if infra["c.name"] in nodes or int(infra["c.backup"]) == 1
                    else 0
                )
                # Bind loop values as defaults to avoid late-binding issues.
                session.write_transaction(
                    lambda tx, name=infra["c.name"], flag=survive: tx.run(
                        cypher_write_query,
                        cname=name,
                        s_prob=flag,
                        survive=flag,
                    )
                )
|
58
|
+
|
59
|
+
|
60
|
+
|
61
|
+
def _update_critical_infra_based_on_grid_access(
    critical_infras,
    driver: GraphDatabase.driver
):
    """A function to update the `survive` and `survival_probability`
    attributes based on whether a critical infrastructure has access to
    grid power.

    For every infrastructure node, searches for a path of surviving
    relationships from its feeding bus to any surviving substation.
    Nodes with backup power survive unconditionally.

    Args:
        critical_infras (list): Node labels (e.g. ["Hospital"]) to update.
        driver (GraphDatabase.driver): Neo4J driver instance.
    """
    # Substations that are untouched (survive is NULL) or survived.
    cypher_query = """
        MATCH (n:Substation)
        WHERE n.survive is NULL OR n.survive<>0
        RETURN n.name
    """
    substations = _run_read_query(driver, cypher_query)
    substations = [item["n.name"] for item in substations]

    for cri_infra in critical_infras:
        cypher_query = f"""
            MATCH (c:{cri_infra})
            RETURN c.longitude, c.latitude, c.name, c.backup
        """
        infras = _run_read_query(driver, cypher_query)

        for infra in infras:
            if infra["c.backup"]:
                # BUG FIX: backup power now guarantees survival even when
                # no surviving substation exists. Previously, with zero
                # substations the loop never ran and any([]) marked a
                # backed-up node as dead — inconsistent with the
                # *_fast variant of this function.
                connected = True
            else:
                connected = False
                for substation in substations:
                    _name = "{name:" + f'"{infra["c.name"]}"' + "}"
                    substation_name = "{name:" + f'"{substation}"' + "}"
                    # Shortest path from the infra's bus to this substation
                    # using only relationships that survived the event.
                    cypher_query = f"""
                        MATCH
                        (g:{cri_infra} {_name})-[:GETS_POWER_FROM]-(b:Bus),
                        (s:Substation {substation_name}),
                        p=shortestPath((b)-[:CONNECTED_TO*]-(s))
                        WHERE all(r in relationships(p) WHERE r.survive is NULL OR r.survive<>0 )
                        RETURN length(p)
                    """
                    if _run_read_query(driver, cypher_query):
                        connected = True
                        break  # one live path is sufficient

            cypher_write_query = f"""
                MATCH (c:{cri_infra})
                WHERE c.name = $cname
                SET c.survive = $survive
                SET c.survival_probability = $s_prob
            """

            with driver.session() as session:
                # Bind current values as defaults to avoid late binding.
                session.write_transaction(
                    lambda tx, name=infra["c.name"], flag=int(connected): tx.run(
                        cypher_write_query,
                        cname=name,
                        s_prob=flag,
                        survive=flag,
                    )
                )
|
122
|
+
|
123
|
+
def _update_critical_infra(
    scenario,
    critical_infras,
    driver: GraphDatabase.driver,
    timestamp: datetime
):
    """Update `survive` and `survival_probability` of critical
    infrastructure nodes from a hazard scenario.

    Args:
        scenario: Scenario object exposing `calculate_survival_probability`.
        critical_infras (list): Node labels to update.
        driver (GraphDatabase.driver): Neo4J driver instance.
        timestamp (datetime): Timestamp at which survival is evaluated.
    """
    critical_infras_items = {}
    assets = {}

    # Fetch name and coordinates for all nodes of each requested label.
    for infra in critical_infras:
        cypher_query = f"""
            MATCH (c:{infra})
            RETURN c.longitude, c.latitude, c.name
        """
        critical_infras_items[infra] = _run_read_query(driver, cypher_query)

    # Build the asset dict expected by the scenario; skip nodes with a
    # missing longitude or latitude.
    for c_infra, data in critical_infras_items.items():
        assets[c_infra] = {
            item["c.name"]: {
                "coordinates": [item["c.longitude"], item["c.latitude"]]
            } for item in data
            if all([item["c.longitude"], item["c.latitude"]])
        }

    # BUG FIX: previously passed datetime.now() here, silently ignoring
    # the caller-supplied `timestamp` parameter.
    survival_prob = scenario.calculate_survival_probability(assets, timestamp)

    # Persist the survival probability; `survive` is a Bernoulli draw.
    for infra in critical_infras:
        cypher_query = f"""
            MATCH (c:{infra})
            WHERE c.name = $cname
            SET c.survive = $survive
            SET c.survival_probability = $s_prob
        """

        with driver.session() as session:
            for cname, cdict in survival_prob[infra].items():
                # Bind loop values as defaults to avoid late binding.
                session.write_transaction(
                    lambda tx, cname=cname, cdict=cdict: tx.run(
                        cypher_query,
                        cname=cname,
                        s_prob=cdict["survival_probability"],
                        survive=int(
                            random.random() < cdict["survival_probability"]
                        ),
                    )
                )
|
@@ -0,0 +1,101 @@
|
|
1
|
+
""" This module contains a function to get the distribution
|
2
|
+
line assets and update it's survival probability and survive
|
3
|
+
attribute based on event scenario.
|
4
|
+
"""
|
5
|
+
|
6
|
+
# Manage python imports
|
7
|
+
from datetime import datetime
|
8
|
+
import random
|
9
|
+
import json
|
10
|
+
import random
|
11
|
+
|
12
|
+
random.seed(20)
|
13
|
+
|
14
|
+
from neo4j import GraphDatabase
|
15
|
+
|
16
|
+
from erad.db.utils import _run_read_query
|
17
|
+
|
18
|
+
|
19
|
+
def _create_assets(lines):
|
20
|
+
""" Takes the list of lines and convert into
|
21
|
+
asset dictionary. """
|
22
|
+
|
23
|
+
return {
|
24
|
+
line["r.name"]: {
|
25
|
+
"coordinates": [line["r.latitude"], line["r.longitude"]],
|
26
|
+
"heights_ft": float(line["r.height_m"])*3.28084,
|
27
|
+
"elevation_ft": random.randint(50, 400)
|
28
|
+
}
|
29
|
+
for line in lines
|
30
|
+
if all([line["r.longitude"], line["r.latitude"]])
|
31
|
+
}
|
32
|
+
|
33
|
+
def _update_distribution_lines_survival(
    survival_probability,
    driver: GraphDatabase.driver
):
    """Persist per-line survival probabilities and draw the binary
    `survive` flag for each line from its probability.

    Args:
        survival_probability (dict): Mapping of line name to a dict that
            may contain a `survival_probability` entry (defaults to 1).
        driver (GraphDatabase.driver): Neo4J driver instance.
    """
    cypher_query = """
        MATCH (b1:Bus)-[r:CONNECTS_TO {name: $rname}]-(b2:Bus)
        SET r.survive = $survive
        SET r.survival_probability = $s_prob
    """
    with driver.session() as session:
        for line_name, line_data in survival_probability.items():
            probability = line_data.get("survival_probability", 1)
            # Bernoulli draw: the line survives with the given probability.
            survive_flag = int(random.random() < probability)
            session.write_transaction(
                lambda tx, name=line_name, prob=probability, flag=survive_flag: tx.run(
                    cypher_query,
                    rname=name,
                    s_prob=prob,
                    survive=flag,
                )
            )
|
57
|
+
|
58
|
+
|
59
|
+
def _update_distribution_overhead_lines(scenario, driver: GraphDatabase.driver, timestamp: datetime):
    """Fetch overhead line segments and update their survival probability.

    Args:
        scenario: Scenario object exposing `calculate_survival_probability`.
        driver (GraphDatabase.driver): Neo4J driver instance.
        timestamp (datetime): Timestamp at which survival is evaluated.
    """
    # Cypher query to get overhead line segments.
    cypher_query = """
        MATCH (b1:Bus)-[r:CONNECTS_TO]-(b2:Bus)
        WHERE r.ampacity IS NOT NULL AND r.type = 'overhead'
        RETURN r.longitude, r.latitude, r.name, r.type, r.height_m
    """
    overhead_lines = _run_read_query(driver, cypher_query)

    if overhead_lines:
        assets = {"distribution_overhead_lines": _create_assets(overhead_lines)}
        survival_prob = scenario.calculate_survival_probability(assets, timestamp)
        _update_distribution_lines_survival(
            survival_prob["distribution_overhead_lines"],
            driver
        )
|
81
|
+
|
82
|
+
def _update_distribution_underground_lines(scenario, driver: GraphDatabase.driver, timestamp: datetime):
    """Fetch underground line segments and update their survival probability.

    (The previous docstring and comment incorrectly said "overhead" —
    copy-paste leftover.)

    Args:
        scenario: Scenario object exposing `calculate_survival_probability`.
        driver (GraphDatabase.driver): Neo4J driver instance.
        timestamp (datetime): Timestamp at which survival is evaluated.
    """
    # Cypher query to get underground line segments.
    cypher_query = """
        MATCH (b1:Bus)-[r:CONNECTS_TO]-(b2:Bus)
        WHERE r.ampacity IS NOT NULL AND r.type = 'underground'
        RETURN r.longitude, r.latitude, r.name, r.type, r.height_m
    """
    underground_lines = _run_read_query(driver, cypher_query)

    if underground_lines:
        assets = {"buried_lines": _create_assets(underground_lines)}
        survival_prob = scenario.calculate_survival_probability(assets, timestamp)
        _update_distribution_lines_survival(
            survival_prob["buried_lines"],
            driver
        )
|
101
|
+
|
@@ -0,0 +1,20 @@
|
|
1
|
+
""" Module for storing pydantic model for neo4j credential. """
|
2
|
+
|
3
|
+
from pydantic import BaseModel, validator
|
4
|
+
|
5
|
+
from erad.exceptions import DatabaseMissingInfo
|
6
|
+
|
7
|
+
|
8
|
+
class Neo4jConnectionModel(BaseModel):
    """Pydantic model holding Neo4J connection credentials."""

    neo4j_url: str
    neo4j_username: str
    neo4j_password: str

    @validator("neo4j_url", "neo4j_username", "neo4j_password")
    def check_string(cls, value):
        """Reject missing/empty credential fields.

        BUG FIX: the previous check was `if not all(value)`, which
        iterates the *characters* of the string. Every character of a
        non-empty string is truthy, and all("") is vacuously True, so the
        validation could never raise. `if not value` catches empty
        strings (and None) as intended.
        """
        if not value:
            raise DatabaseMissingInfo(
                "Credential and/or connection URL missing \
                to connect to Neo4J database!"
            )
        return value
|
@@ -0,0 +1,23 @@
|
|
1
|
+
""" Module for storing pydantic model for injecting scenarios. """
|
2
|
+
|
3
|
+
from typing import Dict, Optional, List
|
4
|
+
import datetime
|
5
|
+
|
6
|
+
from pydantic import BaseModel, validator, confloat
|
7
|
+
|
8
|
+
|
9
|
+
class PointEarthquake(BaseModel):
    """Pydantic model describing a point-source earthquake scenario."""

    # Epicenter coordinates in decimal degrees.
    longitude: confloat(ge=-180, le=180)
    latitude: confloat(ge=-90, le=90)
    # Optional override for the probability/fragility model.
    probability_model: Optional[Dict]
    # Time at which the earthquake occurs.
    timestamp: datetime.datetime
    # Moment magnitude, bounded to a plausible 0-10 range.
    magnitude: confloat(ge=0, le=10)
    # Focal depth (non-negative; units not stated here — TODO confirm).
    depth: confloat(ge=0)
|
16
|
+
|
17
|
+
class PolygonFlooding(BaseModel):
    """Pydantic model describing a polygon-bounded flooding scenario."""

    # List of polygons; each entry is a coordinate sequence passed to
    # shapely.Polygon by the consumer.
    polygon: List
    # Optional override for the probability/fragility model.
    probability_model: Optional[Dict]
    # Time at which the flooding occurs.
    timestamp: datetime.datetime
    # Paths to flow, level and gauge data files.
    # NOTE(review): "gaugues" looks like a typo for "gauges", but the
    # field name is part of the public interface used by callers.
    file_flow: str
    file_levels: str
    file_gaugues: str
|
@@ -0,0 +1,52 @@
|
|
1
|
+
""" Module for injecting scenario data into graph database. """
|
2
|
+
|
3
|
+
|
4
|
+
from neo4j import GraphDatabase
|
5
|
+
import shapely
|
6
|
+
|
7
|
+
from erad.db import disaster_input_model
|
8
|
+
from erad.scenarios import earthquake_scenario
|
9
|
+
from erad.db.assets.distribution_lines import (
|
10
|
+
_update_distribution_overhead_lines,
|
11
|
+
_update_distribution_underground_lines
|
12
|
+
)
|
13
|
+
|
14
|
+
from erad.db.assets.critical_infras import (
|
15
|
+
_update_critical_infra_based_on_grid_access_fast
|
16
|
+
)
|
17
|
+
|
18
|
+
|
19
|
+
def inject_point_earthquake(
    scenario_input: disaster_input_model.PointEarthquake,
    driver: GraphDatabase.driver,
    critical_infras=None
):
    """Function to update the survival probability of different assets
    due to user input based earthquake scenario.

    Args:
        scenario_input (scenario_model.PointEarthquake): Should be an instance
            of `scenario_model.PointEarthquake` data model.
        driver (GraphDatabase.driver): Instance of `GraphDatabase.driver`
            instance
        critical_infras (list | None): Optional list of critical
            infrastructure labels to update after the line updates.
            BUG FIX: was annotated `critical_infras: None` with no
            default, which (a) is a wrong type annotation and (b) forced
            every caller to pass the argument explicitly.
    """
    if not critical_infras:
        critical_infras = []

    scenario = earthquake_scenario.EarthquakeScenario(
        shapely.geometry.Point(
            scenario_input.longitude, scenario_input.latitude
        ),
        scenario_input.probability_model,
        scenario_input.timestamp,
        Magnitude=scenario_input.magnitude,
        Depth=scenario_input.depth,
    )

    # Update line survival first, then re-evaluate infra grid access.
    _update_distribution_overhead_lines(scenario, driver, scenario_input.timestamp)
    _update_distribution_underground_lines(scenario, driver, scenario_input.timestamp)
    if critical_infras:
        _update_critical_infra_based_on_grid_access_fast(critical_infras, driver)
|
51
|
+
|
52
|
+
|
@@ -0,0 +1,53 @@
|
|
1
|
+
""" Module for injecting flooding scenario data into graph database. """
|
2
|
+
|
3
|
+
|
4
|
+
from neo4j import GraphDatabase
|
5
|
+
from shapely import MultiPolygon, Polygon
|
6
|
+
|
7
|
+
from erad.db import disaster_input_model
|
8
|
+
from erad.scenarios import flood_scenario
|
9
|
+
from erad.db.assets.distribution_lines import (
|
10
|
+
_update_distribution_overhead_lines,
|
11
|
+
_update_distribution_underground_lines
|
12
|
+
)
|
13
|
+
|
14
|
+
from erad.db.assets.critical_infras import (
|
15
|
+
_update_critical_infra_based_on_grid_access_fast
|
16
|
+
)
|
17
|
+
|
18
|
+
|
19
|
+
def inject_polygon_flooding(
    scenario_input: disaster_input_model.PolygonFlooding,
    driver: GraphDatabase.driver,
    critical_infras=None
):
    """Function to update the survival probability of different assets
    due to user input based flooding scenario.

    Args:
        scenario_input (scenario_model.PolygonFlooding): Should be an instance
            of `scenario_model.PolygonFlooding` data model.
        driver (GraphDatabase.driver): Instance of `GraphDatabase.driver`
            instance
        critical_infras (list | None): Optional list of critical
            infrastructure labels to update after the line updates.
            BUG FIX: was annotated `critical_infras: None` with no
            default, which (a) is a wrong type annotation and (b) forced
            every caller to pass the argument explicitly.
    """
    if not critical_infras:
        critical_infras = []

    # NOTE(review): `FlooadScenario` and `file_gaugues` are misspelled
    # but are the names actually exported by the scenario and data-model
    # modules — do not "fix" them here without renaming there.
    scenario = flood_scenario.FlooadScenario(
        MultiPolygon(
            [Polygon(el) for el in scenario_input.polygon]
        ),
        scenario_input.probability_model,
        scenario_input.timestamp,
        file_flow=scenario_input.file_flow,
        file_levels=scenario_input.file_levels,
        file_gaugues=scenario_input.file_gaugues
    )

    # Update line survival first, then re-evaluate infra grid access.
    _update_distribution_underground_lines(scenario, driver, scenario_input.timestamp)
    _update_distribution_overhead_lines(scenario, driver, scenario_input.timestamp)
    if critical_infras:
        _update_critical_infra_based_on_grid_access_fast(critical_infras, driver)
|
52
|
+
|
53
|
+
|
erad/db/neo4j_.py
ADDED
@@ -0,0 +1,162 @@
|
|
1
|
+
"""Module contains class and utility functions to manage
|
2
|
+
interactions with Neo4J database.
|
3
|
+
"""
|
4
|
+
|
5
|
+
# standard libraries
|
6
|
+
import os
|
7
|
+
import logging
|
8
|
+
from typing import Union
|
9
|
+
|
10
|
+
|
11
|
+
# third-party libraries
|
12
|
+
from dotenv import load_dotenv
|
13
|
+
from neo4j import GraphDatabase, basic_auth
|
14
|
+
|
15
|
+
# internal imports
|
16
|
+
from erad.db.credential_model import Neo4jConnectionModel
|
17
|
+
|
18
|
+
|
19
|
+
load_dotenv()
|
20
|
+
NEO4J_URL = os.getenv("NEO4J_URL")
|
21
|
+
NEO4J_USERNAME = os.getenv("NEO4J_USERNAME")
|
22
|
+
NEO4J_PASSWORD = os.getenv("NEO4J_PASSWORD")
|
23
|
+
|
24
|
+
logger = logging.getLogger(__name__)
|
25
|
+
|
26
|
+
|
27
|
+
class Neo4J:
    """Class for managing interaction with Neo4J database.

    Connection info is taken from the environment variables NEO4J_URL,
    NEO4J_USERNAME and NEO4J_PASSWORD when they are set; otherwise the
    values passed to the constructor are used.

    Attributes:
        neo4j_url (str): URL for connecting to Neo4j
        neo4j_username (str): Username for Neo4j database
        neo4j_password (str): Password for Neo4j database
        driver (GraphDatabase.driver): Neo4J driver instance
    """

    # NOTE: large blocks of commented-out add_node / add_relationship /
    # read_query code were removed; see version control history if they
    # are ever needed again. The old docstring also documented a
    # `use_env` attribute that never existed.

    def __init__(
        self,
        neo4j_url: Union[str, None] = None,
        neo4j_username: Union[str, None] = None,
        neo4j_password: Union[str, None] = None,
    ) -> None:
        """Constructor for Neo4J class.

        Args:
            neo4j_url (str): URL for connecting to Neo4j
            neo4j_username (str): Username for Neo4j database
            neo4j_password (str): Password for Neo4j database

        NOTE(review): environment variables take precedence over
        explicitly passed arguments — confirm this is the intended
        priority (callers cannot override the env).
        """
        self.neo4j_url = NEO4J_URL if NEO4J_URL else neo4j_url
        self.neo4j_username = (
            NEO4J_USERNAME if NEO4J_USERNAME else neo4j_username
        )
        self.neo4j_password = (
            NEO4J_PASSWORD if NEO4J_PASSWORD else neo4j_password
        )

        # Validates that none of the three values is missing.
        connection = Neo4jConnectionModel(
            neo4j_url=self.neo4j_url,
            neo4j_username=self.neo4j_username,
            neo4j_password=self.neo4j_password,
        )

        self.driver = GraphDatabase.driver(
            connection.neo4j_url,
            auth=basic_auth(
                connection.neo4j_username, connection.neo4j_password
            ),
        )

        logger.debug(
            f"Connected to {connection.neo4j_url} database successfully"
        )

    @staticmethod
    def rename_labels(label):
        """Replace characters invalid in Neo4J labels with '__'."""
        invalid_chars = ["-", ":", "(", ")", "."]
        for invalid_char in invalid_chars:
            if invalid_char in label:
                label = label.replace(invalid_char, "__")
        return label

    def close_driver(self):
        """Method to close the driver."""
        self.driver.close()
|
erad/db/utils.py
ADDED
@@ -0,0 +1,14 @@
|
|
1
|
+
""" This module contains utility functions
|
2
|
+
utilized throughout the modules and subpackages
|
3
|
+
contained within db subpackage."""
|
4
|
+
|
5
|
+
from neo4j import GraphDatabase
|
6
|
+
|
7
|
+
def _run_read_query(driver: GraphDatabase.driver, cypher_query: str):
    """Execute a Cypher read query and return the resulting records.

    Args:
        driver (GraphDatabase.driver): Neo4J driver instance.
        cypher_query (str): Cypher query to execute.

    Returns:
        list: Query result records as dictionaries.
    """

    def _read(tx):
        # Materialize records before the transaction closes.
        return tx.run(cypher_query).data()

    with driver.session() as session:
        return session.read_transaction(_read)
|
erad/exceptions.py
ADDED
@@ -0,0 +1,68 @@
|
|
1
|
+
""" Module for managing exceptions raised by ERAD package."""
|
2
|
+
|
3
|
+
# standard libraries
|
4
|
+
from pathlib import Path
|
5
|
+
|
6
|
+
|
7
|
+
class ERADBaseException(Exception):
    """All exceptions raised by ERAD should derive from this."""


class FeatureNotImplementedError(ERADBaseException):
    """Exception raised because specific feature requested has not been implemented."""


class PathDoesNotExist(ERADBaseException):
    """Exception raised because expected file/folder path does not exist."""

    def __init__(self, path):
        # Typo fixes: "bacause" in the docstring, "you file path" in the
        # user-facing message.
        self.message = (
            f"Expected path {path} does not exist. please check your file path!"
        )
        super().__init__(self.message)


class NotAFileError(ERADBaseException):
    """Exception raised because file is expected but folder path is provided."""

    def __init__(self, path):
        self.message = f"Expected file path {path} is not a file!"
        super().__init__(self.message)


class EmptyEnvironmentVariable(ERADBaseException):
    """Exception raised because environment variable required is empty."""


class DatabaseMissingInfo(ERADBaseException):
    """Exception raised because information required to connect to database is missing."""


class InvalidFileTypePassed(ERADBaseException):
    """Exception raised because invalid file type is passed."""

    def __init__(self, path, valid_type):
        self.message = f"Invalid file type of {Path(path).suffix} is passed! Please pass valid file type of {valid_type}"
        super().__init__(self.message)


class SMARTDSInvalidInput(ERADBaseException):
    """Exception raised because invalid input is provided for SMART DS data download."""


class EmptyScenarioPolygon(ERADBaseException):
    """Exception raised because no polygons are found."""


class OpenDSSCommandError(ERADBaseException):
    """Exception raised because opendss command execution ran into an error."""


class MultiStatePlaneError(ERADBaseException):
    """Exception raised because the coordinates are in more than one state
    plane coordinate system."""


class DittoException(ERADBaseException):
    """Exception raised because application ran into an issue using Ditto."""
|
erad/metrics/__init__.py
ADDED
File without changes
|