NREL-erad 0.0.0a0__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37) hide show
  1. erad/__init__.py +1 -0
  2. erad/constants.py +20 -20
  3. erad/cypher_queries/load_data_v1.cypher +211 -211
  4. erad/data/World_Earthquakes_1960_2016.csv +23410 -23410
  5. erad/db/assets/critical_infras.py +170 -170
  6. erad/db/assets/distribution_lines.py +101 -101
  7. erad/db/credential_model.py +20 -20
  8. erad/db/disaster_input_model.py +23 -23
  9. erad/db/inject_earthquake.py +52 -52
  10. erad/db/inject_flooding.py +53 -53
  11. erad/db/neo4j_.py +162 -162
  12. erad/db/utils.py +13 -13
  13. erad/exceptions.py +68 -68
  14. erad/metrics/check_microgrid.py +208 -208
  15. erad/metrics/metric.py +178 -178
  16. erad/programs/backup.py +61 -61
  17. erad/programs/microgrid.py +44 -44
  18. erad/scenarios/abstract_scenario.py +102 -102
  19. erad/scenarios/common.py +92 -92
  20. erad/scenarios/earthquake_scenario.py +161 -161
  21. erad/scenarios/fire_scenario.py +160 -160
  22. erad/scenarios/flood_scenario.py +493 -493
  23. erad/scenarios/flows.csv +671 -0
  24. erad/scenarios/utilities.py +75 -75
  25. erad/scenarios/wind_scenario.py +89 -89
  26. erad/utils/ditto_utils.py +252 -252
  27. erad/utils/hifld_utils.py +147 -147
  28. erad/utils/opendss_utils.py +357 -357
  29. erad/utils/overpass.py +76 -76
  30. erad/utils/util.py +178 -178
  31. erad/visualization/plot_graph.py +218 -218
  32. {NREL_erad-0.0.0a0.dist-info → nrel_erad-1.0.0.dist-info}/METADATA +65 -61
  33. nrel_erad-1.0.0.dist-info/RECORD +42 -0
  34. {NREL_erad-0.0.0a0.dist-info → nrel_erad-1.0.0.dist-info}/WHEEL +1 -2
  35. {NREL_erad-0.0.0a0.dist-info → nrel_erad-1.0.0.dist-info/licenses}/LICENSE.txt +28 -28
  36. NREL_erad-0.0.0a0.dist-info/RECORD +0 -42
  37. NREL_erad-0.0.0a0.dist-info/top_level.txt +0 -1
erad/utils/overpass.py CHANGED
@@ -1,76 +1,76 @@
1
- import json
2
- from typing import List
3
-
4
- import overpass
5
- import polars
6
-
7
- # Define your polygon coordinates as a string
8
-
9
- GROCERY_TAGS = [
10
- 'shop=supermarket', 'shop=grocery', 'shop=convenience',
11
- 'shop=market', 'shop=healthfood', 'shop=organic'
12
- ]
13
-
14
- HOSPITAL_TAGS = [
15
- 'amenity=hospital', 'healthcare=hospital', 'building=hospital',
16
- 'amenity=clinic', 'healthcare=centre','healthcare=pharmacy',
17
- ]
18
-
19
-
20
- def export_json(json_parcels, out_path):
21
- with open(out_path, 'w') as fp:
22
- json.dump(json_parcels, fp)
23
-
24
- def export_csv(json_parcels, out_path:str):
25
-
26
- csv_data = {
27
- "id": [],
28
- "longitude": [],
29
- "latitude": [],
30
- }
31
-
32
- for feature in json_parcels['features']:
33
- for key in feature['properties']:
34
- if key not in csv_data:
35
- csv_data[key] = []
36
-
37
-
38
- for feature in json_parcels['features']:
39
- csv_data["id"].append(feature["id"])
40
- csv_data["longitude"].append(feature["geometry"]["coordinates"][0])
41
- csv_data["latitude"].append(feature["geometry"]["coordinates"][1])
42
-
43
- for key in csv_data:
44
- if key not in ["id", "longitude", "latitude"]:
45
- csv_data[key].append(feature['properties'].get(key, None))
46
-
47
- df = polars.from_dict(csv_data)
48
- df.write_csv(out_path)
49
-
50
-
51
- def get_sites(polygon: str, tags: List[str] ):
52
- # polygon = "33.6812,-118.5966, 34.3407,-118.1390" latitude, longitude
53
- json_parcels = {
54
- "type": "FeatureCollection",
55
- "features": []
56
- }
57
-
58
- for tag in tags:
59
-
60
- query = f"""
61
- node[{tag}]({polygon});
62
- out;
63
- """
64
-
65
- # Create an instance of the Overpass API
66
- api = overpass.API()
67
-
68
- # Send the query and retrieve the data
69
- response = api.Get(query)
70
-
71
- json_parcels['features'].extend(response['features'])
72
- print(f'Number of parcels extracted of shop type {tag}: ', len(response['features']))
73
-
74
- print('Number of parcels extracted: ', len(json_parcels['features']))
75
- return json_parcels
76
-
1
+ import json
2
+ from typing import List
3
+
4
+ import overpass
5
+ import polars
6
+
7
# OpenStreetMap tag filters used when querying sites of interest
# via the Overpass API (see get_sites below).

# Tags identifying grocery / food-shopping locations.
GROCERY_TAGS = [
    'shop=supermarket', 'shop=grocery', 'shop=convenience',
    'shop=market', 'shop=healthfood', 'shop=organic'
]

# Tags identifying healthcare facilities.
HOSPITAL_TAGS = [
    'amenity=hospital', 'healthcare=hospital', 'building=hospital',
    'amenity=clinic', 'healthcare=centre','healthcare=pharmacy',
]
18
+
19
+
20
def export_json(json_parcels, out_path):
    """Serialize the parcel feature collection to a JSON file at *out_path*."""
    with open(out_path, 'w') as file_handle:
        json.dump(json_parcels, file_handle)
23
+
24
def export_csv(json_parcels, out_path: str):
    """Flatten a GeoJSON-style point FeatureCollection into a CSV file.

    Args:
        json_parcels (dict): FeatureCollection with point features; each
            feature needs "id", "geometry"."coordinates" and "properties".
        out_path (str): Path of the CSV file to write.
    """
    csv_data = {
        "id": [],
        "longitude": [],
        "latitude": [],
    }

    # Discover every property key up front so each column exists for
    # every row, even when individual features lack some properties.
    for feature in json_parcels['features']:
        for key in feature['properties']:
            if key not in csv_data:
                csv_data[key] = []

    for feature in json_parcels['features']:
        csv_data["id"].append(feature["id"])
        csv_data["longitude"].append(feature["geometry"]["coordinates"][0])
        csv_data["latitude"].append(feature["geometry"]["coordinates"][1])

        # Append one value per feature for every property column so all
        # columns stay equal length (missing properties become None);
        # unequal columns would make polars.from_dict raise.
        for key in csv_data:
            if key not in ("id", "longitude", "latitude"):
                csv_data[key].append(feature['properties'].get(key, None))

    df = polars.from_dict(csv_data)
    df.write_csv(out_path)
49
+
50
+
51
def get_sites(polygon: str, tags: List[str]):
    """Query OpenStreetMap via Overpass for nodes matching the given tags.

    Args:
        polygon (str): Bounding box string, e.g.
            "33.6812,-118.5966, 34.3407,-118.1390" (latitude, longitude).
        tags (List[str]): Overpass tag filters such as "shop=supermarket".

    Returns:
        dict: GeoJSON-style FeatureCollection with all matched features.
    """
    json_parcels = {
        "type": "FeatureCollection",
        "features": []
    }

    # Create the Overpass API client once; it is loop-invariant and
    # re-instantiating it per tag was wasteful.
    api = overpass.API()

    for tag in tags:
        query = f"""
        node[{tag}]({polygon});
        out;
        """

        # Send the query and retrieve the data
        response = api.Get(query)

        json_parcels['features'].extend(response['features'])
        # Message fixed: tags are not necessarily shop types (e.g. amenity=...).
        print(f'Number of parcels extracted for tag {tag}: ', len(response['features']))

    print('Number of parcels extracted: ', len(json_parcels['features']))
    return json_parcels
76
+
erad/utils/util.py CHANGED
@@ -1,178 +1,178 @@
1
- """ Utility functions that can be used in various parts of the code. """
2
-
3
- # standard libraries
4
- import json
5
- from pathlib import Path
6
- import logging
7
- import logging.config
8
- import time
9
- from typing import Union
10
-
11
- # third-party libraries
12
- import yaml
13
- import geojson
14
-
15
- # internal imports
16
- from erad.exceptions import (
17
- FeatureNotImplementedError,
18
- PathDoesNotExist,
19
- NotAFileError,
20
- InvalidFileTypePassed,
21
- )
22
-
23
- logger = logging.getLogger(__name__)
24
-
25
-
26
- def timeit(func):
27
- """Decorator for timing execution of a function."""
28
-
29
- def wrapper(*args, **kwargs):
30
- time_start = time.perf_counter()
31
- logger.debug(f"Timing for {func} started")
32
- ret_val = func(*args, **kwargs)
33
- time_elapsed = time.perf_counter() - time_start
34
- # memory_mb =resource.getrusage(resource.RUSAGE_SELF).ru_maxrss/1024.0/1024.0
35
- logger.debug(
36
- f"Time took to execute the function {func} \
37
- with args {args}, kwargs {kwargs} is {time_elapsed} seconds"
38
- )
39
- return ret_val
40
-
41
- return wrapper
42
-
43
-
44
- def setup_logging(filename: Union[str, None] = None) -> None:
45
- """Creates log directory and sets up logging via logging.yaml.
46
-
47
- Args:
48
- filename (str): Path to logging.yaml file
49
-
50
- If not providex expects log file in the root of repo.
51
- """
52
-
53
- if filename is None:
54
- filename = Path(__file__).parents[2] / "logging.yaml"
55
-
56
- logging.config.dictConfig(read_file(filename))
57
-
58
-
59
- def path_validation(
60
- file_path: str,
61
- check_for_file: bool = False,
62
- check_for_file_type: Union[str, None] = None,
63
- ) -> None:
64
- """Utility function for validating the path.
65
-
66
- Args:
67
- file_path (str): Path to be validated
68
- check_for_file (bool): Checks for existence of file
69
- check_for_file_type (Union[str, None]): Check if file is of
70
- this type
71
-
72
- Raises:
73
- PathDoesNotExist: Raises if path does not exist
74
- NotAFileError: Raises if file is not present
75
- InvalidFileTypePassed: Raises if invalid file type is passed
76
- """
77
-
78
- file_path = Path(file_path)
79
- if not file_path.exists():
80
- logger.error(f"{file_path} does not exist!")
81
- raise PathDoesNotExist(file_path)
82
-
83
- if check_for_file and file_path.is_dir():
84
- logger.error(f"Expected file but got folder : {file_path} ")
85
- raise NotAFileError(file_path)
86
-
87
- if check_for_file_type and file_path.suffix != check_for_file_type:
88
- raise InvalidFileTypePassed(file_path, check_for_file_type)
89
-
90
- logger.debug(f"{file_path} validated successfully!")
91
-
92
-
93
- @timeit
94
- def read_file(file_path: str) -> dict:
95
- """Utility function to read file into a python dict.
96
-
97
- Supports json, yaml and geojson.
98
-
99
- Args:
100
- file_path (str): Path to a file to be read.
101
-
102
- Raises:
103
- FeatureNotImplementedError: Raises if invalid file is passed.
104
-
105
- Returns:
106
- dict: Python dict containing content of file.
107
- """
108
-
109
- file_path = Path(file_path)
110
- logger.debug(f"Attempting to read {file_path}")
111
-
112
- path_validation(file_path, check_for_file=True)
113
-
114
- # Handle JSON file read
115
- if file_path.suffix == ".json":
116
- with open(file_path, "r") as f:
117
- content = json.load(f)
118
-
119
- # Handle YAML file read
120
- elif file_path.suffix == ".yaml":
121
- with open(file_path, "r") as f:
122
- content = yaml.safe_load(f)
123
-
124
- # Handle geojson file read
125
- elif file_path.suffix == ".geojson":
126
- with open(file_path, "r") as f:
127
- content = geojson.load(f)
128
-
129
- else:
130
- logger.error(
131
- f"Could not read the {file_path}, this feature is not yet implemented"
132
- )
133
- raise FeatureNotImplementedError(
134
- f"File of type {file_path.suffix} \
135
- is not yet implemented for reading purpose"
136
- )
137
-
138
- logger.debug(f"{file_path} read successfully")
139
- return content
140
-
141
-
142
- def write_file(content: dict, file_path: str, **kwargs) -> None:
143
- """Utility function to write to a file..
144
-
145
- Supports json, yaml and geojson.
146
-
147
- Args:
148
- content (dict): Python dict content
149
- file_path (str): Path to a file to be read
150
- kwargs (dict): Keyword arguments passed to
151
- relevant writer.
152
-
153
- Raises:
154
- FeatureNotImplementedError: Raises if invalid file type is passed.
155
- """
156
- file_path = Path(file_path)
157
- path_validation(file_path.parent)
158
-
159
- # Handle JSON file write
160
- if file_path.suffix == ".json":
161
- with open(file_path, "w") as f:
162
- json.dump(content, f, **kwargs)
163
-
164
- # Handle YAML file write
165
- elif file_path.suffix == ".yaml":
166
- with open(file_path, "w") as f:
167
- yaml.safe_dump(content, f, **kwargs)
168
-
169
- # Handle geojson file write
170
- elif file_path.suffix == ".geojson":
171
- with open(file_path, "w") as f:
172
- geojson.dump(content, f, **kwargs)
173
-
174
- else:
175
- raise FeatureNotImplementedError(
176
- f"File of type {file_path.suffix} \
177
- is not yet implemented for writing purpose"
178
- )
1
+ """ Utility functions that can be used in various parts of the code. """
2
+
3
# standard libraries
import functools
import json
import logging
import logging.config
import time
from pathlib import Path
from typing import Union

# third-party libraries
import geojson
import yaml
14
+
15
+ # internal imports
16
+ from erad.exceptions import (
17
+ FeatureNotImplementedError,
18
+ PathDoesNotExist,
19
+ NotAFileError,
20
+ InvalidFileTypePassed,
21
+ )
22
+
23
# Module-level logger; configure handlers via setup_logging().
logger = logging.getLogger(__name__)
24
+
25
+
26
def timeit(func):
    """Decorator for timing execution of a function.

    Logs, at DEBUG level, how long the wrapped callable took to run.
    The wrapped function's return value is passed through unchanged.
    """

    # functools.wraps preserves func's __name__/__doc__ so decorated
    # functions stay introspectable (and stack cleanly with other decorators).
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        time_start = time.perf_counter()
        logger.debug(f"Timing for {func} started")
        ret_val = func(*args, **kwargs)
        time_elapsed = time.perf_counter() - time_start
        logger.debug(
            f"Time took to execute the function {func} \
            with args {args}, kwargs {kwargs} is {time_elapsed} seconds"
        )
        return ret_val

    return wrapper
42
+
43
+
44
def setup_logging(filename: Union[str, None] = None) -> None:
    """Creates log directory and sets up logging via logging.yaml.

    Args:
        filename (str): Path to the logging.yaml file. If not provided,
            the file is expected at the root of the repository.
    """
    config_source = (
        filename
        if filename is not None
        else Path(__file__).parents[2] / "logging.yaml"
    )
    logging.config.dictConfig(read_file(config_source))
57
+
58
+
59
def path_validation(
    file_path: str,
    check_for_file: bool = False,
    check_for_file_type: Union[str, None] = None,
) -> None:
    """Utility function for validating the path.

    Args:
        file_path (str): Path to be validated.
        check_for_file (bool): When True, require the path to be a file
            rather than a directory.
        check_for_file_type (Union[str, None]): When given, require the
            file to carry this suffix (e.g. ".json").

    Raises:
        PathDoesNotExist: Raises if path does not exist.
        NotAFileError: Raises if file is not present.
        InvalidFileTypePassed: Raises if invalid file type is passed.
    """

    path = Path(file_path)

    # Guard clauses: fail fast on the first violated condition.
    if not path.exists():
        logger.error(f"{path} does not exist!")
        raise PathDoesNotExist(path)

    if check_for_file and path.is_dir():
        logger.error(f"Expected file but got folder : {path} ")
        raise NotAFileError(path)

    if check_for_file_type and path.suffix != check_for_file_type:
        raise InvalidFileTypePassed(path, check_for_file_type)

    logger.debug(f"{path} validated successfully!")
91
+
92
+
93
@timeit
def read_file(file_path: str) -> dict:
    """Utility function to read file into a python dict.

    Supports json, yaml (both ".yaml" and the common ".yml" alias)
    and geojson.

    Args:
        file_path (str): Path to a file to be read.

    Raises:
        FeatureNotImplementedError: Raises if invalid file is passed.

    Returns:
        dict: Python dict containing content of file.
    """

    file_path = Path(file_path)
    logger.debug(f"Attempting to read {file_path}")

    path_validation(file_path, check_for_file=True)

    # Suffix → loader dispatch; every loader accepts an open text file.
    loaders = {
        ".json": json.load,
        ".yaml": yaml.safe_load,
        ".yml": yaml.safe_load,  # generalization: common YAML alias
        ".geojson": geojson.load,
    }

    loader = loaders.get(file_path.suffix)
    if loader is None:
        logger.error(
            f"Could not read the {file_path}, this feature is not yet implemented"
        )
        raise FeatureNotImplementedError(
            f"File of type {file_path.suffix} \
            is not yet implemented for reading purpose"
        )

    with open(file_path, "r") as f:
        content = loader(f)

    logger.debug(f"{file_path} read successfully")
    return content
140
+
141
+
142
def write_file(content: dict, file_path: str, **kwargs) -> None:
    """Utility function to write a python dict to a file.

    Supports json, yaml (both ".yaml" and the common ".yml" alias)
    and geojson.

    Args:
        content (dict): Python dict content.
        file_path (str): Path of the file to be written.
        kwargs (dict): Keyword arguments passed to
            relevant writer.

    Raises:
        FeatureNotImplementedError: Raises if invalid file type is passed.
    """
    file_path = Path(file_path)
    # Parent folder must exist before opening the file for writing.
    path_validation(file_path.parent)

    # Suffix → writer dispatch; every writer takes (content, open_file, **kwargs).
    writers = {
        ".json": json.dump,
        ".yaml": yaml.safe_dump,
        ".yml": yaml.safe_dump,  # generalization: common YAML alias
        ".geojson": geojson.dump,
    }

    writer = writers.get(file_path.suffix)
    if writer is None:
        raise FeatureNotImplementedError(
            f"File of type {file_path.suffix} \
            is not yet implemented for writing purpose"
        )

    with open(file_path, "w") as f:
        writer(content, f, **kwargs)