ObjectNat: objectnat-0.2.6-py3-none-any.whl → objectnat-1.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- objectnat/_api.py +6 -8
- objectnat/_config.py +0 -24
- objectnat/_version.py +1 -1
- objectnat/methods/coverage_zones/__init__.py +2 -0
- objectnat/methods/coverage_zones/graph_coverage.py +118 -0
- objectnat/methods/coverage_zones/radius_voronoi.py +45 -0
- objectnat/methods/isochrones/__init__.py +1 -0
- objectnat/methods/isochrones/isochrone_utils.py +130 -0
- objectnat/methods/isochrones/isochrones.py +325 -0
- objectnat/methods/noise/__init__.py +3 -0
- objectnat/methods/noise/noise_exceptions.py +14 -0
- objectnat/methods/noise/noise_init_data.py +10 -0
- objectnat/methods/noise/noise_reduce.py +155 -0
- objectnat/methods/noise/noise_sim.py +423 -0
- objectnat/methods/point_clustering/__init__.py +1 -0
- objectnat/methods/{cluster_points_in_polygons.py → point_clustering/cluster_points_in_polygons.py} +22 -28
- objectnat/methods/provision/__init__.py +1 -0
- objectnat/methods/provision/provision.py +10 -7
- objectnat/methods/provision/provision_exceptions.py +4 -4
- objectnat/methods/provision/provision_model.py +21 -20
- objectnat/methods/utils/__init__.py +0 -0
- objectnat/methods/utils/geom_utils.py +130 -0
- objectnat/methods/utils/graph_utils.py +127 -0
- objectnat/methods/utils/math_utils.py +32 -0
- objectnat/methods/visibility/__init__.py +6 -0
- objectnat/methods/{visibility_analysis.py → visibility/visibility_analysis.py} +222 -243
- objectnat-1.0.0.dist-info/METADATA +143 -0
- objectnat-1.0.0.dist-info/RECORD +32 -0
- objectnat/methods/balanced_buildings.py +0 -69
- objectnat/methods/coverage_zones.py +0 -90
- objectnat/methods/isochrones.py +0 -143
- objectnat/methods/living_buildings_osm.py +0 -168
- objectnat-0.2.6.dist-info/METADATA +0 -113
- objectnat-0.2.6.dist-info/RECORD +0 -19
- {objectnat-0.2.6.dist-info → objectnat-1.0.0.dist-info}/LICENSE.txt +0 -0
- {objectnat-0.2.6.dist-info → objectnat-1.0.0.dist-info}/WHEEL +0 -0
objectnat/methods/provision/provision_model.py

@@ -19,11 +19,10 @@ class Provision:
     Represents the logic for city provision calculations using a gravity or linear model.
 
     Args:
-        services (
-        demanded_buildings (
-        adjacency_matrix (
+        services (gpd.GeoDataFrame): GeoDataFrame representing the services available in the city.
+        demanded_buildings (gpd.GeoDataFrame): GeoDataFrame representing the buildings with demands for services.
+        adjacency_matrix (pd.DataFrame): DataFrame representing the adjacency matrix between buildings.
         threshold (int): Threshold value for the provision calculations.
-        calculation_type (str, optional): Type of calculation ("gravity" or "linear"). Defaults to "gravity".
 
     Returns:
         Provision: The CityProvision object.
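The 1.0.0 docstring pins a concrete type to every argument, and the `calculation_type` option is dropped from the documented signature. A minimal construction sketch based on this docstring (the import path and the input files are assumptions for illustration, not confirmed by the diff):

```python
import geopandas as gpd
import pandas as pd

from objectnat.methods.provision.provision_model import Provision  # assumed import path

buildings = gpd.read_file("buildings.geojson")   # demanded_buildings, carrying a demand column
services = gpd.read_file("services.geojson")     # carrying a capacity column
matrix = pd.read_csv("matrix.csv", index_col=0)  # adjacency matrix between services and buildings

prov = Provision(
    services=services,
    demanded_buildings=buildings,
    adjacency_matrix=matrix,
    threshold=10,  # units depend on how the matrix was built, e.g. minutes or meters
)
```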
@@ -48,7 +47,7 @@ class Provision:
             adjacency_matrix.copy(), demanded_buildings, services
         ).copy()
         self.threshold = threshold
-        self.
+        self.services.to_crs(self.demanded_buildings.crs, inplace=True)
         pandarallel.initialize(progress_bar=False, verbose=0, use_memory_fs=config.pandarallel_use_file_system)
 
     @staticmethod
@@ -65,12 +64,6 @@ class Provision:
         v["capacity_left"] = v["capacity"]
         return v
 
-    @staticmethod
-    def check_crs(demanded_buildings, services):
-        assert (
-            demanded_buildings.crs == services.crs
-        ), f"\nThe CRS in the provided geodataframes are different.\nBuildings CRS:{demanded_buildings.crs}\nServices CRS:{services.crs} \n"
-
     @staticmethod
     def delete_useless_matrix_rows_columns(adjacency_matrix, demanded_buildings, services):
         adjacency_matrix.index = adjacency_matrix.index.astype(int)
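Together with the constructor change above (old line 51 → new line 50), this removes the hard `check_crs` assertion: 1.0.0 reprojects `services` to the buildings' CRS in place instead of failing. A caller-side sketch of the behavioral difference (hypothetical data, same assumed constructor as above):

```python
buildings_wgs = buildings.to_crs(4326)  # geographic CRS
services_utm = services.to_crs(32636)   # metric CRS

# 0.2.6: raised AssertionError ("The CRS in the provided geodataframes are different...").
# 1.0.0: services are reprojected to buildings.crs inside the constructor, so this should run.
prov = Provision(
    services=services_utm,
    demanded_buildings=buildings_wgs,
    adjacency_matrix=matrix,
    threshold=10,
)
```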
@@ -266,7 +259,15 @@ def _calc_links(
     flat_matrix = destination_matrix.transpose().apply(lambda x: subfunc(x[x > 0]), result_type="reduce")
 
     distribution_links = gpd.GeoDataFrame(data=[item for sublist in list(flat_matrix) for item in sublist])
-
+    if distribution_links.empty:
+        logger.warning(
+            "Unable to create distribution links - no demand could be matched with service locations. "
+            "This is likely because either: "
+            "1) The demand column in buildings contains zero values, or "
+            "2) The capacity column in services contains zero values, or "
+            "3) There are no service locations within the maximum allowed distance"
+        )
+        return distribution_links
     distribution_links["distance"] = distribution_links.apply(
         lambda x: distance_matrix.loc[x["service_index"]][x["building_index"]],
         axis=1,
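With this early return, an all-zero demand column, an all-zero capacity column, or an unreachable service set now yields an empty links frame plus a logged explanation rather than surfacing later. A pre-flight check mirroring the three causes the warning names (a sketch; `demand` and `capacity` are the column names the warning itself refers to):

```python
assert (buildings["demand"] > 0).any(), "demand column contains only zeros"
assert (services["capacity"] > 0).any(), "capacity column contains only zeros"
assert (matrix.values <= threshold).any(), "no service within the maximum allowed distance"
```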
@@ -290,11 +291,11 @@ def _additional_options(
     normative_distance,
 ):
     buildings["avg_dist"] = 0
-    buildings["
-    buildings["
+    buildings["supplied_demands_within"] = 0
+    buildings["supplied_demands_without"] = 0
     services["carried_capacity_within"] = 0
     services["carried_capacity_without"] = 0
-    for
+    for _, loc in destination_matrix.iterrows():
         distances_all = matrix.loc[loc.name]
         distances = distances_all[distances_all <= normative_distance]
         s = matrix.loc[loc.name] <= normative_distance
@@ -308,8 +309,8 @@ def _additional_options(
             .add(distances_all.multiply(without, fill_value=0), fill_value=0)
         )
         buildings["demand_left"] = buildings["demand_left"].sub(within.add(without, fill_value=0), fill_value=0)
-        buildings["
-        buildings["
+        buildings["supplied_demands_within"] = buildings["supplied_demands_within"].add(within, fill_value=0)
+        buildings["supplied_demands_without"] = buildings["supplied_demands_without"].add(without, fill_value=0)
 
         services.at[loc.name, "capacity_left"] = (
             services.at[loc.name, "capacity_left"] - within.add(without, fill_value=0).sum()
@@ -327,10 +328,10 @@ def _additional_options(
     buildings["avg_dist"] = buildings.apply(
         lambda x: np.nan if (x["demand"] == x["demand_left"]) else round(x["avg_dist"], 2), axis=1
     )
-    buildings["
+    buildings["provision_value"] = (buildings["supplied_demands_within"] / buildings["demand"]).astype(float).round(2)
     services["service_load"] = (services["capacity"] - services["capacity_left"]).astype(np.uint16)
-    buildings["
-    buildings["
+    buildings["supplied_demands_within"] = buildings["supplied_demands_within"].astype(np.uint16)
+    buildings["supplied_demands_without"] = buildings["supplied_demands_without"].astype(np.uint16)
     services["carried_capacity_within"] = services["carried_capacity_within"].astype(np.uint16)
     services["carried_capacity_without"] = services["carried_capacity_without"].astype(np.uint16)
     logger.debug("Done adding additional options")
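The renamed `supplied_demands_*` columns feed the new `provision_value` metric: demand served within the normative distance divided by total demand. A worked check of the exact expression from the hunk above:

```python
import pandas as pd

buildings = pd.DataFrame({"supplied_demands_within": [60, 0], "demand": [100, 50]})
print((buildings["supplied_demands_within"] / buildings["demand"]).astype(float).round(2))
# 0    0.6   <- 60 of 100 demand units served within the threshold
# 1    0.0   <- nothing served
```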
File without changes
objectnat/methods/utils/geom_utils.py (new file)

@@ -0,0 +1,130 @@
+import math
+
+import geopandas as gpd
+from shapely import LineString, MultiPolygon, Point, Polygon
+from shapely.ops import polygonize, unary_union
+
+from objectnat import config
+
+logger = config.logger
+
+
+def polygons_to_multilinestring(geom: Polygon | MultiPolygon):
+    # pylint: disable-next=redefined-outer-name,reimported,import-outside-toplevel
+    from shapely import LineString, MultiLineString, MultiPolygon
+
+    def convert_polygon(polygon: Polygon):
+        lines = []
+        exterior = LineString(polygon.exterior.coords)
+        lines.append(exterior)
+        interior = [LineString(p.coords) for p in polygon.interiors]
+        lines = lines + interior
+        return lines
+
+    def convert_multipolygon(polygon: MultiPolygon):
+        return MultiLineString(sum([convert_polygon(p) for p in polygon.geoms], []))
+
+    if geom.geom_type == "Polygon":
+        return MultiLineString(convert_polygon(geom))
+    return convert_multipolygon(geom)
+
+
+def explode_linestring(geometry: LineString) -> list[LineString]:
+    """A function to return all segments of a linestring as a list of linestrings"""
+    coords_ext = geometry.coords  # Create a list of all line node coordinates
+    result = [LineString(part) for part in zip(coords_ext, coords_ext[1:])]
+    return result
+
+
+def point_side_of_line(line: LineString, point: Point) -> int:
+    """A positive indicates the left-hand side a negative indicates the right-hand side"""
+    x1, y1 = line.coords[0]
+    x2, y2 = line.coords[-1]
+    x, y = point.coords[0]
+    cross_product = (x2 - x1) * (y - y1) - (y2 - y1) * (x - x1)
+    if cross_product > 0:
+        return 1
+    return -1
+
+
+def get_point_from_a_thorough_b(a: Point, b: Point, dist):
+    """
+    Func to get Point from point a thorough point b on dist
+    """
+    direction = math.atan2(b.y - a.y, b.x - a.x)
+    c_x = a.x + dist * math.cos(direction)
+    c_y = a.y + dist * math.sin(direction)
+    return Point(c_x, c_y)
+
+
+def gdf_to_circle_zones_from_point(
+    gdf: gpd.GeoDataFrame, point_from: Point, zone_radius, resolution=4, explode_multigeom=True
+) -> gpd.GeoDataFrame:
+    """n_segments = 4*resolution, e.g. if resolution = 4 that means there will be 16 segments"""
+    crs = gdf.crs
+    buffer = point_from.buffer(zone_radius, resolution=resolution)
+    gdf_unary = gdf.clip(buffer, keep_geom_type=True).union_all()
+    gdf_geometry = (
+        gpd.GeoDataFrame(geometry=[gdf_unary], crs=crs)
+        .explode(index_parts=True)
+        .geometry.apply(polygons_to_multilinestring)
+        .union_all()
+    )
+    zones_lines = [LineString([Point(coords1), Point(point_from)]) for coords1 in buffer.exterior.coords[:-1]]
+    if explode_multigeom:
+        return (
+            gpd.GeoDataFrame(geometry=list(polygonize(unary_union([gdf_geometry] + zones_lines))), crs=crs)
+            .clip(gdf_unary, keep_geom_type=True)
+            .explode(index_parts=False)
+        )
+    return gpd.GeoDataFrame(geometry=list(polygonize(unary_union([gdf_geometry] + zones_lines))), crs=crs).clip(
+        gdf_unary, keep_geom_type=True
+    )
+
+
+def remove_inner_geom(polygon: Polygon | MultiPolygon):
+    """function to get rid of inner polygons"""
+    if isinstance(polygon, Polygon):
+        return Polygon(polygon.exterior.coords)
+    if isinstance(polygon, MultiPolygon):
+        polys = []
+        for poly in polygon.geoms:
+            polys.append(Polygon(poly.exterior.coords))
+        return MultiPolygon(polys)
+    else:
+        return Polygon()
+
+
+def combine_geometry(gdf: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
+    """
+    Combine geometry of intersecting layers into a single GeoDataFrame.
+
+    Parameters
+    ----------
+    gdf: gpd.GeoDataFrame
+        A GeoPandas GeoDataFrame
+
+    Returns
+    -------
+    gpd.GeoDataFrame
+        The combined GeoDataFrame with aggregated in lists columns.
+
+    Examples
+    --------
+    >>> gdf = gpd.read_file('path_to_your_file.geojson')
+    >>> result = combine_geometry(gdf)
+    """
+
+    crs = gdf.crs
+
+    enclosures = gpd.GeoDataFrame(
+        geometry=list(polygonize(gdf["geometry"].apply(polygons_to_multilinestring).union_all())), crs=crs
+    )
+    enclosures_points = enclosures.copy()
+    enclosures_points.geometry = enclosures.representative_point()
+    joined = gpd.sjoin(enclosures_points, gdf, how="inner", predicate="within").reset_index()
+    cols = joined.columns.tolist()
+    cols.remove("geometry")
+    joined = joined.groupby("index").agg({column: list for column in cols})
+    joined["geometry"] = enclosures
+    joined = gpd.GeoDataFrame(joined, geometry="geometry", crs=crs)
+    return joined
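A usage sketch for the new ring helpers (Shapely 2.x, which the `union_all` calls above already assume):

```python
from shapely import Polygon

from objectnat.methods.utils.geom_utils import explode_linestring, polygons_to_multilinestring

# A 4x4 square with a 1x1 hole: both rings come back as lines.
square = Polygon([(0, 0), (4, 0), (4, 4), (0, 4)], holes=[[(1, 1), (2, 1), (2, 2), (1, 2)]])
mls = polygons_to_multilinestring(square)  # MultiLineString with exterior + interior rings
segments = [seg for line in mls.geoms for seg in explode_linestring(line)]
print(len(segments))  # 8: four exterior edges plus four hole edges
```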
objectnat/methods/utils/graph_utils.py (new file)

@@ -0,0 +1,127 @@
+import geopandas as gpd
+import networkx as nx
+import numpy as np
+import pandas as pd
+from loguru import logger
+from scipy.spatial import KDTree
+from shapely import LineString
+from shapely.geometry.point import Point
+
+
+def _edges_to_gdf(graph: nx.Graph, crs) -> gpd.GeoDataFrame:
+    """
+    Converts nx graph to gpd.GeoDataFrame as edges.
+    """
+    graph_df = pd.DataFrame(list(graph.edges(data=True)), columns=["u", "v", "data"])
+    edge_data_expanded = pd.json_normalize(graph_df["data"])
+    graph_df = pd.concat([graph_df.drop(columns=["data"]), edge_data_expanded], axis=1)
+    graph_df = gpd.GeoDataFrame(graph_df, geometry="geometry", crs=crs).set_index(["u", "v"])
+    graph_df["geometry"] = graph_df["geometry"].fillna(LineString())
+    return graph_df
+
+
+def _nodes_to_gdf(graph: nx.Graph, crs: int) -> gpd.GeoDataFrame:
+    """
+    Converts nx graph to gpd.GeoDataFrame as nodes.
+    """
+
+    ind, data = zip(*graph.nodes(data=True))
+    node_geoms = (Point(d["x"], d["y"]) for d in data)
+    gdf_nodes = gpd.GeoDataFrame(data, index=ind, crs=crs, geometry=list(node_geoms))
+
+    return gdf_nodes
+
+
+def _restore_edges_geom(nodes_gdf, edges_gdf) -> gpd.GeoDataFrame:
+    edges_wout_geom = edges_gdf[edges_gdf["geometry"].is_empty].reset_index()
+    edges_wout_geom["geometry"] = [
+        LineString((s, e))
+        for s, e in zip(
+            nodes_gdf.loc[edges_wout_geom["u"], "geometry"], nodes_gdf.loc[edges_wout_geom["v"], "geometry"]
+        )
+    ]
+    edges_wout_geom.set_index(["u", "v"], inplace=True)
+    edges_gdf.update(edges_wout_geom)
+    return edges_gdf
+
+
+def graph_to_gdf(
+    graph: nx.MultiDiGraph | nx.Graph | nx.DiGraph, edges: bool = True, nodes: bool = True, restore_edge_geom=False
+) -> gpd.GeoDataFrame | tuple[gpd.GeoDataFrame, gpd.GeoDataFrame]:
+    """
+    Converts nx graph to gpd.GeoDataFrame as edges.
+
+    Parameters
+    ----------
+    graph : nx.MultiDiGraph
+        The graph to convert.
+    edges: bool, default to True
+        Keep edges in GoeDataFrame.
+    nodes: bool, default to True
+        Keep nodes in GoeDataFrame.
+    restore_edge_geom: bool, default to False
+        if True, will try to restore edge geometry from nodes.
+    Returns
+    -------
+    gpd.GeoDataFrame | tuple[gpd.GeoDataFrame, gpd.GeoDataFrame]
+        Graph representation in GeoDataFrame format, either nodes or nodes,or tuple of them nodes,edges.
+    """
+    try:
+        crs = graph.graph["crs"]
+    except KeyError as exc:
+        raise ValueError("Graph does not have crs attribute") from exc
+    if not edges and not nodes:
+        raise AttributeError("Neither edges or nodes were selected")
+    if nodes and not edges:
+        nodes_gdf = _nodes_to_gdf(graph, crs)
+        return nodes_gdf
+    if not nodes and edges:
+        edges_gdf = _edges_to_gdf(graph, crs)
+        if restore_edge_geom:
+            nodes_gdf = _nodes_to_gdf(graph, crs)
+            edges_gdf = _restore_edges_geom(nodes_gdf, edges_gdf)
+        return edges_gdf
+
+    nodes_gdf = _nodes_to_gdf(graph, crs)
+    edges_gdf = _edges_to_gdf(graph, crs)
+    if restore_edge_geom:
+        edges_gdf = _restore_edges_geom(nodes_gdf, edges_gdf)
+    return nodes_gdf, edges_gdf
+
+
+def get_closest_nodes_from_gdf(gdf: gpd.GeoDataFrame, nx_graph: nx.Graph) -> tuple:
+    nodes_with_data = list(nx_graph.nodes(data=True))
+    try:
+        coordinates = np.array([(data["x"], data["y"]) for node, data in nodes_with_data])
+    except KeyError as e:
+        raise ValueError("Graph does not have coordinates attribute") from e
+    tree = KDTree(coordinates)
+    target_coord = [(p.x, p.y) for p in gdf.representative_point()]
+    distances, indices = tree.query(target_coord)
+    nearest_nodes = [nodes_with_data[idx][0] for idx in indices]
+    return distances, nearest_nodes
+
+
+def remove_weakly_connected_nodes(graph: nx.DiGraph) -> nx.DiGraph:
+    graph = graph.copy()
+
+    weakly_connected_components = list(nx.weakly_connected_components(graph))
+    if len(weakly_connected_components) > 1:
+        logger.warning(
+            f"Found {len(weakly_connected_components)} disconnected subgraphs in the network. "
+            f"These are isolated groups of nodes with no connections between them. "
+            f"Size of components: {[len(c) for c in weakly_connected_components]}"
+        )
+
+    all_scc = sorted(nx.strongly_connected_components(graph), key=len)
+    nodes_to_del = set().union(*all_scc[:-1])
+
+    if nodes_to_del:
+        logger.warning(
+            f"Removing {len(nodes_to_del)} nodes that form {len(all_scc) - 1} trap components. "
+            f"These are groups where you can enter but can't exit (or vice versa). "
+            f"Keeping the largest strongly connected component ({len(all_scc[-1])} nodes)."
+        )
+        graph.remove_nodes_from(nodes_to_del)
+
+    return graph
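A round-trip sketch for `graph_to_gdf`; the `crs` graph attribute and the `x`/`y` node attributes are hard requirements the helpers themselves check:

```python
import networkx as nx
from shapely import LineString

from objectnat.methods.utils.graph_utils import graph_to_gdf

g = nx.MultiDiGraph(crs=32636)  # graph_to_gdf raises ValueError without a crs attribute
g.add_node(1, x=0.0, y=0.0)
g.add_node(2, x=100.0, y=0.0)
g.add_edge(1, 2, geometry=LineString([(0, 0), (100, 0)]), length_meter=100.0)

nodes, edges = graph_to_gdf(g)  # default: (nodes_gdf, edges_gdf)
print(edges.loc[(1, 2), "length_meter"])  # 100.0
```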
objectnat/methods/utils/math_utils.py (new file)

@@ -0,0 +1,32 @@
+import numpy as np
+
+
+def min_max_normalization(data, new_min=0, new_max=1):
+    """
+    Min-max normalization for a given array of data.
+
+    Parameters
+    ----------
+    data: numpy.ndarray
+        Input data to be normalized.
+    new_min: float, optional
+        New minimum value for normalization. Defaults to 0.
+    new_max: float, optional
+        New maximum value for normalization. Defaults to 1.
+
+    Returns
+    -------
+    numpy.ndarray
+        Normalized data.
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> data = np.array([1, 2, 3, 4, 5])
+    >>> normalized_data = min_max_normalization(data, new_min=0, new_max=1)
+    """
+
+    min_value = np.min(data)
+    max_value = np.max(data)
+    normalized_data = (data - min_value) / (max_value - min_value) * (new_max - new_min) + new_min
+    return normalized_data
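A quick numeric check of the formula, scaling into a non-default range:

```python
import numpy as np

from objectnat.methods.utils.math_utils import min_max_normalization

data = np.array([1, 2, 3, 4, 5])
print(min_max_normalization(data, new_min=0, new_max=10))
# [ 0.   2.5  5.   7.5 10. ]
```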