ObjectNat 0.2.1-py3-none-any.whl → 0.2.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of ObjectNat might be problematic.

objectnat/_config.py CHANGED
@@ -44,6 +44,7 @@ class Config:
         self.enable_tqdm_bar = enable_tqdm_bar
         self.logger = logger
         self.iduedu_config = iduedu_config
+        self.pandarallel_use_file_system = False
 
     def change_logger_lvl(self, lvl: Literal["TRACE", "DEBUG", "INFO", "WARN", "ERROR"]):
         self.logger.remove()
@@ -62,6 +63,9 @@ class Config:
         self.enable_tqdm_bar = enable
         self.iduedu_config.set_enable_tqdm(enable)
 
+    def set_pandarallel_use_file_system(self, enable: bool):
+        self.pandarallel_use_file_system = enable
+
 
 config = Config()
 config.change_logger_lvl("INFO")
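
The new flag defaults to False and is forwarded to pandarallel by the provision model (see provision_model.py below). A minimal usage sketch, assuming only the API shown in this diff:

>>> from objectnat import config
>>> config.set_pandarallel_use_file_system(True)  # forwarded as use_memory_fs= to pandarallel.initialize() in provision_model.py
>>> config.pandarallel_use_file_system
True
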
objectnat/_version.py CHANGED
@@ -1 +1 @@
-VERSION = "0.2.1"
+VERSION = "0.2.3"
objectnat/methods/cluster_points_in_polygons.py CHANGED
@@ -21,10 +21,10 @@ def _get_cluster(services_select, min_dist, min_point, method):
     return services_select
 
 
-def _get_service_ratio(loc):
+def _get_service_ratio(loc, service_code_column):
     all_services = loc.shape[0]
-    loc["service_code"] = loc["service_code"].astype(str)
-    services_count = loc.groupby("service_code").size()
+    loc[service_code_column] = loc[service_code_column].astype(str)
+    services_count = loc.groupby(service_code_column).size()
     return (services_count / all_services).round(2)
 
 
@@ -33,6 +33,7 @@ def get_clusters_polygon(
     min_dist: float | int = 100,
     min_point: int = 5,
     method: Literal["DBSCAN", "HDBSCAN"] = "HDBSCAN",
+    service_code_column: str = "service_code",
 ) -> tuple[gpd.GeoDataFrame, gpd.GeoDataFrame]:
     """
     Generate cluster polygons for given points based on a specified minimum distance and minimum points per cluster.
@@ -49,7 +50,8 @@ def get_clusters_polygon(
         Minimum number of points required to form a cluster. Defaults to 5.
     method : Literal["DBSCAN", "HDBSCAN"], optional
         The clustering method to use. Must be either "DBSCAN" or "HDBSCAN". Defaults to "HDBSCAN".
-
+    service_code_column : str, optional
+        Column, containing service type for relative ratio in clasterized polygons. Defaults to "service_code".
     Returns
     -------
     tuple[gpd.GeoDataFrame, gpd.GeoDataFrame]
@@ -72,17 +74,19 @@ def get_clusters_polygon(
 
     services_select = _get_cluster(points, min_dist, min_point, method)
 
-    if "service_code" not in points.columns:
+    if service_code_column not in points.columns:
         logger.warning(
-            "No 'service_code' column in provided GeoDataFrame, cluster polygons will be without relative ratio."
+            f"No {service_code_column} column in provided GeoDataFrame, cluster polygons will be without relative ratio"
         )
-        points["service_code"] = 1
+        points[service_code_column] = service_code_column
 
     services_normal = services_select[services_select["cluster"] != -1]
     services_outlier = services_select[services_select["cluster"] == -1]
 
     if len(services_normal) > 0:
-        cluster_service = services_normal.groupby("cluster", group_keys=True).apply(_get_service_ratio)
+        cluster_service = services_normal.groupby("cluster", group_keys=True).apply(
+            _get_service_ratio, service_code_column=service_code_column
+        )
         if isinstance(cluster_service, pd.Series):
             cluster_service = cluster_service.unstack(level=1, fill_value=0)
 
@@ -98,7 +102,9 @@ def get_clusters_polygon(
         new_clusters = list(range(clusters_outlier, clusters_outlier + len(services_outlier)))
         services_outlier.loc[:, "cluster"] = new_clusters
 
-        cluster_service = services_outlier.groupby("cluster", group_keys=True).apply(_get_service_ratio)
+        cluster_service = services_outlier.groupby("cluster", group_keys=True).apply(
+            _get_service_ratio, service_code_column=service_code_column
+        )
         if isinstance(cluster_service, pd.Series):
             cluster_service = cluster_service.unstack(level=1, fill_value=0)
 
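
A hedged usage sketch of the new service_code_column parameter; the input layer, its "category" column, and the unpacked result names are placeholders, while the keyword arguments match the signature in the hunk above:

>>> import geopandas as gpd
>>> from objectnat.methods.cluster_points_in_polygons import get_clusters_polygon
>>> my_services = gpd.read_file("my_services.geojson")  # hypothetical point layer with a "category" column
>>> cluster_polygons, outlier_points = get_clusters_polygon(
...     my_services, min_dist=100, min_point=5, method="HDBSCAN", service_code_column="category"
... )
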
objectnat/methods/isochrones.py CHANGED
@@ -45,16 +45,19 @@ def get_accessibility_isochrones(
     -------
     tuple[gpd.GeoDataFrame, gpd.GeoDataFrame | None, gpd.GeoDataFrame | None]
         A tuple containing:
-        - isochrones : GeoDataFrame with the calculated isochrone geometries.
-        - public transport stops (if applicable) : GeoDataFrame with public transport stops within the isochrone, or None if not applicable.
-        - public transport routes (if applicable) : GeoDataFrame with public transport routes within the isochrone, or None if not applicable.
+        - isochrones :
+            GeoDataFrame with the calculated isochrone geometries.
+        - public transport stops (if applicable) :
+            GeoDataFrame with public transport stops within the isochrone, or None if not applicable.
+        - public transport routes (if applicable) :
+            GeoDataFrame with public transport routes within the isochrone, or None if not applicable.
 
     Examples
     --------
     >>> from iduedu import get_intermodal_graph
     >>> graph = get_intermodal_graph(polygon=my_territory_polygon)
     >>> points = gpd.GeoDataFrame(geometry=[Point(30.33, 59.95)], crs=4326).to_crs(graph.graph['crs'])
-    >>> isochrones, pt_stops, pt_routes = get_accessibility_isochrones(points, weight_value=15, weight_type="time_min", graph_nx=my_graph)
+    >>> isochrones, pt_stops, pt_routes = get_accessibility_isochrones(points,weight_value=15, weight_type="time_min", graph_nx=my_graph)
 
     """
 
objectnat/methods/living_buildings_osm.py CHANGED
@@ -31,7 +31,7 @@ def eval_is_living(row: gpd.GeoSeries):
     >>> buildings = download_buildings(osm_territory_id=421007)
     >>> buildings['is_living'] = buildings.apply(eval_is_living, axis=1)
     """
-    if row["building"] in (
+    return row["building"] in (
         "apartments",
         "house",
         "residential",
@@ -41,10 +41,7 @@ def eval_is_living(row: gpd.GeoSeries):
         "bungalow",
         "cabin",
         "farm",
-    ):
-        return True
-    else:
-        return False
+    )
 
 
 def eval_population(source: gpd.GeoDataFrame, population_column: str, area_per_person: float = 33):
@@ -111,7 +108,8 @@ def download_buildings(
     area_per_person: float = 33,
 ) -> gpd.GeoDataFrame | None:
     """
-    Download building geometries and evaluate 'is_living' and 'population' attributes for a specified territory from OpenStreetMap.
+    Download building geometries and evaluate 'is_living' and 'population'
+    attributes for a specified territory from OpenStreetMap.
 
     Parameters
     ----------
@@ -146,27 +144,27 @@ def download_buildings(
         (buildings["geometry"].geom_type == "Polygon") | (buildings["geometry"].geom_type == "MultiPolygon")
     ]
     if buildings.empty:
-        logger.warning(f"There are no buildings in the specified territory. Output GeoDataFrame is empty.")
+        logger.warning("There are no buildings in the specified territory. Output GeoDataFrame is empty.")
         return buildings
-    else:
-        buildings[is_living_column] = buildings.apply(eval_is_living, axis=1)
-        buildings = eval_population(buildings, population_column, area_per_person)
-        buildings.reset_index(drop=True, inplace=True)
-        logger.debug("Done!")
-        return buildings[
-            [
-                "building",
-                "addr:street",
-                "addr:housenumber",
-                "amenity",
-                "area",
-                "name",
-                "building:levels",
-                "leisure",
-                "design:year",
-                is_living_column,
-                "building:levels_is_real",
-                population_column,
-                "geometry",
-            ]
+
+    buildings[is_living_column] = buildings.apply(eval_is_living, axis=1)
+    buildings = eval_population(buildings, population_column, area_per_person)
+    buildings.reset_index(drop=True, inplace=True)
+    logger.debug("Done!")
+    return buildings[
+        [
+            "building",
+            "addr:street",
+            "addr:housenumber",
+            "amenity",
+            "area",
+            "name",
+            "building:levels",
+            "leisure",
+            "design:year",
+            is_living_column,
+            "building:levels_is_real",
+            population_column,
+            "geometry",
         ]
+    ]
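
A hedged sketch of the refactored download_buildings; the territory id is reused from the docstring example above, and the default 'is_living'/'population' column names are assumed from the summary line of the docstring:

>>> from objectnat.methods.living_buildings_osm import download_buildings
>>> buildings = download_buildings(osm_territory_id=421007)
>>> buildings[["building", "is_living", "population", "geometry"]].head()  # columns kept by the selection above
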
objectnat/methods/provision/provision.py CHANGED
@@ -4,7 +4,7 @@ import geopandas as gpd
 import numpy as np
 import pandas as pd
 
-from .city_provision import CityProvision
+from .provision_model import Provision
 
 
 def get_service_provision(
@@ -12,6 +12,8 @@ def get_service_provision(
     adjacency_matrix: pd.DataFrame,
     services: gpd.GeoDataFrame,
     threshold: int,
+    buildings_demand_column: str = "demand",
+    services_capacity_column: str = "capacity",
 ) -> Tuple[gpd.GeoDataFrame, gpd.GeoDataFrame, gpd.GeoDataFrame]:
     """Calculate load from buildings with demands on the given services using the distances matrix between them.
 
@@ -20,11 +22,19 @@ def get_service_provision(
         adjacency_matrix (pd.DataFrame): DataFrame representing the adjacency matrix
         buildings (gpd.GeoDataFrame): GeoDataFrame of demanded buildings
         threshold (int): Threshold value
+        buildings_demand_column (str): column name of buildings demands
+        services_capacity_column (str): column name of services capacity
     Returns:
         Tuple[gpd.GeoDataFrame, gpd.GeoDataFrame, gpd.GeoDataFrame]: Tuple of GeoDataFrames representing provision
         buildings, provision services, and provision links
     """
-    provision_buildings, provision_services, provision_links = CityProvision(
+    buildings = buildings.copy()
+    services = services.copy()
+    adjacency_matrix = adjacency_matrix.copy()
+    buildings["demand"] = buildings[buildings_demand_column]
+    services["capacity"] = services[services_capacity_column]
+
+    provision_buildings, provision_services, provision_links = Provision(
         services=services,
         demanded_buildings=buildings,
         adjacency_matrix=adjacency_matrix,
@@ -40,6 +50,9 @@ def clip_provision(
     assert (
         selection_zone.crs == buildings.crs == services.crs == links.crs
     ), f"CRS mismatch: buildings_crs:{buildings.crs}, links_crs:{links.crs} , services_crs:{services.crs}, selection_zone_crs:{selection_zone.crs}"
+    buildings = buildings.copy()
+    links = links.copy()
+    services = services.copy()
 
     s = buildings.intersects(selection_zone.unary_union)
     buildings = buildings.loc[s[s].index]
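
A hedged call sketch for the new column-mapping arguments of get_service_provision; the three inputs are placeholder GeoDataFrames/DataFrame and the keyword names follow the Args section above:

>>> from objectnat.methods.provision.provision import get_service_provision
>>> prov_buildings, prov_services, prov_links = get_service_provision(
...     buildings=my_buildings,                   # placeholder GeoDataFrame with a "population" column
...     adjacency_matrix=my_matrix,               # placeholder pd.DataFrame of travel costs, buildings x services
...     services=my_services,                     # placeholder GeoDataFrame with a "max_capacity" column
...     threshold=10,
...     buildings_demand_column="population",     # copied into the internal "demand" column
...     services_capacity_column="max_capacity",  # copied into the internal "capacity" column
... )
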
objectnat/methods/provision/city_provision.py → objectnat/methods/provision/provision_model.py RENAMED
@@ -5,17 +5,15 @@ import geopandas as gpd
 import numpy as np
 import pandas as pd
 from shapely import LineString
-
+from pandarallel import pandarallel
 from objectnat import config
 
 from .provision_exceptions import CapacityKeyError, DemandKeyError
 
 logger = config.logger
 
-from pandarallel import pandarallel
-
 
-class CityProvision:
+class Provision:
     """
     Represents the logic for city provision calculations using a gravity or linear model.
 
@@ -27,7 +25,7 @@ class CityProvision:
         calculation_type (str, optional): Type of calculation ("gravity" or "linear"). Defaults to "gravity".
 
    Returns:
-        Provision: The CityProvision object.
+        Provision: The CityProvision object.
 
    Raises: KeyError: If the 'demand' column is missing in the provided 'demanded_buildings' GeoDataFrame,
    or if the 'capacity' column is missing in the provided 'services' GeoDataFrame. ValueError: If the 'capacity'
@@ -45,16 +43,16 @@ class CityProvision:
     ):
         self.services = self.ensure_services(services)
         self.demanded_buildings = self.ensure_buildings(demanded_buildings)
-        self.adjacency_matrix = self.delete_useless_matrix_rows(adjacency_matrix.copy(), demanded_buildings, services)
+        self.adjacency_matrix = self.delete_useless_matrix_rows_columns(adjacency_matrix, demanded_buildings, services)
         self.threshold = threshold
         self.check_crs(self.demanded_buildings, self.services)
-        pandarallel.initialize(progress_bar=False, verbose=0)
+        print(config.pandarallel_use_file_system)
+        pandarallel.initialize(progress_bar=False, verbose=0, use_memory_fs=config.pandarallel_use_file_system)
 
     @staticmethod
     def ensure_buildings(v: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
         if "demand" not in v.columns:
             raise DemandKeyError
-        v = v.copy()
         v["demand_left"] = v["demand"]
         return v
 
@@ -62,7 +60,6 @@ class CityProvision:
     def ensure_services(v: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
         if "capacity" not in v.columns:
             raise CapacityKeyError
-        v = v.copy()
         v["capacity_left"] = v["capacity"]
         return v
 
@@ -73,7 +70,7 @@ class CityProvision:
         ), f"\nThe CRS in the provided geodataframes are different.\nBuildings CRS:{demanded_buildings.crs}\nServices CRS:{services.crs} \n"
 
     @staticmethod
-    def delete_useless_matrix_rows(adjacency_matrix, demanded_buildings, services):
+    def delete_useless_matrix_rows_columns(adjacency_matrix, demanded_buildings, services):
         adjacency_matrix.index = adjacency_matrix.index.astype(int)
 
         builds_indexes = set(demanded_buildings.index.astype(int).tolist())
@@ -142,12 +139,11 @@ class CityProvision:
         def apply_function_based_on_size(df, func, axis, threshold=500):
             if len(df) > threshold:
                 return df.parallel_apply(func, axis=axis)
-            else:
-                return df.apply(func, axis=axis)
+            return df.apply(func, axis=axis)
 
         def _calculate_flows_y(loc):
-            import numpy as np
-            import pandas as pd
+            import numpy as np  # pylint: disable=redefined-outer-name,reimported,import-outside-toplevel
+            import pandas as pd  # pylint: disable=redefined-outer-name,reimported,import-outside-toplevel
 
             c = services_table.loc[loc.name]["capacity_left"]
             p = 1 / loc / loc
@@ -165,8 +161,8 @@ class CityProvision:
             return choice
 
         def _balance_flows_to_demands(loc):
-            import numpy as np
-            import pandas as pd
+            import numpy as np  # pylint: disable=redefined-outer-name,reimported,import-outside-toplevel
+            import pandas as pd  # pylint: disable=redefined-outer-name,reimported,import-outside-toplevel
 
             d = houses_table.loc[loc.name]["demand_left"]
             loc = loc[loc > 0]
@@ -228,6 +224,11 @@ def _calc_links(
     buildings: gpd.GeoDataFrame,
     distance_matrix: pd.DataFrame,
 ):
+    buildings_ = buildings.copy()
+    services_ = services.copy()
+    buildings_.geometry = buildings_.representative_point()
+    services_.geometry = services_.representative_point()
+
     def subfunc(loc):
         try:
             return [
@@ -244,15 +245,11 @@ def _calc_links(
     def subfunc_geom(loc):
         return LineString(
             (
-                buildings_["geometry"][loc["building_index"]],
-                services_["geometry"][loc["service_index"]],
+                buildings_.geometry[loc["building_index"]],
+                services_.geometry[loc["service_index"]],
             )
         )
 
-    buildings_ = buildings.copy()
-    services_ = services.copy()
-    buildings_.geometry = buildings_.representative_point()
-    services_.geometry = services_.representative_point()
 
     flat_matrix = destination_matrix.transpose().apply(lambda x: subfunc(x[x > 0]), result_type="reduce")
 
     distribution_links = gpd.GeoDataFrame(data=[item for sublist in list(flat_matrix) for item in sublist])
objectnat-0.2.1.dist-info/METADATA → objectnat-0.2.3.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ObjectNat
-Version: 0.2.1
+Version: 0.2.3
 Summary: ObjectNat is an open-source library created for geospatial analysis created by IDU team
 License: BSD-3-Clause
 Author: DDonnyy
@@ -14,6 +14,7 @@ Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: geopandas (>=0.14.3,<0.15.0)
 Requires-Dist: iduedu (>=0.1.4,<0.2.0)
 Requires-Dist: joblib (>=1.4.2,<2.0.0)
+Requires-Dist: jupyter (>=1.1.1,<2.0.0)
 Requires-Dist: networkit (>=11.0,<12.0)
 Requires-Dist: networkx (>=3.2.1,<4.0.0)
 Requires-Dist: numpy (>=1.23.5,<2.0.0)
@@ -44,7 +45,7 @@ Description-Content-Type: text/markdown
 
 ## Features and how to use
 
-1. **[City graph from OSM (IduEdu)](https://github.com/DDonnyy/IduEdu/blob/main/examples/get_any_graph.ipynb)** - Functions to assemble a road, pedestrian,
+1. **[City graph from OSM (IduEdu)](./examples/get_any_graph.ipynb)** - Functions to assemble a road, pedestrian,
 and public transport graph from OpenStreetMap (OSM) and creating Intermodal graph.
 
 <img src="https://github.com/user-attachments/assets/8dc98da9-8462-415e-8cc8-bdfca788e206" alt="IntermodalGraph" height="250">
objectnat-0.2.1.dist-info/RECORD → objectnat-0.2.3.dist-info/RECORD
@@ -1,21 +1,21 @@
 objectnat/__init__.py,sha256=OnDvrLPLEeYIE_9qOVYgMc-PkRzIqShtGxirguEXiRU,260
 objectnat/_api.py,sha256=oiEO2P-tv6AMDdNoT8d0BWMmgeUJa4bhzGDTU2BWTXI,704
-objectnat/_config.py,sha256=t4nv83Tj4fwYjdzwUh0bA8b_12DqL-GlEVfKaG_hccg,2107
-objectnat/_version.py,sha256=Vdi6OffDRorPQeWjvXo2MPbidl7CNworxvziT78bjl0,18
+objectnat/_config.py,sha256=sv13J3yMw1cmmkgPMf08zinLwPKYwXHJGfchqmurSg8,2268
+objectnat/_version.py,sha256=b-Z-6_UwMK_FEiFRkdRlWa4UpfELs391bZgkMRRJkm0,18
 objectnat/methods/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 objectnat/methods/balanced_buildings.py,sha256=hLT2QgmGWpROtnL8SJQIujeP6q9ou15yIdHpv66CfMs,2892
-objectnat/methods/cluster_points_in_polygons.py,sha256=2oHK-_CauEz8dZX6r-UGEopkqdUENLGaJPpMTuVV_o8,4678
+objectnat/methods/cluster_points_in_polygons.py,sha256=ANoPHB89Ih6SYUTs0VoYqW7zi9GVIytSOGySoQ3vby4,5073
 objectnat/methods/coverage_zones.py,sha256=yMeK1DjneMAxxKv9busEKdAsP25xiJMcPCixlJCDI4s,2835
-objectnat/methods/isochrones.py,sha256=CeNTVpUnlITaacamB5mJQjnbphXckC1FJ0L1EThswhU,6111
-objectnat/methods/living_buildings_osm.py,sha256=pHyeDSKhs4j05Wr3Z_QBxLfLbiZpbwdj_SXz7qQ7V2M,6041
+objectnat/methods/isochrones.py,sha256=CBJprxcyPIYC4RJizqJ1MJL-Zkea4iyr7wHTOOQ7DC8,6146
+objectnat/methods/living_buildings_osm.py,sha256=v0rC8xaqibZq9jZm5HVonmmC9VFXzgZwhqsxHA3sPlc,5904
 objectnat/methods/provision/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-objectnat/methods/provision/city_provision.py,sha256=71Vg2tZie_C2paeFtbf8-eeQ8H7P3dECHo108bVEm0Q,13026
-objectnat/methods/provision/provision.py,sha256=BrwfKcGoRwNcLL9fZ6mU9m6kMuGnAIgxGHfXdgHA39o,4057
+objectnat/methods/provision/provision.py,sha256=jYPcqX-_TBFlUQq0bWK2uuvH6AWMXv8E5mjTeHjchS8,4612
 objectnat/methods/provision/provision_exceptions.py,sha256=-TK4A-vacUuzlPJGSt2YyawRwKDLCZFlAbuIvIf1FnY,1723
+objectnat/methods/provision/provision_model.py,sha256=BXRRkeDToeL9MzswGpmlfK7wqHjVDRbpMCEPZGIDup0,13356
 objectnat/methods/visibility_analysis.py,sha256=__S01m4YcIZbUcr6Umzvr4NpaCsajXxKNcfJm3zquVY,20690
 objectnat/utils/__init__.py,sha256=w8R5V_Ws_GUt4hLwpudMgjXvocG4vCxWSzVw_jTReQ4,44
 objectnat/utils/utils.py,sha256=_vbCW-XTHwZOR3yNlzf_vgNwbYwonhGlduSznGufEgs,638
-objectnat-0.2.1.dist-info/LICENSE.txt,sha256=yPEioMfTd7JAQgAU6J13inS1BSjwd82HFlRSoIb4My8,1498
-objectnat-0.2.1.dist-info/METADATA,sha256=ajah19veqGq4DwU3l9UNIyBbE04MRb9_VL6TD_vRHEI,5923
-objectnat-0.2.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-objectnat-0.2.1.dist-info/RECORD,,
+objectnat-0.2.3.dist-info/LICENSE.txt,sha256=yPEioMfTd7JAQgAU6J13inS1BSjwd82HFlRSoIb4My8,1498
+objectnat-0.2.3.dist-info/METADATA,sha256=eoRAOVNi8t_DrCuCTU1qEp6P2Eoee7aA1ODCdsmvJr4,5921
+objectnat-0.2.3.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+objectnat-0.2.3.dist-info/RECORD,,