giga-spatial 0.6.3 → 0.6.5 (py3-none-any.whl)

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
@@ -15,25 +15,48 @@ from gigaspatial.generators.zonal.geometry import GeometryBasedZonalViewGenerator
 
 
 class MercatorViewGenerator(GeometryBasedZonalViewGenerator[T]):
-    """Mid-level class for zonal view generation based on geometries with identifiers.
+    """
+    Generates zonal views using Mercator tiles as the zones.
 
-    This class serves as an intermediate between the abstract ZonalViewGenerator and specific
-    implementations like MercatorViewGenerator or H3ViewGenerator. It handles the common case
-    where zones are defined by a mapping between zone identifiers and geometries, either
-    provided as a dictionary or as a GeoDataFrame.
+    This class specializes in creating zonal views where the zones are defined by
+    Mercator tiles. It extends the `GeometryBasedZonalViewGenerator` and leverages
+    the `MercatorTiles` and `CountryMercatorTiles` classes to generate tiles based on
+    various input sources.
 
-    The class extends the base functionality with methods for mapping common geospatial
-    datasets including GHSL (Global Human Settlement Layer), Google Open Buildings,
-    and Microsoft Global Buildings data.
+    The primary input source defines the geographical area of interest. This can be
+    a country, a specific geometry, a set of points, or even a list of predefined
+    quadkeys. The `zoom_level` determines the granularity of the Mercator tiles.
 
     Attributes:
-        zone_dict (Dict[T, Polygon]): Mapping of zone identifiers to geometries.
-        zone_id_column (str): Name of the column containing zone identifiers.
-        zone_data_crs (str): Coordinate reference system of the zone data.
-        _zone_gdf (gpd.GeoDataFrame): Cached GeoDataFrame representation of zones.
-        data_store (DataStore): For accessing input data.
-        generator_config (ZonalViewGeneratorConfig): Configuration for view generation.
-        logger: Logger instance for this class.
+        source (Union[str, BaseGeometry, gpd.GeoDataFrame, List[Union[Point, Tuple[float, float]]], List[str]]):
+            Specifies the geographic area or specific tiles to use. Can be:
+            - A country name (str): Uses `CountryMercatorTiles` to generate tiles covering the country.
+            - A Shapely geometry (BaseGeometry): Uses `MercatorTiles.from_spatial` to create tiles intersecting the geometry.
+            - A GeoDataFrame (gpd.GeoDataFrame): Uses `MercatorTiles.from_spatial` to create tiles intersecting the geometries.
+            - A list of points (List[Union[Point, Tuple[float, float]]]): Uses `MercatorTiles.from_spatial` to create tiles containing the points.
+            - A list of quadkeys (List[str]): Uses `MercatorTiles.from_quadkeys` to use the specified tiles directly.
+        zoom_level (int): The zoom level of the Mercator tiles. Higher zoom levels result in smaller, more detailed tiles.
+        predicate (str): The spatial predicate used when filtering tiles based on a spatial source (e.g., "intersects", "contains"). Defaults to "intersects".
+        config (Optional[ZonalViewGeneratorConfig]): Configuration for the zonal view generation process.
+        data_store (Optional[DataStore]): A DataStore instance for accessing data.
+        logger (Optional[logging.Logger]): A logger instance for logging.
+
+    Methods:
+        _init_zone_data(source, zoom_level, predicate): Initializes the Mercator tile GeoDataFrame based on the input source.
+        # Inherits other methods from GeometryBasedZonalViewGenerator, such as:
+        # map_ghsl(), map_google_buildings(), map_ms_buildings(), aggregate_data(), save_view()
+
+    Example:
+        # Create a MercatorViewGenerator for tiles covering Germany at zoom level 6
+        generator = MercatorViewGenerator(source="Germany", zoom_level=6)
+
+        # Create a MercatorViewGenerator for tiles intersecting a specific polygon
+        polygon = ... # Define a Shapely Polygon
+        generator = MercatorViewGenerator(source=polygon, zoom_level=8)
+
+        # Create a MercatorViewGenerator from a list of quadkeys
+        quadkeys = ["0020023131023032", "0020023131023033"]
+        generator = MercatorViewGenerator(source=quadkeys, zoom_level=12)
     """
 
     def __init__(
@@ -53,16 +76,19 @@ class MercatorViewGenerator(GeometryBasedZonalViewGenerator[T]):
     ):
 
         super().__init__(
-            zone_data=self._init_zone_data(source, zoom_level, predicate),
+            zone_data=self._init_zone_data(source, zoom_level, predicate, data_store),
            zone_id_column="quadkey",
            config=config,
            data_store=data_store,
            logger=logger,
        )
+        self.logger.info(f"Initialized MercatorViewGenerator")
 
-    def _init_zone_data(self, source, zoom_level, predicate):
+    def _init_zone_data(self, source, zoom_level, predicate, data_store=None):
         if isinstance(source, str):
-            tiles = CountryMercatorTiles.create(country=source, zoom_level=zoom_level)
+            tiles = CountryMercatorTiles.create(
+                country=source, zoom_level=zoom_level, data_store=data_store
+            )
         elif isinstance(source, (BaseGeometry, Iterable)):
             if isinstance(source, Iterable) and all(
                 isinstance(qk, str) for qk in source
@@ -73,6 +99,11 @@ class MercatorViewGenerator(GeometryBasedZonalViewGenerator[T]):
                 source=source, zoom_level=zoom_level, predicate=predicate
             )
         else:
-            raise ValueError("sadadasfasfkasmf")
+            raise TypeError(
+                f"Unsupported source type for MercatorViewGenerator. 'source' must be "
+                f"a country name (str), a Shapely geometry, a GeoDataFrame, "
+                f"a list of quadkeys (str), or a list of (lon, lat) tuples/Shapely Point objects. "
+                f"Received type: {type(source)}."
+            )
 
         return tiles.to_geodataframe()
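To make the new behaviour concrete, here is a minimal usage sketch of the updated generator; the import path and the sample inputs are assumptions for illustration, not taken from the diff.

```python
# Illustrative sketch: exercising the documented source types and the new
# TypeError for unsupported inputs. Import path is assumed.
from shapely.geometry import Point
from gigaspatial.generators.zonal.mercator import MercatorViewGenerator

# Country source: Mercator tiles covering Kenya at zoom level 8
country_view = MercatorViewGenerator(source="Kenya", zoom_level=8)

# Point source: tiles containing two Shapely points (lon, lat)
point_view = MercatorViewGenerator(
    source=[Point(36.82, -1.29), Point(39.66, -4.04)], zoom_level=12
)

# Unsupported sources now raise a descriptive TypeError rather than the
# previous placeholder ValueError.
try:
    MercatorViewGenerator(source=12345, zoom_level=8)
except TypeError as err:
    print(err)
```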
@@ -1 +1 @@
-from gigaspatial.grid.mercator_tiles import *
+from gigaspatial.grid.mercator_tiles import MercatorTiles, CountryMercatorTiles
@@ -4,10 +4,10 @@ import mercantile
 from shapely.geometry import box
 from shapely.geometry.base import BaseGeometry
 from shapely.strtree import STRtree
-from shapely import MultiPolygon, Polygon, Point
+from shapely import Point
 import json
 from pathlib import Path
-from pydantic import BaseModel, Field, PrivateAttr
+from pydantic import BaseModel, Field
 from typing import List, Union, Iterable, Optional, Tuple, ClassVar
 import pycountry
 
@@ -31,6 +31,9 @@ class MercatorTiles(BaseModel):
         if not quadkeys:
             cls.logger.warning("No quadkeys provided to from_quadkeys.")
             return cls(zoom_level=0, quadkeys=[])
+        cls.logger.info(
+            f"Initializing MercatorTiles from {len(quadkeys)} provided quadkeys."
+        )
         return cls(zoom_level=len(quadkeys[0]), quadkeys=set(quadkeys))
 
     @classmethod
@@ -120,14 +123,7 @@ class MercatorTiles(BaseModel):
         cls.logger.info(
             f"Creating MercatorTiles from {len(points)} points at zoom level: {zoom_level}"
         )
-        quadkeys = {
-            (
-                mercantile.quadkey(mercantile.tile(p.x, p.y, zoom_level))
-                if isinstance(p, Point)
-                else mercantile.quadkey(mercantile.tile(p[1], p[0], zoom_level))
-            )
-            for p in points
-        }
+        quadkeys = set(cls.get_quadkeys_from_points(points, zoom_level))
         cls.logger.info(f"Generated {len(quadkeys)} unique quadkeys from points.")
         return cls(zoom_level=zoom_level, quadkeys=list(quadkeys), **kwargs)
 
@@ -219,6 +215,29 @@ class MercatorTiles(BaseModel):
             {"quadkey": self.quadkeys, "geometry": self.to_geoms()}, crs="EPSG:4326"
         )
 
+    @staticmethod
+    def get_quadkeys_from_points(
+        points: List[Union[Point, Tuple[float, float]]], zoom_level: int
+    ) -> List[str]:
+        """Get list of quadkeys for the provided points at specified zoom level.
+
+        Args:
+            points: List of points as either shapely Points or (lon, lat) tuples
+            zoom_level: Zoom level for the quadkeys
+
+        Returns:
+            List of quadkey strings
+        """
+        quadkeys = [
+            (
+                mercantile.quadkey(mercantile.tile(p.x, p.y, zoom_level))
+                if isinstance(p, Point)
+                else mercantile.quadkey(mercantile.tile(p[1], p[0], zoom_level))
+            )
+            for p in points
+        ]
+        return quadkeys
+
     def save(self, file: Union[str, Path], format: str = "json") -> None:
         """Save MercatorTiles to file in specified format."""
         with self.data_store.open(str(file), "wb" if format == "parquet" else "w") as f:
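The new static helper can also be called on its own; a small sketch follows (the coordinates are arbitrary examples).

```python
# Illustrative sketch: computing quadkeys directly with the new helper.
from shapely.geometry import Point
from gigaspatial.grid.mercator_tiles import MercatorTiles

points = [Point(36.8219, -1.2921), Point(3.3792, 6.5244)]  # (lon, lat)
quadkeys = MercatorTiles.get_quadkeys_from_points(points, zoom_level=12)
print(quadkeys)  # one 12-character quadkey string per point
```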
@@ -270,6 +289,10 @@ class CountryMercatorTiles(MercatorTiles):
             country=pycountry.countries.lookup(country).alpha_3,
         )
 
+        cls.logger.info(
+            f"Initializing Mercator zones for country: {country} at zoom level {zoom_level}"
+        )
+
         country_geom = (
             AdminBoundaries.create(
                 country_code=country,
@@ -37,4 +37,8 @@ from gigaspatial.handlers.unicef_georepo import (
     GeoRepoClient,
     get_country_boundaries_by_iso3,
 )
-from gigaspatial.handlers.giga import GigaSchoolLocationFetcher
+from gigaspatial.handlers.giga import (
+    GigaSchoolLocationFetcher,
+    GigaSchoolProfileFetcher,
+    GigaSchoolMeasurementsFetcher,
+)
@@ -4,11 +4,13 @@ import geopandas as gpd
 from pathlib import Path
 from urllib.error import HTTPError
 from shapely.geometry import Polygon, MultiPolygon, shape
+import tempfile
 import pycountry
 
 from gigaspatial.core.io.data_store import DataStore
 from gigaspatial.core.io.readers import read_dataset
-from gigaspatial.config import config
+from gigaspatial.handlers.hdx import HDXConfig
+from gigaspatial.config import config as global_config
 
 
 class AdminBoundary(BaseModel):
@@ -31,7 +33,6 @@ class AdminBoundary(BaseModel):
     )
 
     class Config:
-        # extra = "allow"
         arbitrary_types_allowed = True
 
 
@@ -46,7 +47,7 @@ class AdminBoundaries(BaseModel):
         description="Administrative level (e.g., 0=country, 1=state, etc.)",
     )
 
-    logger: ClassVar = config.get_logger("AdminBoundaries")
+    logger: ClassVar = global_config.get_logger("AdminBoundaries")
 
     _schema_config: ClassVar[Dict[str, Dict[str, str]]] = {
         "gadm": {
@@ -61,8 +62,31 @@ class AdminBoundaries(BaseModel):
             "name_en": "name_en",
             "country_code": "iso_3166_1_alpha_3",
         },
+        "geoBoundaries": {
+            "id": "shapeID",
+            "name": "shapeName",
+            "country_code": "shapeGroup",
+        },
     }
 
+    def to_geodataframe(self) -> gpd.GeoDataFrame:
+        """Convert the AdminBoundaries to a GeoDataFrame."""
+        if not self.boundaries:
+            if hasattr(self, "_empty_schema"):
+                columns = self._empty_schema
+            else:
+                columns = ["id", "name", "country_code", "geometry"]
+                if self.level > 0:
+                    columns.append("parent_id")
+
+            return gpd.GeoDataFrame(columns=columns, geometry="geometry", crs=4326)
+
+        return gpd.GeoDataFrame(
+            [boundary.model_dump() for boundary in self.boundaries],
+            geometry="geometry",
+            crs=4326,
+        )
+
     @classmethod
     def get_schema_config(cls) -> Dict[str, Dict[str, str]]:
         """Return field mappings for different data sources"""
@@ -100,6 +124,7 @@ class AdminBoundaries(BaseModel):
             cls.logger.warning(
                 f"Error loading GADM data for {country_code} at admin level {admin_level}: {str(e)}"
             )
+            cls.logger.info("Falling back to empty instance")
             return cls._create_empty_instance(country_code, admin_level, "gadm")
 
     @classmethod
@@ -138,6 +163,7 @@ class AdminBoundaries(BaseModel):
             cls.logger.warning(
                 f"No data found at {path} for admin level {admin_level}: {str(e)}"
             )
+            cls.logger.info("Falling back to empty instance")
             return cls._create_empty_instance(None, admin_level, "internal")
 
     @classmethod
@@ -202,6 +228,69 @@ class AdminBoundaries(BaseModel):
 
         return cls(boundaries=boundaries, level=admin_level)
 
+    @classmethod
+    def from_geoboundaries(cls, country_code, admin_level: int = 0):
+        cls.logger.info(
+            f"Searching for geoBoundaries data for country: {country_code}, admin level: {admin_level}"
+        )
+
+        country_datasets = HDXConfig.search_datasets(
+            query=f'dataseries_name:"geoBoundaries - Subnational Administrative Boundaries" AND groups:"{country_code.lower()}"',
+            rows=1,
+        )
+        if not country_datasets:
+            cls.logger.error(f"No datasets found for country: {country_code}")
+            raise ValueError(
+                "No resources found for the specified country. Please check your search parameters and try again."
+            )
+
+        cls.logger.info(f"Found dataset: {country_datasets[0].get('title', 'Unknown')}")
+
+        resources = [
+            resource
+            for resource in country_datasets[0].get_resources()
+            if (
+                resource.data["name"]
+                == f"geoBoundaries-{country_code.upper()}-ADM{admin_level}.geojson"
+            )
+        ]
+
+        if not resources:
+            cls.logger.error(
+                f"No resources found for {country_code} at admin level {admin_level}"
+            )
+            raise ValueError(
+                "No resources found for the specified criteria. Please check your search parameters and try again."
+            )
+
+        cls.logger.info(f"Found resource: {resources[0].data.get('name', 'Unknown')}")
+
+        try:
+            cls.logger.info("Downloading and processing boundary data...")
+            with tempfile.TemporaryDirectory() as tmpdir:
+                url, local_path = resources[0].download(folder=tmpdir)
+                cls.logger.debug(f"Downloaded file to temporary path: {local_path}")
+                with open(local_path, "rb") as f:
+                    gdf = gpd.read_file(f)
+
+            gdf = cls._map_fields(gdf, "geoBoundaries", admin_level)
+            boundaries = [
+                AdminBoundary(**row_dict) for row_dict in gdf.to_dict("records")
+            ]
+            cls.logger.info(
+                f"Successfully created {len(boundaries)} AdminBoundary objects"
+            )
+            return cls(boundaries=boundaries, level=admin_level)
+
+        except (ValueError, HTTPError, FileNotFoundError) as e:
+            cls.logger.warning(
+                f"Error loading geoBoundaries data for {country_code} at admin level {admin_level}: {str(e)}"
+            )
+            cls.logger.info("Falling back to empty instance")
+            return cls._create_empty_instance(
+                country_code, admin_level, "geoBoundaries"
+            )
+
     @classmethod
     def create(
         cls,
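A hedged sketch of calling the new geoBoundaries loader directly; it needs network access to HDX at runtime, and the module path is an assumption for illustration.

```python
# Illustrative sketch: loading Kenya admin level 1 boundaries from
# geoBoundaries via HDX. Requires network access; module path assumed.
from gigaspatial.handlers.boundaries import AdminBoundaries

ken_adm1 = AdminBoundaries.from_geoboundaries("KEN", admin_level=1)
print(ken_adm1.to_geodataframe()[["id", "name", "country_code"]].head())
```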
@@ -211,45 +300,152 @@ class AdminBoundaries(BaseModel):
         path: Optional[Union[str, "Path"]] = None,
         **kwargs,
     ) -> "AdminBoundaries":
-        """Factory method to create AdminBoundaries instance from either GADM or data store."""
+        """
+        Factory method to create an AdminBoundaries instance using various data sources,
+        depending on the provided parameters and global configuration.
+
+        Loading Logic:
+        1. If a `data_store` is provided and either a `path` is given or
+           `global_config.ADMIN_BOUNDARIES_DATA_DIR` is set:
+            - If `path` is not provided but `country_code` is, the path is constructed
+              using `global_config.get_admin_path()`.
+            - Loads boundaries from the specified data store and path.
+
+        2. If only `country_code` is provided (no data_store):
+            - Attempts to load boundaries from GeoRepo (if available).
+            - If GeoRepo is unavailable, attempts to load from GADM.
+            - If GADM fails, falls back to geoBoundaries.
+            - Raises an error if all sources fail.
+
+        3. If neither `country_code` nor `data_store` is provided:
+            - Raises a ValueError.
+
+        Args:
+            country_code (Optional[str]): ISO country code (2 or 3 letter) or country name.
+            admin_level (int): Administrative level (0=country, 1=state/province, etc.).
+            data_store (Optional[DataStore]): Optional data store instance for loading from existing data.
+            path (Optional[Union[str, Path]]): Optional path to data file (used with data_store).
+            **kwargs: Additional arguments passed to the underlying creation methods.
+
+        Returns:
+            AdminBoundaries: Configured instance.
+
+        Raises:
+            ValueError: If neither country_code nor (data_store, path) are provided,
+                or if country_code lookup fails.
+            RuntimeError: If all data sources fail to load boundaries.
+
+        Examples:
+            # Load from a data store (path auto-generated if not provided)
+            boundaries = AdminBoundaries.create(country_code="USA", admin_level=1, data_store=store)
+
+            # Load from a specific file in a data store
+            boundaries = AdminBoundaries.create(data_store=store, path="data.shp")
+
+            # Load from online sources (GeoRepo, GADM, geoBoundaries)
+            boundaries = AdminBoundaries.create(country_code="USA", admin_level=1)
+        """
         cls.logger.info(
-            f"Creating AdminBoundaries instance. Country: {country_code}, admin level: {admin_level}, data_store provided: {data_store is not None}, path provided: {path is not None}"
+            f"Creating AdminBoundaries instance. Country: {country_code}, "
+            f"admin level: {admin_level}, data_store provided: {data_store is not None}, "
+            f"path provided: {path is not None}"
         )
-        iso3_code = pycountry.countries.lookup(country_code).alpha_3
-        if data_store is not None:
-            if path is None:
-                if country_code is None:
-                    ValueError(
-                        "If data_store is provided, path or country_code must also be specified."
-                    )
-                path = config.get_admin_path(
+
+        from_data_store = data_store is not None and (
+            global_config.ADMIN_BOUNDARIES_DATA_DIR is not None or path is not None
+        )
+
+        # Validate input parameters
+        if not country_code and not data_store:
+            raise ValueError("Either country_code or data_store must be provided.")
+
+        if from_data_store and not path and not country_code:
+            raise ValueError(
+                "If data_store is provided, either path or country_code must also be specified."
+            )
+
+        # Handle data store path first
+        if from_data_store:
+            iso3_code = None
+            if country_code:
+                try:
+                    iso3_code = pycountry.countries.lookup(country_code).alpha_3
+                except LookupError as e:
+                    raise ValueError(f"Invalid country code '{country_code}': {e}")
+
+            # Generate path if not provided
+            if path is None and iso3_code:
+                path = global_config.get_admin_path(
                     country_code=iso3_code,
                     admin_level=admin_level,
                 )
+
             return cls.from_data_store(data_store, path, admin_level, **kwargs)
-        elif country_code is not None:
-            from gigaspatial.handlers.unicef_georepo import GeoRepoClient
 
+        # Handle country code path
+        if country_code is not None:
             try:
-                client = GeoRepoClient()
-                if client.check_connection():
-                    cls.logger.info("GeoRepo connection successful.")
-                    return cls.from_georepo(
-                        iso3_code,
-                        admin_level=admin_level,
-                    )
-            except ValueError as e:
+                iso3_code = pycountry.countries.lookup(country_code).alpha_3
+            except LookupError as e:
+                raise ValueError(f"Invalid country code '{country_code}': {e}")
+
+            # Try GeoRepo first
+            if cls._try_georepo(iso3_code, admin_level):
+                return cls.from_georepo(iso3_code, admin_level=admin_level)
+
+            # Fallback to GADM
+            try:
+                cls.logger.info("Attempting to load from GADM.")
+                return cls.from_gadm(iso3_code, admin_level, **kwargs)
+            except Exception as e:
                 cls.logger.warning(
-                    f"GeoRepo initialization failed: {str(e)}. Falling back to GADM."
+                    f"GADM loading failed: {e}. Falling back to geoBoundaries."
                 )
+
+            # Final fallback to geoBoundaries
+            try:
+                return cls.from_geoboundaries(iso3_code, admin_level)
             except Exception as e:
-                cls.logger.warning(f"GeoRepo error: {str(e)}. Falling back to GADM.")
+                cls.logger.error(f"All data sources failed. geoBoundaries error: {e}")
+                raise RuntimeError(
+                    f"Failed to load administrative boundaries for {country_code} "
+                    f"from all available sources (GeoRepo, GADM, geoBoundaries)."
+                ) from e
 
-            return cls.from_gadm(iso3_code, admin_level, **kwargs)
-        else:
-            raise ValueError(
-                "Either country_code or (data_store, path) must be provided."
-            )
+        # This should never be reached due to validation above
+        raise ValueError("Unexpected error: no valid data source could be determined.")
+
+    @classmethod
+    def _try_georepo(cls, iso3_code: str, admin_level: int) -> bool:
+        """Helper method to test GeoRepo availability.
+
+        Args:
+            iso3_code: ISO3 country code
+            admin_level: Administrative level
+
+        Returns:
+            bool: True if GeoRepo is available and working, False otherwise
+        """
+        try:
+            from gigaspatial.handlers.unicef_georepo import GeoRepoClient
+
+            client = GeoRepoClient()
+            if client.check_connection():
+                cls.logger.info("GeoRepo connection successful.")
+                return True
+            else:
+                cls.logger.info("GeoRepo connection failed.")
+                return False
+
+        except ImportError:
+            cls.logger.info("GeoRepo client not available (import failed).")
+            return False
+        except ValueError as e:
+            cls.logger.warning(f"GeoRepo initialization failed: {e}")
+            return False
+        except Exception as e:
+            cls.logger.warning(f"GeoRepo error: {e}")
+            return False
 
     @classmethod
     def _create_empty_instance(
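A minimal sketch of how the reworked `create()` is typically called, showing where the new `RuntimeError` fits in; the country value and module path are assumptions for illustration.

```python
# Illustrative sketch: create() now tries GeoRepo, then GADM, then
# geoBoundaries, and raises RuntimeError only if all of them fail.
# Module path assumed.
from gigaspatial.handlers.boundaries import AdminBoundaries

try:
    boundaries = AdminBoundaries.create(country_code="Kenya", admin_level=1)
except ValueError as err:
    print(f"Bad input: {err}")  # e.g. unrecognised country code or name
except RuntimeError as err:
    print(f"All sources failed: {err}")
else:
    print(boundaries.to_geodataframe().head())
```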
@@ -288,21 +484,3 @@ class AdminBoundaries(BaseModel):
             field_mapping[v] = k
 
         return gdf.rename(columns=field_mapping)
-
-    def to_geodataframe(self) -> gpd.GeoDataFrame:
-        """Convert the AdminBoundaries to a GeoDataFrame."""
-        if not self.boundaries:
-            if hasattr(self, "_empty_schema"):
-                columns = self._empty_schema
-            else:
-                columns = ["id", "name", "country_code", "geometry"]
-                if self.level > 0:
-                    columns.append("parent_id")
-
-            return gpd.GeoDataFrame(columns=columns, geometry="geometry", crs=4326)
-
-        return gpd.GeoDataFrame(
-            [boundary.model_dump() for boundary in self.boundaries],
-            geometry="geometry",
-            crs=4326,
-        )