fimeval 0.1.56__py3-none-any.whl → 0.1.58__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,334 @@
+ """
+ This utility module contains helpers to retrieve all the necessary metadata of benchmark FIM
+ from the S3 bucket during benchmark FIM querying.
+
+ Author: Supath Dhital, sdhital@crimson.ua.edu
+ Updated date: 25 Nov, 2025
+ """
+
+ from __future__ import annotations
+ import os, re, json, datetime as dt
+ from typing import List, Dict, Any, Optional
+
+ import urllib.parse
+ import boto3
+ from botocore import UNSIGNED
+ from botocore.config import Config
+
+ # constants
+ BUCKET = "sdmlab"
+ CATALOG_KEY = (
+     "FIM_Database/FIM_Viz/catalog_core.json"  # Path of the json file in the s3 bucket
+ )
+
+ # anonymous (unsigned) s3 client for the public bucket
+ _S3 = boto3.client("s3", config=Config(signature_version=UNSIGNED))
+
+
+ # helpers for direct S3 file links
+ def s3_http_url(bucket: str, key: str) -> str:
+     """Build a public-style S3 HTTPS URL."""
+     return f"https://{bucket}.s3.amazonaws.com/{urllib.parse.quote(key, safe='/')}"
+
+
+ # utils
+ _YMD_RE = re.compile(r"^\d{4}-\d{2}-\d{2}$")
+ _YMD_COMPACT_RE = re.compile(r"^\d{8}$")
+ _YMDH_RE = re.compile(r"^\d{4}-\d{2}-\d{2}[ T]\d{2}$")
+ _YMDHMS_RE = re.compile(r"^\d{4}-\d{2}-\d{2}[ T]\d{2}:\d{2}(:\d{2})?$")
+
+
+ # Support functions to ensure the geopackage file is local
+ def _s3_bucket_key_from_http_url(url: str) -> Optional[tuple[str, str]]:
+     """Parse an S3 HTTPS URL into (bucket, key); return None if it is not one."""
+     try:
+         u = urllib.parse.urlparse(url)
+         host = (u.netloc or "").lower()
+         path = (u.path or "").lstrip("/")
+         if not host or not path:
+             return None
+
+         if ".s3." in host or host.endswith(".s3.amazonaws.com"):
+             bucket = host.split(".s3", 1)[0]
+             key = path
+             return bucket, key
+
+         return None
+     except Exception:
+         return None
+
+
+ def _ensure_local_gpkg(uri: str) -> str:
+     """
+     Ensure we can read a GPKG even when uri is an https S3 URL by caching locally.
+     Returns a path usable by geopandas.read_file().
+     """
+     if not isinstance(uri, str) or not uri.strip():
+         return uri
+
+     u = uri.strip()
+     if os.path.exists(u):
+         return u
+
+     # Cache https S3 gpkg locally
+     if u.lower().startswith("http") and ".amazonaws.com/" in u.lower() and u.lower().endswith(".gpkg"):
+         parsed = _s3_bucket_key_from_http_url(u.split("?", 1)[0])
+         if not parsed:
+             return u
+
+         bucket, key = parsed
+         cache_dir = os.path.join(os.path.expanduser("~"), ".fimeval_cache", "aoi_gpkg")
+         os.makedirs(cache_dir, exist_ok=True)
+
+         local = os.path.join(cache_dir, os.path.basename(key))
+         if not os.path.exists(local):
+             _download(bucket, key, local)
+         return local
+     return u
+
+ def _normalize_user_dt(s: str) -> str:
+     s = s.strip()
+     s = s.replace("/", "-")
+     s = re.sub(r"\s+", " ", s)
+     return s
+
+ def _display_raster_name(rec: Dict[str, Any]) -> str:
+     tif_url = rec.get("tif_url")
+     if isinstance(tif_url, str) and tif_url.strip():
+         # drop querystring if any
+         tif_url = tif_url.split("?", 1)[0]
+         return os.path.basename(tif_url)
+
+     # fallback: last path token of id
+     rid = rec.get("id")
+     if isinstance(rid, str) and rid.strip():
+         return rid.strip().split("/")[-1] + ".tif"
+
+     return "NA"
+
+ def _to_date(s: str) -> dt.date:
+     s = _normalize_user_dt(s)
+     if _YMD_COMPACT_RE.match(s):
+         return dt.datetime.strptime(s, "%Y%m%d").date()
+     if _YMD_RE.match(s):
+         return dt.date.fromisoformat(s)
+     try:
+         return dt.datetime.fromisoformat(s).date()
+     except Exception:
+         m = re.match(r"^(\d{4}-\d{2}-\d{2})[ T](\d{2})$", s)
+         if m:
+             return dt.datetime.fromisoformat(f"{m.group(1)} {m.group(2)}:00:00").date()
+         raise ValueError(f"Bad date format: {s}")
+
+
+ def _to_hour_or_none(s: str) -> Optional[int]:
+     s = _normalize_user_dt(s)
+     if _YMD_RE.match(s) or _YMD_COMPACT_RE.match(s):
+         return None
+     m = re.match(r"^\d{4}-\d{2}-\d{2}[ T](\d{2})$", s)
+     if m:
+         return int(m.group(1))
+     try:
+         dt_obj = dt.datetime.fromisoformat(s)
+         return dt_obj.hour
+     except Exception:
+         m2 = re.match(r"^\d{4}-\d{2}-\d{2}T(\d{2})$", s)
+         if m2:
+             return int(m2.group(1))
+         return None
+
+
+ def _record_day(rec: Dict[str, Any]) -> Optional[dt.date]:
+     ymd = rec.get("date_ymd")
+     if isinstance(ymd, str):
+         try:
+             return dt.date.fromisoformat(ymd)
+         except Exception:
+             pass
+     raw = rec.get("date_of_flood")
+     if isinstance(raw, str) and len(raw) >= 8:
+         try:
+             return dt.datetime.strptime(raw[:8], "%Y%m%d").date()
+         except Exception:
+             return None
+     return None
+
+
+ def _record_hour_or_none(rec: Dict[str, Any]) -> Optional[int]:
+     raw = rec.get("date_of_flood")
+     if isinstance(raw, str) and "T" in raw and len(raw) >= 11:
+         try:
+             return int(raw.split("T", 1)[1][:2])
+         except Exception:
+             return None
+     return None
+
+
+ # Printing helpers
+ def _pretty_date_for_print(rec: Dict[str, Any]) -> str:
+     raw = rec.get("date_of_flood")
+     if isinstance(raw, str) and "T" in raw and len(raw) >= 11:
+         return f"{raw[:4]}-{raw[4:6]}-{raw[6:8]}T{raw.split('T',1)[1][:2]}"
+     ymd = rec.get("date_ymd")
+     if isinstance(ymd, str) and _YMD_RE.match(ymd):
+         return ymd
+     if isinstance(raw, str) and len(raw) >= 8:
+         return f"{raw[:4]}-{raw[4:6]}-{raw[6:8]}"
+     return "unknown"
+
+
+ def _context_str(
+     huc8: Optional[str] = None,
+     date_input: Optional[str] = None,
+     file_name: Optional[str] = None,
+     start_date: Optional[str] = None,
+     end_date: Optional[str] = None,
+ ) -> str:
+     """
+     Build a readable context summary for printing headers.
+     Example outputs:
+     - "HUC 12090301"
+     - "HUC 12090301, date '2017-08-30'"
+     - "HUC 12090301, range 2017-08-30 to 2017-09-01"
+     - "HUC 12090301, file 'PSS_3_0m_20170830T162251_BM.tif'"
+     """
+     parts = []
+     if huc8:
+         parts.append(f"HUC {huc8}")
+     if date_input:
+         parts.append(f"date '{date_input}'")
+     if start_date or end_date:
+         if start_date and end_date:
+             parts.append(f"range {start_date} to {end_date}")
+         elif start_date:
+             parts.append(f"from {start_date}")
+         elif end_date:
+             parts.append(f"until {end_date}")
+     if file_name:
+         parts.append(f"file '{file_name}'")
+
+     return ", ".join(parts) if parts else "your filters"
+
+
+ def format_records_for_print(
+     records: List[Dict[str, Any]], context: Optional[str] = None
+ ) -> str:
+     if not records:
+         ctx = context or "your filters"
+         return f"No benchmark FIMs matched {ctx}."
+
+     header = (
+         f"The following benchmark data are available for {context}:\n"
+         if context
+         else ""
+     )
+
+     def _is_synthetic_tier_local(r: Dict[str, Any]) -> bool:
+         t = str(r.get("tier") or r.get("quality") or "").lower()
+         return "tier_4" in t or t.strip() == "4"
+
+     def _return_period_text_local(r: Dict[str, Any]) -> str:
+         rp = (
+             r.get("return_period")
+             or r.get("return_period_yr")
+             or r.get("rp")
+             or r.get("rp_years")
+         )
+         if rp is None:
+             return "synthetic flow (return period unknown)"
+         try:
+             rp_int = int(float(str(rp).strip().replace("yr", "").replace("-year", "")))
+             return f"{rp_int}-year synthetic flow"
+         except Exception:
+             return f"{rp} synthetic flow"
+
+     blocks: List[str] = []
+     for r in records:
+         tier = r.get("tier") or r.get("quality") or "Unknown"
+         res = r.get("resolution_m")
+         res_txt = f"{res}m" if res is not None else "NA"
+         fname = _display_raster_name(r)
+
+         # Build lines with tier-aware event text
+         lines = [f"Data Tier: {tier}"]
+         if _is_synthetic_tier_local(r):
+             lines.append(f"Return Period: {_return_period_text_local(r)}")
+         else:
+             date_str = _pretty_date_for_print(r)
+             lines.append(f"Benchmark FIM date: {date_str}")
+
+         lines.extend(
+             [
+                 f"Spatial Resolution: {res_txt}",
+                 f"Benchmark FIM raster name in DB: {fname}",
+             ]
+         )
+         blocks.append("\n".join(lines))
+
+     return (header + "\n\n".join(blocks)).strip()
+
+
+ # S3 and json catalog
+ def load_catalog_core() -> Dict[str, Any]:
+     obj = _S3.get_object(Bucket=BUCKET, Key=CATALOG_KEY)
+     return json.loads(obj["Body"].read().decode("utf-8", "replace"))
+
+
+ def _list_prefix(prefix: str) -> List[str]:
+     keys: List[str] = []
+     paginator = _S3.get_paginator("list_objects_v2")
+     for page in paginator.paginate(Bucket=BUCKET, Prefix=prefix):
+         for obj in page.get("Contents", []) or []:
+             keys.append(obj["Key"])
+     return keys
+
+
+ def _download(bucket: str, key: str, dest_path: str) -> str:
+     os.makedirs(os.path.dirname(dest_path), exist_ok=True)
+     _S3.download_file(bucket, key, dest_path)
+     return dest_path
+
+
+ # Get the files from the s3 bucket
+ def _folder_from_record(rec: Dict[str, Any]) -> str:
+     s3_key = rec.get("s3_key")
+     if not s3_key or "/" not in s3_key:
+         raise ValueError("Record lacks s3_key to derive folder")
+     return s3_key.rsplit("/", 1)[0] + "/"
+
+
+ def _tif_key_from_record(rec: Dict[str, Any]) -> Optional[str]:
+     tif_url = rec.get("tif_url")
+     if isinstance(tif_url, str) and ".amazonaws.com/" in tif_url:
+         return tif_url.split(".amazonaws.com/", 1)[1]
+     fname = rec.get("file_name")
+     if not fname:
+         return None
+     return _folder_from_record(rec) + fname
+
+
+ # Download that tif and the boundary file --> need to add building footprint automation as well.
+ def download_fim_assets(record: Dict[str, Any], dest_dir: str) -> Dict[str, Any]:
+     """
+     Download the .tif (if present) and any .gpkg from the record's folder to dest_dir.
+     """
+     os.makedirs(dest_dir, exist_ok=True)
+     out = {"tif": None, "gpkg_files": []}
+
+     # TIF
+     tif_key = _tif_key_from_record(record)
+     if tif_key:
+         local = os.path.join(dest_dir, os.path.basename(tif_key))
+         if not os.path.exists(local):
+             _download(BUCKET, tif_key, local)
+         out["tif"] = local
+
+     # GPKGs (list folder)
+     folder = _folder_from_record(record)
+     for key in _list_prefix(folder):
+         if key.lower().endswith(".gpkg"):
+             local = os.path.join(dest_dir, os.path.basename(key))
+             if not os.path.exists(local):
+                 _download(BUCKET, key, local)
+             out["gpkg_files"].append(local)
+
+     return out
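
For orientation, here is a minimal usage sketch of the catalog helpers added above (an editor's illustration, not code from the package). The top-level layout of catalog_core.json is not shown in this diff, so the "records" key below is a hypothetical placeholder; only the per-record fields used by the module (s3_key, tif_url, tier, resolution_m) are taken from the code above.

    # Hypothetical sketch: load the public catalog and fetch one record's assets.
    catalog = load_catalog_core()
    records = catalog.get("records", [])  # "records" key is an assumption
    print(format_records_for_print(records, context=_context_str(huc8="12090301")))

    if records:
        # Requires the record to carry an s3_key/tif_url, per the helpers above.
        assets = download_fim_assets(records[0], dest_dir="./benchmark_fim")
        print("raster:", assets["tif"])
        print("boundaries:", assets["gpkg_files"])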
@@ -1,3 +1,4 @@
  from .evaluationwithBF import EvaluationWithBuildingFootprint
+ from .arcgis_API import getBuildingFootprint
 
- __all__ = ["EvaluationWithBuildingFootprint"]
+ __all__ = ["EvaluationWithBuildingFootprint", "getBuildingFootprint"]
@@ -0,0 +1,195 @@
+ """
+ Author: Supath Dhital
+ Date Created: January 2026
+
+ Description: Extracts Microsoft Building Footprints using the ArcGIS REST API for a given boundary.
+ """
+
+ import geopandas as gpd
+ import requests
+ import pandas as pd
+ from pathlib import Path
+ from typing import Union, Optional
+
+
+ # Main class
+ class getBuildingFootprint:
+     """Extract Microsoft Building Footprints within a boundary using spatial queries."""
+
+     MSBFP_URL = "https://services.arcgis.com/P3ePLMYs2RVChkJx/arcgis/rest/services/MSBFP2/FeatureServer/0"
+
+     def __init__(
+         self,
+         boundary: Union[str, Path, gpd.GeoDataFrame],
+         layer: Optional[str] = None,
+         output_dir: Optional[Union[str, Path]] = None,
+         service_url: Optional[str] = None,
+     ):
+         """
+         Parameters
+         ----------
+         boundary : str, Path, or GeoDataFrame
+             Boundary as a file path or GeoDataFrame
+         layer : str, optional
+             Layer name if boundary is a geopackage with multiple layers
+         output_dir : str or Path, optional
+             Output directory (defaults to cwd/BFOutputs)
+         service_url : str, optional
+             Custom ArcGIS feature service URL
+         """
+         self.boundary = self._load_boundary(boundary, layer)
+         self.service_url = service_url or self.MSBFP_URL
+
+         # Set up the output directory
+         if output_dir is None:
+             output_dir = Path.cwd() / "BFOutputs"
+         else:
+             output_dir = Path(output_dir)
+         output_dir.mkdir(parents=True, exist_ok=True)
+
+         # Run the extraction immediately on construction
+         self.extract(output_dir=output_dir)
+
+     def _load_boundary(
+         self, boundary: Union[str, Path, gpd.GeoDataFrame], layer: Optional[str]
+     ) -> gpd.GeoDataFrame:
+         """Load and validate the boundary."""
+         if isinstance(boundary, gpd.GeoDataFrame):
+             gdf = boundary.copy()
+         else:
+             kwargs = {"layer": layer} if layer else {}
+             gdf = gpd.read_file(boundary, **kwargs)
+
+         # Ensure WGS84
+         if gdf.crs != "EPSG:4326":
+             gdf = gdf.to_crs("EPSG:4326")
+
+         return gdf
+
+     def extract(
+         self,
+         output_dir: Optional[Union[str, Path]] = None,
+         output_filename: str = "building_footprints.gpkg",
+         batch_size: int = 2000,
+         timeout: int = 60,
+         verbose: bool = True,
+     ) -> gpd.GeoDataFrame:
+         """
+         Extract building footprints within the boundary.
+
+         Parameters
+         ----------
+         output_dir : str or Path, optional
+             Output directory (defaults to cwd/BFOutputs)
+         output_filename : str, default="building_footprints.gpkg"
+             Output filename
+         batch_size : int, default=2000
+             Number of features to retrieve per request
+         timeout : int, default=60
+             Request timeout in seconds
+         verbose : bool, default=True
+             Print progress messages
+
+         Returns
+         -------
+         GeoDataFrame
+             Extracted building footprints
+         """
+
+         # Get the bounding box of the boundary
+         xmin, ymin, xmax, ymax = self.boundary.total_bounds
+
+         if verbose:
+             print(f"Querying {self.service_url}...")
+
+         # Query the service, paging through results with resultOffset
+         all_features = []
+         offset = 0
+         query_url = f"{self.service_url}/query"
+
+         while True:
+             params = {
+                 "f": "geojson",
+                 "where": "1=1",
+                 "geometry": f"{xmin},{ymin},{xmax},{ymax}",
+                 "geometryType": "esriGeometryEnvelope",
+                 "inSR": "4326",
+                 "spatialRel": "esriSpatialRelIntersects",
+                 "outFields": "*",
+                 "returnGeometry": "true",
+                 "outSR": "4326",
+                 "resultOffset": offset,
+                 "resultRecordCount": batch_size,
+             }
+
+             try:
+                 response = requests.get(query_url, params=params, timeout=timeout)
+
+                 if response.status_code != 200:
+                     if verbose:
+                         print(f"Error {response.status_code}")
+                     break
+
+                 data = response.json()
+
+                 if "error" in data:
+                     if verbose:
+                         print(f"Server error: {data['error'].get('message')}")
+                     break
+
+                 if "features" in data and data["features"]:
+                     batch_gdf = gpd.GeoDataFrame.from_features(
+                         data["features"], crs="EPSG:4326"
+                     )
+                     all_features.append(batch_gdf)
+
+                     if verbose:
+                         total = sum(len(gdf) for gdf in all_features)
+                         print(f" Retrieved {total} features so far...")
+
+                     if len(data["features"]) < batch_size:
+                         break
+                     offset += batch_size
+                 else:
+                     break
+
+             except requests.exceptions.Timeout:
+                 if verbose:
+                     print(" Request timed out, retrying...")
+                 continue
+             except Exception as e:
+                 if verbose:
+                     print(f" Error: {e}")
+                 break
+
+         if not all_features:
+             if verbose:
+                 print("No features found.")
+             return gpd.GeoDataFrame()
+
+         # Combine the batches into a single GeoDataFrame
+         gdf = pd.concat(all_features, ignore_index=True)
+         gdf = gpd.GeoDataFrame(gdf, crs="EPSG:4326")
+
+         # Remove duplicates (overlapping pages can return the same feature twice)
+         for id_field in ["OBJECTID", "FID", "ID"]:
+             if id_field in gdf.columns:
+                 initial = len(gdf)
+                 gdf = gdf.drop_duplicates(subset=[id_field])
+                 if verbose and (initial - len(gdf)) > 0:
+                     print(f"Removed {initial - len(gdf)} duplicates")
+                 break
+
+         # Clip to the exact boundary (the query used only the bounding box)
+         if verbose:
+             print(f"Clipping {len(gdf)} features to boundary...")
+         gdf = gpd.clip(gdf, self.boundary)
+
+         # Save (coerce output_dir so extract() also works when called directly)
+         if output_dir is None:
+             output_dir = Path.cwd() / "BFOutputs"
+         output_dir = Path(output_dir)
+         output_dir.mkdir(parents=True, exist_ok=True)
+         output_path = output_dir / output_filename
+         gdf.to_file(output_path, driver="GPKG")
+
+         if verbose:
+             print(f"\n{'='*60}")
+             print(f"SUCCESS: Saved {len(gdf)} buildings to:")
+             print(f" {output_path}")
+             print(f"{'='*60}")
+
+         return gdf
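
A short usage sketch for the class above (editor's illustration; the AOI path is hypothetical). Note that the constructor runs extract() immediately and writes building_footprints.gpkg into the output directory, so the typical call is just the constructor:

    from pathlib import Path

    # "aoi_boundary.gpkg" is a hypothetical AOI polygon file; any CRS works,
    # since _load_boundary reprojects to WGS84 before querying the service.
    getBuildingFootprint(
        boundary="aoi_boundary.gpkg",
        layer=None,                      # set if the GPKG has multiple layers
        output_dir=Path("BFOutputs"),    # created if missing
    )
    # Result: BFOutputs/building_footprints.gpkg, clipped to the boundary.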
@@ -10,6 +10,9 @@ import seaborn as sns
  import matplotlib.pyplot as plt
  import matplotlib.gridspec as gridspec
 
+ # Import the building footprint module
+ from .arcgis_API import getBuildingFootprint
+
 
  def Changeintogpkg(input_path, output_dir, layer_name):
      input_path = str(input_path)
@@ -355,38 +358,12 @@ def detect_shapefile(folder):
      return None
 
 
- def ensure_pyspark(version: str | None = "3.5.4") -> None:
-     """Install pyspark at runtime via `uv pip` into this env (no-op if present)."""
-     import importlib, shutil, subprocess, sys, re
-
-     try:
-         import importlib.util
-
-         if importlib.util.find_spec("pyspark"):
-             return
-     except Exception:
-         pass
-     uv = shutil.which("uv")
-     if not uv:
-         raise RuntimeError(
-             "`uv` not found on PATH. Please install uv or add it to PATH."
-         )
-     if version is None:
-         spec = "pyspark"
-     else:
-         v = version.strip()
-         spec = f"pyspark{v}" if re.match(r"^[<>=!~]", v) else f"pyspark=={v}"
-     subprocess.check_call([uv, "pip", "install", "--python", sys.executable, spec])
-
-
  def EvaluationWithBuildingFootprint(
      main_dir,
      method_name,
      output_dir,
-     country=None,
      building_footprint=None,
      shapefile_dir=None,
-     geeprojectID=None,
  ):
      tif_files_main = glob.glob(os.path.join(main_dir, "*.tif"))
      if tif_files_main:
@@ -410,31 +387,23 @@ def EvaluationWithBuildingFootprint(
 
      building_footprintMS = building_footprint
 
+     # If no building footprint is provided, extract one using the ArcGIS API
      if building_footprintMS is None:
-         ensure_pyspark()
-         from .microsoftBF import BuildingFootprintwithISO
-
          out_dir = os.path.join(method_path, "BuildingFootprint")
          if not os.path.exists(out_dir):
              os.makedirs(out_dir)
          EX_building_footprint = find_existing_footprint(out_dir)
-         if not EX_building_footprint:
+         if EX_building_footprint:
+             building_footprintMS = EX_building_footprint
+         else:
              boundary_dir = shapefile_dir if shapefile_dir else boundary
-
-             if geeprojectID:
-                 BuildingFootprintwithISO(
-                     country,
-                     boundary_dir,
-                     out_dir,
-                     geeprojectID=geeprojectID,
-                 )
-             else:
-                 BuildingFootprintwithISO(country, boundary_dir, out_dir)
-             building_footprintMS = os.path.join(
-                 out_dir, f"building_footprint.gpkg"
+             getBuildingFootprint(
+                 boundary=boundary_dir,
+                 output_dir=out_dir,
              )
-         else:
-             building_footprintMS = EX_building_footprint
+             # After downloading, find the newly created footprint
+             building_footprintMS = find_existing_footprint(out_dir)
+
 
      process_TIFF(
          tif_files,
          contingency_files,
@@ -471,33 +440,22 @@ def EvaluationWithBuildingFootprint(
              building_footprintMS = building_footprint
 
              if building_footprintMS is None:
-                 ensure_pyspark()
-                 from .microsoftBF import BuildingFootprintwithISO
-
                  out_dir = os.path.join(method_path, "BuildingFootprint")
                  if not os.path.exists(out_dir):
                      os.makedirs(out_dir)
                  EX_building_footprint = find_existing_footprint(out_dir)
-                 if not EX_building_footprint:
+                 if EX_building_footprint:
+                     building_footprintMS = EX_building_footprint
+                 else:
                      boundary_dir = (
                          shapefile_dir if shapefile_dir else boundary
                      )
-                     if geeprojectID:
-                         BuildingFootprintwithISO(
-                             country,
-                             boundary_dir,
-                             out_dir,
-                             geeprojectID=geeprojectID,
-                         )
-                     else:
-                         BuildingFootprintwithISO(
-                             country, boundary_dir, out_dir
-                         )
-                     building_footprintMS = os.path.join(
-                         out_dir, f"building_footprint.gpkg"
+                     getBuildingFootprint(
+                         boundary=boundary_dir,
+                         output_dir=out_dir,
                      )
-                 else:
-                     building_footprintMS = EX_building_footprint
+                     # After downloading, find the newly created footprint
+                     building_footprintMS = find_existing_footprint(out_dir)
 
              process_TIFF(
                  tif_files,
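
With these hunks applied, the country and geeprojectID parameters are gone and footprint extraction happens automatically through the ArcGIS service when no footprint is supplied. A sketch of the new call (editor's illustration; paths and the method label are hypothetical):

    EvaluationWithBuildingFootprint(
        main_dir="./case_study",          # folder containing the *.tif FIMs
        method_name="model_A",            # hypothetical method label
        output_dir="./evaluation_out",
        building_footprint=None,          # None -> auto-extract via getBuildingFootprint
        shapefile_dir=None,               # optional boundary override
    )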
@@ -1,6 +1,6 @@
  from .evaluationFIM import EvaluateFIM
  from .printcontingency import PrintContingencyMap
  from .plotevaluationmetrics import PlotEvaluationMetrics
- from .PWBs3 import get_PWB
+ from .water_bodies import get_PWB, ExtractPWB
 
- __all__ = ["EvaluateFIM", "PrintContingencyMap", "PlotEvaluationMetrics", "get_PWB"]
+ __all__ = ["EvaluateFIM", "PrintContingencyMap", "PlotEvaluationMetrics", "get_PWB", "ExtractPWB"]