wildfire-analyser 0.1.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of wildfire-analyser might be problematic. Click here for more details.
- wildfire_analyser/__init__.py +4 -0
- wildfire_analyser/client.py +72 -0
- wildfire_analyser/fire_assessment/__init__.py +0 -0
- wildfire_analyser/fire_assessment/date_utils.py +14 -0
- wildfire_analyser/fire_assessment/deliverable.py +12 -0
- wildfire_analyser/fire_assessment/downloaders.py +106 -0
- wildfire_analyser/fire_assessment/fire_severity.py +15 -0
- wildfire_analyser/fire_assessment/post_fire_assessment.py +447 -0
- wildfire_analyser/fire_assessment/validators.py +60 -0
- wildfire_analyser-0.1.9.dist-info/METADATA +128 -0
- wildfire_analyser-0.1.9.dist-info/RECORD +14 -0
- wildfire_analyser-0.1.9.dist-info/WHEEL +5 -0
- wildfire_analyser-0.1.9.dist-info/licenses/LICENSE +21 -0
- wildfire_analyser-0.1.9.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
# client.py
|
|
2
|
+
import logging
|
|
3
|
+
import os
|
|
4
|
+
from dotenv import load_dotenv
|
|
5
|
+
|
|
6
|
+
from wildfire_analyser import PostFireAssessment, Deliverable, FireSeverity
|
|
7
|
+
|
|
8
|
+
logger = logging.getLogger(__name__)
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def main():
    """Run a demo post-fire assessment over a sample ROI and save the deliverables locally.

    Reads the Earth Engine service-account key from the GEE_PRIVATE_KEY_JSON
    environment variable (via a local .env file), runs the analysis for a
    fixed date range, logs the per-class severity table, writes every image
    deliverable to the current directory, and logs timing stats.
    """
    # Configure global logging format and level
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s [%(levelname)s] %(message)s"
    )
    logger.info("Client starts")

    try:
        # Load the local .env file
        load_dotenv()

        # Read the environment variable from .env
        gee_key_json = os.getenv("GEE_PRIVATE_KEY_JSON")
        if gee_key_json is None:
            raise ValueError("GEE_PRIVATE_KEY_JSON environment variable is not set in .env")

        # Path to the GeoJSON polygon used as the Region of Interest (ROI)
        geojson_path = os.path.join("polygons", "APAPiracicabaJuqueriMirimAreaII.geojson")

        # Initialize the wildfire assessment processor with date range
        runner = PostFireAssessment(
            gee_key_json,
            geojson_path, "2024-09-01", "2024-11-08",
            deliverables=[
                Deliverable.RGB_PRE_FIRE,
                Deliverable.RGB_POST_FIRE,
                Deliverable.NDVI_PRE_FIRE,
                Deliverable.NDVI_POST_FIRE,
                Deliverable.RBR,
            ],
            track_timings=True,
        )

        # Run the analysis
        result = runner.run_analysis()

        # Print fire severity
        for row in result["area_by_severity"]:
            logger.info(
                f"{row['severity_name']}({row['severity']}): {row['ha']:.2f} ha ({row['percent']:.2f}%) -> {row['color']}"
            )

        # Save each deliverable to local files.
        # Fix: the dict keys were unused, so iterate values() directly.
        for item in result["images"].values():
            with open(item["filename"], "wb") as f:
                f.write(item["data"])
            logger.info(f"Saved file: {item['filename']}")

        # Print processing time metrics
        timings = result.get("timings", {})
        logger.info("Stats:")
        for key, value in timings.items():
            logger.info(f" → {key}: {value:.2f} sec")

        logger.info("Client ends")

    # Fix: the bound exception name was unused; logger.exception already
    # records the active traceback.
    except Exception:
        logger.exception("Unexpected error during processing")


# Entry point
if __name__ == "__main__":
    main()
|
|
File without changes
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
# date_utils.py
|
|
2
|
+
import logging
|
|
3
|
+
from datetime import datetime, timedelta
|
|
4
|
+
|
|
5
|
+
logger = logging.getLogger(__name__)
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def expand_dates(start_date: str, end_date: str, days_before_after: int):
    """Widen a fire date range to improve satellite coverage.

    Returns a 4-tuple ``(before_start, before_end, after_start, after_end)``:
    the pre-fire window opens ``days_before_after`` days before *start_date*
    and ends at *start_date*; the post-fire window opens at *end_date* and
    closes ``days_before_after`` days after it. All values are
    ``YYYY-MM-DD`` strings.
    """
    fmt = "%Y-%m-%d"
    pad = timedelta(days=days_before_after)
    window_open = datetime.strptime(start_date, fmt) - pad
    window_close = datetime.strptime(end_date, fmt) + pad
    return (
        window_open.strftime(fmt),
        start_date,
        end_date,
        window_close.strftime(fmt),
    )
|
|
14
|
+
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
# deliverable.py
|
|
2
|
+
from enum import Enum
|
|
3
|
+
|
|
4
|
+
class Deliverable(Enum):
    """Products the assessment pipeline can generate.

    Each member's value doubles as the filename stem used for the
    corresponding output file.
    """

    RGB_PRE_FIRE = "rgb_pre_fire"
    RGB_POST_FIRE = "rgb_post_fire"
    NDVI_PRE_FIRE = "ndvi_pre_fire"
    NDVI_POST_FIRE = "ndvi_post_fire"
    RBR = "rbr"

    def __str__(self) -> str:
        # Render as the bare value so string formatting yields the stem
        # directly (e.g. f"{Deliverable.RBR}" -> "rbr").
        return self.value
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
# downloaders.py
|
|
2
|
+
import logging
|
|
3
|
+
import ee
|
|
4
|
+
import requests
|
|
5
|
+
|
|
6
|
+
logger = logging.getLogger(__name__)
|
|
7
|
+
|
|
8
|
+
# Local cache: { (bbox_key) → best_scale }
|
|
9
|
+
scale_cache = {}
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def bbox_key(region: ee.Geometry):
    """Derive a stable, compact dict key for a geometry.

    Fetches the geometry's bounding box, flattens the outer ring's
    coordinates, and rounds each value to 3 decimal places so that
    near-identical regions map to the same cache entry.
    """
    outer_ring = region.bounds().coordinates().getInfo()[0]
    return tuple(round(coord, 3) for point in outer_ring for coord in point)
|
|
23
|
+
|
|
24
|
+
def download_image(
    image: ee.Image,
    region: ee.Geometry,
    scale: int = 10,
    format: str = "GEO_TIFF",
    bands: list | None = None,
) -> bytes:
    """
    Generic and robust Earth Engine image downloader with caching.

    - If a successful scale was already found for this region, reuses it directly.
    - Otherwise tries scales: scale → scale+15 → ... → 150.
    - Caches the first scale that works for future downloads.
    - Works for both single-band (TIFF) and multi-band images.

    Parameters:
        image:  Earth Engine image to export.
        region: export region; also serves as the cache key via bbox_key().
        scale:  starting pixel size (metres) for the progressive search.
        format: format string passed to getDownloadURL (e.g. "GEO_TIFF", "JPEG").
        bands:  optional list of band names to select before export.

    Returns:
        Raw bytes of the downloaded file.

    Raises:
        RuntimeError: when no scale in the search range is accepted.
        Exception: any non-"too large" error from Earth Engine or the HTTP
            request is re-raised immediately.
    """

    region_id = bbox_key(region)

    # Select band(s) if needed
    img = image.select(bands) if bands else image

    # Try using cached scale first (fast path)
    if region_id in scale_cache:
        cached_scale = scale_cache[region_id]

        try:
            logger.info(f"Using cached scale {cached_scale} m for region {region_id}")
            url = img.getDownloadURL({
                "scale": cached_scale,
                "region": region,
                "format": format
            })

            resp = requests.get(url, stream=True)
            resp.raise_for_status()

            logger.info(f"Downloaded successfully with cached scale {cached_scale} m")
            return resp.content

        except Exception as e:
            # A cached scale can stop working (e.g. a different band count
            # changes the request size) — fall back to the search below.
            logger.warning(
                f"Cached scale {cached_scale} m failed ({e}). Will try fallback loop."
            )
            # continue to fallback progressive search

    # Progressive search for a working scale (slow path).
    # NOTE(review): with the default scale=10 the attempts are 10, 25, …, 145;
    # 150 itself is never tried, despite the message below — confirm intent.
    for attempt_scale in range(scale, 151, 15):

        try:
            url = img.getDownloadURL({
                "scale": attempt_scale,
                "region": region,
                "format": format
            })

            resp = requests.get(url, stream=True)
            resp.raise_for_status()

            logger.info(f"Downloaded successfully at {attempt_scale} m")

            # SAVE the working scale in cache for future images
            scale_cache[region_id] = attempt_scale
            logger.info(
                f"Caching scale {attempt_scale} m for region {region_id}"
            )

            return resp.content

        except Exception as e:
            # classic GEE “too large” error
            if "Total request size" in str(e):
                logger.info(
                    f"Scale {attempt_scale} m rejected (too large). Trying next..."
                )
                continue

            # other error → raise immediately
            raise

    # No scale worked up to 150 m
    raise RuntimeError(
        "Unable to download image even at 150 m — region too large."
    )
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
# fire_severity.py
|
|
2
|
+
from enum import IntEnum
|
|
3
|
+
|
|
4
|
+
class FireSeverity(IntEnum):
    """Burn-severity classes.

    Members compare as their integer class code (0–4) and also expose a
    human-readable ``label`` attribute for display.
    """

    UNBURNED = (0, "Unburned")
    LOW = (1, "Low")
    MODERATE = (2, "Moderate")
    HIGH = (3, "High")
    VERY_HIGH = (4, "Very High")

    def __new__(cls, value, label):
        # Build the int-backed member first, then attach the display label.
        member = int.__new__(cls, value)
        member._value_ = value
        member.label = label
        return member
|
|
@@ -0,0 +1,447 @@
|
|
|
1
|
+
# post_fire_assessment.py
|
|
2
|
+
import logging
|
|
3
|
+
import json
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from tempfile import NamedTemporaryFile
|
|
6
|
+
import time
|
|
7
|
+
|
|
8
|
+
import ee
|
|
9
|
+
from rasterio.io import MemoryFile
|
|
10
|
+
|
|
11
|
+
from wildfire_analyser.fire_assessment.date_utils import expand_dates
|
|
12
|
+
from wildfire_analyser.fire_assessment.deliverable import Deliverable
|
|
13
|
+
from wildfire_analyser.fire_assessment.validators import (
|
|
14
|
+
validate_date,
|
|
15
|
+
validate_geojson_path,
|
|
16
|
+
validate_deliverables,
|
|
17
|
+
ensure_not_empty
|
|
18
|
+
)
|
|
19
|
+
from wildfire_analyser.fire_assessment.downloaders import download_image
|
|
20
|
+
|
|
21
|
+
CLOUD_THRESHOLD = 100
|
|
22
|
+
COLLECTION_ID = "COPERNICUS/S2_SR_HARMONIZED"
|
|
23
|
+
DAYS_BEFORE_AFTER = 30
|
|
24
|
+
IMAGE_SCALE = 10
|
|
25
|
+
|
|
26
|
+
logger = logging.getLogger(__name__)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class PostFireAssessment:
|
|
30
|
+
def __init__(self, gee_key_json: str, geojson_path: str, start_date: str, end_date: str,
             deliverables=None, track_timings: bool = False):
    """
    Set up a post-fire assessment run.

    Parameters:
        gee_key_json: Earth Engine service-account key as a JSON string.
        geojson_path: path to a .geojson file whose first feature's
            geometry becomes the Region of Interest.
        start_date / end_date: fire date range, "YYYY-MM-DD".
        deliverables: list of Deliverable members to produce.
            NOTE(review): the validators document None as "all
            deliverables", but below None collapses to an empty list, so
            run_analysis would generate nothing — confirm intent.
        track_timings: when True, run_analysis records stage durations.

    Raises:
        ValueError: on invalid dates, path, deliverables, or reversed range.
        FileNotFoundError: if the GeoJSON file is missing.
        RuntimeError: if Earth Engine authentication fails.
    """
    # Validate input parameters
    validate_geojson_path(geojson_path)
    validate_date(start_date, "start_date")
    validate_date(end_date, "end_date")
    validate_deliverables(deliverables)

    # Check chronological order (lexicographic compare is valid because
    # both strings are zero-padded ISO dates).
    if start_date > end_date:
        raise ValueError(f"'start_date' must be earlier than 'end_date'. Received: {start_date} > {end_date}")

    # Store parameters; authentication runs first so later steps can use EE.
    self.gee = self.gee_authenticate(gee_key_json)
    self.roi = self.load_geojson(geojson_path)
    self.start_date = start_date
    self.end_date = end_date
    self.deliverables = deliverables or []
    self.track_timings = track_timings
|
|
49
|
+
|
|
50
|
+
def gee_authenticate(self, gee_key_json: str) -> ee:
    """
    Authenticate to Google Earth Engine using a service account key JSON.

    Parameters:
        gee_key_json: full service-account key as a JSON string; must
            contain a "client_email" field.

    Returns:
        The initialized ``ee`` module itself.

    Raises:
        ValueError: if the key string is not valid JSON.
        RuntimeError: if Earth Engine initialization fails.
    """
    # Convert the JSON string into a dictionary
    try:
        key_dict = json.loads(gee_key_json)
    except json.JSONDecodeError as e:
        raise ValueError(f"Error decoding GEE_PRIVATE_KEY_JSON: {e}") from e

    # Initialize GEE through a temporary key file, because
    # ee.ServiceAccountCredentials expects a path on disk; the file is
    # deleted automatically when the `with` block exits.
    try:
        with NamedTemporaryFile(mode="w+", suffix=".json") as f:
            json.dump(key_dict, f)
            f.flush()
            credentials = ee.ServiceAccountCredentials(key_dict["client_email"], f.name)
            ee.Initialize(credentials)
    except Exception as e:
        raise RuntimeError(f"Failed to authenticate with Google Earth Engine: {e}") from e

    return ee
|
|
71
|
+
|
|
72
|
+
def load_geojson(self, path: str) -> ee.Geometry:
    """Read a GeoJSON file and convert its first feature into an Earth Engine Geometry."""
    geo_file = Path(path)

    # Fail fast with a clear message when the file is missing.
    if not geo_file.exists():
        raise FileNotFoundError(f"GeoJSON not found: {path}")

    with geo_file.open('r') as fh:
        parsed = json.load(fh)

    # Convert the first feature's geometry into an EE geometry; wrap any
    # failure (missing keys, malformed coordinates) as ValueError.
    try:
        return ee.Geometry(parsed['features'][0]['geometry'])
    except Exception as e:
        raise ValueError(f"Invalid GeoJSON geometry: {e}") from e
|
|
89
|
+
|
|
90
|
+
def _load_full_collection(self):
    """Assemble the base Sentinel-2 collection for the ROI.

    Filters by ROI intersection and the cloud threshold, sorts by cloud
    percentage (descending flag False), keeps the working bands, and maps a
    step that appends ×0.0001-scaled copies of every 'B*' band under a
    '_refl' suffix.
    """
    bands_to_select = ['B2', 'B3', 'B4', 'B8', 'B12', 'QA60']

    def _with_reflectance(img):
        # Scale raw band values by 1e-4 and store them as *_refl bands,
        # keeping the original bands untouched.
        scaled = img.select('B.*').multiply(0.0001)
        suffixed = scaled.bandNames().map(lambda b: ee.String(b).cat('_refl'))
        return img.addBands(scaled.rename(suffixed))

    base = (
        self.gee.ImageCollection(COLLECTION_ID)
        .filterBounds(self.roi)
        .filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', CLOUD_THRESHOLD))
        .sort('CLOUDY_PIXEL_PERCENTAGE', False)
        .select(bands_to_select)
    )

    return base.map(_with_reflectance)
|
|
112
|
+
|
|
113
|
+
def merge_bands(self, band_tiffs: dict[str, bytes]) -> bytes:
    """
    Merge multiple single-band GeoTIFFs (raw bytes) into a single multi-band GeoTIFF.

    Parameters:
        band_tiffs: mapping of band name -> single-band GeoTIFF bytes.
            Insertion order determines band order in the output.

    Returns:
        Bytes of a multi-band GeoTIFF whose profile is copied from the
        first input band.

    Raises:
        ValueError: if band_tiffs is empty.
    """
    if not band_tiffs:
        # Robustness fix: the original raised an opaque StopIteration here.
        raise ValueError("band_tiffs must not be empty")

    memfiles = {b: MemoryFile(tiff_bytes) for b, tiff_bytes in band_tiffs.items()}
    datasets = {b: memfiles[b].open() for b in memfiles}
    try:
        # Reference band to copy metadata
        first = next(iter(datasets.values()))
        profile = first.profile.copy()
        profile.update(count=len(datasets))

        # Merge bands
        with MemoryFile() as merged_mem:
            with merged_mem.open(**profile) as dst:
                for idx, (band, ds) in enumerate(datasets.items(), start=1):
                    dst.write(ds.read(1), idx)

            return merged_mem.read()
    finally:
        # Fix: the original leaked every MemoryFile and dataset it opened;
        # release them even when writing fails.
        for ds in datasets.values():
            ds.close()
        for mem in memfiles.values():
            mem.close()
|
|
132
|
+
|
|
133
|
+
def _generate_rgb_pre_fire(self, mosaic: ee.Image) -> tuple[dict, dict]:
    """
    Generate the pre-fire RGB deliverables.

    Returns:
        (tiff, jpeg): the technical multi-band GeoTIFF and the visual
        JPEG with ROI overlay. Fix: the original annotated the return
        type as ``dict`` although it returns a 2-tuple.
    """
    # Generate the technical multi-band RGB GeoTIFF
    tiff = self._generate_rgb(mosaic, Deliverable.RGB_PRE_FIRE.value)

    # Generate the visual RGB JPEG (with overlay); reflectance stretch 0.0–0.3
    rgb_img = mosaic.select(['B4_refl', 'B3_refl', 'B2_refl'])
    vis_params = {"min": 0.0, "max": 0.3}
    jpeg = self._generate_visual_image(rgb_img, "rgb_pre_fire_visual", vis_params)

    return tiff, jpeg
|
|
146
|
+
|
|
147
|
+
def _generate_rgb_post_fire(self, mosaic: ee.Image) -> tuple[dict, dict]:
    """
    Generate the post-fire RGB deliverables.

    Returns:
        (tiff, jpeg): the technical multi-band GeoTIFF and the visual
        JPEG with ROI overlay. Fix: the original annotated the return
        type as ``dict`` although it returns a 2-tuple.
    """
    # Generate the technical multi-band RGB GeoTIFF
    tiff = self._generate_rgb(mosaic, Deliverable.RGB_POST_FIRE.value)

    # Generate the visual RGB JPEG (with overlay); reflectance stretch 0.0–0.3
    rgb_img = mosaic.select(['B4_refl', 'B3_refl', 'B2_refl'])
    vis_params = {"min": 0.0, "max": 0.3}
    jpeg = self._generate_visual_image(rgb_img, "rgb_post_fire_visual", vis_params)

    return tiff, jpeg
|
|
160
|
+
|
|
161
|
+
def _generate_rgb(self, mosaic, filename_prefix):
    """
    Build an RGB (B4, B3, B2) product as a single multiband GeoTIFF.

    Each reflectance band is downloaded individually, then merged into one
    file named ``{filename_prefix}.tif``.
    """
    rgb_bands = ('B4_refl', 'B3_refl', 'B2_refl')
    single_band_tiffs = {
        band: download_image(
            image=mosaic,
            bands=[band],
            region=self.roi,
            scale=IMAGE_SCALE,
            format="GEO_TIFF",
        )
        for band in rgb_bands
    }
    merged = self.merge_bands(single_band_tiffs)

    return {
        "filename": f"{filename_prefix}.tif",
        "content_type": "image/tiff",
        "data": merged,
    }
|
|
177
|
+
|
|
178
|
+
def _generate_visual_image(self, img: ee.Image, filename: str, vis_params: dict) -> dict:
    """
    Generate a JPEG of an Earth Engine image with a styled ROI overlay.

    Parameters:
        img: the image to visualize.
        filename: base name (without extension) for the output file.
        vis_params: keyword arguments passed to ``Image.visualize``
            (min/max stretch, palette, ...).

    Returns:
        Deliverable dict with "filename", "content_type", and "data".
    """
    vis = img.visualize(**vis_params)
    overlay = self._styled_roi_overlay().visualize()
    final = vis.blend(overlay)

    jpeg_bytes = download_image(
        image=final,
        region=self.roi,
        scale=IMAGE_SCALE,
        format="JPEG"
    )

    return {
        # Fix: use the caller-supplied name; the original hard-coded the
        # placeholder "(unknown).jpg" and ignored the `filename` parameter,
        # so every JPEG deliverable overwrote the same file.
        "filename": f"{filename}.jpg",
        "content_type": "image/jpeg",
        "data": jpeg_bytes
    }
|
|
199
|
+
|
|
200
|
+
def _generate_ndvi(self, mosaic: ee.Image, filename: str) -> dict:
    """
    Download the mosaic's 'ndvi' band (added by _build_mosaic_with_indexes
    from B8_refl and B4_refl) as a single-band GeoTIFF deliverable.

    Parameters:
        mosaic: mosaic that already carries an 'ndvi' band.
        filename: base name (without extension) for the output file.

    Returns:
        Deliverable dict with "filename", "content_type", and "data".
    """
    data = download_image(image=mosaic, bands=['ndvi'], region=self.roi, scale=IMAGE_SCALE, format="GEO_TIFF")
    return {
        # Fix: use the caller-supplied name; the original hard-coded the
        # placeholder "(unknown).tif" and ignored the `filename` parameter,
        # so pre- and post-fire NDVI collided on the same key/file.
        "filename": f"{filename}.tif",
        "content_type": "image/tiff",
        "data": data
    }
|
|
212
|
+
|
|
213
|
+
def _generate_rbr(self, rbr_img: ee.Image, severity_img: ee.Image) -> tuple[dict, dict, dict]:
    """
    Package the RBR deliverables.

    Produces three items:
      - rbr.tif (GeoTIFF of the raw index)
      - rbr_severity_visual.jpg (one color per severity class 0..4)
      - rbr_visual.jpg (continuous black→yellow→red palette)
    """
    # Raw index as GeoTIFF
    tiff_deliverable = {
        "filename": "rbr.tif",
        "content_type": "image/tiff",
        "data": download_image(image=rbr_img, bands=['rbr'], region=self.roi, scale=IMAGE_SCALE, format="GEO_TIFF"),
    }

    # Continuous-palette JPEG
    continuous_params = {"min": -0.5, "max": 0.6, "palette": ["black", "yellow", "red"]}
    visual_deliverable = self._generate_visual_image(rbr_img, "rbr_visual", continuous_params)

    # Class-colored JPEG: classes 0..4 → green/yellow/orange/red/maroon
    class_params = {
        "min": 0,
        "max": 4,
        "palette": ["00FF00","FFFF00","FFA500","FF0000","8B4513"]
    }
    severity_visual_deliverable = self._generate_visual_image(severity_img, "rbr_severity_visual", class_params)

    return tiff_deliverable, severity_visual_deliverable, visual_deliverable
|
|
241
|
+
|
|
242
|
+
def _styled_roi_overlay(self):
    """Render the ROI polygon as a purple outline with a fully transparent fill."""
    roi_fc = ee.FeatureCollection([ee.Feature(self.roi)])
    return roi_fc.style(
        color='#800080',        # purple border
        fillColor='00000000',   # transparent fill (alpha = 0)
        width=3,
    )
|
|
251
|
+
|
|
252
|
+
def _build_mosaic_with_indexes(self, collection: ee.ImageCollection) -> ee.Image:
    """
    Mosaic a filtered collection and append normalized-difference bands:
    'ndvi' from (B8_refl, B4_refl) and 'nbr' from (B8_refl, B12_refl).
    """
    composite = collection.mosaic()
    index_bands = [
        composite.normalizedDifference(["B8_refl", "B4_refl"]).rename("ndvi"),
        composite.normalizedDifference(["B8_refl", "B12_refl"]).rename("nbr"),
    ]
    return composite.addBands(index_bands)
|
|
261
|
+
|
|
262
|
+
def _compute_rbr(self, before_mosaic: ee.Image, after_mosaic: ee.Image) -> ee.Image:
    """
    Compute RBR (Relative Burn Ratio) from pre- and post-fire mosaics.

    Both mosaics must already carry an 'nbr' band; the result is a
    single-band image named 'rbr'.
    """
    nbr_before = before_mosaic.select('nbr')
    dnbr = nbr_before.subtract(after_mosaic.select('nbr')).rename('dnbr')
    # The +1.001 offset keeps the denominator away from zero.
    return dnbr.divide(nbr_before.add(1.001)).rename('rbr')
|
|
270
|
+
|
|
271
|
+
def _compute_area_by_severity(self, severity_img: ee.Image) -> dict[int, float]:
    """
    Calculates the area per class (in hectares) within the ROI in an optimized way.

    Parameters:
        severity_img: single-band image of integer class codes 0..4
            (output of _classify_rbr_severity).

    Returns:
        {class_code: hectares} for every class 0..4 (0.0 when absent).
    """
    # 1 Sentinel-2 pixel = 10 m → pixel area = 100 m² = 0.01 ha
    pixel_area_ha = ee.Image.pixelArea().divide(10000)

    # Creates an image using 'severity' as a mask for each class
    def area_per_class(c):
        mask = severity_img.eq(c)
        return pixel_area_ha.updateMask(mask).rename('area_' + str(c))

    class_images = [area_per_class(c) for c in range(5)]
    stacked = ee.Image.cat(class_images)

    # Reduces all bands simultaneously — one server round-trip instead of
    # five separate reduceRegion calls.
    areas = stacked.reduceRegion(
        reducer=ee.Reducer.sum(),
        geometry=self.roi,
        scale=IMAGE_SCALE,
        maxPixels=1e12
    ).getInfo()

    # `or 0` guards against explicit None entries in the server response.
    return { c: float(areas.get(f'area_{c}', 0) or 0) for c in range(5) }
|
|
295
|
+
|
|
296
|
+
def _classify_rbr_severity(self, rbr_img: ee.Image) -> ee.Image:
    """
    Classify RBR by severity:
    0 = Unburned (RBR < 0.1)
    1 = Low (0.1 ≤ RBR < 0.27)
    2 = Moderate (0.27 ≤ RBR < 0.44)
    3 = High (0.44 ≤ RBR < 0.66)
    4 = Very High (RBR ≥ 0.66)

    Parameters:
        rbr_img: single-band image named 'rbr'.

    Returns:
        Single-band integer image named 'severity' with codes 0..4.
    """

    # Chained ternaries evaluate top-down, so each bound only needs the
    # upper threshold of its class.
    severity = rbr_img.expression(
        """
        (b('rbr') < 0.10) ? 0 :
        (b('rbr') < 0.27) ? 1 :
        (b('rbr') < 0.44) ? 2 :
        (b('rbr') < 0.66) ? 3 :
        4
        """
    ).rename("severity")

    return severity
|
|
317
|
+
|
|
318
|
+
def format_severity_table(self, area_dict: dict[int, float]) -> list[dict]:
    """
    Turn a raw {severity_class: hectares} mapping into display rows.

    Returns one row per class 0..4, each a dict with keys:
    severity, severity_name, ha, percent (share of the summed area),
    and color.
    """
    # (name, color) per severity class code.
    class_meta = {
        0: ("Unburned", "Green"),
        1: ("Low", "Yellow"),
        2: ("Moderate", "Orange"),
        3: ("High", "Red"),
        4: ("Very High", "Maroon"),
    }

    total_ha = sum(area_dict.values()) or 1  # avoid division by zero

    rows = []
    for class_code, (name, color) in class_meta.items():
        hectares = float(area_dict.get(class_code, 0))
        rows.append({
            "severity": class_code,
            "severity_name": name,
            "ha": round(hectares, 2),
            "percent": round(hectares / total_ha * 100, 2),
            "color": color,
        })

    return rows
|
|
356
|
+
|
|
357
|
+
def force_execution(self, obj):
    """
    Forces GEE to execute pending computations while retrieving the smallest possible data.

    Used only for timing instrumentation: Earth Engine objects are lazy, so
    without a small getInfo() round-trip the stage durations measured in
    run_analysis would not include server-side work.

    Returns the small fetched value, or None if anything fails — failures
    are deliberately swallowed because this is best-effort instrumentation.
    """
    try:
        # Collections → safest, smallest fetch possible
        if isinstance(obj, ee.ImageCollection) or isinstance(obj, ee.FeatureCollection):
            return obj.size().getInfo()

        # Images → never call getInfo() directly (too heavy)
        if isinstance(obj, ee.Image):
            # Use a tiny region and simple stats to force execution
            # without downloading the full image
            test = obj.reduceRegion(
                reducer=ee.Reducer.mean(),
                geometry=self.roi.centroid(),
                scale=100,
                maxPixels=1e9
            )
            return test.getInfo()

        # Numbers / Dictionaries / anything else.
        # NOTE(review): plain Python values (e.g. the dict run_analysis
        # passes in) have no getInfo() and fall into the except branch,
        # returning None — confirm this is intended.
        return obj.getInfo()

    except Exception:
        return None
|
|
383
|
+
|
|
384
|
+
def run_analysis(self):
    """
    Execute the full post-fire assessment pipeline.

    Steps: load the Sentinel-2 collection, build pre- and post-fire
    mosaics over expanded date windows, compute RBR and its severity
    classification, then generate only the deliverables requested in
    self.deliverables.

    Returns:
        dict with keys:
            "images": {filename: {"filename", "content_type", "data"}}
            "timings": {stage_label: seconds} (empty unless track_timings)
            "area_by_severity": per-class rows (see format_severity_table)
    """
    timings = {}

    # Load satellite collection
    if self.track_timings: t0 = time.time()
    full_collection = self._load_full_collection()
    if self.track_timings:
        # force_execution triggers the lazy EE computation so the measured
        # time includes server-side work.
        self.force_execution(full_collection)
        timings["Sat collection loaded"] = time.time() - t0

    # Expand dates to maximize satellite image coverage:
    # pre-fire window = [start-N days, start], post-fire = [end, end+N days]
    before_start, before_end, after_start, after_end = expand_dates(
        self.start_date, self.end_date, DAYS_BEFORE_AFTER
    )

    # Build pre fire mosaic
    if self.track_timings: t1 = time.time()
    before_collection = full_collection.filterDate(before_start, before_end)
    ensure_not_empty(before_collection, before_start, before_end)
    before_mosaic = self._build_mosaic_with_indexes(before_collection)

    # Build post fire mosaic
    after_collection = full_collection.filterDate(after_start, after_end)
    ensure_not_empty(after_collection, after_start, after_end)
    after_mosaic = self._build_mosaic_with_indexes(after_collection)

    # Compute RBR
    rbr = self._compute_rbr(before_mosaic, after_mosaic)

    # Classification and severity extension calculation
    severity = self._classify_rbr_severity(rbr)
    area_stats = self._compute_area_by_severity(severity)
    if self.track_timings:
        self.force_execution(area_stats)
        timings["Indexes calculated"] = time.time() - t1

    # Map each requested deliverable to the callable that produces it.
    # Generators return either a single dict or a tuple/list of dicts.
    deliverable_registry = {
        Deliverable.RGB_PRE_FIRE: lambda ctx: self._generate_rgb_pre_fire(before_mosaic),
        Deliverable.RGB_POST_FIRE: lambda ctx: self._generate_rgb_post_fire(after_mosaic),
        Deliverable.NDVI_PRE_FIRE: lambda ctx: [self._generate_ndvi(before_mosaic, Deliverable.NDVI_PRE_FIRE.value)],
        Deliverable.NDVI_POST_FIRE: lambda ctx: [self._generate_ndvi(after_mosaic, Deliverable.NDVI_POST_FIRE.value)],
        Deliverable.RBR: lambda ctx: self._generate_rbr(rbr, severity),
    }

    # Download binaries
    if self.track_timings: t2 = time.time()
    images = {}

    for d in self.deliverables:
        # NOTE(review): .get() yields None for an unknown key, which would
        # raise TypeError on the call below; deliverables are validated in
        # __init__, so this should be unreachable — confirm.
        gen_fn = deliverable_registry.get(d)
        outputs = gen_fn({})
        if isinstance(outputs, tuple) or isinstance(outputs, list):
            for out in outputs:
                images[out["filename"]] = out
        else:
            images[outputs["filename"]] = outputs

    if self.track_timings: timings["Images downloaded"] = time.time() - t2

    return {
        "images": images,
        "timings": timings,
        "area_by_severity": self.format_severity_table(area_stats)
    }
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
# validators.py
|
|
2
|
+
import os
|
|
3
|
+
import re
|
|
4
|
+
import ee
|
|
5
|
+
from datetime import datetime
|
|
6
|
+
from wildfire_analyser.fire_assessment.deliverable import Deliverable
|
|
7
|
+
|
|
8
|
+
# Strict YYYY-MM-DD shape; calendar validity is checked separately below.
DATE_PATTERN = r"^\d{4}-\d{2}-\d{2}$"


def validate_date(value: str, field_name: str) -> None:
    """Raise ValueError unless *value* is a real calendar date in YYYY-MM-DD form."""
    # Guard the type first so re.match never sees a non-string.
    if not isinstance(value, str):
        raise ValueError(f"{field_name} must be a string in format YYYY-MM-DD.")

    # Shape check: four digits, dash, two digits, dash, two digits.
    if re.match(DATE_PATTERN, value) is None:
        raise ValueError(f"{field_name} must follow format YYYY-MM-DD.")

    # Calendar check: rejects e.g. month 13 or February 30.
    try:
        datetime.strptime(value, "%Y-%m-%d")
    except Exception:
        raise ValueError(f"{field_name} is not a valid calendar date.")
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def validate_geojson_path(path: str) -> None:
    """Raise ValueError unless *path* is a string naming an existing .geojson file.

    Checks run cheapest-first: type, then extension, then filesystem lookup.
    """
    if not isinstance(path, str):
        raise ValueError("geojson_path must be a string.")

    if not path.endswith(".geojson"):
        raise ValueError("geojson_path must end with .geojson")

    if not os.path.isfile(path):
        raise ValueError(f"geojson_path does not exist: {path}")
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def validate_deliverables(deliverables: list | None) -> None:
|
|
38
|
+
"""
|
|
39
|
+
Validate deliverables list.
|
|
40
|
+
If None → allowed (client wants all deliverables).
|
|
41
|
+
If list → ensure each item is Deliverable.
|
|
42
|
+
"""
|
|
43
|
+
if deliverables is None:
|
|
44
|
+
return # valid, user wants default behavior
|
|
45
|
+
|
|
46
|
+
if not isinstance(deliverables, list):
|
|
47
|
+
raise ValueError("deliverables must be a list of Deliverable values.")
|
|
48
|
+
|
|
49
|
+
invalid = [d for d in deliverables if not isinstance(d, Deliverable)]
|
|
50
|
+
if invalid:
|
|
51
|
+
raise ValueError(f"Invalid deliverables: {invalid}")
|
|
52
|
+
|
|
53
|
+
def ensure_not_empty(collection: ee.ImageCollection, start: str, end: str) -> None:
    """
    Raise ValueError if *collection* contains no images for [start, end].

    Parameters:
        collection: the date-filtered Earth Engine collection to check.
        start / end: the date bounds, used only in the error message.
    """
    try:
        size_val = collection.size().getInfo()
    except Exception:
        # NOTE(review): any failure here (network, auth, quota) is treated
        # as "zero images", so the ValueError below can mask the real
        # cause — consider logging or re-raising unexpected exceptions.
        size_val = 0

    if size_val == 0:
        raise ValueError(f"No images found in date range {start} → {end}")
|
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: wildfire-analyser
|
|
3
|
+
Version: 0.1.9
|
|
4
|
+
Summary: Python library for post-fire assessment and wildfire analysis using Google Earth Engine.
|
|
5
|
+
Author: Marcelo Camargo
|
|
6
|
+
License-Expression: MIT
|
|
7
|
+
Project-URL: Homepage, https://github.com/camargo-advanced/wildfire-analyser
|
|
8
|
+
Project-URL: Source, https://github.com/camargo-advanced/wildfire-analyser
|
|
9
|
+
Project-URL: Issues, https://github.com/camargo-advanced/wildfire-analyser/issues
|
|
10
|
+
Requires-Python: >=3.10
|
|
11
|
+
Description-Content-Type: text/markdown
|
|
12
|
+
License-File: LICENSE
|
|
13
|
+
Requires-Dist: geemap==0.36.6
|
|
14
|
+
Requires-Dist: geopandas==1.1.1
|
|
15
|
+
Requires-Dist: geedim==2.0.0
|
|
16
|
+
Requires-Dist: python-dotenv==1.0.1
|
|
17
|
+
Dynamic: license-file
|
|
18
|
+
|
|
19
|
+
# wildfire-analyser
|
|
20
|
+
|
|
21
|
+
Python project for analyzing wildfires in natural reserves.
|
|
22
|
+
|
|
23
|
+
## Installation and Usage
|
|
24
|
+
|
|
25
|
+
Follow the steps below to install and test `wildfire-analyser` inside an isolated environment:
|
|
26
|
+
|
|
27
|
+
```bash
|
|
28
|
+
mkdir /tmp/test
|
|
29
|
+
cd /tmp/test
|
|
30
|
+
|
|
31
|
+
python3 -m venv venv
|
|
32
|
+
source venv/bin/activate
|
|
33
|
+
|
|
34
|
+
pip install wildfire-analyser
|
|
35
|
+
```
|
|
36
|
+
|
|
37
|
+
### Required Files Before Running the Client
|
|
38
|
+
|
|
39
|
+
Before running the client, you **must** prepare two items:
|
|
40
|
+
|
|
41
|
+
---
|
|
42
|
+
|
|
43
|
+
#### **1. Add a GeoJSON polygon**
|
|
44
|
+
|
|
45
|
+
Create a folder named `polygons` in the project root and place your ROI polygon file inside it:
|
|
46
|
+
|
|
47
|
+
```
|
|
48
|
+
/tmp/test/
|
|
49
|
+
├── polygons/
|
|
50
|
+
│ └── your_polygon.geojson
|
|
51
|
+
└── venv/
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
An example GeoJSON file is available in the repository.
|
|
55
|
+
|
|
56
|
+
---
|
|
57
|
+
|
|
58
|
+
#### **2. Create the `.env` file with GEE authentication data**
|
|
59
|
+
|
|
60
|
+
In the project root, add a `.env` file containing your Google Earth Engine authentication variables.
|
|
61
|
+
|
|
62
|
+
A `.env` template is also available in the GitHub repository.
|
|
63
|
+
|
|
64
|
+
```
|
|
65
|
+
/tmp/test/
|
|
66
|
+
├── .env
|
|
67
|
+
├── polygons/
|
|
68
|
+
└── venv/
|
|
69
|
+
```
|
|
70
|
+
|
|
71
|
+
---
|
|
72
|
+
|
|
73
|
+
### Running the Client
|
|
74
|
+
|
|
75
|
+
After adding the `.env` file and your GeoJSON polygon:
|
|
76
|
+
|
|
77
|
+
```bash
|
|
78
|
+
python3 -m wildfire_analyser.client
|
|
79
|
+
```
|
|
80
|
+
|
|
81
|
+
This will start the analysis process, generate the configured deliverables, and save the output files in the current directory.
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
## Setup Instructions for Developers
|
|
86
|
+
|
|
87
|
+
1. **Clone the repository**
|
|
88
|
+
|
|
89
|
+
```bash
|
|
90
|
+
git clone git@github.com:camargo-advanced/wildfire-analyser.git
|
|
91
|
+
cd wildfire-analyser
|
|
92
|
+
```
|
|
93
|
+
|
|
94
|
+
2. **Create a virtual environment**
|
|
95
|
+
|
|
96
|
+
```bash
|
|
97
|
+
python3 -m venv venv
|
|
98
|
+
```
|
|
99
|
+
|
|
100
|
+
3. **Activate the virtual environment**
|
|
101
|
+
|
|
102
|
+
```bash
|
|
103
|
+
source venv/bin/activate
|
|
104
|
+
```
|
|
105
|
+
|
|
106
|
+
4. **Install dependencies**
|
|
107
|
+
Make sure the virtual environment is activated, then run:
|
|
108
|
+
|
|
109
|
+
```bash
|
|
110
|
+
pip install -r requirements.txt
|
|
111
|
+
```
|
|
112
|
+
|
|
113
|
+
5. **Configure environment variables**
|
|
114
|
+
Copy your version of the `.env` file to the root folder with your GEE authentication credentials. A template file `.env.template` is provided as an example.
|
|
115
|
+
|
|
116
|
+
6. **Run the sample client application**
|
|
117
|
+
|
|
118
|
+
```bash
|
|
119
|
+
python3 -m wildfire_analyser.client
|
|
120
|
+
```
|
|
121
|
+
|
|
122
|
+
## Useful Commands
|
|
123
|
+
|
|
124
|
+
* **Deactivate the virtual environment**:
|
|
125
|
+
|
|
126
|
+
```bash
|
|
127
|
+
deactivate
|
|
128
|
+
```
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
wildfire_analyser/__init__.py,sha256=p3mkaE1aO7EtOIEdPF_7cDvQaiPethy6knsCph5g7Sk,242
|
|
2
|
+
wildfire_analyser/client.py,sha256=BQ9Q5B3yb-Lrl6FoYQdt3iByV9TSfs4yNsQcXiX-QoA,2553
|
|
3
|
+
wildfire_analyser/fire_assessment/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
4
|
+
wildfire_analyser/fire_assessment/date_utils.py,sha256=Ct9K_kuQ6iBumb_Mz_L3Ix2I9tNjDBljTkGTzf9_KVE,503
|
|
5
|
+
wildfire_analyser/fire_assessment/deliverable.py,sha256=a2sXeV6ZKXkgXJsFof3t-cStISw_I0ptt9xRvOgOuWU,275
|
|
6
|
+
wildfire_analyser/fire_assessment/downloaders.py,sha256=vTRq0aGjpighy88xv-TpdOX9Df2y4gBiiyJgQxNzhd4,3151
|
|
7
|
+
wildfire_analyser/fire_assessment/fire_severity.py,sha256=sevITwJ5JsboNH4_UtwRoMmGgD6pT8AWZplEOFvX8O8,361
|
|
8
|
+
wildfire_analyser/fire_assessment/post_fire_assessment.py,sha256=oexoimf7oGUm5_8CFRpR85y7Kcb-LC9DlyfEodQG1no,16827
|
|
9
|
+
wildfire_analyser/fire_assessment/validators.py,sha256=jSZg-jxaBvvqXlcGOo2MunQBweWxzI6yWHrwLTIaC-8,1940
|
|
10
|
+
wildfire_analyser-0.1.9.dist-info/licenses/LICENSE,sha256=h8FQRAMePgzOkXkDuMUXNKk5Y034xsJolubMsDwFdtk,1064
|
|
11
|
+
wildfire_analyser-0.1.9.dist-info/METADATA,sha256=8jT7C_WJcXHMXH1hqqmSZ4SiK2lh0Uew1ushRbXT8A0,2789
|
|
12
|
+
wildfire_analyser-0.1.9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
13
|
+
wildfire_analyser-0.1.9.dist-info/top_level.txt,sha256=gvASveJmmFISb6mkorsNLXVGloHHGAt6_kcr9MTfias,18
|
|
14
|
+
wildfire_analyser-0.1.9.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 Marcelo
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
wildfire_analyser
|