tunned-geobr 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. tunned_geobr/__init__.py +38 -0
  2. tunned_geobr/constants.py +13 -0
  3. tunned_geobr/data/grid_state_correspondence_table.csv +140 -0
  4. tunned_geobr/list_geobr.py +39 -0
  5. tunned_geobr/lookup_muni.py +111 -0
  6. tunned_geobr/read_amazon.py +42 -0
  7. tunned_geobr/read_amazon_ibas.py +92 -0
  8. tunned_geobr/read_atlantic_forest_ibas.py +93 -0
  9. tunned_geobr/read_biomes.py +43 -0
  10. tunned_geobr/read_census_tract.py +97 -0
  11. tunned_geobr/read_climate_aggressiveness.py +74 -0
  12. tunned_geobr/read_comparable_areas.py +75 -0
  13. tunned_geobr/read_conservation_units.py +43 -0
  14. tunned_geobr/read_country.py +43 -0
  15. tunned_geobr/read_disaster_risk_area.py +47 -0
  16. tunned_geobr/read_geology.py +77 -0
  17. tunned_geobr/read_geomorphology.py +77 -0
  18. tunned_geobr/read_health_facilities.py +49 -0
  19. tunned_geobr/read_health_region.py +52 -0
  20. tunned_geobr/read_immediate_region.py +81 -0
  21. tunned_geobr/read_indigenous_land.py +44 -0
  22. tunned_geobr/read_intermediate_region.py +61 -0
  23. tunned_geobr/read_meso_region.py +78 -0
  24. tunned_geobr/read_metro_area.py +44 -0
  25. tunned_geobr/read_micro_region.py +78 -0
  26. tunned_geobr/read_mining_processes.py +76 -0
  27. tunned_geobr/read_municipal_seat.py +41 -0
  28. tunned_geobr/read_municipality.py +83 -0
  29. tunned_geobr/read_neighborhood.py +39 -0
  30. tunned_geobr/read_pedology.py +77 -0
  31. tunned_geobr/read_pop_arrangements.py +45 -0
  32. tunned_geobr/read_region.py +41 -0
  33. tunned_geobr/read_schools.py +44 -0
  34. tunned_geobr/read_semiarid.py +42 -0
  35. tunned_geobr/read_settlements.py +85 -0
  36. tunned_geobr/read_state.py +88 -0
  37. tunned_geobr/read_statistical_grid.py +127 -0
  38. tunned_geobr/read_urban_area.py +44 -0
  39. tunned_geobr/read_urban_concentrations.py +46 -0
  40. tunned_geobr/read_weighting_area.py +74 -0
  41. tunned_geobr/utils.py +326 -0
  42. tunned_geobr-0.1.0.dist-info/METADATA +103 -0
  43. tunned_geobr-0.1.0.dist-info/RECORD +46 -0
  44. tunned_geobr-0.1.0.dist-info/WHEEL +4 -0
  45. tunned_geobr-0.1.0.dist-info/entry_points.txt +4 -0
  46. tunned_geobr-0.1.0.dist-info/licenses/LICENSE.txt +2 -0
@@ -0,0 +1,38 @@
1
+ from .read_state import read_state
2
+ from .read_amazon import read_amazon
3
+ from .read_biomes import read_biomes
4
+ from .read_country import read_country
5
+ from .read_municipal_seat import read_municipal_seat
6
+ from .read_region import read_region
7
+ from .read_semiarid import read_semiarid
8
+ from .read_disaster_risk_area import read_disaster_risk_area
9
+ from .read_metro_area import read_metro_area
10
+ from .read_conservation_units import read_conservation_units
11
+ from .read_urban_area import read_urban_area
12
+ from .read_health_facilities import read_health_facilities
13
+ from .read_indigenous_land import read_indigenous_land
14
+ from .read_immediate_region import read_immediate_region
15
+ from .list_geobr import list_geobr
16
+ from .read_census_tract import read_census_tract
17
+ from .read_meso_region import read_meso_region
18
+ from .read_micro_region import read_micro_region
19
+ from .read_municipality import read_municipality
20
+ from .read_weighting_area import read_weighting_area
21
+ from .read_neighborhood import read_neighborhood
22
+ from .read_health_region import read_health_region
23
+ from .read_pop_arrangements import read_pop_arrangements
24
+ from .lookup_muni import lookup_muni
25
+ from .read_intermediate_region import read_intermediate_region
26
+ from .read_urban_concentrations import read_urban_concentrations
27
+ from .read_schools import read_schools
28
+ from .read_comparable_areas import read_comparable_areas
29
+ from .read_biomes import read_biomes
30
+ from .read_statistical_grid import read_statistical_grid
31
+ from .read_mining_processes import read_mining_processes
32
+ from .read_geology import read_geology
33
+ from .read_geomorphology import read_geomorphology
34
+ from .read_pedology import read_pedology
35
+ from .read_climate_aggressiveness import read_climate_aggressiveness
36
+ from .read_amazon_ibas import read_amazon_ibas
37
+ from .read_atlantic_forest_ibas import read_atlantic_forest_ibas
38
+ from .read_settlements import read_settlements
@@ -0,0 +1,13 @@
1
+ from enum import Enum
2
+
3
+
4
+ class DataTypes(Enum):
5
+
6
+ code_muni = "float"
7
+ code_cnes = "float"
8
+ code_state = "float"
9
+ year_update = "float"
10
+ code_neighborhood = "float"
11
+ code_subdistrict = "float"
12
+ code_district = "float"
13
+ geo_bater = "float"
@@ -0,0 +1,140 @@
1
+ name_state,abbrev_state,code_grid
2
+ Acre,AC,ID_50
3
+ Acre,AC,ID_51
4
+ Acre,AC,ID_60
5
+ Acre,AC,ID_61
6
+ Amazonas,AM,ID_51
7
+ Amazonas,AM,ID_60
8
+ Amazonas,AM,ID_61
9
+ Amazonas,AM,ID_62
10
+ Amazonas,AM,ID_63
11
+ Amazonas,AM,ID_70
12
+ Amazonas,AM,ID_71
13
+ Amazonas,AM,ID_72
14
+ Amazonas,AM,ID_73
15
+ Amazonas,AM,ID_80
16
+ Amazonas,AM,ID_81
17
+ Amazonas,AM,ID_82
18
+ Roraima,RR,ID_72
19
+ Roraima,RR,ID_81
20
+ Roraima,RR,ID_82
21
+ Roraima,RR,ID_83
22
+ Roraima,RR,ID_92
23
+ Roraima,RR,ID_93
24
+ Amapá,AP,ID_74
25
+ Amapá,AP,ID_75
26
+ Amapá,AP,ID_84
27
+ Amapá,AP,ID_85
28
+ Pará,PA,ID_53
29
+ Pará,PA,ID_54
30
+ Pará,PA,ID_55
31
+ Pará,PA,ID_63
32
+ Pará,PA,ID_64
33
+ Pará,PA,ID_65
34
+ Pará,PA,ID_73
35
+ Pará,PA,ID_74
36
+ Pará,PA,ID_75
37
+ Pará,PA,ID_76
38
+ Pará,PA,ID_83
39
+ Pará,PA,ID_84
40
+ Pará,PA,ID_85
41
+ Maranhão,MA,ID_55
42
+ Maranhão,MA,ID_56
43
+ Maranhão,MA,ID_65
44
+ Maranhão,MA,ID_66
45
+ Maranhão,MA,ID_75
46
+ Maranhão,MA,ID_76
47
+ Maranhão,MA,ID_77
48
+ Piauí,PI,ID_56
49
+ Piauí,PI,ID_57
50
+ Piauí,PI,ID_66
51
+ Piauí,PI,ID_67
52
+ Piauí,PI,ID_76
53
+ Piauí,PI,ID_77
54
+ Ceará,CE,ID_67
55
+ Ceará,CE,ID_68
56
+ Ceará,CE,ID_77
57
+ Rio Grande do Norte,RN,ID_67
58
+ Rio Grande do Norte,RN,ID_68
59
+ Paraíba,PB,ID_67
60
+ Paraíba,PB,ID_68
61
+ Pernambuco,PE,ID_57
62
+ Pernambuco,PE,ID_58
63
+ Pernambuco,PE,ID_67
64
+ Pernambuco,PE,ID_68
65
+ Pernambuco,PE,ID_69
66
+ Alagoas,AL,ID_57
67
+ Alagoas,AL,ID_58
68
+ Sergipe,SE,ID_57
69
+ Sergipe,SE,ID_58
70
+ Bahia,BA,ID_37
71
+ Bahia,BA,ID_46
72
+ Bahia,BA,ID_47
73
+ Bahia,BA,ID_56
74
+ Bahia,BA,ID_57
75
+ Espírito Santo,ES,ID_36
76
+ Espírito Santo,ES,ID_37
77
+ Espírito Santo,ES,ID_39
78
+ Rio de Janeiro,RJ,ID_26
79
+ Rio de Janeiro,RJ,ID_27
80
+ Rio de Janeiro,RJ,ID_36
81
+ Rio de Janeiro,RJ,ID_37
82
+ São Paulo,SP,ID_24
83
+ São Paulo,SP,ID_25
84
+ São Paulo,SP,ID_26
85
+ São Paulo,SP,ID_34
86
+ São Paulo,SP,ID_35
87
+ Paraná,PR,ID_24
88
+ Paraná,PR,ID_25
89
+ Santa Catarina,SC,ID_14
90
+ Santa Catarina,SC,ID_15
91
+ Santa Catarina,SC,ID_24
92
+ Santa Catarina,SC,ID_25
93
+ Rio Grande do Sul,RS,ID_4
94
+ Rio Grande do Sul,RS,ID_13
95
+ Rio Grande do Sul,RS,ID_14
96
+ Rio Grande do Sul,RS,ID_15
97
+ Mato Grosso do Sul,MS,ID_23
98
+ Mato Grosso do Sul,MS,ID_24
99
+ Mato Grosso do Sul,MS,ID_33
100
+ Mato Grosso do Sul,MS,ID_34
101
+ Mato Grosso do Sul,MS,ID_35
102
+ Mato Grosso do Sul,MS,ID_43
103
+ Mato Grosso do Sul,MS,ID_44
104
+ Minas Gerais,MG,ID_25
105
+ Minas Gerais,MG,ID_26
106
+ Minas Gerais,MG,ID_35
107
+ Minas Gerais,MG,ID_36
108
+ Minas Gerais,MG,ID_37
109
+ Minas Gerais,MG,ID_45
110
+ Minas Gerais,MG,ID_46
111
+ Minas Gerais,MG,ID_47
112
+ Goiás,GO,ID_34
113
+ Goiás,GO,ID_35
114
+ Goiás,GO,ID_44
115
+ Goiás,GO,ID_45
116
+ Goiás,GO,ID_46
117
+ Goiás,GO,ID_55
118
+ Goiás,GO,ID_56
119
+ Distrito Federal,DF,ID_45
120
+ Tocantins,TO,ID_45
121
+ Tocantins,TO,ID_55
122
+ Tocantins,TO,ID_56
123
+ Tocantins,TO,ID_65
124
+ Tocantins,TO,ID_66
125
+ Mato Grosso,MT,ID_33
126
+ Mato Grosso,MT,ID_34
127
+ Mato Grosso,MT,ID_43
128
+ Mato Grosso,MT,ID_44
129
+ Mato Grosso,MT,ID_45
130
+ Mato Grosso,MT,ID_52
131
+ Mato Grosso,MT,ID_53
132
+ Mato Grosso,MT,ID_54
133
+ Mato Grosso,MT,ID_55
134
+ Mato Grosso,MT,ID_63
135
+ Rondônia,RO,ID_42
136
+ Rondônia,RO,ID_43
137
+ Rondônia,RO,ID_51
138
+ Rondônia,RO,ID_52
139
+ Rondônia,RO,ID_53
140
+ Rondônia,RO,ID_62
@@ -0,0 +1,39 @@
1
+ from requests import get
2
+ import pandas as pd
3
+ from io import StringIO
4
+ from urllib.error import HTTPError
5
+ import re
6
+
7
+ def list_geobr():
8
+ """Prints available functions, according to latest README.md file
9
+
10
+ Example output
11
+ ------------------------------
12
+ Function: read_immediate_region
13
+ Geographies available: Immediate region
14
+ Years available: 2017
15
+ Source: IBGE
16
+ ------------------------------
17
+
18
+ """
19
+
20
+ try:
21
+ html_data = get("https://github.com/ipeaGIT/geobr/blob/master/README.md").text
22
+ find_emoji = html_data.index("👉")
23
+ html_data = html_data[find_emoji:]
24
+ escaped_data = html_data.replace("\\u003c", "<").replace("\\u003e", ">")
25
+ tables = re.findall("<table>(.+?)</table>", escaped_data)
26
+ available_datasets = "<table>" + tables[0].replace("\\n", "") + "</table>"
27
+ df = pd.DataFrame(pd.read_html(StringIO(available_datasets))[0])
28
+
29
+ except HTTPError:
30
+ print(
31
+ "Geobr url functions list is broken. "
32
+ 'Please report an issue at "https://github.com/ipeaGIT/geobr/issues"'
33
+ )
34
+
35
+ for i in range(len(df)):
36
+ for each in df.columns:
37
+ print(f"{each}: {df.loc[i, each]}")
38
+
39
+ print("------------------------------")
@@ -0,0 +1,111 @@
1
+ from tunned_geobr import utils
2
+
3
+
4
+ def lookup_muni(name_muni=None, code_muni=None, verbose=False):
5
+ """Lookup municipality codes and names.
6
+
7
+ By default, it looks for all municipalities. You can also use 'all' in
8
+ `name_muni` or `code_muni` to get all municipalities.
9
+
10
+ Input a municipality NAME or CODE and get the names and codes of
11
+ the municipality's corresponding state, meso, micro, intermediate, and
12
+ immediate regions. You should not select both code_muni and name_muni
13
+
14
+ Parameters
15
+ ----------
16
+
17
+ name_muni : str, optional
18
+ The municipality name to be looked up
19
+
20
+ code_muni: str, optional
21
+ The municipality code to be looked up
22
+
23
+ verbose : bool, optional
24
+ by default False
25
+
26
+ Returns
27
+ -------
28
+ pd.DataFrame
29
+ 13 columns identifying the geographies information of that municipality
30
+
31
+ Details Only available from 2010 Census data so far
32
+
33
+ Raises
34
+ -------
35
+ Exception if code_muni or name_muni cannot be found
36
+
37
+ Example
38
+ -------
39
+ >>> import tunned_geobr
40
+
41
+ # Lookup table for municipality of Rio de Janeiro
42
+ >>> mun = lookup_muni('Rio de Janeiro')
43
+ or
44
+ >>> mun = lookup_muni(3304557)
45
+
46
+ # lookup table for all municipalities
47
+ >>> mun_all = lookup_muni()
48
+ """
49
+ # Get metadata with data url addresses
50
+ temp_meta = utils.select_metadata(geo="lookup_muni", year=2010)
51
+
52
+ # Read DataFrame available at provided url
53
+ lookup_table = utils.download_metadata(
54
+ temp_meta.loc[:, "download_path"].to_list()[0]
55
+ )
56
+ lookup_table["name_muni_format"] = lookup_table["name_muni_format"].str.lower()
57
+
58
+ # Search by inputs
59
+ if (
60
+ code_muni == "all"
61
+ or name_muni == "all"
62
+ or (code_muni is None and name_muni is None)
63
+ ):
64
+ if verbose:
65
+ print(f"Returning results for all municipalities")
66
+ return lookup_table.iloc[:, :-1]
67
+
68
+ elif code_muni is not None:
69
+ if name_muni is not None:
70
+ if verbose:
71
+ print("Ignoring argument name_muni")
72
+ try:
73
+ output = lookup_table[lookup_table["code_muni"] == int(code_muni)].iloc[
74
+ :, :-1
75
+ ]
76
+ if verbose:
77
+ print(
78
+ "Returning results for municipality ",
79
+ f'{output.loc[:, "name_muni"].to_list()[0]}',
80
+ )
81
+ return output
82
+
83
+ except KeyError:
84
+ raise Exception(
85
+ f"The `code_muni` argument {code_muni}",
86
+ "was not found in the database.",
87
+ )
88
+
89
+ elif name_muni is not None:
90
+ # Cleaning from accents and turning into lower cases without spaces
91
+ name_muni = utils.strip_accents(str(name_muni).lower().strip())
92
+ output = lookup_table[lookup_table["name_muni_format"] == name_muni]
93
+
94
+ if len(output) == 0:
95
+ if verbose:
96
+ print("Please insert a valid municipality name")
97
+ raise Exception(
98
+ f"The `name_muni` argument {name_muni} ",
99
+ "was not found in the database.",
100
+ )
101
+ else:
102
+ if verbose:
103
+ print(
104
+ "Returning results for municipality"
105
+ f'{output.loc[:, "name_muni"].to_list()[0]}'
106
+ )
107
+ return output.iloc[:, :-1]
108
+
109
+ elif code_muni == "all" and name_muni == "all":
110
+ if verbose:
111
+ print("Please insert either a municipality ", "name or a municipality code")
@@ -0,0 +1,42 @@
1
+ from tunned_geobr.utils import select_metadata, download_gpkg
2
+
3
+
4
+ def read_amazon(year=2012, simplified=True, verbose=False):
5
+ """ Download official data of Brazil's Legal Amazon as an sf object.
6
+
7
+ This data set covers the whole of Brazil's Legal Amazon as defined in the federal law n. 12.651/2012. The original
8
+ data comes from the Brazilian Ministry of Environment (MMA) and can be found at http://mapas.mma.gov.br/i3geo/datadownload.htm .
9
+
10
+ Parameters
11
+ ----------
12
+ year : int, optional
13
+ Year of the data, by default 2012
14
+ simplified: boolean, by default True
15
+ Data 'type', indicating whether the function returns the 'original' dataset
16
+ with high resolution or a dataset with 'simplified' borders (Default)
17
+ verbose : bool, optional
18
+ by default False
19
+
20
+ Returns
21
+ -------
22
+ gpd.GeoDataFrame
23
+ Metadata and geopackage of selected states
24
+
25
+ Raises
26
+ ------
27
+ Exception
28
+ If parameters are not found or not well defined
29
+
30
+ Example
31
+ -------
32
+ >>> from tunned_geobr import read_amazon
33
+
34
+ # Read specific state at a given year
35
+ >>> df = read_amazon(year=2012)
36
+ """
37
+
38
+ metadata = select_metadata("amazonia_legal", year=year, simplified=simplified)
39
+
40
+ gdf = download_gpkg(metadata)
41
+
42
+ return gdf
@@ -0,0 +1,92 @@
1
+ import geopandas as gpd
2
+ import tempfile
3
+ import os
4
+ import requests
5
+ import patoolib
6
+ from zipfile import ZipFile
7
+ from io import BytesIO
8
+
9
+ def read_amazon_ibas(simplified=False):
10
+ """Download Important Bird Areas (IBAs) data for the Amazon region.
11
+
12
+ This function downloads and processes IBAs data from SAVE Brasil. The data includes
13
+ important areas for bird conservation in the Amazon region.
14
+ Original source: SAVE Brasil
15
+
16
+ Parameters
17
+ ----------
18
+ simplified : boolean, by default False
19
+ If True, returns a simplified version of the dataset with fewer columns
20
+
21
+ Returns
22
+ -------
23
+ gpd.GeoDataFrame
24
+ Geodataframe with Amazon IBAs data
25
+
26
+ Example
27
+ -------
28
+ >>> from tunned_geobr import read_amazon_ibas
29
+
30
+ # Read Amazon IBAs data
31
+ >>> ibas = read_amazon_ibas()
32
+ """
33
+
34
+ url = "https://www.savebrasil.org.br/_files/archives/6d1e48_c03ae9708adf4d978220547eaf173103.zip"
35
+
36
+ try:
37
+ # Download the zip file
38
+ response = requests.get(url)
39
+ if response.status_code != 200:
40
+ raise Exception("Failed to download data from SAVE Brasil")
41
+
42
+ # Create a temporary directory
43
+ with tempfile.TemporaryDirectory() as temp_dir:
44
+ # First extract the zip file
45
+ with ZipFile(BytesIO(response.content)) as zip_ref:
46
+ zip_ref.extractall(temp_dir)
47
+
48
+ subfolder_path = os.path.join(temp_dir, "Shapefiles IBAs Amazônia e Mata Atlântica")
49
+ rar_files = [f for f in os.listdir(subfolder_path) if f.endswith('.rar')]
50
+
51
+ if not rar_files:
52
+ raise Exception("No RAR file found in the downloaded data")
53
+
54
+ # Extract the RAR file using patoolib
55
+ rar_path = os.path.join(subfolder_path, rar_files[0])
56
+ patoolib.extract_archive(rar_path, outdir=temp_dir)
57
+
58
+ # Path to the Amazon shapefile directory
59
+ amazon_dir = os.path.join(subfolder_path, "Amazônia", 'Final')
60
+
61
+ # Find the shapefile
62
+ shp_files = [f for f in os.listdir(amazon_dir) if f.endswith('.shp')]
63
+ if not shp_files:
64
+ raise Exception("No shapefile found in the downloaded data")
65
+
66
+ # Read the shapefile
67
+ gdf = gpd.read_file(os.path.join(amazon_dir, shp_files[0]))
68
+ gdf = gdf.to_crs(4674)
69
+
70
+ if simplified:
71
+ # Keep only the most relevant columns
72
+ # Note: These columns are based on typical IBAs data structure
73
+ # You may want to adjust these based on the actual data
74
+ columns_to_keep = [
75
+ 'geometry',
76
+ 'IBA_NAME', # IBA name
77
+ 'IBA_CODE', # IBA code
78
+ 'STATE', # State
79
+ 'AREA_HA', # Area in hectares
80
+ 'PRIORITY', # Conservation priority
81
+ 'THREATS', # Threats to the area
82
+ 'HABITATS', # Main habitats
83
+ ]
84
+
85
+ # Filter columns that actually exist in the dataset
86
+ existing_columns = ['geometry'] + [col for col in columns_to_keep[1:] if col in gdf.columns]
87
+ gdf = gdf[existing_columns]
88
+
89
+ except Exception as e:
90
+ raise Exception(f"Error downloading Amazon IBAs data: {str(e)}")
91
+
92
+ return gdf
@@ -0,0 +1,93 @@
1
+ import geopandas as gpd
2
+ import tempfile
3
+ import os
4
+ import requests
5
+ import patoolib
6
+ from zipfile import ZipFile
7
+ from io import BytesIO
8
+
9
+ def read_atlantic_forest_ibas(simplified=False):
10
+ """Download Important Bird Areas (IBAs) data for the Atlantic Forest region.
11
+
12
+ This function downloads and processes IBAs data from SAVE Brasil. The data includes
13
+ important areas for bird conservation in the Atlantic Forest region.
14
+ Original source: SAVE Brasil
15
+
16
+ Parameters
17
+ ----------
18
+ simplified : boolean, by default False
19
+ If True, returns a simplified version of the dataset with fewer columns
20
+
21
+ Returns
22
+ -------
23
+ gpd.GeoDataFrame
24
+ Geodataframe with Atlantic Forest IBAs data
25
+
26
+ Example
27
+ -------
28
+ >>> from tunned_geobr import read_atlantic_forest_ibas
29
+
30
+ # Read Atlantic Forest IBAs data
31
+ >>> ibas = read_atlantic_forest_ibas()
32
+ """
33
+
34
+ url = "https://www.savebrasil.org.br/_files/archives/6d1e48_c03ae9708adf4d978220547eaf173103.zip"
35
+
36
+ try:
37
+ # Download the zip file
38
+ response = requests.get(url)
39
+ if response.status_code != 200:
40
+ raise Exception("Failed to download data from SAVE Brasil")
41
+
42
+ # Create a temporary directory
43
+ with tempfile.TemporaryDirectory() as temp_dir:
44
+ # First extract the zip file
45
+ with ZipFile(BytesIO(response.content)) as zip_ref:
46
+ zip_ref.extractall(temp_dir)
47
+
48
+ subfolder_path = os.path.join(temp_dir, "Shapefiles IBAs Amazônia e Mata Atlântica")
49
+ rar_files = [f for f in os.listdir(subfolder_path) if f.endswith('.rar')]
50
+
51
+ if not rar_files:
52
+ raise Exception("No RAR file found in the downloaded data")
53
+
54
+ # Extract the RAR file using patoolib
55
+ rar_path = os.path.join(subfolder_path, rar_files[0])
56
+ patoolib.extract_archive(rar_path, outdir=temp_dir)
57
+
58
+ # Path to the Atlantic Forest shapefile directory
59
+ atlantic_dir = os.path.join(subfolder_path, "Mata Atlântica")
60
+
61
+ # Find the shapefile
62
+ shp_files = [f for f in os.listdir(atlantic_dir) if f.endswith('.shp')]
63
+ if not shp_files:
64
+ raise Exception("No shapefile found in the downloaded data")
65
+
66
+ # Read the shapefile
67
+ gdf = gpd.read_file(os.path.join(atlantic_dir, shp_files[0]))
68
+ gdf = gdf.to_crs(4674)
69
+
70
+ if simplified:
71
+ # Keep only the most relevant columns
72
+ # Note: These columns are based on typical IBAs data structure
73
+ # You may want to adjust these based on the actual data
74
+ columns_to_keep = [
75
+ 'geometry',
76
+ 'IBA_NAME', # IBA name
77
+ 'IBA_CODE', # IBA code
78
+ 'STATE', # State
79
+ 'AREA_HA', # Area in hectares
80
+ 'PRIORITY', # Conservation priority
81
+ 'THREATS', # Threats to the area
82
+ 'HABITATS', # Main habitats
83
+ ]
84
+
85
+ # Filter columns that actually exist in the dataset
86
+ existing_columns = ['geometry'] + [col for col in columns_to_keep[1:] if col in gdf.columns]
87
+ gdf = gdf[existing_columns]
88
+
89
+ except Exception as e:
90
+ raise Exception(f"Error downloading Atlantic Forest IBAs data: {str(e)}")
91
+
92
+ return gdf
93
+
@@ -0,0 +1,43 @@
1
+ from tunned_geobr.utils import select_metadata, download_gpkg
2
+
3
+
4
+ def read_biomes(year=2019, simplified=True, verbose=False):
5
+ """ Download official data of Brazilian biomes as an sf object.
6
+
7
+ This data set includes polygons of all biomes present in Brazilian territory and coastal area.
8
+ The latest data set dates to 2019 and it is available at scale 1:250.000. The 2004 data set is at
9
+ the scale 1:5.000.000. The original data comes from IBGE. More information at https://www.ibge.gov.br/apps/biomas/
10
+
11
+ Parameters
12
+ ----------
13
+ year : int, optional
14
+ Year of the data, by default 2019
15
+ simplified: boolean, by default True
16
+ Data 'type', indicating whether the function returns the 'original' dataset
17
+ with high resolution or a dataset with 'simplified' borders (Default)
18
+ verbose : bool, optional
19
+ by default False
20
+
21
+ Returns
22
+ -------
23
+ gpd.GeoDataFrame
24
+ Metadata and geopackage of selected states
25
+
26
+ Raises
27
+ ------
28
+ Exception
29
+ If parameters are not found or not well defined
30
+
31
+ Example
32
+ -------
33
+ >>> from tunned_geobr import read_biomes
34
+
35
+ # Read specific state at a given year
36
+ >>> df = read_biomes(year=2019)
37
+ """
38
+
39
+ metadata = select_metadata("biomes", year=year, simplified=simplified)
40
+
41
+ gdf = download_gpkg(metadata)
42
+
43
+ return gdf