tunned-geobr 0.1.2__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in the public registry. It is provided for informational purposes only.
Files changed (44)
  1. tunned_geobr/__init__.py +34 -2
  2. tunned_geobr/list_geobr.py +112 -34
  3. tunned_geobr/read_apcb_amazon.py +78 -0
  4. tunned_geobr/read_apcb_caatinga.py +78 -0
  5. tunned_geobr/read_apcb_cerrado_pantanal.py +78 -0
  6. tunned_geobr/read_apcb_mata_atlantica.py +78 -0
  7. tunned_geobr/read_apcb_pampa.py +78 -0
  8. tunned_geobr/read_apcb_zcm.py +78 -0
  9. tunned_geobr/read_archaeological_sites.py +94 -0
  10. tunned_geobr/read_atlantic_forest_law_limits.py +74 -0
  11. tunned_geobr/read_baze_sites.py +155 -0
  12. tunned_geobr/read_biosphere_reserves.py +85 -0
  13. tunned_geobr/read_cave_potential.py +79 -0
  14. tunned_geobr/read_census_tract_2022.py +101 -0
  15. tunned_geobr/read_ebas.py +80 -0
  16. tunned_geobr/read_federal_highways.py +79 -0
  17. tunned_geobr/read_fossil_occurrences.py +94 -0
  18. tunned_geobr/read_geographic_regions.py +88 -0
  19. tunned_geobr/read_heliports.py +81 -0
  20. tunned_geobr/read_municipality_direct.py +127 -0
  21. tunned_geobr/read_natural_caves.py +83 -0
  22. tunned_geobr/read_neighborhoods_2022.py +99 -0
  23. tunned_geobr/read_pan_strategic_areas.py +89 -0
  24. tunned_geobr/read_ports.py +80 -0
  25. tunned_geobr/read_private_aerodromes.py +81 -0
  26. tunned_geobr/read_public_aerodromes.py +81 -0
  27. tunned_geobr/read_quilombola_areas.py +85 -0
  28. tunned_geobr/read_quilombola_areas_temp.py +103 -0
  29. tunned_geobr/read_railways.py +80 -0
  30. tunned_geobr/read_rppn.py +107 -0
  31. tunned_geobr/read_sigef_properties.py +83 -0
  32. tunned_geobr/read_snci_properties.py +83 -0
  33. tunned_geobr/read_state_direct.py +103 -0
  34. tunned_geobr/read_state_highways.py +79 -0
  35. tunned_geobr/read_transmission_lines_ons.py +87 -0
  36. tunned_geobr/read_vegetation.py +84 -0
  37. tunned_geobr/read_water_bodies_ana.py +87 -0
  38. tunned_geobr/read_waterways.py +80 -0
  39. {tunned_geobr-0.1.2.dist-info → tunned_geobr-0.2.1.dist-info}/METADATA +35 -3
  40. tunned_geobr-0.2.1.dist-info/RECORD +82 -0
  41. tunned_geobr-0.1.2.dist-info/RECORD +0 -46
  42. {tunned_geobr-0.1.2.dist-info → tunned_geobr-0.2.1.dist-info}/WHEEL +0 -0
  43. {tunned_geobr-0.1.2.dist-info → tunned_geobr-0.2.1.dist-info}/entry_points.txt +0 -0
  44. {tunned_geobr-0.1.2.dist-info → tunned_geobr-0.2.1.dist-info}/licenses/LICENSE.txt +0 -0
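
All of the new tunned_geobr/read_*.py modules listed above follow the same pattern: download a zipped shapefile (or WFS export) from the original source, extract it to a temporary directory, read it with geopandas, reproject to SIRGAS 2000 (EPSG:4674), and optionally trim the attribute table when simplified=True. A minimal usage sketch, assuming the package is installed and the remote sources are reachable (read_ports and read_public_aerodromes are used here because their code appears in the hunks below):

    from tunned_geobr import read_ports, read_public_aerodromes

    # Full attribute table, reprojected to SIRGAS 2000 (EPSG:4674)
    ports = read_ports()
    print(ports.crs, len(ports))

    # Reduced column set; only columns that actually exist in the source are kept
    aerodromes = read_public_aerodromes(simplified=True)
    print(list(aerodromes.columns))
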
@@ -0,0 +1,99 @@
+ import geopandas as gpd
+ import tempfile
+ import os
+ import requests
+ import subprocess
+ from io import BytesIO
+
+ def read_neighborhoods_2022(simplified=False):
+     """Download Brazilian Neighborhoods data from IBGE (2022 Census).
+
+     This function downloads and processes the Brazilian Neighborhoods data
+     from IBGE (Brazilian Institute of Geography and Statistics) for the 2022 Census.
+     Original source: IBGE - Instituto Brasileiro de Geografia e Estatística
+
+     Parameters
+     ----------
+     simplified : boolean, by default False
+         If True, returns a simplified version of the dataset with fewer columns
+
+     Returns
+     -------
+     gpd.GeoDataFrame
+         Geodataframe with Brazilian neighborhoods data
+
+     Example
+     -------
+     >>> from tunned_geobr import read_neighborhoods_2022
+
+     # Read neighborhoods data
+     >>> neighborhoods = read_neighborhoods_2022()
+     """
+
+     url = "https://geoftp.ibge.gov.br/organizacao_do_territorio/malhas_territoriais/malhas_de_setores_censitarios__divisoes_intramunicipais/censo_2022/bairros/shp/BR/BR_bairros_CD2022.zip"
+
+     try:
+         # Create a temporary directory
+         with tempfile.TemporaryDirectory() as temp_dir:
+             # Download the zip file to the temporary directory
+             zip_file_path = os.path.join(temp_dir, "neighborhoods.zip")
+
+             # Download the file
+             response = requests.get(url)
+             if response.status_code != 200:
+                 raise Exception("Failed to download neighborhoods data from IBGE")
+
+             # Save the content to a file
+             with open(zip_file_path, 'wb') as f:
+                 f.write(response.content)
+
+             # Use unzip command line tool to extract the file (handles more compression methods)
+             try:
+                 subprocess.run(['unzip', '-o', zip_file_path, '-d', temp_dir],
+                                check=True,
+                                stdout=subprocess.PIPE,
+                                stderr=subprocess.PIPE)
+             except subprocess.CalledProcessError as e:
+                 raise Exception(f"Failed to extract zip file: {e.stderr.decode()}")
+
+             # Find the shapefile
+             shp_files = []
+             for root, dirs, files in os.walk(temp_dir):
+                 shp_files.extend([os.path.join(root, f) for f in files if f.endswith('.shp')])
+
+             if not shp_files:
+                 raise Exception("No shapefile found in the downloaded data")
+
+             # Read the shapefile
+             gdf = gpd.read_file(shp_files[0])
+
+             # Convert to SIRGAS 2000 (EPSG:4674) if not already
+             if gdf.crs is None or gdf.crs.to_epsg() != 4674:
+                 gdf = gdf.to_crs(4674)
+
+             if simplified:
+                 # Keep only the most relevant columns
+                 # Note: Column names may need adjustment based on actual data
+                 columns_to_keep = [
+                     'geometry',
+                     'CD_BAIRRO',  # Neighborhood Code
+                     'NM_BAIRRO',  # Neighborhood Name
+                     'CD_MUN',     # Municipality Code
+                     'NM_MUN',     # Municipality Name
+                     'CD_UF',      # State Code
+                     'NM_UF',      # State Name
+                     'SIGLA_UF',   # State Abbreviation
+                     'AREA_KM2'    # Area in square kilometers
+                 ]
+
+                 # Filter columns that actually exist in the dataset
+                 existing_columns = ['geometry'] + [col for col in columns_to_keep[1:] if col in gdf.columns]
+                 gdf = gdf[existing_columns]
+
+     except Exception as e:
+         raise Exception(f"Error downloading neighborhoods data: {str(e)}")
+
+     return gdf
+
+ if __name__ == '__main__':
+     read_neighborhoods_2022()
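
Unlike the other readers, read_neighborhoods_2022 extracts the IBGE archive with the external `unzip` command (via subprocess) because the zip uses a compression method the standard zipfile module may not handle, so the binary must be available on PATH. A small pre-flight check a caller might add; this helper is illustrative and not part of the package:

    import shutil

    # read_neighborhoods_2022 shells out to `unzip`; fail early if it is missing
    if shutil.which("unzip") is None:
        raise RuntimeError("The 'unzip' command-line tool is required by read_neighborhoods_2022")

    from tunned_geobr import read_neighborhoods_2022

    neighborhoods = read_neighborhoods_2022(simplified=True)
    print(len(neighborhoods), "neighborhoods loaded")
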
@@ -0,0 +1,89 @@
+ import geopandas as gpd
+ import tempfile
+ import os
+ import requests
+ from zipfile import ZipFile
+ from io import BytesIO
+
+ def read_pan_strategic_areas(simplified=False):
+     """Download Strategic Areas data from ICMBio's PAN.
+
+     This function downloads and processes the Strategic Areas data from ICMBio's
+     National Action Plans (PAN). These are areas of strategic importance for
+     biodiversity conservation in Brazil.
+     Original source: ICMBio - Instituto Chico Mendes de Conservação da Biodiversidade
+
+     Parameters
+     ----------
+     simplified : boolean, by default False
+         If True, returns a simplified version of the dataset with fewer columns
+
+     Returns
+     -------
+     gpd.GeoDataFrame
+         Geodataframe with PAN strategic areas data
+
+     Example
+     -------
+     >>> from tunned_geobr import read_pan_strategic_areas
+
+     # Read PAN strategic areas data
+     >>> strategic_areas = read_pan_strategic_areas()
+     """
+
+     url = "https://geoservicos.inde.gov.br/geoserver/ICMBio/ows?request=GetFeature&service=WFS&version=1.0.0&typeName=ICMBio:pan_icmbio_areas_estrat_052024_a&outputFormat=SHAPE-ZIP"
+
+     try:
+         # Download the zip file
+         response = requests.get(url)
+         if response.status_code != 200:
+             raise Exception("Failed to download strategic areas data from ICMBio")
+
+         # Create a temporary directory
+         with tempfile.TemporaryDirectory() as temp_dir:
+             # Extract the zip file
+             with ZipFile(BytesIO(response.content)) as zip_ref:
+                 zip_ref.extractall(temp_dir)
+
+             # Find the shapefile
+             shp_files = []
+             for root, dirs, files in os.walk(temp_dir):
+                 shp_files.extend([os.path.join(root, f) for f in files if f.endswith('.shp')])
+
+             if not shp_files:
+                 raise Exception("No shapefile found in the downloaded data")
+
+             # Read the shapefile
+             gdf = gpd.read_file(shp_files[0])
+
+             # Convert to SIRGAS 2000 (EPSG:4674) if not already
+             if gdf.crs is None or gdf.crs.to_epsg() != 4674:
+                 gdf = gdf.to_crs(4674)
+
+             if simplified:
+                 # Keep only the most relevant columns
+                 # Note: Column names may need adjustment based on actual data
+                 columns_to_keep = [
+                     'geometry',
+                     'nome',         # Area name
+                     'pan',          # PAN name
+                     'tipo',         # Type of strategic area
+                     'area_km2',     # Area in square kilometers
+                     'bioma',        # Biome
+                     'uf',           # State
+                     'municipio',    # Municipality
+                     'importancia',  # Importance
+                     'descricao'     # Description
+                 ]
+
+                 # Filter columns that actually exist in the dataset
+                 existing_columns = ['geometry'] + [col for col in columns_to_keep[1:] if col in gdf.columns]
+                 gdf = gdf[existing_columns]
+
+     except Exception as e:
+         raise Exception(f"Error downloading PAN strategic areas data: {str(e)}")
+
+     return gdf
+
+ if __name__ == '__main__':
+     read_pan_strategic_areas()
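
read_pan_strategic_areas requests a SHAPE-ZIP export from the INDE GeoServer and goes through a temporary shapefile. As a side note, GeoServer WFS endpoints generally also accept outputFormat=application/json, which geopandas can read directly from the URL; whether this particular server allows it is an assumption, so treat the following as a sketch rather than the package's approach:

    import geopandas as gpd

    # Same ICMBio layer, requested as GeoJSON instead of SHAPE-ZIP
    # (assumes the server permits this output format)
    wfs_url = (
        "https://geoservicos.inde.gov.br/geoserver/ICMBio/ows"
        "?request=GetFeature&service=WFS&version=1.0.0"
        "&typeName=ICMBio:pan_icmbio_areas_estrat_052024_a"
        "&outputFormat=application/json"
    )
    pan_areas = gpd.read_file(wfs_url).to_crs(4674)
    print(len(pan_areas), "strategic areas")
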
@@ -0,0 +1,80 @@
+ import geopandas as gpd
+ import tempfile
+ import os
+ import requests
+ from zipfile import ZipFile
+ from io import BytesIO
+
+ def read_ports(simplified=False):
+     """Download Brazilian Ports data from SNIRH.
+
+     This function downloads and processes ports data from SNIRH (Sistema Nacional de
+     Informações sobre Recursos Hídricos). The data includes information about ports
+     across Brazil.
+     Original source: SNIRH (Sistema Nacional de Informações sobre Recursos Hídricos)
+
+     Parameters
+     ----------
+     simplified : boolean, by default False
+         If True, returns a simplified version of the dataset with fewer columns
+
+     Returns
+     -------
+     gpd.GeoDataFrame
+         Geodataframe with Brazilian ports data
+
+     Example
+     -------
+     >>> from tunned_geobr import read_ports
+
+     # Read ports data
+     >>> ports = read_ports()
+     """
+
+     url = "https://metadados.snirh.gov.br/geonetwork/srv/api/records/0afc9687-db93-4eb1-ab31-3bbd871ff303/attachments/GEOFT_PORTO.zip"
+
+     try:
+         # Download the zip file
+         response = requests.get(url)
+         if response.status_code != 200:
+             raise Exception("Failed to download data from SNIRH")
+
+         # Create a temporary directory
+         with tempfile.TemporaryDirectory() as temp_dir:
+             # Extract the zip file
+             with ZipFile(BytesIO(response.content)) as zip_ref:
+                 zip_ref.extractall(temp_dir)
+
+             # Find the shapefile
+             shp_files = []
+             for root, dirs, files in os.walk(temp_dir):
+                 shp_files.extend([os.path.join(root, f) for f in files if f.endswith('.shp')])
+
+             if not shp_files:
+                 raise Exception("No shapefile found in the downloaded data")
+
+             # Read the shapefile
+             gdf = gpd.read_file(shp_files[0])
+             gdf = gdf.to_crs(4674)  # Convert to SIRGAS 2000
+
+             if simplified:
+                 # Keep only the most relevant columns
+                 columns_to_keep = [
+                     'geometry',
+                     'nome',        # Port name
+                     'municipio',   # Municipality
+                     'uf',          # State
+                     'tipo',        # Port type
+                     'administra',  # Administration
+                     'situacao',    # Status
+                     'localizaca'   # Location details
+                 ]
+
+                 # Filter columns that actually exist in the dataset
+                 existing_columns = ['geometry'] + [col for col in columns_to_keep[1:] if col in gdf.columns]
+                 gdf = gdf[existing_columns]
+
+     except Exception as e:
+         raise Exception(f"Error downloading ports data: {str(e)}")
+
+     return gdf
@@ -0,0 +1,81 @@
+ import geopandas as gpd
+ import tempfile
+ import os
+ import requests
+ from zipfile import ZipFile
+ from io import BytesIO
+
+ def read_private_aerodromes(simplified=False):
+     """Download Private Aerodromes data from MapBiomas.
+
+     This function downloads and processes private aerodromes data from MapBiomas.
+     The data includes information about private airports and aerodromes across Brazil.
+     Original source: MapBiomas
+
+     Parameters
+     ----------
+     simplified : boolean, by default False
+         If True, returns a simplified version of the dataset with fewer columns
+
+     Returns
+     -------
+     gpd.GeoDataFrame
+         Geodataframe with private aerodromes data
+
+     Example
+     -------
+     >>> from tunned_geobr import read_private_aerodromes
+
+     # Read private aerodromes data
+     >>> aerodromes = read_private_aerodromes()
+     """
+
+     url = "https://brasil.mapbiomas.org/wp-content/uploads/sites/4/2023/08/Aerodromos_Privados.zip"
+
+     try:
+         # Download the zip file
+         response = requests.get(url)
+         if response.status_code != 200:
+             raise Exception("Failed to download data from MapBiomas")
+
+         # Create a temporary directory
+         with tempfile.TemporaryDirectory() as temp_dir:
+             # Extract the zip file
+             with ZipFile(BytesIO(response.content)) as zip_ref:
+                 zip_ref.extractall(temp_dir)
+
+             # Find the shapefile
+             shp_files = []
+             for root, dirs, files in os.walk(temp_dir):
+                 shp_files.extend([os.path.join(root, f) for f in files if f.endswith('.shp')])
+
+             if not shp_files:
+                 raise Exception("No shapefile found in the downloaded data")
+
+             # Read the shapefile
+             gdf = gpd.read_file(shp_files[0])
+             gdf = gdf.to_crs(4674)  # Convert to SIRGAS 2000
+
+             if simplified:
+                 # Keep only the most relevant columns
+                 columns_to_keep = [
+                     'geometry',
+                     'nome',         # Aerodrome name
+                     'municipio',    # Municipality
+                     'uf',           # State
+                     'codigo_oaci',  # ICAO code
+                     'altitude',     # Altitude
+                     'tipo_uso',     # Usage type
+                     'compriment',   # Runway length
+                     'largura',      # Runway width
+                     'tipo_pista'    # Runway type
+                 ]
+
+                 # Filter columns that actually exist in the dataset
+                 existing_columns = ['geometry'] + [col for col in columns_to_keep[1:] if col in gdf.columns]
+                 gdf = gdf[existing_columns]
+
+     except Exception as e:
+         raise Exception(f"Error downloading private aerodromes data: {str(e)}")
+
+     return gdf
@@ -0,0 +1,81 @@
+ import geopandas as gpd
+ import tempfile
+ import os
+ import requests
+ from zipfile import ZipFile
+ from io import BytesIO
+
+ def read_public_aerodromes(simplified=False):
+     """Download Public Aerodromes data from MapBiomas.
+
+     This function downloads and processes public aerodromes data from MapBiomas.
+     The data includes information about public airports and aerodromes across Brazil.
+     Original source: MapBiomas
+
+     Parameters
+     ----------
+     simplified : boolean, by default False
+         If True, returns a simplified version of the dataset with fewer columns
+
+     Returns
+     -------
+     gpd.GeoDataFrame
+         Geodataframe with public aerodromes data
+
+     Example
+     -------
+     >>> from tunned_geobr import read_public_aerodromes
+
+     # Read public aerodromes data
+     >>> aerodromes = read_public_aerodromes()
+     """
+
+     url = "https://brasil.mapbiomas.org/wp-content/uploads/sites/4/2023/08/Aerodromos_publicos.zip"
+
+     try:
+         # Download the zip file
+         response = requests.get(url)
+         if response.status_code != 200:
+             raise Exception("Failed to download data from MapBiomas")
+
+         # Create a temporary directory
+         with tempfile.TemporaryDirectory() as temp_dir:
+             # Extract the zip file
+             with ZipFile(BytesIO(response.content)) as zip_ref:
+                 zip_ref.extractall(temp_dir)
+
+             # Find the shapefile
+             shp_files = []
+             for root, dirs, files in os.walk(temp_dir):
+                 shp_files.extend([os.path.join(root, f) for f in files if f.endswith('.shp')])
+
+             if not shp_files:
+                 raise Exception("No shapefile found in the downloaded data")
+
+             # Read the shapefile
+             gdf = gpd.read_file(shp_files[0])
+             gdf = gdf.to_crs(4674)  # Convert to SIRGAS 2000
+
+             if simplified:
+                 # Keep only the most relevant columns
+                 columns_to_keep = [
+                     'geometry',
+                     'nome',         # Aerodrome name
+                     'municipio',    # Municipality
+                     'uf',           # State
+                     'codigo_oaci',  # ICAO code
+                     'altitude',     # Altitude
+                     'tipo_uso',     # Usage type
+                     'compriment',   # Runway length
+                     'largura',      # Runway width
+                     'tipo_pista'    # Runway type
+                 ]
+
+                 # Filter columns that actually exist in the dataset
+                 existing_columns = ['geometry'] + [col for col in columns_to_keep[1:] if col in gdf.columns]
+                 gdf = gdf[existing_columns]
+
+     except Exception as e:
+         raise Exception(f"Error downloading public aerodromes data: {str(e)}")
+
+     return gdf
@@ -0,0 +1,85 @@
+ import geopandas as gpd
+ import tempfile
+ import os
+ import requests
+ from zipfile import ZipFile
+ from io import BytesIO
+
+ def read_settlements(simplified=False):
+     """Download official settlements data from INCRA.
+
+     This function downloads and processes data about settlements (assentamentos)
+     from INCRA (Instituto Nacional de Colonização e Reforma Agrária).
+     Original source: INCRA - Certificação de Imóveis Rurais
+
+     Parameters
+     ----------
+     simplified : boolean, by default False
+         If True, returns a simplified version of the dataset with fewer columns
+
+     Returns
+     -------
+     gpd.GeoDataFrame
+         Geodataframe with settlements data
+
+     Example
+     -------
+     >>> from geobr import read_settlements
+
+     # Read settlements data
+     >>> settlements = read_settlements()
+     """
+
+     url = "https://certificacao.incra.gov.br/csv_shp/zip/Assentamento%20Brasil.zip"
+
+     try:
+         # Download the zip file
+         # Disable SSL verification due to INCRA's certificate issues
+         response = requests.get(url, verify=False)
+         if response.status_code != 200:
+             raise Exception("Failed to download data from INCRA")
+
+         # Suppress the InsecureRequestWarning
+         import urllib3
+         urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+
+         # Create a temporary directory
+         with tempfile.TemporaryDirectory() as temp_dir:
+             # Extract the zip file
+             with ZipFile(BytesIO(response.content)) as zip_ref:
+                 zip_ref.extractall(temp_dir)
+
+             # Find the shapefile
+             shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
+             if not shp_files:
+                 raise Exception("No shapefile found in the downloaded data")
+
+             # Read the shapefile
+             gdf = gpd.read_file(os.path.join(temp_dir, shp_files[0]))
+
+             if simplified:
+                 # Keep only the most relevant columns
+                 columns_to_keep = [
+                     'geometry',
+                     'NOME_PROJE',  # Settlement project name
+                     'MUNICIPIO',   # Municipality
+                     'UF',          # State
+                     'AREA_HA',     # Area in hectares
+                     'NUM_FAMILI',  # Number of families
+                     'CAPACIDADE',  # Family capacity
+                     'DT_CRIACAO',  # Creation date
+                     'SITUACAO'     # Settlement status
+                 ]
+
+                 # Filter columns that actually exist in the dataset
+                 existing_columns = ['geometry'] + [col for col in columns_to_keep[1:] if col in gdf.columns]
+                 gdf = gdf[existing_columns]
+
+     except Exception as e:
+         raise Exception(f"Error downloading settlements data: {str(e)}")
+
+     return gdf
+
+ if __name__ == '__main__':
+     settlements = read_settlements()
+     print(settlements)
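
Because INCRA's certificate chain is unreliable, read_settlements calls requests.get with verify=False; the function only disables urllib3's InsecureRequestWarning after the request has already been made, so callers who want a quiet run can suppress it beforehand. A sketch, where the import path is an assumption (the docstring above says geobr, but the other readers are exposed from tunned_geobr):

    import urllib3

    # Silence the warning triggered by the verify=False request inside read_settlements
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    from tunned_geobr import read_settlements  # assumed export path

    settlements = read_settlements(simplified=True)
    print(len(settlements), "settlements loaded")
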
@@ -0,0 +1,103 @@
+ import geopandas as gpd
+ import tempfile
+ import os
+ import requests
+ from zipfile import ZipFile
+ from io import BytesIO
+ import urllib3
+ import time
+ from pathlib import Path
+
+ def read_quilombola_areas(simplified=False, local_file=None):
+     """Download Quilombola Areas data from INCRA.
+
+     This function downloads and processes data about Quilombola Areas (Áreas Quilombolas)
+     in Brazil. These are territories recognized and titled to remaining quilombo communities.
+     Original source: INCRA - Instituto Nacional de Colonização e Reforma Agrária
+
+     Parameters
+     ----------
+     simplified : boolean, by default False
+         If True, returns a simplified version of the dataset with fewer columns
+     local_file : string, optional
+         Path to a local zip file containing the data, by default None
+         If provided, the function will use this file instead of downloading from INCRA
+
+     Returns
+     -------
+     gpd.GeoDataFrame
+         Geodataframe with Quilombola Areas data
+         Columns:
+         - geometry: Geometry of the area
+         - nome: Area name
+         - municipio: Municipality
+         - uf: State
+         - area_ha: Area in hectares
+         - fase: Current phase in the titling process
+         - familias: Number of families
+         - portaria: Ordinance number
+         - decreto: Decree number
+         - titulo: Title number
+         - data_titulo: Title date
+
+     Example
+     -------
+     >>> from tunned_geobr import read_quilombola_areas
+
+     # Read Quilombola Areas data
+     >>> quilombos = read_quilombola_areas()
+
+     # Or use a local file that was previously downloaded
+     >>> quilombos = read_quilombola_areas(local_file="path/to/Áreas de Quilombolas.zip")
+     """
+
+     url = "https://certificacao.incra.gov.br/csv_shp/zip/Áreas%20de%20Quilombolas.zip"
+
+     # If a local file is provided, use it instead of downloading
+     if local_file and os.path.exists(local_file):
+         print(f"Using local file: {local_file}")
+         try:
+             with tempfile.TemporaryDirectory() as temp_dir:
+                 # Extract the zip file
+                 with ZipFile(local_file) as zip_ref:
+                     zip_ref.extractall(temp_dir)
+
+                 # Find the shapefile
+                 shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
+                 if not shp_files:
+                     raise Exception("No shapefile found in the local file")
+
+                 print(f"Found shapefile: {shp_files[0]}")
+
+                 # Read the shapefile
+                 gdf = gpd.read_file(os.path.join(temp_dir, shp_files[0]))
+                 gdf = gdf.to_crs(4674)  # Convert to SIRGAS 2000
+
+                 print(f"Successfully loaded {len(gdf)} Quilombola Areas from local file")
+
+                 if simplified:
+                     # Keep only the most relevant columns
+                     columns_to_keep = [
+                         'geometry',
+                         'nome',       # Area name
+                         'municipio',  # Municipality
+                         'uf',         # State
+                         'area_ha',    # Area in hectares
+                         'fase'        # Current phase
+                     ]
+
+                     # Filter columns that actually exist in the dataset
+                     existing_columns = ['geometry'] + [col for col in columns_to_keep[1:] if col in gdf.columns]
+                     gdf = gdf[existing_columns]
+
+                 return gdf
+         except Exception as e:
+             raise Exception(f"Error processing local file: {str(e)}")
+
+     # If no local file is provided, return a message with download instructions
+     # This is consistent with the approach in read_snci_properties as mentioned in the MEMORY
+     return "O download automático dos dados de Áreas Quilombolas está temporariamente indisponível.\nPor favor, faça o download manual através do link:\n" + url + "\n\nApós o download, você pode usar o parâmetro local_file:\nquilombos = read_quilombola_areas(local_file='caminho/para/Áreas de Quilombolas.zip')"
+
+ if __name__ == '__main__':
+     quilombos = read_quilombola_areas()
+     print(quilombos)
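
Note that read_quilombola_areas only returns a GeoDataFrame when a usable local zip is supplied; without local_file (or if the path does not exist) it returns a string with manual download instructions instead of raising. A caller can therefore check the return type, as sketched below (the file path is illustrative):

    import geopandas as gpd
    from tunned_geobr import read_quilombola_areas

    result = read_quilombola_areas(local_file="Áreas de Quilombolas.zip", simplified=True)
    if isinstance(result, gpd.GeoDataFrame):
        print(f"{len(result)} quilombola areas loaded")
    else:
        # No usable local file: the function returned the download instructions string
        print(result)
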