tunned-geobr 0.2.4__py3-none-any.whl → 0.2.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. tunned_geobr/__init__.py +2 -3
  2. tunned_geobr/list_geobr.py +1 -5
  3. tunned_geobr/read_ama_anemometric_towers.py +8 -6
  4. tunned_geobr/read_areas_under_contract.py +3 -2
  5. tunned_geobr/read_biodiesel_plants.py +3 -2
  6. tunned_geobr/read_biomes.py +1 -1
  7. tunned_geobr/read_biomethane_plants.py +3 -2
  8. tunned_geobr/read_census_tract.py +90 -88
  9. tunned_geobr/read_comparable_areas.py +1 -1
  10. tunned_geobr/read_compression_stations.py +3 -2
  11. tunned_geobr/read_conservation_units.py +1 -1
  12. tunned_geobr/read_country.py +1 -1
  13. tunned_geobr/read_disaster_risk_area.py +1 -1
  14. tunned_geobr/read_drainage_ducts.py +3 -2
  15. tunned_geobr/read_etanol_plants.py +3 -2
  16. tunned_geobr/read_existent_biomass_ute.py +6 -2
  17. tunned_geobr/read_existent_fossile_ute.py +7 -3
  18. tunned_geobr/read_existent_nuclear_ute.py +5 -4
  19. tunned_geobr/read_existent_substations.py +5 -4
  20. tunned_geobr/read_existent_transmission_lines.py +6 -5
  21. tunned_geobr/read_exploration_production_environment.py +6 -8
  22. tunned_geobr/read_federal_union_areas.py +3 -2
  23. tunned_geobr/read_fuel_bases.py +3 -2
  24. tunned_geobr/read_gas_distribution_pipelines.py +3 -2
  25. tunned_geobr/read_gas_transport_pipelines.py +3 -2
  26. tunned_geobr/read_glp_bases.py +3 -2
  27. tunned_geobr/read_health_region.py +1 -1
  28. tunned_geobr/read_hydroelectric_feasibility_studies.py +9 -8
  29. tunned_geobr/read_hydroelectric_inventory_aai_studies.py +9 -8
  30. tunned_geobr/read_immediate_region.py +1 -1
  31. tunned_geobr/read_indigenous_land.py +1 -1
  32. tunned_geobr/read_intermediate_region.py +1 -1
  33. tunned_geobr/read_isolated_systems.py +5 -4
  34. tunned_geobr/read_meso_region.py +1 -1
  35. tunned_geobr/read_metro_area.py +1 -1
  36. tunned_geobr/read_micro_region.py +1 -1
  37. tunned_geobr/read_municipality.py +1 -1
  38. tunned_geobr/read_municipality_direct.py +1 -1
  39. tunned_geobr/read_neighborhood.py +1 -1
  40. tunned_geobr/read_og_basement.py +9 -8
  41. tunned_geobr/read_og_effective_geographic_basin.py +3 -2
  42. tunned_geobr/read_og_ipa_direct_evidence.py +6 -8
  43. tunned_geobr/read_og_ipa_exploratory_activity.py +9 -8
  44. tunned_geobr/read_og_ipa_exploratory_intensity.py +3 -0
  45. tunned_geobr/read_og_ipa_need_for_knowledge.py +6 -8
  46. tunned_geobr/read_og_ipa_prospectiveness.py +6 -8
  47. tunned_geobr/read_og_ipa_supply_infrastructure.py +6 -8
  48. tunned_geobr/read_og_legal_pre_salt_polygon.py +6 -8
  49. tunned_geobr/read_og_predominant_fluid_type.py +3 -2
  50. tunned_geobr/read_og_probabilistic_effective_basin.py +3 -2
  51. tunned_geobr/read_og_total_ipa.py +3 -2
  52. tunned_geobr/read_og_unconventional_resources.py +3 -2
  53. tunned_geobr/read_oil_and_derivatives_terminal.py +3 -2
  54. tunned_geobr/read_pio_terminals.py +3 -2
  55. tunned_geobr/read_pop_arrangements.py +1 -1
  56. tunned_geobr/read_processing_facilities.py +3 -2
  57. tunned_geobr/read_quilombola_areas.py +74 -56
  58. tunned_geobr/read_region.py +1 -1
  59. tunned_geobr/read_sedimentary_basins.py +6 -8
  60. tunned_geobr/read_semiarid.py +1 -1
  61. tunned_geobr/read_state.py +1 -1
  62. tunned_geobr/read_state_direct.py +1 -1
  63. tunned_geobr/read_urban_area.py +1 -1
  64. tunned_geobr/read_urban_concentrations.py +1 -1
  65. tunned_geobr/read_water_bodies_ana.py +1 -1
  66. tunned_geobr/read_weighting_area.py +1 -1
  67. {tunned_geobr-0.2.4.dist-info → tunned_geobr-0.2.6.dist-info}/METADATA +1 -1
  68. tunned_geobr-0.2.6.dist-info/RECORD +138 -0
  69. tunned_geobr/read_census_tract_2022.py +0 -101
  70. tunned_geobr/read_quilombola_areas_temp.py +0 -103
  71. tunned_geobr-0.2.4.dist-info/RECORD +0 -140
  72. {tunned_geobr-0.2.4.dist-info → tunned_geobr-0.2.6.dist-info}/WHEEL +0 -0
  73. {tunned_geobr-0.2.4.dist-info → tunned_geobr-0.2.6.dist-info}/entry_points.txt +0 -0
  74. {tunned_geobr-0.2.4.dist-info → tunned_geobr-0.2.6.dist-info}/licenses/LICENSE.txt +0 -0
@@ -7,7 +7,7 @@ import warnings
7
7
  import shutil
8
8
 
9
9
 
10
- def read_og_ipa_prospectiveness(simplified=True, verbose=False):
10
+ def read_og_ipa_prospectiveness(simplified=False, verbose=False):
11
11
  """Download data for Oil and Gas IPA Prospectiveness in Brazil.
12
12
 
13
13
  This function downloads, processes, and returns data for Oil and Gas IPA Prospectiveness
@@ -45,10 +45,7 @@ def read_og_ipa_prospectiveness(simplified=True, verbose=False):
45
45
  response.raise_for_status()
46
46
  response_json = response.json()
47
47
 
48
- if "value" not in response_json or not response_json["value"]:
49
- raise ValueError("No data found in the response")
50
-
51
- download_url = response_json["value"]["itemUrl"]
48
+ download_url = response_json['results'][0]['value']['url']
52
49
 
53
50
  if verbose:
54
51
  print(f"Download URL: {download_url}")
@@ -74,12 +71,13 @@ def read_og_ipa_prospectiveness(simplified=True, verbose=False):
74
71
  zip_ref.extractall(temp_dir)
75
72
 
76
73
  # Find the shapefile
77
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
74
+ zip_dir = os.path.join(temp_dir, 'zipfolder')
75
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith(".shp")]
78
76
 
79
77
  if not shp_files:
80
78
  raise FileNotFoundError("No shapefile found in the downloaded zip file")
81
79
 
82
- shp_path = os.path.join(temp_dir, shp_files[0])
80
+ shp_path = os.path.join(zip_dir, shp_files[0])
83
81
 
84
82
  if verbose:
85
83
  print(f"Reading shapefile from {shp_path}")
@@ -99,7 +97,7 @@ def read_og_ipa_prospectiveness(simplified=True, verbose=False):
99
97
  essential_cols = ["geometry"]
100
98
 
101
99
  # Add any other essential columns that exist in the dataset
102
- for col in ["CLASSE", "NOME", "DESCRICAO", "AREA_KM2"]:
100
+ for col in ["NOME", "MUNICIPIO", "UF", "ALTURA", "SITUACAO"]:
103
101
  if col in gdf.columns:
104
102
  essential_cols.append(col)
105
103
 
@@ -7,7 +7,7 @@ import warnings
7
7
  import shutil
8
8
 
9
9
 
10
- def read_og_ipa_supply_infrastructure(simplified=True, verbose=False):
10
+ def read_og_ipa_supply_infrastructure(simplified=False, verbose=False):
11
11
  """Download data for Oil and Gas IPA Supply Infrastructure in Brazil.
12
12
 
13
13
  This function downloads, processes, and returns data for Oil and Gas IPA Supply Infrastructure
@@ -45,10 +45,7 @@ def read_og_ipa_supply_infrastructure(simplified=True, verbose=False):
45
45
  response.raise_for_status()
46
46
  response_json = response.json()
47
47
 
48
- if "value" not in response_json or not response_json["value"]:
49
- raise ValueError("No data found in the response")
50
-
51
- download_url = response_json["value"]["itemUrl"]
48
+ download_url = response_json['results'][0]['value']['url']
52
49
 
53
50
  if verbose:
54
51
  print(f"Download URL: {download_url}")
@@ -74,12 +71,13 @@ def read_og_ipa_supply_infrastructure(simplified=True, verbose=False):
74
71
  zip_ref.extractall(temp_dir)
75
72
 
76
73
  # Find the shapefile
77
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
74
+ zip_dir = os.path.join(temp_dir, 'zipfolder')
75
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith(".shp")]
78
76
 
79
77
  if not shp_files:
80
78
  raise FileNotFoundError("No shapefile found in the downloaded zip file")
81
79
 
82
- shp_path = os.path.join(temp_dir, shp_files[0])
80
+ shp_path = os.path.join(zip_dir, shp_files[0])
83
81
 
84
82
  if verbose:
85
83
  print(f"Reading shapefile from {shp_path}")
@@ -99,7 +97,7 @@ def read_og_ipa_supply_infrastructure(simplified=True, verbose=False):
99
97
  essential_cols = ["geometry"]
100
98
 
101
99
  # Add any other essential columns that exist in the dataset
102
- for col in ["CLASSE", "NOME", "DESCRICAO", "AREA_KM2"]:
100
+ for col in ["NOME", "MUNICIPIO", "UF", "ALTURA", "SITUACAO"]:
103
101
  if col in gdf.columns:
104
102
  essential_cols.append(col)
105
103
 
@@ -7,7 +7,7 @@ import warnings
7
7
  import shutil
8
8
 
9
9
 
10
- def read_og_legal_pre_salt_polygon(simplified=True, verbose=False):
10
+ def read_og_legal_pre_salt_polygon(simplified=False, verbose=False):
11
11
  """Download data for Oil and Gas Legal Pre-Salt Polygon in Brazil.
12
12
 
13
13
  This function downloads, processes, and returns data for Oil and Gas Legal Pre-Salt Polygon
@@ -45,10 +45,7 @@ def read_og_legal_pre_salt_polygon(simplified=True, verbose=False):
45
45
  response.raise_for_status()
46
46
  response_json = response.json()
47
47
 
48
- if "value" not in response_json or not response_json["value"]:
49
- raise ValueError("No data found in the response")
50
-
51
- download_url = response_json["value"]["itemUrl"]
48
+ download_url = response_json['results'][0]['value']['url']
52
49
 
53
50
  if verbose:
54
51
  print(f"Download URL: {download_url}")
@@ -74,12 +71,13 @@ def read_og_legal_pre_salt_polygon(simplified=True, verbose=False):
74
71
  zip_ref.extractall(temp_dir)
75
72
 
76
73
  # Find the shapefile
77
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
74
+ zip_dir = os.path.join(temp_dir, 'zipfolder')
75
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith(".shp")]
78
76
 
79
77
  if not shp_files:
80
78
  raise FileNotFoundError("No shapefile found in the downloaded zip file")
81
79
 
82
- shp_path = os.path.join(temp_dir, shp_files[0])
80
+ shp_path = os.path.join(zip_dir, shp_files[0])
83
81
 
84
82
  if verbose:
85
83
  print(f"Reading shapefile from {shp_path}")
@@ -99,7 +97,7 @@ def read_og_legal_pre_salt_polygon(simplified=True, verbose=False):
99
97
  essential_cols = ["geometry"]
100
98
 
101
99
  # Add any other essential columns that exist in the dataset
102
- for col in ["NOME", "DESCRICAO", "AREA_KM2"]:
100
+ for col in ["NOME", "MUNICIPIO", "UF", "ALTURA", "SITUACAO"]:
103
101
  if col in gdf.columns:
104
102
  essential_cols.append(col)
105
103
 
@@ -86,8 +86,9 @@ def read_og_predominant_fluid_type(simplified=False, verbose=False):
86
86
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
87
87
  zip_ref.extractall(temp_dir)
88
88
 
89
+ zip_dir = os.path.join(temp_dir,'zipfolder')
89
90
  # Find the shapefile in the extracted files
90
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
91
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
91
92
 
92
93
  if not shp_files:
93
94
  raise Exception("No shapefile found in the downloaded zip file")
@@ -96,7 +97,7 @@ def read_og_predominant_fluid_type(simplified=False, verbose=False):
96
97
  if verbose:
97
98
  print("Reading shapefile")
98
99
 
99
- shp_path = os.path.join(temp_dir, shp_files[0])
100
+ shp_path = os.path.join(zip_dir, shp_files[0])
100
101
  gdf = gpd.read_file(shp_path)
101
102
 
102
103
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -86,8 +86,9 @@ def read_og_probabilistic_effective_basin(simplified=False, verbose=False):
86
86
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
87
87
  zip_ref.extractall(temp_dir)
88
88
 
89
+ zip_dir = os.path.join(temp_dir,'zipfolder')
89
90
  # Find the shapefile in the extracted files
90
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
91
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
91
92
 
92
93
  if not shp_files:
93
94
  raise Exception("No shapefile found in the downloaded zip file")
@@ -96,7 +97,7 @@ def read_og_probabilistic_effective_basin(simplified=False, verbose=False):
96
97
  if verbose:
97
98
  print("Reading shapefile")
98
99
 
99
- shp_path = os.path.join(temp_dir, shp_files[0])
100
+ shp_path = os.path.join(zip_dir, shp_files[0])
100
101
  gdf = gpd.read_file(shp_path)
101
102
 
102
103
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -86,8 +86,9 @@ def read_og_total_ipa(simplified=False, verbose=False):
86
86
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
87
87
  zip_ref.extractall(temp_dir)
88
88
 
89
+ zip_dir = os.path.join(temp_dir,'zipfolder')
89
90
  # Find the shapefile in the extracted files
90
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
91
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
91
92
 
92
93
  if not shp_files:
93
94
  raise Exception("No shapefile found in the downloaded zip file")
@@ -96,7 +97,7 @@ def read_og_total_ipa(simplified=False, verbose=False):
96
97
  if verbose:
97
98
  print("Reading shapefile")
98
99
 
99
- shp_path = os.path.join(temp_dir, shp_files[0])
100
+ shp_path = os.path.join(zip_dir, shp_files[0])
100
101
  gdf = gpd.read_file(shp_path)
101
102
 
102
103
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -86,8 +86,9 @@ def read_og_unconventional_resources(simplified=False, verbose=False):
86
86
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
87
87
  zip_ref.extractall(temp_dir)
88
88
 
89
+ zip_dir = os.path.join(temp_dir,'zipfolder')
89
90
  # Find the shapefile in the extracted files
90
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
91
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
91
92
 
92
93
  if not shp_files:
93
94
  raise Exception("No shapefile found in the downloaded zip file")
@@ -96,7 +97,7 @@ def read_og_unconventional_resources(simplified=False, verbose=False):
96
97
  if verbose:
97
98
  print("Reading shapefile")
98
99
 
99
- shp_path = os.path.join(temp_dir, shp_files[0])
100
+ shp_path = os.path.join(zip_dir, shp_files[0])
100
101
  gdf = gpd.read_file(shp_path)
101
102
 
102
103
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -85,8 +85,9 @@ def read_oil_and_derivatives_terminal(simplified=False, verbose=False):
85
85
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
86
86
  zip_ref.extractall(temp_dir)
87
87
 
88
+ zip_dir = os.path.join(temp_dir,'zipfolder')
88
89
  # Find the shapefile in the extracted files
89
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
90
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
90
91
 
91
92
  if not shp_files:
92
93
  raise Exception("No shapefile found in the downloaded zip file")
@@ -95,7 +96,7 @@ def read_oil_and_derivatives_terminal(simplified=False, verbose=False):
95
96
  if verbose:
96
97
  print("Reading shapefile")
97
98
 
98
- shp_path = os.path.join(temp_dir, shp_files[0])
99
+ shp_path = os.path.join(zip_dir, shp_files[0])
99
100
  gdf = gpd.read_file(shp_path)
100
101
 
101
102
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -85,8 +85,9 @@ def read_pio_terminals(simplified=False, verbose=False):
85
85
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
86
86
  zip_ref.extractall(temp_dir)
87
87
 
88
+ zip_dir = os.path.join(temp_dir,'zipfolder')
88
89
  # Find the shapefile in the extracted files
89
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
90
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
90
91
 
91
92
  if not shp_files:
92
93
  raise Exception("No shapefile found in the downloaded zip file")
@@ -95,7 +96,7 @@ def read_pio_terminals(simplified=False, verbose=False):
95
96
  if verbose:
96
97
  print("Reading shapefile")
97
98
 
98
- shp_path = os.path.join(temp_dir, shp_files[0])
99
+ shp_path = os.path.join(zip_dir, shp_files[0])
99
100
  gdf = gpd.read_file(shp_path)
100
101
 
101
102
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -2,7 +2,7 @@
2
2
  from geobr.utils import select_metadata, download_gpkg
3
3
 
4
4
 
5
- def read_pop_arrangements(year=2015, simplified=True, verbose=False):
5
+ def read_pop_arrangements(year=2015, simplified=False, verbose=False):
6
6
  r""" Download population arrangements in Brazil
7
7
 
8
8
  This function reads the official data on population arrangements (Arranjos
@@ -85,8 +85,9 @@ def read_processing_facilities(simplified=False, verbose=False):
85
85
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
86
86
  zip_ref.extractall(temp_dir)
87
87
 
88
+ zip_dir = os.path.join(temp_dir,'zipfolder')
88
89
  # Find the shapefile in the extracted files
89
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
90
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
90
91
 
91
92
  if not shp_files:
92
93
  raise Exception("No shapefile found in the downloaded zip file")
@@ -95,7 +96,7 @@ def read_processing_facilities(simplified=False, verbose=False):
95
96
  if verbose:
96
97
  print("Reading shapefile")
97
98
 
98
- shp_path = os.path.join(temp_dir, shp_files[0])
99
+ shp_path = os.path.join(zip_dir, shp_files[0])
99
100
  gdf = gpd.read_file(shp_path)
100
101
 
101
102
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -4,82 +4,100 @@ import os
4
4
  import requests
5
5
  from zipfile import ZipFile
6
6
  from io import BytesIO
7
+ import urllib3
8
+ import time
9
+ from pathlib import Path
7
10
 
8
- def read_settlements(simplified=False):
9
- """Download official settlements data from INCRA.
11
+ def read_quilombola_areas(simplified=False, local_file=None):
12
+ """Download Quilombola Areas data from INCRA.
10
13
 
11
- This function downloads and processes data about settlements (assentamentos)
12
- from INCRA (Instituto Nacional de Colonização e Reforma Agrária).
13
- Original source: INCRA - Certificação de Imóveis Rurais
14
+ This function downloads and processes data about Quilombola Areas (Áreas Quilombolas)
15
+ in Brazil. These are territories recognized and titled to remaining quilombo communities.
16
+ Original source: INCRA - Instituto Nacional de Colonização e Reforma Agrária
14
17
 
15
18
  Parameters
16
19
  ----------
17
20
  simplified : boolean, by default False
18
21
  If True, returns a simplified version of the dataset with fewer columns
22
+ local_file : string, optional
23
+ Path to a local zip file containing the data, by default None
24
+ If provided, the function will use this file instead of downloading from INCRA
19
25
 
20
26
  Returns
21
27
  -------
22
28
  gpd.GeoDataFrame
23
- Geodataframe with settlements data
29
+ Geodataframe with Quilombola Areas data
30
+ Columns:
31
+ - geometry: Geometry of the area
32
+ - nome: Area name
33
+ - municipio: Municipality
34
+ - uf: State
35
+ - area_ha: Area in hectares
36
+ - fase: Current phase in the titling process
37
+ - familias: Number of families
38
+ - portaria: Ordinance number
39
+ - decreto: Decree number
40
+ - titulo: Title number
41
+ - data_titulo: Title date
24
42
 
25
43
  Example
26
44
  -------
27
- >>> from geobr import read_settlements
45
+ >>> from tunned_geobr import read_quilombola_areas
28
46
 
29
- # Read settlements data
30
- >>> settlements = read_settlements()
47
+ # Read Quilombola Areas data
48
+ >>> quilombos = read_quilombola_areas()
49
+
50
+ # Or use a local file that was previously downloaded
51
+ >>> quilombos = read_quilombola_areas(local_file="path/to/Áreas de Quilombolas.zip")
31
52
  """
32
53
 
33
- url = "https://certificacao.incra.gov.br/csv_shp/zip/Assentamento%20Brasil.zip"
54
+ url = "https://certificacao.incra.gov.br/csv_shp/zip/Áreas%20de%20Quilombolas.zip"
34
55
 
35
- try:
36
- # Download the zip file
37
- # Disable SSL verification due to INCRA's certificate issues
38
- response = requests.get(url, verify=False)
39
- if response.status_code != 200:
40
- raise Exception("Failed to download data from INCRA")
41
-
42
- # Suppress the InsecureRequestWarning
43
- import urllib3
44
- urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
45
-
46
- # Create a temporary directory
47
- with tempfile.TemporaryDirectory() as temp_dir:
48
- # Extract the zip file
49
- with ZipFile(BytesIO(response.content)) as zip_ref:
50
- zip_ref.extractall(temp_dir)
51
-
52
- # Find the shapefile
53
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
54
- if not shp_files:
55
- raise Exception("No shapefile found in the downloaded data")
56
+ # If a local file is provided, use it instead of downloading
57
+ if local_file and os.path.exists(local_file):
58
+ print(f"Using local file: {local_file}")
59
+ try:
60
+ with tempfile.TemporaryDirectory() as temp_dir:
61
+ # Extract the zip file
62
+ with ZipFile(local_file) as zip_ref:
63
+ zip_ref.extractall(temp_dir)
64
+
65
+ # Find the shapefile
66
+ shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
67
+ if not shp_files:
68
+ raise Exception("No shapefile found in the local file")
69
+
70
+ print(f"Found shapefile: {shp_files[0]}")
56
71
 
57
- # Read the shapefile
58
- gdf = gpd.read_file(os.path.join(temp_dir, shp_files[0]))
59
-
60
- if simplified:
61
- # Keep only the most relevant columns
62
- columns_to_keep = [
63
- 'geometry',
64
- 'NOME_PROJE', # Nome do Projeto de Assentamento
65
- 'MUNICIPIO', # Município
66
- 'UF', # Estado
67
- 'AREA_HA', # Área em hectares
68
- 'NUM_FAMILI', # Número de famílias
69
- 'CAPACIDADE', # Capacidade de famílias
70
- 'DT_CRIACAO', # Data de criação
71
- 'SITUACAO' # Situação do assentamento
72
- ]
72
+ # Read the shapefile
73
+ gdf = gpd.read_file(os.path.join(temp_dir, shp_files[0]))
74
+ gdf = gdf.to_crs(4674) # Convert to SIRGAS 2000
73
75
 
74
- # Filter columns that actually exist in the dataset
75
- existing_columns = ['geometry'] + [col for col in columns_to_keep[1:] if col in gdf.columns]
76
- gdf = gdf[existing_columns]
76
+ print(f"Successfully loaded {len(gdf)} Quilombola Areas from local file")
77
+
78
+ if simplified:
79
+ # Keep only the most relevant columns
80
+ columns_to_keep = [
81
+ 'geometry',
82
+ 'nome', # Area name
83
+ 'municipio', # Municipality
84
+ 'uf', # State
85
+ 'area_ha', # Area in hectares
86
+ 'fase' # Current phase
87
+ ]
88
+
89
+ # Filter columns that actually exist in the dataset
90
+ existing_columns = ['geometry'] + [col for col in columns_to_keep[1:] if col in gdf.columns]
91
+ gdf = gdf[existing_columns]
92
+
93
+ return gdf
94
+ except Exception as e:
95
+ raise Exception(f"Error processing local file: {str(e)}")
77
96
 
78
- except Exception as e:
79
- raise Exception(f"Error downloading settlements data: {str(e)}")
80
-
81
- return gdf
97
+ # If no local file is provided, return a message with download instructions
98
+ # This is consistent with the approach in read_snci_properties as mentioned in the MEMORY
99
+ return "O download automático dos dados de Áreas Quilombolas está temporariamente indisponível.\nPor favor, faça o download manual através do link:\n" + url + "\n\nApós o download, você pode usar o parâmetro local_file:\nquilombos = read_quilombola_areas(local_file='caminho/para/Áreas de Quilombolas.zip')"
82
100
 
83
101
  if __name__ == '__main__':
84
- settlements = read_settlements()
85
- print(settlements)
102
+ quilombos = read_quilombola_areas()
103
+ print(quilombos)
@@ -1,7 +1,7 @@
1
1
  from geobr import read_region as _read_region
2
2
 
3
3
 
4
- def read_region(year=2010, simplified=True, verbose=False):
4
+ def read_region(year=2010, simplified=False, verbose=False):
5
5
  """ Download shape file of Brazil Regions as sf objects.
6
6
 
7
7
  Data at scale 1:250,000, using Geodetic reference system "SIRGAS2000" and CRS(4674)
@@ -7,7 +7,7 @@ import warnings
7
7
  import shutil
8
8
 
9
9
 
10
- def read_sedimentary_basins(simplified=True, verbose=False):
10
+ def read_sedimentary_basins(simplified=False, verbose=False):
11
11
  """Download data for Sedimentary Basins in Brazil.
12
12
 
13
13
  This function downloads, processes, and returns data for Sedimentary Basins
@@ -45,10 +45,7 @@ def read_sedimentary_basins(simplified=True, verbose=False):
45
45
  response.raise_for_status()
46
46
  response_json = response.json()
47
47
 
48
- if "value" not in response_json or not response_json["value"]:
49
- raise ValueError("No data found in the response")
50
-
51
- download_url = response_json["value"]["itemUrl"]
48
+ download_url = response_json['results'][0]['value']['url']
52
49
 
53
50
  if verbose:
54
51
  print(f"Download URL: {download_url}")
@@ -74,12 +71,13 @@ def read_sedimentary_basins(simplified=True, verbose=False):
74
71
  zip_ref.extractall(temp_dir)
75
72
 
76
73
  # Find the shapefile
77
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
74
+ zip_dir = os.path.join(temp_dir, 'zipfolder')
75
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith(".shp")]
78
76
 
79
77
  if not shp_files:
80
78
  raise FileNotFoundError("No shapefile found in the downloaded zip file")
81
79
 
82
- shp_path = os.path.join(temp_dir, shp_files[0])
80
+ shp_path = os.path.join(zip_dir, shp_files[0])
83
81
 
84
82
  if verbose:
85
83
  print(f"Reading shapefile from {shp_path}")
@@ -99,7 +97,7 @@ def read_sedimentary_basins(simplified=True, verbose=False):
99
97
  essential_cols = ["geometry"]
100
98
 
101
99
  # Add any other essential columns that exist in the dataset
102
- for col in ["NOME", "DESCRICAO", "AREA_KM2"]:
100
+ for col in ["NOME", "MUNICIPIO", "UF", "ALTURA", "SITUACAO"]:
103
101
  if col in gdf.columns:
104
102
  essential_cols.append(col)
105
103
 
@@ -1,7 +1,7 @@
1
1
  from geobr.utils import select_metadata, download_gpkg
2
2
 
3
3
 
4
- def read_semiarid(year=2017, simplified=True, verbose=False):
4
+ def read_semiarid(year=2017, simplified=False, verbose=False):
5
5
  """ Download official data of Brazilian Semiarid as an sf object.
6
6
 
7
7
  This data set covers the whole of Brazilian Semiarid as defined in the resolution in 23/11/2017). The original
@@ -3,7 +3,7 @@ import geopandas as gpd
3
3
  from geobr.utils import select_metadata, download_gpkg
4
4
 
5
5
 
6
- def read_state(code_state="all", year=2010, simplified=True, verbose=False):
6
+ def read_state(code_state="all", year=2010, simplified=False, verbose=False):
7
7
  """Download shapefiles of Brazilian states as geopandas objects.
8
8
 
9
9
  Data at scale 1:250,000, using Geodetic reference system "SIRGAS2000" and CRS(4674)
@@ -5,7 +5,7 @@ import requests
5
5
  from zipfile import ZipFile
6
6
  from io import BytesIO
7
7
 
8
- def read_state(code_state="all", simplified=True):
8
+ def read_state_direct(code_state="all", simplified=False):
9
9
  """Download shapefiles of Brazilian states as geopandas objects.
10
10
 
11
11
  This function downloads and processes state data directly from IBGE (Brazilian Institute of Geography and Statistics).
@@ -1,7 +1,7 @@
1
1
  from geobr.utils import select_metadata, download_gpkg
2
2
 
3
3
 
4
- def read_urban_area(year=2015, simplified=True, verbose=False):
4
+ def read_urban_area(year=2015, simplified=False, verbose=False):
5
5
  """ Download official data of urbanized areas in Brazil as an sf object.
6
6
 
7
7
  This function reads the official data on the urban footprint of Brazilian cities
@@ -2,7 +2,7 @@
2
2
  from geobr.utils import select_metadata, download_gpkg
3
3
 
4
4
 
5
- def read_urban_concentrations(year=2015, simplified=True, verbose=False):
5
+ def read_urban_concentrations(year=2015, simplified=False, verbose=False):
6
6
  r""" Download urban concentration areas in Brazil
7
7
 
8
8
  @description
@@ -31,7 +31,7 @@ def read_water_bodies_ana(simplified=False):
31
31
  """
32
32
 
33
33
  url = "https://metadados.snirh.gov.br/files/7d054e5a-8cc9-403c-9f1a-085fd933610c/geoft_bho_massa_dagua_v2019.zip"
34
-
34
+
35
35
  try:
36
36
  # Download the zip file
37
37
  response = requests.get(url)
@@ -2,7 +2,7 @@ from geobr.utils import select_metadata, download_gpkg
2
2
 
3
3
 
4
4
  def read_weighting_area(
5
- code_weighting="all", year=2010, simplified=True, verbose=False
5
+ code_weighting="all", year=2010, simplified=False, verbose=False
6
6
  ):
7
7
  """Download shape files of Census Weighting Areas (area de ponderacao) of the Brazilian Population Census.
8
8
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: tunned-geobr
3
- Version: 0.2.4
3
+ Version: 0.2.6
4
4
  Summary: Fork personalizado do geobr com funcionalidades extras como download de dados da ANM
5
5
  Author: Anderson Stolfi
6
6
  License: MIT