tunned-geobr 0.2.5__py3-none-any.whl → 0.2.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. tunned_geobr/__init__.py +2 -3
  2. tunned_geobr/list_geobr.py +1 -5
  3. tunned_geobr/read_ama_anemometric_towers.py +8 -6
  4. tunned_geobr/read_areas_under_contract.py +3 -2
  5. tunned_geobr/read_biodiesel_plants.py +3 -2
  6. tunned_geobr/read_biomes.py +1 -1
  7. tunned_geobr/read_biomethane_plants.py +3 -2
  8. tunned_geobr/read_census_tract.py +90 -88
  9. tunned_geobr/read_comparable_areas.py +1 -1
  10. tunned_geobr/read_compression_stations.py +3 -2
  11. tunned_geobr/read_conservation_units.py +1 -1
  12. tunned_geobr/read_country.py +1 -1
  13. tunned_geobr/read_disaster_risk_area.py +1 -1
  14. tunned_geobr/read_drainage_ducts.py +3 -2
  15. tunned_geobr/read_etanol_plants.py +3 -2
  16. tunned_geobr/read_existent_biomass_ute.py +6 -2
  17. tunned_geobr/read_existent_fossile_ute.py +7 -3
  18. tunned_geobr/read_existent_nuclear_ute.py +5 -4
  19. tunned_geobr/read_existent_substations.py +5 -4
  20. tunned_geobr/read_existent_transmission_lines.py +6 -5
  21. tunned_geobr/read_exploration_production_environment.py +6 -8
  22. tunned_geobr/read_federal_union_areas.py +3 -2
  23. tunned_geobr/read_fuel_bases.py +3 -2
  24. tunned_geobr/read_gas_distribution_pipelines.py +3 -2
  25. tunned_geobr/read_gas_transport_pipelines.py +3 -2
  26. tunned_geobr/read_glp_bases.py +3 -2
  27. tunned_geobr/read_health_region.py +1 -1
  28. tunned_geobr/read_hydroelectric_feasibility_studies.py +9 -8
  29. tunned_geobr/read_hydroelectric_inventory_aai_studies.py +9 -8
  30. tunned_geobr/read_immediate_region.py +1 -1
  31. tunned_geobr/read_indigenous_land.py +1 -1
  32. tunned_geobr/read_intermediate_region.py +1 -1
  33. tunned_geobr/read_isolated_systems.py +5 -4
  34. tunned_geobr/read_meso_region.py +1 -1
  35. tunned_geobr/read_metro_area.py +1 -1
  36. tunned_geobr/read_micro_region.py +1 -1
  37. tunned_geobr/read_municipality.py +111 -67
  38. tunned_geobr/read_municipality_direct.py +1 -1
  39. tunned_geobr/read_neighborhood.py +1 -1
  40. tunned_geobr/read_og_basement.py +9 -8
  41. tunned_geobr/read_og_effective_geographic_basin.py +3 -2
  42. tunned_geobr/read_og_ipa_direct_evidence.py +6 -8
  43. tunned_geobr/read_og_ipa_exploratory_activity.py +9 -8
  44. tunned_geobr/read_og_ipa_exploratory_intensity.py +3 -0
  45. tunned_geobr/read_og_ipa_need_for_knowledge.py +6 -8
  46. tunned_geobr/read_og_ipa_prospectiveness.py +6 -8
  47. tunned_geobr/read_og_ipa_supply_infrastructure.py +6 -8
  48. tunned_geobr/read_og_legal_pre_salt_polygon.py +6 -8
  49. tunned_geobr/read_og_predominant_fluid_type.py +3 -2
  50. tunned_geobr/read_og_probabilistic_effective_basin.py +3 -2
  51. tunned_geobr/read_og_total_ipa.py +3 -2
  52. tunned_geobr/read_og_unconventional_resources.py +3 -2
  53. tunned_geobr/read_oil_and_derivatives_terminal.py +3 -2
  54. tunned_geobr/read_pio_terminals.py +3 -2
  55. tunned_geobr/read_pop_arrangements.py +1 -1
  56. tunned_geobr/read_processing_facilities.py +3 -2
  57. tunned_geobr/read_region.py +1 -1
  58. tunned_geobr/read_sedimentary_basins.py +6 -8
  59. tunned_geobr/read_semiarid.py +1 -1
  60. tunned_geobr/read_state.py +83 -68
  61. tunned_geobr/read_state_direct.py +1 -1
  62. tunned_geobr/read_urban_area.py +1 -1
  63. tunned_geobr/read_urban_concentrations.py +1 -1
  64. tunned_geobr/read_water_bodies_ana.py +1 -1
  65. tunned_geobr/read_weighting_area.py +1 -1
  66. {tunned_geobr-0.2.5.dist-info → tunned_geobr-0.2.7.dist-info}/METADATA +1 -1
  67. tunned_geobr-0.2.7.dist-info/RECORD +138 -0
  68. tunned_geobr/read_census_tract_2022.py +0 -101
  69. tunned_geobr-0.2.5.dist-info/RECORD +0 -139
  70. {tunned_geobr-0.2.5.dist-info → tunned_geobr-0.2.7.dist-info}/WHEEL +0 -0
  71. {tunned_geobr-0.2.5.dist-info → tunned_geobr-0.2.7.dist-info}/entry_points.txt +0 -0
  72. {tunned_geobr-0.2.5.dist-info → tunned_geobr-0.2.7.dist-info}/licenses/LICENSE.txt +0 -0
@@ -86,8 +86,9 @@ def read_og_effective_geographic_basin(simplified=False, verbose=False):
86
86
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
87
87
  zip_ref.extractall(temp_dir)
88
88
 
89
+ zip_dir = os.path.join(temp_dir,'zipfolder')
89
90
  # Find the shapefile in the extracted files
90
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
91
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
91
92
 
92
93
  if not shp_files:
93
94
  raise Exception("No shapefile found in the downloaded zip file")
@@ -96,7 +97,7 @@ def read_og_effective_geographic_basin(simplified=False, verbose=False):
96
97
  if verbose:
97
98
  print("Reading shapefile")
98
99
 
99
- shp_path = os.path.join(temp_dir, shp_files[0])
100
+ shp_path = os.path.join(zip_dir, shp_files[0])
100
101
  gdf = gpd.read_file(shp_path)
101
102
 
102
103
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -7,7 +7,7 @@ import warnings
7
7
  import shutil
8
8
 
9
9
 
10
- def read_og_ipa_direct_evidence(simplified=True, verbose=False):
10
+ def read_og_ipa_direct_evidence(simplified=False, verbose=False):
11
11
  """Download data for Oil and Gas IPA Direct Evidence of Hydrocarbons in Brazil.
12
12
 
13
13
  This function downloads, processes, and returns data for Oil and Gas IPA Direct Evidence of Hydrocarbons
@@ -45,10 +45,7 @@ def read_og_ipa_direct_evidence(simplified=True, verbose=False):
45
45
  response.raise_for_status()
46
46
  response_json = response.json()
47
47
 
48
- if "value" not in response_json or not response_json["value"]:
49
- raise ValueError("No data found in the response")
50
-
51
- download_url = response_json["value"]["itemUrl"]
48
+ download_url = response_json['results'][0]['value']['url']
52
49
 
53
50
  if verbose:
54
51
  print(f"Download URL: {download_url}")
@@ -74,12 +71,13 @@ def read_og_ipa_direct_evidence(simplified=True, verbose=False):
74
71
  zip_ref.extractall(temp_dir)
75
72
 
76
73
  # Find the shapefile
77
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
74
+ zip_dir = os.path.join(temp_dir, 'zipfolder')
75
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith(".shp")]
78
76
 
79
77
  if not shp_files:
80
78
  raise FileNotFoundError("No shapefile found in the downloaded zip file")
81
79
 
82
- shp_path = os.path.join(temp_dir, shp_files[0])
80
+ shp_path = os.path.join(zip_dir, shp_files[0])
83
81
 
84
82
  if verbose:
85
83
  print(f"Reading shapefile from {shp_path}")
@@ -99,7 +97,7 @@ def read_og_ipa_direct_evidence(simplified=True, verbose=False):
99
97
  essential_cols = ["geometry"]
100
98
 
101
99
  # Add any other essential columns that exist in the dataset
102
- for col in ["CLASSE", "NOME", "DESCRICAO", "AREA_KM2"]:
100
+ for col in ["NOME", "MUNICIPIO", "UF", "ALTURA", "SITUACAO"]:
103
101
  if col in gdf.columns:
104
102
  essential_cols.append(col)
105
103
 
@@ -7,7 +7,7 @@ import warnings
7
7
  import shutil
8
8
 
9
9
 
10
- def read_og_ipa_exploratory_activity(simplified=True, verbose=False):
10
+ def read_og_ipa_exploratory_activity(simplified=False, verbose=False):
11
11
  """Download data for Oil and Gas IPA Exploratory Activity in Brazil.
12
12
 
13
13
  This function downloads, processes, and returns data for Oil and Gas IPA Exploratory Activity
@@ -45,10 +45,7 @@ def read_og_ipa_exploratory_activity(simplified=True, verbose=False):
45
45
  response.raise_for_status()
46
46
  response_json = response.json()
47
47
 
48
- if "value" not in response_json or not response_json["value"]:
49
- raise ValueError("No data found in the response")
50
-
51
- download_url = response_json["value"]["itemUrl"]
48
+ download_url = response_json['results'][0]['value']['url']
52
49
 
53
50
  if verbose:
54
51
  print(f"Download URL: {download_url}")
@@ -74,12 +71,13 @@ def read_og_ipa_exploratory_activity(simplified=True, verbose=False):
74
71
  zip_ref.extractall(temp_dir)
75
72
 
76
73
  # Find the shapefile
77
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
74
+ zip_dir = os.path.join(temp_dir, 'zipfolder')
75
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith(".shp")]
78
76
 
79
77
  if not shp_files:
80
78
  raise FileNotFoundError("No shapefile found in the downloaded zip file")
81
79
 
82
- shp_path = os.path.join(temp_dir, shp_files[0])
80
+ shp_path = os.path.join(zip_dir, shp_files[0])
83
81
 
84
82
  if verbose:
85
83
  print(f"Reading shapefile from {shp_path}")
@@ -99,7 +97,7 @@ def read_og_ipa_exploratory_activity(simplified=True, verbose=False):
99
97
  essential_cols = ["geometry"]
100
98
 
101
99
  # Add any other essential columns that exist in the dataset
102
- for col in ["CLASSE", "NOME", "DESCRICAO", "AREA_KM2"]:
100
+ for col in ["NOME", "MUNICIPIO", "UF", "ALTURA", "SITUACAO"]:
103
101
  if col in gdf.columns:
104
102
  essential_cols.append(col)
105
103
 
@@ -117,3 +115,6 @@ def read_og_ipa_exploratory_activity(simplified=True, verbose=False):
117
115
  except Exception as e:
118
116
  warnings.warn(f"Unexpected error: {e}")
119
117
  return None
118
+
119
+ if __name__ == '__main__':
120
+ read_og_ipa_exploratory_activity()
@@ -127,3 +127,6 @@ def read_og_ipa_exploratory_intensity(simplified=False, verbose=False):
127
127
 
128
128
  except Exception as e:
129
129
  raise Exception(f"Failed to download or process IPA exploratory intensity data: {str(e)}")
130
+
131
+ if __name__ == '__main__':
132
+ read_og_ipa_exploratory_intensity()
@@ -7,7 +7,7 @@ import warnings
7
7
  import shutil
8
8
 
9
9
 
10
- def read_og_ipa_need_for_knowledge(simplified=True, verbose=False):
10
+ def read_og_ipa_need_for_knowledge(simplified=False, verbose=False):
11
11
  """Download data for Oil and Gas IPA Need for Knowledge in Brazil.
12
12
 
13
13
  This function downloads, processes, and returns data for Oil and Gas IPA Need for Knowledge
@@ -45,10 +45,7 @@ def read_og_ipa_need_for_knowledge(simplified=True, verbose=False):
45
45
  response.raise_for_status()
46
46
  response_json = response.json()
47
47
 
48
- if "value" not in response_json or not response_json["value"]:
49
- raise ValueError("No data found in the response")
50
-
51
- download_url = response_json["value"]["itemUrl"]
48
+ download_url = response_json['results'][0]['value']['url']
52
49
 
53
50
  if verbose:
54
51
  print(f"Download URL: {download_url}")
@@ -74,12 +71,13 @@ def read_og_ipa_need_for_knowledge(simplified=True, verbose=False):
74
71
  zip_ref.extractall(temp_dir)
75
72
 
76
73
  # Find the shapefile
77
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
74
+ zip_dir = os.path.join(temp_dir, 'zipfolder')
75
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith(".shp")]
78
76
 
79
77
  if not shp_files:
80
78
  raise FileNotFoundError("No shapefile found in the downloaded zip file")
81
79
 
82
- shp_path = os.path.join(temp_dir, shp_files[0])
80
+ shp_path = os.path.join(zip_dir, shp_files[0])
83
81
 
84
82
  if verbose:
85
83
  print(f"Reading shapefile from {shp_path}")
@@ -99,7 +97,7 @@ def read_og_ipa_need_for_knowledge(simplified=True, verbose=False):
99
97
  essential_cols = ["geometry"]
100
98
 
101
99
  # Add any other essential columns that exist in the dataset
102
- for col in ["CLASSE", "NOME", "DESCRICAO", "AREA_KM2"]:
100
+ for col in ["NOME", "MUNICIPIO", "UF", "ALTURA", "SITUACAO"]:
103
101
  if col in gdf.columns:
104
102
  essential_cols.append(col)
105
103
 
@@ -7,7 +7,7 @@ import warnings
7
7
  import shutil
8
8
 
9
9
 
10
- def read_og_ipa_prospectiveness(simplified=True, verbose=False):
10
+ def read_og_ipa_prospectiveness(simplified=False, verbose=False):
11
11
  """Download data for Oil and Gas IPA Prospectiveness in Brazil.
12
12
 
13
13
  This function downloads, processes, and returns data for Oil and Gas IPA Prospectiveness
@@ -45,10 +45,7 @@ def read_og_ipa_prospectiveness(simplified=True, verbose=False):
45
45
  response.raise_for_status()
46
46
  response_json = response.json()
47
47
 
48
- if "value" not in response_json or not response_json["value"]:
49
- raise ValueError("No data found in the response")
50
-
51
- download_url = response_json["value"]["itemUrl"]
48
+ download_url = response_json['results'][0]['value']['url']
52
49
 
53
50
  if verbose:
54
51
  print(f"Download URL: {download_url}")
@@ -74,12 +71,13 @@ def read_og_ipa_prospectiveness(simplified=True, verbose=False):
74
71
  zip_ref.extractall(temp_dir)
75
72
 
76
73
  # Find the shapefile
77
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
74
+ zip_dir = os.path.join(temp_dir, 'zipfolder')
75
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith(".shp")]
78
76
 
79
77
  if not shp_files:
80
78
  raise FileNotFoundError("No shapefile found in the downloaded zip file")
81
79
 
82
- shp_path = os.path.join(temp_dir, shp_files[0])
80
+ shp_path = os.path.join(zip_dir, shp_files[0])
83
81
 
84
82
  if verbose:
85
83
  print(f"Reading shapefile from {shp_path}")
@@ -99,7 +97,7 @@ def read_og_ipa_prospectiveness(simplified=True, verbose=False):
99
97
  essential_cols = ["geometry"]
100
98
 
101
99
  # Add any other essential columns that exist in the dataset
102
- for col in ["CLASSE", "NOME", "DESCRICAO", "AREA_KM2"]:
100
+ for col in ["NOME", "MUNICIPIO", "UF", "ALTURA", "SITUACAO"]:
103
101
  if col in gdf.columns:
104
102
  essential_cols.append(col)
105
103
 
@@ -7,7 +7,7 @@ import warnings
7
7
  import shutil
8
8
 
9
9
 
10
- def read_og_ipa_supply_infrastructure(simplified=True, verbose=False):
10
+ def read_og_ipa_supply_infrastructure(simplified=False, verbose=False):
11
11
  """Download data for Oil and Gas IPA Supply Infrastructure in Brazil.
12
12
 
13
13
  This function downloads, processes, and returns data for Oil and Gas IPA Supply Infrastructure
@@ -45,10 +45,7 @@ def read_og_ipa_supply_infrastructure(simplified=True, verbose=False):
45
45
  response.raise_for_status()
46
46
  response_json = response.json()
47
47
 
48
- if "value" not in response_json or not response_json["value"]:
49
- raise ValueError("No data found in the response")
50
-
51
- download_url = response_json["value"]["itemUrl"]
48
+ download_url = response_json['results'][0]['value']['url']
52
49
 
53
50
  if verbose:
54
51
  print(f"Download URL: {download_url}")
@@ -74,12 +71,13 @@ def read_og_ipa_supply_infrastructure(simplified=True, verbose=False):
74
71
  zip_ref.extractall(temp_dir)
75
72
 
76
73
  # Find the shapefile
77
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
74
+ zip_dir = os.path.join(temp_dir, 'zipfolder')
75
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith(".shp")]
78
76
 
79
77
  if not shp_files:
80
78
  raise FileNotFoundError("No shapefile found in the downloaded zip file")
81
79
 
82
- shp_path = os.path.join(temp_dir, shp_files[0])
80
+ shp_path = os.path.join(zip_dir, shp_files[0])
83
81
 
84
82
  if verbose:
85
83
  print(f"Reading shapefile from {shp_path}")
@@ -99,7 +97,7 @@ def read_og_ipa_supply_infrastructure(simplified=True, verbose=False):
99
97
  essential_cols = ["geometry"]
100
98
 
101
99
  # Add any other essential columns that exist in the dataset
102
- for col in ["CLASSE", "NOME", "DESCRICAO", "AREA_KM2"]:
100
+ for col in ["NOME", "MUNICIPIO", "UF", "ALTURA", "SITUACAO"]:
103
101
  if col in gdf.columns:
104
102
  essential_cols.append(col)
105
103
 
@@ -7,7 +7,7 @@ import warnings
7
7
  import shutil
8
8
 
9
9
 
10
- def read_og_legal_pre_salt_polygon(simplified=True, verbose=False):
10
+ def read_og_legal_pre_salt_polygon(simplified=False, verbose=False):
11
11
  """Download data for Oil and Gas Legal Pre-Salt Polygon in Brazil.
12
12
 
13
13
  This function downloads, processes, and returns data for Oil and Gas Legal Pre-Salt Polygon
@@ -45,10 +45,7 @@ def read_og_legal_pre_salt_polygon(simplified=True, verbose=False):
45
45
  response.raise_for_status()
46
46
  response_json = response.json()
47
47
 
48
- if "value" not in response_json or not response_json["value"]:
49
- raise ValueError("No data found in the response")
50
-
51
- download_url = response_json["value"]["itemUrl"]
48
+ download_url = response_json['results'][0]['value']['url']
52
49
 
53
50
  if verbose:
54
51
  print(f"Download URL: {download_url}")
@@ -74,12 +71,13 @@ def read_og_legal_pre_salt_polygon(simplified=True, verbose=False):
74
71
  zip_ref.extractall(temp_dir)
75
72
 
76
73
  # Find the shapefile
77
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
74
+ zip_dir = os.path.join(temp_dir, 'zipfolder')
75
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith(".shp")]
78
76
 
79
77
  if not shp_files:
80
78
  raise FileNotFoundError("No shapefile found in the downloaded zip file")
81
79
 
82
- shp_path = os.path.join(temp_dir, shp_files[0])
80
+ shp_path = os.path.join(zip_dir, shp_files[0])
83
81
 
84
82
  if verbose:
85
83
  print(f"Reading shapefile from {shp_path}")
@@ -99,7 +97,7 @@ def read_og_legal_pre_salt_polygon(simplified=True, verbose=False):
99
97
  essential_cols = ["geometry"]
100
98
 
101
99
  # Add any other essential columns that exist in the dataset
102
- for col in ["NOME", "DESCRICAO", "AREA_KM2"]:
100
+ for col in ["NOME", "MUNICIPIO", "UF", "ALTURA", "SITUACAO"]:
103
101
  if col in gdf.columns:
104
102
  essential_cols.append(col)
105
103
 
@@ -86,8 +86,9 @@ def read_og_predominant_fluid_type(simplified=False, verbose=False):
86
86
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
87
87
  zip_ref.extractall(temp_dir)
88
88
 
89
+ zip_dir = os.path.join(temp_dir,'zipfolder')
89
90
  # Find the shapefile in the extracted files
90
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
91
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
91
92
 
92
93
  if not shp_files:
93
94
  raise Exception("No shapefile found in the downloaded zip file")
@@ -96,7 +97,7 @@ def read_og_predominant_fluid_type(simplified=False, verbose=False):
96
97
  if verbose:
97
98
  print("Reading shapefile")
98
99
 
99
- shp_path = os.path.join(temp_dir, shp_files[0])
100
+ shp_path = os.path.join(zip_dir, shp_files[0])
100
101
  gdf = gpd.read_file(shp_path)
101
102
 
102
103
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -86,8 +86,9 @@ def read_og_probabilistic_effective_basin(simplified=False, verbose=False):
86
86
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
87
87
  zip_ref.extractall(temp_dir)
88
88
 
89
+ zip_dir = os.path.join(temp_dir,'zipfolder')
89
90
  # Find the shapefile in the extracted files
90
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
91
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
91
92
 
92
93
  if not shp_files:
93
94
  raise Exception("No shapefile found in the downloaded zip file")
@@ -96,7 +97,7 @@ def read_og_probabilistic_effective_basin(simplified=False, verbose=False):
96
97
  if verbose:
97
98
  print("Reading shapefile")
98
99
 
99
- shp_path = os.path.join(temp_dir, shp_files[0])
100
+ shp_path = os.path.join(zip_dir, shp_files[0])
100
101
  gdf = gpd.read_file(shp_path)
101
102
 
102
103
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -86,8 +86,9 @@ def read_og_total_ipa(simplified=False, verbose=False):
86
86
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
87
87
  zip_ref.extractall(temp_dir)
88
88
 
89
+ zip_dir = os.path.join(temp_dir,'zipfolder')
89
90
  # Find the shapefile in the extracted files
90
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
91
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
91
92
 
92
93
  if not shp_files:
93
94
  raise Exception("No shapefile found in the downloaded zip file")
@@ -96,7 +97,7 @@ def read_og_total_ipa(simplified=False, verbose=False):
96
97
  if verbose:
97
98
  print("Reading shapefile")
98
99
 
99
- shp_path = os.path.join(temp_dir, shp_files[0])
100
+ shp_path = os.path.join(zip_dir, shp_files[0])
100
101
  gdf = gpd.read_file(shp_path)
101
102
 
102
103
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -86,8 +86,9 @@ def read_og_unconventional_resources(simplified=False, verbose=False):
86
86
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
87
87
  zip_ref.extractall(temp_dir)
88
88
 
89
+ zip_dir = os.path.join(temp_dir,'zipfolder')
89
90
  # Find the shapefile in the extracted files
90
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
91
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
91
92
 
92
93
  if not shp_files:
93
94
  raise Exception("No shapefile found in the downloaded zip file")
@@ -96,7 +97,7 @@ def read_og_unconventional_resources(simplified=False, verbose=False):
96
97
  if verbose:
97
98
  print("Reading shapefile")
98
99
 
99
- shp_path = os.path.join(temp_dir, shp_files[0])
100
+ shp_path = os.path.join(zip_dir, shp_files[0])
100
101
  gdf = gpd.read_file(shp_path)
101
102
 
102
103
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -85,8 +85,9 @@ def read_oil_and_derivatives_terminal(simplified=False, verbose=False):
85
85
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
86
86
  zip_ref.extractall(temp_dir)
87
87
 
88
+ zip_dir = os.path.join(temp_dir,'zipfolder')
88
89
  # Find the shapefile in the extracted files
89
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
90
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
90
91
 
91
92
  if not shp_files:
92
93
  raise Exception("No shapefile found in the downloaded zip file")
@@ -95,7 +96,7 @@ def read_oil_and_derivatives_terminal(simplified=False, verbose=False):
95
96
  if verbose:
96
97
  print("Reading shapefile")
97
98
 
98
- shp_path = os.path.join(temp_dir, shp_files[0])
99
+ shp_path = os.path.join(zip_dir, shp_files[0])
99
100
  gdf = gpd.read_file(shp_path)
100
101
 
101
102
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -85,8 +85,9 @@ def read_pio_terminals(simplified=False, verbose=False):
85
85
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
86
86
  zip_ref.extractall(temp_dir)
87
87
 
88
+ zip_dir = os.path.join(temp_dir,'zipfolder')
88
89
  # Find the shapefile in the extracted files
89
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
90
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
90
91
 
91
92
  if not shp_files:
92
93
  raise Exception("No shapefile found in the downloaded zip file")
@@ -95,7 +96,7 @@ def read_pio_terminals(simplified=False, verbose=False):
95
96
  if verbose:
96
97
  print("Reading shapefile")
97
98
 
98
- shp_path = os.path.join(temp_dir, shp_files[0])
99
+ shp_path = os.path.join(zip_dir, shp_files[0])
99
100
  gdf = gpd.read_file(shp_path)
100
101
 
101
102
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -2,7 +2,7 @@
2
2
  from geobr.utils import select_metadata, download_gpkg
3
3
 
4
4
 
5
- def read_pop_arrangements(year=2015, simplified=True, verbose=False):
5
+ def read_pop_arrangements(year=2015, simplified=False, verbose=False):
6
6
  r""" Download population arrangements in Brazil
7
7
 
8
8
  This function reads the official data on population arrangements (Arranjos
@@ -85,8 +85,9 @@ def read_processing_facilities(simplified=False, verbose=False):
85
85
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
86
86
  zip_ref.extractall(temp_dir)
87
87
 
88
+ zip_dir = os.path.join(temp_dir,'zipfolder')
88
89
  # Find the shapefile in the extracted files
89
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
90
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
90
91
 
91
92
  if not shp_files:
92
93
  raise Exception("No shapefile found in the downloaded zip file")
@@ -95,7 +96,7 @@ def read_processing_facilities(simplified=False, verbose=False):
95
96
  if verbose:
96
97
  print("Reading shapefile")
97
98
 
98
- shp_path = os.path.join(temp_dir, shp_files[0])
99
+ shp_path = os.path.join(zip_dir, shp_files[0])
99
100
  gdf = gpd.read_file(shp_path)
100
101
 
101
102
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -1,7 +1,7 @@
1
1
  from geobr import read_region as _read_region
2
2
 
3
3
 
4
- def read_region(year=2010, simplified=True, verbose=False):
4
+ def read_region(year=2010, simplified=False, verbose=False):
5
5
  """ Download shape file of Brazil Regions as sf objects.
6
6
 
7
7
  Data at scale 1:250,000, using Geodetic reference system "SIRGAS2000" and CRS(4674)
@@ -7,7 +7,7 @@ import warnings
7
7
  import shutil
8
8
 
9
9
 
10
- def read_sedimentary_basins(simplified=True, verbose=False):
10
+ def read_sedimentary_basins(simplified=False, verbose=False):
11
11
  """Download data for Sedimentary Basins in Brazil.
12
12
 
13
13
  This function downloads, processes, and returns data for Sedimentary Basins
@@ -45,10 +45,7 @@ def read_sedimentary_basins(simplified=True, verbose=False):
45
45
  response.raise_for_status()
46
46
  response_json = response.json()
47
47
 
48
- if "value" not in response_json or not response_json["value"]:
49
- raise ValueError("No data found in the response")
50
-
51
- download_url = response_json["value"]["itemUrl"]
48
+ download_url = response_json['results'][0]['value']['url']
52
49
 
53
50
  if verbose:
54
51
  print(f"Download URL: {download_url}")
@@ -74,12 +71,13 @@ def read_sedimentary_basins(simplified=True, verbose=False):
74
71
  zip_ref.extractall(temp_dir)
75
72
 
76
73
  # Find the shapefile
77
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
74
+ zip_dir = os.path.join(temp_dir, 'zipfolder')
75
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith(".shp")]
78
76
 
79
77
  if not shp_files:
80
78
  raise FileNotFoundError("No shapefile found in the downloaded zip file")
81
79
 
82
- shp_path = os.path.join(temp_dir, shp_files[0])
80
+ shp_path = os.path.join(zip_dir, shp_files[0])
83
81
 
84
82
  if verbose:
85
83
  print(f"Reading shapefile from {shp_path}")
@@ -99,7 +97,7 @@ def read_sedimentary_basins(simplified=True, verbose=False):
99
97
  essential_cols = ["geometry"]
100
98
 
101
99
  # Add any other essential columns that exist in the dataset
102
- for col in ["NOME", "DESCRICAO", "AREA_KM2"]:
100
+ for col in ["NOME", "MUNICIPIO", "UF", "ALTURA", "SITUACAO"]:
103
101
  if col in gdf.columns:
104
102
  essential_cols.append(col)
105
103
 
@@ -1,7 +1,7 @@
1
1
  from geobr.utils import select_metadata, download_gpkg
2
2
 
3
3
 
4
- def read_semiarid(year=2017, simplified=True, verbose=False):
4
+ def read_semiarid(year=2017, simplified=False, verbose=False):
5
5
  """ Download official data of Brazilian Semiarid as an sf object.
6
6
 
7
7
  This data set covers the whole of Brazilian Semiarid as defined in the resolution in 23/11/2017). The original