tunned-geobr 0.2.4__py3-none-any.whl → 0.2.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. tunned_geobr/__init__.py +2 -3
  2. tunned_geobr/list_geobr.py +1 -5
  3. tunned_geobr/read_ama_anemometric_towers.py +8 -6
  4. tunned_geobr/read_areas_under_contract.py +3 -2
  5. tunned_geobr/read_biodiesel_plants.py +3 -2
  6. tunned_geobr/read_biomes.py +1 -1
  7. tunned_geobr/read_biomethane_plants.py +3 -2
  8. tunned_geobr/read_census_tract.py +90 -88
  9. tunned_geobr/read_comparable_areas.py +1 -1
  10. tunned_geobr/read_compression_stations.py +3 -2
  11. tunned_geobr/read_conservation_units.py +1 -1
  12. tunned_geobr/read_country.py +1 -1
  13. tunned_geobr/read_disaster_risk_area.py +1 -1
  14. tunned_geobr/read_drainage_ducts.py +3 -2
  15. tunned_geobr/read_etanol_plants.py +3 -2
  16. tunned_geobr/read_existent_biomass_ute.py +6 -2
  17. tunned_geobr/read_existent_fossile_ute.py +7 -3
  18. tunned_geobr/read_existent_nuclear_ute.py +5 -4
  19. tunned_geobr/read_existent_substations.py +5 -4
  20. tunned_geobr/read_existent_transmission_lines.py +6 -5
  21. tunned_geobr/read_exploration_production_environment.py +6 -8
  22. tunned_geobr/read_federal_union_areas.py +3 -2
  23. tunned_geobr/read_fuel_bases.py +3 -2
  24. tunned_geobr/read_gas_distribution_pipelines.py +3 -2
  25. tunned_geobr/read_gas_transport_pipelines.py +3 -2
  26. tunned_geobr/read_glp_bases.py +3 -2
  27. tunned_geobr/read_health_region.py +1 -1
  28. tunned_geobr/read_hydroelectric_feasibility_studies.py +9 -8
  29. tunned_geobr/read_hydroelectric_inventory_aai_studies.py +9 -8
  30. tunned_geobr/read_immediate_region.py +1 -1
  31. tunned_geobr/read_indigenous_land.py +1 -1
  32. tunned_geobr/read_intermediate_region.py +1 -1
  33. tunned_geobr/read_isolated_systems.py +5 -4
  34. tunned_geobr/read_meso_region.py +1 -1
  35. tunned_geobr/read_metro_area.py +1 -1
  36. tunned_geobr/read_micro_region.py +1 -1
  37. tunned_geobr/read_municipality.py +1 -1
  38. tunned_geobr/read_municipality_direct.py +1 -1
  39. tunned_geobr/read_neighborhood.py +1 -1
  40. tunned_geobr/read_og_basement.py +9 -8
  41. tunned_geobr/read_og_effective_geographic_basin.py +3 -2
  42. tunned_geobr/read_og_ipa_direct_evidence.py +6 -8
  43. tunned_geobr/read_og_ipa_exploratory_activity.py +9 -8
  44. tunned_geobr/read_og_ipa_exploratory_intensity.py +3 -0
  45. tunned_geobr/read_og_ipa_need_for_knowledge.py +6 -8
  46. tunned_geobr/read_og_ipa_prospectiveness.py +6 -8
  47. tunned_geobr/read_og_ipa_supply_infrastructure.py +6 -8
  48. tunned_geobr/read_og_legal_pre_salt_polygon.py +6 -8
  49. tunned_geobr/read_og_predominant_fluid_type.py +3 -2
  50. tunned_geobr/read_og_probabilistic_effective_basin.py +3 -2
  51. tunned_geobr/read_og_total_ipa.py +3 -2
  52. tunned_geobr/read_og_unconventional_resources.py +3 -2
  53. tunned_geobr/read_oil_and_derivatives_terminal.py +3 -2
  54. tunned_geobr/read_pio_terminals.py +3 -2
  55. tunned_geobr/read_pop_arrangements.py +1 -1
  56. tunned_geobr/read_processing_facilities.py +3 -2
  57. tunned_geobr/read_quilombola_areas.py +74 -56
  58. tunned_geobr/read_region.py +1 -1
  59. tunned_geobr/read_sedimentary_basins.py +6 -8
  60. tunned_geobr/read_semiarid.py +1 -1
  61. tunned_geobr/read_state.py +1 -1
  62. tunned_geobr/read_state_direct.py +1 -1
  63. tunned_geobr/read_urban_area.py +1 -1
  64. tunned_geobr/read_urban_concentrations.py +1 -1
  65. tunned_geobr/read_water_bodies_ana.py +1 -1
  66. tunned_geobr/read_weighting_area.py +1 -1
  67. {tunned_geobr-0.2.4.dist-info → tunned_geobr-0.2.6.dist-info}/METADATA +1 -1
  68. tunned_geobr-0.2.6.dist-info/RECORD +138 -0
  69. tunned_geobr/read_census_tract_2022.py +0 -101
  70. tunned_geobr/read_quilombola_areas_temp.py +0 -103
  71. tunned_geobr-0.2.4.dist-info/RECORD +0 -140
  72. {tunned_geobr-0.2.4.dist-info → tunned_geobr-0.2.6.dist-info}/WHEEL +0 -0
  73. {tunned_geobr-0.2.4.dist-info → tunned_geobr-0.2.6.dist-info}/entry_points.txt +0 -0
  74. {tunned_geobr-0.2.4.dist-info → tunned_geobr-0.2.6.dist-info}/licenses/LICENSE.txt +0 -0
@@ -7,7 +7,7 @@ import warnings
7
7
  import shutil
8
8
 
9
9
 
10
- def read_exploration_production_environment(simplified=True, verbose=False):
10
+ def read_exploration_production_environment(simplified=False, verbose=False):
11
11
  """Download data for Exploration and Production Environment in Brazil.
12
12
 
13
13
  This function downloads, processes, and returns data for Exploration and Production Environment
@@ -45,10 +45,7 @@ def read_exploration_production_environment(simplified=True, verbose=False):
45
45
  response.raise_for_status()
46
46
  response_json = response.json()
47
47
 
48
- if "value" not in response_json or not response_json["value"]:
49
- raise ValueError("No data found in the response")
50
-
51
- download_url = response_json["value"]["itemUrl"]
48
+ download_url = response_json['results'][0]['value']['url']
52
49
 
53
50
  if verbose:
54
51
  print(f"Download URL: {download_url}")
@@ -74,12 +71,13 @@ def read_exploration_production_environment(simplified=True, verbose=False):
74
71
  zip_ref.extractall(temp_dir)
75
72
 
76
73
  # Find the shapefile
77
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
74
+ zip_dir = os.path.join(temp_dir, 'zipfolder')
75
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith(".shp")]
78
76
 
79
77
  if not shp_files:
80
78
  raise FileNotFoundError("No shapefile found in the downloaded zip file")
81
79
 
82
- shp_path = os.path.join(temp_dir, shp_files[0])
80
+ shp_path = os.path.join(zip_dir, shp_files[0])
83
81
 
84
82
  if verbose:
85
83
  print(f"Reading shapefile from {shp_path}")
@@ -99,7 +97,7 @@ def read_exploration_production_environment(simplified=True, verbose=False):
99
97
  essential_cols = ["geometry"]
100
98
 
101
99
  # Add any other essential columns that exist in the dataset
102
- for col in ["NOME", "DESCRICAO", "TIPO", "AREA_KM2"]:
100
+ for col in ["NOME", "MUNICIPIO", "UF", "ALTURA", "SITUACAO"]:
103
101
  if col in gdf.columns:
104
102
  essential_cols.append(col)
105
103
 
@@ -86,8 +86,9 @@ def read_federal_union_areas(simplified=False, verbose=False):
86
86
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
87
87
  zip_ref.extractall(temp_dir)
88
88
 
89
+ zip_dir = os.path.join(temp_dir,'zipfolder')
89
90
  # Find the shapefile in the extracted files
90
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
91
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
91
92
 
92
93
  if not shp_files:
93
94
  raise Exception("No shapefile found in the downloaded zip file")
@@ -96,7 +97,7 @@ def read_federal_union_areas(simplified=False, verbose=False):
96
97
  if verbose:
97
98
  print("Reading shapefile")
98
99
 
99
- shp_path = os.path.join(temp_dir, shp_files[0])
100
+ shp_path = os.path.join(zip_dir, shp_files[0])
100
101
  gdf = gpd.read_file(shp_path)
101
102
 
102
103
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -85,8 +85,9 @@ def read_fuel_bases(simplified=False, verbose=False):
85
85
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
86
86
  zip_ref.extractall(temp_dir)
87
87
 
88
+ zip_dir = os.path.join(temp_dir,'zipfolder')
88
89
  # Find the shapefile in the extracted files
89
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
90
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
90
91
 
91
92
  if not shp_files:
92
93
  raise Exception("No shapefile found in the downloaded zip file")
@@ -95,7 +96,7 @@ def read_fuel_bases(simplified=False, verbose=False):
95
96
  if verbose:
96
97
  print("Reading shapefile")
97
98
 
98
- shp_path = os.path.join(temp_dir, shp_files[0])
99
+ shp_path = os.path.join(zip_dir, shp_files[0])
99
100
  gdf = gpd.read_file(shp_path)
100
101
 
101
102
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -85,8 +85,9 @@ def read_gas_distribution_pipelines(simplified=False, verbose=False):
85
85
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
86
86
  zip_ref.extractall(temp_dir)
87
87
 
88
+ zip_dir = os.path.join(temp_dir,'zipfolder')
88
89
  # Find the shapefile in the extracted files
89
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
90
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
90
91
 
91
92
  if not shp_files:
92
93
  raise Exception("No shapefile found in the downloaded zip file")
@@ -95,7 +96,7 @@ def read_gas_distribution_pipelines(simplified=False, verbose=False):
95
96
  if verbose:
96
97
  print("Reading shapefile")
97
98
 
98
- shp_path = os.path.join(temp_dir, shp_files[0])
99
+ shp_path = os.path.join(zip_dir, shp_files[0])
99
100
  gdf = gpd.read_file(shp_path)
100
101
 
101
102
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -85,8 +85,9 @@ def read_gas_transport_pipelines(simplified=False, verbose=False):
85
85
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
86
86
  zip_ref.extractall(temp_dir)
87
87
 
88
+ zip_dir = os.path.join(temp_dir,'zipfolder')
88
89
  # Find the shapefile in the extracted files
89
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
90
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
90
91
 
91
92
  if not shp_files:
92
93
  raise Exception("No shapefile found in the downloaded zip file")
@@ -95,7 +96,7 @@ def read_gas_transport_pipelines(simplified=False, verbose=False):
95
96
  if verbose:
96
97
  print("Reading shapefile")
97
98
 
98
- shp_path = os.path.join(temp_dir, shp_files[0])
99
+ shp_path = os.path.join(zip_dir, shp_files[0])
99
100
  gdf = gpd.read_file(shp_path)
100
101
 
101
102
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -85,8 +85,9 @@ def read_glp_bases(simplified=False, verbose=False):
85
85
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
86
86
  zip_ref.extractall(temp_dir)
87
87
 
88
+ zip_dir = os.path.join(temp_dir,'zipfolder')
88
89
  # Find the shapefile in the extracted files
89
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
90
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
90
91
 
91
92
  if not shp_files:
92
93
  raise Exception("No shapefile found in the downloaded zip file")
@@ -95,7 +96,7 @@ def read_glp_bases(simplified=False, verbose=False):
95
96
  if verbose:
96
97
  print("Reading shapefile")
97
98
 
98
- shp_path = os.path.join(temp_dir, shp_files[0])
99
+ shp_path = os.path.join(zip_dir, shp_files[0])
99
100
  gdf = gpd.read_file(shp_path)
100
101
 
101
102
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -2,7 +2,7 @@ from geobr.utils import select_metadata, download_gpkg
2
2
 
3
3
 
4
4
  def read_health_region(year=2013, macro=False, simplified=True, verbose=False):
5
- """Download official data of Brazilian health regions as an sf object.
5
+ """Download official data of Brazilian health regions ascii(object) an sf object.
6
6
 
7
7
  Health regions are used to guide the the regional and state planning of health services.
8
8
  Macro health regions, in particular, are used to guide the planning of high complexity
@@ -7,7 +7,7 @@ import warnings
7
7
  import shutil
8
8
 
9
9
 
10
- def read_hydroelectric_feasibility_studies(simplified=True, verbose=False):
10
+ def read_hydroelectric_feasibility_studies(simplified=False, verbose=False):
11
11
  """Download data for Hydroelectric Feasibility Studies in Brazil.
12
12
 
13
13
  This function downloads, processes, and returns data for Hydroelectric Feasibility Studies
@@ -45,10 +45,7 @@ def read_hydroelectric_feasibility_studies(simplified=True, verbose=False):
45
45
  response.raise_for_status()
46
46
  response_json = response.json()
47
47
 
48
- if "value" not in response_json or not response_json["value"]:
49
- raise ValueError("No data found in the response")
50
-
51
- download_url = response_json["value"]["itemUrl"]
48
+ download_url = response_json['results'][0]['value']['url']
52
49
 
53
50
  if verbose:
54
51
  print(f"Download URL: {download_url}")
@@ -74,12 +71,13 @@ def read_hydroelectric_feasibility_studies(simplified=True, verbose=False):
74
71
  zip_ref.extractall(temp_dir)
75
72
 
76
73
  # Find the shapefile
77
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
74
+ zip_dir = os.path.join(temp_dir, 'zipfolder')
75
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith(".shp")]
78
76
 
79
77
  if not shp_files:
80
78
  raise FileNotFoundError("No shapefile found in the downloaded zip file")
81
79
 
82
- shp_path = os.path.join(temp_dir, shp_files[0])
80
+ shp_path = os.path.join(zip_dir, shp_files[0])
83
81
 
84
82
  if verbose:
85
83
  print(f"Reading shapefile from {shp_path}")
@@ -99,7 +97,7 @@ def read_hydroelectric_feasibility_studies(simplified=True, verbose=False):
99
97
  essential_cols = ["geometry"]
100
98
 
101
99
  # Add any other essential columns that exist in the dataset
102
- for col in ["NOME", "BACIA", "RIO", "POTENCIA", "AREA_KM2"]:
100
+ for col in ["NOME", "MUNICIPIO", "UF", "ALTURA", "SITUACAO"]:
103
101
  if col in gdf.columns:
104
102
  essential_cols.append(col)
105
103
 
@@ -117,3 +115,6 @@ def read_hydroelectric_feasibility_studies(simplified=True, verbose=False):
117
115
  except Exception as e:
118
116
  warnings.warn(f"Unexpected error: {e}")
119
117
  return None
118
+
119
+ if __name__ == '__main__':
120
+ read_hydroelectric_feasibility_studies()
@@ -7,7 +7,7 @@ import warnings
7
7
  import shutil
8
8
 
9
9
 
10
- def read_hydroelectric_inventory_aai_studies(simplified=True, verbose=False):
10
+ def read_hydroelectric_inventory_aai_studies(simplified=False, verbose=False):
11
11
  """Download data for Hydroelectric Inventory and AAI Studies in Brazil.
12
12
 
13
13
  This function downloads, processes, and returns data for Hydroelectric Inventory and AAI Studies
@@ -45,10 +45,7 @@ def read_hydroelectric_inventory_aai_studies(simplified=True, verbose=False):
45
45
  response.raise_for_status()
46
46
  response_json = response.json()
47
47
 
48
- if "value" not in response_json or not response_json["value"]:
49
- raise ValueError("No data found in the response")
50
-
51
- download_url = response_json["value"]["itemUrl"]
48
+ download_url = response_json['results'][0]['value']['url']
52
49
 
53
50
  if verbose:
54
51
  print(f"Download URL: {download_url}")
@@ -74,12 +71,13 @@ def read_hydroelectric_inventory_aai_studies(simplified=True, verbose=False):
74
71
  zip_ref.extractall(temp_dir)
75
72
 
76
73
  # Find the shapefile
77
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
74
+ zip_dir = os.path.join(temp_dir, 'zipfolder')
75
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith(".shp")]
78
76
 
79
77
  if not shp_files:
80
78
  raise FileNotFoundError("No shapefile found in the downloaded zip file")
81
79
 
82
- shp_path = os.path.join(temp_dir, shp_files[0])
80
+ shp_path = os.path.join(zip_dir, shp_files[0])
83
81
 
84
82
  if verbose:
85
83
  print(f"Reading shapefile from {shp_path}")
@@ -99,7 +97,7 @@ def read_hydroelectric_inventory_aai_studies(simplified=True, verbose=False):
99
97
  essential_cols = ["geometry"]
100
98
 
101
99
  # Add any other essential columns that exist in the dataset
102
- for col in ["NOME", "BACIA", "RIO", "TIPO", "SITUACAO", "AREA_KM2"]:
100
+ for col in ["NOME", "MUNICIPIO", "UF", "ALTURA", "SITUACAO"]:
103
101
  if col in gdf.columns:
104
102
  essential_cols.append(col)
105
103
 
@@ -117,3 +115,6 @@ def read_hydroelectric_inventory_aai_studies(simplified=True, verbose=False):
117
115
  except Exception as e:
118
116
  warnings.warn(f"Unexpected error: {e}")
119
117
  return None
118
+
119
+ if __name__ == '__main__':
120
+ read_hydroelectric_inventory_aai_studies()
@@ -2,7 +2,7 @@ from geobr.utils import select_metadata, download_gpkg, change_type_list, test_o
2
2
 
3
3
 
4
4
  def read_immediate_region(
5
- code_immediate="all", year=2017, simplified=True, verbose=False
5
+ code_immediate="all", year=2017, simplified=False, verbose=False
6
6
  ):
7
7
  """ Download shape files of Brazil's Immediate Geographic Areas as sf objects
8
8
 
@@ -1,7 +1,7 @@
1
1
  from geobr.utils import select_metadata, download_gpkg
2
2
 
3
3
 
4
- def read_indigenous_land(date=201907, simplified=True, verbose=False):
4
+ def read_indigenous_land(date=201907, simplified=False, verbose=False):
5
5
  """ Download official data of indigenous lands as an sf object.
6
6
 
7
7
  The data set covers the whole of Brazil and it includes indigenous lands from all ethnicities and
@@ -2,7 +2,7 @@ from geobr.utils import select_metadata, download_gpkg
2
2
 
3
3
 
4
4
  def read_intermediate_region(
5
- code_intermadiate="all", year=2019, simplified=True, verbose=False
5
+ code_intermadiate="all", year=2019, simplified=False, verbose=False
6
6
  ):
7
7
  r"""Download spatial data of Brazil's Intermediate Geographic Areas
8
8
 
@@ -85,8 +85,9 @@ def read_isolated_systems(simplified=False, verbose=False):
85
85
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
86
86
  zip_ref.extractall(temp_dir)
87
87
 
88
+ zip_dir = os.path.join(temp_dir,'zipfolder')
88
89
  # Find the shapefile in the extracted files
89
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
90
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
90
91
 
91
92
  if not shp_files:
92
93
  raise Exception("No shapefile found in the downloaded zip file")
@@ -95,7 +96,7 @@ def read_isolated_systems(simplified=False, verbose=False):
95
96
  if verbose:
96
97
  print("Reading shapefile")
97
98
 
98
- shp_path = os.path.join(temp_dir, shp_files[0])
99
+ shp_path = os.path.join(zip_dir, shp_files[0])
99
100
  gdf = gpd.read_file(shp_path)
100
101
 
101
102
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -111,7 +112,7 @@ def read_isolated_systems(simplified=False, verbose=False):
111
112
 
112
113
  # Select only the most important columns
113
114
  # Adjust these columns based on the actual data structure
114
- cols_to_keep = ['NOME', 'UF', 'MUNICIPIO', 'geometry']
115
+ cols_to_keep = ['NOME', 'POTENCIA', 'COMBUSTIVE', 'PROPRIETAR', 'UF', 'MUNICIPIO', 'geometry']
115
116
  cols_available = [col for col in cols_to_keep if col in gdf.columns]
116
117
 
117
118
  if not cols_available:
@@ -120,7 +121,7 @@ def read_isolated_systems(simplified=False, verbose=False):
120
121
  gdf = gdf[cols_available]
121
122
 
122
123
  if verbose:
123
- print("Finished processing isolated electrical systems data")
124
+ print("Finished processing existing biomass thermoelectric power plants data")
124
125
 
125
126
  return gdf
126
127
 
@@ -1,7 +1,7 @@
1
1
  from geobr.utils import select_metadata, download_gpkg
2
2
 
3
3
 
4
- def read_meso_region(code_meso="all", year=2010, simplified=True, verbose=False):
4
+ def read_meso_region(code_meso="all", year=2010, simplified=False, verbose=False):
5
5
  """Download shape files of meso region as sf objects. Data at scale 1:250,000, using Geodetic reference system "SIRGAS2000" and CRS(4674)
6
6
 
7
7
  Data at scale 1:250,000, using Geodetic reference system "SIRGAS2000" and CRS(4674)
@@ -1,7 +1,7 @@
1
1
  from geobr.utils import select_metadata, download_gpkg
2
2
 
3
3
 
4
- def read_metro_area(year=2018, simplified=True, verbose=False):
4
+ def read_metro_area(year=2018, simplified=False, verbose=False):
5
5
  """ Download shape files of official metropolitan areas in Brazil as an sf object.
6
6
 
7
7
  The function returns the shapes of municipalities grouped by their respective metro areas.
@@ -1,7 +1,7 @@
1
1
  from geobr.utils import select_metadata, download_gpkg
2
2
 
3
3
 
4
- def read_micro_region(code_micro="all", year=2010, simplified=True, verbose=False):
4
+ def read_micro_region(code_micro="all", year=2010, simplified=False, verbose=False):
5
5
  """Download shape files of micro region as sf objects
6
6
 
7
7
  Data at scale 1:250,000, using Geodetic reference system "SIRGAS2000" and CRS(4674)
@@ -1,7 +1,7 @@
1
1
  from geobr.utils import select_metadata, download_gpkg
2
2
 
3
3
 
4
- def read_municipality(code_muni="all", year=2010, simplified=True, verbose=False):
4
+ def read_municipality(code_muni="all", year=2010, simplified=False, verbose=False):
5
5
  """Download shape files of Brazilian municipalities as sf objects.
6
6
 
7
7
  Data at scale 1:250,000, using Geodetic reference system "SIRGAS2000" and CRS(4674)
@@ -5,7 +5,7 @@ import requests
5
5
  from zipfile import ZipFile
6
6
  from io import BytesIO
7
7
 
8
- def read_municipality(code_muni="all", simplified=True):
8
+ def read_municipality_direct(code_muni="all", simplified=False):
9
9
  """Download shapefiles of Brazilian municipalities as geopandas objects.
10
10
 
11
11
  This function downloads and processes municipality data directly from IBGE (Brazilian Institute of Geography and Statistics).
@@ -1,7 +1,7 @@
1
1
  from geobr.utils import select_metadata, download_gpkg
2
2
 
3
3
 
4
- def read_neighborhood(year=2010, simplified=True, verbose=False):
4
+ def read_neighborhood(year=2010, simplified=False, verbose=False):
5
5
  """ Download neighborhood limits of Brazilian municipalities as a geopandas geodataframe object
6
6
 
7
7
  Parameters
@@ -7,7 +7,7 @@ import warnings
7
7
  import shutil
8
8
 
9
9
 
10
- def read_og_basement(simplified=True, verbose=False):
10
+ def read_og_basement(simplified=False, verbose=False):
11
11
  """Download data for Oil and Gas Basement in Brazil.
12
12
 
13
13
  This function downloads, processes, and returns data for Oil and Gas Basement
@@ -45,10 +45,7 @@ def read_og_basement(simplified=True, verbose=False):
45
45
  response.raise_for_status()
46
46
  response_json = response.json()
47
47
 
48
- if "value" not in response_json or not response_json["value"]:
49
- raise ValueError("No data found in the response")
50
-
51
- download_url = response_json["value"]["itemUrl"]
48
+ download_url = response_json['results'][0]['value']['url']
52
49
 
53
50
  if verbose:
54
51
  print(f"Download URL: {download_url}")
@@ -74,12 +71,13 @@ def read_og_basement(simplified=True, verbose=False):
74
71
  zip_ref.extractall(temp_dir)
75
72
 
76
73
  # Find the shapefile
77
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
74
+ zip_dir = os.path.join(temp_dir, 'zipfolder')
75
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith(".shp")]
78
76
 
79
77
  if not shp_files:
80
78
  raise FileNotFoundError("No shapefile found in the downloaded zip file")
81
79
 
82
- shp_path = os.path.join(temp_dir, shp_files[0])
80
+ shp_path = os.path.join(zip_dir, shp_files[0])
83
81
 
84
82
  if verbose:
85
83
  print(f"Reading shapefile from {shp_path}")
@@ -99,7 +97,7 @@ def read_og_basement(simplified=True, verbose=False):
99
97
  essential_cols = ["geometry"]
100
98
 
101
99
  # Add any other essential columns that exist in the dataset
102
- for col in ["NOME", "DESCRICAO", "AREA_KM2"]:
100
+ for col in ["NOME", "MUNICIPIO", "UF", "ALTURA", "SITUACAO"]:
103
101
  if col in gdf.columns:
104
102
  essential_cols.append(col)
105
103
 
@@ -117,3 +115,6 @@ def read_og_basement(simplified=True, verbose=False):
117
115
  except Exception as e:
118
116
  warnings.warn(f"Unexpected error: {e}")
119
117
  return None
118
+
119
+ if __name__ == '__main__':
120
+ read_og_basement()
@@ -86,8 +86,9 @@ def read_og_effective_geographic_basin(simplified=False, verbose=False):
86
86
  with zipfile.ZipFile(zip_path, 'r') as zip_ref:
87
87
  zip_ref.extractall(temp_dir)
88
88
 
89
+ zip_dir = os.path.join(temp_dir,'zipfolder')
89
90
  # Find the shapefile in the extracted files
90
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
91
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith('.shp')]
91
92
 
92
93
  if not shp_files:
93
94
  raise Exception("No shapefile found in the downloaded zip file")
@@ -96,7 +97,7 @@ def read_og_effective_geographic_basin(simplified=False, verbose=False):
96
97
  if verbose:
97
98
  print("Reading shapefile")
98
99
 
99
- shp_path = os.path.join(temp_dir, shp_files[0])
100
+ shp_path = os.path.join(zip_dir, shp_files[0])
100
101
  gdf = gpd.read_file(shp_path)
101
102
 
102
103
  # Convert to SIRGAS 2000 (EPSG:4674)
@@ -7,7 +7,7 @@ import warnings
7
7
  import shutil
8
8
 
9
9
 
10
- def read_og_ipa_direct_evidence(simplified=True, verbose=False):
10
+ def read_og_ipa_direct_evidence(simplified=False, verbose=False):
11
11
  """Download data for Oil and Gas IPA Direct Evidence of Hydrocarbons in Brazil.
12
12
 
13
13
  This function downloads, processes, and returns data for Oil and Gas IPA Direct Evidence of Hydrocarbons
@@ -45,10 +45,7 @@ def read_og_ipa_direct_evidence(simplified=True, verbose=False):
45
45
  response.raise_for_status()
46
46
  response_json = response.json()
47
47
 
48
- if "value" not in response_json or not response_json["value"]:
49
- raise ValueError("No data found in the response")
50
-
51
- download_url = response_json["value"]["itemUrl"]
48
+ download_url = response_json['results'][0]['value']['url']
52
49
 
53
50
  if verbose:
54
51
  print(f"Download URL: {download_url}")
@@ -74,12 +71,13 @@ def read_og_ipa_direct_evidence(simplified=True, verbose=False):
74
71
  zip_ref.extractall(temp_dir)
75
72
 
76
73
  # Find the shapefile
77
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
74
+ zip_dir = os.path.join(temp_dir, 'zipfolder')
75
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith(".shp")]
78
76
 
79
77
  if not shp_files:
80
78
  raise FileNotFoundError("No shapefile found in the downloaded zip file")
81
79
 
82
- shp_path = os.path.join(temp_dir, shp_files[0])
80
+ shp_path = os.path.join(zip_dir, shp_files[0])
83
81
 
84
82
  if verbose:
85
83
  print(f"Reading shapefile from {shp_path}")
@@ -99,7 +97,7 @@ def read_og_ipa_direct_evidence(simplified=True, verbose=False):
99
97
  essential_cols = ["geometry"]
100
98
 
101
99
  # Add any other essential columns that exist in the dataset
102
- for col in ["CLASSE", "NOME", "DESCRICAO", "AREA_KM2"]:
100
+ for col in ["NOME", "MUNICIPIO", "UF", "ALTURA", "SITUACAO"]:
103
101
  if col in gdf.columns:
104
102
  essential_cols.append(col)
105
103
 
@@ -7,7 +7,7 @@ import warnings
7
7
  import shutil
8
8
 
9
9
 
10
- def read_og_ipa_exploratory_activity(simplified=True, verbose=False):
10
+ def read_og_ipa_exploratory_activity(simplified=False, verbose=False):
11
11
  """Download data for Oil and Gas IPA Exploratory Activity in Brazil.
12
12
 
13
13
  This function downloads, processes, and returns data for Oil and Gas IPA Exploratory Activity
@@ -45,10 +45,7 @@ def read_og_ipa_exploratory_activity(simplified=True, verbose=False):
45
45
  response.raise_for_status()
46
46
  response_json = response.json()
47
47
 
48
- if "value" not in response_json or not response_json["value"]:
49
- raise ValueError("No data found in the response")
50
-
51
- download_url = response_json["value"]["itemUrl"]
48
+ download_url = response_json['results'][0]['value']['url']
52
49
 
53
50
  if verbose:
54
51
  print(f"Download URL: {download_url}")
@@ -74,12 +71,13 @@ def read_og_ipa_exploratory_activity(simplified=True, verbose=False):
74
71
  zip_ref.extractall(temp_dir)
75
72
 
76
73
  # Find the shapefile
77
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
74
+ zip_dir = os.path.join(temp_dir, 'zipfolder')
75
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith(".shp")]
78
76
 
79
77
  if not shp_files:
80
78
  raise FileNotFoundError("No shapefile found in the downloaded zip file")
81
79
 
82
- shp_path = os.path.join(temp_dir, shp_files[0])
80
+ shp_path = os.path.join(zip_dir, shp_files[0])
83
81
 
84
82
  if verbose:
85
83
  print(f"Reading shapefile from {shp_path}")
@@ -99,7 +97,7 @@ def read_og_ipa_exploratory_activity(simplified=True, verbose=False):
99
97
  essential_cols = ["geometry"]
100
98
 
101
99
  # Add any other essential columns that exist in the dataset
102
- for col in ["CLASSE", "NOME", "DESCRICAO", "AREA_KM2"]:
100
+ for col in ["NOME", "MUNICIPIO", "UF", "ALTURA", "SITUACAO"]:
103
101
  if col in gdf.columns:
104
102
  essential_cols.append(col)
105
103
 
@@ -117,3 +115,6 @@ def read_og_ipa_exploratory_activity(simplified=True, verbose=False):
117
115
  except Exception as e:
118
116
  warnings.warn(f"Unexpected error: {e}")
119
117
  return None
118
+
119
+ if __name__ == '__main__':
120
+ read_og_ipa_exploratory_activity()
@@ -127,3 +127,6 @@ def read_og_ipa_exploratory_intensity(simplified=False, verbose=False):
127
127
 
128
128
  except Exception as e:
129
129
  raise Exception(f"Failed to download or process IPA exploratory intensity data: {str(e)}")
130
+
131
+ if __name__ == '__main__':
132
+ read_og_ipa_exploratory_intensity()
@@ -7,7 +7,7 @@ import warnings
7
7
  import shutil
8
8
 
9
9
 
10
- def read_og_ipa_need_for_knowledge(simplified=True, verbose=False):
10
+ def read_og_ipa_need_for_knowledge(simplified=False, verbose=False):
11
11
  """Download data for Oil and Gas IPA Need for Knowledge in Brazil.
12
12
 
13
13
  This function downloads, processes, and returns data for Oil and Gas IPA Need for Knowledge
@@ -45,10 +45,7 @@ def read_og_ipa_need_for_knowledge(simplified=True, verbose=False):
45
45
  response.raise_for_status()
46
46
  response_json = response.json()
47
47
 
48
- if "value" not in response_json or not response_json["value"]:
49
- raise ValueError("No data found in the response")
50
-
51
- download_url = response_json["value"]["itemUrl"]
48
+ download_url = response_json['results'][0]['value']['url']
52
49
 
53
50
  if verbose:
54
51
  print(f"Download URL: {download_url}")
@@ -74,12 +71,13 @@ def read_og_ipa_need_for_knowledge(simplified=True, verbose=False):
74
71
  zip_ref.extractall(temp_dir)
75
72
 
76
73
  # Find the shapefile
77
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
74
+ zip_dir = os.path.join(temp_dir, 'zipfolder')
75
+ shp_files = [f for f in os.listdir(zip_dir) if f.endswith(".shp")]
78
76
 
79
77
  if not shp_files:
80
78
  raise FileNotFoundError("No shapefile found in the downloaded zip file")
81
79
 
82
- shp_path = os.path.join(temp_dir, shp_files[0])
80
+ shp_path = os.path.join(zip_dir, shp_files[0])
83
81
 
84
82
  if verbose:
85
83
  print(f"Reading shapefile from {shp_path}")
@@ -99,7 +97,7 @@ def read_og_ipa_need_for_knowledge(simplified=True, verbose=False):
99
97
  essential_cols = ["geometry"]
100
98
 
101
99
  # Add any other essential columns that exist in the dataset
102
- for col in ["CLASSE", "NOME", "DESCRICAO", "AREA_KM2"]:
100
+ for col in ["NOME", "MUNICIPIO", "UF", "ALTURA", "SITUACAO"]:
103
101
  if col in gdf.columns:
104
102
  essential_cols.append(col)
105
103