tunned-geobr 0.2.2__py3-none-any.whl → 0.2.4__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- tunned_geobr/__init__.py +59 -1
- tunned_geobr/list_geobr.py +74 -3
- tunned_geobr/read_ama_anemometric_towers.py +119 -0
- tunned_geobr/read_areas_under_contract.py +129 -0
- tunned_geobr/read_biodiesel_plants.py +128 -0
- tunned_geobr/read_biomethane_plants.py +128 -0
- tunned_geobr/read_compression_stations.py +128 -0
- tunned_geobr/read_drainage_ducts.py +128 -0
- tunned_geobr/read_etanol_plants.py +128 -0
- tunned_geobr/read_existent_biomass_ute.py +128 -0
- tunned_geobr/read_existent_cgh.py +168 -0
- tunned_geobr/read_existent_eolic.py +165 -0
- tunned_geobr/read_existent_fossile_ute.py +128 -0
- tunned_geobr/read_existent_nuclear_ute.py +128 -0
- tunned_geobr/read_existent_pch.py +168 -0
- tunned_geobr/read_existent_solar.py +165 -0
- tunned_geobr/read_existent_substations.py +128 -0
- tunned_geobr/read_existent_transmission_lines.py +128 -0
- tunned_geobr/read_existent_uhe.py +168 -0
- tunned_geobr/read_exploration_production_environment.py +119 -0
- tunned_geobr/read_federal_union_areas.py +129 -0
- tunned_geobr/read_fuel_bases.py +128 -0
- tunned_geobr/read_gas_distribution_pipelines.py +128 -0
- tunned_geobr/read_gas_transport_pipelines.py +128 -0
- tunned_geobr/read_glp_bases.py +128 -0
- tunned_geobr/read_gnl_terminals.py +128 -0
- tunned_geobr/read_hydroelectric_feasibility_studies.py +119 -0
- tunned_geobr/read_hydroelectric_inventory_aai_studies.py +119 -0
- tunned_geobr/read_isolated_systems.py +128 -0
- tunned_geobr/read_natural_gas_delivery_points.py +128 -0
- tunned_geobr/read_natural_gas_processing_hub.py +128 -0
- tunned_geobr/read_og_basement.py +119 -0
- tunned_geobr/read_og_effective_geographic_basin.py +129 -0
- tunned_geobr/read_og_ipa_direct_evidence.py +119 -0
- tunned_geobr/read_og_ipa_exploratory_activity.py +119 -0
- tunned_geobr/read_og_ipa_exploratory_intensity.py +129 -0
- tunned_geobr/read_og_ipa_need_for_knowledge.py +119 -0
- tunned_geobr/read_og_ipa_prospectiveness.py +119 -0
- tunned_geobr/read_og_ipa_supply_infrastructure.py +119 -0
- tunned_geobr/read_og_legal_pre_salt_polygon.py +119 -0
- tunned_geobr/read_og_predominant_fluid_type.py +129 -0
- tunned_geobr/read_og_probabilistic_effective_basin.py +129 -0
- tunned_geobr/read_og_total_ipa.py +129 -0
- tunned_geobr/read_og_unconventional_resources.py +129 -0
- tunned_geobr/read_oil_and_derivatives_terminal.py +128 -0
- tunned_geobr/read_pan_strategic_areas 2.py +71 -0
- tunned_geobr/read_pio_ducts.py +128 -0
- tunned_geobr/read_pio_terminals.py +128 -0
- tunned_geobr/read_planned_biomass_ute.py +166 -0
- tunned_geobr/read_planned_cgh.py +166 -0
- tunned_geobr/read_planned_eolic.py +165 -0
- tunned_geobr/read_planned_fossile_ute.py +166 -0
- tunned_geobr/read_planned_nuclear_ute.py +165 -0
- tunned_geobr/read_planned_pch.py +166 -0
- tunned_geobr/read_planned_solar.py +165 -0
- tunned_geobr/read_planned_substations.py +164 -0
- tunned_geobr/read_planned_transmission_lines.py +165 -0
- tunned_geobr/read_planned_uhe.py +166 -0
- tunned_geobr/read_processing_facilities.py +128 -0
- tunned_geobr/read_sedimentary_basins.py +119 -0
- tunned_geobr/read_subsystem_interconnected.py +163 -0
- {tunned_geobr-0.2.2.dist-info → tunned_geobr-0.2.4.dist-info}/METADATA +1 -1
- {tunned_geobr-0.2.2.dist-info → tunned_geobr-0.2.4.dist-info}/RECORD +66 -8
- tunned_geobr/constants.py +0 -13
- {tunned_geobr-0.2.2.dist-info → tunned_geobr-0.2.4.dist-info}/WHEEL +0 -0
- {tunned_geobr-0.2.2.dist-info → tunned_geobr-0.2.4.dist-info}/entry_points.txt +0 -0
- {tunned_geobr-0.2.2.dist-info → tunned_geobr-0.2.4.dist-info}/licenses/LICENSE.txt +0 -0
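All of the new read_* modules added in 0.2.4 share the same calling convention shown in their docstrings below. A minimal usage sketch, assuming the functions are re-exported from the package root (which the docstring examples and the `tunned_geobr/__init__.py` change suggest):

import tunned_geobr as tg

# Full dataset, reprojected to SIRGAS 2000 (EPSG:4674) by the reader itself
terminals = tg.read_oil_and_derivatives_terminal(verbose=True)

# Only the most important columns, when they exist in the source shapefile
ducts = tg.read_pio_ducts(simplified=True)
ducts.plot()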
tunned_geobr/read_og_unconventional_resources.py
@@ -0,0 +1,129 @@
+import geopandas as gpd
+import requests
+import shutil
+import zipfile
+import tempfile
+import warnings
+import os
+from shapely.geometry.point import Point
+
+
+def read_og_unconventional_resources(simplified=False, verbose=False):
+    """Download data of unconventional oil and gas resources in Brazil.
+
+    This function downloads and returns data of unconventional oil and gas resources
+    (Recursos Não Convencionais) in Brazil as a GeoPandas GeoDataFrame.
+    The data comes from EPE (Energy Research Company).
+
+    Parameters
+    ----------
+    simplified : bool, optional
+        If True, returns a simplified version of the dataset with only the most
+        important columns. If False, returns the complete dataset. Default is False.
+    verbose : bool, optional
+        If True, displays detailed messages about the download and processing
+        steps. Default is False.
+
+    Returns
+    -------
+    gpd.GeoDataFrame
+        A GeoDataFrame containing data on unconventional oil and gas resources in Brazil.
+
+    Raises
+    ------
+    Exception
+        If the download or processing of the data fails.
+
+    Example
+    -------
+    >>> from tunned_geobr import read_og_unconventional_resources
+    >>>
+    >>> # Read the data
+    >>> unconventional = read_og_unconventional_resources()
+    >>>
+    >>> # Plot the data
+    >>> unconventional.plot()
+    """
+
+    if verbose:
+        print("Downloading data of unconventional oil and gas resources in Brazil")
+
+    # Define the URL for the API request
+    url = "https://gisepeprd2.epe.gov.br/arcgis/rest/services/Download_Dados_Webmap_EPE/GPServer/Extract%20Data%20Task/execute?f=json&env%3AoutSR=102100&Layers_to_Clip=%5B%22Recursos%20N%C3%A3o%20Convencionais%22%5D&Area_of_Interest=%7B%22geometryType%22%3A%22esriGeometryPolygon%22%2C%22features%22%3A%5B%7B%22geometry%22%3A%7B%22rings%22%3A%5B%5B%5B-9237395.881983705%2C-4650539.310904562%5D%2C%5B-9237395.881983705%2C1219824.4613954136%5D%2C%5B-2349502.3891517334%2C1219824.4613954136%5D%2C%5B-2349502.3891517334%2C-4650539.310904562%5D%2C%5B-9237395.881983705%2C-4650539.310904562%5D%5D%5D%2C%22spatialReference%22%3A%7B%22wkid%22%3A102100%7D%7D%7D%5D%2C%22sr%22%3A%7B%22wkid%22%3A102100%7D%7D&Feature_Format=Shapefile%20-%20SHP%20-%20.shp&Raster_Format=Tagged%20Image%20File%20Format%20-%20TIFF%20-%20.tif"
+
+    try:
+        # Make the API request
+        response = requests.get(url)
+        response.raise_for_status()
+
+        # Parse the JSON response
+        data = response.json()
+
+        # Extract the URL for the zip file
+        if 'results' in data and len(data['results']) > 0 and 'value' in data['results'][0]:
+            download_url = data['results'][0]['value']['url']
+        else:
+            raise Exception("Failed to extract download URL from API response")
+
+        # Create a temporary directory to store the downloaded files
+        with tempfile.TemporaryDirectory() as temp_dir:
+            # Download the zip file
+            zip_path = os.path.join(temp_dir, "og_unconventional_resources.zip")
+            if verbose:
+                print("Downloading zip file")
+
+            response = requests.get(download_url, stream=True)
+            response.raise_for_status()
+
+            with open(zip_path, 'wb') as f:
+                response.raw.decode_content = True
+                shutil.copyfileobj(response.raw, f)
+
+            # Extract the zip file
+            if verbose:
+                print("Extracting files")
+
+            with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+                zip_ref.extractall(temp_dir)
+
+            # Find the shapefile in the extracted files
+            shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
+
+            if not shp_files:
+                raise Exception("No shapefile found in the downloaded zip file")
+
+            # Read the shapefile
+            if verbose:
+                print("Reading shapefile")
+
+            shp_path = os.path.join(temp_dir, shp_files[0])
+            gdf = gpd.read_file(shp_path)
+
+            # Convert to SIRGAS 2000 (EPSG:4674)
+            if verbose:
+                print("Converting to SIRGAS 2000 (EPSG:4674)")
+
+            gdf = gdf.to_crs(epsg=4674)
+
+            # Simplify the dataset if requested
+            if simplified:
+                if verbose:
+                    print("Simplifying the dataset")
+
+                # Select only the most important columns
+                # Adjust these columns based on the actual data structure
+                cols_to_keep = ['TIPO', 'BACIA', 'FORMACAO', 'UF', 'geometry']
+                cols_available = [col for col in cols_to_keep if col in gdf.columns]
+
+                if not cols_available:
+                    warnings.warn("None of the specified columns for simplification are available. Returning the full dataset.")
+                else:
+                    gdf = gdf[cols_available]
+
+            if verbose:
+                print("Finished processing unconventional oil and gas resources data")
+
+            return gdf
+
+    except Exception as e:
+        raise Exception(f"Failed to download or process unconventional oil and gas resources data: {str(e)}")
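All of the EPE-backed readers added in this release repeat the same flow shown above: call the ArcGIS "Extract Data Task" endpoint for a single layer, follow the zip URL returned in the JSON response, read the extracted shapefile, and reproject it to SIRGAS 2000. A hypothetical helper illustrating that shared pattern (the name `_download_epe_layer` is illustrative only; the package keeps this logic inlined in each module):

import os
import shutil
import tempfile
import zipfile

import geopandas as gpd
import requests


def _download_epe_layer(extract_task_url):
    """Illustrative sketch of the shared EPE download pattern used by the readers above."""
    response = requests.get(extract_task_url)
    response.raise_for_status()
    download_url = response.json()['results'][0]['value']['url']

    with tempfile.TemporaryDirectory() as temp_dir:
        zip_path = os.path.join(temp_dir, "layer.zip")
        with requests.get(download_url, stream=True) as r:
            r.raise_for_status()
            r.raw.decode_content = True
            with open(zip_path, 'wb') as f:
                shutil.copyfileobj(r.raw, f)

        with zipfile.ZipFile(zip_path) as zf:
            zf.extractall(temp_dir)

        shp = next(f for f in os.listdir(temp_dir) if f.endswith('.shp'))
        # Read into memory and reproject before the temporary directory is removed
        return gpd.read_file(os.path.join(temp_dir, shp)).to_crs(epsg=4674)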
tunned_geobr/read_oil_and_derivatives_terminal.py
@@ -0,0 +1,128 @@
+import geopandas as gpd
+import requests
+import shutil
+import zipfile
+import tempfile
+import warnings
+import os
+from shapely.geometry.point import Point
+
+
+def read_oil_and_derivatives_terminal(simplified=False, verbose=False):
+    """Download data of oil and derivatives terminals in Brazil.
+
+    This function downloads and returns data of oil and derivatives terminals
+    in Brazil as a GeoPandas GeoDataFrame. The data comes from EPE (Energy Research Company).
+
+    Parameters
+    ----------
+    simplified : bool, optional
+        If True, returns a simplified version of the dataset with only the most
+        important columns. If False, returns the complete dataset. Default is False.
+    verbose : bool, optional
+        If True, displays detailed messages about the download and processing
+        steps. Default is False.
+
+    Returns
+    -------
+    gpd.GeoDataFrame
+        A GeoDataFrame containing data on oil and derivatives terminals in Brazil.
+
+    Raises
+    ------
+    Exception
+        If the download or processing of the data fails.
+
+    Example
+    -------
+    >>> from tunned_geobr import read_oil_and_derivatives_terminal
+    >>>
+    >>> # Read the data
+    >>> oil_terminals = read_oil_and_derivatives_terminal()
+    >>>
+    >>> # Plot the data
+    >>> oil_terminals.plot()
+    """
+
+    if verbose:
+        print("Downloading data of oil and derivatives terminals in Brazil")
+
+    # Define the URL for the API request
+    url = "https://gisepeprd2.epe.gov.br/arcgis/rest/services/Download_Dados_Webmap_EPE/GPServer/Extract%20Data%20Task/execute?f=json&env%3AoutSR=102100&Layers_to_Clip=%5B%22Terminais%20de%20Petr%C3%B3leo%20e%20Derivados%22%5D&Area_of_Interest=%7B%22geometryType%22%3A%22esriGeometryPolygon%22%2C%22features%22%3A%5B%7B%22geometry%22%3A%7B%22rings%22%3A%5B%5B%5B-8655251.47456396%2C-4787514.465591563%5D%2C%5B-8655251.47456396%2C1229608.401015912%5D%2C%5B-3508899.2341809804%2C1229608.401015912%5D%2C%5B-3508899.2341809804%2C-4787514.465591563%5D%2C%5B-8655251.47456396%2C-4787514.465591563%5D%5D%5D%2C%22spatialReference%22%3A%7B%22wkid%22%3A102100%7D%7D%7D%5D%2C%22sr%22%3A%7B%22wkid%22%3A102100%7D%7D&Feature_Format=Shapefile%20-%20SHP%20-%20.shp&Raster_Format=Tagged%20Image%20File%20Format%20-%20TIFF%20-%20.tif"
+
+    try:
+        # Make the API request
+        response = requests.get(url)
+        response.raise_for_status()
+
+        # Parse the JSON response
+        data = response.json()
+
+        # Extract the URL for the zip file
+        if 'results' in data and len(data['results']) > 0 and 'value' in data['results'][0]:
+            download_url = data['results'][0]['value']['url']
+        else:
+            raise Exception("Failed to extract download URL from API response")
+
+        # Create a temporary directory to store the downloaded files
+        with tempfile.TemporaryDirectory() as temp_dir:
+            # Download the zip file
+            zip_path = os.path.join(temp_dir, "oil_terminals.zip")
+            if verbose:
+                print("Downloading zip file")
+
+            response = requests.get(download_url, stream=True)
+            response.raise_for_status()
+
+            with open(zip_path, 'wb') as f:
+                response.raw.decode_content = True
+                shutil.copyfileobj(response.raw, f)
+
+            # Extract the zip file
+            if verbose:
+                print("Extracting files")
+
+            with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+                zip_ref.extractall(temp_dir)
+
+            # Find the shapefile in the extracted files
+            shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
+
+            if not shp_files:
+                raise Exception("No shapefile found in the downloaded zip file")
+
+            # Read the shapefile
+            if verbose:
+                print("Reading shapefile")
+
+            shp_path = os.path.join(temp_dir, shp_files[0])
+            gdf = gpd.read_file(shp_path)
+
+            # Convert to SIRGAS 2000 (EPSG:4674)
+            if verbose:
+                print("Converting to SIRGAS 2000 (EPSG:4674)")
+
+            gdf = gdf.to_crs(epsg=4674)
+
+            # Simplify the dataset if requested
+            if simplified:
+                if verbose:
+                    print("Simplifying the dataset")
+
+                # Select only the most important columns
+                # Adjust these columns based on the actual data structure
+                cols_to_keep = ['NOME', 'EMPRESA', 'TIPO', 'CAPACIDADE', 'UF', 'MUNICIPIO', 'geometry']
+                cols_available = [col for col in cols_to_keep if col in gdf.columns]
+
+                if not cols_available:
+                    warnings.warn("None of the specified columns for simplification are available. Returning the full dataset.")
+                else:
+                    gdf = gdf[cols_available]
+
+            if verbose:
+                print("Finished processing oil and derivatives terminals data")
+
+            return gdf
+
+    except Exception as e:
+        raise Exception(f"Failed to download or process oil and derivatives terminals data: {str(e)}")
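A short usage sketch for this reader, based on its docstring and the conversion done inside the function (the column subset depends on which of the listed attributes actually exist in the source shapefile):

from tunned_geobr import read_oil_and_derivatives_terminal

terminals = read_oil_and_derivatives_terminal(simplified=True, verbose=True)
print(terminals.crs)            # EPSG:4674 (SIRGAS 2000), set by the reader
print(list(terminals.columns))  # subset of NOME, EMPRESA, TIPO, CAPACIDADE, UF, MUNICIPIO, geometry
terminals.plot()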
tunned_geobr/read_pan_strategic_areas 2.py
@@ -0,0 +1,71 @@
+import geopandas as gpd
+import requests
+from io import BytesIO
+
+def read_pan_strategic_areas(simplified=False):
+    """Download ICMBio's Strategic Areas data.
+
+    This function downloads and processes the Strategic Areas data from ICMBio
+    (Chico Mendes Institute for Biodiversity Conservation) using their WFS service.
+    The data includes strategic areas for biodiversity conservation planning.
+    Original source: ICMBio - Instituto Chico Mendes de Conservação da Biodiversidade
+
+    Parameters
+    ----------
+    simplified : boolean, by default False
+        If True, returns a simplified version of the dataset with fewer columns
+
+    Returns
+    -------
+    gpd.GeoDataFrame
+        Geodataframe with ICMBio's strategic areas data
+
+    Example
+    -------
+    >>> from tunned_geobr import read_pan_strategic_areas
+
+    # Read strategic areas data
+    >>> strategic_areas = read_pan_strategic_areas()
+    """
+
+    url = "https://geoservicos.inde.gov.br/geoserver/ICMBio/ows?request=GetFeature&service=WFS&version=1.0.0&typeName=ICMBio:pan_icmbio_areas_estrat_052024_a&outputFormat=json"
+
+    try:
+        # Download the GeoJSON data
+        response = requests.get(url)
+        if response.status_code != 200:
+            raise Exception("Failed to download strategic areas data from ICMBio WFS")
+
+        # Read the GeoJSON directly into a GeoDataFrame
+        gdf = gpd.read_file(BytesIO(response.content))
+
+        # Convert to SIRGAS 2000 (EPSG:4674) if not already
+        if gdf.crs is None or gdf.crs.to_epsg() != 4674:
+            gdf = gdf.to_crs(4674)
+
+        if simplified:
+            # Keep only the most relevant columns
+            # Note: Column names may need adjustment based on actual data
+            columns_to_keep = [
+                'geometry',
+                'nome',  # Area name
+                'tipo',  # Type of strategic area
+                'bioma',  # Biome
+                'uf',  # State
+                'area_ha',  # Area in hectares
+                'descricao',  # Description
+                'importancia',  # Importance
+                'data_criacao'  # Creation date
+            ]
+
+            # Filter columns that actually exist in the dataset
+            existing_columns = ['geometry'] + [col for col in columns_to_keep[1:] if col in gdf.columns]
+            gdf = gdf[existing_columns]
+
+    except Exception as e:
+        raise Exception(f"Error downloading strategic areas data: {str(e)}")
+
+    return gdf
+
+if __name__ == '__main__':
+    read_pan_strategic_areas()
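Unlike the EPE readers, this module pulls GeoJSON from a GeoServer WFS endpoint rather than a zipped shapefile. A minimal sketch of the same request done directly with geopandas, assuming GDAL/geopandas can fetch the URL itself (which is essentially what the function does after its explicit requests call):

import geopandas as gpd

wfs_url = (
    "https://geoservicos.inde.gov.br/geoserver/ICMBio/ows"
    "?request=GetFeature&service=WFS&version=1.0.0"
    "&typeName=ICMBio:pan_icmbio_areas_estrat_052024_a&outputFormat=json"
)

# Fetch the GeoJSON and reproject to SIRGAS 2000, as the reader does
strategic_areas = gpd.read_file(wfs_url).to_crs(4674)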
tunned_geobr/read_pio_ducts.py
@@ -0,0 +1,128 @@
+import geopandas as gpd
+import requests
+import shutil
+import zipfile
+import tempfile
+import warnings
+import os
+from shapely.geometry.point import Point
+
+
+def read_pio_ducts(simplified=False, verbose=False):
+    """Download data of PIO (Oil Products and Other Liquids) ducts in Brazil.
+
+    This function downloads and returns data of PIO ducts
+    in Brazil as a GeoPandas GeoDataFrame. The data comes from EPE (Energy Research Company).
+
+    Parameters
+    ----------
+    simplified : bool, optional
+        If True, returns a simplified version of the dataset with only the most
+        important columns. If False, returns the complete dataset. Default is False.
+    verbose : bool, optional
+        If True, displays detailed messages about the download and processing
+        steps. Default is False.
+
+    Returns
+    -------
+    gpd.GeoDataFrame
+        A GeoDataFrame containing data on PIO ducts in Brazil.
+
+    Raises
+    ------
+    Exception
+        If the download or processing of the data fails.
+
+    Example
+    -------
+    >>> from tunned_geobr import read_pio_ducts
+    >>>
+    >>> # Read the data
+    >>> pio_ducts = read_pio_ducts()
+    >>>
+    >>> # Plot the data
+    >>> pio_ducts.plot()
+    """
+
+    if verbose:
+        print("Downloading data of PIO ducts in Brazil")
+
+    # Define the URL for the API request
+    url = "https://gisepeprd2.epe.gov.br/arcgis/rest/services/Download_Dados_Webmap_EPE/GPServer/Extract%20Data%20Task/execute?f=json&env%3AoutSR=102100&Layers_to_Clip=%5B%22Dutos%20PIO%22%5D&Area_of_Interest=%7B%22geometryType%22%3A%22esriGeometryPolygon%22%2C%22features%22%3A%5B%7B%22geometry%22%3A%7B%22rings%22%3A%5B%5B%5B-8655251.47456396%2C-4787514.465591563%5D%2C%5B-8655251.47456396%2C1229608.401015912%5D%2C%5B-3508899.2341809804%2C1229608.401015912%5D%2C%5B-3508899.2341809804%2C-4787514.465591563%5D%2C%5B-8655251.47456396%2C-4787514.465591563%5D%5D%5D%2C%22spatialReference%22%3A%7B%22wkid%22%3A102100%7D%7D%7D%5D%2C%22sr%22%3A%7B%22wkid%22%3A102100%7D%7D&Feature_Format=Shapefile%20-%20SHP%20-%20.shp&Raster_Format=Tagged%20Image%20File%20Format%20-%20TIFF%20-%20.tif"
+
+    try:
+        # Make the API request
+        response = requests.get(url)
+        response.raise_for_status()
+
+        # Parse the JSON response
+        data = response.json()
+
+        # Extract the URL for the zip file
+        if 'results' in data and len(data['results']) > 0 and 'value' in data['results'][0]:
+            download_url = data['results'][0]['value']['url']
+        else:
+            raise Exception("Failed to extract download URL from API response")
+
+        # Create a temporary directory to store the downloaded files
+        with tempfile.TemporaryDirectory() as temp_dir:
+            # Download the zip file
+            zip_path = os.path.join(temp_dir, "pio_ducts.zip")
+            if verbose:
+                print("Downloading zip file")
+
+            response = requests.get(download_url, stream=True)
+            response.raise_for_status()
+
+            with open(zip_path, 'wb') as f:
+                response.raw.decode_content = True
+                shutil.copyfileobj(response.raw, f)
+
+            # Extract the zip file
+            if verbose:
+                print("Extracting files")
+
+            with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+                zip_ref.extractall(temp_dir)
+
+            # Find the shapefile in the extracted files
+            shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
+
+            if not shp_files:
+                raise Exception("No shapefile found in the downloaded zip file")
+
+            # Read the shapefile
+            if verbose:
+                print("Reading shapefile")
+
+            shp_path = os.path.join(temp_dir, shp_files[0])
+            gdf = gpd.read_file(shp_path)
+
+            # Convert to SIRGAS 2000 (EPSG:4674)
+            if verbose:
+                print("Converting to SIRGAS 2000 (EPSG:4674)")
+
+            gdf = gdf.to_crs(epsg=4674)
+
+            # Simplify the dataset if requested
+            if simplified:
+                if verbose:
+                    print("Simplifying the dataset")
+
+                # Select only the most important columns
+                # Adjust these columns based on the actual data structure
+                cols_to_keep = ['NOME', 'EMPRESA', 'EXTENSAO', 'DIAMETRO', 'UF', 'geometry']
+                cols_available = [col for col in cols_to_keep if col in gdf.columns]
+
+                if not cols_available:
+                    warnings.warn("None of the specified columns for simplification are available. Returning the full dataset.")
+                else:
+                    gdf = gdf[cols_available]
+
+            if verbose:
+                print("Finished processing PIO ducts data")
+
+            return gdf
+
+    except Exception as e:
+        raise Exception(f"Failed to download or process PIO ducts data: {str(e)}")
tunned_geobr/read_pio_terminals.py
@@ -0,0 +1,128 @@
+import geopandas as gpd
+import requests
+import shutil
+import zipfile
+import tempfile
+import warnings
+import os
+from shapely.geometry.point import Point
+
+
+def read_pio_terminals(simplified=False, verbose=False):
+    """Download data of PIO (Oil Products and Other Liquids) terminals in Brazil.
+
+    This function downloads and returns data of PIO terminals
+    in Brazil as a GeoPandas GeoDataFrame. The data comes from EPE (Energy Research Company).
+
+    Parameters
+    ----------
+    simplified : bool, optional
+        If True, returns a simplified version of the dataset with only the most
+        important columns. If False, returns the complete dataset. Default is False.
+    verbose : bool, optional
+        If True, displays detailed messages about the download and processing
+        steps. Default is False.
+
+    Returns
+    -------
+    gpd.GeoDataFrame
+        A GeoDataFrame containing data on PIO terminals in Brazil.
+
+    Raises
+    ------
+    Exception
+        If the download or processing of the data fails.
+
+    Example
+    -------
+    >>> from tunned_geobr import read_pio_terminals
+    >>>
+    >>> # Read the data
+    >>> pio_terminals = read_pio_terminals()
+    >>>
+    >>> # Plot the data
+    >>> pio_terminals.plot()
+    """
+
+    if verbose:
+        print("Downloading data of PIO terminals in Brazil")
+
+    # Define the URL for the API request
+    url = "https://gisepeprd2.epe.gov.br/arcgis/rest/services/Download_Dados_Webmap_EPE/GPServer/Extract%20Data%20Task/execute?f=json&env%3AoutSR=102100&Layers_to_Clip=%5B%22Terminais%20PIO%22%5D&Area_of_Interest=%7B%22geometryType%22%3A%22esriGeometryPolygon%22%2C%22features%22%3A%5B%7B%22geometry%22%3A%7B%22rings%22%3A%5B%5B%5B-8655251.47456396%2C-4787514.465591563%5D%2C%5B-8655251.47456396%2C1229608.401015912%5D%2C%5B-3508899.2341809804%2C1229608.401015912%5D%2C%5B-3508899.2341809804%2C-4787514.465591563%5D%2C%5B-8655251.47456396%2C-4787514.465591563%5D%5D%5D%2C%22spatialReference%22%3A%7B%22wkid%22%3A102100%7D%7D%7D%5D%2C%22sr%22%3A%7B%22wkid%22%3A102100%7D%7D&Feature_Format=Shapefile%20-%20SHP%20-%20.shp&Raster_Format=Tagged%20Image%20File%20Format%20-%20TIFF%20-%20.tif"
+
+    try:
+        # Make the API request
+        response = requests.get(url)
+        response.raise_for_status()
+
+        # Parse the JSON response
+        data = response.json()
+
+        # Extract the URL for the zip file
+        if 'results' in data and len(data['results']) > 0 and 'value' in data['results'][0]:
+            download_url = data['results'][0]['value']['url']
+        else:
+            raise Exception("Failed to extract download URL from API response")
+
+        # Create a temporary directory to store the downloaded files
+        with tempfile.TemporaryDirectory() as temp_dir:
+            # Download the zip file
+            zip_path = os.path.join(temp_dir, "pio_terminals.zip")
+            if verbose:
+                print("Downloading zip file")
+
+            response = requests.get(download_url, stream=True)
+            response.raise_for_status()
+
+            with open(zip_path, 'wb') as f:
+                response.raw.decode_content = True
+                shutil.copyfileobj(response.raw, f)
+
+            # Extract the zip file
+            if verbose:
+                print("Extracting files")
+
+            with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+                zip_ref.extractall(temp_dir)
+
+            # Find the shapefile in the extracted files
+            shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
+
+            if not shp_files:
+                raise Exception("No shapefile found in the downloaded zip file")
+
+            # Read the shapefile
+            if verbose:
+                print("Reading shapefile")
+
+            shp_path = os.path.join(temp_dir, shp_files[0])
+            gdf = gpd.read_file(shp_path)
+
+            # Convert to SIRGAS 2000 (EPSG:4674)
+            if verbose:
+                print("Converting to SIRGAS 2000 (EPSG:4674)")
+
+            gdf = gdf.to_crs(epsg=4674)
+
+            # Simplify the dataset if requested
+            if simplified:
+                if verbose:
+                    print("Simplifying the dataset")
+
+                # Select only the most important columns
+                # Adjust these columns based on the actual data structure
+                cols_to_keep = ['NOME', 'EMPRESA', 'TIPO', 'CAPACIDADE', 'UF', 'MUNICIPIO', 'geometry']
+                cols_available = [col for col in cols_to_keep if col in gdf.columns]
+
+                if not cols_available:
+                    warnings.warn("None of the specified columns for simplification are available. Returning the full dataset.")
+                else:
+                    gdf = gdf[cols_available]
+
+            if verbose:
+                print("Finished processing PIO terminals data")
+
+            return gdf
+
+    except Exception as e:
+        raise Exception(f"Failed to download or process PIO terminals data: {str(e)}")
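Because both PIO readers reproject to EPSG:4674, their outputs can be overlaid directly. A usage sketch based on the docstrings above (it assumes the ducts layer carries line geometries and the terminals layer point geometries, which the diff does not confirm):

import matplotlib.pyplot as plt
from tunned_geobr import read_pio_ducts, read_pio_terminals

ducts = read_pio_ducts(simplified=True)
terminals = read_pio_terminals(simplified=True)

# Plot ducts first, then draw terminals on the same axes
ax = ducts.plot(color="steelblue", linewidth=0.8)
terminals.plot(ax=ax, color="crimson", markersize=10)
plt.show()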