tunned-geobr 0.2.2__py3-none-any.whl → 0.2.4__py3-none-any.whl
This diff compares publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
- tunned_geobr/__init__.py +59 -1
- tunned_geobr/list_geobr.py +74 -3
- tunned_geobr/read_ama_anemometric_towers.py +119 -0
- tunned_geobr/read_areas_under_contract.py +129 -0
- tunned_geobr/read_biodiesel_plants.py +128 -0
- tunned_geobr/read_biomethane_plants.py +128 -0
- tunned_geobr/read_compression_stations.py +128 -0
- tunned_geobr/read_drainage_ducts.py +128 -0
- tunned_geobr/read_etanol_plants.py +128 -0
- tunned_geobr/read_existent_biomass_ute.py +128 -0
- tunned_geobr/read_existent_cgh.py +168 -0
- tunned_geobr/read_existent_eolic.py +165 -0
- tunned_geobr/read_existent_fossile_ute.py +128 -0
- tunned_geobr/read_existent_nuclear_ute.py +128 -0
- tunned_geobr/read_existent_pch.py +168 -0
- tunned_geobr/read_existent_solar.py +165 -0
- tunned_geobr/read_existent_substations.py +128 -0
- tunned_geobr/read_existent_transmission_lines.py +128 -0
- tunned_geobr/read_existent_uhe.py +168 -0
- tunned_geobr/read_exploration_production_environment.py +119 -0
- tunned_geobr/read_federal_union_areas.py +129 -0
- tunned_geobr/read_fuel_bases.py +128 -0
- tunned_geobr/read_gas_distribution_pipelines.py +128 -0
- tunned_geobr/read_gas_transport_pipelines.py +128 -0
- tunned_geobr/read_glp_bases.py +128 -0
- tunned_geobr/read_gnl_terminals.py +128 -0
- tunned_geobr/read_hydroelectric_feasibility_studies.py +119 -0
- tunned_geobr/read_hydroelectric_inventory_aai_studies.py +119 -0
- tunned_geobr/read_isolated_systems.py +128 -0
- tunned_geobr/read_natural_gas_delivery_points.py +128 -0
- tunned_geobr/read_natural_gas_processing_hub.py +128 -0
- tunned_geobr/read_og_basement.py +119 -0
- tunned_geobr/read_og_effective_geographic_basin.py +129 -0
- tunned_geobr/read_og_ipa_direct_evidence.py +119 -0
- tunned_geobr/read_og_ipa_exploratory_activity.py +119 -0
- tunned_geobr/read_og_ipa_exploratory_intensity.py +129 -0
- tunned_geobr/read_og_ipa_need_for_knowledge.py +119 -0
- tunned_geobr/read_og_ipa_prospectiveness.py +119 -0
- tunned_geobr/read_og_ipa_supply_infrastructure.py +119 -0
- tunned_geobr/read_og_legal_pre_salt_polygon.py +119 -0
- tunned_geobr/read_og_predominant_fluid_type.py +129 -0
- tunned_geobr/read_og_probabilistic_effective_basin.py +129 -0
- tunned_geobr/read_og_total_ipa.py +129 -0
- tunned_geobr/read_og_unconventional_resources.py +129 -0
- tunned_geobr/read_oil_and_derivatives_terminal.py +128 -0
- tunned_geobr/read_pan_strategic_areas 2.py +71 -0
- tunned_geobr/read_pio_ducts.py +128 -0
- tunned_geobr/read_pio_terminals.py +128 -0
- tunned_geobr/read_planned_biomass_ute.py +166 -0
- tunned_geobr/read_planned_cgh.py +166 -0
- tunned_geobr/read_planned_eolic.py +165 -0
- tunned_geobr/read_planned_fossile_ute.py +166 -0
- tunned_geobr/read_planned_nuclear_ute.py +165 -0
- tunned_geobr/read_planned_pch.py +166 -0
- tunned_geobr/read_planned_solar.py +165 -0
- tunned_geobr/read_planned_substations.py +164 -0
- tunned_geobr/read_planned_transmission_lines.py +165 -0
- tunned_geobr/read_planned_uhe.py +166 -0
- tunned_geobr/read_processing_facilities.py +128 -0
- tunned_geobr/read_sedimentary_basins.py +119 -0
- tunned_geobr/read_subsystem_interconnected.py +163 -0
- {tunned_geobr-0.2.2.dist-info → tunned_geobr-0.2.4.dist-info}/METADATA +1 -1
- {tunned_geobr-0.2.2.dist-info → tunned_geobr-0.2.4.dist-info}/RECORD +66 -8
- tunned_geobr/constants.py +0 -13
- {tunned_geobr-0.2.2.dist-info → tunned_geobr-0.2.4.dist-info}/WHEEL +0 -0
- {tunned_geobr-0.2.2.dist-info → tunned_geobr-0.2.4.dist-info}/entry_points.txt +0 -0
- {tunned_geobr-0.2.2.dist-info → tunned_geobr-0.2.4.dist-info}/licenses/LICENSE.txt +0 -0
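The new modules listed above follow the package's existing geobr-style reader API: each exposes a `read_*` function that returns a GeoDataFrame in SIRGAS 2000 (EPSG:4674). A minimal usage sketch is shown below; it assumes tunned-geobr 0.2.4 is installed, and the import style follows the docstring examples in the hunks that follow.

```python
# Usage sketch only; the function name is taken from the file list above and
# the docstring examples in the diff hunks below.
from tunned_geobr import read_og_predominant_fluid_type

fluid_types = read_og_predominant_fluid_type(simplified=True, verbose=True)
print(fluid_types.head())
```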
tunned_geobr/read_og_ipa_supply_infrastructure.py
@@ -0,0 +1,119 @@
+import geopandas as gpd
+import requests
+import zipfile
+import tempfile
+import os
+import warnings
+import shutil
+
+
+def read_og_ipa_supply_infrastructure(simplified=True, verbose=False):
+    """Download data for Oil and Gas IPA Supply Infrastructure in Brazil.
+
+    This function downloads, processes, and returns data for Oil and Gas IPA Supply Infrastructure
+    in Brazil as a geopandas GeoDataFrame.
+
+    Parameters
+    ----------
+    simplified : bool, optional
+        If True, returns a simplified version of the dataset with only essential columns.
+        If False, returns the complete dataset with all columns.
+        Default is True.
+    verbose : bool, optional
+        If True, prints detailed information about the data download and processing.
+        Default is False.
+
+    Returns
+    -------
+    geopandas.GeoDataFrame
+        A GeoDataFrame containing Oil and Gas IPA Supply Infrastructure data.
+
+    Examples
+    --------
+    >>> # Download Oil and Gas IPA Supply Infrastructure data
+    >>> df = read_og_ipa_supply_infrastructure()
+    >>> df.head()
+    """
+
+    url = "https://gisepeprd2.epe.gov.br/arcgis/rest/services/Download_Dados_Webmap_EPE/GPServer/Extract%20Data%20Task/execute?f=json&env%3AoutSR=102100&Layers_to_Clip=%5B%22IPA%20Infraestrutura%20de%20Abastecimento%22%5D&Area_of_Interest=%7B%22geometryType%22%3A%22esriGeometryPolygon%22%2C%22features%22%3A%5B%7B%22geometry%22%3A%7B%22rings%22%3A%5B%5B%5B-9237395.881983705%2C-4650539.310904562%5D%2C%5B-9237395.881983705%2C1219824.4613954136%5D%2C%5B-2349502.3891517334%2C1219824.4613954136%5D%2C%5B-2349502.3891517334%2C-4650539.310904562%5D%2C%5B-9237395.881983705%2C-4650539.310904562%5D%5D%5D%2C%22spatialReference%22%3A%7B%22wkid%22%3A102100%7D%7D%7D%5D%2C%22sr%22%3A%7B%22wkid%22%3A102100%7D%7D&Feature_Format=Shapefile%20-%20SHP%20-%20.shp&Raster_Format=Tagged%20Image%20File%20Format%20-%20TIFF%20-%20.tif"
+
+    if verbose:
+        print("Downloading data...")
+
+    try:
+        response = requests.get(url)
+        response.raise_for_status()
+        response_json = response.json()
+
+        if "value" not in response_json or not response_json["value"]:
+            raise ValueError("No data found in the response")
+
+        download_url = response_json["value"]["itemUrl"]
+
+        if verbose:
+            print(f"Download URL: {download_url}")
+            print("Downloading zip file...")
+
+        zip_response = requests.get(download_url)
+        zip_response.raise_for_status()
+
+        # Create a temporary directory to extract the files
+        with tempfile.TemporaryDirectory() as temp_dir:
+            zip_path = os.path.join(temp_dir, "data.zip")
+
+            # Save the zip file
+            with open(zip_path, "wb") as f:
+                f.write(zip_response.content)
+
+            if verbose:
+                print(f"Zip file saved to {zip_path}")
+                print("Extracting files...")
+
+            # Extract the zip file
+            with zipfile.ZipFile(zip_path, "r") as zip_ref:
+                zip_ref.extractall(temp_dir)
+
+            # Find the shapefile
+            shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
+
+            if not shp_files:
+                raise FileNotFoundError("No shapefile found in the downloaded zip file")
+
+            shp_path = os.path.join(temp_dir, shp_files[0])
+
+            if verbose:
+                print(f"Reading shapefile from {shp_path}")
+
+            # Read the shapefile
+            gdf = gpd.read_file(shp_path)
+
+            # Convert to SIRGAS 2000 (EPSG:4674)
+            gdf = gdf.to_crs(epsg=4674)
+
+            if simplified:
+                # Select only essential columns
+                if verbose:
+                    print("Simplifying the dataset...")
+
+                # Identify the essential columns
+                essential_cols = ["geometry"]
+
+                # Add any other essential columns that exist in the dataset
+                for col in ["CLASSE", "NOME", "DESCRICAO", "AREA_KM2"]:
+                    if col in gdf.columns:
+                        essential_cols.append(col)
+
+                # Select only the essential columns
+                gdf = gdf[essential_cols]
+
+            return gdf
+
+    except requests.exceptions.RequestException as e:
+        warnings.warn(f"Error downloading data: {e}")
+        return None
+    except (ValueError, FileNotFoundError, zipfile.BadZipFile) as e:
+        warnings.warn(f"Error processing data: {e}")
+        return None
+    except Exception as e:
+        warnings.warn(f"Unexpected error: {e}")
+        return None
tunned_geobr/read_og_legal_pre_salt_polygon.py
@@ -0,0 +1,119 @@
+import geopandas as gpd
+import requests
+import zipfile
+import tempfile
+import os
+import warnings
+import shutil
+
+
+def read_og_legal_pre_salt_polygon(simplified=True, verbose=False):
+    """Download data for Oil and Gas Legal Pre-Salt Polygon in Brazil.
+
+    This function downloads, processes, and returns data for Oil and Gas Legal Pre-Salt Polygon
+    in Brazil as a geopandas GeoDataFrame.
+
+    Parameters
+    ----------
+    simplified : bool, optional
+        If True, returns a simplified version of the dataset with only essential columns.
+        If False, returns the complete dataset with all columns.
+        Default is True.
+    verbose : bool, optional
+        If True, prints detailed information about the data download and processing.
+        Default is False.
+
+    Returns
+    -------
+    geopandas.GeoDataFrame
+        A GeoDataFrame containing Oil and Gas Legal Pre-Salt Polygon data.
+
+    Examples
+    --------
+    >>> # Download Oil and Gas Legal Pre-Salt Polygon data
+    >>> df = read_og_legal_pre_salt_polygon()
+    >>> df.head()
+    """
+
+    url = "https://gisepeprd2.epe.gov.br/arcgis/rest/services/Download_Dados_Webmap_EPE/GPServer/Extract%20Data%20Task/execute?f=json&env%3AoutSR=102100&Layers_to_Clip=%5B%22Pol%C3%ADgono%20do%20Pr%C3%A9%20Sal%20Legal%22%5D&Area_of_Interest=%7B%22geometryType%22%3A%22esriGeometryPolygon%22%2C%22features%22%3A%5B%7B%22geometry%22%3A%7B%22rings%22%3A%5B%5B%5B-9237395.881983705%2C-4650539.310904562%5D%2C%5B-9237395.881983705%2C1219824.4613954136%5D%2C%5B-2349502.3891517334%2C1219824.4613954136%5D%2C%5B-2349502.3891517334%2C-4650539.310904562%5D%2C%5B-9237395.881983705%2C-4650539.310904562%5D%5D%5D%2C%22spatialReference%22%3A%7B%22wkid%22%3A102100%7D%7D%7D%5D%2C%22sr%22%3A%7B%22wkid%22%3A102100%7D%7D&Feature_Format=Shapefile%20-%20SHP%20-%20.shp&Raster_Format=Tagged%20Image%20File%20Format%20-%20TIFF%20-%20.tif"
+
+    if verbose:
+        print("Downloading data...")
+
+    try:
+        response = requests.get(url)
+        response.raise_for_status()
+        response_json = response.json()
+
+        if "value" not in response_json or not response_json["value"]:
+            raise ValueError("No data found in the response")
+
+        download_url = response_json["value"]["itemUrl"]
+
+        if verbose:
+            print(f"Download URL: {download_url}")
+            print("Downloading zip file...")
+
+        zip_response = requests.get(download_url)
+        zip_response.raise_for_status()
+
+        # Create a temporary directory to extract the files
+        with tempfile.TemporaryDirectory() as temp_dir:
+            zip_path = os.path.join(temp_dir, "data.zip")
+
+            # Save the zip file
+            with open(zip_path, "wb") as f:
+                f.write(zip_response.content)
+
+            if verbose:
+                print(f"Zip file saved to {zip_path}")
+                print("Extracting files...")
+
+            # Extract the zip file
+            with zipfile.ZipFile(zip_path, "r") as zip_ref:
+                zip_ref.extractall(temp_dir)
+
+            # Find the shapefile
+            shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
+
+            if not shp_files:
+                raise FileNotFoundError("No shapefile found in the downloaded zip file")
+
+            shp_path = os.path.join(temp_dir, shp_files[0])
+
+            if verbose:
+                print(f"Reading shapefile from {shp_path}")
+
+            # Read the shapefile
+            gdf = gpd.read_file(shp_path)
+
+            # Convert to SIRGAS 2000 (EPSG:4674)
+            gdf = gdf.to_crs(epsg=4674)
+
+            if simplified:
+                # Select only essential columns
+                if verbose:
+                    print("Simplifying the dataset...")
+
+                # Identify the essential columns
+                essential_cols = ["geometry"]
+
+                # Add any other essential columns that exist in the dataset
+                for col in ["NOME", "DESCRICAO", "AREA_KM2"]:
+                    if col in gdf.columns:
+                        essential_cols.append(col)
+
+                # Select only the essential columns
+                gdf = gdf[essential_cols]
+
+            return gdf
+
+    except requests.exceptions.RequestException as e:
+        warnings.warn(f"Error downloading data: {e}")
+        return None
+    except (ValueError, FileNotFoundError, zipfile.BadZipFile) as e:
+        warnings.warn(f"Error processing data: {e}")
+        return None
+    except Exception as e:
+        warnings.warn(f"Unexpected error: {e}")
+        return None
tunned_geobr/read_og_predominant_fluid_type.py
@@ -0,0 +1,129 @@
+import geopandas as gpd
+import requests
+import shutil
+import zipfile
+import tempfile
+import warnings
+import os
+from shapely.geometry.point import Point
+
+
+def read_og_predominant_fluid_type(simplified=False, verbose=False):
+    """Download data of predominant fluid type areas in Brazil's oil and gas basins.
+
+    This function downloads and returns data of predominant fluid type areas
+    (Tipo de Fluido Predominante) in Brazil's oil and gas basins as a GeoPandas GeoDataFrame.
+    The data comes from EPE (Energy Research Company).
+
+    Parameters
+    ----------
+    simplified : bool, optional
+        If True, returns a simplified version of the dataset with only the most
+        important columns. If False, returns the complete dataset. Default is False.
+    verbose : bool, optional
+        If True, displays detailed messages about the download and processing
+        steps. Default is False.
+
+    Returns
+    -------
+    gpd.GeoDataFrame
+        A GeoDataFrame containing data on predominant fluid type areas in Brazil's oil and gas basins.
+
+    Raises
+    ------
+    Exception
+        If the download or processing of the data fails.
+
+    Example
+    -------
+    >>> from tunned_geobr import read_og_predominant_fluid_type
+    >>>
+    >>> # Read the data
+    >>> fluid_types = read_og_predominant_fluid_type()
+    >>>
+    >>> # Plot the data
+    >>> fluid_types.plot()
+    """
+
+    if verbose:
+        print("Downloading data of predominant fluid type areas in Brazil's oil and gas basins")
+
+    # Define the URL for the API request
+    url = "https://gisepeprd2.epe.gov.br/arcgis/rest/services/Download_Dados_Webmap_EPE/GPServer/Extract%20Data%20Task/execute?f=json&env%3AoutSR=102100&Layers_to_Clip=%5B%22Tipo%20de%20Fluido%20Predominante%22%5D&Area_of_Interest=%7B%22geometryType%22%3A%22esriGeometryPolygon%22%2C%22features%22%3A%5B%7B%22geometry%22%3A%7B%22rings%22%3A%5B%5B%5B-9237395.881983705%2C-4650539.310904562%5D%2C%5B-9237395.881983705%2C1219824.4613954136%5D%2C%5B-2349502.3891517334%2C1219824.4613954136%5D%2C%5B-2349502.3891517334%2C-4650539.310904562%5D%2C%5B-9237395.881983705%2C-4650539.310904562%5D%5D%5D%2C%22spatialReference%22%3A%7B%22wkid%22%3A102100%7D%7D%7D%5D%2C%22sr%22%3A%7B%22wkid%22%3A102100%7D%7D&Feature_Format=Shapefile%20-%20SHP%20-%20.shp&Raster_Format=Tagged%20Image%20File%20Format%20-%20TIFF%20-%20.tif"
+
+    try:
+        # Make the API request
+        response = requests.get(url)
+        response.raise_for_status()
+
+        # Parse the JSON response
+        data = response.json()
+
+        # Extract the URL for the zip file
+        if 'results' in data and len(data['results']) > 0 and 'value' in data['results'][0]:
+            download_url = data['results'][0]['value']['url']
+        else:
+            raise Exception("Failed to extract download URL from API response")
+
+        # Create a temporary directory to store the downloaded files
+        with tempfile.TemporaryDirectory() as temp_dir:
+            # Download the zip file
+            zip_path = os.path.join(temp_dir, "og_predominant_fluid_type.zip")
+            if verbose:
+                print("Downloading zip file")
+
+            response = requests.get(download_url, stream=True)
+            response.raise_for_status()
+
+            with open(zip_path, 'wb') as f:
+                response.raw.decode_content = True
+                shutil.copyfileobj(response.raw, f)
+
+            # Extract the zip file
+            if verbose:
+                print("Extracting files")
+
+            with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+                zip_ref.extractall(temp_dir)
+
+            # Find the shapefile in the extracted files
+            shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
+
+            if not shp_files:
+                raise Exception("No shapefile found in the downloaded zip file")
+
+            # Read the shapefile
+            if verbose:
+                print("Reading shapefile")
+
+            shp_path = os.path.join(temp_dir, shp_files[0])
+            gdf = gpd.read_file(shp_path)
+
+            # Convert to SIRGAS 2000 (EPSG:4674)
+            if verbose:
+                print("Converting to SIRGAS 2000 (EPSG:4674)")
+
+            gdf = gdf.to_crs(epsg=4674)
+
+            # Simplify the dataset if requested
+            if simplified:
+                if verbose:
+                    print("Simplifying the dataset")
+
+                # Select only the most important columns
+                # Adjust these columns based on the actual data structure
+                cols_to_keep = ['TIPO', 'BACIA', 'UF', 'geometry']
+                cols_available = [col for col in cols_to_keep if col in gdf.columns]
+
+                if not cols_available:
+                    warnings.warn("None of the specified columns for simplification are available. Returning the full dataset.")
+                else:
+                    gdf = gdf[cols_available]
+
+            if verbose:
+                print("Finished processing predominant fluid type areas data")
+
+            return gdf
+
+    except Exception as e:
+        raise Exception(f"Failed to download or process predominant fluid type areas data: {str(e)}")
tunned_geobr/read_og_probabilistic_effective_basin.py
@@ -0,0 +1,129 @@
+import geopandas as gpd
+import requests
+import shutil
+import zipfile
+import tempfile
+import warnings
+import os
+from shapely.geometry.point import Point
+
+
+def read_og_probabilistic_effective_basin(simplified=False, verbose=False):
+    """Download data of oil and gas probabilistic effective basins in Brazil.
+
+    This function downloads and returns data of oil and gas probabilistic effective basins
+    (Bacia Efetiva Probabilística) in Brazil as a GeoPandas GeoDataFrame.
+    The data comes from EPE (Energy Research Company).
+
+    Parameters
+    ----------
+    simplified : bool, optional
+        If True, returns a simplified version of the dataset with only the most
+        important columns. If False, returns the complete dataset. Default is False.
+    verbose : bool, optional
+        If True, displays detailed messages about the download and processing
+        steps. Default is False.
+
+    Returns
+    -------
+    gpd.GeoDataFrame
+        A GeoDataFrame containing data on oil and gas probabilistic effective basins in Brazil.
+
+    Raises
+    ------
+    Exception
+        If the download or processing of the data fails.
+
+    Example
+    -------
+    >>> from tunned_geobr import read_og_probabilistic_effective_basin
+    >>>
+    >>> # Read the data
+    >>> prob_basins = read_og_probabilistic_effective_basin()
+    >>>
+    >>> # Plot the data
+    >>> prob_basins.plot()
+    """
+
+    if verbose:
+        print("Downloading data of oil and gas probabilistic effective basins in Brazil")
+
+    # Define the URL for the API request
+    url = "https://gisepeprd2.epe.gov.br/arcgis/rest/services/Download_Dados_Webmap_EPE/GPServer/Extract%20Data%20Task/execute?f=json&env%3AoutSR=102100&Layers_to_Clip=%5B%22Bacia%20Efetiva%20Probabil%C3%ADstica%22%5D&Area_of_Interest=%7B%22geometryType%22%3A%22esriGeometryPolygon%22%2C%22features%22%3A%5B%7B%22geometry%22%3A%7B%22rings%22%3A%5B%5B%5B-9237395.881983705%2C-4650539.310904562%5D%2C%5B-9237395.881983705%2C1219824.4613954136%5D%2C%5B-2349502.3891517334%2C1219824.4613954136%5D%2C%5B-2349502.3891517334%2C-4650539.310904562%5D%2C%5B-9237395.881983705%2C-4650539.310904562%5D%5D%5D%2C%22spatialReference%22%3A%7B%22wkid%22%3A102100%7D%7D%7D%5D%2C%22sr%22%3A%7B%22wkid%22%3A102100%7D%7D&Feature_Format=Shapefile%20-%20SHP%20-%20.shp&Raster_Format=Tagged%20Image%20File%20Format%20-%20TIFF%20-%20.tif"
+
+    try:
+        # Make the API request
+        response = requests.get(url)
+        response.raise_for_status()
+
+        # Parse the JSON response
+        data = response.json()
+
+        # Extract the URL for the zip file
+        if 'results' in data and len(data['results']) > 0 and 'value' in data['results'][0]:
+            download_url = data['results'][0]['value']['url']
+        else:
+            raise Exception("Failed to extract download URL from API response")
+
+        # Create a temporary directory to store the downloaded files
+        with tempfile.TemporaryDirectory() as temp_dir:
+            # Download the zip file
+            zip_path = os.path.join(temp_dir, "og_probabilistic_effective_basin.zip")
+            if verbose:
+                print("Downloading zip file")
+
+            response = requests.get(download_url, stream=True)
+            response.raise_for_status()
+
+            with open(zip_path, 'wb') as f:
+                response.raw.decode_content = True
+                shutil.copyfileobj(response.raw, f)
+
+            # Extract the zip file
+            if verbose:
+                print("Extracting files")
+
+            with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+                zip_ref.extractall(temp_dir)
+
+            # Find the shapefile in the extracted files
+            shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
+
+            if not shp_files:
+                raise Exception("No shapefile found in the downloaded zip file")
+
+            # Read the shapefile
+            if verbose:
+                print("Reading shapefile")
+
+            shp_path = os.path.join(temp_dir, shp_files[0])
+            gdf = gpd.read_file(shp_path)
+
+            # Convert to SIRGAS 2000 (EPSG:4674)
+            if verbose:
+                print("Converting to SIRGAS 2000 (EPSG:4674)")
+
+            gdf = gdf.to_crs(epsg=4674)
+
+            # Simplify the dataset if requested
+            if simplified:
+                if verbose:
+                    print("Simplifying the dataset")
+
+                # Select only the most important columns
+                # Adjust these columns based on the actual data structure
+                cols_to_keep = ['NOME', 'TIPO', 'AMBIENTE', 'UF', 'geometry']
+                cols_available = [col for col in cols_to_keep if col in gdf.columns]
+
+                if not cols_available:
+                    warnings.warn("None of the specified columns for simplification are available. Returning the full dataset.")
+                else:
+                    gdf = gdf[cols_available]
+
+            if verbose:
+                print("Finished processing oil and gas probabilistic effective basins data")
+
+            return gdf
+
+    except Exception as e:
+        raise Exception(f"Failed to download or process oil and gas probabilistic effective basins data: {str(e)}")
tunned_geobr/read_og_total_ipa.py
@@ -0,0 +1,129 @@
+import geopandas as gpd
+import requests
+import shutil
+import zipfile
+import tempfile
+import warnings
+import os
+from shapely.geometry.point import Point
+
+
+def read_og_total_ipa(simplified=False, verbose=False):
+    """Download data of total IPA (Accumulated Productivity Index) for oil and gas in Brazil.
+
+    This function downloads and returns data of total IPA (Índice de Produtividade Acumulada)
+    for oil and gas areas in Brazil as a GeoPandas GeoDataFrame.
+    The data comes from EPE (Energy Research Company).
+
+    Parameters
+    ----------
+    simplified : bool, optional
+        If True, returns a simplified version of the dataset with only the most
+        important columns. If False, returns the complete dataset. Default is False.
+    verbose : bool, optional
+        If True, displays detailed messages about the download and processing
+        steps. Default is False.
+
+    Returns
+    -------
+    gpd.GeoDataFrame
+        A GeoDataFrame containing data on total IPA for oil and gas areas in Brazil.
+
+    Raises
+    ------
+    Exception
+        If the download or processing of the data fails.
+
+    Example
+    -------
+    >>> from tunned_geobr import read_og_total_ipa
+    >>>
+    >>> # Read the data
+    >>> total_ipa = read_og_total_ipa()
+    >>>
+    >>> # Plot the data
+    >>> total_ipa.plot()
+    """
+
+    if verbose:
+        print("Downloading data of total IPA for oil and gas areas in Brazil")
+
+    # Define the URL for the API request
+    url = "https://gisepeprd2.epe.gov.br/arcgis/rest/services/Download_Dados_Webmap_EPE/GPServer/Extract%20Data%20Task/execute?f=json&env%3AoutSR=102100&Layers_to_Clip=%5B%22IPA%20Total%22%5D&Area_of_Interest=%7B%22geometryType%22%3A%22esriGeometryPolygon%22%2C%22features%22%3A%5B%7B%22geometry%22%3A%7B%22rings%22%3A%5B%5B%5B-9237395.881983705%2C-4650539.310904562%5D%2C%5B-9237395.881983705%2C1219824.4613954136%5D%2C%5B-2349502.3891517334%2C1219824.4613954136%5D%2C%5B-2349502.3891517334%2C-4650539.310904562%5D%2C%5B-9237395.881983705%2C-4650539.310904562%5D%5D%5D%2C%22spatialReference%22%3A%7B%22wkid%22%3A102100%7D%7D%7D%5D%2C%22sr%22%3A%7B%22wkid%22%3A102100%7D%7D&Feature_Format=Shapefile%20-%20SHP%20-%20.shp&Raster_Format=Tagged%20Image%20File%20Format%20-%20TIFF%20-%20.tif"
+
+    try:
+        # Make the API request
+        response = requests.get(url)
+        response.raise_for_status()
+
+        # Parse the JSON response
+        data = response.json()
+
+        # Extract the URL for the zip file
+        if 'results' in data and len(data['results']) > 0 and 'value' in data['results'][0]:
+            download_url = data['results'][0]['value']['url']
+        else:
+            raise Exception("Failed to extract download URL from API response")
+
+        # Create a temporary directory to store the downloaded files
+        with tempfile.TemporaryDirectory() as temp_dir:
+            # Download the zip file
+            zip_path = os.path.join(temp_dir, "og_total_ipa.zip")
+            if verbose:
+                print("Downloading zip file")
+
+            response = requests.get(download_url, stream=True)
+            response.raise_for_status()
+
+            with open(zip_path, 'wb') as f:
+                response.raw.decode_content = True
+                shutil.copyfileobj(response.raw, f)
+
+            # Extract the zip file
+            if verbose:
+                print("Extracting files")
+
+            with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+                zip_ref.extractall(temp_dir)
+
+            # Find the shapefile in the extracted files
+            shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
+
+            if not shp_files:
+                raise Exception("No shapefile found in the downloaded zip file")
+
+            # Read the shapefile
+            if verbose:
+                print("Reading shapefile")
+
+            shp_path = os.path.join(temp_dir, shp_files[0])
+            gdf = gpd.read_file(shp_path)
+
+            # Convert to SIRGAS 2000 (EPSG:4674)
+            if verbose:
+                print("Converting to SIRGAS 2000 (EPSG:4674)")
+
+            gdf = gdf.to_crs(epsg=4674)
+
+            # Simplify the dataset if requested
+            if simplified:
+                if verbose:
+                    print("Simplifying the dataset")
+
+                # Select only the most important columns
+                # Adjust these columns based on the actual data structure
+                cols_to_keep = ['CLASSE', 'BACIA', 'UF', 'geometry']
+                cols_available = [col for col in cols_to_keep if col in gdf.columns]
+
+                if not cols_available:
+                    warnings.warn("None of the specified columns for simplification are available. Returning the full dataset.")
+                else:
+                    gdf = gdf[cols_available]
+
+            if verbose:
+                print("Finished processing total IPA data")
+
+            return gdf
+
+    except Exception as e:
+        raise Exception(f"Failed to download or process total IPA data: {str(e)}")
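The five `read_og_*` modules shown above share one download-and-extract flow: call EPE's Extract Data Task endpoint, fetch the returned zip, read the extracted shapefile, reproject to SIRGAS 2000 (EPSG:4674), and optionally keep a subset of columns. The sketch below condenses that flow for reference only; the helper name `_read_epe_layer` is hypothetical and not part of the package, and it assumes the `results[0]['value']['url']` response shape used by three of the modules (the other two read `value['itemUrl']`).

```python
import os
import tempfile
import zipfile

import geopandas as gpd
import requests


def _read_epe_layer(url, verbose=False):
    """Condensed, illustrative version of the flow shared by the new read_og_* modules."""
    response = requests.get(url)
    response.raise_for_status()
    data = response.json()

    # Assumed response shape; see the individual modules for the exact key used.
    download_url = data["results"][0]["value"]["url"]

    zip_response = requests.get(download_url)
    zip_response.raise_for_status()

    with tempfile.TemporaryDirectory() as temp_dir:
        zip_path = os.path.join(temp_dir, "data.zip")
        with open(zip_path, "wb") as f:
            f.write(zip_response.content)

        with zipfile.ZipFile(zip_path, "r") as zip_ref:
            zip_ref.extractall(temp_dir)

        shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
        if not shp_files:
            raise FileNotFoundError("No shapefile found in the downloaded zip file")

        if verbose:
            print(f"Reading {shp_files[0]}")

        # Read while the temporary directory still exists, then reproject.
        gdf = gpd.read_file(os.path.join(temp_dir, shp_files[0]))

    return gdf.to_crs(epsg=4674)
```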