tunned-geobr 0.2.3__py3-none-any.whl → 0.2.5__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only and reflects the changes between those package versions.
Files changed (69)
  1. tunned_geobr/__init__.py +59 -1
  2. tunned_geobr/list_geobr.py +72 -1
  3. tunned_geobr/read_ama_anemometric_towers.py +119 -0
  4. tunned_geobr/read_areas_under_contract.py +129 -0
  5. tunned_geobr/read_biodiesel_plants.py +128 -0
  6. tunned_geobr/read_biomethane_plants.py +128 -0
  7. tunned_geobr/read_compression_stations.py +128 -0
  8. tunned_geobr/read_drainage_ducts.py +128 -0
  9. tunned_geobr/read_etanol_plants.py +128 -0
  10. tunned_geobr/read_existent_biomass_ute.py +128 -0
  11. tunned_geobr/read_existent_cgh.py +168 -0
  12. tunned_geobr/read_existent_eolic.py +165 -0
  13. tunned_geobr/read_existent_fossile_ute.py +128 -0
  14. tunned_geobr/read_existent_nuclear_ute.py +128 -0
  15. tunned_geobr/read_existent_pch.py +168 -0
  16. tunned_geobr/read_existent_solar.py +165 -0
  17. tunned_geobr/read_existent_substations.py +128 -0
  18. tunned_geobr/read_existent_transmission_lines.py +128 -0
  19. tunned_geobr/read_existent_uhe.py +168 -0
  20. tunned_geobr/read_exploration_production_environment.py +119 -0
  21. tunned_geobr/read_federal_union_areas.py +129 -0
  22. tunned_geobr/read_fuel_bases.py +128 -0
  23. tunned_geobr/read_gas_distribution_pipelines.py +128 -0
  24. tunned_geobr/read_gas_transport_pipelines.py +128 -0
  25. tunned_geobr/read_glp_bases.py +128 -0
  26. tunned_geobr/read_gnl_terminals.py +128 -0
  27. tunned_geobr/read_hydroelectric_feasibility_studies.py +119 -0
  28. tunned_geobr/read_hydroelectric_inventory_aai_studies.py +119 -0
  29. tunned_geobr/read_isolated_systems.py +128 -0
  30. tunned_geobr/read_natural_gas_delivery_points.py +128 -0
  31. tunned_geobr/read_natural_gas_processing_hub.py +128 -0
  32. tunned_geobr/read_og_basement.py +119 -0
  33. tunned_geobr/read_og_effective_geographic_basin.py +129 -0
  34. tunned_geobr/read_og_ipa_direct_evidence.py +119 -0
  35. tunned_geobr/read_og_ipa_exploratory_activity.py +119 -0
  36. tunned_geobr/read_og_ipa_exploratory_intensity.py +129 -0
  37. tunned_geobr/read_og_ipa_need_for_knowledge.py +119 -0
  38. tunned_geobr/read_og_ipa_prospectiveness.py +119 -0
  39. tunned_geobr/read_og_ipa_supply_infrastructure.py +119 -0
  40. tunned_geobr/read_og_legal_pre_salt_polygon.py +119 -0
  41. tunned_geobr/read_og_predominant_fluid_type.py +129 -0
  42. tunned_geobr/read_og_probabilistic_effective_basin.py +129 -0
  43. tunned_geobr/read_og_total_ipa.py +129 -0
  44. tunned_geobr/read_og_unconventional_resources.py +129 -0
  45. tunned_geobr/read_oil_and_derivatives_terminal.py +128 -0
  46. tunned_geobr/read_pan_strategic_areas 2.py +71 -0
  47. tunned_geobr/read_pio_ducts.py +128 -0
  48. tunned_geobr/read_pio_terminals.py +128 -0
  49. tunned_geobr/read_planned_biomass_ute.py +166 -0
  50. tunned_geobr/read_planned_cgh.py +166 -0
  51. tunned_geobr/read_planned_eolic.py +165 -0
  52. tunned_geobr/read_planned_fossile_ute.py +166 -0
  53. tunned_geobr/read_planned_nuclear_ute.py +165 -0
  54. tunned_geobr/read_planned_pch.py +166 -0
  55. tunned_geobr/read_planned_solar.py +165 -0
  56. tunned_geobr/read_planned_substations.py +164 -0
  57. tunned_geobr/read_planned_transmission_lines.py +165 -0
  58. tunned_geobr/read_planned_uhe.py +166 -0
  59. tunned_geobr/read_processing_facilities.py +128 -0
  60. tunned_geobr/read_quilombola_areas.py +74 -56
  61. tunned_geobr/read_sedimentary_basins.py +119 -0
  62. tunned_geobr/read_subsystem_interconnected.py +163 -0
  63. {tunned_geobr-0.2.3.dist-info → tunned_geobr-0.2.5.dist-info}/METADATA +1 -1
  64. {tunned_geobr-0.2.3.dist-info → tunned_geobr-0.2.5.dist-info}/RECORD +67 -10
  65. tunned_geobr/constants.py +0 -13
  66. tunned_geobr/read_quilombola_areas_temp.py +0 -103
  67. {tunned_geobr-0.2.3.dist-info → tunned_geobr-0.2.5.dist-info}/WHEEL +0 -0
  68. {tunned_geobr-0.2.3.dist-info → tunned_geobr-0.2.5.dist-info}/entry_points.txt +0 -0
  69. {tunned_geobr-0.2.3.dist-info → tunned_geobr-0.2.5.dist-info}/licenses/LICENSE.txt +0 -0
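
The new read_* modules follow the same call signature and, judging by the +59-line change to tunned_geobr/__init__.py and the +72-line change to list_geobr.py, are registered in the package's top-level namespace and dataset listing. A minimal usage sketch, assuming the 0.2.5 wheel is installed and the new readers are re-exported from the package root (which this diff suggests but does not show):

import tunned_geobr as geobr

# Any of the EPE readers added in 0.2.5 is called the same way; the gas
# transport pipelines reader is used here purely as an example.
pipelines = geobr.read_gas_transport_pipelines(simplified=True, verbose=True)
print(pipelines.crs)      # the readers reproject to SIRGAS 2000 (EPSG:4674)
print(pipelines.columns)  # simplified=True keeps only a short column list, when present
pipelines.plot()
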
tunned_geobr/read_exploration_production_environment.py
@@ -0,0 +1,119 @@
+ import geopandas as gpd
+ import requests
+ import zipfile
+ import tempfile
+ import os
+ import warnings
+ import shutil
+
+
+ def read_exploration_production_environment(simplified=True, verbose=False):
+     """Download data for Exploration and Production Environment in Brazil.
+
+     This function downloads, processes, and returns data for Exploration and Production Environment
+     in Brazil as a geopandas GeoDataFrame.
+
+     Parameters
+     ----------
+     simplified : bool, optional
+         If True, returns a simplified version of the dataset with only essential columns.
+         If False, returns the complete dataset with all columns.
+         Default is True.
+     verbose : bool, optional
+         If True, prints detailed information about the data download and processing.
+         Default is False.
+
+     Returns
+     -------
+     geopandas.GeoDataFrame
+         A GeoDataFrame containing Exploration and Production Environment data.
+
+     Examples
+     --------
+     >>> # Download Exploration and Production Environment data
+     >>> df = read_exploration_production_environment()
+     >>> df.head()
+     """
+
+     url = "https://gisepeprd2.epe.gov.br/arcgis/rest/services/Download_Dados_Webmap_EPE/GPServer/Extract%20Data%20Task/execute?f=json&env%3AoutSR=102100&Layers_to_Clip=%5B%22Ambiente%20de%20E%26P%22%5D&Area_of_Interest=%7B%22geometryType%22%3A%22esriGeometryPolygon%22%2C%22features%22%3A%5B%7B%22geometry%22%3A%7B%22rings%22%3A%5B%5B%5B-9237395.881983705%2C-4650539.310904562%5D%2C%5B-9237395.881983705%2C1219824.4613954136%5D%2C%5B-2349502.3891517334%2C1219824.4613954136%5D%2C%5B-2349502.3891517334%2C-4650539.310904562%5D%2C%5B-9237395.881983705%2C-4650539.310904562%5D%5D%5D%2C%22spatialReference%22%3A%7B%22wkid%22%3A102100%7D%7D%7D%5D%2C%22sr%22%3A%7B%22wkid%22%3A102100%7D%7D&Feature_Format=Shapefile%20-%20SHP%20-%20.shp&Raster_Format=Tagged%20Image%20File%20Format%20-%20TIFF%20-%20.tif"
+
+     if verbose:
+         print("Downloading data...")
+
+     try:
+         response = requests.get(url)
+         response.raise_for_status()
+         response_json = response.json()
+
+         if "value" not in response_json or not response_json["value"]:
+             raise ValueError("No data found in the response")
+
+         download_url = response_json["value"]["itemUrl"]
+
+         if verbose:
+             print(f"Download URL: {download_url}")
+             print("Downloading zip file...")
+
+         zip_response = requests.get(download_url)
+         zip_response.raise_for_status()
+
+         # Create a temporary directory to extract the files
+         with tempfile.TemporaryDirectory() as temp_dir:
+             zip_path = os.path.join(temp_dir, "data.zip")
+
+             # Save the zip file
+             with open(zip_path, "wb") as f:
+                 f.write(zip_response.content)
+
+             if verbose:
+                 print(f"Zip file saved to {zip_path}")
+                 print("Extracting files...")
+
+             # Extract the zip file
+             with zipfile.ZipFile(zip_path, "r") as zip_ref:
+                 zip_ref.extractall(temp_dir)
+
+             # Find the shapefile
+             shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
+
+             if not shp_files:
+                 raise FileNotFoundError("No shapefile found in the downloaded zip file")
+
+             shp_path = os.path.join(temp_dir, shp_files[0])
+
+             if verbose:
+                 print(f"Reading shapefile from {shp_path}")
+
+             # Read the shapefile
+             gdf = gpd.read_file(shp_path)
+
+             # Convert to SIRGAS 2000 (EPSG:4674)
+             gdf = gdf.to_crs(epsg=4674)
+
+             if simplified:
+                 # Select only essential columns
+                 if verbose:
+                     print("Simplifying the dataset...")
+
+                 # Identify the essential columns
+                 essential_cols = ["geometry"]
+
+                 # Add any other essential columns that exist in the dataset
+                 for col in ["NOME", "DESCRICAO", "TIPO", "AREA_KM2"]:
+                     if col in gdf.columns:
+                         essential_cols.append(col)
+
+                 # Select only the essential columns
+                 gdf = gdf[essential_cols]
+
+             return gdf
+
+     except requests.exceptions.RequestException as e:
+         warnings.warn(f"Error downloading data: {e}")
+         return None
+     except (ValueError, FileNotFoundError, zipfile.BadZipFile) as e:
+         warnings.warn(f"Error processing data: {e}")
+         return None
+     except Exception as e:
+         warnings.warn(f"Unexpected error: {e}")
+         return None
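
Unlike the EPE readers later in this diff, which raise on any failure, this function reports problems through warnings.warn and returns None. A small hedged sketch of guarding for that at the call site (the import path is the module added above; the handling itself is illustrative):

import warnings

from tunned_geobr.read_exploration_production_environment import (
    read_exploration_production_environment,
)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    gdf = read_exploration_production_environment()

if gdf is None:
    # The download or extraction failed; the reason was emitted as a warning.
    for w in caught:
        print(w.message)
else:
    print(f"{len(gdf)} features in CRS {gdf.crs}")
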
tunned_geobr/read_federal_union_areas.py
@@ -0,0 +1,129 @@
+ import geopandas as gpd
+ import requests
+ import shutil
+ import zipfile
+ import tempfile
+ import warnings
+ import os
+ from shapely.geometry.point import Point
+
+
+ def read_federal_union_areas(simplified=False, verbose=False):
+     """Download data of Federal Union areas for oil and gas in Brazil.
+
+     This function downloads and returns data of Federal Union areas (Áreas da União)
+     for oil and gas exploration in Brazil as a GeoPandas GeoDataFrame.
+     The data comes from EPE (Energy Research Company).
+
+     Parameters
+     ----------
+     simplified : bool, optional
+         If True, returns a simplified version of the dataset with only the most
+         important columns. If False, returns the complete dataset. Default is False.
+     verbose : bool, optional
+         If True, displays detailed messages about the download and processing
+         steps. Default is False.
+
+     Returns
+     -------
+     gpd.GeoDataFrame
+         A GeoDataFrame containing data on Federal Union areas for oil and gas in Brazil.
+
+     Raises
+     ------
+     Exception
+         If the download or processing of the data fails.
+
+     Example
+     -------
+     >>> from tunned_geobr import read_federal_union_areas
+     >>>
+     >>> # Read the data
+     >>> union_areas = read_federal_union_areas()
+     >>>
+     >>> # Plot the data
+     >>> union_areas.plot()
+     """
+
+     if verbose:
+         print("Downloading data of Federal Union areas for oil and gas in Brazil")
+
+     # Define the URL for the API request
+     url = "https://gisepeprd2.epe.gov.br/arcgis/rest/services/Download_Dados_Webmap_EPE/GPServer/Extract%20Data%20Task/execute?f=json&env%3AoutSR=102100&Layers_to_Clip=%5B%22%C3%81reas%20da%20Uni%C3%A3o%22%5D&Area_of_Interest=%7B%22geometryType%22%3A%22esriGeometryPolygon%22%2C%22features%22%3A%5B%7B%22geometry%22%3A%7B%22rings%22%3A%5B%5B%5B-9237395.881983705%2C-4650539.310904562%5D%2C%5B-9237395.881983705%2C1219824.4613954136%5D%2C%5B-2349502.3891517334%2C1219824.4613954136%5D%2C%5B-2349502.3891517334%2C-4650539.310904562%5D%2C%5B-9237395.881983705%2C-4650539.310904562%5D%5D%5D%2C%22spatialReference%22%3A%7B%22wkid%22%3A102100%7D%7D%7D%5D%2C%22sr%22%3A%7B%22wkid%22%3A102100%7D%7D&Feature_Format=Shapefile%20-%20SHP%20-%20.shp&Raster_Format=Tagged%20Image%20File%20Format%20-%20TIFF%20-%20.tif"
+
+     try:
+         # Make the API request
+         response = requests.get(url)
+         response.raise_for_status()
+
+         # Parse the JSON response
+         data = response.json()
+
+         # Extract the URL for the zip file
+         if 'results' in data and len(data['results']) > 0 and 'value' in data['results'][0]:
+             download_url = data['results'][0]['value']['url']
+         else:
+             raise Exception("Failed to extract download URL from API response")
+
+         # Create a temporary directory to store the downloaded files
+         with tempfile.TemporaryDirectory() as temp_dir:
+             # Download the zip file
+             zip_path = os.path.join(temp_dir, "federal_union_areas.zip")
+             if verbose:
+                 print("Downloading zip file")
+
+             response = requests.get(download_url, stream=True)
+             response.raise_for_status()
+
+             with open(zip_path, 'wb') as f:
+                 response.raw.decode_content = True
+                 shutil.copyfileobj(response.raw, f)
+
+             # Extract the zip file
+             if verbose:
+                 print("Extracting files")
+
+             with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+                 zip_ref.extractall(temp_dir)
+
+             # Find the shapefile in the extracted files
+             shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
+
+             if not shp_files:
+                 raise Exception("No shapefile found in the downloaded zip file")
+
+             # Read the shapefile
+             if verbose:
+                 print("Reading shapefile")
+
+             shp_path = os.path.join(temp_dir, shp_files[0])
+             gdf = gpd.read_file(shp_path)
+
+             # Convert to SIRGAS 2000 (EPSG:4674)
+             if verbose:
+                 print("Converting to SIRGAS 2000 (EPSG:4674)")
+
+             gdf = gdf.to_crs(epsg=4674)
+
+             # Simplify the dataset if requested
+             if simplified:
+                 if verbose:
+                     print("Simplifying the dataset")
+
+                 # Select only the most important columns
+                 # Adjust these columns based on the actual data structure
+                 cols_to_keep = ['NOME', 'BACIA', 'TIPO', 'UF', 'geometry']
+                 cols_available = [col for col in cols_to_keep if col in gdf.columns]
+
+                 if not cols_available:
+                     warnings.warn("None of the specified columns for simplification are available. Returning the full dataset.")
+                 else:
+                     gdf = gdf[cols_available]
+
+             if verbose:
+                 print("Finished processing Federal Union areas data")
+
+             return gdf
+
+     except Exception as e:
+         raise Exception(f"Failed to download or process Federal Union areas data: {str(e)}")
tunned_geobr/read_fuel_bases.py
@@ -0,0 +1,128 @@
+ import geopandas as gpd
+ import requests
+ import shutil
+ import zipfile
+ import tempfile
+ import warnings
+ import os
+ from shapely.geometry.point import Point
+
+
+ def read_fuel_bases(simplified=False, verbose=False):
+     """Download data of fuel bases in Brazil.
+
+     This function downloads and returns data of fuel bases
+     in Brazil as a GeoPandas GeoDataFrame. The data comes from EPE (Energy Research Company).
+
+     Parameters
+     ----------
+     simplified : bool, optional
+         If True, returns a simplified version of the dataset with only the most
+         important columns. If False, returns the complete dataset. Default is False.
+     verbose : bool, optional
+         If True, displays detailed messages about the download and processing
+         steps. Default is False.
+
+     Returns
+     -------
+     gpd.GeoDataFrame
+         A GeoDataFrame containing data on fuel bases in Brazil.
+
+     Raises
+     ------
+     Exception
+         If the download or processing of the data fails.
+
+     Example
+     -------
+     >>> from tunned_geobr import read_fuel_bases
+     >>>
+     >>> # Read the data
+     >>> fuel_bases = read_fuel_bases()
+     >>>
+     >>> # Plot the data
+     >>> fuel_bases.plot()
+     """
+
+     if verbose:
+         print("Downloading data of fuel bases in Brazil")
+
+     # Define the URL for the API request
+     url = "https://gisepeprd2.epe.gov.br/arcgis/rest/services/Download_Dados_Webmap_EPE/GPServer/Extract%20Data%20Task/execute?f=json&env%3AoutSR=102100&Layers_to_Clip=%5B%22Bases%20de%20Combust%C3%ADveis%22%5D&Area_of_Interest=%7B%22geometryType%22%3A%22esriGeometryPolygon%22%2C%22features%22%3A%5B%7B%22geometry%22%3A%7B%22rings%22%3A%5B%5B%5B-8655251.47456396%2C-4787514.465591563%5D%2C%5B-8655251.47456396%2C1229608.401015912%5D%2C%5B-3508899.2341809804%2C1229608.401015912%5D%2C%5B-3508899.2341809804%2C-4787514.465591563%5D%2C%5B-8655251.47456396%2C-4787514.465591563%5D%5D%5D%2C%22spatialReference%22%3A%7B%22wkid%22%3A102100%7D%7D%7D%5D%2C%22sr%22%3A%7B%22wkid%22%3A102100%7D%7D&Feature_Format=Shapefile%20-%20SHP%20-%20.shp&Raster_Format=Tagged%20Image%20File%20Format%20-%20TIFF%20-%20.tif"
+
+     try:
+         # Make the API request
+         response = requests.get(url)
+         response.raise_for_status()
+
+         # Parse the JSON response
+         data = response.json()
+
+         # Extract the URL for the zip file
+         if 'results' in data and len(data['results']) > 0 and 'value' in data['results'][0]:
+             download_url = data['results'][0]['value']['url']
+         else:
+             raise Exception("Failed to extract download URL from API response")
+
+         # Create a temporary directory to store the downloaded files
+         with tempfile.TemporaryDirectory() as temp_dir:
+             # Download the zip file
+             zip_path = os.path.join(temp_dir, "fuel_bases.zip")
+             if verbose:
+                 print("Downloading zip file")
+
+             response = requests.get(download_url, stream=True)
+             response.raise_for_status()
+
+             with open(zip_path, 'wb') as f:
+                 response.raw.decode_content = True
+                 shutil.copyfileobj(response.raw, f)
+
+             # Extract the zip file
+             if verbose:
+                 print("Extracting files")
+
+             with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+                 zip_ref.extractall(temp_dir)
+
+             # Find the shapefile in the extracted files
+             shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
+
+             if not shp_files:
+                 raise Exception("No shapefile found in the downloaded zip file")
+
+             # Read the shapefile
+             if verbose:
+                 print("Reading shapefile")
+
+             shp_path = os.path.join(temp_dir, shp_files[0])
+             gdf = gpd.read_file(shp_path)
+
+             # Convert to SIRGAS 2000 (EPSG:4674)
+             if verbose:
+                 print("Converting to SIRGAS 2000 (EPSG:4674)")
+
+             gdf = gdf.to_crs(epsg=4674)
+
+             # Simplify the dataset if requested
+             if simplified:
+                 if verbose:
+                     print("Simplifying the dataset")
+
+                 # Select only the most important columns
+                 # Adjust these columns based on the actual data structure
+                 cols_to_keep = ['NOME', 'EMPRESA', 'TIPO', 'UF', 'MUNICIPIO', 'geometry']
+                 cols_available = [col for col in cols_to_keep if col in gdf.columns]
+
+                 if not cols_available:
+                     warnings.warn("None of the specified columns for simplification are available. Returning the full dataset.")
+                 else:
+                     gdf = gdf[cols_available]
+
+             if verbose:
+                 print("Finished processing fuel bases data")
+
+             return gdf
+
+     except Exception as e:
+         raise Exception(f"Failed to download or process fuel bases data: {str(e)}")
tunned_geobr/read_gas_distribution_pipelines.py
@@ -0,0 +1,128 @@
+ import geopandas as gpd
+ import requests
+ import shutil
+ import zipfile
+ import tempfile
+ import warnings
+ import os
+ from shapely.geometry.point import Point
+
+
+ def read_gas_distribution_pipelines(simplified=False, verbose=False):
+     """Download data of gas distribution pipelines in Brazil.
+
+     This function downloads and returns data of gas distribution pipelines (gasodutos de distribuição)
+     in Brazil as a GeoPandas GeoDataFrame. The data comes from EPE (Energy Research Company).
+
+     Parameters
+     ----------
+     simplified : bool, optional
+         If True, returns a simplified version of the dataset with only the most
+         important columns. If False, returns the complete dataset. Default is False.
+     verbose : bool, optional
+         If True, displays detailed messages about the download and processing
+         steps. Default is False.
+
+     Returns
+     -------
+     gpd.GeoDataFrame
+         A GeoDataFrame containing data on gas distribution pipelines in Brazil.
+
+     Raises
+     ------
+     Exception
+         If the download or processing of the data fails.
+
+     Example
+     -------
+     >>> from tunned_geobr import read_gas_distribution_pipelines
+     >>>
+     >>> # Read the data
+     >>> gas_dist_pipelines = read_gas_distribution_pipelines()
+     >>>
+     >>> # Plot the data
+     >>> gas_dist_pipelines.plot()
+     """
+
+     if verbose:
+         print("Downloading data of gas distribution pipelines in Brazil")
+
+     # Define the URL for the API request
+     url = "https://gisepeprd2.epe.gov.br/arcgis/rest/services/Download_Dados_Webmap_EPE/GPServer/Extract%20Data%20Task/execute?f=json&env%3AoutSR=102100&Layers_to_Clip=%5B%22Gasodutos%20de%20distribui%C3%A7%C3%A3o%22%5D&Area_of_Interest=%7B%22geometryType%22%3A%22esriGeometryPolygon%22%2C%22features%22%3A%5B%7B%22geometry%22%3A%7B%22rings%22%3A%5B%5B%5B-8655251.47456396%2C-4787514.465591563%5D%2C%5B-8655251.47456396%2C1229608.401015912%5D%2C%5B-3508899.2341809804%2C1229608.401015912%5D%2C%5B-3508899.2341809804%2C-4787514.465591563%5D%2C%5B-8655251.47456396%2C-4787514.465591563%5D%5D%5D%2C%22spatialReference%22%3A%7B%22wkid%22%3A102100%7D%7D%7D%5D%2C%22sr%22%3A%7B%22wkid%22%3A102100%7D%7D&Feature_Format=Shapefile%20-%20SHP%20-%20.shp&Raster_Format=Tagged%20Image%20File%20Format%20-%20TIFF%20-%20.tif"
+
+     try:
+         # Make the API request
+         response = requests.get(url)
+         response.raise_for_status()
+
+         # Parse the JSON response
+         data = response.json()
+
+         # Extract the URL for the zip file
+         if 'results' in data and len(data['results']) > 0 and 'value' in data['results'][0]:
+             download_url = data['results'][0]['value']['url']
+         else:
+             raise Exception("Failed to extract download URL from API response")
+
+         # Create a temporary directory to store the downloaded files
+         with tempfile.TemporaryDirectory() as temp_dir:
+             # Download the zip file
+             zip_path = os.path.join(temp_dir, "gas_distribution_pipelines.zip")
+             if verbose:
+                 print("Downloading zip file")
+
+             response = requests.get(download_url, stream=True)
+             response.raise_for_status()
+
+             with open(zip_path, 'wb') as f:
+                 response.raw.decode_content = True
+                 shutil.copyfileobj(response.raw, f)
+
+             # Extract the zip file
+             if verbose:
+                 print("Extracting files")
+
+             with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+                 zip_ref.extractall(temp_dir)
+
+             # Find the shapefile in the extracted files
+             shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
+
+             if not shp_files:
+                 raise Exception("No shapefile found in the downloaded zip file")
+
+             # Read the shapefile
+             if verbose:
+                 print("Reading shapefile")
+
+             shp_path = os.path.join(temp_dir, shp_files[0])
+             gdf = gpd.read_file(shp_path)
+
+             # Convert to SIRGAS 2000 (EPSG:4674)
+             if verbose:
+                 print("Converting to SIRGAS 2000 (EPSG:4674)")
+
+             gdf = gdf.to_crs(epsg=4674)
+
+             # Simplify the dataset if requested
+             if simplified:
+                 if verbose:
+                     print("Simplifying the dataset")
+
+                 # Select only the most important columns
+                 # Adjust these columns based on the actual data structure
+                 cols_to_keep = ['NOME', 'EMPRESA', 'EXTENSAO', 'DIAMETRO', 'UF', 'geometry']
+                 cols_available = [col for col in cols_to_keep if col in gdf.columns]
+
+                 if not cols_available:
+                     warnings.warn("None of the specified columns for simplification are available. Returning the full dataset.")
+                 else:
+                     gdf = gdf[cols_available]
+
+             if verbose:
+                 print("Finished processing gas distribution pipelines data")
+
+             return gdf
+
+     except Exception as e:
+         raise Exception(f"Failed to download or process gas distribution pipelines data: {str(e)}")
tunned_geobr/read_gas_transport_pipelines.py
@@ -0,0 +1,128 @@
+ import geopandas as gpd
+ import requests
+ import shutil
+ import zipfile
+ import tempfile
+ import warnings
+ import os
+ from shapely.geometry.point import Point
+
+
+ def read_gas_transport_pipelines(simplified=False, verbose=False):
+     """Download data of gas transport pipelines in Brazil.
+
+     This function downloads and returns data of gas transport pipelines (gasodutos de transporte)
+     in Brazil as a GeoPandas GeoDataFrame. The data comes from EPE (Energy Research Company).
+
+     Parameters
+     ----------
+     simplified : bool, optional
+         If True, returns a simplified version of the dataset with only the most
+         important columns. If False, returns the complete dataset. Default is False.
+     verbose : bool, optional
+         If True, displays detailed messages about the download and processing
+         steps. Default is False.
+
+     Returns
+     -------
+     gpd.GeoDataFrame
+         A GeoDataFrame containing data on gas transport pipelines in Brazil.
+
+     Raises
+     ------
+     Exception
+         If the download or processing of the data fails.
+
+     Example
+     -------
+     >>> from tunned_geobr import read_gas_transport_pipelines
+     >>>
+     >>> # Read the data
+     >>> gas_pipelines = read_gas_transport_pipelines()
+     >>>
+     >>> # Plot the data
+     >>> gas_pipelines.plot()
+     """
+
+     if verbose:
+         print("Downloading data of gas transport pipelines in Brazil")
+
+     # Define the URL for the API request
+     url = "https://gisepeprd2.epe.gov.br/arcgis/rest/services/Download_Dados_Webmap_EPE/GPServer/Extract%20Data%20Task/execute?f=json&env%3AoutSR=102100&Layers_to_Clip=%5B%22Gasodutos%20de%20transporte%22%5D&Area_of_Interest=%7B%22geometryType%22%3A%22esriGeometryPolygon%22%2C%22features%22%3A%5B%7B%22geometry%22%3A%7B%22rings%22%3A%5B%5B%5B-8655251.47456396%2C-4787514.465591563%5D%2C%5B-8655251.47456396%2C1229608.401015912%5D%2C%5B-3508899.2341809804%2C1229608.401015912%5D%2C%5B-3508899.2341809804%2C-4787514.465591563%5D%2C%5B-8655251.47456396%2C-4787514.465591563%5D%5D%5D%2C%22spatialReference%22%3A%7B%22wkid%22%3A102100%7D%7D%7D%5D%2C%22sr%22%3A%7B%22wkid%22%3A102100%7D%7D&Feature_Format=Shapefile%20-%20SHP%20-%20.shp&Raster_Format=Tagged%20Image%20File%20Format%20-%20TIFF%20-%20.tif"
+
+     try:
+         # Make the API request
+         response = requests.get(url)
+         response.raise_for_status()
+
+         # Parse the JSON response
+         data = response.json()
+
+         # Extract the URL for the zip file
+         if 'results' in data and len(data['results']) > 0 and 'value' in data['results'][0]:
+             download_url = data['results'][0]['value']['url']
+         else:
+             raise Exception("Failed to extract download URL from API response")
+
+         # Create a temporary directory to store the downloaded files
+         with tempfile.TemporaryDirectory() as temp_dir:
+             # Download the zip file
+             zip_path = os.path.join(temp_dir, "gas_transport_pipelines.zip")
+             if verbose:
+                 print("Downloading zip file")
+
+             response = requests.get(download_url, stream=True)
+             response.raise_for_status()
+
+             with open(zip_path, 'wb') as f:
+                 response.raw.decode_content = True
+                 shutil.copyfileobj(response.raw, f)
+
+             # Extract the zip file
+             if verbose:
+                 print("Extracting files")
+
+             with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+                 zip_ref.extractall(temp_dir)
+
+             # Find the shapefile in the extracted files
+             shp_files = [f for f in os.listdir(temp_dir) if f.endswith('.shp')]
+
+             if not shp_files:
+                 raise Exception("No shapefile found in the downloaded zip file")
+
+             # Read the shapefile
+             if verbose:
+                 print("Reading shapefile")
+
+             shp_path = os.path.join(temp_dir, shp_files[0])
+             gdf = gpd.read_file(shp_path)
+
+             # Convert to SIRGAS 2000 (EPSG:4674)
+             if verbose:
+                 print("Converting to SIRGAS 2000 (EPSG:4674)")
+
+             gdf = gdf.to_crs(epsg=4674)
+
+             # Simplify the dataset if requested
+             if simplified:
+                 if verbose:
+                     print("Simplifying the dataset")
+
+                 # Select only the most important columns
+                 # Adjust these columns based on the actual data structure
+                 cols_to_keep = ['NOME', 'EMPRESA', 'EXTENSAO', 'DIAMETRO', 'CAPACIDADE', 'UF', 'geometry']
+                 cols_available = [col for col in cols_to_keep if col in gdf.columns]
+
+                 if not cols_available:
+                     warnings.warn("None of the specified columns for simplification are available. Returning the full dataset.")
+                 else:
+                     gdf = gdf[cols_available]
+
+             if verbose:
+                 print("Finished processing gas transport pipelines data")
+
+             return gdf
+
+     except Exception as e:
+         raise Exception(f"Failed to download or process gas transport pipelines data: {str(e)}")