tunned-geobr 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. tunned_geobr/__init__.py +38 -0
  2. tunned_geobr/constants.py +13 -0
  3. tunned_geobr/data/grid_state_correspondence_table.csv +140 -0
  4. tunned_geobr/list_geobr.py +39 -0
  5. tunned_geobr/lookup_muni.py +111 -0
  6. tunned_geobr/read_amazon.py +42 -0
  7. tunned_geobr/read_amazon_ibas.py +92 -0
  8. tunned_geobr/read_atlantic_forest_ibas.py +93 -0
  9. tunned_geobr/read_biomes.py +43 -0
  10. tunned_geobr/read_census_tract.py +97 -0
  11. tunned_geobr/read_climate_aggressiveness.py +74 -0
  12. tunned_geobr/read_comparable_areas.py +75 -0
  13. tunned_geobr/read_conservation_units.py +43 -0
  14. tunned_geobr/read_country.py +43 -0
  15. tunned_geobr/read_disaster_risk_area.py +47 -0
  16. tunned_geobr/read_geology.py +77 -0
  17. tunned_geobr/read_geomorphology.py +77 -0
  18. tunned_geobr/read_health_facilities.py +49 -0
  19. tunned_geobr/read_health_region.py +52 -0
  20. tunned_geobr/read_immediate_region.py +81 -0
  21. tunned_geobr/read_indigenous_land.py +44 -0
  22. tunned_geobr/read_intermediate_region.py +61 -0
  23. tunned_geobr/read_meso_region.py +78 -0
  24. tunned_geobr/read_metro_area.py +44 -0
  25. tunned_geobr/read_micro_region.py +78 -0
  26. tunned_geobr/read_mining_processes.py +76 -0
  27. tunned_geobr/read_municipal_seat.py +41 -0
  28. tunned_geobr/read_municipality.py +83 -0
  29. tunned_geobr/read_neighborhood.py +39 -0
  30. tunned_geobr/read_pedology.py +77 -0
  31. tunned_geobr/read_pop_arrangements.py +45 -0
  32. tunned_geobr/read_region.py +41 -0
  33. tunned_geobr/read_schools.py +44 -0
  34. tunned_geobr/read_semiarid.py +42 -0
  35. tunned_geobr/read_settlements.py +85 -0
  36. tunned_geobr/read_state.py +88 -0
  37. tunned_geobr/read_statistical_grid.py +127 -0
  38. tunned_geobr/read_urban_area.py +44 -0
  39. tunned_geobr/read_urban_concentrations.py +46 -0
  40. tunned_geobr/read_weighting_area.py +74 -0
  41. tunned_geobr/utils.py +326 -0
  42. tunned_geobr-0.1.0.dist-info/METADATA +103 -0
  43. tunned_geobr-0.1.0.dist-info/RECORD +46 -0
  44. tunned_geobr-0.1.0.dist-info/WHEEL +4 -0
  45. tunned_geobr-0.1.0.dist-info/entry_points.txt +4 -0
  46. tunned_geobr-0.1.0.dist-info/licenses/LICENSE.txt +2 -0
@@ -0,0 +1,74 @@
1
# Fix: this wheel ships the package as `tunned_geobr` (see RECORD), so the
# import must target tunned_geobr, not the nonexistent `cursed_geobr`.
from tunned_geobr.utils import select_metadata, download_gpkg


def read_weighting_area(
    code_weighting="all", year=2010, simplified=True, verbose=False
):
    """Download shape files of Census Weighting Areas (area de ponderacao) of the Brazilian Population Census.

    Only 2010 data is currently available.

    Parameters
    ----------
    code_weighting:
        The 7-digit code of a Municipality. If the two-digit code or a two-letter uppercase abbreviation of
        a state is passed, (e.g. 33 or "RJ") the function will load all weighting areas of that state.
        If code_weighting="all", all weighting areas of the country are loaded.
    year : int, optional
        Year of the data, by default 2010
    simplified: boolean, by default True
        Data 'type', indicating whether the function returns the 'original' dataset
        with high resolution or a dataset with 'simplified' borders (Default)
    verbose : bool, optional
        by default False

    Returns
    -------
    gpd.GeoDataFrame
        Metadata and geopackage of selected states

    Raises
    ------
    Exception
        If parameters are not found or not well defined

    Example
    -------
    >>> from tunned_geobr import read_weighting_area

    # Read specific state at a given year
    >>> df = read_weighting_area(year=2010)
    """

    metadata = select_metadata("weighting_area", year=year, simplified=simplified)

    if code_weighting == "all":
        if verbose:
            print("Loading data for the whole country. This might take a few minutes.")
        return download_gpkg(metadata)

    # Keep only metadata rows whose state code (e.g. "33") or abbreviation
    # (e.g. "RJ") matches the first two characters of the requested code.
    metadata = metadata[
        metadata[["code", "code_abbrev"]].apply(
            lambda row: str(code_weighting)[:2] in str(row["code"])  # if number e.g. 12
            or str(code_weighting)[:2] in str(row["code_abbrev"]),  # if UF e.g. RO
            axis=1,
        )
    ]

    if not len(metadata):
        raise Exception("Invalid Value to argument code_weighting.")

    gdf = download_gpkg(metadata)

    # A two-character input selects a whole state: return everything downloaded.
    if len(str(code_weighting)) == 2:
        return gdf

    elif code_weighting in gdf["code_muni"].tolist():
        return gdf.query(f"code_muni == {code_weighting}")

    elif code_weighting in gdf["code_weighting"].tolist():
        return gdf.query(f"code_weighting == {code_weighting}")

    # Fallback: code matched a state prefix but neither column exactly.
    return gdf
tunned_geobr/utils.py ADDED
@@ -0,0 +1,326 @@
1
import os
import tempfile
import unicodedata
from functools import lru_cache
from io import StringIO
from urllib.error import HTTPError

import geopandas as gpd
import pandas as pd
import requests

# Fix: this wheel ships the package as `tunned_geobr` (see RECORD), so the
# constants module must be imported from tunned_geobr, not `cursed_geobr`.
from tunned_geobr.constants import DataTypes

# Fallback mirrors tried by url_solver when a primary URL is unreachable.
MIRRORS = ["https://github.com/ipeaGIT/geobr/releases/download/v1.7.0/"]
17
+ def _get_unique_values(_df, column):
18
+
19
+ return ", ".join([str(i) for i in _df[column].unique()])
20
+
21
+
22
def url_solver(url):
    """Fetch *url*, falling back to the known MIRRORS on failure.

    Parameters
    ----------
    url : str
        Primary download address; its trailing file name is reused on each
        mirror.

    Returns
    -------
    requests.Response
        The first response that arrived with HTTP status 200.

    Raises
    ------
    ConnectionError
        If neither the original URL nor any mirror answered successfully.
    """
    file_id = url.split("/")[-1]
    candidates = [url] + [mirror + file_id for mirror in MIRRORS]

    for candidate in candidates:
        try:
            response = requests.get(candidate)
        # Fix: was a bare `except:` that also swallowed KeyboardInterrupt and
        # SystemExit; only network-level failures should trigger a fallback.
        except requests.RequestException:
            continue
        if response.status_code == 200:
            return response

    raise ConnectionError(
        "No mirrors are active. Please report to https://github.com/ipeaGIT/geobr/issues"
    )
39
+
40
+
41
@lru_cache(maxsize=124)
def download_metadata(
    url="http://www.ipea.gov.br/geobr/metadata/metadata_1.7.0_gpkg.csv",
):
    """Support function to download metadata internally used in geobr.

    It caches the metadata file to avoid reloading it in the same session.

    Parameters
    ----------
    url : str, optional
        Metadata url, by default 'http://www.ipea.gov.br/geobr/metadata/metadata_1.7.0_gpkg.csv'

    Returns
    -------
    pd.DataFrame
        Table with all metadata of geopackages

    Raises
    ------
    Exception
        Leads user to Github issue page if metadata url is not found

    Examples
    --------
    >>> metadata = download_metadata()
    >>> metadata.head(1)
                  geo  year code                                      download_path code_abbrev
    0  amazonia_legal  2012   am  http://www.ipea.gov.br/geobr/data_gpkg/amazoni...  amazonia_legal
    """

    try:
        return pd.read_csv(StringIO(url_solver(url).text))

    except HTTPError as err:
        # Fix: the concatenated message previously ran sentences together
        # ("problem.If this...") and double-spaced " please"; also chain the
        # original error so the traceback keeps the root cause.
        raise Exception(
            "Perhaps this is an internet connection problem. "
            "If this is not a connection problem in your network, "
            "please try geobr again in a few minutes. "
            "Please report to https://github.com/ipeaGIT/geobr/issues"
        ) from err
82
+
83
+
84
def select_year(metadata, year):
    """Apply year to metadata and check its existence.

    If it does not exist, raises an informative error.

    Parameters
    ----------
    metadata : pd.DataFrame
        Filtered metadata table
    year : int or None
        Year selected by user; None selects the most recent year available.

    Returns
    -------
    pd.DataFrame
        Filtered dataframe by year.

    Raises
    ------
    Exception
        If year does not exist, raises exception with available years.
    """

    if year is None:
        # Default to the most recent year available in the table.
        year = max(metadata["year"])

    elif year not in list(metadata["year"]):
        # Fix: removed a dead local (`years = ", ".join(...)`) that was
        # computed and never used — the message below already builds the
        # same list via _get_unique_values.
        raise Exception(
            "Error: Invalid Value to argument 'year/date'. "
            "It must be one of the following: "
            f'{_get_unique_values(metadata, "year")}'
        )

    return metadata.query(f"year == {year}")
121
+
122
+
123
def select_simplified(metadata, simplified):
    """Filter metadata rows by data type: 'simplified' or 'normal'.

    When *simplified* is truthy, keep only rows whose download path contains
    the word "simplified" (lighter borders); otherwise keep the rest (the
    complete, heavier version).

    Parameters
    ----------
    metadata : pd.DataFrame
        Filtered metadata table
    simplified : boolean
        Data type, either True for 'simplified' or False for 'normal'

    Returns
    -------
    pd.DataFrame
        Filtered metadata table by type
    """

    is_simplified = metadata["download_path"].str.contains("simplified")
    return metadata[is_simplified] if simplified else metadata[~is_simplified]
148
+
149
+
150
@lru_cache(maxsize=1240)
def load_gpkg(url):
    """Download one geopackage URL and load it as a GeoDataFrame.

    The result is cached per URL for the active session.

    Parameters
    ----------
    url : str
        Address with gpkg

    Returns
    -------
    gpd.GeoDataFrame
        Table with metadata and shapefiles contained in url.
    """

    try:
        payload = url_solver(url).content
    except Exception as e:
        raise Exception(
            "Some internal url is broken."
            "Please report to https://github.com/ipeaGIT/geobr/issues"
        ) from e

    # NamedTemporaryFile cannot be reopened by name while still open on
    # Windows (see the tempfile docs), so we use delete=False, close the
    # handle explicitly, read the file back, and unlink it ourselves.
    with tempfile.NamedTemporaryFile(suffix=".gpkg", delete=False) as handle:
        handle.write(payload)
        handle.close()
        frame = gpd.read_file(handle.name)
        os.unlink(handle.name)

    return frame
192
+
193
+
194
def enforce_types(df):
    """Enforce correct datatypes according to the DataTypes constant.

    Every column whose name appears in the DataTypes enum is cast in place
    to that member's declared dtype; other columns are left untouched.

    Parameters
    ----------
    df : gpd.GeoDataFrame
        Raw output data

    Returns
    -------
    gpd.GeoDataFrame
        Output data with correct types
    """

    typed_columns = DataTypes.__members__
    for name in df.columns:
        if name in typed_columns:
            df[name] = df[name].astype(DataTypes[name].value)

    return df
215
+
216
+
217
def download_gpkg(metadata):
    """Generalize gpkg download and conversion to geopandas for one or many urls.

    Parameters
    ----------
    metadata : pd.DataFrame
        Filtered metadata

    Returns
    -------
    gpd.GeoDataFrame
        Table with metadata and shapefiles contained in urls.
    """

    addresses = metadata["download_path"].tolist()
    frames = [load_gpkg(address) for address in addresses]

    combined = gpd.GeoDataFrame(pd.concat(frames, ignore_index=True))
    return enforce_types(combined)
241
+
242
+
243
def select_metadata(geo, simplified=None, year=False):
    """Downloads and filters metadata given `geo`, `simplified` and `year`.

    Parameters
    ----------
    geo : str
        Shapefile category. I.e: state, biome, etc...
    simplified : boolean, optional
        `simplified` or `normal` shapefiles; None skips the filter.
    year : int, optional
        Year of the data. The sentinel False skips the year filter entirely;
        None is a valid value meaning "latest year" inside select_year.

    Returns
    -------
    pd.DataFrame
        Filtered metadata

    Raises
    ------
    Exception
        if a parameter is not found in metadata table
    """

    # Get metadata with data addresses
    metadata = download_metadata()

    # Fix: the geo query was previously executed twice (once for the length
    # check, once for the assignment); run it once and reuse the result.
    filtered = metadata.query(f'geo == "{geo}"')
    if len(filtered) == 0:
        # Fix: the concatenated message previously ran together without
        # spaces ("exist.Please, use one of the following:state, ...").
        raise Exception(
            f"The `geo` argument {geo} does not exist. "
            "Please, use one of the following: "
            f'{_get_unique_values(metadata, "geo")}'
        )
    metadata = filtered

    if simplified is not None:
        # Select data type
        metadata = select_simplified(metadata, simplified)

    # Fix: identity check for the False sentinel instead of `year != False`,
    # which would also skip the filter for year=0.
    if year is not False:
        # Verify year input
        metadata = select_year(metadata, year)

    return metadata
288
+
289
+
290
def change_type_list(lst, astype=str):
    """Cast every element of *lst* with *astype* and return the new list."""
    return list(map(astype, lst))
292
+
293
+
294
def test_options(choosen, name, allowed=None, not_allowed=None):
    """Validate a user-supplied option against allowed/forbidden value sets.

    Parameters
    ----------
    choosen :
        Value supplied by the user (parameter name kept for compatibility).
    name : str
        Argument name used in error messages.
    allowed : iterable, optional
        If given, `choosen` must be one of these values.
    not_allowed : iterable, optional
        If given, `choosen` must not be one of these values.

    Raises
    ------
    Exception
        If `choosen` violates either constraint.
    """

    if allowed is not None:
        if choosen not in allowed:
            raise Exception(
                f"Invalid value to argument '{name}'. "
                f"It must be either {' or '.join(change_type_list(allowed))}"
            )

    if not_allowed is not None:
        if choosen in not_allowed:
            # Fix: this message previously joined `allowed` instead of
            # `not_allowed`, which listed the wrong values and crashed with
            # TypeError whenever `allowed` was None.
            raise Exception(
                f"Invalid value to argument '{name}'. "
                f"It cannot be {' or '.join(change_type_list(not_allowed))}"
            )
309
+
310
+
311
def strip_accents(text):
    """
    Strip accents from input String.

    Decomposes the text (NFD) so accents become separate combining marks,
    then drops everything outside ASCII.

    Parameters
    ----------
    text: str, The input string

    Returns
    ----------
    str, The processed string
    """
    decomposed = unicodedata.normalize("NFD", text)
    ascii_only = decomposed.encode("ascii", "ignore").decode("utf-8")
    return str(ascii_only)
@@ -0,0 +1,103 @@
1
+ Metadata-Version: 2.1
2
+ Name: tunned-geobr
3
+ Version: 0.1.0
4
+ Summary: Fork personalizado do geobr com funcionalidades extras como download de dados da ANM
5
+ Author: Anderson Stolfi
6
+ License: MIT
7
+ Classifier: Intended Audience :: Science/Research
8
+ Classifier: Intended Audience :: Developers
9
+ Classifier: Intended Audience :: Education
10
+ Classifier: Topic :: Scientific/Engineering :: GIS
11
+ Classifier: Topic :: Scientific/Engineering :: Visualization
12
+ Classifier: Programming Language :: Python
13
+ Project-URL: homepage, https://github.com/popogis24/tunned_geobr
14
+ Project-URL: repository, https://github.com/popogis24/tunned_geobr
15
+ Requires-Python: <4.0,>=3.9
16
+ Requires-Dist: geopandas<=1.1,>=1.0.0
17
+ Requires-Dist: shapely<=2.1.0,>=1.7.0
18
+ Requires-Dist: requests<3.0.0,>=2.25.1
19
+ Requires-Dist: lxml<6.0.0,>=5.1.0
20
+ Requires-Dist: html5lib==1.1
21
+ Requires-Dist: geobr<0.3.0,>=0.2.2
22
+ Requires-Dist: patool>=1.15.0
23
+ Description-Content-Type: text/markdown
24
+
25
+ # geobr: Download Official Spatial Data Sets of Brazil
26
+
27
+ <img align="right" src="https://github.com/ipeaGIT/geobr/blob/master/r-package/man/figures/geobr_logo_b.png?raw=true" alt="logo" width="140">
28
+ <img align="right" src="https://github.com/ipeaGIT/geobr/blob/master/r-package/man/figures/geobr_logo_y.png?raw=true" alt="logo" width="140">
29
+ <p align="justify">geobr is a computational package to download official spatial data sets of Brazil. The package includes a wide range of geospatial data in geopackage format (like shapefiles but better), available at various geographic scales and for various years with harmonized attributes, projection and topology (see detailed list of available data sets below). </p>
30
+
31
+ ## [READ FULL DOCS](https://github.com/ipeaGIT/geobr)
32
+
33
+ ## Contribute
34
+
35
+ To start the development environment run
36
+
37
+ ```sh
38
+ uv sync
39
+ ```
40
+
41
+ Test with
42
+
43
+ `uv run pytest -n auto`
44
+
45
+ You can use a helper to translate a function from R.
46
+ If you want to add `read_biomes`, just run
47
+
48
+ ```sh
49
+ uv run python helpers/translate_from_R.py read_biomes
50
+ ```
51
+
52
+ It will scrape the original R function to get documentation and metadata.
53
+ It adds:
54
+ - default year
55
+ - function name
56
+ - documentation one liner
57
+ - larger documentation
58
+ - very basic tests
59
+
60
+ ! Be aware that the function you are adding may be more complicated than the template, so always double-check !
61
+
62
+
63
+
64
+
65
+ ## Translation Status
66
+
67
+ | Function | Translated? | Easy? |
68
+ | ------------------------- | ----------- | ----- |
69
+ | read_amazon | Yes | Super |
70
+ | read_biomes | Yes | Super |
71
+ | read_census_tract | Yes | No |
72
+ | read_comparable_areas | Yes | Yes |
73
+ | read_conservation_units | Yes | Super |
74
+ | read_country | Yes | Super |
75
+ | read_disaster_risk_area | Yes | Super |
76
+ | read_health_facilities | Yes | Super |
77
+ | read_health_region | Yes | Super |
78
+ | read_immediate_region | Yes | Yes |
79
+ | read_indigenous_land | Yes | Super |
80
+ | read_intermediate_region | Yes | Yes |
81
+ | read_meso_region | Yes | No |
82
+ | read_metro_area | Yes | Super |
83
+ | read_micro_region | Yes | No |
84
+ | read_municipal_seat | Yes | Super |
85
+ | read_municipality | Yes | No |
86
+ | read_region | Yes | Super |
87
+ | read_semiarid | Yes | Super |
88
+ | read_state | Yes | Super |
89
+ | read_statistical_grid | Yes | No |
90
+ | read_urban_area | Yes | Super |
91
+ | read_urban_concentrations | Yes | Super |
92
+ | read_weighting_area | Yes | No |
93
+ | list_geobr | Yes | Yes |
94
+ | lookup_muni | Yes | No |
95
+ | read_neighborhood | Yes | Yes |
96
+
97
+
98
+ # Release new version
99
+
100
+ ```
101
+ poetry version [patch|minor|major]
102
+ poetry publish --build
103
+ ```
@@ -0,0 +1,46 @@
1
+ tunned_geobr-0.1.0.dist-info/METADATA,sha256=Rq_0FSHR6SbXupwxbHI_3poqz-sToBb_JurPUy9Z-9Y,3896
2
+ tunned_geobr-0.1.0.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
3
+ tunned_geobr-0.1.0.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
4
+ tunned_geobr-0.1.0.dist-info/licenses/LICENSE.txt,sha256=mECZRcbde3HssOKe1Co4zgqBLGVN0OWpTsEy3LIbcRA,75
5
+ tunned_geobr/__init__.py,sha256=zeKiOihNUT5yqeOzWo84E_rLZSZOwxgyEsrOuAdqT4M,1872
6
+ tunned_geobr/constants.py,sha256=ZHj4pKtrxoUMFFgw-4ikuFcCkxEjzIbWL_gzhutGDB4,262
7
+ tunned_geobr/data/grid_state_correspondence_table.csv,sha256=FpkBuX_-lRXQ1yBrQODxQgG9oha9Fd8A8zGKfdsDAmk,2660
8
+ tunned_geobr/list_geobr.py,sha256=uIH11FOltrcjIQOqFk6uHgHj2moCWH_0vWyxbMj-xtA,1252
9
+ tunned_geobr/lookup_muni.py,sha256=ny1zU4i6OagvL4Mrc6XQWPgn2RrJa_mXlKXh81oVYsM,3462
10
+ tunned_geobr/read_amazon.py,sha256=CpUDhNEeZPAXHEuCUl0fsKi6KyeVBFOvZrFAcLe8OEg,1299
11
+ tunned_geobr/read_amazon_ibas.py,sha256=RtOo5wPfc26S2HYJCLylNCPM5cHBOLGTP4uKEtGC3Bw,3500
12
+ tunned_geobr/read_atlantic_forest_ibas.py,sha256=GjaNy8bKMOYRA4G5IR7gfmyAGm8EltojqdqBHZDOg7U,3577
13
+ tunned_geobr/read_biomes.py,sha256=5W2toakngPWG0V7c9QP_xon__38gIkJr2xrKc2mYaN0,1351
14
+ tunned_geobr/read_census_tract.py,sha256=lFIlgDqsz6zkiVWEBE97SlxgdmxuI6_fgqgWhaXcgYg,3272
15
+ tunned_geobr/read_climate_aggressiveness.py,sha256=r7dNvFQ7mo0B5PUWEv28x4aYAD-jF6JiSV7Oydq6N-0,2811
16
+ tunned_geobr/read_comparable_areas.py,sha256=NUyHfGaKoqNBfQUID2uZc-CrZLTxGN_5gJog-I7XZS0,2122
17
+ tunned_geobr/read_conservation_units.py,sha256=-RYV3x06LZDqQKAtmiDLwEV07TxKxCbXI4mKENxJZbk,1393
18
+ tunned_geobr/read_country.py,sha256=wDfG95wFz16iy0WpSxyrezO3hqr3v5xJRK94pDJAneM,1371
19
+ tunned_geobr/read_disaster_risk_area.py,sha256=r6Ccf8yr-zr2IZSPc5jHNsECvesnlG0Yfek1Q5-S5Rw,1859
20
+ tunned_geobr/read_geology.py,sha256=q6o3E8DQ5NBc2wJ9rJfGLN5qSI9_0vQG48xil-f7jzM,2773
21
+ tunned_geobr/read_geomorphology.py,sha256=h2Si0OXHBBna1eMN9LzgiV5IttRyzcnCSe2tqx6frYk,2850
22
+ tunned_geobr/read_health_facilities.py,sha256=ujfg6nbuoefUvhGV69sETKXgog90s6r3wwU8JQa-jUM,2026
23
+ tunned_geobr/read_health_region.py,sha256=3j-Y4NTTrnUuTpFWhWfJDtsfmlRzJBRBv3n9QCvOEtU,1846
24
+ tunned_geobr/read_immediate_region.py,sha256=nIDFssnaPzmvAPYrNuRhLIqczampQct4l8JaoWv90L0,2568
25
+ tunned_geobr/read_indigenous_land.py,sha256=ByE_VUwHhVf1V-rOBzuyCmwKnHdpk2AcSWZBQ3UsWQo,1473
26
+ tunned_geobr/read_intermediate_region.py,sha256=oo9lvrttInePaaSpHPZSrxDlx80u900HA5BvrVP1tTU,2199
27
+ tunned_geobr/read_meso_region.py,sha256=LDW2lufe8yqToE79intlHBWmXWB5PS2j3C4Zh8ElOhg,2615
28
+ tunned_geobr/read_metro_area.py,sha256=xvpdASJJamFN7qikPKjrytMKoNwIVRfRoR37lUsQo0g,1511
29
+ tunned_geobr/read_micro_region.py,sha256=n0oAeIdyZGo2tkoSYzcP8OfvAkwTRTkH4rMOnfKzgI4,2523
30
+ tunned_geobr/read_mining_processes.py,sha256=oVhCqhIQwLTXmaL6RRCmF-i81OLj-0Lb2k7SHiYgaf8,2576
31
+ tunned_geobr/read_municipal_seat.py,sha256=jiDn6jbULbl5KQlmpowqZTLW2P0z1H9P8zIzMcPETKk,1156
32
+ tunned_geobr/read_municipality.py,sha256=RrCg8MULpCgTwX3ngcIH0YpHcI0GR4CubF4AIgXdwqE,2607
33
+ tunned_geobr/read_neighborhood.py,sha256=rk7U9ZamkkdKrYcd18TNu0Pjhr7OQN4RY8BUVfa9Xcw,1098
34
+ tunned_geobr/read_pedology.py,sha256=yqzn6lEBS__6_SIUtneJzJVCc3l0XGs3vqO68ylDUSs,2790
35
+ tunned_geobr/read_pop_arrangements.py,sha256=NN4zvU4P4G6_3PDshQFpmYgRlcCqwPqD8DdKyw6a714,1399
36
+ tunned_geobr/read_region.py,sha256=osFZJI-DPndNABs3dqtZamcJGW5hNRx4CgQmU9Mo7cA,1118
37
+ tunned_geobr/read_schools.py,sha256=RZbb_glUfppPF2RxJwKIMhHB7BAZzfY2tnVkyIE9HuQ,1378
38
+ tunned_geobr/read_semiarid.py,sha256=9zNQepAnC1Sc89E-1YMd_QSFz5dH0Z-I7W7469-0jHo,1371
39
+ tunned_geobr/read_settlements.py,sha256=jh7hbXX63NBnG-q1meO4ed0NvBfLzdY17_PT7p-lbDs,3067
40
+ tunned_geobr/read_state.py,sha256=cM44s3hBOknwbOQxAP9J7SRWuSrA97YAW1GEw_G5bFE,2712
41
+ tunned_geobr/read_statistical_grid.py,sha256=2ZoT-kbuvVkKEoTIfIiAc2jLM789cWWE3E8-HA_S-VA,4493
42
+ tunned_geobr/read_urban_area.py,sha256=3n31dj-21glSA2qQI88ELz00-nRGcpNunBpPYb0h3RQ,1377
43
+ tunned_geobr/read_urban_concentrations.py,sha256=mPEZIDyyGUxriqhtDwD_cGbVaJNYX11_11Vj42s-cls,1441
44
+ tunned_geobr/read_weighting_area.py,sha256=ggXpUnNtXBGsXdBjhwuaCWgKReQjb1wNkFFtB2bkuNk,2338
45
+ tunned_geobr/utils.py,sha256=by8mOgQlpY6uXX2dtstTLdEgjMcI0lnLzQ8deA3DYus,8182
46
+ tunned_geobr-0.1.0.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: pdm-backend (2.4.3)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
@@ -0,0 +1,4 @@
1
+ [console_scripts]
2
+
3
+ [gui_scripts]
4
+
@@ -0,0 +1,2 @@
1
+ YEAR: 2020
2
+ COPYRIGHT HOLDER: Institute for Applied Economic Research (Ipea)