ngiab-data-preprocess 4.1.1.tar.gz → 4.2.1.tar.gz
This diff compares the contents of two publicly released versions of the package as they appear in their public registry and is provided for informational purposes only.
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/PKG-INFO +1 -1
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_processing/datasets.py +32 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_processing/gpkg_utils.py +24 -16
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_processing/graph_utils.py +8 -1
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_processing/subset.py +10 -20
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/ngiab_data_cli/__main__.py +5 -2
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/ngiab_data_preprocess.egg-info/PKG-INFO +1 -1
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/pyproject.toml +1 -1
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/.github/workflows/build_only.yml +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/.github/workflows/publish.yml +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/.gitignore +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/LICENSE +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/README.md +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/map.html +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_processing/create_realization.py +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_processing/dataset_utils.py +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_processing/file_paths.py +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_processing/forcings.py +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_processing/s3fs_utils.py +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_sources/cfe-nowpm-realization-template.json +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_sources/cfe-template.ini +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_sources/em-catchment-template.yml +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_sources/em-config.yml +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_sources/em-realization-template.json +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_sources/forcing_template.nc +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_sources/ngen-routing-template.yaml +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_sources/noah-owp-modular-init.namelist.input +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_sources/source_validation.py +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_sources/template.sql +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_sources/triggers.sql +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/map_app/__init__.py +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/map_app/__main__.py +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/map_app/static/css/console.css +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/map_app/static/css/main.css +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/map_app/static/css/toggle.css +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/map_app/static/js/console.js +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/map_app/static/js/data_processing.js +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/map_app/static/js/main.js +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/map_app/static/resources/dark-style.json +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/map_app/static/resources/light-style.json +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/map_app/static/resources/loading.gif +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/map_app/static/resources/screenshot.jpg +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/map_app/templates/index.html +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/map_app/views.py +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/ngiab_data_cli/arguments.py +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/ngiab_data_cli/custom_logging.py +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/ngiab_data_cli/forcing_cli.py +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/ngiab_data_preprocess.egg-info/SOURCES.txt +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/ngiab_data_preprocess.egg-info/dependency_links.txt +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/ngiab_data_preprocess.egg-info/entry_points.txt +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/ngiab_data_preprocess.egg-info/requires.txt +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/ngiab_data_preprocess.egg-info/top_level.txt +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/output/.gitkeep +0 -0
- {ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/setup.cfg +0 -0
{ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ngiab_data_preprocess
-Version: 4.1.1
+Version: 4.2.1
 Summary: Graphical Tools for creating Next Gen Water model input data.
 Author-email: Josh Cunningham <jcunningham8@ua.edu>
 Project-URL: Homepage, https://github.com/CIROH-UA/NGIAB_data_preprocess
{ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_processing/datasets.py
RENAMED
@@ -85,3 +85,35 @@ def load_aorc_zarr(start_year: int = None, end_year: int = None) -> xr.Dataset:
 
     validate_dataset_format(dataset)
     return dataset
+
+
+def load_swe_zarr() -> xr.Dataset:
+    """Load the swe zarr dataset from S3."""
+    s3_urls = [
+        f"s3://noaa-nwm-retrospective-3-0-pds/CONUS/zarr/ldasout.zarr"
+    ]
+    # default cache is readahead which is detrimental to performance in this case
+    fs = S3ParallelFileSystem(anon=True, default_cache_type="none")  # default_block_size
+    s3_stores = [s3fs.S3Map(url, s3=fs) for url in s3_urls]
+    # the cache option here just holds accessed data in memory to prevent s3 being queried multiple times
+    # most of the data is read once and written to disk but some of the coordinate data is read multiple times
+    dataset = xr.open_mfdataset(s3_stores, parallel=True, engine="zarr", cache=True)
+
+    # set the crs attribute to conform with the format
+    esri_pe_string = dataset.crs.esri_pe_string
+    dataset = dataset.drop_vars(["crs"])
+    dataset.attrs["crs"] = esri_pe_string
+    # drop everything except SNEQV
+    vars_to_drop = list(dataset.data_vars)
+    vars_to_drop.remove('SNEQV')
+    dataset = dataset.drop_vars(vars_to_drop)
+    dataset.attrs["name"] = "v3_swe_zarr"
+
+    # rename the data vars to work with ngen
+    variables = {
+        "SNEQV": "swe"
+    }
+    dataset = dataset.rename_vars(variables)
+
+    validate_dataset_format(dataset)
+    return dataset
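For orientation, the new load_swe_zarr() loader is called the same way as the existing loaders in this module. A minimal usage sketch; the import path is an assumption based on the repository layout and is not part of the diff:

    # Hypothetical usage; load_swe_zarr() comes from the diff above, the module path is an assumption.
    from data_processing.datasets import load_swe_zarr

    swe = load_swe_zarr()            # lazily opens the NWM retrospective zarr store on S3
    print(swe.attrs["name"])         # "v3_swe_zarr"
    print(list(swe.data_vars))       # ["swe"], renamed from SNEQV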
{ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_processing/gpkg_utils.py
RENAMED
@@ -69,7 +69,7 @@ def verify_indices(gpkg: str = file_paths.conus_hydrofabric) -> None:
     con.close()
 
 
-def create_empty_gpkg(gpkg: str) -> None:
+def create_empty_gpkg(gpkg: Path) -> None:
     """
     Create an empty geopackage with the necessary tables and indices.
     """
@@ -80,7 +80,7 @@ def create_empty_gpkg(gpkg: str) -> None:
     conn.executescript(sql_script)
 
 
-def add_triggers_to_gpkg(gpkg: str) -> None:
+def add_triggers_to_gpkg(gpkg: Path) -> None:
     """
     Adds geopackage triggers required to maintain spatial index integrity
     """
@@ -256,7 +256,7 @@ def insert_data(con: sqlite3.Connection, table: str, contents: List[Tuple]) -> N
     con.commit()
 
 
-def update_geopackage_metadata(gpkg: str) -> None:
+def update_geopackage_metadata(gpkg: Path) -> None:
     """
     Update the contents of the gpkg_contents table in the specified geopackage.
     """
@@ -290,6 +290,7 @@ def update_geopackage_metadata(gpkg: str) -> None:
 
     con.close()
 
+
 def subset_table_by_vpu(table: str, vpu: str, hydrofabric: Path, subset_gpkg_name: Path) -> None:
     """
     Subset the specified table from the hydrofabric database by vpuid and save it to the subset geopackage.
@@ -306,9 +307,9 @@ def subset_table_by_vpu(table: str, vpu: str, hydrofabric: Path, subset_gpkg_nam
     dest_db = sqlite3.connect(subset_gpkg_name)
 
     if vpu == "03":
-        vpus = ["03N","03S","03W"]
+        vpus = ["03N", "03S", "03W"]
     elif vpu == "10":
-        vpus = ["10L","10U"]
+        vpus = ["10L", "10U"]
     else:
         vpus = [vpu]
 
@@ -318,10 +319,10 @@ def subset_table_by_vpu(table: str, vpu: str, hydrofabric: Path, subset_gpkg_nam
     contents = source_db.execute(sql_query).fetchall()
 
     if table == "network":
-        # Look for the network entry that has a toid not in the flowpath or nexus tables
+        # Look for the network entry that has a toid not in the flowpath or nexus tables
         network_toids = [x[2] for x in contents]
         print(f"Network toids: {len(network_toids)}")
-        sql = "SELECT id FROM flowpaths"
+        sql = "SELECT id FROM flowpaths"
         flowpath_ids = [x[0] for x in dest_db.execute(sql).fetchall()]
         print(f"Flowpath ids: {len(flowpath_ids)}")
         sql = "SELECT id FROM nexus"
@@ -335,15 +336,14 @@ def subset_table_by_vpu(table: str, vpu: str, hydrofabric: Path, subset_gpkg_nam
 
     insert_data(dest_db, table, contents)
 
-
     if table in get_feature_tables(file_paths.conus_hydrofabric):
         fids = [str(x[0]) for x in contents]
         copy_rTree_tables(table, fids, source_db, dest_db)
 
     dest_db.commit()
     source_db.close()
-    dest_db.close()
-
+    dest_db.close()
+
 
 def subset_table(table: str, ids: List[str], hydrofabric: Path, subset_gpkg_name: Path) -> None:
     """
@@ -359,7 +359,7 @@ def subset_table(table: str, ids: List[str], hydrofabric: Path, subset_gpkg_name
     source_db = sqlite3.connect(f"file:{hydrofabric}?mode=ro", uri=True)
     dest_db = sqlite3.connect(subset_gpkg_name)
 
-    table_keys = {"
+    table_keys = {"divide-attributes": "divide_id", "lakes": "poi_id"}
 
     if table == "lakes":
         # lakes subset we get from the pois table which was already subset by water body id
@@ -372,12 +372,20 @@ def subset_table(table: str, ids: List[str], hydrofabric: Path, subset_gpkg_name
         sql_query = "SELECT divide_id FROM 'divides'"
         contents = dest_db.execute(sql_query).fetchall()
         ids = [str(x[0]) for x in contents]
+
+    if table == "nexus":
+        # add the nexuses in the toid column from the flowpaths table
+        sql_query = "SELECT toid FROM 'flowpaths'"
+        contents = dest_db.execute(sql_query).fetchall()
+        new_ids = [str(x[0]) for x in contents]
+        ids.extend(new_ids)
+
     ids = [f"'{x}'" for x in ids]
     key_name = "id"
     if table in table_keys:
         key_name = table_keys[table]
     sql_query = f"SELECT * FROM '{table}' WHERE {key_name} IN ({','.join(ids)})"
-    contents = source_db.execute(sql_query).fetchall()
+    contents = source_db.execute(sql_query).fetchall()
 
     insert_data(dest_db, table, contents)
 
@@ -409,6 +417,7 @@ def get_table_crs_short(gpkg: str, table: str) -> str:
     crs = con.execute(sql_query).fetchone()[0]
     return crs
 
+
 def get_table_crs(gpkg: str, table: str) -> str:
     """
     Get the CRS of the specified table in the specified geopackage.
@@ -429,16 +438,16 @@
 
 def get_cat_from_gage_id(gage_id: str, gpkg: Path = file_paths.conus_hydrofabric) -> str:
     """
-    Get the
+    Get the catchment id associated with a gage id.
 
     Args:
         gage_id (str): The gage ID.
 
     Returns:
-        str: The
+        str: The catchment id of the watershed containing the gage ID.
 
     Raises:
-        IndexError: If
+        IndexError: If catchment is found for the given gage ID.
 
     """
     gage_id = "".join([x for x in gage_id if x.isdigit()])
@@ -510,7 +519,6 @@ def get_available_tables(gpkg: Path) -> List[str]:
 
 
 def get_cat_to_nhd_feature_id(gpkg: Path = file_paths.conus_hydrofabric) -> dict:
-
     available_tables = get_available_tables(gpkg)
     possible_tables = ["flowpath_edge_list", "network"]
 
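The practical effect of the new nexus branch in subset_table() is that any nexus referenced by an already-copied flowpath (via its toid column) is added to the id list before querying the source hydrofabric. A simplified sketch of just that step, using the table and column names from the diff; the helper name is invented for illustration:

    import sqlite3
    from typing import List

    def gather_nexus_ids(dest_db: sqlite3.Connection, ids: List[str]) -> List[str]:
        # Mirrors the new 'if table == "nexus"' block: every toid already present
        # in the subset's flowpaths table is treated as a nexus to include.
        rows = dest_db.execute("SELECT toid FROM 'flowpaths'").fetchall()
        return ids + [str(row[0]) for row in rows]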
{ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_processing/graph_utils.py
RENAMED
@@ -165,6 +165,10 @@ def get_upstream_cats(names: Union[str, List[str]]) -> Set[str]:
         if name in parent_ids:
             continue
         try:
+            if "cat" in name:
+                node_index = graph.vs.find(cat=name).index
+            else:
+                node_index = graph.vs.find(name=name).index
             node_index = graph.vs.find(cat=name).index
             upstream_nodes = graph.subcomponent(node_index, mode="IN")
             for node in upstream_nodes:
@@ -205,7 +209,10 @@ def get_upstream_ids(names: Union[str, List[str]], include_outlet: bool = True)
         if name in parent_ids:
             continue
         try:
-
+            if "cat" in name:
+                node_index = graph.vs.find(cat=name).index
+            else:
+                node_index = graph.vs.find(name=name).index
             upstream_nodes = graph.subcomponent(node_index, mode="IN")
             for node in upstream_nodes:
                 parent_ids.add(graph.vs[node]["name"])
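The diff adds the same id-based branch to both traversal helpers (note that in get_upstream_cats the original cat-only lookup line is retained immediately after the new branch). A standalone sketch of the branching lookup itself, assuming an igraph graph whose vertices carry a name attribute and, for catchments, a cat attribute, as the diff implies:

    import igraph as ig

    def find_node_index(graph: ig.Graph, name: str) -> int:
        # Catchment ids (containing "cat") are matched on the "cat" vertex attribute;
        # anything else (e.g. nexus ids) is matched on the vertex name.
        if "cat" in name:
            return graph.vs.find(cat=name).index
        return graph.vs.find(name=name).index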
{ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/data_processing/subset.py
RENAMED
@@ -22,7 +22,7 @@ subset_tables = [
     "flowpath-attributes-ml",
     "flowpaths",
     "hydrolocations",
-    "nexus",
+    "nexus",  # depends on flowpaths in some cases e.g. gage delineation
     "pois",  # requires flowpaths
     "lakes",  # requires pois
     "network",
@@ -31,7 +31,7 @@ subset_tables = [
 
 def create_subset_gpkg(
     ids: Union[List[str], str], hydrofabric: Path, output_gpkg_path: Path, is_vpu: bool = False
-)
+):
     # ids is a list of nexus and wb ids, or a single vpu id
     if not isinstance(ids, list):
         ids = [ids]
@@ -52,8 +52,9 @@ def create_subset_gpkg(
     update_geopackage_metadata(output_gpkg_path)
 
 
-def subset_vpu(vpu_id: str, output_gpkg_path: Path, hydrofabric: Path = file_paths.conus_hydrofabric):
-
+def subset_vpu(
+    vpu_id: str, output_gpkg_path: Path, hydrofabric: Path = file_paths.conus_hydrofabric
+):
     if output_gpkg_path.exists():
         os.remove(output_gpkg_path)
 
@@ -65,10 +66,11 @@ def subset_vpu(vpu_id: str, output_gpkg_path: Path, hydrofabric: Path = file_pat
 def subset(
     cat_ids: List[str],
     hydrofabric: Path = file_paths.conus_hydrofabric,
-    output_gpkg_path: Path =
-
-
-
+    output_gpkg_path: Path = Path(),
+    include_outlet: bool = True,
+):
+    print(cat_ids)
+    upstream_ids = list(get_upstream_ids(cat_ids, include_outlet))
 
     if not output_gpkg_path:
         # if the name isn't provided, use the first upstream id
@@ -80,15 +82,3 @@ def subset(
     create_subset_gpkg(upstream_ids, hydrofabric, output_gpkg_path)
     logger.info(f"Subset complete for {len(upstream_ids)} features (catchments + nexuses)")
     logger.debug(f"Subset complete for {upstream_ids} catchments")
-
-
-def move_files_to_config_dir(subset_output_dir: str) -> None:
-    config_dir = subset_output_dir / "config"
-    config_dir.mkdir(parents=True, exist_ok=True)
-
-    files = [x for x in subset_output_dir.iterdir()]
-    for file in files:
-        if file.suffix in [".csv", ".json", ".geojson"]:
-            if "partitions" in file.name:
-                continue
-            os.system(f"mv {file} {config_dir}")
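With this change, subset() exposes an include_outlet flag alongside the existing parameters. A hedged usage sketch; the import path and the catchment id are placeholders, not taken from the diff:

    from pathlib import Path
    from data_processing.subset import subset

    # "cat-12345" is a placeholder id for illustration only.
    subset(
        ["cat-12345"],
        output_gpkg_path=Path("my_subset.gpkg"),
        include_outlet=False,  # the CLI change further below passes False when a gage is given
    )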
{ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/ngiab_data_cli/__main__.py
RENAMED
@@ -147,7 +147,10 @@ def main() -> None:
         logging.info("Subsetting complete.")
     else:
         logging.info(f"Subsetting hydrofabric")
-
+        include_outlet = True
+        if args.gage:
+            include_outlet = False
+        subset(feature_to_subset, output_gpkg_path=paths.geopackage_path, include_outlet=include_outlet)
         logging.info("Subsetting complete.")
 
     if args.forcings:
@@ -237,7 +240,7 @@ def main() -> None:
 
     if args.vis:
        try:
-            command = f'docker run --rm -it -p 3000:3000 -v "{str(paths.subset_dir)}:/ngen/ngen/data/" joshcu/ngiab_grafana:v0.2.
+            command = f'docker run --rm -it -p 3000:3000 -v "{str(paths.subset_dir)}:/ngen/ngen/data/" joshcu/ngiab_grafana:v0.2.1'
             subprocess.run(command, shell=True)
         except:
             logging.error("Failed to launch docker container.")
{ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/modules/ngiab_data_preprocess.egg-info/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ngiab_data_preprocess
-Version: 4.1.1
+Version: 4.2.1
 Summary: Graphical Tools for creating Next Gen Water model input data.
 Author-email: Josh Cunningham <jcunningham8@ua.edu>
 Project-URL: Homepage, https://github.com/CIROH-UA/NGIAB_data_preprocess

{ngiab_data_preprocess-4.1.1 → ngiab_data_preprocess-4.2.1}/pyproject.toml
RENAMED
@@ -12,7 +12,7 @@ exclude = ["tests*"]
 
 [project]
 name = "ngiab_data_preprocess"
-version = "v4.1.1"
+version = "v4.2.1"
 authors = [{ name = "Josh Cunningham", email = "jcunningham8@ua.edu" }]
 description = "Graphical Tools for creating Next Gen Water model input data."
 readme = "README.md"
All remaining files (those listed above with +0 -0) are unchanged between 4.1.1 and 4.2.1; only their versioned directory prefix was renamed.