ngiab-data-preprocess 4.0.5.tar.gz → 4.1.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/PKG-INFO +48 -14
  2. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/README.md +41 -8
  3. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_processing/create_realization.py +23 -3
  4. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_processing/gpkg_utils.py +19 -1
  5. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_processing/subset.py +8 -3
  6. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/ngiab_data_cli/__main__.py +9 -2
  7. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/ngiab_data_preprocess.egg-info/PKG-INFO +48 -14
  8. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/ngiab_data_preprocess.egg-info/requires.txt +5 -4
  9. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/pyproject.toml +8 -7
  10. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/.github/workflows/build_only.yml +0 -0
  11. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/.github/workflows/publish.yml +0 -0
  12. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/.gitignore +0 -0
  13. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/LICENSE +0 -0
  14. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/map.html +0 -0
  15. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_processing/dataset_utils.py +0 -0
  16. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_processing/datasets.py +0 -0
  17. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_processing/file_paths.py +0 -0
  18. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_processing/forcings.py +0 -0
  19. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_processing/graph_utils.py +0 -0
  20. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_processing/s3fs_utils.py +0 -0
  21. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_sources/cfe-nowpm-realization-template.json +0 -0
  22. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_sources/cfe-template.ini +0 -0
  23. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_sources/em-catchment-template.yml +0 -0
  24. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_sources/em-config.yml +0 -0
  25. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_sources/em-realization-template.json +0 -0
  26. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_sources/forcing_template.nc +0 -0
  27. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_sources/ngen-routing-template.yaml +0 -0
  28. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_sources/noah-owp-modular-init.namelist.input +0 -0
  29. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_sources/source_validation.py +0 -0
  30. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_sources/template.sql +0 -0
  31. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_sources/triggers.sql +0 -0
  32. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/map_app/__init__.py +0 -0
  33. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/map_app/__main__.py +0 -0
  34. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/map_app/static/css/console.css +0 -0
  35. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/map_app/static/css/main.css +0 -0
  36. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/map_app/static/css/toggle.css +0 -0
  37. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/map_app/static/js/console.js +0 -0
  38. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/map_app/static/js/data_processing.js +0 -0
  39. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/map_app/static/js/main.js +0 -0
  40. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/map_app/static/resources/dark-style.json +0 -0
  41. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/map_app/static/resources/light-style.json +0 -0
  42. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/map_app/static/resources/loading.gif +0 -0
  43. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/map_app/static/resources/screenshot.jpg +0 -0
  44. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/map_app/templates/index.html +0 -0
  45. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/map_app/views.py +0 -0
  46. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/ngiab_data_cli/arguments.py +0 -0
  47. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/ngiab_data_cli/custom_logging.py +0 -0
  48. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/ngiab_data_cli/forcing_cli.py +0 -0
  49. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/ngiab_data_preprocess.egg-info/SOURCES.txt +0 -0
  50. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/ngiab_data_preprocess.egg-info/dependency_links.txt +0 -0
  51. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/ngiab_data_preprocess.egg-info/entry_points.txt +0 -0
  52. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/ngiab_data_preprocess.egg-info/top_level.txt +0 -0
  53. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/output/.gitkeep +0 -0
  54. {ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/setup.cfg +0 -0
{ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ngiab_data_preprocess
- Version: 4.0.5
+ Version: 4.1.1
  Summary: Graphical Tools for creating Next Gen Water model input data.
  Author-email: Josh Cunningham <jcunningham8@ua.edu>
  Project-URL: Homepage, https://github.com/CIROH-UA/NGIAB_data_preprocess
@@ -8,11 +8,11 @@ Project-URL: Issues, https://github.com/CIROH-UA/NGIAB_data_preprocess/issues
  Classifier: Programming Language :: Python :: 3
  Classifier: License :: OSI Approved :: MIT License
  Classifier: Operating System :: OS Independent
- Requires-Python: <3.13,>=3.10
+ Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: pyogrio==0.7.2
- Requires-Dist: pyproj==3.6.1
+ Requires-Dist: pyogrio>=0.7.2
+ Requires-Dist: pyproj>=3.6.1
  Requires-Dist: Flask==3.0.2
  Requires-Dist: geopandas>=1.0.0
  Requires-Dist: requests==2.32.2
@@ -20,19 +20,20 @@ Requires-Dist: igraph==0.11.4
  Requires-Dist: s3fs==2024.3.1
  Requires-Dist: xarray==2024.2.0
  Requires-Dist: zarr==2.17.1
- Requires-Dist: netCDF4==1.6.5
+ Requires-Dist: netCDF4>=1.6.5
  Requires-Dist: dask==2024.4.1
  Requires-Dist: dask[distributed]==2024.4.1
  Requires-Dist: black==24.3.0
  Requires-Dist: isort==5.13.2
  Requires-Dist: h5netcdf==1.3.0
  Requires-Dist: exactextract==0.2.0
- Requires-Dist: numpy==1.26.4
+ Requires-Dist: numpy>=1.26.4
  Requires-Dist: tqdm==4.66.4
  Requires-Dist: rich==13.7.1
  Requires-Dist: colorama==0.4.6
  Requires-Dist: bokeh==3.5.1
  Requires-Dist: boto3
+ Requires-Dist: numcodecs<0.16.0
  Provides-Extra: eval
  Requires-Dist: ngiab_eval; extra == "eval"
  Provides-Extra: plot
@@ -85,6 +86,41 @@ For automatic interactive visualisation, please run [NGIAB](https://github.com/C
  * This tool is officially supported on macOS or Ubuntu (tested on 22.04 & 24.04). To use it on Windows, please install [WSL](https://learn.microsoft.com/en-us/windows/wsl/install).

  ## Installation and Running
+ It is highly recommended to use [Astral UV](https://docs.astral.sh/uv/) to install and run this tool. It works similarly to pip and conda, and I would also recommend you use it for other python projects as it is so useful.
+
+ ```bash
+ # Install UV
+ curl -LsSf https://astral.sh/uv/install.sh | sh
+ # It can be installed via pip if that fails
+ # pip install uv
+
+ # Create a virtual environment in the current directory
+ uv venv
+
+ # Install the tool in the virtual environment
+ uv pip install ngiab_data_preprocess
+
+ # To run the cli
+ uv run cli --help
+
+ # To run the map
+ uv run map_app
+ ```
+
+ UV automatically detects any virtual environments in the current directory and will use them when you use `uv run`.
+
+ ### Running without install
+ This package supports pipx and uvx which means you can run the tool without installing it. No virtual environment needed, just UV.
+ ```bash
+ # run this from anywhere
+ uvx --from ngiab_data_preprocess cli --help
+ # for the map
+ uvx --from ngiab_data_preprocess map_app
+ ```
+
+ ## For legacy pip installation
+ <details>
+ <summary>Click here to expand</summary>

  ```bash
  # If you're installing this on jupyterhub / 2i2c you HAVE TO DEACTIVATE THE CONDA ENV
@@ -102,8 +138,7 @@ pip install 'ngiab_data_preprocess'
  python -m map_app
  # CLI instructions at the bottom of the README
  ```
-
- The first time you run this command, it will download the hydrofabric from Lynker Spatial. If you already have it, place `conus_nextgen.gpkg` into `~/.ngiab/hydrofabric/v2.2/`.
+ </details>

  ## Development Installation

@@ -117,24 +152,23 @@ To install and run the tool, follow these steps:
  git clone https://github.com/CIROH-UA/NGIAB_data_preprocess
  cd NGIAB_data_preprocess
  ```
- 2. Create a virtual environment and activate it:
+ 2. Create a virtual environment:
  ```bash
- python3 -m venv env
- source env/bin/activate
+ uv venv
  ```
  3. Install the tool:
  ```bash
- pip install -e .
+ uv pip install -e .
  ```
  4. Run the map app:
  ```bash
- python -m map_app
+ uv run map_app
  ```
  </details>

  ## Usage

- Running the command `python -m map_app` will open the app in a new browser tab.
+ Running the command `uv run map_app` will open the app in a new browser tab.

  To use the tool:
  1. Select the catchment you're interested in on the map.
{ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/README.md

@@ -44,6 +44,41 @@ For automatic interactive visualisation, please run [NGIAB](https://github.com/C
  * This tool is officially supported on macOS or Ubuntu (tested on 22.04 & 24.04). To use it on Windows, please install [WSL](https://learn.microsoft.com/en-us/windows/wsl/install).

  ## Installation and Running
+ It is highly recommended to use [Astral UV](https://docs.astral.sh/uv/) to install and run this tool. It works similarly to pip and conda, and I would also recommend you use it for other python projects as it is so useful.
+
+ ```bash
+ # Install UV
+ curl -LsSf https://astral.sh/uv/install.sh | sh
+ # It can be installed via pip if that fails
+ # pip install uv
+
+ # Create a virtual environment in the current directory
+ uv venv
+
+ # Install the tool in the virtual environment
+ uv pip install ngiab_data_preprocess
+
+ # To run the cli
+ uv run cli --help
+
+ # To run the map
+ uv run map_app
+ ```
+
+ UV automatically detects any virtual environments in the current directory and will use them when you use `uv run`.
+
+ ### Running without install
+ This package supports pipx and uvx which means you can run the tool without installing it. No virtual environment needed, just UV.
+ ```bash
+ # run this from anywhere
+ uvx --from ngiab_data_preprocess cli --help
+ # for the map
+ uvx --from ngiab_data_preprocess map_app
+ ```
+
+ ## For legacy pip installation
+ <details>
+ <summary>Click here to expand</summary>

  ```bash
  # If you're installing this on jupyterhub / 2i2c you HAVE TO DEACTIVATE THE CONDA ENV
@@ -61,8 +96,7 @@ pip install 'ngiab_data_preprocess'
  python -m map_app
  # CLI instructions at the bottom of the README
  ```
-
- The first time you run this command, it will download the hydrofabric from Lynker Spatial. If you already have it, place `conus_nextgen.gpkg` into `~/.ngiab/hydrofabric/v2.2/`.
+ </details>

  ## Development Installation

@@ -76,24 +110,23 @@ To install and run the tool, follow these steps:
  git clone https://github.com/CIROH-UA/NGIAB_data_preprocess
  cd NGIAB_data_preprocess
  ```
- 2. Create a virtual environment and activate it:
+ 2. Create a virtual environment:
  ```bash
- python3 -m venv env
- source env/bin/activate
+ uv venv
  ```
  3. Install the tool:
  ```bash
- pip install -e .
+ uv pip install -e .
  ```
  4. Run the map app:
  ```bash
- python -m map_app
+ uv run map_app
  ```
  </details>

  ## Usage

- Running the command `python -m map_app` will open the app in a new browser tab.
+ Running the command `uv run map_app` will open the app in a new browser tab.

  To use the tool:
  1. Select the catchment you're interested in on the map.
{ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_processing/create_realization.py

@@ -4,6 +4,7 @@ import sqlite3
  from datetime import datetime
  from pathlib import Path
  import shutil
+ import requests

  import pandas
  import s3fs
@@ -309,18 +310,37 @@ def create_em_realization(cat_id: str, start_time: datetime, end_time: datetime)
      create_partitions(paths)


- def create_realization(cat_id: str, start_time: datetime, end_time: datetime, use_nwm_gw: bool = False):
+ def create_realization(
+     cat_id: str,
+     start_time: datetime,
+     end_time: datetime,
+     use_nwm_gw: bool = False,
+     gage_id: str = None,
+ ):
      paths = file_paths(cat_id)

-     # get approximate groundwater levels from nwm output
      template_path = paths.template_cfe_nowpm_realization_config
-
+
+     if gage_id is not None:
+         # try and download s3:communityhydrofabric/hydrofabrics/community/gage_parameters/gage_id
+         # if it doesn't exist, use the default
+         try:
+             url = f"https://communityhydrofabric.s3.us-east-1.amazonaws.com/hydrofabrics/community/gage_parameters/{gage_id}.json"
+
+             new_template = requests.get(url).json()
+             template_path = paths.config_dir / "calibrated_params.json"
+             with open(template_path, "w") as f:
+                 json.dump(new_template, f)
+         except Exception as e:
+             logger.warning(f"Failed to download gage parameters")
+
      conf_df = get_model_attributes(paths.geopackage_path)

      if use_nwm_gw:
          gw_levels = get_approximate_gw_storage(paths, start_time)
      else:
          gw_levels = dict()
+
      make_cfe_config(conf_df, paths, gw_levels)

      make_noahowp_config(paths.config_dir, conf_df, start_time, end_time)
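The gage-aware branch added above boils down to a fetch-with-fallback step: try to pull calibrated parameters for the gage from the community hydrofabric S3 bucket, and fall back to the packaged realization template if nothing is published for that gage. A minimal standalone sketch of that pattern with a hypothetical helper; `default_template` and `config_dir` stand in for the package's `file_paths` attributes:

```python
import json
import logging
from pathlib import Path

import requests

logger = logging.getLogger(__name__)

# URL pattern taken from the diff above
GAGE_PARAMS_URL = (
    "https://communityhydrofabric.s3.us-east-1.amazonaws.com/"
    "hydrofabrics/community/gage_parameters/{gage_id}.json"
)


def resolve_realization_template(
    gage_id: str | None, default_template: Path, config_dir: Path
) -> Path:
    """Return the realization template to use, preferring calibrated per-gage parameters."""
    if gage_id is None:
        return default_template
    try:
        response = requests.get(GAGE_PARAMS_URL.format(gage_id=gage_id), timeout=30)
        response.raise_for_status()
        calibrated = config_dir / "calibrated_params.json"
        calibrated.write_text(json.dumps(response.json()))
        return calibrated
    except Exception:
        # Mirror the package's behaviour: warn and keep the default template
        logger.warning("Failed to download gage parameters for %s; using default template", gage_id)
        return default_template
```

Raising for HTTP errors before parsing is a small deviation from the hunk above; it makes a missing gage file fall through to the default template rather than being treated as valid JSON.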
{ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_processing/gpkg_utils.py

@@ -317,15 +317,33 @@ def subset_table_by_vpu(table: str, vpu: str, hydrofabric: Path, subset_gpkg_nam
      sql_query = f"SELECT * FROM '{table}' WHERE vpuid IN ({','.join(vpus)})"
      contents = source_db.execute(sql_query).fetchall()

+     if table == "network":
+         # Look for the network entry that has a toid not in the flowpath or nexus tables
+         network_toids = [x[2] for x in contents]
+         print(f"Network toids: {len(network_toids)}")
+         sql = "SELECT id FROM flowpaths"
+         flowpath_ids = [x[0] for x in dest_db.execute(sql).fetchall()]
+         print(f"Flowpath ids: {len(flowpath_ids)}")
+         sql = "SELECT id FROM nexus"
+         nexus_ids = [x[0] for x in dest_db.execute(sql).fetchall()]
+         print(f"Nexus ids: {len(nexus_ids)}")
+         bad_ids = set(network_toids) - set(flowpath_ids + nexus_ids)
+         print(bad_ids)
+         print(f"Removing {len(bad_ids)} network entries that are not in flowpaths or nexuses")
+         # id column is second after fid
+         contents = [x for x in contents if x[1] not in bad_ids]
+
      insert_data(dest_db, table, contents)

+
      if table in get_feature_tables(file_paths.conus_hydrofabric):
          fids = [str(x[0]) for x in contents]
          copy_rTree_tables(table, fids, source_db, dest_db)

      dest_db.commit()
      source_db.close()
-     dest_db.close()
+     dest_db.close()
+

  def subset_table(table: str, ids: List[str], hydrofabric: Path, subset_gpkg_name: Path) -> None:
      """
{ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/data_processing/subset.py

@@ -22,13 +22,16 @@ subset_tables = [
      "flowpath-attributes-ml",
      "flowpaths",
      "hydrolocations",
-     "network",
      "nexus",
      "pois", # requires flowpaths
      "lakes", # requires pois
+     "network",
  ]

- def create_subset_gpkg(ids: Union[List[str],str], hydrofabric: Path, output_gpkg_path: Path, is_vpu: bool = False) -> Path:
+
+ def create_subset_gpkg(
+     ids: Union[List[str], str], hydrofabric: Path, output_gpkg_path: Path, is_vpu: bool = False
+ ) -> Path:
      # ids is a list of nexus and wb ids, or a single vpu id
      if not isinstance(ids, list):
          ids = [ids]
@@ -42,12 +45,13 @@ def create_subset_gpkg(ids: Union[List[str],str], hydrofabric: Path, output_gpkg
      for table in subset_tables:
          if is_vpu:
              subset_table_by_vpu(table, ids[0], hydrofabric, output_gpkg_path)
-         else:
+         else:
              subset_table(table, ids, hydrofabric, output_gpkg_path)

      add_triggers_to_gpkg(output_gpkg_path)
      update_geopackage_metadata(output_gpkg_path)

+
  def subset_vpu(vpu_id: str, output_gpkg_path: Path, hydrofabric: Path = file_paths.conus_hydrofabric):

      if output_gpkg_path.exists():
@@ -77,6 +81,7 @@ def subset(
      logger.info(f"Subset complete for {len(upstream_ids)} features (catchments + nexuses)")
      logger.debug(f"Subset complete for {upstream_ids} catchments")

+
  def move_files_to_config_dir(subset_output_dir: str) -> None:
      config_dir = subset_output_dir / "config"
      config_dir.mkdir(parents=True, exist_ok=True)
{ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/ngiab_data_cli/__main__.py

@@ -169,13 +169,20 @@ def main() -> None:

      if args.realization:
          logging.info(f"Creating realization from {args.start_date} to {args.end_date}...")
+         gage_id = None
+         if args.gage:
+             gage_id = args.input_feature
          if args.empirical_model:
              create_em_realization(
                  output_folder, start_time=args.start_date, end_time=args.end_date
              )
          else:
              create_realization(
-                 output_folder, start_time=args.start_date, end_time=args.end_date, use_nwm_gw=args.nwm_gw
+                 output_folder,
+                 start_time=args.start_date,
+                 end_time=args.end_date,
+                 use_nwm_gw=args.nwm_gw,
+                 gage_id=gage_id,
              )
          logging.info("Realization creation complete.")

@@ -198,7 +205,7 @@ def main() -> None:
          except:
              logging.error("Docker is not running, please start Docker and try again.")
          try:
-             #command = f'docker run --rm -it -v "{str(paths.subset_dir)}:/ngen/ngen/data" joshcu/ngiab /ngen/ngen/data/ auto {num_partitions} local'
+             # command = f'docker run --rm -it -v "{str(paths.subset_dir)}:/ngen/ngen/data" joshcu/ngiab /ngen/ngen/data/ auto {num_partitions} local'
              command = f'docker run --rm -it -v "{str(paths.subset_dir)}:/ngen/ngen/data" awiciroh/ciroh-ngen-image:latest /ngen/ngen/data/ auto {num_partitions} local'
              subprocess.run(command, shell=True)
              logging.info("Next Gen run complete.")
{ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/ngiab_data_preprocess.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ngiab_data_preprocess
- Version: 4.0.5
+ Version: 4.1.1
  Summary: Graphical Tools for creating Next Gen Water model input data.
  Author-email: Josh Cunningham <jcunningham8@ua.edu>
  Project-URL: Homepage, https://github.com/CIROH-UA/NGIAB_data_preprocess
@@ -8,11 +8,11 @@ Project-URL: Issues, https://github.com/CIROH-UA/NGIAB_data_preprocess/issues
  Classifier: Programming Language :: Python :: 3
  Classifier: License :: OSI Approved :: MIT License
  Classifier: Operating System :: OS Independent
- Requires-Python: <3.13,>=3.10
+ Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: pyogrio==0.7.2
- Requires-Dist: pyproj==3.6.1
+ Requires-Dist: pyogrio>=0.7.2
+ Requires-Dist: pyproj>=3.6.1
  Requires-Dist: Flask==3.0.2
  Requires-Dist: geopandas>=1.0.0
  Requires-Dist: requests==2.32.2
@@ -20,19 +20,20 @@ Requires-Dist: igraph==0.11.4
  Requires-Dist: s3fs==2024.3.1
  Requires-Dist: xarray==2024.2.0
  Requires-Dist: zarr==2.17.1
- Requires-Dist: netCDF4==1.6.5
+ Requires-Dist: netCDF4>=1.6.5
  Requires-Dist: dask==2024.4.1
  Requires-Dist: dask[distributed]==2024.4.1
  Requires-Dist: black==24.3.0
  Requires-Dist: isort==5.13.2
  Requires-Dist: h5netcdf==1.3.0
  Requires-Dist: exactextract==0.2.0
- Requires-Dist: numpy==1.26.4
+ Requires-Dist: numpy>=1.26.4
  Requires-Dist: tqdm==4.66.4
  Requires-Dist: rich==13.7.1
  Requires-Dist: colorama==0.4.6
  Requires-Dist: bokeh==3.5.1
  Requires-Dist: boto3
+ Requires-Dist: numcodecs<0.16.0
  Provides-Extra: eval
  Requires-Dist: ngiab_eval; extra == "eval"
  Provides-Extra: plot
@@ -85,6 +86,41 @@ For automatic interactive visualisation, please run [NGIAB](https://github.com/C
  * This tool is officially supported on macOS or Ubuntu (tested on 22.04 & 24.04). To use it on Windows, please install [WSL](https://learn.microsoft.com/en-us/windows/wsl/install).

  ## Installation and Running
+ It is highly recommended to use [Astral UV](https://docs.astral.sh/uv/) to install and run this tool. It works similarly to pip and conda, and I would also recommend you use it for other python projects as it is so useful.
+
+ ```bash
+ # Install UV
+ curl -LsSf https://astral.sh/uv/install.sh | sh
+ # It can be installed via pip if that fails
+ # pip install uv
+
+ # Create a virtual environment in the current directory
+ uv venv
+
+ # Install the tool in the virtual environment
+ uv pip install ngiab_data_preprocess
+
+ # To run the cli
+ uv run cli --help
+
+ # To run the map
+ uv run map_app
+ ```
+
+ UV automatically detects any virtual environments in the current directory and will use them when you use `uv run`.
+
+ ### Running without install
+ This package supports pipx and uvx which means you can run the tool without installing it. No virtual environment needed, just UV.
+ ```bash
+ # run this from anywhere
+ uvx --from ngiab_data_preprocess cli --help
+ # for the map
+ uvx --from ngiab_data_preprocess map_app
+ ```
+
+ ## For legacy pip installation
+ <details>
+ <summary>Click here to expand</summary>

  ```bash
  # If you're installing this on jupyterhub / 2i2c you HAVE TO DEACTIVATE THE CONDA ENV
@@ -102,8 +138,7 @@ pip install 'ngiab_data_preprocess'
  python -m map_app
  # CLI instructions at the bottom of the README
  ```
-
- The first time you run this command, it will download the hydrofabric from Lynker Spatial. If you already have it, place `conus_nextgen.gpkg` into `~/.ngiab/hydrofabric/v2.2/`.
+ </details>

  ## Development Installation

@@ -117,24 +152,23 @@ To install and run the tool, follow these steps:
  git clone https://github.com/CIROH-UA/NGIAB_data_preprocess
  cd NGIAB_data_preprocess
  ```
- 2. Create a virtual environment and activate it:
+ 2. Create a virtual environment:
  ```bash
- python3 -m venv env
- source env/bin/activate
+ uv venv
  ```
  3. Install the tool:
  ```bash
- pip install -e .
+ uv pip install -e .
  ```
  4. Run the map app:
  ```bash
- python -m map_app
+ uv run map_app
  ```
  </details>

  ## Usage

- Running the command `python -m map_app` will open the app in a new browser tab.
+ Running the command `uv run map_app` will open the app in a new browser tab.

  To use the tool:
  1. Select the catchment you're interested in on the map.
{ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/modules/ngiab_data_preprocess.egg-info/requires.txt

@@ -1,5 +1,5 @@
- pyogrio==0.7.2
- pyproj==3.6.1
+ pyogrio>=0.7.2
+ pyproj>=3.6.1
  Flask==3.0.2
  geopandas>=1.0.0
  requests==2.32.2
@@ -7,19 +7,20 @@ igraph==0.11.4
  s3fs==2024.3.1
  xarray==2024.2.0
  zarr==2.17.1
- netCDF4==1.6.5
+ netCDF4>=1.6.5
  dask==2024.4.1
  dask[distributed]==2024.4.1
  black==24.3.0
  isort==5.13.2
  h5netcdf==1.3.0
  exactextract==0.2.0
- numpy==1.26.4
+ numpy>=1.26.4
  tqdm==4.66.4
  rich==13.7.1
  colorama==0.4.6
  bokeh==3.5.1
  boto3
+ numcodecs<0.16.0

  [eval]
  ngiab_eval
{ngiab_data_preprocess-4.0.5 → ngiab_data_preprocess-4.1.1}/pyproject.toml

@@ -12,19 +12,19 @@ exclude = ["tests*"]

  [project]
  name = "ngiab_data_preprocess"
- version = "v4.0.5"
+ version = "v4.1.1"
  authors = [{ name = "Josh Cunningham", email = "jcunningham8@ua.edu" }]
  description = "Graphical Tools for creating Next Gen Water model input data."
  readme = "README.md"
- requires-python = ">=3.10,<3.13"
+ requires-python = ">=3.10"
  classifiers = [
      "Programming Language :: Python :: 3",
      "License :: OSI Approved :: MIT License",
      "Operating System :: OS Independent",
  ]
  dependencies = [
-     "pyogrio==0.7.2",
-     "pyproj==3.6.1",
+     "pyogrio>=0.7.2",
+     "pyproj>=3.6.1",
      "Flask==3.0.2",
      "geopandas>=1.0.0",
      "requests==2.32.2",
@@ -32,19 +32,20 @@ dependencies = [
      "s3fs==2024.3.1",
      "xarray==2024.2.0",
      "zarr==2.17.1",
-     "netCDF4==1.6.5",
+     "netCDF4>=1.6.5",
      "dask==2024.4.1",
      "dask[distributed]==2024.4.1",
      "black==24.3.0",
      "isort==5.13.2",
      "h5netcdf==1.3.0",
      "exactextract==0.2.0",
-     "numpy==1.26.4",
+     "numpy>=1.26.4",
      "tqdm==4.66.4",
      "rich==13.7.1",
      "colorama==0.4.6",
      "bokeh==3.5.1",
-     "boto3"
+     "boto3",
+     "numcodecs<0.16.0"
  ]

  [project.optional-dependencies]