ngiab-data-preprocess 4.6.7-py3-none-any.whl → 4.7.0-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
data_processing/create_realization.py CHANGED
@@ -20,6 +20,7 @@ from data_processing.gpkg_utils import (
     get_cat_to_nhd_feature_id,
     get_table_crs_short,
 )
+from data_sources.source_validation import download_dhbv_attributes
 from pyproj import Transformer
 from tqdm.rich import tqdm
 
@@ -112,19 +113,20 @@ def make_noahowp_config(
     )
 
 
-def get_model_attributes(hydrofabric: Path) -> pandas.DataFrame:
+def get_model_attributes(hydrofabric: Path, layer: str = "divides") -> pandas.DataFrame:
     with sqlite3.connect(hydrofabric) as conn:
         conf_df = pandas.read_sql_query(
             """
             SELECT
             d.areasqkm,
+            d.lengthkm,
             da.*
             FROM divides AS d
             JOIN 'divide-attributes' AS da ON d.divide_id = da.divide_id
             """,
             conn,
         )
-    source_crs = get_table_crs_short(hydrofabric, "divides")
+    source_crs = get_table_crs_short(hydrofabric, layer)
     transformer = Transformer.from_crs(source_crs, "EPSG:4326", always_xy=True)
     lon, lat = transformer.transform(conf_df["centroid_x"].values, conf_df["centroid_y"].values)
     conf_df["longitude"] = lon
@@ -172,6 +174,44 @@ def make_lstm_config(
     )
 
 
+def make_dhbv2_config(
+    hydrofabric: Path,
+    output_dir: Path,
+    start_time: datetime,
+    end_time: datetime,
+    template_path: Path = FilePaths.template_dhbv2_config,
+):
+    divide_conf_df = get_model_attributes(hydrofabric)
+    divide_ids = divide_conf_df["divide_id"].to_list()
+
+    download_dhbv_attributes()
+    dhbv_atts = pandas.read_parquet(FilePaths.dhbv_attributes)
+    atts_df = dhbv_atts.loc[dhbv_atts["divide_id"].isin(divide_ids)]
+
+    cat_config_dir = output_dir / "cat_config" / "dhbv2"
+    if cat_config_dir.exists():
+        shutil.rmtree(cat_config_dir)
+    cat_config_dir.mkdir(parents=True, exist_ok=True)
+
+    with open(template_path, "r") as file:
+        template = file.read()
+
+    for _, row in atts_df.iterrows():
+        divide = row["divide_id"]
+        divide_conf_df_row = divide_conf_df.loc[divide_conf_df["divide_id"] == divide]
+
+        with open(cat_config_dir / f"{divide}.yml", "w") as file:
+            file.write(
+                template.format(
+                    **row,
+                    catchsize=divide_conf_df_row["areasqkm"].values[0],
+                    lengthkm=divide_conf_df_row["lengthkm"].values[0],
+                    start_time=start_time,
+                    end_time=end_time,
+                )
+            )
+
+
 def configure_troute(
     cat_id: str, config_dir: Path, start_time: datetime, end_time: datetime
 ) -> None:
@@ -218,7 +258,10 @@ def configure_troute(
 
 
 def make_ngen_realization_json(
-    config_dir: Path, template_path: Path, start_time: datetime, end_time: datetime
+    config_dir: Path,
+    template_path: Path,
+    start_time: datetime,
+    end_time: datetime,
 ) -> None:
     with open(template_path, "r") as file:
         realization = json.load(file)
@@ -252,7 +295,20 @@ def create_lstm_realization(
     (paths.config_dir / "python_lstm_real.json").rename(realization_path)
 
     make_lstm_config(paths.geopackage_path, paths.config_dir)
-    # create some partitions for parallelization
+    paths.setup_run_folders()
+
+
+def create_dhbv2_realization(cat_id: str, start_time: datetime, end_time: datetime):
+    paths = FilePaths(cat_id)
+    configure_troute(cat_id, paths.config_dir, start_time, end_time)
+
+    make_ngen_realization_json(
+        paths.config_dir,
+        FilePaths.template_dhbv2_realization_config,
+        start_time,
+        end_time,
+    )
+    make_dhbv2_config(paths.geopackage_path, paths.config_dir, start_time, end_time)
     paths.setup_run_folders()
 
 
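The new create_dhbv2_realization mirrors the existing LSTM flow: configure_troute, then make_ngen_realization_json with the dHBV2 realization template, then one YAML per divide via make_dhbv2_config. A minimal usage sketch, assuming the first argument names an existing subset folder (the parameter is called cat_id but is passed straight to FilePaths as a folder name):

    from datetime import datetime

    from data_processing.create_realization import create_dhbv2_realization

    # "my-subset" is a hypothetical subset folder name, not from this diff
    create_dhbv2_realization(
        "my-subset",
        start_time=datetime(2022, 1, 1),
        end_time=datetime(2022, 12, 31),
    )
    # writes config/cat_config/dhbv2/<divide_id>.yml per catchment, plus the realization JSON
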
data_processing/file_paths.py CHANGED
@@ -22,6 +22,7 @@ class FilePaths:
     template_sql = data_sources / "template.sql"
     triggers_sql = data_sources / "triggers.sql"
     conus_hydrofabric = hydrofabric_dir / "conus_nextgen.gpkg"
+    dhbv_attributes = hydrofabric_dir / "dhbv_attrs.parquet"
     hydrofabric_graph = hydrofabric_dir / "conus_igraph_network.gpickle"
     template_nc = data_sources / "forcing_template.nc"
     dev_file = Path(__file__).parent.parent.parent / ".dev"
@@ -32,6 +33,8 @@ class FilePaths:
     template_noahowp_config = data_sources / "noah-owp-modular-init.namelist.input"
     template_cfe_config = data_sources / "cfe-template.ini"
     template_lstm_config = data_sources / "lstm-catchment-template.yml"
+    template_dhbv2_realization_config = data_sources / "dhbv2-realization-template.json"
+    template_dhbv2_config = data_sources / "dhbv2-catchment-template.yaml"
 
     def __init__(self, folder_name: Optional[str] = None, output_dir: Optional[Path] = None):
         """
data_processing/forcings.py CHANGED
@@ -117,6 +117,7 @@ def get_cell_weights(raster: xr.Dataset, gdf: gpd.GeoDataFrame, wkt: str) -> pd.
     )  # type: ignore
     return output.set_index("divide_id")
 
+
 def add_APCP_SURFACE_to_dataset(dataset: xr.Dataset) -> xr.Dataset:
     """Convert precipitation value to correct units."""
     # precip_rate is mm/s
@@ -311,6 +312,7 @@ def get_units(dataset: xr.Dataset) -> dict:
         units[var] = dataset[var].attrs["units"]
     return units
 
+
 def interpolate_nan_values(
     dataset: xr.Dataset,
     dim: str = "time",
@@ -383,9 +385,12 @@ def compute_zonal_stats(
     progress_file = FilePaths(output_dir=forcings_dir.parent).forcing_progress_file
     ex_var_name = list(gridded_data.data_vars)[0]
     example_time_chunks = get_index_chunks(gridded_data[ex_var_name])
-    all_steps = len(example_time_chunks) * len(gridded_data.data_vars)
+
+    data_vars = gridded_data.data_vars
+
+    all_steps = len(example_time_chunks) * len(data_vars)
     logger.info(
-        f"Total steps: {all_steps}, Number of time chunks: {len(example_time_chunks)}, Number of variables: {len(gridded_data.data_vars)}"
+        f"Total steps: {all_steps}, Number of time chunks: {len(example_time_chunks)}, Number of variables: {len(data_vars)}"
     )
     steps_completed = 0
     with open(progress_file, "w") as f:
@@ -405,10 +410,11 @@ def compute_zonal_stats(
 
     timer = time.perf_counter()
     variable_task = progress.add_task(
-        "[cyan]Processing variables...", total=len(gridded_data.data_vars), elapsed=0
+        "[cyan]Processing variables...", total=len(data_vars), elapsed=0
     )
     progress.start()
-    for data_var_name in list(gridded_data.data_vars):
+
+    for data_var_name in list(data_vars):
         data_var_name: str
         progress.update(variable_task, advance=1)
         progress.update(variable_task, description=f"Processing {data_var_name}")
@@ -524,6 +530,7 @@ def write_outputs(forcings_dir: Path, units: dict) -> None:
     # time is stored as unix timestamps, units have to be set
     # add the catchment ids as a 1d data var
     final_ds["ids"] = final_ds["catchment"].astype(str)
+
    # time needs to be a 2d array of the same time array as unix timestamps for every catchment
     with warnings.catch_warnings():
         warnings.simplefilter("ignore")
@@ -575,5 +582,7 @@ def create_forcings(dataset: xr.Dataset, output_folder_name: str) -> None:
     gdf = gpd.read_file(forcing_paths.geopackage_path, layer="divides")
     logger.debug(f"gdf bounds: {gdf.total_bounds}")
     gdf = gdf.to_crs(dataset.crs)
-    dataset = dataset.isel(y=slice(None, None, -1))  # Flip y-axis: source data has y ordered from top-to-bottom (as in image arrays), but geospatial operations expect y to increase from bottom-to-top (increasing latitude).
+    dataset = dataset.isel(
+        y=slice(None, None, -1)
+    )  # Flip y-axis: source data has y ordered from top-to-bottom (as in image arrays), but geospatial operations expect y to increase from bottom-to-top (increasing latitude).
     compute_zonal_stats(gdf, dataset, forcing_paths.forcings_dir)
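The y-axis flip at the end of create_forcings is easiest to see on a toy grid; a self-contained sketch (synthetic data, not the package's forcings):

    import numpy as np
    import xarray as xr

    ds = xr.Dataset(
        {"t2m": (("y", "x"), np.arange(6).reshape(3, 2))},
        coords={"y": [50.0, 45.0, 40.0], "x": [0.0, 1.0]},  # y decreasing, image-style
    )
    flipped = ds.isel(y=slice(None, None, -1))
    print(flipped.y.values)  # [40. 45. 50.]: y now increases, as the zonal stats expect
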
data_sources/dhbv2-catchment-template.yaml ADDED
@@ -0,0 +1,46 @@
+### Single-catchment configuration ###
+catchment_id: {divide_id}
+catchment_name: {divide_id}
+
+# Static catchment attributes
+aridity: {aridity}
+meanP: {meanP}
+ETPOT_Hargr: {ETPOT_Hargr}
+NDVI: {NDVI}
+FW: {FW}
+meanslope: {meanslope}
+SoilGrids1km_sand: {SoilGrids1km_sand}
+SoilGrids1km_clay: {SoilGrids1km_clay}
+SoilGrids1km_silt: {SoilGrids1km_silt}
+glaciers: {glaciers}
+HWSD_clay: {HWSD_clay}
+HWSD_gravel: {HWSD_gravel}
+HWSD_sand: {HWSD_sand}
+HWSD_silt: {HWSD_silt}
+meanelevation: {meanelevation}
+meanTa: {meanTa}
+permafrost: {permafrost}
+permeability: {permeability}
+seasonality_P: {seasonality_P}
+seasonality_PET: {seasonality_PET}
+snow_fraction: {snow_fraction}
+snowfall_fraction: {snowfall_fraction}
+T_clay: {T_clay}
+T_gravel: {T_gravel}
+T_sand: {T_sand}
+T_silt: {T_silt}
+Porosity: {Porosity}
+uparea: {uparea}
+catchsize: {catchsize}
+lengthkm: {lengthkm}
+
+model_dir: /ngen/ngen/extern/dhbv2/ngen_resources/data/dhbv_2_mts/model/dhbv_2_mts
+stepwise: True # True: stepwise inference, False: Single forward simulation on all data in one go
+initial_state: zero # zero: set initial states of the model to zero
+dtype: float32
+verbose: 1 # 0: no output, 1: print output
+
+# Simulation window
+time_step: 1 hour
+start_time: {start_time}
+end_time: {end_time}
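make_dhbv2_config renders this template with str.format, passing the dHBV attribute row as keyword arguments plus catchsize, lengthkm, start_time, and end_time. A toy rendering with a trimmed-down template (the attribute values are made up):

    template = "catchment_id: {divide_id}\naridity: {aridity}\ncatchsize: {catchsize}\n"
    row = {"divide_id": "cat-12345", "aridity": 1.27}  # illustrative values
    print(template.format(**row, catchsize=42.7))
    # catchment_id: cat-12345
    # aridity: 1.27
    # catchsize: 42.7
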
data_sources/dhbv2-realization-template.json ADDED
@@ -0,0 +1,56 @@
+{
+    "global": {
+        "formulations": [
+            {
+                "name": "bmi_multi",
+                "params": {
+                    "model_type_name": "dhbv2",
+                    "forcing_file": "",
+                    "init_config": "",
+                    "allow_exceed_end_time": true,
+                    "main_output_variable": "land_surface_water__runoff_volume_flux",
+                    "modules": [
+                        {
+                            "name": "bmi_python",
+                            "params": {
+                                "python_type": "dhbv2.mts_bmi.MtsDeltaModelBmi",
+                                "model_type_name": "DeltaModelBmi",
+                                "init_config": "./config/cat_config/dhbv2/{{id}}.yml",
+                                "allow_exceed_end_time": true,
+                                "main_output_variable": "land_surface_water__runoff_volume_flux",
+                                "fixed_time_step": false,
+                                "uses_forcing_file": false,
+                                "variables_names_map" : {
+                                    "atmosphere_water__liquid_equivalent_precipitation_rate":"precip_rate",
+                                    "land_surface_air__temperature":"TMP_2maboveground",
+                                    "atmosphere_air_water~vapor__relative_saturation":"SPFH_2maboveground",
+                                    "land_surface_radiation~incoming~longwave__energy_flux":"DLWRF_surface",
+                                    "land_surface_radiation~incoming~shortwave__energy_flux":"DSWRF_surface",
+                                    "land_surface_air__pressure":"PRES_surface",
+                                    "land_surface_wind__x_component_of_velocity":"UGRD_10maboveground",
+                                    "land_surface_wind__y_component_of_velocity":"VGRD_10maboveground",
+                                    "land_surface_water__runoff_volume_flux":"streamflow_cms"
+
+                                }
+
+                            }
+                        }
+                    ]
+                }
+            }
+        ],
+        "forcing": {
+            "path": "./forcings/forcings.nc",
+            "provider": "NetCDF"
+        }
+    },
+    "time": {
+        "start_time": "2000-01-01 00:00:00",
+        "end_time": "2005-12-30 23:00:00",
+        "output_interval": 3600
+    },
+    "routing": {
+        "t_route_config_file_with_path": "./config/troute.yaml"
+    },
+    "output_root": "./outputs/ngen"
+}
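The init_config path above uses ngen's {{id}} token, which ngen expands to each catchment id at runtime. A quick structural check of the template, assuming the package is installed so FilePaths resolves it:

    import json

    from data_processing.file_paths import FilePaths

    with open(FilePaths.template_dhbv2_realization_config) as f:
        realization = json.load(f)

    module = realization["global"]["formulations"][0]["params"]["modules"][0]["params"]
    print(module["init_config"])  # ./config/cat_config/dhbv2/{{id}}.yml
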
data_sources/source_validation.py CHANGED
@@ -126,16 +126,40 @@ def download_from_s3(save_path, bucket=S3_BUCKET, key=S3_KEY, region=S3_REGION):
     return False
 
 
-def get_headers():
+def get_headers(url: str = hydrofabric_url):
     # for versioning
     # Useful Headers: { 'Last-Modified': 'Wed, 20 Nov 2024 18:45:59 GMT', 'ETag': '"cc1452838886a7ab3065a61073fa991b-207"'}
     try:
-        response = requests.head(hydrofabric_url)
+        response = requests.head(url)
     except requests.exceptions.ConnectionError:
         return 500, {}
     return response.status_code, response.headers
 
 
+def download_dhbv_attributes():
+    s3_key = "hydrofabrics/community/resources/dhbv_attrs.parquet"
+    attributes_url = f"https://{S3_BUCKET}.s3.{S3_REGION}.amazonaws.com/{s3_key}"
+
+    status, headers = get_headers(attributes_url)
+    download_log = FilePaths.dhbv_attributes.with_suffix(".log")
+    if download_log.exists():
+        with open(download_log, "r") as f:
+            local_headers = json.load(f)
+    else:
+        local_headers = {}
+
+    if not FilePaths.dhbv_attributes.exists() or headers.get("ETag", "") != local_headers.get(
+        "ETag", ""
+    ):
+        download_from_s3(
+            FilePaths.dhbv_attributes,
+            bucket=S3_BUCKET,
+            key=s3_key,
+        )
+        with open(FilePaths.dhbv_attributes.with_suffix(".log"), "w") as f:
+            json.dump(dict(headers), f)
+
+
 def download_and_update_hf():
     if FilePaths.conus_hydrofabric.is_file():
         console.print(
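download_dhbv_attributes re-downloads the parquet only when the remote ETag differs from the one recorded in the sidecar .log file. The same pattern, reduced to a generic sketch (the URL and file are placeholders, not part of this package):

    import json
    from pathlib import Path

    import requests

    def needs_download(url: str, local_file: Path) -> bool:
        log = local_file.with_suffix(".log")
        try:
            remote_etag = requests.head(url).headers.get("ETag", "")
        except requests.exceptions.ConnectionError:
            return not local_file.exists()  # offline: keep whatever is cached
        local_etag = json.loads(log.read_text()).get("ETag", "") if log.exists() else ""
        return not local_file.exists() or remote_etag != local_etag
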
map_app/__init__.py CHANGED
@@ -1,16 +1,19 @@
 from flask import Flask
 import logging
+from pathlib import Path
 from map_app.views import main, intra_module_db
 from data_sources.source_validation import validate_all
 
-with open("app.log", "w") as f:
+LOG_PATH = Path.home() / ".ngiab" / "app.log"
+
+with open(LOG_PATH, "w") as f:
     f.write("")
     f.write("Starting Application!\n")
 
 logging.basicConfig(
     level=logging.INFO,
     format="%(name)-12s: %(levelname)s - %(message)s",
-    filename="app.log",
+    filename=LOG_PATH,
     filemode="a",
 )  # Append mode
 # Example: Adding a console handler to root logger (optional)
map_app/__main__.py CHANGED
@@ -3,18 +3,20 @@
 import logging
 import webbrowser
 from threading import Timer
+from pathlib import Path
 
 from data_processing.file_paths import FilePaths
 from data_processing.graph_utils import get_graph
 
 from map_app import app, console_handler
 
+LOG_PATH = Path.home() / ".ngiab" / "app.log"
 
 def open_browser():
     # find the last line in the log file that contains the port number
     # * running on http://0.0.0.0:port_number
     port_number = None
-    with open("app.log", "r") as f:
+    with open(LOG_PATH, 'r') as f:
         lines = f.readlines()
         for line in reversed(lines):
             if "Running on http" in line:
@@ -38,12 +40,12 @@ def main():
 
     if FilePaths.dev_file.is_file():
         Timer(2, set_logs_to_warning).start()
-        with open("app.log", "a") as f:
+        with open(LOG_PATH, "a") as f:
             f.write("Running in debug mode\n")
         app.run(debug=True, host="0.0.0.0", port="8080")  # type: ignore
     else:
         Timer(1, open_browser).start()
-        with open("app.log", "a") as f:
+        with open(LOG_PATH, "a") as f:
             f.write("Running in production mode\n")
         app.run(host="0.0.0.0", port="0")  # type: ignore
 
map_app/views.py CHANGED
@@ -136,7 +136,7 @@ def download_forcings(data_source, start_time, end_time, paths):
 
 def compute_forcings(cached_data, paths):
     create_forcings(cached_data, paths.output_dir.stem)  # type: ignore
-
+
 @main.route("/forcings", methods=["POST"])
 def get_forcings():
     # body: JSON.stringify({'forcing_dir': forcing_dir, 'start_time': start_time, 'end_time': end_time}),
@@ -190,7 +190,7 @@ def get_catids_from_vpu():
 
 @main.route("/logs", methods=["GET"])
 def get_logs():
-    log_file_path = "app.log"
+    log_file_path = Path.home() / ".ngiab" / "app.log"
     try:
         with open(log_file_path, "r") as file:
             lines = file.readlines()
ngiab_data_cli/__main__.py CHANGED
@@ -12,7 +12,11 @@ with rich.status.Status("loading") as status:
     from pathlib import Path
 
     import geopandas as gpd
-    from data_processing.create_realization import create_lstm_realization, create_realization
+    from data_processing.create_realization import (
+        create_dhbv2_realization,
+        create_lstm_realization,
+        create_realization,
+    )
     from data_processing.dask_utils import shutdown_cluster
     from data_processing.dataset_utils import save_and_clip_dataset
     from data_processing.datasets import load_aorc_zarr, load_v3_retrospective_zarr
@@ -217,6 +221,12 @@ def main() -> None:
                 end_time=args.end_date,
                 use_rust=args.lstm_rust,
             )
+        if args.dhbv2:
+            create_dhbv2_realization(
+                output_folder,
+                start_time=args.start_date,
+                end_time=args.end_date,
+            )
         else:
             create_realization(
                 output_folder,
ngiab_data_cli/arguments.py CHANGED
@@ -92,13 +92,17 @@ def parse_arguments() -> argparse.Namespace:
     parser.add_argument(
         "--start_date",
         "--start",
-        type=lambda s: datetime.strptime(s, DATE_FORMAT) if len(s) == 10 else datetime.strptime(s, DATE_FORMAT2),
+        type=lambda s: datetime.strptime(s, DATE_FORMAT)
+        if len(s) == 10
+        else datetime.strptime(s, DATE_FORMAT2),
         help=f"Start date for forcings/realization (format {DATE_FORMAT_HINT})",
     )
     parser.add_argument(
         "--end_date",
         "--end",
-        type=lambda s: datetime.strptime(s, DATE_FORMAT) if len(s) == 10 else datetime.strptime(s, DATE_FORMAT2),
+        type=lambda s: datetime.strptime(s, DATE_FORMAT)
+        if len(s) == 10
+        else datetime.strptime(s, DATE_FORMAT2),
         help=f"End date for forcings/realization (format {DATE_FORMAT_HINT})",
     )
     parser.add_argument(
@@ -124,6 +128,13 @@ def parse_arguments() -> argparse.Namespace:
         action="store_true",
         help="enable experimental high speed Rust bindings of LSTM model realization and forcings",
     )
+
+    parser.add_argument(
+        "--dhbv2",
+        action="store_true",
+        help="enable dHBV2 model realization and forcings",
+    )
+
     parser.add_argument(
         "--nwm_gw",
         action="store_true",
ngiab_data_preprocess-4.7.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ngiab_data_preprocess
-Version: 4.6.7
+Version: 4.7.0
 Summary: Graphical Tools for creating Next Gen Water model input data.
 Author-email: Josh Cunningham <jcunningham8@ua.edu>
 Project-URL: Homepage, https://github.com/CIROH-UA/NGIAB_data_preprocess
@@ -13,6 +13,7 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: pyogrio>=0.7.2
 Requires-Dist: pyproj>=3.6.1
+Requires-Dist: pandas<3.0.0
 Requires-Dist: Flask==3.0.2
 Requires-Dist: geopandas>=1.0.0
 Requires-Dist: requests==2.32.4
@@ -28,10 +29,11 @@ Requires-Dist: numpy>=1.26.4
 Requires-Dist: tqdm==4.66.4
 Requires-Dist: rich==13.7.1
 Requires-Dist: colorama==0.4.6
-Requires-Dist: bokeh==3.5.1
+Requires-Dist: bokeh==3.8.2
 Requires-Dist: boto3
 Requires-Dist: numcodecs<0.16.0
 Requires-Dist: scipy>=1.15.3
+Requires-Dist: pyarrow
 Provides-Extra: eval
 Requires-Dist: ngiab_eval; extra == "eval"
 Provides-Extra: plot
ngiab_data_preprocess-4.7.0.dist-info/RECORD CHANGED
@@ -1,27 +1,29 @@
-data_processing/create_realization.py,sha256=b1a9Wuld9saJ-zzVDPY_kba3-ZOVbRuobyR-QiHLXyY,11550
+data_processing/create_realization.py,sha256=yKU_77-kidO0ePMoyIC1APGjH81K9gzaHDEfNcjTqjg,13364
 data_processing/dask_utils.py,sha256=A2IP94WAz8W9nek3etXKEKTOxGPf0NWSFLh8cZ5S-xU,2454
 data_processing/dataset_utils.py,sha256=iLY_3dSRxd8O0wI24IM2zuJru8vtmRlph0O_ZU_mAr4,8597
 data_processing/datasets.py,sha256=_EJ1uZSWTU1HWpvF7TQSikneJqWZFikTrdo9usCV8A0,4665
-data_processing/file_paths.py,sha256=7MpwfIQewLRrDpAw1dxTjTperUwOk3EC_kthmnJSRII,4851
-data_processing/forcings.py,sha256=G_g3VSM_YN-k4FrbnUByrDR4n3fk1GVfv74kamit2CI,21775
+data_processing/file_paths.py,sha256=_XGjr2msle2TmcgTuo8ZirOpXnxeYzLfQ3tOqYB8vks,5076
+data_processing/forcings.py,sha256=blqf2wlr59-eOmUsekRO1lucM6KTei5c-gPrkXaZqkE,21783
 data_processing/gpkg_utils.py,sha256=uERtQAMz9msJib_suffILuhUY8EcrvYw-pqkJOm2s9E,20673
 data_processing/graph_utils.py,sha256=4D72wMSiCRKCPC7JUz7XCoaISRGLuqDx6wpeO_VP8fk,8301
 data_processing/s3fs_utils.py,sha256=ki1EmA0ezV0r26re6dRWIGzL5FudGdwF9Qw1eVLR0Bc,2747
 data_processing/subset.py,sha256=EwDii-EsOiJBaDMjvYWfz7V9wQSXb887w_fk28u8dlw,3789
 data_sources/cfe-nowpm-realization-template.json,sha256=8an6q1drWD8wU1ocvdPab-GvZDvlQ-0di_-NommH3QI,3528
 data_sources/cfe-template.ini,sha256=6e5-usqjWtm3MWVvtm8CTeZTJJMxO1ZswkOXq0L9mnc,2033
+data_sources/dhbv2-catchment-template.yaml,sha256=BX_k1HXwQ7lHS4ZutBJx9rt1SZgCaIK8NpF6JLypSPU,1251
+data_sources/dhbv2-realization-template.json,sha256=zYzCdG8twHZqVNHDv4PYnzAlgx1I-R3Q1jv6C0Kjggo,2021
 data_sources/forcing_template.nc,sha256=uRuVAqX3ngdlougZINavtwl_wC2VLD8fHqG7_CLim1s,85284
 data_sources/lstm-catchment-template.yml,sha256=LtknqvxbWrtLLZIXxFgTfbQmM4x8XnHBDFvRIh2EIFI,965
 data_sources/lstm-realization-template.json,sha256=ndz3h5NGhtUSnsZwscgNuXYBG9mlAuz7Lxx7iCw22UY,1270
 data_sources/lstm-rust-realization-template.json,sha256=wuHiwbxo5muA0bv7ONRItj4dnDvLWJ_P2dfnq-S9Neg,1339
 data_sources/ngen-routing-template.yaml,sha256=wM5v6jj0kwcJBVatLFuy2big6g8nlSXxzc8a23nwI5s,4655
 data_sources/noah-owp-modular-init.namelist.input,sha256=Vb7mp40hFpJogruOrXrDHwVW1bKi9h1ciDNyDvTzn20,3045
-data_sources/source_validation.py,sha256=DMCTo-Tad9QSXFqSDB5guI8hMzY_kQE6aW78-5nqwlU,9457
+data_sources/source_validation.py,sha256=pYKjHnfYSOqQ4yXuXI7bzGrHMnvDBwNTNmuY3kQi0Xg,10288
 data_sources/template.sql,sha256=ZnFqAqleEq9wgmAhNO90Wue_L9k0JAn8KF99DYtcxgs,10457
 data_sources/triggers.sql,sha256=G0d_175eNsamKAFhsbphPATvzMPuPL_iCleIhlToduQ,14906
-map_app/__init__.py,sha256=OarJao9X98kcbLyiwewN4ObWNAYkKDichcxbuWywTsA,818
-map_app/__main__.py,sha256=5qJGypfesfdWDWRsbTQiMs3uL5zPZKRZQq31l7qXqCc,1649
-map_app/views.py,sha256=AxLsXL8ZSnRJzg5zVYDGfVFA_6amgae41AO_E1G7W58,7923
+map_app/__init__.py,sha256=HvjuTiuf0vTvNLDsgmABqPPMQxOcTljX2nzgj6Pdp_g,888
+map_app/__main__.py,sha256=9wnCunreKNtvI-NEFB-bTHoKXuI5PtPznj13ctrDAOc,1717
+map_app/views.py,sha256=LRpHHw9FOzxEPY42LjJZ3XWFbKyZFKtPu9GZftdXrUY,7944
 map_app/static/css/console.css,sha256=xN6G2MMFyKc9YW9HEVpUUTUjx2o2nokBR4nCX5c18UM,803
 map_app/static/css/main.css,sha256=pYIIk-dXW6YMpliSJKATdvgPhFVY6K26NeUuHoYyJqg,8736
 map_app/static/css/toggle.css,sha256=aUIe9AL1-_mzKvWir3lJ9W8r2oXsOjkFbtvcrohnV10,4149
@@ -31,13 +33,13 @@ map_app/static/js/main.js,sha256=wtLZ18hPZbnyo28dRGsOf-CAHCGxqe1PWEAGa6-S4tg,109
 map_app/static/resources/loading.gif,sha256=ggdkZf1AD7rSwIpSJwfiIqANgmVV1WHlxGuKxQKv7uY,72191
 map_app/static/resources/screenshot.jpg,sha256=Ia358aX-OHM9BP4B8lX05cLnguF2fHUIimno9bnFLYw,253730
 map_app/templates/index.html,sha256=qgQxvaJgkU8Ul57dMLX5n0O3BrwMow-CxGPH7_8yjXc,12195
-ngiab_data_cli/__main__.py,sha256=FmHxibQtFm15aEUG1BrIIqIWNU_BoXzWCTXoR9EA-YI,11353
-ngiab_data_cli/arguments.py,sha256=Bi_Q6FRXvRxI50Pgk7goyASe-2lfUh-oDdnYkaY4KTc,5262
+ngiab_data_cli/__main__.py,sha256=aYJSQi1ImRuXV-5OugGc-vsXebrT0hHDFYRedx2SHmE,11626
+ngiab_data_cli/arguments.py,sha256=og6hRCdjfOJp0eomHH0nbWj4SkIi0K3gb4NCq-S64nE,5435
 ngiab_data_cli/custom_logging.py,sha256=iS2XozaxudcxQj17qAsrCgbVK9LJAYAPmarJuVWJo1k,1280
 ngiab_data_cli/forcing_cli.py,sha256=eIWRxRWUwPqR16fihFDEIV4VzGlNuvcD6lJW5VYjkPU,3635
-ngiab_data_preprocess-4.6.7.dist-info/licenses/LICENSE,sha256=6dMSprwwnsRzEm02mEDbKHD9dUbL8bPIt9Vhrhb0Ulk,1081
-ngiab_data_preprocess-4.6.7.dist-info/METADATA,sha256=3U2P21OHELN_hol-fu43soXXozFw3r6Y57rLwCK2GLc,14513
-ngiab_data_preprocess-4.6.7.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-ngiab_data_preprocess-4.6.7.dist-info/entry_points.txt,sha256=spwlhKEJ3ZnNETQsJGeTjD7Vwy8O_zGHb9GdX8ACCtw,128
-ngiab_data_preprocess-4.6.7.dist-info/top_level.txt,sha256=CjhYAUZrdveR2fOK6rxffU09VIN2IuPD7hk4V3l3pV0,52
-ngiab_data_preprocess-4.6.7.dist-info/RECORD,,
+ngiab_data_preprocess-4.7.0.dist-info/licenses/LICENSE,sha256=6dMSprwwnsRzEm02mEDbKHD9dUbL8bPIt9Vhrhb0Ulk,1081
+ngiab_data_preprocess-4.7.0.dist-info/METADATA,sha256=HKfPyhKmOfuywhni8Gs3-qlyOUfZranF0eTGBjt3kBA,14564
+ngiab_data_preprocess-4.7.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ngiab_data_preprocess-4.7.0.dist-info/entry_points.txt,sha256=spwlhKEJ3ZnNETQsJGeTjD7Vwy8O_zGHb9GdX8ACCtw,128
+ngiab_data_preprocess-4.7.0.dist-info/top_level.txt,sha256=CjhYAUZrdveR2fOK6rxffU09VIN2IuPD7hk4V3l3pV0,52
+ngiab_data_preprocess-4.7.0.dist-info/RECORD,,
ngiab_data_preprocess-4.7.0.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (80.9.0)
+Generator: setuptools (80.10.2)
 Root-Is-Purelib: true
 Tag: py3-none-any
 