ngiab-data-preprocess 4.5.0__py3-none-any.whl → 4.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as published.
@@ -0,0 +1,47 @@
+ {
+     "global": {
+         "formulations": [
+             {
+                 "name": "bmi_multi",
+                 "params": {
+                     "name": "bmi_multi",
+                     "model_type_name": "lstm",
+                     "forcing_file": "",
+                     "init_config": "",
+                     "allow_exceed_end_time": true,
+                     "main_output_variable": "land_surface_water__runoff_depth",
+                     "modules": [
+                         {
+                             "name": "bmi_c",
+                             "params": {
+                                 "name": "bmi_c",
+                                 "model_type_name": "bmi_rust",
+                                 "init_config": "./config/cat_config/lstm/{{id}}.yml",
+                                 "allow_exceed_end_time": true,
+                                 "main_output_variable": "land_surface_water__runoff_depth",
+                                 "uses_forcing_file": false,
+                                 "registration_function": "register_bmi_lstm",
+                                 "library_file": "/dmod/shared_libs/libbmi_burn_lstm.so"
+                             }
+                         }
+                     ]
+                 }
+             }
+         ],
+         "forcing": {
+             "path": "./forcings/forcings.nc",
+             "provider": "NetCDF",
+             "enable_cache": false
+         }
+     },
+     "time": {
+         "start_time": "2016-01-01 00:00:00",
+         "end_time": "2016-02-27 00:00:00",
+         "output_interval": 3600
+     },
+     "routing": {
+         "t_route_config_file_with_path": "./config/troute.yaml"
+     },
+     "remotes_enabled": false,
+     "output_root": "./outputs/ngen"
+ }
@@ -1,29 +1,31 @@
  import gzip
  import json
  import os
+ import sqlite3
  import tarfile
  import warnings
  from time import sleep

  import boto3
+ import botocore
  import psutil
  import requests
  from boto3.s3.transfer import TransferConfig
  from botocore.exceptions import ClientError
- import botocore
- from data_processing.file_paths import file_paths
+ from data_processing.file_paths import FilePaths
+ from data_processing.gpkg_utils import verify_indices
  from rich.console import Console
- from rich.progress import (Progress,
-                            SpinnerColumn,
-                            TextColumn,
-                            TimeElapsedColumn,
-                            BarColumn,
-                            DownloadColumn,
-                            TransferSpeedColumn)
+ from rich.progress import (
+     BarColumn,
+     DownloadColumn,
+     Progress,
+     SpinnerColumn,
+     TextColumn,
+     TimeElapsedColumn,
+     TransferSpeedColumn,
+ )
  from rich.prompt import Prompt
  from tqdm import TqdmExperimentalWarning
- from data_processing.gpkg_utils import verify_indices
- import sqlite3

  warnings.filterwarnings("ignore", category=TqdmExperimentalWarning)

@@ -60,13 +62,15 @@ def download_from_s3(save_path, bucket=S3_BUCKET, key=S3_KEY, region=S3_REGION):
      if os.path.exists(save_path):
          console.print(f"File already exists: {save_path}", style="bold yellow")
          os.remove(save_path)
-
-     client_config = botocore.config.Config(
-         max_pool_connections=75
-     )
+
+     client_config = botocore.config.Config(max_pool_connections=75)
      # Initialize S3 client
      s3_client = boto3.client(
-         "s3", aws_access_key_id="", aws_secret_access_key="", region_name=region, config=client_config
+         "s3",
+         aws_access_key_id="",
+         aws_secret_access_key="",
+         region_name=region,
+         config=client_config,
      )
      # Disable request signing for public buckets
      s3_client._request_signer.sign = lambda *args, **kwargs: None
@@ -102,15 +106,20 @@ def download_from_s3(save_path, bucket=S3_BUCKET, key=S3_KEY, region=S3_REGION):
          use_threads=True,
      )

-
      try:
          dl_progress = Progress(BarColumn(), DownloadColumn(), TransferSpeedColumn())
          # Download file using optimized transfer config
          with dl_progress:
              task = dl_progress.add_task("Downloading...", total=total_size)
-             s3_client.download_file(Bucket=bucket, Key=key, Filename=save_path, Config=config,
-                                     Callback=lambda bytes_downloaded: dl_progress.update(
-                                         task, advance=bytes_downloaded))
+             s3_client.download_file(
+                 Bucket=bucket,
+                 Key=key,
+                 Filename=save_path,
+                 Config=config,
+                 Callback=lambda bytes_downloaded: dl_progress.update(
+                     task, advance=bytes_downloaded
+                 ),
+             )
          return True
      except Exception as e:
          console.print(f"Error downloading file: {e}", style="bold red")
@@ -128,47 +137,47 @@ def get_headers():


  def download_and_update_hf():
-
-     if file_paths.conus_hydrofabric.is_file():
+     if FilePaths.conus_hydrofabric.is_file():
          console.print(
-             f"Hydrofabric already exists at {file_paths.conus_hydrofabric}, removing it to download the latest version.",
+             f"Hydrofabric already exists at {FilePaths.conus_hydrofabric}, removing it to download the latest version.",
              style="bold yellow",
          )
-         file_paths.conus_hydrofabric.unlink()
-
+         FilePaths.conus_hydrofabric.unlink()
+
      download_from_s3(
-         file_paths.conus_hydrofabric.with_suffix(".tar.gz"),
+         FilePaths.conus_hydrofabric.with_suffix(".tar.gz"),
          bucket="communityhydrofabric",
          key="hydrofabrics/community/conus_nextgen.tar.gz",
      )

-     if file_paths.hydrofabric_graph.is_file():
+     if FilePaths.hydrofabric_graph.is_file():
          console.print(
-             f"Hydrofabric graph already exists at {file_paths.hydrofabric_graph}, removing it to download the latest version.",
+             f"Hydrofabric graph already exists at {FilePaths.hydrofabric_graph}, removing it to download the latest version.",
              style="bold yellow",
          )
-         file_paths.hydrofabric_graph.unlink()
+         FilePaths.hydrofabric_graph.unlink()

      download_from_s3(
-         file_paths.hydrofabric_graph,
+         FilePaths.hydrofabric_graph,
          bucket="communityhydrofabric",
-         key="hydrofabrics/community/conus_igraph_network.gpickle"
+         key="hydrofabrics/community/conus_igraph_network.gpickle",
      )

      status, headers = get_headers()

      if status == 200:
          # write headers to a file
-         with open(file_paths.hydrofabric_download_log, "w") as f:
+         with open(FilePaths.hydrofabric_download_log, "w") as f:
              json.dump(dict(headers), f)

      decompress_gzip_tar(
-         file_paths.conus_hydrofabric.with_suffix(".tar.gz"),
-         file_paths.conus_hydrofabric.parent,
+         FilePaths.conus_hydrofabric.with_suffix(".tar.gz"),
+         FilePaths.conus_hydrofabric.parent,
      )

+
  def validate_hydrofabric():
-     if not file_paths.conus_hydrofabric.is_file():
+     if not FilePaths.conus_hydrofabric.is_file():
          response = Prompt.ask(
              "Hydrofabric files are missing. Would you like to download them now?",
              default="y",
@@ -180,11 +189,11 @@ def validate_hydrofabric():
              console.print("Exiting...", style="bold red")
              exit()

-     if file_paths.no_update_hf.exists():
+     if FilePaths.no_update_hf.exists():
          # skip the updates
          return

-     if not file_paths.hydrofabric_download_log.is_file():
+     if not FilePaths.hydrofabric_download_log.is_file():
          response = Prompt.ask(
              "Hydrofabric version information unavailable, Would you like to fetch the updated version?",
              default="y",
@@ -195,13 +204,13 @@ def validate_hydrofabric():
      else:
          console.print("Continuing... ", style="bold yellow")
          console.print(
-             f"To disable this warning, create an empty file called {file_paths.no_update_hf.resolve()}",
+             f"To disable this warning, create an empty file called {FilePaths.no_update_hf.resolve()}",
              style="bold yellow",
          )
          sleep(2)
          return

-     with open(file_paths.hydrofabric_download_log, "r") as f:
+     with open(FilePaths.hydrofabric_download_log, "r") as f:
          content = f.read()
          headers = json.loads(content)

@@ -229,26 +238,29 @@ def validate_hydrofabric():
      else:
          console.print("Continuing... ", style="bold yellow")
          console.print(
-             f"To disable this warning, create an empty file called {file_paths.no_update_hf.resolve()}",
+             f"To disable this warning, create an empty file called {FilePaths.no_update_hf.resolve()}",
              style="bold yellow",
          )
          sleep(2)
          return
-
+
      # moved this from gpkg_utils to here to avoid potential nested rich live displays
-     if file_paths.conus_hydrofabric.is_file():
+     if FilePaths.conus_hydrofabric.is_file():
          valid_hf = False
          while not valid_hf:
              try:
                  verify_indices()
                  valid_hf = True
              except sqlite3.DatabaseError:
-                 console.print(f"Hydrofabric {file_paths.conus_hydrofabric} is corrupted. Redownloading...", style="red")
+                 console.print(
+                     f"Hydrofabric {FilePaths.conus_hydrofabric} is corrupted. Redownloading...",
+                     style="red",
+                 )
                  download_and_update_hf()


  def validate_output_dir():
-     if not file_paths.config_file.is_file():
+     if not FilePaths.config_file.is_file():
          response = Prompt.ask(
              "Output directory is not set. Would you like to use the default? ~/ngiab_preprocess_output/",
              default="y",
@@ -258,7 +270,7 @@ def validate_output_dir():
              response = Prompt.ask("Enter the path to the working directory")
          if response == "" or response.lower() == "y":
              response = "~/ngiab_preprocess_output/"
-         file_paths.set_working_dir(response) # type: ignore
+         FilePaths.set_working_dir(response) # type: ignore


  def validate_all():
map_app/__main__.py CHANGED
@@ -4,8 +4,9 @@ import logging
  import webbrowser
  from threading import Timer

- from data_processing.file_paths import file_paths
+ from data_processing.file_paths import FilePaths
  from data_processing.graph_utils import get_graph
+
  from map_app import app, console_handler


@@ -35,7 +36,7 @@ def main():
      # call this once to cache the graph
      Timer(1, get_graph).start()

-     if file_paths.dev_file.is_file():
+     if FilePaths.dev_file.is_file():
          Timer(2, set_logs_to_warning).start()
          with open("app.log", "a") as f:
              f.write("Running in debug mode\n")
@@ -318,3 +318,44 @@ input[type="datetime-local"] {
          width: 100%;
      }
  }
+
+ #progress {
+     background-color: black;
+     border-radius: 13px;
+     height: 20px;
+     padding: 3px;
+     position: relative;
+     display: grid;
+     align-items: center;
+     overflow: hidden;
+ }
+
+ #bar {
+     background-color: rgb(49,96,153);
+     width: 100%;
+     height: 20px;
+     border-radius: 10px;
+     position: absolute;
+     animation: none;
+     transform-origin: 0% 50%;
+ }
+
+
+ @keyframes indeterminateAnimation {
+     0% {
+         transform: translateX(0) scaleX(0);
+     }
+     40% {
+         transform: translateX(0) scaleX(0.4);
+     }
+     100% {
+         transform: translateX(100%) scaleX(0.5);
+     }
+ }
+
+ #bar-text {
+     position: absolute;
+     margin: auto;
+     width: 100%;
+     text-align: center;
+ }
@@ -79,7 +79,36 @@
  .toggle-input:checked + .toggle-label .toggle-text-left {
      color: #888; /* Grey color for non-selected text */
  }
-
+
  .toggle-input:checked + .toggle-label .toggle-text-right {
      color: #888; /* Grey color for non-selected text */
  }
+
+ /* toggle switch for the subset option */
+ .toggle-switch-subset {
+     position: absolute;
+     bottom: 20px;
+     left: 20px;
+     z-index: 10;
+     width: 240px;
+ }
+
+ .toggle-text-subset {
+     font-size: 10px;
+ }
+
+ /* I didn't mean to have four styles for one element but it looked terrible without this*/
+ .toggle-text-left-subset {
+     left: 20px;
+ }
+
+ .toggle-handle-subset {
+     width: 120px;
+     font-size: 10px;
+ }
+
+ .toggle-input:checked + .toggle-label .toggle-handle-subset {
+     transform: translateX(116px);
+     box-shadow: 0 0 10px rgba(0, 123, 255, 0.8); /* Blue glow effect */
+     color: #007bff; /* Blue color for the selected text */
+ }
@@ -9,20 +9,29 @@ async function subset() {
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify([cat_id]),
      })
-     .then((response) => {
+     .then(async response => {
          // 409 if that subset gpkg path already exists
          if (response.status == 409) {
+             const filename = await response.text();
              console.log("check response")
              if (!confirm('A geopackage already exists with that catchment name. Overwrite?')) {
                  alert("Subset canceled.");
+                 document.getElementById('output-path').innerHTML = "Subset canceled. Geopackage located at " + filename;
                  return;
              }
          }
+         // check what kind of subset
+         // get the position of the subset toggle
+         // false means subset by nexus, true means subset by catchment
+         var nexus_catchment = document.getElementById('subset-toggle').checked;
+         var subset_type = nexus_catchment ? 'catchment' : 'nexus';
+
          const startTime = performance.now(); // Start the timer
          fetch('/subset', {
              method: 'POST',
              headers: { 'Content-Type': 'application/json' },
-             body: JSON.stringify([cat_id]),
+             // body: JSON.stringify([cat_id]),
+             body: JSON.stringify({ 'cat_id': [cat_id], 'subset_type': subset_type}),
          })
          .then(response => response.text())
          .then(filename => {
@@ -41,46 +50,101 @@ async function subset() {
      });
  }

- async function forcings() {
-     if (document.getElementById('output-path').textContent === '') {
-         alert('Please subset the data before getting forcings');
-         return;
-     }
-     console.log('getting forcings');
-     document.getElementById('forcings-button').disabled = true;
-     document.getElementById('forcings-loading').style.visibility = "visible";
-
-     const forcing_dir = document.getElementById('output-path').textContent;
-     const start_time = document.getElementById('start-time').value;
-     const end_time = document.getElementById('end-time').value;
-     if (forcing_dir === '' || start_time === '' || end_time === '') {
-         alert('Please enter a valid output path, start time, and end time');
-         document.getElementById('time-warning').style.color = 'red';
-         return;
-     }
+ function updateProgressBar(percent) {
+     var bar = document.getElementById("bar");
+     bar.style.width = percent + "%";
+     var barText = document.getElementById("bar-text");
+     barText.textContent = percent + "%";
+ }

-     // get the position of the nwm aorc forcing toggle
-     // false means nwm forcing, true means aorc forcing
-     var nwm_aorc = document.getElementById('datasource-toggle').checked;
-     var source = nwm_aorc ? 'aorc' : 'nwm';
-     console.log('source:', source);
+ function pollForcingsProgress(progressFile) {
+     const interval = setInterval(() => {
+         fetch('/forcings_progress', {
+             method: 'POST',
+             headers: { 'Content-Type': 'application/json' },
+             body: JSON.stringify(progressFile),
+         })
+         .then(response => response.text())
+         .then(data => {
+             if (data == "NaN") {
+                 document.getElementById('forcings-output-path').textContent = "Downloading data...";
+                 document.getElementById('bar-text').textContent = "Downloading...";
+                 document.getElementById('bar').style.animation = "indeterminateAnimation 1s infinite linear";
+             } else {
+                 const percent = parseInt(data, 10);
+                 updateProgressBar(percent);
+                 if (percent > 0 && percent < 100) {
+                     document.getElementById('bar').style.animation = "none"; // stop the indeterminate animation
+                     document.getElementById('forcings-output-path').textContent = "Calculating zonal statistics. See progress below.";
+                 } else if (percent >= 100) {
+                     updateProgressBar(100); // Ensure the progress bar is full
+                     clearInterval(interval);
+                     document.getElementById('forcings-output-path').textContent = "Forcings generated successfully";
+                 }
+             }
+         })
+         .catch(error => {
+             console.error('Progress polling error:', error);
+             clearInterval(interval);
+         });
+     }, 1000); // Poll every second
+ }

-     document.getElementById('forcings-output-path').textContent = "Generating forcings...";
-     fetch('/forcings', {
+ async function forcings() {
+     fetch('/subset_check', {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
-         body: JSON.stringify({ 'forcing_dir': forcing_dir, 'start_time': start_time, 'end_time': end_time , 'source': source}),
-     }).then(response => response.text())
-     .then(response_code => {
-         document.getElementById('forcings-output-path').textContent = "Forcings generated successfully";
-     })
-     .catch(error => {
-         console.error('Error:', error);
-     }).finally(() => {
-         document.getElementById('forcings-button').disabled = false;
-         document.getElementById('forcings-loading').style.visibility = "hidden";
+         body: JSON.stringify([cat_id]),
+     })
+     .then(async response => {
+         // 409 if that subset gpkg path already exists
+         if (response.status == 409) {
+             const filename = await response.text();
+             console.log('getting forcings');
+             document.getElementById('forcings-button').disabled = true;
+             document.getElementById('forcings-loading').style.visibility = "visible";

-     });
+             const forcing_dir = filename;
+             console.log('forcing_dir:', forcing_dir);
+             const start_time = document.getElementById('start-time').value;
+             const end_time = document.getElementById('end-time').value;
+             if (forcing_dir === '' || start_time === '' || end_time === '') {
+                 alert('Please enter a valid output path, start time, and end time');
+                 document.getElementById('time-warning').style.color = 'red';
+                 return;
+             }
+
+             // get the position of the nwm aorc forcing toggle
+             // false means nwm forcing, true means aorc forcing
+             var nwm_aorc = document.getElementById('datasource-toggle').checked;
+             var source = nwm_aorc ? 'aorc' : 'nwm';
+             console.log('source:', source);
+
+             fetch('/make_forcings_progress_file', {
+                 method: 'POST',
+                 headers: { 'Content-Type': 'application/json' },
+                 body: JSON.stringify(forcing_dir),
+             })
+             .then(async (response) => response.text())
+             .then(progressFile => {
+                 pollForcingsProgress(progressFile); // Start polling for progress
+             })
+             fetch('/forcings', {
+                 method: 'POST',
+                 headers: { 'Content-Type': 'application/json' },
+                 body: JSON.stringify({ 'forcing_dir': forcing_dir, 'start_time': start_time, 'end_time': end_time , 'source': source}),
+             })
+             .then(response => response.text())
+             .catch(error => {
+                 console.error('Error:', error);
+             }).finally(() => {
+                 document.getElementById('forcings-button').disabled = false;
+             });
+         } else {
+             alert('No existing geopackage found. Please subset the data before getting forcings');
+             return;
+         }
+     })
  }

  async function realization() {
map_app/static/js/main.js CHANGED
@@ -133,27 +133,63 @@ function update_map(cat_id, e) {
      $('#selected-basins').text(cat_id)
      map.setFilter('selected-catchments', ['any', ['in', 'divide_id', cat_id]]);
      map.setFilter('upstream-catchments', ['any', ['in', 'divide_id', ""]])
-     fetch('/get_upstream_catids', {
-         method: 'POST',
-         headers: { 'Content-Type': 'application/json' },
-         body: JSON.stringify(cat_id),
-     })
-         .then(response => response.json())
-         .then(data => {
-             map.setFilter('upstream-catchments', ['any', ['in', 'divide_id', ...data]]);
-             if (data.length === 0) {
-                 new maplibregl.Popup()
-                     .setLngLat(e.lngLat)
-                     .setHTML('No upstreams')
-                     .addTo(map);
-             }
-         });
+     // get the position of the subset toggle
+     // false means subset by nexus, true means subset by catchment
+     var nexus_catchment = document.getElementById('subset-toggle').checked;
+     var subset_type = nexus_catchment ? 'catchment' : 'nexus';
+     console.log('subset_type:', subset_type);
+
+     if (subset_type == 'catchment') {
+         fetch('/get_upstream_catids', {
+             method: 'POST',
+             headers: { 'Content-Type': 'application/json' },
+             body: JSON.stringify(cat_id),
+         })
+             .then(response => response.json())
+             .then(data => {
+                 map.setFilter('upstream-catchments', ['any', ['in', 'divide_id', ...data]]);
+                 if (data.length === 0) {
+                     new maplibregl.Popup()
+                         .setLngLat(e.lngLat)
+                         .setHTML('No upstreams')
+                         .addTo(map);
+                 }
+             });
+     } else {
+         fetch('/get_upstream_wbids', {
+             method: 'POST',
+             headers: { 'Content-Type': 'application/json' },
+             body: JSON.stringify(cat_id),
+         })
+             .then(response => response.json())
+             .then(data => {
+                 map.setFilter('upstream-catchments', ['any', ['in', 'divide_id', ...data]]);
+                 if (data.length === 0) {
+                     new maplibregl.Popup()
+                         .setLngLat(e.lngLat)
+                         .setHTML('No upstreams')
+                         .addTo(map);
+                 }
+             });
+     }
  }
+ let lastClickedLngLat = null;
  map.on('click', 'catchments', (e) => {
      cat_id = e.features[0].properties.divide_id;
+     lastClickedLngLat = e.lngLat; // Store the last clicked location
      update_map(cat_id, e);
  });

+ // When you want to use it (e.g., in your toggle handler):
+ document.getElementById("subset-toggle").addEventListener('change', function() {
+     const cat_id = document.getElementById('selected-basins').textContent;
+     if (cat_id && cat_id !== 'None - get clicking!' && lastClickedLngLat) {
+         // Create a fake event with the last clicked location
+         const fakeEvent = { lngLat: lastClickedLngLat };
+         update_map(cat_id, fakeEvent);
+     }
+ });
+
  // Create a popup, but don't add it to the map yet.
  const popup = new maplibregl.Popup({
      closeButton: false,
@@ -27,6 +27,15 @@
          <button id="toggle-button-camels">Show CAMELS basins</button>
          <button id="toggle-button-nwm">Overlay NWM chunks</button>
          <button id="toggle-button-aorc">Overlay AORC chunks</button>
+         <br>
+         <div class="toggle-switch toggle-switch-subset">
+             <input type="checkbox" id="subset-toggle" class="toggle-input">
+             <label for="subset-toggle" class="toggle-label">
+                 <span class="toggle-text toggle-text-subset toggle-text-left toggle-text-left-subset">Subset by nexus</span>
+                 <span class="toggle-text toggle-text-subset toggle-text-right">Subset by catchment</span>
+                 <span class="toggle-handle toggle-handle-subset"></span>
+             </label>
+         </div>
      </div>

      <div class="command-container">
@@ -126,10 +135,16 @@
              </label>
          </div>
          <button id="forcings-button">Create Forcing From Zarrs</button>
-         <img class=" loading" src="{{ url_for('static', filename='resources/loading.gif') }}" alt="Loading"
+         <img class="loading" src="{{ url_for('static', filename='resources/loading.gif') }}" alt="Loading"
              id="forcings-loading">
+         <br>
      </div>
+     <br>
      <div id="forcings-output-path"></div>
+     <div id="progress">
+         <div id="bar"></div>
+         <div id="bar-text"></div>
+     </div>
  </section>

  <section id="realization">