tfv-get-tools 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62) hide show
  1. tfv_get_tools/__init__.py +4 -0
  2. tfv_get_tools/_standard_attrs.py +107 -0
  3. tfv_get_tools/atmos.py +167 -0
  4. tfv_get_tools/cli/_cli_base.py +173 -0
  5. tfv_get_tools/cli/atmos_cli.py +192 -0
  6. tfv_get_tools/cli/ocean_cli.py +204 -0
  7. tfv_get_tools/cli/tide_cli.py +118 -0
  8. tfv_get_tools/cli/wave_cli.py +183 -0
  9. tfv_get_tools/fvc/__init__.py +3 -0
  10. tfv_get_tools/fvc/_atmos.py +230 -0
  11. tfv_get_tools/fvc/_fvc.py +218 -0
  12. tfv_get_tools/fvc/_ocean.py +171 -0
  13. tfv_get_tools/fvc/_tide.py +195 -0
  14. tfv_get_tools/ocean.py +170 -0
  15. tfv_get_tools/providers/__init__.py +0 -0
  16. tfv_get_tools/providers/_custom_conversions.py +34 -0
  17. tfv_get_tools/providers/_downloader.py +566 -0
  18. tfv_get_tools/providers/_merger.py +520 -0
  19. tfv_get_tools/providers/_utilities.py +255 -0
  20. tfv_get_tools/providers/atmos/barra2.py +209 -0
  21. tfv_get_tools/providers/atmos/cfgs/barra2_c2.yaml +52 -0
  22. tfv_get_tools/providers/atmos/cfgs/barra2_r2.yaml +85 -0
  23. tfv_get_tools/providers/atmos/cfgs/barra2_re2.yaml +70 -0
  24. tfv_get_tools/providers/atmos/cfgs/cfsr.yaml +68 -0
  25. tfv_get_tools/providers/atmos/cfgs/era5.yaml +77 -0
  26. tfv_get_tools/providers/atmos/cfgs/era5_gcp.yaml +77 -0
  27. tfv_get_tools/providers/atmos/cfsr.py +207 -0
  28. tfv_get_tools/providers/atmos/era5.py +20 -0
  29. tfv_get_tools/providers/atmos/era5_gcp.py +20 -0
  30. tfv_get_tools/providers/ocean/cfgs/copernicus_blk.yaml +64 -0
  31. tfv_get_tools/providers/ocean/cfgs/copernicus_glo.yaml +67 -0
  32. tfv_get_tools/providers/ocean/cfgs/copernicus_nws.yaml +62 -0
  33. tfv_get_tools/providers/ocean/cfgs/hycom.yaml +73 -0
  34. tfv_get_tools/providers/ocean/copernicus_ocean.py +457 -0
  35. tfv_get_tools/providers/ocean/hycom.py +611 -0
  36. tfv_get_tools/providers/wave/cawcr.py +166 -0
  37. tfv_get_tools/providers/wave/cfgs/cawcr_aus_10m.yaml +39 -0
  38. tfv_get_tools/providers/wave/cfgs/cawcr_aus_4m.yaml +39 -0
  39. tfv_get_tools/providers/wave/cfgs/cawcr_glob_24m.yaml +39 -0
  40. tfv_get_tools/providers/wave/cfgs/cawcr_pac_10m.yaml +39 -0
  41. tfv_get_tools/providers/wave/cfgs/cawcr_pac_4m.yaml +39 -0
  42. tfv_get_tools/providers/wave/cfgs/copernicus_glo.yaml +56 -0
  43. tfv_get_tools/providers/wave/cfgs/copernicus_nws.yaml +51 -0
  44. tfv_get_tools/providers/wave/cfgs/era5.yaml +48 -0
  45. tfv_get_tools/providers/wave/cfgs/era5_gcp.yaml +48 -0
  46. tfv_get_tools/providers/wave/copernicus_wave.py +38 -0
  47. tfv_get_tools/providers/wave/era5.py +232 -0
  48. tfv_get_tools/providers/wave/era5_gcp.py +169 -0
  49. tfv_get_tools/tide/__init__.py +2 -0
  50. tfv_get_tools/tide/_nodestring.py +214 -0
  51. tfv_get_tools/tide/_tidal_base.py +568 -0
  52. tfv_get_tools/utilities/_tfv_bc.py +78 -0
  53. tfv_get_tools/utilities/horizontal_padding.py +89 -0
  54. tfv_get_tools/utilities/land_masking.py +93 -0
  55. tfv_get_tools/utilities/parsers.py +44 -0
  56. tfv_get_tools/utilities/warnings.py +38 -0
  57. tfv_get_tools/wave.py +179 -0
  58. tfv_get_tools-0.2.0.dist-info/METADATA +286 -0
  59. tfv_get_tools-0.2.0.dist-info/RECORD +62 -0
  60. tfv_get_tools-0.2.0.dist-info/WHEEL +5 -0
  61. tfv_get_tools-0.2.0.dist-info/entry_points.txt +5 -0
  62. tfv_get_tools-0.2.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,566 @@
1
+ """
2
+ Base Downloader class - this is the base class for all downloaders.
3
+ """
4
+
5
+ from abc import ABC, abstractmethod
6
+ from dataclasses import dataclass, field
7
+ from datetime import datetime
8
+ from enum import Enum
9
+ from pathlib import Path
10
+ from typing import Dict, List, Optional, Tuple, Union
11
+ import sys
12
+
13
+ import pandas as pd
14
+ from pandas.tseries.offsets import MonthBegin, MonthEnd
15
+
16
+ from tfv_get_tools.providers._utilities import _get_config
17
+ from tfv_get_tools.utilities.parsers import _parse_date, _parse_path
18
+
19
+
20
class DownloadStatus(Enum):
    """Enumerates the possible outcomes of a download operation."""

    # The file was fetched and written successfully.
    SUCCESS = "success"
    # The download was attempted but did not complete.
    FAILED = "failed"
    # The file already existed locally, so no download was attempted.
    SKIPPED = "skipped"
    # Partially completed download — not produced by the base downloader
    # itself; presumably used by source-specific subclasses (confirm).
    PARTIAL = "partial"
27
+
28
+
29
@dataclass
class FileDownloadResult:
    """Outcome of downloading a single file.

    Only ``status`` is required; the remaining fields are populated
    wherever the information is available (e.g. ``bytes_downloaded`` and
    ``duration_seconds`` only on success, ``error`` only on exception).
    """

    status: DownloadStatus
    file_path: Optional[Path] = None
    url: Optional[str] = None
    timestamp: Optional[pd.Timestamp] = None
    variable: Optional[str] = None
    message: Optional[str] = None
    error: Optional[Exception] = None
    bytes_downloaded: Optional[int] = None
    duration_seconds: Optional[float] = None

    @property
    def success(self) -> bool:
        """True when this file downloaded successfully."""
        # Enum members are singletons, so identity equals equality here.
        return self.status is DownloadStatus.SUCCESS

    @property
    def failed(self) -> bool:
        """True when this file's download was attempted and failed."""
        return self.status is DownloadStatus.FAILED
50
+
51
+
52
@dataclass
class BatchDownloadResult:
    """Aggregated outcome of a batch download (multiple files).

    Collects individual :class:`FileDownloadResult` objects and exposes
    summary views (counts, success rate, bytes, duration) over them.
    """

    results: List[FileDownloadResult] = field(default_factory=list)
    start_time: Optional[datetime] = None
    end_time: Optional[datetime] = None

    def add_result(self, result: FileDownloadResult) -> None:
        """Record the outcome of one file download."""
        self.results.append(result)

    @property
    def total_files(self) -> int:
        """Number of file results recorded so far."""
        return len(self.results)

    @property
    def successful_files(self) -> List[FileDownloadResult]:
        """All recorded results that completed successfully."""
        return [res for res in self.results if res.success]

    @property
    def failed_files(self) -> List[FileDownloadResult]:
        """All recorded results that were attempted and failed."""
        return [res for res in self.results if res.failed]

    @property
    def skipped_files(self) -> List[FileDownloadResult]:
        """All recorded results that were skipped (file already present)."""
        return [res for res in self.results if res.status == DownloadStatus.SKIPPED]

    @property
    def success_rate(self) -> float:
        """Success rate as a percentage; 0.0 for an empty batch."""
        total = self.total_files
        if not total:
            return 0.0
        return (len(self.successful_files) / total) * 100

    @property
    def summary(self) -> Dict[str, int]:
        """Count of results per status, keyed by the status string value."""
        return {
            status.value: len([r for r in self.results if r.status == status])
            for status in DownloadStatus
        }

    @property
    def total_bytes_downloaded(self) -> int:
        """Total bytes across all successful files (missing sizes count as 0)."""
        total = 0
        for res in self.successful_files:
            total += res.bytes_downloaded or 0
        return total

    @property
    def duration_seconds(self) -> Optional[float]:
        """Wall-clock duration of the batch, or None if timing is incomplete."""
        if self.start_time is None or self.end_time is None:
            return None
        return (self.end_time - self.start_time).total_seconds()
106
+
107
+
108
class BaseDownloader(ABC):
    """Base class for downloader.

    Concrete subclasses implement ``_init_specific`` (which loads the
    source configuration ``self.cfg``) and ``download`` (which yields
    download tasks). The shared request preparation, confirmation
    prompt, progress tracking and result bookkeeping live here.
    """

    def __init__(
        self,
        start_date: Union[str, datetime, pd.Timestamp],
        end_date: Union[str, datetime, pd.Timestamp],
        xlims: Tuple[float, float],
        ylims: Tuple[float, float],
        zlims: Optional[Tuple[float, float]] = None,
        out_path: Union[str, Path] = Path("./raw"),
        model: Optional[str] = "default",
        prefix: Optional[str] = None,
        time_interval: Optional[Union[int, str]] = 24,
        verbose: bool = False,
        variables: Optional[List[str]] = None,
        skip_check: bool = False,
        **kwargs,
    ):
        """Initialise the BaseDownloader class.

        Args:
            start_date: First date of the request (parsed via ``_parse_date``).
            end_date: Last date of the request (parsed via ``_parse_date``).
            xlims: (min, max) x/longitude bounds of the request.
            ylims: (min, max) y/latitude bounds of the request.
            zlims: Optional (min, max) z bounds; for ocean sources the
                source config default is used when omitted.
            out_path: Directory the raw files are written to.
            model: Source sub-model name; "default" is omitted from filenames.
            prefix: Optional extra filename prefix, prepended to the
                source/mode/model prefix.
            time_interval: Output timestep in hours (3/6/12/24) or "best".
            verbose: Print progress messages when True.
            variables: Custom variable list; the source config defaults
                are used when omitted.
            skip_check: Skip the interactive y/n confirmation when True.
            **kwargs: Forwarded to the subclass ``_init_specific``. Also
                accepts the deprecated ``output_directory``/``outdir``
                aliases for ``out_path`` and the internal ``TEST_MODE`` flag.
        """
        self.start_date = _parse_date(start_date)
        self.end_date = _parse_date(end_date)

        # Backwards compatibility: accept the old output-directory keyword
        # names, warn, and map them onto `out_path`.
        if "output_directory" in kwargs:
            out_path = kwargs.pop("output_directory", "./raw")
            print(
                "Warning - the `output_directory` keyword argument has been replaced by `out_path`"
            )
            # FIX: closing backtick was missing from this message.
            print(f"...Setting `out_path={out_path}`")

        elif "outdir" in kwargs:
            out_path = kwargs.pop("outdir", "./raw")
            print(
                "Warning - the `outdir` keyword argument has been replaced by `out_path`"
            )
            # FIX: closing backtick was missing from this message.
            print(f"...Setting `out_path={out_path}`")

        self.outdir = _parse_path(out_path)

        self.xlims = self._validate_coords(xlims)
        self.ylims = self._validate_coords(ylims)

        # Validate zlim input if provided; None means "use source default"
        # (resolved later in prepare_request for ocean sources).
        if zlims is not None:
            self.zlims = self._validate_coords(zlims)
        else:
            self.zlims = None

        self.time_interval = self._validate_time_interval(time_interval)

        self.prefix = prefix
        self.verbose = verbose
        self.skip_check = skip_check

        # Flag to download custom variables or not.
        self._custom_variables = bool(variables)
        self.variables = variables

        # Retry/timeout defaults for subclasses to use.
        self.max_tries = 5
        self.timeout = 30

        # Track download results; updated with the latest batch by
        # _perform_downloads.
        self._batch_result = BatchDownloadResult()

        # These are filled in the individual downloaders!
        # We'll init them here for reference.
        # Mode attribute e.g. {'ocean', 'wave'}
        # Source attribute e.g {'hycom', 'cawcr'}
        self.mode = None
        self.source = None
        self.model = model if model else "default"

        # Testing mode - run program but don't ever call the final API
        self.__test_mode__ = kwargs.pop("TEST_MODE", False)

        # Now we call the source specific init, which loads the source config `self.cfg`
        self._init_specific(**kwargs)

    @abstractmethod
    def _init_specific(self, **kwargs):
        """Initialize source specific attributes (must set ``self.cfg``)."""
        pass

    def prepare_request(self):
        """Prepare attributes including
        - Dataset id mapping (for sequential sources like copernicus, hycom)
        - Download interval and times (source specific)
        - Filename prefixes (source_mode + _model if relevant)
        - Default variables, if custom aren't requested.
        """
        self.dsmap = self.cfg["_DATASETS"]
        self.database = "N/A"  # Init for sources with sequential databases

        # Assign a download interval, monthly or daily.
        if self.cfg["_DOWNLOAD_INTERVAL"] == "daily":
            self.download_interval = "d"
        elif self.cfg["_DOWNLOAD_INTERVAL"] == "monthly":
            self.download_interval = "MS"
        else:
            raise ValueError('_DOWNLOAD_INTERVAL must be one of {"daily", "monthly"}')

        # Times to download (start times). Monthly sources are snapped back
        # to the first day of the month containing the start date.
        if self.start_date.day == 1:
            ts = self.start_date
        elif (self.start_date.day != 1) and (
            self.cfg["_DOWNLOAD_INTERVAL"] == "monthly"
        ):
            ts = self.start_date + MonthBegin(-1)
        else:
            # For daily downloaded data, it can be whatever.
            ts = self.start_date

        self.times = pd.date_range(ts, self.end_date, freq=self.download_interval)

        # Assign the default variables if no custom ones are requested.
        if not self._custom_variables:
            self.variables = self.cfg["_VARIABLES"]

        # Set the prefix.
        # Only append model if not 'default'
        if self.model == "default":
            fname = f"{self.source}_{self.mode}".upper()
        else:
            fname = f"{self.source}_{self.mode}_{self.model}".upper()

        if self.prefix is None:
            self.prefix = fname
        else:
            self.prefix = self.prefix + "_" + fname

        # Assign zlims for the source, if they were not provided
        if (self.mode == "OCEAN") and (self.zlims is None):
            self.zlims = self.cfg["_SOURCE_ZLIMS"]

        # Finally, validate the request now everything has been set.
        # FIX: use `get` (not `pop`) so the config keeps its limit keys and
        # a repeated prepare_request/execute_download call still validates.
        src_xlims = self.cfg.get("_SOURCE_XLIMS", (None, None))
        src_ylims = self.cfg.get("_SOURCE_YLIMS", (None, None))
        src_zlims = self.cfg.get("_SOURCE_ZLIMS", (None, None))
        src_timelims = self.cfg.get("_SOURCE_TIMELIMS", (None, None))

        if self.model == "default":
            src_name = self.source
        else:
            src_name = f"{self.source}_{self.model}"

        self._validate_request_bounds(
            (self.start_date, self.end_date),
            self.xlims,
            self.ylims,
            self.zlims,
            src_timelims,
            src_xlims,
            src_ylims,
            src_zlims,
            src_name,
        )

    def check_request(self):
        """Print out a verbose confirmation of the user request"""
        # Print request
        fmt = "%Y-%m-%d"
        print(
            f"This request involves collection of approx. {len(self.times)} time intervals of {self.source.upper()} {self.mode} data"
        )
        print(f"Files are downloaded in {self.cfg['_DOWNLOAD_INTERVAL']} increments")

        # Note for HYCOM about missing days
        if self.source.lower() == "hycom":
            print(
                "Note that this tool does not replace missing days in the HYCOM database"
            )
            print(
                f"HYCOM data will be exported daily with a {self.time_interval}-hourly timestep"
            )

        # Note for monthly interval datasets when user didn't specify first day of the month
        if self.download_interval == "MS":
            if self.start_date != self.times[0]:
                print(
                    "Note: This data source is downloaded in monthly increments. The start date has been rounded."
                )
            if self.end_date != self.times[-1]:
                print(
                    "Note: This data source is downloaded in monthly increments. The end date has been rounded."
                )

        # Display the full span: monthly downloads run to the end of the
        # final month.
        ts = self.times[0]
        if self.download_interval == "MS":
            te = self.times[-1] + MonthEnd(1)
        else:
            te = self.times[-1]

        print("--------------------")
        print("Confirming Request:")
        print(f"... xLims: {self.xlims}")
        print(f"... yLims: {self.ylims}")
        if self.mode.lower() == "ocean":
            print(f"... zLims: {self.zlims}")
        print(f"... Dates: {ts.strftime(fmt)} to {te.strftime(fmt)}")
        print(f"... Model: {self.model}")
        print(f"... Outdir: {self.outdir.as_posix()}")

        print("\n")

    @abstractmethod
    def download(self):
        """Source-specific download implementation
        Must yield download tasks
        """
        pass

    def execute_download(self) -> BatchDownloadResult:
        """Execute download with progress tracking and error handling.

        Prepares and (optionally) confirms the request, then runs all
        download tasks. Returns an empty BatchDownloadResult when the
        user declines the confirmation prompt.
        """
        self.prepare_request()
        self.check_request()

        if self.__test_mode__:
            # In test mode, skip the confirmation
            return self._perform_downloads()
        else:
            # Confirm y/n or let it rip
            if self.skip_check:
                return self._perform_downloads()
            else:
                if self._query_yes_no("Do you want to continue?"):
                    return self._perform_downloads()
                else:
                    self.log("Finished")
                    return BatchDownloadResult()

    def _perform_downloads(self) -> BatchDownloadResult:
        """Handle the common download workflow - all sources yield tasks"""
        # FIX: removed unused `from time import time` (only needed in
        # _process_single_download, which imports it itself).
        from tqdm import tqdm

        batch_result = BatchDownloadResult()
        batch_result.start_time = datetime.now()

        # All sources now yield tasks
        download_tasks = list(self.download())

        # Count existing files for progress bar
        existing_count = sum(1 for task in download_tasks if task["file_path"].exists())

        # Set up progress bar
        progress_bar = tqdm(
            initial=existing_count, total=len(download_tasks), unit="file"
        )

        # Process each download task
        for task in download_tasks:
            result = self._process_single_download(task, progress_bar)
            batch_result.add_result(result)

        progress_bar.close()
        batch_result.end_time = datetime.now()

        # FIX: keep the latest batch result available on the instance
        # (previously initialised in __init__ but never updated).
        self._batch_result = batch_result

        self._log_download_summary(batch_result)
        return batch_result

    def _process_single_download(self, task: dict, progress_bar) -> FileDownloadResult:
        """Process a single download task.

        A task is a dict with keys ``file_path``, ``timestamp``,
        ``download_func`` and optionally ``url`` and ``variable``.
        Existing files are skipped; exceptions from the download function
        are captured into a FAILED result rather than propagated.
        """
        from time import time

        file_path = task["file_path"]
        url = task.get("url", "N/A")  # Some sources don't use URLs
        timestamp = task["timestamp"]
        variable = task.get("variable", "unknown")
        download_func = task["download_func"]

        # Check if file already exists
        if file_path.exists():
            if self.verbose:
                self.log(f"{file_path.name} already exists! Moving on...")

            return FileDownloadResult(
                status=DownloadStatus.SKIPPED,
                file_path=file_path,
                timestamp=timestamp,
                variable=variable,
                message="File already exists",
            )

        # Perform the download
        start_time = time()

        if self.verbose:
            if url != "N/A":
                self.log(f"Fetching data for {timestamp} from {url}")
            else:
                self.log(f"Fetching data for {timestamp} via API")

        try:
            if not self.__test_mode__:
                success = download_func()
            else:
                success = True  # Simulate success in test mode

            duration = time() - start_time

            if success:
                # In test mode no file is written, so the size may be None.
                file_size = file_path.stat().st_size if file_path.exists() else None

                progress_bar.set_postfix(
                    last_downloaded=timestamp.strftime("%Y-%m-%d"), refresh=False
                )
                progress_bar.update()

                return FileDownloadResult(
                    status=DownloadStatus.SUCCESS,
                    file_path=file_path,
                    url=url,
                    timestamp=timestamp,
                    variable=variable,
                    message=f"Downloaded successfully in {duration:.2f}s",
                    bytes_downloaded=file_size,
                    duration_seconds=duration,
                )
            else:
                return FileDownloadResult(
                    status=DownloadStatus.FAILED,
                    file_path=file_path,
                    url=url,
                    timestamp=timestamp,
                    variable=variable,
                    message="Download function returned False",
                    duration_seconds=duration,
                )

        except Exception as e:
            duration = time() - start_time
            return FileDownloadResult(
                status=DownloadStatus.FAILED,
                file_path=file_path,
                url=url,
                timestamp=timestamp,
                variable=variable,
                message=f"Exception during download: {str(e)}",
                error=e,
                duration_seconds=duration,
            )

    def _log_download_summary(self, batch_result: BatchDownloadResult) -> None:
        """Log a summary of the download batch results"""
        print("\n" + "=" * 50)
        print("DOWNLOAD SUMMARY")
        print("=" * 50)

        summary = batch_result.summary
        print(f"Total files processed: {batch_result.total_files}")
        print(f"Successful downloads: {summary['success']}")
        print(f"Failed downloads: {summary['failed']}")
        print(f"Skipped (existing): {summary['skipped']}")
        print(f"Success rate: {batch_result.success_rate:.1f}%")

        if batch_result.total_bytes_downloaded > 0:
            size_mb = batch_result.total_bytes_downloaded / (1024 * 1024)
            print(f"Total data downloaded: {size_mb:.2f} MB")

        if batch_result.duration_seconds:
            print(f"Total duration: {batch_result.duration_seconds:.2f} seconds")

        # List failed files if any
        if batch_result.failed_files:
            print(f"\nFailed downloads ({len(batch_result.failed_files)}):")
            for failed_result in batch_result.failed_files:
                print(f"  • {failed_result.file_path.name}: {failed_result.message}")

        print("=" * 50)

    def log(self, message: str):
        """Print *message* only when verbose mode is enabled."""
        if self.verbose:
            print(message)

    def _load_config(self):
        """Load the source config and base URL onto the instance."""
        cfg, base_url = _get_config(self.mode, self.source, self.model)

        self.cfg = cfg
        self.base_url = base_url

    @staticmethod
    def _validate_coords(coords: Tuple[float, float]) -> Tuple[float, float]:
        """Validate a (min, max) coordinate pair, coercing entries to float."""
        if not isinstance(coords, tuple) or len(coords) != 2:
            raise ValueError("Coordinates must be a tuple of two floats")
        return tuple(float(c) for c in coords)

    @staticmethod
    def _validate_time_interval(interval: Union[int, str]) -> Union[int, str]:
        """Validate the requested output timestep (hours, or "best").

        FIX: annotation was ``int`` although the string "best" is accepted.
        """
        valid_intervals = [3, 6, 12, 24, "best"]
        if interval not in valid_intervals:
            raise ValueError(f"Invalid time interval. Must be one of {valid_intervals}")
        return interval

    @staticmethod
    def _validate_request_bounds(
        timelims,
        xlims,
        ylims,
        zlims,
        src_timelims,
        src_xlims,
        src_ylims,
        src_zlims,
        source_name,
    ):
        """
        Validate that the requested coordinates and time range fall within the dataset's limits.

        Each ``src_*`` pair may contain None entries, which disable that
        side of the check. Raises ValueError on any out-of-bounds request.
        """
        source_name = source_name.upper()

        def check_bounds(name, req_start, req_end, src_start, src_end):
            # One-sided checks: a None source limit means "unbounded".
            if src_start is not None and req_start < src_start:
                raise ValueError(
                    f"{name} start ({req_start}) is below {source_name} data extent ({src_start} to {src_end})"
                )
            if src_end is not None and req_end > src_end:
                raise ValueError(
                    f"{name} end ({req_end}) is above {source_name} data extent ({src_start} to {src_end})"
                )

        # Validate time limits
        check_bounds(
            "Time",
            pd.Timestamp(timelims[0]),
            pd.Timestamp(timelims[1]),
            pd.Timestamp(src_timelims[0]) if src_timelims[0] else None,
            pd.Timestamp(src_timelims[1]) if src_timelims[1] else None,
        )

        # Validate spatial limits
        check_bounds("X", xlims[0], xlims[1], src_xlims[0], src_xlims[1])
        check_bounds("Y", ylims[0], ylims[1], src_ylims[0], src_ylims[1])

        # Validate Z limits if provided
        if zlims and src_zlims:
            check_bounds("Z", zlims[0], zlims[1], src_zlims[0], src_zlims[1])

    @staticmethod
    def _query_yes_no(question, default="yes"):
        """Ask a yes/no question via raw_input() and return their answer."""
        valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False}
        if default is None:
            prompt = " [y/n] "
        elif default == "yes":
            prompt = " [Y/n] "
        elif default == "no":
            prompt = " [y/N] "
        else:
            raise ValueError("invalid default answer: '%s'" % default)

        # Re-prompt until an answer in `valid` (or an accepted empty
        # default) is entered.
        while True:
            sys.stdout.write(question + prompt)
            choice = input().lower()
            if default is not None and choice == "":
                return valid[default]
            elif choice in valid:
                return valid[choice]
            else:
                sys.stdout.write(
                    "Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n"
                )