eo-tides 0.7.4.dev7__py3-none-any.whl → 0.7.5.dev1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
eo_tides/eo.py CHANGED
@@ -58,7 +58,7 @@ def _standardise_inputs(
     """

     # If `data` is an xarray object, extract its GeoBox and time
-    if isinstance(data, (xr.DataArray, xr.Dataset)):
+    if isinstance(data, xr.DataArray | xr.Dataset):
         # Try to extract GeoBox
         try:
             gbox: GeoBox = data.odc.geobox
@@ -544,14 +544,13 @@ def pixel_tides(
     # Reproject into original high resolution grid
     if resample:
         print("Reprojecting tides into original resolution")
-        tides_highres = _pixel_tides_resample(
+        return _pixel_tides_resample(
             tides_lowres,
             gbox,
             resample_method,
             dask_chunks,
             dask_compute,
         )
-        return tides_highres

     print("Returning low resolution tide array")
     return tides_lowres
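Note on the changes above: `return _pixel_tides_resample(...)` returns the resampled result directly rather than assigning it to an intermediate `tides_highres` variable, and `isinstance(data, xr.DataArray | xr.Dataset)` uses a PEP 604 union, which `isinstance` accepts from Python 3.10 onwards. A minimal standalone sketch of the `isinstance` equivalence (illustrative only, not eo-tides code):

    import numpy as np
    import xarray as xr

    data = xr.DataArray(np.zeros((2, 2)), dims=["y", "x"])

    # Pre-3.10 spelling: a tuple of candidate types
    print(isinstance(data, (xr.DataArray, xr.Dataset)))  # True

    # Python 3.10+ spelling used in the new eo.py: a PEP 604 union
    print(isinstance(data, xr.DataArray | xr.Dataset))   # True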
eo_tides/model.py CHANGED
@@ -55,10 +55,7 @@ def _parallel_splits(
     raw_value = os.environ.get("CPU_GUARANTEE") or psutil.cpu_count(logical=False) or os.cpu_count() or 1

     # Convert to integer
-    if isinstance(raw_value, str):
-        parallel_max = int(float(raw_value))
-    else:
-        parallel_max = int(raw_value)
+    parallel_max = int(float(raw_value)) if isinstance(raw_value, str) else int(raw_value)

     # Calculate optimal number of splits based on constraints
     splits_by_size = total_points / min_points_per_split
@@ -66,8 +63,7 @@ def _parallel_splits(
     optimal_splits = min(splits_by_size, splits_by_cpu)

     # Convert to integer and ensure at least 1 split
-    final_split_count = int(max(1, optimal_splits))
-    return final_split_count
+    return int(max(1, optimal_splits))


 def _model_tides(
@@ -170,12 +166,7 @@ def _model_tides(
     hc = amp * np.exp(cph)

     # Compute delta times based on model
-    if pytmd_model.corrections in ("OTIS", "ATLAS", "TMD3", "netcdf"):
-        # Use delta time at 2000.0 to match TMD outputs
-        deltat = np.zeros_like(ts.tt_ut1)
-    else:
-        # Use interpolated delta times
-        deltat = ts.tt_ut1
+    deltat = np.zeros_like(ts.tt_ut1) if pytmd_model.corrections in ("OTIS", "ATLAS", "TMD3", "netcdf") else ts.tt_ut1

     # In "one-to-many" mode, extracted tidal constituents and timesteps
     # are repeated/multiplied out to match the number of input points and
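Note on the changes above: collapsing the `if`/`else` blocks into conditional expressions preserves behaviour. In `_parallel_splits`, string values (for example a `CPU_GUARANTEE` environment variable) still go through `float()` first, because `int()` cannot parse a decimal string directly; a minimal illustration with a made-up value:

    raw_value = "4.0"  # hypothetical CPU_GUARANTEE value read from the environment

    # int() rejects decimal strings outright
    try:
        int(raw_value)
    except ValueError as err:
        print(err)  # invalid literal for int() with base 10: '4.0'

    # Routing through float() first, as model.py does, truncates cleanly to 4
    parallel_max = int(float(raw_value)) if isinstance(raw_value, str) else int(raw_value)
    print(parallel_max)  # 4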
eo_tides/stats.py CHANGED
@@ -44,7 +44,7 @@ def _tide_statistics(obs_tides, all_tides, min_max_q=(0.0, 1.0), dim="time"):
     offset_high = offset_high_m / tr

     # Combine into a single dataset
-    stats_ds = xr.merge(
+    return xr.merge(
         [
             mot.rename("mot"),
             mat.rename("mat"),
@@ -61,8 +61,6 @@ def _tide_statistics(obs_tides, all_tides, min_max_q=(0.0, 1.0), dim="time"):
         compat="override",
     )

-    return stats_ds
-

 def _stats_plain_english(mot, mat, hot, hat, lot, lat, otr, tr, spread, offset_low, offset_high):
     # Plain text descriptors
@@ -568,7 +566,7 @@ def pixel_stats(
     # Reproject statistics into original high resolution grid
     if resample:
         print("Reprojecting statistics into original resolution")
-        stats_highres = _pixel_tides_resample(
+        return _pixel_tides_resample(
             stats_lowres,
             gbox,
             resample_method,
@@ -576,7 +574,6 @@ def pixel_stats(
             dask_compute,
             None,
         )
-        return stats_highres

     print("Returning low resolution statistics array")
     return stats_lowres
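Note on the changes above: `_tide_statistics` now returns the merged dataset directly instead of binding it to `stats_ds` first; the contents are unchanged. For reference, `xr.merge` over a list of renamed DataArrays yields a single Dataset whose variables carry those names, roughly as sketched below (the statistics shown are illustrative, not the full set computed by eo-tides):

    import numpy as np
    import xarray as xr

    tides = xr.DataArray(np.arange(5.0), dims=["time"], name="tide_height")

    # Each rename() sets the variable name used in the merged Dataset;
    # compat="override" skips consistency checks on overlapping coordinates
    stats_ds = xr.merge(
        [
            tides.mean().rename("mot"),
            tides.max().rename("hot"),
            tides.min().rename("lot"),
        ],
        compat="override",
    )
    print(list(stats_ds.data_vars))  # ['mot', 'hot', 'lot']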
eo_tides/utils.py CHANGED
@@ -7,7 +7,7 @@ import pathlib
 import textwrap
 import warnings
 from collections import Counter
-from typing import List, Union
+from typing import TypeAlias

 import numpy as np
 import odc.geo
@@ -21,7 +21,7 @@ from scipy.spatial import cKDTree as KDTree
 from tqdm import tqdm

 # Type alias for all possible inputs to "time" params
-DatetimeLike = Union[np.ndarray, pd.DatetimeIndex, pd.Timestamp, datetime.datetime, str, List[str]]
+DatetimeLike: TypeAlias = np.ndarray | pd.DatetimeIndex | pd.Timestamp | datetime.datetime | str | list[str]


 def _get_duplicates(array):
@@ -54,8 +54,7 @@ def _set_directory(
     directory = pathlib.Path(directory).expanduser()
     if not directory.exists():
         raise FileNotFoundError(f"No valid tide model directory found at path `{directory}`")
-    else:
-        return directory
+    return directory


 def _standardise_time(
@@ -161,7 +160,7 @@ def _standardise_models(
             raise ValueError(error_text)

         # Return set of all ensemble plus any other requested models
-        models_to_process = sorted(list(set(ensemble_models + [m for m in models_requested if m != "ensemble"])))
+        models_to_process = sorted(set(ensemble_models + [m for m in models_requested if m != "ensemble"]))

     # Otherwise, models to process are the same as those requested
     else:
@@ -271,7 +270,7 @@ def _clip_model_file(
         for i in ["lat_z", "lat_v", "lat_u", "con"]:
             try:
                 nc_clipped[i] = nc_clipped[i].isel(nx=0)
-            except:
+            except KeyError:
                 pass

     return nc_clipped
@@ -558,8 +557,7 @@ def list_models(

     if raise_error:
         raise Exception(warning_msg)
-    else:
-        warnings.warn(warning_msg, UserWarning)
+    warnings.warn(warning_msg, UserWarning)

     # Return list of available and supported models
     return available_models, supported_models
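Note on the changes above: the `DatetimeLike` alias swaps `typing.Union`/`typing.List` for a PEP 604 union with builtin `list[str]`, marked explicitly via `typing.TypeAlias` (added in Python 3.10), and the bare `except:` in `_clip_model_file` is narrowed to `except KeyError:`. Both alias spellings describe the same set of accepted types; a minimal standalone sketch of the new alias in use (the helper function is hypothetical, not part of eo-tides):

    import datetime
    from typing import TypeAlias

    import numpy as np
    import pandas as pd

    # Same spelling as the new utils.py alias
    DatetimeLike: TypeAlias = np.ndarray | pd.DatetimeIndex | pd.Timestamp | datetime.datetime | str | list[str]

    def to_datetimeindex(time: DatetimeLike) -> pd.DatetimeIndex:
        # Hypothetical helper: normalise any accepted input to a DatetimeIndex
        return pd.DatetimeIndex(np.atleast_1d(np.array(time, dtype="datetime64[ns]")))

    print(to_datetimeindex("2024-01-01"))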
eo_tides/validation.py CHANGED
@@ -164,7 +164,7 @@ def _load_gesla_dataset(site, path, na_value):
     )

     # Combine two date fields
-    gesla_df = (
+    return (
         gesla_df.assign(
             time=pd.to_datetime(gesla_df["date"] + " " + gesla_df["time"]),
             site_code=site,
@@ -173,8 +173,6 @@ def _load_gesla_dataset(site, path, na_value):
         )
         .set_index("time")
     )
-    return gesla_df
-

 def _nearest_row(gdf, x, y, max_distance=None):
     # Create a point to find the nearest neighbor for
@@ -288,7 +286,7 @@ def load_gauge_gesla(
         site_code = [site_code] if not isinstance(site_code, list) else site_code

     # If x and y are tuples, use xy bounds to identify sites
-    elif isinstance(x, (tuple, list)) & isinstance(y, (tuple, list)):
+    elif isinstance(x, tuple | list) & isinstance(y, tuple | list):
         bbox = BoundingBox.from_xy(x, y)
         site_code = metadata_gdf.cx[bbox.left : bbox.right, bbox.top : bbox.bottom].index

@@ -317,7 +315,7 @@ def load_gauge_gesla(
     # Prepare times
     if time is None:
         time = ["1800", str(datetime.datetime.now().year)]
-    time = [time] if not isinstance(time, (list, tuple)) else time
+    time = [time] if not isinstance(time, list | tuple) else time
     start_time = _round_date_strings(time[0], round_type="start")
     end_time = _round_date_strings(time[-1], round_type="end")
eo_tides-{0.7.4.dev7 → 0.7.5.dev1}.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: eo-tides
-Version: 0.7.4.dev7
+Version: 0.7.5.dev1
 Summary: Tide modelling tools for large-scale satellite earth observation analysis
 Project-URL: Homepage, https://GeoscienceAustralia.github.io/eo-tides/
 Project-URL: Repository, https://github.com/GeoscienceAustralia/eo-tides
eo_tides-0.7.5.dev1.dist-info/RECORD ADDED
@@ -0,0 +1,10 @@
+eo_tides/__init__.py,sha256=LLvX-IipE209LbYLObShRLO5vxQQYBaIP1d3TMHix24,1802
+eo_tides/eo.py,sha256=geXMd9roM7UmZCHK7l7bUUXai2pXGDJdzgAw4GBJ_58,23944
+eo_tides/model.py,sha256=SWqBr0ajin1gw37nYa8ukHHzeiebK5h8XvkXO9LMX4E,37480
+eo_tides/stats.py,sha256=lvl9-0k20ffLQh8Y1kAC_afhjQviK11_3_saRUtX3ws,23009
+eo_tides/utils.py,sha256=wfzJFjWrJVgN8TqRqvwE8Tbtb2WPUQRwmCsihb5j3jc,26625
+eo_tides/validation.py,sha256=6ugPwhNglIovOVTaozZnAiLaIBqU5acOiVmFPHvFDHE,12657
+eo_tides-0.7.5.dev1.dist-info/METADATA,sha256=J_kUz_LeKoKQdTA5xWQZ_lWe3DQI3S_Y6f4m7A8qpas,9235
+eo_tides-0.7.5.dev1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+eo_tides-0.7.5.dev1.dist-info/licenses/LICENSE,sha256=owxWsXViCL2J6Ks3XYhot7t4Y93nstmXAT95Zf030Cc,11350
+eo_tides-0.7.5.dev1.dist-info/RECORD,,
eo_tides-0.7.4.dev7.dist-info/RECORD REMOVED
@@ -1,10 +0,0 @@
-eo_tides/__init__.py,sha256=LLvX-IipE209LbYLObShRLO5vxQQYBaIP1d3TMHix24,1802
-eo_tides/eo.py,sha256=TuFt9SSiO9Z2o8Kr1g-wFPzofp0HTcgkfhT83zu03kc,23983
-eo_tides/model.py,sha256=jRx6HNgTbK0inNqOkVivr52jPMJPWZ85gJeLjY2Vfk8,37697
-eo_tides/stats.py,sha256=ELQpqIH86442IYgjrGrIK3mi0-pu2ZijFw53arA2FYg,23072
-eo_tides/utils.py,sha256=T19OuPLHzaUKcovCVGANvmOiRu-L8VuDXSTzmNlA6Bo,26647
-eo_tides/validation.py,sha256=paTewYzKf5m_HkSZUeBxbDQRz8WPJRtSyHjwo8lFtrc,12685
-eo_tides-0.7.4.dev7.dist-info/METADATA,sha256=hJeocsv1ThjgnxHqFFwL367_AtcB5mdi3ImRnIYwVak,9235
-eo_tides-0.7.4.dev7.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-eo_tides-0.7.4.dev7.dist-info/licenses/LICENSE,sha256=owxWsXViCL2J6Ks3XYhot7t4Y93nstmXAT95Zf030Cc,11350
-eo_tides-0.7.4.dev7.dist-info/RECORD,,