gfdlvitals 3.0.8__tar.gz → 3.0.10__tar.gz

This diff compares two publicly released versions of the package as published to a supported public registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in that registry.
Files changed (67)
  1. {gfdlvitals-3.0.8/gfdlvitals.egg-info → gfdlvitals-3.0.10}/PKG-INFO +12 -2
  2. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/averagers/cubesphere.py +3 -1
  3. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/averagers/ice.py +4 -1
  4. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/averagers/land_lm4.py +3 -1
  5. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/averagers/latlon.py +3 -1
  6. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/averagers/tripolar.py +3 -1
  7. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/cli.py +1 -0
  8. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/extensions.py +3 -8
  9. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/models/CM4.py +12 -1
  10. gfdlvitals-3.0.10/gfdlvitals/util/extract_ocean_scalar.py +63 -0
  11. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/util/xrtools.py +6 -0
  12. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/version.py +1 -1
  13. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10/gfdlvitals.egg-info}/PKG-INFO +12 -2
  14. gfdlvitals-3.0.8/gfdlvitals/util/extract_ocean_scalar.py +0 -34
  15. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/LICENSE +0 -0
  16. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/README.md +0 -0
  17. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/docs/source/conf.py +0 -0
  18. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/__init__.py +0 -0
  19. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/averagers/__init__.py +0 -0
  20. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/averagers/land_lm3.py +0 -0
  21. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/diags/__init__.py +0 -0
  22. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/diags/amoc.py +0 -0
  23. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/diags/fms.py +0 -0
  24. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/diags/m6toolbox.py +0 -0
  25. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/models/ESM2.py +0 -0
  26. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/models/__init__.py +0 -0
  27. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/plot.py +0 -0
  28. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/LM3_variable_dictionary.pkl +0 -0
  29. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto/LICENSE.txt +0 -0
  30. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto/Roboto-Black.ttf +0 -0
  31. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto/Roboto-BlackItalic.ttf +0 -0
  32. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto/Roboto-Bold.ttf +0 -0
  33. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto/Roboto-BoldItalic.ttf +0 -0
  34. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto/Roboto-Italic.ttf +0 -0
  35. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto/Roboto-Light.ttf +0 -0
  36. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto/Roboto-LightItalic.ttf +0 -0
  37. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto/Roboto-Medium.ttf +0 -0
  38. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto/Roboto-MediumItalic.ttf +0 -0
  39. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto/Roboto-Regular.ttf +0 -0
  40. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto/Roboto-Thin.ttf +0 -0
  41. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto/Roboto-ThinItalic.ttf +0 -0
  42. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto_Condensed/LICENSE.txt +0 -0
  43. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto_Condensed/RobotoCondensed-Bold.ttf +0 -0
  44. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto_Condensed/RobotoCondensed-BoldItalic.ttf +0 -0
  45. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto_Condensed/RobotoCondensed-Italic.ttf +0 -0
  46. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto_Condensed/RobotoCondensed-Light.ttf +0 -0
  47. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto_Condensed/RobotoCondensed-LightItalic.ttf +0 -0
  48. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/fonts/Roboto_Condensed/RobotoCondensed-Regular.ttf +0 -0
  49. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/historical.db +0 -0
  50. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/resources/picontrol.db +0 -0
  51. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/sample.py +0 -0
  52. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/util/__init__.py +0 -0
  53. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/util/average.py +0 -0
  54. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/util/git.py +0 -0
  55. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/util/gmeantools.py +0 -0
  56. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/util/merge.py +0 -0
  57. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals/util/netcdf.py +0 -0
  58. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals.egg-info/SOURCES.txt +0 -0
  59. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals.egg-info/dependency_links.txt +0 -0
  60. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals.egg-info/not-zip-safe +0 -0
  61. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals.egg-info/requires.txt +0 -0
  62. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/gfdlvitals.egg-info/top_level.txt +0 -0
  63. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/scripts/db2nc +0 -0
  64. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/scripts/gfdlvitals +0 -0
  65. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/scripts/plotdb +0 -0
  66. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/setup.cfg +0 -0
  67. {gfdlvitals-3.0.8 → gfdlvitals-3.0.10}/setup.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.2
  Name: gfdlvitals
- Version: 3.0.8
+ Version: 3.0.10
  Summary: Tools for calculating scalar diagnostics from GFDL models
  Home-page: https://github.com/jkrasting/gfdlvitals
  Author: John Krasting
@@ -16,6 +16,16 @@ Classifier: Programming Language :: Python :: 3.9
  Requires-Python: >=3.7
  Description-Content-Type: text/x-rst
  License-File: LICENSE
+ Requires-Dist: setuptools
+ Requires-Dist: numpy
+ Requires-Dist: scipy
+ Requires-Dist: h5netcdf
+ Requires-Dist: pandas
+ Requires-Dist: netCDF4
+ Requires-Dist: matplotlib
+ Requires-Dist: xarray
+ Requires-Dist: cftime
+ Requires-Dist: nc-time-axis


  **gfdlvitals** is a Python package in the public domain
gfdlvitals/averagers/cubesphere.py
@@ -62,7 +62,9 @@ def xr_average(fyear, tar, modules):
  _masked_area.sum().data,
  )

- weights = dset.average_DT.astype("float") * _masked_area
+ t_bounds = dset.time_bnds
+ dt = t_bounds[:,:,1] - t_bounds[:,:,0]
+ weights = dt.astype("float") * _masked_area
  _dset_weighted = xrtools.xr_weighted_avg(dset, weights)
  xrtools.xr_to_db(
  _dset_weighted, fyear, f"{fyear}.{region}Ave{modules[member]}.db"
gfdlvitals/averagers/ice.py
@@ -136,7 +136,10 @@ def xr_average(fyear, tar, modules):
  newvars = {x: x + "_min" for x in list(_dset_min.variables)}
  _dset_min = _dset_min.rename(newvars)

- weights = dset.average_DT.astype("float")
+ t_bounds = dset.time_bnds
+ dt = t_bounds[:,:,1] - t_bounds[:,:,0]
+ weights = dt.astype("float")
+
  _dset_weighted = xrtools.xr_weighted_avg(_dset, weights)
  newvars = {x: x + "_mean" for x in list(_dset_weighted.variables)}
  _dset_weighted = _dset_weighted.rename(newvars)
gfdlvitals/averagers/land_lm4.py
@@ -111,7 +111,9 @@ def xr_average(fyear, tar, modules):

  # _masked_area = _masked_area.fillna(0)

- weights = dset.average_DT.astype("float") * _masked_area
+ t_bounds = dset.time_bnds
+ dt = t_bounds[:,:,1] - t_bounds[:,:,0]
+ weights = dt.astype("float") * _masked_area
  if _measure == "soil_area":
  area_x_depth = _masked_area * depth
  gmeantools.write_sqlite_data(
gfdlvitals/averagers/latlon.py
@@ -57,7 +57,9 @@ def xr_average(fyear, tar, modules):
  _masked_area.sum().data,
  )

- weights = dset.average_DT.astype("float") * _masked_area
+ t_bounds = dset.time_bnds
+ dt = t_bounds[:,:,1] - t_bounds[:,:,0]
+ weights = dt.astype("float") * _masked_area
  _dset_weighted = xrtools.xr_weighted_avg(dset, weights)
  xrtools.xr_to_db(
  _dset_weighted, fyear, f"{fyear}.{region}Ave{modules[member]}.db"
gfdlvitals/averagers/tripolar.py
@@ -62,7 +62,9 @@ def xr_average(fyear, tar, modules):
  _masked_area.sum().data,
  )

- weights = dset.average_DT.astype("float") * _masked_area
+ t_bounds = dset.time_bnds
+ dt = t_bounds[:,:,1] - t_bounds[:,:,0]
+ weights = dt.astype("float") * _masked_area
  _dset_weighted = xrtools.xr_weighted_avg(dset, weights)
  xrtools.xr_to_db(
  _dset_weighted, fyear, f"{fyear}.{region}Ave{modules[member]}.db"
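Each of the five averagers above (cubesphere, ice, land_lm4, latlon, tripolar) makes the same change: the time weights are now derived from the dataset's time_bnds bounds variable rather than the average_DT diagnostic. Below is a minimal, self-contained sketch of that weighting pattern with xarray; the dataset, variable names, and values are invented for illustration, and the extra [:,:,1] index in the package code reflects an additional dimension on its time_bnds arrays.

    import numpy as np
    import xarray as xr

    # Hypothetical dataset with CF-style time bounds (dims: time, nv), in days
    bnds = np.array([[0.0, 31.0], [31.0, 59.0], [59.0, 90.0]])
    dset = xr.Dataset(
        {
            "tas": ("time", np.array([1.0, 2.0, 3.0])),
            "time_bnds": (("time", "nv"), bnds),
        }
    )

    # The length of each averaging interval becomes the time weight
    dt = dset.time_bnds.isel(nv=1) - dset.time_bnds.isel(nv=0)
    print(float(dset["tas"].weighted(dt).mean()))  # (31*1 + 28*2 + 31*3) / 90 = 2.0

In the package itself these interval weights are additionally multiplied by the masked cell areas (where applicable) before being passed to xrtools.xr_weighted_avg.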
gfdlvitals/cli.py
@@ -123,6 +123,7 @@ def process_year(args, infile):
  "AeroCMIP",
  "Ocean",
  "Ice",
+ "IceShelf",
  "TOPAZ",
  "COBALT",
  "BLING",
gfdlvitals/extensions.py
@@ -617,26 +617,21 @@ def open_db(
  for var in variables:
  tsobj = Timeseries(dbfile, var, legacy_land=legacy_land, start=start, end=end)
  if len(tsobj.t) > 0:
- data[var] = tsobj.data
- years = years + list(tsobj.t)
+ data[var] = pd.Series(tsobj.data, index=list(tsobj.t))
  attributes[var] = {
  "long_name": tsobj.long_name,
  "units": tsobj.units,
  "cell_measure": tsobj.cell_measure,
  }

- years = sorted(list(set(years)))
- years = [x + float(yearshift) for x in years]
-
- variables = list(set(variables) - set(skipped))
-
  if start is None:
  start = -1 * math.inf

  if end is None:
  end = math.inf

- df = pd.DataFrame(data, index=years)
+ df = pd.DataFrame(data)
+ df.index = df.index + float(yearshift)
  df = df[(df.index >= start) & (df.index <= end)]
  df.index = cftime.num2date(
  (df.index * 365.0) - (365.0 / 2.0) - 1,
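The open_db rewrite leans on pandas index alignment: each variable becomes a Series indexed by its own time values, and the DataFrame constructor aligns them on the union of those indexes, replacing the manual bookkeeping of a shared years list. A small illustration of that behaviour (the variable names and the year shift are made up):

    import pandas as pd

    data = {
        "tas": pd.Series([14.1, 14.3], index=[1.0, 2.0]),
        "sos": pd.Series([34.7, 34.8], index=[2.0, 3.0]),
    }

    # Series with different indexes align on their union; gaps become NaN
    df = pd.DataFrame(data)
    df.index = df.index + 1950.0  # shift the whole index at once
    print(df)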
gfdlvitals/models/CM4.py
@@ -55,13 +55,24 @@ def routines(args, infile):
  if any(comp in comps for comp in ["ice", "all"]):
  averagers.ice.xr_average(fyear, tar, modules)

+ # -- Ice Shelf
+ fname = f"{fyear}.ice_shelf_scalar.nc"
+ if any(comp in comps for comp in ["iceshelf", "all"]):
+ if tar_member_exists(tar, fname):
+ print(fname)
+ fdata = nctools.extract_from_tar(tar, fname, ncfile=True)
+ extract_ocean_scalar.mom6(
+ fdata, fyear, "./", outname="globalAveIceShelf.db"
+ )
+ fdata.close()
+
  # -- Ocean
  fname = f"{fyear}.ocean_scalar_annual.nc"
  if any(comp in comps for comp in ["ocean", "all"]):
  if tar_member_exists(tar, fname):
  print(f"{fyear}.ocean_scalar_annual.nc")
  fdata = nctools.extract_from_tar(tar, fname, ncfile=True)
- extract_ocean_scalar.mom6(fdata, fyear, "./")
+ extract_ocean_scalar.mom6(fdata, fyear, "./", outname="globalAveOcean.db")
  fdata.close()

  # -- OBGC
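The new Ice Shelf block in CM4.routines follows the same pattern as the Ocean block: look for a scalar file inside the history tar, open it, and hand it to extract_ocean_scalar.mom6 with a component-specific output database name. The sketch below is an illustrative stand-in for that lookup-and-open step using only the standard library and netCDF4; tar_member_exists and nctools.extract_from_tar are the package's own helpers and may behave differently.

    import tarfile
    import netCDF4

    def open_netcdf_member(tar_path, member_name):
        """Return a netCDF4.Dataset for member_name, or None if it is absent."""
        with tarfile.open(tar_path, mode="r") as tar:
            if member_name not in tar.getnames():
                return None
            payload = tar.extractfile(member_name).read()
        # netCDF4 can read a dataset directly from an in-memory buffer
        return netCDF4.Dataset(member_name, mode="r", memory=payload)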
gfdlvitals-3.0.10/gfdlvitals/util/extract_ocean_scalar.py (new file)
@@ -0,0 +1,63 @@
+ """ Extract scalar fields from ocean model output """
+
+ import numpy as np
+ from . import gmeantools
+
+ __all__ = ["mom6"]
+
+
+ def mom6(fdata, fyear, outdir, outname="globalAveOcean.db"):
+ """Extract MOM6 scalar output and save to sqlite
+
+ Parameters
+ ----------
+ fdata : netCDF4.Dataset
+ Input ocean_scalar.nc file
+ fyear : str
+ Year being processed
+ outdir : str
+ Path to output directory
+ """
+
+ ignore_list = ["time_bounds", "time_bnds", "average_T2", "average_T1", "average_DT", "nv"]
+
+ var_dict = fdata.variables.keys()
+ var_dict = list(set(var_dict) - set(ignore_list))
+
+ for varname in var_dict:
+ if len(fdata.variables[varname].shape) <= 2:
+ units = gmeantools.extract_metadata(fdata, varname, "units")
+ long_name = gmeantools.extract_metadata(fdata, varname, "long_name")
+ result = fdata.variables[varname]
+ if result.shape[0] == 12:
+ if len(result.shape) == 2:
+ result = result[:, 0]
+ else:
+ result = result[:]
+ result = np.ma.average(
+ result,
+ weights=[
+ 31.0,
+ 28.0,
+ 31.0,
+ 30.0,
+ 31.0,
+ 30.0,
+ 31.0,
+ 31.0,
+ 30.0,
+ 31.0,
+ 30.0,
+ 31.0,
+ ],
+ axis=0,
+ )
+ elif result.shape[0] == 1:
+ if len(result.shape) == 2:
+ result = result[0, 0]
+ else:
+ result = result[0]
+ sqlfile = f"{outdir}/{fyear}.{outname}"
+ gmeantools.write_metadata(sqlfile, varname, "units", units)
+ gmeantools.write_metadata(sqlfile, varname, "long_name", long_name)
+ gmeantools.write_sqlite_data(sqlfile, varname, fyear[:4], result)
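Compared with the deleted 3.0.8 version (shown further below), the new extractor also accepts files carrying twelve monthly records and collapses them to an annual value using hard-coded month lengths, i.e. a day-weighted mean on a 365-day calendar. A quick stand-alone check of that weighting with NumPy (the monthly values are invented):

    import numpy as np

    # Days per month in a no-leap (365-day) calendar, matching the weights
    # hard-coded in mom6()
    month_lengths = np.array(
        [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31], dtype=float
    )

    monthly = np.ma.masked_invalid(np.linspace(2.0, 13.0, 12))
    print(np.ma.average(monthly, weights=month_lengths, axis=0))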
gfdlvitals/util/xrtools.py
@@ -1,6 +1,7 @@
  """ Tools for working with xarray datasets """

  import xarray as xr
+ import numpy as np

  from gfdlvitals.util.gmeantools import write_sqlite_data
  from gfdlvitals.util.gmeantools import write_metadata
@@ -84,8 +85,13 @@ def xr_weighted_avg(dset, weights):
  variables = list(dset.variables.keys())
  for x in variables:
  if sorted(dset[x].dims) == sorted(weight.dims):
+ if 'timedelta' in str(dset[x].dtype):
+ dset[x] = dset[x].astype(np.float32)
  _dset[x] = dset[x]

+ if isinstance(weight, xr.DataArray):
+ weight = weight.fillna(0.0)
+
  _dset_weighted = _dset.weighted(weight).mean()
  for x in list(_dset_weighted.variables):
  _dset_weighted[x] = _dset_weighted[x].astype(dset[x].dtype)
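The two additions to xr_weighted_avg guard against inputs that would otherwise trip up the weighted average: timedelta-typed variables are cast to float32, and missing values in the weights are replaced with zero, which effectively drops those cells from the mean. A tiny illustration of the weight handling (array values are made up):

    import numpy as np
    import xarray as xr

    da = xr.DataArray([1.0, 2.0, 3.0], dims="x")
    weights = xr.DataArray([1.0, np.nan, 1.0], dims="x")

    # Zero-weighting the masked cell excludes it from the weighted mean
    print(float(da.weighted(weights.fillna(0.0)).mean()))  # -> 2.0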
gfdlvitals/version.py
@@ -1,3 +1,3 @@
  """momlevel: version information"""

- __version__ = "3.0.8"
+ __version__ = "3.0.10"
gfdlvitals.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.2
  Name: gfdlvitals
- Version: 3.0.8
+ Version: 3.0.10
  Summary: Tools for calculating scalar diagnostics from GFDL models
  Home-page: https://github.com/jkrasting/gfdlvitals
  Author: John Krasting
@@ -16,6 +16,16 @@ Classifier: Programming Language :: Python :: 3.9
  Requires-Python: >=3.7
  Description-Content-Type: text/x-rst
  License-File: LICENSE
+ Requires-Dist: setuptools
+ Requires-Dist: numpy
+ Requires-Dist: scipy
+ Requires-Dist: h5netcdf
+ Requires-Dist: pandas
+ Requires-Dist: netCDF4
+ Requires-Dist: matplotlib
+ Requires-Dist: xarray
+ Requires-Dist: cftime
+ Requires-Dist: nc-time-axis


  **gfdlvitals** is a Python package in the public domain
gfdlvitals-3.0.8/gfdlvitals/util/extract_ocean_scalar.py (removed)
@@ -1,34 +0,0 @@
- """ Extract scalar fields from ocean model output """
-
- from . import gmeantools
-
- __all__ = ["mom6"]
-
-
- def mom6(fdata, fyear, outdir):
- """Extract MOM6 scalar output and save to sqlite
-
- Parameters
- ----------
- fdata : netCDF4.Dataset
- Input ocean_scalar.nc file
- fyear : str
- Year being processed
- outdir : str
- Path to output directory
- """
-
- ignore_list = ["time_bounds", "time_bnds", "average_T2", "average_T1", "average_DT"]
-
- var_dict = fdata.variables.keys()
- var_dict = list(set(var_dict) - set(ignore_list))
-
- for varname in var_dict:
- if len(fdata.variables[varname].shape) == 2:
- units = gmeantools.extract_metadata(fdata, varname, "units")
- long_name = gmeantools.extract_metadata(fdata, varname, "long_name")
- result = fdata.variables[varname][0, 0]
- sqlfile = outdir + "/" + fyear + ".globalAveOcean.db"
- gmeantools.write_metadata(sqlfile, varname, "units", units)
- gmeantools.write_metadata(sqlfile, varname, "long_name", long_name)
- gmeantools.write_sqlite_data(sqlfile, varname, fyear[:4], result)