cloudnetpy 1.55.20__py3-none-any.whl → 1.55.22__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (95)
  1. cloudnetpy/categorize/atmos.py +46 -14
  2. cloudnetpy/categorize/atmos_utils.py +11 -1
  3. cloudnetpy/categorize/categorize.py +38 -21
  4. cloudnetpy/categorize/classify.py +31 -9
  5. cloudnetpy/categorize/containers.py +19 -7
  6. cloudnetpy/categorize/droplet.py +24 -8
  7. cloudnetpy/categorize/falling.py +17 -7
  8. cloudnetpy/categorize/freezing.py +19 -5
  9. cloudnetpy/categorize/insects.py +27 -14
  10. cloudnetpy/categorize/lidar.py +38 -36
  11. cloudnetpy/categorize/melting.py +19 -9
  12. cloudnetpy/categorize/model.py +28 -9
  13. cloudnetpy/categorize/mwr.py +4 -2
  14. cloudnetpy/categorize/radar.py +58 -22
  15. cloudnetpy/cloudnetarray.py +15 -6
  16. cloudnetpy/concat_lib.py +39 -16
  17. cloudnetpy/constants.py +7 -0
  18. cloudnetpy/datasource.py +39 -19
  19. cloudnetpy/instruments/basta.py +6 -2
  20. cloudnetpy/instruments/campbell_scientific.py +33 -16
  21. cloudnetpy/instruments/ceilo.py +30 -13
  22. cloudnetpy/instruments/ceilometer.py +76 -37
  23. cloudnetpy/instruments/cl61d.py +8 -3
  24. cloudnetpy/instruments/cloudnet_instrument.py +2 -1
  25. cloudnetpy/instruments/copernicus.py +27 -14
  26. cloudnetpy/instruments/disdrometer/common.py +51 -32
  27. cloudnetpy/instruments/disdrometer/parsivel.py +79 -48
  28. cloudnetpy/instruments/disdrometer/thies.py +10 -6
  29. cloudnetpy/instruments/galileo.py +23 -12
  30. cloudnetpy/instruments/hatpro.py +27 -11
  31. cloudnetpy/instruments/instruments.py +4 -1
  32. cloudnetpy/instruments/lufft.py +20 -11
  33. cloudnetpy/instruments/mira.py +60 -49
  34. cloudnetpy/instruments/mrr.py +31 -20
  35. cloudnetpy/instruments/nc_lidar.py +15 -6
  36. cloudnetpy/instruments/nc_radar.py +31 -22
  37. cloudnetpy/instruments/pollyxt.py +36 -21
  38. cloudnetpy/instruments/radiometrics.py +32 -18
  39. cloudnetpy/instruments/rpg.py +48 -22
  40. cloudnetpy/instruments/rpg_reader.py +39 -30
  41. cloudnetpy/instruments/vaisala.py +39 -27
  42. cloudnetpy/instruments/weather_station.py +15 -11
  43. cloudnetpy/metadata.py +3 -1
  44. cloudnetpy/model_evaluation/file_handler.py +31 -21
  45. cloudnetpy/model_evaluation/metadata.py +3 -1
  46. cloudnetpy/model_evaluation/model_metadata.py +1 -1
  47. cloudnetpy/model_evaluation/plotting/plot_tools.py +20 -15
  48. cloudnetpy/model_evaluation/plotting/plotting.py +114 -64
  49. cloudnetpy/model_evaluation/products/advance_methods.py +48 -28
  50. cloudnetpy/model_evaluation/products/grid_methods.py +44 -19
  51. cloudnetpy/model_evaluation/products/model_products.py +22 -18
  52. cloudnetpy/model_evaluation/products/observation_products.py +15 -9
  53. cloudnetpy/model_evaluation/products/product_resampling.py +14 -4
  54. cloudnetpy/model_evaluation/products/tools.py +16 -7
  55. cloudnetpy/model_evaluation/statistics/statistical_methods.py +28 -15
  56. cloudnetpy/model_evaluation/tests/e2e/conftest.py +3 -3
  57. cloudnetpy/model_evaluation/tests/e2e/process_cf/main.py +9 -5
  58. cloudnetpy/model_evaluation/tests/e2e/process_cf/tests.py +14 -13
  59. cloudnetpy/model_evaluation/tests/e2e/process_iwc/main.py +9 -5
  60. cloudnetpy/model_evaluation/tests/e2e/process_iwc/tests.py +14 -13
  61. cloudnetpy/model_evaluation/tests/e2e/process_lwc/main.py +9 -5
  62. cloudnetpy/model_evaluation/tests/e2e/process_lwc/tests.py +14 -13
  63. cloudnetpy/model_evaluation/tests/unit/conftest.py +11 -11
  64. cloudnetpy/model_evaluation/tests/unit/test_advance_methods.py +33 -27
  65. cloudnetpy/model_evaluation/tests/unit/test_grid_methods.py +83 -83
  66. cloudnetpy/model_evaluation/tests/unit/test_model_products.py +23 -21
  67. cloudnetpy/model_evaluation/tests/unit/test_observation_products.py +24 -25
  68. cloudnetpy/model_evaluation/tests/unit/test_plot_tools.py +40 -39
  69. cloudnetpy/model_evaluation/tests/unit/test_plotting.py +12 -11
  70. cloudnetpy/model_evaluation/tests/unit/test_statistical_methods.py +30 -30
  71. cloudnetpy/model_evaluation/tests/unit/test_tools.py +18 -17
  72. cloudnetpy/model_evaluation/utils.py +3 -2
  73. cloudnetpy/output.py +45 -19
  74. cloudnetpy/plotting/plot_meta.py +35 -11
  75. cloudnetpy/plotting/plotting.py +172 -104
  76. cloudnetpy/products/classification.py +20 -8
  77. cloudnetpy/products/der.py +25 -10
  78. cloudnetpy/products/drizzle.py +41 -26
  79. cloudnetpy/products/drizzle_error.py +10 -5
  80. cloudnetpy/products/drizzle_tools.py +43 -24
  81. cloudnetpy/products/ier.py +10 -5
  82. cloudnetpy/products/iwc.py +16 -9
  83. cloudnetpy/products/lwc.py +34 -12
  84. cloudnetpy/products/mwr_multi.py +4 -1
  85. cloudnetpy/products/mwr_single.py +4 -1
  86. cloudnetpy/products/product_tools.py +33 -10
  87. cloudnetpy/utils.py +175 -74
  88. cloudnetpy/version.py +1 -1
  89. {cloudnetpy-1.55.20.dist-info → cloudnetpy-1.55.22.dist-info}/METADATA +11 -10
  90. cloudnetpy-1.55.22.dist-info/RECORD +114 -0
  91. docs/source/conf.py +2 -2
  92. cloudnetpy-1.55.20.dist-info/RECORD +0 -114
  93. {cloudnetpy-1.55.20.dist-info → cloudnetpy-1.55.22.dist-info}/LICENSE +0 -0
  94. {cloudnetpy-1.55.20.dist-info → cloudnetpy-1.55.22.dist-info}/WHEEL +0 -0
  95. {cloudnetpy-1.55.20.dist-info → cloudnetpy-1.55.22.dist-info}/top_level.txt +0 -0
cloudnetpy/model_evaluation/statistics/statistical_methods.py
@@ -15,6 +15,7 @@ class DayStatistics:
  and observation data of wanted product.

  Args:
+ ----
  method (str): Name on statistical method to be calculated
  product_info (list): List of information of statistical analysis is
  done with. A list includes observed product name (str), model variable (str)
@@ -24,12 +25,15 @@ class DayStatistics:
  observation (np.ndarray): Ndrray of Downsampled observation of product

  Raises:
+ ------
  RuntimeError: A function of given method not found

  Returns:
+ -------
  day_statistic (object): The :class:'DayStatistic' object.

  Examples:
+ --------
  >>> from cloudnetpy.model_evaluation.products.product_resampling import \
  process_L3_day_product
  >>> method = 'error'
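For context, the added dash rows turn the plain docstring headers into numpydoc-style underlined sections. A minimal sketch of the resulting layout (names and text shortened here for illustration, not the actual docstring):

    def day_statistics_sketch(method: str) -> None:
        """Calculate one statistical method for a day of data.

        Args:
        ----
            method: Name of the statistical method to be calculated.

        Raises:
        ------
            RuntimeError: If no function for the given method is found.
        """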
@@ -72,20 +76,22 @@ class DayStatistics:
  full_name = "vertical_profile"
  return full_name, params

- def _generate_day_statistics(self):
+ def _generate_day_statistics(self) -> None:
  full_name, params = self._get_method_attr()
  cls = __import__("statistical_methods")
  try:
  self.model_stat, self.observation_stat = getattr(cls, f"{full_name}")(
- *params
+ *params,
  )
- self.title = getattr(cls, "day_stat_title")(self.method, self.product)
- except RuntimeError as error:
- logging.error(f"Method {full_name} not found or missing: {error}")
+ self.title = cls.day_stat_title(self.method, self.product)
+ except RuntimeError:
+ msg = f"Failed to calculate {self.method} of {self.product[0]}"
+ logging.exception(msg)


  def relative_error(
- model: ma.MaskedArray, observation: ma.MaskedArray
+ model: ma.MaskedArray,
+ observation: ma.MaskedArray,
  ) -> tuple[float, str]:
  model, observation = combine_masked_indices(model, observation)
  error = ((model - observation) / observation) * 100
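The error handling above now calls logging.exception instead of logging.error with the interpolated exception; inside an except block, logging.exception records the message at ERROR level and appends the active traceback automatically. A minimal sketch with a hypothetical lookup failure:

    import logging

    def _lookup_method(name: str):
        # Hypothetical stand-in for getattr(cls, full_name) in the hunk above.
        raise RuntimeError(f"Method {name} not found")

    try:
        _lookup_method("relative_error")
    except RuntimeError:
        msg = "Failed to calculate error of cf"
        logging.exception(msg)  # logs msg plus the RuntimeError traceback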
@@ -93,7 +99,8 @@ def relative_error(


  def absolute_error(
- model: ma.MaskedArray, observation: ma.MaskedArray
+ model: ma.MaskedArray,
+ observation: ma.MaskedArray,
  ) -> tuple[float, str]:
  model, observation = combine_masked_indices(model, observation)
  error = (observation - model) * 100
@@ -101,7 +108,8 @@ def absolute_error(


  def combine_masked_indices(
- model: ma.MaskedArray, observation: ma.MaskedArray
+ model: ma.MaskedArray,
+ observation: ma.MaskedArray,
  ) -> tuple[ma.MaskedArray, ma.MaskedArray]:
  """Connects two array masked indices to one and add in two array same mask"""
  observation[np.where(np.isnan(observation))] = ma.masked
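combine_masked_indices (only its signature is reformatted above) masks NaNs in the observation and gives both arrays a common mask. A rough numpy.ma equivalent, assuming equally shaped inputs and hypothetical names:

    import numpy as np
    import numpy.ma as ma

    def combine_masks_sketch(model, obs):
        obs = ma.masked_invalid(obs)  # mask NaNs, as in the original function
        common = ma.getmaskarray(model) | ma.getmaskarray(obs)  # union of both masks
        return ma.array(model, mask=common), ma.array(obs, mask=common)

    m = ma.masked_values([1.0, 2.0, 3.0], 2.0)
    o = np.array([1.0, np.nan, 3.5])
    print(combine_masks_sketch(m, o))  # index 1 is masked in both outputs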
@@ -113,9 +121,10 @@ def combine_masked_indices(


  def calc_common_area_sum(
- model: ma.MaskedArray, observation: ma.MaskedArray
+ model: ma.MaskedArray,
+ observation: ma.MaskedArray,
  ) -> tuple[float, str]:
- def _indices_of_mask_sum():
+ def _indices_of_mask_sum() -> float:
  # Calculate percentage value of common area of indices from two arrays.
  # Results is total number of common indices with value
  observation[np.where(np.isnan(observation))] = ma.masked
@@ -131,17 +140,21 @@ def calc_common_area_sum(


  def histogram(
- product: list, model: ma.MaskedArray, observation: ma.MaskedArray
+ product: list,
+ model: ma.MaskedArray,
+ observation: ma.MaskedArray,
  ) -> tuple:
  if "cf" in product:
  model = ma.round(model[~model.mask].data, decimals=1).flatten()
  observation = ma.round(
- observation[~observation.mask].data, decimals=1
+ observation[~observation.mask].data,
+ decimals=1,
  ).flatten()
  else:
- model = ma.round(model[~model.mask].data, decimals=6).flatten()
- observation = ma.round(
- observation[~observation.mask].data, decimals=6
+ model = np.round(model[~model.mask].data, decimals=6).flatten()
+ observation = np.round(
+ observation[~observation.mask].data,
+ decimals=6,
  ).flatten()
  observation = observation[~np.isnan(observation)]
  hist_bins = np.histogram(observation, density=True)[-1]
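In the else branch above, ma.round becomes np.round; since model[~model.mask].data is already a plain ndarray of unmasked values, the plain NumPy call suffices. A small sketch of the same pattern with hypothetical values:

    import numpy as np
    import numpy.ma as ma

    iwc = ma.masked_invalid([1.234567e-4, np.nan, 5.678901e-4])
    # Keep unmasked values only, round to 6 decimals and flatten,
    # mirroring the else branch of histogram() above.
    values = np.round(iwc[~iwc.mask].data, decimals=6).flatten()
    hist_bins = np.histogram(values, density=True)[-1]  # bin edges
    print(values, hist_bins)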
cloudnetpy/model_evaluation/tests/e2e/conftest.py
@@ -3,11 +3,11 @@ import pytest
  args = ["site", "date", "input", "output", "full_path"]


- def pytest_addoption(parser):
+ def pytest_addoption(parser) -> None:
  for arg in args:
  parser.addoption(f"--{arg}", action="store")


- @pytest.fixture
- def params(request):
+ @pytest.fixture()
+ def params(request) -> dict:
  return {arg: request.config.getoption(f"--{arg}") for arg in args}
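The conftest above registers one --<name> command-line option per entry in args and exposes the collected values through the params fixture. A hypothetical test consuming that fixture could look like:

    import netCDF4

    def test_file_has_time(params) -> None:
        # params is the fixture from the conftest above; "full_path" comes
        # from the --full_path option passed on the pytest command line.
        with netCDF4.Dataset(params["full_path"]) as nc:
            assert "time" in nc.variables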
cloudnetpy/model_evaluation/tests/e2e/process_cf/main.py
@@ -15,11 +15,15 @@ test_file_model = (
  test_file_product = f"{ROOT_PATH}/cloudnetpy/model_evaluation/tests/data/20190517_mace-head_categorize.nc"


- def _process():
+ def _process() -> None:
  tmp_dir = TemporaryDirectory()
  temp_file = f"{tmp_dir.name}/xx.nc"
  product_resampling.process_L3_day_product(
- "ecmwf", "cf", [test_file_model], test_file_product, temp_file
+ "ecmwf",
+ "cf",
+ [test_file_model],
+ test_file_product,
+ temp_file,
  )
  try:
  subprocess.call(
@@ -29,20 +33,20 @@ def _process():
  f"{SCRIPT_PATH}/tests.py",
  "--full_path",
  temp_file,
- ]
+ ],
  )
  except subprocess.CalledProcessError:
  raise
  tmp_dir.cleanup()


- def main():
+ def main() -> None:
  _process()


  if __name__ == "__main__":
  parser = argparse.ArgumentParser(
- description="Model evaluation Cloud fraction processing e2e test."
+ description="Model evaluation Cloud fraction processing e2e test.",
  )
  ARGS = parser.parse_args()
  main()
cloudnetpy/model_evaluation/tests/e2e/process_cf/tests.py
@@ -6,11 +6,11 @@ class TestCloudFractionProcessing:
  product = "cf"

  @pytest.fixture(autouse=True)
- def _fetch_params(self, params):
+ def _fetch_params(self, params) -> None:
  self.full_path = params["full_path"]

- @pytest.mark.reprocess
- def test_that_has_correct_attributes(self):
+ @pytest.mark.reprocess()
+ def test_that_has_correct_attributes(self) -> None:
  nc = netCDF4.Dataset(self.full_path)
  assert nc.location == "Mace-Head"
  assert nc.year == "2019"
@@ -25,31 +25,32 @@ class TestCloudFractionProcessing:
  )
  nc.close()

- @pytest.mark.reprocess
+ @pytest.mark.reprocess()
  @pytest.mark.parametrize(
- "key", ["cf_V_ecmwf", "cf_A_ecmwf", "cf_V_adv_ecmwf", "cf_A_adv_ecmwf"]
+ "key",
+ ["cf_V_ecmwf", "cf_A_ecmwf", "cf_V_adv_ecmwf", "cf_A_adv_ecmwf"],
  )
- def test_that_has_correct_product_variables(self, key):
+ def test_that_has_correct_product_variables(self, key) -> None:
  nc = netCDF4.Dataset(self.full_path)
- assert key in nc.variables.keys()
+ assert key in nc.variables
  nc.close()

- @pytest.mark.reprocess
+ @pytest.mark.reprocess()
  @pytest.mark.parametrize(
  "key",
  ["time", "level", "latitude", "longitude", "horizontal_resolution"],
  )
- def test_that_has_correct_model_variables(self, key):
+ def test_that_has_correct_model_variables(self, key) -> None:
  nc = netCDF4.Dataset(self.full_path)
- assert key in nc.variables.keys()
+ assert key in nc.variables
  nc.close()

- @pytest.mark.reprocess
+ @pytest.mark.reprocess()
  @pytest.mark.parametrize(
  "key",
  ["ecmwf_forecast_time", "ecmwf_height", "ecmwf_cf", "ecmwf_cf_cirrus"],
  )
- def test_that_has_correct_cycle_variables(self, key):
+ def test_that_has_correct_cycle_variables(self, key) -> None:
  nc = netCDF4.Dataset(self.full_path)
- assert key in nc.variables.keys()
+ assert key in nc.variables
  nc.close()
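The assertions above drop the redundant .keys() call: Dataset.variables in netCDF4 is a dict-like mapping, so the in operator tests keys directly. For example (hypothetical file path):

    import netCDF4

    nc = netCDF4.Dataset("20190517_mace-head_cf_downsampled.nc")  # hypothetical path
    assert "cf_V_ecmwf" in nc.variables  # equivalent to: in nc.variables.keys()
    nc.close()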
cloudnetpy/model_evaluation/tests/e2e/process_iwc/main.py
@@ -19,11 +19,15 @@ test_file_model = (
  test_file_product = f"{ROOT_PATH}/cloudnetpy/model_evaluation/tests/data/20190517_mace-head_iwc-Z-T-method.nc"


- def _process():
+ def _process() -> None:
  tmp_dir = TemporaryDirectory()
  temp_file = f"{tmp_dir.name}/xx.nc"
  product_resampling.process_L3_day_product(
- "ecmwf", "iwc", [test_file_model], test_file_product, temp_file
+ "ecmwf",
+ "iwc",
+ [test_file_model],
+ test_file_product,
+ temp_file,
  )
  try:
  subprocess.call(
@@ -33,20 +37,20 @@ def _process():
  f"{SCRIPT_PATH}/tests.py",
  "--full_path",
  temp_file,
- ]
+ ],
  )
  except subprocess.CalledProcessError:
  raise
  tmp_dir.cleanup()


- def main():
+ def main() -> None:
  _process()


  if __name__ == "__main__":
  parser = argparse.ArgumentParser(
- description="Model evaluation Ice water content processing e2e test."
+ description="Model evaluation Ice water content processing e2e test.",
  )
  ARGS = parser.parse_args()
  main()
cloudnetpy/model_evaluation/tests/e2e/process_iwc/tests.py
@@ -6,11 +6,11 @@ class TestCloudFractionProcessing:
  product = "iwc"

  @pytest.fixture(autouse=True)
- def _fetch_params(self, params):
+ def _fetch_params(self, params) -> None:
  self.full_path = params["full_path"]

- @pytest.mark.reprocess
- def test_that_has_correct_attributes(self):
+ @pytest.mark.reprocess()
+ def test_that_has_correct_attributes(self) -> None:
  nc = netCDF4.Dataset(self.full_path)
  assert nc.location == "Mace-Head"
  assert nc.year == "2019"
@@ -25,7 +25,7 @@ class TestCloudFractionProcessing:
  )
  nc.close()

- @pytest.mark.reprocess
+ @pytest.mark.reprocess()
  @pytest.mark.parametrize(
  "key",
  [
@@ -37,26 +37,27 @@ class TestCloudFractionProcessing:
  "iwc_rain_adv_ecmwf",
  ],
  )
- def test_that_has_correct_product_variables(self, key):
+ def test_that_has_correct_product_variables(self, key) -> None:
  nc = netCDF4.Dataset(self.full_path)
- assert key in nc.variables.keys()
+ assert key in nc.variables
  nc.close()

- @pytest.mark.reprocess
+ @pytest.mark.reprocess()
  @pytest.mark.parametrize(
  "key",
  ["time", "level", "latitude", "longitude", "horizontal_resolution"],
  )
- def test_that_has_correct_model_variables(self, key):
+ def test_that_has_correct_model_variables(self, key) -> None:
  nc = netCDF4.Dataset(self.full_path)
- assert key in nc.variables.keys()
+ assert key in nc.variables
  nc.close()

- @pytest.mark.reprocess
+ @pytest.mark.reprocess()
  @pytest.mark.parametrize(
- "key", ["ecmwf_forecast_time", "ecmwf_height", "ecmwf_iwc"]
+ "key",
+ ["ecmwf_forecast_time", "ecmwf_height", "ecmwf_iwc"],
  )
- def test_that_has_correct_cycle_variables(self, key):
+ def test_that_has_correct_cycle_variables(self, key) -> None:
  nc = netCDF4.Dataset(self.full_path)
- assert key in nc.variables.keys()
+ assert key in nc.variables
  nc.close()
cloudnetpy/model_evaluation/tests/e2e/process_lwc/main.py
@@ -15,11 +15,15 @@ test_file_model = (
  test_file_product = f"{ROOT_PATH}/cloudnetpy/model_evaluation/tests/data/20190517_mace-head_lwc-scaled-adiabatic.nc"


- def _process():
+ def _process() -> None:
  tmp_dir = TemporaryDirectory()
  temp_file = f"{tmp_dir.name}/xx.nc"
  product_resampling.process_L3_day_product(
- "ecmwf", "lwc", [test_file_model], test_file_product, temp_file
+ "ecmwf",
+ "lwc",
+ [test_file_model],
+ test_file_product,
+ temp_file,
  )
  try:
  subprocess.call(
@@ -29,20 +33,20 @@ def _process():
  f"{SCRIPT_PATH}/tests.py",
  "--full_path",
  temp_file,
- ]
+ ],
  )
  except subprocess.CalledProcessError:
  raise
  tmp_dir.cleanup()


- def main():
+ def main() -> None:
  _process()


  if __name__ == "__main__":
  parser = argparse.ArgumentParser(
- description="Model evaluation liquid water content processing e2e test."
+ description="Model evaluation liquid water content processing e2e test.",
  )
  ARGS = parser.parse_args()
  main()
cloudnetpy/model_evaluation/tests/e2e/process_lwc/tests.py
@@ -6,11 +6,11 @@ class TestCloudFractionProcessing:
  product = "lwc"

  @pytest.fixture(autouse=True)
- def _fetch_params(self, params):
+ def _fetch_params(self, params) -> None:
  self.full_path = params["full_path"]

- @pytest.mark.reprocess
- def test_that_has_correct_attributes(self):
+ @pytest.mark.reprocess()
+ def test_that_has_correct_attributes(self) -> None:
  nc = netCDF4.Dataset(self.full_path)
  assert nc.location == "Mace-Head"
  assert nc.year == "2019"
@@ -25,28 +25,29 @@ class TestCloudFractionProcessing:
  )
  nc.close()

- @pytest.mark.reprocess
+ @pytest.mark.reprocess()
  @pytest.mark.parametrize("key", ["lwc_ecmwf", "lwc_adv_ecmwf"])
- def test_that_has_correct_product_variables(self, key):
+ def test_that_has_correct_product_variables(self, key) -> None:
  nc = netCDF4.Dataset(self.full_path)
- assert key in nc.variables.keys()
+ assert key in nc.variables
  nc.close()

- @pytest.mark.reprocess
+ @pytest.mark.reprocess()
  @pytest.mark.parametrize(
  "key",
  ["time", "level", "latitude", "longitude", "horizontal_resolution"],
  )
- def test_that_has_correct_model_variables(self, key):
+ def test_that_has_correct_model_variables(self, key) -> None:
  nc = netCDF4.Dataset(self.full_path)
- assert key in nc.variables.keys()
+ assert key in nc.variables
  nc.close()

- @pytest.mark.reprocess
+ @pytest.mark.reprocess()
  @pytest.mark.parametrize(
- "key", ["ecmwf_forecast_time", "ecmwf_height", "ecmwf_lwc"]
+ "key",
+ ["ecmwf_forecast_time", "ecmwf_height", "ecmwf_lwc"],
  )
- def test_that_has_correct_cycle_variables(self, key):
+ def test_that_has_correct_cycle_variables(self, key) -> None:
  nc = netCDF4.Dataset(self.full_path)
- assert key in nc.variables.keys()
+ assert key in nc.variables
  nc.close()
cloudnetpy/model_evaluation/tests/unit/conftest.py
@@ -6,7 +6,7 @@ import pytest


  @pytest.fixture(scope="session")
- def file_metadata():
+ def file_metadata() -> dict:
  year, month, day = "2019", "05", "23"
  return {
  "year": year,
@@ -19,7 +19,7 @@ def file_metadata():


  @pytest.fixture(scope="session")
- def model_file(tmpdir_factory, file_metadata):
+ def model_file(tmpdir_factory, file_metadata) -> str:
  file_name = tmpdir_factory.mktemp("data").join("file.nc")
  root_grp = netCDF4.Dataset(file_name, "w", format="NETCDF4_CLASSIC")
  time = 3
@@ -61,7 +61,7 @@ def model_file(tmpdir_factory, file_metadata):


  @pytest.fixture(scope="session")
- def obs_file(tmpdir_factory, file_metadata):
+ def obs_file(tmpdir_factory, file_metadata) -> str:
  file_name = tmpdir_factory.mktemp("data").join("file.nc")
  root_grp = netCDF4.Dataset(file_name, "w", format="NETCDF4_CLASSIC")
  time = 6
@@ -96,7 +96,7 @@ def obs_file(tmpdir_factory, file_metadata):
  [4, 4, 0, 1],
  [0, 8, 16, 16],
  [32, 2, 8, 32],
- ]
+ ],
  )
  var = root_grp.createVariable("quality_bits", "i4", ("time", "height"))
  var[:] = np.array(
@@ -107,7 +107,7 @@ def obs_file(tmpdir_factory, file_metadata):
  [0, 1, 2, 4],
  [8, 16, 32, 16],
  [8, 4, 2, 1],
- ]
+ ],
  )
  var = root_grp.createVariable("iwc", "f8", ("time", "height"))
  var[:] = np.array(
@@ -118,7 +118,7 @@ def obs_file(tmpdir_factory, file_metadata):
  [0.01, 0.02, 0.06, 0.01],
  [0.02, 0.06, 0.00, 0.03],
  [0.08, 0.00, 0.03, 0.08],
- ]
+ ],
  )
  var = root_grp.createVariable("iwc_retrieval_status", "f8", ("time", "height"))
  var[:] = np.array(
@@ -129,7 +129,7 @@ def obs_file(tmpdir_factory, file_metadata):
  [1, 2, 6, 7],
  [4, 6, 5, 3],
  [7, 5, 3, 4],
- ]
+ ],
  )
  var = root_grp.createVariable("lwc", "f8", ("time", "height"))
  var[:] = np.array(
@@ -140,7 +140,7 @@ def obs_file(tmpdir_factory, file_metadata):
  [0.08, 0.04, 0.01, 0.08],
  [0.04, 0.01, 0.09, 0.07],
  [0.02, 0.09, 0.07, 0.02],
- ]
+ ],
  )
  var = root_grp.createVariable("data", "i4", ("time", "height"))
  var[:] = np.array(
@@ -151,14 +151,14 @@ def obs_file(tmpdir_factory, file_metadata):
  [3, 5, 1, 0],
  [2, 5, 6, 1],
  [2, 9, 7, 2],
- ]
+ ],
  )
  root_grp.close()
  return file_name


  @pytest.fixture(scope="session")
- def regrid_file(tmpdir_factory, file_metadata):
+ def regrid_file(tmpdir_factory, file_metadata) -> str:
  file_name = tmpdir_factory.mktemp("data").join("file.nc")
  root_grp = netCDF4.Dataset(file_name, "w", format="NETCDF4_CLASSIC")
  time = 3
@@ -198,6 +198,6 @@ def regrid_file(tmpdir_factory, file_metadata):
  return file_name


- def _create_global_attributes(root_grp, meta):
+ def _create_global_attributes(root_grp, meta) -> None:
  for key in ("year", "month", "day", "location"):
  setattr(root_grp, key, meta[key])