ocf-data-sampler 0.1.11__py3-none-any.whl → 0.1.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ocf-data-sampler has been flagged as potentially problematic; see the registry's advisory page for this release for details.

Files changed (78):
  1. ocf_data_sampler/config/load.py +3 -3
  2. ocf_data_sampler/config/model.py +146 -64
  3. ocf_data_sampler/config/save.py +5 -4
  4. ocf_data_sampler/load/gsp.py +6 -5
  5. ocf_data_sampler/load/load_dataset.py +5 -6
  6. ocf_data_sampler/load/nwp/nwp.py +17 -5
  7. ocf_data_sampler/load/nwp/providers/ecmwf.py +6 -7
  8. ocf_data_sampler/load/nwp/providers/gfs.py +36 -0
  9. ocf_data_sampler/load/nwp/providers/icon.py +46 -0
  10. ocf_data_sampler/load/nwp/providers/ukv.py +4 -5
  11. ocf_data_sampler/load/nwp/providers/utils.py +3 -1
  12. ocf_data_sampler/load/satellite.py +9 -10
  13. ocf_data_sampler/load/site.py +10 -6
  14. ocf_data_sampler/load/utils.py +21 -16
  15. ocf_data_sampler/numpy_sample/collate.py +10 -9
  16. ocf_data_sampler/numpy_sample/datetime_features.py +3 -5
  17. ocf_data_sampler/numpy_sample/gsp.py +12 -14
  18. ocf_data_sampler/numpy_sample/nwp.py +12 -12
  19. ocf_data_sampler/numpy_sample/satellite.py +9 -9
  20. ocf_data_sampler/numpy_sample/site.py +5 -8
  21. ocf_data_sampler/numpy_sample/sun_position.py +16 -21
  22. ocf_data_sampler/sample/base.py +15 -17
  23. ocf_data_sampler/sample/site.py +13 -20
  24. ocf_data_sampler/sample/uk_regional.py +29 -35
  25. ocf_data_sampler/select/dropout.py +16 -14
  26. ocf_data_sampler/select/fill_time_periods.py +15 -5
  27. ocf_data_sampler/select/find_contiguous_time_periods.py +88 -75
  28. ocf_data_sampler/select/geospatial.py +63 -54
  29. ocf_data_sampler/select/location.py +16 -51
  30. ocf_data_sampler/select/select_spatial_slice.py +105 -89
  31. ocf_data_sampler/select/select_time_slice.py +71 -58
  32. ocf_data_sampler/select/spatial_slice_for_dataset.py +7 -6
  33. ocf_data_sampler/select/time_slice_for_dataset.py +17 -16
  34. ocf_data_sampler/torch_datasets/datasets/pvnet_uk.py +140 -131
  35. ocf_data_sampler/torch_datasets/datasets/site.py +152 -112
  36. ocf_data_sampler/torch_datasets/utils/__init__.py +3 -0
  37. ocf_data_sampler/torch_datasets/utils/channel_dict_to_dataarray.py +11 -0
  38. ocf_data_sampler/torch_datasets/utils/merge_and_fill_utils.py +6 -2
  39. ocf_data_sampler/torch_datasets/utils/valid_time_periods.py +23 -22
  40. ocf_data_sampler/utils.py +3 -1
  41. {ocf_data_sampler-0.1.11.dist-info → ocf_data_sampler-0.1.17.dist-info}/METADATA +7 -18
  42. ocf_data_sampler-0.1.17.dist-info/RECORD +56 -0
  43. {ocf_data_sampler-0.1.11.dist-info → ocf_data_sampler-0.1.17.dist-info}/WHEEL +1 -1
  44. {ocf_data_sampler-0.1.11.dist-info → ocf_data_sampler-0.1.17.dist-info}/top_level.txt +1 -1
  45. scripts/refactor_site.py +63 -33
  46. utils/compute_icon_mean_stddev.py +72 -0
  47. ocf_data_sampler/constants.py +0 -222
  48. ocf_data_sampler/torch_datasets/utils/validate_channels.py +0 -82
  49. ocf_data_sampler-0.1.11.dist-info/LICENSE +0 -21
  50. ocf_data_sampler-0.1.11.dist-info/RECORD +0 -82
  51. tests/__init__.py +0 -0
  52. tests/config/test_config.py +0 -113
  53. tests/config/test_load.py +0 -7
  54. tests/config/test_save.py +0 -28
  55. tests/conftest.py +0 -319
  56. tests/load/test_load_gsp.py +0 -15
  57. tests/load/test_load_nwp.py +0 -21
  58. tests/load/test_load_satellite.py +0 -17
  59. tests/load/test_load_sites.py +0 -14
  60. tests/numpy_sample/test_collate.py +0 -21
  61. tests/numpy_sample/test_datetime_features.py +0 -37
  62. tests/numpy_sample/test_gsp.py +0 -38
  63. tests/numpy_sample/test_nwp.py +0 -13
  64. tests/numpy_sample/test_satellite.py +0 -40
  65. tests/numpy_sample/test_sun_position.py +0 -81
  66. tests/select/test_dropout.py +0 -69
  67. tests/select/test_fill_time_periods.py +0 -28
  68. tests/select/test_find_contiguous_time_periods.py +0 -202
  69. tests/select/test_location.py +0 -67
  70. tests/select/test_select_spatial_slice.py +0 -154
  71. tests/select/test_select_time_slice.py +0 -275
  72. tests/test_sample/test_base.py +0 -164
  73. tests/test_sample/test_site_sample.py +0 -165
  74. tests/test_sample/test_uk_regional_sample.py +0 -136
  75. tests/torch_datasets/test_merge_and_fill_utils.py +0 -40
  76. tests/torch_datasets/test_pvnet_uk.py +0 -154
  77. tests/torch_datasets/test_site.py +0 -226
  78. tests/torch_datasets/test_validate_channels_utils.py +0 -78
@@ -1,21 +0,0 @@
1
- import pandas as pd
2
- from xarray import DataArray
3
- import numpy as np
4
-
5
- from ocf_data_sampler.load.nwp import open_nwp
6
-
7
-
8
- def test_load_ukv(nwp_ukv_zarr_path):
9
- da = open_nwp(zarr_path=nwp_ukv_zarr_path, provider="ukv")
10
- assert isinstance(da, DataArray)
11
- assert da.dims == ("init_time_utc", "step", "channel", "x_osgb", "y_osgb")
12
- assert da.shape == (24 * 7, 11, 4, 50, 100)
13
- assert np.issubdtype(da.dtype, np.number)
14
-
15
-
16
- def test_load_ecmwf(nwp_ecmwf_zarr_path):
17
- da = open_nwp(zarr_path=nwp_ecmwf_zarr_path, provider="ecmwf")
18
- assert isinstance(da, DataArray)
19
- assert da.dims == ("init_time_utc", "step", "channel", "longitude", "latitude")
20
- assert da.shape == (24 * 7, 15, 3, 15, 12)
21
- assert np.issubdtype(da.dtype, np.number)
@@ -1,17 +0,0 @@
1
- from ocf_data_sampler.load.satellite import open_sat_data
2
- import xarray as xr
3
- import numpy as np
4
-
5
-
6
- def test_open_satellite(sat_zarr_path):
7
- da = open_sat_data(zarr_path=sat_zarr_path)
8
-
9
- assert isinstance(da, xr.DataArray)
10
- assert da.dims == ("time_utc", "channel", "x_geostationary", "y_geostationary")
11
- # 288 is 1 days of data at 5 minutes intervals, 12 * 24
12
- # There are 11 channels
13
- # There are 100 x 100 pixels
14
- assert da.shape == (288, 11, 100, 100)
15
- assert np.issubdtype(da.dtype, np.number)
16
-
17
-
@@ -1,14 +0,0 @@
1
- from ocf_data_sampler.load.site import open_site
2
- import xarray as xr
3
-
4
-
5
- def test_open_site(data_sites):
6
- da = open_site(data_sites.file_path, data_sites.metadata_file_path)
7
-
8
- assert isinstance(da, xr.DataArray)
9
- assert da.dims == ("time_utc", "site_id")
10
-
11
- assert "capacity_kwp" in da.coords
12
- assert "latitude" in da.coords
13
- assert "longitude" in da.coords
14
- assert da.shape == (49, 10)
@@ -1,21 +0,0 @@
1
- from ocf_data_sampler.numpy_sample.collate import stack_np_samples_into_batch
2
- from ocf_data_sampler.torch_datasets.datasets.pvnet_uk import PVNetUKRegionalDataset
3
-
4
-
5
- def test_stack_np_samples_into_batch(pvnet_config_filename):
6
-
7
- # Create dataset object
8
- dataset = PVNetUKRegionalDataset(pvnet_config_filename)
9
-
10
- # Generate 2 samples
11
- sample1 = dataset[0]
12
- sample2 = dataset[1]
13
-
14
- batch = stack_np_samples_into_batch([sample1, sample2])
15
-
16
- assert isinstance(batch, dict)
17
- assert "nwp" in batch
18
- assert isinstance(batch["nwp"], dict)
19
- assert "ukv" in batch["nwp"]
20
- assert "gsp" in batch
21
- assert "satellite_actual" in batch
@@ -1,37 +0,0 @@
1
- import numpy as np
2
- import pandas as pd
3
- import pytest
4
-
5
- from ocf_data_sampler.numpy_sample.datetime_features import make_datetime_numpy_dict
6
-
7
-
8
- def test_calculate_azimuth_and_elevation():
9
-
10
- # Pick the day of the summer solstice
11
- datetimes = pd.to_datetime(["2024-06-20 12:00", "2024-06-20 12:30", "2024-06-20 13:00"])
12
-
13
- # Calculate sun angles
14
- datetime_features = make_datetime_numpy_dict(datetimes)
15
-
16
- assert len(datetime_features) == 4
17
-
18
- assert len(datetime_features["wind_date_sin"]) == len(datetimes)
19
- assert (datetime_features["wind_date_cos"] != datetime_features["wind_date_sin"]).all()
20
-
21
- # assert all values are between -1 and 1
22
- assert all(np.abs(datetime_features["wind_date_sin"]) <= 1)
23
- assert all(np.abs(datetime_features["wind_date_cos"]) <= 1)
24
- assert all(np.abs(datetime_features["wind_time_sin"]) <= 1)
25
- assert all(np.abs(datetime_features["wind_time_cos"]) <= 1)
26
-
27
-
28
- def test_make_datetime_numpy_batch_custom_key_prefix():
29
- # Test function correctly applies custom prefix to dict keys
30
- datetimes = pd.to_datetime(["2024-06-20 12:00", "2024-06-20 12:30", "2024-06-20 13:00"])
31
- key_prefix = "solar"
32
-
33
- datetime_features = make_datetime_numpy_dict(datetimes, key_prefix=key_prefix)
34
-
35
- # Assert dict contains expected quantity of keys and verify starting with custom prefix
36
- assert len(datetime_features) == 4
37
- assert all(key.startswith(key_prefix) for key in datetime_features.keys())
@@ -1,38 +0,0 @@
1
- from ocf_data_sampler.load.gsp import open_gsp
2
- import numpy as np
3
-
4
- from ocf_data_sampler.numpy_sample import convert_gsp_to_numpy_sample, GSPSampleKey
5
-
6
- def test_convert_gsp_to_numpy_sample(uk_gsp_zarr_path):
7
-
8
- da = (
9
- open_gsp(uk_gsp_zarr_path)
10
- .isel(time_utc=slice(0, 10))
11
- .sel(gsp_id=1)
12
- )
13
-
14
- numpy_sample = convert_gsp_to_numpy_sample(da)
15
-
16
- # Test data structure
17
- assert isinstance(numpy_sample, dict), "Should be dict"
18
- assert set(numpy_sample.keys()).issubset({
19
- GSPSampleKey.gsp,
20
- GSPSampleKey.nominal_capacity_mwp,
21
- GSPSampleKey.effective_capacity_mwp,
22
- GSPSampleKey.time_utc,
23
- }), "Unexpected keys"
24
-
25
- # Assert data content and capacity values
26
- assert np.array_equal(numpy_sample[GSPSampleKey.gsp], da.values), "GSP values mismatch"
27
- assert isinstance(numpy_sample[GSPSampleKey.time_utc], np.ndarray), "Time UTC should be numpy array"
28
- assert numpy_sample[GSPSampleKey.time_utc].dtype == float, "Time UTC should be float type"
29
- assert numpy_sample[GSPSampleKey.nominal_capacity_mwp] == da.isel(time_utc=0)["nominal_capacity_mwp"].values
30
- assert numpy_sample[GSPSampleKey.effective_capacity_mwp] == da.isel(time_utc=0)["effective_capacity_mwp"].values
31
-
32
- # Test with t0_idx
33
- t0_idx = 5
34
- numpy_sample_with_t0 = convert_gsp_to_numpy_sample(da, t0_idx=t0_idx)
35
- assert numpy_sample_with_t0[GSPSampleKey.t0_idx] == t0_idx, "t0_idx not correctly set"
36
-
37
-
38
-
@@ -1,13 +0,0 @@
1
- from ocf_data_sampler.numpy_sample import convert_nwp_to_numpy_sample, NWPSampleKey
2
-
3
-
4
- def test_convert_nwp_to_numpy_sample(ds_nwp_ukv_time_sliced):
5
-
6
- # Call the function
7
- numpy_sample = convert_nwp_to_numpy_sample(ds_nwp_ukv_time_sliced)
8
-
9
- # Assert the output type
10
- assert isinstance(numpy_sample, dict)
11
-
12
- # Assert the shape of the numpy sample
13
- assert (numpy_sample[NWPSampleKey.nwp] == ds_nwp_ukv_time_sliced.values).all()
@@ -1,40 +0,0 @@
1
-
2
- import numpy as np
3
- import pandas as pd
4
- import xarray as xr
5
-
6
- import pytest
7
-
8
- from ocf_data_sampler.numpy_sample import convert_satellite_to_numpy_sample, SatelliteSampleKey
9
-
10
-
11
- @pytest.fixture(scope="module")
12
- def da_sat_like():
13
- """Create dummy data which looks like satellite data"""
14
- x = np.arange(-100, 100, 10)
15
- y = np.arange(-100, 100, 10)
16
- datetimes = pd.date_range("2024-01-01 12:00", "2024-01-01 12:30", freq="5min")
17
- channels = ["VIS008", "IR016"]
18
-
19
- da_sat = xr.DataArray(
20
- np.random.normal(size=(len(datetimes), len(channels), len(x), len(y))),
21
- coords=dict(
22
- time_utc=(["time_utc"], datetimes),
23
- channel=(["channel"], channels),
24
- x_geostationary=(["x_geostationary"], x),
25
- y_geostationary=(["y_geostationary"], y),
26
- )
27
- )
28
- return da_sat
29
-
30
-
31
- def test_convert_satellite_to_numpy_sample(da_sat_like):
32
-
33
- # Call the function
34
- numpy_sample = convert_satellite_to_numpy_sample(da_sat_like)
35
-
36
- # Assert the output type
37
- assert isinstance(numpy_sample, dict)
38
-
39
- # Assert the shape of the numpy sample
40
- assert (numpy_sample[SatelliteSampleKey.satellite_actual] == da_sat_like.values).all()
@@ -1,81 +0,0 @@
1
- import numpy as np
2
- import pandas as pd
3
- import pytest
4
-
5
- from ocf_data_sampler.numpy_sample.sun_position import (
6
- calculate_azimuth_and_elevation, make_sun_position_numpy_sample
7
- )
8
-
9
- from ocf_data_sampler.numpy_sample import GSPSampleKey
10
-
11
-
12
- @pytest.mark.parametrize("lat", [0, 5, 10, 23.5])
13
- def test_calculate_azimuth_and_elevation(lat):
14
-
15
- # Pick the day of the summer solstice
16
- datetimes = pd.to_datetime(["2024-06-20 12:00"])
17
-
18
- # Calculate sun angles
19
- azimuth, elevation = calculate_azimuth_and_elevation(datetimes, lon=0, lat=lat)
20
-
21
- assert len(azimuth)==len(datetimes)
22
- assert len(elevation)==len(datetimes)
23
-
24
- # elevation should be close to (90 - (23.5-lat) degrees
25
- assert np.abs(elevation - (90-23.5+lat)) < 1
26
-
27
-
28
- def test_calculate_azimuth_and_elevation_random():
29
- """Test that the function produces the expected range of azimuths and elevations"""
30
-
31
- # Set seed so we know the test should pass
32
- np.random.seed(0)
33
-
34
- # Pick the day of the summer solstice
35
- datetimes = pd.to_datetime(["2024-06-20 12:00"])
36
-
37
- # Pick 100 random locations and measure their azimuth and elevations
38
- azimuths = []
39
- elevations = []
40
-
41
- for _ in range(100):
42
-
43
- lon = np.random.uniform(low=0, high=360)
44
- lat = np.random.uniform(low=-90, high=90)
45
-
46
- # Calculate sun angles
47
- azimuth, elevation = calculate_azimuth_and_elevation(datetimes, lon=lon, lat=lat)
48
-
49
- azimuths.append(azimuth.item())
50
- elevations.append(elevation.item())
51
-
52
- azimuths = np.array(azimuths)
53
- elevations = np.array(elevations)
54
-
55
- assert (0<=azimuths).all() and (azimuths<=360).all()
56
- assert (-90<=elevations).all() and (elevations<=90).all()
57
-
58
- # Azimuth range is [0, 360]
59
- assert azimuths.min() < 30
60
- assert azimuths.max() > 330
61
-
62
- # Elevation range is [-90, 90]
63
- assert elevations.min() < -70
64
- assert elevations.max() > 70
65
-
66
-
67
- def test_make_sun_position_numpy_sample():
68
-
69
- datetimes = pd.date_range("2024-06-20 12:00", "2024-06-20 16:00", freq="30min")
70
- lon, lat = 0, 51.5
71
-
72
- sample = make_sun_position_numpy_sample(datetimes, lon, lat, key_prefix="gsp")
73
-
74
- assert GSPSampleKey.solar_elevation in sample
75
- assert GSPSampleKey.solar_azimuth in sample
76
-
77
- # The solar coords are normalised in the function
78
- assert (sample[GSPSampleKey.solar_elevation]>=0).all()
79
- assert (sample[GSPSampleKey.solar_elevation]<=1).all()
80
- assert (sample[GSPSampleKey.solar_azimuth]>=0).all()
81
- assert (sample[GSPSampleKey.solar_azimuth]<=1).all()
@@ -1,69 +0,0 @@
1
- from ocf_data_sampler.select.dropout import draw_dropout_time, apply_dropout_time
2
-
3
- import numpy as np
4
- import pandas as pd
5
- import xarray as xr
6
-
7
- import pytest
8
-
9
-
10
- @pytest.fixture(scope="module")
11
- def da_sample():
12
- """Create dummy data which looks like satellite data"""
13
-
14
- datetimes = pd.date_range("2024-01-01 12:00", "2024-01-01 13:00", freq="5min")
15
-
16
- da_sat = xr.DataArray(
17
- np.random.normal(size=(len(datetimes))),
18
- coords=dict(time_utc=datetimes)
19
- )
20
- return da_sat
21
-
22
-
23
- def test_draw_dropout_time():
24
- t0 = pd.Timestamp("2021-01-01 04:00:00")
25
-
26
- dropout_timedeltas = pd.to_timedelta([-30, -60], unit="min")
27
- dropout_time = draw_dropout_time(t0, dropout_timedeltas, dropout_frac=1)
28
-
29
- assert isinstance(dropout_time, pd.Timestamp)
30
- assert (dropout_time-t0) in dropout_timedeltas
31
-
32
-
33
- def test_draw_dropout_time_partial():
34
- t0 = pd.Timestamp("2021-01-01 04:00:00")
35
-
36
- dropout_timedeltas = pd.to_timedelta([-30, -60], unit="min")
37
-
38
- dropouts = set()
39
-
40
- # Loop over 1000 to have very high probability of seeing all dropouts
41
- # The chances of this failing by chance are approx ((2/3)^100)*3 = 7e-18
42
- for _ in range(100):
43
- dropouts.add(draw_dropout_time(t0, dropout_timedeltas, dropout_frac=2/3))
44
-
45
- # Check all expected dropouts are present
46
- dropouts == {None} | set(t0 + dt for dt in dropout_timedeltas)
47
-
48
-
49
- def test_draw_dropout_time_null():
50
- t0 = pd.Timestamp("2021-01-01 04:00:00")
51
-
52
- # Dropout fraction is 0
53
- dropout_timedeltas = [pd.Timedelta(-30, "min")]
54
- dropout_time = draw_dropout_time(t0, dropout_timedeltas=dropout_timedeltas, dropout_frac=0)
55
- assert dropout_time==t0
56
-
57
- # No dropout timedeltas and dropout fraction is 0
58
- dropout_time = draw_dropout_time(t0, dropout_timedeltas=[], dropout_frac=0)
59
- assert dropout_time==t0
60
-
61
-
62
- @pytest.mark.parametrize("t0_str", ["12:00", "12:30", "13:00"])
63
- def test_apply_dropout_time(da_sample, t0_str):
64
- dropout_time = pd.Timestamp(f"2024-01-01 {t0_str}")
65
-
66
- da_dropout = apply_dropout_time(da_sample, dropout_time)
67
-
68
- assert da_dropout.sel(time_utc=slice(None, dropout_time)).notnull().all()
69
- assert da_dropout.sel(time_utc=slice(dropout_time+pd.Timedelta(5, "min"), None)).isnull().all()
@@ -1,28 +0,0 @@
1
- import pandas as pd
2
-
3
- from ocf_data_sampler.select.fill_time_periods import fill_time_periods
4
-
5
- def test_fill_time_periods():
6
- time_periods = pd.DataFrame(
7
- {
8
- "start_dt": [
9
- "2021-01-01 04:10:00", "2021-01-01 09:00:00",
10
- "2021-01-01 09:15:00", "2021-01-01 12:00:00"
11
- ],
12
- "end_dt": [
13
- "2021-01-01 06:00:00", "2021-01-01 09:00:00",
14
- "2021-01-01 09:20:00", "2021-01-01 14:45:00"
15
- ],
16
- }
17
- )
18
- freq = pd.Timedelta("30min")
19
- filled_time_periods = fill_time_periods(time_periods, freq)
20
-
21
- expected_times = [
22
- "04:30", "05:00", "05:30", "06:00", "09:00", "12:00",
23
- "12:30", "13:00", "13:30", "14:00", "14:30"
24
- ]
25
-
26
- expected_times = pd.DatetimeIndex([f"2021-01-01 {t}" for t in expected_times])
27
-
28
- pd.testing.assert_index_equal(filled_time_periods, expected_times)
@@ -1,202 +0,0 @@
1
- import pandas as pd
2
-
3
- from ocf_data_sampler.select.find_contiguous_time_periods import (
4
- find_contiguous_t0_periods, find_contiguous_t0_periods_nwp,
5
- intersection_of_multiple_dataframes_of_periods,
6
- )
7
-
8
-
9
-
10
- def test_find_contiguous_t0_periods():
11
-
12
- # Create 5-minutely data timestamps
13
- freq = pd.Timedelta(5, "min")
14
- interval_start = pd.Timedelta(-60, "min")
15
- interval_end = pd.Timedelta(15, "min")
16
-
17
- datetimes = (
18
- pd.date_range("2023-01-01 12:00", "2023-01-01 17:00", freq=freq)
19
- .delete([5, 6, 30])
20
- )
21
-
22
- periods = find_contiguous_t0_periods(
23
- datetimes=datetimes,
24
- interval_start=interval_start,
25
- interval_end=interval_end,
26
- sample_period_duration=freq,
27
- )
28
-
29
- expected_results = pd.DataFrame(
30
- {
31
- "start_dt": pd.to_datetime(
32
- [
33
- "2023-01-01 13:35",
34
- "2023-01-01 15:35",
35
- ]
36
- ),
37
- "end_dt": pd.to_datetime(
38
- [
39
- "2023-01-01 14:10",
40
- "2023-01-01 16:45",
41
- ]
42
- ),
43
- },
44
- )
45
-
46
- assert periods.equals(expected_results)
47
-
48
-
49
- def test_find_contiguous_t0_periods_nwp():
50
-
51
- # These are the expected results of the test
52
- expected_results = [
53
- pd.DataFrame(
54
- {
55
- "start_dt": pd.to_datetime(["2023-01-01 03:00", "2023-01-02 03:00"]),
56
- "end_dt": pd.to_datetime(["2023-01-01 21:00", "2023-01-03 06:00"]),
57
- },
58
- ),
59
- pd.DataFrame(
60
- {
61
- "start_dt": pd.to_datetime(
62
- [
63
- "2023-01-01 05:00",
64
- "2023-01-02 05:00",
65
- ]
66
- ),
67
- "end_dt": pd.to_datetime(
68
- [
69
- "2023-01-01 21:00",
70
- "2023-01-03 06:00",
71
- ]
72
- ),
73
- },
74
- ),
75
- pd.DataFrame(
76
- {
77
- "start_dt": pd.to_datetime(
78
- [
79
- "2023-01-01 05:00",
80
- "2023-01-02 05:00",
81
- "2023-01-02 14:00",
82
- ]
83
- ),
84
- "end_dt": pd.to_datetime(
85
- [
86
- "2023-01-01 18:00",
87
- "2023-01-02 09:00",
88
- "2023-01-03 03:00",
89
- ]
90
- ),
91
- },
92
- ),
93
- pd.DataFrame(
94
- {
95
- "start_dt": pd.to_datetime(
96
- [
97
- "2023-01-01 05:00",
98
- "2023-01-01 11:00",
99
- "2023-01-02 05:00",
100
- "2023-01-02 14:00",
101
- ]
102
- ),
103
- "end_dt": pd.to_datetime(
104
- [
105
- "2023-01-01 06:00",
106
- "2023-01-01 15:00",
107
- "2023-01-02 06:00",
108
- "2023-01-03 00:00",
109
- ]
110
- ),
111
- },
112
- ),
113
- pd.DataFrame(
114
- {
115
- "start_dt": pd.to_datetime(
116
- [
117
- "2023-01-01 06:00",
118
- "2023-01-01 12:00",
119
- "2023-01-02 06:00",
120
- "2023-01-02 15:00",
121
- ]
122
- ),
123
- "end_dt": pd.to_datetime(
124
- [
125
- "2023-01-01 09:00",
126
- "2023-01-01 18:00",
127
- "2023-01-02 09:00",
128
- "2023-01-03 03:00",
129
- ]
130
- ),
131
- },
132
- ),
133
- ]
134
-
135
- # Create 3-hourly init times with a few time stamps missing
136
- freq = pd.Timedelta(3, "h")
137
-
138
- init_times = (
139
- pd.date_range("2023-01-01 03:00", "2023-01-02 21:00", freq=freq)
140
- .delete([1, 4, 5, 6, 7, 9, 10])
141
- )
142
-
143
- # Choose some history durations and max stalenesses
144
- history_durations_hr = [0, 2, 2, 2, 2]
145
- max_stalenesses_hr = [9, 9, 6, 3, 6]
146
- max_dropouts_hr = [0, 0, 0, 0, 3]
147
-
148
- for i in range(len(expected_results)):
149
- interval_start = pd.Timedelta(-history_durations_hr[i], "h")
150
- max_staleness = pd.Timedelta(max_stalenesses_hr[i], "h")
151
- max_dropout = pd.Timedelta(max_dropouts_hr[i], "h")
152
-
153
- time_periods = find_contiguous_t0_periods_nwp(
154
- init_times=init_times,
155
- interval_start=interval_start,
156
- max_staleness=max_staleness,
157
- max_dropout=max_dropout,
158
- )
159
-
160
- # Check if results are as expected
161
- assert time_periods.equals(expected_results[i])
162
-
163
-
164
- def test_intersection_of_multiple_dataframes_of_periods():
165
- periods_1 = pd.DataFrame(
166
- {
167
- "start_dt": pd.to_datetime(["2023-01-01 05:00", "2023-01-01 14:10"]),
168
- "end_dt": pd.to_datetime(["2023-01-01 13:35", "2023-01-01 18:00"]),
169
- },
170
- )
171
-
172
- periods_2 = pd.DataFrame(
173
- {
174
- "start_dt": pd.to_datetime(["2023-01-01 12:00"]),
175
- "end_dt": pd.to_datetime(["2023-01-02 00:00"]),
176
- },
177
- )
178
-
179
- periods_3 = pd.DataFrame(
180
- {
181
- "start_dt": pd.to_datetime(["2023-01-01 00:00", "2023-01-01 13:00"]),
182
- "end_dt": pd.to_datetime(["2023-01-01 12:30", "2023-01-01 23:00"]),
183
- },
184
- )
185
-
186
- expected_result = pd.DataFrame(
187
- {
188
- "start_dt": pd.to_datetime(
189
- ["2023-01-01 12:00", "2023-01-01 13:00", "2023-01-01 14:10"]
190
- ),
191
- "end_dt": pd.to_datetime([
192
- "2023-01-01 12:30", "2023-01-01 13:35", "2023-01-01 18:00"]
193
- ),
194
- },
195
- )
196
-
197
- overlaping_periods = intersection_of_multiple_dataframes_of_periods(
198
- [periods_1, periods_2, periods_3]
199
- )
200
-
201
- # Check if results are as expected
202
- assert overlaping_periods.equals(expected_result)
@@ -1,67 +0,0 @@
1
- from ocf_data_sampler.select.location import Location
2
- import pytest
3
-
4
-
5
- def test_make_valid_location_object_with_default_coordinate_system():
6
- x, y = -1000.5, 50000
7
- location = Location(x=x, y=y)
8
- assert location.x == x, "location.x value not set correctly"
9
- assert location.y == y, "location.x value not set correctly"
10
- assert (
11
- location.coordinate_system == "osgb"
12
- ), "location.coordinate_system value not set correctly"
13
-
14
-
15
- def test_make_valid_location_object_with_osgb_coordinate_system():
16
- x, y, coordinate_system = 1.2, 22.9, "osgb"
17
- location = Location(x=x, y=y, coordinate_system=coordinate_system)
18
- assert location.x == x, "location.x value not set correctly"
19
- assert location.y == y, "location.x value not set correctly"
20
- assert (
21
- location.coordinate_system == coordinate_system
22
- ), "location.coordinate_system value not set correctly"
23
-
24
-
25
- def test_make_valid_location_object_with_lon_lat_coordinate_system():
26
- x, y, coordinate_system = 1.2, 1.2, "lon_lat"
27
- location = Location(x=x, y=y, coordinate_system=coordinate_system)
28
- assert location.x == x, "location.x value not set correctly"
29
- assert location.y == y, "location.x value not set correctly"
30
- assert (
31
- location.coordinate_system == coordinate_system
32
- ), "location.coordinate_system value not set correctly"
33
-
34
-
35
- def test_make_invalid_location_object_with_invalid_osgb_x():
36
- x, y, coordinate_system = 10000000, 1.2, "osgb"
37
- with pytest.raises(ValueError) as err:
38
- _ = Location(x=x, y=y, coordinate_system=coordinate_system)
39
- assert err.typename == "ValidationError"
40
-
41
-
42
- def test_make_invalid_location_object_with_invalid_osgb_y():
43
- x, y, coordinate_system = 2.5, 10000000, "osgb"
44
- with pytest.raises(ValueError) as err:
45
- _ = Location(x=x, y=y, coordinate_system=coordinate_system)
46
- assert err.typename == "ValidationError"
47
-
48
-
49
- def test_make_invalid_location_object_with_invalid_lon_lat_x():
50
- x, y, coordinate_system = 200, 1.2, "lon_lat"
51
- with pytest.raises(ValueError) as err:
52
- _ = Location(x=x, y=y, coordinate_system=coordinate_system)
53
- assert err.typename == "ValidationError"
54
-
55
-
56
- def test_make_invalid_location_object_with_invalid_lon_lat_y():
57
- x, y, coordinate_system = 2.5, -200, "lon_lat"
58
- with pytest.raises(ValueError) as err:
59
- _ = Location(x=x, y=y, coordinate_system=coordinate_system)
60
- assert err.typename == "ValidationError"
61
-
62
-
63
- def test_make_invalid_location_object_with_invalid_coordinate_system():
64
- x, y, coordinate_system = 2.5, 1000, "abcd"
65
- with pytest.raises(ValueError) as err:
66
- _ = Location(x=x, y=y, coordinate_system=coordinate_system)
67
- assert err.typename == "ValidationError"