pvlib 0.13.1a1__py3-none-any.whl → 0.15.0__py3-none-any.whl

This diff shows the changes between two publicly released package versions as they appear in their public registries, and is provided for informational purposes only.
@@ -301,7 +301,8 @@ def get_irradiance_poa(surface_tilt, surface_azimuth, solar_zenith,
     sky_diffuse_comps_horizontal = haydavies(0, 180, dhi, dni, dni_extra,
                                              solar_zenith, solar_azimuth,
                                              return_components=True)
-    circumsolar_horizontal = sky_diffuse_comps_horizontal['circumsolar']
+    circumsolar_horizontal = \
+        sky_diffuse_comps_horizontal['poa_circumsolar']
 
     # Call haydavies a second time where circumsolar_normal is facing
     # directly towards sun, and can be added to DNI
@@ -309,7 +310,7 @@ def get_irradiance_poa(surface_tilt, surface_azimuth, solar_zenith,
                                          dni, dni_extra, solar_zenith,
                                          solar_azimuth,
                                          return_components=True)
-    circumsolar_normal = sky_diffuse_comps_normal['circumsolar']
+    circumsolar_normal = sky_diffuse_comps_normal['poa_circumsolar']
 
     dhi = dhi - circumsolar_horizontal
     dni = dni + circumsolar_normal
pvlib/inverter.py CHANGED
@@ -388,8 +388,8 @@ def pvwatts(pdc, pdc0, eta_inv_nom=0.96, eta_inv_ref=0.9637):
 
     References
     ----------
-    .. [1] A. P. Dobos, "PVWatts Version 5 Manual,"
-       http://pvwatts.nrel.gov/downloads/pvwattsv5.pdf (2014).
+    .. [1] A. P. Dobos, "PVWatts Version 5 Manual", NREL, Golden, CO, USA,
+       Technical Report NREL/TP-6A20-62641, 2014, :doi:`10.2172/1158421`.
     """
 
     pac0 = eta_inv_nom * pdc0
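The only change to pvlib.inverter.pvwatts above is the reference citation; the function itself is untouched. For orientation, a minimal usage sketch based on the signature shown in the hunk header (the power values are illustrative):

    import pvlib

    # AC power from an inverter with a 1000 W nominal DC input rating,
    # driven at 800 W DC, using the default nominal efficiency of 0.96
    pac = pvlib.inverter.pvwatts(pdc=800, pdc0=1000, eta_inv_nom=0.96)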
pvlib/iotools/__init__.py CHANGED
@@ -8,9 +8,6 @@ from pvlib.iotools.midc import read_midc_raw_data_from_nrel  # noqa: F401
 from pvlib.iotools.crn import read_crn  # noqa: F401
 from pvlib.iotools.solrad import read_solrad  # noqa: F401
 from pvlib.iotools.solrad import get_solrad  # noqa: F401
-from pvlib.iotools.psm3 import get_psm3  # noqa: F401
-from pvlib.iotools.psm3 import read_psm3  # noqa: F401
-from pvlib.iotools.psm3 import parse_psm3  # noqa: F401
 from pvlib.iotools.psm4 import get_nsrdb_psm4_aggregated  # noqa: F401
 from pvlib.iotools.psm4 import get_nsrdb_psm4_tmy  # noqa: F401
 from pvlib.iotools.psm4 import get_nsrdb_psm4_conus  # noqa: F401
@@ -45,3 +42,5 @@ from pvlib.iotools.meteonorm import get_meteonorm_observation_realtime  # noqa:
 from pvlib.iotools.meteonorm import get_meteonorm_observation_training  # noqa: F401, E501
 from pvlib.iotools.meteonorm import get_meteonorm_tmy  # noqa: F401
 from pvlib.iotools.nasa_power import get_nasa_power  # noqa: F401
+from pvlib.iotools.era5 import get_era5  # noqa: F401
+from pvlib.iotools.merra2 import get_merra2  # noqa: F401
pvlib/iotools/acis.py CHANGED
@@ -413,8 +413,8 @@ def get_acis_station_data(station, start, end, trace_val=0.001,
                  'climdiv,valid_daterange,tzo,network')
     }
     df, metadata = _get_acis(start, end, params, map_variables, url, **kwargs)
-    df = df.replace("M", np.nan)
-    df = df.replace("T", trace_val)
+    df = df.mask(df == 'M', np.nan)
+    df = df.mask(df == 'T', trace_val)
     df = df.astype(float)
     return df, metadata
 
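The switch from DataFrame.replace to DataFrame.mask above substitutes only cells that are exactly 'M' (missing) or 'T' (trace) before the cast to float. A minimal standalone sketch of the same pattern, with an illustrative column name and values:

    import numpy as np
    import pandas as pd

    df = pd.DataFrame({'pcpn': ['0.25', 'T', 'M']})  # strings as returned by the API
    df = df.mask(df == 'M', np.nan)   # missing observations
    df = df.mask(df == 'T', 0.001)    # trace amounts (trace_val)
    df = df.astype(float)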
pvlib/iotools/era5.py ADDED
@@ -0,0 +1,207 @@
+import requests
+import pandas as pd
+from io import BytesIO, StringIO
+import zipfile
+import time
+
+
+VARIABLE_MAP = {
+    # short names
+    'd2m': 'temp_dew',
+    't2m': 'temp_air',
+    'sp': 'pressure',
+    'ssrd': 'ghi',
+    'tp': 'precipitation',
+    'strd': 'longwave_down',
+
+    # long names
+    '2m_dewpoint_temperature': 'temp_dew',
+    '2m_temperature': 'temp_air',
+    'surface_pressure': 'pressure',
+    'surface_solar_radiation_downwards': 'ghi',
+    'total_precipitation': 'precipitation',
+    'surface_thermal_radiation_downwards': 'longwave_down',
+}
+
+
+def _same(x):
+    return x
+
+
+def _k_to_c(temp_k):
+    return temp_k - 273.15
+
+
+def _j_to_w(j):
+    return j / 3600
+
+
+def _m_to_cm(m):
+    return m / 100
+
+
+UNITS = {
+    'u100': _same,
+    'v100': _same,
+    'u10': _same,
+    'v10': _same,
+    'd2m': _k_to_c,
+    't2m': _k_to_c,
+    'msl': _same,
+    'sst': _k_to_c,
+    'skt': _k_to_c,
+    'sp': _same,
+    'ssrd': _j_to_w,
+    'strd': _j_to_w,
+    'tp': _m_to_cm,
+}
+
+
+def get_era5(latitude, longitude, start, end, variables, api_key,
+             map_variables=True, timeout=60,
+             url='https://cds.climate.copernicus.eu/api/retrieve/v1/'):
+    """
+    Retrieve ERA5 reanalysis data from the ECMWF's Copernicus Data Store.
+
+    A CDS API key is needed to access this API. Register for one at [1]_.
+
+    This API [2]_ provides a subset of the full ERA5 dataset. See [3]_ for
+    the available variables. Data are available on a 0.25° x 0.25° grid.
+
+    Parameters
+    ----------
+    latitude : float
+        In decimal degrees, north is positive (ISO 19115).
+    longitude: float
+        In decimal degrees, east is positive (ISO 19115).
+    start : datetime like or str
+        First day of the requested period. Assumed to be UTC if not localized.
+    end : datetime like or str
+        Last day of the requested period. Assumed to be UTC if not localized.
+    variables : list of str
+        List of variable names to retrieve, for example
+        ``['ghi', 'temp_air']``. Both pvlib and ERA5 names can be used.
+        See [1]_ for additional options.
+    api_key : str
+        ECMWF CDS API key.
+    map_variables : bool, default True
+        When true, renames columns of the DataFrame to pvlib variable names
+        where applicable. Also converts units of some variables. See variable
+        :const:`VARIABLE_MAP` and :const:`UNITS`.
+    timeout : int, default 60
+        Number of seconds to wait for the requested data to become available
+        before timeout.
+    url : str, optional
+        API endpoint URL.
+
+    Raises
+    ------
+    Exception
+        If ``timeout`` is reached without the job finishing.
+
+    Returns
+    -------
+    data : pd.DataFrame
+        Time series data. The index corresponds to the start of the interval.
+    meta : dict
+        Metadata.
+
+    References
+    ----------
+    .. [1] https://cds.climate.copernicus.eu/
+    .. [2] https://cds.climate.copernicus.eu/datasets/reanalysis-era5-single-levels-timeseries?tab=overview
+    .. [3] https://confluence.ecmwf.int/pages/viewpage.action?pageId=505390919
+    """  # noqa: E501
+
+    def _to_utc_dt_notz(dt):
+        dt = pd.to_datetime(dt)
+        if dt.tzinfo is not None:
+            dt = dt.tz_convert("UTC")
+        return dt
+
+    start = _to_utc_dt_notz(start).strftime("%Y-%m-%d")
+    end = _to_utc_dt_notz(end).strftime("%Y-%m-%d")
+
+    headers = {'PRIVATE-TOKEN': api_key}
+
+    # allow variables to be specified with pvlib names
+    reverse_map = {v: k for k, v in VARIABLE_MAP.items()}
+    variables = [reverse_map.get(k, k) for k in variables]
+
+    # Step 1: submit data request (add it to the queue)
+    params = {
+        "inputs": {
+            "variable": variables,
+            "location": {"longitude": longitude, "latitude": latitude},
+            "date": [f"{start}/{end}"],
+            "data_format": "csv"
+        }
+    }
+    slug = "processes/reanalysis-era5-single-levels-timeseries/execution"
+    response = requests.post(url + slug, json=params, headers=headers,
+                             timeout=timeout)
+    submission_response = response.json()
+    if not response.ok:
+        raise Exception(submission_response)  # likely need to accept license
+
+    job_id = submission_response['jobID']
+
+    # Step 2: poll until the data request is ready
+    slug = "jobs/" + job_id
+    poll_interval = 1
+    num_polls = 0
+    while True:
+        response = requests.get(url + slug, headers=headers, timeout=timeout)
+        poll_response = response.json()
+        job_status = poll_response['status']
+
+        if job_status == 'successful':
+            break  # ready to proceed to next step
+        elif job_status == 'failed':
+            msg = (
+                'Request failed. Please check the ECMWF website for details: '
+                'https://cds.climate.copernicus.eu/requests?tab=all'
+            )
+            raise Exception(msg)
+
+        num_polls += 1
+        if num_polls * poll_interval > timeout:
+            raise requests.exceptions.Timeout(
+                'Request timed out. Try increasing the timeout parameter or '
+                'reducing the request size.'
+            )
+
+        time.sleep(1)
+
+    # Step 3: get the download link for our requested dataset
+    slug = "jobs/" + job_id + "/results"
+    response = requests.get(url + slug, headers=headers, timeout=timeout)
+    results_response = response.json()
+    download_url = results_response['asset']['value']['href']
+
+    # Step 4: finally, download our dataset. it's a zipfile of one CSV
+    response = requests.get(download_url, timeout=timeout)
+    zipbuffer = BytesIO(response.content)
+    archive = zipfile.ZipFile(zipbuffer)
+    filename = archive.filelist[0].filename
+    csvbuffer = StringIO(archive.read(filename).decode('utf-8'))
+    df = pd.read_csv(csvbuffer)
+
+    # and parse into the usual formats
+    metadata = submission_response['metadata']  # include messages from ECMWF
+    metadata['jobID'] = job_id
+    if not df.empty:
+        metadata['latitude'] = df['latitude'].values[0]
+        metadata['longitude'] = df['longitude'].values[0]
+
+    df.index = pd.to_datetime(df['valid_time']).dt.tz_localize('UTC')
+    df = df.drop(columns=['valid_time', 'latitude', 'longitude'])
+
+    if map_variables:
+        # convert units and rename
+        for shortname in df.columns:
+            converter = UNITS.get(shortname, _same)
+            df[shortname] = converter(df[shortname])
+        df = df.rename(columns=VARIABLE_MAP)
+
+    return df, metadata
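The new get_era5 function submits a CDS job, polls until it finishes, then downloads and parses a zipped CSV. A minimal call sketch based on the signature and docstring above; the coordinates, dates, and API key are placeholders:

    from pvlib.iotools import get_era5

    data, meta = get_era5(
        latitude=40.0, longitude=-105.2,        # placeholder location
        start='2023-06-01', end='2023-06-07',   # interpreted as UTC dates
        variables=['ghi', 'temp_air'],          # pvlib or ERA5 names accepted
        api_key='YOUR-CDS-API-KEY')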
pvlib/iotools/merra2.py ADDED
@@ -0,0 +1,196 @@
+import pandas as pd
+import requests
+from io import StringIO
+
+
+VARIABLE_MAP = {
+    'SWGDN': 'ghi',
+    'SWGDNCLR': 'ghi_clear',
+    'ALBEDO': 'albedo',
+    'LWGNT': 'longwave_net',
+    'LWGEM': 'longwave_up',
+    'LWGAB': 'longwave_down',
+    'T2M': 'temp_air',
+    'T2MDEW': 'temp_dew',
+    'PS': 'pressure',
+    'TOTEXTTAU': 'aod550',
+}
+
+
+def get_merra2(latitude, longitude, start, end, username, password, dataset,
+               variables, map_variables=True):
+    """
+    Retrieve MERRA-2 time-series irradiance and meteorological reanalysis data
+    from NASA's GESDISC data archive.
+
+    MERRA-2 [1]_ offers modeled data for many atmospheric quantities at hourly
+    resolution on a 0.5° x 0.625° global grid.
+
+    Access must be granted to the GESDISC data archive before EarthData
+    credentials will work. See [2]_ for instructions.
+
+    Parameters
+    ----------
+    latitude : float
+        In decimal degrees, north is positive (ISO 19115).
+    longitude: float
+        In decimal degrees, east is positive (ISO 19115).
+    start : datetime like or str
+        First timestamp of the requested period. If a timezone is not
+        specified, UTC is assumed.
+    end : datetime like or str
+        Last timestamp of the requested period. If a timezone is not
+        specified, UTC is assumed. Must be in the same year as ``start``.
+    username : str
+        NASA EarthData username.
+    password : str
+        NASA EarthData password.
+    dataset : str
+        Dataset name (with version), e.g. "M2T1NXRAD.5.12.4".
+    variables : list of str
+        List of variable names to retrieve. See the documentation of the
+        specific dataset you are accessing for options.
+    map_variables : bool, default True
+        When true, renames columns of the DataFrame to pvlib variable names
+        where applicable. See variable :const:`VARIABLE_MAP`.
+
+    Raises
+    ------
+    ValueError
+        If ``start`` and ``end`` are in different years, when converted to UTC.
+
+    Returns
+    -------
+    data : pd.DataFrame
+        Time series data. The index corresponds to the middle of the interval.
+    meta : dict
+        Metadata.
+
+    Notes
+    -----
+    The following datasets provide quantities useful for PV modeling:
+
+    +------------------------------------+-----------+---------------+
+    | Dataset                            | Variable  | pvlib name    |
+    +====================================+===========+===============+
+    | `M2T1NXRAD.5.12.4 <M2T1NXRAD_>`_   | SWGDN     | ghi           |
+    |                                    +-----------+---------------+
+    |                                    | SWGDNCLR  | ghi_clear     |
+    |                                    +-----------+---------------+
+    |                                    | ALBEDO    | albedo        |
+    |                                    +-----------+---------------+
+    |                                    | LWGAB     | longwave_down |
+    |                                    +-----------+---------------+
+    |                                    | LWGNT     | longwave_net  |
+    |                                    +-----------+---------------+
+    |                                    | LWGEM     | longwave_up   |
+    +------------------------------------+-----------+---------------+
+    | `M2T1NXSLV.5.12.4 <M2T1NXSLV_>`_   | T2M       | temp_air      |
+    |                                    +-----------+---------------+
+    |                                    | U10       | n/a           |
+    |                                    +-----------+---------------+
+    |                                    | V10       | n/a           |
+    |                                    +-----------+---------------+
+    |                                    | T2MDEW    | temp_dew      |
+    |                                    +-----------+---------------+
+    |                                    | PS        | pressure      |
+    |                                    +-----------+---------------+
+    |                                    | TO3       | n/a           |
+    |                                    +-----------+---------------+
+    |                                    | TQV       | n/a           |
+    +------------------------------------+-----------+---------------+
+    | `M2T1NXAER.5.12.4 <M2T1NXAER_>`_   | TOTEXTTAU | aod550        |
+    |                                    +-----------+---------------+
+    |                                    | TOTSCATAU | n/a           |
+    |                                    +-----------+---------------+
+    |                                    | TOTANGSTR | n/a           |
+    +------------------------------------+-----------+---------------+
+
+    .. _M2T1NXRAD: https://disc.gsfc.nasa.gov/datasets/M2T1NXRAD_5.12.4/summary
+    .. _M2T1NXSLV: https://disc.gsfc.nasa.gov/datasets/M2T1NXSLV_5.12.4/summary
+    .. _M2T1NXAER: https://disc.gsfc.nasa.gov/datasets/M2T1NXAER_5.12.4/summary
+
+    A complete list of datasets and their documentation is available at [3]_.
+
+    Note that MERRA2 does not currently provide DNI or DHI.
+
+    References
+    ----------
+    .. [1] https://gmao.gsfc.nasa.gov/gmao-products/merra-2/
+    .. [2] https://disc.gsfc.nasa.gov/earthdata-login
+    .. [3] https://disc.gsfc.nasa.gov/datasets?project=MERRA-2
+    """
+
+    # general API info here:
+    # https://docs.unidata.ucar.edu/tds/5.0/userguide/netcdf_subset_service_ref.html  # noqa: E501
+
+    def _to_utc_dt_notz(dt):
+        dt = pd.to_datetime(dt)
+        if dt.tzinfo is not None:
+            # convert to utc, then drop tz so that isoformat() is clean
+            dt = dt.tz_convert("UTC").tz_localize(None)
+        return dt
+
+    start = _to_utc_dt_notz(start)
+    end = _to_utc_dt_notz(end)
+
+    if (year := start.year) != end.year:
+        raise ValueError("start and end must be in the same year (in UTC)")
+
+    url = (
+        "https://goldsmr4.gesdisc.eosdis.nasa.gov/thredds/ncss/grid/"
+        f"MERRA2_aggregation/{dataset}/{dataset}_Aggregation_{year}.ncml"
+    )
+
+    parameters = {
+        'var': ",".join(variables),
+        'latitude': latitude,
+        'longitude': longitude,
+        'time_start': start.isoformat() + "Z",
+        'time_end': end.isoformat() + "Z",
+        'accept': 'csv',
+    }
+
+    auth = (username, password)
+
+    with requests.Session() as session:
+        session.auth = auth
+        login = session.request('get', url, params=parameters)
+        response = session.get(login.url, auth=auth, params=parameters)
+
+    response.raise_for_status()
+
+    content = response.content.decode('utf-8')
+    buffer = StringIO(content)
+    df = pd.read_csv(buffer)
+
+    df.index = pd.to_datetime(df['time'])
+
+    meta = {}
+    meta['dataset'] = dataset
+    meta['station'] = df['station'].values[0]
+    meta['latitude'] = df['latitude[unit="degrees_north"]'].values[0]
+    meta['longitude'] = df['longitude[unit="degrees_east"]'].values[0]
+
+    # drop the non-data columns
+    dropcols = ['time', 'station', 'latitude[unit="degrees_north"]',
+                'longitude[unit="degrees_east"]']
+    df = df.drop(columns=dropcols)
+
+    # column names are like T2M[unit="K"] by default. extract the unit
+    # for the metadata, then rename col to just T2M
+    units = {}
+    rename = {}
+    for col in df.columns:
+        name, _ = col.split("[", maxsplit=1)
+        unit = col.split('"')[1]
+        units[name] = unit
+        rename[col] = name
+
+    meta['units'] = units
+    df = df.rename(columns=rename)
+
+    if map_variables:
+        df = df.rename(columns=VARIABLE_MAP)
+
+    return df, meta
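A minimal call sketch for get_merra2 based on the signature and docstring above; the location, dates, credentials, and variable selection are placeholders:

    from pvlib.iotools import get_merra2

    data, meta = get_merra2(
        latitude=40.0, longitude=-105.2,                    # placeholder location
        start='2020-06-01 00:30', end='2020-06-02 00:30',   # same year, UTC
        username='YOUR-EARTHDATA-USERNAME',
        password='YOUR-EARTHDATA-PASSWORD',
        dataset='M2T1NXRAD.5.12.4',                         # hourly radiation collection
        variables=['SWGDN', 'ALBEDO'])                      # mapped to ghi, albedo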
pvlib/iotools/psm4.py CHANGED
@@ -714,7 +714,6 @@ def read_nsrdb_psm4(filename, map_variables=True):
     pvlib.iotools.get_nsrdb_psm4_tmy
     pvlib.iotools.get_nsrdb_psm4_conus
     pvlib.iotools.get_nsrdb_psm4_full_disc
-    pvlib.iotools.read_psm3
 
     References
     ----------
pvlib/irradiance.py CHANGED
@@ -16,7 +16,7 @@ from scipy.optimize import bisect
 from pvlib import atmosphere, solarposition, tools
 import pvlib  # used to avoid dni name collision in complete_irradiance
 
-from pvlib._deprecation import pvlibDeprecationWarning, renamed_kwarg_warning
+from pvlib._deprecation import pvlibDeprecationWarning
 import warnings
 
 
@@ -793,10 +793,11 @@ def haydavies(surface_tilt, surface_azimuth, dhi, dni, dni_extra,
 
     diffuse_components : OrderedDict (array input) or DataFrame (Series input)
         Keys/columns are:
-            * sky_diffuse: Total sky diffuse
-            * isotropic
-            * circumsolar
-            * horizon
+            * poa_sky_diffuse: Total sky diffuse
+            * poa_isotropic
+            * poa_circumsolar
+            * poa_horizon (always zero, not accounted for by the
+              Hay-Davies model)
 
     Notes
     ------
@@ -855,13 +856,13 @@ def haydavies(surface_tilt, surface_azimuth, dhi, dni, dni_extra,
 
     if return_components:
         diffuse_components = OrderedDict()
-        diffuse_components['sky_diffuse'] = sky_diffuse
+        diffuse_components['poa_sky_diffuse'] = sky_diffuse
 
         # Calculate the individual components
-        diffuse_components['isotropic'] = poa_isotropic
-        diffuse_components['circumsolar'] = poa_circumsolar
-        diffuse_components['horizon'] = np.where(
-            np.isnan(diffuse_components['isotropic']), np.nan, 0.)
+        diffuse_components['poa_isotropic'] = poa_isotropic
+        diffuse_components['poa_circumsolar'] = poa_circumsolar
+        diffuse_components['poa_horizon'] = np.where(
+            np.isnan(diffuse_components['poa_isotropic']), np.nan, 0.)
 
         if isinstance(sky_diffuse, pd.Series):
             diffuse_components = pd.DataFrame(diffuse_components)
@@ -1111,10 +1112,10 @@ def perez(surface_tilt, surface_azimuth, dhi, dni, dni_extra,
 
     diffuse_components : OrderedDict (array input) or DataFrame (Series input)
         Keys/columns are:
-            * sky_diffuse: Total sky diffuse
-            * isotropic
-            * circumsolar
-            * horizon
+            * poa_sky_diffuse: Total sky diffuse
+            * poa_isotropic
+            * poa_circumsolar
+            * poa_horizon
 
 
     References
@@ -1197,12 +1198,12 @@ def perez(surface_tilt, surface_azimuth, dhi, dni, dni_extra,
 
     if return_components:
         diffuse_components = OrderedDict()
-        diffuse_components['sky_diffuse'] = sky_diffuse
+        diffuse_components['poa_sky_diffuse'] = sky_diffuse
 
         # Calculate the different components
-        diffuse_components['isotropic'] = dhi * term1
-        diffuse_components['circumsolar'] = dhi * term2
-        diffuse_components['horizon'] = dhi * term3
+        diffuse_components['poa_isotropic'] = dhi * term1
+        diffuse_components['poa_circumsolar'] = dhi * term2
+        diffuse_components['poa_horizon'] = dhi * term3
 
         # Set values of components to 0 when sky_diffuse is 0
         mask = sky_diffuse == 0
@@ -1353,10 +1354,10 @@ def perez_driesse(surface_tilt, surface_azimuth, dhi, dni, dni_extra,
 
     diffuse_components : OrderedDict (array input) or DataFrame (Series input)
         Keys/columns are:
-            * sky_diffuse: Total sky diffuse
-            * isotropic
-            * circumsolar
-            * horizon
+            * poa_sky_diffuse: Total sky diffuse
+            * poa_isotropic
+            * poa_circumsolar
+            * poa_horizon
 
     Notes
     -----
@@ -1417,12 +1418,12 @@ def perez_driesse(surface_tilt, surface_azimuth, dhi, dni, dni_extra,
 
     if return_components:
         diffuse_components = OrderedDict()
-        diffuse_components['sky_diffuse'] = sky_diffuse
+        diffuse_components['poa_sky_diffuse'] = sky_diffuse
 
         # Calculate the different components
-        diffuse_components['isotropic'] = dhi * term1
-        diffuse_components['circumsolar'] = dhi * term2
-        diffuse_components['horizon'] = dhi * term3
+        diffuse_components['poa_isotropic'] = dhi * term1
+        diffuse_components['poa_circumsolar'] = dhi * term2
+        diffuse_components['poa_horizon'] = dhi * term3
 
         if isinstance(sky_diffuse, pd.Series):
             diffuse_components = pd.DataFrame(diffuse_components)
@@ -1612,11 +1613,6 @@ def ghi_from_poa_driesse_2023(surface_tilt, surface_azimuth,
     return ghi
 
 
-@renamed_kwarg_warning(
-    since='0.11.2',
-    old_param_name='clearsky_ghi',
-    new_param_name='ghi_clear',
-    removal="0.14.0")
 def clearsky_index(ghi, ghi_clear, max_clearsky_index=2.0):
     """
     Calculate the clearsky index.
@@ -2155,16 +2151,6 @@ def _dirint_bins(times, kt_prime, zenith, w, delta_kt_prime):
     return kt_prime_bin, zenith_bin, w_bin, delta_kt_prime_bin
 
 
-@renamed_kwarg_warning(
-    since='0.11.2',
-    old_param_name='ghi_clearsky',
-    new_param_name='ghi_clear',
-    removal="0.14.0")
-@renamed_kwarg_warning(
-    since='0.11.2',
-    old_param_name='dni_clearsky',
-    new_param_name='dni_clear',
-    removal="0.14.0")
 def dirindex(ghi, ghi_clear, dni_clear, zenith, times, pressure=101325.,
              use_delta_kt_prime=True, temp_dew=None, min_cos_zenith=0.065,
              max_zenith=87):
@@ -3661,11 +3647,6 @@ def _get_dirint_coeffs():
     return coeffs[1:, 1:, :, :]
 
 
-@renamed_kwarg_warning(
-    since='0.11.2',
-    old_param_name='clearsky_dni',
-    new_param_name='dni_clear',
-    removal="0.14.0")
 def dni(ghi, dhi, zenith, dni_clear=None, clearsky_tolerance=1.1,
         zenith_threshold_for_zero_dni=88.0,
         zenith_threshold_for_clearsky_limit=80.0):
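With the renaming above, code that reads the components returned by haydavies, perez, or perez_driesse with return_components=True must use the poa_-prefixed keys. A short sketch with illustrative scalar inputs:

    from pvlib import irradiance

    comps = irradiance.haydavies(
        surface_tilt=30, surface_azimuth=180,
        dhi=100, dni=800, dni_extra=1400,
        solar_zenith=30, solar_azimuth=180,
        return_components=True)
    circumsolar = comps['poa_circumsolar']  # formerly comps['circumsolar']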
pvlib/ivtools/sdm/__init__.py CHANGED
@@ -10,7 +10,8 @@ from pvlib.ivtools.sdm.cec import (  # noqa: F401
 
 from pvlib.ivtools.sdm.desoto import (  # noqa: F401
     fit_desoto,
-    fit_desoto_sandia
+    fit_desoto_batzelis,
+    fit_desoto_sandia,
 )
 
 from pvlib.ivtools.sdm.pvsyst import (  # noqa: F401