voly-0.0.220-py3-none-any.whl → voly-0.0.222-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
voly/client.py CHANGED
@@ -375,7 +375,6 @@ class VolyClient:
         Returns:
         - Dictionary with pdf_surface, cdf_surface, x_surface, and moments
         """
-        logger.info(f"Calculating HD surface using {method} method")
 
         return get_hd_surface(
             model_results=model_results,
voly/core/hd.py CHANGED
@@ -128,8 +128,6 @@ def calculate_historical_returns(df_hist: pd.DataFrame, n_periods: int) -> Tuple
 
 @catch_exception
 def calculate_normal_hd(df_hist: pd.DataFrame,
-                        t: float,
-                        r: float,
                         n_periods: int,
                         domains: Dict[str, np.ndarray]) -> Dict[str, np.ndarray]:
     """
@@ -139,10 +137,6 @@ def calculate_normal_hd(df_hist: pd.DataFrame,
     -----------
     df_hist : pd.DataFrame
         Historical price data
-    t : float
-        Time to maturity in years
-    r : float
-        Risk-free rate
     n_periods : int
         Number of periods to scale returns
     domains : Dict[str, np.ndarray]
@@ -164,13 +158,8 @@ def calculate_normal_hd(df_hist: pd.DataFrame,
     mu_scaled = np.mean(scaled_returns)
     sigma_scaled = np.std(scaled_returns)
 
-    # Apply Girsanov adjustment to shift to risk-neutral measure
-    expected_risk_neutral_mean = (r - 0.5 * sigma_scaled ** 2) * np.sqrt(t)
-    adjustment = mu_scaled - expected_risk_neutral_mean
-    mu_rn = mu_scaled - adjustment
-
     # Calculate PDF using normal distribution in log-moneyness domain
-    pdf_lm = stats.norm.pdf(LM, loc=mu_rn, scale=sigma_scaled)
+    pdf_lm = stats.norm.pdf(LM, loc=mu_scaled, scale=sigma_scaled)
 
     # Normalize the PDF
     pdf_lm = normalize_density(pdf_lm, dx)
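
For orientation, a minimal standalone sketch of what the updated branch computes: the density is now fit under the real-world measure, with no Girsanov shift toward a risk-neutral mean. The arrays `returns` and `LM` below are placeholders for the package's scaled log returns and log-moneyness grid, and the closing line approximates `normalize_density(pdf_lm, dx)` with a Riemann-sum normalization.

import numpy as np
from scipy import stats

# Placeholders for the objects the diff works with (not package code).
returns = np.random.normal(0.0, 0.02, size=5000)  # stand-in for scaled_returns
LM = np.linspace(-0.5, 0.5, 501)                  # stand-in log-moneyness grid
dx = LM[1] - LM[0]

# Real-measure parameters, as in the new code path.
mu_scaled = np.mean(returns)
sigma_scaled = np.std(returns)

# Normal density on the log-moneyness grid, then normalize to integrate to 1.
pdf_lm = stats.norm.pdf(LM, loc=mu_scaled, scale=sigma_scaled)
pdf_lm = pdf_lm / (pdf_lm.sum() * dx)             # approximation of normalize_density(pdf_lm, dx)
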
@@ -183,8 +172,6 @@ def calculate_normal_hd(df_hist: pd.DataFrame,
 
 @catch_exception
 def calculate_student_t_hd(df_hist: pd.DataFrame,
-                           t: float,
-                           r: float,
                            n_periods: int,
                            domains: Dict[str, np.ndarray]) -> Dict[str, np.ndarray]:
     """
@@ -194,10 +181,6 @@ def calculate_student_t_hd(df_hist: pd.DataFrame,
     -----------
     df_hist : pd.DataFrame
         Historical price data
-    t : float
-        Time to maturity in years
-    r : float
-        Risk-free rate
     n_periods : int
         Number of periods to scale returns
     domains : Dict[str, np.ndarray]
@@ -230,11 +213,6 @@ def calculate_student_t_hd(df_hist: pd.DataFrame,
     else:
         df = 5  # Default value if kurtosis calculation fails
 
-    # Apply Girsanov adjustment to shift to risk-neutral measure
-    expected_risk_neutral_mean = (r - 0.5 * sigma_scaled ** 2) * np.sqrt(t)
-    adjustment = mu_scaled - expected_risk_neutral_mean
-    mu_rn = mu_scaled - adjustment
-
     # Scale parameter for t-distribution
     # In scipy's t-distribution, the scale parameter is different from normal std
     # For t-distribution: variance = (df/(df-2)) * scale^2
@@ -242,7 +220,7 @@ def calculate_student_t_hd(df_hist: pd.DataFrame,
     scale = sigma_scaled * np.sqrt((df - 2) / df) if df > 2 else sigma_scaled
 
     # Calculate PDF using t-distribution in log-moneyness domain
-    pdf_lm = student_t.pdf(LM, df=df, loc=mu_rn, scale=scale)
+    pdf_lm = student_t.pdf(LM, df=df, loc=mu_scaled, scale=scale)
 
     # Normalize the PDF
     pdf_lm = normalize_density(pdf_lm, dx)
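
The scale conversion kept by this hunk follows from scipy's parameterization of the Student-t distribution, where variance = df/(df-2) * scale^2. A quick check with illustrative numbers (not taken from the package) that the converted scale reproduces the target standard deviation:

import numpy as np
from scipy.stats import t as student_t

sigma_scaled = 0.03  # hypothetical std of scaled returns
df = 5               # hypothetical degrees of freedom

# scale chosen so that df / (df - 2) * scale**2 == sigma_scaled**2
scale = sigma_scaled * np.sqrt((df - 2) / df)
print(student_t.std(df=df, loc=0.0, scale=scale))  # ~0.03
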
@@ -255,8 +233,6 @@ def calculate_student_t_hd(df_hist: pd.DataFrame,
 
 @catch_exception
 def calculate_kde_hd(df_hist: pd.DataFrame,
-                     t: float,
-                     r: float,
                      n_periods: int,
                      domains: Dict[str, np.ndarray]) -> Dict[str, np.ndarray]:
     """
@@ -266,10 +242,6 @@ def calculate_kde_hd(df_hist: pd.DataFrame,
     -----------
     df_hist : pd.DataFrame
         Historical price data
-    t : float
-        Time to maturity in years
-    r : float
-        Risk-free rate
     n_periods : int
         Number of periods to scale returns
     domains : Dict[str, np.ndarray]
@@ -286,20 +258,11 @@ def calculate_kde_hd(df_hist: pd.DataFrame,
 
     # Get scaled returns
     scaled_returns, dte_returns = calculate_historical_returns(df_hist, n_periods)
-
-    # Calculate parameters (for Girsanov adjustment)
-    mu_scaled = np.mean(scaled_returns)
-    sigma_scaled = np.std(scaled_returns)
-
-    # Apply Girsanov adjustment to shift to risk-neutral measure
-    expected_risk_neutral_mean = (r - 0.5 * sigma_scaled ** 2) * np.sqrt(t)
-    adjustment = mu_scaled - expected_risk_neutral_mean
-
-    # Shift the returns to be risk-neutral
-    rn_returns = scaled_returns - adjustment + expected_risk_neutral_mean
+    vol = np.sqrt(scaled_returns.std())
+    scaled_returns = scaled_returns / vol
 
     # Fit KDE model using scipy's gaussian_kde with Scott's rule for bandwidth
-    kde = stats.gaussian_kde(rn_returns, bw_method='scott')
+    kde = stats.gaussian_kde(scaled_returns, bw_method='scott')
 
     # Evaluate KDE at points in log-moneyness domain
     pdf_lm = kde(LM)
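
A minimal sketch of the new KDE branch, assuming `scaled_returns` is a 1-D array of scaled log returns and `LM` is the log-moneyness grid. As in the diff, the returns are rescaled by the square root of their standard deviation before fitting scipy's `gaussian_kde` with Scott's-rule bandwidth; the last line stands in for `normalize_density(pdf_lm, dx)`.

import numpy as np
from scipy import stats

scaled_returns = np.random.normal(0.0, 0.02, size=5000)  # placeholder data
LM = np.linspace(-0.5, 0.5, 501)                          # placeholder log-moneyness grid
dx = LM[1] - LM[0]

# Rescale the returns as the updated code does.
vol = np.sqrt(scaled_returns.std())
scaled_returns = scaled_returns / vol

# Fit and evaluate the KDE, then normalize the density on the grid.
kde = stats.gaussian_kde(scaled_returns, bw_method='scott')
pdf_lm = kde(LM)
pdf_lm = pdf_lm / (pdf_lm.sum() * dx)                     # approximation of normalize_density(pdf_lm, dx)
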
@@ -344,7 +307,7 @@ def get_hd_surface(model_results: pd.DataFrame,
         Dictionary with pdf_surface, cdf_surface, x_surface, and moments
     """
     # Validate inputs
-    required_columns = ['s', 't', 'r']
+    required_columns = ['s', 't']
    missing_columns = [col for col in required_columns if col not in model_results.columns]
    if missing_columns:
        raise VolyError(f"Required columns missing in model_results: {missing_columns}")
@@ -371,7 +334,7 @@ def get_hd_surface(model_results: pd.DataFrame,
        logger.info("Using Kernel Density Estimation (KDE) for historical density")
    else:  # default to normal
        calculate_hd = calculate_normal_hd
-       logger.info("Using normal distribution for historical density")
+       logger.info("Using Normal distribution for historical density")
 
    # Determine granularity from data (minutes between data points)
    time_diff = (df_hist.index[1] - df_hist.index[0]).total_seconds() / 60
@@ -389,7 +352,6 @@ def get_hd_surface(model_results: pd.DataFrame,
        # Get parameters for this maturity
        s = model_results.loc[i, 's']  # Spot price
        t = model_results.loc[i, 't']  # Time to maturity in years
-       r = model_results.loc[i, 'r']  # Risk-free rate
 
        # Calculate relevant periods for this maturity
        dte = t * 365.25  # Days to expiry
@@ -401,8 +363,6 @@ def get_hd_surface(model_results: pd.DataFrame,
        # Calculate density using the selected method
        pdfs = calculate_hd(
            df_hist=df_hist,
-           t=t,
-           r=r,
            n_periods=n_periods,
            domains=domains
        )
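
Because the HD calculators no longer read a risk-free rate, `get_hd_surface` now only requires the 's' and 't' columns in `model_results`. A small self-contained sketch of the tightened validation, with `ValueError` standing in for the package's `VolyError` and a hypothetical one-row frame:

import pandas as pd

model_results = pd.DataFrame({'s': [100.0], 't': [0.25]})  # hypothetical fit results

required_columns = ['s', 't']
missing_columns = [col for col in required_columns if col not in model_results.columns]
if missing_columns:
    raise ValueError(f"Required columns missing in model_results: {missing_columns}")
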
voly/core/interpolate.py CHANGED
@@ -29,7 +29,6 @@ def interpolate_model(fit_results: pd.DataFrame,
     Returns:
     - DataFrame with interpolated model parameters for the specified days
     """
-    logger.info(f"Interpolating model parameters using {method} method")
 
     # Check if fit_results is valid
     if fit_results is None or fit_results.empty:
@@ -68,7 +67,7 @@ def interpolate_model(fit_results: pd.DataFrame,
                        "Extrapolation may give unreliable results.")
 
     # Columns to interpolate
-    param_columns = ['u', 'a', 'b', 'sigma', 'rho', 'm', 'nu', 'psi', 'p', 'c', 'nu_tilde']
+    param_columns = ['a', 'b', 'm', 'rho', 'sigma', 'nu', 'psi', 'p', 'c', 'nu_tilde']
 
     # Create empty DataFrame for interpolated results
     interpolated_df = pd.DataFrame(index=[f"{day}d" for day in target_days])
@@ -119,7 +118,7 @@ def interpolate_model(fit_results: pd.DataFrame,
        interpolated_df[param] = f(target_years)
 
    # Ensure consistent ordering of columns with expected structure
-   expected_columns = ['s', 'u', 't', 'r', 'maturity_date', 'a', 'b', 'sigma', 'rho', 'm',
+   expected_columns = ['s', 't', 'r', 'maturity_date', 'a', 'b', 'm', 'rho', 'sigma',
                        'nu', 'psi', 'p', 'c', 'nu_tilde']
 
    # Create final column order based on available columns
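
The interpolation step itself (visible above as `f(target_years)`) is a per-column 1-D interpolation across maturities. A minimal sketch under assumptions: scipy's `interp1d` with linear interpolation, a hypothetical `fit_results` frame with a 't' column in years, and only two of the columns from the diff's `param_columns` list.

import numpy as np
import pandas as pd
from scipy.interpolate import interp1d

# Hypothetical fitted parameters at three maturities (values are illustrative).
fit_results = pd.DataFrame({
    't':     [7 / 365.25, 30 / 365.25, 90 / 365.25],
    'a':     [0.04, 0.05, 0.06],
    'sigma': [0.60, 0.55, 0.50],
}, index=['7d', '30d', '90d'])

target_days = [14, 60]
target_years = np.array(target_days) / 365.25

# One interpolator per parameter column, evaluated at the target maturities.
interpolated_df = pd.DataFrame(index=[f"{day}d" for day in target_days])
for param in ['a', 'sigma']:
    f = interp1d(fit_results['t'], fit_results[param], kind='linear')
    interpolated_df[param] = f(target_years)

print(interpolated_df)
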
voly-0.0.220.dist-info/METADATA → voly-0.0.222.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: voly
-Version: 0.0.220
+Version: 0.0.222
 Summary: Options & volatility research package
 Author-email: Manu de Cara <manu.de.cara@gmail.com>
 License: MIT
voly-0.0.220.dist-info/RECORD → voly-0.0.222.dist-info/RECORD CHANGED
@@ -1,5 +1,5 @@
 voly/__init__.py,sha256=8xyDk7rFCn_MOD5hxuv5cxxKZvBVRiSIM7TgaMPpwpw,211
-voly/client.py,sha256=BHcTRfoM2VLoBsMLmp0QGt4dlgrjZ0CdC0L4PeefUI0,14479
+voly/client.py,sha256=F5jRdmEfxoE2RGHryCntRFrKLlyS7W974jEtEcBz8Co,14410
 voly/exceptions.py,sha256=PBsbn1vNMvKcCJwwJ4lBO6glD85jo1h2qiEmD7ArAjs,92
 voly/formulas.py,sha256=Jn9hBoIx6PGv9k4lm8PeGM4lxFJkrLau8LpnXatdQPM,11176
 voly/models.py,sha256=CGJQr13Uie7iwtx2hjViN9lMXeRN_uOqzp4u8NPaTlA,9282
@@ -7,14 +7,14 @@ voly/core/__init__.py,sha256=bu6fS2I1Pj9fPPnl-zY3L7NqrZSY5Zy6NY2uMUvdhKs,183
 voly/core/charts.py,sha256=2S-BfCo30aj1_xlNLqF-za5rQWxF_mWKIdtdOe5bgbw,12735
 voly/core/data.py,sha256=SNF87C7-r-1IbKwf7rAhXkJ6X305yo7fCDJDdkwz3NM,14103
 voly/core/fit.py,sha256=bVyx7qMgFFpTUjgoCUs58ppmeNN2CORnqPKbGUpV9xw,14081
-voly/core/hd.py,sha256=UFAyLncNUHivpPAcko6IK1bC55mudVtdlRFfXp63HXE,14771
-voly/core/interpolate.py,sha256=JkK172-FXyhesW3hY4pEeuJWG3Bugq7QZXbeKoRpLuo,5305
+voly/core/hd.py,sha256=gyGfrqMH52Y1umqCDcyO3yD35fh_JRM2ZfAeqb_--vU,13381
+voly/core/interpolate.py,sha256=-cNChFpuLnCSMOmfW2ldXxePgQXi-pxcjJvF2yImD1w,5222
 voly/core/rnd.py,sha256=wiZ5OIjPDf1Th5_sQ9CZG5JgAo3EL8f63T_Rj1_VP-0,13214
 voly/utils/__init__.py,sha256=E05mWatyC-PDOsCxQV1p5Xi1IgpOomxrNURyCx_gB-w,200
 voly/utils/density.py,sha256=ONpRli-IaJDgOZ2sb27HHFc9_tkkGSATKl94JODd86A,5879
 voly/utils/logger.py,sha256=4-_2bVJmq17Q0d7Rd2mPg1AeR8gxv6EPvcmBDMFWcSM,1744
-voly-0.0.220.dist-info/licenses/LICENSE,sha256=wcHIVbE12jfcBOai_wqBKY6xvNQU5E909xL1zZNq_2Q,1065
-voly-0.0.220.dist-info/METADATA,sha256=rgHwubZX4cFKLWm7Cw2iPFDA6ClzR0GKx02d4HES6vY,4115
-voly-0.0.220.dist-info/WHEEL,sha256=pxyMxgL8-pra_rKaQ4drOZAegBVuX-G_4nRHjjgWbmo,91
-voly-0.0.220.dist-info/top_level.txt,sha256=ZfLw2sSxF-LrKAkgGjOmeTcw6_gD-30zvtdEY5W4B7c,5
-voly-0.0.220.dist-info/RECORD,,
+voly-0.0.222.dist-info/licenses/LICENSE,sha256=wcHIVbE12jfcBOai_wqBKY6xvNQU5E909xL1zZNq_2Q,1065
+voly-0.0.222.dist-info/METADATA,sha256=38E1kmnHs1z5ivWU4Bf7lnce5v5Bd2hFb9RDOtJMNc8,4115
+voly-0.0.222.dist-info/WHEEL,sha256=ck4Vq1_RXyvS4Jt6SI0Vz6fyVs4GWg7AINwpsaGEgPE,91
+voly-0.0.222.dist-info/top_level.txt,sha256=ZfLw2sSxF-LrKAkgGjOmeTcw6_gD-30zvtdEY5W4B7c,5
+voly-0.0.222.dist-info/RECORD,,
voly-0.0.220.dist-info/WHEEL → voly-0.0.222.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (79.0.0)
+Generator: setuptools (80.0.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 