proof-of-portfolio 0.0.87__py3-none-any.whl → 0.0.88__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,2 +1,2 @@
  # This file is auto-generated during build
- __version__ = "0.0.87"
+ __version__ = "0.0.88"
@@ -4,6 +4,7 @@ use crate::utils::{
  ARRAY_SIZE, CALMAR_NOCONFIDENCE_VALUE, LARGE_POSITIVE_VALUE, RATIO_SCALE_FACTOR, SCALE,
  STATISTICAL_CONFIDENCE_MINIMUM_N,
  },
+ risk_normalization::risk_normalization,
  };
  use super::drawdown::daily_max_drawdown;

@@ -21,15 +22,20 @@ pub fn calmar(
  if !bypass_confidence & actual_len < STATISTICAL_CONFIDENCE_MINIMUM_N {
  CALMAR_NOCONFIDENCE_VALUE
  } else {
- let ann_excess_return = ann_excess_return_val;
- let drawdown_factor = daily_max_drawdown(log_returns, actual_len);
+ let base_return_percentage = (avg_daily_return * 365 * 100) / SCALE;
+ let max_drawdown_decimal = daily_max_drawdown(log_returns, actual_len);
+ let drawdown_normalization_factor = risk_normalization(max_drawdown_decimal);

- let max_drawdown = SCALE - drawdown_factor;
-
- if max_drawdown <= 0 {
- LARGE_POSITIVE_VALUE
+ if drawdown_normalization_factor == 0 {
+ 0
  } else {
- (ann_excess_return * RATIO_SCALE_FACTOR) / max_drawdown
+ let raw_calmar = (base_return_percentage * drawdown_normalization_factor) / SCALE;
+ let calmar_cap = 10 * RATIO_SCALE_FACTOR;
+ if raw_calmar > calmar_cap {
+ calmar_cap
+ } else {
+ raw_calmar
+ }
  }
  }
  }
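For intuition, here is a minimal Rust sketch of the new Calmar path. The concrete values of SCALE and RATIO_SCALE_FACTOR are assumptions (SCALE = 10_000_000 is suggested by the old exp_scaled literal further down; RATIO_SCALE_FACTOR = 100 is purely illustrative), and risk_normalization_stub is a simplified stand-in for the new helper shown in full later in this diff.

```rust
// Illustrative sketch of the new Calmar computation, not the package's code.
const SCALE: i64 = 10_000_000; // assumed: 1.0 in fixed point
const RATIO_SCALE_FACTOR: i64 = 100; // assumed output scaling for ratios

// Simplified stand-in for the new risk_normalization helper (full version later
// in this diff): 0 at or above a 10% drawdown, otherwise SCALE / drawdown_percent.
fn risk_normalization_stub(drawdown_decimal: i64) -> i64 {
    let pct = (drawdown_decimal * 100) / SCALE;
    if pct <= 0 || pct >= 10 { 0 } else { SCALE / pct }
}

fn calmar_sketch(avg_daily_return: i64, max_drawdown_decimal: i64) -> i64 {
    // Annualize the average daily return and express it as a whole-number percentage.
    let base_return_percentage = (avg_daily_return * 365 * 100) / SCALE;
    let drawdown_normalization_factor = risk_normalization_stub(max_drawdown_decimal);

    if drawdown_normalization_factor == 0 {
        0
    } else {
        let raw_calmar = (base_return_percentage * drawdown_normalization_factor) / SCALE;
        let calmar_cap = 10 * RATIO_SCALE_FACTOR; // hard cap introduced in this version
        raw_calmar.min(calmar_cap)
    }
}

fn main() {
    // 0.1% average daily return and a 2% max drawdown, both SCALE-scaled.
    println!("{}", calmar_sketch(SCALE / 1000, 2 * SCALE / 100)); // prints 18 under these assumptions
}
```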
@@ -1,7 +1,7 @@
  use crate::utils::constants::{ARRAY_SIZE, SCALE};

  fn exp_scaled(x_scaled: i64) -> i64 {
- let scale: i64 = 10_000_000;
+ let scale: i64 = SCALE;
  let mut abs_x = x_scaled;
  if x_scaled < 0 {
  abs_x = -x_scaled;
@@ -49,9 +49,7 @@ pub fn daily_max_drawdown(log_returns: [i64; ARRAY_SIZE], actual_len: u32) -> i6
  let cumulative_value = exp_scaled(cumulative_sum);
  let max_value = exp_scaled(running_max);

- let drawdown_pct = ((cumulative_value - max_value) * SCALE) / max_value;
-
- let drawdown_factor = SCALE + drawdown_pct;
+ let drawdown_factor = (cumulative_value * SCALE) / max_value;

  if drawdown_factor < max_drawdown_factor {
  max_drawdown_factor = drawdown_factor;
@@ -61,7 +59,7 @@ pub fn daily_max_drawdown(log_returns: [i64; ARRAY_SIZE], actual_len: u32) -> i6
  }
  }

- max_drawdown_factor
+ SCALE - max_drawdown_factor
  }

  #[test]
@@ -72,7 +70,7 @@ fn test_drawdown_all_positive() {
  }

  let result = daily_max_drawdown(returns, 10);
- assert(result == SCALE);
+ assert(result == 0);
  }

  #[test]
@@ -84,7 +82,7 @@ fn test_drawdown_simple_case() {
  returns[3] = 200;

  let result = daily_max_drawdown(returns, 4);
- assert(result == 800);
+ assert(result > 0);
  }

  #[test]
@@ -97,7 +95,7 @@ fn test_drawdown_multiple_peaks() {
  returns[4] = 400;

  let result = daily_max_drawdown(returns, 5);
- assert(result == 1200);
+ assert(result > 0);
  }

  #[test]
@@ -108,5 +106,5 @@ fn test_drawdown_all_negative() {
  }

  let result = daily_max_drawdown(returns, 5);
- assert(result == 500);
+ assert(result > 0);
  }
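As the updated tests indicate, daily_max_drawdown now returns the drawdown itself in SCALE units (0 when the equity curve never dips below its running peak) rather than the remaining-value factor the old version returned. A floating-point analogue of the new convention, shown only for intuition; the circuit does the same arithmetic in integer fixed point via exp_scaled:

```rust
// Floating-point analogue of the new daily_max_drawdown convention: the result
// is the worst peak-to-trough decline as a fraction (0.0 = no drawdown).
fn max_drawdown_fraction(log_returns: &[f64]) -> f64 {
    let mut cumulative_sum = 0.0;
    let mut running_max = 0.0;
    let mut min_value_over_peak = 1.0; // lowest ratio of current value to peak value

    for &r in log_returns {
        cumulative_sum += r;
        running_max = running_max.max(cumulative_sum);
        let ratio = cumulative_sum.exp() / running_max.exp();
        min_value_over_peak = min_value_over_peak.min(ratio);
    }
    1.0 - min_value_over_peak
}

fn main() {
    // Mirrors the updated tests: all-positive returns give zero drawdown,
    // a losing stretch gives a strictly positive one.
    assert!(max_drawdown_fraction(&[0.01; 10]).abs() < 1e-12);
    assert!(max_drawdown_fraction(&[0.01, -0.02, -0.03, 0.02]) > 0.0);
    println!("ok");
}
```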
@@ -14,32 +14,41 @@ pub fn omega(
  OMEGA_NOCONFIDENCE_VALUE
  } else {
  if use_weighting {
- let mut weighted_positive_sum: i64 = 0;
- let mut weighted_negative_sum: i64 = 0;
- let weight_scale: i64 = 100000;
+ let mut product_sum_positive: i64 = 0;
+ let mut product_sum_negative: i64 = 0;
+ let mut sum_weights_positive: i64 = 0;
+ let mut sum_weights_negative: i64 = 0;

  for i in 0..ARRAY_SIZE {
  if (i as u32) < actual_len {
- let weight = weights[i] as i64;
- let log_return = log_returns[i] as i64;
+ let weight = weights[i];
+ let log_return = log_returns[i];
  if log_return > 0 {
- weighted_positive_sum += log_return * weight;
+ product_sum_positive += log_return * weight;
+ sum_weights_positive += weight;
  } else if log_return < 0 {
- weighted_negative_sum += log_return * weight;
+ product_sum_negative += log_return * weight;
+ sum_weights_negative += weight;
  }
  }
  }

- let descaled_positive_sum = weighted_positive_sum / weight_scale;
- let descaled_negative_sum = weighted_negative_sum / weight_scale;
- let abs_weighted_negative_sum = -descaled_negative_sum;
+ if sum_weights_positive == 0 {
+ sum_weights_positive = OMEGA_LOSS_MINIMUM;
+ }
+ if sum_weights_negative == 0 {
+ sum_weights_negative = OMEGA_LOSS_MINIMUM;
+ }

- let effective_denominator = if abs_weighted_negative_sum >= OMEGA_LOSS_MINIMUM {
- abs_weighted_negative_sum
+ let positive_sum = product_sum_positive * sum_weights_negative;
+ let negative_sum = -product_sum_negative * sum_weights_positive;
+
+ let effective_denominator = if negative_sum >= OMEGA_LOSS_MINIMUM {
+ negative_sum
  } else {
  OMEGA_LOSS_MINIMUM
  };
- ((descaled_positive_sum * RATIO_SCALE_FACTOR) / effective_denominator) as i64
+ ((positive_sum * RATIO_SCALE_FACTOR) / effective_denominator) as i64
  } else {
  let mut positive_sum: u64 = 0;
  let mut negative_sum: u64 = 0;
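In the weighted branch, Omega is now the ratio of the weighted average gain to the weighted average loss; cross-multiplying each product sum by the opposite side's weight sum computes that ratio without an intermediate division. A floating-point analogue for intuition (the OMEGA_LOSS_MINIMUM floor is modeled by a small epsilon, since its actual value is not shown in this hunk):

```rust
// Floating-point analogue of the new weighted Omega branch: the ratio of the
// weighted average gain to the weighted average loss, computed via
// cross-multiplication. The circuit does the same in integer fixed point.
fn weighted_omega(log_returns: &[f64], weights: &[f64]) -> f64 {
    let mut product_sum_positive = 0.0;
    let mut product_sum_negative = 0.0;
    let mut sum_weights_positive = 0.0;
    let mut sum_weights_negative = 0.0;

    for (&r, &w) in log_returns.iter().zip(weights) {
        if r > 0.0 {
            product_sum_positive += r * w;
            sum_weights_positive += w;
        } else if r < 0.0 {
            product_sum_negative += r * w;
            sum_weights_negative += w;
        }
    }

    // Guard against an empty side, mirroring the OMEGA_LOSS_MINIMUM floor.
    let eps = 1e-9;
    if sum_weights_positive == 0.0 { sum_weights_positive = eps; }
    if sum_weights_negative == 0.0 { sum_weights_negative = eps; }

    let positive = product_sum_positive * sum_weights_negative;
    let negative = -product_sum_negative * sum_weights_positive;
    positive / negative.max(eps)
}

fn main() {
    let omega = weighted_omega(&[0.02, -0.01, 0.01, -0.02], &[1.0, 0.9, 0.8, 0.7]);
    println!("weighted omega ≈ {omega:.3}");
}
```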
@@ -25,7 +25,7 @@ pub fn sharpe(
  let volatility = if actual_len < 2 {
  SCALE
  } else {
- let annualized_variance = variance_val * 365;
+ let annualized_variance = variance_val * 365 * 1000000;
  sqrt(annualized_variance as u64) as i64
  };
  let effective_volatility = if volatility < SHARPE_STDDEV_MINIMUM {
@@ -30,7 +30,7 @@ pub fn sortino(

  for i in 0..ARRAY_SIZE {
  if (i as u32) < actual_len {
- if log_returns[i] < (RISK_FREE_RATE / 365) {
+ if log_returns[i] < DAILY_LOG_RISK_FREE_RATE {
  downside_returns_sum += log_returns[i];
  negative_count = negative_count + 1;
  }
@@ -42,7 +42,7 @@ pub fn sortino(

  for i in 0..ARRAY_SIZE {
  if (i as u32) < actual_len {
- if log_returns[i] < (RISK_FREE_RATE / 365) {
+ if log_returns[i] < DAILY_LOG_RISK_FREE_RATE {
  let diff = log_returns[i] - downside_mean;
  downside_variance_sum += ((diff * diff) as u64);
  }
@@ -2,6 +2,7 @@ pub mod ann_excess_return;
  pub mod ann_volatility;
  pub mod average;
  pub mod constants;
+ pub mod risk_normalization;
  pub mod sqrt;
  pub mod variance;
  pub mod weighting_distribution;
@@ -0,0 +1,52 @@
+ use crate::utils::constants::SCALE;
+
+ pub fn mdd_augmentation(drawdown_decimal: i64) -> i64 {
+ let drawdown_factor = SCALE - drawdown_decimal;
+
+ if (drawdown_factor <= 0) | (drawdown_factor > SCALE) {
+ 0
+ } else {
+ let drawdown_percentage = ((SCALE - drawdown_factor) * 100) / SCALE;
+ let max_drawdown_percentage = 10;
+
+ if drawdown_percentage >= max_drawdown_percentage {
+ 0
+ } else if drawdown_percentage <= 0 {
+ 0
+ } else {
+ SCALE / drawdown_percentage
+ }
+ }
+ }
+
+ pub fn risk_normalization(drawdown_decimal: i64) -> i64 {
+ mdd_augmentation(drawdown_decimal)
+ }
+
+ #[test]
+ fn test_risk_normalization_zero_drawdown() {
+ let result = risk_normalization(0);
+ assert(result == 0);
+ }
+
+ #[test]
+ fn test_risk_normalization_high_drawdown() {
+ let drawdown_15_percent = (15 * SCALE) / 100;
+ let result = risk_normalization(drawdown_15_percent);
+ assert(result == 0);
+ }
+
+ #[test]
+ fn test_risk_normalization_low_drawdown() {
+ let drawdown_2_percent = (2 * SCALE) / 100;
+ let result = risk_normalization(drawdown_2_percent);
+ let expected = SCALE / 2;
+ assert(result == expected);
+ }
+
+ #[test]
+ fn test_risk_normalization_max_threshold() {
+ let drawdown_10_percent = (10 * SCALE) / 100;
+ let result = risk_normalization(drawdown_10_percent);
+ assert(result == 0);
+ }
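A few worked values for the new helper, assuming SCALE = 10_000_000 (the literal the old exp_scaled hard-coded). The compact `factor` function below is an equivalent reformulation of mdd_augmentation, not the package's code; note that because drawdown_percentage is an integer, drawdowns below 1% round down to 0% and also map to a factor of 0, with the zero-drawdown test above being a special case of that.

```rust
// Equivalent reformulation of the new risk_normalization mapping, for reference.
const SCALE: i64 = 10_000_000; // assumed fixed-point scale

fn factor(drawdown_decimal: i64) -> i64 {
    let pct = (drawdown_decimal * 100) / SCALE; // integer percent of the drawdown
    if pct <= 0 || pct >= 10 { 0 } else { SCALE / pct }
}

fn main() {
    assert_eq!(factor(0), 0);                        // no drawdown        -> 0
    assert_eq!(factor(SCALE / 200), 0);              // 0.5% rounds to 0%  -> 0
    assert_eq!(factor(2 * SCALE / 100), SCALE / 2);  // 2%                 -> SCALE / 2
    assert_eq!(factor(5 * SCALE / 100), SCALE / 5);  // 5%                 -> SCALE / 5
    assert_eq!(factor(9 * SCALE / 100), SCALE / 9);  // 9%                 -> SCALE / 9
    assert_eq!(factor(10 * SCALE / 100), 0);         // 10% threshold      -> 0
    println!("all checks pass");
}
```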
@@ -35,8 +35,8 @@ pub fn variance(
  if (i as u32) < actual_len {
  let diff = daily_returns[i] - mean;
  let sq_diff = diff * diff;
- let scaled_sq_diff = sq_diff / 1000;
- weighted_sum_sq_diff += scaled_sq_diff * weights[i];
+ let pre_scaled_sq_diff = sq_diff / 1000000;
+ weighted_sum_sq_diff += pre_scaled_sq_diff * weights[i];
  }
  }

@@ -50,7 +50,7 @@ pub fn variance(
  if (i as u32) < actual_len {
  let diff = daily_returns[i] - mean;
  let sq_diff = diff * diff;
- let scaled_sq_diff = sq_diff / 1000;
+ let scaled_sq_diff = sq_diff / 1000000;
  sum_sq_diff += scaled_sq_diff;
  }
  }
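Taken together with the sharpe change above (variance_val * 365 * 1000000), the /1000000 pre-scaling and the * 1000000 appear to cancel in units: squared differences of SCALE-scaled returns live in SCALE² units, the pre-scaling keeps the running sums (and the weighted products) comfortably inside i64, and restoring the factor before the square root puts the result back in SCALE² units so that sqrt lands on a SCALE-scaled volatility. A small arithmetic check under the assumption SCALE = 10_000_000 (the averaging step inside variance() is omitted since the hunk does not show it):

```rust
// Sanity check of the new variance/sharpe scaling, under stated assumptions.
const SCALE: i64 = 10_000_000;

fn main() {
    let daily_stddev = 2 * SCALE / 100;        // 2% daily volatility, SCALE-scaled
    let sq_diff = daily_stddev * daily_stddev; // SCALE^2 units

    // New scheme: pre-scale down, then restore before the square root.
    let pre_scaled = sq_diff / 1_000_000;      // keeps accumulation well within i64
    let annualized_variance = pre_scaled * 365 * 1_000_000;
    let annualized_vol = (annualized_variance as f64).sqrt() as i64;

    // Reference value computed directly in floating point.
    let expected = (0.02f64 * 365f64.sqrt() * SCALE as f64) as i64;
    assert!((annualized_vol - expected).abs() < SCALE / 100);
    println!("annualized vol ≈ {annualized_vol} (SCALE-scaled)");
}
```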
@@ -19,19 +19,20 @@ fn exp_decay_scaled(neg_x_scaled: i64) -> i64 {

  pub fn weighting_distribution(actual_len: u32) -> [i64; ARRAY_SIZE] {
  let mut weights = [0; ARRAY_SIZE];
- let max_weight: i64 = 100000; // 1.0 * 100000
- let min_weight: i64 = 15000; // 0.15 * 100000
- let decay_rate: i64 = 7500; // 0.075 * 100000
+ let max_weight: i64 = 100000;
+ let min_weight: i64 = 15000;
+ let decay_rate: i64 = 7500;
  let weight_range = max_weight - min_weight;
  let scale: i64 = 100000;

  for i in 0..ARRAY_SIZE {
  if (i as u32) < actual_len {
- let neg_x_scaled = -decay_rate * (i as i64);
+ let position_from_newest = (actual_len - 1) - (i as u32);
+ let neg_x_scaled = -(decay_rate * (position_from_newest as i64)) / 1000;
  let exp_val_scaled = exp_decay_scaled(neg_x_scaled);

  let weighted_val = (weight_range * exp_val_scaled) / scale;
- weights[actual_len - 1 - i] = min_weight + weighted_val;
+ weights[i] = min_weight + weighted_val;
  }
  }

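The weight array is now written in place (weights[i]) with the exponent measured from the newest return rather than from index 0. A floating-point analogue of the intended shape, reading the fixed-point literals the way the comments removed in this diff did (100000 → 1.0, 15000 → 0.15, 7500 → 0.075) and assuming returns are stored oldest-first; the exact argument scaling of exp_decay_scaled is not visible in this hunk, so the per-step decay rate here is an assumption.

```rust
// Floating-point analogue of the revised weighting_distribution: the most
// recent return keeps the full weight and older returns decay exponentially
// toward a floor, under the assumptions stated above.
fn weighting_distribution(actual_len: usize) -> Vec<f64> {
    let (max_weight, min_weight, decay_rate) = (1.0_f64, 0.15_f64, 0.075_f64);
    (0..actual_len)
        .map(|i| {
            let position_from_newest = (actual_len - 1 - i) as f64;
            min_weight + (max_weight - min_weight) * (-decay_rate * position_from_newest).exp()
        })
        .collect()
}

fn main() {
    let w = weighting_distribution(5);
    assert!(w[0] < w[4]);                // oldest entry gets the smallest weight
    assert!((w[4] - 1.0).abs() < 1e-12); // newest entry gets the full weight
    println!("{w:?}");
}
```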
@@ -21,6 +21,7 @@ fn main(
  path_elements: [[Field; MERKLE_DEPTH]; MAX_SIGNALS],
  path_indices: [[Field; MERKLE_DEPTH]; MAX_SIGNALS],
  signals_merkle_root: pub Field,
+ returns_commitment: pub Field,
  risk_free_rate: pub i64,
  use_weighting: bool,
  bypass_confidence: pub bool,
@@ -102,7 +103,10 @@ fn main(
  let log_return = returns_array[i];
  let x_squared = (log_return * log_return) / SCALE;
  let x_cubed = (x_squared * log_return) / SCALE;
- let exp_minus_one = log_return + (x_squared / 2) + (x_cubed / 6);
+ let x_fourth = (x_cubed * log_return) / SCALE;
+ let x_fifth = (x_fourth * log_return) / SCALE;
+ let exp_minus_one =
+ log_return + (x_squared / 2) + (x_cubed / 6) + (x_fourth / 24) + (x_fifth / 120);
  daily_pnl_array[i] = (exp_minus_one * account_size) / SCALE;
  }
  }
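The daily PnL conversion now approximates exp(x) − 1 with two extra Taylor terms. A quick fixed-point check of the accuracy gain for a fairly large daily log return, assuming SCALE = 10_000_000:

```rust
// Fixed-point exp(x) - 1 via truncated Taylor series, mirroring the circuit's
// update from 3 to 5 terms. SCALE = 10_000_000 is an assumption.
const SCALE: i64 = 10_000_000;

fn exp_minus_one_terms(log_return: i64, terms: u32) -> i64 {
    let x_squared = (log_return * log_return) / SCALE;
    let x_cubed = (x_squared * log_return) / SCALE;
    let x_fourth = (x_cubed * log_return) / SCALE;
    let x_fifth = (x_fourth * log_return) / SCALE;
    let mut result = log_return + (x_squared / 2) + (x_cubed / 6);
    if terms >= 5 {
        result += (x_fourth / 24) + (x_fifth / 120);
    }
    result
}

fn main() {
    // A 10% daily log return, SCALE-scaled.
    let x = SCALE / 10;
    let exact = (0.1f64.exp_m1() * SCALE as f64) as i64;
    let with_3 = exp_minus_one_terms(x, 3);
    let with_5 = exp_minus_one_terms(x, 5);
    // The two extra terms pull the approximation noticeably closer to exp(x) - 1.
    assert!((with_5 - exact).abs() < (with_3 - exact).abs());
    println!("exact {exact}, 3-term {with_3}, 5-term {with_5}");
}
```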
@@ -178,6 +182,9 @@ fn main(
  }
  let computed_returns_root = build_merkle_root(leaves, n_returns);

+ // Verify returns commitment matches computed root
+ assert(returns_commitment == computed_returns_root);
+
  [
  (avg_daily_pnl as u64) as Field,
  (sharpe_ratio as u64) as Field,
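The new public input binds the private return series to a commitment the verifier already knows: the circuit rebuilds the Merkle root over the return leaves and asserts it equals returns_commitment. A schematic of that check in Rust; hash_pair is hypothetical and merely stands in for whatever hash build_merkle_root actually uses.

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Hypothetical stand-in for the circuit's Merkle hash; this diff does not show
// the real hash build_merkle_root is built on.
fn hash_pair(left: u64, right: u64) -> u64 {
    let mut h = DefaultHasher::new();
    (left, right).hash(&mut h);
    h.finish()
}

// Schematic of the new check: recompute the root over the return leaves and
// require it to match the publicly committed root.
fn verify_returns_commitment(leaves: &[u64], returns_commitment: u64) -> bool {
    let mut level: Vec<u64> = leaves.to_vec();
    while level.len() > 1 {
        level = level
            .chunks(2)
            .map(|pair| hash_pair(pair[0], *pair.get(1).unwrap_or(&pair[0])))
            .collect();
    }
    level[0] == returns_commitment
}

fn main() {
    let leaves = vec![3, 1, 4, 1, 5, 9, 2, 6];
    // In practice the commitment is produced off-circuit and passed in as a
    // public input; here we just recompute it for the demo.
    let commitment = {
        let mut level = leaves.clone();
        while level.len() > 1 {
            level = level.chunks(2).map(|p| hash_pair(p[0], p[1])).collect();
        }
        level[0]
    };
    assert!(verify_returns_commitment(&leaves, commitment));
    println!("commitment verified");
}
```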