deltafq 0.1.1__py3-none-any.whl → 0.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (60)
  1. deltafq/__init__.py +30 -31
  2. deltafq/backtest/__init__.py +17 -7
  3. deltafq/backtest/engine.py +99 -52
  4. deltafq/backtest/metrics.py +113 -0
  5. deltafq/backtest/performance.py +81 -0
  6. deltafq/backtest/reporter.py +91 -0
  7. deltafq/core/__init__.py +19 -0
  8. deltafq/core/base.py +37 -0
  9. deltafq/core/config.py +63 -0
  10. deltafq/core/exceptions.py +35 -0
  11. deltafq/core/logger.py +46 -0
  12. deltafq/data/__init__.py +17 -7
  13. deltafq/data/cleaner.py +41 -0
  14. deltafq/data/fetcher.py +52 -0
  15. deltafq/data/storage.py +56 -0
  16. deltafq/data/validator.py +52 -0
  17. deltafq/indicators/__init__.py +17 -8
  18. deltafq/indicators/momentum.py +56 -23
  19. deltafq/indicators/technical.py +59 -0
  20. deltafq/indicators/trend.py +129 -61
  21. deltafq/indicators/volatility.py +67 -27
  22. deltafq/live/__init__.py +17 -0
  23. deltafq/live/connection.py +235 -0
  24. deltafq/live/data_feed.py +159 -0
  25. deltafq/live/monitoring.py +192 -0
  26. deltafq/live/risk_control.py +193 -0
  27. deltafq/strategy/__init__.py +17 -6
  28. deltafq/strategy/base_strategy.py +53 -0
  29. deltafq/strategy/portfolio.py +82 -0
  30. deltafq/strategy/risk_manager.py +64 -0
  31. deltafq/strategy/signal_generator.py +52 -0
  32. deltafq/trading/__init__.py +19 -0
  33. deltafq/trading/broker.py +119 -0
  34. deltafq/trading/execution.py +176 -0
  35. deltafq/trading/order_manager.py +111 -0
  36. deltafq/trading/position_manager.py +157 -0
  37. deltafq/trading/simulator.py +150 -0
  38. deltafq-0.1.2.dist-info/METADATA +110 -0
  39. deltafq-0.1.2.dist-info/RECORD +43 -0
  40. deltafq-0.1.2.dist-info/entry_points.txt +2 -0
  41. {deltafq-0.1.1.dist-info → deltafq-0.1.2.dist-info}/licenses/LICENSE +21 -22
  42. deltafq/backtest/result.py +0 -45
  43. deltafq/data/base.py +0 -30
  44. deltafq/data/loader.py +0 -63
  45. deltafq/optimization/__init__.py +0 -6
  46. deltafq/optimization/grid_search.py +0 -41
  47. deltafq/performance/__init__.py +0 -6
  48. deltafq/performance/metrics.py +0 -37
  49. deltafq/risk/__init__.py +0 -7
  50. deltafq/risk/metrics.py +0 -33
  51. deltafq/risk/position.py +0 -39
  52. deltafq/strategy/base.py +0 -44
  53. deltafq/trade/__init__.py +0 -6
  54. deltafq/trade/broker.py +0 -40
  55. deltafq/utils/__init__.py +0 -6
  56. deltafq/utils/time.py +0 -32
  57. deltafq-0.1.1.dist-info/METADATA +0 -202
  58. deltafq-0.1.1.dist-info/RECORD +0 -29
  59. {deltafq-0.1.1.dist-info → deltafq-0.1.2.dist-info}/WHEEL +0 -0
  60. {deltafq-0.1.1.dist-info → deltafq-0.1.2.dist-info}/top_level.txt +0 -0
deltafq/core/exceptions.py ADDED
@@ -0,0 +1,35 @@
+ """
+ Custom exceptions for DeltaFQ.
+ """
+
+
+ class DeltaFQError(Exception):
+     """Base exception for DeltaFQ."""
+     pass
+
+
+ class DataError(DeltaFQError):
+     """Exception raised for data-related errors."""
+     pass
+
+
+ class TradingError(DeltaFQError):
+     """Exception raised for trading-related errors."""
+     pass
+
+
+ class BacktestError(DeltaFQError):
+     """Exception raised for backtesting errors."""
+     pass
+
+
+ class StrategyError(DeltaFQError):
+     """Exception raised for strategy-related errors."""
+     pass
+
+
+ class IndicatorError(DeltaFQError):
+     """Exception raised for indicator calculation errors."""
+     pass
+
+
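All of the new exception classes share DeltaFQError as a base, so callers can catch the whole family with one handler. A minimal usage sketch, assuming deltafq 0.1.2 is installed; safe_fetch and failing_fetch are hypothetical names, not part of the package:

```python
# Hedged sketch: every library-specific failure derives from DeltaFQError,
# so a broad handler can back up the narrower ones.
from deltafq.core.exceptions import DeltaFQError, DataError

def safe_fetch(fetch):
    # `fetch` is a hypothetical zero-argument callable that may raise.
    try:
        return fetch()
    except DataError as exc:
        print(f"data problem: {exc}")
    except DeltaFQError as exc:
        print(f"other deltafq error: {exc}")
    return None

def failing_fetch():
    # Stand-in for a real fetch that fails.
    raise DataError("no rows returned for symbol 'AAPL'")

print(safe_fetch(failing_fetch))  # prints the data problem, then None
```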
deltafq/core/logger.py ADDED
@@ -0,0 +1,46 @@
+ """
+ Logging system for DeltaFQ.
+ """
+
+ import logging
+ import sys
+ from typing import Optional
+
+
+ class Logger:
+     """Logger for DeltaFQ components."""
+
+     def __init__(self, name: str = "deltafq", level: str = "INFO"):
+         """Initialize logger."""
+         self.logger = logging.getLogger(name)
+         self.logger.setLevel(getattr(logging, level.upper()))
+
+         if not self.logger.handlers:
+             handler = logging.StreamHandler(sys.stdout)
+             formatter = logging.Formatter(
+                 '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+             )
+             handler.setFormatter(formatter)
+             self.logger.addHandler(handler)
+
+     def debug(self, message: str):
+         """Log debug message."""
+         self.logger.debug(message)
+
+     def info(self, message: str):
+         """Log info message."""
+         self.logger.info(message)
+
+     def warning(self, message: str):
+         """Log warning message."""
+         self.logger.warning(message)
+
+     def error(self, message: str):
+         """Log error message."""
+         self.logger.error(message)
+
+     def critical(self, message: str):
+         """Log critical message."""
+         self.logger.critical(message)
+
+
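A minimal sketch of the Logger wrapper above (assumes deltafq 0.1.2 is installed; the logger name and messages are illustrative):

```python
from deltafq.core.logger import Logger

log = Logger(name="deltafq.example", level="DEBUG")
log.info("starting data fetch")         # INFO and above go to stdout
log.debug("verbose diagnostic detail")  # visible because level="DEBUG"
log.error("something went wrong")
```

Because the class reuses logging.getLogger(name), instances that share a name also share handlers, and the handler guard keeps duplicates from being attached.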
deltafq/data/__init__.py CHANGED
@@ -1,7 +1,17 @@
- """Data fetching and management module"""
-
- from deltafq.data.base import DataSource
- from deltafq.data.loader import get_stock_daily, get_stock_minute
-
- __all__ = ["DataSource", "get_stock_daily", "get_stock_minute"]
-
+ """
+ Data management module for DeltaFQ.
+ """
+
+ from .fetcher import DataFetcher
+ from .cleaner import DataCleaner
+ from .validator import DataValidator
+ from .storage import DataStorage
+
+ __all__ = [
+     "DataFetcher",
+     "DataCleaner",
+     "DataValidator",
+     "DataStorage"
+ ]
+
+
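The 0.1.1 function-style exports are gone, so downstream imports need updating. A sketch of the before/after import paths (assumes deltafq 0.1.2 is installed):

```python
# 0.1.1 style (loader.py is removed in 0.1.2) -- this import now fails:
# from deltafq.data import get_stock_daily, get_stock_minute

# 0.1.2 style: class-based components re-exported by the subpackage.
from deltafq.data import DataFetcher, DataCleaner, DataValidator, DataStorage
```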
deltafq/data/cleaner.py ADDED
@@ -0,0 +1,41 @@
+ """
+ Data cleaning utilities for DeltaFQ.
+ """
+
+ import pandas as pd
+ from typing import Optional
+ from ..core.base import BaseComponent
+
+
+ class DataCleaner(BaseComponent):
+     """Data cleaning utilities."""
+
+     def initialize(self) -> bool:
+         """Initialize the data cleaner."""
+         self.logger.info("Initializing data cleaner")
+         return True
+
+     def clean_price_data(self, data: pd.DataFrame) -> pd.DataFrame:
+         """Clean price data by removing invalid values."""
+         # Remove rows with NaN values
+         cleaned_data = data.dropna()
+
+         # Remove rows with zero or negative prices
+         price_columns = ['open', 'high', 'low', 'close']
+         for col in price_columns:
+             if col in cleaned_data.columns:
+                 cleaned_data = cleaned_data[cleaned_data[col] > 0]
+
+         self.logger.info(f"Cleaned data: {len(data)} -> {len(cleaned_data)} rows")
+         return cleaned_data
+
+     def fill_missing_data(self, data: pd.DataFrame, method: str = "forward") -> pd.DataFrame:
+         """Fill missing data using specified method."""
+         if method == "forward":
+             return data.fillna(method='ffill')
+         elif method == "backward":
+             return data.fillna(method='bfill')
+         else:
+             return data.fillna(0)
+
+
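A small sketch of DataCleaner against synthetic rows. It assumes BaseComponent (deltafq/core/base.py, added in this release but not shown above) can be constructed without required arguments and provides self.logger:

```python
import pandas as pd
from deltafq.data import DataCleaner

raw = pd.DataFrame({
    "open":  [100.0, 101.0, None, 0.0],
    "high":  [105.0, 106.0, 104.0, 1.0],
    "low":   [ 95.0,  96.0,  94.0, 0.5],
    "close": [102.0, 103.0, 101.0, 0.9],
})

cleaner = DataCleaner()
cleaner.initialize()
clean = cleaner.clean_price_data(raw)  # drops the NaN row and the zero-price row
print(len(clean))                      # expected: 2
```

Note that fill_missing_data calls fillna(method='ffill'), which pandas 2.1+ deprecates in favour of DataFrame.ffill()/bfill(), so it may emit a FutureWarning on newer pandas.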
deltafq/data/fetcher.py ADDED
@@ -0,0 +1,52 @@
+ """
+ Data fetching interfaces for DeltaFQ.
+ """
+
+ import pandas as pd
+ from typing import List, Optional
+ from ..core.base import BaseComponent
+ from ..core.exceptions import DataError
+
+
+ class DataFetcher(BaseComponent):
+     """Data fetcher for various sources."""
+
+     def __init__(self, source: str = "yahoo", **kwargs):
+         """Initialize data fetcher."""
+         super().__init__(**kwargs)
+         self.source = source
+
+     def initialize(self) -> bool:
+         """Initialize the data fetcher."""
+         self.logger.info(f"Initializing data fetcher with source: {self.source}")
+         return True
+
+     def fetch_stock_data(self, symbol: str, start_date: str, end_date: str = None) -> pd.DataFrame:
+         """Fetch stock data for given symbol."""
+         try:
+             # Placeholder implementation
+             self.logger.info(f"Fetching data for {symbol} from {start_date} to {end_date}")
+
+             # This would be replaced with actual data fetching logic
+             dates = pd.date_range(start=start_date, end=end_date or "2024-01-01", freq='D')
+             data = pd.DataFrame({
+                 'date': dates,
+                 'open': 100.0,
+                 'high': 105.0,
+                 'low': 95.0,
+                 'close': 102.0,
+                 'volume': 1000000
+             })
+
+             return data
+         except Exception as e:
+             raise DataError(f"Failed to fetch data for {symbol}: {str(e)}")
+
+     def fetch_multiple_symbols(self, symbols: List[str], start_date: str, end_date: str = None) -> dict:
+         """Fetch data for multiple symbols."""
+         data_dict = {}
+         for symbol in symbols:
+             data_dict[symbol] = self.fetch_stock_data(symbol, start_date, end_date)
+         return data_dict
+
+
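fetch_stock_data is a placeholder that returns constant OHLCV values rather than real quotes, so a sketch can only demonstrate the call shape (same BaseComponent assumption as the DataCleaner example):

```python
from deltafq.data import DataFetcher

fetcher = DataFetcher(source="yahoo")
fetcher.initialize()

# Single symbol: placeholder frame with one row per calendar day.
df = fetcher.fetch_stock_data("AAPL", start_date="2023-01-01", end_date="2023-01-10")
print(df.columns.tolist())  # ['date', 'open', 'high', 'low', 'close', 'volume']

# Several symbols: dict keyed by symbol.
bundle = fetcher.fetch_multiple_symbols(
    ["AAPL", "MSFT"], start_date="2023-01-01", end_date="2023-01-10"
)
print(sorted(bundle))       # ['AAPL', 'MSFT']
```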
deltafq/data/storage.py ADDED
@@ -0,0 +1,56 @@
+ """
+ Data storage management for DeltaFQ.
+ """
+
+ import pandas as pd
+ import os
+ from pathlib import Path
+ from typing import Optional
+ from ..core.base import BaseComponent
+
+
+ class DataStorage(BaseComponent):
+     """Data storage manager."""
+
+     def __init__(self, storage_path: str = "./data_cache", **kwargs):
+         """Initialize data storage."""
+         super().__init__(**kwargs)
+         self.storage_path = Path(storage_path)
+         self.storage_path.mkdir(exist_ok=True)
+
+     def initialize(self) -> bool:
+         """Initialize the data storage."""
+         self.logger.info(f"Initializing data storage at: {self.storage_path}")
+         return True
+
+     def save_data(self, data: pd.DataFrame, filename: str) -> bool:
+         """Save data to storage."""
+         try:
+             filepath = self.storage_path / filename
+             data.to_csv(filepath, index=False)
+             self.logger.info(f"Saved data to: {filepath}")
+             return True
+         except Exception as e:
+             self.logger.error(f"Failed to save data: {str(e)}")
+             return False
+
+     def load_data(self, filename: str) -> Optional[pd.DataFrame]:
+         """Load data from storage."""
+         try:
+             filepath = self.storage_path / filename
+             if filepath.exists():
+                 data = pd.read_csv(filepath)
+                 self.logger.info(f"Loaded data from: {filepath}")
+                 return data
+             else:
+                 self.logger.warning(f"File not found: {filepath}")
+                 return None
+         except Exception as e:
+             self.logger.error(f"Failed to load data: {str(e)}")
+             return None
+
+     def list_files(self) -> list:
+         """List all files in storage."""
+         return [f.name for f in self.storage_path.iterdir() if f.is_file()]
+
+
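A save/load round-trip sketch for DataStorage (same BaseComponent assumption; the cache directory and file name are illustrative):

```python
import pandas as pd
from deltafq.data import DataStorage

storage = DataStorage(storage_path="./data_cache")
storage.initialize()

prices = pd.DataFrame({"close": [101.2, 102.5, 99.8]})
storage.save_data(prices, "demo_prices.csv")     # CSV written without the index

restored = storage.load_data("demo_prices.csv")  # returns None if the file is missing
print(storage.list_files())
```

Because the constructor calls mkdir(exist_ok=True) without parents=True, a nested storage_path whose parent directory does not exist would raise FileNotFoundError at construction time.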
deltafq/data/validator.py ADDED
@@ -0,0 +1,52 @@
+ """
+ Data validation for DeltaFQ.
+ """
+
+ import pandas as pd
+ from typing import List, Optional
+ from ..core.base import BaseComponent
+ from ..core.exceptions import DataError
+
+
+ class DataValidator(BaseComponent):
+     """Data validator for ensuring data quality."""
+
+     def initialize(self) -> bool:
+         """Initialize the data validator."""
+         self.logger.info("Initializing data validator")
+         return True
+
+     def validate_price_data(self, data: pd.DataFrame) -> bool:
+         """Validate price data structure and values."""
+         required_columns = ['open', 'high', 'low', 'close']
+
+         # Check required columns
+         missing_columns = [col for col in required_columns if col not in data.columns]
+         if missing_columns:
+             raise DataError(f"Missing required columns: {missing_columns}")
+
+         # Check for negative prices
+         for col in required_columns:
+             if (data[col] <= 0).any():
+                 raise DataError(f"Found non-positive values in {col} column")
+
+         # Check high >= low
+         if (data['high'] < data['low']).any():
+             raise DataError("Found high < low values")
+
+         self.logger.info("Price data validation passed")
+         return True
+
+     def validate_data_continuity(self, data: pd.DataFrame, date_column: str = 'date') -> bool:
+         """Validate data continuity."""
+         if date_column not in data.columns:
+             raise DataError(f"Date column '{date_column}' not found")
+
+         # Check for duplicate dates
+         if data[date_column].duplicated().any():
+             raise DataError("Found duplicate dates in data")
+
+         self.logger.info("Data continuity validation passed")
+         return True
+
+
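Validation failures are raised as DataError rather than returned, so callers wrap the checks in try/except. A sketch with deliberately broken synthetic data (same BaseComponent assumption):

```python
import pandas as pd
from deltafq.data import DataValidator
from deltafq.core.exceptions import DataError

frame = pd.DataFrame({
    "date":  pd.date_range("2023-01-02", periods=3, freq="D"),
    "open":  [100.0, 101.0, 102.0],
    "high":  [ 99.0, 106.0, 107.0],   # first row deliberately has high < low
    "low":   [101.0,  99.0, 100.0],
    "close": [100.5, 104.0, 105.0],
})

validator = DataValidator()
validator.initialize()
try:
    validator.validate_price_data(frame)
except DataError as exc:
    print(f"validation failed: {exc}")  # "Found high < low values"

validator.validate_data_continuity(frame, date_column="date")  # passes: unique dates
```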
deltafq/indicators/__init__.py CHANGED
@@ -1,8 +1,17 @@
- """Technical indicator calculation module"""
-
- from deltafq.indicators.trend import SMA, EMA, MACD
- from deltafq.indicators.momentum import RSI
- from deltafq.indicators.volatility import BOLL
-
- __all__ = ["SMA", "EMA", "MACD", "RSI", "BOLL"]
-
+ """
+ Technical indicators module for DeltaFQ.
+ """
+
+ from .technical import TechnicalIndicators
+ from .momentum import MomentumIndicators
+ from .trend import TrendIndicators
+ from .volatility import VolatilityIndicators
+
+ __all__ = [
+     "TechnicalIndicators",
+     "MomentumIndicators",
+     "TrendIndicators",
+     "VolatilityIndicators"
+ ]
+
+
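As with deltafq.data, the flat function exports from 0.1.1 are replaced by indicator classes, so imports need updating:

```python
# 0.1.1 exported free functions; these imports no longer work in 0.1.2:
# from deltafq.indicators import SMA, EMA, MACD, RSI, BOLL

# 0.1.2 groups indicators into component classes instead:
from deltafq.indicators import (
    TechnicalIndicators,
    MomentumIndicators,
    TrendIndicators,
    VolatilityIndicators,
)
```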
deltafq/indicators/momentum.py CHANGED
@@ -1,23 +1,56 @@
- """Momentum indicators"""
-
- import pandas as pd
-
-
- def RSI(data: pd.Series, period: int = 14) -> pd.Series:
-     """Relative Strength Index
-
-     Args:
-         data: price series
-         period: lookback period
-
-     Returns:
-         RSI series
-     """
-     delta = data.diff()
-     gain = (delta.where(delta > 0, 0)).rolling(window=period).mean()
-     loss = (-delta.where(delta < 0, 0)).rolling(window=period).mean()
-
-     rs = gain / loss
-     rsi = 100 - (100 / (1 + rs))
-     return rsi
-
+ """
+ Momentum indicators for DeltaFQ.
+ """
+
+ import pandas as pd
+ import numpy as np
+ from ..core.base import BaseComponent
+
+
+ class MomentumIndicators(BaseComponent):
+     """Momentum-based technical indicators."""
+
+     def initialize(self) -> bool:
+         """Initialize momentum indicators."""
+         self.logger.info("Initializing momentum indicators")
+         return True
+
+     def roc(self, data: pd.Series, period: int) -> pd.Series:
+         """Rate of Change."""
+         return data.pct_change(period) * 100
+
+     def momentum(self, data: pd.Series, period: int) -> pd.Series:
+         """Momentum indicator."""
+         return data - data.shift(period)
+
+     def williams_r(self, high: pd.Series, low: pd.Series, close: pd.Series, period: int = 14) -> pd.Series:
+         """Williams %R."""
+         highest_high = high.rolling(window=period).max()
+         lowest_low = low.rolling(window=period).min()
+         return -100 * (highest_high - close) / (highest_high - lowest_low)
+
+     def stochastic(self, high: pd.Series, low: pd.Series, close: pd.Series,
+                    k_period: int = 14, d_period: int = 3) -> pd.DataFrame:
+         """Stochastic Oscillator."""
+         lowest_low = low.rolling(window=k_period).min()
+         highest_high = high.rolling(window=k_period).max()
+
+         k_percent = 100 * (close - lowest_low) / (highest_high - lowest_low)
+         d_percent = k_percent.rolling(window=d_period).mean()
+
+         return pd.DataFrame({
+             'k_percent': k_percent,
+             'd_percent': d_percent
+         })
+
+     def cci(self, high: pd.Series, low: pd.Series, close: pd.Series, period: int = 20) -> pd.Series:
+         """Commodity Channel Index."""
+         typical_price = (high + low + close) / 3
+         sma_tp = typical_price.rolling(window=period).mean()
+         mean_deviation = typical_price.rolling(window=period).apply(
+             lambda x: np.mean(np.abs(x - x.mean()))
+         )
+
+         return (typical_price - sma_tp) / (0.015 * mean_deviation)
+
+
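A usage sketch for MomentumIndicators on synthetic OHLC series (same BaseComponent assumption as the data examples; the random data is illustrative):

```python
import numpy as np
import pandas as pd
from deltafq.indicators import MomentumIndicators

rng = np.random.default_rng(0)
close = pd.Series(100 + rng.normal(0, 1, 60).cumsum())
high = close + rng.uniform(0.1, 1.0, 60)
low = close - rng.uniform(0.1, 1.0, 60)

mom = MomentumIndicators()
mom.initialize()

roc = mom.roc(close, period=10)           # percent change over 10 bars
willr = mom.williams_r(high, low, close)  # bounded in [-100, 0]
stoch = mom.stochastic(high, low, close)  # DataFrame with k_percent / d_percent
print(stoch.dropna().tail(3))
```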
deltafq/indicators/technical.py ADDED
@@ -0,0 +1,59 @@
+ """
+ Technical indicators for DeltaFQ.
+ """
+
+ import pandas as pd
+ import numpy as np
+ from ..core.base import BaseComponent
+
+
+ class TechnicalIndicators(BaseComponent):
+     """Basic technical indicators."""
+
+     def initialize(self) -> bool:
+         """Initialize technical indicators."""
+         self.logger.info("Initializing technical indicators")
+         return True
+
+     def sma(self, data: pd.Series, period: int) -> pd.Series:
+         """Simple Moving Average."""
+         return data.rolling(window=period).mean()
+
+     def ema(self, data: pd.Series, period: int) -> pd.Series:
+         """Exponential Moving Average."""
+         return data.ewm(span=period).mean()
+
+     def rsi(self, data: pd.Series, period: int = 14) -> pd.Series:
+         """Relative Strength Index."""
+         delta = data.diff()
+         gain = (delta.where(delta > 0, 0)).rolling(window=period).mean()
+         loss = (-delta.where(delta < 0, 0)).rolling(window=period).mean()
+         rs = gain / loss
+         return 100 - (100 / (1 + rs))
+
+     def macd(self, data: pd.Series, fast: int = 12, slow: int = 26, signal: int = 9) -> pd.DataFrame:
+         """MACD indicator."""
+         ema_fast = self.ema(data, fast)
+         ema_slow = self.ema(data, slow)
+         macd_line = ema_fast - ema_slow
+         signal_line = self.ema(macd_line, signal)
+         histogram = macd_line - signal_line
+
+         return pd.DataFrame({
+             'macd': macd_line,
+             'signal': signal_line,
+             'histogram': histogram
+         })
+
+     def bollinger_bands(self, data: pd.Series, period: int = 20, std_dev: float = 2) -> pd.DataFrame:
+         """Bollinger Bands."""
+         sma = self.sma(data, period)
+         std = data.rolling(window=period).std()
+
+         return pd.DataFrame({
+             'upper': sma + (std * std_dev),
+             'middle': sma,
+             'lower': sma - (std * std_dev)
+         })
+
+
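TechnicalIndicators.rsi reuses the same gain/loss construction as the removed 0.1.1 RSI() function. A sketch exercising the class (same BaseComponent assumption):

```python
import numpy as np
import pandas as pd
from deltafq.indicators import TechnicalIndicators

rng = np.random.default_rng(1)
close = pd.Series(50 + rng.normal(0, 0.5, 120).cumsum())

ti = TechnicalIndicators()
ti.initialize()

rsi = ti.rsi(close)               # rolling-mean gain/loss, as in the removed RSI()
macd = ti.macd(close)             # columns: macd, signal, histogram
bands = ti.bollinger_bands(close) # columns: upper, middle, lower
print(macd.join(bands).dropna().tail(2))
```

One behavioural difference worth noting: the removed EMA() passed adjust=False to ewm, while TechnicalIndicators.ema relies on pandas' default adjust=True, so MACD values near the start of a series will not match 0.1.1 output exactly.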
deltafq/indicators/trend.py CHANGED
@@ -1,61 +1,129 @@
- """Trend indicators"""
-
- import pandas as pd
- from typing import Union
-
-
- def SMA(data: Union[pd.Series, pd.DataFrame], period: int) -> pd.Series:
-     """Simple Moving Average
-
-     Args:
-         data: price series
-         period: lookback period
-
-     Returns:
-         SMA series
-     """
-     return data.rolling(window=period).mean()
-
-
- def EMA(data: Union[pd.Series, pd.DataFrame], period: int) -> pd.Series:
-     """Exponential Moving Average
-
-     Args:
-         data: price series
-         period: lookback period
-
-     Returns:
-         EMA series
-     """
-     return data.ewm(span=period, adjust=False).mean()
-
-
- def MACD(
-     data: pd.Series,
-     fast_period: int = 12,
-     slow_period: int = 26,
-     signal_period: int = 9
- ) -> pd.DataFrame:
-     """MACD indicator
-
-     Args:
-         data: price series
-         fast_period: fast line period
-         slow_period: slow line period
-         signal_period: signal line period
-
-     Returns:
-         DataFrame containing DIF, DEA, and MACD
-     """
-     fast = EMA(data, fast_period)
-     slow = EMA(data, slow_period)
-     dif = fast - slow
-     dea = EMA(dif, signal_period)
-     macd = (dif - dea) * 2
-
-     return pd.DataFrame({
-         'dif': dif,
-         'dea': dea,
-         'macd': macd
-     })
-
+ """
+ Trend indicators for DeltaFQ.
+ """
+
+ import pandas as pd
+ import numpy as np
+ from ..core.base import BaseComponent
+
+
+ class TrendIndicators(BaseComponent):
+     """Trend-based technical indicators."""
+
+     def initialize(self) -> bool:
+         """Initialize trend indicators."""
+         self.logger.info("Initializing trend indicators")
+         return True
+
+     def adx(self, high: pd.Series, low: pd.Series, close: pd.Series, period: int = 14) -> pd.DataFrame:
+         """Average Directional Index."""
+         # True Range
+         tr1 = high - low
+         tr2 = abs(high - close.shift())
+         tr3 = abs(low - close.shift())
+         tr = pd.concat([tr1, tr2, tr3], axis=1).max(axis=1)
+
+         # Directional Movement
+         dm_plus = np.where((high.diff() > low.diff().abs()) & (high.diff() > 0), high.diff(), 0)
+         dm_minus = np.where((low.diff().abs() > high.diff()) & (low.diff() < 0), low.diff().abs(), 0)
+
+         dm_plus = pd.Series(dm_plus, index=high.index)
+         dm_minus = pd.Series(dm_minus, index=high.index)
+
+         # Smoothed values
+         atr = tr.rolling(window=period).mean()
+         di_plus = 100 * (dm_plus.rolling(window=period).mean() / atr)
+         di_minus = 100 * (dm_minus.rolling(window=period).mean() / atr)
+
+         # ADX
+         dx = 100 * abs(di_plus - di_minus) / (di_plus + di_minus)
+         adx = dx.rolling(window=period).mean()
+
+         return pd.DataFrame({
+             'adx': adx,
+             'di_plus': di_plus,
+             'di_minus': di_minus
+         })
+
+     def parabolic_sar(self, high: pd.Series, low: pd.Series, close: pd.Series,
+                       acceleration: float = 0.02, maximum: float = 0.2) -> pd.Series:
+         """Parabolic SAR."""
+         # Simplified implementation
+         psar = pd.Series(index=close.index, dtype=float)
+         trend = pd.Series(index=close.index, dtype=int)
+         af = pd.Series(index=close.index, dtype=float)
+         ep = pd.Series(index=close.index, dtype=float)
+
+         # Initialize
+         psar.iloc[0] = low.iloc[0]
+         trend.iloc[0] = 1
+         af.iloc[0] = acceleration
+         ep.iloc[0] = high.iloc[0]
+
+         for i in range(1, len(close)):
+             if trend.iloc[i-1] == 1:  # Uptrend
+                 psar.iloc[i] = psar.iloc[i-1] + af.iloc[i-1] * (ep.iloc[i-1] - psar.iloc[i-1])
+
+                 if low.iloc[i] <= psar.iloc[i]:
+                     trend.iloc[i] = -1
+                     psar.iloc[i] = ep.iloc[i-1]
+                     ep.iloc[i] = low.iloc[i]
+                     af.iloc[i] = acceleration
+                 else:
+                     trend.iloc[i] = 1
+                     if high.iloc[i] > ep.iloc[i-1]:
+                         ep.iloc[i] = high.iloc[i]
+                         af.iloc[i] = min(af.iloc[i-1] + acceleration, maximum)
+                     else:
+                         ep.iloc[i] = ep.iloc[i-1]
+                         af.iloc[i] = af.iloc[i-1]
+             else:  # Downtrend
+                 psar.iloc[i] = psar.iloc[i-1] + af.iloc[i-1] * (ep.iloc[i-1] - psar.iloc[i-1])
+
+                 if high.iloc[i] >= psar.iloc[i]:
+                     trend.iloc[i] = 1
+                     psar.iloc[i] = ep.iloc[i-1]
+                     ep.iloc[i] = high.iloc[i]
+                     af.iloc[i] = acceleration
+                 else:
+                     trend.iloc[i] = -1
+                     if low.iloc[i] < ep.iloc[i-1]:
+                         ep.iloc[i] = low.iloc[i]
+                         af.iloc[i] = min(af.iloc[i-1] + acceleration, maximum)
+                     else:
+                         ep.iloc[i] = ep.iloc[i-1]
+                         af.iloc[i] = af.iloc[i-1]
+
+         return psar
+
+     def ichimoku(self, high: pd.Series, low: pd.Series, close: pd.Series,
+                  conversion_period: int = 9, base_period: int = 26,
+                  leading_span_b_period: int = 52, displacement: int = 26) -> pd.DataFrame:
+         """Ichimoku Cloud."""
+         # Conversion Line (Tenkan-sen)
+         conversion_line = (high.rolling(window=conversion_period).max() +
+                            low.rolling(window=conversion_period).min()) / 2
+
+         # Base Line (Kijun-sen)
+         base_line = (high.rolling(window=base_period).max() +
+                      low.rolling(window=base_period).min()) / 2
+
+         # Leading Span A (Senkou Span A)
+         leading_span_a = ((conversion_line + base_line) / 2).shift(displacement)
+
+         # Leading Span B (Senkou Span B)
+         leading_span_b = ((high.rolling(window=leading_span_b_period).max() +
+                            low.rolling(window=leading_span_b_period).min()) / 2).shift(displacement)
+
+         # Lagging Span (Chikou Span)
+         lagging_span = close.shift(-displacement)
+
+         return pd.DataFrame({
+             'conversion_line': conversion_line,
+             'base_line': base_line,
+             'leading_span_a': leading_span_a,
+             'leading_span_b': leading_span_b,
+             'lagging_span': lagging_span
+         })
+
+
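A sketch exercising TrendIndicators on synthetic OHLC data (same BaseComponent assumption). parabolic_sar iterates bar by bar with .iloc, so it is noticeably slower than the vectorised methods on long series:

```python
import numpy as np
import pandas as pd
from deltafq.indicators import TrendIndicators

rng = np.random.default_rng(2)
close = pd.Series(200 + rng.normal(0, 2, 250).cumsum())
high = close + rng.uniform(0.5, 2.0, 250)
low = close - rng.uniform(0.5, 2.0, 250)

trend = TrendIndicators()
trend.initialize()

adx = trend.adx(high, low, close)            # adx, di_plus, di_minus
sar = trend.parabolic_sar(high, low, close)  # per-bar Python loop, simplified SAR
cloud = trend.ichimoku(high, low, close)     # conversion/base lines and spans
print(adx.dropna().tail(2))
```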