quantvn 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of quantvn might be problematic.

quantvn/__init__.py ADDED
@@ -0,0 +1,2 @@
+ from quantvn import *
+ from .vn.data.utils import client
@@ -0,0 +1 @@
+ from quantvn.crypto import data
@@ -0,0 +1,31 @@
+
+ from __future__ import annotations
+
+ # Public constants/helpers
+ from .const import *  # noqa: F401,F403
+
+ # Core helpers (keep only TA & request)
+ from .core import (
+     add_all_ta_features,
+     send_request,
+ )
+
+ # Crypto (read from S3)
+ from .utils import get_crypto
+
+ # Backward compatibility: keep get_hist from stocks if it exists (optional)
+ try:
+     from .stocks import get_hist as get_stock_hist  # type: ignore
+     get_hist = get_stock_hist  # default alias
+ except Exception:
+     # The stocks module is not present in this package; skip it
+     pass
+
+ __all__ = [
+     # helpers
+     "send_request",
+     # TA features
+     "add_all_ta_features",
+     # crypto
+     "get_crypto",
+ ]
@@ -0,0 +1,26 @@
+
+ TRADING_URL = "https://trading.vietcap.com.vn/api/"
+ GRAPHQL_URL = "https://trading.vietcap.com.vn/data-mt/graphql"
+ CHART_URL = "chart/OHLCChart/gap-chart"
+ INTRADAY_URL = "market-watch"
+
+ INTERVAL_MAP = {
+     '1m':'ONE_MINUTE','5m':'ONE_MINUTE','15m':'ONE_MINUTE','30m':'ONE_MINUTE',
+     '1H':'ONE_HOUR','1D':'ONE_DAY','1W':'ONE_DAY','1M':'ONE_DAY'
+ }
+
+ OHLC_COLUMNS = ["t","o","h","l","c","v"]
+ OHLC_RENAME = {"t":"time","o":"open","h":"high","l":"low","c":"close","v":"volume"}
+
+ INTRADAY_MAP = {'truncTime':'time','matchPrice':'price','matchVol':'volume','matchType':'match_type','id':'id'}
+
+ PRICE_DEPTH_URL = f"{TRADING_URL}{INTRADAY_URL}/AccumulatedPriceStepVol/getSymbolData"
+
+ PRICE_INFO_MAP = {
+     'ev':'ev','ticker':'symbol',
+     'open_price':'open','ceiling_price':'ceiling','floor_price':'floor','reference_price':'ref_price',
+     'highest_price':'high','lowest_price':'low',
+     'price_change':'price_change','percent_price_change':'price_change_pct',
+     'foreign_total_volume':'foreign_volume','foreign_total_room':'foreign_room','foreign_holding_room':'foreign_holding_room',
+     'average_match_volume2_week':'avg_match_volume_2w',
+ }
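These constants are endpoint fragments and column/interval mappings. As a minimal sketch (not part of the package), here is how a caller might apply OHLC_COLUMNS and OHLC_RENAME to a raw chart payload; the sample rows and values are invented for illustration, and the constants are redefined locally because the diff does not name the module to import them from.

import pandas as pd

# Mirrors the constants above (redefined here only because the source module path is not shown).
OHLC_COLUMNS = ["t", "o", "h", "l", "c", "v"]
OHLC_RENAME = {"t": "time", "o": "open", "h": "high", "l": "low", "c": "close", "v": "volume"}
INTERVAL_MAP = {'1m': 'ONE_MINUTE', '5m': 'ONE_MINUTE', '15m': 'ONE_MINUTE', '30m': 'ONE_MINUTE',
                '1H': 'ONE_HOUR', '1D': 'ONE_DAY', '1W': 'ONE_DAY', '1M': 'ONE_DAY'}

# Invented sample rows in the raw wire format.
raw = [
    {"t": 1700000000, "o": 100.0, "h": 101.5, "l": 99.5, "c": 101.0, "v": 12000},
    {"t": 1700000060, "o": 101.0, "h": 102.0, "l": 100.5, "c": 101.8, "v": 9500},
]

df = pd.DataFrame(raw, columns=OHLC_COLUMNS).rename(columns=OHLC_RENAME)
print(df.columns.tolist())  # ['time', 'open', 'high', 'low', 'close', 'volume']

# Coarser intervals map to the base resolution (presumably resampled by the caller).
resolution = INTERVAL_MAP["15m"]  # 'ONE_MINUTE'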
@@ -0,0 +1,82 @@
+
+ import time, random, requests
+ import pandas as pd
+
+ DEFAULT_TIMEOUT = 25
+
+ def _ua(source="vietmarket"):
+     return {
+         "User-Agent": f"{source}/1.0 (+https://example.local)",
+         "Accept": "application/json, text/plain, */*",
+         "Origin": "https://example.local",
+         "Referer": "https://example.local/",
+     }
+
+ def send_request(url, method="GET", headers=None, params=None, payload=None,
+                  retries=2, backoff=(0.6, 1.2), timeout=DEFAULT_TIMEOUT):
+     h = _ua()
+     if headers:
+         h.update(headers)
+     for attempt in range(retries + 1):
+         try:
+             if method.upper() == "GET":
+                 r = requests.get(url, headers=h, params=params, timeout=timeout)
+             else:
+                 r = requests.post(url, headers=h, params=params, json=payload, timeout=timeout)
+             r.raise_for_status()
+             if "application/json" in r.headers.get("Content-Type", ""):
+                 return r.json()
+             return r.text
+         except Exception:
+             if attempt >= retries:
+                 raise
+             time.sleep(random.uniform(*backoff))
+
+ def add_all_ta_features(
+     df,
+     open: str = "Open",
+     high: str = "High",
+     low: str = "Low",
+     close: str = "Close",
+     volume: str = "Volume",
+     fillna: bool = True,
+ ):
+     """
+     Add every technical indicator from the `ta` library to the DataFrame.
+     The signature mirrors `ta.add_all_ta_features` so it can be used identically.
+
+     Parameters
+     ----------
+     df : pandas.DataFrame
+         DataFrame that must contain the corresponding price/volume columns.
+     open, high, low, close, volume : str
+         Column names in df.
+     fillna : bool
+         If True, NaN values are filled using the `ta` library defaults.
+
+     Returns
+     -------
+     pandas.DataFrame
+         The input DataFrame plus the TA feature columns.
+     """
+     try:
+         from ta import add_all_ta_features as _ta_add_all_ta_features
+         from ta.utils import dropna as _ta_dropna
+     except Exception as e:
+         raise ImportError(
+             "The 'ta' library is missing. Install it with: pip install ta"
+         ) from e
+
+     # Drop NaN rows following the 'ta' convention
+     _df = _ta_dropna(df.copy())
+
+     # Call the original function directly
+     return _ta_add_all_ta_features(
+         _df,
+         open=open,
+         high=high,
+         low=low,
+         close=close,
+         volume=volume,
+         fillna=fillna,
+     )
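A hedged usage sketch for the two helpers above. The import path is an assumption based on `from quantvn.crypto import data` earlier in this diff, the URL is a placeholder, and the OHLCV history is randomly generated; real data would need a much longer history for meaningful indicator values.

import numpy as np
import pandas as pd
from quantvn.crypto import data  # assumed: the data sub-package re-exports both helpers

# Placeholder endpoint; send_request returns parsed JSON when the response is JSON, text otherwise.
payload = data.send_request("https://example.com/api/ping")

# Invented random-walk OHLCV history (60 bars) purely for illustration.
rng = np.random.default_rng(0)
close = 100 + rng.normal(0, 1, 60).cumsum()
df = pd.DataFrame({
    "Open": close + rng.normal(0, 0.2, 60),
    "High": close + 1.0,
    "Low": close - 1.0,
    "Close": close,
    "Volume": rng.integers(5_000, 20_000, 60).astype(float),
})
enriched = data.add_all_ta_features(df)  # defaults match the "Open"/"High"/"Low"/"Close"/"Volume" names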
@@ -0,0 +1,22 @@
+
+ """
+ REPLACED: the derivatives module has been replaced by `get_crypto` (reads crypto data from S3).
+ Please switch to: from .crypto import get_crypto
+ """
+ from __future__ import annotations
+
+ from .utils import get_crypto as _get_crypto
+
+ __all__ = ["get_hist"]
+
+ def get_hist(symbol: str, frequency: str = "1D", *, days: int | None = None, **kwargs):
+     """
+     [Deprecated] Emulates the old API:
+     - Ignores the derivatives `frequency` parameter; uses `days` if provided.
+     - Delegates to crypto.get_crypto(...).
+     """
+     if days is None:
+         # Default: a simple frequency-to-days mapping
+         freq_map = {"1D": 365, "1H": 30, "5M": 7}
+         days = freq_map.get(str(frequency).upper(), None)
+     return _get_crypto(symbol, days=days, **kwargs)
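A brief illustration of how this deprecated shim maps `frequency` to a look-back window, assuming `get_hist` is imported from this module (the diff does not show its filename); the symbol is a placeholder.

# "1D" -> days=365, "1H" -> days=30, "5M" -> days=7; anything else leaves days=None.
df = get_hist("BTCUSDT", frequency="1H")        # roughly the last 30 days
df_custom = get_hist("BTCUSDT", "1D", days=90)  # an explicit days value overrides the mapping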
@@ -0,0 +1,93 @@
+ # quantvn/crypto/data/utils.py
+ from __future__ import annotations
+ from typing import Optional, Literal
+ import io
+ import pandas as pd
+ import boto3, requests
+ from botocore.exceptions import ClientError, NoCredentialsError
+ from botocore.config import Config
+ from botocore import UNSIGNED
+
+ __all__ = ["get_crypto"]
+
+ def get_crypto(
+     symbol: str,
+     days: Optional[int] = None,
+     *,
+     bucket: str = "qco-market",
+     prefix: str = "data-csv",
+     session: Optional[boto3.Session] = None,
+     auth: Literal["auto", "signed", "unsigned"] = "auto",
+ ) -> pd.DataFrame:
+     """
+     Read s3://{bucket}/{prefix}/{symbol}.csv and (optionally) filter to the last N days.
+     - auth="auto": try signed; if no credentials are available, fall back to unsigned, then to HTTP.
+     - auth="signed": signed only (the user must have credentials).
+     - auth="unsigned": always access anonymously (only for public objects).
+     """
+     key = f"{prefix.rstrip('/')}/{symbol}.csv" if prefix else f"{symbol}.csv"
+     session = session or boto3.Session()
+
+     def _post(df: pd.DataFrame) -> pd.DataFrame:
+         if "datetime" not in df.columns:
+             raise KeyError("The CSV must have a 'datetime' column.")
+         df["datetime"] = pd.to_datetime(df["datetime"], errors="coerce", utc=True)
+         if days is not None:
+             cutoff = pd.Timestamp.utcnow() - pd.Timedelta(days=days)
+             df = df[df["datetime"] >= cutoff].copy()
+         df.attrs["s3_key"] = key
+         return df
+
+     def _read_via_boto(client) -> pd.DataFrame:
+         obj = client.get_object(Bucket=bucket, Key=key)
+         return _post(pd.read_csv(obj["Body"]))
+
+     def _http_fallback(possible_region: Optional[str] = None) -> pd.DataFrame:
+         # S3 virtual-hosted-style URL; requests follows redirects if the region differs.
+         urls = [
+             f"https://{bucket}.s3.amazonaws.com/{key}",
+         ]
+         if possible_region:
+             urls.insert(0, f"https://{bucket}.s3.{possible_region}.amazonaws.com/{key}")
+         for url in urls:
+             r = requests.get(url, timeout=30)
+             if r.ok:
+                 return _post(pd.read_csv(io.BytesIO(r.content)))
+         raise PermissionError(
+             "AccessDenied when using unsigned/HTTP. Enable public-read on the prefix or provide credentials/a presigned URL."
+         )
+
+     # 1) Force unsigned mode from the start
+     if auth == "unsigned":
+         try:
+             s3u = session.client("s3", config=Config(signature_version=UNSIGNED))
+             return _read_via_boto(s3u)
+         except ClientError as e:
+             region = e.response.get("ResponseMetadata", {}).get("HTTPHeaders", {}).get("x-amz-bucket-region")
+             return _http_fallback(region)
+
+     # 2) signed or auto (prefer signed)
+     try:
+         s3 = session.client("s3")
+         return _read_via_boto(s3)
+     except NoCredentialsError:
+         if auth == "signed":
+             raise
+         # auto: fall back to unsigned
+         try:
+             s3u = session.client("s3", config=Config(signature_version=UNSIGNED))
+             return _read_via_boto(s3u)
+         except ClientError as e:
+             region = e.response.get("ResponseMetadata", {}).get("HTTPHeaders", {}).get("x-amz-bucket-region")
+             return _http_fallback(region)
+     except ClientError as e:
+         # Wrong region: retry signed with the real region; if that still fails, fall back to HTTP
+         region = e.response.get("ResponseMetadata", {}).get("HTTPHeaders", {}).get("x-amz-bucket-region")
+         if region:
+             try:
+                 s3r = session.client("s3", region_name=region)
+                 return _read_via_boto(s3r)
+             except Exception:
+                 pass
+         # Last resort: HTTP
+         return _http_fallback(region)
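A minimal usage sketch for `get_crypto`, using the module path from the header comment above. The symbol is a placeholder, and whether anonymous access succeeds depends entirely on the bucket's policy.

from quantvn.crypto.data.utils import get_crypto

# auto: try signed S3 access first, then anonymous, then plain HTTPS.
df = get_crypto("BTCUSDT", days=30)

# Force anonymous access (only sensible if the objects are public).
df_pub = get_crypto("BTCUSDT", days=7, auth="unsigned")

print(df.attrs.get("s3_key"))  # e.g. "data-csv/BTCUSDT.csv"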
@@ -0,0 +1,3 @@
+ from quantvn.metrics.single_asset import TradingBacktest
+ from quantvn.metrics.st import StockAlgorithm
+ __all__ = ["TradingBacktest", "StockAlgorithm"]
File without changes
@@ -0,0 +1,419 @@
+ import numpy as np
+ import pandas as pd
+
+
+ class TradingBacktest:
+     """
+     A class for backtesting derivatives trading strategies.
+
+     Parameters
+     ----------
+     df : pandas.DataFrame
+         Dataframe containing historical data with columns ['date', 'time', 'close', 'position'].
+     pnl_type : str, optional
+         Type of PNL calculation ('raw' or 'after_fees'), by default 'raw'.
+
+     Raises
+     ------
+     ValueError
+         If pnl_type is not 'raw' or 'after_fees'.
+     NotImplementedError
+         If pnl_type is 'after_fees'.
+     """
+
+     def __init__(self, df, pnl_type="raw"):
+         """
+         Initializes the TradingBacktest class.
+
+         Parameters
+         ----------
+         df : pd.DataFrame
+             Data containing trade details.
+         pnl_type : str, optional
+             Type of PNL calculation ('raw' or 'after_fees'), by default "raw".
+         """
+         if pnl_type not in ["raw", "after_fees"]:
+             raise ValueError("Invalid pnl_type. Choose 'raw' or 'after_fees'.")
+
+         if pnl_type == "after_fees":
+             raise NotImplementedError
+
+         self.df = df.copy()
+         self.pnl_type = pnl_type
+
+         # Check if DataFrame already has DateTimeIndex
+         if not isinstance(df.index, pd.DatetimeIndex):
+             self.df.index = pd.to_datetime(self.df["date"] + " " + self.df["time"])
+
+         self.df = self.df.sort_index()
+
+         # Calculate PNL
+         self.df["pnl_raw"] = self.df["close"].diff().shift(-1) * self.df["position"]
+         self.df["pnl_raw"] = self.df["pnl_raw"].fillna(0)
+
+         self.daily_pnl = self.compute_daily_pnl()
+
+     def compute_cumulative_pnl(self):
+         """
+         Calculate cumulative PNL based on selected pnl_type.
+
+         Returns
+         -------
+         pandas.Series
+             Cumulative PNL.
+         """
+         return self.df[f"pnl_{self.pnl_type}"].cumsum()
+
+     def compute_daily_pnl(self):
+         """
+         Calculate daily PNL based on selected pnl_type.
+
+         Returns
+         -------
+         pandas.Series
+             Daily PNL.
+         """
+         return self.df.groupby(self.df["date"])[f"pnl_{self.pnl_type}"].sum()
+
+     def estimate_minimum_capital(self):
+         """
+         Estimate the minimum capital required to run the strategy.
+
+         Returns
+         -------
+         float
+             Minimum capital required.
+         """
+         self.df["cumulative_pnl"] = self.compute_cumulative_pnl()
+         self.df["capital_required"] = (self.df["position"].abs() * self.df["close"]) - self.df["cumulative_pnl"]
+
+         return max(self.df["capital_required"].max(), 0)
+
+     def compute_pnl_percentage(self):
+         """
+         Calculate PNL percentage relative to minimum required capital.
+
+         Returns
+         -------
+         pandas.Series
+             Daily PNL as a fraction of the minimum required capital.
+         """
+         min_capital = self.estimate_minimum_capital()
+         return self.daily_pnl / min_capital if min_capital != 0 else np.nan  # avoid division by 0
+
+     def avg_loss(self):
+         """
+         Compute the average loss from daily PNL.
+
+         Returns
+         -------
+         float
+             Average loss.
+         """
+         losses = self.daily_pnl[self.daily_pnl < 0]
+         return losses.mean()
+
+     def avg_return(self):
+         """
+         Compute the average return from daily PNL.
+
+         Returns
+         -------
+         float
+             Average return.
+         """
+         return self.daily_pnl.mean()
+
+     def avg_win(self):
+         """
+         Compute the average win from daily PNL.
+
+         Returns
+         -------
+         float
+             Average win.
+         """
+         wins = self.daily_pnl[self.daily_pnl > 0]
+         return wins.mean()
+
+     def avg_loss_pct(self, initial_capital=1):
+         """
+         Compute the average loss (percentage) from daily PNL.
+
+         Returns
+         -------
+         float
+             Average loss.
+         """
+         losses = self.daily_pnl[self.daily_pnl < 0]
+         return losses.mean() / initial_capital
+
+     def avg_return_pct(self, initial_capital=1):
+         """
+         Compute the average return (percentage) from daily PNL.
+
+         Returns
+         -------
+         float
+             Average return.
+         """
+         return self.daily_pnl.mean() / initial_capital
+
+     def avg_win_pct(self, initial_capital=1):
+         """
+         Compute the average win (percentage) from daily PNL.
+
+         Returns
+         -------
+         float
+             Average win.
+         """
+         wins = self.daily_pnl[self.daily_pnl > 0]
+         return wins.mean() / initial_capital
+
+     def max_drawdown(self):
+         """
+         Compute the maximum drawdown.
+
+         Returns
+         -------
+         float
+             Maximum drawdown as a percentage of minimum capital.
+         """
+         cumulative_daily_pnl = self.daily_pnl.cumsum()
+         peak = cumulative_daily_pnl.cummax()
+         drawdown = cumulative_daily_pnl - peak
+         return drawdown.min() / self.estimate_minimum_capital()
+
+     def win_rate(self):
+         """
+         Compute the win rate.
+
+         Returns
+         -------
+         float
+             Win rate.
+         """
+         wins = (self.daily_pnl > 0).sum()
+         total = len(self.daily_pnl)
+         return wins / total if total > 0 else 0
+
+     def volatility(self):
+         """
+         Compute the standard deviation of daily PNL.
+
+         Returns
+         -------
+         float
+             Volatility.
+         """
+         return self.daily_pnl.std()
+
+     def sharpe(self):
+         """
+         Compute the Sharpe ratio.
+
+         Returns
+         -------
+         float
+             Sharpe ratio.
+         """
+         return self.avg_return() / self.volatility() * np.sqrt(252)
+
+     def sortino(self):
+         """
+         Compute the Sortino ratio.
+
+         Returns
+         -------
+         float
+             Sortino ratio.
+         """
+         downside_std = self.daily_pnl[self.daily_pnl < 0].std()
+         return self.avg_return() / downside_std * np.sqrt(252) if downside_std > 0 else np.nan
+
+     def calmar(self):
+         """
+         Compute the Calmar ratio.
+
+         Returns
+         -------
+         float
+             Calmar ratio.
+         """
+         return self.avg_return() / abs(self.max_drawdown()) * np.sqrt(252) if self.max_drawdown() != 0 else np.nan
+
+     def profit_factor(self):
+         """
+         Compute the profit factor.
+
+         Returns
+         -------
+         float
+             Profit factor.
+         """
+         total_gain = self.daily_pnl[self.daily_pnl > 0].sum()
+         total_loss = abs(self.daily_pnl[self.daily_pnl < 0].sum())
+         return total_gain / total_loss if total_loss != 0 else np.nan  # avoid division by 0
+
+     def risk_of_ruin(self, initial_capital=1):
+         """
+         Compute risk of ruin.
+
+         Returns
+         -------
+         float
+             Risk of ruin.
+         """
+         win_rate = self.win_rate()
+         loss_rate = 1 - win_rate
+         avg_loss_pct = self.avg_loss_pct(initial_capital)
+         return (loss_rate / win_rate) ** (1 / avg_loss_pct) if avg_loss_pct != 0 else np.nan
+
+     def value_at_risk(self, confidence_level=0.05):
+         """
+         Compute Value at Risk (VaR).
+
+         Parameters
+         ----------
+         confidence_level : float, optional
+             Confidence level for VaR, by default 0.05.
+
+         Returns
+         -------
+         float
+             Value at Risk (VaR).
+         """
+         return self.daily_pnl.quantile(confidence_level)
+
+     def apply_tp_sl(self, df, tp_percentage, sl_percentage):
+         if not isinstance(df.index, pd.DatetimeIndex):
+             raise ValueError("The DataFrame index must be a DatetimeIndex")
+
+         prices = df["close"]
+         positions = df["position"]
+         new_positions = positions.copy()
+
+         assert set(positions.unique()) == {-1, 0, 1}, "Positions must be -1 (Short), 0 (Neutral), 1 (Long) only"
+
+         # Tracking for each holding window
+         entry_price = None
+         entry_position = 0
+         profit_flag = False
+
+         for i in range(0, len(prices)):
+             position = positions.iloc[i]
+
+             # Neutral resets position
+             if position == 0:
+                 entry_position = 0
+                 profit_flag = False
+                 continue
+
+             # New position
+             if position != entry_position:
+                 entry_price = prices.iloc[i]
+                 entry_position = position
+                 profit_flag = False
+             # Hold position
+             else:
+                 current_price = prices.iloc[i]
+
+                 # PnL % of Long (1) and Short (-1)
+                 if position == 1:
+                     pnl_percentage = (current_price - entry_price) / entry_price * 100
+                 else:
+                     pnl_percentage = (entry_price - current_price) / entry_price * 100
+
+                 # Reach profit threshold
+                 if pnl_percentage >= tp_percentage:
+                     if not profit_flag:
+                         profit_flag = True  # hold for 1 more timestep
+                     else:
+                         new_positions.iloc[i] = 0
+                         entry_position = 0
+                         profit_flag = False
+
+                 # Reach loss threshold
+                 if pnl_percentage <= -sl_percentage:
+                     new_positions.iloc[i] = 0
+                     entry_position = 0
+
+         return new_positions
+
+     def apply_tp_sl_trailing(self, df, tp_percentage, sl_percentage):
+         if not isinstance(df.index, pd.DatetimeIndex):
+             raise ValueError("The DataFrame index must be a DatetimeIndex")
+
+         prices = df["close"]
+         positions = df["position"]
+         new_positions = positions.copy()
+
+         assert set(positions.unique()) == {-1, 0, 1}, "Positions must be -1 (Short), 0 (Neutral), 1 (Long) only"
+
+         # Tracking for trailing stop loss
+         max_price = None
+         min_price = None
+         trailing_sl = None
+
+         # Tracking for each holding window
+         entry_price = None
+         entry_position = 0
+         profit_flag = False
+
+         for i in range(0, len(prices)):
+             position = positions.iloc[i]
+
+             # Neutral resets position
+             if position == 0:
+                 entry_position = 0
+                 continue
+
+             # New position
+             if position != entry_position:
+                 entry_price = prices.iloc[i]
+                 entry_position = position
+                 profit_flag = False
+
+                 max_price = entry_price
+                 min_price = entry_price
+
+                 if position == 1:
+                     trailing_sl = entry_price * (1 - sl_percentage / 100)
+                 else:
+                     trailing_sl = entry_price * (1 + sl_percentage / 100)
+             # Hold position
+             else:
+                 current_price = prices.iloc[i]
+
+                 # PnL % of Long (1) and Short (-1)
+                 if position == 1:
+                     pnl_percentage = (current_price - entry_price) / entry_price * 100
+                 else:
+                     pnl_percentage = (entry_price - current_price) / entry_price * 100
+
+                 # Take profit threshold
+                 if pnl_percentage >= tp_percentage:
+                     if not profit_flag:
+                         profit_flag = True  # hold for 1 more timestep
+                     else:
+                         new_positions.iloc[i] = 0
+                         entry_position = 0
+                         profit_flag = False
+
+                 # Reach / update trailing stop loss for Long (1) and Short (-1)
+                 if position == 1:
+                     if current_price <= trailing_sl:
+                         new_positions.iloc[i] = 0
+                         entry_position = 0
+                     elif current_price > max_price:
+                         max_price = current_price
+                         trailing_sl = max_price * (1 - sl_percentage / 100)
+                 else:
+                     if current_price >= trailing_sl:
+                         new_positions.iloc[i] = 0
+                         entry_position = 0
+                     elif current_price < min_price:
+                         min_price = current_price
+                         trailing_sl = min_price * (1 + sl_percentage / 100)
+
+         return new_positions
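A short sketch of driving `TradingBacktest` end to end, using a tiny invented frame that follows the ['date', 'time', 'close', 'position'] layout documented in the class docstring; the import path comes from metrics/__init__.py above.

import pandas as pd
from quantvn.metrics.single_asset import TradingBacktest

# Invented sample data: long on the first day, short then flat on the second.
df = pd.DataFrame({
    "date":     ["2024-01-02", "2024-01-02", "2024-01-03", "2024-01-03"],
    "time":     ["09:00:00", "09:05:00", "09:00:00", "09:05:00"],
    "close":    [1200.0, 1202.5, 1201.0, 1198.5],
    "position": [1, 1, -1, 0],
})

bt = TradingBacktest(df, pnl_type="raw")
print(bt.daily_pnl)        # PNL summed per calendar day
print(bt.win_rate())       # fraction of profitable days
print(bt.max_drawdown())   # drawdown relative to the estimated minimum capital

# Overlay a 1% take-profit / 0.5% stop-loss on the positions (requires a DatetimeIndex,
# which __init__ already built on bt.df).
capped = bt.apply_tp_sl(bt.df, tp_percentage=1.0, sl_percentage=0.5)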