quantvn 0.1.0__tar.gz → 0.1.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {quantvn-0.1.0/quantvn.egg-info → quantvn-0.1.1}/PKG-INFO +2 -1
- quantvn-0.1.1/quantvn/crypto/data/__init__.py +7 -0
- quantvn-0.1.1/quantvn/crypto/data/derivatives.py +77 -0
- quantvn-0.1.1/quantvn/crypto/data/download.py +50 -0
- quantvn-0.1.1/quantvn/crypto/metrics/__init__.py +4 -0
- quantvn-0.1.1/quantvn/crypto/metrics/backtest.py +76 -0
- quantvn-0.1.1/quantvn/crypto/metrics/metrics.py +67 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn/vn/metrics/backtest.py +24 -12
- {quantvn-0.1.0 → quantvn-0.1.1/quantvn.egg-info}/PKG-INFO +2 -1
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn.egg-info/SOURCES.txt +4 -3
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn.egg-info/requires.txt +1 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/setup.py +3 -2
- quantvn-0.1.0/quantvn/crypto/data/__init__.py +0 -31
- quantvn-0.1.0/quantvn/crypto/data/const.py +0 -26
- quantvn-0.1.0/quantvn/crypto/data/core.py +0 -82
- quantvn-0.1.0/quantvn/crypto/data/derivatives.py +0 -22
- quantvn-0.1.0/quantvn/crypto/data/utils.py +0 -93
- {quantvn-0.1.0 → quantvn-0.1.1}/LICENSE +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/MANIFEST.in +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/README.md +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn/__init__.py +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn/crypto/__init__.py +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn/metrics/__init__.py +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn/metrics/portfolio.py +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn/metrics/single_asset.py +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn/metrics/st.py +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn/paper/__init__.py +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn/paper/portfolio.py +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn/paper/single_asset.py +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn/vn/__init__.py +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn/vn/data/__init__.py +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn/vn/data/const.py +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn/vn/data/core.py +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn/vn/data/derivatives.py +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn/vn/data/stocks.py +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn/vn/data/utils.py +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn/vn/metrics/__init__.py +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn/vn/metrics/metrics.py +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn.egg-info/dependency_links.txt +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/quantvn.egg-info/top_level.txt +0 -0
- {quantvn-0.1.0 → quantvn-0.1.1}/setup.cfg +0 -0

{quantvn-0.1.0/quantvn.egg-info → quantvn-0.1.1}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: quantvn
-Version: 0.1.0
+Version: 0.1.1
 Summary: QuantVN API Library for Financial Data Analysis
 Author: quantvn
 Classifier: Development Status :: 3 - Alpha
@@ -16,6 +16,7 @@ License-File: LICENSE
 Requires-Dist: requests
 Requires-Dist: pandas
 Requires-Dist: matplotlib
+Requires-Dist: tqdm
 Dynamic: author
 Dynamic: classifier
 Dynamic: description-content-type

quantvn-0.1.1/quantvn/crypto/data/derivatives.py
@@ -0,0 +1,77 @@
+from __future__ import annotations
+from datetime import datetime, timezone, timedelta
+from pathlib import Path
+import pandas as pd
+from tqdm import tqdm
+
+from quantvn.crypto.data.download import download_monthly, extract_csv
+
+__all__ = ["get_hist"]
+
+def get_hist(
+    symbol: str,
+    interval: str = "1m",
+    cache_dir: str | Path | None = None,
+) -> pd.DataFrame:
+    """
+    Fetch historical monthly data from Binance.
+
+    start/end: str in "YYYY-MM-DD HH:MM:SS" format or datetime, in Vietnam time (UTC+7)
+    Returns DataFrame in format:
+    ["Date","time","Open","High","Low","Close","volume"]
+    """
+    cache_dir = Path(cache_dir or Path.home() / ".cache/quantvn")
+    VN_TZ = timezone(timedelta(hours=7))
+
+    # default start/end
+    start_dt = datetime(2019, 7, 1, tzinfo=VN_TZ)
+    end_dt = datetime(2022, 12, 31, 23, 59, 59, tzinfo=VN_TZ)
+
+    # parse if string
+    if isinstance(start_dt, str):
+        start_dt = datetime.strptime(start_dt, "%Y-%m-%d %H:%M:%S").replace(tzinfo=VN_TZ)
+    if isinstance(end_dt, str):
+        end_dt = datetime.strptime(end_dt, "%Y-%m-%d %H:%M:%S").replace(tzinfo=VN_TZ)
+
+    # generate list of months
+    months = []
+    dt = start_dt.replace(day=1)
+    while dt <= end_dt:
+        months.append(dt.strftime("%Y-%m"))
+        if dt.month == 12:
+            dt = dt.replace(year=dt.year + 1, month=1)
+        else:
+            dt = dt.replace(month=dt.month + 1)
+
+    all_dfs = []
+    for m in tqdm(months, desc=f"Downloading {symbol}", disable=True):
+        try:
+            zip_path = download_monthly(symbol, interval, m, cache_dir)
+            df = extract_csv(zip_path)
+            all_dfs.append(df)
+        except Exception as e:
+            print(f"Skip {symbol} {interval} {m}: {e}")
+
+    if not all_dfs:
+        return pd.DataFrame(columns=["Date","time","Open","High","Low","Close","volume"])
+
+    # concat all months
+    df_all = pd.concat(all_dfs, ignore_index=True)
+    df_all = df_all.drop_duplicates(subset=["t"]).sort_values("t").reset_index(drop=True)
+
+    # convert timestamp t -> Asia/Ho_Chi_Minh
+    df_all["t"] = pd.to_datetime(df_all["t"], unit="ms", errors="coerce", utc=True)
+    df_all = df_all.dropna(subset=["t"])
+    df_all["t"] = df_all["t"].dt.tz_convert(VN_TZ)
+
+    # filter by start/end datetime
+    df_all = df_all[(df_all["t"] >= start_dt) & (df_all["t"] <= end_dt)]
+
+    # rename Volume -> volume
+    df_all.rename(columns={"Volume": "volume"}, inplace=True)
+
+    # create the Date and time columns
+    df_all["Date"] = df_all["t"].dt.strftime("%Y-%m-%d")
+    df_all["time"] = df_all["t"].dt.strftime("%H:%M:%S")
+
+    return df_all[["Date","time","Open","High","Low","Close","volume"]]
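
A minimal usage sketch for the new loader (the symbol and interval are illustrative; the import path follows the module added above):

# Minimal usage sketch for the new Binance-backed loader (illustrative values).
from quantvn.crypto.data.derivatives import get_hist

# Downloads the monthly Binance spot klines for BTCUSDT at 1-minute resolution,
# caches the ZIP archives under ~/.cache/quantvn, and returns a DataFrame with
# columns ["Date", "time", "Open", "High", "Low", "Close", "volume"].
df = get_hist("BTCUSDT", interval="1m")
print(df.head())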

quantvn-0.1.1/quantvn/crypto/data/download.py
@@ -0,0 +1,50 @@
+import zipfile
+from pathlib import Path
+
+import pandas as pd
+import requests
+
+BASE_URL = "https://data.binance.vision/data/spot/monthly/klines/"
+
+def download_monthly(symbol: str, interval: str, month: str, cache_dir: Path) -> Path:
+    """
+    Download a monthly ZIP file for any symbol & interval, return local path.
+    """
+    cache_dir = cache_dir / symbol / interval
+    cache_dir.mkdir(parents=True, exist_ok=True)
+
+    zip_name = f"{symbol}-{interval}-{month}.zip"
+    zip_path = cache_dir / zip_name
+    if zip_path.exists():
+        return zip_path
+
+    url = f"{BASE_URL}{symbol}/{interval}/{zip_name}"
+    resp = requests.get(url, stream=True, timeout=60)
+    if resp.status_code != 200:
+        raise RuntimeError(f"File not found: {url}")
+
+    with open(zip_path, "wb") as f:
+        for chunk in resp.iter_content(1024*1024):
+            f.write(chunk)
+
+    return zip_path
+
+def extract_csv(zip_path: Path) -> pd.DataFrame:
+    """
+    Extract CSV from ZIP and return DataFrame with columns:
+    t, Open, High, Low, Close, Volume
+    """
+    with zipfile.ZipFile(zip_path, "r") as zf:
+        csv_name = zf.namelist()[0]
+        with zf.open(csv_name) as f:
+            df = pd.read_csv(f, header=None)
+
+    df = df.iloc[:, :6]  # keep first 6 cols
+    df.columns = ["t", "Open", "High", "Low", "Close", "Volume"]
+
+    # ensure t is int64 safe
+    df["t"] = pd.to_numeric(df["t"], errors="coerce")
+    df = df.dropna(subset=["t"])
+    df["t"] = df["t"].astype("int64")
+
+    return df
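
The two helpers can also be used on their own, e.g. to inspect a single monthly archive; a short sketch with illustrative values:

# Fetch and parse one monthly archive (illustrative symbol/month).
from pathlib import Path
from quantvn.crypto.data.download import download_monthly, extract_csv

cache = Path.home() / ".cache" / "quantvn"
zip_path = download_monthly("BTCUSDT", "1m", "2021-01", cache)
df = extract_csv(zip_path)           # columns: t, Open, High, Low, Close, Volume
print(df["t"].min(), df["t"].max())  # epoch-millisecond timestamps, as get_hist expects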

quantvn-0.1.1/quantvn/crypto/metrics/backtest.py
@@ -0,0 +1,76 @@
+import numpy as np
+import pandas as pd
+
+
+class Backtest_Crypto:
+    def __init__(self, df, pnl_type="after_fees"):
+        if pnl_type not in ["raw", "after_fees"]:
+            raise ValueError("Invalid pnl_type. Choose 'raw' or 'after_fees'.")
+
+        self.df = df.copy()
+        self.pnl_type = pnl_type
+        self.df["datetime"] = pd.to_datetime(self.df["Date"] + " " + self.df["time"])
+        self.df.set_index("datetime", inplace=True)
+        self.df.sort_index(inplace=True)
+
+        # Calculate raw PNL
+        self.df["pnl_raw"] = self.df["Close"].diff().shift(-1) * self.df["position"]
+        self.df["pnl_raw"].fillna(0, inplace=True)
+
+        # Calculate PNL after fees
+        transaction_fee = 2700 / 100000  # VND per contract
+        overnight_fee = 2550 / 100000  # VND per contract per day if held overnight
+
+        self.df["transaction_fee"] = self.df["position"].diff().abs() * transaction_fee
+
+        # Identify overnight holdings
+        self.df["date"] = self.df.index.date
+        self.df["overnight"] = (self.df["position"] > 0) & (
+            self.df["date"] != self.df["date"].shift()
+        )
+        self.df["overnight_fee"] = self.df["overnight"] * overnight_fee
+
+        self.df["total_fee"] = self.df["transaction_fee"].fillna(0) + self.df[
+            "overnight_fee"
+        ].fillna(0)
+        self.df["pnl_after_fees"] = self.df["pnl_raw"] - self.df["total_fee"]
+
+    def PNL(self):
+        """Calculate cumulative PNL based on selected pnl_type."""
+        return self.df[f"pnl_{self.pnl_type}"].cumsum()
+
+    def daily_PNL(self):
+        """Calculate daily PNL based on selected pnl_type."""
+        daily_pnl = (
+            self.df.groupby(self.df.index.date)[f"pnl_{self.pnl_type}"].sum().cumsum()
+        )
+        return daily_pnl
+
+    def daily_PNL_custom(self):
+        """Calculate daily PNL based on selected pnl_type."""
+        daily_pnl = (
+            self.df.groupby(self.df.index.date)[f"pnl_{self.pnl_type}"].sum().cumsum()
+        )
+
+        # Convert to a {date: rounded pnl} dictionary
+        result = {str(date): round(pnl, 2) for date, pnl in daily_pnl.items()}
+
+        return result
+
+    def estimate_minimum_capital(self):
+        """Estimate the minimum capital required to run the strategy based on selected PNL type."""
+        self.df["cumulative_pnl"] = (
+            self.df[f"pnl_{self.pnl_type}"].cumsum().shift().fillna(0)
+        )
+        self.df["capital_required"] = (
+            self.df["position"].abs() * self.df["Close"]
+        ) - self.df["cumulative_pnl"]
+
+        return max(self.df["capital_required"].max(), 0)
+
+    def PNL_percentage(self):
+        """Calculate PNL percentage by dividing daily_PNL by estimate_minimum_capital."""
+        min_capital = self.estimate_minimum_capital()
+        if min_capital == 0:
+            return np.nan  # Avoid division by zero
+        return round(self.daily_PNL() / min_capital, 2)
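
A sketch of feeding get_hist output into the new Backtest_Crypto (the constant position column is a toy placeholder, not a strategy):

# Wiring get_hist output into Backtest_Crypto (toy position, purely illustrative).
from quantvn.crypto.data.derivatives import get_hist
from quantvn.crypto.metrics.backtest import Backtest_Crypto

df = get_hist("BTCUSDT", interval="1m")
df["position"] = 1  # required column: position held on each bar

bt = Backtest_Crypto(df, pnl_type="after_fees")
print(bt.daily_PNL().tail())          # cumulative PNL per day
print(bt.estimate_minimum_capital())  # rough capital requirement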

quantvn-0.1.1/quantvn/crypto/metrics/metrics.py
@@ -0,0 +1,67 @@
+import numpy as np
+
+
+class Metrics:
+    def __init__(self, backtest):
+        self.backtest = backtest
+        self.daily_pnl = backtest.daily_PNL().diff().dropna()
+
+    def avg_loss(self):
+        losses = self.daily_pnl[self.daily_pnl < 0]
+        return losses.mean()
+
+    def avg_return(self):
+        return self.daily_pnl.mean()
+
+    def avg_win(self):
+        wins = self.daily_pnl[self.daily_pnl > 0]
+        return wins.mean()
+
+    def max_drawdown(self):
+        cumulative = self.daily_pnl.cumsum()
+        peak = cumulative.cummax()
+        drawdown = cumulative - peak
+        return drawdown.min() / self.backtest.estimate_minimum_capital()
+
+    def win_rate(self):
+        wins = (self.daily_pnl > 0).sum()
+        total = len(self.daily_pnl)
+        return wins / total if total > 0 else 0
+
+    def volatility(self):
+        return self.daily_pnl.std()
+
+    def sharpe(self, risk_free_rate=0.0):
+        return (self.avg_return() - risk_free_rate) / self.volatility() * np.sqrt(365)
+
+    def sortino(self):
+        downside_std = self.daily_pnl[self.daily_pnl < 0].std()
+        return (
+            np.sqrt(252) * self.avg_return() / downside_std
+            if downside_std > 0
+            else np.nan
+        )
+
+    def calmar(self):
+        return (
+            np.sqrt(252) * self.avg_return() / abs(self.max_drawdown())
+            if self.max_drawdown() != 0
+            else np.nan
+        )
+
+    def profit_factor(self):
+        total_gain = self.daily_pnl[self.daily_pnl > 0].sum()
+        total_loss = abs(self.daily_pnl[self.daily_pnl < 0].sum())
+        return total_gain / total_loss if total_loss != 0 else np.nan
+
+    def risk_of_ruin(self):
+        win_rate = self.win_rate()
+        loss_rate = 1 - win_rate
+        return (
+            (loss_rate / win_rate) ** (1 / self.avg_loss())
+            if self.avg_loss() != 0
+            else np.nan
+        )
+
+    def value_at_risk(self, confidence_level=0.05):
+        return self.daily_pnl.quantile(confidence_level)
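
Continuing that sketch, Metrics wraps any backtest object that exposes daily_PNL() and estimate_minimum_capital():

# Computing summary statistics from the Backtest_Crypto instance above (illustrative).
from quantvn.crypto.metrics.metrics import Metrics

m = Metrics(bt)
print("win rate     :", m.win_rate())
print("sharpe       :", m.sharpe())         # annualised with sqrt(365)
print("max drawdown :", m.max_drawdown())   # scaled by the estimated minimum capital
print("VaR (5%)     :", m.value_at_risk(0.05))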

{quantvn-0.1.0 → quantvn-0.1.1}/quantvn/vn/metrics/backtest.py
@@ -1,11 +1,9 @@
 # ===== Backtest_Stock & helpers (migrated from your “second block”) =====
-import logging
-from abc import abstractmethod
-from typing import TypedDict, List, Dict, Union
 import matplotlib.pyplot as plt
 import numpy as np
 import pandas as pd

+
 class Backtest_Derivates:
     """
     A class for backtesting derivatives trading strategies.
@@ -131,7 +129,7 @@ class Backtest_Derivates:
         float
             Average pos enter per day
         """
-        return abs(self.df[
+        return abs(self.df["position"].diff().dropna()).sum() / len(self.daily_PNL())


 class Backtest_Stock:
@@ -143,7 +141,9 @@ class Backtest_Stock:
     'position' = desired number of shares (negative values are clipped to 0).
     """

-    def __init__(
+    def __init__(
+        self, df: pd.DataFrame, pnl_type: str = "after_fees", min_hold_days: int = 3
+    ):
         if pnl_type not in ["raw", "after_fees"]:
             raise ValueError("Invalid pnl_type. Choose 'raw' or 'after_fees'.")

@@ -152,7 +152,10 @@ class Backtest_Stock:

         # Normalize the timestamps & index
         self.df = df.copy()
-        self.df["datetime"] = pd.to_datetime(
+        self.df["datetime"] = pd.to_datetime(
+            self.df["Date"].astype(str) + " " + self.df["time"].astype(str),
+            errors="coerce",
+        )
         self.df = self.df.dropna(subset=["datetime"])
         self.df.set_index("datetime", inplace=True)
         self.df.sort_index(inplace=True)
@@ -160,7 +163,12 @@ class Backtest_Stock:
         # Long-only intent
         self.df["Close"] = pd.to_numeric(self.df["Close"], errors="coerce")
         self.df = self.df.dropna(subset=["Close"])
-        self.df["position_intent"] =
+        self.df["position_intent"] = (
+            pd.to_numeric(self.df["position"], errors="coerce")
+            .fillna(0)
+            .clip(lower=0)
+            .astype(float)
+        )

         # Build the effective position, honoring min_hold in NUMBER OF SESSIONS
         eff_pos, trade_qty = self._build_effective_position_with_min_hold(
@@ -173,11 +181,15 @@ class Backtest_Stock:
         self.df["trade_qty"] = trade_qty

         # PnL: hold the position from bar t -> t+1
-        self.df["pnl_raw"] =
+        self.df["pnl_raw"] = (
+            self.df["Close"].diff().shift(-1).fillna(0) * self.df["effective_position"]
+        )

         # Transaction fee: 0.1% of notional per fill
         fee_rate = 0.001
-        notional_traded =
+        notional_traded = (
+            np.abs(self.df["trade_qty"].to_numpy()) * self.df["Close"].to_numpy()
+        )
         self.df["transaction_fee"] = notional_traded * fee_rate

         self.df["pnl_after_fees"] = self.df["pnl_raw"] - self.df["transaction_fee"]
@@ -275,7 +287,9 @@ class Backtest_Stock:
     def estimate_minimum_capital(self) -> float:
         # Rough estimate of the minimum capital needed: held notional minus cumulative PnL at each point
         cum_pnl = self.df[f"pnl_{self.pnl_type}"].cumsum().shift().fillna(0.0)
-        capital_required = (
+        capital_required = (
+            self.df["effective_position"].abs() * self.df["Close"]
+        ) - cum_pnl
         return float(max(capital_required.max(), 0.0))

     def PNL_percentage(self) -> pd.Series:
@@ -319,5 +333,3 @@ class Backtest_Stock:
         plt.grid(True, alpha=0.3)
         plt.tight_layout()
         plt.show()
-
-
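
For context, a minimal sketch of calling the reformatted Backtest_Stock constructor (the tiny DataFrame is illustrative; a real input needs Date, time, Close and position columns and a longer history than shown here):

# Illustrative call of the reformatted Backtest_Stock constructor.
import pandas as pd
from quantvn.vn.metrics.backtest import Backtest_Stock

df = pd.DataFrame({
    "Date": ["2024-01-02", "2024-01-03", "2024-01-04"],
    "time": ["09:15:00"] * 3,
    "Close": [100.0, 101.5, 99.0],
    "position": [10, 10, 0],  # desired share quantity; negatives are clipped to 0
})

bt_stock = Backtest_Stock(df, pnl_type="after_fees", min_hold_days=3)
print(bt_stock.estimate_minimum_capital())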

{quantvn-0.1.0 → quantvn-0.1.1/quantvn.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: quantvn
-Version: 0.1.0
+Version: 0.1.1
 Summary: QuantVN API Library for Financial Data Analysis
 Author: quantvn
 Classifier: Development Status :: 3 - Alpha
@@ -16,6 +16,7 @@ License-File: LICENSE
 Requires-Dist: requests
 Requires-Dist: pandas
 Requires-Dist: matplotlib
+Requires-Dist: tqdm
 Dynamic: author
 Dynamic: classifier
 Dynamic: description-content-type

{quantvn-0.1.0 → quantvn-0.1.1}/quantvn.egg-info/SOURCES.txt
@@ -10,10 +10,11 @@ quantvn.egg-info/requires.txt
 quantvn.egg-info/top_level.txt
 quantvn/crypto/__init__.py
 quantvn/crypto/data/__init__.py
-quantvn/crypto/data/const.py
-quantvn/crypto/data/core.py
 quantvn/crypto/data/derivatives.py
-quantvn/crypto/data/utils.py
+quantvn/crypto/data/download.py
+quantvn/crypto/metrics/__init__.py
+quantvn/crypto/metrics/backtest.py
+quantvn/crypto/metrics/metrics.py
 quantvn/metrics/__init__.py
 quantvn/metrics/portfolio.py
 quantvn/metrics/single_asset.py

{quantvn-0.1.0 → quantvn-0.1.1}/setup.py
@@ -2,12 +2,13 @@ from setuptools import find_packages, setup

 setup(
     name="quantvn",
-    version="0.1.0",
+    version="0.1.1",
     packages=find_packages(),
     install_requires=[
         "requests",
         "pandas",
-        "matplotlib"
+        "matplotlib",
+        "tqdm",
     ],
     author="quantvn",
     description="QuantVN API Library for Financial Data Analysis",

quantvn-0.1.0/quantvn/crypto/data/__init__.py
@@ -1,31 +0,0 @@
-
-from __future__ import annotations
-
-# Public constants/helpers
-from .const import *  # noqa: F401,F403
-
-# Core helpers (keep only TA & request)
-from .core import (
-    add_all_ta_features,
-    send_request,
-)
-
-# Crypto (read from S3)
-from .utils import get_crypto
-
-# Backward-compatibility: keep stocks' get_hist if it exists (optional)
-try:
-    from .stocks import get_hist as get_stock_hist  # type: ignore
-    get_hist = get_stock_hist  # default alias
-except Exception:
-    # No stocks module in this package; skip
-    pass
-
-__all__ = [
-    # helpers
-    "send_request",
-    # TA features
-    "add_all_ta_features",
-    # crypto
-    "get_crypto",
-]

quantvn-0.1.0/quantvn/crypto/data/const.py
@@ -1,26 +0,0 @@
-
-TRADING_URL = "https://trading.vietcap.com.vn/api/"
-GRAPHQL_URL = "https://trading.vietcap.com.vn/data-mt/graphql"
-CHART_URL = "chart/OHLCChart/gap-chart"
-INTRADAY_URL = "market-watch"
-
-INTERVAL_MAP = {
-    '1m':'ONE_MINUTE','5m':'ONE_MINUTE','15m':'ONE_MINUTE','30m':'ONE_MINUTE',
-    '1H':'ONE_HOUR','1D':'ONE_DAY','1W':'ONE_DAY','1M':'ONE_DAY'
-}
-
-OHLC_COLUMNS = ["t","o","h","l","c","v"]
-OHLC_RENAME = {"t":"time","o":"open","h":"high","l":"low","c":"close","v":"volume"}
-
-INTRADAY_MAP = {'truncTime':'time','matchPrice':'price','matchVol':'volume','matchType':'match_type','id':'id'}
-
-PRICE_DEPTH_URL = f"{TRADING_URL}{INTRADAY_URL}/AccumulatedPriceStepVol/getSymbolData"
-
-PRICE_INFO_MAP = {
-    'ev':'ev','ticker':'symbol',
-    'open_price':'open','ceiling_price':'ceiling','floor_price':'floor','reference_price':'ref_price',
-    'highest_price':'high','lowest_price':'low',
-    'price_change':'price_change','percent_price_change':'price_change_pct',
-    'foreign_total_volume':'foreign_volume','foreign_total_room':'foreign_room','foreign_holding_room':'foreign_holding_room',
-    'average_match_volume2_week':'avg_match_volume_2w',
-}

quantvn-0.1.0/quantvn/crypto/data/core.py
@@ -1,82 +0,0 @@
-
-import time, random, requests
-import pandas as pd
-
-DEFAULT_TIMEOUT = 25
-
-def _ua(source="vietmarket"):
-    return {
-        "User-Agent": f"{source}/1.0 (+https://example.local)",
-        "Accept": "application/json, text/plain, */*",
-        "Origin": "https://example.local",
-        "Referer": "https://example.local/",
-    }
-
-def send_request(url, method="GET", headers=None, params=None, payload=None,
-                 retries=2, backoff=(0.6, 1.2), timeout=DEFAULT_TIMEOUT):
-    h = _ua()
-    if headers:
-        h.update(headers)
-    for attempt in range(retries + 1):
-        try:
-            if method.upper() == "GET":
-                r = requests.get(url, headers=h, params=params, timeout=timeout)
-            else:
-                r = requests.post(url, headers=h, params=params, json=payload, timeout=timeout)
-            r.raise_for_status()
-            if "application/json" in r.headers.get("Content-Type", ""):
-                return r.json()
-            return r.text
-        except Exception:
-            if attempt >= retries:
-                raise
-            time.sleep(random.uniform(*backoff))
-
-def add_all_ta_features(
-    df,
-    open: str = "Open",
-    high: str = "High",
-    low: str = "Low",
-    close: str = "Close",
-    volume: str = "Volume",
-    fillna: bool = True,
-):
-    """
-    Add all technical indicators from the `ta` library to the DataFrame.
-    Keeps the same signature as `ta.add_all_ta_features` so it can be used identically.
-
-    Parameters
-    ----------
-    df : pandas.DataFrame
-        The DataFrame must contain the corresponding price/volume columns.
-    open,high,low,close,volume : str
-        Column names in df.
-    fillna : bool
-        If True, fill NaN values using the `ta` library defaults.
-
-    Returns
-    -------
-    pandas.DataFrame
-        The input DataFrame plus the TA feature columns.
-    """
-    try:
-        from ta import add_all_ta_features as _ta_add_all_ta_features
-        from ta.utils import dropna as _ta_dropna
-    except Exception as e:
-        raise ImportError(
-            "Missing the 'ta' library. Install it with: pip install ta"
-        ) from e
-
-    # Clean NaN values following the 'ta' convention
-    _df = _ta_dropna(df.copy())
-
-    # Call the original function directly
-    return _ta_add_all_ta_features(
-        _df,
-        open=open,
-        high=high,
-        low=low,
-        close=close,
-        volume=volume,
-        fillna=fillna,
-    )
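
The removed add_all_ta_features above was a thin pass-through to the `ta` package, so affected code can call `ta` directly. A sketch, assuming `ta` is installed separately (pip install ta) and a DataFrame from the new get_hist:

# Direct use of the `ta` package, replacing the removed wrapper (assumes `pip install ta`).
from ta import add_all_ta_features
from ta.utils import dropna
from quantvn.crypto.data.derivatives import get_hist

df = dropna(get_hist("BTCUSDT", interval="1m"))  # get_hist returns a lowercase "volume" column
df = add_all_ta_features(
    df, open="Open", high="High", low="Low", close="Close", volume="volume", fillna=True
)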

quantvn-0.1.0/quantvn/crypto/data/derivatives.py
@@ -1,22 +0,0 @@
-
-"""
-REPLACED: The derivatives module has been replaced by the `get_crypto` function (reads crypto data from S3).
-Please switch to: from .crypto import get_crypto
-"""
-from __future__ import annotations
-
-from .utils import get_crypto as _get_crypto
-
-__all__ = ["get_hist"]
-
-def get_hist(symbol: str, frequency: str = "1D", *, days: int | None = None, **kwargs):
-    """
-    [Deprecated] Emulates the old API:
-    - Ignores the derivatives `frequency` parameter; uses `days` if provided.
-    - Delegates to crypto.get_crypto(...).
-    """
-    if days is None:
-        # Simple default mapping from frequency to days
-        freq_map = {"1D": 365, "1H": 30, "5M": 7}
-        days = freq_map.get(str(frequency).upper(), None)
-    return _get_crypto(symbol, days=days, **kwargs)
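
This deprecated shim and the S3-backed get_crypto it wrapped (removed in the next block) are both gone in 0.1.1. A rough migration sketch, assuming callers move to the Binance-archive loader added above:

# Migration sketch (illustrative): replace 0.1.0's S3 call
#   df = get_crypto("BTCUSDT", days=365)
# with the Binance monthly-archive loader shipped in 0.1.1:
from quantvn.crypto.data.derivatives import get_hist

df = get_hist("BTCUSDT", interval="1m")  # fixed 2019-07 .. 2022-12 window in 0.1.1
df = df.tail(365 * 24 * 60)              # crude stand-in for days=365 at 1-minute bars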

quantvn-0.1.0/quantvn/crypto/data/utils.py
@@ -1,93 +0,0 @@
-# quantvn/crypto/data/utils.py
-from __future__ import annotations
-from typing import Optional, Literal
-import io
-import pandas as pd
-import boto3, requests
-from botocore.exceptions import ClientError, NoCredentialsError
-from botocore.config import Config
-from botocore import UNSIGNED
-
-__all__ = ["get_crypto"]
-
-def get_crypto(
-    symbol: str,
-    days: Optional[int] = None,
-    *,
-    bucket: str = "qco-market",
-    prefix: str = "data-csv",
-    session: Optional[boto3.Session] = None,
-    auth: Literal["auto", "signed", "unsigned"] = "auto",
-) -> pd.DataFrame:
-    """
-    Read s3://{bucket}/{prefix}/{symbol}.csv and optionally filter to the last N days.
-    - auth="auto": try signed; if no credentials are available, fall back to unsigned, then to HTTP.
-    - auth="signed": signed only (the user must have credentials).
-    - auth="unsigned": always access anonymously (only for public objects).
-    """
-    key = f"{prefix.rstrip('/')}/{symbol}.csv" if prefix else f"{symbol}.csv"
-    session = session or boto3.Session()
-
-    def _post(df: pd.DataFrame) -> pd.DataFrame:
-        if "datetime" not in df.columns:
-            raise KeyError("The CSV must have a 'datetime' column.")
-        df["datetime"] = pd.to_datetime(df["datetime"], errors="coerce", utc=True)
-        if days is not None:
-            cutoff = pd.Timestamp.utcnow() - pd.Timedelta(days=days)
-            df = df[df["datetime"] >= cutoff].copy()
-        df.attrs["s3_key"] = key
-        return df
-
-    def _read_via_boto(client) -> pd.DataFrame:
-        obj = client.get_object(Bucket=bucket, Key=key)
-        return _post(pd.read_csv(obj["Body"]))
-
-    def _http_fallback(possible_region: Optional[str] = None) -> pd.DataFrame:
-        # S3 virtual-hosted-style URL; requests follows redirects automatically if the region differs.
-        urls = [
-            f"https://{bucket}.s3.amazonaws.com/{key}",
-        ]
-        if possible_region:
-            urls.insert(0, f"https://{bucket}.s3.{possible_region}.amazonaws.com/{key}")
-        for url in urls:
-            r = requests.get(url, timeout=30)
-            if r.ok:
-                return _post(pd.read_csv(io.BytesIO(r.content)))
-        raise PermissionError(
-            "AccessDenied when using unsigned/HTTP. Enable public-read on the prefix or provide credentials/a presigned URL."
-        )
-
-    # 1) Force unsigned mode from the start
-    if auth == "unsigned":
-        try:
-            s3u = session.client("s3", config=Config(signature_version=UNSIGNED))
-            return _read_via_boto(s3u)
-        except ClientError as e:
-            region = e.response.get("ResponseMetadata", {}).get("HTTPHeaders", {}).get("x-amz-bucket-region")
-            return _http_fallback(region)
-
-    # 2) signed or auto (prefer signed)
-    try:
-        s3 = session.client("s3")
-        return _read_via_boto(s3)
-    except NoCredentialsError:
-        if auth == "signed":
-            raise
-        # auto → fall back to unsigned
-        try:
-            s3u = session.client("s3", config=Config(signature_version=UNSIGNED))
-            return _read_via_boto(s3u)
-        except ClientError as e:
-            region = e.response.get("ResponseMetadata", {}).get("HTTPHeaders", {}).get("x-amz-bucket-region")
-            return _http_fallback(region)
-    except ClientError as e:
-        # Wrong region: retry signed with the real region; if it still fails, fall back to HTTP
-        region = e.response.get("ResponseMetadata", {}).get("HTTPHeaders", {}).get("x-amz-bucket-region")
-        if region:
-            try:
-                s3r = session.client("s3", region_name=region)
-                return _read_via_boto(s3r)
-            except Exception:
-                pass
-        # Finally: HTTP
-        return _http_fallback(region)