akshare-one 0.3.5.1__py3-none-any.whl → 0.3.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- akshare_one/eastmoney/client.py +80 -0
- akshare_one/eastmoney/utils.py +102 -0
- akshare_one/modules/financial/eastmoney_direct.py +183 -0
- akshare_one/modules/indicators/simple.py +384 -230
- {akshare_one-0.3.5.1.dist-info → akshare_one-0.3.7.dist-info}/METADATA +2 -2
- {akshare_one-0.3.5.1.dist-info → akshare_one-0.3.7.dist-info}/RECORD +9 -6
- {akshare_one-0.3.5.1.dist-info → akshare_one-0.3.7.dist-info}/WHEEL +0 -0
- {akshare_one-0.3.5.1.dist-info → akshare_one-0.3.7.dist-info}/licenses/LICENSE +0 -0
- {akshare_one-0.3.5.1.dist-info → akshare_one-0.3.7.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,80 @@
|
|
1
|
+
import requests
|
2
|
+
from typing import Dict, Any
|
3
|
+
|
4
|
+
|
5
|
+
class EastMoneyClient:
    """
    A client for interacting directly with EastMoney's data APIs.

    This class handles session management, symbol-to-secid conversion,
    and API calls for historical K-lines and real-time quotes.
    """

    # Seconds before an HTTP request is abandoned; without a timeout a
    # stalled connection would block the caller forever.
    _TIMEOUT = 15

    def __init__(self):
        # Reuse one Session for connection pooling across API calls.
        self.session = requests.Session()

    def _get_security_id(self, symbol: str) -> str:
        """
        Converts a stock symbol to EastMoney's internal secid format.
        e.g., '600519' -> '1.600519', '000001' -> '0.000001'
        """
        symbol = symbol.upper()
        if symbol.startswith("SZ"):
            market = "0"
            code = symbol[2:]
        elif symbol.startswith("SH"):
            market = "1"
            code = symbol[2:]
        elif symbol.startswith("HK"):
            market = "116"
            code = symbol[2:]
        elif len(symbol) == 6:
            # Bare 6-digit A-share code: infer the exchange from the prefix.
            if symbol.startswith(("000", "001", "002", "003", "300", "200")):
                market = "0"
            elif symbol.startswith(("600", "601", "603", "605", "688", "900")):
                market = "1"
            else:
                market = "0"  # Default to SZ for ambiguity
            code = symbol
        elif len(symbol) == 5:  # HK Market
            market = "116"
            code = symbol
        else:
            market = "0"
            code = symbol
        return f"{market}.{code}"

    def fetch_historical_klines(
        self, symbol: str, klt: str, fqt: str, start_date: str, end_date: str
    ) -> Dict[str, Any]:
        """
        Fetches historical K-line (candlestick) data.

        klt is EastMoney's period code, fqt its adjustment code; both are
        passed through verbatim. Returns the decoded JSON payload.
        """
        url = "https://push2his.eastmoney.com/api/qt/stock/kline/get"
        secid = self._get_security_id(symbol)
        params = {
            "fields1": "f1,f2,f3,f4,f5,f6",
            "fields2": "f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61",
            "klt": klt,
            "fqt": fqt,
            "secid": secid,
            "beg": start_date,
            "end": end_date,
        }
        response = self.session.get(url, params=params, timeout=self._TIMEOUT)
        response.raise_for_status()
        return response.json()

    def fetch_realtime_quote(self, symbol: str) -> Dict[str, Any]:
        """
        Fetches real-time quote data for a single stock.

        Returns the decoded JSON payload; raises requests.HTTPError on a
        non-2xx response.
        """
        url = "https://push2.eastmoney.com/api/qt/stock/get"
        secid = self._get_security_id(symbol)
        params = {
            "invt": "2",
            "fltt": "2",
            "fields": "f43,f57,f58,f169,f170,f46,f60,f44,f51,f168,f47,f164,f163,f116,f60,f45,f52,f50,f48,f167,f117,f71,f161,f49,f530",
            "secid": secid,
        }
        response = self.session.get(url, params=params, timeout=self._TIMEOUT)
        response.raise_for_status()
        return response.json()
|
@@ -0,0 +1,102 @@
|
|
1
|
+
import pandas as pd
|
2
|
+
from typing import Dict, Any
|
3
|
+
|
4
|
+
|
5
|
+
def parse_kline_data(data: Dict[str, Any]) -> pd.DataFrame:
    """Parse K-line rows from an API response into a pandas DataFrame.

    Each kline entry is a comma-separated string whose first six fields
    are date, open, close, high, low, volume. Rows with fewer than six
    fields are dropped. Timestamps are localized to Asia/Shanghai.
    """
    kline_rows = data.get("data", {}).get("klines", [])
    if not kline_rows:
        return pd.DataFrame(
            columns=["timestamp", "open", "high", "low", "close", "volume"]
        )

    parsed = [
        {
            "timestamp": fields[0],
            "open": float(fields[1]),
            "close": float(fields[2]),
            "high": float(fields[3]),
            "low": float(fields[4]),
            "volume": int(fields[5]),
        }
        for fields in (row.split(",") for row in kline_rows)
        if len(fields) >= 6
    ]

    frame = pd.DataFrame(parsed)
    if not frame.empty:
        frame["timestamp"] = pd.to_datetime(frame["timestamp"]).dt.tz_localize(
            "Asia/Shanghai"
        )
        frame = frame[["timestamp", "open", "high", "low", "close", "volume"]]
    return frame
|
36
|
+
|
37
|
+
|
38
|
+
def parse_realtime_data(data: Dict[str, Any]) -> pd.DataFrame:
    """Parse real-time quote data from an API response into a one-row DataFrame.

    Returns an empty DataFrame when the payload carries no "data" object.
    A "timestamp" column (now, Asia/Shanghai) is appended last.
    """
    quote = data.get("data")
    if not quote:
        return pd.DataFrame()

    # Human-readable column name -> EastMoney field code.
    field_map = {
        "symbol": "f57",
        "price": "f43",
        "change": "f169",
        "pct_change": "f170",
        "volume": "f47",
        "amount": "f48",
        "open": "f46",
        "high": "f44",
        "low": "f45",
        "prev_close": "f60",
    }
    row = {column: quote.get(code) for column, code in field_map.items()}
    frame = pd.DataFrame([row])
    frame["timestamp"] = pd.Timestamp.now(tz="Asia/Shanghai")
    return frame
|
64
|
+
|
65
|
+
|
66
|
+
def resample_historical_data(
    df: pd.DataFrame, interval: str, multiplier: int
) -> pd.DataFrame:
    """Downsample OHLCV rows to a coarser frequency.

    interval is one of "day"/"week"/"month"/"year"; multiplier scales it
    (e.g. interval="day", multiplier=2 -> 2-day bars). Input is returned
    unchanged when empty or when multiplier <= 1; an unknown interval
    yields the data unaggregated.
    """
    if df.empty or multiplier <= 1:
        return df

    indexed = df.set_index("timestamp")

    # Map interval names to pandas offset aliases; years are expressed as
    # 12-month multiples anchored at month start.
    rule = {
        "day": f"{multiplier}D",
        "week": f"{multiplier}W-MON",
        "month": f"{multiplier}MS",
        "year": f"{multiplier * 12}MS",
    }.get(interval)

    if not rule:
        return indexed.reset_index()

    aggregation = {
        "open": "first",
        "high": "max",
        "low": "min",
        "close": "last",
        "volume": "sum",
    }
    # dropna removes empty bins (calendar gaps with no trading rows).
    bars = indexed.resample(rule).agg(aggregation).dropna()
    return bars.reset_index()
|
@@ -0,0 +1,183 @@
|
|
1
|
+
import pandas as pd
|
2
|
+
import requests
|
3
|
+
|
4
|
+
from akshare_one.modules.cache import cache
|
5
|
+
from .base import FinancialDataProvider
|
6
|
+
|
7
|
+
|
8
|
+
class EastMoneyDirectFinancialReport(FinancialDataProvider):
    """Financial statement provider backed by EastMoney's datacenter API.

    Fetches balance sheet, income statement and cash flow tables for one
    security and merges selected, renamed columns into a single metrics
    frame keyed by report date.
    """

    # Seconds before a stalled HTTP request is abandoned.
    _TIMEOUT = 15

    # EastMoney column name -> normalized column name, per statement.
    _balance_sheet_rename_map = {
        "REPORT_DATE": "report_date",
        "TOTAL_ASSETS": "total_assets",
        "FIXED_ASSET": "fixed_assets_net",
        "MONETARYFUNDS": "cash_and_equivalents",
        "ACCOUNTS_RECE": "accounts_receivable",
        "INVENTORY": "inventory",
        "TOTAL_LIABILITIES": "total_liabilities",
        "ACCOUNTS_PAYABLE": "trade_and_non_trade_payables",
        "ADVANCE_RECEIVABLES": "deferred_revenue",
        "TOTAL_EQUITY": "shareholders_equity",
    }

    _income_statement_rename_map = {
        "REPORT_DATE": "report_date",
        "TOTAL_OPERATE_INCOME": "revenue",
        "TOTAL_OPERATE_COST": "total_operating_costs",
        "OPERATE_PROFIT": "operating_profit",
        "PARENT_NETPROFIT": "net_income_common_stock",
    }

    _cash_flow_rename_map = {
        "REPORT_DATE": "report_date",
        "NETCASH_OPERATE": "net_cash_flow_from_operations",
        "NETCASH_INVEST": "net_cash_flow_from_investing",
        "NETCASH_FINANCE": "net_cash_flow_from_financing",
        "CCE_ADD": "change_in_cash_and_equivalents",
    }

    def __init__(self, symbol):
        super().__init__(symbol)

    def get_income_statement(self):
        # Not implemented for this provider; see get_financial_metrics.
        pass

    def get_balance_sheet(self):
        # Not implemented for this provider; see get_financial_metrics.
        pass

    def get_cash_flow(self):
        # Not implemented for this provider; see get_financial_metrics.
        pass

    @cache(
        "financial_cache",
        key=lambda self, symbol=None: f"eastmoney_financial_metrics_{self.symbol}",
    )
    def get_financial_metrics(self) -> pd.DataFrame:
        """Return key metrics from the three financial statements.

        Outer-merges the balance sheet, income statement and cash flow
        frames on report_date, most recent period first. Returns an empty
        DataFrame when none of the statements could be fetched.
        """
        balance_sheet = self._fetch_balance_sheet()
        income_statement = self._fetch_income_statement()
        cash_flow = self._fetch_cash_flow()

        if balance_sheet.empty and income_statement.empty and cash_flow.empty:
            return pd.DataFrame()

        merged = pd.merge(
            balance_sheet, income_statement, on="report_date", how="outer"
        )
        merged = pd.merge(merged, cash_flow, on="report_date", how="outer")

        # Convert report_date to datetime and format as YYYY-MM-DD
        merged["report_date"] = pd.to_datetime(merged["report_date"]).dt.strftime(
            "%Y-%m-%d"
        )

        # Sort by report_date in descending order (most recent first)
        merged = merged.sort_values("report_date", ascending=False).reset_index(
            drop=True
        )

        return merged

    def _fetch_report(
        self, report_name: str, rename_map: dict, label: str
    ) -> pd.DataFrame:
        """Fetch one report table from the datacenter API and rename columns.

        Shared implementation for the three statement fetchers. Best-effort:
        logs and returns an empty DataFrame on any network or parse failure
        instead of raising.
        """
        try:
            api_url = "https://datacenter-web.eastmoney.com/api/data/v1/get"
            params = {
                "reportName": report_name,
                "filter": f'(SECURITY_CODE="{self.symbol}")',
                "pageNumber": "1",
                "pageSize": "1000",
                "sortColumns": "REPORT_DATE",
                "sortTypes": "-1",
                # Request only the columns we rename/keep.
                "columns": ",".join(rename_map.keys()),
            }

            # timeout so a hung connection cannot block callers forever
            response = requests.get(api_url, params=params, timeout=self._TIMEOUT)
            response.raise_for_status()
            data = response.json()

            if data.get("result") and data["result"].get("data"):
                df = pd.DataFrame(data["result"]["data"])
                df.rename(columns=rename_map, inplace=True)
                return df
            print(f"No {label} data found in API response")
            return pd.DataFrame()

        except Exception as e:
            # Degrade gracefully; get_financial_metrics tolerates empties.
            print(f"Error occurred: {str(e)}")
            return pd.DataFrame()

    def _fetch_balance_sheet(self) -> pd.DataFrame:
        """Get stock balance sheet data from East Money API."""
        return self._fetch_report(
            "RPT_DMSK_FN_BALANCE", self._balance_sheet_rename_map, "balance sheet"
        )

    def _fetch_income_statement(self) -> pd.DataFrame:
        """Get stock income statement data from East Money API."""
        return self._fetch_report(
            "RPT_DMSK_FN_INCOME", self._income_statement_rename_map, "income statement"
        )

    def _fetch_cash_flow(self) -> pd.DataFrame:
        """Get stock cash flow statement data from East Money API."""
        return self._fetch_report(
            "RPT_DMSK_FN_CASHFLOW", self._cash_flow_rename_map, "cash flow statement"
        )
|
@@ -1,230 +1,384 @@
|
|
1
|
-
import pandas as pd
|
2
|
-
|
3
|
-
|
4
|
-
|
5
|
-
|
6
|
-
|
7
|
-
|
8
|
-
|
9
|
-
|
10
|
-
|
11
|
-
.rolling(window=window, min_periods=window)
|
12
|
-
|
13
|
-
.
|
14
|
-
|
15
|
-
|
16
|
-
|
17
|
-
|
18
|
-
|
19
|
-
|
20
|
-
|
21
|
-
|
22
|
-
|
23
|
-
|
24
|
-
|
25
|
-
|
26
|
-
|
27
|
-
|
28
|
-
|
29
|
-
|
30
|
-
|
31
|
-
|
32
|
-
|
33
|
-
|
34
|
-
|
35
|
-
|
36
|
-
|
37
|
-
|
38
|
-
|
39
|
-
|
40
|
-
|
41
|
-
|
42
|
-
|
43
|
-
|
44
|
-
|
45
|
-
|
46
|
-
|
47
|
-
|
48
|
-
|
49
|
-
return
|
50
|
-
|
51
|
-
|
52
|
-
|
53
|
-
|
54
|
-
|
55
|
-
)
|
56
|
-
|
57
|
-
|
58
|
-
|
59
|
-
|
60
|
-
|
61
|
-
|
62
|
-
|
63
|
-
|
64
|
-
|
65
|
-
|
66
|
-
|
67
|
-
|
68
|
-
|
69
|
-
|
70
|
-
|
71
|
-
|
72
|
-
|
73
|
-
|
74
|
-
close = df["close"]
|
75
|
-
|
76
|
-
|
77
|
-
|
78
|
-
|
79
|
-
|
80
|
-
|
81
|
-
|
82
|
-
|
83
|
-
|
84
|
-
|
85
|
-
|
86
|
-
high = df["high"]
|
87
|
-
low = df["low"]
|
88
|
-
close = df["close"]
|
89
|
-
|
90
|
-
|
91
|
-
|
92
|
-
|
93
|
-
|
94
|
-
|
95
|
-
|
96
|
-
|
97
|
-
|
98
|
-
|
99
|
-
|
100
|
-
|
101
|
-
|
102
|
-
|
103
|
-
|
104
|
-
|
105
|
-
|
106
|
-
|
107
|
-
)
|
108
|
-
|
109
|
-
|
110
|
-
return
|
111
|
-
|
112
|
-
def
|
113
|
-
high = df["high"]
|
114
|
-
low = df["low"]
|
115
|
-
close = df["close"]
|
116
|
-
|
117
|
-
|
118
|
-
|
119
|
-
|
120
|
-
|
121
|
-
|
122
|
-
|
123
|
-
|
124
|
-
)
|
125
|
-
|
126
|
-
|
127
|
-
|
128
|
-
|
129
|
-
|
130
|
-
|
131
|
-
|
132
|
-
|
133
|
-
|
134
|
-
|
135
|
-
)
|
136
|
-
|
137
|
-
|
138
|
-
|
139
|
-
|
140
|
-
|
141
|
-
|
142
|
-
|
143
|
-
|
144
|
-
|
145
|
-
|
146
|
-
|
147
|
-
|
148
|
-
|
149
|
-
|
150
|
-
|
151
|
-
|
152
|
-
|
153
|
-
|
154
|
-
|
155
|
-
|
156
|
-
|
157
|
-
|
158
|
-
|
159
|
-
|
160
|
-
|
161
|
-
|
162
|
-
|
163
|
-
|
164
|
-
|
165
|
-
|
166
|
-
|
167
|
-
|
168
|
-
|
169
|
-
def
|
170
|
-
|
171
|
-
|
172
|
-
|
173
|
-
|
174
|
-
|
175
|
-
|
176
|
-
|
177
|
-
|
178
|
-
|
179
|
-
|
180
|
-
|
181
|
-
|
182
|
-
|
183
|
-
|
184
|
-
|
185
|
-
|
186
|
-
|
187
|
-
|
188
|
-
|
189
|
-
|
190
|
-
|
191
|
-
|
192
|
-
|
193
|
-
|
194
|
-
|
195
|
-
|
196
|
-
|
197
|
-
|
198
|
-
|
199
|
-
|
200
|
-
|
201
|
-
|
202
|
-
|
203
|
-
|
204
|
-
|
205
|
-
|
206
|
-
|
207
|
-
|
208
|
-
|
209
|
-
|
210
|
-
|
211
|
-
|
212
|
-
|
213
|
-
|
214
|
-
|
215
|
-
|
216
|
-
|
217
|
-
|
218
|
-
|
219
|
-
|
220
|
-
|
221
|
-
|
222
|
-
|
223
|
-
|
224
|
-
|
225
|
-
|
226
|
-
|
227
|
-
|
228
|
-
|
229
|
-
|
230
|
-
|
1
|
+
import pandas as pd
|
2
|
+
import numpy as np
|
3
|
+
from .base import BaseIndicatorCalculator
|
4
|
+
|
5
|
+
|
6
|
+
class SimpleIndicatorCalculator(BaseIndicatorCalculator):
    """Basic pandas-based indicator implementations.

    All public methods accept an OHLCV DataFrame (columns "open", "high",
    "low", "close", "volume" as required) and return a DataFrame of the
    indicator columns aligned to the input index.
    """

    def _get_ma(self, series: pd.Series, window: int, ma_type: int) -> pd.Series:
        """Return an SMA (ma_type 0) or EMA (ma_type 1) of *series*."""
        if ma_type == 0:
            return series.rolling(window=window, min_periods=window).mean()
        elif ma_type == 1:
            return series.ewm(span=window, adjust=False, min_periods=window).mean()
        else:
            raise ValueError(
                f"Unsupported ma_type: {ma_type} in simple calculator. Only SMA (0) and EMA (1) are supported."
            )

    def _wilder_smooth(self, series: pd.Series, window: int) -> pd.Series:
        """Wilder's smoothing: an EMA with alpha = 1/window."""
        return series.ewm(alpha=1 / window, adjust=False, min_periods=window).mean()

    def calculate_sma(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Simple moving average of close."""
        return (
            df["close"]
            .rolling(window=window, min_periods=window)
            .mean()
            .to_frame("sma")
        )

    def calculate_ema(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Exponential moving average of close."""
        return (
            df["close"]
            .ewm(span=window, adjust=False, min_periods=window)
            .mean()
            .to_frame("ema")
        )

    def calculate_rsi(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Relative Strength Index using Wilder-smoothed gains/losses."""
        delta = df["close"].diff()
        gain = delta.clip(lower=0)
        loss = -delta.clip(upper=0)

        avg_gain = gain.ewm(alpha=1 / window, min_periods=window, adjust=False).mean()
        avg_loss = loss.ewm(alpha=1 / window, min_periods=window, adjust=False).mean()

        rs = avg_gain / avg_loss
        rsi = 100 - (100 / (1 + rs))

        # Clip guards against numeric drift just outside [0, 100].
        return rsi.clip(0, 100).to_frame("rsi")

    def calculate_macd(
        self, df: pd.DataFrame, fast: int, slow: int, signal: int
    ) -> pd.DataFrame:
        """MACD line, signal line and histogram from close EMAs."""
        close = df["close"]
        ema_fast = close.ewm(span=fast, adjust=False, min_periods=fast).mean()
        ema_slow = close.ewm(span=slow, adjust=False, min_periods=slow).mean()

        macd_line = ema_fast - ema_slow
        signal_line = macd_line.ewm(
            span=signal, adjust=False, min_periods=signal
        ).mean()

        return pd.DataFrame(
            {
                "macd": macd_line,
                "signal": signal_line,
                "histogram": macd_line - signal_line,
            }
        )

    def calculate_bollinger_bands(
        self, df: pd.DataFrame, window: int, std: int
    ) -> pd.DataFrame:
        """Bollinger Bands: SMA middle band +/- std rolling deviations."""
        close = df["close"]
        sma = close.rolling(window=window, min_periods=window).mean()
        rolling_std = close.rolling(window=window, min_periods=window).std()
        upper_band = sma + (rolling_std * std)
        lower_band = sma - (rolling_std * std)
        return pd.DataFrame(
            {"upper_band": upper_band, "middle_band": sma, "lower_band": lower_band}
        )

    def calculate_stoch(
        self, df: pd.DataFrame, window: int, smooth_d: int, smooth_k: int
    ) -> pd.DataFrame:
        """Stochastic oscillator (slow %K and %D)."""
        high = df["high"]
        low = df["low"]
        close = df["close"]

        lowest_low = low.rolling(window=window).min()
        highest_high = high.rolling(window=window).max()

        # Zero range (flat window) becomes NaN to avoid division by zero.
        k = 100 * (close - lowest_low) / (highest_high - lowest_low).replace(0, np.nan)
        slow_k = k.rolling(window=smooth_k, min_periods=smooth_k).mean()
        slow_d = slow_k.rolling(window=smooth_d, min_periods=smooth_d).mean()

        return pd.DataFrame({"slow_k": slow_k, "slow_d": slow_d})

    def calculate_atr(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Average True Range with Wilder smoothing."""
        high = df["high"]
        low = df["low"]
        close = df["close"]

        tr1 = high - low
        tr2 = abs(high - close.shift())
        tr3 = abs(low - close.shift())
        tr = pd.concat([tr1, tr2, tr3], axis=1).max(axis=1)

        atr = self._wilder_smooth(tr, window)
        return atr.to_frame("atr")

    def calculate_cci(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Commodity Channel Index from the typical price."""
        high = df["high"]
        low = df["low"]
        close = df["close"]

        tp = (high + low + close) / 3
        tp_sma = tp.rolling(window=window, min_periods=window).mean()
        mean_dev = tp.rolling(window=window, min_periods=window).apply(
            lambda x: (x - x.mean()).abs().mean()
        )

        cci = (tp - tp_sma) / (0.015 * mean_dev)
        return cci.to_frame("cci")

    def calculate_adx(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Average Directional Index: Wilder-smoothed DX."""
        dx = self.calculate_dx(df, window)["dx"]
        adx = self._wilder_smooth(dx, window)
        return adx.to_frame("adx")

    def calculate_willr(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Williams %R over the rolling high/low range."""
        high = df["high"]
        low = df["low"]
        close = df["close"]
        highest_high = high.rolling(window=window, min_periods=window).max()
        lowest_low = low.rolling(window=window, min_periods=window).min()
        willr = -100 * (highest_high - close) / (highest_high - lowest_low)
        return willr.to_frame("willr")

    def calculate_ad(self, df: pd.DataFrame) -> pd.DataFrame:
        """Chaikin Accumulation/Distribution line."""
        high = df["high"]
        low = df["low"]
        close = df["close"]
        volume = df["volume"]
        # Money-flow multiplier; zero range is treated as zero flow.
        mfm = ((close - low) - (high - close)) / (high - low).replace(0, np.nan)
        mfm = mfm.fillna(0)
        mfv = mfm * volume
        ad = mfv.cumsum()
        return ad.to_frame("ad")

    def calculate_adosc(
        self, df: pd.DataFrame, fast_period: int, slow_period: int
    ) -> pd.DataFrame:
        """Chaikin A/D oscillator: fast minus slow EMA of the A/D line."""
        ad = self.calculate_ad(df)["ad"]
        ema_fast = ad.ewm(span=fast_period, adjust=False).mean()
        ema_slow = ad.ewm(span=slow_period, adjust=False).mean()
        adosc = ema_fast - ema_slow
        return adosc.to_frame("adosc")

    def calculate_obv(self, df: pd.DataFrame) -> pd.DataFrame:
        """On-Balance Volume: signed cumulative volume by close direction."""
        close = df["close"]
        volume = df["volume"]
        sign = (close > close.shift(1)).astype(int) - (close < close.shift(1)).astype(
            int
        )
        obv = (volume * sign).cumsum()
        return obv.to_frame("obv")

    def calculate_mom(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Momentum: close minus close *window* periods ago."""
        close = df["close"]
        mom = close.diff(periods=window)
        return mom.to_frame("mom")

    def calculate_sar(
        self, df: pd.DataFrame, acceleration: float, maximum: float
    ) -> pd.DataFrame:
        """Parabolic SAR (stop-and-reverse) trailing stop series.

        Uses purely positional (.iloc) access: label-based ``high[i]``
        breaks on a non-integer (e.g. datetime) index and the int-as-label
        fallback is deprecated in pandas 2.x.
        """
        high, low = df["high"], df["low"]
        sar = pd.Series(index=df.index, dtype=float)
        uptrend = True
        accel_factor = acceleration
        extreme_point = high.iloc[0]
        sar.iloc[0] = low.iloc[0]

        for i in range(1, len(df)):
            prev_sar = sar.iloc[i - 1]

            if uptrend:
                sar.iloc[i] = prev_sar + accel_factor * (extreme_point - prev_sar)
                # SAR may not enter the prior two bars' range.
                sar.iloc[i] = min(sar.iloc[i], low.iloc[i - 1])
                if i > 1:
                    sar.iloc[i] = min(sar.iloc[i], low.iloc[i - 2])

                if low.iloc[i] < sar.iloc[i]:
                    uptrend = False
                    sar.iloc[i] = extreme_point
                    extreme_point = low.iloc[i]
                    accel_factor = acceleration
                else:
                    if high.iloc[i] > extreme_point:
                        extreme_point = high.iloc[i]
                        accel_factor = min(maximum, accel_factor + acceleration)
            else:
                sar.iloc[i] = prev_sar - accel_factor * (prev_sar - extreme_point)
                sar.iloc[i] = max(sar.iloc[i], high.iloc[i - 1])
                if i > 1:
                    sar.iloc[i] = max(sar.iloc[i], high.iloc[i - 2])

                if high.iloc[i] > sar.iloc[i]:
                    uptrend = True
                    sar.iloc[i] = extreme_point
                    extreme_point = high.iloc[i]
                    accel_factor = acceleration
                else:
                    if low.iloc[i] < extreme_point:
                        extreme_point = low.iloc[i]
                        accel_factor = min(maximum, accel_factor + acceleration)

        return sar.to_frame("sar")

    def calculate_tsf(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Time Series Forecast: rolling linear-regression projection."""
        close = df["close"]

        def linear_reg_forecast(y):
            # Least-squares fit over x = 1..n, evaluated at x = n.
            x = np.arange(1, len(y) + 1)
            b_num = len(x) * np.sum(x * y) - np.sum(x) * np.sum(y)
            b_den = len(x) * np.sum(x * x) - np.sum(x) ** 2
            b = b_num / b_den if b_den != 0 else 0
            a = np.mean(y) - b * np.mean(x)
            return a + b * len(y)

        tsf = close.rolling(window=window, min_periods=window).apply(
            linear_reg_forecast, raw=True
        )
        return tsf.to_frame("tsf")

    def calculate_apo(
        self, df: pd.DataFrame, fast_period: int, slow_period: int, ma_type: int
    ) -> pd.DataFrame:
        """Absolute Price Oscillator: fast MA minus slow MA."""
        close = df["close"]
        fast_ma = self._get_ma(close, fast_period, ma_type)
        slow_ma = self._get_ma(close, slow_period, ma_type)
        apo = fast_ma - slow_ma
        return apo.to_frame("apo")

    def calculate_aroon(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Aroon up/down from periods since rolling extreme."""
        high = df["high"]
        low = df["low"]
        periods_since_high = high.rolling(window=window, min_periods=window).apply(
            lambda x: len(x) - 1 - np.argmax(x), raw=True
        )
        periods_since_low = low.rolling(window=window, min_periods=window).apply(
            lambda x: len(x) - 1 - np.argmin(x), raw=True
        )
        aroon_up = ((window - periods_since_high) / window) * 100
        aroon_down = ((window - periods_since_low) / window) * 100
        return pd.DataFrame({"aroon_up": aroon_up, "aroon_down": aroon_down})

    def calculate_aroonosc(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Aroon oscillator: aroon_up minus aroon_down."""
        aroon_df = self.calculate_aroon(df, window)
        aroonosc = aroon_df["aroon_up"] - aroon_df["aroon_down"]
        return aroonosc.to_frame("aroonosc")

    def calculate_bop(self, df: pd.DataFrame) -> pd.DataFrame:
        """Balance of Power: (close-open) / (high-low), 0 on zero range."""
        bop = (df["close"] - df["open"]) / (df["high"] - df["low"]).replace(0, np.nan)
        bop = bop.fillna(0)
        return bop.to_frame("bop")

    def calculate_cmo(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Chande Momentum Oscillator from rolling up/down move sums."""
        close_diff = df["close"].diff(1)
        sum_up = close_diff.where(close_diff > 0, 0).rolling(window=window).sum()
        sum_down = -close_diff.where(close_diff < 0, 0).rolling(window=window).sum()
        cmo = 100 * (sum_up - sum_down) / (sum_up + sum_down).replace(0, np.nan)
        cmo = cmo.fillna(0)
        return cmo.to_frame("cmo")

    def calculate_dx(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Directional Movement Index from +DI and -DI."""
        plus_di = self.calculate_plus_di(df, window)["plus_di"]
        minus_di = self.calculate_minus_di(df, window)["minus_di"]
        dx = 100 * abs(plus_di - minus_di) / (plus_di + minus_di).replace(0, np.nan)
        dx = dx.fillna(0)
        return dx.to_frame("dx")

    def calculate_mfi(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Money Flow Index from typical-price money flow."""
        typical_price = (df["high"] + df["low"] + df["close"]) / 3
        money_flow = typical_price * df["volume"]
        price_diff = typical_price.diff()
        positive_mf = money_flow.where(price_diff > 0, 0)
        negative_mf = money_flow.where(price_diff < 0, 0)
        positive_mf_sum = positive_mf.rolling(window=window).sum()
        negative_mf_sum = negative_mf.rolling(window=window).sum()
        money_ratio = positive_mf_sum / negative_mf_sum.replace(0, np.nan)
        money_ratio = money_ratio.fillna(0)
        mfi = 100 - (100 / (1 + money_ratio))
        return mfi.to_frame("mfi")

    def calculate_minus_di(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Minus Directional Indicator: 100 * smoothed -DM / ATR."""
        atr = self.calculate_atr(df, window)["atr"]
        minus_dm = self.calculate_minus_dm(df, window)["minus_dm"]
        minus_di = 100 * (minus_dm / atr)
        return minus_di.to_frame("minus_di")

    def calculate_minus_dm(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Wilder-smoothed minus directional movement."""
        high = df["high"]
        low = df["low"]
        up_move = high.diff()
        down_move = -low.diff()
        minus_dm = down_move.where((down_move > up_move) & (down_move > 0), 0)
        smoothed_minus_dm = self._wilder_smooth(minus_dm, window)
        return smoothed_minus_dm.to_frame("minus_dm")

    def calculate_plus_di(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Plus Directional Indicator: 100 * smoothed +DM / ATR."""
        atr = self.calculate_atr(df, window)["atr"]
        plus_dm = self.calculate_plus_dm(df, window)["plus_dm"]
        plus_di = 100 * (plus_dm / atr)
        return plus_di.to_frame("plus_di")

    def calculate_plus_dm(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Wilder-smoothed plus directional movement."""
        high = df["high"]
        low = df["low"]
        up_move = high.diff()
        down_move = -low.diff()
        plus_dm = up_move.where((up_move > down_move) & (up_move > 0), 0)
        smoothed_plus_dm = self._wilder_smooth(plus_dm, window)
        return smoothed_plus_dm.to_frame("plus_dm")

    def calculate_ppo(
        self, df: pd.DataFrame, fast_period: int, slow_period: int, ma_type: int
    ) -> pd.DataFrame:
        """Percentage Price Oscillator: (fast-slow)/slow * 100."""
        close = df["close"]
        fast_ma = self._get_ma(close, fast_period, ma_type)
        slow_ma = self._get_ma(close, slow_period, ma_type)
        ppo = ((fast_ma - slow_ma) / slow_ma) * 100
        return ppo.to_frame("ppo")

    def calculate_roc(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Rate of Change as a percentage."""
        close = df["close"]
        roc = (close.diff(window) / close.shift(window)) * 100
        return roc.to_frame("roc")

    def calculate_rocp(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Rate of Change as a plain ratio of the difference."""
        close = df["close"]
        rocp = close.diff(window) / close.shift(window)
        return rocp.to_frame("rocp")

    def calculate_rocr(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Rate of Change ratio: close / prior close."""
        close = df["close"]
        rocr = close / close.shift(window)
        return rocr.to_frame("rocr")

    def calculate_rocr100(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """Rate of Change ratio scaled to 100."""
        close = df["close"]
        rocr100 = (close / close.shift(window)) * 100
        return rocr100.to_frame("rocr100")

    def calculate_trix(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
        """TRIX: 1-period ROC of a triple-smoothed EMA."""
        close = df["close"]
        ema1 = close.ewm(span=window, adjust=False).mean()
        ema2 = ema1.ewm(span=window, adjust=False).mean()
        ema3 = ema2.ewm(span=window, adjust=False).mean()
        trix = 100 * ema3.diff(1) / ema3.shift(1)
        return trix.to_frame("trix")

    def calculate_ultosc(
        self, df: pd.DataFrame, window1: int, window2: int, window3: int
    ) -> pd.DataFrame:
        """Ultimate Oscillator over three weighted lookback windows."""
        low = df["low"]
        high = df["high"]
        close = df["close"]
        close_prev = close.shift(1)
        true_low = pd.concat([low, close_prev], axis=1).min(axis=1)
        true_high = pd.concat([high, close_prev], axis=1).max(axis=1)
        bp = close - true_low
        tr = true_high - true_low
        tr_sum1 = tr.rolling(window=window1).sum()
        tr_sum2 = tr.rolling(window=window2).sum()
        tr_sum3 = tr.rolling(window=window3).sum()
        avg1 = bp.rolling(window=window1).sum() / tr_sum1.replace(0, np.nan)
        avg2 = bp.rolling(window=window2).sum() / tr_sum2.replace(0, np.nan)
        avg3 = bp.rolling(window=window3).sum() / tr_sum3.replace(0, np.nan)
        avg1 = avg1.fillna(0)
        avg2 = avg2.fillna(0)
        avg3 = avg3.fillna(0)
        # Classic 4:2:1 weighting of the three averages.
        ultosc = 100 * (4 * avg1 + 2 * avg2 + 1 * avg3) / (4 + 2 + 1)
        return ultosc.to_frame("ultosc")
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.4
|
2
2
|
Name: akshare-one
|
3
|
-
Version: 0.3.
|
3
|
+
Version: 0.3.7
|
4
4
|
Summary: Standardized interface for Chinese financial market data, built on AKShare with unified data formats and simplified APIs
|
5
5
|
License-Expression: MIT
|
6
6
|
Project-URL: Homepage, https://github.com/zwldarren/akshare-one
|
@@ -9,7 +9,7 @@ Keywords: akshare,financial-data,stock-data,quant
|
|
9
9
|
Requires-Python: >=3.10
|
10
10
|
Description-Content-Type: text/markdown
|
11
11
|
License-File: LICENSE
|
12
|
-
Requires-Dist: akshare>=1.17.
|
12
|
+
Requires-Dist: akshare>=1.17.26
|
13
13
|
Requires-Dist: cachetools>=5.5.2
|
14
14
|
Provides-Extra: talib
|
15
15
|
Requires-Dist: ta-lib>=0.6.4; extra == "talib"
|
@@ -1,8 +1,11 @@
|
|
1
1
|
akshare_one/__init__.py,sha256=htgTumncxiGHqXMgqmBuIQXoPBcOnwcTjCg02Ydr73I,6524
|
2
2
|
akshare_one/indicators.py,sha256=x3Amff9CG_GvQpA-sqGfFwEAIvaaXlBxDfzTxD05taQ,12533
|
3
|
+
akshare_one/eastmoney/client.py,sha256=SSMB4oupaCns5hxtSwxaX-UE_uOrxZEGxqUiC3BT-4k,2794
|
4
|
+
akshare_one/eastmoney/utils.py,sha256=fATw0L5SW14wHWXlJ4IFEqnSsSBMT8MYGevxo7Kf1nY,2935
|
3
5
|
akshare_one/modules/cache.py,sha256=_3n35rt9xJfQzZSV6JZ6bGzf2VnqTmLfe49WXk4c9K8,867
|
4
6
|
akshare_one/modules/utils.py,sha256=msHqsjWSRULbX-3Bnit1p26a4a7MOEuNfkPSaECXr4k,333
|
5
7
|
akshare_one/modules/financial/base.py,sha256=TG3ncf3rXfgWCk4qUORN01uxT1SgLWiyjkt5Jb9eoxo,688
|
8
|
+
akshare_one/modules/financial/eastmoney_direct.py,sha256=BwiUWi3X24qbtrQv5ht2Lj6crgFrhdcgW4Aa53ZFEOg,6725
|
6
9
|
akshare_one/modules/financial/factory.py,sha256=9xR_uKt7n8dndYUxEGDDL65LXnbm-edtTLdhF0Bfyro,1468
|
7
10
|
akshare_one/modules/financial/sina.py,sha256=c6rSxCVNU6h-7XWSiqPHDN_XAhRdGHdqI9Haruy3mDs,12801
|
8
11
|
akshare_one/modules/historical/base.py,sha256=kDy76OJUp-LIddsC23YAQdf4Q_YGCrnZ8AvU4xRzQsI,1286
|
@@ -13,7 +16,7 @@ akshare_one/modules/historical/sina.py,sha256=sQoUnQlkxyI4i7Cuw5YwKT3IoNM8-K5wle
|
|
13
16
|
akshare_one/modules/indicators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
14
17
|
akshare_one/modules/indicators/base.py,sha256=DhFivpVIUIkdIv24U2WoOy1GCDySxsw0tD0-rBRe5Lc,4376
|
15
18
|
akshare_one/modules/indicators/factory.py,sha256=pKx57oej_L0Lz3kkXwzVievKpOYph0T_Y7fzSwO3Zd4,1021
|
16
|
-
akshare_one/modules/indicators/simple.py,sha256=
|
19
|
+
akshare_one/modules/indicators/simple.py,sha256=fwkM7tqqu6JIX_jv5_w6klrGO3s3WCJTz87xy4dy-Hc,15479
|
17
20
|
akshare_one/modules/indicators/talib.py,sha256=w0KpV-BXVxU0LmWs_EbXJUFgo9dbMeUQijjJMkjtWtU,10773
|
18
21
|
akshare_one/modules/info/base.py,sha256=Kof-e1I2usx1VOc1d05kyL-8B_QEDOsbry4R3dV0zZE,697
|
19
22
|
akshare_one/modules/info/eastmoney.py,sha256=pvWLcVoVWwgZS_4Bg-OtHQW5SPCZ9I1PAFbN4yqluq0,1610
|
@@ -29,8 +32,8 @@ akshare_one/modules/realtime/eastmoney.py,sha256=6acJeIdrvkW4ZqM9CALithlx85QSogr
|
|
29
32
|
akshare_one/modules/realtime/eastmoney_direct.py,sha256=A2ScBRfIP6n_BxQ6muB26AEykIvTG7Mt3BDAZMyugkg,1236
|
30
33
|
akshare_one/modules/realtime/factory.py,sha256=_7jBDgqWqkt5xTTT1SpZoUHM9IpMRpcUQeyyCglM5z0,1528
|
31
34
|
akshare_one/modules/realtime/xueqiu.py,sha256=CHTN5VUwo24H-2EGKQkN8oqr3MWjDi-7DpvQEDyPlls,2196
|
32
|
-
akshare_one-0.3.
|
33
|
-
akshare_one-0.3.
|
34
|
-
akshare_one-0.3.
|
35
|
-
akshare_one-0.3.
|
36
|
-
akshare_one-0.3.
|
35
|
+
akshare_one-0.3.7.dist-info/licenses/LICENSE,sha256=3bqxoD7aU4QS7kpNtQmRd4MikxXe6Gtm_DrojyFHGAc,1087
|
36
|
+
akshare_one-0.3.7.dist-info/METADATA,sha256=R2t-LPC6OnmV95Ec_Y6P0Hu0W8sctFC5GjRlLBSGCdQ,2272
|
37
|
+
akshare_one-0.3.7.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
38
|
+
akshare_one-0.3.7.dist-info/top_level.txt,sha256=kNiucyLVAGa89wmUSpXbBLWD7pF_RuahuiaOfLHZSyw,12
|
39
|
+
akshare_one-0.3.7.dist-info/RECORD,,
|
File without changes
|
File without changes
|
File without changes
|