akshare-one 0.3.1__py3-none-any.whl → 0.3.3__py3-none-any.whl

This diff covers the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Files changed (43)
  1. akshare_one/__init__.py +214 -31
  2. akshare_one/indicators.py +395 -395
  3. akshare_one/modules/cache.py +10 -9
  4. akshare_one/modules/eastmoney/client.py +88 -88
  5. akshare_one/modules/eastmoney/utils.py +104 -104
  6. akshare_one/modules/financial/base.py +27 -22
  7. akshare_one/modules/financial/eastmoney.py +184 -0
  8. akshare_one/modules/financial/factory.py +46 -44
  9. akshare_one/modules/financial/sina.py +298 -273
  10. akshare_one/modules/historical/base.py +47 -47
  11. akshare_one/modules/historical/eastmoney.py +241 -241
  12. akshare_one/modules/historical/eastmoney_direct.py +79 -79
  13. akshare_one/modules/historical/factory.py +48 -48
  14. akshare_one/modules/historical/sina.py +254 -254
  15. akshare_one/modules/indicators/base.py +158 -158
  16. akshare_one/modules/indicators/factory.py +33 -33
  17. akshare_one/modules/indicators/simple.py +230 -230
  18. akshare_one/modules/indicators/talib.py +263 -263
  19. akshare_one/modules/info/base.py +25 -0
  20. akshare_one/modules/info/eastmoney.py +52 -0
  21. akshare_one/modules/info/factory.py +44 -0
  22. akshare_one/modules/insider/base.py +28 -28
  23. akshare_one/modules/insider/factory.py +44 -44
  24. akshare_one/modules/insider/xueqiu.py +115 -115
  25. akshare_one/modules/news/base.py +22 -22
  26. akshare_one/modules/news/eastmoney.py +47 -47
  27. akshare_one/modules/news/factory.py +44 -44
  28. akshare_one/modules/realtime/base.py +27 -27
  29. akshare_one/modules/realtime/eastmoney.py +57 -57
  30. akshare_one/modules/realtime/eastmoney_direct.py +37 -37
  31. akshare_one/modules/realtime/factory.py +48 -48
  32. akshare_one/modules/realtime/xueqiu.py +60 -60
  33. akshare_one/modules/utils.py +10 -10
  34. {akshare_one-0.3.1.dist-info → akshare_one-0.3.3.dist-info}/METADATA +70 -70
  35. akshare_one-0.3.3.dist-info/RECORD +39 -0
  36. {akshare_one-0.3.1.dist-info → akshare_one-0.3.3.dist-info}/licenses/LICENSE +21 -21
  37. akshare_one/financial.py +0 -46
  38. akshare_one/insider.py +0 -33
  39. akshare_one/news.py +0 -27
  40. akshare_one/stock.py +0 -78
  41. akshare_one-0.3.1.dist-info/RECORD +0 -39
  42. {akshare_one-0.3.1.dist-info → akshare_one-0.3.3.dist-info}/WHEEL +0 -0
  43. {akshare_one-0.3.1.dist-info → akshare_one-0.3.3.dist-info}/top_level.txt +0 -0
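Most of the churn in this release is structural rather than behavioral: the legacy top-level modules financial.py, insider.py, news.py, and stock.py are removed, akshare_one/__init__.py grows sharply (+214/-31), presumably absorbing that public surface, a new financial/eastmoney.py provider is added, and a new modules/info package (base.py, eastmoney.py, factory.py) joins the existing data domains, all of which share the same base/factory/provider layout. As an illustration only (the names InfoDataProvider, EastMoneyInfo, and get_info_provider below are hypothetical and not taken from the package), that layout generally looks like this:

```python
# Illustration of the base/factory/provider layout only; the class and function
# names here are hypothetical and are not taken from the akshare-one source.
from abc import ABC, abstractmethod

import pandas as pd


class InfoDataProvider(ABC):  # the role played by modules/info/base.py
    """Common interface that every info-data source implements."""

    def __init__(self, symbol: str) -> None:
        self.symbol = symbol

    @abstractmethod
    def get_basic_info(self) -> pd.DataFrame:
        """Return standardized per-symbol metadata."""


class EastMoneyInfo(InfoDataProvider):  # the role played by modules/info/eastmoney.py
    def get_basic_info(self) -> pd.DataFrame:
        # A real provider would call the upstream endpoint and normalize columns.
        return pd.DataFrame({"symbol": [self.symbol]})


# A factory module typically keeps a registry and a lookup helper.
_PROVIDERS: dict[str, type[InfoDataProvider]] = {"eastmoney": EastMoneyInfo}


def get_info_provider(source: str, symbol: str) -> InfoDataProvider:
    """Resolve a source name to a provider instance."""
    try:
        return _PROVIDERS[source](symbol)
    except KeyError:
        raise ValueError(f"Unsupported info source: {source}") from None
```

The one file reproduced in full below is akshare_one/modules/historical/sina.py, which the list above records as a whole-file rewrite (+254/-254).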
akshare_one/modules/historical/sina.py
@@ -1,254 +1,254 @@
- from cachetools import cached
- from .base import HistoricalDataProvider
- import akshare as ak
- import pandas as pd
- from ..cache import CACHE_CONFIG
-
-
- class SinaHistorical(HistoricalDataProvider):
-     """Adapter for Sina historical stock data API"""
-
-     @cached(
-         cache=CACHE_CONFIG["hist_data_cache"],
-         key=lambda self: f"sina_hist_{self.symbol}_{self.interval}_{self.interval_multiplier}_{self.adjust}",
-     )
-     def get_hist_data(self) -> pd.DataFrame:
-         """Fetches Sina historical market data
-
-         Returns:
-             pd.DataFrame:
-                 - timestamp
-                 - open
-                 - high
-                 - low
-                 - close
-                 - volume
-         """
-         self.interval = self.interval.lower()
-         self._validate_interval_params(self.interval, self.interval_multiplier)
-
-         try:
-             stock = (
-                 f"sh{self.symbol}"
-                 if not self.symbol.startswith(("sh", "sz", "bj"))
-                 else self.symbol
-             )
-
-             if self.interval == "minute":
-                 df = self._get_minute_data(stock)
-             elif self.interval == "hour":
-                 df = self._get_hour_data(stock)
-             else:
-                 df = self._get_daily_plus_data(stock)
-
-             return df
-         except Exception as e:
-             raise ValueError(f"Failed to fetch historical data: {str(e)}")
-
-     def _get_minute_data(self, stock: str) -> pd.DataFrame:
-         """Fetches minute level data"""
-         raw_df = ak.stock_zh_a_minute(
-             symbol=stock,
-             period="1",
-             adjust=self._map_adjust_param(self.adjust),
-         )
-         raw_df = raw_df.rename(columns={"day": "date"})
-         raw_df["date"] = pd.to_datetime(raw_df["date"])
-         raw_df = raw_df.set_index("date")
-         raw_df = (
-             raw_df.resample(f"{self.interval_multiplier}min")
-             .agg(
-                 {
-                     "open": "first",
-                     "high": "max",
-                     "low": "min",
-                     "close": "last",
-                     "volume": "sum",
-                 }
-             )
-             .reset_index()
-         )
-         return self._clean_minute_data(raw_df)
-
-     def _get_hour_data(self, stock: str) -> pd.DataFrame:
-         """Fetches hour level data"""
-         if self.interval_multiplier < 1:
-             raise ValueError("Hour interval multiplier must be >= 1")
-
-         raw_df = ak.stock_zh_a_minute(
-             symbol=stock,
-             period="60",
-             adjust=self._map_adjust_param(self.adjust),
-         )
-         raw_df = raw_df.rename(columns={"day": "date"})
-         raw_df["date"] = pd.to_datetime(raw_df["date"])
-         raw_df = raw_df.set_index("date")
-         raw_df = (
-             raw_df.resample(f"{self.interval_multiplier}h")
-             .agg(
-                 {
-                     "open": "first",
-                     "high": "max",
-                     "low": "min",
-                     "close": "last",
-                     "volume": "sum",
-                 }
-             )
-             .reset_index()
-         )
-         return self._clean_minute_data(raw_df)
-
-     def _get_b_share_data(self, stock: str) -> pd.DataFrame:
-         """Fetches B-share historical data"""
-         start_date = self._convert_date_format(self.start_date)
-         end_date = self._convert_date_format(self.end_date)
-
-         if self.interval in ["minute", "hour"]:
-             period = "1" if self.interval == "minute" else "60"
-             raw_df = ak.stock_zh_b_minute(
-                 symbol=stock,
-                 period=period,
-                 adjust=self._map_adjust_param(self.adjust),
-             )
-             # Rename 'day' to 'date' for consistency
-             raw_df = raw_df.rename(columns={"day": "date"})
-
-             if self.interval_multiplier > 1:
-                 freq = f"{self.interval_multiplier}{'min' if self.interval == 'minute' else 'h'}"
-                 raw_df = self._resample_intraday_data(raw_df, freq)
-         else:
-             raw_df = ak.stock_zh_b_daily(
-                 symbol=stock,
-                 start_date=start_date,
-                 end_date=end_date,
-                 adjust=self._map_adjust_param(self.adjust),
-             )
-             if self.interval_multiplier > 1:
-                 raw_df = self._resample_data(
-                     raw_df, self.interval, self.interval_multiplier
-                 )
-
-         return self._clean_data(raw_df)
-
-     def _get_daily_plus_data(self, stock: str) -> pd.DataFrame:
-         """Fetches daily and higher-level data (day/week/month/year)"""
-         # Check if it's a B-share symbol
-         if stock.startswith(("sh9", "sz2")):
-             return self._get_b_share_data(stock)
-
-         start_date = self._convert_date_format(self.start_date)
-         end_date = self._convert_date_format(self.end_date)
-
-         raw_df = ak.stock_zh_a_daily(
-             symbol=stock,
-             start_date=start_date,
-             end_date=end_date,
-             adjust=self._map_adjust_param(self.adjust),
-         )
-
-         if self.interval_multiplier > 1:
-             raw_df = self._resample_data(
-                 raw_df, self.interval, self.interval_multiplier
-             )
-
-         return self._clean_data(raw_df)
-
-     def _validate_interval_params(self, interval: str, multiplier: int) -> None:
-         """Validates the validity of interval and multiplier"""
-         if interval not in self.get_supported_intervals():
-             raise ValueError(f"Unsupported interval parameter: {interval}")
-
-         if interval in ["minute", "hour"] and multiplier < 1:
-             raise ValueError(f"interval_multiplier for {interval} level must be ≥ 1")
-
-     def _convert_date_format(self, date_str: str) -> str:
-         """Converts date format from YYYY-MM-DD to YYYYMMDD"""
-         return date_str.replace("-", "") if "-" in date_str else date_str
-
-     def _map_adjust_param(self, adjust: str) -> str:
-         """Maps adjustment parameters to the required format"""
-         return adjust if adjust != "none" else ""
-
-     def _resample_data(
-         self, df: pd.DataFrame, interval: str, multiplier: int
-     ) -> pd.DataFrame:
-         """Resamples daily and higher-level data to the specified interval"""
-         freq_map = {
-             "day": f"{multiplier}D",
-             "week": f"{multiplier}W-MON",
-             "month": f"{multiplier}MS",
-             "year": f"{multiplier}AS-JAN",
-         }
-         freq = freq_map[interval]
-
-         df["date"] = pd.to_datetime(df["date"])
-         df = df.set_index("date")
-         resampled = df.resample(freq).agg(
-             {
-                 "open": "first",
-                 "high": "max",
-                 "low": "min",
-                 "close": "last",
-                 "volume": "sum",
-             }
-         )
-         return resampled.reset_index()
-
-     def _clean_minute_data(self, raw_df: pd.DataFrame) -> pd.DataFrame:
-         """Cleans and standardizes minute/hour level data, converting timestamps to UTC"""
-         column_map = {
-             "date": "timestamp",
-             "open": "open",
-             "high": "high",
-             "low": "low",
-             "close": "close",
-             "volume": "volume",
-         }
-
-         df = raw_df.rename(columns=column_map)
-
-         if "timestamp" in df.columns:
-             df["timestamp"] = (
-                 pd.to_datetime(df["timestamp"])
-                 .dt.tz_localize("Asia/Shanghai")
-                 .dt.tz_convert("UTC")
-             )
-
-         return self._select_standard_columns(df)
-
-     def _clean_data(self, raw_df: pd.DataFrame) -> pd.DataFrame:
-         """Cleans and standardizes daily and higher-level data, converting timestamps to UTC"""
-         column_map = {
-             "date": "timestamp",
-             "open": "open",
-             "high": "high",
-             "low": "low",
-             "close": "close",
-             "volume": "volume",
-         }
-
-         df = raw_df.rename(columns=column_map)
-
-         if "timestamp" in df.columns:
-             df["timestamp"] = (
-                 pd.to_datetime(df["timestamp"])
-                 .dt.tz_localize("Asia/Shanghai")
-                 .dt.tz_convert("UTC")
-             )
-
-         if "volume" in df.columns:
-             df["volume"] = df["volume"].astype("int64")
-
-         return self._select_standard_columns(df)
-
-     def _select_standard_columns(self, df: pd.DataFrame) -> pd.DataFrame:
-         """Selects and orders the standard output columns"""
-         standard_columns = [
-             "timestamp",
-             "open",
-             "high",
-             "low",
-             "close",
-             "volume",
-         ]
-         return df[[col for col in standard_columns if col in df.columns]]
+ from cachetools import cached
+ from .base import HistoricalDataProvider
+ import akshare as ak
+ import pandas as pd
+ from ..cache import CACHE_CONFIG
+
+
+ class SinaHistorical(HistoricalDataProvider):
+     """Adapter for Sina historical stock data API"""
+
+     @cached(
+         cache=CACHE_CONFIG["hist_data_cache"],
+         key=lambda self: f"sina_hist_{self.symbol}_{self.interval}_{self.interval_multiplier}_{self.adjust}",
+     )
+     def get_hist_data(self) -> pd.DataFrame:
+         """Fetches Sina historical market data
+
+         Returns:
+             pd.DataFrame:
+                 - timestamp
+                 - open
+                 - high
+                 - low
+                 - close
+                 - volume
+         """
+         self.interval = self.interval.lower()
+         self._validate_interval_params(self.interval, self.interval_multiplier)
+
+         try:
+             stock = (
+                 f"sh{self.symbol}"
+                 if not self.symbol.startswith(("sh", "sz", "bj"))
+                 else self.symbol
+             )
+
+             if self.interval == "minute":
+                 df = self._get_minute_data(stock)
+             elif self.interval == "hour":
+                 df = self._get_hour_data(stock)
+             else:
+                 df = self._get_daily_plus_data(stock)
+
+             return df
+         except Exception as e:
+             raise ValueError(f"Failed to fetch historical data: {str(e)}")
+
+     def _get_minute_data(self, stock: str) -> pd.DataFrame:
+         """Fetches minute level data"""
+         raw_df = ak.stock_zh_a_minute(
+             symbol=stock,
+             period="1",
+             adjust=self._map_adjust_param(self.adjust),
+         )
+         raw_df = raw_df.rename(columns={"day": "date"})
+         raw_df["date"] = pd.to_datetime(raw_df["date"])
+         raw_df = raw_df.set_index("date")
+         raw_df = (
+             raw_df.resample(f"{self.interval_multiplier}min")
+             .agg(
+                 {
+                     "open": "first",
+                     "high": "max",
+                     "low": "min",
+                     "close": "last",
+                     "volume": "sum",
+                 }
+             )
+             .reset_index()
+         )
+         return self._clean_minute_data(raw_df)
+
+     def _get_hour_data(self, stock: str) -> pd.DataFrame:
+         """Fetches hour level data"""
+         if self.interval_multiplier < 1:
+             raise ValueError("Hour interval multiplier must be >= 1")
+
+         raw_df = ak.stock_zh_a_minute(
+             symbol=stock,
+             period="60",
+             adjust=self._map_adjust_param(self.adjust),
+         )
+         raw_df = raw_df.rename(columns={"day": "date"})
+         raw_df["date"] = pd.to_datetime(raw_df["date"])
+         raw_df = raw_df.set_index("date")
+         raw_df = (
+             raw_df.resample(f"{self.interval_multiplier}h")
+             .agg(
+                 {
+                     "open": "first",
+                     "high": "max",
+                     "low": "min",
+                     "close": "last",
+                     "volume": "sum",
+                 }
+             )
+             .reset_index()
+         )
+         return self._clean_minute_data(raw_df)
+
+     def _get_b_share_data(self, stock: str) -> pd.DataFrame:
+         """Fetches B-share historical data"""
+         start_date = self._convert_date_format(self.start_date)
+         end_date = self._convert_date_format(self.end_date)
+
+         if self.interval in ["minute", "hour"]:
+             period = "1" if self.interval == "minute" else "60"
+             raw_df = ak.stock_zh_b_minute(
+                 symbol=stock,
+                 period=period,
+                 adjust=self._map_adjust_param(self.adjust),
+             )
+             # Rename 'day' to 'date' for consistency
+             raw_df = raw_df.rename(columns={"day": "date"})
+
+             if self.interval_multiplier > 1:
+                 freq = f"{self.interval_multiplier}{'min' if self.interval == 'minute' else 'h'}"
+                 raw_df = self._resample_intraday_data(raw_df, freq)
+         else:
+             raw_df = ak.stock_zh_b_daily(
+                 symbol=stock,
+                 start_date=start_date,
+                 end_date=end_date,
+                 adjust=self._map_adjust_param(self.adjust),
+             )
+             if self.interval_multiplier > 1:
+                 raw_df = self._resample_data(
+                     raw_df, self.interval, self.interval_multiplier
+                 )
+
+         return self._clean_data(raw_df)
+
+     def _get_daily_plus_data(self, stock: str) -> pd.DataFrame:
+         """Fetches daily and higher-level data (day/week/month/year)"""
+         # Check if it's a B-share symbol
+         if stock.startswith(("sh9", "sz2")):
+             return self._get_b_share_data(stock)
+
+         start_date = self._convert_date_format(self.start_date)
+         end_date = self._convert_date_format(self.end_date)
+
+         raw_df = ak.stock_zh_a_daily(
+             symbol=stock,
+             start_date=start_date,
+             end_date=end_date,
+             adjust=self._map_adjust_param(self.adjust),
+         )
+
+         if self.interval_multiplier > 1:
+             raw_df = self._resample_data(
+                 raw_df, self.interval, self.interval_multiplier
+             )
+
+         return self._clean_data(raw_df)
+
+     def _validate_interval_params(self, interval: str, multiplier: int) -> None:
+         """Validates the validity of interval and multiplier"""
+         if interval not in self.get_supported_intervals():
+             raise ValueError(f"Unsupported interval parameter: {interval}")
+
+         if interval in ["minute", "hour"] and multiplier < 1:
+             raise ValueError(f"interval_multiplier for {interval} level must be ≥ 1")
+
+     def _convert_date_format(self, date_str: str) -> str:
+         """Converts date format from YYYY-MM-DD to YYYYMMDD"""
+         return date_str.replace("-", "") if "-" in date_str else date_str
+
+     def _map_adjust_param(self, adjust: str) -> str:
+         """Maps adjustment parameters to the required format"""
+         return adjust if adjust != "none" else ""
+
+     def _resample_data(
+         self, df: pd.DataFrame, interval: str, multiplier: int
+     ) -> pd.DataFrame:
+         """Resamples daily and higher-level data to the specified interval"""
+         freq_map = {
+             "day": f"{multiplier}D",
+             "week": f"{multiplier}W-MON",
+             "month": f"{multiplier}MS",
+             "year": f"{multiplier}AS-JAN",
+         }
+         freq = freq_map[interval]
+
+         df["date"] = pd.to_datetime(df["date"])
+         df = df.set_index("date")
+         resampled = df.resample(freq).agg(
+             {
+                 "open": "first",
+                 "high": "max",
+                 "low": "min",
+                 "close": "last",
+                 "volume": "sum",
+             }
+         )
+         return resampled.reset_index()
+
+     def _clean_minute_data(self, raw_df: pd.DataFrame) -> pd.DataFrame:
+         """Cleans and standardizes minute/hour level data, converting timestamps to UTC"""
+         column_map = {
+             "date": "timestamp",
+             "open": "open",
+             "high": "high",
+             "low": "low",
+             "close": "close",
+             "volume": "volume",
+         }
+
+         df = raw_df.rename(columns=column_map)
+
+         if "timestamp" in df.columns:
+             df["timestamp"] = (
+                 pd.to_datetime(df["timestamp"])
+                 .dt.tz_localize("Asia/Shanghai")
+                 .dt.tz_convert("UTC")
+             )
+
+         return self._select_standard_columns(df)
+
+     def _clean_data(self, raw_df: pd.DataFrame) -> pd.DataFrame:
+         """Cleans and standardizes daily and higher-level data, converting timestamps to UTC"""
+         column_map = {
+             "date": "timestamp",
+             "open": "open",
+             "high": "high",
+             "low": "low",
+             "close": "close",
+             "volume": "volume",
+         }
+
+         df = raw_df.rename(columns=column_map)
+
+         if "timestamp" in df.columns:
+             df["timestamp"] = (
+                 pd.to_datetime(df["timestamp"])
+                 .dt.tz_localize("Asia/Shanghai")
+                 .dt.tz_convert("UTC")
+             )
+
+         if "volume" in df.columns:
+             df["volume"] = df["volume"].astype("int64")
+
+         return self._select_standard_columns(df)
+
+     def _select_standard_columns(self, df: pd.DataFrame) -> pd.DataFrame:
+         """Selects and orders the standard output columns"""
+         standard_columns = [
+             "timestamp",
+             "open",
+             "high",
+             "low",
+             "close",
+             "volume",
+         ]
+         return df[[col for col in standard_columns if col in df.columns]]
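For orientation, here is a minimal usage sketch of the SinaHistorical adapter shown in the diff. The constructor is defined on the HistoricalDataProvider base class, which is not part of this diff, so the keyword arguments below are assumptions inferred from the attributes the adapter reads (symbol, interval, interval_multiplier, adjust, start_date, end_date); in practice callers would more likely go through modules/historical/factory.py than instantiate the adapter directly.

```python
# Hypothetical usage sketch. The HistoricalDataProvider constructor is not part
# of this diff, so the keyword arguments below are assumptions inferred from the
# attributes SinaHistorical reads; only get_hist_data() and its output columns
# are taken directly from the code above.
from akshare_one.modules.historical.sina import SinaHistorical

provider = SinaHistorical(
    symbol="600000",          # bare codes are prefixed with "sh" inside get_hist_data()
    interval="day",           # "minute", "hour", "day", "week", "month", or "year"
    interval_multiplier=1,    # values > 1 trigger resampling
    adjust="none",            # "none" is mapped to "" before the akshare call
    start_date="2024-01-01",  # converted to YYYYMMDD for daily-and-above requests
    end_date="2024-12-31",
)

df = provider.get_hist_data()
# Standardized output columns: timestamp (UTC), open, high, low, close, volume
print(df.tail())
```

Two details worth noting from the code itself: results are cached under a key built from symbol, interval, interval_multiplier, and adjust (not the date range), so changing only start_date or end_date may return a cached frame, and timestamps are localized to Asia/Shanghai before being converted to UTC.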