akshare-one 0.3.3__tar.gz → 0.3.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. {akshare_one-0.3.3 → akshare_one-0.3.5}/PKG-INFO +8 -4
  2. {akshare_one-0.3.3 → akshare_one-0.3.5}/README.md +6 -2
  3. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/__init__.py +4 -4
  4. akshare_one-0.3.5/akshare_one/modules/cache.py +27 -0
  5. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/financial/factory.py +2 -2
  6. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/financial/sina.py +6 -12
  7. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/historical/base.py +1 -1
  8. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/historical/eastmoney.py +9 -14
  9. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/historical/eastmoney_direct.py +5 -6
  10. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/historical/sina.py +6 -10
  11. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/info/eastmoney.py +3 -4
  12. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/insider/base.py +1 -1
  13. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/insider/xueqiu.py +8 -13
  14. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/news/base.py +1 -1
  15. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/news/eastmoney.py +5 -9
  16. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/realtime/eastmoney.py +4 -8
  17. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/realtime/eastmoney_direct.py +5 -6
  18. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/realtime/xueqiu.py +4 -7
  19. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one.egg-info/PKG-INFO +8 -4
  20. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one.egg-info/SOURCES.txt +0 -3
  21. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one.egg-info/requires.txt +1 -1
  22. {akshare_one-0.3.3 → akshare_one-0.3.5}/pyproject.toml +6 -2
  23. akshare_one-0.3.5/tests/test_info.py +82 -0
  24. akshare_one-0.3.3/akshare_one/modules/cache.py +0 -10
  25. akshare_one-0.3.3/akshare_one/modules/eastmoney/client.py +0 -88
  26. akshare_one-0.3.3/akshare_one/modules/eastmoney/utils.py +0 -104
  27. akshare_one-0.3.3/akshare_one/modules/financial/eastmoney.py +0 -184
  28. akshare_one-0.3.3/tests/test_info.py +0 -33
  29. {akshare_one-0.3.3 → akshare_one-0.3.5}/LICENSE +0 -0
  30. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/indicators.py +0 -0
  31. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/financial/base.py +0 -0
  32. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/historical/factory.py +0 -0
  33. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/indicators/__init__.py +0 -0
  34. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/indicators/base.py +0 -0
  35. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/indicators/factory.py +0 -0
  36. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/indicators/simple.py +0 -0
  37. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/indicators/talib.py +0 -0
  38. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/info/base.py +0 -0
  39. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/info/factory.py +0 -0
  40. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/insider/factory.py +0 -0
  41. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/news/factory.py +0 -0
  42. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/realtime/base.py +0 -0
  43. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/realtime/factory.py +0 -0
  44. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/utils.py +0 -0
  45. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one.egg-info/dependency_links.txt +0 -0
  46. {akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one.egg-info/top_level.txt +0 -0
  47. {akshare_one-0.3.3 → akshare_one-0.3.5}/setup.cfg +0 -0
  48. {akshare_one-0.3.3 → akshare_one-0.3.5}/tests/test_financial.py +0 -0
  49. {akshare_one-0.3.3 → akshare_one-0.3.5}/tests/test_indicators.py +0 -0
  50. {akshare_one-0.3.3 → akshare_one-0.3.5}/tests/test_insider.py +0 -0
  51. {akshare_one-0.3.3 → akshare_one-0.3.5}/tests/test_news.py +0 -0
  52. {akshare_one-0.3.3 → akshare_one-0.3.5}/tests/test_stock.py +0 -0
{akshare_one-0.3.3 → akshare_one-0.3.5}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: akshare-one
- Version: 0.3.3
+ Version: 0.3.5
  Summary: Standardized interface for Chinese financial market data, built on AKShare with unified data formats and simplified APIs
  License-Expression: MIT
  Project-URL: Homepage, https://github.com/zwldarren/akshare-one
@@ -9,7 +9,7 @@ Keywords: akshare,financial-data,stock-data,quant
  Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: akshare>=1.17.20
+ Requires-Dist: akshare>=1.17.21
  Requires-Dist: cachetools>=5.5.2
  Provides-Extra: talib
  Requires-Dist: ta-lib>=0.6.4; extra == "talib"
@@ -40,7 +40,9 @@ Dynamic: license-file
  | Stock news | `get_news_data` |
  | Financial data | `get_balance_sheet`/`get_income_statement`/`get_cash_flow` |
  | Internal transactions | `get_inner_trade_data` |
- | Technical indicators | See [indicators.py](akshare_one/indicators.py) |
+ | Basic stock info | `get_basic_info` |
+ | Financial metrics | `get_financial_metrics` |
+ | Technical indicators | See [indicators.py](akshare-one/indicators.py) |

  ## 📦 Quick Installation

@@ -67,4 +69,6 @@ df_sma = get_sma(df, window=20)

  ## 📚 Documentation

- Detailed API reference: [docs/api.md](docs/api.md)
+ Full API documentation is now available on GitHub Pages:
+
+ https://zwldarren.github.io/akshare-one/
{akshare_one-0.3.3 → akshare_one-0.3.5}/README.md
@@ -23,7 +23,9 @@
  | Stock news | `get_news_data` |
  | Financial data | `get_balance_sheet`/`get_income_statement`/`get_cash_flow` |
  | Internal transactions | `get_inner_trade_data` |
- | Technical indicators | See [indicators.py](akshare_one/indicators.py) |
+ | Basic stock info | `get_basic_info` |
+ | Financial metrics | `get_financial_metrics` |
+ | Technical indicators | See [indicators.py](akshare-one/indicators.py) |

  ## 📦 Quick Installation

@@ -50,4 +52,6 @@ df_sma = get_sma(df, window=20)

  ## 📚 Documentation

- Detailed API reference: [docs/api.md](docs/api.md)
+ Full API documentation is now available on GitHub Pages:
+
+ https://zwldarren.github.io/akshare-one/
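The two new feature-table rows map to helpers exported from `akshare_one/__init__.py`. A minimal usage sketch (the symbols are illustrative; the exact columns come from the underlying providers and are not guaranteed here):

```python
# Sketch only: get_basic_info and get_balance_sheet appear in the feature table;
# "600405" / "600600" are arbitrary example symbols.
from akshare_one import get_basic_info, get_balance_sheet

info = get_basic_info("600405")   # one-row DataFrame with symbol, name, industry, ...
bs = get_balance_sheet("600600")  # balance sheet, served by the Sina provider per this diff
print(info[["symbol", "name"]])
```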
{akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/__init__.py
@@ -71,7 +71,7 @@ def get_hist_data(

      Returns:
          pd.DataFrame:
-             - timestamp: 时间戳(UTC时区)
+             - timestamp: 时间戳
              - open: 开盘价
              - high: 最高价
              - low: 最低价
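This docstring edit tracks a behavioural change that runs through the whole release: timestamps are now returned tz-aware in Asia/Shanghai rather than converted to UTC (see the removed `tz_convert("UTC")` calls in the provider hunks further down). A hedged sketch of restoring UTC downstream, should existing code expect it:

```python
# Illustrative only: convert an Asia/Shanghai tz-aware column back to UTC.
import pandas as pd

df = pd.DataFrame(
    {"timestamp": pd.to_datetime(["2024-01-02 09:30"]).tz_localize("Asia/Shanghai")}
)
df["timestamp"] = df["timestamp"].dt.tz_convert("UTC")
```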
@@ -92,7 +92,7 @@ def get_hist_data(

  def get_realtime_data(
      symbol: Optional[str] = None,
-     source: Literal["eastmoney", "eastmoney_direct", "xueqiu"] = "xueqiu",
+     source: Literal["eastmoney", "eastmoney_direct", "xueqiu"] = "eastmoney_direct",
  ) -> pd.DataFrame:
      """Get real-time market quotes

@@ -183,13 +183,13 @@ def get_cash_flow(symbol: str, source: Literal["sina"] = "sina") -> pd.DataFrame


  def get_financial_metrics(
-     symbol: str, source: Literal["eastmoney"] = "eastmoney"
+     symbol: str, source: Literal["eastmoney_direct"] = "eastmoney_direct"
  ) -> pd.DataFrame:
      """获取三大财务报表关键指标

      Args:
          symbol: 股票代码 (如 "600600")
-         source: 数据源 ('eastmoney')
+         source: 数据源 ('eastmoney_direct')

      Returns:
          pd.DataFrame: 财务关键指标数据
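Taken together, the two hunks above shift the default realtime source and restrict `get_financial_metrics` to the direct EastMoney provider. A small sketch of the calls as they read in 0.3.5 (the symbol values are illustrative):

```python
# Defaults taken from the updated signatures in this diff.
from akshare_one import get_realtime_data, get_financial_metrics

quotes = get_realtime_data(symbol="600405")  # uses source="eastmoney_direct" by default
metrics = get_financial_metrics("600600")    # "eastmoney_direct" is the only accepted source
```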
akshare_one-0.3.5/akshare_one/modules/cache.py
@@ -0,0 +1,27 @@
+ from cachetools import TTLCache, cached
+ import os
+
+ # 缓存配置
+ CACHE_CONFIG = {
+     "hist_data_cache": TTLCache(maxsize=1000, ttl=3600),  # 历史数据缓存1小时
+     "realtime_cache": TTLCache(maxsize=500, ttl=60),  # 实时数据缓存1分钟
+     "news_cache": TTLCache(maxsize=500, ttl=3600),  # 新闻数据缓存1小时
+     "financial_cache": TTLCache(maxsize=500, ttl=86400),  # 财务数据缓存24小时
+     "info_cache": TTLCache(maxsize=500, ttl=86400),  # 信息数据缓存24小时
+ }
+
+
+ def cache(cache_key, key=None):
+     cache_enabled = os.getenv("AKSHARE_ONE_CACHE_ENABLED", "true").lower() in (
+         "1",
+         "true",
+         "yes",
+         "on",
+     )
+
+     def decorator(func):
+         if cache_enabled:
+             return cached(CACHE_CONFIG[cache_key], key=key)(func)
+         return func
+
+     return decorator
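The new `cache` wrapper replaces direct `@cached(CACHE_CONFIG[...])` usage so caching can be disabled via `AKSHARE_ONE_CACHE_ENABLED`. A minimal sketch of how a provider method applies it, mirroring the pattern in the provider hunks below (the class name and cache key are hypothetical):

```python
# Hypothetical provider illustrating the decorator added in this file.
# Note: AKSHARE_ONE_CACHE_ENABLED is read when the decorator is applied
# (i.e. at import time), not on every call.
import pandas as pd

from akshare_one.modules.cache import CACHE_CONFIG, cache


class DemoProvider:
    def __init__(self, symbol: str) -> None:
        self.symbol = symbol

    @cache("financial_cache", key=lambda self: f"demo_balance_{self.symbol}")
    def get_balance_sheet(self) -> pd.DataFrame:
        # The expensive fetch runs at most once per key within the 24h TTL
        # configured for "financial_cache" above.
        return pd.DataFrame({"symbol": [self.symbol]})


provider = DemoProvider("600600")
provider.get_balance_sheet()
provider.get_balance_sheet()  # served from CACHE_CONFIG["financial_cache"]
print(CACHE_CONFIG["financial_cache"].currsize)  # -> 1, assuming caching is enabled
```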
{akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/financial/factory.py
@@ -1,4 +1,4 @@
- from .eastmoney import EastMoneyFinancialReport
+ from .eastmoney_direct import EastMoneyDirectFinancialReport
  from .sina import SinaFinancialReport
  from .base import FinancialDataProvider

@@ -10,7 +10,7 @@ class FinancialDataFactory:

      _providers = {
          "sina": SinaFinancialReport,
-         "eastmoney": EastMoneyFinancialReport,
+         "eastmoney_direct": EastMoneyDirectFinancialReport,
      }

      @classmethod
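With this change the factory maps `"eastmoney_direct"` to the new direct implementation. The factory's public accessor is not visible in this hunk, so the sketch below resolves the class straight from the mapping purely for illustration:

```python
# Illustration only: reads the provider mapping directly rather than the
# (unshown) factory accessor method.
from akshare_one.modules.financial.factory import FinancialDataFactory

provider_cls = FinancialDataFactory._providers["eastmoney_direct"]
print(provider_cls.__name__)  # -> EastMoneyDirectFinancialReport
```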
{akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/financial/sina.py
@@ -1,8 +1,7 @@
- from cachetools import cached
  import pandas as pd
  import akshare as ak

- from akshare_one.modules.cache import CACHE_CONFIG
+ from ..cache import cache
  from .base import FinancialDataProvider


@@ -19,9 +18,8 @@ class SinaFinancialReport(FinancialDataProvider):
              f"sh{symbol}" if not symbol.startswith(("sh", "sz", "bj")) else symbol
          )

-     @cached(
-         CACHE_CONFIG["financial_cache"],
-         key=lambda self, symbol=None: f"sina_balance_{self.symbol}",
+     @cache(
+         "financial_cache", key=lambda self, symbol=None: f"sina_balance_{self.symbol}"
      )
      def get_balance_sheet(self) -> pd.DataFrame:
          """获取资产负债表数据
@@ -35,9 +33,8 @@ class SinaFinancialReport(FinancialDataProvider):
          raw_df = ak.stock_financial_report_sina(stock=self.stock, symbol="资产负债表")
          return self._clean_balance_data(raw_df)

-     @cached(
-         CACHE_CONFIG["financial_cache"],
-         key=lambda self, symbol=None: f"sina_income_{self.symbol}",
+     @cache(
+         "financial_cache", key=lambda self, symbol=None: f"sina_income_{self.symbol}"
      )
      def get_income_statement(self) -> pd.DataFrame:
          """获取利润表数据
@@ -51,10 +48,7 @@ class SinaFinancialReport(FinancialDataProvider):
          raw_df = ak.stock_financial_report_sina(stock=self.stock, symbol="利润表")
          return self._clean_income_data(raw_df)

-     @cached(
-         CACHE_CONFIG["financial_cache"],
-         key=lambda self, symbol=None: f"sina_cash_{self.symbol}",
-     )
+     @cache("financial_cache", key=lambda self, symbol=None: f"sina_cash_{self.symbol}")
      def get_cash_flow(self) -> pd.DataFrame:
          """获取现金流量表数据

{akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one/modules/historical/base.py
@@ -37,7 +37,7 @@ class HistoricalDataProvider(ABC):

      Returns:
          pd.DataFrame:
-             - timestamp (UTC)
+             - timestamp
              - open
              - high
              - low
@@ -1,15 +1,14 @@
1
- from cachetools import cached
2
1
  from .base import HistoricalDataProvider
3
2
  import akshare as ak
4
3
  import pandas as pd
5
- from ..cache import CACHE_CONFIG
4
+ from ..cache import cache
6
5
 
7
6
 
8
7
  class EastMoneyHistorical(HistoricalDataProvider):
9
8
  """Adapter for EastMoney historical stock data API"""
10
9
 
11
- @cached(
12
- cache=CACHE_CONFIG["hist_data_cache"],
10
+ @cache(
11
+ "hist_data_cache",
13
12
  key=lambda self: f"eastmoney_hist_{self.symbol}_{self.interval}_{self.interval_multiplier}_{self.adjust}",
14
13
  )
15
14
  def get_hist_data(self) -> pd.DataFrame:
@@ -157,7 +156,7 @@ class EastMoneyHistorical(HistoricalDataProvider):
157
156
  return resampled.reset_index()
158
157
 
159
158
  def _clean_minute_data(self, raw_df: pd.DataFrame, period: str) -> pd.DataFrame:
160
- """Cleans and standardizes minute/hour level data, converting timestamps to UTC"""
159
+ """Cleans and standardizes minute/hour level data"""
161
160
  column_map = {
162
161
  "1": {
163
162
  "时间": "timestamp",
@@ -188,16 +187,14 @@ class EastMoneyHistorical(HistoricalDataProvider):
188
187
  df = raw_df.rename(columns=mapping)
189
188
 
190
189
  if "timestamp" in df.columns:
191
- df["timestamp"] = (
192
- pd.to_datetime(df["timestamp"])
193
- .dt.tz_localize("Asia/Shanghai")
194
- .dt.tz_convert("UTC")
190
+ df["timestamp"] = pd.to_datetime(df["timestamp"]).dt.tz_localize(
191
+ "Asia/Shanghai"
195
192
  )
196
193
 
197
194
  return self._select_standard_columns(df)
198
195
 
199
196
  def _clean_data(self, raw_df: pd.DataFrame) -> pd.DataFrame:
200
- """Cleans and standardizes daily and higher-level data, converting timestamps to UTC"""
197
+ """Cleans and standardizes daily and higher-level data"""
201
198
  column_map = {
202
199
  "日期": "timestamp",
203
200
  "开盘": "open",
@@ -217,10 +214,8 @@ class EastMoneyHistorical(HistoricalDataProvider):
217
214
  df = raw_df.rename(columns=available_columns)
218
215
 
219
216
  if "timestamp" in df.columns:
220
- df["timestamp"] = (
221
- pd.to_datetime(df["timestamp"])
222
- .dt.tz_localize("Asia/Shanghai")
223
- .dt.tz_convert("UTC")
217
+ df["timestamp"] = pd.to_datetime(df["timestamp"]).dt.tz_localize(
218
+ "Asia/Shanghai"
224
219
  )
225
220
 
226
221
  if "volume" in df.columns:
@@ -1,9 +1,8 @@
1
1
  import pandas as pd
2
- from cachetools import cached
3
2
  from .base import HistoricalDataProvider
4
- from ..cache import CACHE_CONFIG
5
- from ..eastmoney.client import EastMoneyClient
6
- from ..eastmoney.utils import parse_kline_data, resample_historical_data
3
+ from ..cache import cache
4
+ from akshare_one.eastmoney.client import EastMoneyClient
5
+ from akshare_one.eastmoney.utils import parse_kline_data, resample_historical_data
7
6
 
8
7
 
9
8
  class EastMoneyDirectHistorical(HistoricalDataProvider):
@@ -13,8 +12,8 @@ class EastMoneyDirectHistorical(HistoricalDataProvider):
13
12
  super().__init__(*args, **kwargs)
14
13
  self.client = EastMoneyClient()
15
14
 
16
- @cached(
17
- cache=CACHE_CONFIG["hist_data_cache"],
15
+ @cache(
16
+ "hist_data_cache",
18
17
  key=lambda self: f"eastmoney_direct_hist_{self.symbol}_{self.interval}_{self.interval_multiplier}_{self.adjust}",
19
18
  )
20
19
  def get_hist_data(self) -> pd.DataFrame:
@@ -195,7 +195,7 @@ class SinaHistorical(HistoricalDataProvider):
195
195
  return resampled.reset_index()
196
196
 
197
197
  def _clean_minute_data(self, raw_df: pd.DataFrame) -> pd.DataFrame:
198
- """Cleans and standardizes minute/hour level data, converting timestamps to UTC"""
198
+ """Cleans and standardizes minute/hour level data"""
199
199
  column_map = {
200
200
  "date": "timestamp",
201
201
  "open": "open",
@@ -208,16 +208,14 @@ class SinaHistorical(HistoricalDataProvider):
208
208
  df = raw_df.rename(columns=column_map)
209
209
 
210
210
  if "timestamp" in df.columns:
211
- df["timestamp"] = (
212
- pd.to_datetime(df["timestamp"])
213
- .dt.tz_localize("Asia/Shanghai")
214
- .dt.tz_convert("UTC")
211
+ df["timestamp"] = pd.to_datetime(df["timestamp"]).dt.tz_localize(
212
+ "Asia/Shanghai"
215
213
  )
216
214
 
217
215
  return self._select_standard_columns(df)
218
216
 
219
217
  def _clean_data(self, raw_df: pd.DataFrame) -> pd.DataFrame:
220
- """Cleans and standardizes daily and higher-level data, converting timestamps to UTC"""
218
+ """Cleans and standardizes daily and higher-level data"""
221
219
  column_map = {
222
220
  "date": "timestamp",
223
221
  "open": "open",
@@ -230,10 +228,8 @@ class SinaHistorical(HistoricalDataProvider):
230
228
  df = raw_df.rename(columns=column_map)
231
229
 
232
230
  if "timestamp" in df.columns:
233
- df["timestamp"] = (
234
- pd.to_datetime(df["timestamp"])
235
- .dt.tz_localize("Asia/Shanghai")
236
- .dt.tz_convert("UTC")
231
+ df["timestamp"] = pd.to_datetime(df["timestamp"]).dt.tz_localize(
232
+ "Asia/Shanghai"
237
233
  )
238
234
 
239
235
  if "volume" in df.columns:
@@ -1,8 +1,7 @@
1
- from cachetools import cached
2
1
  import pandas as pd
3
2
  import akshare as ak
4
3
 
5
- from ..cache import CACHE_CONFIG
4
+ from ..cache import cache
6
5
  from .base import InfoDataProvider
7
6
 
8
7
 
@@ -19,8 +18,8 @@ class EastmoneyInfo(InfoDataProvider):
19
18
  "上市时间": "listing_date",
20
19
  }
21
20
 
22
- @cached(
23
- CACHE_CONFIG["info_cache"],
21
+ @cache(
22
+ "info_cache",
24
23
  key=lambda self, symbol=None: f"eastmoney_{symbol}",
25
24
  )
26
25
  def get_basic_info(self) -> pd.DataFrame:
@@ -16,7 +16,7 @@ class InsiderDataProvider(ABC):
16
16
  - issuer: 股票名称
17
17
  - name: 变动人
18
18
  - title: 董监高职务
19
- - transaction_date: 变动日期(UTC时区)
19
+ - transaction_date: 变动日期
20
20
  - transaction_shares: 变动股数
21
21
  - transaction_price_per_share: 成交均价
22
22
  - shares_owned_after_transaction: 变动后持股数
@@ -1,16 +1,15 @@
1
- from cachetools import cached
2
1
  import pandas as pd
3
2
  import akshare as ak
4
3
  from .base import InsiderDataProvider
5
4
  from ..utils import convert_xieqiu_symbol
6
- from ..cache import CACHE_CONFIG
5
+ from ..cache import cache
7
6
 
8
7
 
9
8
  class XueQiuInsider(InsiderDataProvider):
10
9
  """Provider for XueQiu insider trading data"""
11
10
 
12
- @cached(
13
- cache=CACHE_CONFIG["financial_cache"],
11
+ @cache(
12
+ "financial_cache",
14
13
  key=lambda self, symbol=None: f"xueqiu_insider_{symbol if symbol else 'all'}",
15
14
  )
16
15
  def get_inner_trade_data(self) -> pd.DataFrame:
@@ -87,17 +86,13 @@ class XueQiuInsider(InsiderDataProvider):
87
86
 
88
87
  # Convert date format
89
88
  if "transaction_date" in df.columns:
90
- df["transaction_date"] = (
91
- pd.to_datetime(df["transaction_date"])
92
- .dt.tz_localize("Asia/Shanghai")
93
- .dt.tz_convert("UTC")
94
- )
89
+ df["transaction_date"] = pd.to_datetime(
90
+ df["transaction_date"]
91
+ ).dt.tz_localize("Asia/Shanghai")
95
92
 
96
93
  if "filing_date" in df.columns:
97
- df["filing_date"] = (
98
- pd.to_datetime(df["filing_date"])
99
- .dt.tz_localize("Asia/Shanghai")
100
- .dt.tz_convert("UTC")
94
+ df["filing_date"] = pd.to_datetime(df["filing_date"]).dt.tz_localize(
95
+ "Asia/Shanghai"
101
96
  )
102
97
 
103
98
  # Convert numeric columns
@@ -15,7 +15,7 @@ class NewsDataProvider(ABC):
15
15
  - keyword: 关键词
16
16
  - title: 新闻标题
17
17
  - content: 新闻内容
18
- - publish_time: 发布时间 (UTC)
18
+ - publish_time: 发布时间
19
19
  - source: 文章来源
20
20
  - url: 新闻链接
21
21
  """
@@ -1,14 +1,13 @@
1
- from cachetools import cached
2
1
  import pandas as pd
3
2
  import akshare as ak
4
3
 
5
- from ..cache import CACHE_CONFIG
4
+ from ..cache import cache
6
5
  from .base import NewsDataProvider
7
6
 
8
7
 
9
8
  class EastMoneyNews(NewsDataProvider):
10
- @cached(
11
- CACHE_CONFIG["news_cache"],
9
+ @cache(
10
+ "news_cache",
12
11
  key=lambda self: f"eastmoney_news_{self.symbol}",
13
12
  )
14
13
  def get_news_data(self) -> pd.DataFrame:
@@ -29,11 +28,8 @@ class EastMoneyNews(NewsDataProvider):
29
28
 
30
29
  df = raw_df.rename(columns=column_mapping)
31
30
 
32
- # Convert time to UTC
33
- df["publish_time"] = (
34
- pd.to_datetime(df["publish_time"])
35
- .dt.tz_localize("Asia/Shanghai")
36
- .dt.tz_convert("UTC")
31
+ df["publish_time"] = pd.to_datetime(df["publish_time"]).dt.tz_localize(
32
+ "Asia/Shanghai"
37
33
  )
38
34
 
39
35
  required_columns = [
@@ -1,14 +1,13 @@
1
- from cachetools import cached
2
1
  import pandas as pd
3
2
  import akshare as ak
4
3
 
5
- from ..cache import CACHE_CONFIG
4
+ from ..cache import cache
6
5
  from .base import RealtimeDataProvider
7
6
 
8
7
 
9
8
  class EastmoneyRealtime(RealtimeDataProvider):
10
- @cached(
11
- CACHE_CONFIG["realtime_cache"],
9
+ @cache(
10
+ "realtime_cache",
12
11
  key=lambda self, symbol=None: f"eastmoney_{symbol if symbol else 'all'}",
13
12
  )
14
13
  def get_current_data(self) -> pd.DataFrame:
@@ -36,10 +35,7 @@ class EastmoneyRealtime(RealtimeDataProvider):
36
35
 
37
36
  df = raw_df.rename(columns=column_mapping)
38
37
 
39
- # Change time to UTC
40
- df = df.assign(
41
- timestamp=lambda x: pd.Timestamp.now(tz="Asia/Shanghai").tz_convert("UTC")
42
- )
38
+ df = df.assign(timestamp=lambda x: pd.Timestamp.now(tz="Asia/Shanghai"))
43
39
 
44
40
  required_columns = [
45
41
  "symbol",
@@ -1,9 +1,8 @@
1
1
  import pandas as pd
2
- from cachetools import cached
3
2
  from .base import RealtimeDataProvider
4
- from ..cache import CACHE_CONFIG
5
- from ..eastmoney.client import EastMoneyClient
6
- from ..eastmoney.utils import parse_realtime_data
3
+ from ..cache import cache
4
+ from akshare_one.eastmoney.client import EastMoneyClient
5
+ from akshare_one.eastmoney.utils import parse_realtime_data
7
6
 
8
7
 
9
8
  class EastMoneyDirectRealtime(RealtimeDataProvider):
@@ -13,8 +12,8 @@ class EastMoneyDirectRealtime(RealtimeDataProvider):
13
12
  super().__init__(symbol)
14
13
  self.client = EastMoneyClient()
15
14
 
16
- @cached(
17
- cache=CACHE_CONFIG["realtime_cache"],
15
+ @cache(
16
+ "realtime_cache",
18
17
  key=lambda self: f"eastmoney_direct_realtime_{self.symbol}",
19
18
  )
20
19
  def get_current_data(self) -> pd.DataFrame:
@@ -1,14 +1,13 @@
1
- from cachetools import cached
2
1
  import pandas as pd
3
2
  import akshare as ak
4
3
  from ..utils import convert_xieqiu_symbol
5
- from ..cache import CACHE_CONFIG
4
+ from ..cache import cache
6
5
  from .base import RealtimeDataProvider
7
6
 
8
7
 
9
8
  class XueQiuRealtime(RealtimeDataProvider):
10
- @cached(
11
- cache=CACHE_CONFIG["realtime_cache"],
9
+ @cache(
10
+ "realtime_cache",
12
11
  key=lambda self, symbol=None: f"xueqiu_{symbol}",
13
12
  )
14
13
  def get_current_data(self) -> pd.DataFrame:
@@ -43,9 +42,7 @@ class XueQiuRealtime(RealtimeDataProvider):
43
42
  ),
44
43
  "timestamp": pd.to_datetime(
45
44
  raw_df.loc[raw_df["item"] == "时间", "value"].values[0]
46
- )
47
- .tz_localize("Asia/Shanghai")
48
- .tz_convert("UTC"),
45
+ ).tz_localize("Asia/Shanghai"),
49
46
  "volume": int(raw_df.loc[raw_df["item"] == "成交量", "value"].values[0])
50
47
  / 100,
51
48
  "amount": float(raw_df.loc[raw_df["item"] == "成交额", "value"].values[0]),
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: akshare-one
3
- Version: 0.3.3
3
+ Version: 0.3.5
4
4
  Summary: Standardized interface for Chinese financial market data, built on AKShare with unified data formats and simplified APIs
5
5
  License-Expression: MIT
6
6
  Project-URL: Homepage, https://github.com/zwldarren/akshare-one
@@ -9,7 +9,7 @@ Keywords: akshare,financial-data,stock-data,quant
9
9
  Requires-Python: >=3.10
10
10
  Description-Content-Type: text/markdown
11
11
  License-File: LICENSE
12
- Requires-Dist: akshare>=1.17.20
12
+ Requires-Dist: akshare>=1.17.21
13
13
  Requires-Dist: cachetools>=5.5.2
14
14
  Provides-Extra: talib
15
15
  Requires-Dist: ta-lib>=0.6.4; extra == "talib"
@@ -40,7 +40,9 @@ Dynamic: license-file
40
40
  | Stock news | `get_news_data` |
41
41
  | Financial data | `get_balance_sheet`/`get_income_statement`/`get_cash_flow` |
42
42
  | Internal transactions | `get_inner_trade_data` |
43
- | Technical indicators | See [indicators.py](akshare_one/indicators.py) |
43
+ | Basic stock info | `get_basic_info` |
44
+ | Financial metrics | `get_financial_metrics` |
45
+ | Technical indicators | See [indicators.py](akshare-one/indicators.py) |
44
46
 
45
47
  ## 📦 Quick Installation
46
48
 
@@ -67,4 +69,6 @@ df_sma = get_sma(df, window=20)
67
69
 
68
70
  ## 📚 Documentation
69
71
 
70
- Detailed API reference: [docs/api.md](docs/api.md)
72
+ Full API documentation is now available on GitHub Pages:
73
+
74
+ https://zwldarren.github.io/akshare-one/
{akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one.egg-info/SOURCES.txt
@@ -10,10 +10,7 @@ akshare_one.egg-info/requires.txt
  akshare_one.egg-info/top_level.txt
  akshare_one/modules/cache.py
  akshare_one/modules/utils.py
- akshare_one/modules/eastmoney/client.py
- akshare_one/modules/eastmoney/utils.py
  akshare_one/modules/financial/base.py
- akshare_one/modules/financial/eastmoney.py
  akshare_one/modules/financial/factory.py
  akshare_one/modules/financial/sina.py
  akshare_one/modules/historical/base.py
{akshare_one-0.3.3 → akshare_one-0.3.5}/akshare_one.egg-info/requires.txt
@@ -1,4 +1,4 @@
- akshare>=1.17.20
+ akshare>=1.17.21
  cachetools>=5.5.2

  [talib]
{akshare_one-0.3.3 → akshare_one-0.3.5}/pyproject.toml
@@ -1,11 +1,11 @@
  [project]
  name = "akshare-one"
- version = "0.3.3"
+ version = "0.3.5"
  description = "Standardized interface for Chinese financial market data, built on AKShare with unified data formats and simplified APIs"
  readme = "README.md"
  requires-python = ">=3.10"
  dependencies = [
-     "akshare>=1.17.20",
+     "akshare>=1.17.21",
      "cachetools>=5.5.2",
  ]
  license = "MIT"
@@ -22,12 +22,16 @@ talib = [

  [dependency-groups]
  dev = [
+     "mkdocs-material>=9.6.15",
      "pre-commit>=4.2.0",
      "pytest>=8.4.1",
      "pytest-cov>=6.2.1",
      "ruff>=0.12.1",
  ]

+ [tool.setuptools]
+ packages = ["akshare_one"]
+
  [tool.pytest.ini_options]
  testpaths = ["tests"]
  python_files = "test_*.py"
@@ -0,0 +1,82 @@
1
+ import unittest
2
+ from cachetools import TTLCache
3
+ import pandas as pd
4
+ import os
5
+ import time
6
+ from akshare_one import get_basic_info
7
+ from akshare_one.modules.cache import CACHE_CONFIG
8
+
9
+
10
+ class TestInfo(unittest.TestCase):
11
+ def test_get_info(self):
12
+ """测试获取股票基本信息"""
13
+ df = get_basic_info("600405")
14
+ self.assertIsInstance(df, pd.DataFrame)
15
+ self.assertFalse(df.empty)
16
+ self.assertEqual(df.shape[0], 1)
17
+
18
+ expected_columns = [
19
+ "price",
20
+ "symbol",
21
+ "name",
22
+ "total_shares",
23
+ "float_shares",
24
+ "total_market_cap",
25
+ "float_market_cap",
26
+ "industry",
27
+ "listing_date",
28
+ ]
29
+ for col in expected_columns:
30
+ self.assertIn(col, df.columns)
31
+
32
+ self.assertEqual(df["symbol"].iloc[0], "600405")
33
+ self.assertIsInstance(df["listing_date"].iloc[0], pd.Timestamp)
34
+
35
+ def test_cache(self):
36
+ """测试缓存功能是否生效"""
37
+ cache = CACHE_CONFIG["info_cache"]
38
+ cache.clear()
39
+
40
+ # 测试缓存命中
41
+ os.environ["AKSHARE_ONE_CACHE_ENABLED"] = "true"
42
+
43
+ # 第一次调用 - 应该缓存未命中
44
+ initial_size = cache.currsize
45
+ df1 = get_basic_info("600405")
46
+ self.assertEqual(cache.currsize, initial_size + 1) # 缓存应增加
47
+
48
+ # 第二次调用 - 应该缓存命中
49
+ df2 = get_basic_info("600405")
50
+ self.assertEqual(cache.currsize, initial_size + 1) # 缓存大小不变
51
+ pd.testing.assert_frame_equal(df1, df2)
52
+
53
+ # 测试缓存禁用
54
+ os.environ["AKSHARE_ONE_CACHE_ENABLED"] = "false"
55
+ disabled_size = cache.currsize
56
+ get_basic_info("600405")
57
+ self.assertEqual(cache.currsize, disabled_size)
58
+
59
+ # 测试缓存过期
60
+ os.environ["AKSHARE_ONE_CACHE_ENABLED"] = "true"
61
+
62
+ # 创建临时缓存并替换原缓存
63
+ original_cache = cache
64
+ temp_cache = TTLCache(maxsize=1000, ttl=1)
65
+ CACHE_CONFIG["info_cache"] = temp_cache
66
+
67
+ # 填充缓存
68
+ get_basic_info("600405")
69
+ expired_size = temp_cache.currsize
70
+
71
+ # 等待缓存过期
72
+ time.sleep(1.1)
73
+
74
+ # 过期后调用 - 应该缓存未命中
75
+ get_basic_info("600405")
76
+ self.assertEqual(temp_cache.currsize, expired_size) # 缓存被替换,大小不变
77
+
78
+ CACHE_CONFIG["info_cache"] = original_cache
79
+
80
+
81
+ if __name__ == "__main__":
82
+ unittest.main()
akshare_one-0.3.3/akshare_one/modules/cache.py
@@ -1,10 +0,0 @@
- from cachetools import TTLCache
-
- # 缓存配置
- CACHE_CONFIG = {
-     "hist_data_cache": TTLCache(maxsize=1000, ttl=3600),  # 历史数据缓存1小时
-     "realtime_cache": TTLCache(maxsize=500, ttl=60),  # 实时数据缓存1分钟
-     "news_cache": TTLCache(maxsize=500, ttl=3600),
-     "financial_cache": TTLCache(maxsize=500, ttl=86400),  # 财务数据缓存24小时
-     "info_cache": TTLCache(maxsize=500, ttl=86400),  # 信息数据缓存24小时
- }
@@ -1,88 +0,0 @@
1
- import requests
2
- from typing import Dict, Any
3
-
4
-
5
- class EastMoneyClient:
6
- """
7
- A client for interacting directly with EastMoney's data APIs.
8
- This class handles session management, request signing, and API calls.
9
- """
10
-
11
- def __init__(self):
12
- self.session = requests.Session()
13
- self.session.headers.update(
14
- {
15
- "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36",
16
- "Referer": "https://quote.eastmoney.com/",
17
- "Accept": "application/json, text/plain, */*",
18
- "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
19
- }
20
- )
21
-
22
- def _get_security_id(self, symbol: str) -> str:
23
- """
24
- Converts a stock symbol to EastMoney's internal secid format.
25
- e.g., '600519' -> '1.600519', '000001' -> '0.000001'
26
- """
27
- symbol = symbol.upper()
28
- if symbol.startswith("SZ"):
29
- market = "0"
30
- code = symbol[2:]
31
- elif symbol.startswith("SH"):
32
- market = "1"
33
- code = symbol[2:]
34
- elif symbol.startswith("HK"):
35
- market = "116"
36
- code = symbol[2:]
37
- elif len(symbol) == 6:
38
- if symbol.startswith(("000", "001", "002", "003", "300", "200")):
39
- market = "0"
40
- elif symbol.startswith(("600", "601", "603", "605", "688", "900")):
41
- market = "1"
42
- else:
43
- market = "0" # Default to SZ for ambiguity
44
- code = symbol
45
- elif len(symbol) == 5: # HK Market
46
- market = "116"
47
- code = symbol
48
- else:
49
- market = "0"
50
- code = symbol
51
- return f"{market}.{code}"
52
-
53
- def fetch_historical_klines(
54
- self, symbol: str, klt: str, fqt: str, start_date: str, end_date: str
55
- ) -> Dict[str, Any]:
56
- """
57
- Fetches historical K-line (candlestick) data.
58
- """
59
- url = "https://push2his.eastmoney.com/api/qt/stock/kline/get"
60
- secid = self._get_security_id(symbol)
61
- params = {
62
- "fields1": "f1,f2,f3,f4,f5,f6",
63
- "fields2": "f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61",
64
- "klt": klt,
65
- "fqt": fqt,
66
- "secid": secid,
67
- "beg": start_date,
68
- "end": end_date,
69
- }
70
- response = self.session.get(url, params=params)
71
- response.raise_for_status()
72
- return response.json()
73
-
74
- def fetch_realtime_quote(self, symbol: str) -> Dict[str, Any]:
75
- """
76
- Fetches real-time quote data for a single stock.
77
- """
78
- url = "https://push2.eastmoney.com/api/qt/stock/get"
79
- secid = self._get_security_id(symbol)
80
- params = {
81
- "invt": "2",
82
- "fltt": "2",
83
- "fields": "f43,f57,f58,f169,f170,f46,f60,f44,f51,f168,f47,f164,f163,f116,f60,f45,f52,f50,f48,f167,f117,f71,f161,f49,f530",
84
- "secid": secid,
85
- }
86
- response = self.session.get(url, params=params)
87
- response.raise_for_status()
88
- return response.json()
@@ -1,104 +0,0 @@
1
- import pandas as pd
2
- from typing import Dict, Any
3
-
4
-
5
- def parse_kline_data(data: Dict[str, Any]) -> pd.DataFrame:
6
- """
7
- Parses K-line data from the API response into a pandas DataFrame.
8
- """
9
- klines = data.get("data", {}).get("klines", [])
10
- if not klines:
11
- return pd.DataFrame(
12
- columns=["timestamp", "open", "high", "low", "close", "volume"]
13
- )
14
-
15
- records = []
16
- for kline in klines:
17
- parts = kline.split(",")
18
- if len(parts) >= 6:
19
- records.append(
20
- {
21
- "timestamp": parts[0],
22
- "open": float(parts[1]),
23
- "close": float(parts[2]),
24
- "high": float(parts[3]),
25
- "low": float(parts[4]),
26
- "volume": int(parts[5]),
27
- }
28
- )
29
-
30
- df = pd.DataFrame(records)
31
- if not df.empty:
32
- df["timestamp"] = pd.to_datetime(df["timestamp"])
33
- df["timestamp"] = (
34
- df["timestamp"].dt.tz_localize("Asia/Shanghai").dt.tz_convert("UTC")
35
- )
36
- df = df[["timestamp", "open", "high", "low", "close", "volume"]]
37
- return df
38
-
39
-
40
- def parse_realtime_data(data: Dict[str, Any]) -> pd.DataFrame:
41
- """
42
- Parses real-time quote data from the API response into a pandas DataFrame.
43
- """
44
- stock_data = data.get("data")
45
- if not stock_data:
46
- return pd.DataFrame()
47
-
48
- df = pd.DataFrame(
49
- [
50
- {
51
- "symbol": stock_data.get("f57"),
52
- "price": stock_data.get("f43"),
53
- "change": stock_data.get("f169"),
54
- "pct_change": stock_data.get("f170"),
55
- "volume": stock_data.get("f47"),
56
- "amount": stock_data.get("f48"),
57
- "open": stock_data.get("f46"),
58
- "high": stock_data.get("f44"),
59
- "low": stock_data.get("f45"),
60
- "prev_close": stock_data.get("f60"),
61
- }
62
- ]
63
- )
64
- df["timestamp"] = pd.Timestamp.now(tz="Asia/Shanghai").tz_convert("UTC")
65
- return df
66
-
67
-
68
- def resample_historical_data(
69
- df: pd.DataFrame, interval: str, multiplier: int
70
- ) -> pd.DataFrame:
71
- """
72
- Resamples historical data to a specified frequency.
73
- """
74
- if df.empty or multiplier <= 1:
75
- return df
76
-
77
- df = df.set_index("timestamp")
78
-
79
- freq_map = {
80
- "day": f"{multiplier}D",
81
- "week": f"{multiplier}W-MON",
82
- "month": f"{multiplier}MS",
83
- "year": f"{multiplier * 12}MS",
84
- }
85
- freq = freq_map.get(interval)
86
-
87
- if not freq:
88
- return df.reset_index()
89
-
90
- resampled = (
91
- df.resample(freq)
92
- .agg(
93
- {
94
- "open": "first",
95
- "high": "max",
96
- "low": "min",
97
- "close": "last",
98
- "volume": "sum",
99
- }
100
- )
101
- .dropna()
102
- )
103
-
104
- return resampled.reset_index()
@@ -1,184 +0,0 @@
1
- from cachetools import cached
2
- import pandas as pd
3
- import requests
4
-
5
- from akshare_one.modules.cache import CACHE_CONFIG
6
- from .base import FinancialDataProvider
7
-
8
-
9
- class EastMoneyFinancialReport(FinancialDataProvider):
10
- _balance_sheet_rename_map = {
11
- "REPORT_DATE": "report_date",
12
- "TOTAL_ASSETS": "total_assets",
13
- "FIXED_ASSET": "fixed_assets_net",
14
- "MONETARYFUNDS": "cash_and_equivalents",
15
- "ACCOUNTS_RECE": "accounts_receivable",
16
- "INVENTORY": "inventory",
17
- "TOTAL_LIABILITIES": "total_liabilities",
18
- "ACCOUNTS_PAYABLE": "trade_and_non_trade_payables",
19
- "ADVANCE_RECEIVABLES": "deferred_revenue",
20
- "TOTAL_EQUITY": "shareholders_equity",
21
- }
22
-
23
- _income_statement_rename_map = {
24
- "REPORT_DATE": "report_date",
25
- "TOTAL_OPERATE_INCOME": "revenue",
26
- "TOTAL_OPERATE_COST": "total_operating_costs",
27
- "OPERATE_PROFIT": "operating_profit",
28
- "PARENT_NETPROFIT": "net_income_common_stock",
29
- }
30
-
31
- _cash_flow_rename_map = {
32
- "REPORT_DATE": "report_date",
33
- "NETCASH_OPERATE": "net_cash_flow_from_operations",
34
- "NETCASH_INVEST": "net_cash_flow_from_investing",
35
- "NETCASH_FINANCE": "net_cash_flow_from_financing",
36
- "CCE_ADD": "change_in_cash_and_equivalents",
37
- }
38
-
39
- def __init__(self, symbol):
40
- super().__init__(symbol)
41
-
42
- def get_income_statement(self):
43
- pass
44
-
45
- def get_balance_sheet(self):
46
- pass
47
-
48
- def get_cash_flow(self):
49
- pass
50
-
51
- @cached(
52
- CACHE_CONFIG["financial_cache"],
53
- key=lambda self, symbol=None: f"financial_metrics_{self.symbol}",
54
- )
55
- def get_financial_metrics(self) -> pd.DataFrame:
56
- """获取三大财务报表关键指标"""
57
- balance_sheet = self._fetch_balance_sheet()
58
- income_statement = self._fetch_income_statement()
59
- cash_flow = self._fetch_cash_flow()
60
-
61
- if balance_sheet.empty and income_statement.empty and cash_flow.empty:
62
- return pd.DataFrame()
63
-
64
- merged = pd.merge(
65
- balance_sheet, income_statement, on="report_date", how="outer"
66
- )
67
- merged = pd.merge(merged, cash_flow, on="report_date", how="outer")
68
-
69
- # Convert report_date to datetime and format as YYYY-MM-DD
70
- merged["report_date"] = pd.to_datetime(merged["report_date"]).dt.strftime(
71
- "%Y-%m-%d"
72
- )
73
-
74
- # Sort by report_date in descending order (most recent first)
75
- merged = merged.sort_values("report_date", ascending=False).reset_index(
76
- drop=True
77
- )
78
-
79
- return merged
80
-
81
- def _fetch_balance_sheet(self) -> pd.DataFrame:
82
- """
83
- Get stock balance sheet data from East Money API
84
- """
85
- try:
86
- # API endpoint and parameters
87
- api_url = "https://datacenter-web.eastmoney.com/api/data/v1/get"
88
- params = {
89
- "reportName": "RPT_DMSK_FN_BALANCE",
90
- "filter": f'(SECURITY_CODE="{self.symbol}")',
91
- "pageNumber": "1",
92
- "pageSize": "1000",
93
- "sortColumns": "REPORT_DATE",
94
- "sortTypes": "-1",
95
- "columns": ",".join(self._balance_sheet_rename_map.keys()),
96
- }
97
-
98
- # Fetch data from API
99
- response = requests.get(api_url, params=params)
100
- response.raise_for_status()
101
- data = response.json()
102
-
103
- # Extract the actual data
104
- if data.get("result") and data["result"].get("data"):
105
- df = pd.DataFrame(data["result"]["data"])
106
- df.rename(columns=self._balance_sheet_rename_map, inplace=True)
107
- return df
108
- else:
109
- print("No balance sheet data found in API response")
110
- return pd.DataFrame()
111
-
112
- except Exception as e:
113
- print(f"Error occurred: {str(e)}")
114
- return pd.DataFrame()
115
-
116
- def _fetch_income_statement(self) -> pd.DataFrame:
117
- """
118
- Get stock income statement data from East Money API
119
- """
120
- try:
121
- # API endpoint and parameters
122
- api_url = "https://datacenter-web.eastmoney.com/api/data/v1/get"
123
- params = {
124
- "reportName": "RPT_DMSK_FN_INCOME",
125
- "filter": f'(SECURITY_CODE="{self.symbol}")',
126
- "pageNumber": "1",
127
- "pageSize": "1000",
128
- "sortColumns": "REPORT_DATE",
129
- "sortTypes": "-1",
130
- "columns": ",".join(self._income_statement_rename_map.keys()),
131
- }
132
-
133
- # Fetch data from API
134
- response = requests.get(api_url, params=params)
135
- response.raise_for_status()
136
- data = response.json()
137
-
138
- # Extract the actual data
139
- if data.get("result") and data["result"].get("data"):
140
- df = pd.DataFrame(data["result"]["data"])
141
- df.rename(columns=self._income_statement_rename_map, inplace=True)
142
- return df
143
- else:
144
- print("No income statement data found in API response")
145
- return pd.DataFrame()
146
-
147
- except Exception as e:
148
- print(f"Error occurred: {str(e)}")
149
- return pd.DataFrame()
150
-
151
- def _fetch_cash_flow(self) -> pd.DataFrame:
152
- """
153
- Get stock cash flow statement data from East Money API
154
- """
155
- try:
156
- # API endpoint and parameters
157
- api_url = "https://datacenter-web.eastmoney.com/api/data/v1/get"
158
- params = {
159
- "reportName": "RPT_DMSK_FN_CASHFLOW",
160
- "filter": f'(SECURITY_CODE="{self.symbol}")',
161
- "pageNumber": "1",
162
- "pageSize": "1000",
163
- "sortColumns": "REPORT_DATE",
164
- "sortTypes": "-1",
165
- "columns": ",".join(self._cash_flow_rename_map.keys()),
166
- }
167
-
168
- # Fetch data from API
169
- response = requests.get(api_url, params=params)
170
- response.raise_for_status()
171
- data = response.json()
172
-
173
- # Extract the actual data
174
- if data.get("result") and data["result"].get("data"):
175
- df = pd.DataFrame(data["result"]["data"])
176
- df.rename(columns=self._cash_flow_rename_map, inplace=True)
177
- return df
178
- else:
179
- print("No cash flow statement data found in API response")
180
- return pd.DataFrame()
181
-
182
- except Exception as e:
183
- print(f"Error occurred: {str(e)}")
184
- return pd.DataFrame()
@@ -1,33 +0,0 @@
1
- import unittest
2
- import pandas as pd
3
- from akshare_one import get_basic_info
4
-
5
-
6
- class TestInfo(unittest.TestCase):
7
- def test_get_info(self):
8
- """测试获取股票基本信息"""
9
- df = get_basic_info("600405")
10
- self.assertIsInstance(df, pd.DataFrame)
11
- self.assertFalse(df.empty)
12
- self.assertEqual(df.shape[0], 1)
13
-
14
- expected_columns = [
15
- "price",
16
- "symbol",
17
- "name",
18
- "total_shares",
19
- "float_shares",
20
- "total_market_cap",
21
- "float_market_cap",
22
- "industry",
23
- "listing_date",
24
- ]
25
- for col in expected_columns:
26
- self.assertIn(col, df.columns)
27
-
28
- self.assertEqual(df["symbol"].iloc[0], "600405")
29
- self.assertIsInstance(df["listing_date"].iloc[0], pd.Timestamp)
30
-
31
-
32
- if __name__ == "__main__":
33
- unittest.main()