akshare-one 0.3.9.tar.gz → 0.3.11.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. {akshare_one-0.3.9 → akshare_one-0.3.11}/PKG-INFO +2 -2
  2. {akshare_one-0.3.9 → akshare_one-0.3.11}/pyproject.toml +18 -2
  3. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/__init__.py +6 -4
  4. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/eastmoney/client.py +7 -4
  5. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/eastmoney/utils.py +4 -3
  6. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/indicators.py +1 -0
  7. akshare_one-0.3.11/src/akshare_one/modules/cache.py +45 -0
  8. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/financial/base.py +1 -0
  9. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/financial/eastmoney_direct.py +2 -1
  10. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/financial/sina.py +11 -11
  11. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/historical/base.py +2 -1
  12. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/historical/eastmoney.py +7 -4
  13. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/historical/eastmoney_direct.py +12 -5
  14. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/historical/sina.py +12 -9
  15. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/indicators/base.py +1 -0
  16. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/indicators/simple.py +8 -6
  17. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/indicators/talib.py +96 -76
  18. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/info/base.py +1 -0
  19. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/info/eastmoney.py +2 -2
  20. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/insider/base.py +1 -0
  21. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/insider/xueqiu.py +5 -4
  22. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/news/base.py +1 -0
  23. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/realtime/base.py +1 -0
  24. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/realtime/eastmoney.py +2 -2
  25. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/realtime/eastmoney_direct.py +7 -3
  26. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/realtime/factory.py +6 -22
  27. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/realtime/xueqiu.py +4 -3
  28. akshare_one-0.3.9/src/akshare_one/modules/cache.py +0 -30
  29. {akshare_one-0.3.9 → akshare_one-0.3.11}/README.md +0 -0
  30. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/financial/factory.py +1 -1
  31. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/historical/factory.py +0 -0
  32. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/indicators/__init__.py +0 -0
  33. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/indicators/factory.py +0 -0
  34. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/info/factory.py +1 -1
  35. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/insider/factory.py +1 -1
  36. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/news/eastmoney.py +1 -1
  37. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/news/factory.py +1 -1
  38. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/modules/utils.py +0 -0
  39. {akshare_one-0.3.9 → akshare_one-0.3.11}/src/akshare_one/py.typed +0 -0
PKG-INFO
@@ -1,10 +1,10 @@
  Metadata-Version: 2.4
  Name: akshare-one
- Version: 0.3.9
+ Version: 0.3.11
  Summary: Standardized interface for Chinese financial market data, built on AKShare with unified data formats and simplified APIs
  Keywords: akshare,financial-data,stock-data,quant
  License-Expression: MIT
- Requires-Dist: akshare>=1.17.38
+ Requires-Dist: akshare>=1.17.41
  Requires-Dist: cachetools>=5.5.2
  Requires-Dist: ta-lib>=0.6.4 ; extra == 'talib'
  Requires-Python: >=3.10

pyproject.toml
@@ -1,11 +1,11 @@
  [project]
  name = "akshare-one"
- version = "0.3.9"
+ version = "0.3.11"
  description = "Standardized interface for Chinese financial market data, built on AKShare with unified data formats and simplified APIs"
  readme = "README.md"
  requires-python = ">=3.10"
  dependencies = [
- "akshare>=1.17.38",
+ "akshare>=1.17.41",
  "cachetools>=5.5.2",
  ]
  license = "MIT"
@@ -46,5 +46,21 @@ addopts = "-v --cov=akshare_one --cov-report=term-missing"
  [tool.ruff]
  line-length = 88

+ [tool.ruff.lint]
+ select = [
+ # pycodestyle
+ "E",
+ # Pyflakes
+ "F",
+ # pyupgrade
+ "UP",
+ # flake8-bugbear
+ "B",
+ # flake8-simplify
+ "SIM",
+ # isort
+ "I",
+ ]
+
  [tool.mypy]
  strict = true

src/akshare_one/__init__.py
@@ -14,14 +14,16 @@ Example:
  >>> df = get_realtime_data(symbol="600000")
  """

- from typing import Optional, Literal
+ from typing import Literal
+
  import pandas as pd
+
  from .modules.financial.factory import FinancialDataFactory
  from .modules.historical.factory import HistoricalDataFactory
- from .modules.realtime.factory import RealtimeDataFactory
  from .modules.info.factory import InfoDataFactory
- from .modules.news.factory import NewsDataFactory
  from .modules.insider.factory import InsiderDataFactory
+ from .modules.news.factory import NewsDataFactory
+ from .modules.realtime.factory import RealtimeDataFactory


  def get_basic_info(
@@ -91,7 +93,7 @@ def get_hist_data(


  def get_realtime_data(
- symbol: Optional[str] = None,
+ symbol: str | None = None,
  source: Literal["eastmoney", "eastmoney_direct", "xueqiu"] = "eastmoney_direct",
  ) -> pd.DataFrame:
  """Get real-time market quotes

src/akshare_one/eastmoney/client.py
@@ -1,5 +1,6 @@
+ from typing import Any
+
  import requests
- from typing import Dict, Any


  class EastMoneyClient:
@@ -44,7 +45,7 @@ class EastMoneyClient:

  def fetch_historical_klines(
  self, symbol: str, klt: str, fqt: str, start_date: str, end_date: str
- ) -> Dict[str, Any]:
+ ) -> dict[str, Any]:
  """
  Fetches historical K-line (candlestick) data.
  """
@@ -63,7 +64,7 @@
  response.raise_for_status()
  return response.json() # type: ignore

- def fetch_realtime_quote(self, symbol: str) -> Dict[str, Any]:
+ def fetch_realtime_quote(self, symbol: str) -> dict[str, Any]:
  """
  Fetches real-time quote data for a single stock.
  """
@@ -72,7 +73,9 @@
  params = {
  "invt": "2",
  "fltt": "2",
- "fields": "f43,f57,f58,f169,f170,f46,f60,f44,f51,f168,f47,f164,f163,f116,f60,f45,f52,f50,f48,f167,f117,f71,f161,f49,f530",
+ "fields": (
+ "f43,f57,f58,f169,f170,f46,f60,f44,f51,f168,f47,f164,f163,f116,f60,f45,f52,f50,f48,f167,f117,f71,f161,f49,f530"
+ ),
  "secid": secid,
  }
  response = self.session.get(url, params=params)

src/akshare_one/eastmoney/utils.py
@@ -1,8 +1,9 @@
+ from typing import Any
+
  import pandas as pd
- from typing import Dict, Any


- def parse_kline_data(data: Dict[str, Any]) -> pd.DataFrame:
+ def parse_kline_data(data: dict[str, Any]) -> pd.DataFrame:
  """
  Parses K-line data from the API response into a pandas DataFrame.
  """
@@ -35,7 +36,7 @@ def parse_kline_data(data: Dict[str, Any]) -> pd.DataFrame:
  return df


- def parse_realtime_data(data: Dict[str, Any]) -> pd.DataFrame:
+ def parse_realtime_data(data: dict[str, Any]) -> pd.DataFrame:
  """
  Parses real-time quote data from the API response into a pandas DataFrame.
  """

src/akshare_one/indicators.py
@@ -38,6 +38,7 @@ Provides common technical analysis indicators like:
  """

  import pandas as pd
+
  from .modules.indicators.factory import IndicatorFactory



src/akshare_one/modules/cache.py (new file)
@@ -0,0 +1,45 @@
+ import os
+ from collections.abc import Callable
+ from typing import Any, TypeVar
+
+ from cachetools import TTLCache, cached
+
+ F = TypeVar("F", bound=Callable[..., Any])
+
+ # 缓存配置
+ CACHE_CONFIG: dict[str, TTLCache[Any, Any]] = {
+ "hist_data_cache": TTLCache(maxsize=1000, ttl=3600), # 历史数据缓存1小时
+ "realtime_cache": TTLCache(maxsize=500, ttl=60), # 实时数据缓存1分钟
+ "news_cache": TTLCache(maxsize=500, ttl=3600), # 新闻数据缓存1小时
+ "financial_cache": TTLCache(maxsize=500, ttl=86400), # 财务数据缓存24小时
+ "info_cache": TTLCache(maxsize=500, ttl=86400), # 信息数据缓存24小时
+ }
+
+
+ def cache(cache_key: str, key: Callable[..., Any] | None = None) -> Callable[[F], F]:
+ def decorator(func: F) -> F:
+ def wrapper(*args: Any, **kwargs: Any) -> Any:
+ cache_enabled = os.getenv("AKSHARE_ONE_CACHE_ENABLED", "true").lower() in (
+ "1",
+ "true",
+ "yes",
+ "on",
+ )
+
+ if cache_enabled:
+ if cache_key not in CACHE_CONFIG:
+ raise KeyError(
+ f"Cache configuration '{cache_key}' not found. "
+ f"Available keys: {list(CACHE_CONFIG.keys())}"
+ )
+ if key is not None:
+ return cached(CACHE_CONFIG[cache_key], key=key)(func)(
+ *args, **kwargs
+ )
+ else:
+ return cached(CACHE_CONFIG[cache_key])(func)(*args, **kwargs)
+ return func(*args, **kwargs)
+
+ return wrapper # type: ignore
+
+ return decorator
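
The rewritten cache module checks AKSHARE_ONE_CACHE_ENABLED inside the wrapper, so the toggle is evaluated on every call instead of once at import time, and unknown cache keys now raise a descriptive KeyError. A minimal usage sketch; the DemoProvider class below is purely illustrative, only the decorator and the CACHE_CONFIG keys come from the module above.

import pandas as pd

from akshare_one.modules.cache import cache


class DemoProvider:
    """Hypothetical provider used only to show how the decorator is applied."""

    def __init__(self, symbol: str) -> None:
        self.symbol = symbol

    @cache("hist_data_cache", key=lambda self: f"demo_hist_{self.symbol}")
    def get_hist_data(self) -> pd.DataFrame:
        # The expensive fetch would live here; results are kept for one hour
        # per the "hist_data_cache" TTLCache defined above.
        return pd.DataFrame({"symbol": [self.symbol]})


# Setting AKSHARE_ONE_CACHE_ENABLED=0 at runtime now disables caching for
# subsequent calls without re-importing the package.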

src/akshare_one/modules/financial/base.py
@@ -1,4 +1,5 @@
  from abc import ABC, abstractmethod
+
  import pandas as pd



src/akshare_one/modules/financial/eastmoney_direct.py
@@ -2,6 +2,7 @@ import pandas as pd
  import requests

  from akshare_one.modules.cache import cache
+
  from .base import FinancialDataProvider


@@ -49,7 +50,7 @@ class EastMoneyDirectFinancialReport(FinancialDataProvider):

  @cache(
  "financial_cache",
- key=lambda self, symbol=None: f"eastmoney_financial_metrics_{self.symbol}",
+ key=lambda self: f"eastmoney_financial_metrics_{self.symbol}",
  )
  def get_financial_metrics(self) -> pd.DataFrame:
  """获取三大财务报表关键指标"""

src/akshare_one/modules/financial/sina.py
@@ -1,5 +1,5 @@
+ import akshare as ak # type: ignore
  import pandas as pd
- import akshare as ak # type: ignore

  from ..cache import cache
  from .base import FinancialDataProvider
@@ -18,9 +18,7 @@ class SinaFinancialReport(FinancialDataProvider):
  f"sh{symbol}" if not symbol.startswith(("sh", "sz", "bj")) else symbol
  )

- @cache(
- "financial_cache", key=lambda self, symbol=None: f"sina_balance_{self.symbol}"
- )
+ @cache("financial_cache", key=lambda self: f"sina_balance_{self.symbol}")
  def get_balance_sheet(self) -> pd.DataFrame:
  """获取资产负债表数据

@@ -33,9 +31,7 @@ class SinaFinancialReport(FinancialDataProvider):
  raw_df = ak.stock_financial_report_sina(stock=self.stock, symbol="资产负债表")
  return self._clean_balance_data(raw_df)

- @cache(
- "financial_cache", key=lambda self, symbol=None: f"sina_income_{self.symbol}"
- )
+ @cache("financial_cache", key=lambda self: f"sina_income_{self.symbol}")
  def get_income_statement(self) -> pd.DataFrame:
  """获取利润表数据

@@ -48,7 +44,7 @@ class SinaFinancialReport(FinancialDataProvider):
  raw_df = ak.stock_financial_report_sina(stock=self.stock, symbol="利润表")
  return self._clean_income_data(raw_df)

- @cache("financial_cache", key=lambda self, symbol=None: f"sina_cash_{self.symbol}")
+ @cache("financial_cache", key=lambda self: f"sina_cash_{self.symbol}")
  def get_cash_flow(self) -> pd.DataFrame:
  """获取现金流量表数据

@@ -81,8 +77,10 @@ class SinaFinancialReport(FinancialDataProvider):
  column_mapping = {
  "币种": "currency",
  "经营活动产生的现金流量净额": "net_cash_flow_from_operations",
- "购建固定资产、无形资产和其他长期资产支付的现金": "capital_expenditure",
- "取得子公司及其他营业单位支付的现金净额": "business_acquisitions_and_disposals",
+ "购建固定资产、无形资产和其他长期资产支付的现金": ("capital_expenditure"),
+ "取得子公司及其他营业单位支付的现金净额": (
+ "business_acquisitions_and_disposals"
+ ),
  "投资活动产生的现金流量净额": "net_cash_flow_from_investing",
  "取得借款收到的现金": "issuance_or_repayment_of_debt_securities",
  "吸收投资收到的现金": "issuance_or_purchase_of_equity_shares",
@@ -101,7 +99,9 @@ class SinaFinancialReport(FinancialDataProvider):
  "处置固定资产、无形资产收回的现金": "cash_from_asset_sales",
  "投资活动现金流入小计": "total_cash_inflow_from_investing",
  "投资活动现金流出小计": "total_cash_outflow_from_investing",
- "分配股利、利润或偿付利息所支付的现金": "cash_paid_for_dividends_and_interest",
+ "分配股利、利润或偿付利息所支付的现金": (
+ "cash_paid_for_dividends_and_interest"
+ ),
  "偿还债务支付的现金": "cash_paid_for_debt_repayment",
  "筹资活动现金流入小计": "total_cash_inflow_from_financing",
  "筹资活动现金流出小计": "total_cash_outflow_from_financing",

src/akshare_one/modules/historical/base.py
@@ -1,4 +1,5 @@
  from abc import ABC, abstractmethod
+
  import pandas as pd


@@ -25,7 +26,7 @@ class HistoricalDataProvider(ABC):
  pd.to_datetime(self.start_date)
  pd.to_datetime(self.end_date)
  except ValueError:
- raise ValueError("Invalid date format. Please use YYYY-MM-DD.")
+ raise ValueError("Invalid date format. Please use YYYY-MM-DD.") from None

  @classmethod
  def get_supported_intervals(cls) -> list[str]:
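
The functional change here is exception chaining: re-raising with "from None" hides the internal pd.to_datetime traceback so callers see one clear error, and it satisfies flake8-bugbear's B904 rule now enabled via the "B" selection in pyproject.toml. A standalone sketch of the same pattern:

import pandas as pd


def validate_date(value: str) -> None:
    try:
        pd.to_datetime(value)
    except ValueError:
        # "from None" suppresses the chained parser exception in the traceback
        raise ValueError("Invalid date format. Please use YYYY-MM-DD.") from None


validate_date("2024-01-31")    # passes
# validate_date("not-a-date")  # raises the single, friendlier ValueError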

src/akshare_one/modules/historical/eastmoney.py
@@ -1,7 +1,8 @@
- from .base import HistoricalDataProvider
- import akshare as ak # type: ignore
+ import akshare as ak # type: ignore
  import pandas as pd
+
  from ..cache import cache
+ from .base import HistoricalDataProvider


  class EastMoneyHistorical(HistoricalDataProvider):
@@ -9,7 +10,9 @@ class EastMoneyHistorical(HistoricalDataProvider):

  @cache(
  "hist_data_cache",
- key=lambda self: f"eastmoney_hist_{self.symbol}_{self.interval}_{self.interval_multiplier}_{self.adjust}",
+ key=lambda self: (
+ f"eastmoney_hist_{self.symbol}_{self.interval}_{self.interval_multiplier}_{self.adjust}"
+ ),
  )
  def get_hist_data(self) -> pd.DataFrame:
  """Fetches EastMoney historical market data
@@ -34,7 +37,7 @@ class EastMoneyHistorical(HistoricalDataProvider):

  return df
  except Exception as e:
- raise ValueError(f"Failed to fetch historical data: {str(e)}")
+ raise ValueError(f"Failed to fetch historical data: {str(e)}") from e

  def _get_intraday_data(self) -> pd.DataFrame:
  """Fetches intraday data at minute or hour intervals"""

src/akshare_one/modules/historical/eastmoney_direct.py
@@ -1,10 +1,13 @@
- import pandas as pd
  from typing import Any
- from .base import HistoricalDataProvider
- from ..cache import cache
+
+ import pandas as pd
+
  from akshare_one.eastmoney.client import EastMoneyClient
  from akshare_one.eastmoney.utils import parse_kline_data, resample_historical_data

+ from ..cache import cache
+ from .base import HistoricalDataProvider
+

  class EastMoneyDirectHistorical(HistoricalDataProvider):
  """Direct implementation for EastMoney historical stock data API"""
@@ -15,7 +18,9 @@ class EastMoneyDirectHistorical(HistoricalDataProvider):

  @cache(
  "hist_data_cache",
- key=lambda self: f"eastmoney_direct_hist_{self.symbol}_{self.interval}_{self.interval_multiplier}_{self.adjust}",
+ key=lambda self: (
+ f"eastmoney_direct_hist_{self.symbol}_{self.interval}_{self.interval_multiplier}_{self.adjust}"
+ ),
  )
  def get_hist_data(self) -> pd.DataFrame:
  """Fetches EastMoney historical market data directly from API"""
@@ -46,7 +51,9 @@ class EastMoneyDirectHistorical(HistoricalDataProvider):
  return df

  except Exception as e:
- raise ValueError(f"Failed to fetch historical data for {self.symbol}: {e}")
+ raise ValueError(
+ f"Failed to fetch historical data for {self.symbol}: {e}"
+ ) from e

  def _get_kline_type(self) -> str:
  """Get K-line type based on interval."""

src/akshare_one/modules/historical/sina.py
@@ -1,16 +1,18 @@
- from cachetools import cached
- from .base import HistoricalDataProvider
  import akshare as ak # type: ignore
  import pandas as pd
- from ..cache import CACHE_CONFIG
+
+ from ..cache import cache
+ from .base import HistoricalDataProvider


  class SinaHistorical(HistoricalDataProvider):
  """Adapter for Sina historical stock data API"""

- @cached(
- cache=CACHE_CONFIG["hist_data_cache"],
- key=lambda self: f"sina_hist_{self.symbol}_{self.interval}_{self.interval_multiplier}_{self.adjust}",
+ @cache(
+ "hist_data_cache",
+ key=lambda self: (
+ f"sina_hist_{self.symbol}_{self.interval}_{self.interval_multiplier}_{self.adjust}"
+ ),
  )
  def get_hist_data(self) -> pd.DataFrame:
  """Fetches Sina historical market data
@@ -43,7 +45,7 @@ class SinaHistorical(HistoricalDataProvider):

  return df
  except Exception as e:
- raise ValueError(f"Failed to fetch historical data: {str(e)}")
+ raise ValueError(f"Failed to fetch historical data: {str(e)}") from e

  def _get_minute_data(self, stock: str) -> pd.DataFrame:
  """Fetches minute level data"""
@@ -114,8 +116,9 @@ class SinaHistorical(HistoricalDataProvider):
  raw_df = raw_df.rename(columns={"day": "date"})

  if self.interval_multiplier > 1:
- freq = f"{self.interval_multiplier}{'min' if self.interval == 'minute' else 'h'}"
- raw_df = self._resample_data(raw_df, self.interval, self.interval_multiplier)
+ raw_df = self._resample_data(
+ raw_df, self.interval, self.interval_multiplier
+ )
  else:
  raw_df = ak.stock_zh_b_daily(
  symbol=stock,

src/akshare_one/modules/indicators/base.py
@@ -1,4 +1,5 @@
  from abc import ABC, abstractmethod
+
  import pandas as pd



src/akshare_one/modules/indicators/simple.py
@@ -1,5 +1,6 @@
- import pandas as pd
  import numpy as np
+ import pandas as pd
+
  from .base import BaseIndicatorCalculator


@@ -13,7 +14,8 @@ class SimpleIndicatorCalculator(BaseIndicatorCalculator):
  return series.ewm(span=window, adjust=False, min_periods=window).mean()
  else:
  raise ValueError(
- f"Unsupported ma_type: {ma_type} in simple calculator. Only SMA (0) and EMA (1) are supported."
+ f"Unsupported ma_type: {ma_type} in simple calculator. "
+ f"Only SMA (0) and EMA (1) are supported."
  )

  def _wilder_smooth(self, series: pd.Series, window: int) -> pd.Series:
@@ -358,8 +360,8 @@ class SimpleIndicatorCalculator(BaseIndicatorCalculator):
  ema1 = close.ewm(span=window, adjust=False).mean()
  ema2 = ema1.ewm(span=window, adjust=False).mean()
  ema3 = ema2.ewm(span=window, adjust=False).mean()
- trix = 100 * ema3.diff(1) / ema3.shift(1)
- return trix.to_frame("trix")
+ trix = 100 * ema3.diff(1) / ema3.shift(1) # type: ignore
+ return pd.DataFrame({"trix": trix}, index=df.index)

  def calculate_ultosc(
  self, df: pd.DataFrame, window1: int, window2: int, window3: int
@@ -381,5 +383,5 @@ class SimpleIndicatorCalculator(BaseIndicatorCalculator):
  avg1 = avg1.fillna(0)
  avg2 = avg2.fillna(0)
  avg3 = avg3.fillna(0)
- ultosc = 100 * (4 * avg1 + 2 * avg2 + 1 * avg3) / (4 + 2 + 1)
- return ultosc.to_frame("ultosc")
+ ultosc = 100 * (4 * avg1 + 2 * avg2 + avg3) / (4 + 2 + 1)
+ return pd.DataFrame({"ultosc": ultosc}, index=df.index)
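
Two behavioral notes on this file: the redundant "1 *" weight is dropped from the Ultimate Oscillator average (the result is numerically identical), and TRIX/ULTOSC now return DataFrames built explicitly on df.index instead of Series.to_frame. The TRIX definition implemented above is the one-period percentage rate of change of a triple-smoothed EMA; a compact restatement mirroring that code (illustrative, not an additional package API):

import pandas as pd


def trix(close: pd.Series, window: int) -> pd.Series:
    ema1 = close.ewm(span=window, adjust=False).mean()
    ema2 = ema1.ewm(span=window, adjust=False).mean()
    ema3 = ema2.ewm(span=window, adjust=False).mean()
    # TRIX_t = 100 * (EMA3_t - EMA3_{t-1}) / EMA3_{t-1}
    return 100 * ema3.diff(1) / ema3.shift(1)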

src/akshare_one/modules/indicators/talib.py
@@ -1,30 +1,46 @@
- import talib # type: ignore
+ import numpy as np
  import pandas as pd
+ import talib
+ from talib import MA_Type # type: ignore
+
  from .base import BaseIndicatorCalculator

+ # Create a mapping from integer values to MA_Type enum values
+ MA_TYPE_MAPPING = {
+ 0: MA_Type.SMA,
+ 1: MA_Type.EMA,
+ 2: MA_Type.WMA,
+ 3: MA_Type.DEMA,
+ 4: MA_Type.TEMA,
+ 5: MA_Type.TRIMA,
+ 6: MA_Type.KAMA,
+ 7: MA_Type.MAMA,
+ 8: MA_Type.T3,
+ }
+

  class TalibIndicatorCalculator(BaseIndicatorCalculator):
  """TA-Lib based indicator implementations"""

  def calculate_sma(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- close = df["close"].values
+ close = df["close"].values.astype(np.float64)
  sma = talib.SMA(close, timeperiod=window)
  return pd.DataFrame({"sma": sma}, index=df.index)

  def calculate_ema(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- close = df["close"].values
+ close = df["close"].values.astype(np.float64)
  ema = talib.EMA(close, timeperiod=window)
  return pd.DataFrame({"ema": ema}, index=df.index)

  def calculate_rsi(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- close = df["close"].values
+ close = df["close"].values.astype(np.float64)
  rsi = talib.RSI(close, timeperiod=window)
  return pd.DataFrame({"rsi": rsi}, index=df.index)

  def calculate_macd(
  self, df: pd.DataFrame, fast: int, slow: int, signal: int
  ) -> pd.DataFrame:
- close = df["close"].values
+ close = df["close"].values.astype(np.float64)
  macd, signal_line, histogram = talib.MACD(
  close, fastperiod=fast, slowperiod=slow, signalperiod=signal
  )
@@ -36,9 +52,9 @@ class TalibIndicatorCalculator(BaseIndicatorCalculator):
  def calculate_bollinger_bands(
  self, df: pd.DataFrame, window: int, std: int
  ) -> pd.DataFrame:
- close = df["close"].values
+ close = df["close"].values.astype(np.float64)
  upper, middle, lower = talib.BBANDS(
- close, timeperiod=window, nbdevup=std, nbdevdn=std, matype=talib.MA_Type.SMA
+ close, timeperiod=window, nbdevup=std, nbdevdn=std, matype=MA_Type.SMA
  )
  return pd.DataFrame(
  {"upper_band": upper, "middle_band": middle, "lower_band": lower},
@@ -48,210 +64,214 @@ class TalibIndicatorCalculator(BaseIndicatorCalculator):
  def calculate_stoch(
  self, df: pd.DataFrame, window: int, smooth_d: int, smooth_k: int
  ) -> pd.DataFrame:
- high = df["high"].values
- low = df["low"].values
- close = df["close"].values
+ high = df["high"].values.astype(np.float64)
+ low = df["low"].values.astype(np.float64)
+ close = df["close"].values.astype(np.float64)
  slow_k, slow_d = talib.STOCH(
  high,
  low,
  close,
  fastk_period=window,
  slowk_period=smooth_k,
- slowk_matype=talib.MA_Type.SMA,
+ slowk_matype=MA_Type.SMA,
  slowd_period=smooth_d,
- slowd_matype=talib.MA_Type.SMA,
+ slowd_matype=MA_Type.SMA,
  )
  return pd.DataFrame({"slow_k": slow_k, "slow_d": slow_d}, index=df.index)

  def calculate_atr(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- high = df["high"].values
- low = df["low"].values
- close = df["close"].values
+ high = df["high"].values.astype(np.float64)
+ low = df["low"].values.astype(np.float64)
+ close = df["close"].values.astype(np.float64)
  atr = talib.ATR(high, low, close, timeperiod=window)
  return pd.DataFrame({"atr": atr}, index=df.index)

  def calculate_cci(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- high = df["high"].values
- low = df["low"].values
- close = df["close"].values
+ high = df["high"].values.astype(np.float64)
+ low = df["low"].values.astype(np.float64)
+ close = df["close"].values.astype(np.float64)
  cci = talib.CCI(high, low, close, timeperiod=window)
  return pd.DataFrame({"cci": cci}, index=df.index)

  def calculate_adx(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- high = df["high"].values
- low = df["low"].values
- close = df["close"].values
+ high = df["high"].values.astype(np.float64)
+ low = df["low"].values.astype(np.float64)
+ close = df["close"].values.astype(np.float64)
  adx = talib.ADX(high, low, close, timeperiod=window)
  return pd.DataFrame({"adx": adx}, index=df.index)

  def calculate_willr(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- high = df["high"].values
- low = df["low"].values
- close = df["close"].values
+ high = df["high"].values.astype(np.float64)
+ low = df["low"].values.astype(np.float64)
+ close = df["close"].values.astype(np.float64)
  willr = talib.WILLR(high, low, close, timeperiod=window)
  return pd.DataFrame({"willr": willr}, index=df.index)

  def calculate_ad(self, df: pd.DataFrame) -> pd.DataFrame:
- high = df["high"].values
- low = df["low"].values
- close = df["close"].values
- volume = df["volume"].values.astype(float)
+ high = df["high"].values.astype(np.float64)
+ low = df["low"].values.astype(np.float64)
+ close = df["close"].values.astype(np.float64)
+ volume = df["volume"].values.astype(np.float64)
  ad = talib.AD(high, low, close, volume)
  return pd.DataFrame({"ad": ad}, index=df.index)

  def calculate_adosc(
  self, df: pd.DataFrame, fast_period: int, slow_period: int
  ) -> pd.DataFrame:
- high = df["high"].values
- low = df["low"].values
- close = df["close"].values
- volume = df["volume"].values.astype(float)
+ high = df["high"].values.astype(np.float64)
+ low = df["low"].values.astype(np.float64)
+ close = df["close"].values.astype(np.float64)
+ volume = df["volume"].values.astype(np.float64)
  adosc = talib.ADOSC(
  high, low, close, volume, fastperiod=fast_period, slowperiod=slow_period
  )
  return pd.DataFrame({"adosc": adosc}, index=df.index)

  def calculate_obv(self, df: pd.DataFrame) -> pd.DataFrame:
- close = df["close"].values
- volume = df["volume"].values.astype(float)
+ close = df["close"].values.astype(np.float64)
+ volume = df["volume"].values.astype(np.float64)
  obv = talib.OBV(close, volume)
  return pd.DataFrame({"obv": obv}, index=df.index)

  def calculate_mom(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- close = df["close"].values
+ close = df["close"].values.astype(np.float64)
  mom = talib.MOM(close, timeperiod=window)
  return pd.DataFrame({"mom": mom}, index=df.index)

  def calculate_sar(
  self, df: pd.DataFrame, acceleration: float, maximum: float
  ) -> pd.DataFrame:
- high = df["high"].values
- low = df["low"].values
+ high = df["high"].values.astype(np.float64)
+ low = df["low"].values.astype(np.float64)
  sar = talib.SAR(high, low, acceleration=acceleration, maximum=maximum)
  return pd.DataFrame({"sar": sar}, index=df.index)

  def calculate_tsf(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- close = df["close"].values
+ close = df["close"].values.astype(np.float64)
  tsf = talib.TSF(close, timeperiod=window)
  return pd.DataFrame({"tsf": tsf}, index=df.index)

  def calculate_apo(
  self, df: pd.DataFrame, fast_period: int, slow_period: int, ma_type: int
  ) -> pd.DataFrame:
- close = df["close"].values
+ close = df["close"].values.astype(np.float64)
+ # Convert integer to MA_Type enum value
+ ma_type_enum = MA_TYPE_MAPPING.get(ma_type, MA_Type.SMA)
  apo = talib.APO(
- close, fastperiod=fast_period, slowperiod=slow_period, matype=ma_type
+ close, fastperiod=fast_period, slowperiod=slow_period, matype=ma_type_enum
  )
  return pd.DataFrame({"apo": apo}, index=df.index)

  def calculate_aroon(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- high = df["high"].values
- low = df["low"].values
+ high = df["high"].values.astype(np.float64)
+ low = df["low"].values.astype(np.float64)
  aroon_down, aroon_up = talib.AROON(high, low, timeperiod=window)
  return pd.DataFrame(
  {"aroon_down": aroon_down, "aroon_up": aroon_up}, index=df.index
  )

  def calculate_aroonosc(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- high = df["high"].values
- low = df["low"].values
+ high = df["high"].values.astype(np.float64)
+ low = df["low"].values.astype(np.float64)
  aroonosc = talib.AROONOSC(high, low, timeperiod=window)
  return pd.DataFrame({"aroonosc": aroonosc}, index=df.index)

  def calculate_bop(self, df: pd.DataFrame) -> pd.DataFrame:
- open_ = df["open"].values
- high = df["high"].values
- low = df["low"].values
- close = df["close"].values
+ open_ = df["open"].values.astype(np.float64)
+ high = df["high"].values.astype(np.float64)
+ low = df["low"].values.astype(np.float64)
+ close = df["close"].values.astype(np.float64)
  bop = talib.BOP(open_, high, low, close)
  return pd.DataFrame({"bop": bop}, index=df.index)

  def calculate_cmo(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- close = df["close"].values
+ close = df["close"].values.astype(np.float64)
  cmo = talib.CMO(close, timeperiod=window)
  return pd.DataFrame({"cmo": cmo}, index=df.index)

  def calculate_dx(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- high = df["high"].values
- low = df["low"].values
- close = df["close"].values
+ high = df["high"].values.astype(np.float64)
+ low = df["low"].values.astype(np.float64)
+ close = df["close"].values.astype(np.float64)
  dx = talib.DX(high, low, close, timeperiod=window)
  return pd.DataFrame({"dx": dx}, index=df.index)

  def calculate_mfi(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- high = df["high"].values
- low = df["low"].values
- close = df["close"].values
- volume = df["volume"].values.astype(float)
+ high = df["high"].values.astype(np.float64)
+ low = df["low"].values.astype(np.float64)
+ close = df["close"].values.astype(np.float64)
+ volume = df["volume"].values.astype(np.float64)
  mfi = talib.MFI(high, low, close, volume, timeperiod=window)
  return pd.DataFrame({"mfi": mfi}, index=df.index)

  def calculate_minus_di(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- high = df["high"].values
- low = df["low"].values
- close = df["close"].values
+ high = df["high"].values.astype(np.float64)
+ low = df["low"].values.astype(np.float64)
+ close = df["close"].values.astype(np.float64)
  minus_di = talib.MINUS_DI(high, low, close, timeperiod=window)
  return pd.DataFrame({"minus_di": minus_di}, index=df.index)

  def calculate_minus_dm(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- high = df["high"].values
- low = df["low"].values
+ high = df["high"].values.astype(np.float64)
+ low = df["low"].values.astype(np.float64)
  minus_dm = talib.MINUS_DM(high, low, timeperiod=window)
  return pd.DataFrame({"minus_dm": minus_dm}, index=df.index)

  def calculate_plus_di(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- high = df["high"].values
- low = df["low"].values
- close = df["close"].values
+ high = df["high"].values.astype(np.float64)
+ low = df["low"].values.astype(np.float64)
+ close = df["close"].values.astype(np.float64)
  plus_di = talib.PLUS_DI(high, low, close, timeperiod=window)
  return pd.DataFrame({"plus_di": plus_di}, index=df.index)

  def calculate_plus_dm(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- high = df["high"].values
- low = df["low"].values
+ high = df["high"].values.astype(np.float64)
+ low = df["low"].values.astype(np.float64)
  plus_dm = talib.PLUS_DM(high, low, timeperiod=window)
  return pd.DataFrame({"plus_dm": plus_dm}, index=df.index)

  def calculate_ppo(
  self, df: pd.DataFrame, fast_period: int, slow_period: int, ma_type: int
  ) -> pd.DataFrame:
- close = df["close"].values
+ close = df["close"].values.astype(np.float64)
+ # Convert integer to MA_Type enum value
+ ma_type_enum = MA_TYPE_MAPPING.get(ma_type, MA_Type.SMA)
  ppo = talib.PPO(
- close, fastperiod=fast_period, slowperiod=slow_period, matype=ma_type
+ close, fastperiod=fast_period, slowperiod=slow_period, matype=ma_type_enum
  )
  return pd.DataFrame({"ppo": ppo}, index=df.index)

  def calculate_roc(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- close = df["close"].values
+ close = df["close"].values.astype(np.float64)
  roc = talib.ROC(close, timeperiod=window)
  return pd.DataFrame({"roc": roc}, index=df.index)

  def calculate_rocp(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- close = df["close"].values
+ close = df["close"].values.astype(np.float64)
  rocp = talib.ROCP(close, timeperiod=window)
  return pd.DataFrame({"rocp": rocp}, index=df.index)

  def calculate_rocr(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- close = df["close"].values
+ close = df["close"].values.astype(np.float64)
  rocr = talib.ROCR(close, timeperiod=window)
  return pd.DataFrame({"rocr": rocr}, index=df.index)

  def calculate_rocr100(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- close = df["close"].values
+ close = df["close"].values.astype(np.float64)
  rocr100 = talib.ROCR100(close, timeperiod=window)
  return pd.DataFrame({"rocr100": rocr100}, index=df.index)

  def calculate_trix(self, df: pd.DataFrame, window: int) -> pd.DataFrame:
- close = df["close"].values
+ close = df["close"].values.astype(np.float64)
  trix = talib.TRIX(close, timeperiod=window)
  return pd.DataFrame({"trix": trix}, index=df.index)

  def calculate_ultosc(
  self, df: pd.DataFrame, window1: int, window2: int, window3: int
  ) -> pd.DataFrame:
- high = df["high"].values
- low = df["low"].values
- close = df["close"].values
+ high = df["high"].values.astype(np.float64)
+ low = df["low"].values.astype(np.float64)
+ close = df["close"].values.astype(np.float64)
  ultosc = talib.ULTOSC(
  high,
  low,
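
Two recurring changes run through this file: every input column is cast to float64 before being handed to TA-Lib (the C wrappers expect double-precision arrays and reject integer or object input), and integer ma_type arguments are translated to talib.MA_Type members through MA_TYPE_MAPPING with SMA as the fallback. A minimal sketch of both, using only calls that appear above; the sample data and the two-entry mapping subset are illustrative:

import numpy as np
import pandas as pd
import talib
from talib import MA_Type

MA_TYPE_MAPPING = {0: MA_Type.SMA, 1: MA_Type.EMA}  # subset of the full mapping

df = pd.DataFrame({"close": [10, 11, 12, 13, 14, 15, 16, 17, 18, 19]})

# Integer column -> explicit float64 cast so the TA-Lib C layer accepts it
close = df["close"].values.astype(np.float64)

ma_type_enum = MA_TYPE_MAPPING.get(1, MA_Type.SMA)
apo = talib.APO(close, fastperiod=3, slowperiod=6, matype=ma_type_enum)
result = pd.DataFrame({"apo": apo}, index=df.index)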

src/akshare_one/modules/info/base.py
@@ -1,4 +1,5 @@
  from abc import ABC, abstractmethod
+
  import pandas as pd



src/akshare_one/modules/info/eastmoney.py
@@ -1,5 +1,5 @@
+ import akshare as ak # type: ignore
  import pandas as pd
- import akshare as ak # type: ignore

  from ..cache import cache
  from .base import InfoDataProvider
@@ -20,7 +20,7 @@ class EastmoneyInfo(InfoDataProvider):

  @cache(
  "info_cache",
- key=lambda self, symbol=None: f"eastmoney_{symbol}",
+ key=lambda self: f"eastmoney_{self.symbol}",
  )
  def get_basic_info(self) -> pd.DataFrame:
  """获取东方财富个股信息"""

src/akshare_one/modules/insider/base.py
@@ -1,4 +1,5 @@
  from abc import ABC, abstractmethod
+
  import pandas as pd



src/akshare_one/modules/insider/xueqiu.py
@@ -1,8 +1,9 @@
+ import akshare as ak # type: ignore
  import pandas as pd
- import akshare as ak # type: ignore
- from .base import InsiderDataProvider
- from ..utils import convert_xieqiu_symbol
+
  from ..cache import cache
+ from ..utils import convert_xieqiu_symbol
+ from .base import InsiderDataProvider


  class XueQiuInsider(InsiderDataProvider):
@@ -10,7 +11,7 @@ class XueQiuInsider(InsiderDataProvider):

  @cache(
  "financial_cache",
- key=lambda self, symbol=None: f"xueqiu_insider_{symbol if symbol else 'all'}",
+ key=lambda self: f"xueqiu_insider_{self.symbol if self.symbol else 'all'}",
  )
  def get_inner_trade_data(self) -> pd.DataFrame:
  """获取雪球内部交易数据

src/akshare_one/modules/news/base.py
@@ -1,4 +1,5 @@
  from abc import ABC, abstractmethod
+
  import pandas as pd



src/akshare_one/modules/realtime/base.py
@@ -1,4 +1,5 @@
  from abc import ABC, abstractmethod
+
  import pandas as pd



src/akshare_one/modules/realtime/eastmoney.py
@@ -1,5 +1,5 @@
- import pandas as pd
  import akshare as ak # type: ignore
+ import pandas as pd

  from ..cache import cache
  from .base import RealtimeDataProvider
@@ -8,7 +8,7 @@ from .base import RealtimeDataProvider
  class EastmoneyRealtime(RealtimeDataProvider):
  @cache(
  "realtime_cache",
- key=lambda self, symbol=None: f"eastmoney_{symbol if symbol else 'all'}",
+ key=lambda self: f"eastmoney_{self.symbol if self.symbol else 'all'}",
  )
  def get_current_data(self) -> pd.DataFrame:
  """获取沪深京A股实时行情数据"""

src/akshare_one/modules/realtime/eastmoney_direct.py
@@ -1,9 +1,11 @@
  import pandas as pd
- from .base import RealtimeDataProvider
- from ..cache import cache
+
  from akshare_one.eastmoney.client import EastMoneyClient
  from akshare_one.eastmoney.utils import parse_realtime_data

+ from ..cache import cache
+ from .base import RealtimeDataProvider
+

  class EastMoneyDirectRealtime(RealtimeDataProvider):
  """Direct implementation for EastMoney realtime stock data API"""
@@ -33,4 +35,6 @@ class EastMoneyDirectRealtime(RealtimeDataProvider):
  return df

  except Exception as e:
- raise ValueError(f"Failed to get real-time data for {self.symbol}: {e}")
+ raise ValueError(
+ f"Failed to get real-time data for {self.symbol}: {e}"
+ ) from e

src/akshare_one/modules/realtime/factory.py
@@ -1,7 +1,7 @@
- from .eastmoney import EastmoneyRealtime
- from .xueqiu import XueQiuRealtime
  from .base import RealtimeDataProvider
+ from .eastmoney import EastmoneyRealtime
  from .eastmoney_direct import EastMoneyDirectRealtime
+ from .xueqiu import XueQiuRealtime


  class RealtimeDataFactory:
@@ -16,7 +16,9 @@ class RealtimeDataFactory:
  }

  @classmethod
- def get_provider(cls, provider_name: str, **kwargs: object) -> "RealtimeDataProvider":
+ def get_provider(
+ cls, provider_name: str, **kwargs: object
+ ) -> "RealtimeDataProvider":
  """
  Get a realtime data provider by name

@@ -38,26 +40,8 @@ class RealtimeDataFactory:
  symbol = kwargs.get("symbol", "")
  if not isinstance(symbol, str):
  raise ValueError("symbol must be a string")
-
- return provider_class(symbol=symbol)
- """
- Get a realtime data provider by name
-
- Args:
- provider_name: Name of the provider (e.g., 'eastmoney')
- **kwargs: Additional arguments to pass to the provider's constructor

- Returns:
- RealtimeDataProvider: An instance of the requested provider
-
- Raises:
- ValueError: If the requested provider is not found
- """
- provider_class = cls._providers.get(provider_name.lower())
- if not provider_class:
- raise ValueError(f"Unknown realtime data provider: {provider_name}")
-
- return provider_class(**kwargs)
+ return provider_class(symbol=symbol)

  @classmethod
  def register_provider(cls, name: str, provider_class: type) -> None:
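
Besides wrapping the signature for the line-length limit, this hunk deletes what appears to be an unreachable duplicate of get_provider's docstring and body left behind by an earlier refactor. A usage sketch based only on the methods visible in this diff; the provider name comes from the source Literal in get_realtime_data, everything else is an assumption:

from akshare_one.modules.realtime.factory import RealtimeDataFactory

provider = RealtimeDataFactory.get_provider("eastmoney_direct", symbol="600000")
df = provider.get_current_data()

# Custom providers can presumably be plugged in at runtime:
# RealtimeDataFactory.register_provider("my_source", MyRealtimeProvider)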

src/akshare_one/modules/realtime/xueqiu.py
@@ -1,14 +1,15 @@
- import pandas as pd
  import akshare as ak # type: ignore
- from ..utils import convert_xieqiu_symbol
+ import pandas as pd
+
  from ..cache import cache
+ from ..utils import convert_xieqiu_symbol
  from .base import RealtimeDataProvider


  class XueQiuRealtime(RealtimeDataProvider):
  @cache(
  "realtime_cache",
- key=lambda self, symbol=None: f"xueqiu_{symbol}",
+ key=lambda self: f"xueqiu_{self.symbol}",
  )
  def get_current_data(self) -> pd.DataFrame:
  """获取雪球实时行情数据

akshare_one-0.3.9/src/akshare_one/modules/cache.py (removed; superseded by the new cache module above)
@@ -1,30 +0,0 @@
- from cachetools import TTLCache, cached
- import os
- from typing import Any, Callable, TypeVar, Optional
-
- F = TypeVar('F', bound=Callable[..., Any])
-
- # 缓存配置
- CACHE_CONFIG: dict[str, TTLCache[Any, Any]] = {
- "hist_data_cache": TTLCache(maxsize=1000, ttl=3600), # 历史数据缓存1小时
- "realtime_cache": TTLCache(maxsize=500, ttl=60), # 实时数据缓存1分钟
- "news_cache": TTLCache(maxsize=500, ttl=3600), # 新闻数据缓存1小时
- "financial_cache": TTLCache(maxsize=500, ttl=86400), # 财务数据缓存24小时
- "info_cache": TTLCache(maxsize=500, ttl=86400), # 信息数据缓存24小时
- }
-
-
- def cache(cache_key: str, key: Optional[Callable[..., Any]] = None) -> Callable[[F], F]:
- cache_enabled = os.getenv("AKSHARE_ONE_CACHE_ENABLED", "true").lower() in (
- "1",
- "true",
- "yes",
- "on",
- )
-
- def decorator(func: F) -> F:
- if cache_enabled:
- return cached(CACHE_CONFIG[cache_key], key=key)(func) # type: ignore
- return func
-
- return decorator

README.md: file without changes

src/akshare_one/modules/financial/factory.py
@@ -1,6 +1,6 @@
+ from .base import FinancialDataProvider
  from .eastmoney_direct import EastMoneyDirectFinancialReport
  from .sina import SinaFinancialReport
- from .base import FinancialDataProvider


  class FinancialDataFactory:

src/akshare_one/modules/info/factory.py
@@ -1,5 +1,5 @@
- from .eastmoney import EastmoneyInfo
  from .base import InfoDataProvider
+ from .eastmoney import EastmoneyInfo


  class InfoDataFactory:

src/akshare_one/modules/insider/factory.py
@@ -1,5 +1,5 @@
- from .xueqiu import XueQiuInsider
  from .base import InsiderDataProvider
+ from .xueqiu import XueQiuInsider


  class InsiderDataFactory:

src/akshare_one/modules/news/eastmoney.py
@@ -1,5 +1,5 @@
- import pandas as pd
  import akshare as ak # type: ignore
+ import pandas as pd

  from ..cache import cache
  from .base import NewsDataProvider

src/akshare_one/modules/news/factory.py
@@ -1,5 +1,5 @@
- from .eastmoney import EastMoneyNews
  from .base import NewsDataProvider
+ from .eastmoney import EastMoneyNews


  class NewsDataFactory: