akshare 1.12.99__py3-none-any.whl → 1.15.72__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of akshare might be problematic.

Files changed (236)
  1. akshare/__init__.py +441 -138
  2. akshare/air/air_hebei.py +79 -53
  3. akshare/air/air_zhenqi.py +29 -43
  4. akshare/air/sunrise_tad.py +32 -17
  5. akshare/bank/bank_cbirc_2020.py +12 -9
  6. akshare/bond/bond_cb_ths.py +17 -9
  7. akshare/bond/bond_china.py +38 -39
  8. akshare/bond/bond_china_money.py +75 -48
  9. akshare/bond/bond_info_cm.py +28 -8
  10. akshare/bond/bond_issue_cninfo.py +73 -30
  11. akshare/bond/bond_zh_cov.py +1 -1
  12. akshare/bond/bond_zh_sina.py +57 -51
  13. akshare/cal/__init__.py +0 -0
  14. akshare/cal/rv.py +170 -0
  15. akshare/cost/cost_living.py +7 -5
  16. akshare/currency/currency_safe.py +7 -6
  17. akshare/data/cninfo.js +15 -0
  18. akshare/datasets.py +10 -21
  19. akshare/economic/macro_bank.py +95 -653
  20. akshare/economic/macro_china.py +772 -1024
  21. akshare/economic/macro_china_hk.py +65 -243
  22. akshare/economic/macro_china_nbs.py +24 -7
  23. akshare/economic/macro_constitute.py +17 -12
  24. akshare/economic/macro_euro.py +13 -6
  25. akshare/economic/macro_finance_ths.py +133 -0
  26. akshare/economic/macro_info_ws.py +100 -0
  27. akshare/economic/macro_japan.py +5 -4
  28. akshare/economic/macro_other.py +12 -9
  29. akshare/economic/macro_usa.py +376 -1940
  30. akshare/economic/marco_cnbs.py +11 -6
  31. akshare/energy/energy_carbon.py +94 -125
  32. akshare/event/migration.py +3 -2
  33. akshare/exceptions.py +43 -0
  34. akshare/file_fold/calendar.json +245 -2
  35. akshare/fortune/fortune_500.py +15 -48
  36. akshare/fund/fund_amac.py +157 -75
  37. akshare/fund/fund_em.py +191 -184
  38. akshare/fund/fund_etf_em.py +16 -15
  39. akshare/fund/fund_etf_sina.py +71 -23
  40. akshare/fund/fund_etf_ths.py +93 -0
  41. akshare/fund/fund_fee_em.py +98 -0
  42. akshare/fund/fund_portfolio_em.py +60 -50
  43. akshare/fund/fund_rank_em.py +91 -82
  44. akshare/fund/fund_report_cninfo.py +63 -48
  45. akshare/fund/fund_scale_sina.py +20 -10
  46. akshare/fund/fund_xq.py +139 -109
  47. akshare/futures/cons.py +8 -31
  48. akshare/futures/cot.py +185 -137
  49. akshare/futures/futures_basis.py +97 -32
  50. akshare/futures/futures_comm_ctp.py +37 -0
  51. akshare/futures/futures_comm_qihuo.py +74 -45
  52. akshare/futures/futures_daily_bar.py +121 -184
  53. akshare/futures/futures_hf_em.py +66 -61
  54. akshare/futures/futures_hq_sina.py +79 -61
  55. akshare/futures/futures_index_ccidx.py +6 -3
  56. akshare/futures/futures_inventory_99.py +61 -272
  57. akshare/futures/futures_news_shmet.py +4 -2
  58. akshare/futures/futures_roll_yield.py +12 -25
  59. akshare/futures/futures_spot_stock_em.py +19 -13
  60. akshare/futures/futures_stock_js.py +14 -12
  61. akshare/futures/futures_to_spot.py +38 -33
  62. akshare/futures/futures_warehouse_receipt.py +75 -71
  63. akshare/futures/futures_zh_sina.py +5 -5
  64. akshare/futures/symbol_var.py +18 -13
  65. akshare/futures_derivative/futures_contract_info_czce.py +60 -52
  66. akshare/futures_derivative/futures_contract_info_ine.py +43 -34
  67. akshare/futures_derivative/futures_contract_info_shfe.py +46 -35
  68. akshare/futures_derivative/futures_cot_sina.py +26 -19
  69. akshare/futures_derivative/futures_spot_sys.py +21 -8
  70. akshare/fx/currency_investing.py +19 -285
  71. akshare/index/index_cflp.py +29 -26
  72. akshare/index/index_cni.py +86 -88
  73. akshare/index/index_cons.py +26 -10
  74. akshare/index/index_cx.py +248 -47
  75. akshare/index/index_drewry.py +17 -16
  76. akshare/index/index_option_qvix.py +329 -0
  77. akshare/index/index_research_fund_sw.py +134 -0
  78. akshare/index/{index_sw_research.py → index_research_sw.py} +122 -58
  79. akshare/index/index_spot.py +9 -5
  80. akshare/index/index_stock_hk.py +35 -16
  81. akshare/index/index_stock_us_sina.py +1 -1
  82. akshare/index/index_stock_zh.py +180 -89
  83. akshare/index/index_stock_zh_csindex.py +15 -369
  84. akshare/index/index_sw.py +62 -34
  85. akshare/index/index_yw.py +46 -23
  86. akshare/index/index_zh_a_scope.py +48 -0
  87. akshare/index/index_zh_em.py +6 -4
  88. akshare/interest_rate/interbank_rate_em.py +14 -9
  89. akshare/movie/artist_yien.py +32 -5
  90. akshare/movie/movie_yien.py +92 -18
  91. akshare/movie/video_yien.py +28 -5
  92. akshare/news/news_baidu.py +78 -44
  93. akshare/news/news_cctv.py +38 -38
  94. akshare/news/news_stock.py +6 -3
  95. akshare/nlp/nlp_interface.py +7 -8
  96. akshare/option/cons.py +11 -11
  97. akshare/option/option_comm_qihuo.py +86 -0
  98. akshare/option/option_commodity.py +178 -51
  99. akshare/option/option_daily_stats_sse_szse.py +146 -0
  100. akshare/option/option_em.py +147 -138
  101. akshare/option/option_finance_sina.py +160 -137
  102. akshare/option/option_lhb_em.py +62 -56
  103. akshare/option/option_risk_indicator_sse.py +17 -14
  104. akshare/other/other_car_cpca.py +934 -0
  105. akshare/other/{other_car.py → other_car_gasgoo.py} +15 -54
  106. akshare/qdii/__init__.py +0 -0
  107. akshare/qdii/qdii_jsl.py +233 -0
  108. akshare/request.py +117 -0
  109. akshare/spot/spot_hog_soozhu.py +232 -0
  110. akshare/spot/spot_price_qh.py +121 -0
  111. akshare/spot/spot_sge.py +63 -10
  112. akshare/stock/stock_allotment_cninfo.py +10 -9
  113. akshare/stock/stock_board_concept_em.py +23 -14
  114. akshare/stock/stock_board_industry_em.py +40 -34
  115. akshare/stock/stock_cg_equity_mortgage.py +15 -11
  116. akshare/stock/stock_cg_guarantee.py +41 -51
  117. akshare/stock/stock_cg_lawsuit.py +36 -35
  118. akshare/stock/stock_dividend_cninfo.py +12 -6
  119. akshare/stock/stock_dzjy_em.py +347 -260
  120. akshare/stock/stock_fund_em.py +332 -84
  121. akshare/stock/stock_hk_famous.py +108 -0
  122. akshare/stock/stock_hk_sina.py +8 -7
  123. akshare/stock/stock_hold_control_cninfo.py +100 -15
  124. akshare/stock/stock_hold_control_em.py +4 -3
  125. akshare/stock/stock_hold_num_cninfo.py +18 -12
  126. akshare/stock/stock_hot_rank_em.py +2 -1
  127. akshare/stock/stock_hot_search_baidu.py +5 -2
  128. akshare/stock/stock_industry_cninfo.py +24 -18
  129. akshare/stock/stock_industry_pe_cninfo.py +45 -31
  130. akshare/stock/stock_industry_sw.py +9 -10
  131. akshare/stock/stock_info.py +25 -15
  132. akshare/stock/stock_info_em.py +5 -2
  133. akshare/stock/stock_intraday_em.py +5 -2
  134. akshare/stock/stock_intraday_sina.py +22 -18
  135. akshare/stock/stock_ipo_summary_cninfo.py +25 -10
  136. akshare/stock/stock_new_cninfo.py +32 -19
  137. akshare/stock/stock_news_cx.py +39 -0
  138. akshare/stock/stock_profile_cninfo.py +9 -8
  139. akshare/stock/stock_rank_forecast.py +8 -6
  140. akshare/stock/stock_share_changes_cninfo.py +18 -14
  141. akshare/stock/stock_share_hold.py +24 -19
  142. akshare/stock/stock_summary.py +54 -26
  143. akshare/stock/stock_us_famous.py +15 -6
  144. akshare/stock/stock_us_pink.py +7 -5
  145. akshare/stock/stock_us_sina.py +15 -12
  146. akshare/stock/stock_xq.py +38 -12
  147. akshare/stock/stock_zh_a_sina.py +53 -78
  148. akshare/stock/stock_zh_b_sina.py +32 -55
  149. akshare/stock/stock_zh_kcb_report.py +11 -9
  150. akshare/stock/stock_zh_kcb_sina.py +67 -64
  151. akshare/stock_feature/stock_a_below_net_asset_statistics.py +5 -2
  152. akshare/stock_feature/stock_a_high_low.py +5 -2
  153. akshare/stock_feature/stock_a_indicator.py +12 -9
  154. akshare/stock_feature/stock_a_pe_and_pb.py +27 -6
  155. akshare/stock_feature/stock_account_em.py +58 -40
  156. akshare/stock_feature/stock_analyst_em.py +36 -27
  157. akshare/stock_feature/stock_board_industry_ths.py +136 -400
  158. akshare/stock_feature/stock_comment_em.py +118 -85
  159. akshare/stock_feature/stock_concept_futu.py +183 -0
  160. akshare/stock_feature/stock_cyq_em.py +58 -54
  161. akshare/stock_feature/stock_disclosure_cninfo.py +147 -102
  162. akshare/stock_feature/stock_esg_sina.py +216 -11
  163. akshare/stock_feature/stock_fhps_em.py +60 -25
  164. akshare/stock_feature/stock_fhps_ths.py +25 -6
  165. akshare/stock_feature/stock_fund_flow.py +38 -25
  166. akshare/stock_feature/stock_gdfx_em.py +180 -95
  167. akshare/stock_feature/stock_gdhs.py +73 -49
  168. akshare/stock_feature/stock_gpzy_em.py +78 -46
  169. akshare/stock_feature/stock_hist_em.py +55 -23
  170. akshare/stock_feature/stock_hk_valuation_baidu.py +20 -8
  171. akshare/stock_feature/stock_hsgt_em.py +184 -452
  172. akshare/stock_feature/stock_info.py +52 -29
  173. akshare/stock_feature/stock_inner_trade_xq.py +39 -31
  174. akshare/stock_feature/stock_irm_cninfo.py +32 -9
  175. akshare/stock_feature/stock_jgdy_em.py +41 -38
  176. akshare/stock_feature/stock_lh_yybpm.py +36 -37
  177. akshare/stock_feature/stock_lhb_em.py +135 -71
  178. akshare/stock_feature/stock_lhb_sina.py +93 -46
  179. akshare/stock_feature/stock_margin_em.py +102 -0
  180. akshare/stock_feature/{stock_sse_margin.py → stock_margin_sse.py} +21 -15
  181. akshare/stock_feature/{stock_szse_margin.py → stock_margin_szse.py} +23 -19
  182. akshare/stock_feature/stock_market_legu.py +13 -8
  183. akshare/stock_feature/stock_pankou_em.py +72 -34
  184. akshare/stock_feature/stock_report_em.py +244 -54
  185. akshare/stock_feature/stock_research_report_em.py +48 -19
  186. akshare/stock_feature/stock_sns_sseinfo.py +15 -12
  187. akshare/stock_feature/stock_sy_em.py +86 -33
  188. akshare/stock_feature/stock_technology_ths.py +152 -120
  189. akshare/stock_feature/stock_tfp_em.py +35 -13
  190. akshare/stock_feature/stock_three_report_em.py +119 -77
  191. akshare/stock_feature/stock_ttm_lyr.py +4 -7
  192. akshare/stock_feature/stock_value_em.py +83 -0
  193. akshare/stock_feature/stock_wencai.py +21 -9
  194. akshare/stock_feature/stock_yjyg_em.py +63 -28
  195. akshare/stock_feature/stock_zf_pg.py +61 -38
  196. akshare/stock_feature/stock_zh_valuation_baidu.py +3 -2
  197. akshare/stock_feature/stock_ztb_em.py +62 -40
  198. akshare/stock_fundamental/stock_finance.py +150 -58
  199. akshare/stock_fundamental/stock_finance_ths.py +116 -31
  200. akshare/stock_fundamental/stock_mda_ym.py +5 -3
  201. akshare/stock_fundamental/stock_notice.py +29 -15
  202. akshare/stock_fundamental/stock_profit_forecast_em.py +31 -13
  203. akshare/stock_fundamental/stock_profit_forecast_ths.py +19 -10
  204. akshare/stock_fundamental/stock_register_em.py +448 -0
  205. akshare/stock_fundamental/stock_restricted_em.py +79 -32
  206. akshare/stock_fundamental/stock_zygc.py +10 -8
  207. akshare/stock_fundamental/stock_zyjs_ths.py +5 -3
  208. akshare/tool/trade_date_hist.py +4 -3
  209. akshare/utils/cons.py +10 -0
  210. akshare/utils/context.py +43 -0
  211. akshare/utils/demjson.py +2 -2
  212. akshare/utils/func.py +26 -0
  213. akshare/utils/tqdm.py +13 -3
  214. {akshare-1.12.99.dist-info → akshare-1.15.72.dist-info}/METADATA +52 -69
  215. akshare-1.15.72.dist-info/RECORD +385 -0
  216. {akshare-1.12.99.dist-info → akshare-1.15.72.dist-info}/WHEEL +1 -1
  217. tests/test_func.py +3 -5
  218. akshare/bond/bond_futures.py +0 -50
  219. akshare/bond/bond_investing.py +0 -139
  220. akshare/crypto/crypto_hist_investing.py +0 -249
  221. akshare/fortune/fortune_it_juzi.py +0 -123
  222. akshare/futures/futures_international.py +0 -170
  223. akshare/futures/futures_news_baidu.py +0 -54
  224. akshare/futures/inventory_data.py +0 -100
  225. akshare/futures_derivative/futures_index_price_nh.py +0 -61
  226. akshare/futures_derivative/futures_index_return_nh.py +0 -47
  227. akshare/futures_derivative/futures_index_volatility_nh.py +0 -51
  228. akshare/futures_derivative/futures_other_index_nh.py +0 -145
  229. akshare/index/index_fear_greed_funddb.py +0 -71
  230. akshare/index/index_investing.py +0 -232
  231. akshare/sport/sport_olympic_winter.py +0 -39
  232. akshare/stock_feature/stock_board_concept_ths.py +0 -422
  233. akshare/stock_fundamental/stock_register.py +0 -292
  234. akshare-1.12.99.dist-info/RECORD +0 -374
  235. {akshare-1.12.99.dist-info → akshare-1.15.72.dist-info}/LICENSE +0 -0
  236. {akshare-1.12.99.dist-info → akshare-1.15.72.dist-info}/top_level.txt +0 -0
akshare/news/news_baidu.py CHANGED
@@ -1,15 +1,19 @@
 # -*- coding:utf-8 -*-
 # !/usr/bin/env python
 """
-Date: 2023/9/17 16:53
+Date: 2024/11/07 17:00
 Desc: 百度股市通-经济数据
 https://gushitong.baidu.com/calendar
 """
+
+import http.client
+import json
+from urllib.parse import urlencode
+
 import pandas as pd
-import requests
 
 
-def news_economic_baidu(date: str = "20220502") -> pd.DataFrame:
+def news_economic_baidu(date: str = "20241107") -> pd.DataFrame:
     """
     百度股市通-经济数据
     https://gushitong.baidu.com/calendar
@@ -20,17 +24,20 @@ def news_economic_baidu(date: str = "20220502") -> pd.DataFrame:
     """
     start_date = "-".join([date[:4], date[4:6], date[6:]])
     end_date = "-".join([date[:4], date[4:6], date[6:]])
-    url = "https://finance.pae.baidu.com/api/financecalendar"
+    conn = http.client.HTTPSConnection("finance.pae.baidu.com")
     params = {
         "start_date": start_date,
         "end_date": end_date,
         "market": "",
         "cate": "economic_data",
-        "rn": "500",
-        "pn": "0",
+        "finClientType": "pc",
     }
-    r = requests.get(url, params=params)
-    data_json = r.json()
+    query_string = urlencode(params)
+    url = "/api/financecalendar" + "?" + query_string
+    conn.request(method="GET", url=url)
+    r = conn.getresponse()
+    data = r.read()
+    data_json = json.loads(data)
     big_df = pd.DataFrame()
     for item in data_json["Result"]:
         if not item["list"] == []:
@@ -66,14 +73,13 @@ def news_economic_baidu(date: str = "20220502") -> pd.DataFrame:
             temp_df["前值"] = pd.to_numeric(temp_df["前值"], errors="coerce")
             temp_df["重要性"] = pd.to_numeric(temp_df["重要性"], errors="coerce")
             temp_df["日期"] = pd.to_datetime(temp_df["日期"], errors="coerce").dt.date
-
-            big_df = pd.concat([big_df, temp_df], ignore_index=True)
+            big_df = pd.concat(objs=[big_df, temp_df], ignore_index=True)
         else:
             continue
     return big_df
 
 
-def news_trade_notify_suspend_baidu(date: str = "20220513") -> pd.DataFrame:
+def news_trade_notify_suspend_baidu(date: str = "20241107") -> pd.DataFrame:
     """
     百度股市通-交易提醒-停复牌
     https://gushitong.baidu.com/calendar
@@ -84,15 +90,20 @@ def news_trade_notify_suspend_baidu(date: str = "20220513") -> pd.DataFrame:
     """
     start_date = "-".join([date[:4], date[4:6], date[6:]])
     end_date = "-".join([date[:4], date[4:6], date[6:]])
-    url = "https://finance.pae.baidu.com/api/financecalendar"
+    conn = http.client.HTTPSConnection("finance.pae.baidu.com")
     params = {
         "start_date": start_date,
         "end_date": end_date,
         "market": "",
         "cate": "notify_suspend",
+        "finClientType": "pc",
     }
-    r = requests.get(url, params=params)
-    data_json = r.json()
+    query_string = urlencode(params)
+    url = "/api/financecalendar" + "?" + query_string
+    conn.request(method="GET", url=url)
+    r = conn.getresponse()
+    data = r.read()
+    data_json = json.loads(data)
     big_df = pd.DataFrame()
     for item in data_json["Result"]:
         if not item["list"] == []:
@@ -117,15 +128,19 @@ def news_trade_notify_suspend_baidu(date: str = "20220513") -> pd.DataFrame:
                     "停牌事项说明",
                 ]
             ]
-            temp_df["停牌时间"] = pd.to_datetime(temp_df["停牌时间"], errors="coerce").dt.date
-            temp_df["复牌时间"] = pd.to_datetime(temp_df["复牌时间"], errors="coerce").dt.date
+            temp_df["停牌时间"] = pd.to_datetime(
+                temp_df["停牌时间"], errors="coerce"
+            ).dt.date
+            temp_df["复牌时间"] = pd.to_datetime(
+                temp_df["复牌时间"], errors="coerce"
+            ).dt.date
             big_df = pd.concat([big_df, temp_df], ignore_index=True)
         else:
             continue
     return big_df
 
 
-def news_trade_notify_dividend_baidu(date: str = "20220916") -> pd.DataFrame:
+def news_trade_notify_dividend_baidu(date: str = "20241107") -> pd.DataFrame:
     """
     百度股市通-交易提醒-分红派息
     https://gushitong.baidu.com/calendar
@@ -136,31 +151,42 @@ def news_trade_notify_dividend_baidu(date: str = "20220916") -> pd.DataFrame:
     """
     start_date = "-".join([date[:4], date[4:6], date[6:]])
     end_date = "-".join([date[:4], date[4:6], date[6:]])
-    url = "https://finance.pae.baidu.com/api/financecalendar"
+    conn = http.client.HTTPSConnection("finance.pae.baidu.com")
     params = {
         "start_date": start_date,
         "end_date": end_date,
         "market": "",
         "cate": "notify_divide",
+        "rn": 500,
+        "pn": 0,
     }
-    r = requests.get(url, params=params)
-    data_json = r.json()
+    query_string = urlencode(params)
+    url = "/api/financecalendar" + "?" + query_string
+    conn.request(method="GET", url=url)
+    r = conn.getresponse()
+    data = r.read()
+    data_json = json.loads(data)
     big_df = pd.DataFrame()
     for item in data_json["Result"]:
         if not item["list"] == []:
             temp_df = pd.DataFrame(item["list"])
-            temp_df.columns = [
-                "股票代码",
-                "-",
-                "交易所",
-                "股票简称",
-                "除权日",
-                "报告期",
-                "分红",
-                "送股",
-                "转增",
-                "实物",
-            ]
+            temp_df.rename(
+                columns={
+                    "code": "股票代码",
+                    "market": "-",
+                    "exchange": "交易所",
+                    "name": "股票简称",
+                    "diviDate": "除权日",
+                    "date": "报告期",
+                    "diviCash": "分红",
+                    "shareDivide": "送股",
+                    "transfer": "转增",
+                    "physical": "实物",
+                },
+                inplace=True,
+            )
+            if "实物" not in temp_df.columns:
+                temp_df["实物"] = pd.NA
             temp_df = temp_df[
                 [
                     "股票代码",
@@ -174,15 +200,19 @@ def news_trade_notify_dividend_baidu(date: str = "20220916") -> pd.DataFrame:
                     "报告期",
                 ]
             ]
-            temp_df["除权日"] = pd.to_datetime(temp_df["除权日"], errors="coerce").dt.date
-            temp_df["报告期"] = pd.to_datetime(temp_df["报告期"], errors="coerce").dt.date
-            big_df = pd.concat([big_df, temp_df], ignore_index=True)
+            temp_df["除权日"] = pd.to_datetime(
+                temp_df["除权日"], errors="coerce"
+            ).dt.date
+            temp_df["报告期"] = pd.to_datetime(
+                temp_df["报告期"], errors="coerce"
+            ).dt.date
+            big_df = pd.concat(objs=[big_df, temp_df], ignore_index=True)
         else:
             continue
     return big_df
 
 
-def news_report_time_baidu(date: str = "20220514") -> pd.DataFrame:
+def news_report_time_baidu(date: str = "20241107") -> pd.DataFrame:
     """
     百度股市通-财报发行
     https://gushitong.baidu.com/calendar
@@ -193,7 +223,7 @@ def news_report_time_baidu(date: str = "20220514") -> pd.DataFrame:
     """
     start_date = "-".join([date[:4], date[4:6], date[6:]])
     end_date = "-".join([date[:4], date[4:6], date[6:]])
-    url = "https://finance.pae.baidu.com/api/financecalendar"
+    conn = http.client.HTTPSConnection("finance.pae.baidu.com")
     params = {
         "start_date": start_date,
         "end_date": end_date,
@@ -201,8 +231,12 @@ def news_report_time_baidu(date: str = "20220514") -> pd.DataFrame:
         "cate": "report_time",
         "finClientType": "pc",
     }
-    r = requests.get(url, params=params)
-    data_json = r.json()
+    query_string = urlencode(params)
+    url = "/api/financecalendar" + "?" + query_string
+    conn.request(method="GET", url=url)
+    r = conn.getresponse()
+    data = r.read()
+    data_json = json.loads(data)
     big_df = pd.DataFrame()
     for item in data_json["Result"]:
         if not item["list"] == []:
@@ -224,25 +258,25 @@ def news_report_time_baidu(date: str = "20220514") -> pd.DataFrame:
                     "财报期",
                 ]
             ]
-            big_df = pd.concat([big_df, temp_df], ignore_index=True)
+            big_df = pd.concat(objs=[big_df, temp_df], ignore_index=True)
         else:
             continue
     return big_df
 
 
 if __name__ == "__main__":
-    news_economic_baidu_df = news_economic_baidu(date="20230917")
+    news_economic_baidu_df = news_economic_baidu(date="20241107")
     print(news_economic_baidu_df)
 
     news_trade_notify_suspend_baidu_df = news_trade_notify_suspend_baidu(
-        date="20220523"
+        date="20241107"
     )
     print(news_trade_notify_suspend_baidu_df)
 
     news_trade_notify_dividend_baidu_df = news_trade_notify_dividend_baidu(
-        date="20220916"
+        date="20241107"
     )
     print(news_trade_notify_dividend_baidu_df)
 
-    news_report_time_baidu_df = news_report_time_baidu(date="20220514")
+    news_report_time_baidu_df = news_report_time_baidu(date="20241107")
     print(news_report_time_baidu_df)
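
The recurring change in news_baidu.py is the swap from requests.get to the standard-library http.client plus urllib.parse.urlencode. Below is a minimal sketch of that request pattern, distilled from the new code; the fetch_calendar wrapper name is hypothetical, and the endpoint may still expect cookies or headers the diff does not show.

import http.client
import json
from urllib.parse import urlencode


def fetch_calendar(cate: str, start_date: str, end_date: str) -> dict:
    # Build the query string by hand instead of letting requests encode it.
    params = {
        "start_date": start_date,
        "end_date": end_date,
        "market": "",
        "cate": cate,
        "finClientType": "pc",
    }
    conn = http.client.HTTPSConnection("finance.pae.baidu.com")
    conn.request(method="GET", url="/api/financecalendar?" + urlencode(params))
    resp = conn.getresponse()
    body = resp.read()
    conn.close()
    return json.loads(body)


# Usage mirroring news_economic_baidu(date="20241107"):
# data_json = fetch_calendar("economic_data", "2024-11-07", "2024-11-07")
# rows = [row for item in data_json["Result"] for row in item["list"]]
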
akshare/news/news_cctv.py CHANGED
@@ -1,10 +1,11 @@
 #!/usr/bin/env python
 # -*- coding:utf-8 -*-
 """
-Date: 2022/5/26 15:42
+Date: 2024/4/25 17:00
 Desc: 新闻联播文字稿
-https://tv.cctv.com/lm/xwlb/?spm=C52056131267.P4y8I53JvSWE.0.0
+https://tv.cctv.com/lm/xwlb
 """
+
 import re
 
 import pandas as pd
@@ -13,28 +14,28 @@ from bs4 import BeautifulSoup
 from tqdm import tqdm
 
 
-def news_cctv(date: str = "20130308") -> pd.DataFrame:
+def news_cctv(date: str = "20240424") -> pd.DataFrame:
     """
     新闻联播文字稿
-    https://tv.cctv.com/lm/xwlb/?spm=C52056131267.P4y8I53JvSWE.0.0
+    https://tv.cctv.com/lm/xwlb
     :param date: 需要获取数据的日期; 目前 20160203 年后
     :type date: str
     :return: 新闻联播文字稿
     :rtype: pandas.DataFrame
     """
     if int(date) <= int("20130708"):
-        url = f"http://cctv.cntv.cn/lm/xinwenlianbo/{date}.shtml"
+        url = f"https://cctv.cntv.cn/lm/xinwenlianbo/{date}.shtml"
         r = requests.get(url)
         r.encoding = "gbk"
         raw_list = re.findall(r"title_array_01\((.*)", r.text)
         page_url = [
-            re.findall("(http.*)", item)[0].split("'")[0]
-            for item in raw_list[1:]
+            re.findall("(http.*)", item)[0].split("'")[0] for item in raw_list[1:]
         ]
         title_list = []
         content_list = []
         headers = {
-            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
+            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,"
+            "image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
             "Accept-Encoding": "gzip, deflate",
             "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
             "Cache-Control": "no-cache",
@@ -43,7 +44,8 @@ def news_cctv(date: str = "20130308") -> pd.DataFrame:
             "Pragma": "no-cache",
             "Proxy-Connection": "keep-alive",
             "Upgrade-Insecure-Requests": "1",
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.159 Safari/537.36",
+            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
+            "Chrome/92.0.4515.159 Safari/537.36",
         }
         for page in tqdm(page_url, leave=False):
             try:
@@ -52,9 +54,7 @@ def news_cctv(date: str = "20130308") -> pd.DataFrame:
                 soup = BeautifulSoup(r.text, "lxml")
                 title = soup.find("h3").text
                 content = soup.find("div", attrs={"class": "cnt_bd"}).text
-                title_list.append(
-                    title.strip("[视频]").strip().replace("\n", " ")
-                )
+                title_list.append(title.strip("[视频]").strip().replace("\n", " "))
                 content_list.append(
                     content.strip()
                     .strip("央视网消息(新闻联播):")
@@ -63,29 +63,30 @@ def news_cctv(date: str = "20130308") -> pd.DataFrame:
                     .strip()
                     .replace("\n", " ")
                 )
-            except:
+            except:  # noqa: E722
                continue
        temp_df = pd.DataFrame(
-            [[date] * len(title_list), title_list, content_list],
+            data=[[date] * len(title_list), title_list, content_list],
            index=["date", "title", "content"],
        ).T
        return temp_df
 
    elif int(date) < int("20160203"):
-        url = f"http://cctv.cntv.cn/lm/xinwenlianbo/{date}.shtml"
+        url = f"https://cctv.cntv.cn/lm/xinwenlianbo/{date}.shtml"
        r = requests.get(url)
        r.encoding = "utf-8"
        soup = BeautifulSoup(r.text, "lxml")
        page_url = [
            item.find("a")["href"]
            for item in soup.find(
-                "div", attrs={"id": "contentELMT1368521805488378"}
+                name="div", attrs={"id": "contentELMT1368521805488378"}
            ).find_all("li")[1:]
        ]
        title_list = []
        content_list = []
        headers = {
-            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
+            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,"
+            "image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
            "Cache-Control": "no-cache",
@@ -94,18 +95,17 @@ def news_cctv(date: str = "20130308") -> pd.DataFrame:
             "Pragma": "no-cache",
             "Proxy-Connection": "keep-alive",
             "Upgrade-Insecure-Requests": "1",
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.159 Safari/537.36",
+            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
+            "Chrome/92.0.4515.159 Safari/537.36",
         }
         for page in tqdm(page_url, leave=False):
             try:
                 r = requests.get(page, headers=headers)
                 r.encoding = "utf-8"
-                soup = BeautifulSoup(r.text, "lxml")
+                soup = BeautifulSoup(r.text, features="lxml")
                 title = soup.find("h3").text
-                content = soup.find("div", attrs={"class": "cnt_bd"}).text
-                title_list.append(
-                    title.strip("[视频]").strip().replace("\n", " ")
-                )
+                content = soup.find(name="div", attrs={"class": "cnt_bd"}).text
+                title_list.append(title.strip("[视频]").strip().replace("\n", " "))
                 content_list.append(
                     content.strip()
                     .strip("央视网消息(新闻联播):")
@@ -114,10 +114,10 @@ def news_cctv(date: str = "20130308") -> pd.DataFrame:
                     .strip()
                     .replace("\n", " ")
                 )
-            except:
+            except:  # noqa: E722
                continue
        temp_df = pd.DataFrame(
-            [[date] * len(title_list), title_list, content_list],
+            data=[[date] * len(title_list), title_list, content_list],
            index=["date", "title", "content"],
        ).T
        return temp_df
@@ -130,7 +130,8 @@ def news_cctv(date: str = "20130308") -> pd.DataFrame:
        title_list = []
        content_list = []
        headers = {
-            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
+            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,"
+            "image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
            "Cache-Control": "no-cache",
@@ -139,26 +140,25 @@ def news_cctv(date: str = "20130308") -> pd.DataFrame:
             "Pragma": "no-cache",
             "Proxy-Connection": "keep-alive",
             "Upgrade-Insecure-Requests": "1",
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.159 Safari/537.36",
+            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
+            "Chrome/92.0.4515.159 Safari/537.36",
         }
         for page in tqdm(page_url, leave=False):
             try:
                 r = requests.get(page, headers=headers)
                 r.encoding = "utf-8"
-                soup = BeautifulSoup(r.text, "lxml")
+                soup = BeautifulSoup(r.text, features="lxml")
                 if soup.find("h3"):
                     title = soup.find("h3").text
                 else:
-                    title = soup.find("div", attrs={"class": "tit"}).text
-                if soup.find("div", attrs={"class": "cnt_bd"}):
-                    content = soup.find("div", attrs={"class": "cnt_bd"}).text
+                    title = soup.find(name="div", attrs={"class": "tit"}).text
+                if soup.find(name="div", attrs={"class": "cnt_bd"}):
+                    content = soup.find(name="div", attrs={"class": "cnt_bd"}).text
                 else:
                     content = soup.find(
-                        "div", attrs={"class": "content_area"}
+                        name="div", attrs={"class": "content_area"}
                     ).text
-                title_list.append(
-                    title.strip("[视频]").strip().replace("\n", " ")
-                )
+                title_list.append(title.strip("[视频]").strip().replace("\n", " "))
                 content_list.append(
                     content.strip()
                     .strip("央视网消息(新闻联播):")
@@ -167,15 +167,15 @@ def news_cctv(date: str = "20130308") -> pd.DataFrame:
                     .strip()
                     .replace("\n", " ")
                 )
-            except:
+            except:  # noqa: E722
                continue
        temp_df = pd.DataFrame(
-            [[date] * len(title_list), title_list, content_list],
+            data=[[date] * len(title_list), title_list, content_list],
            index=["date", "title", "content"],
        ).T
        return temp_df
 
 
 if __name__ == "__main__":
-    news_cctv_df = news_cctv(date="20220525")
+    news_cctv_df = news_cctv(date="20240424")
     print(news_cctv_df)
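
For reference, the __main__ block above exercises the updated scraper directly; the same call through the package namespace looks like this (a sketch, assuming news_cctv is still exported at the top level as in earlier releases).

import akshare as ak

# Dates from 20160203 onward use the current xwlb page layout (per the docstring).
news_cctv_df = ak.news_cctv(date="20240424")
print(news_cctv_df[["date", "title"]].head())
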
akshare/news/news_stock.py CHANGED
@@ -1,10 +1,11 @@
 #!/usr/bin/env python
 # -*- coding:utf-8 -*-
 """
-Date: 2022/11/12 20:59
+Date: 2024/05/23 14:00
 Desc: 个股新闻数据
 https://so.eastmoney.com/news/s?keyword=%E4%B8%AD%E5%9B%BD%E4%BA%BA%E5%AF%BF&pageindex=1&searchrange=8192&sortfiled=4
 """
+
 import json
 
 import pandas as pd
@@ -20,12 +21,14 @@ def stock_news_em(symbol: str = "300059") -> pd.DataFrame:
     :return: 个股新闻
     :rtype: pandas.DataFrame
     """
-    url = "https://search-api-web.eastmoney.com/search/jsonp"
+    url = "http://search-api-web.eastmoney.com/search/jsonp"
     params = {
         "cb": "jQuery3510875346244069884_1668256937995",
         "param": '{"uid":"",'
         + f'"keyword":"{symbol}"'
-        + ',"type":["cmsArticleWebOld"],"client":"web","clientType":"web","clientVersion":"curr","param":{"cmsArticleWebOld":{"searchScope":"default","sort":"default","pageIndex":1,"pageSize":100,"preTag":"<em>","postTag":"</em>"}}}',
+        + ',"type":["cmsArticleWebOld"],"client":"web","clientType":"web","clientVersion":"curr",'
+        '"param":{"cmsArticleWebOld":{"searchScope":"default","sort":"default","pageIndex":1,'
+        '"pageSize":100,"preTag":"<em>","postTag":"</em>"}}}',
         "_": "1668256937996",
     }
     r = requests.get(url, params=params)
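
The endpoint keeps the cb callback parameter, so the body comes back as JSONP (JSON wrapped in a jQuery callback). A generic way to unwrap such a payload before parsing it, shown as an illustrative helper rather than code from akshare:

import json
import re


def unwrap_jsonp(text: str) -> dict:
    # Strip "callbackName( ... )" or "callbackName( ... );" down to the JSON inside.
    match = re.search(r"^[^(]*\((.*)\)\s*;?\s*$", text, flags=re.S)
    payload = match.group(1) if match else text
    return json.loads(payload)


# e.g. unwrap_jsonp('jQuery3510875346244069884_1668256937995({"result": {"cmsArticleWebOld": []}})')
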
akshare/nlp/nlp_interface.py CHANGED
@@ -1,11 +1,12 @@
 #!/usr/bin/env python
 # -*- coding:utf-8 -*-
 """
-Date: 2020/11/27 14:02
+Date: 2024/7/6 14:02
 Desc: ownthink-knowledge graph
 https://ownthink.com/
 https://www.ownthink.com/docs/kg/
 """
+
 import pandas as pd
 import requests
 
@@ -28,7 +29,7 @@ def nlp_ownthink(word: str = "人工智能", indicator: str = "entity") -> pd.Da
     r = requests.post(url, data=payload)
     if not r.json()["data"]:
         print("Can not find the resource, please type into the correct word")
-        return None
+        return pd.DataFrame()
     if indicator == "entity":
         return r.json()["data"]["entity"]
     if indicator == "desc":
@@ -48,18 +49,16 @@ def nlp_answer(question: str = "人工智能") -> str:
     :return: indicator data
     :rtype: list or dict or pandas.DataFrame
     """
-    url = 'https://api.ownthink.com/bot'
-    params = {
-        'spoken': question
-    }
+    url = "https://api.ownthink.com/bot"
+    params = {"spoken": question}
     r = requests.get(url, params=params)
     json_data = r.json()
-    answer = json_data['data']['info']['text']
+    answer = json_data["data"]["info"]["text"]
     return answer
 
 
 if __name__ == "__main__":
-    nlp_ownthink_df = nlp_ownthink(word="人工智能", indicator="tag")
+    nlp_ownthink_df = nlp_ownthink(word="人工智能", indicator="entity")
     print(nlp_ownthink_df)
 
     nlp_answer_df = nlp_answer(question="姚明的身高")
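
One behavioural note on the return None to return pd.DataFrame() change above: callers that used to test "if result is None" should now test for an empty frame. A small sketch, assuming nlp_ownthink is still exported at the package level:

import akshare as ak
import pandas as pd

result = ak.nlp_ownthink(word="人工智能", indicator="entity")
# An unmatched word now yields an empty DataFrame instead of None,
# while a hit still returns the entity payload from the API.
if isinstance(result, pd.DataFrame) and result.empty:
    print("no entity found for this word")
else:
    print(result)
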
akshare/option/cons.py CHANGED
@@ -4,6 +4,7 @@
 Date: 2019/9/30 13:58
 Desc: 期权配置文件
 """
+
 import datetime
 import json
 import os
@@ -38,21 +39,22 @@ SH_OPTION_PAYLOAD = {
     "select": "select: code,name,last,change,chg_rate,amp_rate,volume,amount,prev_close"
 }
 
-SH_OPTION_PAYLOAD_OTHER = {
-    "select": "contractid,last,chg_rate,presetpx,exepx"
-}
-
+SH_OPTION_PAYLOAD_OTHER = {"select": "contractid,last,chg_rate,presetpx,exepx"}
 
 
 # 大连商品交易所
 DCE_OPTION_URL = "http://www.dce.com.cn/publicweb/quotesdata/dayQuotesCh.html"
-DCE_DAILY_OPTION_URL = "http://www.dce.com.cn/publicweb/quotesdata/exportDayQuotesChData.html"
+DCE_DAILY_OPTION_URL = (
+    "http://www.dce.com.cn/publicweb/quotesdata/exportDayQuotesChData.html"
+)
 
 # 上海期货交易所
-SHFE_OPTION_URL = "http://www.shfe.com.cn/data/dailydata/option/kx/kx{}.dat"
+SHFE_OPTION_URL = "https://tsite.shfe.com.cn/data/dailydata/option/kx/kx{}.dat"
 
 # 郑州商品交易所
-CZCE_DAILY_OPTION_URL_3 = "http://www.czce.com.cn/cn/DFSStaticFiles/Option/{}/{}/OptionDataDaily.txt"
+CZCE_DAILY_OPTION_URL_3 = (
+    "http://www.czce.com.cn/cn/DFSStaticFiles/Option/{}/{}/OptionDataDaily.txt"
+)
 
 # PAYLOAD
 SHFE_HEADERS = {"User-Agent": "Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)"}
@@ -74,10 +76,8 @@ def convert_date(date):
             groups = match.groups()
             if len(groups) == 3:
                 return datetime.date(
-                    year=int(
-                        groups[0]), month=int(
-                        groups[1]), day=int(
-                        groups[2]))
+                    year=int(groups[0]), month=int(groups[1]), day=int(groups[2])
+                )
     return None
 
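
The URL constants above are str.format templates. A tiny illustration of how a trade date might be substituted; the exact placeholder semantics (a YYYYMMDD date for SHFE, a year plus date for CZCE) are inferred from the paths and should be treated as an assumption.

# Hypothetical fill-in of the templates defined in option/cons.py.
SHFE_OPTION_URL = "https://tsite.shfe.com.cn/data/dailydata/option/kx/kx{}.dat"
CZCE_DAILY_OPTION_URL_3 = (
    "http://www.czce.com.cn/cn/DFSStaticFiles/Option/{}/{}/OptionDataDaily.txt"
)

trade_date = "20240424"
print(SHFE_OPTION_URL.format(trade_date))
print(CZCE_DAILY_OPTION_URL_3.format(trade_date[:4], trade_date))
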