akshare-1.14.80-py3-none-any.whl → akshare-1.14.81-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of akshare has been flagged as potentially problematic.

akshare/__init__.py CHANGED
@@ -2891,9 +2891,10 @@ amac_manager_cancelled_info # 中国证券投资基金业协会-信息公示-诚
 1.14.78 fix: fix stock_analyst_rank_em interface
 1.14.79 add: add stock_zcfz_bj_em interface
 1.14.80 fix: fix stock_hot_rank_wc interface
+1.14.81 fix: fix stock_hsgt_hist_em interface
 """

-__version__ = "1.14.80"
+__version__ = "1.14.81"

 __author__ = "AKFamily"

 import sys
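For reference, a minimal sketch of confirming the upgrade from an interpreter; this snippet is illustrative and not part of the diff, and it assumes akshare 1.14.81 has been installed (for example via `pip install --upgrade akshare`):

```python
# Illustrative check only: __version__ is defined in akshare/__init__.py as shown above.
import akshare as ak

print(ak.__version__)  # expected to print "1.14.81" after upgrading
```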
akshare/stock_feature/stock_hsgt_em.py CHANGED

@@ -1533,7 +1533,7 @@ if __name__ == "__main__":
     )
     print(stock_hsgt_institution_statistics_em_df)

-    stock_hsgt_hist_em_df = stock_hsgt_hist_em(symbol="港股通沪")
+    stock_hsgt_hist_em_df = stock_hsgt_hist_em(symbol="北向资金")
     print(stock_hsgt_hist_em_df)

     stock_hsgt_board_rank_em_df = stock_hsgt_board_rank_em(
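The only code change here is the example symbol used by the module's `__main__` demo. A minimal usage sketch of the fixed interface, assuming akshare 1.14.81 and that the function is exposed at the package level as usual; the symbol value is taken from the demo above:

```python
# Minimal sketch: call the fixed interface with the symbol used in the demo above.
import akshare as ak

stock_hsgt_hist_em_df = ak.stock_hsgt_hist_em(symbol="北向资金")
print(stock_hsgt_hist_em_df.head())
```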
akshare/stock_feature/stock_technology_ths.py CHANGED

@@ -1,7 +1,7 @@
 # -*- coding:utf-8 -*-
 # !/usr/bin/env python
 """
-Date: 2024/2/7 23:00
+Date: 2024/9/21 19:00
 Desc: 同花顺-数据中心-技术选股
 https://data.10jqka.com.cn/rank/cxg/
 """
@@ -9,9 +9,9 @@ https://data.10jqka.com.cn/rank/cxg/
 from io import StringIO

 import pandas as pd
+import py_mini_racer
 import requests
 from bs4 import BeautifulSoup
-import py_mini_racer

 from akshare.datasets import get_ths_js
 from akshare.utils.tqdm import get_tqdm
@@ -26,7 +26,7 @@ def _get_file_content_ths(file: str = "ths.js") -> str:
     :rtype: str
     """
     setting_file_path = get_ths_js(file)
-    with open(setting_file_path) as f:
+    with open(setting_file_path, encoding="utf-8") as f:
         file_data = f.read()
     return file_data
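The change above pins the encoding when reading the bundled ths.js, so the result no longer depends on the platform's locale default (on most Python builds, `open()` without an `encoding` argument falls back to `locale.getpreferredencoding()`). A minimal standalone sketch of the same pattern; the helper name is illustrative and not part of the library:

```python
# Illustrative helper mirroring the fix above: read a text file as UTF-8,
# independent of the operating system's locale encoding.
def read_text_utf8(path: str) -> str:
    with open(path, encoding="utf-8") as f:
        return f.read()
```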
@@ -51,14 +51,20 @@ def stock_rank_cxg_ths(symbol: str = "创月新高") -> pd.DataFrame:
     js_code.eval(js_content)
     v_code = js_code.call("v")
     headers = {
-        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+        "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
         "Cookie": f"v={v_code}",
     }
-    url = f"http://data.10jqka.com.cn/rank/cxg/board/{symbol_map[symbol]}/field/stockcode/order/asc/page/1/ajax/1/free/1/"
+    url = (
+        f"http://data.10jqka.com.cn/rank/cxg/board/{symbol_map[symbol]}/field/"
+        f"stockcode/order/asc/page/1/ajax/1/free/1/"
+    )
     r = requests.get(url, headers=headers)
-    soup = BeautifulSoup(r.text, "lxml")
+    soup = BeautifulSoup(r.text, features="lxml")
     try:
-        total_page = soup.find("span", attrs={"class": "page_info"}).text.split("/")[1]
+        total_page = soup.find(name="span", attrs={"class": "page_info"}).text.split(
+            "/"
+        )[1]
     except AttributeError:
         total_page = 1
     big_df = pd.DataFrame()
@@ -66,13 +72,17 @@ def stock_rank_cxg_ths(symbol: str = "创月新高") -> pd.DataFrame:
     for page in tqdm(range(1, int(total_page) + 1), leave=False):
         v_code = js_code.call("v")
         headers = {
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+            "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
             "Cookie": f"v={v_code}",
         }
-        url = f"http://data.10jqka.com.cn/rank/cxg/board/{symbol_map[symbol]}/field/stockcode/order/asc/page/{page}/ajax/1/free/1/"
+        url = (
+            f"http://data.10jqka.com.cn/rank/cxg/board/{symbol_map[symbol]}/field/stockcode/"
+            f"order/asc/page/{page}/ajax/1/free/1/"
+        )
         r = requests.get(url, headers=headers)
         temp_df = pd.read_html(StringIO(r.text))[0]
-        big_df = pd.concat([big_df, temp_df], ignore_index=True)
+        big_df = pd.concat(objs=[big_df, temp_df], ignore_index=True)
     big_df.columns = [
         "序号",
         "股票代码",
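The same edits repeat in every 10jqka rank interface below: the long User-Agent string is split across two implicitly concatenated literals, `BeautifulSoup`, `soup.find`, and `pd.concat` switch to keyword arguments, and over-long URLs are wrapped where needed. The underlying fetch pattern is unchanged. A condensed sketch of that shared pattern, assuming the bundled ths.js cookie generator shown above; the function and variable names are illustrative, not part of the library:

```python
# Condensed, illustrative sketch of the shared 10jqka pagination pattern.
from io import StringIO

import pandas as pd
import py_mini_racer
import requests
from bs4 import BeautifulSoup


def fetch_10jqka_rank(base_url: str, js_content: str) -> pd.DataFrame:
    """base_url must contain a {page} placeholder; js_content is the bundled ths.js."""
    js_code = py_mini_racer.MiniRacer()
    js_code.eval(js_content)  # ths.js defines v(), which produces the anti-bot cookie value
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
        "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
        "Cookie": f"v={js_code.call('v')}",
    }
    r = requests.get(base_url.format(page=1), headers=headers)
    soup = BeautifulSoup(r.text, features="lxml")
    try:
        # The page_info span reads like "1/12"; the part after "/" is the page count.
        total_page = soup.find(name="span", attrs={"class": "page_info"}).text.split("/")[1]
    except AttributeError:
        total_page = 1
    big_df = pd.DataFrame()
    for page in range(1, int(total_page) + 1):
        headers["Cookie"] = f"v={js_code.call('v')}"  # refresh the cookie for each request
        r = requests.get(base_url.format(page=page), headers=headers)
        temp_df = pd.read_html(StringIO(r.text))[0]
        big_df = pd.concat(objs=[big_df, temp_df], ignore_index=True)
    return big_df
```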
@@ -116,14 +126,20 @@ def stock_rank_cxd_ths(symbol: str = "创月新低") -> pd.DataFrame:
     js_code.eval(js_content)
     v_code = js_code.call("v")
     headers = {
-        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+        "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
         "Cookie": f"v={v_code}",
     }
-    url = f"http://data.10jqka.com.cn/rank/cxd/board/{symbol_map[symbol]}/field/stockcode/order/asc/page/1/ajax/1/free/1/"
+    url = (
+        f"http://data.10jqka.com.cn/rank/cxd/board/{symbol_map[symbol]}/field/"
+        f"stockcode/order/asc/page/1/ajax/1/free/1/"
+    )
     r = requests.get(url, headers=headers)
-    soup = BeautifulSoup(r.text, "lxml")
+    soup = BeautifulSoup(r.text, features="lxml")
     try:
-        total_page = soup.find("span", attrs={"class": "page_info"}).text.split("/")[1]
+        total_page = soup.find(name="span", attrs={"class": "page_info"}).text.split(
+            "/"
+        )[1]
     except AttributeError:
         total_page = 1
     big_df = pd.DataFrame()
@@ -131,13 +147,17 @@ def stock_rank_cxd_ths(symbol: str = "创月新低") -> pd.DataFrame:
     for page in tqdm(range(1, int(total_page) + 1), leave=False):
         v_code = js_code.call("v")
         headers = {
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+            "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
             "Cookie": f"v={v_code}",
         }
-        url = f"http://data.10jqka.com.cn/rank/cxd/board/{symbol_map[symbol]}/field/stockcode/order/asc/page/{page}/ajax/1/free/1/"
+        url = (
+            f"http://data.10jqka.com.cn/rank/cxd/board/{symbol_map[symbol]}/field/"
+            f"stockcode/order/asc/page/{page}/ajax/1/free/1/"
+        )
         r = requests.get(url, headers=headers)
         temp_df = pd.read_html(StringIO(r.text))[0]
-        big_df = pd.concat([big_df, temp_df], ignore_index=True)
+        big_df = pd.concat(objs=[big_df, temp_df], ignore_index=True)
     big_df.columns = [
         "序号",
         "股票代码",
@@ -173,14 +193,17 @@ def stock_rank_lxsz_ths() -> pd.DataFrame:
     js_code.eval(js_content)
     v_code = js_code.call("v")
     headers = {
-        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+        "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
         "Cookie": f"v={v_code}",
     }
     url = "http://data.10jqka.com.cn/rank/lxsz/field/lxts/order/desc/page/1/ajax/1/free/1/"
     r = requests.get(url, headers=headers)
-    soup = BeautifulSoup(r.text, "lxml")
+    soup = BeautifulSoup(r.text, features="lxml")
     try:
-        total_page = soup.find("span", attrs={"class": "page_info"}).text.split("/")[1]
+        total_page = soup.find(name="span", attrs={"class": "page_info"}).text.split(
+            "/"
+        )[1]
     except AttributeError:
         total_page = 1
     big_df = pd.DataFrame()
@@ -188,13 +211,14 @@ def stock_rank_lxsz_ths() -> pd.DataFrame:
     for page in tqdm(range(1, int(total_page) + 1), leave=False):
         v_code = js_code.call("v")
         headers = {
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+            "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
             "Cookie": f"v={v_code}",
         }
         url = f"http://data.10jqka.com.cn/rank/lxsz/field/lxts/order/desc/page/{page}/ajax/1/free/1/"
         r = requests.get(url, headers=headers)
         temp_df = pd.read_html(StringIO(r.text), converters={"股票代码": str})[0]
-        big_df = pd.concat([big_df, temp_df], ignore_index=True)
+        big_df = pd.concat(objs=[big_df, temp_df], ignore_index=True)
     big_df.columns = [
         "序号",
         "股票代码",
@@ -230,14 +254,17 @@ def stock_rank_lxxd_ths() -> pd.DataFrame:
     js_code.eval(js_content)
     v_code = js_code.call("v")
     headers = {
-        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+        "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
         "Cookie": f"v={v_code}",
     }
     url = "http://data.10jqka.com.cn/rank/lxxd/field/lxts/order/desc/page/1/ajax/1/free/1/"
     r = requests.get(url, headers=headers)
-    soup = BeautifulSoup(r.text, "lxml")
+    soup = BeautifulSoup(r.text, features="lxml")
     try:
-        total_page = soup.find("span", attrs={"class": "page_info"}).text.split("/")[1]
+        total_page = soup.find(name="span", attrs={"class": "page_info"}).text.split(
+            "/"
+        )[1]
     except AttributeError:
         total_page = 1
     big_df = pd.DataFrame()
@@ -245,13 +272,14 @@ def stock_rank_lxxd_ths() -> pd.DataFrame:
     for page in tqdm(range(1, int(total_page) + 1), leave=False):
         v_code = js_code.call("v")
         headers = {
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+            "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
             "Cookie": f"v={v_code}",
         }
         url = f"http://data.10jqka.com.cn/rank/lxxd/field/lxts/order/desc/page/{page}/ajax/1/free/1/"
         r = requests.get(url, headers=headers)
         temp_df = pd.read_html(StringIO(r.text), converters={"股票代码": str})[0]
-        big_df = pd.concat([big_df, temp_df], ignore_index=True)
+        big_df = pd.concat(objs=[big_df, temp_df], ignore_index=True)
     big_df.columns = [
         "序号",
         "股票代码",
@@ -287,14 +315,17 @@ def stock_rank_cxfl_ths() -> pd.DataFrame:
     js_code.eval(js_content)
     v_code = js_code.call("v")
     headers = {
-        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+        "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
         "Cookie": f"v={v_code}",
     }
     url = "http://data.10jqka.com.cn/rank/cxfl/field/count/order/desc/ajax/1/free/1/page/1/free/1/"
     r = requests.get(url, headers=headers)
-    soup = BeautifulSoup(r.text, "lxml")
+    soup = BeautifulSoup(r.text, features="lxml")
     try:
-        total_page = soup.find("span", attrs={"class": "page_info"}).text.split("/")[1]
+        total_page = soup.find(name="span", attrs={"class": "page_info"}).text.split(
+            "/"
+        )[1]
     except AttributeError:
         total_page = 1
     big_df = pd.DataFrame()
@@ -302,13 +333,14 @@ def stock_rank_cxfl_ths() -> pd.DataFrame:
     for page in tqdm(range(1, int(total_page) + 1), leave=False):
         v_code = js_code.call("v")
         headers = {
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+            "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
             "Cookie": f"v={v_code}",
        }
         url = f"http://data.10jqka.com.cn/rank/cxfl/field/count/order/desc/ajax/1/free/1/page/{page}/free/1/"
         r = requests.get(url, headers=headers)
         temp_df = pd.read_html(StringIO(r.text), converters={"股票代码": str})[0]
-        big_df = pd.concat([big_df, temp_df], ignore_index=True)
+        big_df = pd.concat(objs=[big_df, temp_df], ignore_index=True)
     big_df.columns = [
         "序号",
         "股票代码",
@@ -343,14 +375,17 @@ def stock_rank_cxsl_ths() -> pd.DataFrame:
     js_code.eval(js_content)
     v_code = js_code.call("v")
     headers = {
-        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+        "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
         "Cookie": f"v={v_code}",
     }
     url = "http://data.10jqka.com.cn/rank/cxsl/field/count/order/desc/ajax/1/free/1/page/1/free/1/"
     r = requests.get(url, headers=headers)
-    soup = BeautifulSoup(r.text, "lxml")
+    soup = BeautifulSoup(r.text, features="lxml")
     try:
-        total_page = soup.find("span", attrs={"class": "page_info"}).text.split("/")[1]
+        total_page = soup.find(name="span", attrs={"class": "page_info"}).text.split(
+            "/"
+        )[1]
     except AttributeError:
         total_page = 1
     big_df = pd.DataFrame()
@@ -358,13 +393,14 @@ def stock_rank_cxsl_ths() -> pd.DataFrame:
     for page in tqdm(range(1, int(total_page) + 1), leave=False):
         v_code = js_code.call("v")
         headers = {
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+            "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
             "Cookie": f"v={v_code}",
         }
         url = f"http://data.10jqka.com.cn/rank/cxsl/field/count/order/desc/ajax/1/free/1/page/{page}/free/1/"
         r = requests.get(url, headers=headers)
         temp_df = pd.read_html(StringIO(r.text), converters={"股票代码": str})[0]
-        big_df = pd.concat([big_df, temp_df], ignore_index=True)
+        big_df = pd.concat(objs=[big_df, temp_df], ignore_index=True)
     big_df.columns = [
         "序号",
         "股票代码",
@@ -411,14 +447,17 @@ def stock_rank_xstp_ths(symbol: str = "500日均线") -> pd.DataFrame:
     js_code.eval(js_content)
     v_code = js_code.call("v")
     headers = {
-        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+        "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
         "Cookie": f"v={v_code}",
     }
     url = f"http://data.10jqka.com.cn/rank/xstp/board/{symbol_map[symbol]}/order/asc/ajax/1/free/1/page/1/free/1/"
     r = requests.get(url, headers=headers)
-    soup = BeautifulSoup(r.text, "lxml")
+    soup = BeautifulSoup(r.text, features="lxml")
     try:
-        total_page = soup.find("span", attrs={"class": "page_info"}).text.split("/")[1]
+        total_page = soup.find(name="span", attrs={"class": "page_info"}).text.split(
+            "/"
+        )[1]
     except AttributeError:
         total_page = 1
     big_df = pd.DataFrame()
@@ -426,13 +465,17 @@ def stock_rank_xstp_ths(symbol: str = "500日均线") -> pd.DataFrame:
     for page in tqdm(range(1, int(total_page) + 1), leave=False):
         v_code = js_code.call("v")
         headers = {
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+            "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
             "Cookie": f"v={v_code}",
         }
-        url = f"http://data.10jqka.com.cn/rank/xstp/board/{symbol_map[symbol]}/order/asc/ajax/1/free/1/page/{page}/free/1/"
+        url = (
+            f"http://data.10jqka.com.cn/rank/xstp/board/{symbol_map[symbol]}/order/"
+            f"asc/ajax/1/free/1/page/{page}/free/1/"
+        )
         r = requests.get(url, headers=headers)
         temp_df = pd.read_html(StringIO(r.text), converters={"股票代码": str})[0]
-        big_df = pd.concat([big_df, temp_df], ignore_index=True)
+        big_df = pd.concat(objs=[big_df, temp_df], ignore_index=True)
     big_df.columns = [
         "序号",
         "股票代码",
@@ -476,14 +519,17 @@ def stock_rank_xxtp_ths(symbol: str = "500日均线") -> pd.DataFrame:
     js_code.eval(js_content)
     v_code = js_code.call("v")
     headers = {
-        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+        "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
         "Cookie": f"v={v_code}",
     }
     url = f"http://data.10jqka.com.cn/rank/xxtp/board/{symbol_map[symbol]}/order/asc/ajax/1/free/1/page/1/free/1/"
     r = requests.get(url, headers=headers)
-    soup = BeautifulSoup(r.text, "lxml")
+    soup = BeautifulSoup(r.text, features="lxml")
     try:
-        total_page = soup.find("span", attrs={"class": "page_info"}).text.split("/")[1]
+        total_page = soup.find(name="span", attrs={"class": "page_info"}).text.split(
+            "/"
+        )[1]
     except AttributeError:
         total_page = 1
     big_df = pd.DataFrame()
@@ -491,13 +537,17 @@ def stock_rank_xxtp_ths(symbol: str = "500日均线") -> pd.DataFrame:
     for page in tqdm(range(1, int(total_page) + 1), leave=False):
         v_code = js_code.call("v")
         headers = {
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+            "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
             "Cookie": f"v={v_code}",
         }
-        url = f"http://data.10jqka.com.cn/rank/xxtp/board/{symbol_map[symbol]}/order/asc/ajax/1/free/1/page/{page}/free/1/"
+        url = (
+            f"http://data.10jqka.com.cn/rank/xxtp/board/{symbol_map[symbol]}/order/"
+            f"asc/ajax/1/free/1/page/{page}/free/1/"
+        )
         r = requests.get(url, headers=headers)
         temp_df = pd.read_html(StringIO(r.text), converters={"股票代码": str})[0]
-        big_df = pd.concat([big_df, temp_df], ignore_index=True)
+        big_df = pd.concat(objs=[big_df, temp_df], ignore_index=True)
     big_df.columns = [
         "序号",
         "股票代码",
@@ -529,14 +579,17 @@ def stock_rank_ljqs_ths() -> pd.DataFrame:
     js_code.eval(js_content)
     v_code = js_code.call("v")
     headers = {
-        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+        "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
         "Cookie": f"v={v_code}",
     }
     url = "http://data.10jqka.com.cn/rank/ljqs/field/count/order/desc/ajax/1/free/1/page/1/free/1/"
     r = requests.get(url, headers=headers)
-    soup = BeautifulSoup(r.text, "lxml")
+    soup = BeautifulSoup(r.text, features="lxml")
     try:
-        total_page = soup.find("span", attrs={"class": "page_info"}).text.split("/")[1]
+        total_page = soup.find(name="span", attrs={"class": "page_info"}).text.split(
+            "/"
+        )[1]
     except AttributeError:
         total_page = 1
     big_df = pd.DataFrame()
@@ -544,13 +597,14 @@ def stock_rank_ljqs_ths() -> pd.DataFrame:
     for page in tqdm(range(1, int(total_page) + 1), leave=False):
         v_code = js_code.call("v")
         headers = {
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+            "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
             "Cookie": f"v={v_code}",
         }
         url = f"http://data.10jqka.com.cn/rank/ljqs/field/count/order/desc/ajax/1/free/1/page/{page}/free/1/"
         r = requests.get(url, headers=headers)
         temp_df = pd.read_html(StringIO(r.text), converters={"股票代码": str})[0]
-        big_df = pd.concat([big_df, temp_df], ignore_index=True)
+        big_df = pd.concat(objs=[big_df, temp_df], ignore_index=True)
     big_df.columns = [
         "序号",
         "股票代码",
@@ -583,14 +637,17 @@ def stock_rank_ljqd_ths() -> pd.DataFrame:
     js_code.eval(js_content)
     v_code = js_code.call("v")
     headers = {
-        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+        "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
         "Cookie": f"v={v_code}",
     }
     url = "http://data.10jqka.com.cn/rank/ljqd/field/count/order/desc/ajax/1/free/1/page/1/free/1/"
     r = requests.get(url, headers=headers)
-    soup = BeautifulSoup(r.text, "lxml")
+    soup = BeautifulSoup(r.text, features="lxml")
     try:
-        total_page = soup.find("span", attrs={"class": "page_info"}).text.split("/")[1]
+        total_page = soup.find(name="span", attrs={"class": "page_info"}).text.split(
+            "/"
+        )[1]
     except AttributeError:
         total_page = 1
     big_df = pd.DataFrame()
@@ -598,13 +655,14 @@ def stock_rank_ljqd_ths() -> pd.DataFrame:
     for page in tqdm(range(1, int(total_page) + 1), leave=False):
         v_code = js_code.call("v")
         headers = {
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+            "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
             "Cookie": f"v={v_code}",
         }
         url = f"http://data.10jqka.com.cn/rank/ljqd/field/count/order/desc/ajax/1/free/1/page/{page}/free/1/"
         r = requests.get(url, headers=headers)
         temp_df = pd.read_html(StringIO(r.text), converters={"股票代码": str})[0]
-        big_df = pd.concat([big_df, temp_df], ignore_index=True)
+        big_df = pd.concat(objs=[big_df, temp_df], ignore_index=True)
     big_df.columns = [
         "序号",
         "股票代码",
@@ -635,30 +693,17 @@ def stock_rank_xzjp_ths() -> pd.DataFrame:
     js_code = py_mini_racer.MiniRacer()
     js_content = _get_file_content_ths("ths.js")
     js_code.eval(js_content)
+    big_df = pd.DataFrame()
     v_code = js_code.call("v")
     headers = {
-        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
+        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+        "(KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
         "Cookie": f"v={v_code}",
     }
     url = "http://data.10jqka.com.cn/ajax/xzjp/field/DECLAREDATE/order/desc/ajax/1/free/1/"
     r = requests.get(url, headers=headers)
-    soup = BeautifulSoup(r.text, "lxml")
-    try:
-        total_page = soup.find("span", attrs={"class": "page_info"}).text.split("/")[1]
-    except AttributeError:
-        total_page = 1
-    big_df = pd.DataFrame()
-    tqdm = get_tqdm()
-    for page in tqdm(range(1, int(total_page) + 1), leave=False):
-        v_code = js_code.call("v")
-        headers = {
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36",
-            "Cookie": f"v={v_code}",
-        }
-        url = "http://data.10jqka.com.cn/ajax/xzjp/field/DECLAREDATE/order/desc/ajax/1/free/1/"
-        r = requests.get(url, headers=headers)
-        temp_df = pd.read_html(StringIO(r.text), converters={"股票代码": str})[0]
-        big_df = pd.concat([big_df, temp_df], ignore_index=True)
+    temp_df = pd.read_html(StringIO(r.text), converters={"股票代码": str})[0]
+    big_df = pd.concat(objs=[big_df, temp_df], ignore_index=True)
     big_df.columns = [
         "序号",
         "举牌公告日",
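In this rewrite, stock_rank_xzjp_ths reads the 举牌 table from a single response and drops the page-count probing and per-page loop; the public signature is unchanged. A minimal usage sketch, assuming akshare 1.14.81 with the interface exposed at the package level as usual:

```python
# Minimal sketch: the interface takes no arguments and returns one DataFrame.
import akshare as ak

stock_rank_xzjp_ths_df = ak.stock_rank_xzjp_ths()
print(stock_rank_xzjp_ths_df.head())
```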
akshare-1.14.81.dist-info/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: akshare
-Version: 1.14.80
+Version: 1.14.81
 Summary: AKShare is an elegant and simple financial data interface library for Python, built for human beings!
 Home-page: https://github.com/akfamily/akshare
 Author: AKFamily
akshare-1.14.81.dist-info/RECORD CHANGED

@@ -1,4 +1,4 @@
-akshare/__init__.py,sha256=hJffLPp34sDasydnUaiXjAmvpQLUWySJHC-plk6slqc,181804
+akshare/__init__.py,sha256=UNOTe6VC1wTpP9qjsDdwdwqGGDpryPAXNaCFeI89_z8,181850
 akshare/datasets.py,sha256=-qdwaQjgBlftX84uM74KJqCYJYkQ50PV416_neA4uls,995
 akshare/air/__init__.py,sha256=RMTf1bT5EOE3ttWpn3hGu1LtUmsVxDoa0W7W0gXHOy8,81
 akshare/air/air_hebei.py,sha256=xIXNGLK7IGYqrkteM9fxnHAwWqk6PCQs6D9-ggZ7byY,4442
@@ -317,7 +317,7 @@ akshare/stock_feature/stock_hist_em.py,sha256=H8bbC0LJBvDTRnrhZvFtrMM8Y6H4LZk-C0
 akshare/stock_feature/stock_hist_tx.py,sha256=WpLsbkG2didSx7lYNkSbTWNTrLhUKbcopfD18WO2Rlc,3397
 akshare/stock_feature/stock_hk_valuation_baidu.py,sha256=i3bPliFkPZcfXvuxuefSWFLC_DoF9ILdPBEyM0A9Lg4,1715
 akshare/stock_feature/stock_hot_xq.py,sha256=NmoH4x-0hiDztj-YwzMFVIyOICQ2wUUBbhjt91q-tq4,9112
-akshare/stock_feature/stock_hsgt_em.py,sha256=4traNM0l5DYCxd4CtqS9MgwYsvJvT8Tk3FQl0DWT08w,56688
+akshare/stock_feature/stock_hsgt_em.py,sha256=1S18-GB9RvDP7tTvKR_W-BFSoKOPbpBTrbK67wbf-Uw,56688
 akshare/stock_feature/stock_hsgt_exchange_rate.py,sha256=YvhvdGx1nBJ_1swos1YNOtzy0GMFYo8MgNBh5QKphtE,6838
 akshare/stock_feature/stock_hsgt_min_em.py,sha256=KLeez7MQwBAcO-RT7n41LOikUfvXDGK0-G1n9av5mtY,2883
 akshare/stock_feature/stock_info.py,sha256=oLeWzDAEO0GCKGaNE-k1yUKn11qCkgENEmt-1nJq6-M,9912
@@ -337,7 +337,7 @@ akshare/stock_feature/stock_report_em.py,sha256=jhePrTKGIYzdz8idiPoDs1vEajd73XRI
 akshare/stock_feature/stock_research_report_em.py,sha256=XFQadpUI2l0-Ik8BQWf-eCC4uFC1xxt9VNiZ9NU2zp0,4888
 akshare/stock_feature/stock_sns_sseinfo.py,sha256=TGGLw5P77Hh-sSHgw_KKoK29d1m_V_2GDQXe9m_XFew,4556
 akshare/stock_feature/stock_sy_em.py,sha256=GdqjWH8uVy_8st2sYENgzNlJRQxkTajDkaHBwX31pjc,17721
-akshare/stock_feature/stock_technology_ths.py,sha256=Cf-QYVJ2aweFIlQAgqAom6bYyfnoSsYqa95A6I7_XRQ,30579
+akshare/stock_feature/stock_technology_ths.py,sha256=4u9z7H6MYEutOYAQvYfzgc_FxG6XlhkMLujSotAbraw,30827
 akshare/stock_feature/stock_tfp_em.py,sha256=nN4gcK6hOe4tIA-nOoY_lMNC7qY7o9Ga35_-VvxS3fA,2474
 akshare/stock_feature/stock_three_report_em.py,sha256=riIE9YwMKn279GAAdBFATTfK4_ui4X30Ew1LNcIwwsk,23675
 akshare/stock_feature/stock_ttm_lyr.py,sha256=_UYGCAsZZqYrcnwdT3qVwM9kPxuOshWIUY4lVaO1rLw,1176
@@ -380,8 +380,8 @@ akshare/utils/token_process.py,sha256=K4rGXjh_tgugbRcyOK2h2x0jP3PT65IIK7nxhUKhOe
 akshare/utils/tqdm.py,sha256=MuPNwcswkOGjwWQOMWXi9ZvQ_RmW4obCWRj2i7HM7FE,847
 tests/__init__.py,sha256=gNzhlO0UPjFq6Ieb38kaVIODXv4cTDByrdohAZnDYt4,82
 tests/test_func.py,sha256=j1MGYbZI2if2j_LY1S4FLsf4qfq4NwVqD5wmRlv5Log,832
-akshare-1.14.80.dist-info/LICENSE,sha256=mmSZCPgfHiVw34LXuFArd-SUgQtBJ_QsIlh-kWlDHfs,1073
-akshare-1.14.80.dist-info/METADATA,sha256=4PKzIicP5KvR1Wh0tDYVKSnCgETraC5sZW3w0kkY09o,14112
-akshare-1.14.80.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
-akshare-1.14.80.dist-info/top_level.txt,sha256=jsf9ZzZPmHaISTVumQPsAw7vv7Yv-PdEVW70SMEelQQ,14
-akshare-1.14.80.dist-info/RECORD,,
+akshare-1.14.81.dist-info/LICENSE,sha256=mmSZCPgfHiVw34LXuFArd-SUgQtBJ_QsIlh-kWlDHfs,1073
+akshare-1.14.81.dist-info/METADATA,sha256=XFRpPhoSjkrAB-A50E9k16x7hpybJHkra92R6SSbYj0,14112
+akshare-1.14.81.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+akshare-1.14.81.dist-info/top_level.txt,sha256=jsf9ZzZPmHaISTVumQPsAw7vv7Yv-PdEVW70SMEelQQ,14
+akshare-1.14.81.dist-info/RECORD,,