akshare 1.14.47-py3-none-any.whl → 1.14.49-py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.

Potentially problematic release.


This version of akshare might be problematic.

akshare/__init__.py CHANGED
@@ -2858,9 +2858,11 @@ amac_manager_cancelled_info # 中国证券投资基金业协会-信息公示-诚
  1.14.45 fix: fix option_lhb_em interface
  1.14.46 add: add spot_hog_soozhu interface
  1.14.47 fix: fix spot_hog_soozhu interface
+ 1.14.48 add: add spot_hog_year_trend_soozhu interface
+ 1.14.49 fix: fix fortune_rank interface
  """

- __version__ = "1.14.47"
+ __version__ = "1.14.49"
  __author__ = "AKFamily"

  import sys
@@ -2885,7 +2887,7 @@ del sys
  """
  搜猪-生猪大数据-各省均价实时排行榜
  """
- from akshare.spot.spot_hog_soozhu import spot_hog_soozhu
+ from akshare.spot.spot_hog_soozhu import spot_hog_soozhu, spot_hog_year_trend_soozhu

  """
  知名港股
@@ -3447,11 +3449,6 @@ from akshare.stock.stock_hk_hot_rank_em import (
  stock_hk_hot_rank_em,
  )

- """
- 冬奥会历届奖牌榜
- """
- from akshare.sport.sport_olympic_winter import sport_olympic_winter_hist
-
  """
  财新指数
  """
akshare/fortune/fortune_500.py CHANGED
@@ -7,6 +7,7 @@ https://www.fortunechina.com/fortune500/index.htm
  特殊情况说明:
  2010年由于网页端没有公布公司所属的国家, 故 2010 年数据没有国家这列
  """
+
  import json
  from functools import lru_cache
  from io import StringIO
@@ -28,12 +29,12 @@ def _fortune_rank_year_url_map() -> dict:
  url = "https://www.fortunechina.com/fortune500/index.htm"
  r = requests.get(url)
  soup = BeautifulSoup(r.text, features="lxml")
- url_2023 = soup.find(name='meta', attrs={"property": "og:url"})['content'].strip()
- node_list = soup.find_all(name='div', attrs={"class": "swiper-slide"})
- url_list = [item.find("a")['href'] for item in node_list]
+ url_2023 = "https://www.fortunechina.com/fortune500/c/2023-08/02/content_436874.htm"
+ node_list = soup.find_all(name="div", attrs={"class": "swiper-slide"})
+ url_list = [item.find("a")["href"] for item in node_list]
  year_list = [item.find("a").text for item in node_list]
  year_url_map = dict(zip(year_list, url_list))
- year_url_map['2023'] = url_2023
+ year_url_map["2023"] = url_2023
  return year_url_map


@@ -49,17 +50,17 @@ def fortune_rank(year: str = "2015") -> pd.DataFrame:
  r = requests.get(url)
  r.encoding = "utf-8"
  if int(year) < 2007:
- df = pd.read_html(StringIO(r.text))[0].iloc[1:-1, ]
+ df = pd.read_html(StringIO(r.text))[0].iloc[1:-1,]
  df.columns = pd.read_html(StringIO(r.text))[0].iloc[0, :].tolist()
  return df
  elif 2006 < int(year) < 2010:
- df = pd.read_html(StringIO(r.text))[0].iloc[1:, ]
+ df = pd.read_html(StringIO(r.text))[0].iloc[1:,]
  df.columns = pd.read_html(StringIO(r.text))[0].iloc[0, :].tolist()
  for page in tqdm(range(2, 11), leave=False):
  # page =2
  r = requests.get(url.rsplit(".", maxsplit=1)[0] + "_" + str(page) + ".htm")
  r.encoding = "utf-8"
- temp_df = pd.read_html(StringIO(r.text))[0].iloc[1:, ]
+ temp_df = pd.read_html(StringIO(r.text))[0].iloc[1:,]
  temp_df.columns = pd.read_html(StringIO(r.text))[0].iloc[0, :].tolist()
  df = pd.concat(objs=[df, temp_df], ignore_index=True)
  return df
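For the 2007-2009 branch above, results are spread across ten pages and the pager URL is built by suffixing "_<page>" before the ".htm" extension of the year page URL. A quick standalone sketch of that construction (the example URL below is hypothetical; real ones come from _fortune_rank_year_url_map):

# Hypothetical year-page URL used only to illustrate the string manipulation
url = "https://www.fortunechina.com/fortune500/c/2009-07/08/content_13855.htm"
page = 2
paged_url = url.rsplit(".", maxsplit=1)[0] + "_" + str(page) + ".htm"
print(paged_url)
# -> https://www.fortunechina.com/fortune500/c/2009-07/08/content_13855_2.htm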
@@ -79,18 +80,20 @@ def fortune_rank_eng(year: str = "2023") -> pd.DataFrame:
  :rtype: pandas.DataFrame
  """
  url = f"https://fortune.com/ranking/global500/{year}/search/"
- res = requests.get(url)
- soup = BeautifulSoup(res.text, "lxml")
- code = json.loads(soup.find("script", attrs={"type": "application/ld+json"}).string)["identifier"]
- url = f"https://content.fortune.com/wp-json/irving/v1/data/franchise-search-results"
+ r = requests.get(url)
+ soup = BeautifulSoup(r.text, features="lxml")
+ code = json.loads(
+ soup.find(name="script", attrs={"type": "application/ld+json"}).string
+ )["identifier"]
+ url = "https://content.fortune.com/wp-json/irving/v1/data/franchise-search-results"
  params = {
  "list_id": code,
  "token": "Zm9ydHVuZTpCcHNyZmtNZCN5SndjWkkhNHFqMndEOTM=",
  }
- res = requests.get(url, params=params)
+ r = requests.get(url, params=params)
  big_df = pd.DataFrame()
- for i in range(len(res.json()[1]["items"][0]['fields'])):
- temp_df = pd.DataFrame([item["fields"][i] for item in res.json()[1]["items"]])
+ for i in range(len(r.json()[1]["items"][0]["fields"])):
+ temp_df = pd.DataFrame([item["fields"][i] for item in r.json()[1]["items"]])
  big_df[temp_df["key"].values[0]] = temp_df["value"]
  big_df["rank"] = big_df["rank"].astype(int)
  big_df.sort_values(by="rank", inplace=True)
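The rebuilt loop pivots the API's per-item key/value field lists into a wide DataFrame, one column per field key. A self-contained sketch of that pivot on mocked data (the field names here are illustrative, not the real payload):

import pandas as pd

# Two mocked "items" shaped like the franchise-search-results response
items = [
    {"fields": [{"key": "rank", "value": "1"}, {"key": "name", "value": "Walmart"}]},
    {"fields": [{"key": "rank", "value": "2"}, {"key": "name", "value": "Amazon"}]},
]

big_df = pd.DataFrame()
for i in range(len(items[0]["fields"])):
    temp_df = pd.DataFrame([item["fields"][i] for item in items])
    big_df[temp_df["key"].values[0]] = temp_df["value"]

print(big_df)
#   rank     name
# 0    1  Walmart
# 1    2   Amazon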
@@ -98,23 +101,23 @@ def fortune_rank_eng(year: str = "2023") -> pd.DataFrame:
  return big_df


- if __name__ == '__main__':
+ if __name__ == "__main__":
  fortune_rank_eng_df = fortune_rank_eng(year="2022")
  print(fortune_rank_eng_df)

- fortune_rank_df = fortune_rank(year='2023') # 2010 不一样
+ fortune_rank_df = fortune_rank(year="2023") # 2010 不一样
  print(fortune_rank_df)

- fortune_rank_df = fortune_rank(year='2022') # 2010 不一样
+ fortune_rank_df = fortune_rank(year="2022") # 2010 不一样
  print(fortune_rank_df)

- fortune_rank_df = fortune_rank(year='2008') # 2010 不一样
+ fortune_rank_df = fortune_rank(year="2008") # 2010 不一样
  print(fortune_rank_df)

- fortune_rank_df = fortune_rank(year='2008') # 2010 不一样
+ fortune_rank_df = fortune_rank(year="2008") # 2010 不一样
  print(fortune_rank_df)

- fortune_rank_df = fortune_rank(year='2009') # 2010 不一样
+ fortune_rank_df = fortune_rank(year="2009") # 2010 不一样
  print(fortune_rank_df)

  for item in range(1996, 2008):
akshare/other/other_car_cpca.py CHANGED
@@ -1,9 +1,9 @@
  #!/usr/bin/env python
  # -*- coding:utf-8 -*-
  """
- Date: 2024/3/31 19:00
+ Date: 2024/8/3 20:00
  Desc: 乘联会
- http://data.cpcaauto.com/FuelMarket
+ http://data.cpcadata.com/FuelMarket
  """

  import pandas as pd
@@ -15,7 +15,7 @@ def car_market_total_cpca(
  ) -> pd.DataFrame:
  """
  乘联会-统计数据-总体市场
- http://data.cpcaauto.com/TotalMarket
+ http://data.cpcadata.com/TotalMarket
  :param symbol: choice of {"狭义乘用车", "广义乘用车"}
  :type symbol: str
  :param indicator: choice of {"产量", "批发", "零售", "出口"}
@@ -23,7 +23,7 @@ def car_market_total_cpca(
  :return: 统计数据-总体市场
  :rtype: pandas.DataFrame
  """
- url = "http://data.cpcaauto.com/api/chartlist"
+ url = "http://data.cpcadata.com/api/chartlist"
  params = {"charttype": "1"}
  r = requests.get(url, params=params)
  data_json = r.json()
@@ -163,13 +163,13 @@ def car_market_total_cpca(
  def __car_market_man_rank_cpca_pifa(symbol: str = "狭义乘用车-累计") -> pd.DataFrame:
  """
  乘联会-统计数据-厂商排名
- http://data.cpcaauto.com/ManRank
+ http://data.cpcadata.com/ManRank
  :param symbol: choice of {"狭义乘用车-单月", "狭义乘用车-累计", "广义乘用车-单月", "广义乘用车-累计"}
  :type symbol: str
  :return: 统计数据-厂商排名
  :rtype: pandas.DataFrame
  """
- url = "http://data.cpcaauto.com/api/chartlist"
+ url = "http://data.cpcadata.com/api/chartlist"
  params = {"charttype": "2"}
  r = requests.get(url, params=params)
  data_json = r.json()
@@ -278,13 +278,13 @@ def __car_market_man_rank_cpca_lingshou(
  ) -> pd.DataFrame:
  """
  乘联会-统计数据-厂商排名
- http://data.cpcaauto.com/ManRank
+ http://data.cpcadata.com/ManRank
  :param symbol: choice of {"狭义乘用车-单月", "狭义乘用车-累计", "广义乘用车-单月", "广义乘用车-累计"}
  :type symbol: str
  :return: 统计数据-厂商排名
  :rtype: pandas.DataFrame
  """
- url = "http://data.cpcaauto.com/api/chartlist_2"
+ url = "http://data.cpcadata.com/api/chartlist_2"
  params = {"charttype": "2"}
  r = requests.get(url, params=params)
  data_json = r.json()
@@ -393,7 +393,7 @@ def car_market_man_rank_cpca(
  ) -> pd.DataFrame:
  """
  乘联会-统计数据-厂商排名
- http://data.cpcaauto.com/ManRank
+ http://data.cpcadata.com/ManRank
  :param symbol: choice of {"狭义乘用车-单月", "狭义乘用车-累计", "广义乘用车-单月", "广义乘用车-累计"}
  :type symbol: str
  :param indicator: choice of {"批发", "零售"}
@@ -412,13 +412,13 @@ def car_market_man_rank_cpca(
  def __car_market_cate_cpca_pifa(symbol: str = "MPV") -> pd.DataFrame:
  """
  乘联会-统计数据-车型大类
- http://data.cpcaauto.com/CategoryMarket
+ http://data.cpcadata.com/CategoryMarket
  :param symbol: choice of {"轿车", "MPV", "SUV", "占比"}
  :type symbol: str
  :return: 统计数据-车型大类
  :rtype: pandas.DataFrame
  """
- url = "http://data.cpcaauto.com/api/chartlist"
+ url = "http://data.cpcadata.com/api/chartlist"
  params = {"charttype": "3"}
  r = requests.get(url, params=params)
  data_json = r.json()
@@ -530,13 +530,13 @@ def __car_market_cate_cpca_lingshou(
  ) -> pd.DataFrame:
  """
  乘联会-统计数据-车型大类
- http://data.cpcaauto.com/CategoryMarket
+ http://data.cpcadata.com/CategoryMarket
  :param symbol: choice of {"轿车", "MPV", "SUV", "占比"}
  :type symbol: str
  :return: 统计数据-车型大类
  :rtype: pandas.DataFrame
  """
- url = "http://data.cpcaauto.com/api/chartlist"
+ url = "http://data.cpcadata.com/api/chartlist"
  params = {"charttype": "3"}
  r = requests.get(url, params=params)
  data_json = r.json()
@@ -646,7 +646,7 @@ def __car_market_cate_cpca_lingshou(
  def car_market_cate_cpca(symbol: str = "轿车", indicator: str = "批发") -> pd.DataFrame:
  """
  乘联会-统计数据-车型大类
- http://data.cpcaauto.com/CategoryMarket
+ http://data.cpcadata.com/CategoryMarket
  :param symbol: choice of {"轿车", "MPV", "SUV", "占比"}
  :type symbol: str
  :param indicator: choice of {"批发", "零售"}
@@ -665,11 +665,11 @@ def car_market_cate_cpca(symbol: str = "轿车", indicator: str = "批发") -> p
  def car_market_country_cpca() -> pd.DataFrame:
  """
  乘联会-统计数据-国别细分市场
- http://data.cpcaauto.com/CountryMarket
+ http://data.cpcadata.com/CountryMarket
  :return: 统计数据-车型大类
  :rtype: pandas.DataFrame
  """
- url = "http://data.cpcaauto.com/api/chartlist"
+ url = "http://data.cpcadata.com/api/chartlist"
  params = {"charttype": "4"}
  r = requests.get(url=url, params=params)
  data_json = r.json()
@@ -685,13 +685,13 @@ def car_market_country_cpca() -> pd.DataFrame:
  def car_market_segment_cpca(symbol: str = "轿车") -> pd.DataFrame:
  """
  乘联会-统计数据-级别细分市场
- http://data.cpcaauto.com/SegmentMarket
+ http://data.cpcadata.com/SegmentMarket
  :param symbol: choice of {"轿车", "MPV", "SUV"}
  :type symbol: str
  :return: 统计数据-车型大类
  :rtype: pandas.DataFrame
  """
- url = "http://data.cpcaauto.com/api/chartlist"
+ url = "http://data.cpcadata.com/api/chartlist"
  params = {"charttype": "5"}
  r = requests.get(url=url, params=params)
  data_json = r.json()
@@ -724,11 +724,11 @@ def car_market_fuel_cpca(symbol: str = "整体市场") -> pd.DataFrame:
  乘联会-统计数据-新能源细分市场
  :param symbol: choice of {"整体市场", "销量占比-PHEV-BEV", "销量占比-ICE-NEV"}
  :type symbol: str
- https://data.cpcaauto.com/FuelMarket
+ https://data.cpcadata.com/FuelMarket
  :return: 新能源细分市场
  :rtype: pandas.DataFrame
  """
- url = "http://data.cpcaauto.com/api/chartlist"
+ url = "http://data.cpcadata.com/api/chartlist"
  params = {"charttype": "6"}
  r = requests.get(url, params=params)
  data_json = r.json()
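Every hunk in other_car_cpca.py is the same host migration from data.cpcaauto.com to data.cpcadata.com; the public function signatures and parameters are untouched. A usage sketch, assuming these functions stay re-exported from the top-level akshare package as before:

import akshare as ak

# Same calls as before this release; only the backing API host changed.
total_df = ak.car_market_total_cpca(symbol="狭义乘用车", indicator="批发")
print(total_df.head())

rank_df = ak.car_market_man_rank_cpca(symbol="狭义乘用车-累计", indicator="零售")
print(rank_df.head())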
akshare/spot/spot_hog_soozhu.py CHANGED
@@ -38,6 +38,33 @@ def spot_hog_soozhu() -> pd.DataFrame:
  return big_df


+ def spot_hog_year_trend_soozhu() -> pd.DataFrame:
+ """
+ 搜猪-生猪大数据-今年以来全国出栏均价走势
+ https://www.soozhu.com/price/data/center/
+ :return: 今年以来全国出栏均价走势
+ :rtype: pd.DataFrame
+ """
+ session = requests.session()
+ url = "https://www.soozhu.com/price/data/center/"
+ r = session.get(url)
+ soup = BeautifulSoup(r.text, features="lxml")
+ token = soup.find(name="input", attrs={"name": "csrfmiddlewaretoken"})["value"]
+ url = "https://www.soozhu.com/price/data/center/"
+ payload = {"act": "yeartrend", "csrfmiddlewaretoken": token}
+ r = session.post(url, data=payload)
+ data_json = r.json()
+ temp_df = pd.DataFrame(data_json["nationlist"])
+ temp_df.columns = ["日期", "价格"]
+ temp_df["日期"] = pd.to_datetime(temp_df["日期"], errors="coerce").dt.date
+ temp_df["价格"] = pd.to_numeric(temp_df["价格"], errors="coerce")
+ temp_df.sort_values(by=["日期"], ignore_index=True, inplace=True)
+ return temp_df
+
+
  if __name__ == "__main__":
  spot_hog_soozhu_df = spot_hog_soozhu()
  print(spot_hog_soozhu_df)
+
+ spot_hog_year_trend_soozhu_df = spot_hog_year_trend_soozhu()
+ print(spot_hog_year_trend_soozhu_df)
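The new function fetches the Soozhu price-center page for a CSRF token, POSTs act=yeartrend back to the same endpoint, and reshapes the "nationlist" payload into a two-column frame. A minimal usage sketch mirroring the updated __main__ block above:

from akshare.spot.spot_hog_soozhu import spot_hog_year_trend_soozhu

trend_df = spot_hog_year_trend_soozhu()
print(trend_df.dtypes)   # 日期 holds datetime.date values, 价格 is float64
print(trend_df.tail())   # latest national average hog prices for the year to date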
akshare-1.14.47.dist-info/METADATA → akshare-1.14.49.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: akshare
- Version: 1.14.47
+ Version: 1.14.49
  Summary: AKShare is an elegant and simple financial data interface library for Python, built for human beings!
  Home-page: https://github.com/akfamily/akshare
  Author: AKFamily
akshare-1.14.47.dist-info/RECORD → akshare-1.14.49.dist-info/RECORD
@@ -1,4 +1,4 @@
- akshare/__init__.py,sha256=VZTkZinnFxazlt-Pet9pEhTfVsS4bAuDnf6P9NiF4wA,180214
+ akshare/__init__.py,sha256=pEmZLvGV4WCO1j-1Zxi6NeOAG2U-z5g8Rxbq2oxaRv4,180229
  akshare/datasets.py,sha256=oIu1zC7o_LMHY22lQmdM7vCnryHibKrJLBqJwQiitlI,1167
  akshare/air/__init__.py,sha256=RMTf1bT5EOE3ttWpn3hGu1LtUmsVxDoa0W7W0gXHOy8,81
  akshare/air/air_hebei.py,sha256=xIXNGLK7IGYqrkteM9fxnHAwWqk6PCQs6D9-ggZ7byY,4442
@@ -75,7 +75,7 @@ akshare/event/migration.py,sha256=2lR3D_XHRlOKiarBSbjQVEIm3Spj5hbZIwDOvcyKsEU,37
  akshare/file_fold/__init__.py,sha256=RMTf1bT5EOE3ttWpn3hGu1LtUmsVxDoa0W7W0gXHOy8,81
  akshare/file_fold/calendar.json,sha256=gKqhd5CrXPGbhfztADaT6WjDDGrxGoxIOKGjB1VUsLk,116370
  akshare/fortune/__init__.py,sha256=4OCuVKMykUB3Ubm8WogA814U5EGLdrexmfTAOcBnYM4,82
- akshare/fortune/fortune_500.py,sha256=2pYu_zZsXehpIdioMggl-QkNSclNOsIhM8_8Z5zHv58,4468
+ akshare/fortune/fortune_500.py,sha256=V3d3KDHn89j3CItVtSDtDvj1RbHX-OJ4rYRhFTMT0kI,4485
  akshare/fortune/fortune_bloomberg.py,sha256=i5HqnICYXKmwy3rOvMLDzJPDJA_am8imKsXzNlLLU6g,3673
  akshare/fortune/fortune_forbes_500.py,sha256=GiHFZ_dLHkc3K03H1rfVQ0FEGbwOWjdF4CFlRXXWBIM,1437
  akshare/fortune/fortune_hurun.py,sha256=B2BMDdwsatY5Yo2hX03GBqMJyifPhQzsRTXhelVNs6s,11512
@@ -208,7 +208,7 @@ akshare/option/option_risk_analysis_em.py,sha256=WYwjXzZhIr8WpwoVKXfAnFA7Ylx_vLQ
  akshare/option/option_risk_indicator_sse.py,sha256=W1_mGrk8M9pbbadzSAy5euWMEGn4-cVWBc8Jk_I2WmI,2484
  akshare/option/option_value_analysis_em.py,sha256=XAHbSvUvcmyuv6rr2hxxGalWcKK8EqaP2L8G7p8woko,2487
  akshare/other/__init__.py,sha256=guH4GLhFcE_5iaMHOHtgK7QKa0i7esYdmZFfJMG6E10,82
- akshare/other/other_car_cpca.py,sha256=wlB1bPXr2ZnTN-JO_JoYS9AuRyhw8RbrrwV9g3TZeAI,34988
+ akshare/other/other_car_cpca.py,sha256=hCBNUrCI2l3OCP3Gqgr_4zpyzhO99XCBoiwkIhUM3r0,34987
  akshare/other/other_car_gasgoo.py,sha256=KaCMVPydiGJvhJN9eZEvObygYquCsSgsZkQRB0J6srk,3046
  akshare/pro/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
  akshare/pro/client.py,sha256=p9r3fZYGgfMplQwGLo8oPAen8w65xennP5D1Ca89im4,2248
@@ -226,9 +226,8 @@ akshare/reits/__init__.py,sha256=0MO0aWWC8jQBth2IPl3W63vZKvuDb1OJqIpHE-sCQVU,82
  akshare/reits/reits_basic.py,sha256=gxQeP8_K7SYjBT9zkKuVRRi8B4SPOHNqVdXe_UvTWTY,2730
  akshare/sport/__init__.py,sha256=aMsxmuOMZFkcI8tGmQanhPyPwyBpdeApAWyCtDRKMeg,81
  akshare/sport/sport_olympic.py,sha256=CB1cvLpz2BWuadfonhHuQ17Qxt9X_3ks0Zc5Bff2w9k,818
- akshare/sport/sport_olympic_winter.py,sha256=l_NhDpYfUBJMCLDezIa8un1S5Z_R6giv8EsuTRXgt6E,1473
  akshare/spot/__init__.py,sha256=VGuha94pXYoezmMs3F3Q_ofaN8RZIrkJ2NtVv8hWCjY,83
- akshare/spot/spot_hog_soozhu.py,sha256=S42cfeR76cl2x9lKbQdIH9sawaqToXtIdJnr6kW5zZA,1508
+ akshare/spot/spot_hog_soozhu.py,sha256=4l040e30BIaqheivlQv-0H6AqR1zwvN5JKYYrZxfqlk,2641
  akshare/spot/spot_price_qh.py,sha256=rRv09vR8K0U_x6x8AiLgGluxytIXkLatFNVkFbIh8eQ,3756
  akshare/spot/spot_sge.py,sha256=Ta5EHUlGCSpNDlufIJcwgCErsJEUgH6HJHzYxoYtaMk,6313
  akshare/stock/__init__.py,sha256=jSa9260d6aNZajaW68chI2mpPkDSXLOgi3eXrqo4MQ8,82
@@ -381,8 +380,8 @@ akshare/utils/token_process.py,sha256=K4rGXjh_tgugbRcyOK2h2x0jP3PT65IIK7nxhUKhOe
  akshare/utils/tqdm.py,sha256=MuPNwcswkOGjwWQOMWXi9ZvQ_RmW4obCWRj2i7HM7FE,847
  tests/__init__.py,sha256=gNzhlO0UPjFq6Ieb38kaVIODXv4cTDByrdohAZnDYt4,82
  tests/test_func.py,sha256=tfvy_YnYmDra2dkKZ5JvprU1gNW5X9T634PszdSdH1A,944
- akshare-1.14.47.dist-info/LICENSE,sha256=mmSZCPgfHiVw34LXuFArd-SUgQtBJ_QsIlh-kWlDHfs,1073
- akshare-1.14.47.dist-info/METADATA,sha256=pbqC023BkL1kqUqJZZkVrGsZfpo4CXuw-xGh__HGc1U,13976
- akshare-1.14.47.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
- akshare-1.14.47.dist-info/top_level.txt,sha256=jsf9ZzZPmHaISTVumQPsAw7vv7Yv-PdEVW70SMEelQQ,14
- akshare-1.14.47.dist-info/RECORD,,
+ akshare-1.14.49.dist-info/LICENSE,sha256=mmSZCPgfHiVw34LXuFArd-SUgQtBJ_QsIlh-kWlDHfs,1073
+ akshare-1.14.49.dist-info/METADATA,sha256=XEyUrgLciEc2YOB6U9tmiaD1US6deNhqAPO26RWL3aI,13976
+ akshare-1.14.49.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
+ akshare-1.14.49.dist-info/top_level.txt,sha256=jsf9ZzZPmHaISTVumQPsAw7vv7Yv-PdEVW70SMEelQQ,14
+ akshare-1.14.49.dist-info/RECORD,,
akshare/sport/sport_olympic_winter.py REMOVED
@@ -1,39 +0,0 @@
- #!/usr/bin/env python
- # -*- coding:utf-8 -*-
- """
- Date: 2022/2/8 17:14
- Desc: 腾讯运动-冬奥会-历届奖牌榜
- https://m.sports.qq.com/g/sv3/winter-oly22/winter-olympic-rank.htm?type=0
- """
- import requests
- import pandas as pd
-
-
- def sport_olympic_winter_hist() -> pd.DataFrame:
- """
- 腾讯运动-冬奥会-历届奖牌榜
- :return: 历届奖牌榜
- :rtype: pandas.DataFrame
- """
- url = "https://app.sports.qq.com/m/oly/historyMedal"
- r = requests.get(url)
- data_json = r.json()
- temp_df = pd.DataFrame(data_json["data"]["list"])
- temp_df = temp_df.explode("list")
- temp_df["国家及地区"] = temp_df["list"].apply(lambda x: (x["noc"]))
- temp_df["金牌数"] = temp_df["list"].apply(lambda x: (int(x["gold"])))
- temp_df["总奖牌数"] = temp_df["list"].apply(lambda x: (int(x["total"])))
- temp_df["举办年份"] = temp_df["year"].astype("str")
- temp_df["届数"] = temp_df["no"].astype("str")
- temp_df["举办地点"] = temp_df["country"]
- temp_df = temp_df[["举办年份", "届数", "举办地点", "国家及地区", "金牌数", "总奖牌数"]]
- temp_df = temp_df.replace("俄罗斯奥委会", "俄罗斯")
- temp_df.reset_index(inplace=True)
- temp_df["index"] = range(1, len(temp_df) + 1)
- temp_df.rename(columns={"index": "序号"}, inplace=True)
- return temp_df
-
-
- if __name__ == "__main__":
- sport_olympic_winter_hist_df = sport_olympic_winter_hist()
- print(sport_olympic_winter_hist_df)