py2ls 0.2.4.1__py3-none-any.whl → 0.2.4.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- py2ls/bio.py +513 -0
- py2ls/data/usages_pd copy.json +1105 -0
- py2ls/data/usages_pd.json +1413 -52
- py2ls/fetch_update.py +45 -27
- py2ls/ips.py +680 -168
- py2ls/plot.py +104 -77
- {py2ls-0.2.4.1.dist-info → py2ls-0.2.4.3.dist-info}/METADATA +1 -1
- {py2ls-0.2.4.1.dist-info → py2ls-0.2.4.3.dist-info}/RECORD +9 -7
- {py2ls-0.2.4.1.dist-info → py2ls-0.2.4.3.dist-info}/WHEEL +0 -0
py2ls/fetch_update.py
CHANGED
@@ -13,45 +13,63 @@ def usage_pd(
         sp = get_soup(url, driver="se")
         return fetch(sp, where="dt")[0]
 
+
     if dir_save is None:
         if "mac" in get_os():
             dir_save = "/Users/macjianfeng/Dropbox/github/python/py2ls/py2ls/data/"
         else:
             dir_save = "Z:\\Jianfeng\\temp\\"
     sp = get_soup(url, driver="se")
-
-
-
-
-
-
+    links_all4lev1 = fetch(sp, where="a", get="href", class_="reference internal")
+    links_level_1 = [
+        strcmp(link, links_all4lev1)[0].replace(
+            "../", "https://pandas.pydata.org/docs/reference/"
+        )
+        for link in links_all4lev1
+        if not link.startswith("pandas")
     ]
-
-    usages = [get_usage(i) for i in links]
     dict_usage = {}
-    for
-
-
-
-
-
-
-
-
-
-
-
-
-
+    for link_level_1 in links_level_1:
+        sp = get_soup(link_level_1, driver="se")
+        links_all = fetch(sp, where="a", get="href", class_="reference internal")
+
+        filtered_links = unique(
+            [
+                i
+                for i in links_all
+                if any([i.startswith(cond) for cond in ["pandas", "api"]])
+            ]
+        )
+        links = [
+            (
+                "https://pandas.pydata.org/docs/reference/api/" + i
+                if not i.startswith("api")
+                else "https://pandas.pydata.org/docs/reference/" + i
+            )
+            for i in filtered_links
+        ]
+        usages = [get_usage(i) for i in links]
+        for usage, link in zip(usages, links):
+            if usage.startswith("DataFrame"):
+                usage = usage.replace("DataFrame", "df")
+            if usage.startswith("pandas"):
+                usage = usage.replace("pandas", "pd")
+            if usage.endswith("[source]#"):
+                usage = usage.replace("[source]#", "")
+            if usage.endswith("#"):
+                usage = usage.replace("#", "")
+            str2rm = ["class", "property"]
+            for str2rm_ in str2rm:
+                if usage.startswith(str2rm_):
+                    usage = usage.replace(str2rm_, "")
+            funcname = ssplit(usage, by="(")[0]
+            dict_usage.update({funcname: usage + f"\n{link}"})
     # save to local
     dir_save += "/" if not dir_save.endswith("/") else ""
     fsave(
         dir_save + "usages_pd.json",
         dict_usage,
     )
-
-
 def usage_sns(
     url="https://seaborn.pydata.org/generated/seaborn.swarmplot.html",
     dir_save=None,
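The new body of usage_pd() does three things per level-1 reference page: build absolute URLs from the relative hrefs, scrape each API page's signature, and normalize that signature into a short usage string keyed by the callable's name. The sketch below restates the URL prefixing and the clean-up loop with the standard library only; absolute_link(), clean_usage(), and the sample signature are illustrative stand-ins rather than py2ls names, and py2ls' ssplit(usage, by="(") is approximated with str.split("(").

# Minimal sketch of the new link and clean-up logic, standard library only.
# absolute_link(), clean_usage(), and the sample input are illustrative, not
# part of py2ls.

def absolute_link(href: str) -> str:
    # Mirror the diff: hrefs already under "api/..." keep the reference root,
    # bare page names get the extra "api/" segment prepended.
    base = "https://pandas.pydata.org/docs/reference/"
    return base + href if href.startswith("api") else base + "api/" + href

def clean_usage(usage: str, link: str) -> tuple[str, str]:
    # Shorten the object prefixes used on the pandas API pages.
    if usage.startswith("DataFrame"):
        usage = usage.replace("DataFrame", "df")
    if usage.startswith("pandas"):
        usage = usage.replace("pandas", "pd")
    # Strip the "[source]#" / "#" anchors Sphinx appends to the heading.
    if usage.endswith("[source]#"):
        usage = usage.replace("[source]#", "")
    if usage.endswith("#"):
        usage = usage.replace("#", "")
    # Drop the "class" / "property" markers that precede some signatures.
    for marker in ("class", "property"):
        if usage.startswith(marker):
            usage = usage.replace(marker, "")
    funcname = usage.split("(")[0]        # lookup key, e.g. "df.groupby"
    return funcname, usage + f"\n{link}"  # value keeps the source page

dict_usage = {}
# Hypothetical scraped pair; the real ones come from fetch(sp, where="dt").
usage = "DataFrame.groupby(by=None, axis=0, level=None)[source]#"
link = absolute_link("api/pandas.DataFrame.groupby.html")
funcname, entry = clean_usage(usage, link)
dict_usage[funcname] = entry  # {"df.groupby": "df.groupby(...)\nhttps://..."}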
@@ -119,8 +137,8 @@ def usage_sns(
 
 def main():
     # update pandas usage to local
-
-    #
+    usage_pd()
+    # usage_sns()
 
 
 if __name__ == "__main__":
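The second hunk activates the updater: main() now calls usage_pd(), which is what regenerates py2ls/data/usages_pd.json (the +1413-line data change listed at the top). Assuming fsave() writes the dict as plain JSON mapping each function name to its usage string plus source link, the saved index can be read back with the standard library alone; the key used below is a hypothetical example.

# Read the regenerated usage index back; assumes fsave() produced plain JSON
# of the form {"function name": "signature\nsource link"}.
import json

with open("py2ls/data/usages_pd.json", encoding="utf-8") as f:
    usages = json.load(f)

signature, source = usages["df.groupby"].split("\n", 1)  # hypothetical key
print(signature)  # short usage string, e.g. "df.groupby(...)"
print(source)     # the pandas API page it was scraped from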