py2ls 0.2.1__py3-none-any.whl → 0.2.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
py2ls/stats.py CHANGED
@@ -95,7 +95,7 @@ def FuncStars(
95
95
  fontname=fontname,
96
96
  color=symbolcolor,
97
97
  )
98
- elif 0 < pval <= 0.001:
98
+ elif 0 <= pval <= 0.001:
99
99
  ax.text(
100
100
  xcenter,
101
101
  y_loc,
@@ -107,7 +107,7 @@ def FuncStars(
107
107
  color=symbolcolor,
108
108
  )
109
109
  # lines indicators
110
- if linego and 0 < pval <= 0.05:
110
+ if linego and 0 <= pval <= 0.05:
111
111
  # horizontal line
112
112
  if yscale <= 0.99:
113
113
  ax.plot(
@@ -659,17 +659,26 @@ def corr_pair(pair):
659
659
 
660
660
 
661
661
def check_normality(data, verbose=True):
    """Test whether *data* is consistent with a normal distribution.

    Uses the Shapiro-Wilk test for small samples (n <= 5000), where it is
    designed to be most reliable, and falls back to a Kolmogorov-Smirnov
    test against a standard normal (after z-scoring) for larger samples.

    Parameters
    ----------
    data : array-like
        Sample to test.
    verbose : bool, default True
        If True, print which test was used and the verdict.

    Returns
    -------
    bool
        True when the test fails to reject normality (p >= 0.05).
    """
    if len(data) <= 5000:
        # Shapiro-Wilk test is designed to test the normality of a small
        # sample, typically less than 5000 observations.
        _, pval4norm = stats.shapiro(data)
        method = "Shapiro-Wilk test"
    else:
        from scipy.stats import kstest, zscore

        # Standardize first so the comparison target is N(mean=0, sd=1).
        data_scaled = zscore(data)
        _, pval4norm = kstest(data_scaled, "norm")
        method = "Kolmogorov–Smirnov test"
    # Fail to reject H0 (normality) at alpha = 0.05.
    normality = pval4norm >= 0.05
    if verbose:
        print(f"'{method}' was used to test for normality")
        print("\nnormally distributed" if normality else "\n NOT normally distributed\n")
    return normality
674
683
 
675
684
 
py2ls/update2usage.py ADDED
@@ -0,0 +1,126 @@
1
+ from .ips import *
2
+ from .netfinder import fetch, get_soup
3
+
4
+
5
def update_pd_usages(
    url="https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.to_clipboard.html",
    dir_save=None,
):
    """Scrape pandas API reference pages and save a {function: usage} JSON.

    Starting from *url*, collects the internal links that point at pandas API
    pages, extracts each page's signature line, normalizes it (``pandas`` ->
    ``pd``, ``DataFrame`` -> ``df``, ``[source]#`` markers and ``class``/
    ``property`` prefixes stripped), and writes the mapping to
    ``usages_pd.json`` under *dir_save*.

    Parameters
    ----------
    url : str
        Pandas API page used as the entry point for link discovery.
    dir_save : str or None
        Output directory; when None, a platform-dependent default is used.

    Returns
    -------
    None
    """

    # extract each usage from its url
    def get_usage(link):
        # The signature is the first <dt> element on the API page.
        sp = get_soup(link, driver="se")
        return fetch(sp, where="dt")[0]

    if dir_save is None:
        if "mac" in get_os():
            dir_save = "/Users/macjianfeng/Dropbox/github/python/py2ls/py2ls/data/"
        else:
            dir_save = "Z:\\Jianfeng\\temp\\"
    sp = get_soup(url, driver="se")
    links_all = fetch(sp, where="a", get="href", class_="reference internal")
    filtered_links = unique(
        [i for i in links_all if any([i.startswith(cond) for cond in ["pandas"]])]
    )
    links = [
        "https://pandas.pydata.org/docs/reference/api/" + i for i in filtered_links
    ]

    usages = [get_usage(i) for i in links]
    dict_usage = {}
    for usage in usages:
        if usage.startswith("DataFrame"):
            usage = usage.replace("DataFrame", "df")
        if usage.startswith("pandas"):
            usage = usage.replace("pandas", "pd")
        if usage.endswith("[source]#"):
            usage = usage.replace("[source]#", "")
        if usage.endswith("#"):
            usage = usage.replace("#", "")
        str2rm = ["class", "property"]
        for str2rm_ in str2rm:
            if usage.startswith(str2rm_):
                # BUG FIX: originally `usage.replace(str2rm, "")`, which
                # passed the whole list and raised TypeError; strip the
                # matched prefix string instead.
                usage = usage.replace(str2rm_, "")
        funcname = ssplit(usage, by="(")[0]
        dict_usage.update({funcname: usage})
    # save to local
    dir_save += "/" if not dir_save.endswith("/") else ""
    fsave(
        dir_save + "usages_pd.json",
        dict_usage,
    )
52
+
53
+
54
def update_sns_usages(
    url="https://seaborn.pydata.org/generated/seaborn.swarmplot.html",
    dir_save=None,
):
    """Collect seaborn plotting-function signatures and store them as JSON.

    Starting from *url*, discovers the internal documentation links for the
    plot-related seaborn functions (JointGrid/PairGrid/objects pages are
    excluded), pulls each function's signature line, and writes a
    ``{name: usage}`` mapping to ``usages_sns.json`` inside *dir_save*.

    Parameters
    ----------
    url : str
        Seaborn documentation page used as the starting point for link
        discovery (default is the swarmplot page).
    dir_save : str or None
        Directory for the output JSON; when None, a platform-dependent
        default location is chosen.

    Returns
    -------
    None
    """

    def grab_signature(page_url):
        # The signature is the first <dt> element on the API page.
        soup = get_soup(page_url, driver="se")
        return fetch(soup, where="dt")[0]

    def keep_link(href):
        # Keep only plot-related links, skipping the grid/objects sections.
        excluded = ("seaborn.JointGrid", "seaborn.PairGrid", "seaborn.objects")
        checks = [href.startswith(prefix) for prefix in excluded]
        checks.append("plot" not in href)
        return not any(checks)

    if dir_save is None:
        if "mac" in get_os():
            dir_save = "/Users/macjianfeng/Dropbox/github/python/py2ls/py2ls/data/"
        else:
            dir_save = "Z:\\Jianfeng\\temp\\"
    landing = get_soup(url, driver="se")
    all_hrefs = fetch(landing, where="a", get="href", class_="reference internal")
    plot_hrefs = unique([href for href in all_hrefs if keep_link(href)])
    pages = ["https://seaborn.pydata.org/generated/" + href for href in plot_hrefs]

    signatures = [grab_signature(page) for page in pages]
    # Map "funcname" -> signature with the trailing "#" dropped.
    dict_usage = {
        ssplit(sig, by="(")[0].replace("seaborn.", ""): sig[:-1]
        for sig in signatures
    }
    # save to local
    if not dir_save.endswith("/"):
        dir_save += "/"
    fsave(
        dir_save + "usages_sns.json",
        dict_usage,
    )
117
+
118
+
119
def main():
    """Entry point: refresh the locally cached pandas usage JSON."""
    # update pandas usage to local
    update_pd_usages()
    # update_sns_usages()
123
+
124
+
125
+ if __name__ == "__main__":
126
+ main()
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: py2ls
3
- Version: 0.2.1
3
+ Version: 0.2.3
4
4
  Summary: py(thon)2(too)ls
5
5
  Author: Jianfeng
6
6
  Author-email: Jianfeng.Liu0413@gmail.com
@@ -1,4 +1,4 @@
1
- py2ls/.DS_Store,sha256=BloZZz2vlFVfF-I3X7ZsqXusvqOawJMx7erKcnIP-b0,6148
1
+ py2ls/.DS_Store,sha256=N6pp6R6M5ZPDw6IdIAnsHaAAOu8hWJASHMjjjKMNmOc,6148
2
2
  py2ls/.git/COMMIT_EDITMSG,sha256=AdtqRHle5Ej2EBNPJY79v-SB454v5UK4wuPCPFELiFQ,11
3
3
  py2ls/.git/FETCH_HEAD,sha256=VM-2Jiw6iPaGu0ftg9xwq76OyNPWV0iT1nL0VWiL1zI,100
4
4
  py2ls/.git/HEAD,sha256=KNJb-Cr0wOK3L1CVmyvrhZ4-YLljCl6MYD2tTdsrboA,21
@@ -181,6 +181,7 @@ py2ls/data/db2ls_sql_chtsht.json,sha256=ls9d7Sm8TLeujanWHfHlWhU85Qz1KnAizO_9X3wU
181
181
  py2ls/data/docs_links.json,sha256=kXgbbWo0b8bfV4n6iuuUNLnZipIyLzokUO6Lzmf7nO4,101829
182
182
  py2ls/data/email/email_html_template.html,sha256=UIg3aixWfdNsvVx-j2dX1M5N3G-6DgrnV1Ya1cLjiUQ,2809
183
183
  py2ls/data/lang_code_iso639.json,sha256=qZiU7H2RLJjDMXK22C-jhwzLJCI5vKmampjB1ys4ek4,2157
184
+ py2ls/data/sns_info.json,sha256=pEzdg2bhMkwQHZpXx02_7zAP7NvRoCc0Le8PN6Uv0Vk,4074
184
185
  py2ls/data/styles/example/style1.pdf,sha256=Pt_qQJ5kiCSIPiz3TWSwEffHUdj75kKXnZ4MPqpEx4I,29873
185
186
  py2ls/data/styles/example/style2.pdf,sha256=0xduPLPulET38LEP2V2H_q70wqlrrBEo8ttqO-FMrfQ,25449
186
187
  py2ls/data/styles/example/style3.pdf,sha256=010-Pm2BUowAt0XDkJWZTR5rAszLqmI1DO3209sIFWs,65536
@@ -202,20 +203,23 @@ py2ls/data/styles/style6.json,sha256=tu-MYOT9x5Rorc-2IK6sy-J-frmz0RNdm65XAsDQKX4
202
203
  py2ls/data/styles/style7.json,sha256=StdUFwIVrS7T_6CDrADHMorzc0WZFWBM7IyYdO1TPHg,4447
203
204
  py2ls/data/styles/style8.json,sha256=8XUgkZtew8ebvjbAHlDHCSWUqNra3ktDvMCO4vNh-CM,4456
204
205
  py2ls/data/styles/style9.json,sha256=PLxvntbH_kfzZlnCTtCEAUVBGi5m6Lngb9C01rArQog,4769
206
+ py2ls/data/usages_pd.json,sha256=XvFAwxKn6yYZztxmAhcqQ-kuYm6xBnLRx5aOLfvp3BQ,11060
207
+ py2ls/data/usages_sns.json,sha256=Vu2kGIIMxxWxJ1kW0Ov7mq47DQwZa_-gwsXiW72A2ag,7788
205
208
  py2ls/db2ls.py,sha256=MMfFX47aIPIyu7fU9aPvX9lbPRPYOpJ_VXwlnWk-8qo,13615
206
209
  py2ls/doc.py,sha256=xN3g1OWfoaGUhikbJ0NqbN5eKy1VZVvWwRlhHMgyVEc,4243
207
210
  py2ls/export_requirements.py,sha256=x2WgUF0jYKz9GfA1MVKN-MdsM-oQ8yUeC6Ua8oCymio,2325
208
211
  py2ls/freqanalysis.py,sha256=F4218VSPbgL5tnngh6xNCYuNnfR-F_QjECUUxrPYZss,32594
209
212
  py2ls/ich2ls.py,sha256=3E9R8oVpyYZXH5PiIQgT3CN5NxLe4Dwtm2LwaeacE6I,21381
210
- py2ls/ips.py,sha256=HjMZDXzfOiqhgNOdtoX7dxoY2cRsrD78LXilWyIUffE,164940
213
+ py2ls/ips.py,sha256=uEpvsKo_E9eX49Icd60g-fQBDzmpGW65hJM9_mr6wuM,195254
211
214
  py2ls/netfinder.py,sha256=vgOOMhzwbjRuLWMAPyf_kh3HoOhsJ9dlA-tCkMf7kNU,55371
212
215
  py2ls/ocr.py,sha256=5lhUbJufIKRSOL6wAWVLEo8TqMYSjoI_Q-IO-_4u3DE,31419
213
- py2ls/plot.py,sha256=x_bvQyPM6sl7IscgHPUbOEnqR82Iefcyur1JOweEAZw,100536
216
+ py2ls/plot.py,sha256=ynL5fz2jf1pD09FWkAayQOT3c4Jj64sjFHrkgCqdgAs,127193
214
217
  py2ls/setuptools-70.1.0-py3-none-any.whl,sha256=2bi3cUVal8ip86s0SOvgspteEF8SKLukECi-EWmFomc,882588
215
218
  py2ls/sleep_events_detectors.py,sha256=bQA3HJqv5qnYKJJEIhCyhlDtkXQfIzqksnD0YRXso68,52145
216
- py2ls/stats.py,sha256=fJmXQ9Lq460StOn-kfEljE97cySq7876HUPTnpB5hLs,38123
219
+ py2ls/stats.py,sha256=DMoJd8Z5YV9T1wB-4P52F5K5scfVK55DT8UP4Twcebo,38627
217
220
  py2ls/translator.py,sha256=zBeq4pYZeroqw3DT-5g7uHfVqKd-EQptT6LJ-Adi8JY,34244
221
+ py2ls/update2usage.py,sha256=9uLoipgM0k-xUy56XOa6v-sOim3mE5srdNiPzn03ALY,3964
218
222
  py2ls/wb_detector.py,sha256=7y6TmBUj9exCZeIgBAJ_9hwuhkDh1x_-yg4dvNY1_GQ,6284
219
- py2ls-0.2.1.dist-info/METADATA,sha256=Qr6DFCoJWEj0_JrHmUDLJYRtoPqO7GyHth0Apsq5wOk,20036
220
- py2ls-0.2.1.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
221
- py2ls-0.2.1.dist-info/RECORD,,
223
+ py2ls-0.2.3.dist-info/METADATA,sha256=QvoGrLUwlS0T7VI09f-OJ8tU1Z3VGCl0bJWbP73RCCg,20036
224
+ py2ls-0.2.3.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
225
+ py2ls-0.2.3.dist-info/RECORD,,
File without changes