py2ls 0.1.7.6__py3-none-any.whl → 0.1.7.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
py2ls/netfinder.py CHANGED
@@ -116,11 +116,18 @@ def extract_text_from_content(content, content_type="text/html", where=None, wha
         texts = ""
         for tag in result_set:
             texts =texts+" "+ extract_text(tag) + " \n"
-        text_list = [tx.strip() for tx in texts.split("\n") if tx.strip()]
+        text_list = [tx.strip() for tx in texts.split(" \n") if tx.strip()]
         return text_list
     else:
-        texts_ = " ".join(tag.get_text() for tag in result_set)
-        texts = [tx.strip() for tx in texts_.split("\n") if tx.strip()]
+        # texts_ = " ".join(tag.get_text() for tag in result_set)
+        texts_=[]
+        for tag in result_set:
+            for child in tag.children:
+                if child.name is None:
+                    texts_.append(child.strip())
+        # texts_=" ".join(texts_)
+        # texts = [tx.strip() for tx in texts_.split("\n") if tx.strip()]
+        texts = [tx.strip() for tx in texts_ if tx.strip()]
         return texts

 def extract_text_from_json(content, key=None):
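
The new branch above stops flattening nested markup with get_text() and instead keeps only each tag's direct text children (BeautifulSoup nodes whose .name is None); the other change splits the accumulated string on " \n" to match the separator appended per tag. A minimal standalone sketch of the child-filtering idea, assuming BeautifulSoup and a made-up HTML snippet that is not taken from the package:

from bs4 import BeautifulSoup

html = "<div>outer text<span>nested text</span>more outer</div>"  # hypothetical input
result_set = BeautifulSoup(html, "html.parser").find_all("div")

texts_ = []
for tag in result_set:
    for child in tag.children:
        if child.name is None:          # NavigableString, i.e. a plain text node
            texts_.append(child.strip())

texts = [tx.strip() for tx in texts_ if tx.strip()]
print(texts)  # ['outer text', 'more outer'] -- text inside the nested <span> is skipped
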
@@ -493,7 +500,7 @@ def downloader(url, dir_save=dir_save, kind=['.pdf'], contains=None, rm_folder=F
             new_filename = f"{base}_{counter_}{ext}"
             counter += 1
         return new_filename
-    fpath_tmp=None
+    fpath_tmp, corrected_fname=None, None
     if not isinstance(kind,list):
         kind=[kind]
     if isinstance(url, list):
@@ -588,7 +595,10 @@ def downloader(url, dir_save=dir_save, kind=['.pdf'], contains=None, rm_folder=F

     print(f'\n{len(fnames)} files were downloaded:')
     if verbose:
-        pp(corrected_fname) if corrected_fname in locals() else pp(fnames)
+        if corrected_fname:
+            pp(corrected_fname)
+        else:
+            pp(fnames)
     print(f"\n\nsaved @:\n{dir_save}")

 def find_img(url, driver='request',dir_save="images", rm_folder=False, verbose=True):
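
The downloader() change pairs the new fpath_tmp, corrected_fname initialisation with a plain truthiness check. The old one-liner pp(corrected_fname) if corrected_fname in locals() else pp(fnames) raises NameError whenever corrected_fname was never assigned, and even when it is assigned it asks whether the variable's value appears among the keys of locals(), not whether the name exists. A minimal sketch of the corrected pattern, assuming pp is a pretty-print helper (pprint is used here as a stand-in) and a made-up fnames list:

from pprint import pprint as pp   # assumed stand-in for the package's pp helper

fnames = ["report.pdf"]                  # hypothetical list of downloaded names
fpath_tmp, corrected_fname = None, None  # new in 0.1.7.8: both names are always bound

# old: pp(corrected_fname) if corrected_fname in locals() else pp(fnames)
# -> NameError when corrected_fname is unbound, and otherwise a key lookup on the
#    variable's value; with both names pre-bound a truthiness test is enough
if corrected_fname:
    pp(corrected_fname)
else:
    pp(fnames)
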
py2ls/sleep_events_detectors.py CHANGED
@@ -139,7 +139,7 @@ def filter_bandpass(data=None, ord=4, freq_range=[11, 16], fs=1000):
 def moving_average(data, window_size):
     return convolve1d(data, np.ones(window_size) / window_size)

-def detect_cross(data, thr=0):
+def detect_cross(data, thr=0, match=True):
     if isinstance(data, list):
         data = np.array(data)
     if data.ndim == 1:
@@ -162,6 +162,18 @@ def detect_cross(data, thr=0):
         falling_before.pop(0)
     if len(rising_before) > len(falling_before):
         rising_before.pop(0)
+    if rising_before and falling_before:
+        return rising_before, falling_before
+    else:
+        signal_shifted = data - thr
+        signal_sign = np.sign(signal_shifted)
+        sign_diff = np.diff(signal_sign)
+        rising_before, falling_before=np.where(sign_diff > 0)[0],np.where(sign_diff < 0)[0]
+        if match:
+            # make sure they are matched
+            min_length = min(len(rising_indices), len(falling_indices))
+            rising_before, falling_before=rising_before[:min_length], falling_before[:min_length]
+        return rising_before, falling_before
     ## debug
     # a = np.sin(np.arange(0, 10 * np.pi, np.pi / 100))

@@ -188,8 +200,6 @@ def detect_cross(data, thr=0):
     # lw=5 - i,
     # )
     # plt.gca().axhline(thres)
-    return rising_before, falling_before
-
 def find_repeats(data, N, nGap=None):
     """
     Find the beginning and end points of repeated occurrences in a dataset.
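
The added lines give detect_cross() a NumPy fallback when the earlier list-based pass finds no paired crossings: the signal is shifted by the threshold, reduced to its sign, and the indices where the sign difference is positive (upward crossing) or negative (downward crossing) are returned, truncated to equal length when match=True. Note that the published hunk references rising_indices and falling_indices, which are not defined anywhere in the added lines; the standalone sketch below assumes rising_before and falling_before were intended, and uses a made-up sine-wave example rather than the package's data:

import numpy as np

def detect_cross_sketch(data, thr=0, match=True):
    # fallback branch only; the package's list-based first pass is omitted here
    data = np.asarray(data)
    signal_shifted = data - thr
    signal_sign = np.sign(signal_shifted)
    sign_diff = np.diff(signal_sign)
    rising_before = np.where(sign_diff > 0)[0]    # last index before an upward crossing
    falling_before = np.where(sign_diff < 0)[0]   # last index before a downward crossing
    if match:
        # keep equal numbers of rising and falling edges so they pair up
        min_length = min(len(rising_before), len(falling_before))
        rising_before, falling_before = rising_before[:min_length], falling_before[:min_length]
    return rising_before, falling_before

# one full sine cycle crosses the zero threshold once upward and once downward
t = np.arange(0, 2 * np.pi, np.pi / 100)
rise, fall = detect_cross_sketch(np.sin(t), thr=0)
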
py2ls-0.1.7.6.dist-info/METADATA → py2ls-0.1.7.8.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: py2ls
-Version: 0.1.7.6
+Version: 0.1.7.8
 Summary: py(thon)2(too)ls
 Author: Jianfeng
 Author-email: Jianfeng.Liu0413@gmail.com
py2ls-0.1.7.6.dist-info/RECORD → py2ls-0.1.7.8.dist-info/RECORD CHANGED
@@ -135,13 +135,13 @@ py2ls/doc.py,sha256=xN3g1OWfoaGUhikbJ0NqbN5eKy1VZVvWwRlhHMgyVEc,4243
 py2ls/export_requirements.py,sha256=x2WgUF0jYKz9GfA1MVKN-MdsM-oQ8yUeC6Ua8oCymio,2325
 py2ls/freqanalysis.py,sha256=F4218VSPbgL5tnngh6xNCYuNnfR-F_QjECUUxrPYZss,32594
 py2ls/ips.py,sha256=KkrkGAF0VQ-N0rH4FQFLyP-C-skY6EPpeO8t_5RngWw,88519
-py2ls/netfinder.py,sha256=9Mer8VAsOoIEew6wEW3wLZhg_46QpISPEsa_O16zO1k,47175
+py2ls/netfinder.py,sha256=MY_0TQY_zaRBZ6wfR4RxNCGrU93HFmDVDRRy1EXl75o,47566
 py2ls/plot.py,sha256=8_33-1wpkGZrDUuvRBfTPUi_BRKdf1njoR725OLSLSY,48579
 py2ls/setuptools-70.1.0-py3-none-any.whl,sha256=2bi3cUVal8ip86s0SOvgspteEF8SKLukECi-EWmFomc,882588
-py2ls/sleep_events_detectors.py,sha256=36MCuRrpurn0Uvzpo3p3b3_JlVsRNHSWCXbJxCGM3mg,51546
+py2ls/sleep_events_detectors.py,sha256=OzXn_uWr3x2aecOOymKDhKj6TAny-T-8C7vAM1zI6Io,52101
 py2ls/stats.py,sha256=Wd9yCKQ_61QD29WMEgMuEcreFxF91NmlPW65iWT2B5w,39041
 py2ls/translator.py,sha256=bc5FB-wqC4TtQz9gyCP1mE38HqNRJ_pmuRIgKnAlMzM,30581
 py2ls/wb_detector.py,sha256=7y6TmBUj9exCZeIgBAJ_9hwuhkDh1x_-yg4dvNY1_GQ,6284
-py2ls-0.1.7.6.dist-info/METADATA,sha256=sA2AqQ6j0UbJH2s5hTkAAq5nrWvM0d3kNYciCIEcNTQ,20017
-py2ls-0.1.7.6.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-py2ls-0.1.7.6.dist-info/RECORD,,
+py2ls-0.1.7.8.dist-info/METADATA,sha256=AY6ORoiV4L48RkIU4yhh3WwK7e-M9VxhTtvlDxEVYOk,20017
+py2ls-0.1.7.8.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+py2ls-0.1.7.8.dist-info/RECORD,,
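
For reference when checking entries like those above: a wheel RECORD line is the file path, an sha256= digest encoded as urlsafe base64 with the padding stripped, and the file size in bytes (PEP 376/427). A small sketch of reproducing one entry locally, assuming the wheel has been unpacked so the referenced path exists on disk:

import base64
import hashlib
from pathlib import Path

def record_entry(path):
    # build "path,sha256=<urlsafe-b64 digest without '='>,<size in bytes>"
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
    return f"{path},sha256={digest},{len(data)}"

# print(record_entry("py2ls/netfinder.py"))  # assumes the file is present locally
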