py2ls 0.2.4.29__py3-none-any.whl → 0.2.4.31__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- py2ls/.DS_Store +0 -0
- py2ls/.git/index +0 -0
- py2ls/.git/logs/refs/remotes/origin/HEAD +1 -0
- py2ls/data/.DS_Store +0 -0
- py2ls/data/hyper_param_tabrepo_2024.py +1753 -0
- py2ls/data/styles/.DS_Store +0 -0
- py2ls/data/tiles.csv +146 -0
- py2ls/ips.py +964 -117
- py2ls/ml2ls.py +30 -23
- py2ls/netfinder.py +59 -9
- py2ls/plot.py +127 -9
- {py2ls-0.2.4.29.dist-info → py2ls-0.2.4.31.dist-info}/METADATA +4 -1
- {py2ls-0.2.4.29.dist-info → py2ls-0.2.4.31.dist-info}/RECORD +14 -12
- {py2ls-0.2.4.29.dist-info → py2ls-0.2.4.31.dist-info}/WHEEL +0 -0
py2ls/ml2ls.py
CHANGED
@@ -2254,7 +2254,8 @@ def predict(
     y_train: pd.Series,
     x_true: pd.DataFrame = None,
     y_true: Optional[pd.Series] = None,
-    fill_missing:
+    fill_missing:str = 'knn',
+    encoder="dummy",
     scaler:str='standard',# ["standard", "minmax", "robust","maxabs"]
     backward: bool = False, # backward_regression
     backward_thr:float = 0.05,# pval thr,only works when backward is True
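Note: fill_missing and encoder are new keyword arguments. A minimal usage sketch (the toy data, and the assumption that the leading positional arguments are the training features and target, are mine, not from the diff):

import numpy as np
import pandas as pd
from py2ls import ml2ls

# toy data -- any DataFrame/Series pair works here
rng = np.random.default_rng(0)
x = pd.DataFrame(rng.normal(size=(100, 3)), columns=list("abc"))
y = pd.Series(rng.integers(0, 2, size=100), name="target")

res = ml2ls.predict(
    x, y,
    fill_missing="knn",  # forwarded to ips.df_fillna; 'knn' is the new default
    encoder="dummy",     # forwarded to ips.df_encoder; 'dummy' is the new default
)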
@@ -2464,15 +2465,21 @@ def predict(
         "DummyRegressor": DummyRegressor(),
         "TransformedTargetRegressor": TransformedTargetRegressor(regressor=LinearRegression())
     }
+
     if cls is None:
         models = model_
     else:
-        if
-
-
-
-
-
+        if "trad" in cls: # tradition
+            models = model_
+        elif "autogluon" in cls:
+            models = {"autogluon_tab": None}
+        else:
+            if not isinstance(cls, list):
+                cls = [cls]
+            models = {}
+            for cls_ in cls:
+                cls_ = ips.strcmp(cls_, list(model_.keys()))[0]
+                models[cls_] = model_[cls_]
     if "LightGBM" in models:
         x_train = ips.df_special_characters_cleaner(x_train)
         x_true = (
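Note: the rewritten cls branch now distinguishes three cases. Continuing the sketch above (the model name below is hypothetical; per the hunk, arbitrary names are fuzzy-matched against the internal model dictionary via ips.strcmp):

res_all = ml2ls.predict(x, y)                         # cls=None: run every traditional model
res_ag = ml2ls.predict(x, y, cls="autogluon")         # becomes {"autogluon_tab": None}
res_sub = ml2ls.predict(x, y, cls=["Random Forest"])  # fuzzy-matched subset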
@@ -2481,10 +2488,7 @@ def predict(
 
     # only keep "autogluon_tab" in models
     cls = [cls] if isinstance(cls, str) else cls
-
-    if cls is not None:
-        models={"autogluon_tab":None} if "auto" in cls else models
-
+    print(cls)
     # indicate cls:
     if ips.run_once_within(30): # 10 min
         print(f"processing: {list(models.keys())}")
@@ -2500,7 +2504,7 @@ def predict(
 
     y_train = pd.DataFrame(y_train)
     if y_train.select_dtypes(include=np.number).empty:
-        y_train_ = ips.df_encoder(y_train, method=
+        y_train_ = ips.df_encoder(y_train, method=encoder, drop=None)
         is_binary = False if y_train_.shape[1] > 2 else True
     else:
         y_train_ = ips.flatten(y_train.values)
@@ -2511,8 +2515,8 @@ def predict(
     print("is_binary:", is_binary)
 
     if fill_missing:
-        ips.df_fillna(data=x_train, method=
-        ips.df_fillna(data=y_train, method=
+        ips.df_fillna(data=x_train, method=fill_missing, inplace=True, axis=0)
+        ips.df_fillna(data=y_train, method=fill_missing, inplace=True, axis=0)
     # Perform backward feature selection
     if backward:
         selected_features = backward_regression(x_train, y_train, thr=backward_thr)
@@ -2542,7 +2546,7 @@ def predict(
     ).values.ravel()
 
     if fill_missing:
-        ips.df_fillna(data=x_true, method=
+        ips.df_fillna(data=x_true, method=fill_missing, inplace=True, axis=0)
     if y_true is not None:
         if isinstance(y_true, str) and y_true in x_true.columns:
             y_true_col_name = y_true
@@ -2585,8 +2589,8 @@ def predict(
     x_train,scaler_=ips.df_scaler(x_train,method=scaler,return_scaler=True)
     #
     x_true=ips.df_scaler(x_true,scaler=scaler_)# make sure the same scaler is used
-    x_train, x_true = ips.df_encoder(x_train, method=
-        x_true, method=
+    x_train, x_true = ips.df_encoder(x_train, method=encoder), ips.df_encoder(
+        x_true, method=encoder
     )
     # Handle class imbalance using SMOTE (only for classification)
     if (
@@ -2598,6 +2602,7 @@ def predict(
 
     smote_sampler = SMOTE(random_state=random_state)
     x_train, y_train = smote_sampler.fit_resample(x_train, y_train)
+
     if not is_binary:
         if isinstance(y_train, np.ndarray):
             y_train = ips.df_encoder(data=pd.DataFrame(y_train), method="label")
@@ -3768,7 +3773,7 @@ def predict(
     y_pred = best_clf.predict(x_true)
     if hasattr(best_clf, "predict_proba"):
         y_pred_proba = best_clf.predict_proba(x_true)
-
+
         if y_pred_proba.shape[1] == 1:
             y_pred_proba = np.hstack(
                 [1 - y_pred_proba, y_pred_proba]
@@ -3778,9 +3783,11 @@ def predict(
             y_pred_proba = y_pred_proba.iloc[:, 1]
         elif isinstance(y_pred_proba, pd.Series):
             y_pred_proba = y_pred_proba.values[:, 1]
+        else:
+            y_pred_proba = y_pred_proba[:, 1]
     else:
         y_pred_proba = y_pred_proba[:, 1]
-
+        # print("Shape of predicted probabilities:", y_pred_proba.shape)
 elif hasattr(best_clf, "decision_function"):
     # If predict_proba is not available, use decision_function (e.g., for SVM)
     y_pred_proba = best_clf.decision_function(x_true)
@@ -4029,7 +4036,7 @@ def predict(
 
     # Convert results to DataFrame
     df_results = pd.DataFrame.from_dict(results, orient="index")
-
+    print(df_results)
     # sort
     if y_true is not None:
         if purpose == "classification":
@@ -4154,7 +4161,7 @@ def predict(
     #* Convert the results into a DataFrame for easy comparison
     cv_results_df = pd.DataFrame(cv_results)
 
-    #* Sort and
+    #* Sort and print the best model
     cv_results_df = cv_results_df.sort_values(by='Mean Accuracy', ascending=False)
 
 
@@ -4440,7 +4447,7 @@ def predict(
     if purpose == "classification":
         if hasattr(voting_model, "predict_proba"):
             y_pred_proba_vote = voting_model.predict_proba(x_true)
-            print("Shape of predicted probabilities:", y_pred_proba_vote.shape)
+            # print("Shape of predicted probabilities:", y_pred_proba_vote.shape)
             if y_pred_proba_vote.shape[1] == 1:
                 y_pred_proba_vote = np.hstack(
                     [1 - y_pred_proba_vote, y_pred_proba_vote]
@@ -5071,7 +5078,7 @@ def img_datasets_preprocessing(
     if verbose:
         print("Processed images:", len(df_img))
         print("Final DataFrame shape:", df_img.shape)
-
+        print(df_img.head())
 
     return df_img
 
py2ls/netfinder.py
CHANGED
@@ -7,7 +7,7 @@ import json
 import time
 from selenium.webdriver.common.by import By
 from . import ips
-
+import random
 dir_save = "/Users/macjianfeng/Dropbox/Downloads/"
 # Set up logging
 logging.basicConfig(level=logging.INFO)
@@ -310,6 +310,28 @@ def corr_by_kind(wait_until_kind):
         raise ValueError(f"Unsupported wait_until_kind: {wait_until_kind}")
 
 
+
+
+def parse_cookies(cookies_str):
+    """
+    Copied straight from the browser; this converts the pasted string into the final dict.
+    """
+    import re
+    cookies_dict = {}
+
+    # Split the string by newlines to get each cookie row
+    cookies_list = cookies_str.strip().split("\n")
+
+    for cookie in cookies_list:
+        # Use regular expression to capture name and value pairs
+        match = re.match(r"([a-zA-Z0-9_\-\.]+)\s+([^\s]+)", cookie)
+        if match:
+            cookie_name = match.group(1)
+            cookie_value = match.group(2)
+            cookies_dict[cookie_name] = cookie_value
+
+    return cookies_dict
+
 def fetch_all(
     url,
     parser="lxml",
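Note: parse_cookies expects the text you get when copying rows from a browser's cookie inspector, one "name<whitespace>value" pair per line. A small self-contained sketch:

from py2ls.netfinder import parse_cookies

cookies_str = "sessionid\tabc123\ncsrftoken\txyz789"
print(parse_cookies(cookies_str))
# {'sessionid': 'abc123', 'csrftoken': 'xyz789'}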
@@ -336,6 +358,7 @@ def fetch_all(
     disable_images=False, # Add option to disable images
     iframe_name=None,
     login_dict=None,
+    cookies=None, # Add cookies parameter
 ): # Add option to handle iframe): # lxml is faster, # parser="html.parser"
     try:
         # # Generate a random user-agent string
@@ -348,9 +371,23 @@ def fetch_all(
 
         headers = {"User-Agent": user_agent()}
         if "req" in driver.lower():
-            response = requests.get(
-
-            )
+            # response = requests.get(
+            #     url, headers=headers, proxies=proxies_glob, timeout=30, stream=True
+            # )
+
+            # Handle cookies for requests
+            if cookies:
+                from requests.cookies import RequestsCookieJar
+                cookie_jar = RequestsCookieJar()
+                if isinstance(cookies, str):
+                    cookies=parse_cookies(cookies)
+                for cookie_name, cookie_value in cookies.items():
+                    cookie_jar.set(cookie_name, cookie_value)
+                response = requests.get(
+                    url, headers=headers, cookies=cookie_jar, timeout=timeout, stream=True
+                )
+            else:
+                response = requests.get(url, headers=headers, timeout=timeout, stream=True)
 
         # If the response is a redirect, follow it
         while response.is_redirect:
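Note: with the new cookies parameter, requests-based fetches can carry a session. A hedged sketch (the URL is a placeholder, and driver="requests" assumes any value containing "req" selects the requests branch, as the condition above suggests):

from py2ls import netfinder as nt

content = nt.fetch_all(
    "https://example.com/account",    # placeholder URL
    driver="requests",                # 'req' in driver.lower() -> requests path
    cookies={"sessionid": "abc123"},  # a raw browser cookie string also works
)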
@@ -359,7 +396,7 @@ def fetch_all(
             response.headers["Location"],
             headers=headers,
             proxies=proxies_glob,
-            timeout=
+            timeout=timeout,
             stream=True,
         )
     # Check for a 403 error
@@ -368,7 +405,7 @@ def fetch_all(
         # Retry the request after a short delay
         time.sleep(random.uniform(1, 3))
         response = requests.get(
-            url, headers=headers, proxies=proxies_glob, timeout=
+            url, headers=headers, proxies=proxies_glob, timeout=timeout, stream=True
         )
         # Raise an error if retry also fails
         response.raise_for_status()
@@ -427,14 +464,21 @@ def fetch_all(
     except Exception as e:
         print(f"Error occurred: {e}")
         print("Attempting to reinstall webdriver-manager...")
-        try:
-            ips.upgrade("webdriver-manager", uninstall=True)
+        try:
             service = Service(ChromeDriverManager().install())
             driver_ = webdriver.Chrome(service=service, options=chrome_options)
         except Exception as reinstall_error:
             print(
                 f"Reinstallation failed: {reinstall_error}\nA similar problem occurred before and was fixed by updating webdriver-manager"
             )
+            try:
+                ips.upgrade("webdriver-manager", uninstall=True)
+                service = Service(ChromeDriverManager().install())
+                driver_ = webdriver.Chrome(service=service, options=chrome_options)
+            except Exception as e:
+                print(
+                    f"Reinstallation failed: {reinstall_error}\nA similar problem occurred before, but updating 'webdriver-manager' still did not fix it"
+                )
 
     # implicit wait
     if 3 < wait < 5:
@@ -457,7 +501,13 @@ def fetch_all(
             driver_.get(url)
             for cookie_name, cookie_value in cookies.items():
                 driver_.add_cookie({"name": cookie_name, "value": cookie_value})
-
+        else:
+            if cookies:
+                driver_.get(url)
+                if isinstance(cookies, str):
+                    cookies=parse_cookies(cookies)
+                for cookie_name, cookie_value in cookies.items():
+                    driver_.add_cookie({"name": cookie_name, "value": cookie_value})
         if not javascript:
             driver_.execute_cdp_cmd(
                 "Emulation.setScriptExecutionDisabled", {"value": True}
py2ls/plot.py
CHANGED
@@ -20,7 +20,7 @@ from .ips import (
     flatten,
     plt_font,
     run_once_within,
-
+    get_df_format,
     df_corr,
     df_scaler
 )
@@ -3635,9 +3635,7 @@ def plotxy(
     kws_strip = kwargs.pop("kws_strip", kwargs)
     kws_strip = {k: v for k, v in kws_strip.items() if not k.startswith("kws_")}
     dodge = kws_strip.pop("dodge", True)
-    ax = sns.stripplot(
-        data=data, x=x, y=y, ax=ax, zorder=zorder, dodge=dodge, **kws_strip
-    )
+    ax = sns.stripplot(data=data, x=x, y=y, ax=ax, zorder=zorder, dodge=dodge, **kws_strip)
 elif k == "swarmplot":
     kws_swarm = kwargs.pop("kws_swarm", kwargs)
     kws_swarm = {k: v for k, v in kws_swarm.items() if not k.startswith("kws_")}
@@ -3713,9 +3711,6 @@ def plotxy(
         return g, ax
     return ax
 
-import pandas as pd
-
-
 def df_preprocessing_(data, kind, verbose=False):
     """
     Automatically formats data for various seaborn plot types.
@@ -3729,7 +3724,7 @@ def df_preprocessing_(data, kind, verbose=False):
     - pd.DataFrame: Formatted DataFrame ready for the specified seaborn plot type.
     """
     # Determine data format: 'long', 'wide', or 'uncertain'
-    df_format_ =
+    df_format_ = get_df_format(data)
 
     # Correct plot type name
     kind = strcmp(
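Note: df_preprocessing_ now delegates format detection to get_df_format, imported from ips in the first hunk. A quick sketch, with the return values assumed from the comment ('long', 'wide', or 'uncertain'):

import pandas as pd
from py2ls.ips import get_df_format

wide = pd.DataFrame({"A": [1, 4], "B": [2, 5], "C": [3, 6]})
print(get_df_format(wide))  # expected 'wide', or 'uncertain' for ambiguous frames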
@@ -5872,4 +5867,127 @@ def ppi(
     nx.write_graphml(G, dir_save.replace(".html",".graphml")) # Export to GraphML
     print(f"could be edited in Cytoscape \n{dir_save.replace(".html",".graphml")}")
     ips.figsave(dir_save.replace(".html",".pdf"))
-    return G,ax
+    return G,ax
+
+
+def plot_map(
+    location=[39.949610, -75.150282],  # Default center of the map
+    zoom_start=16,  # Default zoom level
+    tiles="OpenStreetMap",  # Tile style for Folium
+    markers=None,  # List of marker dictionaries for Folium
+    overlays=None,  # List of overlays (e.g., GeoJson, PolyLine, Circle) for Folium
+    custom_layers=None,  # List of custom Folium layers
+    fit_bounds=None,  # Coordinates to fit map bounds
+    plugins=None,  # List of Folium plugins to add
+    scroll_wheel_zoom=True,  # Enable/disable scroll wheel zoom
+    map_width=725,  # Map display width for Streamlit
+    map_height=None,  # Map display height for Streamlit
+    output="normale",  # "streamlit" or "offline" rendering
+    save_path=None,  # Path to save the map in offline mode
+    pydeck_map=False,  # Whether to use pydeck for rendering (True for pydeck)
+    pydeck_style="mapbox://styles/mapbox/streets-v11",  # Map style for pydeck
+    **kwargs,  # Additional arguments for Folium Map
+):
+    """
+    Creates a customizable Folium or pydeck map and renders it in Streamlit or saves offline.
+
+    # get all built-in tiles
+    from py2ls import netfinder as nt
+    sp = nt.get_soup(url, driver="se")
+    url = "https://leaflet-extras.github.io/leaflet-providers/preview/"
+    tiles_support = nt.fetch(sp,"span",class_="leaflet-minimap-label")
+    df_tiles = pd.DataFrame({"tiles": tiles_support})
+    fsave("....tiles.csv",df_tiles)
+    """
+    from pathlib import Path
+
+    # Get the current script's directory as a Path object
+    current_directory = Path(__file__).resolve().parent
+    if not "tiles_support" in locals():
+        tiles_support = fload(current_directory / "data" / "tiles.csv", verbose=0).iloc[:, 1].tolist()
+    tiles=strcmp(tiles, tiles_support)[0]
+    import folium
+    import streamlit as st
+    import pydeck as pdk
+    from streamlit_folium import st_folium
+    from folium.plugins import HeatMap
+
+    if pydeck_map:
+        view = pdk.ViewState(
+            latitude=location[0],
+            longitude=location[1],
+            zoom=zoom_start,
+            pitch=0,
+        )
+
+        # Example Layer (can be replaced by your custom layers)
+        layer = pdk.Layer(
+            "ScatterplotLayer",
+            data=[{"lat": location[0], "lon": location[1]}],
+            get_position="[lon, lat]",
+            get_color="[200, 30, 0, 160]",
+            get_radius=1000,
+        )
+
+        # Create the deck
+        deck = pdk.Deck(
+            layers=[layer],
+            initial_view_state=view,
+            map_style=pydeck_style,
+        )
+
+        # Render map in Streamlit
+        st.pydeck_chart(deck)
+
+        return deck  # Return the pydeck map
+
+    else:
+        # Initialize the base map (Folium)
+        m = folium.Map(
+            location=location,
+            zoom_start=zoom_start,
+            tiles=tiles,
+            scrollWheelZoom=scroll_wheel_zoom,
+            **kwargs,
+        )
+
+        # Add markers
+        if markers:
+            for marker in markers:
+                folium.Marker(
+                    location=marker.get("location"),
+                    popup=marker.get("popup"),
+                    tooltip=marker.get("tooltip"),
+                    icon=marker.get("icon", folium.Icon()),  # Default icon if none specified
+                ).add_to(m)
+
+        # Add overlays
+        if overlays:
+            for overlay in overlays:
+                overlay.add_to(m)
+
+        # Add custom layers
+        if custom_layers:
+            for layer in custom_layers:
+                layer.add_to(m)
+
+        # Add plugins
+        if plugins:
+            for plugin in plugins:
+                plugin.add_to(m)
+
+        # Fit map bounds
+        if fit_bounds:
+            m.fit_bounds(fit_bounds)
+
+        # Handle rendering based on output
+        if output == "streamlit":
+            # Render the map in Streamlit
+            st_data = st_folium(m, width=map_width, height=map_height)
+            return st_data
+        elif output == "offline":
+            if save_path:
+                m.save(save_path)
+            return m
+        else:
+            return m
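Note: a hedged usage sketch for the new plot_map; the marker keys mirror the marker.get(...) calls in the function body, and the save path is hypothetical:

from py2ls.plot import plot_map

m = plot_map(
    location=[39.949610, -75.150282],
    zoom_start=14,
    tiles="OpenStreetMap",
    markers=[{"location": [39.9496, -75.1503],
              "popup": "City Hall", "tooltip": "click me"}],
    output="offline",             # skip Streamlit rendering
    save_path="philly_map.html",  # hypothetical output path
)

With output="offline" and a save_path, the Folium map is written to disk and also returned for further customization.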
{py2ls-0.2.4.29.dist-info → py2ls-0.2.4.31.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: py2ls
-Version: 0.2.4.29
+Version: 0.2.4.31
 Summary: py(thon)2(too)ls
 Author: Jianfeng
 Author-email: Jianfeng.Liu0413@gmail.com
@@ -167,6 +167,7 @@ Requires-Dist: protobuf (>=5.27.2)
 Requires-Dist: psutil (>=5.9.8)
 Requires-Dist: ptyprocess (>=0.7.0)
 Requires-Dist: pure_eval (>=0.2.3)
+Requires-Dist: py-cpuinfo (>=9.0.0)
 Requires-Dist: pycodestyle (>=2.12.0)
 Requires-Dist: pycparser (>=2.22)
 Requires-Dist: pyflakes (>=3.2.0)
@@ -210,6 +211,8 @@ Requires-Dist: soupsieve (>=2.5)
 Requires-Dist: stack-data (>=0.6.3)
 Requires-Dist: statsmodels (>=0.14.2)
 Requires-Dist: stem (>=1.8.2)
+Requires-Dist: streamlit (>=1.41.0)
+Requires-Dist: streamlit-autorefresh (>=1.0.1)
 Requires-Dist: sympy (>=1.13.1)
 Requires-Dist: tabulate (>=0.9.0)
 Requires-Dist: tenacity (>=8.5.0)
{py2ls-0.2.4.29.dist-info → py2ls-0.2.4.31.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-py2ls/.DS_Store,sha256=
+py2ls/.DS_Store,sha256=4sRy5JgScBybkYDAVnroayQNecFjkuxb5iQ3xansB0A,6148
 py2ls/.git/.DS_Store,sha256=_QMqSMvFkiuSTudJbBnYTcETf3zOthZjrcI8LJ48tDA,6148
 py2ls/.git/COMMIT_EDITMSG,sha256=AdtqRHle5Ej2EBNPJY79v-SB454v5UK4wuPCPFELiFQ,11
 py2ls/.git/FETCH_HEAD,sha256=VM-2Jiw6iPaGu0ftg9xwq76OyNPWV0iT1nL0VWiL1zI,100
@@ -18,11 +18,11 @@ py2ls/.git/hooks/pre-receive.sample,sha256=pMPSuce7P9jRRBwxvU7nGlldZrRPz0ndsxAlI
 py2ls/.git/hooks/prepare-commit-msg.sample,sha256=6d3KpBif3dJe2X_Ix4nsp7bKFjkLI5KuMnbwyOGqRhk,1492
 py2ls/.git/hooks/push-to-checkout.sample,sha256=pT0HQXmLKHxt16-mSu5HPzBeZdP0lGO7nXQI7DsSv18,2783
 py2ls/.git/hooks/update.sample,sha256=jV8vqD4QPPCLV-qmdSHfkZT0XL28s32lKtWGCXoU0QY,3650
-py2ls/.git/index,sha256=
+py2ls/.git/index,sha256=Gu1Qs5VDPu7CPREfYn_oQpjffIKsIpsSBsAUPInp8es,4232
 py2ls/.git/info/exclude,sha256=ZnH-g7egfIky7okWTR8nk7IxgFjri5jcXAbuClo7DsE,240
 py2ls/.git/logs/HEAD,sha256=8ID7WuAe_TlO9g-ARxhIJYdgdL3u3m7-1qrOanaIUlA,3535
 py2ls/.git/logs/refs/heads/main,sha256=8ID7WuAe_TlO9g-ARxhIJYdgdL3u3m7-1qrOanaIUlA,3535
-py2ls/.git/logs/refs/remotes/origin/HEAD,sha256=
+py2ls/.git/logs/refs/remotes/origin/HEAD,sha256=XROqkeNoDTQcnvmIJb-KjSXB-SpUTKbnzM8QljdSC6k,23718
 py2ls/.git/logs/refs/remotes/origin/main,sha256=9ohHV9XT1dBowBZUVo52U9205_o513hmvCvtW9rS4Fk,3192
 py2ls/.git/objects/.DS_Store,sha256=p_UeT5XAI5c6cGAz23lbqMNMljXKR5ddgXEXeYDk4nk,18436
 py2ls/.git/objects/01/d5bd8065e6860c0bd23ff9fa57161806a099e1,sha256=hEQ8nqJnGsfFsuV5wc4cZas58rehXvT0v5ANx1zmMAY,584
@@ -181,15 +181,16 @@ py2ls/brain_atlas.py,sha256=w1o5EelRjq89zuFJUNSz4Da8HnTCwAwDAZ4NU4a-bAY,5486
 py2ls/chat.py,sha256=Yr22GoIvoWhpV3m4fdwV_I0Mn77La346_ymSinR-ORA,3793
 py2ls/corr.py,sha256=RbOaJIPLCHJtUm5SFi_4dCJ7VFUPWR0PErfK3K26ad4,18243
 py2ls/correlators.py,sha256=RbOaJIPLCHJtUm5SFi_4dCJ7VFUPWR0PErfK3K26ad4,18243
-py2ls/data/.DS_Store,sha256=
+py2ls/data/.DS_Store,sha256=0vLN27VSbtJbHNG5DG-oGqs9h_ubWh3xHz-baHnyfck,6148
 py2ls/data/db2ls_sql_chtsht.json,sha256=ls9d7Sm8TLeujanWHfHlWhU85Qz1KnAizO_9X3wUH7E,6933
 py2ls/data/docs_links.json,sha256=kXgbbWo0b8bfV4n6iuuUNLnZipIyLzokUO6Lzmf7nO4,101829
 py2ls/data/email/email_html_template.html,sha256=UIg3aixWfdNsvVx-j2dX1M5N3G-6DgrnV1Ya1cLjiUQ,2809
 py2ls/data/hyper_param_autogluon_zeroshot2024.json,sha256=MblV_gYh_6nWEeeidj-5_YRWajUy_u0qycRWFrujH6E,68152
+py2ls/data/hyper_param_tabrepo_2024.py,sha256=YGSLbeVibDq11l_q1j9wz3WNL4sl8nLSIcBCInsX_wk,60705
 py2ls/data/lang_code_iso639.json,sha256=qZiU7H2RLJjDMXK22C-jhwzLJCI5vKmampjB1ys4ek4,2157
 py2ls/data/mygenes_fields_241022.txt,sha256=-7htEdtmqbSRTUKHHVmjUFLBwZZg9u3LFpn9OZMb1qg,11348
 py2ls/data/sns_info.json,sha256=pEzdg2bhMkwQHZpXx02_7zAP7NvRoCc0Le8PN6Uv0Vk,4074
-py2ls/data/styles/.DS_Store,sha256=
+py2ls/data/styles/.DS_Store,sha256=54wi4v4nOVHfBA7TU_ITv3tbW3-fJ5j2Paq1m2aXvt0,6148
 py2ls/data/styles/example/.DS_Store,sha256=1lFlJ5EFymdzGAUAaI30vcaaLHt3F1LwpG7xILf9jsM,6148
 py2ls/data/styles/example/style1.pdf,sha256=Pt_qQJ5kiCSIPiz3TWSwEffHUdj75kKXnZ4MPqpEx4I,29873
 py2ls/data/styles/example/style2.pdf,sha256=0xduPLPulET38LEP2V2H_q70wqlrrBEo8ttqO-FMrfQ,25449
@@ -231,6 +232,7 @@ py2ls/data/styles/stylelib/scatter.mplstyle,sha256=g8bxBf-euVKwcN35emBoHzgGBjE_O
 py2ls/data/styles/stylelib/science.mplstyle,sha256=t6uBwdG8di84mgxQyJWj9jRsux385Td41vRDoxmqn6E,1110
 py2ls/data/styles/stylelib/std-colors.mplstyle,sha256=eD1GJ6b6wF4eygQO-wwaseHKK85TMV9tqExH5CZswX0,201
 py2ls/data/styles/stylelib/vibrant.mplstyle,sha256=99EGa-cDX380VLtURAwimFB-FmTvTZJdJ7rAkTkmhok,227
+py2ls/data/tiles.csv,sha256=73YEcjFZRRcwWyUkkwG-KJTPMAB1VBPqGf5lj07vQqw,3435
 py2ls/data/usages_pd.json,sha256=4DgbPahF4G5Hd6G0TQurb6dBRVey67lpKdgK6A01Tww,266818
 py2ls/data/usages_sns.json,sha256=6gV_G5wjQTazOE0TyIGX-wGoEtGN7MzHpaX5n8pLsKo,11242
 py2ls/db2ls.py,sha256=MMfFX47aIPIyu7fU9aPvX9lbPRPYOpJ_VXwlnWk-8qo,13615
@@ -240,18 +242,18 @@ py2ls/export_requirements.py,sha256=x2WgUF0jYKz9GfA1MVKN-MdsM-oQ8yUeC6Ua8oCymio,
 py2ls/fetch_update.py,sha256=9LXj661GpCEFII2wx_99aINYctDiHni6DOruDs_fdt8,4752
 py2ls/freqanalysis.py,sha256=F4218VSPbgL5tnngh6xNCYuNnfR-F_QjECUUxrPYZss,32594
 py2ls/ich2ls.py,sha256=3E9R8oVpyYZXH5PiIQgT3CN5NxLe4Dwtm2LwaeacE6I,21381
-py2ls/ips.py,sha256=
-py2ls/ml2ls.py,sha256=
+py2ls/ips.py,sha256=B8MwtyPskphAnXDb2hjq5sT-LxMNmXPH4zKRgUa6p7g,373829
+py2ls/ml2ls.py,sha256=I-JFPdikgEtfQjhv5gBz-QSeorpTJI_Pda_JwkTioBY,209732
 py2ls/mol.py,sha256=AZnHzarIk_MjueKdChqn1V6e4tUle3X1NnHSFA6n3Nw,10645
-py2ls/netfinder.py,sha256=
+py2ls/netfinder.py,sha256=neFi28nlpqldBeTTNEXkUiKbHU8KoHuo76U1PBPipUQ,66199
 py2ls/nl2ls.py,sha256=UEIdok-OamFZFIvvz_PdZenu085zteMdaJd9mLu3F-s,11485
 py2ls/ocr.py,sha256=CmG2GUBorz4q1aaq5TkQ7bKn3iueQJ9JKrPTzloGqlY,33447
-py2ls/plot.py,sha256=
+py2ls/plot.py,sha256=NoXg_MZfcLmu9ZAJo56T6P6Wua9kCFIe542qG5_gUHY,233874
 py2ls/setuptools-70.1.0-py3-none-any.whl,sha256=2bi3cUVal8ip86s0SOvgspteEF8SKLukECi-EWmFomc,882588
 py2ls/sleep_events_detectors.py,sha256=bQA3HJqv5qnYKJJEIhCyhlDtkXQfIzqksnD0YRXso68,52145
 py2ls/stats.py,sha256=qBn2rJmNa_QLLUqjwYqXUlGzqmW94sgA1bxJU2FC3r0,39175
 py2ls/translator.py,sha256=77Tp_GjmiiwFbEIJD_q3VYpQ43XL9ZeJo6Mhl44mvh8,34284
 py2ls/wb_detector.py,sha256=7y6TmBUj9exCZeIgBAJ_9hwuhkDh1x_-yg4dvNY1_GQ,6284
-py2ls-0.2.4.
-py2ls-0.2.4.
-py2ls-0.2.4.
+py2ls-0.2.4.31.dist-info/METADATA,sha256=jCdnH7D7wJUolwqghrIYRIW2oD6Mn5KnNYAKOdzB-Vo,20332
+py2ls-0.2.4.31.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+py2ls-0.2.4.31.dist-info/RECORD,,

{py2ls-0.2.4.29.dist-info → py2ls-0.2.4.31.dist-info}/WHEEL
File without changes