webscout 1.3.3.tar.gz → 1.3.4.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of webscout might be problematic.

Files changed (44)
  1. {webscout-1.3.3 → webscout-1.3.4}/DeepWEBS/networks/webpage_fetcher.py +8 -10
  2. webscout-1.3.4/DeepWEBS/utilsdw/enver.py +78 -0
  3. {webscout-1.3.3 → webscout-1.3.4}/PKG-INFO +51 -3
  4. {webscout-1.3.3 → webscout-1.3.4}/README.md +49 -1
  5. {webscout-1.3.3 → webscout-1.3.4}/setup.py +2 -2
  6. {webscout-1.3.3 → webscout-1.3.4}/webscout/AIutel.py +19 -3
  7. {webscout-1.3.3 → webscout-1.3.4}/webscout/__init__.py +1 -2
  8. {webscout-1.3.3 → webscout-1.3.4}/webscout/g4f.py +1 -1
  9. webscout-1.3.4/webscout/version.py +2 -0
  10. {webscout-1.3.3 → webscout-1.3.4}/webscout/webai.py +5 -4
  11. {webscout-1.3.3 → webscout-1.3.4}/webscout.egg-info/PKG-INFO +51 -3
  12. webscout-1.3.3/DeepWEBS/utilsdw/enver.py +0 -60
  13. webscout-1.3.3/webscout/version.py +0 -2
  14. {webscout-1.3.3 → webscout-1.3.4}/DeepWEBS/__init__.py +0 -0
  15. {webscout-1.3.3 → webscout-1.3.4}/DeepWEBS/documents/__init__.py +0 -0
  16. {webscout-1.3.3 → webscout-1.3.4}/DeepWEBS/documents/query_results_extractor.py +0 -0
  17. {webscout-1.3.3 → webscout-1.3.4}/DeepWEBS/documents/webpage_content_extractor.py +0 -0
  18. {webscout-1.3.3 → webscout-1.3.4}/DeepWEBS/networks/__init__.py +0 -0
  19. {webscout-1.3.3 → webscout-1.3.4}/DeepWEBS/networks/filepath_converter.py +0 -0
  20. {webscout-1.3.3 → webscout-1.3.4}/DeepWEBS/networks/google_searcher.py +0 -0
  21. {webscout-1.3.3 → webscout-1.3.4}/DeepWEBS/networks/network_configs.py +0 -0
  22. {webscout-1.3.3 → webscout-1.3.4}/DeepWEBS/utilsdw/__init__.py +0 -0
  23. {webscout-1.3.3 → webscout-1.3.4}/DeepWEBS/utilsdw/logger.py +0 -0
  24. {webscout-1.3.3 → webscout-1.3.4}/LICENSE.md +0 -0
  25. {webscout-1.3.3 → webscout-1.3.4}/setup.cfg +0 -0
  26. {webscout-1.3.3 → webscout-1.3.4}/webscout/AI.py +0 -0
  27. {webscout-1.3.3 → webscout-1.3.4}/webscout/AIbase.py +0 -0
  28. {webscout-1.3.3 → webscout-1.3.4}/webscout/DWEBS.py +0 -0
  29. {webscout-1.3.3 → webscout-1.3.4}/webscout/HelpingAI.py +0 -0
  30. {webscout-1.3.3 → webscout-1.3.4}/webscout/LLM.py +0 -0
  31. {webscout-1.3.3 → webscout-1.3.4}/webscout/__main__.py +0 -0
  32. {webscout-1.3.3 → webscout-1.3.4}/webscout/cli.py +0 -0
  33. {webscout-1.3.3 → webscout-1.3.4}/webscout/exceptions.py +0 -0
  34. {webscout-1.3.3 → webscout-1.3.4}/webscout/models.py +0 -0
  35. {webscout-1.3.3 → webscout-1.3.4}/webscout/transcriber.py +0 -0
  36. {webscout-1.3.3 → webscout-1.3.4}/webscout/utils.py +0 -0
  37. {webscout-1.3.3 → webscout-1.3.4}/webscout/voice.py +0 -0
  38. {webscout-1.3.3 → webscout-1.3.4}/webscout/webscout_search.py +0 -0
  39. {webscout-1.3.3 → webscout-1.3.4}/webscout/webscout_search_async.py +0 -0
  40. {webscout-1.3.3 → webscout-1.3.4}/webscout.egg-info/SOURCES.txt +0 -0
  41. {webscout-1.3.3 → webscout-1.3.4}/webscout.egg-info/dependency_links.txt +0 -0
  42. {webscout-1.3.3 → webscout-1.3.4}/webscout.egg-info/entry_points.txt +0 -0
  43. {webscout-1.3.3 → webscout-1.3.4}/webscout.egg-info/requires.txt +0 -0
  44. {webscout-1.3.3 → webscout-1.3.4}/webscout.egg-info/top_level.txt +0 -0

DeepWEBS/networks/webpage_fetcher.py

@@ -78,20 +78,18 @@ class BatchWebpageFetcher:
         self.urls = urls
         self.total_count = len(self.urls)

-        with concurrent.futures.ThreadPoolExecutor() as executor:
+        with concurrent.futures.ProcessPoolExecutor() as executor:
             futures = [
-                executor.submit(self.fetch_single_webpage, url, overwrite, output_parent)
+                executor.submit(WebpageFetcher().fetch, url, overwrite, output_parent)
                 for url in urls
             ]
             concurrent.futures.wait(futures)

+        self.url_and_html_path_list = [
+            {"url": future.result().url, "html_path": str(future.result().html_path)}
+            for future in futures
+        ]
+
         return self.url_and_html_path_list

-if __name__ == "__main__":
-    urls = [
-        "https://stackoverflow.com/questions/295135/turn-a-string-into-a-valid-filename",
-        "https://www.liaoxuefeng.com/wiki/1016959663602400/1017495723838528",
-        "https://docs.python.org/zh-cn/3/tutorial/interpreter.html",
-    ]
-    batch_webpage_fetcher = BatchWebpageFetcher()
-    batch_webpage_fetcher.fetch(urls=urls, overwrite=True, output_parent="python tutorials")
+
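
A note on the executor change above: `ProcessPoolExecutor` pickles each submitted callable and its arguments to ship them to worker processes, which is presumably why the new code submits `WebpageFetcher().fetch` rather than a bound method of `BatchWebpageFetcher` itself. A minimal, self-contained sketch of the same pattern (the `fetch_one` helper here is a hypothetical stand-in, not the package's real fetcher):

```python
import concurrent.futures


def fetch_one(url: str) -> dict:
    # Hypothetical stand-in for WebpageFetcher().fetch: it must be a module-level,
    # picklable callable so ProcessPoolExecutor can send it to a worker process.
    return {"url": url, "html_path": f"cache/{url.replace('/', '_')}.html"}


def fetch_all(urls: list[str]) -> list[dict]:
    with concurrent.futures.ProcessPoolExecutor() as executor:
        futures = [executor.submit(fetch_one, url) for url in urls]
        concurrent.futures.wait(futures)
    # Collect results in submission order, mirroring url_and_html_path_list above.
    return [future.result() for future in futures]


if __name__ == "__main__":  # required guard for process pools on spawn-based platforms
    print(fetch_all(["https://docs.python.org/3/tutorial/interpreter.html"]))
```

Bound methods of an object that holds non-picklable state (open sessions, loggers) can fail under a process pool where a thread pool would have tolerated them, which fits the shape of this change.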

webscout-1.3.4/DeepWEBS/utilsdw/enver.py (new file)

@@ -0,0 +1,78 @@
+import json
+import os
+from pathlib import Path
+from typing import Dict, Optional
+
+from DeepWEBS.utilsdw.logger import OSLogger
+
+
+class OSEnver:
+    """Manages the OS environment variables."""
+
+    def __init__(self) -> None:
+        """Initializes the OSEnver object."""
+        self.envs_stack: list[Dict[str, str]] = []
+        self.envs: Dict[str, str] = os.environ.copy()
+
+    def store_envs(self) -> None:
+        """Stores a copy of the current environment variables on a stack."""
+        self.envs_stack.append(self.envs.copy())
+
+    def restore_envs(self) -> None:
+        """Restores environment variables from the top of the stack."""
+        self.envs = self.envs_stack.pop()
+
+    def set_envs(
+        self,
+        secrets: bool = True,
+        proxies: Optional[str] = None,
+        store_envs: bool = True,
+    ) -> None:
+        """Sets environment variables based on the contents of secrets.json.
+
+        Args:
+            secrets (bool): Whether to load secrets from secrets.json.
+            proxies (Optional[str]): Proxy URL to set as environment variable.
+            store_envs (bool): Whether to store a copy of the environment variables
+                on the stack.
+        """
+        if store_envs:
+            self.store_envs()
+
+        if secrets:
+            secrets_path = Path(__file__).parents[1] / "secrets.json"
+            if secrets_path.exists():
+                with open(secrets_path, "r") as rf:
+                    secrets = json.load(rf)
+            else:
+                secrets = {}
+
+        if proxies:
+            for proxy_env in ["http_proxy", "https_proxy"]:
+                if isinstance(proxies, str):
+                    self.envs[proxy_env] = proxies
+                elif "http_proxy" in secrets.keys():
+                    self.envs[proxy_env] = secrets["http_proxy"]
+                elif os.getenv("http_proxy"):
+                    self.envs[proxy_env] = os.getenv("http_proxy")
+                else:
+                    continue
+
+        self.proxy = (
+            self.envs.get("all_proxy")
+            or self.envs.get("http_proxy")
+            or self.envs.get("https_proxy")
+            or None
+        )
+        self.requests_proxies = {
+            "http": self.proxy,
+            "https": self.proxy,
+        }
+
+        if self.proxy:
+            OSLogger().note(f"Using proxy: [{self.proxy}]")
+
+
+enver: OSEnver = OSEnver()
+
+
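
For orientation, the rewritten module still exposes a module-level `enver` singleton, and the `requests_proxies` mapping it builds is shaped like the `proxies=` dict that `requests` accepts. A minimal usage sketch under assumptions not shown in the diff (that DeepWEBS is importable and that a local proxy URL such as `http://127.0.0.1:7890` is the one you want):

```python
import requests  # assumed consumer; the requests_proxies name suggests a requests-style dict

from DeepWEBS.utilsdw.enver import enver

# Resolve a proxy from an explicit URL (hypothetical local proxy), secrets.json, or http_proxy.
enver.set_envs(secrets=True, proxies="http://127.0.0.1:7890")

# requests_proxies maps both "http" and "https" to the single resolved proxy (or None if unset).
response = requests.get("https://example.com", proxies=enver.requests_proxies, timeout=10)
print(response.status_code)
```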

PKG-INFO

@@ -1,7 +1,7 @@
 Metadata-Version: 2.1
 Name: webscout
-Version: 1.3.3
-Summary: Search for words, documents, images, videos, news, maps and text translation using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models, can transcribe yt videos and have TTS support
+Version: 1.3.4
+Summary: Search for anything using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models, can transcribe yt videos, have TTS support and now has webai(terminal gpt and open interpeter) support
 Author: OEvortex
 Author-email: helpingai5@gmail.com
 License: HelpingAI Simplified Universal License
@@ -56,7 +56,7 @@ Requires-Dist: pytest>=7.4.2; extra == "dev"
 <a href="#"><img alt="Python version" src="https://img.shields.io/pypi/pyversions/webscout"/></a>
 <a href="https://pepy.tech/project/webscout"><img alt="Downloads" src="https://static.pepy.tech/badge/webscout"></a>

-Search for words, documents, images, videos, news, maps and text translation using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models and now can transcribe yt videos
+Search for anything using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models, can transcribe yt videos, have TTS support and now has webai(terminal gpt and open interpeter) support


 ## Table of Contents
@@ -100,6 +100,8 @@ Search for words, documents, images, videos, news, maps and text translation usi
 - [usage of special .LLM file from webscout (webscout.LLM)](#usage-of-special-llm-file-from-webscout-webscoutllm)
   - [`LLM`](#llm)
   - [`LLM` with internet](#llm-with-internet)
+- [`Webai` - terminal gpt and a open interpeter](#webai---terminal-gpt-and-a-open-interpeter)
+  - [for using as terminal gpt](#for-using-as-terminal-gpt)

 ## Install
 ```python
@@ -801,3 +803,49 @@ if __name__ == "__main__":
     else:
         print("No response")
 ```
+## `Webai` - terminal gpt and a open interpeter
+
+```python
+from webscout.webai import Main
+
+def use_rawdog_with_webai(prompt):
+    """
+    Wrap the webscout default method in a try-except block to catch any unhandled
+    exceptions and print a helpful message.
+    """
+    try:
+        webai_bot = Main(
+            max_tokens=500,
+            provider="phind",
+            temperature=0.7,
+            top_k=40,
+            top_p=0.95,
+            model="Phind Model", # Replace with your desired model
+            auth=None, # Replace with your auth key/value (if needed)
+            timeout=30,
+            disable_conversation=True,
+            filepath=None,
+            update_file=True,
+            intro=None,
+            rawdog=True,
+            history_offset=10250,
+            awesome_prompt=None,
+            proxy_path=None,
+            quiet=True
+        )
+        webai_response = webai_bot.default(prompt)
+    except Exception as e:
+        print("Unexpected error:", e)
+
+
+if __name__ == "__main__":
+    user_prompt = input("Enter your prompt: ")
+    use_rawdog_with_webai(user_prompt)
+```
+```shell
+python -m webscout.webai webai --provider "phind" --rawdog
+```
+### for using as terminal gpt
+```python
+python -m webscout.webai webai --provider "sean"
+```

README.md

@@ -4,7 +4,7 @@
 <a href="#"><img alt="Python version" src="https://img.shields.io/pypi/pyversions/webscout"/></a>
 <a href="https://pepy.tech/project/webscout"><img alt="Downloads" src="https://static.pepy.tech/badge/webscout"></a>

-Search for words, documents, images, videos, news, maps and text translation using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models and now can transcribe yt videos
+Search for anything using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models, can transcribe yt videos, have TTS support and now has webai(terminal gpt and open interpeter) support


 ## Table of Contents
@@ -48,6 +48,8 @@ Search for words, documents, images, videos, news, maps and text translation usi
 - [usage of special .LLM file from webscout (webscout.LLM)](#usage-of-special-llm-file-from-webscout-webscoutllm)
   - [`LLM`](#llm)
   - [`LLM` with internet](#llm-with-internet)
+- [`Webai` - terminal gpt and a open interpeter](#webai---terminal-gpt-and-a-open-interpeter)
+  - [for using as terminal gpt](#for-using-as-terminal-gpt)

 ## Install
 ```python
@@ -748,4 +750,50 @@ if __name__ == "__main__":
         print("AI:", response)
     else:
         print("No response")
+```
+## `Webai` - terminal gpt and a open interpeter
+
+```python
+from webscout.webai import Main
+
+def use_rawdog_with_webai(prompt):
+    """
+    Wrap the webscout default method in a try-except block to catch any unhandled
+    exceptions and print a helpful message.
+    """
+    try:
+        webai_bot = Main(
+            max_tokens=500,
+            provider="phind",
+            temperature=0.7,
+            top_k=40,
+            top_p=0.95,
+            model="Phind Model", # Replace with your desired model
+            auth=None, # Replace with your auth key/value (if needed)
+            timeout=30,
+            disable_conversation=True,
+            filepath=None,
+            update_file=True,
+            intro=None,
+            rawdog=True,
+            history_offset=10250,
+            awesome_prompt=None,
+            proxy_path=None,
+            quiet=True
+        )
+        webai_response = webai_bot.default(prompt)
+    except Exception as e:
+        print("Unexpected error:", e)
+
+
+if __name__ == "__main__":
+    user_prompt = input("Enter your prompt: ")
+    use_rawdog_with_webai(user_prompt)
+```
+```shell
+python -m webscout.webai webai --provider "phind" --rawdog
+```
+### for using as terminal gpt
+```python
+python -m webscout.webai webai --provider "sean"
 ```

setup.py

@@ -5,8 +5,8 @@ with open("README.md", encoding="utf-8") as f:

 setup(
     name="webscout",
-    version="1.3.3",
-    description="Search for words, documents, images, videos, news, maps and text translation using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models, can transcribe yt videos and have TTS support",
+    version="1.3.4",
+    description="Search for anything using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models, can transcribe yt videos, have TTS support and now has webai(terminal gpt and open interpeter) support",
     long_description=README,
     long_description_content_type="text/markdown",
     author="OEvortex",

webscout/AIutel.py

@@ -10,14 +10,31 @@ import sys
 import click
 from rich.markdown import Markdown
 from rich.console import Console
-
+import g4f
 appdir = appdirs.AppDirs("AIWEBS", "vortex")

 default_path = appdir.user_cache_dir

 if not os.path.exists(default_path):
     os.makedirs(default_path)
-
+webai = [
+    "leo",
+    "openai",
+    "opengpt",
+    "koboldai",
+    "gemini",
+    "phind",
+    "blackboxai",
+    "g4fauto",
+    "perplexity",
+    "sean",
+]
+
+gpt4free_providers = [
+    provider.__name__ for provider in g4f.Provider.__providers__ # if provider.working
+]
+
+available_providers = webai + gpt4free_providers

 def run_system_command(
     command: str,
@@ -468,7 +485,6 @@ print("The essay is about...")
 ```
 """

-    # Idea borrowed from https://github.com/AbanteAI/rawdog

     def __init__(
         self,

webscout/__init__.py

@@ -1,7 +1,6 @@
 """Webscout.

-Search for words, documents, images, videos, news, maps and text translation
-using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models
+Search for anything using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models, can transcribe yt videos, have TTS support and now has webai(terminal gpt and open interpeter) support
 """
 import g4f
 import logging

webscout/g4f.py

@@ -2,7 +2,7 @@ import g4f
 from webscout.AIutel import Optimizers
 from webscout.AIutel import Conversation
 from webscout.AIutel import AwesomePrompts
-from webscout.AIutel import Provider
+from webscout.AIbase import Provider
 from webscout.AIutel import available_providers



webscout-1.3.4/webscout/version.py (new file)

@@ -0,0 +1,2 @@
+__version__ = "1.3.4"
+

webscout/webai.py

@@ -29,6 +29,7 @@ from webscout.AIutel import Optimizers
 from webscout.AIutel import default_path
 from webscout.AIutel import AwesomePrompts
 from webscout.AIutel import RawDog
+from webscout import available_providers
 from colorama import Fore
 from colorama import init as init_colorama
 from dotenv import load_dotenv
@@ -41,7 +42,7 @@ init_colorama(autoreset=True)
 load_dotenv() # loads .env variables

 logging.basicConfig(
-    format="%(asctime)s - %(levelname)s : %(message)s ", # [%(module)s,%(lineno)s]", # for debug purposes
+    format="%(asctime)s - %(levelname)s : %(message)s ",
     datefmt="%H:%M:%S",
     level=logging.INFO,
 )
@@ -61,7 +62,7 @@ class this:

     rich_code_themes = ["monokai", "paraiso-dark", "igor", "vs", "fruity", "xcode"]

-    default_provider = "sean"
+    default_provider = "phind"

     getExc = lambda e: e.args[1] if len(e.args) > 1 else str(e)

@@ -1077,7 +1078,7 @@ class EntryGroup:
     pass


-
+import webscout
 class Chatwebai:
     """webai command"""

@@ -1194,7 +1195,7 @@ class Chatwebai:
     @click.option(
         "-p",
         "--provider",
-        type=click.Choice(webscout.available_providers),
+        type=click.Choice(available_providers),
         default=this.default_provider,
         help="Name of LLM provider.",
         metavar=(

webscout.egg-info/PKG-INFO

@@ -1,7 +1,7 @@
 Metadata-Version: 2.1
 Name: webscout
-Version: 1.3.3
-Summary: Search for words, documents, images, videos, news, maps and text translation using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models, can transcribe yt videos and have TTS support
+Version: 1.3.4
+Summary: Search for anything using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models, can transcribe yt videos, have TTS support and now has webai(terminal gpt and open interpeter) support
 Author: OEvortex
 Author-email: helpingai5@gmail.com
 License: HelpingAI Simplified Universal License
@@ -56,7 +56,7 @@ Requires-Dist: pytest>=7.4.2; extra == "dev"
 <a href="#"><img alt="Python version" src="https://img.shields.io/pypi/pyversions/webscout"/></a>
 <a href="https://pepy.tech/project/webscout"><img alt="Downloads" src="https://static.pepy.tech/badge/webscout"></a>

-Search for words, documents, images, videos, news, maps and text translation using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models and now can transcribe yt videos
+Search for anything using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models, can transcribe yt videos, have TTS support and now has webai(terminal gpt and open interpeter) support


 ## Table of Contents
@@ -100,6 +100,8 @@ Search for words, documents, images, videos, news, maps and text translation usi
 - [usage of special .LLM file from webscout (webscout.LLM)](#usage-of-special-llm-file-from-webscout-webscoutllm)
   - [`LLM`](#llm)
   - [`LLM` with internet](#llm-with-internet)
+- [`Webai` - terminal gpt and a open interpeter](#webai---terminal-gpt-and-a-open-interpeter)
+  - [for using as terminal gpt](#for-using-as-terminal-gpt)

 ## Install
 ```python
@@ -801,3 +803,49 @@ if __name__ == "__main__":
     else:
         print("No response")
 ```
+## `Webai` - terminal gpt and a open interpeter
+
+```python
+from webscout.webai import Main
+
+def use_rawdog_with_webai(prompt):
+    """
+    Wrap the webscout default method in a try-except block to catch any unhandled
+    exceptions and print a helpful message.
+    """
+    try:
+        webai_bot = Main(
+            max_tokens=500,
+            provider="phind",
+            temperature=0.7,
+            top_k=40,
+            top_p=0.95,
+            model="Phind Model", # Replace with your desired model
+            auth=None, # Replace with your auth key/value (if needed)
+            timeout=30,
+            disable_conversation=True,
+            filepath=None,
+            update_file=True,
+            intro=None,
+            rawdog=True,
+            history_offset=10250,
+            awesome_prompt=None,
+            proxy_path=None,
+            quiet=True
+        )
+        webai_response = webai_bot.default(prompt)
+    except Exception as e:
+        print("Unexpected error:", e)
+
+
+if __name__ == "__main__":
+    user_prompt = input("Enter your prompt: ")
+    use_rawdog_with_webai(user_prompt)
+```
+```shell
+python -m webscout.webai webai --provider "phind" --rawdog
+```
+### for using as terminal gpt
+```python
+python -m webscout.webai webai --provider "sean"
+```

webscout-1.3.3/DeepWEBS/utilsdw/enver.py (removed)

@@ -1,60 +0,0 @@
-import json
-import os
-
-from pathlib import Path
-from DeepWEBS.utilsdw.logger import logger
-
-
-class OSEnver:
-    def __init__(self):
-        self.envs_stack = []
-        self.envs = os.environ.copy()
-
-    def store_envs(self):
-        self.envs_stack.append(self.envs)
-
-    def restore_envs(self):
-        self.envs = self.envs_stack.pop()
-
-    def set_envs(self, secrets=True, proxies=None, store_envs=True):
-        # caller_info = inspect.stack()[1]
-        # logger.back(f"OS Envs is set by: {caller_info.filename}")
-
-        if store_envs:
-            self.store_envs()
-
-        if secrets:
-            secrets_path = Path(__file__).parents[1] / "secrets.json"
-            if secrets_path.exists():
-                with open(secrets_path, "r") as rf:
-                    secrets = json.load(rf)
-            else:
-                secrets = {}
-
-        if proxies:
-            for proxy_env in ["http_proxy", "https_proxy"]:
-                if isinstance(proxies, str):
-                    self.envs[proxy_env] = proxies
-                elif "http_proxy" in secrets.keys():
-                    self.envs[proxy_env] = secrets["http_proxy"]
-                elif os.getenv("http_proxy"):
-                    self.envs[proxy_env] = os.getenv("http_proxy")
-                else:
-                    continue
-
-        self.proxy = (
-            self.envs.get("all_proxy")
-            or self.envs.get("http_proxy")
-            or self.envs.get("https_proxy")
-            or None
-        )
-        self.requests_proxies = {
-            "http": self.proxy,
-            "https": self.proxy,
-        }
-
-        if self.proxy:
-            logger.note(f"Using proxy: [{self.proxy}]")
-
-
-enver = OSEnver()

webscout-1.3.3/webscout/version.py (removed)

@@ -1,2 +0,0 @@
-__version__ = "1.3.3"
-