webscout 4.2.tar.gz → 4.4.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of webscout might be problematic. Click here for more details.

Files changed (77)
  1. {webscout-4.2/webscout.egg-info → webscout-4.4}/PKG-INFO +35 -1
  2. {webscout-4.2 → webscout-4.4}/README.md +28 -0
  3. {webscout-4.2 → webscout-4.4}/setup.py +8 -2
  4. {webscout-4.2 → webscout-4.4}/webscout/AIauto.py +5 -7
  5. {webscout-4.2 → webscout-4.4}/webscout/AIutel.py +6 -8
  6. {webscout-4.2 → webscout-4.4}/webscout/Extra/autollama.py +20 -9
  7. {webscout-4.2 → webscout-4.4}/webscout/Extra/gguf.py +5 -19
  8. webscout-4.4/webscout/Extra/weather.py +67 -0
  9. {webscout-4.2 → webscout-4.4}/webscout/Extra/weather_ascii.py +5 -0
  10. webscout-4.4/webscout/GoogleS.py +342 -0
  11. {webscout-4.2 → webscout-4.4}/webscout/Local/_version.py +1 -1
  12. webscout-4.4/webscout/Provider/FreeGemini.py +169 -0
  13. webscout-4.4/webscout/Provider/Llama.py +211 -0
  14. {webscout-4.2 → webscout-4.4}/webscout/Provider/__init__.py +5 -5
  15. {webscout-4.2 → webscout-4.4}/webscout/__init__.py +7 -4
  16. {webscout-4.2 → webscout-4.4}/webscout/async_providers.py +0 -2
  17. {webscout-4.2 → webscout-4.4}/webscout/cli.py +22 -21
  18. {webscout-4.2 → webscout-4.4}/webscout/utils.py +13 -3
  19. {webscout-4.2 → webscout-4.4}/webscout/version.py +1 -1
  20. {webscout-4.2 → webscout-4.4}/webscout/webai.py +3 -0
  21. {webscout-4.2 → webscout-4.4}/webscout/webscout_search.py +34 -26
  22. {webscout-4.2 → webscout-4.4/webscout.egg-info}/PKG-INFO +35 -1
  23. {webscout-4.2 → webscout-4.4}/webscout.egg-info/SOURCES.txt +3 -1
  24. {webscout-4.2 → webscout-4.4}/webscout.egg-info/requires.txt +6 -0
  25. webscout-4.2/webscout/Extra/weather.py +0 -49
  26. webscout-4.2/webscout/Provider/Llama2.py +0 -437
  27. {webscout-4.2 → webscout-4.4}/LICENSE.md +0 -0
  28. {webscout-4.2 → webscout-4.4}/setup.cfg +0 -0
  29. {webscout-4.2 → webscout-4.4}/webscout/AIbase.py +0 -0
  30. {webscout-4.2 → webscout-4.4}/webscout/DWEBS.py +0 -0
  31. {webscout-4.2 → webscout-4.4}/webscout/Extra/__init__.py +0 -0
  32. {webscout-4.2 → webscout-4.4}/webscout/LLM.py +0 -0
  33. {webscout-4.2 → webscout-4.4}/webscout/Local/__init__.py +0 -0
  34. {webscout-4.2 → webscout-4.4}/webscout/Local/formats.py +0 -0
  35. {webscout-4.2 → webscout-4.4}/webscout/Local/model.py +0 -0
  36. {webscout-4.2 → webscout-4.4}/webscout/Local/rawdog.py +0 -0
  37. {webscout-4.2 → webscout-4.4}/webscout/Local/samplers.py +0 -0
  38. {webscout-4.2 → webscout-4.4}/webscout/Local/thread.py +0 -0
  39. {webscout-4.2 → webscout-4.4}/webscout/Local/utils.py +0 -0
  40. {webscout-4.2 → webscout-4.4}/webscout/Provider/BasedGPT.py +0 -0
  41. {webscout-4.2 → webscout-4.4}/webscout/Provider/Berlin4h.py +0 -0
  42. {webscout-4.2 → webscout-4.4}/webscout/Provider/Blackboxai.py +0 -0
  43. {webscout-4.2 → webscout-4.4}/webscout/Provider/ChatGPTUK.py +0 -0
  44. {webscout-4.2 → webscout-4.4}/webscout/Provider/Cohere.py +0 -0
  45. {webscout-4.2 → webscout-4.4}/webscout/Provider/Deepinfra.py +0 -0
  46. {webscout-4.2 → webscout-4.4}/webscout/Provider/Deepseek.py +0 -0
  47. {webscout-4.2 → webscout-4.4}/webscout/Provider/Gemini.py +0 -0
  48. {webscout-4.2 → webscout-4.4}/webscout/Provider/Geminiflash.py +0 -0
  49. {webscout-4.2 → webscout-4.4}/webscout/Provider/Geminipro.py +0 -0
  50. {webscout-4.2 → webscout-4.4}/webscout/Provider/Groq.py +0 -0
  51. {webscout-4.2 → webscout-4.4}/webscout/Provider/Koboldai.py +0 -0
  52. {webscout-4.2 → webscout-4.4}/webscout/Provider/Leo.py +0 -0
  53. {webscout-4.2 → webscout-4.4}/webscout/Provider/OLLAMA.py +0 -0
  54. {webscout-4.2 → webscout-4.4}/webscout/Provider/OpenGPT.py +0 -0
  55. {webscout-4.2 → webscout-4.4}/webscout/Provider/Openai.py +0 -0
  56. {webscout-4.2 → webscout-4.4}/webscout/Provider/Perplexity.py +0 -0
  57. {webscout-4.2 → webscout-4.4}/webscout/Provider/Phind.py +0 -0
  58. {webscout-4.2 → webscout-4.4}/webscout/Provider/Poe.py +0 -0
  59. {webscout-4.2 → webscout-4.4}/webscout/Provider/Reka.py +0 -0
  60. {webscout-4.2 → webscout-4.4}/webscout/Provider/ThinkAnyAI.py +0 -0
  61. {webscout-4.2 → webscout-4.4}/webscout/Provider/VTLchat.py +0 -0
  62. {webscout-4.2 → webscout-4.4}/webscout/Provider/Xjai.py +0 -0
  63. {webscout-4.2 → webscout-4.4}/webscout/Provider/Yepchat.py +0 -0
  64. {webscout-4.2 → webscout-4.4}/webscout/Provider/Youchat.py +0 -0
  65. {webscout-4.2 → webscout-4.4}/webscout/YTdownloader.py +0 -0
  66. {webscout-4.2 → webscout-4.4}/webscout/__main__.py +0 -0
  67. {webscout-4.2 → webscout-4.4}/webscout/exceptions.py +0 -0
  68. {webscout-4.2 → webscout-4.4}/webscout/g4f.py +0 -0
  69. {webscout-4.2 → webscout-4.4}/webscout/models.py +0 -0
  70. {webscout-4.2 → webscout-4.4}/webscout/tempid.py +0 -0
  71. {webscout-4.2 → webscout-4.4}/webscout/transcriber.py +0 -0
  72. {webscout-4.2 → webscout-4.4}/webscout/voice.py +0 -0
  73. {webscout-4.2 → webscout-4.4}/webscout/webscout_search_async.py +0 -0
  74. {webscout-4.2 → webscout-4.4}/webscout/websx_search.py +0 -0
  75. {webscout-4.2 → webscout-4.4}/webscout.egg-info/dependency_links.txt +0 -0
  76. {webscout-4.2 → webscout-4.4}/webscout.egg-info/entry_points.txt +0 -0
  77. {webscout-4.2 → webscout-4.4}/webscout.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: webscout
3
- Version: 4.2
3
+ Version: 4.4
4
4
  Summary: Search for anything using Google, DuckDuckGo, brave, qwant, phind.com, Contains AI models, can transcribe yt videos, temporary email and phone number generation, has TTS support, webai (terminal gpt and open interpreter) and offline LLMs and more
5
5
  Author: OEvortex
6
6
  Author-email: helpingai5@gmail.com
@@ -34,6 +34,7 @@ Requires-Dist: tqdm
34
34
  Requires-Dist: webdriver-manager
35
35
  Requires-Dist: halo>=0.0.31
36
36
  Requires-Dist: g4f>=0.2.2.3
37
+ Requires-Dist: g4f[webdriver]
37
38
  Requires-Dist: rich
38
39
  Requires-Dist: python-dotenv
39
40
  Requires-Dist: beautifulsoup4
@@ -55,6 +56,11 @@ Requires-Dist: playsound
55
56
  Requires-Dist: poe_api_wrapper
56
57
  Requires-Dist: pyreqwest_impersonate
57
58
  Requires-Dist: ballyregan
59
+ Requires-Dist: nodriver
60
+ Requires-Dist: PyExecJS
61
+ Requires-Dist: ollama
62
+ Requires-Dist: pyfiglet
63
+ Requires-Dist: yaspin
58
64
  Provides-Extra: dev
59
65
  Requires-Dist: ruff>=0.1.6; extra == "dev"
60
66
  Requires-Dist: pytest>=7.4.2; extra == "dev"
@@ -1472,6 +1478,34 @@ response = ollama_provider.chat("What is the meaning of life?")
1472
1478
  print(response)
1473
1479
  ```
1474
1480
 
1481
+ ### 22. GROQ
1482
+ ```python
1483
+ from webscout import GROQ
1484
+ ai = GROQ(api_key="")
1485
+ response = ai.chat("What is the meaning of life?")
1486
+ print(response)
1487
+
1488
+ ```
1489
+
1490
+ ### 23. Freegemini - chat with gemini for free
1491
+ ```python
1492
+ from webscout import FreeGemini
1493
+ ai = FreeGemini()
1494
+ response = ai.chat("What is the meaning of life?")
1495
+ print(response)
1496
+ ```
1497
+
1498
+ ### 24. LLama 70b - chat with meta's llama 3 70b
1499
+ ```python
1500
+
1501
+ from webscout import LLAMA
1502
+
1503
+ llama = LLAMA()
1504
+
1505
+ r = llama.chat("What is the meaning of life?")
1506
+ print(r)
1507
+ ```
1508
+
1475
1509
  ### `LLM`
1476
1510
  ```python
1477
1511
  from webscout.LLM import LLM
@@ -1406,6 +1406,34 @@ response = ollama_provider.chat("What is the meaning of life?")
1406
1406
  print(response)
1407
1407
  ```
1408
1408
 
1409
+ ### 22. GROQ
1410
+ ```python
1411
+ from webscout import GROQ
1412
+ ai = GROQ(api_key="")
1413
+ response = ai.chat("What is the meaning of life?")
1414
+ print(response)
1415
+
1416
+ ```
1417
+
1418
+ ### 23. Freegemini - chat with gemini for free
1419
+ ```python
1420
+ from webscout import FreeGemini
1421
+ ai = FreeGemini()
1422
+ response = ai.chat("What is the meaning of life?")
1423
+ print(response)
1424
+ ```
1425
+
1426
+ ### 24. LLama 70b - chat with meta's llama 3 70b
1427
+ ```python
1428
+
1429
+ from webscout import LLAMA
1430
+
1431
+ llama = LLAMA()
1432
+
1433
+ r = llama.chat("What is the meaning of life?")
1434
+ print(r)
1435
+ ```
1436
+
1409
1437
  ### `LLM`
1410
1438
  ```python
1411
1439
  from webscout.LLM import LLM
@@ -5,7 +5,7 @@ with open("README.md", encoding="utf-8") as f:
5
5
 
6
6
  setup(
7
7
  name="webscout",
8
- version="4.2",
8
+ version="4.4",
9
9
  description="Search for anything using Google, DuckDuckGo, brave, qwant, phind.com, Contains AI models, can transcribe yt videos, temporary email and phone number generation, has TTS support, webai (terminal gpt and open interpreter) and offline LLMs and more",
10
10
  long_description=README,
11
11
  long_description_content_type="text/markdown",
@@ -38,6 +38,7 @@ setup(
38
38
  "webdriver-manager",
39
39
  "halo>=0.0.31",
40
40
  "g4f>=0.2.2.3",
41
+ "g4f[webdriver]",
41
42
  "rich",
42
43
  "python-dotenv",
43
44
  "beautifulsoup4",
@@ -58,7 +59,12 @@ setup(
58
59
  "playsound",
59
60
  "poe_api_wrapper",
60
61
  "pyreqwest_impersonate",
61
- "ballyregan"
62
+ "ballyregan",
63
+ "nodriver",
64
+ "PyExecJS",
65
+ "ollama",
66
+ "pyfiglet",
67
+ "yaspin",
62
68
  ],
63
69
  entry_points={
64
70
  "console_scripts": [
@@ -1,8 +1,7 @@
1
1
  from webscout.AIbase import Provider, AsyncProvider
2
2
  from webscout.Provider.ThinkAnyAI import ThinkAnyAI
3
3
  from webscout.Provider.Xjai import Xjai
4
- from webscout.Provider.Llama2 import LLAMA2
5
- from webscout.Provider.Llama2 import AsyncLLAMA2
4
+ from webscout.Provider.Llama import LLAMA
6
5
  from webscout.Provider.Leo import LEO
7
6
  from webscout.Provider.Leo import AsyncLEO
8
7
  from webscout.Provider.Koboldai import KOBOLDAI
@@ -44,7 +43,7 @@ provider_map: dict[
44
43
  Union[
45
44
  ThinkAnyAI,
46
45
  Xjai,
47
- LLAMA2,
46
+ LLAMA,
48
47
  LEO,
49
48
  KOBOLDAI,
50
49
  OPENGPT,
@@ -69,7 +68,7 @@ provider_map: dict[
69
68
  ] = {
70
69
  "ThinkAnyAI": ThinkAnyAI,
71
70
  "Xjai": Xjai,
72
- "LLAMA2": LLAMA2,
71
+ "LLAMA2": LLAMA,
73
72
  "LEO": LEO,
74
73
  "KOBOLDAI": KOBOLDAI,
75
74
  "OPENGPT": OPENGPT,
@@ -124,7 +123,7 @@ class AUTO(Provider):
124
123
  self.provider: Union[
125
124
  ThinkAnyAI,
126
125
  Xjai,
127
- LLAMA2,
126
+ LLAMA,
128
127
  LEO,
129
128
  KOBOLDAI,
130
129
  OPENGPT,
@@ -353,13 +352,12 @@ class AsyncAUTO(AsyncProvider):
353
352
  AsyncOPENGPT,
354
353
  AsyncKOBOLDAI,
355
354
  AsyncPhindSearch,
356
- AsyncLLAMA2,
357
355
  AsyncBLACKBOXAI,
358
356
  AsyncGPT4FREE,
359
357
  AsyncLEO,
360
358
  ThinkAnyAI,
361
359
  Xjai,
362
- LLAMA2,
360
+ LLAMA,
363
361
  LEO,
364
362
  KOBOLDAI,
365
363
  OPENGPT,
@@ -220,17 +220,16 @@ class Conversation:
220
220
  ), f"File '{filepath}' does not exist"
221
221
  if not os.path.isfile(filepath):
222
222
  logging.debug(f"Creating new chat-history file - '{filepath}'")
223
- with open(filepath, "w") as fh: # Try creating new file
224
- # lets add intro here
223
+ with open(filepath, "w", encoding="utf-8") as fh: # Try creating new file with UTF-8 encoding
225
224
  fh.write(self.intro)
226
225
  else:
227
226
  logging.debug(f"Loading conversation from '{filepath}'")
228
- with open(filepath) as fh:
227
+ with open(filepath, encoding="utf-8") as fh: # Open with UTF-8 encoding
229
228
  file_contents = fh.readlines()
230
229
  if file_contents:
231
230
  self.intro = file_contents[0] # Presume first line is the intro.
232
231
  self.chat_history = "\n".join(file_contents[1:])
233
-
232
+
234
233
  def __trim_chat_history(self, chat_history: str, intro: str) -> str:
235
234
  """Ensures the len(prompt) and max_tokens_to_sample is not > 4096"""
236
235
  len_of_intro = len(intro)
@@ -243,7 +242,6 @@ class Conversation:
243
242
  # Remove head of total (n) of chat_history
244
243
  trimmed_chat_history = chat_history[truncate_at:]
245
244
  return "... " + trimmed_chat_history
246
- # print(len(self.chat_history))
247
245
  else:
248
246
  return chat_history
249
247
 
@@ -281,12 +279,12 @@ class Conversation:
281
279
  new_history = self.history_format % dict(user=prompt, llm=response)
282
280
  if self.file and self.update_file:
283
281
  if os.path.exists(self.file):
284
- with open(self.file, "w") as fh:
282
+ with open(self.file, "w", encoding="utf-8") as fh: # Specify UTF-8 encoding
285
283
  fh.write(self.intro + "\n" + new_history)
286
284
  else:
287
- with open(self.file, "a") as fh:
285
+ with open(self.file, "a", encoding="utf-8") as fh: # Specify UTF-8 encoding
288
286
  fh.write(new_history)
289
- self.chat_history += new_history
287
+ self.chat_history += new_history
290
288
 
291
289
 
292
290
 
@@ -1,6 +1,14 @@
1
1
  import subprocess
2
2
  import argparse
3
3
  import os
4
+ from rich.console import Console
5
+ from rich.panel import Panel
6
+ from rich.progress import track
7
+ from yaspin import yaspin
8
+ from pyfiglet import figlet_format
9
+ import time
10
+
11
+ console = Console()
4
12
 
5
13
  def autollama(model_path, gguf_file):
6
14
  """Manages models with Ollama using the autollama.sh script.
@@ -9,6 +17,7 @@ def autollama(model_path, gguf_file):
9
17
  model_path (str): The path to the Hugging Face model.
10
18
  gguf_file (str): The name of the GGUF file.
11
19
  """
20
+ console.print(f"[bold green]{figlet_format('Autollama')}[/]\n", justify="center")
12
21
 
13
22
  # Check if autollama.sh exists in the current working directory
14
23
  script_path = os.path.join(os.getcwd(), "autollama.sh")
@@ -172,25 +181,27 @@ echo "Use Ollama run $MODEL_NAME"
172
181
  # Execute the command
173
182
  process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
174
183
 
175
- # Print the output and error in real-time
176
- for line in process.stdout:
177
- print(line, end='')
178
-
179
- for line in process.stderr:
180
- print(line, end='')
184
+ for line in iter(process.stdout.readline, ''):
185
+ console.print(Panel(line.strip(), title="Autollama Output", expand=False))
181
186
 
187
+ for line in iter(process.stderr.readline, ''):
188
+ console.print(Panel(line.strip(), title="Autollama Errors (if any)", expand=False))
189
+
182
190
  process.wait()
191
+ console.print("[green]Model is ready![/]")
183
192
 
184
193
  def main():
185
194
  parser = argparse.ArgumentParser(description='Automatically create and run an Ollama model in Ollama')
186
- parser.add_argument('-m', '--model_path', required=True, help='Set the hunggingface model id to the Hugging Face model')
195
+ parser.add_argument('-m', '--model_path', required=True, help='Set the huggingface model id to the Hugging Face model')
187
196
  parser.add_argument('-g', '--gguf_file', required=True, help='Set the GGUF file name')
188
197
  args = parser.parse_args()
189
198
 
190
199
  try:
191
- autollama(args.model_path, args.gguf_file)
200
+ with yaspin(text="Processing...") as spinner:
201
+ autollama(args.model_path, args.gguf_file)
202
+ spinner.ok("Done!")
192
203
  except Exception as e:
193
- print(f"Error: {e}")
204
+ console.print(f"[red]Error: {e}[/]")
194
205
  exit(1)
195
206
 
196
207
  if __name__ == "__main__":
@@ -1,7 +1,10 @@
1
1
  # webscout/Extra/gguf.py
2
2
  import subprocess
3
- import argparse
4
3
  import os
4
+ from pyfiglet import figlet_format
5
+ from rich.console import Console
6
+
7
+ console = Console()
5
8
 
6
9
  def convert(model_id, username=None, token=None, quantization_methods="q4_k_m,q5_k_m"):
7
10
  """Converts and quantizes a Hugging Face model to GGUF format.
@@ -17,6 +20,7 @@ def convert(model_id, username=None, token=None, quantization_methods="q4_k_m,q5
17
20
  ValueError: If an invalid quantization method is provided.
18
21
  """
19
22
 
23
+ console.print(f"[bold green]{figlet_format('GGUF Converter')}[/]\n", justify="center")
20
24
  # List of valid quantization methods
21
25
  valid_methods = [
22
26
  "q2_k", "q3_k_l", "q3_k_m", "q3_k_s",
@@ -220,21 +224,3 @@ echo "Script completed."
220
224
  process.wait()
221
225
 
222
226
 
223
- def main():
224
- parser = argparse.ArgumentParser(description='Convert and quantize model using gguf.sh')
225
- parser.add_argument('-m', '--model_id', required=True, help='Set the HF model ID (e.g., "google/flan-t5-xl")')
226
- parser.add_argument('-u', '--username', help='Set your Hugging Face username (required for uploads)')
227
- parser.add_argument('-t', '--token', help='Set your Hugging Face API token (required for uploads)')
228
- parser.add_argument('-q', '--quantization_methods', default="q4_k_m,q5_k_m",
229
- help='Comma-separated quantization methods (default: q4_k_m,q5_k_m). Valid methods: q2_k, q3_k_l, q3_k_m, q3_k_s, q4_0, q4_1, q4_k_m, q4_k_s, q5_0, q5_1, q5_k_m, q5_k_s, q6_k, q8_0')
230
-
231
- args = parser.parse_args()
232
-
233
- try:
234
- convert(args.model_id, args.username, args.token, args.quantization_methods)
235
- except ValueError as e:
236
- print(e)
237
- exit(1)
238
-
239
- if __name__ == "__main__":
240
- main()
@@ -0,0 +1,67 @@
1
+ import requests
2
+ from rich.console import Console
3
+ from rich.table import Table
4
+ from yaspin import yaspin
5
+ from pyfiglet import figlet_format
6
+
7
+ console = Console()
8
+
9
+ def get(location):
10
+ """Fetches weather data for the given location.
11
+
12
+ Args:
13
+ location (str): The location for which to fetch weather data.
14
+
15
+ Returns:
16
+ dict: A dictionary containing weather data if the request is successful,
17
+ otherwise a string indicating the error.
18
+ """
19
+ url = f"https://wttr.in/{location}?format=j1"
20
+
21
+ with yaspin(text="Fetching weather data...") as spinner:
22
+ response = requests.get(url)
23
+ spinner.ok("✅ ")
24
+
25
+ if response.status_code == 200:
26
+ return response.json()
27
+ else:
28
+ return f"Error: Unable to fetch weather data. Status code: {response.status_code}"
29
+
30
+ def print_weather(weather_data):
31
+ """Prints the weather data in a user-friendly format.
32
+
33
+ Args:
34
+ weather_data (dict or str): The weather data returned from get_weather()
35
+ or an error message.
36
+ """
37
+ if isinstance(weather_data, str):
38
+ console.print(f"[bold red]Error:[/] {weather_data}")
39
+ return
40
+
41
+ current = weather_data['current_condition'][0]
42
+ location_name = weather_data['nearest_area'][0]['areaName'][0]['value']
43
+
44
+ console.print(f"[bold blue]\n{figlet_format('Weather Report')}[/]\n", justify="center")
45
+ console.print(f"[bold green]Weather in {location_name}:[/]\n")
46
+
47
+ table = Table(show_header=False, show_lines=True)
48
+ table.add_row("Temperature:", f"{current['temp_C']}°C / {current['temp_F']}°F")
49
+ table.add_row("Condition:", current['weatherDesc'][0]['value'])
50
+ table.add_row("Humidity:", f"{current['humidity']}%")
51
+ table.add_row("Wind:", f"{current['windspeedKmph']} km/h, {current['winddir16Point']}")
52
+ console.print(table)
53
+
54
+ console.print(f"\n[bold green]Forecast:[/]")
55
+ table = Table(show_header=True, header_style="bold cyan")
56
+ table.add_column("Date", style="dim", width=12)
57
+ table.add_column("Temperature Range")
58
+ table.add_column("Description")
59
+
60
+ for day in weather_data['weather']:
61
+ date = day['date']
62
+ max_temp = day['maxtempC']
63
+ min_temp = day['mintempC']
64
+ desc = day['hourly'][4]['weatherDesc'][0]['value']
65
+ table.add_row(date, f"{min_temp}°C to {max_temp}°C", desc)
66
+ console.print(table)
67
+
@@ -1,5 +1,8 @@
1
1
  import requests
2
+ from rich.console import Console
3
+ from pyfiglet import figlet_format
2
4
 
5
+ console = Console()
3
6
  def get(location):
4
7
  """Fetches ASCII art weather data for the given location.
5
8
  Args:
@@ -9,6 +12,7 @@ def get(location):
9
12
  str: ASCII art weather report if the request is successful,
10
13
  otherwise an error message.
11
14
  """
15
+ console.print(f"[bold green]{figlet_format('Weather')}[/]\n", justify="center")
12
16
  url = f"https://wttr.in/{location}"
13
17
  response = requests.get(url, headers={'User-Agent': 'curl'})
14
18
 
@@ -16,3 +20,4 @@ def get(location):
16
20
  return "\n".join(response.text.splitlines()[:-1])
17
21
  else:
18
22
  return f"Error: Unable to fetch weather data. Status code: {response.status_code}"
23
+