webscout 3.4__tar.gz → 3.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of webscout might be problematic; consult the package registry's advisory for more details.

Files changed (76)
  1. {webscout-3.4 → webscout-3.5}/PKG-INFO +148 -216
  2. {webscout-3.4 → webscout-3.5}/README.md +146 -214
  3. {webscout-3.4 → webscout-3.5}/setup.py +2 -2
  4. {webscout-3.4 → webscout-3.5}/webscout/AIutel.py +1 -0
  5. webscout-3.5/webscout/DWEBS.py +793 -0
  6. webscout-3.5/webscout/Provider/Deepinfra.py +479 -0
  7. {webscout-3.4 → webscout-3.5}/webscout/Provider/__init__.py +4 -1
  8. {webscout-3.4 → webscout-3.5}/webscout/__init__.py +4 -2
  9. {webscout-3.4 → webscout-3.5}/webscout/webai.py +15 -0
  10. webscout-3.5/webscout/websx_search.py +370 -0
  11. {webscout-3.4 → webscout-3.5}/webscout.egg-info/PKG-INFO +148 -216
  12. {webscout-3.4 → webscout-3.5}/webscout.egg-info/SOURCES.txt +2 -12
  13. {webscout-3.4 → webscout-3.5}/webscout.egg-info/top_level.txt +0 -1
  14. webscout-3.4/DeepWEBS/__init__.py +0 -0
  15. webscout-3.4/DeepWEBS/documents/__init__.py +0 -0
  16. webscout-3.4/DeepWEBS/documents/query_results_extractor.py +0 -99
  17. webscout-3.4/DeepWEBS/documents/webpage_content_extractor.py +0 -145
  18. webscout-3.4/DeepWEBS/networks/__init__.py +0 -0
  19. webscout-3.4/DeepWEBS/networks/filepath_converter.py +0 -109
  20. webscout-3.4/DeepWEBS/networks/google_searcher.py +0 -52
  21. webscout-3.4/DeepWEBS/networks/network_configs.py +0 -30
  22. webscout-3.4/DeepWEBS/networks/webpage_fetcher.py +0 -95
  23. webscout-3.4/DeepWEBS/utilsdw/__init__.py +0 -0
  24. webscout-3.4/DeepWEBS/utilsdw/enver.py +0 -78
  25. webscout-3.4/DeepWEBS/utilsdw/logger.py +0 -269
  26. webscout-3.4/webscout/DWEBS.py +0 -197
  27. {webscout-3.4 → webscout-3.5}/LICENSE.md +0 -0
  28. {webscout-3.4 → webscout-3.5}/setup.cfg +0 -0
  29. {webscout-3.4 → webscout-3.5}/webscout/AIauto.py +0 -0
  30. {webscout-3.4 → webscout-3.5}/webscout/AIbase.py +0 -0
  31. {webscout-3.4 → webscout-3.5}/webscout/LLM.py +0 -0
  32. {webscout-3.4 → webscout-3.5}/webscout/Local/__init__.py +0 -0
  33. {webscout-3.4 → webscout-3.5}/webscout/Local/_version.py +0 -0
  34. {webscout-3.4 → webscout-3.5}/webscout/Local/formats.py +0 -0
  35. {webscout-3.4 → webscout-3.5}/webscout/Local/model.py +0 -0
  36. {webscout-3.4 → webscout-3.5}/webscout/Local/rawdog.py +0 -0
  37. {webscout-3.4 → webscout-3.5}/webscout/Local/samplers.py +0 -0
  38. {webscout-3.4 → webscout-3.5}/webscout/Local/thread.py +0 -0
  39. {webscout-3.4 → webscout-3.5}/webscout/Local/utils.py +0 -0
  40. {webscout-3.4 → webscout-3.5}/webscout/Provider/BasedGPT.py +0 -0
  41. {webscout-3.4 → webscout-3.5}/webscout/Provider/Berlin4h.py +0 -0
  42. {webscout-3.4 → webscout-3.5}/webscout/Provider/Blackboxai.py +0 -0
  43. {webscout-3.4 → webscout-3.5}/webscout/Provider/ChatGPTUK.py +0 -0
  44. {webscout-3.4 → webscout-3.5}/webscout/Provider/Cohere.py +0 -0
  45. {webscout-3.4 → webscout-3.5}/webscout/Provider/Deepseek.py +0 -0
  46. {webscout-3.4 → webscout-3.5}/webscout/Provider/Gemini.py +0 -0
  47. {webscout-3.4 → webscout-3.5}/webscout/Provider/Groq.py +0 -0
  48. {webscout-3.4 → webscout-3.5}/webscout/Provider/Koboldai.py +0 -0
  49. {webscout-3.4 → webscout-3.5}/webscout/Provider/Leo.py +0 -0
  50. {webscout-3.4 → webscout-3.5}/webscout/Provider/Llama2.py +0 -0
  51. {webscout-3.4 → webscout-3.5}/webscout/Provider/OpenGPT.py +0 -0
  52. {webscout-3.4 → webscout-3.5}/webscout/Provider/Openai.py +0 -0
  53. {webscout-3.4 → webscout-3.5}/webscout/Provider/Perplexity.py +0 -0
  54. {webscout-3.4 → webscout-3.5}/webscout/Provider/Phind.py +0 -0
  55. {webscout-3.4 → webscout-3.5}/webscout/Provider/Poe.py +0 -0
  56. {webscout-3.4 → webscout-3.5}/webscout/Provider/Reka.py +0 -0
  57. {webscout-3.4 → webscout-3.5}/webscout/Provider/ThinkAnyAI.py +0 -0
  58. {webscout-3.4 → webscout-3.5}/webscout/Provider/Xjai.py +0 -0
  59. {webscout-3.4 → webscout-3.5}/webscout/Provider/Yepchat.py +0 -0
  60. {webscout-3.4 → webscout-3.5}/webscout/Provider/Youchat.py +0 -0
  61. {webscout-3.4 → webscout-3.5}/webscout/__main__.py +0 -0
  62. {webscout-3.4 → webscout-3.5}/webscout/async_providers.py +0 -0
  63. {webscout-3.4 → webscout-3.5}/webscout/cli.py +0 -0
  64. {webscout-3.4 → webscout-3.5}/webscout/exceptions.py +0 -0
  65. {webscout-3.4 → webscout-3.5}/webscout/g4f.py +0 -0
  66. {webscout-3.4 → webscout-3.5}/webscout/models.py +0 -0
  67. {webscout-3.4 → webscout-3.5}/webscout/tempid.py +0 -0
  68. {webscout-3.4 → webscout-3.5}/webscout/transcriber.py +0 -0
  69. {webscout-3.4 → webscout-3.5}/webscout/utils.py +0 -0
  70. {webscout-3.4 → webscout-3.5}/webscout/version.py +0 -0
  71. {webscout-3.4 → webscout-3.5}/webscout/voice.py +0 -0
  72. {webscout-3.4 → webscout-3.5}/webscout/webscout_search.py +0 -0
  73. {webscout-3.4 → webscout-3.5}/webscout/webscout_search_async.py +0 -0
  74. {webscout-3.4 → webscout-3.5}/webscout.egg-info/dependency_links.txt +0 -0
  75. {webscout-3.4 → webscout-3.5}/webscout.egg-info/entry_points.txt +0 -0
  76. {webscout-3.4 → webscout-3.5}/webscout.egg-info/requires.txt +0 -0
@@ -1,7 +1,7 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: webscout
3
- Version: 3.4
4
- Summary: Search for anything using Google, DuckDuckGo, phind.com, Contains AI models, can transcribe yt videos, temporary email and phone number generation, has TTS support, webai (terminal gpt and open interpreter) and offline LLMs
3
+ Version: 3.5
4
+ Summary: Search for anything using Google, DuckDuckGo, brave, qwant, phind.com, Contains AI models, can transcribe yt videos, temporary email and phone number generation, has TTS support, webai (terminal gpt and open interpreter) and offline LLMs
5
5
  Author: OEvortex
6
6
  Author-email: helpingai5@gmail.com
7
7
  License: HelpingAI
@@ -104,14 +104,14 @@ Search for anything using Google, DuckDuckGo, phind.com, Contains AI models, can
104
104
  - [Temp number](#temp-number)
105
105
  - [Tempmail](#tempmail)
106
106
  - [Transcriber](#transcriber)
107
- - [DeepWEBS: Advanced Web Searches](#deepwebs-advanced-web-searches)
108
- - [Activating DeepWEBS](#activating-deepwebs)
109
- - [Point to remember before using `DeepWEBS`](#point-to-remember-before-using-deepwebs)
107
+ - [DWEBS: Advanced Web Searches](#dwebs-advanced-web-searches)
108
+ - [Activating DWEBS](#activating-dwebs)
109
+ - [Point to remember before using `DWEBS`](#point-to-remember-before-using-dwebs)
110
110
  - [Usage Example](#usage-example)
111
111
  - [Text-to-Speech:](#text-to-speech)
112
112
  - [Available TTS Voices:](#available-tts-voices)
113
113
  - [Exceptions](#exceptions)
114
- - [usage of webscout](#usage-of-webscout)
114
+ - [usage of WEBS](#usage-of-webs)
115
115
  - [1. `text()` - text search by DuckDuckGo.com](#1-text---text-search-by-duckduckgocom)
116
116
  - [2. `answers()` - instant answers by DuckDuckGo.com](#2-answers---instant-answers-by-duckduckgocom)
117
117
  - [3. `images()` - image search by DuckDuckGo.com](#3-images---image-search-by-duckduckgocom)
@@ -120,6 +120,7 @@ Search for anything using Google, DuckDuckGo, phind.com, Contains AI models, can
120
120
  - [6. `maps()` - map search by DuckDuckGo.com](#6-maps---map-search-by-duckduckgocom)
121
121
  - [7. `translate()` - translation by DuckDuckGo.com](#7-translate---translation-by-duckduckgocom)
122
122
  - [8. `suggestions()` - suggestions by DuckDuckGo.com](#8-suggestions---suggestions-by-duckduckgocom)
123
+ - [usage of WEBSX -- Another Websearch thing](#usage-of-websx----another-websearch-thing)
123
124
  - [ALL acts](#all-acts)
124
125
  - [Webscout Supported Acts:](#webscout-supported-acts)
125
126
  - [usage of webscout AI](#usage-of-webscout-ai)
@@ -141,12 +142,12 @@ Search for anything using Google, DuckDuckGo, phind.com, Contains AI models, can
141
142
  - [15. `poe`- chat with poe](#15-poe--chat-with-poe)
142
143
  - [16. `BasedGPT` - chat with GPT](#16-basedgpt---chat-with-gpt)
143
144
  - [17. `DeepSeek` -chat with deepseek](#17-deepseek--chat-with-deepseek)
145
+ - [18. Deepinfra](#18-deepinfra)
146
+ - [19. Deepinfra - VLM](#19-deepinfra---vlm)
144
147
  - [`LLM`](#llm)
145
148
  - [`Local-LLM` webscout can now run GGUF models](#local-llm-webscout-can-now-run-gguf-models)
146
- - [`Function-calling-local-llm`](#function-calling-local-llm)
147
149
  - [`Local-rawdog`](#local-rawdog)
148
150
  - [`LLM` with internet](#llm-with-internet)
149
- - [LLM with deepwebs](#llm-with-deepwebs)
150
151
  - [`Webai` - terminal gpt and a open interpeter](#webai---terminal-gpt-and-a-open-interpeter)
151
152
 
152
153
  ## Install
@@ -392,68 +393,76 @@ def main():
392
393
  if __name__ == "__main__":
393
394
  main()
394
395
  ```
395
- ## DeepWEBS: Advanced Web Searches
396
396
 
397
- `DeepWEBS` is a standalone feature designed to perform advanced web searches with enhanced capabilities. It is particularly powerful in extracting relevant information directly from webpages and Search engine, focusing exclusively on text (web) searches. Unlike the `WEBS` , which provides a broader range of search functionalities, `DeepWEBS` is specifically tailored for in-depth web searches.
397
+ ## DWEBS: Advanced Web Searches
398
398
 
399
- ### Activating DeepWEBS
399
+ `DWEBS` is a standalone feature designed to perform advanced web searches with enhanced capabilities. It is particularly powerful in extracting relevant information directly from webpages and Search engine, focusing exclusively on text (web) searches. Unlike the `WEBS` , which provides a broader range of search functionalities, `DWEBS` is specifically tailored for in-depth web searches.
400
400
 
401
- To utilize the `DeepWEBS` feature, you must first create an instance of the `DeepWEBS` . This is designed to be used independently of the `WEBS` , offering a focused approach to web searches.
401
+ ### Activating DWEBS
402
402
 
403
- ### Point to remember before using `DeepWEBS`
404
- As `DeepWEBS` is designed to extract relevant information directly from webpages and Search engine, It extracts html from webpages and saves them to folder named files in `DeepWEBS` that can be found at `C:\Users\Username\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\LocalCache\local-packages\Python311\site-packages\DeepWEBS`
403
+ To utilize the `DWEBS` feature, you must first create an instance of the `DWEBS` . This is designed to be used independently of the `WEBS` , offering a focused approach to web searches.
404
+
405
+ ### Point to remember before using `DWEBS`
406
+ As `DWEBS` is designed to extract relevant information directly from webpages and Search engine, It extracts html from webpages and saves them to folder named files
405
407
 
406
408
  ### Usage Example
407
409
 
408
- Here's a basic example of how to use the `DeepWEBS` :
410
+ Here's a basic example of how to use the `DWEBS` :
409
411
  ```python
410
- from webscout import DeepWEBS
411
-
412
- def perform_web_search(query):
413
- # Initialize the DeepWEBS class
414
- D = DeepWEBS()
415
-
416
- # Set up the search parameters
417
- search_params = D.DeepSearch(
418
- queries=[query], # Query to search
419
- result_num=5, # Number of search results
420
- safe=True, # Enable SafeSearch
421
- types=["web"], # Search type: web
422
- extract_webpage=True, # True for extracting webpages
423
- overwrite_query_html=False,
424
- overwrite_webpage_html=False,
412
+ from webscout import DWEBS
413
+
414
+ def finalextractor(extract_webpage=True):
415
+ print('---------------Here Running for GoogleSearch--------------------')
416
+ # 1. Google Search
417
+ google_searcher = DWEBS.GoogleSearcher()
418
+ query_html_path = google_searcher.search(
419
+ query='HelpingAI-9B',
420
+ result_num=10,
421
+ safe=False,
422
+ overwrite=False,
425
423
  )
426
-
427
- # Execute the search and retrieve results
428
- results = D.queries_to_search_results(search_params)
429
-
430
- return results
431
424
 
432
- def print_search_results(results):
433
- """
434
- Print the search results.
435
-
436
- Args:
437
- - search_results (list): List of search results to print.
438
- """
439
- if results:
440
- for index, result in enumerate(results, start=1):
441
- print(f"Result {index}: {result}")
425
+ # 2. Search Result Extraction
426
+ query_results_extractor = DWEBS.QueryResultsExtractor()
427
+ query_search_results = query_results_extractor.extract(query_html_path)
428
+
429
+ if extract_webpage:
430
+ print('---------------Batch Webpage Fetcher--------------------')
431
+ # 3. Batch Webpage Fetching
432
+ batch_webpage_fetcher = DWEBS.BatchWebpageFetcher()
433
+ urls = [query_extracts['url'] for query_extracts in query_search_results['query_results']]
434
+ url_and_html_path_list = batch_webpage_fetcher.fetch(
435
+ urls,
436
+ overwrite=False,
437
+ output_parent=query_search_results["query"],
438
+ )
439
+
440
+ print('---------------Batch Webpage Extractor--------------------')
441
+ # 4. Batch Webpage Content Extraction
442
+ batch_webpage_content_extractor = DWEBS.BatchWebpageContentExtractor()
443
+ webpageurls = [url_and_html['html_path'] for url_and_html in url_and_html_path_list]
444
+ html_path_and_extracted_content_list = batch_webpage_content_extractor.extract(webpageurls)
445
+
446
+ # 5. Printing Extracted Content
447
+ for html_path_and_extracted_content in html_path_and_extracted_content_list:
448
+ print(html_path_and_extracted_content['extracted_content'])
442
449
  else:
443
- print("No search results found.")
450
+ # Print only search results if extract_webpage is False
451
+ for result in query_search_results['query_results']:
452
+ DWEBS.logger.mesg(
453
+ f"{result['title']}\n"
454
+ f" - {result['site']}\n"
455
+ f" - {result['url']}\n"
456
+ f" - {result['abstract']}\n"
457
+ f"\n"
458
+ )
444
459
 
445
- def main():
446
- # Prompt the user for a search query
447
- query = input("Enter your search query: ")
448
-
449
- # Perform the web search
450
- results = perform_web_search(query)
451
-
452
- # Print the search results
453
- print_search_results(results)
460
+ DWEBS.logger.success(f"- {len(query_search_results['query_results'])} query results")
461
+ DWEBS.logger.success(f"- {len(query_search_results['related_questions'])} related questions")
454
462
 
455
- if __name__ == "__main__":
456
- main()
463
+ # Example usage:
464
+ finalextractor(extract_webpage=True) # Extract webpage content
465
+ finalextractor(extract_webpage=False) # Skip webpage extraction and print search results only
457
466
 
458
467
  ```
459
468
  ## Text-to-Speech:
@@ -532,7 +541,7 @@ This ensures proper resource management and cleanup, as the context manager will
532
541
  Exceptions:
533
542
  - `WebscoutE`: Raised when there is a generic exception during the API request.
534
543
 
535
- ## usage of webscout
544
+ ## usage of WEBS
536
545
 
537
546
  ### 1. `text()` - text search by DuckDuckGo.com
538
547
 
@@ -682,6 +691,36 @@ with WEBS() as WEBS:
682
691
  for r in WEBS.suggestions("fly"):
683
692
  print(r)
684
693
  ```
694
+
695
+
696
+ ## usage of WEBSX -- Another Websearch thing
697
+ ```python
698
+ from webscout import WEBSX
699
+
700
+ def main():
701
+ # Initialize the WEBSX client
702
+ search = WEBSX(
703
+ k=10,
704
+ )
705
+
706
+ # Example using `run` method - Get a summary
707
+ query = "What is the capital of France?"
708
+ answer = search.run(query)
709
+ print(f"Answer: {answer}\n")
710
+
711
+ # Example using `results` method - Get detailed results with metadata
712
+ query = "What is the capital of France?"
713
+ results = search.results(query, num_results=3)
714
+ print("Search Results:")
715
+ for result in results:
716
+ print(f"Title: {result['title']}")
717
+ print(f"Snippet: {result['snippet']}")
718
+ print(f"Link: {result['link']}\n")
719
+ print(f'Engines: {result["engines"]}')
720
+
721
+ if __name__ == "__main__":
722
+ main()
723
+ ```
685
724
  ## ALL acts
686
725
  <details>
687
726
  <summary>expand</summary>
@@ -1253,6 +1292,57 @@ while True:
1253
1292
  r = ai.chat(prompt)
1254
1293
  print(r)
1255
1294
  ```
1295
+ ### 18. Deepinfra
1296
+ ```python
1297
+ from webscout import DeepInfra
1298
+
1299
+ ai = DeepInfra(
1300
+ is_conversation=True,
1301
+ model= "Qwen/Qwen2-72B-Instruct",
1302
+ max_tokens=800,
1303
+ timeout=30,
1304
+ intro=None,
1305
+ filepath=None,
1306
+ update_file=True,
1307
+ proxies={},
1308
+ history_offset=10250,
1309
+ act=None,
1310
+ )
1311
+
1312
+ prompt = "what is meaning of life"
1313
+
1314
+ response = ai.ask(prompt)
1315
+
1316
+ # Extract and print the message from the response
1317
+ message = ai.get_message(response)
1318
+ print(message)
1319
+ ```
1320
+
1321
+ ### 19. Deepinfra - VLM
1322
+ ```python
1323
+ from webscout import DeepInfra
1324
+
1325
+ ai = DeepInfra(
1326
+ is_conversation=True,
1327
+ model= "Qwen/Qwen2-72B-Instruct",
1328
+ max_tokens=800,
1329
+ timeout=30,
1330
+ intro=None,
1331
+ filepath=None,
1332
+ update_file=True,
1333
+ proxies={},
1334
+ history_offset=10250,
1335
+ act=None,
1336
+ )
1337
+
1338
+ prompt = "what is meaning of life"
1339
+
1340
+ response = ai.ask(prompt)
1341
+
1342
+ # Extract and print the message from the response
1343
+ message = ai.get_message(response)
1344
+ print(message)
1345
+ ```
1256
1346
  ### `LLM`
1257
1347
  ```python
1258
1348
  from webscout.LLM import LLM
@@ -1300,78 +1390,7 @@ thread = Thread(model, formats.phi3)
1300
1390
  # 4. Start interacting with the model
1301
1391
  thread.interact()
1302
1392
  ```
1303
- ### `Function-calling-local-llm`
1304
- ```python
1305
- from webscout.Local import Model, Thread, formats
1306
- from webscout import DeepWEBS
1307
- from webscout.Local.utils import download_model
1308
- from webscout.Local.model import Model
1309
- from webscout.Local.thread import Thread
1310
- from webscout.Local import formats
1311
- from webscout.Local.samplers import SamplerSettings
1312
- def deepwebs_search(query, max_results=5):
1313
- """Performs a web search using DeepWEBS and returns results as JSON."""
1314
- deepwebs = DeepWEBS()
1315
- search_config = DeepWEBS.DeepSearch(
1316
- queries=[query],
1317
- max_results=max_results,
1318
- extract_webpage=False,
1319
- safe=False,
1320
- types=["web"],
1321
- overwrite_query_html=True,
1322
- overwrite_webpage_html=True,
1323
- )
1324
- search_results = deepwebs.queries_to_search_results(search_config)
1325
- formatted_results = []
1326
- for result in search_results[0]: # Assuming only one query
1327
- formatted_results.append(f"Title: {result['title']}\nURL: {result['url']}\n")
1328
- return "\n".join(formatted_results)
1329
-
1330
- # Load your model
1331
- repo_id = "OEvortex/HelpingAI-9B"
1332
- filename = "helpingai-9b.Q4_0.gguf"
1333
- model_path = download_model(repo_id, filename, token='')
1334
1393
 
1335
- # 2. Load the model
1336
- model = Model(model_path, n_gpu_layers=10)
1337
-
1338
- # Create a Thread
1339
- system_prompt = "You are a helpful AI assistant. Respond to user queries concisely. If a user asks for information that requires a web search, use the `deepwebs_search` tool. Do not call the tool if it is not necessary."
1340
- sampler = SamplerSettings(temp=0.7, top_p=0.9) # Adjust these values as needed
1341
- # 4. Create a custom chatml format with your system prompt
1342
- custom_chatml = formats.chatml.copy()
1343
- custom_chatml['system_content'] = system_prompt
1344
- thread = Thread(model, custom_chatml, sampler=sampler)
1345
- # Add the deepwebs_search tool
1346
- thread.add_tool({
1347
- "type": "function",
1348
- "function": {
1349
- "name": "deepwebs_search",
1350
- "description": "Performs a web search using DeepWEBS and returns the title and URLs of the results.",
1351
- "execute": deepwebs_search,
1352
- "parameters": {
1353
- "type": "object",
1354
- "properties": {
1355
- "query": {
1356
- "type": "string",
1357
- "description": "The query to search on the web",
1358
- },
1359
- "max_results": {
1360
- "type": "integer",
1361
- "description": "Maximum number of search results (default: 5)",
1362
- },
1363
- },
1364
- "required": ["query"],
1365
- },
1366
- },
1367
- })
1368
-
1369
- # Start interacting with the model
1370
- while True:
1371
- user_input = input("You: ")
1372
- response = thread.send(user_input)
1373
- print("Bot: ", response)
1374
- ```
1375
1394
  ### `Local-rawdog`
1376
1395
  ```python
1377
1396
  import webscout.Local as ws
@@ -1532,94 +1551,7 @@ if __name__ == "__main__":
1532
1551
  else:
1533
1552
  print("No response")
1534
1553
  ```
1535
- ### LLM with deepwebs
1536
- ```python
1537
- from __future__ import annotations
1538
- from typing import List, Optional
1539
- from webscout.LLM import LLM
1540
- from webscout import DeepWEBS
1541
- import warnings
1542
-
1543
- system_message: str = (
1544
- "As an AI assistant, I have been designed with advanced capabilities, including real-time access to online resources. This enables me to enrich our conversations and provide you with informed and accurate responses, drawing from a vast array of information. With each interaction, my goal is to create a seamless and meaningful connection, offering insights and sharing relevant content."
1545
- "My directives emphasize the importance of respect, impartiality, and intellectual integrity. I am here to provide unbiased responses, ensuring an ethical and respectful exchange. I will respect your privacy and refrain from sharing any personal information that may be obtained during our conversations or through web searches, only utilizing web search functionality when necessary to provide the most accurate and up-to-date information."
1546
- "Together, let's explore a diverse range of topics, creating an enjoyable and informative experience, all while maintaining the highest standards of privacy and respect"
1547
- )
1548
-
1549
- # Ignore the specific UserWarning
1550
- warnings.filterwarnings("ignore", category=UserWarning, module="curl_cffio", lineno=205)
1551
-
1552
- LLM = LLM(model="mistralai/Mixtral-8x22B-Instruct-v0.1", system_message=system_message)
1553
-
1554
- def perform_web_search(query):
1555
- # Initialize the DeepWEBS class
1556
- D = DeepWEBS()
1557
-
1558
- # Set up the search parameters
1559
- search_params = D.DeepSearch(
1560
- queries=[query], # Query to search
1561
- result_num=10, # Number of search results
1562
- safe=True, # Enable SafeSearch
1563
- types=["web"], # Search type: web
1564
- extract_webpage=True, # True for extracting webpages
1565
- overwrite_query_html=True,
1566
- overwrite_webpage_html=True,
1567
- )
1568
1554
 
1569
- # Execute the search and retrieve results
1570
- results = D.queries_to_search_results(search_params)
1571
- return results
1572
-
1573
- def chat(user_input: str, result_num: int = 10) -> Optional[str]:
1574
- """
1575
- Chat function to perform a web search based on the user input and generate a response using the LLM model.
1576
-
1577
- Parameters
1578
- ----------
1579
- user_input : str
1580
- The user input to be used for the web search
1581
- max_results : int, optional
1582
- The maximum number of search results to include in the response, by default 10
1583
-
1584
- Returns
1585
- -------
1586
- Optional[str]
1587
- The response generated by the LLM model, or None if there is no response
1588
- """
1589
- # Perform a web search based on the user input
1590
- search_results = perform_web_search(user_input)
1591
-
1592
- # Extract URLs from search results
1593
- url_results = []
1594
- for result in search_results[0]['query_results']:
1595
- url_results.append(f"{result['title']} ({result['site']}): {result['url']}")
1596
-
1597
- # Format search results
1598
- formatted_results = "\n".join(url_results)
1599
-
1600
- # Define the messages to be sent, including the user input, search results, and system message
1601
- messages = [
1602
- {"role": "user", "content": f"User question is:\n{user_input}\nwebsearch results are:\n{formatted_results}"},
1603
- ]
1604
-
1605
- # Use the chat method to get the response
1606
- response = LLM.chat(messages)
1607
- return response
1608
-
1609
- if __name__ == "__main__":
1610
- while True:
1611
- # Get the user input
1612
- user_input = input("User: ")
1613
-
1614
- # Perform a web search based on the user input
1615
- response = chat(user_input)
1616
-
1617
- # Print the response
1618
- if response:
1619
- print("AI:", response)
1620
- else:
1621
- print("No response")
1622
- ```
1623
1555
  ## `Webai` - terminal gpt and a open interpeter
1624
1556
 
1625
1557
  ```python