webscout 7.8__py3-none-any.whl → 8.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of webscout might be problematic. Click here for more details.
- webscout/Bard.py +5 -25
- webscout/DWEBS.py +476 -476
- webscout/Extra/GitToolkit/__init__.py +10 -0
- webscout/Extra/GitToolkit/gitapi/__init__.py +12 -0
- webscout/Extra/GitToolkit/gitapi/repository.py +195 -0
- webscout/Extra/GitToolkit/gitapi/user.py +96 -0
- webscout/Extra/GitToolkit/gitapi/utils.py +62 -0
- webscout/Extra/YTToolkit/ytapi/video.py +232 -103
- webscout/Extra/__init__.py +2 -0
- webscout/Extra/autocoder/__init__.py +1 -1
- webscout/Extra/autocoder/{rawdog.py → autocoder.py} +849 -849
- webscout/Extra/tempmail/__init__.py +26 -0
- webscout/Extra/tempmail/async_utils.py +141 -0
- webscout/Extra/tempmail/base.py +156 -0
- webscout/Extra/tempmail/cli.py +187 -0
- webscout/Extra/tempmail/mail_tm.py +361 -0
- webscout/Extra/tempmail/temp_mail_io.py +292 -0
- webscout/Provider/AISEARCH/__init__.py +5 -1
- webscout/Provider/AISEARCH/hika_search.py +194 -0
- webscout/Provider/AISEARCH/monica_search.py +246 -0
- webscout/Provider/AISEARCH/scira_search.py +320 -0
- webscout/Provider/AISEARCH/webpilotai_search.py +281 -0
- webscout/Provider/AllenAI.py +255 -122
- webscout/Provider/DeepSeek.py +1 -2
- webscout/Provider/Deepinfra.py +296 -286
- webscout/Provider/ElectronHub.py +709 -716
- webscout/Provider/ExaAI.py +261 -0
- webscout/Provider/ExaChat.py +28 -6
- webscout/Provider/Gemini.py +167 -165
- webscout/Provider/GithubChat.py +2 -1
- webscout/Provider/Groq.py +38 -24
- webscout/Provider/LambdaChat.py +2 -1
- webscout/Provider/Netwrck.py +3 -2
- webscout/Provider/OpenGPT.py +199 -0
- webscout/Provider/PI.py +39 -24
- webscout/Provider/TextPollinationsAI.py +232 -230
- webscout/Provider/Youchat.py +326 -296
- webscout/Provider/__init__.py +10 -4
- webscout/Provider/ai4chat.py +58 -56
- webscout/Provider/akashgpt.py +34 -22
- webscout/Provider/copilot.py +427 -427
- webscout/Provider/freeaichat.py +9 -2
- webscout/Provider/labyrinth.py +121 -20
- webscout/Provider/llmchatco.py +306 -0
- webscout/Provider/scira_chat.py +271 -0
- webscout/Provider/typefully.py +280 -0
- webscout/Provider/uncovr.py +312 -299
- webscout/Provider/yep.py +64 -12
- webscout/__init__.py +38 -36
- webscout/cli.py +293 -293
- webscout/conversation.py +350 -17
- webscout/litprinter/__init__.py +59 -667
- webscout/optimizers.py +419 -419
- webscout/update_checker.py +14 -12
- webscout/version.py +1 -1
- webscout/webscout_search.py +1346 -1282
- webscout/webscout_search_async.py +877 -813
- {webscout-7.8.dist-info → webscout-8.0.dist-info}/METADATA +44 -39
- {webscout-7.8.dist-info → webscout-8.0.dist-info}/RECORD +63 -46
- webscout/Provider/DARKAI.py +0 -225
- webscout/Provider/EDITEE.py +0 -192
- webscout/litprinter/colors.py +0 -54
- {webscout-7.8.dist-info → webscout-8.0.dist-info}/LICENSE.md +0 -0
- {webscout-7.8.dist-info → webscout-8.0.dist-info}/WHEEL +0 -0
- {webscout-7.8.dist-info → webscout-8.0.dist-info}/entry_points.txt +0 -0
- {webscout-7.8.dist-info → webscout-8.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,194 @@
|
|
|
1
|
+
import requests
|
|
2
|
+
import hashlib
|
|
3
|
+
import json
|
|
4
|
+
import random
|
|
5
|
+
import time
|
|
6
|
+
import re
|
|
7
|
+
from typing import Dict, Optional, Generator, Union, Any
|
|
8
|
+
|
|
9
|
+
from webscout.AIbase import AISearch
|
|
10
|
+
from webscout import exceptions
|
|
11
|
+
from webscout import LitAgent
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class Response:
    """Lightweight wrapper around a Hika API text response.

    Printing or stringifying an instance yields the underlying text.
    """

    def __init__(self, text: str):
        # Raw text content of the response chunk.
        self.text = text

    def __str__(self):
        return self.text

    def __repr__(self):
        return self.text
|
|
26
|
+
class Hika(AISearch):
    """Client for the Hika AI search API (https://hika.fyi).

    Supports both streaming and non-streaming AI-generated search
    responses over the ``kbase/web`` endpoint.

    Args:
        timeout (int, optional): Request timeout in seconds. Defaults to 60.
        proxies (dict, optional): Proxy configuration for requests. Defaults to None.
        language (str, optional): Language code sent with each query. Defaults to "en".
    """

    def __init__(
        self,
        timeout: int = 60,
        proxies: Optional[dict] = None,
        language: str = "en",
    ):
        self.session = requests.Session()
        self.base_url = "https://api.hika.fyi/api/"
        self.endpoint = "kbase/web"
        self.timeout = timeout
        self.language = language
        # Holds the most recent non-stream Response object.
        self.last_response = {}

        self.headers = {
            "Content-Type": "application/json",
            "Origin": "https://hika.fyi",
            "Referer": "https://hika.fyi/",
            "User-Agent": LitAgent().random()
        }

        self.session.headers.update(self.headers)
        self.proxies = proxies

    def generate_id(self):
        """Generate a unique ID and matching hash for a request.

        Returns:
            dict: ``{"uid": ..., "hashId": ...}`` used for the ``x-uid`` and
            ``x-hika`` request headers.
        """
        uid = ''.join(random.choice('0123456789abcdefghijklmnopqrstuvwxyz') for _ in range(10))
        uid += hex(int(time.time()))[2:]
        # The API appears to validate sha256("#<uid>*") against the x-hika header.
        hash_id = hashlib.sha256(f"#{uid}*".encode()).hexdigest()
        return {"uid": uid, "hashId": hash_id}

    def clean_text(self, text):
        """Clean all XML tags and control markers from text.

        Args:
            text (str): The text to clean

        Returns:
            str: Cleaned text
        """
        if not text:
            return ""

        # BUG FIX: the original used text.lstrip("<r>"), which strips any
        # leading run of the characters '<', 'r' and '>' — e.g. a chunk
        # starting with "results" would lose its leading "r".  Strip the
        # literal "<r>" prefix only.
        if text.startswith("<r>"):
            text = text[len("<r>"):]

        # Remove any remaining XML tags
        text = re.sub(r'<[^>]+>', '', text)

        # Remove [DONE] marker at the end
        text = re.sub(r'\[DONE\]\s*$', '', text)

        return text

    def search(
        self,
        prompt: str,
        stream: bool = False,
        raw: bool = False,
    ) -> Union[Response, Generator[Union[Dict[str, str], Response], None, None]]:
        """Search using the Hika API and get AI-generated responses.

        Args:
            prompt (str): The search query (must be at least 2 characters).
            stream (bool, optional): If True, yield chunks as they arrive.
                Defaults to False.
            raw (bool, optional): If True, yield ``{"text": ...}`` dicts
                instead of Response objects. Defaults to False.

        Returns:
            Response when ``stream=False`` and ``raw=False``; otherwise a
            generator of chunks.

        Raises:
            exceptions.APIConnectionError: If the query is too short or the
                request fails.
        """
        if not prompt or len(prompt) < 2:
            raise exceptions.APIConnectionError("Search query must be at least 2 characters long")

        # Per-request identification headers.
        id_data = self.generate_id()
        uid, hash_id = id_data["uid"], id_data["hashId"]

        request_headers = {
            **self.headers,
            "x-hika": hash_id,
            "x-uid": uid
        }

        payload = {
            "keyword": prompt,
            "language": self.language,
            "stream": True  # Always request streaming for consistent handling
        }

        def for_stream():
            # Stream Server-Sent Events ("data: {...}") from the endpoint,
            # yielding cleaned chunks as they arrive.
            try:
                with self.session.post(
                    f"{self.base_url}{self.endpoint}",
                    json=payload,
                    headers=request_headers,
                    stream=True,
                    timeout=self.timeout,
                    proxies=self.proxies
                ) as response:
                    if not response.ok:
                        raise exceptions.APIConnectionError(
                            f"Failed to generate response - ({response.status_code}, {response.reason}) - {response.text}"
                        )

                    for line in response.iter_lines(decode_unicode=True):
                        if line and line.startswith("data: "):
                            try:
                                data = json.loads(line[6:])
                                if "chunk" in data:
                                    chunk = data["chunk"]

                                    # Skip [DONE] markers completely
                                    if "[DONE]" in chunk:
                                        continue

                                    clean_chunk = self.clean_text(chunk)

                                    if clean_chunk:  # Only yield if there's content after cleaning
                                        if raw:
                                            yield {"text": clean_chunk}
                                        else:
                                            yield Response(clean_chunk)
                            except json.JSONDecodeError:
                                # Ignore malformed SSE lines rather than aborting the stream.
                                pass

            except requests.exceptions.RequestException as e:
                raise exceptions.APIConnectionError(f"Request failed: {e}")

        def for_non_stream():
            # BUG FIX: the original version contained a `yield` statement,
            # which made this a generator function — so search(prompt) with
            # stream=False, raw=False returned an (empty) generator instead
            # of the documented Response object.
            if raw:
                # Raw non-stream mode keeps its historical behavior of
                # returning the chunk generator unchanged.
                return for_stream()
            full_response = "".join(str(chunk) for chunk in for_stream())
            # Clean up the response text one final time.
            cleaned_response = self.format_response(full_response)
            self.last_response = Response(cleaned_response)
            return self.last_response

        return for_stream() if stream else for_non_stream()

    def format_response(self, text: str) -> str:
        """Format the response text for better readability.

        Args:
            text (str): The raw response text

        Returns:
            str: Formatted text
        """
        if not text:
            return ""

        # First clean any tags or markers
        cleaned_text = self.clean_text(text)

        # Collapse runs of blank lines into a single blank line
        cleaned_text = re.sub(r'\n\s*\n', '\n\n', cleaned_text)

        # Remove any leading/trailing whitespace
        cleaned_text = cleaned_text.strip()

        return cleaned_text
|
|
184
|
+
if __name__ == "__main__":
    from rich import print

    # Simple interactive demo: stream the answer for a single query.
    searcher = Hika()
    try:
        for piece in searcher.search(input(">>> "), stream=True, raw=False):
            print(piece, end="", flush=True)
    except KeyboardInterrupt:
        print("\nSearch interrupted by user.")
    except Exception as e:
        print(f"\nError: {e}")
|
|
@@ -0,0 +1,246 @@
|
|
|
1
|
+
import requests
|
|
2
|
+
import json
|
|
3
|
+
import re
|
|
4
|
+
import uuid
|
|
5
|
+
from typing import Dict, Optional, Generator, Union, Any
|
|
6
|
+
|
|
7
|
+
from webscout.AIbase import AISearch
|
|
8
|
+
from webscout import exceptions
|
|
9
|
+
from webscout.litagent import LitAgent
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class Response:
    """A wrapper class for Monica API responses.

    Instances convert to their text content when printed or passed to
    ``str()``.

    Attributes:
        text (str): The text content of the response

    Example:
        >>> response = Response("Hello, world!")
        >>> print(response)
        Hello, world!
        >>> str(response)
        'Hello, world!'
    """

    def __init__(self, text: str):
        # Text payload carried by this response.
        self.text = text

    def __str__(self):
        return self.text

    def __repr__(self):
        return self.text
|
|
38
|
+
class Monica(AISearch):
|
|
39
|
+
"""A class to interact with the Monica AI search API.
|
|
40
|
+
|
|
41
|
+
Monica provides a powerful search interface that returns AI-generated responses
|
|
42
|
+
based on web content. It supports both streaming and non-streaming responses.
|
|
43
|
+
|
|
44
|
+
Basic Usage:
|
|
45
|
+
>>> from webscout import Monica
|
|
46
|
+
>>> ai = Monica()
|
|
47
|
+
>>> # Non-streaming example
|
|
48
|
+
>>> response = ai.search("What is Python?")
|
|
49
|
+
>>> print(response)
|
|
50
|
+
Python is a high-level programming language...
|
|
51
|
+
|
|
52
|
+
>>> # Streaming example
|
|
53
|
+
>>> for chunk in ai.search("Tell me about AI", stream=True):
|
|
54
|
+
... print(chunk, end="", flush=True)
|
|
55
|
+
Artificial Intelligence is...
|
|
56
|
+
|
|
57
|
+
>>> # Raw response format
|
|
58
|
+
>>> for chunk in ai.search("Hello", stream=True, raw=True):
|
|
59
|
+
... print(chunk)
|
|
60
|
+
{'text': 'Hello'}
|
|
61
|
+
{'text': ' there!'}
|
|
62
|
+
|
|
63
|
+
Args:
|
|
64
|
+
timeout (int, optional): Request timeout in seconds. Defaults to 60.
|
|
65
|
+
proxies (dict, optional): Proxy configuration for requests. Defaults to None.
|
|
66
|
+
"""
|
|
67
|
+
|
|
68
|
+
def __init__(
|
|
69
|
+
self,
|
|
70
|
+
timeout: int = 60,
|
|
71
|
+
proxies: Optional[dict] = None,
|
|
72
|
+
):
|
|
73
|
+
"""Initialize the Monica API client.
|
|
74
|
+
|
|
75
|
+
Args:
|
|
76
|
+
timeout (int, optional): Request timeout in seconds. Defaults to 60.
|
|
77
|
+
proxies (dict, optional): Proxy configuration for requests. Defaults to None.
|
|
78
|
+
"""
|
|
79
|
+
self.session = requests.Session()
|
|
80
|
+
self.api_endpoint = "https://monica.so/api/search_v1/search"
|
|
81
|
+
self.stream_chunk_size = 64
|
|
82
|
+
self.timeout = timeout
|
|
83
|
+
self.last_response = {}
|
|
84
|
+
self.client_id = str(uuid.uuid4())
|
|
85
|
+
self.session_id = ""
|
|
86
|
+
|
|
87
|
+
self.headers = {
|
|
88
|
+
"accept": "*/*",
|
|
89
|
+
"accept-encoding": "gzip, deflate, br, zstd",
|
|
90
|
+
"accept-language": "en-US,en;q=0.9",
|
|
91
|
+
"content-type": "application/json",
|
|
92
|
+
"dnt": "1",
|
|
93
|
+
"origin": "https://monica.so",
|
|
94
|
+
"referer": "https://monica.so/answers",
|
|
95
|
+
"sec-ch-ua": '"Microsoft Edge";v="135", "Not-A.Brand";v="8", "Chromium";v="135"',
|
|
96
|
+
"sec-ch-ua-mobile": "?0",
|
|
97
|
+
"sec-ch-ua-platform": '"Windows"',
|
|
98
|
+
"sec-fetch-dest": "empty",
|
|
99
|
+
"sec-fetch-mode": "cors",
|
|
100
|
+
"sec-fetch-site": "same-origin",
|
|
101
|
+
"sec-gpc": "1",
|
|
102
|
+
"user-agent": LitAgent().random(),
|
|
103
|
+
"x-client-id": self.client_id,
|
|
104
|
+
"x-client-locale": "en",
|
|
105
|
+
"x-client-type": "web",
|
|
106
|
+
"x-client-version": "5.4.3",
|
|
107
|
+
"x-from-channel": "NA",
|
|
108
|
+
"x-product-name": "Monica-Search",
|
|
109
|
+
"x-time-zone": "Asia/Calcutta;-330"
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
self.cookies = {
|
|
113
|
+
"monica_home_theme": "auto",
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
self.session.headers.update(self.headers)
|
|
117
|
+
self.proxies = proxies
|
|
118
|
+
|
|
119
|
+
def search(
|
|
120
|
+
self,
|
|
121
|
+
prompt: str,
|
|
122
|
+
stream: bool = False,
|
|
123
|
+
raw: bool = False,
|
|
124
|
+
) -> Union[Response, Generator[Union[Dict[str, str], Response], None, None]]:
|
|
125
|
+
"""Search using the Monica API and get AI-generated responses.
|
|
126
|
+
|
|
127
|
+
This method sends a search query to Monica and returns the AI-generated response.
|
|
128
|
+
It supports both streaming and non-streaming modes, as well as raw response format.
|
|
129
|
+
|
|
130
|
+
Args:
|
|
131
|
+
prompt (str): The search query or prompt to send to the API.
|
|
132
|
+
stream (bool, optional): If True, yields response chunks as they arrive.
|
|
133
|
+
If False, returns complete response. Defaults to False.
|
|
134
|
+
raw (bool, optional): If True, returns raw response dictionaries with 'text' key.
|
|
135
|
+
If False, returns Response objects that convert to text automatically.
|
|
136
|
+
Defaults to False.
|
|
137
|
+
|
|
138
|
+
Returns:
|
|
139
|
+
Union[Response, Generator[Union[Dict[str, str], Response], None, None]]:
|
|
140
|
+
- If stream=False: Returns complete response as Response object
|
|
141
|
+
- If stream=True: Yields response chunks as either Dict or Response objects
|
|
142
|
+
|
|
143
|
+
Raises:
|
|
144
|
+
APIConnectionError: If the API request fails
|
|
145
|
+
"""
|
|
146
|
+
task_id = str(uuid.uuid4())
|
|
147
|
+
payload = {
|
|
148
|
+
"pro": False,
|
|
149
|
+
"query": prompt,
|
|
150
|
+
"round": 1,
|
|
151
|
+
"session_id": self.session_id,
|
|
152
|
+
"language": "auto",
|
|
153
|
+
"task_id": task_id
|
|
154
|
+
}
|
|
155
|
+
|
|
156
|
+
def for_stream():
|
|
157
|
+
try:
|
|
158
|
+
with self.session.post(
|
|
159
|
+
self.api_endpoint,
|
|
160
|
+
json=payload,
|
|
161
|
+
stream=True,
|
|
162
|
+
cookies=self.cookies,
|
|
163
|
+
timeout=self.timeout,
|
|
164
|
+
proxies=self.proxies
|
|
165
|
+
) as response:
|
|
166
|
+
if not response.ok:
|
|
167
|
+
raise exceptions.APIConnectionError(
|
|
168
|
+
f"Failed to generate response - ({response.status_code}, {response.reason}) - {response.text}"
|
|
169
|
+
)
|
|
170
|
+
|
|
171
|
+
# Process the Server-Sent Events (SSE) stream
|
|
172
|
+
for line in response.iter_lines(decode_unicode=True):
|
|
173
|
+
if line and line.startswith("data: "):
|
|
174
|
+
try:
|
|
175
|
+
data = json.loads(line[6:]) # Remove 'data: ' prefix
|
|
176
|
+
|
|
177
|
+
# Save session_id for future requests if present
|
|
178
|
+
if "session_id" in data and data["session_id"]:
|
|
179
|
+
self.session_id = data["session_id"]
|
|
180
|
+
|
|
181
|
+
# Only process chunks with text content
|
|
182
|
+
if "text" in data and data["text"]:
|
|
183
|
+
text_chunk = data["text"]
|
|
184
|
+
|
|
185
|
+
if raw:
|
|
186
|
+
yield {"text": text_chunk}
|
|
187
|
+
else:
|
|
188
|
+
yield Response(text_chunk)
|
|
189
|
+
|
|
190
|
+
# Check if stream is finished
|
|
191
|
+
if "finished" in data and data["finished"]:
|
|
192
|
+
break
|
|
193
|
+
|
|
194
|
+
except json.JSONDecodeError:
|
|
195
|
+
continue
|
|
196
|
+
|
|
197
|
+
except requests.exceptions.RequestException as e:
|
|
198
|
+
raise exceptions.APIConnectionError(f"Request failed: {e}")
|
|
199
|
+
|
|
200
|
+
def for_non_stream():
|
|
201
|
+
full_response = ""
|
|
202
|
+
search_results = []
|
|
203
|
+
|
|
204
|
+
for chunk in for_stream():
|
|
205
|
+
if raw:
|
|
206
|
+
yield chunk
|
|
207
|
+
else:
|
|
208
|
+
full_response += str(chunk)
|
|
209
|
+
|
|
210
|
+
if not raw:
|
|
211
|
+
# Process the full response to clean up formatting
|
|
212
|
+
formatted_response = self.format_response(full_response)
|
|
213
|
+
self.last_response = Response(formatted_response)
|
|
214
|
+
return self.last_response
|
|
215
|
+
|
|
216
|
+
return for_stream() if stream else for_non_stream()
|
|
217
|
+
|
|
218
|
+
@staticmethod
|
|
219
|
+
def format_response(text: str) -> str:
|
|
220
|
+
"""Format the response text for better readability.
|
|
221
|
+
|
|
222
|
+
Args:
|
|
223
|
+
text (str): The raw response text
|
|
224
|
+
|
|
225
|
+
Returns:
|
|
226
|
+
str: Formatted text
|
|
227
|
+
"""
|
|
228
|
+
# Clean up markdown formatting
|
|
229
|
+
cleaned_text = text.replace('**', '')
|
|
230
|
+
|
|
231
|
+
# Remove any empty lines
|
|
232
|
+
cleaned_text = re.sub(r'\n\s*\n', '\n\n', cleaned_text)
|
|
233
|
+
|
|
234
|
+
# Remove any trailing whitespace
|
|
235
|
+
cleaned_text = cleaned_text.strip()
|
|
236
|
+
|
|
237
|
+
return cleaned_text
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
if __name__ == "__main__":
    from rich import print

    # Interactive demo: stream the answer for a single query.
    searcher = Monica()
    for piece in searcher.search(input(">>> "), stream=True, raw=False):
        print(piece, end="", flush=True)