webscout-7.0-py3-none-any.whl → webscout-7.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of webscout might be problematic.
- webscout/AIauto.py +191 -191
- webscout/AIbase.py +122 -122
- webscout/AIutel.py +440 -440
- webscout/Bard.py +343 -161
- webscout/DWEBS.py +489 -492
- webscout/Extra/YTToolkit/YTdownloader.py +995 -995
- webscout/Extra/YTToolkit/__init__.py +2 -2
- webscout/Extra/YTToolkit/transcriber.py +476 -479
- webscout/Extra/YTToolkit/ytapi/channel.py +307 -307
- webscout/Extra/YTToolkit/ytapi/playlist.py +58 -58
- webscout/Extra/YTToolkit/ytapi/pool.py +7 -7
- webscout/Extra/YTToolkit/ytapi/utils.py +62 -62
- webscout/Extra/YTToolkit/ytapi/video.py +103 -103
- webscout/Extra/autocoder/__init__.py +9 -9
- webscout/Extra/autocoder/autocoder_utiles.py +199 -199
- webscout/Extra/autocoder/rawdog.py +5 -7
- webscout/Extra/autollama.py +230 -230
- webscout/Extra/gguf.py +3 -3
- webscout/Extra/weather.py +171 -171
- webscout/LLM.py +442 -442
- webscout/Litlogger/__init__.py +67 -681
- webscout/Litlogger/core/__init__.py +6 -0
- webscout/Litlogger/core/level.py +20 -0
- webscout/Litlogger/core/logger.py +123 -0
- webscout/Litlogger/handlers/__init__.py +12 -0
- webscout/Litlogger/handlers/console.py +50 -0
- webscout/Litlogger/handlers/file.py +143 -0
- webscout/Litlogger/handlers/network.py +174 -0
- webscout/Litlogger/styles/__init__.py +7 -0
- webscout/Litlogger/styles/colors.py +231 -0
- webscout/Litlogger/styles/formats.py +377 -0
- webscout/Litlogger/styles/text.py +87 -0
- webscout/Litlogger/utils/__init__.py +6 -0
- webscout/Litlogger/utils/detectors.py +154 -0
- webscout/Litlogger/utils/formatters.py +200 -0
- webscout/Provider/AISEARCH/DeepFind.py +250 -250
- webscout/Provider/Blackboxai.py +136 -137
- webscout/Provider/ChatGPTGratis.py +226 -0
- webscout/Provider/Cloudflare.py +91 -78
- webscout/Provider/DeepSeek.py +218 -0
- webscout/Provider/Deepinfra.py +59 -35
- webscout/Provider/Free2GPT.py +131 -124
- webscout/Provider/Gemini.py +100 -115
- webscout/Provider/Glider.py +74 -59
- webscout/Provider/Groq.py +30 -18
- webscout/Provider/Jadve.py +108 -77
- webscout/Provider/Llama3.py +117 -94
- webscout/Provider/Marcus.py +191 -137
- webscout/Provider/Netwrck.py +62 -50
- webscout/Provider/PI.py +79 -124
- webscout/Provider/PizzaGPT.py +129 -83
- webscout/Provider/QwenLM.py +311 -0
- webscout/Provider/TTI/AiForce/__init__.py +22 -22
- webscout/Provider/TTI/AiForce/async_aiforce.py +257 -257
- webscout/Provider/TTI/AiForce/sync_aiforce.py +242 -242
- webscout/Provider/TTI/Nexra/__init__.py +22 -22
- webscout/Provider/TTI/Nexra/async_nexra.py +286 -286
- webscout/Provider/TTI/Nexra/sync_nexra.py +258 -258
- webscout/Provider/TTI/PollinationsAI/__init__.py +23 -23
- webscout/Provider/TTI/PollinationsAI/async_pollinations.py +330 -330
- webscout/Provider/TTI/PollinationsAI/sync_pollinations.py +285 -285
- webscout/Provider/TTI/artbit/__init__.py +22 -22
- webscout/Provider/TTI/artbit/async_artbit.py +184 -184
- webscout/Provider/TTI/artbit/sync_artbit.py +176 -176
- webscout/Provider/TTI/blackbox/__init__.py +4 -4
- webscout/Provider/TTI/blackbox/async_blackbox.py +212 -212
- webscout/Provider/TTI/blackbox/sync_blackbox.py +199 -199
- webscout/Provider/TTI/deepinfra/__init__.py +4 -4
- webscout/Provider/TTI/deepinfra/async_deepinfra.py +227 -227
- webscout/Provider/TTI/deepinfra/sync_deepinfra.py +199 -199
- webscout/Provider/TTI/huggingface/__init__.py +22 -22
- webscout/Provider/TTI/huggingface/async_huggingface.py +199 -199
- webscout/Provider/TTI/huggingface/sync_huggingface.py +195 -195
- webscout/Provider/TTI/imgninza/__init__.py +4 -4
- webscout/Provider/TTI/imgninza/async_ninza.py +214 -214
- webscout/Provider/TTI/imgninza/sync_ninza.py +209 -209
- webscout/Provider/TTI/talkai/__init__.py +4 -4
- webscout/Provider/TTI/talkai/async_talkai.py +229 -229
- webscout/Provider/TTI/talkai/sync_talkai.py +207 -207
- webscout/Provider/TTS/deepgram.py +182 -182
- webscout/Provider/TTS/elevenlabs.py +136 -136
- webscout/Provider/TTS/gesserit.py +150 -150
- webscout/Provider/TTS/murfai.py +138 -138
- webscout/Provider/TTS/parler.py +133 -134
- webscout/Provider/TTS/streamElements.py +360 -360
- webscout/Provider/TTS/utils.py +280 -280
- webscout/Provider/TTS/voicepod.py +116 -116
- webscout/Provider/TextPollinationsAI.py +74 -47
- webscout/Provider/WiseCat.py +193 -0
- webscout/Provider/__init__.py +144 -136
- webscout/Provider/cerebras.py +242 -227
- webscout/Provider/chatglm.py +204 -204
- webscout/Provider/dgaf.py +67 -39
- webscout/Provider/gaurish.py +105 -66
- webscout/Provider/geminiapi.py +208 -208
- webscout/Provider/granite.py +223 -0
- webscout/Provider/hermes.py +218 -218
- webscout/Provider/llama3mitril.py +179 -179
- webscout/Provider/llamatutor.py +72 -62
- webscout/Provider/llmchat.py +60 -35
- webscout/Provider/meta.py +794 -794
- webscout/Provider/multichat.py +331 -230
- webscout/Provider/typegpt.py +359 -356
- webscout/Provider/yep.py +5 -5
- webscout/__main__.py +5 -5
- webscout/cli.py +319 -319
- webscout/conversation.py +241 -242
- webscout/exceptions.py +328 -328
- webscout/litagent/__init__.py +28 -28
- webscout/litagent/agent.py +2 -3
- webscout/litprinter/__init__.py +0 -58
- webscout/scout/__init__.py +8 -8
- webscout/scout/core.py +884 -884
- webscout/scout/element.py +459 -459
- webscout/scout/parsers/__init__.py +69 -69
- webscout/scout/parsers/html5lib_parser.py +172 -172
- webscout/scout/parsers/html_parser.py +236 -236
- webscout/scout/parsers/lxml_parser.py +178 -178
- webscout/scout/utils.py +38 -38
- webscout/swiftcli/__init__.py +811 -811
- webscout/update_checker.py +2 -12
- webscout/version.py +1 -1
- webscout/webscout_search.py +1142 -1140
- webscout/webscout_search_async.py +635 -635
- webscout/zeroart/__init__.py +54 -54
- webscout/zeroart/base.py +60 -60
- webscout/zeroart/effects.py +99 -99
- webscout/zeroart/fonts.py +816 -816
- {webscout-7.0.dist-info → webscout-7.2.dist-info}/METADATA +21 -28
- webscout-7.2.dist-info/RECORD +217 -0
- webstoken/__init__.py +30 -30
- webstoken/classifier.py +189 -189
- webstoken/keywords.py +216 -216
- webstoken/language.py +128 -128
- webstoken/ner.py +164 -164
- webstoken/normalizer.py +35 -35
- webstoken/processor.py +77 -77
- webstoken/sentiment.py +206 -206
- webstoken/stemmer.py +73 -73
- webstoken/tagger.py +60 -60
- webstoken/tokenizer.py +158 -158
- webscout/Provider/RUBIKSAI.py +0 -272
- webscout-7.0.dist-info/RECORD +0 -199
- {webscout-7.0.dist-info → webscout-7.2.dist-info}/LICENSE.md +0 -0
- {webscout-7.0.dist-info → webscout-7.2.dist-info}/WHEEL +0 -0
- {webscout-7.0.dist-info → webscout-7.2.dist-info}/entry_points.txt +0 -0
- {webscout-7.0.dist-info → webscout-7.2.dist-info}/top_level.txt +0 -0
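
The list above shows the Litlogger package split into core/, handlers/, styles/ and utils/ submodules, and the provider diffs below consistently switch to the new Logger/LogFormat pair gated behind an optional logging flag. A minimal sketch of that pattern follows; it assumes only the API that appears verbatim in the Marcus.py and Netwrck.py diffs below (Logger(name=..., format=LogFormat.MODERN_EMOJI) with info/debug/error methods), and the logging_enabled flag and "Example" name are illustrative only.

# Sketch of the 7.2 provider logging pattern, assuming the Logger/LogFormat API
# exactly as it appears in the Marcus.py and Netwrck.py diffs below.
from webscout.Litlogger import Logger, LogFormat

logging_enabled = True  # hypothetical flag, mirroring the providers' `logging` kwarg
logger = Logger(name="Example", format=LogFormat.MODERN_EMOJI) if logging_enabled else None

if logger:
    logger.info("Provider initialized")

Keeping the logger optional means every call site guards on `if self.logger:`, so disabling logging costs nothing beyond the flag check.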
webscout/Provider/Marcus.py
CHANGED
@@ -1,137 +1,191 @@
+import requests
+import json
+from typing import Any, Dict, Optional, Generator
+
+from webscout.AIutel import Optimizers
+from webscout.AIutel import Conversation
+from webscout.AIutel import AwesomePrompts
+from webscout.AIbase import Provider
+from webscout import exceptions
+from webscout.Litlogger import Logger, LogFormat
+from webscout import LitAgent as Lit
+
+class Marcus(Provider):
+    """
+    This class provides methods for interacting with the AskMarcus API.
+    Improved to match webscout provider standards with comprehensive logging.
+    """
+
+    def __init__(
+        self,
+        is_conversation: bool = True,
+        max_tokens: int = 2048,
+        timeout: int = 30,
+        intro: str = None,
+        filepath: str = None,
+        update_file: bool = True,
+        proxies: dict = {},
+        history_offset: int = 10250,
+        act: str = None,
+        logging: bool = False
+    ):
+        """Initializes the Marcus API with logging capabilities."""
+        self.logger = Logger(
+            name="Marcus",
+            format=LogFormat.MODERN_EMOJI,
+        ) if logging else None
+
+        if self.logger:
+            self.logger.info("Initializing Marcus API")
+
+        self.session = requests.Session()
+        self.is_conversation = is_conversation
+        self.max_tokens_to_sample = max_tokens
+        self.api_endpoint = "https://www.askmarcus.app/api/response"
+        self.timeout = timeout
+        self.last_response = {}
+
+        self.headers = {
+            'content-type': 'application/json',
+            'accept': '*/*',
+            'origin': 'https://www.askmarcus.app',
+            'referer': 'https://www.askmarcus.app/chat',
+            'user-agent': Lit().random(),
+        }
+
+        self.__available_optimizers = (
+            method
+            for method in dir(Optimizers)
+            if callable(getattr(Optimizers, method)) and not method.startswith("__")
+        )
+
+        Conversation.intro = (
+            AwesomePrompts().get_act(
+                act, raise_not_found=True, default=None, case_insensitive=True
+            )
+            if act
+            else intro or Conversation.intro
+        )
+
+        self.conversation = Conversation(
+            is_conversation, self.max_tokens_to_sample, filepath, update_file
+        )
+        self.conversation.history_offset = history_offset
+        self.session.proxies = proxies
+
+        if self.logger:
+            self.logger.info("Marcus API initialized successfully")
+
+    def ask(
+        self,
+        prompt: str,
+        stream: bool = False,
+        raw: bool = False,
+        optimizer: str = None,
+        conversationally: bool = False,
+    ) -> Dict[str, Any] | Generator[str, None, None]:
+        """Sends a prompt to the AskMarcus API and returns the response with logging."""
+        if self.logger:
+            self.logger.debug(f"Processing request - Prompt: {prompt[:50]}...")
+            self.logger.debug(f"Stream: {stream}, Optimizer: {optimizer}")
+
+        conversation_prompt = self.conversation.gen_complete_prompt(prompt)
+        if optimizer:
+            if optimizer in self.__available_optimizers:
+                conversation_prompt = getattr(Optimizers, optimizer)(
+                    conversation_prompt if conversationally else prompt
+                )
+                if self.logger:
+                    self.logger.debug(f"Applied optimizer: {optimizer}")
+            else:
+                if self.logger:
+                    self.logger.error(f"Invalid optimizer requested: {optimizer}")
+                raise exceptions.FailedToGenerateResponseError(
+                    f"Optimizer is not one of {self.__available_optimizers}"
+                )
+
+        data = {"message": conversation_prompt}
+
+        def for_stream():
+            try:
+                if self.logger:
+                    self.logger.debug("Initiating streaming request to API")
+
+                with requests.post(
+                    self.api_endpoint,
+                    headers=self.headers,
+                    json=data,
+                    stream=True,
+                    timeout=self.timeout
+                ) as response:
+                    response.raise_for_status()
+
+                    if self.logger:
+                        self.logger.info(f"API connection established successfully. Status: {response.status_code}")
+
+                    for line in response.iter_lines():
+                        if line:
+                            yield line.decode('utf-8')
+
+                    self.conversation.update_chat_history(
+                        prompt, self.get_message(self.last_response)
+                    )
+
+            except requests.exceptions.RequestException as e:
+                if self.logger:
+                    self.logger.error(f"API request failed: {str(e)}")
+                raise exceptions.ProviderConnectionError(f"Error connecting to Marcus: {str(e)}")
+
+        def for_non_stream():
+            if self.logger:
+                self.logger.debug("Processing non-streaming request")
+
+            full_response = ""
+            for line in for_stream():
+                full_response += line
+            self.last_response = {"text": full_response}
+
+            if self.logger:
+                self.logger.debug("Response processing completed")
+
+            return self.last_response
+
+        return for_stream() if stream else for_non_stream()
+
+    def chat(
+        self,
+        prompt: str,
+        stream: bool = False,
+        optimizer: str = None,
+        conversationally: bool = False,
+    ) -> str | Generator[str, None, None]:
+        """Generates a response from the AskMarcus API with logging."""
+        if self.logger:
+            self.logger.debug(f"Chat request initiated - Prompt: {prompt[:50]}...")
+
+        def for_stream():
+            for response_chunk in self.ask(
+                prompt, stream=True, optimizer=optimizer, conversationally=conversationally
+            ):
+                yield response_chunk
+
+        def for_non_stream():
+            response = self.ask(
+                prompt, stream=False, optimizer=optimizer, conversationally=conversationally
+            )
+            return self.get_message(response)
+
+        return for_stream() if stream else for_non_stream()
+
+    def get_message(self, response: Dict[str, Any]) -> str:
+        """Extracts the message from the API response."""
+        assert isinstance(response, dict), "Response should be of dict data-type only"
+        return response.get("text", "")
+
+if __name__ == "__main__":
+    from rich import print
+    # Enable logging for testing
+    ai = Marcus(logging=True)
+    response = ai.chat(input(">>> "), stream=True)
+    for chunk in response:
+        print(chunk, end="", flush=True)
webscout/Provider/Netwrck.py
CHANGED
@@ -2,41 +2,39 @@ import time
 import uuid
 import requests
 import json
-
 from typing import Any, Dict, Optional, Generator, Union
 from dataclasses import dataclass, asdict
 from datetime import date
-
 from webscout.AIutel import Optimizers, Conversation, AwesomePrompts
 from webscout.AIbase import Provider
 from webscout import exceptions
-from webscout.Litlogger import
+from webscout.Litlogger import Logger, LogFormat
 from webscout.litagent import LitAgent
 
-
 class Netwrck(Provider):
     """
     A class to interact with the Netwrck.com API. Supports streaming.
     """
-    greeting = """
+    greeting = """Hello! I'm a helpful assistant. How can I help you today?"""
 
     AVAILABLE_MODELS = {
-        "lumimaid": "neversleep/llama-3
+        "lumimaid": "neversleep/llama-3-lumimaid-8b:extended",
         "grok": "x-ai/grok-2",
         "claude": "anthropic/claude-3.5-sonnet:beta",
         "euryale": "sao10k/l3-euryale-70b",
         "gpt4mini": "openai/gpt-4o-mini",
         "mythomax": "gryphe/mythomax-l2-13b",
         "gemini": "google/gemini-pro-1.5",
-        "lumimaid70b": "neversleep/llama-3.1-lumimaid-70b",
         "nemotron": "nvidia/llama-3.1-nemotron-70b-instruct",
+        "deepseek-r1": "deepseek/deepseek-r1",
+        "deepseek": "deepseek/deepseek-chat",
     }
 
     def __init__(
         self,
         model: str = "claude",
         is_conversation: bool = True,
-        max_tokens: int =
+        max_tokens: int = 4096,
         timeout: int = 30,
         intro: Optional[str] = None,
         filepath: Optional[str] = None,
@@ -50,8 +48,18 @@ class Netwrck(Provider):
         logging: bool = False
     ):
         """Initializes the Netwrck API client."""
+        # Initialize logger first for initialization logging
+        self.logger = Logger(
+            name="Netwrck",
+            format=LogFormat.MODERN_EMOJI,
+
+        ) if logging else None
+
         if model not in self.AVAILABLE_MODELS:
-
+            error_msg = f"Invalid model: {model}. Choose from: {list(self.AVAILABLE_MODELS.keys())}"
+            if self.logger:
+                self.logger.error(error_msg)
+            raise ValueError(error_msg)
 
         self.model = model
         self.model_name = self.AVAILABLE_MODELS[model]
@@ -64,9 +72,7 @@ class Netwrck(Provider):
         self.temperature = temperature
         self.top_p = top_p
 
-        # Initialize LitAgent for user agent generation
         self.agent = LitAgent()
-
         self.headers = {
             'authority': 'netwrck.com',
             'accept': '*/*',
@@ -76,6 +82,7 @@ class Netwrck(Provider):
             'referer': 'https://netwrck.com/',
             'user-agent': self.agent.random()
         }
+
         self.session.headers.update(self.headers)
         self.proxies = proxies or {}
 
@@ -84,16 +91,16 @@ class Netwrck(Provider):
             if act
             else intro or Conversation.intro
         )
+
         self.conversation = Conversation(is_conversation, max_tokens, filepath, update_file)
         self.conversation.history_offset = history_offset
         self.__available_optimizers = (
-            method
-            for method in dir(Optimizers)
+            method for method in dir(Optimizers)
             if callable(getattr(Optimizers, method)) and not method.startswith("__")
         )
 
-
-
+        if self.logger:
+            self.logger.info(f"Initialized Netwrck with model: {self.model_name}")
 
     def ask(
         self,
@@ -104,22 +111,20 @@ class Netwrck(Provider):
         conversationally: bool = False,
     ) -> Union[Dict[str, Any], Generator]:
         """Sends a prompt to the Netwrck API and returns the response."""
-
-
-
+        if optimizer and optimizer not in self.__available_optimizers:
+            error_msg = f"Optimizer is not one of {self.__available_optimizers}"
+            if self.logger:
+                self.logger.error(f"Invalid optimizer requested: {optimizer}")
+            raise exceptions.FailedToGenerateResponseError(error_msg)
 
         conversation_prompt = self.conversation.gen_complete_prompt(prompt)
         if optimizer:
-
-                conversation_prompt
-
-
-
-
-                self.logger.error(f"Invalid optimizer: {optimizer}")
-                raise exceptions.FailedToGenerateResponseError(
-                    f"Optimizer is not one of {self.__available_optimizers}"
-                )
+            conversation_prompt = getattr(Optimizers, optimizer)(
+                conversation_prompt if conversationally else prompt
+            )
+            if self.logger:
+                self.logger.debug(f"Applied optimizer: {optimizer}")
+
         payload = {
             "query": prompt,
             "context": self.system_prompt,
@@ -128,6 +133,9 @@ class Netwrck(Provider):
             "greeting": self.greeting
         }
 
+        if self.logger:
+            self.logger.debug(f"Sending request to Netwrck API [stream={stream}]")
+
         def for_stream():
             try:
                 response = self.session.post(
@@ -140,26 +148,23 @@ class Netwrck(Provider):
                 )
                 response.raise_for_status()
 
-                # Initialize an empty string to accumulate the streaming text
                 streaming_text = ""
                 for line in response.iter_lines():
                     if line:
                         decoded_line = line.decode('utf-8').strip('"')
-                        streaming_text += decoded_line
-                        yield {"text": decoded_line}
+                        streaming_text += decoded_line
+                        yield {"text": decoded_line}
 
-                # Optionally, you can update the conversation history with the full streaming text
                 self.conversation.update_chat_history(payload["query"], streaming_text)
 
-            except
+            except requests.exceptions.RequestException as e:
                 if self.logger:
-                    self.logger.error(f"
-                raise exceptions.ProviderConnectionError(f"
-
+                    self.logger.error(f"Network error: {str(e)}")
+                raise exceptions.ProviderConnectionError(f"Network error: {str(e)}") from e
             except Exception as e:
                 if self.logger:
-                    self.logger.error(f"
-                raise exceptions.ProviderConnectionError(f"
+                    self.logger.error(f"Unexpected error: {str(e)}")
+                raise exceptions.ProviderConnectionError(f"Unexpected error: {str(e)}") from e
 
         def for_non_stream():
             try:
@@ -171,16 +176,24 @@ class Netwrck(Provider):
                     timeout=self.timeout,
                 )
                 response.raise_for_status()
-
+
+                if self.logger:
+                    self.logger.debug(f"Response status: {response.status_code}")
+
                 text = response.text.strip('"')
                 self.last_response = {"text": text}
                 self.conversation.update_chat_history(prompt, text)
 
                 return self.last_response
+
+            except requests.exceptions.RequestException as e:
+                if self.logger:
+                    self.logger.error(f"Network error: {str(e)}")
+                raise exceptions.FailedToGenerateResponseError(f"Network error: {str(e)}") from e
             except Exception as e:
                 if self.logger:
-                    self.logger.error(f"
-                raise exceptions.
+                    self.logger.error(f"Unexpected error: {str(e)}")
+                raise exceptions.FailedToGenerateResponseError(f"Unexpected error: {str(e)}") from e
 
         return for_stream() if stream else for_non_stream()
 
@@ -193,7 +206,7 @@ class Netwrck(Provider):
     ) -> str:
         """Generates a response from the Netwrck API."""
         if self.logger:
-
+            self.logger.debug(f"Processing chat request [stream={stream}]")
 
         def for_stream():
             for response in self.ask(
@@ -219,21 +232,20 @@ class Netwrck(Provider):
     def get_message(self, response: Dict[str, Any]) -> str:
         """Retrieves message only from response"""
         assert isinstance(response, dict), "Response should be of dict data-type only"
-        return response["text"]
+        return response["text"].replace('\\n', '\n').replace('\\n\\n', '\n\n')
 
-# Example Usage:
 if __name__ == "__main__":
     from rich import print
 
-    #
+    # Example with logging enabled
+    netwrck = Netwrck(model="claude", logging=False)
+
     print("Non-Streaming Response:")
-
-    response = netwrck.chat("tell me about Russia")
+    response = netwrck.chat("Tell me about Russia")
     print(response)
 
-    # Streaming example
     print("\nStreaming Response:")
-    response = netwrck.chat("
+    response = netwrck.chat("Tell me about India", stream=True)
     for chunk in response:
         print(chunk, end="", flush=True)
-    print()
+    print()