webscout 6.6__py3-none-any.whl → 6.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of webscout might be problematic; consult the registry's advisory page for this release for further details.

webscout/Extra/weather.py CHANGED
@@ -16,7 +16,7 @@ from rich.columns import Columns
16
16
  # Initialize Rich console with force terminal
17
17
  console = Console(force_terminal=True)
18
18
 
19
- def get_weather_emoji(condition: str) -> str:
19
+ def get_emoji(condition: str) -> str:
20
20
  """Get appropriate emoji for weather condition"""
21
21
  conditions = {
22
22
  'sunny': '*', 'clear': '*',
@@ -61,7 +61,7 @@ def create_current_weather_panel(data):
61
61
  location_name = f"{location['areaName'][0]['value']}, {location['country'][0]['value']}"
62
62
 
63
63
  weather_desc = current['weatherDesc'][0]['value']
64
- symbol = get_weather_emoji(weather_desc)
64
+ symbol = get_emoji(weather_desc)
65
65
 
66
66
  # Create weather info table
67
67
  table = Table(show_header=False, box=box.ROUNDED, expand=True)
@@ -98,7 +98,7 @@ def create_forecast_panel(data):
98
98
  # Get mid-day conditions (noon)
99
99
  noon = day['hourly'][4]
100
100
  condition = noon['weatherDesc'][0]['value']
101
- symbol = get_weather_emoji(condition)
101
+ symbol = get_emoji(condition)
102
102
  temp_range = f"{day['mintempC']}° - {day['maxtempC']}°"
103
103
  rain_chance = f"v {noon['chanceofrain']}%"
104
104
  wind = f"> {noon['windspeedKmph']} km/h"
@@ -113,7 +113,7 @@ def create_forecast_panel(data):
113
113
 
114
114
  return Panel(table, title="[bold]3-Day Forecast[/]", border_style="blue")
115
115
 
116
- def get_weather(location: str):
116
+ def get(location: str):
117
117
  """Get weather data with progress indicator"""
118
118
  with Progress(
119
119
  SpinnerColumn(),
@@ -159,7 +159,7 @@ def main():
159
159
  console.print("\n[bold cyan]* Weather Information[/]\n")
160
160
  location = console.input("[cyan]Enter location: [/]")
161
161
 
162
- weather_data = get_weather(location)
162
+ weather_data = get(location)
163
163
  if weather_data:
164
164
  display_weather(weather_data)
165
165
 
@@ -1,5 +1,6 @@
1
1
  import json
2
2
  from uuid import uuid4
3
+ import webscout
3
4
  from webscout.AIutel import Optimizers
4
5
  from webscout.AIutel import Conversation
5
6
  from webscout.AIutel import AwesomePrompts, sanitize_stream
@@ -107,7 +108,7 @@ class Cloudflare(Provider):
107
108
  'Sec-Fetch-Dest': 'empty',
108
109
  'Sec-Fetch-Mode': 'cors',
109
110
  'Sec-Fetch-Site': 'same-origin',
110
- 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36 Edg/127.0.0.0',
111
+ 'User-Agent': webscout.LitAgent().random()
111
112
  }
112
113
 
113
114
  self.cookies = {
@@ -4,7 +4,7 @@ from webscout.AIutel import Optimizers
4
4
  from webscout.AIutel import Conversation
5
5
  from webscout.AIutel import AwesomePrompts, sanitize_stream
6
6
  from webscout.AIbase import Provider
7
- from webscout import exceptions
7
+ from webscout import exceptions, LitAgent
8
8
  import requests
9
9
 
10
10
  class DARKAI(Provider):
@@ -75,7 +75,7 @@ class DARKAI(Provider):
75
75
  "sec-fetch-dest": "empty",
76
76
  "sec-fetch-mode": "cors",
77
77
  "sec-fetch-site": "cross-site",
78
- "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36 Edg/127.0.0.0"
78
+ "user-agent": LitAgent().random(),
79
79
  }
80
80
 
81
81
  self.__available_optimizers = (
@@ -9,7 +9,7 @@ from webscout.AIutel import Conversation
9
9
  from webscout.AIutel import AwesomePrompts
10
10
  from webscout.AIbase import Provider
11
11
  from webscout import exceptions
12
-
12
+ from webscout import LitAgent
13
13
 
14
14
  class Free2GPT(Provider):
15
15
  """
@@ -48,7 +48,7 @@ class Free2GPT(Provider):
48
48
  self.session = requests.Session()
49
49
  self.is_conversation = is_conversation
50
50
  self.max_tokens_to_sample = max_tokens
51
- self.api_endpoint = "https://chat10.free2gpt.xyz/api/generate"
51
+ self.api_endpoint = "https://chat1.free2gpt.com/api/generate"
52
52
  self.stream_chunk_size = 64
53
53
  self.timeout = timeout
54
54
  self.last_response = {}
@@ -59,15 +59,15 @@ class Free2GPT(Provider):
59
59
  "accept-language": "en-US,en;q=0.9,en-IN;q=0.8",
60
60
  "content-type": "text/plain;charset=UTF-8",
61
61
  "dnt": "1",
62
- "origin": "https://chat10.free2gpt.xyz",
63
- "referer": "https://chat10.free2gpt.xyz/",
62
+ "origin": "https://chat1.free2gpt.co",
63
+ "referer": "https://chat1.free2gpt.co",
64
64
  "sec-ch-ua": '"Chromium";v="128", "Not;A=Brand";v="24", "Microsoft Edge";v="128"',
65
65
  "sec-ch-ua-mobile": "?0",
66
66
  "sec-ch-ua-platform": '"Windows"',
67
67
  "sec-fetch-dest": "empty",
68
68
  "sec-fetch-mode": "cors",
69
69
  "sec-fetch-site": "same-origin",
70
- "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36 Edg/128.0.0.0"
70
+ "user-agent": LitAgent().random(),
71
71
  }
72
72
 
73
73
  self.__available_optimizers = (
@@ -7,7 +7,7 @@ from webscout.AIutel import Conversation
7
7
  from webscout.AIutel import AwesomePrompts
8
8
  from webscout.AIbase import Provider
9
9
  from webscout import exceptions
10
-
10
+ from webscout import LitAgent as Lit
11
11
 
12
12
  class Marcus(Provider):
13
13
  """
@@ -39,7 +39,7 @@ class Marcus(Provider):
39
39
  'accept': '*/*',
40
40
  'origin': 'https://www.askmarcus.app',
41
41
  'referer': 'https://www.askmarcus.app/chat',
42
- 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36'
42
+ 'user-agent': Lit().random(),
43
43
  }
44
44
  self.__available_optimizers = (
45
45
  method
@@ -134,4 +134,4 @@ if __name__ == '__main__':
134
134
  ai = Marcus(timeout=30)
135
135
  response = ai.chat("Tell me about India", stream=True)
136
136
  for chunk in response:
137
- print(chunk)
137
+ print(chunk, end="", flush=True)
@@ -1,235 +1,239 @@
1
- import time
2
- import uuid
3
- import requests
4
- import json
5
-
6
- from typing import Any, Dict, Optional, Generator, Union
7
- from dataclasses import dataclass, asdict
8
- from datetime import date
9
-
10
- from webscout.AIutel import Optimizers, Conversation, AwesomePrompts
11
- from webscout.AIbase import Provider
12
- from webscout import exceptions
13
- from webscout.Litlogger import LitLogger, LogFormat, ColorScheme
14
- from webscout.litagent import LitAgent
15
-
16
-
17
- class Netwrck(Provider):
18
- """
19
- A class to interact with the Netwrck.com API. Supports streaming.
20
- """
21
-
22
- AVAILABLE_MODELS = {
23
- "lumimaid": "neversleep/llama-3.1-lumimaid-8b",
24
- "grok": "x-ai/grok-2",
25
- "claude": "anthropic/claude-3.5-sonnet:beta",
26
- "euryale": "sao10k/l3-euryale-70b",
27
- "gpt4mini": "openai/gpt-4o-mini",
28
- "mythomax": "gryphe/mythomax-l2-13b",
29
- "gemini": "google/gemini-pro-1.5",
30
- "lumimaid70b": "neversleep/llama-3.1-lumimaid-70b",
31
- "nemotron": "nvidia/llama-3.1-nemotron-70b-instruct",
32
- }
33
-
34
- def __init__(
35
- self,
36
- model: str = "lumimaid",
37
- is_conversation: bool = True,
38
- max_tokens: int = 2048,
39
- timeout: int = 30,
40
- intro: Optional[str] = None,
41
- filepath: Optional[str] = None,
42
- update_file: bool = False,
43
- proxies: Optional[dict] = None,
44
- history_offset: int = 0,
45
- act: Optional[str] = None,
46
- system_prompt: str = "You are a helpful assistant.",
47
- temperature: float = 0.7,
48
- top_p: float = 0.8,
49
- logging: bool = False
50
- ):
51
- """Initializes the Netwrck API client."""
52
- if model not in self.AVAILABLE_MODELS:
53
- raise ValueError(f"Invalid model: {model}. Choose from: {list(self.AVAILABLE_MODELS.keys())}")
54
-
55
- self.model = model
56
- self.model_name = self.AVAILABLE_MODELS[model]
57
- self.system_prompt = system_prompt
58
- self.session = requests.Session()
59
- self.is_conversation = is_conversation
60
- self.max_tokens_to_sample = max_tokens
61
- self.timeout = timeout
62
- self.last_response: Dict[str, Any] = {}
63
- self.temperature = temperature
64
- self.top_p = top_p
65
-
66
- # Initialize LitAgent for user agent generation
67
- self.agent = LitAgent()
68
-
69
- self.headers = {
70
- 'authority': 'netwrck.com',
71
- 'accept': '*/*',
72
- 'accept-language': 'en-US,en;q=0.9',
73
- 'content-type': 'application/json',
74
- 'origin': 'https://netwrck.com',
75
- 'referer': 'https://netwrck.com/',
76
- 'user-agent': self.agent.random()
77
- }
78
- self.session.headers.update(self.headers)
79
- self.proxies = proxies or {}
80
-
81
- Conversation.intro = (
82
- AwesomePrompts().get_act(act, raise_not_found=True, default=None, case_insensitive=True)
83
- if act
84
- else intro or Conversation.intro
85
- )
86
- self.conversation = Conversation(is_conversation, max_tokens, filepath, update_file)
87
- self.conversation.history_offset = history_offset
88
- self.__available_optimizers = (
89
- method
90
- for method in dir(Optimizers)
91
- if callable(getattr(Optimizers, method)) and not method.startswith("__")
92
- )
93
-
94
- # Initialize logger
95
- self.logger = LitLogger(name="Netwrck", format=LogFormat.MODERN_EMOJI, color_scheme=ColorScheme.CYBERPUNK) if logging else None
96
-
97
-
98
- def ask(
99
- self,
100
- prompt: str,
101
- stream: bool = False,
102
- raw: bool = False,
103
- optimizer: Optional[str] = None,
104
- conversationally: bool = False,
105
- ) -> Union[Dict[str, Any], Generator]:
106
- """Sends a prompt to the Netwrck API and returns the response."""
107
-
108
- if self.logger:
109
- self.logger.debug(f"ask() called with prompt: {prompt}")
110
-
111
- conversation_prompt = self.conversation.gen_complete_prompt(prompt)
112
- if optimizer:
113
- if optimizer in self.__available_optimizers:
114
- conversation_prompt = getattr(Optimizers, optimizer)(
115
- conversation_prompt if conversationally else prompt
116
- )
117
- else:
118
- if self.logger:
119
- self.logger.error(f"Invalid optimizer: {optimizer}")
120
- raise exceptions.FailedToGenerateResponseError(
121
- f"Optimizer is not one of {self.__available_optimizers}"
122
- )
123
-
124
- payload = {
125
- "query": conversation_prompt,
126
- "context": self.system_prompt,
127
- "examples": [],
128
- "model_name": self.model_name,
129
- "temperature": self.temperature,
130
- "top_p": self.top_p,
131
- }
132
-
133
-
134
- def for_stream():
135
- try:
136
- response = self.session.post(
137
- "https://netwrck.com/api/chatpred_or",
138
- json=payload,
139
- headers=self.headers,
140
- proxies=self.proxies,
141
- timeout=self.timeout,
142
- stream=True,
143
- )
144
- response.raise_for_status()
145
-
146
- streaming_text = ""
147
- for line in response.iter_lines():
148
- if line:
149
- decoded_line = line.decode('utf-8').strip('"')
150
- streaming_text += decoded_line
151
- yield {"text": decoded_line} if not raw else decoded_line
152
-
153
- self.conversation.update_chat_history(prompt, streaming_text)
154
-
155
- except Exception as e:
156
- if self.logger:
157
- self.logger.error(f"Error communicating with Netwrck: {e}")
158
- raise exceptions.ProviderConnectionError(f"Error communicating with Netwrck: {e}") from e
159
-
160
- def for_non_stream():
161
- try:
162
- response = self.session.post(
163
- "https://netwrck.com/api/chatpred_or",
164
- json=payload,
165
- headers=self.headers,
166
- proxies=self.proxies,
167
- timeout=self.timeout,
168
- )
169
- response.raise_for_status()
170
- print(response.text)
171
- text = response.text.strip('"')
172
- self.last_response = {"text": text}
173
- self.conversation.update_chat_history(prompt, text)
174
-
175
- return self.last_response
176
- except Exception as e:
177
- if self.logger:
178
- self.logger.error(f"Error communicating with Netwrck: {e}")
179
- raise exceptions.ProviderConnectionError(f"Error communicating with Netwrck: {e}") from e
180
-
181
- return for_stream() if stream else for_non_stream()
182
-
183
- def chat(
184
- self,
185
- prompt: str,
186
- stream: bool = False,
187
- optimizer: Optional[str] = None,
188
- conversationally: bool = False,
189
- ) -> str:
190
- """Generates a response from the Netwrck API."""
191
- if self.logger:
192
- self.logger.debug(f"chat() called with prompt: {prompt}")
193
-
194
- def for_stream():
195
- for response in self.ask(
196
- prompt,
197
- stream=True,
198
- optimizer=optimizer,
199
- conversationally=conversationally
200
- ):
201
- yield self.get_message(response)
202
-
203
- def for_non_stream():
204
- return self.get_message(
205
- self.ask(
206
- prompt,
207
- stream=False,
208
- optimizer=optimizer,
209
- conversationally=conversationally,
210
- )
211
- )
212
-
213
- return for_stream() if stream else for_non_stream()
214
-
215
- def get_message(self, response: Dict[str, Any]) -> str:
216
- """Retrieves message only from response"""
217
- assert isinstance(response, dict), "Response should be of dict data-type only"
218
- return response["text"]
219
-
220
- # Example Usage:
221
- if __name__ == "__main__":
222
- from rich import print
223
-
224
- # Non-streaming example
225
- print("Non-Streaming Response:")
226
- netwrck = Netwrck(model="lumimaid", logging=True)
227
- response = netwrck.chat("What is the capital of France?")
228
- print(response)
229
-
230
- # Streaming example
231
- print("\nStreaming Response:")
232
- response = netwrck.chat("tell me about india", stream=True)
233
- for chunk in response:
234
- print(chunk, end="", flush=True)
1
+ import time
2
+ import uuid
3
+ import requests
4
+ import json
5
+
6
+ from typing import Any, Dict, Optional, Generator, Union
7
+ from dataclasses import dataclass, asdict
8
+ from datetime import date
9
+
10
+ from webscout.AIutel import Optimizers, Conversation, AwesomePrompts
11
+ from webscout.AIbase import Provider
12
+ from webscout import exceptions
13
+ from webscout.Litlogger import LitLogger, LogFormat, ColorScheme
14
+ from webscout.litagent import LitAgent
15
+
16
+
17
+ class Netwrck(Provider):
18
+ """
19
+ A class to interact with the Netwrck.com API. Supports streaming.
20
+ """
21
+ greeting = """An unknown multiverse phenomenon occurred, and you found yourself in a dark space. You looked around and found a source of light in a distance. You approached the light and *whoosh*....\nChoose your origin:\na) As a baby who just got birthed, your fate unknown\nb) As an amnesic stranded on an uninhabited island with mysterious ruins\nc) As an abandoned product of a forbidden experiment\nd) As a slave being sold at an auction\ne) Extremely Chaotic Randomizer\nOr, dive into your own fantasy."""
22
+
23
+ AVAILABLE_MODELS = {
24
+ "lumimaid": "neversleep/llama-3.1-lumimaid-8b",
25
+ "grok": "x-ai/grok-2",
26
+ "claude": "anthropic/claude-3.5-sonnet:beta",
27
+ "euryale": "sao10k/l3-euryale-70b",
28
+ "gpt4mini": "openai/gpt-4o-mini",
29
+ "mythomax": "gryphe/mythomax-l2-13b",
30
+ "gemini": "google/gemini-pro-1.5",
31
+ "lumimaid70b": "neversleep/llama-3.1-lumimaid-70b",
32
+ "nemotron": "nvidia/llama-3.1-nemotron-70b-instruct",
33
+ }
34
+
35
+ def __init__(
36
+ self,
37
+ model: str = "claude",
38
+ is_conversation: bool = True,
39
+ max_tokens: int = 2048,
40
+ timeout: int = 30,
41
+ intro: Optional[str] = None,
42
+ filepath: Optional[str] = None,
43
+ update_file: bool = False,
44
+ proxies: Optional[dict] = None,
45
+ history_offset: int = 0,
46
+ act: Optional[str] = None,
47
+ system_prompt: str = "You are a helpful assistant.",
48
+ temperature: float = 0.7,
49
+ top_p: float = 0.8,
50
+ logging: bool = False
51
+ ):
52
+ """Initializes the Netwrck API client."""
53
+ if model not in self.AVAILABLE_MODELS:
54
+ raise ValueError(f"Invalid model: {model}. Choose from: {list(self.AVAILABLE_MODELS.keys())}")
55
+
56
+ self.model = model
57
+ self.model_name = self.AVAILABLE_MODELS[model]
58
+ self.system_prompt = system_prompt
59
+ self.session = requests.Session()
60
+ self.is_conversation = is_conversation
61
+ self.max_tokens_to_sample = max_tokens
62
+ self.timeout = timeout
63
+ self.last_response: Dict[str, Any] = {}
64
+ self.temperature = temperature
65
+ self.top_p = top_p
66
+
67
+ # Initialize LitAgent for user agent generation
68
+ self.agent = LitAgent()
69
+
70
+ self.headers = {
71
+ 'authority': 'netwrck.com',
72
+ 'accept': '*/*',
73
+ 'accept-language': 'en-US,en;q=0.9',
74
+ 'content-type': 'application/json',
75
+ 'origin': 'https://netwrck.com',
76
+ 'referer': 'https://netwrck.com/',
77
+ 'user-agent': self.agent.random()
78
+ }
79
+ self.session.headers.update(self.headers)
80
+ self.proxies = proxies or {}
81
+
82
+ Conversation.intro = (
83
+ AwesomePrompts().get_act(act, raise_not_found=True, default=None, case_insensitive=True)
84
+ if act
85
+ else intro or Conversation.intro
86
+ )
87
+ self.conversation = Conversation(is_conversation, max_tokens, filepath, update_file)
88
+ self.conversation.history_offset = history_offset
89
+ self.__available_optimizers = (
90
+ method
91
+ for method in dir(Optimizers)
92
+ if callable(getattr(Optimizers, method)) and not method.startswith("__")
93
+ )
94
+
95
+ # Initialize logger
96
+ self.logger = LitLogger(name="Netwrck", format=LogFormat.MODERN_EMOJI, color_scheme=ColorScheme.CYBERPUNK) if logging else None
97
+
98
+ def ask(
99
+ self,
100
+ prompt: str,
101
+ stream: bool = False,
102
+ raw: bool = False,
103
+ optimizer: Optional[str] = None,
104
+ conversationally: bool = False,
105
+ ) -> Union[Dict[str, Any], Generator]:
106
+ """Sends a prompt to the Netwrck API and returns the response."""
107
+
108
+ if self.logger:
109
+ self.logger.debug(f"ask() called with prompt: {prompt}")
110
+
111
+ conversation_prompt = self.conversation.gen_complete_prompt(prompt)
112
+ if optimizer:
113
+ if optimizer in self.__available_optimizers:
114
+ conversation_prompt = getattr(Optimizers, optimizer)(
115
+ conversation_prompt if conversationally else prompt
116
+ )
117
+ else:
118
+ if self.logger:
119
+ self.logger.error(f"Invalid optimizer: {optimizer}")
120
+ raise exceptions.FailedToGenerateResponseError(
121
+ f"Optimizer is not one of {self.__available_optimizers}"
122
+ )
123
+ payload = {
124
+ "query": prompt,
125
+ "context": self.system_prompt,
126
+ "examples": [],
127
+ "model_name": self.model_name,
128
+ "greeting": self.greeting
129
+ }
130
+
131
+ def for_stream():
132
+ try:
133
+ response = self.session.post(
134
+ "https://netwrck.com/api/chatpred_or",
135
+ json=payload,
136
+ headers=self.headers,
137
+ proxies=self.proxies,
138
+ timeout=self.timeout,
139
+ stream=True,
140
+ )
141
+ response.raise_for_status()
142
+
143
+ # Initialize an empty string to accumulate the streaming text
144
+ streaming_text = ""
145
+ for line in response.iter_lines():
146
+ if line:
147
+ decoded_line = line.decode('utf-8').strip('"')
148
+ streaming_text += decoded_line # Accumulate the text
149
+ yield {"text": decoded_line} # Yield each chunk
150
+
151
+ # Optionally, you can update the conversation history with the full streaming text
152
+ self.conversation.update_chat_history(payload["query"], streaming_text)
153
+
154
+ except Exception as e:
155
+ if self.logger:
156
+ self.logger.error(f"Error communicating with Netwrck: {e}")
157
+ raise exceptions.ProviderConnectionError(f"Error communicating with Netwrck: {e}") from e
158
+
159
+ except Exception as e:
160
+ if self.logger:
161
+ self.logger.error(f"Error communicating with Netwrck: {e}")
162
+ raise exceptions.ProviderConnectionError(f"Error communicating with Netwrck: {e}") from e
163
+
164
+ def for_non_stream():
165
+ try:
166
+ response = self.session.post(
167
+ "https://netwrck.com/api/chatpred_or",
168
+ json=payload,
169
+ headers=self.headers,
170
+ proxies=self.proxies,
171
+ timeout=self.timeout,
172
+ )
173
+ response.raise_for_status()
174
+ # print(response.text)
175
+ text = response.text.strip('"')
176
+ self.last_response = {"text": text}
177
+ self.conversation.update_chat_history(prompt, text)
178
+
179
+ return self.last_response
180
+ except Exception as e:
181
+ if self.logger:
182
+ self.logger.error(f"Error communicating with Netwrck: {e}")
183
+ raise exceptions.ProviderConnectionError(f"Error communicating with Netwrck: {e}") from e
184
+
185
+ return for_stream() if stream else for_non_stream()
186
+
187
+ def chat(
188
+ self,
189
+ prompt: str,
190
+ stream: bool = False,
191
+ optimizer: Optional[str] = None,
192
+ conversationally: bool = False,
193
+ ) -> str:
194
+ """Generates a response from the Netwrck API."""
195
+ if self.logger:
196
+ self.logger.debug(f"chat() called with prompt: {prompt}")
197
+
198
+ def for_stream():
199
+ for response in self.ask(
200
+ prompt,
201
+ stream=True,
202
+ optimizer=optimizer,
203
+ conversationally=conversationally
204
+ ):
205
+ yield self.get_message(response)
206
+
207
+ def for_non_stream():
208
+ return self.get_message(
209
+ self.ask(
210
+ prompt,
211
+ stream=False,
212
+ optimizer=optimizer,
213
+ conversationally=conversationally,
214
+ )
215
+ )
216
+
217
+ return for_stream() if stream else for_non_stream()
218
+
219
+ def get_message(self, response: Dict[str, Any]) -> str:
220
+ """Retrieves message only from response"""
221
+ assert isinstance(response, dict), "Response should be of dict data-type only"
222
+ return response["text"]
223
+
224
+ # Example Usage:
225
+ if __name__ == "__main__":
226
+ from rich import print
227
+
228
+ # Non-streaming example
229
+ print("Non-Streaming Response:")
230
+ netwrck = Netwrck(model="claude", logging=True)
231
+ response = netwrck.chat("tell me about Russia")
232
+ print(response)
233
+
234
+ # Streaming example
235
+ print("\nStreaming Response:")
236
+ response = netwrck.chat("tell me about India", stream=True)
237
+ for chunk in response:
238
+ print(chunk, end="", flush=True)
235
239
  print() # Add a newline at the end