webscout 1.1.8__py3-none-any.whl → 1.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of webscout might be problematic.

webscout/AIbase.py CHANGED
@@ -1,70 +1,70 @@
- from abc import ABC
- from abc import abstractmethod
-
-
- class Provider(ABC):
-     """Base class for models class"""
-
-     @abstractmethod
-     def ask(
-         self,
-         prompt: str,
-         stream: bool = False,
-         raw: bool = False,
-         optimizer: str = None,
-         conversationally: bool = False,
-     ) -> dict:
-         """Chat with AI
-
-         Args:
-             prompt (str): Prompt to be sent
-             stream (bool, optional): Flag for streaming response. Defaults to False.
-             raw (bool, optional): Stream back raw response as received
-             optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`
-             conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.
-         Returns:
-             dict : {}
-         ```json
-         {
-             "completion": "\nNext: domestic cat breeds with short hair >>",
-             "stop_reason": null,
-             "truncated": false,
-             "stop": null,
-             "model": "llama-2-13b-chat",
-             "log_id": "cmpl-3kYiYxSNDvgMShSzFooz6t",
-             "exception": null
-         }
-         ```
-         """
-         raise NotImplementedError("Method needs to be implemented in subclass")
-
-     @abstractmethod
-     def chat(
-         self,
-         prompt: str,
-         stream: bool = False,
-         optimizer: str = None,
-         conversationally: bool = False,
-     ) -> str:
-         """Generate response `str`
-         Args:
-             prompt (str): Prompt to be sent
-             stream (bool, optional): Flag for streaming response. Defaults to False.
-             optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`
-             conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.
-         Returns:
-             str: Response generated
-         """
-         raise NotImplementedError("Method needs to be implemented in subclass")
-
-     @abstractmethod
-     def get_message(self, response: dict) -> str:
-         """Retrieves message only from response
-
-         Args:
-             response (dict): Response generated by `self.ask`
-
-         Returns:
-             str: Message extracted
-         """
+ from abc import ABC
+ from abc import abstractmethod
+
+
+ class Provider(ABC):
+     """Base class for models class"""
+
+     @abstractmethod
+     def ask(
+         self,
+         prompt: str,
+         stream: bool = False,
+         raw: bool = False,
+         optimizer: str = None,
+         conversationally: bool = False,
+     ) -> dict:
+         """Chat with AI
+
+         Args:
+             prompt (str): Prompt to be sent
+             stream (bool, optional): Flag for streaming response. Defaults to False.
+             raw (bool, optional): Stream back raw response as received
+             optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`
+             conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.
+         Returns:
+             dict : {}
+         ```json
+         {
+             "completion": "\nNext: domestic cat breeds with short hair >>",
+             "stop_reason": null,
+             "truncated": false,
+             "stop": null,
+             "model": "llama-2-13b-chat",
+             "log_id": "cmpl-3kYiYxSNDvgMShSzFooz6t",
+             "exception": null
+         }
+         ```
+         """
+         raise NotImplementedError("Method needs to be implemented in subclass")
+
+     @abstractmethod
+     def chat(
+         self,
+         prompt: str,
+         stream: bool = False,
+         optimizer: str = None,
+         conversationally: bool = False,
+     ) -> str:
+         """Generate response `str`
+         Args:
+             prompt (str): Prompt to be sent
+             stream (bool, optional): Flag for streaming response. Defaults to False.
+             optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`
+             conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.
+         Returns:
+             str: Response generated
+         """
+         raise NotImplementedError("Method needs to be implemented in subclass")
+
+     @abstractmethod
+     def get_message(self, response: dict) -> str:
+         """Retrieves message only from response
+
+         Args:
+             response (dict): Response generated by `self.ask`
+
+         Returns:
+             str: Message extracted
+         """
          raise NotImplementedError("Method needs to be implemented in subclass")
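For context, the `Provider` abstract base class above defines the contract that webscout backends implement: `ask` returns the raw response dict, `get_message` extracts the text from that dict, and `chat` returns the text directly. The sketch below is a minimal, hypothetical subclass showing how those pieces fit together; `EchoProvider` and its canned response are invented for illustration and are not part of the package.

```python
# Minimal sketch only: EchoProvider is a hypothetical subclass used to show how
# the Provider contract (ask / chat / get_message) fits together. A real provider
# would call an upstream AI service instead of echoing the prompt.
from webscout.AIbase import Provider


class EchoProvider(Provider):
    def ask(
        self,
        prompt: str,
        stream: bool = False,
        raw: bool = False,
        optimizer: str = None,
        conversationally: bool = False,
    ) -> dict:
        # A real implementation would send `prompt` to a backend and return its
        # response payload; here we fabricate a dict in the documented shape.
        return {"completion": f"echo: {prompt}", "stop_reason": None, "exception": None}

    def chat(
        self,
        prompt: str,
        stream: bool = False,
        optimizer: str = None,
        conversationally: bool = False,
    ) -> str:
        # chat() is typically a thin wrapper: call ask() and extract the text.
        return self.get_message(self.ask(prompt, stream=stream))

    def get_message(self, response: dict) -> str:
        # Pull the text field out of the dict produced by ask().
        return response["completion"]


print(EchoProvider().chat("hello"))  # -> "echo: hello"
```

Because all three abstract methods are implemented, the subclass can be instantiated; omitting any of them would raise a `TypeError` at construction time, which is the point of declaring them with `@abstractmethod`.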