igbot-base 0.0.18__tar.gz → 0.0.20__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {igbot_base-0.0.18 → igbot_base-0.0.20}/PKG-INFO +2 -1
- {igbot_base-0.0.18 → igbot_base-0.0.20}/igbot_base/agent.py +4 -3
- {igbot_base-0.0.18 → igbot_base-0.0.20}/igbot_base/agent_response.py +0 -1
- igbot_base-0.0.20/igbot_base/models.py +36 -0
- igbot_base-0.0.20/igbot_base/tokenizer.py +16 -0
- {igbot_base-0.0.18 → igbot_base-0.0.20}/igbot_base.egg-info/PKG-INFO +2 -1
- {igbot_base-0.0.18 → igbot_base-0.0.20}/igbot_base.egg-info/SOURCES.txt +1 -0
- igbot_base-0.0.20/igbot_base.egg-info/requires.txt +2 -0
- {igbot_base-0.0.18 → igbot_base-0.0.20}/pyproject.toml +3 -2
- igbot_base-0.0.18/igbot_base/models.py +0 -22
- igbot_base-0.0.18/igbot_base.egg-info/requires.txt +0 -1
- {igbot_base-0.0.18 → igbot_base-0.0.20}/igbot_base/__init__.py +0 -0
- {igbot_base-0.0.18 → igbot_base-0.0.20}/igbot_base/base_exception.py +0 -0
- {igbot_base-0.0.18 → igbot_base-0.0.20}/igbot_base/exception_handler.py +0 -0
- {igbot_base-0.0.18 → igbot_base-0.0.20}/igbot_base/llm.py +0 -0
- {igbot_base-0.0.18 → igbot_base-0.0.20}/igbot_base/llmmemory.py +0 -0
- {igbot_base-0.0.18 → igbot_base-0.0.20}/igbot_base/logging_adapter.py +0 -0
- {igbot_base-0.0.18 → igbot_base-0.0.20}/igbot_base/persistable_memory.py +0 -0
- {igbot_base-0.0.18 → igbot_base-0.0.20}/igbot_base/prompt_template.py +0 -0
- {igbot_base-0.0.18 → igbot_base-0.0.20}/igbot_base/response_formats.py +0 -0
- {igbot_base-0.0.18 → igbot_base-0.0.20}/igbot_base/retriever.py +0 -0
- {igbot_base-0.0.18 → igbot_base-0.0.20}/igbot_base/tool.py +0 -0
- {igbot_base-0.0.18 → igbot_base-0.0.20}/igbot_base/vectorstore.py +0 -0
- {igbot_base-0.0.18 → igbot_base-0.0.20}/igbot_base.egg-info/dependency_links.txt +0 -0
- {igbot_base-0.0.18 → igbot_base-0.0.20}/igbot_base.egg-info/top_level.txt +0 -0
- {igbot_base-0.0.18 → igbot_base-0.0.20}/setup.cfg +0 -0
@@ -1,8 +1,9 @@
|
|
1
1
|
Metadata-Version: 2.4
|
2
2
|
Name: igbot_base
|
3
|
-
Version: 0.0.18
|
3
|
+
Version: 0.0.20
|
4
4
|
Summary: Base classes for igbot
|
5
5
|
Author-email: Igor Kopeć <igor.kopec95@gmail.com>
|
6
6
|
License: LGPL-3.0-or-later
|
7
7
|
Requires-Python: >=3.12
|
8
8
|
Requires-Dist: openai
|
9
|
+
Requires-Dist: tiktoken
|
@@ -4,6 +4,7 @@ from igbot_base.agent_response import AgentResponse
|
|
4
4
|
|
5
5
|
from igbot_base.exception_handler import ExceptionHandler, ReturnFailedResponseGracefully
|
6
6
|
from igbot_base.logging_adapter import get_logger
|
7
|
+
from llmmemory import LlmMemory
|
7
8
|
|
8
9
|
logger = get_logger("application")
|
9
10
|
|
@@ -14,15 +15,15 @@ class Agent(ABC):
|
|
14
15
|
self.__name = name
|
15
16
|
self.__ex_handler = exception_handler
|
16
17
|
|
17
|
-
def invoke(self, query) -> AgentResponse:
|
18
|
+
def invoke(self, query, memory: LlmMemory) -> AgentResponse:
|
18
19
|
try:
|
19
|
-
return self._invoke(query)
|
20
|
+
return self._invoke(query, memory)
|
20
21
|
except Exception as e:
|
21
22
|
logger.exception("Exception occurred at %s for query %s: %s", self.describe(), query, e)
|
22
23
|
return self.__ex_handler.handle(e)
|
23
24
|
|
24
25
|
@abstractmethod
|
25
|
-
def _invoke(self, query) -> AgentResponse:
|
26
|
+
def _invoke(self, query, memory: LlmMemory) -> AgentResponse:
|
26
27
|
pass
|
27
28
|
|
28
29
|
@abstractmethod
|
@@ -0,0 +1,36 @@
|
|
1
|
+
from enum import Enum
|
2
|
+
from openai import OpenAI
|
3
|
+
|
4
|
+
from tokenizer import BaseTokenizer, OpenAiTokenizer
|
5
|
+
|
6
|
+
|
7
|
+
class ModelInfo:
|
8
|
+
|
9
|
+
def __init__(self, name, client, tokenizer: BaseTokenizer, max_tokens):
|
10
|
+
self.__name = name
|
11
|
+
self.__client = client
|
12
|
+
self.__tokenizer = tokenizer
|
13
|
+
self.__max_tokens = max_tokens
|
14
|
+
|
15
|
+
def get_name(self):
|
16
|
+
return self.__name
|
17
|
+
|
18
|
+
def get_client(self):
|
19
|
+
return self.__client()
|
20
|
+
|
21
|
+
def get_tokenizer(self) -> BaseTokenizer:
|
22
|
+
return self.__tokenizer
|
23
|
+
|
24
|
+
def get_max_tokens(self):
|
25
|
+
return self.__max_tokens
|
26
|
+
|
27
|
+
|
28
|
+
class Model(Enum):
|
29
|
+
OLLAMA_3_2_LOCAL = ModelInfo("llama3.2", lambda: OpenAI(base_url="http://localhost:11434/v1", api_key='ollama'),
|
30
|
+
OpenAiTokenizer("gpt-4o"), 128_000)
|
31
|
+
OPENAI_GPT_4o_MINI = ModelInfo("gpt-4o-mini", lambda: OpenAI(),
|
32
|
+
OpenAiTokenizer("gpt-4o"), 128_000)
|
33
|
+
OPENAI_GPT_4o = ModelInfo("gpt-4o", lambda: OpenAI(),
|
34
|
+
OpenAiTokenizer("gpt-4o"), 128_000)
|
35
|
+
OPENAI_GPT_4o_MINI_JSON = ModelInfo("gpt-4o-mini-2024-07-18", lambda: OpenAI(),
|
36
|
+
OpenAiTokenizer("gpt-4o"), 128_000)
|
@@ -0,0 +1,16 @@
|
|
1
|
+
from abc import ABC, abstractmethod
|
2
|
+
import tiktoken
|
3
|
+
|
4
|
+
|
5
|
+
class BaseTokenizer(ABC):
|
6
|
+
@abstractmethod
|
7
|
+
def count_tokens(self, text: str):
|
8
|
+
pass
|
9
|
+
|
10
|
+
|
11
|
+
class OpenAiTokenizer(BaseTokenizer):
|
12
|
+
def __init__(self, model_name):
|
13
|
+
self.__tokenizer = tiktoken.encoding_for_model(model_name)
|
14
|
+
|
15
|
+
def count_tokens(self, text: str):
|
16
|
+
return len(self.__tokenizer.encode(text))
|
@@ -1,8 +1,9 @@
|
|
1
1
|
Metadata-Version: 2.4
|
2
2
|
Name: igbot_base
|
3
|
-
Version: 0.0.18
|
3
|
+
Version: 0.0.20
|
4
4
|
Summary: Base classes for igbot
|
5
5
|
Author-email: Igor Kopeć <igor.kopec95@gmail.com>
|
6
6
|
License: LGPL-3.0-or-later
|
7
7
|
Requires-Python: >=3.12
|
8
8
|
Requires-Dist: openai
|
9
|
+
Requires-Dist: tiktoken
|
@@ -4,13 +4,14 @@ build-backend = "setuptools.build_meta"
|
|
4
4
|
|
5
5
|
[project]
|
6
6
|
name = "igbot_base"
|
7
|
-
version = "0.0.18"
|
7
|
+
version = "0.0.20"
|
8
8
|
description = "Base classes for igbot"
|
9
9
|
authors = [{name = "Igor Kopeć", email = "igor.kopec95@gmail.com"}]
|
10
10
|
license={text="LGPL-3.0-or-later"}
|
11
11
|
requires-python = ">=3.12"
|
12
12
|
dependencies = [
|
13
|
-
"openai"
|
13
|
+
"openai",
|
14
|
+
"tiktoken"
|
14
15
|
]
|
15
16
|
|
16
17
|
[tool.setuptools.packages.find]
|
@@ -1,22 +0,0 @@
|
|
1
|
-
from enum import Enum
|
2
|
-
from openai import OpenAI
|
3
|
-
|
4
|
-
|
5
|
-
class ModelInfo:
|
6
|
-
|
7
|
-
def __init__(self, name, client):
|
8
|
-
self.__name = name
|
9
|
-
self.__client = client
|
10
|
-
|
11
|
-
def get_name(self):
|
12
|
-
return self.__name
|
13
|
-
|
14
|
-
def get_client(self):
|
15
|
-
return self.__client()
|
16
|
-
|
17
|
-
|
18
|
-
class Model(Enum):
|
19
|
-
OLLAMA_3_2_LOCAL = ModelInfo("llama3.2", lambda: OpenAI(base_url="http://localhost:11434/v1", api_key='ollama'))
|
20
|
-
OPENAI_GPT_4o_MINI = ModelInfo("gpt-4o-mini", lambda: OpenAI())
|
21
|
-
OPENAI_GPT_4o = ModelInfo("gpt-4o", lambda: OpenAI())
|
22
|
-
OPENAI_GPT_4o_MINI_JSON = ModelInfo("gpt-4o-mini-2024-07-18", lambda: OpenAI())
|
@@ -1 +0,0 @@
|
|
1
|
-
openai
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|