cwyodmodules-0.3.35-py3-none-any.whl → cwyodmodules-0.3.37-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cwyodmodules/api/chat_history.py +10 -9
- cwyodmodules/batch/utilities/chat_history/auth_utils.py +3 -2
- cwyodmodules/batch/utilities/chat_history/cosmosdb.py +13 -12
- cwyodmodules/batch/utilities/chat_history/postgresdbservice.py +13 -12
- cwyodmodules/batch/utilities/common/source_document.py +9 -8
- cwyodmodules/batch/utilities/document_chunking/fixed_size_overlap.py +2 -1
- cwyodmodules/batch/utilities/document_chunking/layout.py +2 -1
- cwyodmodules/batch/utilities/document_chunking/page.py +2 -1
- cwyodmodules/batch/utilities/document_loading/read.py +2 -1
- cwyodmodules/batch/utilities/helpers/azure_computer_vision_client.py +7 -6
- cwyodmodules/batch/utilities/helpers/azure_form_recognizer_helper.py +3 -2
- cwyodmodules/batch/utilities/helpers/azure_postgres_helper.py +11 -10
- cwyodmodules/batch/utilities/helpers/azure_postgres_helper_light_rag.py +11 -10
- cwyodmodules/batch/utilities/helpers/azure_search_helper.py +10 -9
- cwyodmodules/batch/utilities/helpers/config/config_helper.py +20 -19
- cwyodmodules/batch/utilities/helpers/embedders/integrated_vectorization_embedder.py +12 -3
- cwyodmodules/batch/utilities/helpers/embedders/postgres_embedder.py +8 -2
- cwyodmodules/batch/utilities/helpers/embedders/push_embedder.py +11 -2
- cwyodmodules/batch/utilities/helpers/env_helper.py +4 -1
- cwyodmodules/batch/utilities/helpers/lightrag_helper.py +5 -4
- cwyodmodules/batch/utilities/helpers/llm_helper.py +10 -9
- cwyodmodules/batch/utilities/helpers/secret_helper.py +3 -3
- cwyodmodules/batch/utilities/integrated_vectorization/azure_search_index.py +5 -4
- cwyodmodules/batch/utilities/integrated_vectorization/azure_search_indexer.py +4 -3
- cwyodmodules/batch/utilities/integrated_vectorization/azure_search_skillset.py +2 -1
- cwyodmodules/batch/utilities/orchestrator/lang_chain_agent.py +4 -3
- cwyodmodules/batch/utilities/orchestrator/open_ai_functions.py +2 -1
- cwyodmodules/batch/utilities/orchestrator/orchestrator_base.py +5 -4
- cwyodmodules/batch/utilities/orchestrator/prompt_flow.py +5 -4
- cwyodmodules/batch/utilities/orchestrator/semantic_kernel_orchestrator.py +2 -1
- cwyodmodules/batch/utilities/parser/output_parser_tool.py +5 -4
- cwyodmodules/batch/utilities/plugins/outlook_calendar_plugin.py +4 -3
- cwyodmodules/batch/utilities/search/azure_search_handler.py +12 -11
- cwyodmodules/batch/utilities/search/azure_search_handler_light_rag.py +10 -9
- cwyodmodules/batch/utilities/search/integrated_vectorization_search_handler.py +12 -11
- cwyodmodules/batch/utilities/search/lightrag_search_handler.py +9 -8
- cwyodmodules/batch/utilities/search/postgres_search_handler.py +13 -12
- cwyodmodules/batch/utilities/search/postgres_search_handler_light_rag.py +14 -13
- cwyodmodules/batch/utilities/search/search.py +3 -2
- cwyodmodules/batch/utilities/tools/content_safety_checker.py +5 -4
- cwyodmodules/batch/utilities/tools/post_prompt_tool.py +2 -1
- cwyodmodules/batch/utilities/tools/question_answer_tool.py +8 -7
- cwyodmodules/batch/utilities/tools/text_processing_tool.py +2 -1
- {cwyodmodules-0.3.35.dist-info → cwyodmodules-0.3.37.dist-info}/METADATA +2 -2
- {cwyodmodules-0.3.35.dist-info → cwyodmodules-0.3.37.dist-info}/RECORD +48 -48
- {cwyodmodules-0.3.35.dist-info → cwyodmodules-0.3.37.dist-info}/WHEEL +0 -0
- {cwyodmodules-0.3.35.dist-info → cwyodmodules-0.3.37.dist-info}/licenses/LICENSE +0 -0
- {cwyodmodules-0.3.35.dist-info → cwyodmodules-0.3.37.dist-info}/top_level.txt +0 -0
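
Every changed module follows the same pattern: a module-level `log_execution` flag is now read from `EnvHelper` (next to the existing `log_args` and `log_result` flags) and passed to the `@logger.trace_function` decorator on each traced function. The sketch below illustrates that pattern only; the `trace_function` factory shown here is a hypothetical stand-in, since the real implementation lives in the package's `logging_config` module and is not part of this diff (it also wraps the package's async methods, which this sketch omits), and the hard-coded booleans stand in for the `EnvHelper` attributes.

```python
# Minimal sketch of the tracing pattern introduced in 0.3.37.
# The real trace_function is provided by the package's logging_config module;
# this decorator is a hypothetical stand-in that only shows how the three
# flags could be honored.
import functools
import logging

logger = logging.getLogger("cwyodmodules")


def trace_function(log_execution=True, log_args=True, log_result=True):
    """Hypothetical decorator factory: trace calls according to the flags."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            if log_execution:
                logger.debug("Executing %s", func.__name__)
            if log_args:
                logger.debug("Args for %s: %r %r", func.__name__, args, kwargs)
            result = func(*args, **kwargs)
            if log_result:
                logger.debug("Result of %s: %r", func.__name__, result)
            return result
        return wrapper
    return decorator


# Call-site pattern as it appears throughout the diff: the flags are read once
# per module (from EnvHelper in the real code) and passed to the decorator.
log_execution = True   # env_helper.LOG_EXECUTION in the real modules
log_args = True        # env_helper.LOG_ARGS
log_result = True      # env_helper.LOG_RESULT


@trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
def init_database_client():
    ...
```

Reading the flags once at import time keeps the per-call cost of tracing down to a few conditionals, and the new `log_execution` flag lets execution tracing be switched off independently of argument and result logging.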
cwyodmodules/api/chat_history.py
CHANGED
@@ -16,10 +16,11 @@ from logging_config import logger
 bp_chat_history_response = Blueprint("chat_history", __name__)

 env_helper: EnvHelper = EnvHelper()
+log_execution = env_helper.LOG_EXECUTION
 log_args = env_helper.LOG_ARGS
 log_result = env_helper.LOG_RESULT

-@logger.trace_function(log_args=log_args, log_result=log_result)
+@logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
 def init_database_client():
     try:
         conversation_client = DatabaseFactory.get_conversation_client()
@@ -28,7 +29,7 @@ def init_database_client():
         logger.exception("Exception in database initialization: %s", e)
         raise e

-@logger.trace_function(log_args=log_args, log_result=log_result)
+@logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
 def init_openai_client():
     try:
         if env_helper.is_auth_type_keys():
@@ -50,7 +51,7 @@ def init_openai_client():


 @bp_chat_history_response.route("/history/list", methods=["GET"])
-@logger.trace_function(log_args=log_args, log_result=log_result)
+@logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
 async def list_conversations():
     config = ConfigHelper.get_active_config_or_default()
     if not config.enable_chat_history:
@@ -90,7 +91,7 @@ async def list_conversations():


 @bp_chat_history_response.route("/history/rename", methods=["POST"])
-@logger.trace_function(log_args=log_args, log_result=log_result)
+@logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
 async def rename_conversation():
     config = ConfigHelper.get_active_config_or_default()
     if not config.enable_chat_history:
@@ -153,7 +154,7 @@ async def rename_conversation():


 @bp_chat_history_response.route("/history/read", methods=["POST"])
-@logger.trace_function(log_args=log_args, log_result=log_result)
+@logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
 async def get_conversation():
     config = ConfigHelper.get_active_config_or_default()
     if not config.enable_chat_history:
@@ -226,7 +227,7 @@ async def get_conversation():


 @bp_chat_history_response.route("/history/delete", methods=["DELETE"])
-@logger.trace_function(log_args=log_args, log_result=log_result)
+@logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
 async def delete_conversation():
     config = ConfigHelper.get_active_config_or_default()
     if not config.enable_chat_history:
@@ -287,7 +288,7 @@ async def delete_conversation():


 @bp_chat_history_response.route("/history/delete_all", methods=["DELETE"])
-@logger.trace_function(log_args=log_args, log_result=log_result)
+@logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
 async def delete_all_conversations():
     config = ConfigHelper.get_active_config_or_default()

@@ -359,7 +360,7 @@ async def delete_all_conversations():


 @bp_chat_history_response.route("/history/update", methods=["POST"])
-@logger.trace_function(log_args=log_args, log_result=log_result)
+@logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
 async def update_conversation():
     config = ConfigHelper.get_active_config_or_default()
     if not config.enable_chat_history:
@@ -461,7 +462,7 @@ async def update_conversation():


 @bp_chat_history_response.route("/history/frontend_settings", methods=["GET"])
-@logger.trace_function(log_args=log_args, log_result=log_result)
+@logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
 def get_frontend_settings():
     try:
         # Clear the cache for the config helper method
cwyodmodules/batch/utilities/chat_history/auth_utils.py
CHANGED
@@ -4,10 +4,11 @@ import json
 from ...utilities.helpers.env_helper import EnvHelper
 from logging_config import logger
 env_helper: EnvHelper = EnvHelper()
+log_execution = env_helper.LOG_EXECUTION
 log_args = env_helper.LOG_ARGS
 log_result = env_helper.LOG_RESULT

-@logger.trace_function(log_args=log_args, log_result=log_result)
+@logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
 def get_authenticated_user_details(request_headers):
     user_object = {}

@@ -30,7 +31,7 @@ def get_authenticated_user_details(request_headers):

     return user_object

-@logger.trace_function(log_args=log_args, log_result=log_result)
+@logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
 def get_tenantid(client_principal_b64):
     tenant_id = ""
     if client_principal_b64:
cwyodmodules/batch/utilities/chat_history/cosmosdb.py
CHANGED
@@ -6,6 +6,7 @@ from .database_client_base import DatabaseClientBase
 from ...utilities.helpers.env_helper import EnvHelper
 from logging_config import logger
 env_helper: EnvHelper = EnvHelper()
+log_execution = env_helper.LOG_EXECUTION
 log_args = env_helper.LOG_ARGS
 log_result = env_helper.LOG_RESULT

@@ -48,15 +49,15 @@ class CosmosConversationClient(DatabaseClientBase):
         except exceptions.CosmosResourceNotFoundError:
             raise ValueError("Invalid CosmosDB container name")

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def connect(self):
         pass

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def close(self):
         pass

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def ensure(self):
         if (
             not self.cosmosdb_client
@@ -79,7 +80,7 @@ class CosmosConversationClient(DatabaseClientBase):

         return True, "CosmosDB client initialized successfully"

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def create_conversation(self, user_id, conversation_id, title=""):
         conversation = {
             "id": conversation_id,
@@ -97,7 +98,7 @@ class CosmosConversationClient(DatabaseClientBase):
         else:
             return False

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def upsert_conversation(self, conversation):
         resp = await self.container_client.upsert_item(conversation)
         if resp:
@@ -105,7 +106,7 @@ class CosmosConversationClient(DatabaseClientBase):
         else:
             return False

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def delete_conversation(self, user_id, conversation_id):
         conversation = await self.container_client.read_item(
             item=conversation_id, partition_key=user_id
@@ -118,7 +119,7 @@ class CosmosConversationClient(DatabaseClientBase):
         else:
             return True

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def delete_messages(self, conversation_id, user_id):
         # get a list of all the messages in the conversation
         messages = await self.get_messages(user_id, conversation_id)
@@ -131,7 +132,7 @@ class CosmosConversationClient(DatabaseClientBase):
             response_list.append(resp)
         return response_list

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def get_conversations(self, user_id, limit, sort_order="DESC", offset=0):
         parameters = [{"name": "@userId", "value": user_id}]
         query = f"SELECT * FROM c where c.userId = @userId and c.type='conversation' order by c.updatedAt {sort_order}"
@@ -146,7 +147,7 @@ class CosmosConversationClient(DatabaseClientBase):

         return conversations

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def get_conversation(self, user_id, conversation_id):
         parameters = [
             {"name": "@conversationId", "value": conversation_id},
@@ -165,7 +166,7 @@ class CosmosConversationClient(DatabaseClientBase):
         else:
             return conversations[0]

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def create_message(self, uuid, conversation_id, user_id, input_message: dict):
         message = {
             "id": uuid,
@@ -193,7 +194,7 @@ class CosmosConversationClient(DatabaseClientBase):
         else:
             return False

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def update_message_feedback(self, user_id, message_id, feedback):
         message = await self.container_client.read_item(
             item=message_id, partition_key=user_id
@@ -205,7 +206,7 @@ class CosmosConversationClient(DatabaseClientBase):
         else:
             return False

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def get_messages(self, user_id, conversation_id):
         parameters = [
             {"name": "@conversationId", "value": conversation_id},
cwyodmodules/batch/utilities/chat_history/postgresdbservice.py
CHANGED
@@ -6,6 +6,7 @@ from .database_client_base import DatabaseClientBase
 from ...utilities.helpers.env_helper import EnvHelper
 from logging_config import logger
 env_helper: EnvHelper = EnvHelper()
+log_execution = env_helper.LOG_EXECUTION
 log_args = env_helper.LOG_ARGS
 log_result = env_helper.LOG_RESULT

@@ -22,7 +23,7 @@ class PostgresConversationClient(DatabaseClientBase):
         self.enable_message_feedback = enable_message_feedback
         self.conn = None

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def connect(self):
         try:
             access_information = self.azure_identity_helper.get_token(scopes="https://ossrdbms-aad.database.windows.net/.default")
@@ -40,13 +41,13 @@ class PostgresConversationClient(DatabaseClientBase):
             logger.error("Failed to connect to PostgreSQL: %s", e, exc_info=True)
             raise

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def close(self):
         if self.conn:
             await self.conn.close()
             logger.info("PostgreSQL connection closed")

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def ensure(self):
         if not self.conn:
             logger.warning("PostgreSQL client not initialized correctly")
@@ -54,7 +55,7 @@ class PostgresConversationClient(DatabaseClientBase):
         logger.info("PostgreSQL client initialized successfully")
         return True, "PostgreSQL client initialized successfully"

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def create_conversation(self, conversation_id, user_id, title=""):
         utc_now = datetime.now(timezone.utc)
         createdAt = utc_now.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
@@ -82,7 +83,7 @@ class PostgresConversationClient(DatabaseClientBase):
             )
             raise

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def upsert_conversation(self, conversation):
         query = """
             INSERT INTO conversations (id, conversation_id, type, "createdAt", "updatedAt", user_id, title)
@@ -118,7 +119,7 @@ class PostgresConversationClient(DatabaseClientBase):
             )
             raise

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def delete_conversation(self, user_id, conversation_id):
         query = (
             "DELETE FROM conversations WHERE conversation_id = $1 AND user_id = $2"
@@ -136,7 +137,7 @@ class PostgresConversationClient(DatabaseClientBase):
             )
             raise

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def delete_messages(self, conversation_id, user_id):
         query = "DELETE FROM messages WHERE conversation_id = $1 AND user_id = $2 RETURNING *"
         try:
@@ -152,7 +153,7 @@ class PostgresConversationClient(DatabaseClientBase):
             )
             raise

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def get_conversations(self, user_id, limit=None, sort_order="DESC", offset=0):
         try:
             offset = int(offset)  # Ensure offset is an integer
@@ -186,7 +187,7 @@ class PostgresConversationClient(DatabaseClientBase):
             )
             return [dict(conversation) for conversation in conversations]

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def get_conversation(self, user_id, conversation_id):
         query = "SELECT * FROM conversations WHERE id = $1 AND user_id = $2 AND type = 'conversation'"
         try:
@@ -208,7 +209,7 @@ class PostgresConversationClient(DatabaseClientBase):
             )
             raise

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def create_message(self, uuid, conversation_id, user_id, input_message: dict):
         message_id = uuid
         utc_now = datetime.now(timezone.utc)
@@ -252,7 +253,7 @@ class PostgresConversationClient(DatabaseClientBase):
             )
             raise

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def update_message_feedback(self, user_id, message_id, feedback):
         query = "UPDATE messages SET feedback = $1 WHERE id = $2 AND user_id = $3 RETURNING *"
         try:
@@ -274,7 +275,7 @@ class PostgresConversationClient(DatabaseClientBase):
             )
             raise

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     async def get_messages(self, user_id, conversation_id):
         query = 'SELECT * FROM messages WHERE conversation_id = $1 AND user_id = $2 ORDER BY "createdAt" ASC'
         try:
cwyodmodules/batch/utilities/common/source_document.py
CHANGED
@@ -7,6 +7,7 @@ from ..helpers.azure_blob_storage_client import AzureBlobStorageClient
 from ...utilities.helpers.env_helper import EnvHelper
 from logging_config import logger
 env_helper: EnvHelper = EnvHelper()
+log_execution = env_helper.LOG_EXECUTION
 log_args = env_helper.LOG_ARGS
 log_result = env_helper.LOG_RESULT

@@ -51,20 +52,20 @@ class SourceDocument:
         )
         return False

-    @logger.trace_function(log_args=False, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=False, log_result=False)
     def to_json(self):
         json_string = json.dumps(self, cls=SourceDocumentEncoder)
         logger.debug(f"Serialized SourceDocument to JSON: {json_string}")
         return json_string

     @classmethod
-    @logger.trace_function(log_args=False, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=False, log_result=False)
     def from_json(cls, json_string):
         source_document = json.loads(json_string, cls=SourceDocumentDecoder)
         return source_document

     @classmethod
-    @logger.trace_function(log_args=False, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=False, log_result=False)
     def from_dict(cls, dict_obj):
         logger.debug(f"Creating SourceDocument from dict: {dict_obj}")
         return cls(
@@ -79,7 +80,7 @@ class SourceDocument:
         )

     @classmethod
-    @logger.trace_function(log_args=False, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=False, log_result=False)
     def from_metadata(
         cls: Type["SourceDocument"],
         content: str,
@@ -113,7 +114,7 @@ class SourceDocument:
         )
         return source_document

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     def get_filename(self, include_path=False):
         filename = self.source.replace("_SAS_TOKEN_PLACEHOLDER_", "").replace(
             "http://", ""
@@ -127,7 +128,7 @@ class SourceDocument:
         )
         return filename

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     def get_markdown_url(self):
         url = quote(self.source, safe=":/")
         if "_SAS_TOKEN_PLACEHOLDER_" in url:
@@ -139,7 +140,7 @@ class SourceDocument:


 class SourceDocumentEncoder(json.JSONEncoder):
-    @logger.trace_function(log_args=False, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=False, log_result=False)
     def default(self, obj):
         if isinstance(obj, SourceDocument):
             logger.debug(f"Encoding SourceDocument: {obj}")
@@ -157,7 +158,7 @@ class SourceDocumentEncoder(json.JSONEncoder):


 class SourceDocumentDecoder(json.JSONDecoder):
-    @logger.trace_function(log_args=False, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=False, log_result=False)
     def decode(self, s, **kwargs):
         logger.debug(f"Decoding JSON string: {s}")
         obj = super().decode(s, **kwargs)
cwyodmodules/batch/utilities/document_chunking/fixed_size_overlap.py
CHANGED
@@ -6,6 +6,7 @@ from ..common.source_document import SourceDocument
 from ...utilities.helpers.env_helper import EnvHelper
 from logging_config import logger
 env_helper: EnvHelper = EnvHelper()
+log_execution = env_helper.LOG_EXECUTION
 log_args = env_helper.LOG_ARGS
 log_result = env_helper.LOG_RESULT

@@ -13,7 +14,7 @@ class FixedSizeOverlapDocumentChunking(DocumentChunkingBase):
     def __init__(self) -> None:
         pass

-    @logger.trace_function(log_args=False, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=False, log_result=False)
     def chunk(
         self, documents: List[SourceDocument], chunking: ChunkingSettings
     ) -> List[SourceDocument]:
cwyodmodules/batch/utilities/document_chunking/layout.py
CHANGED
@@ -6,6 +6,7 @@ from ..common.source_document import SourceDocument
 from ...utilities.helpers.env_helper import EnvHelper
 from logging_config import logger
 env_helper: EnvHelper = EnvHelper()
+log_execution = env_helper.LOG_EXECUTION
 log_args = env_helper.LOG_ARGS
 log_result = env_helper.LOG_RESULT

@@ -13,7 +14,7 @@ class LayoutDocumentChunking(DocumentChunkingBase):
     def __init__(self) -> None:
         pass

-    @logger.trace_function(log_args=False, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=False, log_result=False)
     def chunk(
         self, documents: List[SourceDocument], chunking: ChunkingSettings
     ) -> List[SourceDocument]:
cwyodmodules/batch/utilities/document_chunking/page.py
CHANGED
@@ -6,6 +6,7 @@ from ..common.source_document import SourceDocument
 from ...utilities.helpers.env_helper import EnvHelper
 from logging_config import logger
 env_helper: EnvHelper = EnvHelper()
+log_execution = env_helper.LOG_EXECUTION
 log_args = env_helper.LOG_ARGS
 log_result = env_helper.LOG_RESULT

@@ -13,7 +14,7 @@ class PageDocumentChunking(DocumentChunkingBase):
     def __init__(self) -> None:
         pass

-    @logger.trace_function(log_args=False, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=False, log_result=False)
     def chunk(
         self, documents: List[SourceDocument], chunking: ChunkingSettings
     ) -> List[SourceDocument]:
cwyodmodules/batch/utilities/document_loading/read.py
CHANGED
@@ -6,6 +6,7 @@ from ..common.source_document import SourceDocument
 from ...utilities.helpers.env_helper import EnvHelper
 from logging_config import logger
 env_helper: EnvHelper = EnvHelper()
+log_execution = env_helper.LOG_EXECUTION
 log_args = env_helper.LOG_ARGS
 log_result = env_helper.LOG_RESULT

@@ -14,7 +15,7 @@ class ReadDocumentLoading(DocumentLoadingBase):
     def __init__(self) -> None:
         super().__init__()

-    @logger.trace_function(log_args=log_args, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=False)
     def load(self, document_url: str) -> List[SourceDocument]:
         logger.info(f"Loading document from URL: {document_url}")
         try:
cwyodmodules/batch/utilities/helpers/azure_computer_vision_client.py
CHANGED
@@ -7,6 +7,7 @@ from requests import Response
 from .env_helper import EnvHelper
 from logging_config import logger
 env_helper: EnvHelper = EnvHelper()
+log_execution = env_helper.LOG_EXECUTION
 log_args = env_helper.LOG_ARGS
 log_result = env_helper.LOG_RESULT

@@ -29,7 +30,7 @@ class AzureComputerVisionClient:
             env_helper.AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION
         )

-    @logger.trace_function(log_args=log_args, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=False)
     def vectorize_image(self, image_url: str) -> list[float]:
         logger.info(f"Making call to computer vision to vectorize image: {image_url}")
         response = self.__make_request(
@@ -41,7 +42,7 @@ class AzureComputerVisionClient:
         response_json = self.__get_json_body(response)
         return self.__get_vectors(response_json)

-    @logger.trace_function(log_args=False, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=False, log_result=False)
     def vectorize_text(self, text: str) -> list[float]:
         logger.debug(f"Making call to computer vision to vectorize text: {text}")
         response = self.__make_request(
@@ -53,7 +54,7 @@ class AzureComputerVisionClient:
         response_json = self.__get_json_body(response)
         return self.__get_vectors(response_json)

-    @logger.trace_function(log_args=False, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=False, log_result=False)
     def __make_request(self, path: str, body) -> Response:
         try:
             headers = {}
@@ -78,14 +79,14 @@ class AzureComputerVisionClient:
         except Exception as e:
             raise Exception("Call to Azure Computer Vision failed") from e

-    @logger.trace_function(log_args=False, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=False, log_result=log_result)
     def __validate_response(self, response: Response):
         if response.status_code != 200:
             raise Exception(
                 f"Call to Azure Computer Vision failed with status: {response.status_code}, body: {response.text}"
             )

-    @logger.trace_function(log_args=False, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=False, log_result=False)
     def __get_json_body(self, response: Response) -> dict:
         try:
             return response.json()
@@ -94,7 +95,7 @@ class AzureComputerVisionClient:
                 f"Call to Azure Computer Vision returned malformed response body: {response.text}",
             ) from e

-    @logger.trace_function(log_args=False, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=False, log_result=log_result)
     def __get_vectors(self, response_json: dict) -> list[float]:
         if self.__RESPONSE_VECTOR_KEY in response_json:
             return response_json[self.__RESPONSE_VECTOR_KEY]
cwyodmodules/batch/utilities/helpers/azure_form_recognizer_helper.py
CHANGED
@@ -7,6 +7,7 @@ from .env_helper import EnvHelper

 from logging_config import logger
 env_helper: EnvHelper = EnvHelper()
+log_execution = env_helper.LOG_EXECUTION
 log_args = env_helper.LOG_ARGS
 log_result = env_helper.LOG_RESULT

@@ -47,7 +48,7 @@ class AzureFormRecognizerClient:
         "paragraph": "p",
     }

-    @logger.trace_function(log_args=False, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=False, log_result=False)
     def _table_to_html(self, table):
         table_html = "<table>"
         rows = [
@@ -75,7 +76,7 @@ class AzureFormRecognizerClient:
         table_html += "</table>"
         return table_html

-    @logger.trace_function(log_args=log_args, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=False)
     def begin_analyze_document_from_url(
         self, source_url: str, use_layout: bool = True, paragraph_separator: str = ""
     ):
cwyodmodules/batch/utilities/helpers/azure_postgres_helper.py
CHANGED
@@ -6,6 +6,7 @@ from .env_helper import EnvHelper

 from logging_config import logger
 env_helper: EnvHelper = EnvHelper()
+log_execution = env_helper.LOG_EXECUTION
 log_args = env_helper.LOG_ARGS
 log_result = env_helper.LOG_RESULT

@@ -17,7 +18,7 @@ class AzurePostgresHelper:
         self.azure_identity_helper = AzureIdentityHelper()
         self.conn = None

-    @logger.trace_function(log_args=log_args, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=False)
     def _create_search_client(self):
         """
         Establishes a connection to Azure PostgreSQL using AAD authentication.
@@ -47,7 +48,7 @@ class AzurePostgresHelper:
             logger.error(f"Error establishing a connection to PostgreSQL: {e}")
             raise

-    @logger.trace_function(log_args=log_args, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=False)
     def get_search_client(self):
         """
         Provides a reusable database connection.
@@ -56,7 +57,7 @@ class AzurePostgresHelper:
             self.conn = self._create_search_client()
         return self.conn

-    @logger.trace_function(log_args=False, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=False, log_result=False)
     def get_vector_store(self, embedding_array):
         """
         Fetches search indexes from PostgreSQL based on an embedding vector.
@@ -85,7 +86,7 @@ class AzurePostgresHelper:
         finally:
             conn.close()

-    @logger.trace_function(log_args=False, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=False, log_result=log_result)
     def create_vector_store(self, documents_to_upload):
         """
         Inserts documents into the `vector_store` table in batch mode.
@@ -129,7 +130,7 @@ class AzurePostgresHelper:
         finally:
             conn.close()

-    @logger.trace_function(log_args=False, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=False, log_result=log_result)
     def get_files(self):
         """
         Fetches distinct titles from the PostgreSQL database.
@@ -162,7 +163,7 @@ class AzurePostgresHelper:
         finally:
             conn.close()

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     def delete_documents(self, ids_to_delete):
         """
         Deletes documents from the PostgreSQL database based on the provided ids.
@@ -210,7 +211,7 @@ class AzurePostgresHelper:
         finally:
             conn.close()

-    @logger.trace_function(log_args=log_args, log_result=False)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=False)
     def perform_search(self, title):
         """
         Fetches search results from PostgreSQL based on the title.
@@ -237,7 +238,7 @@ class AzurePostgresHelper:
         finally:
             conn.close()

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     def get_unique_files(self):
         """
         Fetches unique titles from PostgreSQL.
@@ -262,7 +263,7 @@ class AzurePostgresHelper:
         finally:
             conn.close()

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     def search_by_blob_url(self, blob_url):
         """
         Fetches unique titles from PostgreSQL based on a given blob URL.
@@ -289,7 +290,7 @@ class AzurePostgresHelper:
         finally:
             conn.close()

-    @logger.trace_function(log_args=log_args, log_result=log_result)
+    @logger.trace_function(log_execution=log_execution, log_args=log_args, log_result=log_result)
     def store_with_lightrag(self, documents_to_upload):
         """
         Stores documents using LightRAG for enhanced vector and text storage capabilities.