cwyodmodules 0.3.32__py3-none-any.whl → 0.3.33__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. cwyodmodules/api/chat_history.py +14 -7
  2. cwyodmodules/batch/utilities/chat_history/auth_utils.py +7 -3
  3. cwyodmodules/batch/utilities/chat_history/cosmosdb.py +17 -1
  4. cwyodmodules/batch/utilities/chat_history/postgresdbservice.py +239 -254
  5. cwyodmodules/batch/utilities/common/source_document.py +60 -61
  6. cwyodmodules/batch/utilities/document_chunking/fixed_size_overlap.py +8 -3
  7. cwyodmodules/batch/utilities/document_chunking/layout.py +8 -3
  8. cwyodmodules/batch/utilities/document_chunking/page.py +8 -3
  9. cwyodmodules/batch/utilities/document_loading/read.py +30 -34
  10. cwyodmodules/batch/utilities/helpers/azure_computer_vision_client.py +10 -3
  11. cwyodmodules/batch/utilities/helpers/azure_form_recognizer_helper.py +6 -2
  12. cwyodmodules/batch/utilities/helpers/azure_postgres_helper.py +14 -2
  13. cwyodmodules/batch/utilities/helpers/azure_postgres_helper_light_rag.py +14 -2
  14. cwyodmodules/batch/utilities/helpers/azure_search_helper.py +15 -6
  15. cwyodmodules/batch/utilities/helpers/config/config_helper.py +24 -2
  16. cwyodmodules/batch/utilities/helpers/env_helper.py +9 -9
  17. cwyodmodules/batch/utilities/helpers/lightrag_helper.py +9 -2
  18. cwyodmodules/batch/utilities/helpers/llm_helper.py +13 -2
  19. cwyodmodules/batch/utilities/helpers/secret_helper.py +9 -9
  20. cwyodmodules/batch/utilities/integrated_vectorization/azure_search_index.py +8 -2
  21. cwyodmodules/batch/utilities/integrated_vectorization/azure_search_indexer.py +9 -2
  22. cwyodmodules/batch/utilities/integrated_vectorization/azure_search_skillset.py +6 -2
  23. cwyodmodules/batch/utilities/orchestrator/lang_chain_agent.py +8 -2
  24. cwyodmodules/batch/utilities/orchestrator/open_ai_functions.py +6 -2
  25. cwyodmodules/batch/utilities/orchestrator/orchestrator_base.py +9 -3
  26. cwyodmodules/batch/utilities/orchestrator/prompt_flow.py +8 -2
  27. cwyodmodules/batch/utilities/orchestrator/semantic_kernel_orchestrator.py +135 -138
  28. cwyodmodules/batch/utilities/parser/output_parser_tool.py +64 -64
  29. cwyodmodules/batch/utilities/plugins/outlook_calendar_plugin.py +91 -93
  30. cwyodmodules/batch/utilities/search/azure_search_handler.py +16 -3
  31. cwyodmodules/batch/utilities/search/azure_search_handler_light_rag.py +14 -2
  32. cwyodmodules/batch/utilities/search/integrated_vectorization_search_handler.py +36 -24
  33. cwyodmodules/batch/utilities/search/lightrag_search_handler.py +14 -2
  34. cwyodmodules/batch/utilities/search/postgres_search_handler.py +100 -97
  35. cwyodmodules/batch/utilities/search/postgres_search_handler_light_rag.py +103 -104
  36. cwyodmodules/batch/utilities/search/search.py +21 -24
  37. cwyodmodules/batch/utilities/tools/content_safety_checker.py +66 -78
  38. cwyodmodules/batch/utilities/tools/post_prompt_tool.py +48 -60
  39. cwyodmodules/batch/utilities/tools/question_answer_tool.py +196 -206
  40. cwyodmodules/batch/utilities/tools/text_processing_tool.py +36 -39
  41. cwyodmodules/logging_config.py +15 -0
  42. {cwyodmodules-0.3.32.dist-info → cwyodmodules-0.3.33.dist-info}/METADATA +2 -1
  43. {cwyodmodules-0.3.32.dist-info → cwyodmodules-0.3.33.dist-info}/RECORD +46 -45
  44. {cwyodmodules-0.3.32.dist-info → cwyodmodules-0.3.33.dist-info}/WHEEL +0 -0
  45. {cwyodmodules-0.3.32.dist-info → cwyodmodules-0.3.33.dist-info}/licenses/LICENSE +0 -0
  46. {cwyodmodules-0.3.32.dist-info → cwyodmodules-0.3.33.dist-info}/top_level.txt +0 -0
@@ -6,134 +6,137 @@ from .search_handler_base import SearchHandlerBase
6
6
  from ..helpers.azure_postgres_helper import AzurePostgresHelper
7
7
  from ..common.source_document import SourceDocument
8
8
 
9
- from logging import getLogger
10
- from opentelemetry import trace, baggage
11
- from opentelemetry.propagate import extract
9
+ from ...utilities.helpers.env_helper import EnvHelper
10
+ from logging_config import logger
11
+ env_helper: EnvHelper = EnvHelper()
12
+ log_args = env_helper.LOG_ARGS
13
+ log_result = env_helper.LOG_RESULT
12
14
 
13
15
 
14
16
  class AzurePostgresHandler(SearchHandlerBase):
15
17
 
16
18
  def __init__(self, env_helper):
17
19
  self.azure_postgres_helper = AzurePostgresHelper()
18
- #self.logger = getLogger("__main__" + ".base_package")
19
- self.logger = getLogger("__main__")
20
- #self.tracer = trace.get_tracer("__main__" + ".base_package")
21
- self.tracer = trace.get_tracer("__main__")
22
20
  super().__init__(env_helper)
23
21
 
22
+
23
+ @logger.trace_function(log_args=False, log_result=False)
24
24
  def query_search(self, question) -> List[SourceDocument]:
25
- with self.tracer.start_as_current_span("query_search"):
26
- self.logger.info(f"Starting query search for question: {question}")
27
- user_input = question
28
- query_embedding = self.azure_postgres_helper.llm_helper.generate_embeddings(
29
- user_input
30
- )
25
+ logger.info(f"Starting query search for question: {question}")
26
+ user_input = question
27
+ query_embedding = self.azure_postgres_helper.llm_helper.generate_embeddings(
28
+ user_input
29
+ )
31
30
 
32
- embedding_array = np.array(query_embedding).tolist()
31
+ embedding_array = np.array(query_embedding).tolist()
33
32
 
34
- search_results = self.azure_postgres_helper.get_vector_store(
35
- embedding_array
36
- )
33
+ search_results = self.azure_postgres_helper.get_vector_store(
34
+ embedding_array
35
+ )
37
36
 
38
- source_documents = self._convert_to_source_documents(search_results)
39
- self.logger.info(f"Found {len(source_documents)} source documents.")
40
- return source_documents
37
+ source_documents = self._convert_to_source_documents(search_results)
38
+ logger.info(f"Found {len(source_documents)} source documents.")
39
+ return source_documents
41
40
 
41
+ @logger.trace_function(log_args=False, log_result=False)
42
42
  def _convert_to_source_documents(self, search_results) -> List[SourceDocument]:
43
- with self.tracer.start_as_current_span("_convert_to_source_documents"):
44
- source_documents = []
45
- for source in search_results:
46
- source_document = SourceDocument(
47
- id=source["id"],
48
- title=source["title"],
49
- chunk=source["chunk"],
50
- offset=source["offset"],
51
- page_number=source["page_number"],
52
- content=source["content"],
53
- source=source["source"],
54
- )
55
- source_documents.append(source_document)
56
- return source_documents
43
+ source_documents = []
44
+ for source in search_results:
45
+ source_document = SourceDocument(
46
+ id=source["id"],
47
+ title=source["title"],
48
+ chunk=source["chunk"],
49
+ offset=source["offset"],
50
+ page_number=source["page_number"],
51
+ content=source["content"],
52
+ source=source["source"],
53
+ )
54
+ source_documents.append(source_document)
55
+ return source_documents
56
+
57
57
 
58
+ @logger.trace_function(log_args=log_args, log_result=False)
58
59
  def create_search_client(self):
59
- with self.tracer.start_as_current_span("create_search_client"):
60
- return self.azure_postgres_helper.get_search_client()
60
+ return self.azure_postgres_helper.get_search_client()
61
+
61
62
 
63
+ @logger.trace_function(log_args=False, log_result=False)
62
64
  def create_vector_store(self, documents_to_upload):
63
- with self.tracer.start_as_current_span("create_vector_store"):
64
- self.logger.info(
65
- f"Creating vector store with {len(documents_to_upload)} documents."
66
- )
67
- return self.azure_postgres_helper.create_vector_store(documents_to_upload)
65
+ logger.info(
66
+ f"Creating vector store with {len(documents_to_upload)} documents."
67
+ )
68
+ return self.azure_postgres_helper.create_vector_store(documents_to_upload)
68
69
 
70
+ @logger.trace_function(log_args=log_args, log_result=False)
69
71
  def perform_search(self, filename):
70
- with self.tracer.start_as_current_span("perform_search"):
71
- self.logger.info(f"Performing search for filename: {filename}")
72
- return self.azure_postgres_helper.perform_search(filename)
72
+ logger.info(f"Performing search for filename: {filename}")
73
+ return self.azure_postgres_helper.perform_search(filename)
73
74
 
75
+ @logger.trace_function(log_args=False, log_result=False)
74
76
  def process_results(self, results):
75
- with self.tracer.start_as_current_span("process_results"):
76
- if results is None:
77
- self.logger.info("No results to process.")
78
- return []
79
- data = [
80
- [json.loads(result["metadata"]).get("chunk", i), result["content"]]
81
- for i, result in enumerate(results)
82
- ]
83
- self.logger.info(f"Processed {len(data)} results.")
84
- return data
85
-
77
+ if results is None:
78
+ logger.info("No results to process.")
79
+ return []
80
+ data = [
81
+ [json.loads(result["metadata"]).get("chunk", i), result["content"]]
82
+ for i, result in enumerate(results)
83
+ ]
84
+ logger.info(f"Processed {len(data)} results.")
85
+ return data
86
+
87
+ @logger.trace_function(log_args=log_args, log_result=False)
86
88
  def get_files(self):
87
- with self.tracer.start_as_current_span("get_files"):
88
- results = self.azure_postgres_helper.get_files()
89
- if results is None or len(results) == 0:
90
- self.logger.info("No files found.")
91
- return []
92
- self.logger.info(f"Found {len(results)} files.")
93
- return results
89
+ results = self.azure_postgres_helper.get_files()
90
+ if results is None or len(results) == 0:
91
+ logger.info("No files found.")
92
+ return []
93
+ logger.info(f"Found {len(results)} files.")
94
+ return results
95
+
94
96
 
97
+ @logger.trace_function(log_args=False, log_result=False)
95
98
  def output_results(self, results):
96
- with self.tracer.start_as_current_span("output_results"):
97
- files = {}
98
- for result in results:
99
- id = result["id"]
100
- filename = result["title"]
101
- if filename in files:
102
- files[filename].append(id)
103
- else:
104
- files[filename] = [id]
105
-
106
- return files
99
+ files = {}
100
+ for result in results:
101
+ id = result["id"]
102
+ filename = result["title"]
103
+ if filename in files:
104
+ files[filename].append(id)
105
+ else:
106
+ files[filename] = [id]
107
107
 
108
+ return files
109
+
110
+
111
+ @logger.trace_function(log_args=log_args, log_result=log_result)
108
112
  def delete_files(self, files):
109
- with self.tracer.start_as_current_span("delete_files"):
110
- ids_to_delete = []
111
- files_to_delete = []
113
+ ids_to_delete = []
114
+ files_to_delete = []
112
115
 
113
- for filename, ids in files.items():
114
- files_to_delete.append(filename)
115
- ids_to_delete += [{"id": id} for id in ids]
116
- self.azure_postgres_helper.delete_documents(ids_to_delete)
116
+ for filename, ids in files.items():
117
+ files_to_delete.append(filename)
118
+ ids_to_delete += [{"id": id} for id in ids]
119
+ self.azure_postgres_helper.delete_documents(ids_to_delete)
117
120
 
118
- return ", ".join(files_to_delete)
121
+ return ", ".join(files_to_delete)
119
122
 
123
+ @logger.trace_function(log_args=log_args, log_result=False)
120
124
  def search_by_blob_url(self, blob_url):
121
- with self.tracer.start_as_current_span("search_by_blob_url"):
122
- self.logger.info(f"Searching by blob URL: {blob_url}")
123
- return self.azure_postgres_helper.search_by_blob_url(blob_url)
125
+ logger.info(f"Searching by blob URL: {blob_url}")
126
+ return self.azure_postgres_helper.search_by_blob_url(blob_url)
124
127
 
128
+ @logger.trace_function(log_args=log_args, log_result=log_result)
125
129
  def delete_from_index(self, blob_url) -> None:
126
- with self.tracer.start_as_current_span("delete_from_index"):
127
- self.logger.info(f"Deleting from index for blob URL: {blob_url}")
128
- documents = self.search_by_blob_url(blob_url)
129
- if documents is None or len(documents) == 0:
130
- self.logger.info("No documents found for blob URL.")
131
- return
132
- files_to_delete = self.output_results(documents)
133
- self.delete_files(files_to_delete)
134
-
130
+ logger.info(f"Deleting from index for blob URL: {blob_url}")
131
+ documents = self.search_by_blob_url(blob_url)
132
+ if documents is None or len(documents) == 0:
133
+ logger.info("No documents found for blob URL.")
134
+ return
135
+ files_to_delete = self.output_results(documents)
136
+ self.delete_files(files_to_delete)
137
+
138
+ @logger.trace_function(log_args=log_args, log_result=log_result)
135
139
  def get_unique_files(self):
136
- with self.tracer.start_as_current_span("get_unique_files"):
137
- results = self.azure_postgres_helper.get_unique_files()
138
- unique_titles = [row["title"] for row in results]
139
- return unique_titles
140
+ results = self.azure_postgres_helper.get_unique_files()
141
+ unique_titles = [row["title"] for row in results]
142
+ return unique_titles
@@ -7,8 +7,11 @@ from ..helpers.azure_postgres_helper import AzurePostgresHelper
7
7
  from ..helpers.lightrag_helper import LightRAGHelper
8
8
  from ..common.source_document import SourceDocument
9
9
 
10
- from logging import getLogger
11
- from opentelemetry import trace
10
+ from ...utilities.helpers.env_helper import EnvHelper
11
+ from logging_config import logger
12
+ env_helper: EnvHelper = EnvHelper()
13
+ log_args = env_helper.LOG_ARGS
14
+ log_result = env_helper.LOG_RESULT
12
15
 
13
16
 
14
17
  class AzurePostgresHandler(SearchHandlerBase):
@@ -16,132 +19,128 @@ class AzurePostgresHandler(SearchHandlerBase):
16
19
  def __init__(self, env_helper):
17
20
  self.azure_postgres_helper = AzurePostgresHelper()
18
21
  self.lightrag_helper = LightRAGHelper()
19
- #self.logger = getLogger("__main__" + ".base_package")
20
- self.logger = getLogger("__main__")
21
- #self.tracer = trace.get_tracer("__main__" + ".base_package")
22
- self.tracer = trace.get_tracer("__main__")
23
22
  super().__init__(env_helper)
24
23
 
24
+ @logger.trace_function(log_args=False, log_result=False)
25
25
  def query_search(self, question) -> List[SourceDocument]:
26
- with self.tracer.start_as_current_span("query_search"):
27
- self.logger.info(f"Starting query search for question: {question}")
28
- user_input = question
29
- query_embedding = self.azure_postgres_helper.llm_helper.generate_embeddings(
30
- user_input
31
- )
26
+ logger.info(f"Starting query search for question: {question}")
27
+ user_input = question
28
+ query_embedding = self.azure_postgres_helper.llm_helper.generate_embeddings(
29
+ user_input
30
+ )
32
31
 
33
- embedding_array = np.array(query_embedding).tolist()
32
+ embedding_array = np.array(query_embedding).tolist()
34
33
 
35
- search_results = self.azure_postgres_helper.get_vector_store(
36
- embedding_array
37
- )
34
+ search_results = self.azure_postgres_helper.get_vector_store(
35
+ embedding_array
36
+ )
38
37
 
39
- source_documents = self._convert_to_source_documents(search_results)
40
- self.logger.info(f"Found {len(source_documents)} source documents.")
41
- return source_documents
38
+ source_documents = self._convert_to_source_documents(search_results)
39
+ logger.info(f"Found {len(source_documents)} source documents.")
40
+ return source_documents
42
41
 
42
+ @logger.trace_function(log_args=False, log_result=False)
43
43
  def _convert_to_source_documents(self, search_results) -> List[SourceDocument]:
44
- with self.tracer.start_as_current_span("_convert_to_source_documents"):
45
- source_documents = []
46
- for source in search_results:
47
- source_document = SourceDocument(
48
- id=source["id"],
49
- title=source["title"],
50
- chunk=source["chunk"],
51
- offset=source["offset"],
52
- page_number=source["page_number"],
53
- content=source["content"],
54
- source=source["source"],
55
- )
56
- source_documents.append(source_document)
57
- return source_documents
44
+ source_documents = []
45
+ for source in search_results:
46
+ source_document = SourceDocument(
47
+ id=source["id"],
48
+ title=source["title"],
49
+ chunk=source["chunk"],
50
+ offset=source["offset"],
51
+ page_number=source["page_number"],
52
+ content=source["content"],
53
+ source=source["source"],
54
+ )
55
+ source_documents.append(source_document)
56
+ return source_documents
58
57
 
58
+ @logger.trace_function(log_args=log_args, log_result=False)
59
59
  def create_search_client(self):
60
- with self.tracer.start_as_current_span("create_search_client"):
61
- return self.azure_postgres_helper.get_search_client()
60
+ return self.azure_postgres_helper.get_search_client()
62
61
 
62
+ @logger.trace_function(log_args=False, log_result=False)
63
63
  def create_vector_store(self, documents_to_upload):
64
- with self.tracer.start_as_current_span("create_vector_store"):
65
- self.logger.info(
66
- f"Creating vector store with {len(documents_to_upload)} documents."
67
- )
68
- return self.azure_postgres_helper.create_vector_store(documents_to_upload)
64
+ logger.info(
65
+ f"Creating vector store with {len(documents_to_upload)} documents."
66
+ )
67
+ return self.azure_postgres_helper.create_vector_store(documents_to_upload)
69
68
 
69
+ @logger.trace_function(log_args=log_args, log_result=False)
70
70
  def perform_search(self, filename):
71
- with self.tracer.start_as_current_span("perform_search"):
72
- self.logger.info(f"Performing search for filename: {filename}")
73
- return self.azure_postgres_helper.perform_search(filename)
71
+ logger.info(f"Performing search for filename: {filename}")
72
+ return self.azure_postgres_helper.perform_search(filename)
74
73
 
74
+ @logger.trace_function(log_args=False, log_result=False)
75
75
  def process_results(self, results):
76
- with self.tracer.start_as_current_span("process_results"):
77
- if results is None:
78
- self.logger.info("No results to process.")
79
- return []
80
- data = [
81
- [json.loads(result["metadata"]).get("chunk", i), result["content"]]
82
- for i, result in enumerate(results)
83
- ]
84
- self.logger.info(f"Processed {len(data)} results.")
85
- return data
86
-
76
+ if results is None:
77
+ logger.info("No results to process.")
78
+ return []
79
+ data = [
80
+ [json.loads(result["metadata"]).get("chunk", i), result["content"]]
81
+ for i, result in enumerate(results)
82
+ ]
83
+ logger.info(f"Processed {len(data)} results.")
84
+ return data
85
+
86
+ @logger.trace_function(log_args=log_args, log_result=False)
87
87
  def get_files(self):
88
- with self.tracer.start_as_current_span("get_files"):
89
- results = self.azure_postgres_helper.get_files()
90
- if results is None or len(results) == 0:
91
- self.logger.info("No files found.")
92
- return []
93
- self.logger.info(f"Found {len(results)} files.")
94
- return results
95
-
88
+ results = self.azure_postgres_helper.get_files()
89
+ if results is None or len(results) == 0:
90
+ logger.info("No files found.")
91
+ return []
92
+ logger.info(f"Found {len(results)} files.")
93
+ return results
94
+
95
+ @logger.trace_function(log_args=False, log_result=False)
96
96
  def output_results(self, results):
97
- with self.tracer.start_as_current_span("output_results"):
98
- files = {}
99
- for result in results:
100
- id = result["id"]
101
- filename = result["title"]
102
- if filename in files:
103
- files[filename].append(id)
104
- else:
105
- files[filename] = [id]
106
-
107
- return files
108
-
97
+ files = {}
98
+ for result in results:
99
+ id = result["id"]
100
+ filename = result["title"]
101
+ if filename in files:
102
+ files[filename].append(id)
103
+ else:
104
+ files[filename] = [id]
105
+
106
+ return files
107
+
108
+ @logger.trace_function(log_args=log_args, log_result=log_result)
109
109
  def delete_files(self, files):
110
- with self.tracer.start_as_current_span("delete_files"):
111
- ids_to_delete = []
112
- files_to_delete = []
113
-
114
- for filename, ids in files.items():
115
- files_to_delete.append(filename)
116
- ids_to_delete += [{"id": id} for id in ids]
117
- self.azure_postgres_helper.delete_documents(ids_to_delete)
110
+ ids_to_delete = []
111
+ files_to_delete = []
112
+ for filename, ids in files.items():
113
+ files_to_delete.append(filename)
114
+ ids_to_delete += [{"id": id} for id in ids]
115
+ self.azure_postgres_helper.delete_documents(ids_to_delete)
118
116
 
119
- return ", ".join(files_to_delete)
117
+ return ", ".join(files_to_delete)
120
118
 
119
+ @logger.trace_function(log_args=log_args, log_result=False)
121
120
  def search_by_blob_url(self, blob_url):
122
- with self.tracer.start_as_current_span("search_by_blob_url"):
123
- self.logger.info(f"Searching by blob URL: {blob_url}")
124
- return self.azure_postgres_helper.search_by_blob_url(blob_url)
121
+ logger.info(f"Searching by blob URL: {blob_url}")
122
+ return self.azure_postgres_helper.search_by_blob_url(blob_url)
125
123
 
124
+ @logger.trace_function(log_args=log_args, log_result=log_result)
126
125
  def delete_from_index(self, blob_url) -> None:
127
- with self.tracer.start_as_current_span("delete_from_index"):
128
- self.logger.info(f"Deleting from index for blob URL: {blob_url}")
129
- documents = self.search_by_blob_url(blob_url)
130
- if documents is None or len(documents) == 0:
131
- self.logger.info("No documents found for blob URL.")
132
- return
133
- files_to_delete = self.output_results(documents)
134
- self.delete_files(files_to_delete)
135
-
126
+ logger.info(f"Deleting from index for blob URL: {blob_url}")
127
+ documents = self.search_by_blob_url(blob_url)
128
+ if documents is None or len(documents) == 0:
129
+ logger.info("No documents found for blob URL.")
130
+ return
131
+ files_to_delete = self.output_results(documents)
132
+ self.delete_files(files_to_delete)
133
+
134
+ @logger.trace_function(log_args=log_args, log_result=False)
136
135
  def get_unique_files(self):
137
- with self.tracer.start_as_current_span("get_unique_files"):
138
- results = self.azure_postgres_helper.get_unique_files()
139
- unique_titles = [row["title"] for row in results]
140
- return unique_titles
136
+ results = self.azure_postgres_helper.get_unique_files()
137
+ unique_titles = [row["title"] for row in results]
138
+ return unique_titles
141
139
 
140
+
141
+ @logger.trace_function(log_args=False, log_result=False)
142
142
  def store_vector_and_text(self, documents_to_store):
143
- with self.tracer.start_as_current_span("store_vector_and_text"):
144
- self.logger.info(
145
- f"Storing {len(documents_to_store)} documents with LightRAG."
146
- )
147
- self.lightrag_helper.store_documents(documents_to_store)
143
+ logger.info(
144
+ f"Storing {len(documents_to_store)} documents with LightRAG."
145
+ )
146
+ self.lightrag_helper.store_documents(documents_to_store)
@@ -8,15 +8,12 @@ from ..search.search_handler_base import SearchHandlerBase
8
8
  from ..common.source_document import SourceDocument
9
9
  from ..helpers.env_helper import EnvHelper
10
10
 
11
- from logging import getLogger
12
- from opentelemetry import trace, baggage
13
- from opentelemetry.propagate import extract
14
11
  from ..search.lightrag_search_handler import LightRAGSearchHandler
15
12
 
16
- # logger = getLogger("__main__" + ".base_package")
17
- logger = getLogger("__main__")
18
- # tracer = trace.get_tracer("__main__" + ".base_package")
19
- tracer = trace.get_tracer("__main__")
13
+ from logging_config import logger
14
+ env_helper: EnvHelper = EnvHelper()
15
+ log_args = env_helper.LOG_ARGS
16
+ log_result = env_helper.LOG_RESULT
20
17
 
21
18
 
22
19
  class Search:
@@ -27,6 +24,7 @@ class Search:
27
24
  """
28
25
 
29
26
  @staticmethod
27
+ @logger.trace_function(log_args=False, log_result=False)
30
28
  def get_search_handler(env_helper: EnvHelper) -> SearchHandlerBase:
31
29
  """
32
30
  Determines and returns the appropriate search handler based on the
@@ -44,23 +42,23 @@ class Search:
44
42
  Since the full workflow for PostgreSQL indexing is not yet complete,
45
43
  you can comment out the condition checking for PostgreSQL.
46
44
  """
47
- with tracer.start_as_current_span("get_search_handler"):
48
- logger.info("Getting search handler...")
49
- # if env_helper.DATABASE_TYPE == DatabaseType.POSTGRESQL.value:
50
- # logger.info("Using LightRAGSearchHandler.")
51
- # return LightRAGSearchHandler(env_helper)
52
- if env_helper.DATABASE_TYPE == DatabaseType.POSTGRESQL.value:
53
- logger.info("Using AzurePostgresHandler.")
54
- return AzurePostgresHandler(env_helper)
45
+ logger.info("Getting search handler...")
46
+ # if env_helper.DATABASE_TYPE == DatabaseType.POSTGRESQL.value:
47
+ # logger.info("Using LightRAGSearchHandler.")
48
+ # return LightRAGSearchHandler(env_helper)
49
+ if env_helper.DATABASE_TYPE == DatabaseType.POSTGRESQL.value:
50
+ logger.info("Using AzurePostgresHandler.")
51
+ return AzurePostgresHandler(env_helper)
52
+ else:
53
+ if env_helper.AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION:
54
+ logger.info("Using IntegratedVectorizationSearchHandler.")
55
+ return IntegratedVectorizationSearchHandler(env_helper)
55
56
  else:
56
- if env_helper.AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION:
57
- logger.info("Using IntegratedVectorizationSearchHandler.")
58
- return IntegratedVectorizationSearchHandler(env_helper)
59
- else:
60
- logger.info("Using AzureSearchHandler.")
61
- return AzureSearchHandler(env_helper)
57
+ logger.info("Using AzureSearchHandler.")
58
+ return AzureSearchHandler(env_helper)
62
59
 
63
60
  @staticmethod
61
+ @logger.trace_function(log_args=False, log_result=False)
64
62
  def get_source_documents(
65
63
  search_handler: SearchHandlerBase, question: str
66
64
  ) -> list[SourceDocument]:
@@ -78,6 +76,5 @@ class Search:
78
76
  list[SourceDocument]: A list of SourceDocument instances that match the
79
77
  search query.
80
78
  """
81
- with tracer.start_as_current_span("get_source_documents"):
82
- logger.info(f"Getting source documents for question: {question}")
83
- return search_handler.query_search(question)
79
+ logger.info(f"Getting source documents for question: {question}")
80
+ return search_handler.query_search(question)