unique_toolkit 1.8.1__py3-none-any.whl → 1.23.0__py3-none-any.whl
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Potentially problematic release: this version of unique_toolkit might be problematic.
- unique_toolkit/__init__.py +20 -0
- unique_toolkit/_common/api_calling/human_verification_manager.py +121 -28
- unique_toolkit/_common/chunk_relevancy_sorter/config.py +3 -3
- unique_toolkit/_common/chunk_relevancy_sorter/tests/test_service.py +2 -5
- unique_toolkit/_common/default_language_model.py +9 -3
- unique_toolkit/_common/docx_generator/__init__.py +7 -0
- unique_toolkit/_common/docx_generator/config.py +12 -0
- unique_toolkit/_common/docx_generator/schemas.py +80 -0
- unique_toolkit/_common/docx_generator/service.py +252 -0
- unique_toolkit/_common/docx_generator/template/Doc Template.docx +0 -0
- unique_toolkit/_common/endpoint_builder.py +138 -117
- unique_toolkit/_common/endpoint_requestor.py +240 -14
- unique_toolkit/_common/exception.py +20 -0
- unique_toolkit/_common/feature_flags/schema.py +1 -5
- unique_toolkit/_common/referencing.py +53 -0
- unique_toolkit/_common/string_utilities.py +52 -1
- unique_toolkit/_common/tests/test_referencing.py +521 -0
- unique_toolkit/_common/tests/test_string_utilities.py +506 -0
- unique_toolkit/_common/utils/files.py +43 -0
- unique_toolkit/agentic/debug_info_manager/debug_info_manager.py +16 -6
- unique_toolkit/agentic/debug_info_manager/test/test_debug_info_manager.py +278 -0
- unique_toolkit/agentic/evaluation/config.py +3 -2
- unique_toolkit/agentic/evaluation/context_relevancy/service.py +2 -2
- unique_toolkit/agentic/evaluation/evaluation_manager.py +9 -5
- unique_toolkit/agentic/evaluation/hallucination/constants.py +1 -1
- unique_toolkit/agentic/evaluation/hallucination/hallucination_evaluation.py +26 -3
- unique_toolkit/agentic/history_manager/history_manager.py +14 -11
- unique_toolkit/agentic/history_manager/loop_token_reducer.py +3 -4
- unique_toolkit/agentic/history_manager/utils.py +10 -87
- unique_toolkit/agentic/postprocessor/postprocessor_manager.py +107 -16
- unique_toolkit/agentic/reference_manager/reference_manager.py +1 -1
- unique_toolkit/agentic/responses_api/__init__.py +19 -0
- unique_toolkit/agentic/responses_api/postprocessors/code_display.py +63 -0
- unique_toolkit/agentic/responses_api/postprocessors/generated_files.py +145 -0
- unique_toolkit/agentic/responses_api/stream_handler.py +15 -0
- unique_toolkit/agentic/tools/a2a/__init__.py +18 -2
- unique_toolkit/agentic/tools/a2a/evaluation/__init__.py +2 -0
- unique_toolkit/agentic/tools/a2a/evaluation/_utils.py +3 -3
- unique_toolkit/agentic/tools/a2a/evaluation/config.py +1 -1
- unique_toolkit/agentic/tools/a2a/evaluation/evaluator.py +143 -91
- unique_toolkit/agentic/tools/a2a/manager.py +7 -1
- unique_toolkit/agentic/tools/a2a/postprocessing/__init__.py +11 -3
- unique_toolkit/agentic/tools/a2a/postprocessing/_display_utils.py +185 -0
- unique_toolkit/agentic/tools/a2a/postprocessing/_ref_utils.py +73 -0
- unique_toolkit/agentic/tools/a2a/postprocessing/config.py +21 -0
- unique_toolkit/agentic/tools/a2a/postprocessing/display.py +180 -0
- unique_toolkit/agentic/tools/a2a/postprocessing/references.py +101 -0
- unique_toolkit/agentic/tools/a2a/postprocessing/test/test_display_utils.py +1335 -0
- unique_toolkit/agentic/tools/a2a/postprocessing/test/test_ref_utils.py +603 -0
- unique_toolkit/agentic/tools/a2a/prompts.py +46 -0
- unique_toolkit/agentic/tools/a2a/response_watcher/__init__.py +6 -0
- unique_toolkit/agentic/tools/a2a/response_watcher/service.py +91 -0
- unique_toolkit/agentic/tools/a2a/tool/config.py +15 -5
- unique_toolkit/agentic/tools/a2a/tool/service.py +69 -36
- unique_toolkit/agentic/tools/config.py +16 -2
- unique_toolkit/agentic/tools/factory.py +4 -0
- unique_toolkit/agentic/tools/mcp/tool_wrapper.py +7 -35
- unique_toolkit/agentic/tools/openai_builtin/__init__.py +11 -0
- unique_toolkit/agentic/tools/openai_builtin/base.py +30 -0
- unique_toolkit/agentic/tools/openai_builtin/code_interpreter/__init__.py +8 -0
- unique_toolkit/agentic/tools/openai_builtin/code_interpreter/config.py +57 -0
- unique_toolkit/agentic/tools/openai_builtin/code_interpreter/service.py +230 -0
- unique_toolkit/agentic/tools/openai_builtin/manager.py +62 -0
- unique_toolkit/agentic/tools/test/test_mcp_manager.py +95 -7
- unique_toolkit/agentic/tools/test/test_tool_progress_reporter.py +240 -0
- unique_toolkit/agentic/tools/tool.py +0 -11
- unique_toolkit/agentic/tools/tool_manager.py +337 -122
- unique_toolkit/agentic/tools/tool_progress_reporter.py +81 -15
- unique_toolkit/agentic/tools/utils/__init__.py +18 -0
- unique_toolkit/agentic/tools/utils/execution/execution.py +8 -4
- unique_toolkit/agentic/tools/utils/source_handling/schema.py +1 -1
- unique_toolkit/chat/__init__.py +8 -1
- unique_toolkit/chat/deprecated/service.py +232 -0
- unique_toolkit/chat/functions.py +54 -40
- unique_toolkit/chat/rendering.py +34 -0
- unique_toolkit/chat/responses_api.py +461 -0
- unique_toolkit/chat/schemas.py +1 -1
- unique_toolkit/chat/service.py +96 -1569
- unique_toolkit/content/functions.py +116 -1
- unique_toolkit/content/schemas.py +59 -0
- unique_toolkit/content/service.py +5 -37
- unique_toolkit/content/smart_rules.py +301 -0
- unique_toolkit/framework_utilities/langchain/client.py +27 -3
- unique_toolkit/framework_utilities/openai/client.py +12 -1
- unique_toolkit/framework_utilities/openai/message_builder.py +85 -1
- unique_toolkit/language_model/default_language_model.py +3 -0
- unique_toolkit/language_model/functions.py +25 -9
- unique_toolkit/language_model/infos.py +72 -4
- unique_toolkit/language_model/schemas.py +246 -40
- unique_toolkit/protocols/support.py +91 -9
- unique_toolkit/services/__init__.py +7 -0
- unique_toolkit/services/chat_service.py +1630 -0
- unique_toolkit/services/knowledge_base.py +861 -0
- unique_toolkit/smart_rules/compile.py +56 -301
- unique_toolkit/test_utilities/events.py +197 -0
- {unique_toolkit-1.8.1.dist-info → unique_toolkit-1.23.0.dist-info}/METADATA +173 -3
- {unique_toolkit-1.8.1.dist-info → unique_toolkit-1.23.0.dist-info}/RECORD +99 -67
- unique_toolkit/agentic/tools/a2a/postprocessing/_display.py +0 -122
- unique_toolkit/agentic/tools/a2a/postprocessing/_utils.py +0 -19
- unique_toolkit/agentic/tools/a2a/postprocessing/postprocessor.py +0 -230
- unique_toolkit/agentic/tools/a2a/postprocessing/test/test_consolidate_references.py +0 -665
- unique_toolkit/agentic/tools/a2a/postprocessing/test/test_display.py +0 -391
- unique_toolkit/agentic/tools/a2a/postprocessing/test/test_postprocessor_reference_functions.py +0 -256
- {unique_toolkit-1.8.1.dist-info → unique_toolkit-1.23.0.dist-info}/LICENSE +0 -0
- {unique_toolkit-1.8.1.dist-info → unique_toolkit-1.23.0.dist-info}/WHEEL +0 -0
unique_toolkit/content/functions.py
@@ -13,8 +13,12 @@ from unique_toolkit.content.constants import DEFAULT_SEARCH_LANGUAGE
 from unique_toolkit.content.schemas import (
     Content,
     ContentChunk,
+    ContentInfo,
     ContentRerankerConfig,
     ContentSearchType,
+    DeleteContentResponse,
+    FolderInfo,
+    PaginatedContentInfos,
 )
 from unique_toolkit.content.utils import map_contents, map_to_content_chunks
 
@@ -429,7 +433,7 @@ def _trigger_upload_content(
         scope_id=scope_id,
     ) # type: ignore
 
-    return Content(
+    return Content.model_validate(created_content, by_alias=True, by_name=True)
 
 
 def request_content_by_id(
@@ -577,3 +581,114 @@ def download_content(
         raise Exception(error_msg)
 
     return content_path
+
+
+def get_content_info(
+    user_id: str,
+    company_id: str,
+    *,
+    metadata_filter: dict[str, Any] | None = None,
+    skip: int | None = None,
+    take: int | None = None,
+    file_path: str | None = None,
+):
+    """Gets the info of a content."""
+
+    get_info_params = unique_sdk.Content.ContentInfoParams(
+        metadataFilter=metadata_filter or None,  # Dict cannot be empty
+    )
+    if skip:
+        get_info_params["skip"] = skip
+    if take:
+        get_info_params["take"] = take
+    if file_path:
+        get_info_params["filePath"] = file_path
+
+    content_info = unique_sdk.Content.get_infos(
+        user_id=user_id, company_id=company_id, **get_info_params
+    )
+    return PaginatedContentInfos.model_validate(
+        content_info, by_alias=True, by_name=True
+    )
+
+
+def get_folder_info(user_id: str, company_id: str, *, scope_id: str) -> FolderInfo:
+    info = unique_sdk.Folder.get_info(
+        user_id=user_id, company_id=company_id, scopeId=scope_id
+    )
+
+    return FolderInfo.model_validate(info, by_alias=True, by_name=True)
+
+
+def update_content(
+    user_id: str,
+    company_id: str,
+    *,
+    content_id: str,
+    metadata: dict[str, Any],
+    file_path: str | None = None,
+    owner_id: str | None = None,
+    parent_folder_path: str | None = None,
+    title: str | None = None,
+) -> ContentInfo:
+    """Updates the metadata of a content."""
+
+    update_params = unique_sdk.Content.UpdateParams(
+        contentId=content_id, metadata=metadata
+    )
+
+    if file_path:
+        update_params["filePath"] = file_path
+    if owner_id:
+        update_params["ownerId"] = owner_id
+    if parent_folder_path:
+        update_params["parentFolderPath"] = parent_folder_path
+    if title:
+        update_params["title"] = title
+
+    content_info = unique_sdk.Content.update(
+        user_id=user_id, company_id=company_id, **update_params
+    )
+    return ContentInfo.model_validate(content_info, by_alias=True, by_name=True)
+
+
+def delete_content(
+    user_id: str,
+    company_id: str,
+    *,
+    content_id: str | None = None,
+    file_path: str | None = None,
+) -> DeleteContentResponse:
+    if content_id:
+        resp = unique_sdk.Content.delete(
+            user_id=user_id, company_id=company_id, contentId=content_id
+        )
+    elif file_path:
+        resp = unique_sdk.Content.delete(
+            user_id=user_id, company_id=company_id, filePath=file_path
+        )
+    else:
+        raise ValueError("content_id or file_path must be provided")
+
+    return DeleteContentResponse.model_validate(resp, by_alias=True, by_name=True)
+
+
+async def delete_content_async(
+    user_id: str,
+    company_id: str,
+    *,
+    content_id: str | None = None,
+    file_path: str | None = None,
+) -> DeleteContentResponse:
+    if content_id:
+        resp = await unique_sdk.Content.delete_async(
+            user_id=user_id, company_id=company_id, contentId=content_id
+        )
+    elif file_path:
+        resp = await unique_sdk.Content.delete_async(
+            user_id=user_id, company_id=company_id, filePath=file_path
+        )
+    else:
+        raise ValueError("content_id or file_path must be provided")
+
+    return DeleteContentResponse.model_validate(resp, by_alias=True, by_name=True)
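The new module-level helpers above wrap the unique_sdk Content and Folder endpoints and validate the raw responses into the new Pydantic schemas. A minimal sketch of how they might be called; the IDs, scope, filter shape, and paths below are placeholder values, not part of the release:

from unique_toolkit.content.functions import (
    delete_content,
    get_content_info,
    get_folder_info,
    update_content,
)

# Placeholder identifiers for illustration only.
USER_ID = "user_123"
COMPANY_ID = "company_456"

# Page through content infos that match a metadata filter
# (the filter shape below is illustrative; see unique_sdk for the supported grammar).
infos = get_content_info(
    USER_ID,
    COMPANY_ID,
    metadata_filter={"folderIdPath": {"equals": "uniquepathid://scope_abc"}},
    take=50,
)
print(infos.total_count, [info.key for info in infos.content_infos])

# Inspect a folder, retag one document, then remove another by file path.
folder = get_folder_info(USER_ID, COMPANY_ID, scope_id="scope_abc")
updated = update_content(
    USER_ID,
    COMPANY_ID,
    content_id=infos.content_infos[0].id,
    metadata={"reviewed": True},
    title="Reviewed report",
)
deleted = delete_content(USER_ID, COMPANY_ID, file_path="/reports/old_report.pdf")
print(folder.name, updated.title, deleted.content_id)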
unique_toolkit/content/schemas.py
@@ -2,6 +2,7 @@ from datetime import datetime
 from enum import StrEnum
 from typing import Any, Optional
 
+import unique_sdk
 from humps import camelize
 from pydantic import BaseModel, ConfigDict, Field
 
@@ -114,6 +115,22 @@ class ContentReference(BaseModel):
         description="List of indices in the ChatMessage original_content this reference refers to. This is usually the id in the functionCallResponse. List type due to implementation in node-chat",
     )
 
+    @classmethod
+    def from_sdk_reference(
+        cls, reference: unique_sdk.Message.Reference | unique_sdk.Space.Reference
+    ) -> "ContentReference":
+        kwargs = {
+            "name": reference["name"],
+            "url": reference["url"],
+            "sequence_number": reference["sequenceNumber"],
+            "source": reference["source"],
+            "source_id": reference["sourceId"],
+        }
+        if "originalIndex" in reference:
+            kwargs["original_index"] = reference["originalIndex"]
+
+        return cls.model_validate(kwargs)
+
 
 class ContentSearchType(StrEnum):
     COMBINED = "COMBINED"
@@ -149,3 +166,45 @@ class ContentRerankerConfig(BaseModel):
     model_config = model_config
     deployment_name: str = Field(serialization_alias="deploymentName")
     options: dict | None = None
+
+
+class ContentInfo(BaseModel):
+    model_config = model_config
+    id: str
+    object: str
+    key: str
+    url: str | None = None
+    title: str | None = None
+    metadata: dict[str, Any] | None = None
+    byte_size: int
+    mime_type: str
+    owner_id: str
+    created_at: datetime
+    updated_at: datetime
+    expires_at: datetime | None = None
+    deleted_at: datetime | None = None
+    expired_at: datetime | None = None
+
+
+class PaginatedContentInfos(BaseModel):
+    model_config = model_config
+    object: str
+    content_infos: list[ContentInfo]
+    total_count: int
+
+
+class FolderInfo(BaseModel):
+    model_config = model_config
+    id: str
+    name: str
+    ingestion_config: dict[str, Any]
+    createdAt: str | None
+    updatedAt: str | None
+    parentId: str | None
+    externalId: str | None
+
+
+class DeleteContentResponse(BaseModel):
+    model_config = model_config
+    content_id: str
+    object: str
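The schema additions above also give ContentReference a from_sdk_reference constructor that maps the camelCase SDK reference fields onto the toolkit model. A small illustrative sketch; the dict below merely stands in for a unique_sdk.Message.Reference payload and is not taken from the release:

from unique_toolkit.content.schemas import ContentReference

# Stand-in for a unique_sdk.Message.Reference / Space.Reference payload.
sdk_reference = {
    "name": "Quarterly Report.pdf",
    "url": "unique://content/cont_123",
    "sequenceNumber": 1,
    "source": "node-ingestion-chunks",
    "sourceId": "cont_123_chunk_0",
    "originalIndex": [0],  # optional; only copied when present
}

reference = ContentReference.from_sdk_reference(sdk_reference)  # type: ignore[arg-type]
print(reference.name, reference.sequence_number, reference.original_index)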
unique_toolkit/content/service.py
@@ -1,6 +1,4 @@
 import logging
-import mimetypes
-from enum import StrEnum
 from pathlib import Path
 from typing import Any, overload
 
@@ -8,6 +6,7 @@ import unique_sdk
 from requests import Response
 from typing_extensions import deprecated
 
+from unique_toolkit._common.utils.files import is_file_content, is_image_content
 from unique_toolkit._common.validate_required_values import validate_required_values
 from unique_toolkit.app.schemas import BaseEvent, ChatEvent, Event
 from unique_toolkit.app.unique_settings import UniqueSettings
@@ -35,29 +34,7 @@ from unique_toolkit.content.schemas import (
 logger = logging.getLogger(f"toolkit.{DOMAIN_NAME}.{__name__}")
 
 
-class FileMimeType(StrEnum):
-    PDF = "application/pdf"
-    DOCX = "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
-    DOC = "application/msword"
-    XLSX = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-    XLS = "application/vnd.ms-excel"
-    PPTX = "application/vnd.openxmlformats-officedocument.presentationml.presentation"
-    CSV = "text/csv"
-    HTML = "text/html"
-    MD = "text/markdown"
-    TXT = "text/plain"
-
-
-class ImageMimeType(StrEnum):
-    JPEG = "image/jpeg"
-    PNG = "image/png"
-    GIF = "image/gif"
-    BMP = "image/bmp"
-    WEBP = "image/webp"
-    TIFF = "image/tiff"
-    SVG = "image/svg+xml"
-
-
+@deprecated("Use KnowledgeBaseService instead")
 class ContentService:
     """
     Provides methods for searching, downloading and uploading content in the knowledge base.
@@ -337,6 +314,7 @@ class ContentService:
             logger.error(f"Error while searching content chunks: {e}")
             raise e
 
+    @deprecated("Use search_chunks_async instead")
     async def search_content_chunks_async(
         self,
         search_string: str,
@@ -694,17 +672,7 @@
         return content
 
     def is_file_content(self, filename: str) -> bool:
-
-
-        if not mimetype:
-            return False
-
-        return mimetype in FileMimeType.__members__.values()
+        return is_file_content(filename=filename)
 
     def is_image_content(self, filename: str) -> bool:
-
-
-        if not mimetype:
-            return False
-
-        return mimetype in ImageMimeType.__members__.values()
+        return is_image_content(filename=filename)
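With the MIME-type enums removed from the service, is_file_content and is_image_content are now thin wrappers around the helpers in the new unique_toolkit/_common/utils/files.py listed above. A brief sketch, assuming the module-level helpers keep the filename-based signature shown in the diff:

from unique_toolkit._common.utils.files import is_file_content, is_image_content

# Filename-based checks; the file names are placeholders.
print(is_file_content(filename="report.pdf"))    # expected True for document MIME types
print(is_image_content(filename="diagram.png"))  # expected True for image MIME types
print(is_image_content(filename="notes.txt"))    # expected False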
unique_toolkit/content/smart_rules.py
@@ -0,0 +1,301 @@
+import re
+from datetime import datetime, timedelta, timezone
+from enum import Enum
+from typing import Any, Dict, List, Mapping, Self, Union
+
+from pydantic import AliasChoices, BaseModel, Field
+from pydantic.config import ConfigDict
+
+
+class Operator(str, Enum):
+    EQUALS = "equals"
+    NOT_EQUALS = "notEquals"
+    GREATER_THAN = "greaterThan"
+    GREATER_THAN_OR_EQUAL = "greaterThanOrEqual"
+    LESS_THAN = "lessThan"
+    LESS_THAN_OR_EQUAL = "lessThanOrEqual"
+    IN = "in"
+    NOT_IN = "notIn"
+    CONTAINS = "contains"
+    NOT_CONTAINS = "notContains"
+    IS_NULL = "isNull"
+    IS_NOT_NULL = "isNotNull"
+    IS_EMPTY = "isEmpty"
+    IS_NOT_EMPTY = "isNotEmpty"
+    NESTED = "nested"
+
+
+class BaseStatement(BaseModel):
+    model_config = ConfigDict(serialize_by_alias=True)
+
+    def with_variables(
+        self,
+        user_metadata: Mapping[str, Union[str, int, bool]],
+        tool_parameters: Mapping[str, Union[str, int, bool]],
+    ) -> Self:
+        return self._fill_in_variables(user_metadata, tool_parameters)
+
+    def is_compiled(self) -> bool:
+        # Serialize the object to json string
+        json_str = self.model_dump_json()
+        # Check if the json string has <T> or <T+> or <T-> or <toolParameters or <userMetadata
+        return (
+            "<T>" in json_str
+            or "<T+" in json_str
+            or "<T-" in json_str
+            or "<toolParameters" in json_str
+            or "<userMetadata" in json_str
+        )
+
+    def _fill_in_variables(
+        self,
+        user_metadata: Mapping[str, Union[str, int, bool]],
+        tool_parameters: Mapping[str, Union[str, int, bool]],
+    ) -> Self:
+        return self.model_copy()
+
+
+class Statement(BaseStatement):
+    operator: Operator
+    value: Union[str, int, bool, list[str], "AndStatement", "OrStatement"]
+    path: List[str] = Field(default_factory=list)
+
+    def _fill_in_variables(
+        self,
+        user_metadata: Mapping[str, Union[str, int, bool]],
+        tool_parameters: Mapping[str, Union[str, int, bool]],
+    ) -> Self:
+        new_stmt = self.model_copy()
+        new_stmt.value = eval_operator(self, user_metadata, tool_parameters)
+        return new_stmt
+
+
+class AndStatement(BaseStatement):
+    and_list: List[Union["Statement", "AndStatement", "OrStatement"]] = Field(
+        validation_alias=AliasChoices("and", "and_list"), serialization_alias="and"
+    )
+
+    def _fill_in_variables(
+        self,
+        user_metadata: Mapping[str, Union[str, int, bool]],
+        tool_parameters: Mapping[str, Union[str, int, bool]],
+    ) -> Self:
+        new_stmt = self.model_copy()
+        new_stmt.and_list = [
+            sub_query._fill_in_variables(user_metadata, tool_parameters)
+            for sub_query in self.and_list
+        ]
+        return new_stmt
+
+
+class OrStatement(BaseStatement):
+    or_list: List[Union["Statement", "AndStatement", "OrStatement"]] = Field(
+        validation_alias=AliasChoices("or", "or_list"), serialization_alias="or"
+    )
+
+    def _fill_in_variables(
+        self,
+        user_metadata: Mapping[str, Union[str, int, bool]],
+        tool_parameters: Mapping[str, Union[str, int, bool]],
+    ) -> Self:
+        new_stmt = self.model_copy()
+        new_stmt.or_list = [
+            sub_query._fill_in_variables(user_metadata, tool_parameters)
+            for sub_query in self.or_list
+        ]
+        return new_stmt
+
+
+# Update the forward references
+Statement.model_rebuild()
+AndStatement.model_rebuild()
+OrStatement.model_rebuild()
+
+
+UniqueQL = Union[Statement, AndStatement, OrStatement]
+
+
+def is_array_of_strings(value: Any) -> bool:
+    return isinstance(value, list) and all(isinstance(item, str) for item in value)
+
+
+def eval_operator(
+    query: Statement,
+    user_metadata: Mapping[str, Union[str, int, bool]],
+    tool_parameters: Mapping[str, Union[str, int, bool]],
+) -> Any:
+    if query.operator in [
+        Operator.EQUALS,
+        Operator.NOT_EQUALS,
+        Operator.GREATER_THAN,
+        Operator.GREATER_THAN_OR_EQUAL,
+        Operator.LESS_THAN,
+        Operator.LESS_THAN_OR_EQUAL,
+        Operator.CONTAINS,
+        Operator.NOT_CONTAINS,
+    ]:
+        return binary_operator(query.value, user_metadata, tool_parameters)
+    elif query.operator in [Operator.IS_NULL, Operator.IS_NOT_NULL]:
+        return null_operator(query.value, user_metadata, tool_parameters)
+    elif query.operator in [Operator.IS_EMPTY, Operator.IS_NOT_EMPTY]:
+        return empty_operator(query.operator, user_metadata, tool_parameters)
+    elif query.operator == Operator.NESTED:
+        return eval_nested_operator(query.value, user_metadata, tool_parameters)
+    elif query.operator in [Operator.IN, Operator.NOT_IN]:
+        return array_operator(query.value, user_metadata, tool_parameters)
+    else:
+        raise ValueError(f"Operator {query.operator} not supported")
+
+
+def eval_nested_operator(
+    value: Any,
+    user_metadata: Mapping[str, Union[str, int, bool]],
+    tool_parameters: Mapping[str, Union[str, int, bool]],
+) -> Union[AndStatement, OrStatement]:
+    if not isinstance(value, (AndStatement, OrStatement)):
+        raise ValueError("Nested operator must be an AndStatement or OrStatement")
+    return value._fill_in_variables(user_metadata, tool_parameters)
+
+
+def binary_operator(
+    value: Any,
+    user_metadata: Mapping[str, Union[str, int, bool]],
+    tool_parameters: Mapping[str, Union[str, int, bool]],
+) -> Any:
+    return replace_variables(value, user_metadata, tool_parameters)
+
+
+def array_operator(
+    value: Any,
+    user_metadata: Mapping[str, Union[str, int, bool]],
+    tool_parameters: Mapping[str, Union[str, int, bool]],
+) -> Any:
+    if is_array_of_strings(value):
+        return [
+            replace_variables(item, user_metadata, tool_parameters) for item in value
+        ]
+    return value
+
+
+def null_operator(
+    value: Any,
+    user_metadata: Mapping[str, Union[str, int, bool]],
+    tool_parameters: Mapping[str, Union[str, int, bool]],
+) -> Any:
+    return value  # do nothing for now. No variables to replace
+
+
+def empty_operator(
+    operator: Operator,
+    user_metadata: Mapping[str, Union[str, int, bool]],
+    tool_parameters: Mapping[str, Union[str, int, bool]],
+) -> Any:
+    """Handle IS_EMPTY and IS_NOT_EMPTY operators."""
+    if operator == Operator.IS_EMPTY:
+        return ""
+    elif operator == Operator.IS_NOT_EMPTY:
+        return "not_empty"
+    return None
+
+
+def calculate_current_date() -> str:
+    """Calculate current date in UTC with seconds precision."""
+    return datetime.now(timezone.utc).isoformat(timespec="seconds")
+
+
+def calculate_earlier_date(input_str: str) -> str:
+    match = re.search(r"<T-(\d+)>", input_str)
+    if not match:
+        return calculate_current_date()  # Return current date if no match
+    days = int(match.group(1))
+    return (datetime.now(timezone.utc) - timedelta(days=days)).isoformat(
+        timespec="seconds"
+    )
+
+
+def calculate_later_date(input_str: str) -> str:
+    match = re.search(r"<T\+(\d+)>", input_str)  # Note: escaped + in regex
+    if not match:
+        return calculate_current_date()  # Return current date if no match
+    days = int(match.group(1))
+    return (datetime.now(timezone.utc) + timedelta(days=days)).isoformat(
+        timespec="seconds"
+    )
+
+
+def replace_variables(
+    value: Any,
+    user_metadata: Mapping[str, Union[str, int, bool]],
+    tool_parameters: Mapping[str, Union[str, int, bool]],
+) -> Any:
+    if isinstance(value, str):
+        if "||" in value:
+            return get_fallback_values(value, user_metadata, tool_parameters)
+        elif value == "<T>":
+            return calculate_current_date()
+        elif "<T-" in value:
+            return calculate_earlier_date(value)
+        elif "<T+" in value:
+            return calculate_later_date(value)
+
+        value = replace_tool_parameters_patterns(value, tool_parameters)
+        value = replace_user_metadata_patterns(value, user_metadata)
+
+        if value == "":
+            return value
+        try:
+            return int(value)
+        except ValueError:
+            if value.lower() in ["true", "false"]:
+                return value.lower() == "true"
+            return value
+    return value
+
+
+def replace_tool_parameters_patterns(
+    value: str, tool_parameters: Dict[str, Union[str, int, bool]]
+) -> str:
+    def replace_match(match):
+        param_name = match.group(1)
+        return str(tool_parameters.get(param_name, ""))
+
+    return re.sub(r"<toolParameters\.(\w+)>", replace_match, value)
+
+
+def replace_user_metadata_patterns(
+    value: str, user_metadata: Dict[str, Union[str, int, bool]]
+) -> str:
+    def replace_match(match):
+        param_name = match.group(1)
+        return str(user_metadata.get(param_name, ""))
+
+    return re.sub(r"<userMetadata\.(\w+)>", replace_match, value)
+
+
+def get_fallback_values(
+    value: str,
+    user_metadata: Mapping[str, Union[str, int, bool]],
+    tool_parameters: Mapping[str, Union[str, int, bool]],
+) -> Any:
+    values = value.split("||")
+    for val in values:
+        data = replace_variables(val, user_metadata, tool_parameters)
+        if data != "":
+            return data
+    return values
+
+
+# Example usage:
+def parse_uniqueql(json_data: Dict[str, Any]) -> UniqueQL:
+    if "operator" in json_data:
+        return Statement.model_validate(json_data)
+    elif "or" in json_data:
+        return OrStatement.model_validate(
+            {"or": [parse_uniqueql(item) for item in json_data["or"]]}
+        )
+    elif "and" in json_data:
+        return AndStatement.model_validate(
+            {"and": [parse_uniqueql(item) for item in json_data["and"]]}
+        )
+    else:
+        raise ValueError("Invalid UniqueQL format")
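The relocated smart-rules module above compiles a UniqueQL metadata filter by substituting <T…>, <toolParameters.*> and <userMetadata.*> placeholders. A short sketch of that flow, using made-up rule paths and metadata values:

from unique_toolkit.content.smart_rules import parse_uniqueql

# A rule with date and variable placeholders (values are illustrative).
raw_rule = {
    "and": [
        {"operator": "equals", "path": ["department"], "value": "<userMetadata.department>"},
        {"operator": "greaterThan", "path": ["updatedAt"], "value": "<T-30>"},
    ]
}

rule = parse_uniqueql(raw_rule)
print(rule.is_compiled())  # True while placeholders are still present in the serialized rule

compiled = rule.with_variables(
    user_metadata={"department": "legal"},
    tool_parameters={},
)
# Dumps {"and": [...]} with the department filled in and <T-30> resolved to an ISO timestamp.
print(compiled.model_dump(by_alias=True))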
unique_toolkit/framework_utilities/langchain/client.py
@@ -1,6 +1,8 @@
 import importlib.util
 import logging
 
+from typing_extensions import deprecated
+
 from unique_toolkit.app.unique_settings import UniqueSettings
 from unique_toolkit.framework_utilities.utils import get_default_headers
 
@@ -22,8 +24,11 @@ else:
     raise LangchainNotInstalledError()
 
 
-def
-
+def get_langchain_client(
+    *,
+    unique_settings: UniqueSettings | None = None,
+    model: str = "AZURE_GPT_4o_2024_0806",
+    additional_headers: dict[str, str] | None = None,
 ) -> ChatOpenAI:
     """Get a Langchain ChatOpenAI client instance.
 
@@ -39,9 +44,28 @@ def get_client(
     if unique_settings is None:
         unique_settings = UniqueSettings.from_env_auto()
 
+    default_headers = get_default_headers(unique_settings.app, unique_settings.auth)
+    if additional_headers is not None:
+        default_headers.update(additional_headers)
+
     return ChatOpenAI(
         base_url=unique_settings.api.openai_proxy_url(),
-        default_headers=
+        default_headers=default_headers,
         model=model,
         api_key=unique_settings.app.key,
     )
+
+
+@deprecated("Use get_langchain_client instead")
+def get_client(
+    unique_settings: UniqueSettings | None = None, model: str = "AZURE_GPT_4o_2024_0806"
+) -> ChatOpenAI:
+    """Get a Langchain ChatOpenAI client instance.
+
+    Args:
+        unique_settings: UniqueSettings instance
+
+    Returns:
+        ChatOpenAI client instance
+    """
+    return get_client(unique_settings, model)
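get_langchain_client replaces get_client and accepts extra proxy headers that are merged into the defaults. Note that the deprecated get_client wrapper, as shown above, returns get_client(unique_settings, model) and so calls itself; the sketch below therefore uses get_langchain_client directly. Settings come from the environment, and the header name is illustrative:

from unique_toolkit.framework_utilities.langchain.client import get_langchain_client

# Settings are read from the environment (UniqueSettings.from_env_auto()) when omitted.
llm = get_langchain_client(
    model="AZURE_GPT_4o_2024_0806",
    additional_headers={"x-trace-id": "demo-trace-123"},  # merged into the default proxy headers
)
print(llm.invoke("Summarize the release notes in one sentence.").content)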
unique_toolkit/framework_utilities/openai/client.py
@@ -22,7 +22,11 @@ else:
     raise OpenAINotInstalledError()
 
 
-def get_openai_client(
+def get_openai_client(
+    *,
+    unique_settings: UniqueSettings | None = None,
+    additional_headers: dict[str, str] | None = None,
+) -> OpenAI:
     """Get an OpenAI client instance.
 
     Args:
@@ -38,6 +42,8 @@ def get_openai_client(unique_settings: UniqueSettings | None = None) -> OpenAI:
         unique_settings = UniqueSettings.from_env_auto()
 
     default_headers = get_default_headers(unique_settings.app, unique_settings.auth)
+    if additional_headers is not None:
+        default_headers.update(additional_headers)
 
     return OpenAI(
         api_key="dummy_key",
@@ -47,7 +53,9 @@ def get_openai_client(unique_settings: UniqueSettings | None = None) -> OpenAI:
 
 
 def get_async_openai_client(
+    *,
     unique_settings: UniqueSettings | None = None,
+    additional_headers: dict[str, str] | None = None,
 ) -> AsyncOpenAI:
     """Get an async OpenAI client instance.
 
@@ -65,6 +73,9 @@ def get_async_openai_client(
 
     default_headers = get_default_headers(unique_settings.app, unique_settings.auth)
 
+    if additional_headers is not None:
+        default_headers.update(additional_headers)
+
     return AsyncOpenAI(
         api_key="dummy_key",
         base_url=unique_settings.api.openai_proxy_url(),