usecortex-ai 0.3.4__tar.gz → 0.3.6__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {usecortex_ai-0.3.4/src/usecortex_ai.egg-info → usecortex_ai-0.3.6}/PKG-INFO +1 -1
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/pyproject.toml +1 -1
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/__init__.py +24 -2
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/search/client.py +36 -4
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/search/raw_client.py +40 -8
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/sources/client.py +97 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/sources/raw_client.py +273 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/__init__.py +24 -2
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/add_user_memory_response.py +6 -1
- usecortex_ai-0.3.6/src/usecortex_ai/types/chunk_graph_relations_response.py +33 -0
- usecortex_ai-0.3.6/src/usecortex_ai/types/entity.py +42 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/extended_context.py +0 -3
- usecortex_ai-0.3.6/src/usecortex_ai/types/graph_relations_response.py +33 -0
- usecortex_ai-0.3.6/src/usecortex_ai/types/path_triplet.py +38 -0
- usecortex_ai-0.3.6/src/usecortex_ai/types/relation_evidence.py +53 -0
- usecortex_ai-0.3.6/src/usecortex_ai/types/retrieve_mode.py +5 -0
- usecortex_ai-0.3.6/src/usecortex_ai/types/retrieve_response.py +34 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/retrieve_user_memory_response.py +6 -0
- usecortex_ai-0.3.6/src/usecortex_ai/types/scored_path_response.py +40 -0
- usecortex_ai-0.3.6/src/usecortex_ai/types/scored_triplet_response.py +43 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/search_chunk.py +11 -0
- usecortex_ai-0.3.6/src/usecortex_ai/types/triple_with_evidence.py +31 -0
- usecortex_ai-0.3.6/src/usecortex_ai/types/user_assistant_pair.py +27 -0
- usecortex_ai-0.3.6/src/usecortex_ai/types/webpage_scrape_request.py +27 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/upload/client.py +276 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/upload/raw_client.py +1179 -339
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/user_memory/client.py +77 -149
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/user_memory/raw_client.py +74 -329
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6/src/usecortex_ai.egg-info}/PKG-INFO +1 -1
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai.egg-info/SOURCES.txt +12 -1
- usecortex_ai-0.3.4/src/usecortex_ai/types/generate_user_memory_response.py +0 -32
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/LICENSE +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/README.md +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/setup.cfg +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/client.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/core/__init__.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/core/api_error.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/core/client_wrapper.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/core/datetime_utils.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/core/file.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/core/force_multipart.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/core/http_client.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/core/http_response.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/core/jsonable_encoder.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/core/pydantic_utilities.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/core/query_encoder.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/core/remove_none_from_dict.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/core/request_options.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/core/serialization.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/document/__init__.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/document/client.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/document/raw_client.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/embeddings/__init__.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/embeddings/client.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/embeddings/raw_client.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/environment.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/errors/__init__.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/errors/bad_request_error.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/errors/forbidden_error.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/errors/internal_server_error.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/errors/not_found_error.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/errors/service_unavailable_error.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/errors/unauthorized_error.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/errors/unprocessable_entity_error.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/fetch/__init__.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/fetch/client.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/fetch/raw_client.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/raw_client.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/search/__init__.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/search/types/__init__.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/search/types/alpha.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/sources/__init__.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/tenant/__init__.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/tenant/client.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/tenant/raw_client.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/actual_error_response.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/app_sources_upload_data.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/attachment_model.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/batch_upload_data.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/bm_25_operator_type.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/body_scrape_webpage_upload_scrape_webpage_post.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/body_update_scrape_job_upload_update_webpage_patch.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/content_model.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/delete_memory_request.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/delete_sources.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/delete_sub_tenant_data.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/delete_user_memory_response.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/embeddings_create_collection_data.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/embeddings_delete_data.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/embeddings_get_data.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/embeddings_search_data.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/error_response.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/fetch_content_data.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/file_upload_result.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/http_validation_error.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/list_sources_response.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/list_user_memories_response.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/markdown_upload_request.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/processing_status.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/related_chunk.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/relations.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/single_upload_data.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/source.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/source_model.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/sub_tenant_ids_data.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/tenant_create_data.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/tenant_stats.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/user_memory.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/validation_error.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/types/validation_error_loc_item.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/upload/__init__.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/user/__init__.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/user/client.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/user/raw_client.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai/user_memory/__init__.py +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai.egg-info/dependency_links.txt +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai.egg-info/requires.txt +0 -0
- {usecortex_ai-0.3.4 → usecortex_ai-0.3.6}/src/usecortex_ai.egg-info/top_level.txt +0 -0
```diff
--- usecortex_ai-0.3.4/src/usecortex_ai/__init__.py
+++ usecortex_ai-0.3.6/src/usecortex_ai/__init__.py
@@ -11,6 +11,7 @@ from .types import (
     Bm25OperatorType,
     BodyScrapeWebpageUploadScrapeWebpagePost,
     BodyUpdateScrapeJobUploadUpdateWebpagePatch,
+    ChunkGraphRelationsResponse,
     ContentModel,
     DeleteMemoryRequest,
     DeleteSources,
@@ -20,19 +21,26 @@ from .types import (
     EmbeddingsDeleteData,
     EmbeddingsGetData,
     EmbeddingsSearchData,
+    Entity,
     ErrorResponse,
     ExtendedContext,
     FetchContentData,
     FileUploadResult,
-
+    GraphRelationsResponse,
     HttpValidationError,
     ListSourcesResponse,
     ListUserMemoriesResponse,
     MarkdownUploadRequest,
+    PathTriplet,
     ProcessingStatus,
     RelatedChunk,
+    RelationEvidence,
     Relations,
+    RetrieveMode,
+    RetrieveResponse,
     RetrieveUserMemoryResponse,
+    ScoredPathResponse,
+    ScoredTripletResponse,
     SearchChunk,
     SingleUploadData,
     Source,
@@ -40,9 +48,12 @@ from .types import (
     SubTenantIdsData,
     TenantCreateData,
     TenantStats,
+    TripleWithEvidence,
+    UserAssistantPair,
     UserMemory,
     ValidationError,
     ValidationErrorLocItem,
+    WebpageScrapeRequest,
 )
 from .errors import (
     BadRequestError,
@@ -70,6 +81,7 @@ __all__ = [
     "Bm25OperatorType",
     "BodyScrapeWebpageUploadScrapeWebpagePost",
     "BodyUpdateScrapeJobUploadUpdateWebpagePatch",
+    "ChunkGraphRelationsResponse",
     "ContentModel",
     "CortexAI",
     "CortexAIEnvironment",
@@ -81,22 +93,29 @@ __all__ = [
     "EmbeddingsDeleteData",
     "EmbeddingsGetData",
     "EmbeddingsSearchData",
+    "Entity",
     "ErrorResponse",
     "ExtendedContext",
     "FetchContentData",
     "FileUploadResult",
     "ForbiddenError",
-    "
+    "GraphRelationsResponse",
     "HttpValidationError",
     "InternalServerError",
     "ListSourcesResponse",
     "ListUserMemoriesResponse",
     "MarkdownUploadRequest",
     "NotFoundError",
+    "PathTriplet",
     "ProcessingStatus",
     "RelatedChunk",
+    "RelationEvidence",
     "Relations",
+    "RetrieveMode",
+    "RetrieveResponse",
     "RetrieveUserMemoryResponse",
+    "ScoredPathResponse",
+    "ScoredTripletResponse",
     "SearchChunk",
     "ServiceUnavailableError",
     "SingleUploadData",
@@ -105,11 +124,14 @@ __all__ = [
     "SubTenantIdsData",
     "TenantCreateData",
     "TenantStats",
+    "TripleWithEvidence",
     "UnauthorizedError",
     "UnprocessableEntityError",
+    "UserAssistantPair",
     "UserMemory",
     "ValidationError",
     "ValidationErrorLocItem",
+    "WebpageScrapeRequest",
     "document",
     "embeddings",
     "fetch",
```
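Net effect of the `__init__.py` change: the new graph-relation and retrieval models are re-exported from the package root, and one export is dropped (its text is truncated in this rendering; the deleted `generate_user_memory_response.py` in the file list suggests it was the old generate-user-memory response model). A minimal import sketch, assuming the distribution installs the `usecortex_ai` module as the paths above indicate:

```python
# Sketch only: these are the names 0.3.6 newly re-exports from the package root,
# mirroring the new files under src/usecortex_ai/types/ in the listing above.
# The module name `usecortex_ai` is inferred from the package paths.
from usecortex_ai import (
    ChunkGraphRelationsResponse,
    Entity,
    GraphRelationsResponse,
    PathTriplet,
    RelationEvidence,
    RetrieveMode,
    RetrieveResponse,
    ScoredPathResponse,
    ScoredTripletResponse,
    TripleWithEvidence,
    UserAssistantPair,
    WebpageScrapeRequest,
)
```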
```diff
--- usecortex_ai-0.3.4/src/usecortex_ai/search/client.py
+++ usecortex_ai-0.3.6/src/usecortex_ai/search/client.py
@@ -5,6 +5,8 @@ import typing
 from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
 from ..core.request_options import RequestOptions
 from ..types.bm_25_operator_type import Bm25OperatorType
+from ..types.retrieve_mode import RetrieveMode
+from ..types.retrieve_response import RetrieveResponse
 from ..types.search_chunk import SearchChunk
 from .raw_client import AsyncRawSearchClient, RawSearchClient
 from .types.alpha import Alpha
@@ -150,11 +152,14 @@ class SearchClient:
         tenant_id: str,
         sub_tenant_id: typing.Optional[str] = OMIT,
         max_chunks: typing.Optional[int] = OMIT,
+        mode: typing.Optional[RetrieveMode] = OMIT,
         alpha: typing.Optional[Alpha] = OMIT,
         recency_bias: typing.Optional[float] = OMIT,
         personalise_search: typing.Optional[bool] = OMIT,
+        graph_context: typing.Optional[bool] = OMIT,
+        extra_context: typing.Optional[str] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
-    ) ->
+    ) -> RetrieveResponse:
         """
         Search for relevant content within your indexed sources.
 
@@ -175,6 +180,9 @@ class SearchClient:
         max_chunks : typing.Optional[int]
             Maximum number of results to return
 
+        mode : typing.Optional[RetrieveMode]
+            Retrieval mode to use ('fast' or 'accurate')
+
         alpha : typing.Optional[Alpha]
             Search ranking algorithm parameter (0.0-1.0 or 'auto')
 
@@ -184,12 +192,18 @@ class SearchClient:
         personalise_search : typing.Optional[bool]
             Enable personalized search results based on user preferences
 
+        graph_context : typing.Optional[bool]
+            Enable graph context for search results
+
+        extra_context : typing.Optional[str]
+            Additional context provided by the user to guide retrieval
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
         Returns
         -------
-
+        RetrieveResponse
             Successful Response
 
         Examples
@@ -204,9 +218,12 @@ class SearchClient:
             tenant_id=tenant_id,
             sub_tenant_id=sub_tenant_id,
             max_chunks=max_chunks,
+            mode=mode,
             alpha=alpha,
             recency_bias=recency_bias,
             personalise_search=personalise_search,
+            graph_context=graph_context,
+            extra_context=extra_context,
             request_options=request_options,
         )
         return _response.data
@@ -411,11 +428,14 @@ class AsyncSearchClient:
         tenant_id: str,
         sub_tenant_id: typing.Optional[str] = OMIT,
         max_chunks: typing.Optional[int] = OMIT,
+        mode: typing.Optional[RetrieveMode] = OMIT,
         alpha: typing.Optional[Alpha] = OMIT,
         recency_bias: typing.Optional[float] = OMIT,
         personalise_search: typing.Optional[bool] = OMIT,
+        graph_context: typing.Optional[bool] = OMIT,
+        extra_context: typing.Optional[str] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
-    ) ->
+    ) -> RetrieveResponse:
         """
         Search for relevant content within your indexed sources.
 
@@ -436,6 +456,9 @@ class AsyncSearchClient:
         max_chunks : typing.Optional[int]
             Maximum number of results to return
 
+        mode : typing.Optional[RetrieveMode]
+            Retrieval mode to use ('fast' or 'accurate')
+
         alpha : typing.Optional[Alpha]
             Search ranking algorithm parameter (0.0-1.0 or 'auto')
 
@@ -445,12 +468,18 @@ class AsyncSearchClient:
         personalise_search : typing.Optional[bool]
             Enable personalized search results based on user preferences
 
+        graph_context : typing.Optional[bool]
+            Enable graph context for search results
+
+        extra_context : typing.Optional[str]
+            Additional context provided by the user to guide retrieval
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
         Returns
         -------
-
+        RetrieveResponse
             Successful Response
 
         Examples
@@ -469,9 +498,12 @@ class AsyncSearchClient:
             tenant_id=tenant_id,
             sub_tenant_id=sub_tenant_id,
             max_chunks=max_chunks,
+            mode=mode,
             alpha=alpha,
             recency_bias=recency_bias,
             personalise_search=personalise_search,
+            graph_context=graph_context,
+            extra_context=extra_context,
             request_options=request_options,
         )
         return _response.data
```
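Taken together, the `search/client.py` changes add three keyword arguments to `retrieve` (`mode`, `graph_context`, `extra_context`) and change its return annotation to `RetrieveResponse` (the previous annotation is truncated in this rendering). A hedged usage sketch, assuming the module imports as `usecortex_ai`, that `SearchClient` is exposed as `client.search`, and that the method also takes a `query` argument not visible in these hunks:

```python
# Sketch only: keyword names come from the diff above; values are illustrative.
from usecortex_ai import CortexAI

client = CortexAI(token="YOUR_TOKEN")

response = client.search.retrieve(
    query="quarterly revenue drivers",  # assumed parameter, not shown in this diff
    tenant_id="tenant_1234",
    max_chunks=10,
    mode="fast",                              # RetrieveMode: 'fast' or 'accurate' per the docstring
    graph_context=True,                       # new in 0.3.6: attach graph context to results
    extra_context="focus on FY2024 filings",  # new in 0.3.6: free-text hint to guide retrieval
)

# retrieve_response.py is a new 34-line model; its fields are not shown in this diff,
# so inspect the returned object (or the API reference) before relying on its shape.
print(response)
```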
```diff
--- usecortex_ai-0.3.4/src/usecortex_ai/search/raw_client.py
+++ usecortex_ai-0.3.6/src/usecortex_ai/search/raw_client.py
@@ -18,6 +18,8 @@ from ..errors.unauthorized_error import UnauthorizedError
 from ..errors.unprocessable_entity_error import UnprocessableEntityError
 from ..types.actual_error_response import ActualErrorResponse
 from ..types.bm_25_operator_type import Bm25OperatorType
+from ..types.retrieve_mode import RetrieveMode
+from ..types.retrieve_response import RetrieveResponse
 from ..types.search_chunk import SearchChunk
 from .types.alpha import Alpha
 
@@ -244,11 +246,14 @@ class RawSearchClient:
         tenant_id: str,
         sub_tenant_id: typing.Optional[str] = OMIT,
         max_chunks: typing.Optional[int] = OMIT,
+        mode: typing.Optional[RetrieveMode] = OMIT,
         alpha: typing.Optional[Alpha] = OMIT,
         recency_bias: typing.Optional[float] = OMIT,
         personalise_search: typing.Optional[bool] = OMIT,
+        graph_context: typing.Optional[bool] = OMIT,
+        extra_context: typing.Optional[str] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
-    ) -> HttpResponse[
+    ) -> HttpResponse[RetrieveResponse]:
         """
         Search for relevant content within your indexed sources.
 
@@ -269,6 +274,9 @@ class RawSearchClient:
         max_chunks : typing.Optional[int]
             Maximum number of results to return
 
+        mode : typing.Optional[RetrieveMode]
+            Retrieval mode to use ('fast' or 'accurate')
+
         alpha : typing.Optional[Alpha]
             Search ranking algorithm parameter (0.0-1.0 or 'auto')
 
@@ -278,12 +286,18 @@ class RawSearchClient:
         personalise_search : typing.Optional[bool]
             Enable personalized search results based on user preferences
 
+        graph_context : typing.Optional[bool]
+            Enable graph context for search results
+
+        extra_context : typing.Optional[str]
+            Additional context provided by the user to guide retrieval
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
         Returns
         -------
-        HttpResponse[
+        HttpResponse[RetrieveResponse]
             Successful Response
         """
         _response = self._client_wrapper.httpx_client.request(
@@ -294,9 +308,12 @@ class RawSearchClient:
                 "tenant_id": tenant_id,
                 "sub_tenant_id": sub_tenant_id,
                 "max_chunks": max_chunks,
+                "mode": mode,
                 "alpha": convert_and_respect_annotation_metadata(object_=alpha, annotation=Alpha, direction="write"),
                 "recency_bias": recency_bias,
                 "personalise_search": personalise_search,
+                "graph_context": graph_context,
+                "extra_context": extra_context,
             },
             headers={
                 "content-type": "application/json",
@@ -307,9 +324,9 @@ class RawSearchClient:
         try:
             if 200 <= _response.status_code < 300:
                 _data = typing.cast(
-
+                    RetrieveResponse,
                     parse_obj_as(
-                        type_=
+                        type_=RetrieveResponse,  # type: ignore
                         object_=_response.json(),
                     ),
                 )
@@ -765,11 +782,14 @@ class AsyncRawSearchClient:
         tenant_id: str,
         sub_tenant_id: typing.Optional[str] = OMIT,
         max_chunks: typing.Optional[int] = OMIT,
+        mode: typing.Optional[RetrieveMode] = OMIT,
         alpha: typing.Optional[Alpha] = OMIT,
         recency_bias: typing.Optional[float] = OMIT,
         personalise_search: typing.Optional[bool] = OMIT,
+        graph_context: typing.Optional[bool] = OMIT,
+        extra_context: typing.Optional[str] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
-    ) -> AsyncHttpResponse[
+    ) -> AsyncHttpResponse[RetrieveResponse]:
         """
         Search for relevant content within your indexed sources.
 
@@ -790,6 +810,9 @@ class AsyncRawSearchClient:
         max_chunks : typing.Optional[int]
             Maximum number of results to return
 
+        mode : typing.Optional[RetrieveMode]
+            Retrieval mode to use ('fast' or 'accurate')
+
         alpha : typing.Optional[Alpha]
             Search ranking algorithm parameter (0.0-1.0 or 'auto')
 
@@ -799,12 +822,18 @@ class AsyncRawSearchClient:
         personalise_search : typing.Optional[bool]
             Enable personalized search results based on user preferences
 
+        graph_context : typing.Optional[bool]
+            Enable graph context for search results
+
+        extra_context : typing.Optional[str]
+            Additional context provided by the user to guide retrieval
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
         Returns
         -------
-        AsyncHttpResponse[
+        AsyncHttpResponse[RetrieveResponse]
             Successful Response
         """
         _response = await self._client_wrapper.httpx_client.request(
@@ -815,9 +844,12 @@ class AsyncRawSearchClient:
                 "tenant_id": tenant_id,
                 "sub_tenant_id": sub_tenant_id,
                 "max_chunks": max_chunks,
+                "mode": mode,
                 "alpha": convert_and_respect_annotation_metadata(object_=alpha, annotation=Alpha, direction="write"),
                 "recency_bias": recency_bias,
                 "personalise_search": personalise_search,
+                "graph_context": graph_context,
+                "extra_context": extra_context,
             },
             headers={
                 "content-type": "application/json",
@@ -828,9 +860,9 @@ class AsyncRawSearchClient:
         try:
             if 200 <= _response.status_code < 300:
                 _data = typing.cast(
-
+                    RetrieveResponse,
                     parse_obj_as(
-                        type_=
+                        type_=RetrieveResponse,  # type: ignore
                         object_=_response.json(),
                     ),
                 )
```
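At the wire level, `search/raw_client.py` simply adds the same three fields to the JSON body of the retrieve request and parses successful responses into `RetrieveResponse`. A sketch of the request body limited to the keys visible in the diff (values are illustrative; fields left as OMIT are dropped by the generated client before sending):

```python
# Keys taken from the json={...} block in the diff above; everything else
# (endpoint URL, auth headers, other fields) is intentionally not shown here.
payload = {
    "tenant_id": "tenant_1234",
    "sub_tenant_id": None,
    "max_chunks": 10,
    "mode": "fast",                      # new in 0.3.6
    "alpha": "auto",                     # serialized via convert_and_respect_annotation_metadata
    "recency_bias": 0.2,
    "personalise_search": False,
    "graph_context": True,               # new in 0.3.6
    "extra_context": "focus on FY2024 filings",  # new in 0.3.6
}
```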
```diff
--- usecortex_ai-0.3.4/src/usecortex_ai/sources/client.py
+++ usecortex_ai-0.3.6/src/usecortex_ai/sources/client.py
@@ -4,6 +4,7 @@ import typing
 
 from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
 from ..core.request_options import RequestOptions
+from ..types.graph_relations_response import GraphRelationsResponse
 from ..types.list_sources_response import ListSourcesResponse
 from .raw_client import AsyncRawSourcesClient, RawSourcesClient
 
@@ -114,6 +115,52 @@ class SourcesClient:
         )
         return _response.data
 
+    def get_graph_relations_by_id(
+        self,
+        *,
+        source_id: str,
+        tenant_id: typing.Optional[str] = None,
+        sub_tenant_id: typing.Optional[str] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> GraphRelationsResponse:
+        """
+        Retrieve relations for a specific source.
+
+        Use this endpoint to fetch all relations associated with a specific source. This is useful when you need to understand the relationships between entities within a source.
+
+        Provide the source ID in the request body along with your tenant information to get the relations for that source.
+
+        Parameters
+        ----------
+        source_id : str
+            The source ID to fetch relations for
+
+        tenant_id : typing.Optional[str]
+            Unique identifier for the tenant/organization
+
+        sub_tenant_id : typing.Optional[str]
+            Optional sub-tenant identifier used to organize data within a tenant. If omitted, the default sub-tenant created during tenant setup will be used.
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        GraphRelationsResponse
+            Successful Response
+
+        Examples
+        --------
+        from usecortex-ai import CortexAI
+
+        client = CortexAI(token="YOUR_TOKEN", )
+        client.sources.get_graph_relations_by_id(source_id='CortexDoc1234', tenant_id='tenant_1234', )
+        """
+        _response = self._raw_client.get_graph_relations_by_id(
+            source_id=source_id, tenant_id=tenant_id, sub_tenant_id=sub_tenant_id, request_options=request_options
+        )
+        return _response.data
+
 
 class AsyncSourcesClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
@@ -225,3 +272,53 @@ class AsyncSourcesClient:
             tenant_id=tenant_id, source_ids=source_ids, sub_tenant_id=sub_tenant_id, request_options=request_options
         )
         return _response.data
+
+    async def get_graph_relations_by_id(
+        self,
+        *,
+        source_id: str,
+        tenant_id: typing.Optional[str] = None,
+        sub_tenant_id: typing.Optional[str] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> GraphRelationsResponse:
+        """
+        Retrieve relations for a specific source.
+
+        Use this endpoint to fetch all relations associated with a specific source. This is useful when you need to understand the relationships between entities within a source.
+
+        Provide the source ID in the request body along with your tenant information to get the relations for that source.
+
+        Parameters
+        ----------
+        source_id : str
+            The source ID to fetch relations for
+
+        tenant_id : typing.Optional[str]
+            Unique identifier for the tenant/organization
+
+        sub_tenant_id : typing.Optional[str]
+            Optional sub-tenant identifier used to organize data within a tenant. If omitted, the default sub-tenant created during tenant setup will be used.
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        GraphRelationsResponse
+            Successful Response
+
+        Examples
+        --------
+        import asyncio
+
+        from usecortex-ai import AsyncCortexAI
+
+        client = AsyncCortexAI(token="YOUR_TOKEN", )
+        async def main() -> None:
+            await client.sources.get_graph_relations_by_id(source_id='CortexDoc1234', tenant_id='tenant_1234', )
+        asyncio.run(main())
+        """
+        _response = await self._raw_client.get_graph_relations_by_id(
+            source_id=source_id, tenant_id=tenant_id, sub_tenant_id=sub_tenant_id, request_options=request_options
+        )
+        return _response.data
```