usecortex-ai 0.1.0 (usecortex_ai-0.1.0-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cortex_ai/__init__.py +103 -0
- cortex_ai/client.py +244 -0
- cortex_ai/core/__init__.py +52 -0
- cortex_ai/core/api_error.py +23 -0
- cortex_ai/core/client_wrapper.py +84 -0
- cortex_ai/core/datetime_utils.py +28 -0
- cortex_ai/core/file.py +67 -0
- cortex_ai/core/force_multipart.py +18 -0
- cortex_ai/core/http_client.py +543 -0
- cortex_ai/core/http_response.py +55 -0
- cortex_ai/core/jsonable_encoder.py +100 -0
- cortex_ai/core/pydantic_utilities.py +258 -0
- cortex_ai/core/query_encoder.py +58 -0
- cortex_ai/core/remove_none_from_dict.py +11 -0
- cortex_ai/core/request_options.py +35 -0
- cortex_ai/core/serialization.py +276 -0
- cortex_ai/embeddings/__init__.py +4 -0
- cortex_ai/embeddings/client.py +442 -0
- cortex_ai/embeddings/raw_client.py +1153 -0
- cortex_ai/environment.py +7 -0
- cortex_ai/errors/__init__.py +21 -0
- cortex_ai/errors/bad_request_error.py +11 -0
- cortex_ai/errors/forbidden_error.py +11 -0
- cortex_ai/errors/internal_server_error.py +11 -0
- cortex_ai/errors/not_found_error.py +11 -0
- cortex_ai/errors/service_unavailable_error.py +11 -0
- cortex_ai/errors/unauthorized_error.py +11 -0
- cortex_ai/errors/unprocessable_entity_error.py +10 -0
- cortex_ai/fetch/__init__.py +4 -0
- cortex_ai/fetch/client.py +143 -0
- cortex_ai/fetch/raw_client.py +310 -0
- cortex_ai/raw_client.py +90 -0
- cortex_ai/search/__init__.py +7 -0
- cortex_ai/search/client.py +536 -0
- cortex_ai/search/raw_client.py +1064 -0
- cortex_ai/search/types/__init__.py +7 -0
- cortex_ai/search/types/alpha.py +5 -0
- cortex_ai/sources/__init__.py +4 -0
- cortex_ai/sources/client.py +187 -0
- cortex_ai/sources/raw_client.py +532 -0
- cortex_ai/tenant/__init__.py +4 -0
- cortex_ai/tenant/client.py +120 -0
- cortex_ai/tenant/raw_client.py +283 -0
- cortex_ai/types/__init__.py +69 -0
- cortex_ai/types/actual_error_response.py +20 -0
- cortex_ai/types/app_sources_upload_data.py +22 -0
- cortex_ai/types/attachment_model.py +26 -0
- cortex_ai/types/batch_upload_data.py +22 -0
- cortex_ai/types/bm_25_operator_type.py +5 -0
- cortex_ai/types/content_model.py +26 -0
- cortex_ai/types/delete_memory_request.py +21 -0
- cortex_ai/types/embeddings_create_collection_data.py +22 -0
- cortex_ai/types/embeddings_delete_data.py +22 -0
- cortex_ai/types/embeddings_get_data.py +22 -0
- cortex_ai/types/embeddings_search_data.py +22 -0
- cortex_ai/types/error_response.py +22 -0
- cortex_ai/types/extended_context.py +20 -0
- cortex_ai/types/fetch_content_data.py +23 -0
- cortex_ai/types/file_upload_result.py +20 -0
- cortex_ai/types/full_text_search_data.py +22 -0
- cortex_ai/types/http_validation_error.py +20 -0
- cortex_ai/types/list_sources_response.py +22 -0
- cortex_ai/types/markdown_upload_request.py +21 -0
- cortex_ai/types/processing_status.py +22 -0
- cortex_ai/types/related_chunk.py +22 -0
- cortex_ai/types/search_chunk.py +34 -0
- cortex_ai/types/search_data.py +22 -0
- cortex_ai/types/single_upload_data.py +21 -0
- cortex_ai/types/source.py +32 -0
- cortex_ai/types/source_content.py +26 -0
- cortex_ai/types/source_model.py +32 -0
- cortex_ai/types/tenant_create_data.py +22 -0
- cortex_ai/types/tenant_stats.py +23 -0
- cortex_ai/types/validation_error.py +22 -0
- cortex_ai/types/validation_error_loc_item.py +5 -0
- cortex_ai/upload/__init__.py +4 -0
- cortex_ai/upload/client.py +1572 -0
- cortex_ai/upload/raw_client.py +4202 -0
- cortex_ai/user/__init__.py +4 -0
- cortex_ai/user/client.py +125 -0
- cortex_ai/user/raw_client.py +300 -0
- cortex_ai/user_memory/__init__.py +4 -0
- cortex_ai/user_memory/client.py +443 -0
- cortex_ai/user_memory/raw_client.py +651 -0
- usecortex_ai-0.1.0.dist-info/METADATA +136 -0
- usecortex_ai-0.1.0.dist-info/RECORD +89 -0
- usecortex_ai-0.1.0.dist-info/WHEEL +5 -0
- usecortex_ai-0.1.0.dist-info/licenses/LICENSE +22 -0
- usecortex_ai-0.1.0.dist-info/top_level.txt +1 -0
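
The wheel ships one importable package, `cortex_ai`, with per-resource sub-clients (`upload`, `search`, `embeddings`, `sources`, `tenant`, `user`, `user_memory`, `fetch`). The generated docstring examples in the diff below import `from cortex-ai ...`, which is not a valid Python module path (hyphens cannot appear in import names); the package directory listed above suggests the real path is `cortex_ai`. A minimal sketch of obtaining a client under those assumptions (distribution name taken from the wheel filename, import path from the file layout):

    # pip install usecortex-ai          # assumed distribution name, from the wheel filename
    from cortex_ai import CortexAI      # assumed import path, from the cortex_ai/ package above

    client = CortexAI(token="YOUR_TOKEN")
    status = client.upload.verify_processing(file_id="file_id")
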
cortex_ai/upload/client.py
@@ -0,0 +1,1572 @@
# This file was auto-generated by Fern from our API Definition.

import typing

from .. import core
from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
from ..core.request_options import RequestOptions
from ..types.app_sources_upload_data import AppSourcesUploadData
from ..types.batch_upload_data import BatchUploadData
from ..types.processing_status import ProcessingStatus
from ..types.single_upload_data import SingleUploadData
from ..types.source_model import SourceModel
from .raw_client import AsyncRawUploadClient, RawUploadClient

# this is used as the default value for optional parameters
OMIT = typing.cast(typing.Any, ...)

class UploadClient:
    def __init__(self, *, client_wrapper: SyncClientWrapper):
        self._raw_client = RawUploadClient(client_wrapper=client_wrapper)

    @property
    def with_raw_response(self) -> RawUploadClient:
        """
        Retrieves a raw implementation of this client that returns raw responses.

        Returns
        -------
        RawUploadClient
        """
        return self._raw_client

    def batch_upload(
        self,
        *,
        tenant_id: str,
        files: typing.List[core.File],
        sub_tenant_id: typing.Optional[str] = None,
        tenant_metadata: typing.Optional[str] = OMIT,
        document_metadata: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> BatchUploadData:
        """
        Parameters
        ----------
        tenant_id : str

        files : typing.List[core.File]
            See core.File for more documentation

        sub_tenant_id : typing.Optional[str]

        tenant_metadata : typing.Optional[str]

        document_metadata : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        BatchUploadData
            Successful Response

        Examples
        --------
        from cortex-ai import CortexAI

        client = CortexAI(token="YOUR_TOKEN", )
        client.upload.batch_upload(tenant_id='tenant_id', )
        """
        _response = self._raw_client.batch_upload(
            tenant_id=tenant_id,
            files=files,
            sub_tenant_id=sub_tenant_id,
            tenant_metadata=tenant_metadata,
            document_metadata=document_metadata,
            request_options=request_options,
        )
        return _response.data
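
# Hedged usage sketch, not part of the generated file: the docstring example above omits
# the required `files` argument. This assumes the package imports as `cortex_ai` and that
# core.File accepts an open binary file handle (cortex_ai/core/file.py defines the type);
# the file paths are illustrative only.
from cortex_ai import CortexAI

sketch_client = CortexAI(token="YOUR_TOKEN")
with open("report.pdf", "rb") as f1, open("notes.md", "rb") as f2:
    batch = sketch_client.upload.batch_upload(
        tenant_id="tenant_id",
        files=[f1, f2],  # typing.List[core.File]; open file handles are one assumed accepted form
    )
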

    def batch_update(
        self,
        *,
        tenant_id: str,
        files: typing.List[core.File],
        sub_tenant_id: typing.Optional[str] = None,
        source_ids: typing.Optional[typing.List[str]] = OMIT,
        tenant_metadata: typing.Optional[str] = OMIT,
        document_metadata: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> BatchUploadData:
        """
        Parameters
        ----------
        tenant_id : str

        files : typing.List[core.File]
            See core.File for more documentation

        sub_tenant_id : typing.Optional[str]

        source_ids : typing.Optional[typing.List[str]]

        tenant_metadata : typing.Optional[str]

        document_metadata : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        BatchUploadData
            Successful Response

        Examples
        --------
        from cortex-ai import CortexAI

        client = CortexAI(token="YOUR_TOKEN", )
        client.upload.batch_update(tenant_id='tenant_id', )
        """
        _response = self._raw_client.batch_update(
            tenant_id=tenant_id,
            files=files,
            sub_tenant_id=sub_tenant_id,
            source_ids=source_ids,
            tenant_metadata=tenant_metadata,
            document_metadata=document_metadata,
            request_options=request_options,
        )
        return _response.data

    def upload_document(
        self,
        *,
        tenant_id: str,
        file: core.File,
        sub_tenant_id: typing.Optional[str] = None,
        tenant_metadata: typing.Optional[str] = OMIT,
        document_metadata: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Parameters
        ----------
        tenant_id : str

        file : core.File
            See core.File for more documentation

        sub_tenant_id : typing.Optional[str]

        tenant_metadata : typing.Optional[str]

        document_metadata : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        from cortex-ai import CortexAI

        client = CortexAI(token="YOUR_TOKEN", )
        client.upload.upload_document(tenant_id='tenant_id', )
        """
        _response = self._raw_client.upload_document(
            tenant_id=tenant_id,
            file=file,
            sub_tenant_id=sub_tenant_id,
            tenant_metadata=tenant_metadata,
            document_metadata=document_metadata,
            request_options=request_options,
        )
        return _response.data

    def update_document(
        self,
        *,
        source_id: str,
        tenant_id: str,
        file: core.File,
        sub_tenant_id: typing.Optional[str] = None,
        tenant_metadata: typing.Optional[str] = OMIT,
        document_metadata: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Parameters
        ----------
        source_id : str

        tenant_id : str

        file : core.File
            See core.File for more documentation

        sub_tenant_id : typing.Optional[str]

        tenant_metadata : typing.Optional[str]

        document_metadata : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        from cortex-ai import CortexAI

        client = CortexAI(token="YOUR_TOKEN", )
        client.upload.update_document(source_id='source_id', tenant_id='tenant_id', )
        """
        _response = self._raw_client.update_document(
            source_id=source_id,
            tenant_id=tenant_id,
            file=file,
            sub_tenant_id=sub_tenant_id,
            tenant_metadata=tenant_metadata,
            document_metadata=document_metadata,
            request_options=request_options,
        )
        return _response.data

    def upload_app_sources(
        self,
        *,
        tenant_id: str,
        request: typing.Sequence[SourceModel],
        sub_tenant_id: typing.Optional[str] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> AppSourcesUploadData:
        """
        Parameters
        ----------
        tenant_id : str

        request : typing.Sequence[SourceModel]

        sub_tenant_id : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        AppSourcesUploadData
            Successful Response

        Examples
        --------
        from cortex-ai import CortexAI, SourceModel

        client = CortexAI(token="YOUR_TOKEN", )
        client.upload.upload_app_sources(tenant_id='tenant_id', request=[SourceModel()], )
        """
        _response = self._raw_client.upload_app_sources(
            tenant_id=tenant_id, request=request, sub_tenant_id=sub_tenant_id, request_options=request_options
        )
        return _response.data

    def upload_markdown(
        self,
        *,
        tenant_id: str,
        content: str,
        sub_tenant_id: typing.Optional[str] = None,
        tenant_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Upload markdown content. If request.document_metadata contains a 'file_id' key (non-empty), it will be used as the file ID;
        otherwise, a new file_id will be generated for that file. This allows file-specific IDs to be set directly in the metadata.

        Parameters
        ----------
        tenant_id : str

        content : str

        sub_tenant_id : typing.Optional[str]

        tenant_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]

        document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        from cortex-ai import CortexAI

        client = CortexAI(token="YOUR_TOKEN", )
        client.upload.upload_markdown(tenant_id='tenant_id', content='content', )
        """
        _response = self._raw_client.upload_markdown(
            tenant_id=tenant_id,
            content=content,
            sub_tenant_id=sub_tenant_id,
            tenant_metadata=tenant_metadata,
            document_metadata=document_metadata,
            request_options=request_options,
        )
        return _response.data
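
# Hedged usage sketch, not part of the generated file: per the docstring above, a non-empty
# 'file_id' key in document_metadata is reused as the file ID, otherwise one is generated.
# Assumes the `cortex_ai` import path; the metadata values are illustrative.
from cortex_ai import CortexAI

sketch_client = CortexAI(token="YOUR_TOKEN")
single = sketch_client.upload.upload_markdown(
    tenant_id="tenant_id",
    content="# Release notes\n\n- fixed retries",
    document_metadata={"file_id": "release-notes-001"},  # reused as the file ID when non-empty
    tenant_metadata={"team": "docs"},                    # arbitrary keys allowed by the Dict[str, Any] type
)
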

    def upload_text(
        self,
        *,
        tenant_id: str,
        content: str,
        sub_tenant_id: typing.Optional[str] = None,
        tenant_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Upload markdown content. If request.document_metadata contains a 'file_id' key (non-empty), it will be used as the file ID;
        otherwise, a new file_id will be generated for that file. This allows file-specific IDs to be set directly in the metadata.

        Parameters
        ----------
        tenant_id : str

        content : str

        sub_tenant_id : typing.Optional[str]

        tenant_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]

        document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        from cortex-ai import CortexAI

        client = CortexAI(token="YOUR_TOKEN", )
        client.upload.upload_text(tenant_id='tenant_id', content='content', )
        """
        _response = self._raw_client.upload_text(
            tenant_id=tenant_id,
            content=content,
            sub_tenant_id=sub_tenant_id,
            tenant_metadata=tenant_metadata,
            document_metadata=document_metadata,
            request_options=request_options,
        )
        return _response.data

    def update_markdown(
        self,
        *,
        source_id: str,
        tenant_id: str,
        content: str,
        sub_tenant_id: typing.Optional[str] = None,
        tenant_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Parameters
        ----------
        source_id : str

        tenant_id : str

        content : str

        sub_tenant_id : typing.Optional[str]

        tenant_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]

        document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        from cortex-ai import CortexAI

        client = CortexAI(token="YOUR_TOKEN", )
        client.upload.update_markdown(source_id='source_id', tenant_id='tenant_id', content='content', )
        """
        _response = self._raw_client.update_markdown(
            source_id=source_id,
            tenant_id=tenant_id,
            content=content,
            sub_tenant_id=sub_tenant_id,
            tenant_metadata=tenant_metadata,
            document_metadata=document_metadata,
            request_options=request_options,
        )
        return _response.data

    def update_text(
        self,
        *,
        source_id: str,
        tenant_id: str,
        content: str,
        sub_tenant_id: typing.Optional[str] = None,
        tenant_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Parameters
        ----------
        source_id : str

        tenant_id : str

        content : str

        sub_tenant_id : typing.Optional[str]

        tenant_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]

        document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        from cortex-ai import CortexAI

        client = CortexAI(token="YOUR_TOKEN", )
        client.upload.update_text(source_id='source_id', tenant_id='tenant_id', content='content', )
        """
        _response = self._raw_client.update_text(
            source_id=source_id,
            tenant_id=tenant_id,
            content=content,
            sub_tenant_id=sub_tenant_id,
            tenant_metadata=tenant_metadata,
            document_metadata=document_metadata,
            request_options=request_options,
        )
        return _response.data

    def upload_embeddings(
        self,
        *,
        tenant_id: str,
        embeddings: typing.Sequence[typing.Sequence[float]],
        sub_tenant_id: typing.Optional[str] = None,
        file_id: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Parameters
        ----------
        tenant_id : str

        embeddings : typing.Sequence[typing.Sequence[float]]

        sub_tenant_id : typing.Optional[str]

        file_id : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        from cortex-ai import CortexAI

        client = CortexAI(token="YOUR_TOKEN", )
        client.upload.upload_embeddings(tenant_id='tenant_id', embeddings=[[1.1]], )
        """
        _response = self._raw_client.upload_embeddings(
            tenant_id=tenant_id,
            embeddings=embeddings,
            sub_tenant_id=sub_tenant_id,
            file_id=file_id,
            request_options=request_options,
        )
        return _response.data

    def update_embeddings(
        self,
        *,
        tenant_id: str,
        embeddings: typing.Dict[str, typing.Sequence[float]],
        sub_tenant_id: typing.Optional[str] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Parameters
        ----------
        tenant_id : str

        embeddings : typing.Dict[str, typing.Sequence[float]]
            Dictionary with chunk_id as key and embedding array as value

        sub_tenant_id : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        from cortex-ai import CortexAI

        client = CortexAI(token="YOUR_TOKEN", )
        client.upload.update_embeddings(tenant_id='tenant_id', embeddings={'key': [1.1]}, )
        """
        _response = self._raw_client.update_embeddings(
            tenant_id=tenant_id, embeddings=embeddings, sub_tenant_id=sub_tenant_id, request_options=request_options
        )
        return _response.data
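
# Hedged usage sketch, not part of the generated file: per the signatures above,
# upload_embeddings takes a sequence of embedding vectors (Sequence[Sequence[float]]) plus
# an optional file_id, while update_embeddings keys each replacement vector by chunk_id
# (Dict[str, Sequence[float]]). Assumes the `cortex_ai` import path; vectors are illustrative.
from cortex_ai import CortexAI

sketch_client = CortexAI(token="YOUR_TOKEN")
sketch_client.upload.upload_embeddings(
    tenant_id="tenant_id",
    embeddings=[[0.12, 0.98, -0.33], [0.05, 0.41, 0.77]],  # one vector per chunk
    file_id="file_id",
)
sketch_client.upload.update_embeddings(
    tenant_id="tenant_id",
    embeddings={"chunk_1": [0.10, 0.95, -0.30]},  # chunk_id -> replacement vector
)
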

    def scrape_webpage(
        self,
        *,
        web_url: str,
        tenant_id: str,
        sub_tenant_id: typing.Optional[str] = None,
        file_id: typing.Optional[str] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Parameters
        ----------
        web_url : str

        tenant_id : str

        sub_tenant_id : typing.Optional[str]

        file_id : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        from cortex-ai import CortexAI

        client = CortexAI(token="YOUR_TOKEN", )
        client.upload.scrape_webpage(web_url='web_url', tenant_id='tenant_id', )
        """
        _response = self._raw_client.scrape_webpage(
            web_url=web_url,
            tenant_id=tenant_id,
            sub_tenant_id=sub_tenant_id,
            file_id=file_id,
            request_options=request_options,
        )
        return _response.data

    def update_webpage(
        self,
        *,
        web_url: str,
        source_id: str,
        tenant_id: str,
        sub_tenant_id: typing.Optional[str] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Parameters
        ----------
        web_url : str

        source_id : str

        tenant_id : str

        sub_tenant_id : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        from cortex-ai import CortexAI

        client = CortexAI(token="YOUR_TOKEN", )
        client.upload.update_webpage(web_url='web_url', source_id='source_id', tenant_id='tenant_id', )
        """
        _response = self._raw_client.update_webpage(
            web_url=web_url,
            source_id=source_id,
            tenant_id=tenant_id,
            sub_tenant_id=sub_tenant_id,
            request_options=request_options,
        )
        return _response.data

    def delete_source(
        self,
        *,
        tenant_id: str,
        source_ids: typing.Sequence[str],
        sub_tenant_id: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> typing.Optional[typing.Any]:
        """
        Parameters
        ----------
        tenant_id : str

        source_ids : typing.Sequence[str]

        sub_tenant_id : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        typing.Optional[typing.Any]
            Successful Response

        Examples
        --------
        from cortex-ai import CortexAI

        client = CortexAI(token="YOUR_TOKEN", )
        client.upload.delete_source(tenant_id='tenant_id', source_ids=['source_ids'], )
        """
        _response = self._raw_client.delete_source(
            tenant_id=tenant_id, source_ids=source_ids, sub_tenant_id=sub_tenant_id, request_options=request_options
        )
        return _response.data

    def delete_memory(
        self,
        *,
        tenant_id: str,
        source_ids: typing.Sequence[str],
        sub_tenant_id: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> typing.Optional[typing.Any]:
        """
        Parameters
        ----------
        tenant_id : str

        source_ids : typing.Sequence[str]

        sub_tenant_id : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        typing.Optional[typing.Any]
            Successful Response

        Examples
        --------
        from cortex-ai import CortexAI

        client = CortexAI(token="YOUR_TOKEN", )
        client.upload.delete_memory(tenant_id='tenant_id', source_ids=['source_ids'], )
        """
        _response = self._raw_client.delete_memory(
            tenant_id=tenant_id, source_ids=source_ids, sub_tenant_id=sub_tenant_id, request_options=request_options
        )
        return _response.data

    def verify_processing(
        self,
        *,
        file_id: str,
        tenant_id: typing.Optional[str] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> ProcessingStatus:
        """
        Verify the processing status of an uploaded file.

        Returns:
            ProcessingStatusResponse: Processing status information

        Parameters
        ----------
        file_id : str

        tenant_id : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        ProcessingStatus
            Successful Response

        Examples
        --------
        from cortex-ai import CortexAI

        client = CortexAI(token="YOUR_TOKEN", )
        client.upload.verify_processing(file_id='file_id', )
        """
        _response = self._raw_client.verify_processing(
            file_id=file_id, tenant_id=tenant_id, request_options=request_options
        )
        return _response.data
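
# Hedged usage sketch, not part of the generated file: verify_processing reports the
# processing status of an uploaded file, so a caller can poll it after upload_document.
# Assumes the `cortex_ai` import path; the poll interval and attempt count are illustrative,
# and since the docstrings do not show how a file ID is read off SingleUploadData, a literal
# placeholder is used here (see types/processing_status.py for the returned model).
import time

from cortex_ai import CortexAI

sketch_client = CortexAI(token="YOUR_TOKEN")
with open("report.pdf", "rb") as f:
    uploaded = sketch_client.upload.upload_document(tenant_id="tenant_id", file=f)

for _ in range(10):
    status = sketch_client.upload.verify_processing(file_id="file_id", tenant_id="tenant_id")
    print(status)  # inspect the returned ProcessingStatus model
    time.sleep(5)
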


class AsyncUploadClient:
    def __init__(self, *, client_wrapper: AsyncClientWrapper):
        self._raw_client = AsyncRawUploadClient(client_wrapper=client_wrapper)

    @property
    def with_raw_response(self) -> AsyncRawUploadClient:
        """
        Retrieves a raw implementation of this client that returns raw responses.

        Returns
        -------
        AsyncRawUploadClient
        """
        return self._raw_client

    async def batch_upload(
        self,
        *,
        tenant_id: str,
        files: typing.List[core.File],
        sub_tenant_id: typing.Optional[str] = None,
        tenant_metadata: typing.Optional[str] = OMIT,
        document_metadata: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> BatchUploadData:
        """
        Parameters
        ----------
        tenant_id : str

        files : typing.List[core.File]
            See core.File for more documentation

        sub_tenant_id : typing.Optional[str]

        tenant_metadata : typing.Optional[str]

        document_metadata : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        BatchUploadData
            Successful Response

        Examples
        --------
        import asyncio

        from cortex-ai import AsyncCortexAI

        client = AsyncCortexAI(token="YOUR_TOKEN", )
        async def main() -> None:
            await client.upload.batch_upload(tenant_id='tenant_id', )
        asyncio.run(main())
        """
        _response = await self._raw_client.batch_upload(
            tenant_id=tenant_id,
            files=files,
            sub_tenant_id=sub_tenant_id,
            tenant_metadata=tenant_metadata,
            document_metadata=document_metadata,
            request_options=request_options,
        )
        return _response.data

    async def batch_update(
        self,
        *,
        tenant_id: str,
        files: typing.List[core.File],
        sub_tenant_id: typing.Optional[str] = None,
        source_ids: typing.Optional[typing.List[str]] = OMIT,
        tenant_metadata: typing.Optional[str] = OMIT,
        document_metadata: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> BatchUploadData:
        """
        Parameters
        ----------
        tenant_id : str

        files : typing.List[core.File]
            See core.File for more documentation

        sub_tenant_id : typing.Optional[str]

        source_ids : typing.Optional[typing.List[str]]

        tenant_metadata : typing.Optional[str]

        document_metadata : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        BatchUploadData
            Successful Response

        Examples
        --------
        import asyncio

        from cortex-ai import AsyncCortexAI

        client = AsyncCortexAI(token="YOUR_TOKEN", )
        async def main() -> None:
            await client.upload.batch_update(tenant_id='tenant_id', )
        asyncio.run(main())
        """
        _response = await self._raw_client.batch_update(
            tenant_id=tenant_id,
            files=files,
            sub_tenant_id=sub_tenant_id,
            source_ids=source_ids,
            tenant_metadata=tenant_metadata,
            document_metadata=document_metadata,
            request_options=request_options,
        )
        return _response.data

    async def upload_document(
        self,
        *,
        tenant_id: str,
        file: core.File,
        sub_tenant_id: typing.Optional[str] = None,
        tenant_metadata: typing.Optional[str] = OMIT,
        document_metadata: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Parameters
        ----------
        tenant_id : str

        file : core.File
            See core.File for more documentation

        sub_tenant_id : typing.Optional[str]

        tenant_metadata : typing.Optional[str]

        document_metadata : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        import asyncio

        from cortex-ai import AsyncCortexAI

        client = AsyncCortexAI(token="YOUR_TOKEN", )
        async def main() -> None:
            await client.upload.upload_document(tenant_id='tenant_id', )
        asyncio.run(main())
        """
        _response = await self._raw_client.upload_document(
            tenant_id=tenant_id,
            file=file,
            sub_tenant_id=sub_tenant_id,
            tenant_metadata=tenant_metadata,
            document_metadata=document_metadata,
            request_options=request_options,
        )
        return _response.data

    async def update_document(
        self,
        *,
        source_id: str,
        tenant_id: str,
        file: core.File,
        sub_tenant_id: typing.Optional[str] = None,
        tenant_metadata: typing.Optional[str] = OMIT,
        document_metadata: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Parameters
        ----------
        source_id : str

        tenant_id : str

        file : core.File
            See core.File for more documentation

        sub_tenant_id : typing.Optional[str]

        tenant_metadata : typing.Optional[str]

        document_metadata : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        import asyncio

        from cortex-ai import AsyncCortexAI

        client = AsyncCortexAI(token="YOUR_TOKEN", )
        async def main() -> None:
            await client.upload.update_document(source_id='source_id', tenant_id='tenant_id', )
        asyncio.run(main())
        """
        _response = await self._raw_client.update_document(
            source_id=source_id,
            tenant_id=tenant_id,
            file=file,
            sub_tenant_id=sub_tenant_id,
            tenant_metadata=tenant_metadata,
            document_metadata=document_metadata,
            request_options=request_options,
        )
        return _response.data

    async def upload_app_sources(
        self,
        *,
        tenant_id: str,
        request: typing.Sequence[SourceModel],
        sub_tenant_id: typing.Optional[str] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> AppSourcesUploadData:
        """
        Parameters
        ----------
        tenant_id : str

        request : typing.Sequence[SourceModel]

        sub_tenant_id : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        AppSourcesUploadData
            Successful Response

        Examples
        --------
        import asyncio

        from cortex-ai import AsyncCortexAI, SourceModel

        client = AsyncCortexAI(token="YOUR_TOKEN", )
        async def main() -> None:
            await client.upload.upload_app_sources(tenant_id='tenant_id', request=[SourceModel()], )
        asyncio.run(main())
        """
        _response = await self._raw_client.upload_app_sources(
            tenant_id=tenant_id, request=request, sub_tenant_id=sub_tenant_id, request_options=request_options
        )
        return _response.data

    async def upload_markdown(
        self,
        *,
        tenant_id: str,
        content: str,
        sub_tenant_id: typing.Optional[str] = None,
        tenant_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Upload markdown content. If request.document_metadata contains a 'file_id' key (non-empty), it will be used as the file ID;
        otherwise, a new file_id will be generated for that file. This allows file-specific IDs to be set directly in the metadata.

        Parameters
        ----------
        tenant_id : str

        content : str

        sub_tenant_id : typing.Optional[str]

        tenant_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]

        document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        import asyncio

        from cortex-ai import AsyncCortexAI

        client = AsyncCortexAI(token="YOUR_TOKEN", )
        async def main() -> None:
            await client.upload.upload_markdown(tenant_id='tenant_id', content='content', )
        asyncio.run(main())
        """
        _response = await self._raw_client.upload_markdown(
            tenant_id=tenant_id,
            content=content,
            sub_tenant_id=sub_tenant_id,
            tenant_metadata=tenant_metadata,
            document_metadata=document_metadata,
            request_options=request_options,
        )
        return _response.data

    async def upload_text(
        self,
        *,
        tenant_id: str,
        content: str,
        sub_tenant_id: typing.Optional[str] = None,
        tenant_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Upload markdown content. If request.document_metadata contains a 'file_id' key (non-empty), it will be used as the file ID;
        otherwise, a new file_id will be generated for that file. This allows file-specific IDs to be set directly in the metadata.

        Parameters
        ----------
        tenant_id : str

        content : str

        sub_tenant_id : typing.Optional[str]

        tenant_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]

        document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        import asyncio

        from cortex-ai import AsyncCortexAI

        client = AsyncCortexAI(token="YOUR_TOKEN", )
        async def main() -> None:
            await client.upload.upload_text(tenant_id='tenant_id', content='content', )
        asyncio.run(main())
        """
        _response = await self._raw_client.upload_text(
            tenant_id=tenant_id,
            content=content,
            sub_tenant_id=sub_tenant_id,
            tenant_metadata=tenant_metadata,
            document_metadata=document_metadata,
            request_options=request_options,
        )
        return _response.data

    async def update_markdown(
        self,
        *,
        source_id: str,
        tenant_id: str,
        content: str,
        sub_tenant_id: typing.Optional[str] = None,
        tenant_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Parameters
        ----------
        source_id : str

        tenant_id : str

        content : str

        sub_tenant_id : typing.Optional[str]

        tenant_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]

        document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        import asyncio

        from cortex-ai import AsyncCortexAI

        client = AsyncCortexAI(token="YOUR_TOKEN", )
        async def main() -> None:
            await client.upload.update_markdown(source_id='source_id', tenant_id='tenant_id', content='content', )
        asyncio.run(main())
        """
        _response = await self._raw_client.update_markdown(
            source_id=source_id,
            tenant_id=tenant_id,
            content=content,
            sub_tenant_id=sub_tenant_id,
            tenant_metadata=tenant_metadata,
            document_metadata=document_metadata,
            request_options=request_options,
        )
        return _response.data

    async def update_text(
        self,
        *,
        source_id: str,
        tenant_id: str,
        content: str,
        sub_tenant_id: typing.Optional[str] = None,
        tenant_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Parameters
        ----------
        source_id : str

        tenant_id : str

        content : str

        sub_tenant_id : typing.Optional[str]

        tenant_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]

        document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        import asyncio

        from cortex-ai import AsyncCortexAI

        client = AsyncCortexAI(token="YOUR_TOKEN", )
        async def main() -> None:
            await client.upload.update_text(source_id='source_id', tenant_id='tenant_id', content='content', )
        asyncio.run(main())
        """
        _response = await self._raw_client.update_text(
            source_id=source_id,
            tenant_id=tenant_id,
            content=content,
            sub_tenant_id=sub_tenant_id,
            tenant_metadata=tenant_metadata,
            document_metadata=document_metadata,
            request_options=request_options,
        )
        return _response.data

    async def upload_embeddings(
        self,
        *,
        tenant_id: str,
        embeddings: typing.Sequence[typing.Sequence[float]],
        sub_tenant_id: typing.Optional[str] = None,
        file_id: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Parameters
        ----------
        tenant_id : str

        embeddings : typing.Sequence[typing.Sequence[float]]

        sub_tenant_id : typing.Optional[str]

        file_id : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        import asyncio

        from cortex-ai import AsyncCortexAI

        client = AsyncCortexAI(token="YOUR_TOKEN", )
        async def main() -> None:
            await client.upload.upload_embeddings(tenant_id='tenant_id', embeddings=[[1.1]], )
        asyncio.run(main())
        """
        _response = await self._raw_client.upload_embeddings(
            tenant_id=tenant_id,
            embeddings=embeddings,
            sub_tenant_id=sub_tenant_id,
            file_id=file_id,
            request_options=request_options,
        )
        return _response.data

    async def update_embeddings(
        self,
        *,
        tenant_id: str,
        embeddings: typing.Dict[str, typing.Sequence[float]],
        sub_tenant_id: typing.Optional[str] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Parameters
        ----------
        tenant_id : str

        embeddings : typing.Dict[str, typing.Sequence[float]]
            Dictionary with chunk_id as key and embedding array as value

        sub_tenant_id : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        import asyncio

        from cortex-ai import AsyncCortexAI

        client = AsyncCortexAI(token="YOUR_TOKEN", )
        async def main() -> None:
            await client.upload.update_embeddings(tenant_id='tenant_id', embeddings={'key': [1.1]}, )
        asyncio.run(main())
        """
        _response = await self._raw_client.update_embeddings(
            tenant_id=tenant_id, embeddings=embeddings, sub_tenant_id=sub_tenant_id, request_options=request_options
        )
        return _response.data

    async def scrape_webpage(
        self,
        *,
        web_url: str,
        tenant_id: str,
        sub_tenant_id: typing.Optional[str] = None,
        file_id: typing.Optional[str] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Parameters
        ----------
        web_url : str

        tenant_id : str

        sub_tenant_id : typing.Optional[str]

        file_id : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        import asyncio

        from cortex-ai import AsyncCortexAI

        client = AsyncCortexAI(token="YOUR_TOKEN", )
        async def main() -> None:
            await client.upload.scrape_webpage(web_url='web_url', tenant_id='tenant_id', )
        asyncio.run(main())
        """
        _response = await self._raw_client.scrape_webpage(
            web_url=web_url,
            tenant_id=tenant_id,
            sub_tenant_id=sub_tenant_id,
            file_id=file_id,
            request_options=request_options,
        )
        return _response.data

    async def update_webpage(
        self,
        *,
        web_url: str,
        source_id: str,
        tenant_id: str,
        sub_tenant_id: typing.Optional[str] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SingleUploadData:
        """
        Parameters
        ----------
        web_url : str

        source_id : str

        tenant_id : str

        sub_tenant_id : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SingleUploadData
            Successful Response

        Examples
        --------
        import asyncio

        from cortex-ai import AsyncCortexAI

        client = AsyncCortexAI(token="YOUR_TOKEN", )
        async def main() -> None:
            await client.upload.update_webpage(web_url='web_url', source_id='source_id', tenant_id='tenant_id', )
        asyncio.run(main())
        """
        _response = await self._raw_client.update_webpage(
            web_url=web_url,
            source_id=source_id,
            tenant_id=tenant_id,
            sub_tenant_id=sub_tenant_id,
            request_options=request_options,
        )
        return _response.data

    async def delete_source(
        self,
        *,
        tenant_id: str,
        source_ids: typing.Sequence[str],
        sub_tenant_id: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> typing.Optional[typing.Any]:
        """
        Parameters
        ----------
        tenant_id : str

        source_ids : typing.Sequence[str]

        sub_tenant_id : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        typing.Optional[typing.Any]
            Successful Response

        Examples
        --------
        import asyncio

        from cortex-ai import AsyncCortexAI

        client = AsyncCortexAI(token="YOUR_TOKEN", )
        async def main() -> None:
            await client.upload.delete_source(tenant_id='tenant_id', source_ids=['source_ids'], )
        asyncio.run(main())
        """
        _response = await self._raw_client.delete_source(
            tenant_id=tenant_id, source_ids=source_ids, sub_tenant_id=sub_tenant_id, request_options=request_options
        )
        return _response.data

    async def delete_memory(
        self,
        *,
        tenant_id: str,
        source_ids: typing.Sequence[str],
        sub_tenant_id: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> typing.Optional[typing.Any]:
        """
        Parameters
        ----------
        tenant_id : str

        source_ids : typing.Sequence[str]

        sub_tenant_id : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        typing.Optional[typing.Any]
            Successful Response

        Examples
        --------
        import asyncio

        from cortex-ai import AsyncCortexAI

        client = AsyncCortexAI(token="YOUR_TOKEN", )
        async def main() -> None:
            await client.upload.delete_memory(tenant_id='tenant_id', source_ids=['source_ids'], )
        asyncio.run(main())
        """
        _response = await self._raw_client.delete_memory(
            tenant_id=tenant_id, source_ids=source_ids, sub_tenant_id=sub_tenant_id, request_options=request_options
        )
        return _response.data

    async def verify_processing(
        self,
        *,
        file_id: str,
        tenant_id: typing.Optional[str] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> ProcessingStatus:
        """
        Verify the processing status of an uploaded file.

        Returns:
            ProcessingStatusResponse: Processing status information

        Parameters
        ----------
        file_id : str

        tenant_id : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        ProcessingStatus
            Successful Response

        Examples
        --------
        import asyncio

        from cortex-ai import AsyncCortexAI

        client = AsyncCortexAI(token="YOUR_TOKEN", )
        async def main() -> None:
            await client.upload.verify_processing(file_id='file_id', )
        asyncio.run(main())
        """
        _response = await self._raw_client.verify_processing(
            file_id=file_id, tenant_id=tenant_id, request_options=request_options
        )
        return _response.data
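
AsyncUploadClient mirrors the synchronous client method-for-method, so concurrent uploads are a natural fit. A short sketch, again assuming the `cortex_ai` import path; the markdown payloads are illustrative:

    import asyncio

    from cortex_ai import AsyncCortexAI

    async def main() -> None:
        client = AsyncCortexAI(token="YOUR_TOKEN")
        # Fire several markdown uploads concurrently; each call returns SingleUploadData.
        results = await asyncio.gather(
            client.upload.upload_markdown(tenant_id="tenant_id", content="# Doc one"),
            client.upload.upload_markdown(tenant_id="tenant_id", content="# Doc two"),
        )
        print(results)

    asyncio.run(main())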