usecortex-ai 0.1.0__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {cortex_ai → usecortex_ai}/__init__.py +6 -4
- {cortex_ai → usecortex_ai}/client.py +4 -4
- {cortex_ai → usecortex_ai}/embeddings/client.py +8 -8
- {cortex_ai → usecortex_ai}/fetch/client.py +2 -2
- {cortex_ai → usecortex_ai}/search/client.py +15 -16
- {cortex_ai → usecortex_ai}/search/raw_client.py +17 -18
- {cortex_ai → usecortex_ai}/sources/client.py +4 -4
- {cortex_ai → usecortex_ai}/tenant/client.py +2 -2
- {cortex_ai → usecortex_ai}/types/__init__.py +6 -4
- {cortex_ai → usecortex_ai}/types/app_sources_upload_data.py +1 -1
- {cortex_ai → usecortex_ai}/types/batch_upload_data.py +1 -1
- usecortex_ai/types/body_scrape_webpage_upload_scrape_webpage_post.py +19 -0
- usecortex_ai/types/body_update_scrape_job_upload_update_webpage_patch.py +19 -0
- {cortex_ai → usecortex_ai}/types/embeddings_create_collection_data.py +1 -1
- {cortex_ai → usecortex_ai}/types/embeddings_delete_data.py +1 -1
- {cortex_ai → usecortex_ai}/types/embeddings_get_data.py +1 -1
- {cortex_ai → usecortex_ai}/types/embeddings_search_data.py +1 -1
- {cortex_ai → usecortex_ai}/types/error_response.py +1 -1
- {cortex_ai → usecortex_ai}/types/extended_context.py +1 -1
- {cortex_ai → usecortex_ai}/types/fetch_content_data.py +1 -1
- {cortex_ai → usecortex_ai}/types/list_sources_response.py +1 -1
- {cortex_ai → usecortex_ai}/types/processing_status.py +1 -1
- usecortex_ai/types/relations.py +27 -0
- {cortex_ai → usecortex_ai}/types/single_upload_data.py +1 -1
- {cortex_ai → usecortex_ai}/types/tenant_create_data.py +1 -1
- {cortex_ai → usecortex_ai}/types/tenant_stats.py +1 -1
- {cortex_ai → usecortex_ai}/upload/client.py +138 -120
- {cortex_ai → usecortex_ai}/upload/raw_client.py +154 -80
- {cortex_ai → usecortex_ai}/user/client.py +2 -2
- {cortex_ai → usecortex_ai}/user_memory/client.py +10 -10
- {usecortex_ai-0.1.0.dist-info → usecortex_ai-0.2.0.dist-info}/METADATA +1 -1
- usecortex_ai-0.2.0.dist-info/RECORD +90 -0
- usecortex_ai-0.2.0.dist-info/top_level.txt +1 -0
- cortex_ai/types/full_text_search_data.py +0 -22
- cortex_ai/types/search_data.py +0 -22
- usecortex_ai-0.1.0.dist-info/RECORD +0 -89
- usecortex_ai-0.1.0.dist-info/top_level.txt +0 -1
- {cortex_ai → usecortex_ai}/core/__init__.py +0 -0
- {cortex_ai → usecortex_ai}/core/api_error.py +0 -0
- {cortex_ai → usecortex_ai}/core/client_wrapper.py +0 -0
- {cortex_ai → usecortex_ai}/core/datetime_utils.py +0 -0
- {cortex_ai → usecortex_ai}/core/file.py +0 -0
- {cortex_ai → usecortex_ai}/core/force_multipart.py +0 -0
- {cortex_ai → usecortex_ai}/core/http_client.py +0 -0
- {cortex_ai → usecortex_ai}/core/http_response.py +0 -0
- {cortex_ai → usecortex_ai}/core/jsonable_encoder.py +0 -0
- {cortex_ai → usecortex_ai}/core/pydantic_utilities.py +0 -0
- {cortex_ai → usecortex_ai}/core/query_encoder.py +0 -0
- {cortex_ai → usecortex_ai}/core/remove_none_from_dict.py +0 -0
- {cortex_ai → usecortex_ai}/core/request_options.py +0 -0
- {cortex_ai → usecortex_ai}/core/serialization.py +0 -0
- {cortex_ai → usecortex_ai}/embeddings/__init__.py +0 -0
- {cortex_ai → usecortex_ai}/embeddings/raw_client.py +0 -0
- {cortex_ai → usecortex_ai}/environment.py +0 -0
- {cortex_ai → usecortex_ai}/errors/__init__.py +0 -0
- {cortex_ai → usecortex_ai}/errors/bad_request_error.py +0 -0
- {cortex_ai → usecortex_ai}/errors/forbidden_error.py +0 -0
- {cortex_ai → usecortex_ai}/errors/internal_server_error.py +0 -0
- {cortex_ai → usecortex_ai}/errors/not_found_error.py +0 -0
- {cortex_ai → usecortex_ai}/errors/service_unavailable_error.py +0 -0
- {cortex_ai → usecortex_ai}/errors/unauthorized_error.py +0 -0
- {cortex_ai → usecortex_ai}/errors/unprocessable_entity_error.py +0 -0
- {cortex_ai → usecortex_ai}/fetch/__init__.py +0 -0
- {cortex_ai → usecortex_ai}/fetch/raw_client.py +0 -0
- {cortex_ai → usecortex_ai}/raw_client.py +0 -0
- {cortex_ai → usecortex_ai}/search/__init__.py +0 -0
- {cortex_ai → usecortex_ai}/search/types/__init__.py +0 -0
- {cortex_ai → usecortex_ai}/search/types/alpha.py +0 -0
- {cortex_ai → usecortex_ai}/sources/__init__.py +0 -0
- {cortex_ai → usecortex_ai}/sources/raw_client.py +0 -0
- {cortex_ai → usecortex_ai}/tenant/__init__.py +0 -0
- {cortex_ai → usecortex_ai}/tenant/raw_client.py +0 -0
- {cortex_ai → usecortex_ai}/types/actual_error_response.py +0 -0
- {cortex_ai → usecortex_ai}/types/attachment_model.py +0 -0
- {cortex_ai → usecortex_ai}/types/bm_25_operator_type.py +0 -0
- {cortex_ai → usecortex_ai}/types/content_model.py +0 -0
- {cortex_ai → usecortex_ai}/types/delete_memory_request.py +0 -0
- {cortex_ai → usecortex_ai}/types/file_upload_result.py +0 -0
- {cortex_ai → usecortex_ai}/types/http_validation_error.py +0 -0
- {cortex_ai → usecortex_ai}/types/markdown_upload_request.py +0 -0
- {cortex_ai → usecortex_ai}/types/related_chunk.py +0 -0
- {cortex_ai → usecortex_ai}/types/search_chunk.py +0 -0
- {cortex_ai → usecortex_ai}/types/source.py +0 -0
- {cortex_ai → usecortex_ai}/types/source_content.py +0 -0
- {cortex_ai → usecortex_ai}/types/source_model.py +0 -0
- {cortex_ai → usecortex_ai}/types/validation_error.py +0 -0
- {cortex_ai → usecortex_ai}/types/validation_error_loc_item.py +0 -0
- {cortex_ai → usecortex_ai}/upload/__init__.py +0 -0
- {cortex_ai → usecortex_ai}/user/__init__.py +0 -0
- {cortex_ai → usecortex_ai}/user/raw_client.py +0 -0
- {cortex_ai → usecortex_ai}/user_memory/__init__.py +0 -0
- {cortex_ai → usecortex_ai}/user_memory/raw_client.py +0 -0
- {usecortex_ai-0.1.0.dist-info → usecortex_ai-0.2.0.dist-info}/WHEEL +0 -0
- {usecortex_ai-0.1.0.dist-info → usecortex_ai-0.2.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -7,7 +7,9 @@ from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
|
|
|
7
7
|
from ..core.request_options import RequestOptions
|
|
8
8
|
from ..types.app_sources_upload_data import AppSourcesUploadData
|
|
9
9
|
from ..types.batch_upload_data import BatchUploadData
|
|
10
|
+
from ..types.markdown_upload_request import MarkdownUploadRequest
|
|
10
11
|
from ..types.processing_status import ProcessingStatus
|
|
12
|
+
from ..types.relations import Relations
|
|
11
13
|
from ..types.single_upload_data import SingleUploadData
|
|
12
14
|
from ..types.source_model import SourceModel
|
|
13
15
|
from .raw_client import AsyncRawUploadClient, RawUploadClient
|
|
@@ -39,6 +41,7 @@ class UploadClient:
|
|
|
39
41
|
sub_tenant_id: typing.Optional[str] = None,
|
|
40
42
|
tenant_metadata: typing.Optional[str] = OMIT,
|
|
41
43
|
document_metadata: typing.Optional[str] = OMIT,
|
|
44
|
+
relations: typing.Optional[str] = OMIT,
|
|
42
45
|
request_options: typing.Optional[RequestOptions] = None,
|
|
43
46
|
) -> BatchUploadData:
|
|
44
47
|
"""
|
|
@@ -55,6 +58,8 @@ class UploadClient:
|
|
|
55
58
|
|
|
56
59
|
document_metadata : typing.Optional[str]
|
|
57
60
|
|
|
61
|
+
relations : typing.Optional[str]
|
|
62
|
+
|
|
58
63
|
request_options : typing.Optional[RequestOptions]
|
|
59
64
|
Request-specific configuration.
|
|
60
65
|
|
|
@@ -65,7 +70,7 @@ class UploadClient:
|
|
|
65
70
|
|
|
66
71
|
Examples
|
|
67
72
|
--------
|
|
68
|
-
from
|
|
73
|
+
from usecortex_ai import CortexAI
|
|
69
74
|
|
|
70
75
|
client = CortexAI(token="YOUR_TOKEN", )
|
|
71
76
|
client.upload.batch_upload(tenant_id='tenant_id', )
|
|
@@ -76,6 +81,7 @@ class UploadClient:
|
|
|
76
81
|
sub_tenant_id=sub_tenant_id,
|
|
77
82
|
tenant_metadata=tenant_metadata,
|
|
78
83
|
document_metadata=document_metadata,
|
|
84
|
+
relations=relations,
|
|
79
85
|
request_options=request_options,
|
|
80
86
|
)
|
|
81
87
|
return _response.data
|
|
@@ -89,6 +95,7 @@ class UploadClient:
|
|
|
89
95
|
source_ids: typing.Optional[typing.List[str]] = OMIT,
|
|
90
96
|
tenant_metadata: typing.Optional[str] = OMIT,
|
|
91
97
|
document_metadata: typing.Optional[str] = OMIT,
|
|
98
|
+
relations: typing.Optional[str] = OMIT,
|
|
92
99
|
request_options: typing.Optional[RequestOptions] = None,
|
|
93
100
|
) -> BatchUploadData:
|
|
94
101
|
"""
|
|
@@ -107,6 +114,8 @@ class UploadClient:
|
|
|
107
114
|
|
|
108
115
|
document_metadata : typing.Optional[str]
|
|
109
116
|
|
|
117
|
+
relations : typing.Optional[str]
|
|
118
|
+
|
|
110
119
|
request_options : typing.Optional[RequestOptions]
|
|
111
120
|
Request-specific configuration.
|
|
112
121
|
|
|
@@ -117,7 +126,7 @@ class UploadClient:
|
|
|
117
126
|
|
|
118
127
|
Examples
|
|
119
128
|
--------
|
|
120
|
-
from
|
|
129
|
+
from usecortex_ai import CortexAI
|
|
121
130
|
|
|
122
131
|
client = CortexAI(token="YOUR_TOKEN", )
|
|
123
132
|
client.upload.batch_update(tenant_id='tenant_id', )
|
|
@@ -129,6 +138,7 @@ class UploadClient:
|
|
|
129
138
|
source_ids=source_ids,
|
|
130
139
|
tenant_metadata=tenant_metadata,
|
|
131
140
|
document_metadata=document_metadata,
|
|
141
|
+
relations=relations,
|
|
132
142
|
request_options=request_options,
|
|
133
143
|
)
|
|
134
144
|
return _response.data
|
|
@@ -141,6 +151,7 @@ class UploadClient:
|
|
|
141
151
|
sub_tenant_id: typing.Optional[str] = None,
|
|
142
152
|
tenant_metadata: typing.Optional[str] = OMIT,
|
|
143
153
|
document_metadata: typing.Optional[str] = OMIT,
|
|
154
|
+
relations: typing.Optional[str] = OMIT,
|
|
144
155
|
request_options: typing.Optional[RequestOptions] = None,
|
|
145
156
|
) -> SingleUploadData:
|
|
146
157
|
"""
|
|
@@ -157,6 +168,8 @@ class UploadClient:
|
|
|
157
168
|
|
|
158
169
|
document_metadata : typing.Optional[str]
|
|
159
170
|
|
|
171
|
+
relations : typing.Optional[str]
|
|
172
|
+
|
|
160
173
|
request_options : typing.Optional[RequestOptions]
|
|
161
174
|
Request-specific configuration.
|
|
162
175
|
|
|
@@ -167,7 +180,7 @@ class UploadClient:
|
|
|
167
180
|
|
|
168
181
|
Examples
|
|
169
182
|
--------
|
|
170
|
-
from
|
|
183
|
+
from usecortex_ai import CortexAI
|
|
171
184
|
|
|
172
185
|
client = CortexAI(token="YOUR_TOKEN", )
|
|
173
186
|
client.upload.upload_document(tenant_id='tenant_id', )
|
|
@@ -178,6 +191,7 @@ class UploadClient:
|
|
|
178
191
|
sub_tenant_id=sub_tenant_id,
|
|
179
192
|
tenant_metadata=tenant_metadata,
|
|
180
193
|
document_metadata=document_metadata,
|
|
194
|
+
relations=relations,
|
|
181
195
|
request_options=request_options,
|
|
182
196
|
)
|
|
183
197
|
return _response.data
|
|
@@ -191,6 +205,7 @@ class UploadClient:
|
|
|
191
205
|
sub_tenant_id: typing.Optional[str] = None,
|
|
192
206
|
tenant_metadata: typing.Optional[str] = OMIT,
|
|
193
207
|
document_metadata: typing.Optional[str] = OMIT,
|
|
208
|
+
relations: typing.Optional[str] = OMIT,
|
|
194
209
|
request_options: typing.Optional[RequestOptions] = None,
|
|
195
210
|
) -> SingleUploadData:
|
|
196
211
|
"""
|
|
@@ -209,6 +224,8 @@ class UploadClient:
|
|
|
209
224
|
|
|
210
225
|
document_metadata : typing.Optional[str]
|
|
211
226
|
|
|
227
|
+
relations : typing.Optional[str]
|
|
228
|
+
|
|
212
229
|
request_options : typing.Optional[RequestOptions]
|
|
213
230
|
Request-specific configuration.
|
|
214
231
|
|
|
@@ -219,7 +236,7 @@ class UploadClient:
|
|
|
219
236
|
|
|
220
237
|
Examples
|
|
221
238
|
--------
|
|
222
|
-
from
|
|
239
|
+
from usecortex_ai import CortexAI
|
|
223
240
|
|
|
224
241
|
client = CortexAI(token="YOUR_TOKEN", )
|
|
225
242
|
client.upload.update_document(source_id='source_id', tenant_id='tenant_id', )
|
|
@@ -231,6 +248,7 @@ class UploadClient:
|
|
|
231
248
|
sub_tenant_id=sub_tenant_id,
|
|
232
249
|
tenant_metadata=tenant_metadata,
|
|
233
250
|
document_metadata=document_metadata,
|
|
251
|
+
relations=relations,
|
|
234
252
|
request_options=request_options,
|
|
235
253
|
)
|
|
236
254
|
return _response.data
|
|
@@ -262,7 +280,7 @@ class UploadClient:
|
|
|
262
280
|
|
|
263
281
|
Examples
|
|
264
282
|
--------
|
|
265
|
-
from
|
|
283
|
+
from usecortex_ai import CortexAI, SourceModel
|
|
266
284
|
|
|
267
285
|
client = CortexAI(token="YOUR_TOKEN", )
|
|
268
286
|
client.upload.upload_app_sources(tenant_id='tenant_id', request=[SourceModel()], )
|
|
@@ -276,10 +294,9 @@ class UploadClient:
|
|
|
276
294
|
self,
|
|
277
295
|
*,
|
|
278
296
|
tenant_id: str,
|
|
279
|
-
|
|
297
|
+
request: MarkdownUploadRequest,
|
|
280
298
|
sub_tenant_id: typing.Optional[str] = None,
|
|
281
|
-
|
|
282
|
-
document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
|
|
299
|
+
relations: typing.Optional[Relations] = OMIT,
|
|
283
300
|
request_options: typing.Optional[RequestOptions] = None,
|
|
284
301
|
) -> SingleUploadData:
|
|
285
302
|
"""
|
|
@@ -290,13 +307,11 @@ class UploadClient:
|
|
|
290
307
|
----------
|
|
291
308
|
tenant_id : str
|
|
292
309
|
|
|
293
|
-
|
|
310
|
+
request : MarkdownUploadRequest
|
|
294
311
|
|
|
295
312
|
sub_tenant_id : typing.Optional[str]
|
|
296
313
|
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
|
|
314
|
+
relations : typing.Optional[Relations]
|
|
300
315
|
|
|
301
316
|
request_options : typing.Optional[RequestOptions]
|
|
302
317
|
Request-specific configuration.
|
|
@@ -308,17 +323,16 @@ class UploadClient:
|
|
|
308
323
|
|
|
309
324
|
Examples
|
|
310
325
|
--------
|
|
311
|
-
from
|
|
326
|
+
from usecortex_ai import CortexAI, MarkdownUploadRequest
|
|
312
327
|
|
|
313
328
|
client = CortexAI(token="YOUR_TOKEN", )
|
|
314
|
-
client.upload.upload_markdown(tenant_id='tenant_id', content='content', )
|
|
329
|
+
client.upload.upload_markdown(tenant_id='tenant_id', request=MarkdownUploadRequest(content='content', ), )
|
|
315
330
|
"""
|
|
316
331
|
_response = self._raw_client.upload_markdown(
|
|
317
332
|
tenant_id=tenant_id,
|
|
318
|
-
|
|
333
|
+
request=request,
|
|
319
334
|
sub_tenant_id=sub_tenant_id,
|
|
320
|
-
|
|
321
|
-
document_metadata=document_metadata,
|
|
335
|
+
relations=relations,
|
|
322
336
|
request_options=request_options,
|
|
323
337
|
)
|
|
324
338
|
return _response.data
|
|
@@ -327,10 +341,9 @@ class UploadClient:
|
|
|
327
341
|
self,
|
|
328
342
|
*,
|
|
329
343
|
tenant_id: str,
|
|
330
|
-
|
|
344
|
+
request: MarkdownUploadRequest,
|
|
331
345
|
sub_tenant_id: typing.Optional[str] = None,
|
|
332
|
-
|
|
333
|
-
document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
|
|
346
|
+
relations: typing.Optional[Relations] = OMIT,
|
|
334
347
|
request_options: typing.Optional[RequestOptions] = None,
|
|
335
348
|
) -> SingleUploadData:
|
|
336
349
|
"""
|
|
@@ -341,13 +354,11 @@ class UploadClient:
|
|
|
341
354
|
----------
|
|
342
355
|
tenant_id : str
|
|
343
356
|
|
|
344
|
-
|
|
357
|
+
request : MarkdownUploadRequest
|
|
345
358
|
|
|
346
359
|
sub_tenant_id : typing.Optional[str]
|
|
347
360
|
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
|
|
361
|
+
relations : typing.Optional[Relations]
|
|
351
362
|
|
|
352
363
|
request_options : typing.Optional[RequestOptions]
|
|
353
364
|
Request-specific configuration.
|
|
@@ -359,17 +370,16 @@ class UploadClient:
|
|
|
359
370
|
|
|
360
371
|
Examples
|
|
361
372
|
--------
|
|
362
|
-
from
|
|
373
|
+
from usecortex_ai import CortexAI, MarkdownUploadRequest
|
|
363
374
|
|
|
364
375
|
client = CortexAI(token="YOUR_TOKEN", )
|
|
365
|
-
client.upload.upload_text(tenant_id='tenant_id', content='content', )
|
|
376
|
+
client.upload.upload_text(tenant_id='tenant_id', request=MarkdownUploadRequest(content='content', ), )
|
|
366
377
|
"""
|
|
367
378
|
_response = self._raw_client.upload_text(
|
|
368
379
|
tenant_id=tenant_id,
|
|
369
|
-
|
|
380
|
+
request=request,
|
|
370
381
|
sub_tenant_id=sub_tenant_id,
|
|
371
|
-
|
|
372
|
-
document_metadata=document_metadata,
|
|
382
|
+
relations=relations,
|
|
373
383
|
request_options=request_options,
|
|
374
384
|
)
|
|
375
385
|
return _response.data
|
|
@@ -379,10 +389,9 @@ class UploadClient:
|
|
|
379
389
|
*,
|
|
380
390
|
source_id: str,
|
|
381
391
|
tenant_id: str,
|
|
382
|
-
|
|
392
|
+
request: MarkdownUploadRequest,
|
|
383
393
|
sub_tenant_id: typing.Optional[str] = None,
|
|
384
|
-
|
|
385
|
-
document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
|
|
394
|
+
relations: typing.Optional[Relations] = OMIT,
|
|
386
395
|
request_options: typing.Optional[RequestOptions] = None,
|
|
387
396
|
) -> SingleUploadData:
|
|
388
397
|
"""
|
|
@@ -392,13 +401,11 @@ class UploadClient:
|
|
|
392
401
|
|
|
393
402
|
tenant_id : str
|
|
394
403
|
|
|
395
|
-
|
|
404
|
+
request : MarkdownUploadRequest
|
|
396
405
|
|
|
397
406
|
sub_tenant_id : typing.Optional[str]
|
|
398
407
|
|
|
399
|
-
|
|
400
|
-
|
|
401
|
-
document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
|
|
408
|
+
relations : typing.Optional[Relations]
|
|
402
409
|
|
|
403
410
|
request_options : typing.Optional[RequestOptions]
|
|
404
411
|
Request-specific configuration.
|
|
@@ -410,18 +417,17 @@ class UploadClient:
|
|
|
410
417
|
|
|
411
418
|
Examples
|
|
412
419
|
--------
|
|
413
|
-
from
|
|
420
|
+
from usecortex_ai import CortexAI, MarkdownUploadRequest
|
|
414
421
|
|
|
415
422
|
client = CortexAI(token="YOUR_TOKEN", )
|
|
416
|
-
client.upload.update_markdown(source_id='source_id', tenant_id='tenant_id', content='content', )
|
|
423
|
+
client.upload.update_markdown(source_id='source_id', tenant_id='tenant_id', request=MarkdownUploadRequest(content='content', ), )
|
|
417
424
|
"""
|
|
418
425
|
_response = self._raw_client.update_markdown(
|
|
419
426
|
source_id=source_id,
|
|
420
427
|
tenant_id=tenant_id,
|
|
421
|
-
|
|
428
|
+
request=request,
|
|
422
429
|
sub_tenant_id=sub_tenant_id,
|
|
423
|
-
|
|
424
|
-
document_metadata=document_metadata,
|
|
430
|
+
relations=relations,
|
|
425
431
|
request_options=request_options,
|
|
426
432
|
)
|
|
427
433
|
return _response.data
|
|
@@ -431,10 +437,9 @@ class UploadClient:
|
|
|
431
437
|
*,
|
|
432
438
|
source_id: str,
|
|
433
439
|
tenant_id: str,
|
|
434
|
-
|
|
440
|
+
request: MarkdownUploadRequest,
|
|
435
441
|
sub_tenant_id: typing.Optional[str] = None,
|
|
436
|
-
|
|
437
|
-
document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
|
|
442
|
+
relations: typing.Optional[Relations] = OMIT,
|
|
438
443
|
request_options: typing.Optional[RequestOptions] = None,
|
|
439
444
|
) -> SingleUploadData:
|
|
440
445
|
"""
|
|
@@ -444,13 +449,11 @@ class UploadClient:
|
|
|
444
449
|
|
|
445
450
|
tenant_id : str
|
|
446
451
|
|
|
447
|
-
|
|
452
|
+
request : MarkdownUploadRequest
|
|
448
453
|
|
|
449
454
|
sub_tenant_id : typing.Optional[str]
|
|
450
455
|
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
|
|
456
|
+
relations : typing.Optional[Relations]
|
|
454
457
|
|
|
455
458
|
request_options : typing.Optional[RequestOptions]
|
|
456
459
|
Request-specific configuration.
|
|
@@ -462,18 +465,17 @@ class UploadClient:
|
|
|
462
465
|
|
|
463
466
|
Examples
|
|
464
467
|
--------
|
|
465
|
-
from
|
|
468
|
+
from usecortex_ai import CortexAI, MarkdownUploadRequest
|
|
466
469
|
|
|
467
470
|
client = CortexAI(token="YOUR_TOKEN", )
|
|
468
|
-
client.upload.update_text(source_id='source_id', tenant_id='tenant_id', content='content', )
|
|
471
|
+
client.upload.update_text(source_id='source_id', tenant_id='tenant_id', request=MarkdownUploadRequest(content='content', ), )
|
|
469
472
|
"""
|
|
470
473
|
_response = self._raw_client.update_text(
|
|
471
474
|
source_id=source_id,
|
|
472
475
|
tenant_id=tenant_id,
|
|
473
|
-
|
|
476
|
+
request=request,
|
|
474
477
|
sub_tenant_id=sub_tenant_id,
|
|
475
|
-
|
|
476
|
-
document_metadata=document_metadata,
|
|
478
|
+
relations=relations,
|
|
477
479
|
request_options=request_options,
|
|
478
480
|
)
|
|
479
481
|
return _response.data
|
|
@@ -508,7 +510,7 @@ class UploadClient:
|
|
|
508
510
|
|
|
509
511
|
Examples
|
|
510
512
|
--------
|
|
511
|
-
from
|
|
513
|
+
from usecortex_ai import CortexAI
|
|
512
514
|
|
|
513
515
|
client = CortexAI(token="YOUR_TOKEN", )
|
|
514
516
|
client.upload.upload_embeddings(tenant_id='tenant_id', embeddings=[[1.1]], )
|
|
@@ -550,7 +552,7 @@ class UploadClient:
|
|
|
550
552
|
|
|
551
553
|
Examples
|
|
552
554
|
--------
|
|
553
|
-
from
|
|
555
|
+
from usecortex_ai import CortexAI
|
|
554
556
|
|
|
555
557
|
client = CortexAI(token="YOUR_TOKEN", )
|
|
556
558
|
client.upload.update_embeddings(tenant_id='tenant_id', embeddings={'key': [1.1]}, )
|
|
@@ -567,6 +569,7 @@ class UploadClient:
|
|
|
567
569
|
tenant_id: str,
|
|
568
570
|
sub_tenant_id: typing.Optional[str] = None,
|
|
569
571
|
file_id: typing.Optional[str] = None,
|
|
572
|
+
relations: typing.Optional[str] = OMIT,
|
|
570
573
|
request_options: typing.Optional[RequestOptions] = None,
|
|
571
574
|
) -> SingleUploadData:
|
|
572
575
|
"""
|
|
@@ -580,6 +583,8 @@ class UploadClient:
|
|
|
580
583
|
|
|
581
584
|
file_id : typing.Optional[str]
|
|
582
585
|
|
|
586
|
+
relations : typing.Optional[str]
|
|
587
|
+
|
|
583
588
|
request_options : typing.Optional[RequestOptions]
|
|
584
589
|
Request-specific configuration.
|
|
585
590
|
|
|
@@ -590,7 +595,7 @@ class UploadClient:
|
|
|
590
595
|
|
|
591
596
|
Examples
|
|
592
597
|
--------
|
|
593
|
-
from
|
|
598
|
+
from usecortex_ai import CortexAI
|
|
594
599
|
|
|
595
600
|
client = CortexAI(token="YOUR_TOKEN", )
|
|
596
601
|
client.upload.scrape_webpage(web_url='web_url', tenant_id='tenant_id', )
|
|
@@ -600,6 +605,7 @@ class UploadClient:
|
|
|
600
605
|
tenant_id=tenant_id,
|
|
601
606
|
sub_tenant_id=sub_tenant_id,
|
|
602
607
|
file_id=file_id,
|
|
608
|
+
relations=relations,
|
|
603
609
|
request_options=request_options,
|
|
604
610
|
)
|
|
605
611
|
return _response.data
|
|
@@ -611,6 +617,7 @@ class UploadClient:
|
|
|
611
617
|
source_id: str,
|
|
612
618
|
tenant_id: str,
|
|
613
619
|
sub_tenant_id: typing.Optional[str] = None,
|
|
620
|
+
relations: typing.Optional[str] = OMIT,
|
|
614
621
|
request_options: typing.Optional[RequestOptions] = None,
|
|
615
622
|
) -> SingleUploadData:
|
|
616
623
|
"""
|
|
@@ -624,6 +631,8 @@ class UploadClient:
|
|
|
624
631
|
|
|
625
632
|
sub_tenant_id : typing.Optional[str]
|
|
626
633
|
|
|
634
|
+
relations : typing.Optional[str]
|
|
635
|
+
|
|
627
636
|
request_options : typing.Optional[RequestOptions]
|
|
628
637
|
Request-specific configuration.
|
|
629
638
|
|
|
@@ -634,7 +643,7 @@ class UploadClient:
|
|
|
634
643
|
|
|
635
644
|
Examples
|
|
636
645
|
--------
|
|
637
|
-
from
|
|
646
|
+
from usecortex_ai import CortexAI
|
|
638
647
|
|
|
639
648
|
client = CortexAI(token="YOUR_TOKEN", )
|
|
640
649
|
client.upload.update_webpage(web_url='web_url', source_id='source_id', tenant_id='tenant_id', )
|
|
@@ -644,6 +653,7 @@ class UploadClient:
|
|
|
644
653
|
source_id=source_id,
|
|
645
654
|
tenant_id=tenant_id,
|
|
646
655
|
sub_tenant_id=sub_tenant_id,
|
|
656
|
+
relations=relations,
|
|
647
657
|
request_options=request_options,
|
|
648
658
|
)
|
|
649
659
|
return _response.data
|
|
@@ -675,7 +685,7 @@ class UploadClient:
|
|
|
675
685
|
|
|
676
686
|
Examples
|
|
677
687
|
--------
|
|
678
|
-
from
|
|
688
|
+
from usecortex_ai import CortexAI
|
|
679
689
|
|
|
680
690
|
client = CortexAI(token="YOUR_TOKEN", )
|
|
681
691
|
client.upload.delete_source(tenant_id='tenant_id', source_ids=['source_ids'], )
|
|
@@ -712,7 +722,7 @@ class UploadClient:
|
|
|
712
722
|
|
|
713
723
|
Examples
|
|
714
724
|
--------
|
|
715
|
-
from
|
|
725
|
+
from usecortex_ai import CortexAI
|
|
716
726
|
|
|
717
727
|
client = CortexAI(token="YOUR_TOKEN", )
|
|
718
728
|
client.upload.delete_memory(tenant_id='tenant_id', source_ids=['source_ids'], )
|
|
@@ -751,7 +761,7 @@ class UploadClient:
|
|
|
751
761
|
|
|
752
762
|
Examples
|
|
753
763
|
--------
|
|
754
|
-
from
|
|
764
|
+
from usecortex_ai import CortexAI
|
|
755
765
|
|
|
756
766
|
client = CortexAI(token="YOUR_TOKEN", )
|
|
757
767
|
client.upload.verify_processing(file_id='file_id', )
|
|
@@ -785,6 +795,7 @@ class AsyncUploadClient:
|
|
|
785
795
|
sub_tenant_id: typing.Optional[str] = None,
|
|
786
796
|
tenant_metadata: typing.Optional[str] = OMIT,
|
|
787
797
|
document_metadata: typing.Optional[str] = OMIT,
|
|
798
|
+
relations: typing.Optional[str] = OMIT,
|
|
788
799
|
request_options: typing.Optional[RequestOptions] = None,
|
|
789
800
|
) -> BatchUploadData:
|
|
790
801
|
"""
|
|
@@ -801,6 +812,8 @@ class AsyncUploadClient:
|
|
|
801
812
|
|
|
802
813
|
document_metadata : typing.Optional[str]
|
|
803
814
|
|
|
815
|
+
relations : typing.Optional[str]
|
|
816
|
+
|
|
804
817
|
request_options : typing.Optional[RequestOptions]
|
|
805
818
|
Request-specific configuration.
|
|
806
819
|
|
|
@@ -813,7 +826,7 @@ class AsyncUploadClient:
|
|
|
813
826
|
--------
|
|
814
827
|
import asyncio
|
|
815
828
|
|
|
816
|
-
from
|
|
829
|
+
from usecortex_ai import AsyncCortexAI
|
|
817
830
|
|
|
818
831
|
client = AsyncCortexAI(token="YOUR_TOKEN", )
|
|
819
832
|
async def main() -> None:
|
|
@@ -826,6 +839,7 @@ class AsyncUploadClient:
|
|
|
826
839
|
sub_tenant_id=sub_tenant_id,
|
|
827
840
|
tenant_metadata=tenant_metadata,
|
|
828
841
|
document_metadata=document_metadata,
|
|
842
|
+
relations=relations,
|
|
829
843
|
request_options=request_options,
|
|
830
844
|
)
|
|
831
845
|
return _response.data
|
|
@@ -839,6 +853,7 @@ class AsyncUploadClient:
|
|
|
839
853
|
source_ids: typing.Optional[typing.List[str]] = OMIT,
|
|
840
854
|
tenant_metadata: typing.Optional[str] = OMIT,
|
|
841
855
|
document_metadata: typing.Optional[str] = OMIT,
|
|
856
|
+
relations: typing.Optional[str] = OMIT,
|
|
842
857
|
request_options: typing.Optional[RequestOptions] = None,
|
|
843
858
|
) -> BatchUploadData:
|
|
844
859
|
"""
|
|
@@ -857,6 +872,8 @@ class AsyncUploadClient:
|
|
|
857
872
|
|
|
858
873
|
document_metadata : typing.Optional[str]
|
|
859
874
|
|
|
875
|
+
relations : typing.Optional[str]
|
|
876
|
+
|
|
860
877
|
request_options : typing.Optional[RequestOptions]
|
|
861
878
|
Request-specific configuration.
|
|
862
879
|
|
|
@@ -869,7 +886,7 @@ class AsyncUploadClient:
|
|
|
869
886
|
--------
|
|
870
887
|
import asyncio
|
|
871
888
|
|
|
872
|
-
from
|
|
889
|
+
from usecortex_ai import AsyncCortexAI
|
|
873
890
|
|
|
874
891
|
client = AsyncCortexAI(token="YOUR_TOKEN", )
|
|
875
892
|
async def main() -> None:
|
|
@@ -883,6 +900,7 @@ class AsyncUploadClient:
|
|
|
883
900
|
source_ids=source_ids,
|
|
884
901
|
tenant_metadata=tenant_metadata,
|
|
885
902
|
document_metadata=document_metadata,
|
|
903
|
+
relations=relations,
|
|
886
904
|
request_options=request_options,
|
|
887
905
|
)
|
|
888
906
|
return _response.data
|
|
@@ -895,6 +913,7 @@ class AsyncUploadClient:
|
|
|
895
913
|
sub_tenant_id: typing.Optional[str] = None,
|
|
896
914
|
tenant_metadata: typing.Optional[str] = OMIT,
|
|
897
915
|
document_metadata: typing.Optional[str] = OMIT,
|
|
916
|
+
relations: typing.Optional[str] = OMIT,
|
|
898
917
|
request_options: typing.Optional[RequestOptions] = None,
|
|
899
918
|
) -> SingleUploadData:
|
|
900
919
|
"""
|
|
@@ -911,6 +930,8 @@ class AsyncUploadClient:
|
|
|
911
930
|
|
|
912
931
|
document_metadata : typing.Optional[str]
|
|
913
932
|
|
|
933
|
+
relations : typing.Optional[str]
|
|
934
|
+
|
|
914
935
|
request_options : typing.Optional[RequestOptions]
|
|
915
936
|
Request-specific configuration.
|
|
916
937
|
|
|
@@ -923,7 +944,7 @@ class AsyncUploadClient:
|
|
|
923
944
|
--------
|
|
924
945
|
import asyncio
|
|
925
946
|
|
|
926
|
-
from
|
|
947
|
+
from usecortex_ai import AsyncCortexAI
|
|
927
948
|
|
|
928
949
|
client = AsyncCortexAI(token="YOUR_TOKEN", )
|
|
929
950
|
async def main() -> None:
|
|
@@ -936,6 +957,7 @@ class AsyncUploadClient:
|
|
|
936
957
|
sub_tenant_id=sub_tenant_id,
|
|
937
958
|
tenant_metadata=tenant_metadata,
|
|
938
959
|
document_metadata=document_metadata,
|
|
960
|
+
relations=relations,
|
|
939
961
|
request_options=request_options,
|
|
940
962
|
)
|
|
941
963
|
return _response.data
|
|
@@ -949,6 +971,7 @@ class AsyncUploadClient:
|
|
|
949
971
|
sub_tenant_id: typing.Optional[str] = None,
|
|
950
972
|
tenant_metadata: typing.Optional[str] = OMIT,
|
|
951
973
|
document_metadata: typing.Optional[str] = OMIT,
|
|
974
|
+
relations: typing.Optional[str] = OMIT,
|
|
952
975
|
request_options: typing.Optional[RequestOptions] = None,
|
|
953
976
|
) -> SingleUploadData:
|
|
954
977
|
"""
|
|
@@ -967,6 +990,8 @@ class AsyncUploadClient:
|
|
|
967
990
|
|
|
968
991
|
document_metadata : typing.Optional[str]
|
|
969
992
|
|
|
993
|
+
relations : typing.Optional[str]
|
|
994
|
+
|
|
970
995
|
request_options : typing.Optional[RequestOptions]
|
|
971
996
|
Request-specific configuration.
|
|
972
997
|
|
|
@@ -979,7 +1004,7 @@ class AsyncUploadClient:
|
|
|
979
1004
|
--------
|
|
980
1005
|
import asyncio
|
|
981
1006
|
|
|
982
|
-
from
|
|
1007
|
+
from usecortex_ai import AsyncCortexAI
|
|
983
1008
|
|
|
984
1009
|
client = AsyncCortexAI(token="YOUR_TOKEN", )
|
|
985
1010
|
async def main() -> None:
|
|
@@ -993,6 +1018,7 @@ class AsyncUploadClient:
|
|
|
993
1018
|
sub_tenant_id=sub_tenant_id,
|
|
994
1019
|
tenant_metadata=tenant_metadata,
|
|
995
1020
|
document_metadata=document_metadata,
|
|
1021
|
+
relations=relations,
|
|
996
1022
|
request_options=request_options,
|
|
997
1023
|
)
|
|
998
1024
|
return _response.data
|
|
@@ -1026,7 +1052,7 @@ class AsyncUploadClient:
|
|
|
1026
1052
|
--------
|
|
1027
1053
|
import asyncio
|
|
1028
1054
|
|
|
1029
|
-
from
|
|
1055
|
+
from usecortex_ai import AsyncCortexAI, SourceModel
|
|
1030
1056
|
|
|
1031
1057
|
client = AsyncCortexAI(token="YOUR_TOKEN", )
|
|
1032
1058
|
async def main() -> None:
|
|
@@ -1042,10 +1068,9 @@ class AsyncUploadClient:
|
|
|
1042
1068
|
self,
|
|
1043
1069
|
*,
|
|
1044
1070
|
tenant_id: str,
|
|
1045
|
-
|
|
1071
|
+
request: MarkdownUploadRequest,
|
|
1046
1072
|
sub_tenant_id: typing.Optional[str] = None,
|
|
1047
|
-
|
|
1048
|
-
document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
|
|
1073
|
+
relations: typing.Optional[Relations] = OMIT,
|
|
1049
1074
|
request_options: typing.Optional[RequestOptions] = None,
|
|
1050
1075
|
) -> SingleUploadData:
|
|
1051
1076
|
"""
|
|
@@ -1056,13 +1081,11 @@ class AsyncUploadClient:
|
|
|
1056
1081
|
----------
|
|
1057
1082
|
tenant_id : str
|
|
1058
1083
|
|
|
1059
|
-
|
|
1084
|
+
request : MarkdownUploadRequest
|
|
1060
1085
|
|
|
1061
1086
|
sub_tenant_id : typing.Optional[str]
|
|
1062
1087
|
|
|
1063
|
-
|
|
1064
|
-
|
|
1065
|
-
document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
|
|
1088
|
+
relations : typing.Optional[Relations]
|
|
1066
1089
|
|
|
1067
1090
|
request_options : typing.Optional[RequestOptions]
|
|
1068
1091
|
Request-specific configuration.
|
|
@@ -1076,19 +1099,18 @@ class AsyncUploadClient:
|
|
|
1076
1099
|
--------
|
|
1077
1100
|
import asyncio
|
|
1078
1101
|
|
|
1079
|
-
from
|
|
1102
|
+
from usecortex_ai import AsyncCortexAI, MarkdownUploadRequest
|
|
1080
1103
|
|
|
1081
1104
|
client = AsyncCortexAI(token="YOUR_TOKEN", )
|
|
1082
1105
|
async def main() -> None:
|
|
1083
|
-
await client.upload.upload_markdown(tenant_id='tenant_id', content='content', )
|
|
1106
|
+
await client.upload.upload_markdown(tenant_id='tenant_id', request=MarkdownUploadRequest(content='content', ), )
|
|
1084
1107
|
asyncio.run(main())
|
|
1085
1108
|
"""
|
|
1086
1109
|
_response = await self._raw_client.upload_markdown(
|
|
1087
1110
|
tenant_id=tenant_id,
|
|
1088
|
-
|
|
1111
|
+
request=request,
|
|
1089
1112
|
sub_tenant_id=sub_tenant_id,
|
|
1090
|
-
|
|
1091
|
-
document_metadata=document_metadata,
|
|
1113
|
+
relations=relations,
|
|
1092
1114
|
request_options=request_options,
|
|
1093
1115
|
)
|
|
1094
1116
|
return _response.data
|
|
@@ -1097,10 +1119,9 @@ class AsyncUploadClient:
|
|
|
1097
1119
|
self,
|
|
1098
1120
|
*,
|
|
1099
1121
|
tenant_id: str,
|
|
1100
|
-
|
|
1122
|
+
request: MarkdownUploadRequest,
|
|
1101
1123
|
sub_tenant_id: typing.Optional[str] = None,
|
|
1102
|
-
|
|
1103
|
-
document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
|
|
1124
|
+
relations: typing.Optional[Relations] = OMIT,
|
|
1104
1125
|
request_options: typing.Optional[RequestOptions] = None,
|
|
1105
1126
|
) -> SingleUploadData:
|
|
1106
1127
|
"""
|
|
@@ -1111,13 +1132,11 @@ class AsyncUploadClient:
|
|
|
1111
1132
|
----------
|
|
1112
1133
|
tenant_id : str
|
|
1113
1134
|
|
|
1114
|
-
|
|
1135
|
+
request : MarkdownUploadRequest
|
|
1115
1136
|
|
|
1116
1137
|
sub_tenant_id : typing.Optional[str]
|
|
1117
1138
|
|
|
1118
|
-
|
|
1119
|
-
|
|
1120
|
-
document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
|
|
1139
|
+
relations : typing.Optional[Relations]
|
|
1121
1140
|
|
|
1122
1141
|
request_options : typing.Optional[RequestOptions]
|
|
1123
1142
|
Request-specific configuration.
|
|
@@ -1131,19 +1150,18 @@ class AsyncUploadClient:
|
|
|
1131
1150
|
--------
|
|
1132
1151
|
import asyncio
|
|
1133
1152
|
|
|
1134
|
-
from
|
|
1153
|
+
from usecortex-ai import AsyncCortexAI, MarkdownUploadRequest
|
|
1135
1154
|
|
|
1136
1155
|
client = AsyncCortexAI(token="YOUR_TOKEN", )
|
|
1137
1156
|
async def main() -> None:
|
|
1138
|
-
await client.upload.upload_text(tenant_id='tenant_id', content='content', )
|
|
1157
|
+
await client.upload.upload_text(tenant_id='tenant_id', request=MarkdownUploadRequest(content='content', ), )
|
|
1139
1158
|
asyncio.run(main())
|
|
1140
1159
|
"""
|
|
1141
1160
|
_response = await self._raw_client.upload_text(
|
|
1142
1161
|
tenant_id=tenant_id,
|
|
1143
|
-
|
|
1162
|
+
request=request,
|
|
1144
1163
|
sub_tenant_id=sub_tenant_id,
|
|
1145
|
-
|
|
1146
|
-
document_metadata=document_metadata,
|
|
1164
|
+
relations=relations,
|
|
1147
1165
|
request_options=request_options,
|
|
1148
1166
|
)
|
|
1149
1167
|
return _response.data
|
|
@@ -1153,10 +1171,9 @@ class AsyncUploadClient:
|
|
|
1153
1171
|
*,
|
|
1154
1172
|
source_id: str,
|
|
1155
1173
|
tenant_id: str,
|
|
1156
|
-
|
|
1174
|
+
request: MarkdownUploadRequest,
|
|
1157
1175
|
sub_tenant_id: typing.Optional[str] = None,
|
|
1158
|
-
|
|
1159
|
-
document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
|
|
1176
|
+
relations: typing.Optional[Relations] = OMIT,
|
|
1160
1177
|
request_options: typing.Optional[RequestOptions] = None,
|
|
1161
1178
|
) -> SingleUploadData:
|
|
1162
1179
|
"""
|
|
@@ -1166,13 +1183,11 @@ class AsyncUploadClient:
|
|
|
1166
1183
|
|
|
1167
1184
|
tenant_id : str
|
|
1168
1185
|
|
|
1169
|
-
|
|
1186
|
+
request : MarkdownUploadRequest
|
|
1170
1187
|
|
|
1171
1188
|
sub_tenant_id : typing.Optional[str]
|
|
1172
1189
|
|
|
1173
|
-
|
|
1174
|
-
|
|
1175
|
-
document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
|
|
1190
|
+
relations : typing.Optional[Relations]
|
|
1176
1191
|
|
|
1177
1192
|
request_options : typing.Optional[RequestOptions]
|
|
1178
1193
|
Request-specific configuration.
|
|
@@ -1186,20 +1201,19 @@ class AsyncUploadClient:
|
|
|
1186
1201
|
--------
|
|
1187
1202
|
import asyncio
|
|
1188
1203
|
|
|
1189
|
-
from
|
|
1204
|
+
from usecortex-ai import AsyncCortexAI, MarkdownUploadRequest
|
|
1190
1205
|
|
|
1191
1206
|
client = AsyncCortexAI(token="YOUR_TOKEN", )
|
|
1192
1207
|
async def main() -> None:
|
|
1193
|
-
await client.upload.update_markdown(source_id='source_id', tenant_id='tenant_id', content='content', )
|
|
1208
|
+
await client.upload.update_markdown(source_id='source_id', tenant_id='tenant_id', request=MarkdownUploadRequest(content='content', ), )
|
|
1194
1209
|
asyncio.run(main())
|
|
1195
1210
|
"""
|
|
1196
1211
|
_response = await self._raw_client.update_markdown(
|
|
1197
1212
|
source_id=source_id,
|
|
1198
1213
|
tenant_id=tenant_id,
|
|
1199
|
-
|
|
1214
|
+
request=request,
|
|
1200
1215
|
sub_tenant_id=sub_tenant_id,
|
|
1201
|
-
|
|
1202
|
-
document_metadata=document_metadata,
|
|
1216
|
+
relations=relations,
|
|
1203
1217
|
request_options=request_options,
|
|
1204
1218
|
)
|
|
1205
1219
|
return _response.data
|
|
@@ -1209,10 +1223,9 @@ class AsyncUploadClient:
|
|
|
1209
1223
|
*,
|
|
1210
1224
|
source_id: str,
|
|
1211
1225
|
tenant_id: str,
|
|
1212
|
-
|
|
1226
|
+
request: MarkdownUploadRequest,
|
|
1213
1227
|
sub_tenant_id: typing.Optional[str] = None,
|
|
1214
|
-
|
|
1215
|
-
document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
|
|
1228
|
+
relations: typing.Optional[Relations] = OMIT,
|
|
1216
1229
|
request_options: typing.Optional[RequestOptions] = None,
|
|
1217
1230
|
) -> SingleUploadData:
|
|
1218
1231
|
"""
|
|
@@ -1222,13 +1235,11 @@ class AsyncUploadClient:
|
|
|
1222
1235
|
|
|
1223
1236
|
tenant_id : str
|
|
1224
1237
|
|
|
1225
|
-
|
|
1238
|
+
request : MarkdownUploadRequest
|
|
1226
1239
|
|
|
1227
1240
|
sub_tenant_id : typing.Optional[str]
|
|
1228
1241
|
|
|
1229
|
-
|
|
1230
|
-
|
|
1231
|
-
document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
|
|
1242
|
+
relations : typing.Optional[Relations]
|
|
1232
1243
|
|
|
1233
1244
|
request_options : typing.Optional[RequestOptions]
|
|
1234
1245
|
Request-specific configuration.
|
|
@@ -1242,20 +1253,19 @@ class AsyncUploadClient:
|
|
|
1242
1253
|
--------
|
|
1243
1254
|
import asyncio
|
|
1244
1255
|
|
|
1245
|
-
from
|
|
1256
|
+
from usecortex-ai import AsyncCortexAI, MarkdownUploadRequest
|
|
1246
1257
|
|
|
1247
1258
|
client = AsyncCortexAI(token="YOUR_TOKEN", )
|
|
1248
1259
|
async def main() -> None:
|
|
1249
|
-
await client.upload.update_text(source_id='source_id', tenant_id='tenant_id', content='content', )
|
|
1260
|
+
await client.upload.update_text(source_id='source_id', tenant_id='tenant_id', request=MarkdownUploadRequest(content='content', ), )
|
|
1250
1261
|
asyncio.run(main())
|
|
1251
1262
|
"""
|
|
1252
1263
|
_response = await self._raw_client.update_text(
|
|
1253
1264
|
source_id=source_id,
|
|
1254
1265
|
tenant_id=tenant_id,
|
|
1255
|
-
|
|
1266
|
+
request=request,
|
|
1256
1267
|
sub_tenant_id=sub_tenant_id,
|
|
1257
|
-
|
|
1258
|
-
document_metadata=document_metadata,
|
|
1268
|
+
relations=relations,
|
|
1259
1269
|
request_options=request_options,
|
|
1260
1270
|
)
|
|
1261
1271
|
return _response.data
|
|
@@ -1292,7 +1302,7 @@ class AsyncUploadClient:
|
|
|
1292
1302
|
--------
|
|
1293
1303
|
import asyncio
|
|
1294
1304
|
|
|
1295
|
-
from
|
|
1305
|
+
from usecortex-ai import AsyncCortexAI
|
|
1296
1306
|
|
|
1297
1307
|
client = AsyncCortexAI(token="YOUR_TOKEN", )
|
|
1298
1308
|
async def main() -> None:
|
|
@@ -1338,7 +1348,7 @@ class AsyncUploadClient:
|
|
|
1338
1348
|
--------
|
|
1339
1349
|
import asyncio
|
|
1340
1350
|
|
|
1341
|
-
from
|
|
1351
|
+
from usecortex-ai import AsyncCortexAI
|
|
1342
1352
|
|
|
1343
1353
|
client = AsyncCortexAI(token="YOUR_TOKEN", )
|
|
1344
1354
|
async def main() -> None:
|
|
@@ -1357,6 +1367,7 @@ class AsyncUploadClient:
|
|
|
1357
1367
|
tenant_id: str,
|
|
1358
1368
|
sub_tenant_id: typing.Optional[str] = None,
|
|
1359
1369
|
file_id: typing.Optional[str] = None,
|
|
1370
|
+
relations: typing.Optional[str] = OMIT,
|
|
1360
1371
|
request_options: typing.Optional[RequestOptions] = None,
|
|
1361
1372
|
) -> SingleUploadData:
|
|
1362
1373
|
"""
|
|
@@ -1370,6 +1381,8 @@ class AsyncUploadClient:
|
|
|
1370
1381
|
|
|
1371
1382
|
file_id : typing.Optional[str]
|
|
1372
1383
|
|
|
1384
|
+
relations : typing.Optional[str]
|
|
1385
|
+
|
|
1373
1386
|
request_options : typing.Optional[RequestOptions]
|
|
1374
1387
|
Request-specific configuration.
|
|
1375
1388
|
|
|
@@ -1382,7 +1395,7 @@ class AsyncUploadClient:
|
|
|
1382
1395
|
--------
|
|
1383
1396
|
import asyncio
|
|
1384
1397
|
|
|
1385
|
-
from
|
|
1398
|
+
from usecortex-ai import AsyncCortexAI
|
|
1386
1399
|
|
|
1387
1400
|
client = AsyncCortexAI(token="YOUR_TOKEN", )
|
|
1388
1401
|
async def main() -> None:
|
|
@@ -1394,6 +1407,7 @@ class AsyncUploadClient:
|
|
|
1394
1407
|
tenant_id=tenant_id,
|
|
1395
1408
|
sub_tenant_id=sub_tenant_id,
|
|
1396
1409
|
file_id=file_id,
|
|
1410
|
+
relations=relations,
|
|
1397
1411
|
request_options=request_options,
|
|
1398
1412
|
)
|
|
1399
1413
|
return _response.data
|
|
@@ -1405,6 +1419,7 @@ class AsyncUploadClient:
|
|
|
1405
1419
|
source_id: str,
|
|
1406
1420
|
tenant_id: str,
|
|
1407
1421
|
sub_tenant_id: typing.Optional[str] = None,
|
|
1422
|
+
relations: typing.Optional[str] = OMIT,
|
|
1408
1423
|
request_options: typing.Optional[RequestOptions] = None,
|
|
1409
1424
|
) -> SingleUploadData:
|
|
1410
1425
|
"""
|
|
@@ -1418,6 +1433,8 @@ class AsyncUploadClient:
|
|
|
1418
1433
|
|
|
1419
1434
|
sub_tenant_id : typing.Optional[str]
|
|
1420
1435
|
|
|
1436
|
+
relations : typing.Optional[str]
|
|
1437
|
+
|
|
1421
1438
|
request_options : typing.Optional[RequestOptions]
|
|
1422
1439
|
Request-specific configuration.
|
|
1423
1440
|
|
|
@@ -1430,7 +1447,7 @@ class AsyncUploadClient:
|
|
|
1430
1447
|
--------
|
|
1431
1448
|
import asyncio
|
|
1432
1449
|
|
|
1433
|
-
from
|
|
1450
|
+
from usecortex-ai import AsyncCortexAI
|
|
1434
1451
|
|
|
1435
1452
|
client = AsyncCortexAI(token="YOUR_TOKEN", )
|
|
1436
1453
|
async def main() -> None:
|
|
@@ -1442,6 +1459,7 @@ class AsyncUploadClient:
|
|
|
1442
1459
|
source_id=source_id,
|
|
1443
1460
|
tenant_id=tenant_id,
|
|
1444
1461
|
sub_tenant_id=sub_tenant_id,
|
|
1462
|
+
relations=relations,
|
|
1445
1463
|
request_options=request_options,
|
|
1446
1464
|
)
|
|
1447
1465
|
return _response.data
|
|
@@ -1475,7 +1493,7 @@ class AsyncUploadClient:
|
|
|
1475
1493
|
--------
|
|
1476
1494
|
import asyncio
|
|
1477
1495
|
|
|
1478
|
-
from
|
|
1496
|
+
from usecortex-ai import AsyncCortexAI
|
|
1479
1497
|
|
|
1480
1498
|
client = AsyncCortexAI(token="YOUR_TOKEN", )
|
|
1481
1499
|
async def main() -> None:
|
|
@@ -1516,7 +1534,7 @@ class AsyncUploadClient:
|
|
|
1516
1534
|
--------
|
|
1517
1535
|
import asyncio
|
|
1518
1536
|
|
|
1519
|
-
from
|
|
1537
|
+
from usecortex-ai import AsyncCortexAI
|
|
1520
1538
|
|
|
1521
1539
|
client = AsyncCortexAI(token="YOUR_TOKEN", )
|
|
1522
1540
|
async def main() -> None:
|
|
@@ -1559,7 +1577,7 @@ class AsyncUploadClient:
|
|
|
1559
1577
|
--------
|
|
1560
1578
|
import asyncio
|
|
1561
1579
|
|
|
1562
|
-
from
|
|
1580
|
+
from usecortex-ai import AsyncCortexAI
|
|
1563
1581
|
|
|
1564
1582
|
client = AsyncCortexAI(token="YOUR_TOKEN", )
|
|
1565
1583
|
async def main() -> None:
|