usecortex-ai 0.1.1__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. {cortex_ai → usecortex_ai}/__init__.py +2 -0
  2. {cortex_ai → usecortex_ai}/embeddings/client.py +10 -12
  3. {cortex_ai → usecortex_ai}/embeddings/raw_client.py +8 -10
  4. {cortex_ai → usecortex_ai}/sources/client.py +83 -0
  5. {cortex_ai → usecortex_ai}/sources/raw_client.py +259 -0
  6. {cortex_ai → usecortex_ai}/types/__init__.py +2 -0
  7. {cortex_ai → usecortex_ai}/types/markdown_upload_request.py +2 -0
  8. usecortex_ai/types/relations.py +20 -0
  9. {cortex_ai → usecortex_ai}/types/search_chunk.py +5 -4
  10. {cortex_ai → usecortex_ai}/types/source.py +4 -3
  11. {cortex_ai → usecortex_ai}/types/source_content.py +2 -2
  12. {cortex_ai → usecortex_ai}/types/source_model.py +2 -0
  13. usecortex_ai/types/sub_tenant_ids_data.py +23 -0
  14. {cortex_ai → usecortex_ai}/upload/client.py +114 -53
  15. {cortex_ai → usecortex_ai}/upload/raw_client.py +94 -49
  16. {usecortex_ai-0.1.1.dist-info → usecortex_ai-0.2.1.dist-info}/METADATA +1 -1
  17. usecortex_ai-0.2.1.dist-info/RECORD +91 -0
  18. usecortex_ai-0.2.1.dist-info/top_level.txt +1 -0
  19. cortex_ai/types/relations.py +0 -27
  20. usecortex_ai-0.1.1.dist-info/RECORD +0 -90
  21. usecortex_ai-0.1.1.dist-info/top_level.txt +0 -1
  22. {cortex_ai → usecortex_ai}/client.py +0 -0
  23. {cortex_ai → usecortex_ai}/core/__init__.py +0 -0
  24. {cortex_ai → usecortex_ai}/core/api_error.py +0 -0
  25. {cortex_ai → usecortex_ai}/core/client_wrapper.py +0 -0
  26. {cortex_ai → usecortex_ai}/core/datetime_utils.py +0 -0
  27. {cortex_ai → usecortex_ai}/core/file.py +0 -0
  28. {cortex_ai → usecortex_ai}/core/force_multipart.py +0 -0
  29. {cortex_ai → usecortex_ai}/core/http_client.py +0 -0
  30. {cortex_ai → usecortex_ai}/core/http_response.py +0 -0
  31. {cortex_ai → usecortex_ai}/core/jsonable_encoder.py +0 -0
  32. {cortex_ai → usecortex_ai}/core/pydantic_utilities.py +0 -0
  33. {cortex_ai → usecortex_ai}/core/query_encoder.py +0 -0
  34. {cortex_ai → usecortex_ai}/core/remove_none_from_dict.py +0 -0
  35. {cortex_ai → usecortex_ai}/core/request_options.py +0 -0
  36. {cortex_ai → usecortex_ai}/core/serialization.py +0 -0
  37. {cortex_ai → usecortex_ai}/embeddings/__init__.py +0 -0
  38. {cortex_ai → usecortex_ai}/environment.py +0 -0
  39. {cortex_ai → usecortex_ai}/errors/__init__.py +0 -0
  40. {cortex_ai → usecortex_ai}/errors/bad_request_error.py +0 -0
  41. {cortex_ai → usecortex_ai}/errors/forbidden_error.py +0 -0
  42. {cortex_ai → usecortex_ai}/errors/internal_server_error.py +0 -0
  43. {cortex_ai → usecortex_ai}/errors/not_found_error.py +0 -0
  44. {cortex_ai → usecortex_ai}/errors/service_unavailable_error.py +0 -0
  45. {cortex_ai → usecortex_ai}/errors/unauthorized_error.py +0 -0
  46. {cortex_ai → usecortex_ai}/errors/unprocessable_entity_error.py +0 -0
  47. {cortex_ai → usecortex_ai}/fetch/__init__.py +0 -0
  48. {cortex_ai → usecortex_ai}/fetch/client.py +0 -0
  49. {cortex_ai → usecortex_ai}/fetch/raw_client.py +0 -0
  50. {cortex_ai → usecortex_ai}/raw_client.py +0 -0
  51. {cortex_ai → usecortex_ai}/search/__init__.py +0 -0
  52. {cortex_ai → usecortex_ai}/search/client.py +0 -0
  53. {cortex_ai → usecortex_ai}/search/raw_client.py +0 -0
  54. {cortex_ai → usecortex_ai}/search/types/__init__.py +0 -0
  55. {cortex_ai → usecortex_ai}/search/types/alpha.py +0 -0
  56. {cortex_ai → usecortex_ai}/sources/__init__.py +0 -0
  57. {cortex_ai → usecortex_ai}/tenant/__init__.py +0 -0
  58. {cortex_ai → usecortex_ai}/tenant/client.py +0 -0
  59. {cortex_ai → usecortex_ai}/tenant/raw_client.py +0 -0
  60. {cortex_ai → usecortex_ai}/types/actual_error_response.py +0 -0
  61. {cortex_ai → usecortex_ai}/types/app_sources_upload_data.py +1 -1
  62. {cortex_ai → usecortex_ai}/types/attachment_model.py +0 -0
  63. {cortex_ai → usecortex_ai}/types/batch_upload_data.py +1 -1
  64. {cortex_ai → usecortex_ai}/types/bm_25_operator_type.py +0 -0
  65. {cortex_ai → usecortex_ai}/types/body_scrape_webpage_upload_scrape_webpage_post.py +0 -0
  66. {cortex_ai → usecortex_ai}/types/body_update_scrape_job_upload_update_webpage_patch.py +0 -0
  67. {cortex_ai → usecortex_ai}/types/content_model.py +0 -0
  68. {cortex_ai → usecortex_ai}/types/delete_memory_request.py +0 -0
  69. {cortex_ai → usecortex_ai}/types/embeddings_create_collection_data.py +2 -2
  70. {cortex_ai → usecortex_ai}/types/embeddings_delete_data.py +2 -2
  71. {cortex_ai → usecortex_ai}/types/embeddings_get_data.py +2 -2
  72. {cortex_ai → usecortex_ai}/types/embeddings_search_data.py +2 -2
  73. {cortex_ai → usecortex_ai}/types/error_response.py +0 -0
  74. {cortex_ai → usecortex_ai}/types/extended_context.py +0 -0
  75. {cortex_ai → usecortex_ai}/types/fetch_content_data.py +2 -2
  76. {cortex_ai → usecortex_ai}/types/file_upload_result.py +0 -0
  77. {cortex_ai → usecortex_ai}/types/http_validation_error.py +0 -0
  78. {cortex_ai → usecortex_ai}/types/list_sources_response.py +1 -1
  79. {cortex_ai → usecortex_ai}/types/processing_status.py +2 -2
  80. {cortex_ai → usecortex_ai}/types/related_chunk.py +0 -0
  81. {cortex_ai → usecortex_ai}/types/single_upload_data.py +1 -1
  82. {cortex_ai → usecortex_ai}/types/tenant_create_data.py +2 -2
  83. {cortex_ai → usecortex_ai}/types/tenant_stats.py +3 -3
  84. {cortex_ai → usecortex_ai}/types/validation_error.py +0 -0
  85. {cortex_ai → usecortex_ai}/types/validation_error_loc_item.py +0 -0
  86. {cortex_ai → usecortex_ai}/upload/__init__.py +0 -0
  87. {cortex_ai → usecortex_ai}/user/__init__.py +0 -0
  88. {cortex_ai → usecortex_ai}/user/client.py +0 -0
  89. {cortex_ai → usecortex_ai}/user/raw_client.py +0 -0
  90. {cortex_ai → usecortex_ai}/user_memory/__init__.py +0 -0
  91. {cortex_ai → usecortex_ai}/user_memory/client.py +0 -0
  92. {cortex_ai → usecortex_ai}/user_memory/raw_client.py +0 -0
  93. {usecortex_ai-0.1.1.dist-info → usecortex_ai-0.2.1.dist-info}/WHEEL +0 -0
  94. {usecortex_ai-0.1.1.dist-info → usecortex_ai-0.2.1.dist-info}/licenses/LICENSE +0 -0
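The headline change is visible directly in the file listing: every module moves from the cortex_ai import package to usecortex_ai (top_level.txt now lists usecortex_ai), types/relations.py is rewritten under the new package (a 20-line module replacing the old 27-line one), a new types/sub_tenant_ids_data.py appears, and the sources client and raw_client gain a substantial amount of new code. The hunks below appear to come from usecortex_ai/upload/client.py (entry 14 above). Note that the generated docstrings print the distribution name usecortex-ai in import statements, which is not a valid Python module path; the sketches in this page assume the importable package is usecortex_ai, matching the new directory name and top_level.txt. A minimal before/after import sketch under that assumption:

    # usecortex-ai 0.1.1: modules lived under cortex_ai (hypothetical usage sketch)
    # from cortex_ai import CortexAI

    # usecortex-ai 0.2.1: the import package is renamed to usecortex_ai
    from usecortex_ai import CortexAI

    client = CortexAI(token="YOUR_TOKEN")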
@@ -7,7 +7,6 @@ from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
  from ..core.request_options import RequestOptions
  from ..types.app_sources_upload_data import AppSourcesUploadData
  from ..types.batch_upload_data import BatchUploadData
- from ..types.markdown_upload_request import MarkdownUploadRequest
  from ..types.processing_status import ProcessingStatus
  from ..types.relations import Relations
  from ..types.single_upload_data import SingleUploadData
@@ -294,8 +293,10 @@ class UploadClient:
  self,
  *,
  tenant_id: str,
- request: MarkdownUploadRequest,
+ content: str,
  sub_tenant_id: typing.Optional[str] = None,
+ tenant_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
+ document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
  relations: typing.Optional[Relations] = OMIT,
  request_options: typing.Optional[RequestOptions] = None,
  ) -> SingleUploadData:
@@ -307,10 +308,14 @@ class UploadClient:
  ----------
  tenant_id : str

- request : MarkdownUploadRequest
+ content : str

  sub_tenant_id : typing.Optional[str]

+ tenant_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
+ document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
  relations : typing.Optional[Relations]

  request_options : typing.Optional[RequestOptions]
@@ -323,15 +328,17 @@ class UploadClient:

  Examples
  --------
- from usecortex-ai import CortexAI, MarkdownUploadRequest
+ from usecortex-ai import CortexAI

  client = CortexAI(token="YOUR_TOKEN", )
- client.upload.upload_markdown(tenant_id='tenant_id', request=MarkdownUploadRequest(content='content', ), )
+ client.upload.upload_markdown(tenant_id='tenant_id', content='content', )
  """
  _response = self._raw_client.upload_markdown(
  tenant_id=tenant_id,
- request=request,
+ content=content,
  sub_tenant_id=sub_tenant_id,
+ tenant_metadata=tenant_metadata,
+ document_metadata=document_metadata,
  relations=relations,
  request_options=request_options,
  )
@@ -341,8 +348,10 @@ class UploadClient:
  self,
  *,
  tenant_id: str,
- request: MarkdownUploadRequest,
+ content: str,
  sub_tenant_id: typing.Optional[str] = None,
+ tenant_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
+ document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
  relations: typing.Optional[Relations] = OMIT,
  request_options: typing.Optional[RequestOptions] = None,
  ) -> SingleUploadData:
@@ -354,10 +363,14 @@ class UploadClient:
  ----------
  tenant_id : str

- request : MarkdownUploadRequest
+ content : str

  sub_tenant_id : typing.Optional[str]

+ tenant_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
+ document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
  relations : typing.Optional[Relations]

  request_options : typing.Optional[RequestOptions]
@@ -370,15 +383,17 @@ class UploadClient:

  Examples
  --------
- from usecortex-ai import CortexAI, MarkdownUploadRequest
+ from usecortex-ai import CortexAI

  client = CortexAI(token="YOUR_TOKEN", )
- client.upload.upload_text(tenant_id='tenant_id', request=MarkdownUploadRequest(content='content', ), )
+ client.upload.upload_text(tenant_id='tenant_id', content='content', )
  """
  _response = self._raw_client.upload_text(
  tenant_id=tenant_id,
- request=request,
+ content=content,
  sub_tenant_id=sub_tenant_id,
+ tenant_metadata=tenant_metadata,
+ document_metadata=document_metadata,
  relations=relations,
  request_options=request_options,
  )
@@ -389,8 +404,10 @@ class UploadClient:
  *,
  source_id: str,
  tenant_id: str,
- request: MarkdownUploadRequest,
+ content: str,
  sub_tenant_id: typing.Optional[str] = None,
+ tenant_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
+ document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
  relations: typing.Optional[Relations] = OMIT,
  request_options: typing.Optional[RequestOptions] = None,
  ) -> SingleUploadData:
@@ -401,10 +418,14 @@ class UploadClient:

  tenant_id : str

- request : MarkdownUploadRequest
+ content : str

  sub_tenant_id : typing.Optional[str]

+ tenant_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
+ document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
  relations : typing.Optional[Relations]

  request_options : typing.Optional[RequestOptions]
@@ -417,16 +438,18 @@ class UploadClient:

  Examples
  --------
- from usecortex-ai import CortexAI, MarkdownUploadRequest
+ from usecortex-ai import CortexAI

  client = CortexAI(token="YOUR_TOKEN", )
- client.upload.update_markdown(source_id='source_id', tenant_id='tenant_id', request=MarkdownUploadRequest(content='content', ), )
+ client.upload.update_markdown(source_id='source_id', tenant_id='tenant_id', content='content', )
  """
  _response = self._raw_client.update_markdown(
  source_id=source_id,
  tenant_id=tenant_id,
- request=request,
+ content=content,
  sub_tenant_id=sub_tenant_id,
+ tenant_metadata=tenant_metadata,
+ document_metadata=document_metadata,
  relations=relations,
  request_options=request_options,
  )
@@ -437,8 +460,10 @@ class UploadClient:
  *,
  source_id: str,
  tenant_id: str,
- request: MarkdownUploadRequest,
+ content: str,
  sub_tenant_id: typing.Optional[str] = None,
+ tenant_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
+ document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
  relations: typing.Optional[Relations] = OMIT,
  request_options: typing.Optional[RequestOptions] = None,
  ) -> SingleUploadData:
@@ -449,10 +474,14 @@ class UploadClient:

  tenant_id : str

- request : MarkdownUploadRequest
+ content : str

  sub_tenant_id : typing.Optional[str]

+ tenant_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
+ document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
  relations : typing.Optional[Relations]

  request_options : typing.Optional[RequestOptions]
@@ -465,16 +494,18 @@ class UploadClient:

  Examples
  --------
- from usecortex-ai import CortexAI, MarkdownUploadRequest
+ from usecortex-ai import CortexAI

  client = CortexAI(token="YOUR_TOKEN", )
- client.upload.update_text(source_id='source_id', tenant_id='tenant_id', request=MarkdownUploadRequest(content='content', ), )
+ client.upload.update_text(source_id='source_id', tenant_id='tenant_id', content='content', )
  """
  _response = self._raw_client.update_text(
  source_id=source_id,
  tenant_id=tenant_id,
- request=request,
+ content=content,
  sub_tenant_id=sub_tenant_id,
+ tenant_metadata=tenant_metadata,
+ document_metadata=document_metadata,
  relations=relations,
  request_options=request_options,
  )
@@ -528,8 +559,8 @@ class UploadClient:
  self,
  *,
  tenant_id: str,
- embeddings: typing.Dict[str, typing.Sequence[float]],
  sub_tenant_id: typing.Optional[str] = None,
+ embeddings: typing.Optional[typing.Dict[str, typing.Sequence[float]]] = OMIT,
  request_options: typing.Optional[RequestOptions] = None,
  ) -> SingleUploadData:
  """
@@ -537,11 +568,10 @@ class UploadClient:
  ----------
  tenant_id : str

- embeddings : typing.Dict[str, typing.Sequence[float]]
- Dictionary with chunk_id as key and embedding array as value
-
  sub_tenant_id : typing.Optional[str]

+ embeddings : typing.Optional[typing.Dict[str, typing.Sequence[float]]]
+
  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.

@@ -555,10 +585,10 @@ class UploadClient:
  from usecortex-ai import CortexAI

  client = CortexAI(token="YOUR_TOKEN", )
- client.upload.update_embeddings(tenant_id='tenant_id', embeddings={'key': [1.1]}, )
+ client.upload.update_embeddings(tenant_id='tenant_id', )
  """
  _response = self._raw_client.update_embeddings(
- tenant_id=tenant_id, embeddings=embeddings, sub_tenant_id=sub_tenant_id, request_options=request_options
+ tenant_id=tenant_id, sub_tenant_id=sub_tenant_id, embeddings=embeddings, request_options=request_options
  )
  return _response.data
@@ -1068,8 +1098,10 @@ class AsyncUploadClient:
  self,
  *,
  tenant_id: str,
- request: MarkdownUploadRequest,
+ content: str,
  sub_tenant_id: typing.Optional[str] = None,
+ tenant_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
+ document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
  relations: typing.Optional[Relations] = OMIT,
  request_options: typing.Optional[RequestOptions] = None,
  ) -> SingleUploadData:
@@ -1081,10 +1113,14 @@ class AsyncUploadClient:
  ----------
  tenant_id : str

- request : MarkdownUploadRequest
+ content : str

  sub_tenant_id : typing.Optional[str]

+ tenant_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
+ document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
  relations : typing.Optional[Relations]

  request_options : typing.Optional[RequestOptions]
@@ -1099,17 +1135,19 @@ class AsyncUploadClient:
  --------
  import asyncio

- from usecortex-ai import AsyncCortexAI, MarkdownUploadRequest
+ from usecortex-ai import AsyncCortexAI

  client = AsyncCortexAI(token="YOUR_TOKEN", )
  async def main() -> None:
- await client.upload.upload_markdown(tenant_id='tenant_id', request=MarkdownUploadRequest(content='content', ), )
+ await client.upload.upload_markdown(tenant_id='tenant_id', content='content', )
  asyncio.run(main())
  """
  _response = await self._raw_client.upload_markdown(
  tenant_id=tenant_id,
- request=request,
+ content=content,
  sub_tenant_id=sub_tenant_id,
+ tenant_metadata=tenant_metadata,
+ document_metadata=document_metadata,
  relations=relations,
  request_options=request_options,
  )
@@ -1119,8 +1157,10 @@ class AsyncUploadClient:
  self,
  *,
  tenant_id: str,
- request: MarkdownUploadRequest,
+ content: str,
  sub_tenant_id: typing.Optional[str] = None,
+ tenant_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
+ document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
  relations: typing.Optional[Relations] = OMIT,
  request_options: typing.Optional[RequestOptions] = None,
  ) -> SingleUploadData:
@@ -1132,10 +1172,14 @@ class AsyncUploadClient:
  ----------
  tenant_id : str

- request : MarkdownUploadRequest
+ content : str

  sub_tenant_id : typing.Optional[str]

+ tenant_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
+ document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
  relations : typing.Optional[Relations]

  request_options : typing.Optional[RequestOptions]
@@ -1150,17 +1194,19 @@ class AsyncUploadClient:
  --------
  import asyncio

- from usecortex-ai import AsyncCortexAI, MarkdownUploadRequest
+ from usecortex-ai import AsyncCortexAI

  client = AsyncCortexAI(token="YOUR_TOKEN", )
  async def main() -> None:
- await client.upload.upload_text(tenant_id='tenant_id', request=MarkdownUploadRequest(content='content', ), )
+ await client.upload.upload_text(tenant_id='tenant_id', content='content', )
  asyncio.run(main())
  """
  _response = await self._raw_client.upload_text(
  tenant_id=tenant_id,
- request=request,
+ content=content,
  sub_tenant_id=sub_tenant_id,
+ tenant_metadata=tenant_metadata,
+ document_metadata=document_metadata,
  relations=relations,
  request_options=request_options,
  )
@@ -1171,8 +1217,10 @@ class AsyncUploadClient:
  *,
  source_id: str,
  tenant_id: str,
- request: MarkdownUploadRequest,
+ content: str,
  sub_tenant_id: typing.Optional[str] = None,
+ tenant_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
+ document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
  relations: typing.Optional[Relations] = OMIT,
  request_options: typing.Optional[RequestOptions] = None,
  ) -> SingleUploadData:
@@ -1183,10 +1231,14 @@ class AsyncUploadClient:

  tenant_id : str

- request : MarkdownUploadRequest
+ content : str

  sub_tenant_id : typing.Optional[str]

+ tenant_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
+ document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
  relations : typing.Optional[Relations]

  request_options : typing.Optional[RequestOptions]
@@ -1201,18 +1253,20 @@ class AsyncUploadClient:
  --------
  import asyncio

- from usecortex-ai import AsyncCortexAI, MarkdownUploadRequest
+ from usecortex-ai import AsyncCortexAI

  client = AsyncCortexAI(token="YOUR_TOKEN", )
  async def main() -> None:
- await client.upload.update_markdown(source_id='source_id', tenant_id='tenant_id', request=MarkdownUploadRequest(content='content', ), )
+ await client.upload.update_markdown(source_id='source_id', tenant_id='tenant_id', content='content', )
  asyncio.run(main())
  """
  _response = await self._raw_client.update_markdown(
  source_id=source_id,
  tenant_id=tenant_id,
- request=request,
+ content=content,
  sub_tenant_id=sub_tenant_id,
+ tenant_metadata=tenant_metadata,
+ document_metadata=document_metadata,
  relations=relations,
  request_options=request_options,
  )
@@ -1223,8 +1277,10 @@ class AsyncUploadClient:
  *,
  source_id: str,
  tenant_id: str,
- request: MarkdownUploadRequest,
+ content: str,
  sub_tenant_id: typing.Optional[str] = None,
+ tenant_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
+ document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
  relations: typing.Optional[Relations] = OMIT,
  request_options: typing.Optional[RequestOptions] = None,
  ) -> SingleUploadData:
@@ -1235,10 +1291,14 @@ class AsyncUploadClient:

  tenant_id : str

- request : MarkdownUploadRequest
+ content : str

  sub_tenant_id : typing.Optional[str]

+ tenant_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
+ document_metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+
  relations : typing.Optional[Relations]

  request_options : typing.Optional[RequestOptions]
@@ -1253,18 +1313,20 @@ class AsyncUploadClient:
  --------
  import asyncio

- from usecortex-ai import AsyncCortexAI, MarkdownUploadRequest
+ from usecortex-ai import AsyncCortexAI

  client = AsyncCortexAI(token="YOUR_TOKEN", )
  async def main() -> None:
- await client.upload.update_text(source_id='source_id', tenant_id='tenant_id', request=MarkdownUploadRequest(content='content', ), )
+ await client.upload.update_text(source_id='source_id', tenant_id='tenant_id', content='content', )
  asyncio.run(main())
  """
  _response = await self._raw_client.update_text(
  source_id=source_id,
  tenant_id=tenant_id,
- request=request,
+ content=content,
  sub_tenant_id=sub_tenant_id,
+ tenant_metadata=tenant_metadata,
+ document_metadata=document_metadata,
  relations=relations,
  request_options=request_options,
  )
@@ -1322,8 +1384,8 @@ class AsyncUploadClient:
  self,
  *,
  tenant_id: str,
- embeddings: typing.Dict[str, typing.Sequence[float]],
  sub_tenant_id: typing.Optional[str] = None,
+ embeddings: typing.Optional[typing.Dict[str, typing.Sequence[float]]] = OMIT,
  request_options: typing.Optional[RequestOptions] = None,
  ) -> SingleUploadData:
  """
@@ -1331,11 +1393,10 @@ class AsyncUploadClient:
  ----------
  tenant_id : str

- embeddings : typing.Dict[str, typing.Sequence[float]]
- Dictionary with chunk_id as key and embedding array as value
-
  sub_tenant_id : typing.Optional[str]

+ embeddings : typing.Optional[typing.Dict[str, typing.Sequence[float]]]
+
  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.

@@ -1352,11 +1413,11 @@ class AsyncUploadClient:

  client = AsyncCortexAI(token="YOUR_TOKEN", )
  async def main() -> None:
- await client.upload.update_embeddings(tenant_id='tenant_id', embeddings={'key': [1.1]}, )
+ await client.upload.update_embeddings(tenant_id='tenant_id', )
  asyncio.run(main())
  """
  _response = await self._raw_client.update_embeddings(
- tenant_id=tenant_id, embeddings=embeddings, sub_tenant_id=sub_tenant_id, request_options=request_options
+ tenant_id=tenant_id, sub_tenant_id=sub_tenant_id, embeddings=embeddings, request_options=request_options
  )
  return _response.data
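The AsyncUploadClient hunks above mirror the synchronous changes one for one. For completeness, a hedged async sketch combining the new keywords with the asyncio pattern shown in the generated examples (metadata values are hypothetical):

    import asyncio

    from usecortex_ai import AsyncCortexAI

    client = AsyncCortexAI(token="YOUR_TOKEN")

    async def main() -> None:
        # Same flattened signature as the sync client
        await client.upload.update_markdown(
            source_id="source_id",
            tenant_id="tenant_id",
            content="updated markdown body",
            document_metadata={"revision": "2"},
        )

    asyncio.run(main())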