usecortex-ai 0.3.6__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (106)
  1. usecortex_ai/__init__.py +80 -70
  2. usecortex_ai/client.py +25 -23
  3. usecortex_ai/dashboard/client.py +448 -0
  4. usecortex_ai/{user_memory → dashboard}/raw_client.py +371 -530
  5. usecortex_ai/embeddings/client.py +229 -102
  6. usecortex_ai/embeddings/raw_client.py +323 -211
  7. usecortex_ai/errors/__init__.py +2 -0
  8. usecortex_ai/errors/bad_request_error.py +1 -2
  9. usecortex_ai/errors/forbidden_error.py +1 -2
  10. usecortex_ai/errors/internal_server_error.py +1 -2
  11. usecortex_ai/errors/not_found_error.py +1 -2
  12. usecortex_ai/errors/service_unavailable_error.py +1 -2
  13. usecortex_ai/errors/too_many_requests_error.py +11 -0
  14. usecortex_ai/errors/unauthorized_error.py +1 -2
  15. usecortex_ai/fetch/client.py +350 -29
  16. usecortex_ai/fetch/raw_client.py +919 -65
  17. usecortex_ai/raw_client.py +8 -2
  18. usecortex_ai/search/client.py +293 -257
  19. usecortex_ai/search/raw_client.py +445 -346
  20. usecortex_ai/search/types/alpha.py +1 -1
  21. usecortex_ai/sources/client.py +29 -216
  22. usecortex_ai/sources/raw_client.py +51 -589
  23. usecortex_ai/tenant/client.py +155 -118
  24. usecortex_ai/tenant/raw_client.py +227 -350
  25. usecortex_ai/types/__init__.py +74 -66
  26. usecortex_ai/types/add_memory_response.py +39 -0
  27. usecortex_ai/types/{relations.py → api_key_info.py} +25 -5
  28. usecortex_ai/types/app_sources_upload_data.py +15 -6
  29. usecortex_ai/types/{file_upload_result.py → collection_stats.py} +5 -5
  30. usecortex_ai/types/custom_property_definition.py +75 -0
  31. usecortex_ai/types/dashboard_apis_response.py +33 -0
  32. usecortex_ai/types/dashboard_sources_response.py +33 -0
  33. usecortex_ai/types/dashboard_tenants_response.py +33 -0
  34. usecortex_ai/types/{list_sources_response.py → delete_result.py} +10 -7
  35. usecortex_ai/types/delete_user_memory_response.py +1 -1
  36. usecortex_ai/types/entity.py +4 -4
  37. usecortex_ai/types/fetch_mode.py +5 -0
  38. usecortex_ai/types/graph_context.py +26 -0
  39. usecortex_ai/types/{delete_sources.py → infra.py} +4 -3
  40. usecortex_ai/types/{fetch_content_data.py → insert_result.py} +12 -8
  41. usecortex_ai/types/memory_item.py +82 -0
  42. usecortex_ai/types/memory_result_item.py +47 -0
  43. usecortex_ai/types/milvus_data_type.py +21 -0
  44. usecortex_ai/types/path_triplet.py +3 -18
  45. usecortex_ai/types/processing_status.py +3 -2
  46. usecortex_ai/types/processing_status_indexing_status.py +7 -0
  47. usecortex_ai/types/qn_a_search_response.py +49 -0
  48. usecortex_ai/types/{retrieve_response.py → raw_embedding_document.py} +11 -8
  49. usecortex_ai/types/raw_embedding_search_result.py +47 -0
  50. usecortex_ai/types/{user_memory.py → raw_embedding_vector.py} +6 -6
  51. usecortex_ai/types/relation_evidence.py +24 -5
  52. usecortex_ai/types/retrieval_result.py +26 -0
  53. usecortex_ai/types/scored_path_response.py +5 -19
  54. usecortex_ai/types/search_mode.py +5 -0
  55. usecortex_ai/types/{batch_upload_data.py → source_delete_response.py} +8 -8
  56. usecortex_ai/types/{list_user_memories_response.py → source_delete_result_item.py} +11 -7
  57. usecortex_ai/types/source_fetch_response.py +70 -0
  58. usecortex_ai/types/{graph_relations_response.py → source_graph_relations_response.py} +3 -3
  59. usecortex_ai/types/{single_upload_data.py → source_list_response.py} +7 -10
  60. usecortex_ai/types/source_model.py +11 -1
  61. usecortex_ai/types/source_status.py +5 -0
  62. usecortex_ai/types/source_upload_response.py +35 -0
  63. usecortex_ai/types/source_upload_result_item.py +38 -0
  64. usecortex_ai/types/supported_llm_providers.py +5 -0
  65. usecortex_ai/types/{embeddings_create_collection_data.py → tenant_create_response.py} +9 -7
  66. usecortex_ai/types/{webpage_scrape_request.py → tenant_info.py} +10 -5
  67. usecortex_ai/types/{scored_triplet_response.py → tenant_metadata_schema_info.py} +8 -15
  68. usecortex_ai/types/{tenant_create_data.py → tenant_stats_response.py} +9 -8
  69. usecortex_ai/types/{triple_with_evidence.py → triplet_with_evidence.py} +5 -1
  70. usecortex_ai/types/user_assistant_pair.py +4 -0
  71. usecortex_ai/types/{search_chunk.py → vector_store_chunk.py} +3 -14
  72. usecortex_ai/upload/__init__.py +3 -0
  73. usecortex_ai/upload/client.py +233 -1937
  74. usecortex_ai/upload/raw_client.py +364 -4401
  75. usecortex_ai/upload/types/__init__.py +7 -0
  76. usecortex_ai/upload/types/body_upload_app_ingestion_upload_app_post_app_sources.py +7 -0
  77. {usecortex_ai-0.3.6.dist-info → usecortex_ai-0.4.0.dist-info}/METADATA +2 -2
  78. usecortex_ai-0.4.0.dist-info/RECORD +113 -0
  79. {usecortex_ai-0.3.6.dist-info → usecortex_ai-0.4.0.dist-info}/WHEEL +1 -1
  80. usecortex_ai/document/client.py +0 -139
  81. usecortex_ai/document/raw_client.py +0 -312
  82. usecortex_ai/types/add_user_memory_response.py +0 -41
  83. usecortex_ai/types/body_scrape_webpage_upload_scrape_webpage_post.py +0 -17
  84. usecortex_ai/types/body_update_scrape_job_upload_update_webpage_patch.py +0 -17
  85. usecortex_ai/types/chunk_graph_relations_response.py +0 -33
  86. usecortex_ai/types/delete_memory_request.py +0 -32
  87. usecortex_ai/types/delete_sub_tenant_data.py +0 -42
  88. usecortex_ai/types/embeddings_delete_data.py +0 -37
  89. usecortex_ai/types/embeddings_get_data.py +0 -37
  90. usecortex_ai/types/embeddings_search_data.py +0 -37
  91. usecortex_ai/types/extended_context.py +0 -17
  92. usecortex_ai/types/markdown_upload_request.py +0 -41
  93. usecortex_ai/types/related_chunk.py +0 -22
  94. usecortex_ai/types/retrieve_user_memory_response.py +0 -38
  95. usecortex_ai/types/source.py +0 -52
  96. usecortex_ai/types/sub_tenant_ids_data.py +0 -47
  97. usecortex_ai/types/tenant_stats.py +0 -42
  98. usecortex_ai/user/__init__.py +0 -4
  99. usecortex_ai/user/client.py +0 -145
  100. usecortex_ai/user/raw_client.py +0 -316
  101. usecortex_ai/user_memory/__init__.py +0 -4
  102. usecortex_ai/user_memory/client.py +0 -515
  103. usecortex_ai-0.3.6.dist-info/RECORD +0 -112
  104. /usecortex_ai/{document → dashboard}/__init__.py +0 -0
  105. {usecortex_ai-0.3.6.dist-info → usecortex_ai-0.4.0.dist-info}/licenses/LICENSE +0 -0
  106. {usecortex_ai-0.3.6.dist-info → usecortex_ai-0.4.0.dist-info}/top_level.txt +0 -0
usecortex_ai/types/{single_upload_data.py → source_list_response.py}
@@ -4,21 +4,18 @@ import typing
 
 import pydantic
 from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .source_model import SourceModel
 
 
-class SingleUploadData(UniversalBaseModel):
-    file_id: str = pydantic.Field()
-    """
-    Unique identifier for the file being processed
-    """
-
-    message: str = pydantic.Field()
+class SourceListResponse(UniversalBaseModel):
+    success: typing.Optional[bool] = None
+    message: typing.Optional[str] = None
+    sources: typing.Optional[typing.List[SourceModel]] = None
+    total: int = pydantic.Field()
     """
-    Status message indicating document parsing scheduled or update completed
+    Total number of sources matching the query.
     """
 
-    success: typing.Optional[bool] = None
-
     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
     else:
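
A minimal sketch of constructing the new `SourceListResponse` by hand; the import path is assumed from the file layout above and the payload values are illustrative only.

```python
# Sketch only: import path assumed from usecortex_ai/types/source_list_response.py above.
from usecortex_ai.types.source_list_response import SourceListResponse

# `total` is the only required field; the rest default to None.
resp = SourceListResponse(success=True, message="ok", sources=[], total=0)
print(resp.total, resp.sources)
```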
usecortex_ai/types/source_model.py
@@ -9,11 +9,21 @@ from .content_model import ContentModel
 
 
 class SourceModel(UniversalBaseModel):
-    id: typing.Optional[str] = pydantic.Field(default=None)
+    id: str = pydantic.Field()
     """
     Stable, unique identifier for the source. If omitted, one may be generated upstream.
     """
 
+    tenant_id: str = pydantic.Field()
+    """
+    Unique identifier for the tenant/organization
+    """
+
+    sub_tenant_id: str = pydantic.Field()
+    """
+    Optional sub-tenant identifier used to organize data within a tenant. If omitted, the default sub-tenant created during tenant setup will be used.
+    """
+
     title: typing.Optional[str] = pydantic.Field(default=None)
     """
     Short human-readable title for the source.
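
Because `id`, `tenant_id`, and `sub_tenant_id` are now required on `SourceModel`, constructing one without them fails validation. A hedged sketch (import path assumed; other fields of the model are not shown in this hunk):

```python
import pydantic

from usecortex_ai.types.source_model import SourceModel  # path assumed from the listing above

try:
    # Previously `id` could be omitted; it is now required along with tenant_id and sub_tenant_id.
    SourceModel(title="Quarterly report")
except pydantic.ValidationError as exc:
    print(exc)  # lists the missing required fields
```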
usecortex_ai/types/source_status.py
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+SourceStatus = typing.Union[typing.Literal["queued", "processing", "completed", "failed"], typing.Any]
usecortex_ai/types/source_upload_response.py
@@ -0,0 +1,35 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .source_upload_result_item import SourceUploadResultItem
+
+
+class SourceUploadResponse(UniversalBaseModel):
+    success: typing.Optional[bool] = None
+    message: typing.Optional[str] = None
+    results: typing.Optional[typing.List[SourceUploadResultItem]] = pydantic.Field(default=None)
+    """
+    List of upload results for each source.
+    """
+
+    success_count: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    Number of sources successfully queued.
+    """
+
+    failed_count: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    Number of sources that failed to upload.
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
usecortex_ai/types/source_upload_result_item.py
@@ -0,0 +1,38 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .source_status import SourceStatus
+
+
+class SourceUploadResultItem(UniversalBaseModel):
+    source_id: str = pydantic.Field()
+    """
+    Unique identifier for the uploaded source.
+    """
+
+    filename: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Original filename if present.
+    """
+
+    status: typing.Optional[SourceStatus] = pydantic.Field(default=None)
+    """
+    Initial processing status.
+    """
+
+    error: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Error message if upload failed.
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
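
The three new types above (`SourceStatus`, `SourceUploadResponse`, `SourceUploadResultItem`) fit together as a per-source upload report. A hedged sketch of consuming one (import paths assumed; in practice the response would come from an upload call rather than be built by hand):

```python
from usecortex_ai.types.source_upload_response import SourceUploadResponse
from usecortex_ai.types.source_upload_result_item import SourceUploadResultItem

# Hand-built stand-in for a response returned by an upload endpoint.
resp = SourceUploadResponse(
    success=True,
    message="2 queued, 1 failed",
    results=[
        SourceUploadResultItem(source_id="src_1", filename="a.pdf", status="queued"),
        SourceUploadResultItem(source_id="src_2", filename="b.pdf", status="queued"),
        SourceUploadResultItem(source_id="src_3", filename="c.pdf", status="failed", error="unsupported type"),
    ],
    success_count=2,
    failed_count=1,
)

# Report the sources whose SourceStatus is "failed".
for item in resp.results or []:
    if item.status == "failed":
        print(f"{item.source_id} ({item.filename}): {item.error}")
```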
usecortex_ai/types/supported_llm_providers.py
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+SupportedLlmProviders = typing.Union[typing.Literal["groq", "cerebras", "openai", "anthropic", "gemini"], typing.Any]
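
`SupportedLlmProviders` is an open string union, so it mainly serves as an annotation. A minimal sketch of using it in caller code (the helper function is hypothetical, not part of the SDK):

```python
import typing

from usecortex_ai.types.supported_llm_providers import SupportedLlmProviders  # path assumed


def pick_provider(preferred: typing.Optional[SupportedLlmProviders] = None) -> SupportedLlmProviders:
    # Hypothetical helper: fall back to one of the literal values from the union.
    return preferred or "openai"
```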
usecortex_ai/types/{embeddings_create_collection_data.py → tenant_create_response.py}
@@ -4,27 +4,29 @@ import typing
 
 import pydantic
 from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .infra import Infra
+from .tenant_metadata_schema_info import TenantMetadataSchemaInfo
 
 
-class EmbeddingsCreateCollectionData(UniversalBaseModel):
+class TenantCreateResponse(UniversalBaseModel):
     tenant_id: str = pydantic.Field()
     """
-    Identifier for the tenant/organization
+    Identifier provided by user
     """
 
-    sub_tenant_id: str = pydantic.Field()
+    infra: Infra = pydantic.Field()
     """
-    Identifier for the sub-tenant within the tenant
+    Infra status
     """
 
-    success: typing.Optional[bool] = pydantic.Field(default=None)
+    metadata_schema: typing.Optional[TenantMetadataSchemaInfo] = pydantic.Field(default=None)
     """
-    Indicates whether the embeddings tenant creation completed successfully
+    Summary of configured tenant metadata schema (if provided)
     """
 
     message: typing.Optional[str] = pydantic.Field(default=None)
     """
-    Status message about the tenant creation operation
+    Summary message
     """
 
     if IS_PYDANTIC_V2:
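
A small sketch of reading the reshaped tenant-creation response. `Infra` and the exact client call are not shown in this diff, so only fields visible above are touched (import path assumed):

```python
from usecortex_ai.types.tenant_create_response import TenantCreateResponse  # path assumed


def summarize(resp: TenantCreateResponse) -> str:
    # `infra` is an Infra value whose shape is not shown in this diff; repr() it rather than guess fields.
    schema = resp.metadata_schema
    fields = schema.field_count if schema is not None else 0
    return f"tenant={resp.tenant_id} infra={resp.infra!r} custom_metadata_fields={fields} ({resp.message})"
```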
usecortex_ai/types/{webpage_scrape_request.py → tenant_info.py}
@@ -6,15 +6,20 @@ import pydantic
 from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
 
 
-class WebpageScrapeRequest(UniversalBaseModel):
-    web_url: str = pydantic.Field()
+class TenantInfo(UniversalBaseModel):
+    tenant_id: str = pydantic.Field()
     """
-    The URL of the webpage to scrape and index
+    Organization tenant ID
     """
 
-    file_id: typing.Optional[str] = pydantic.Field(default=None)
+    organisation: str = pydantic.Field()
     """
-    Optional custom file ID for the scraped content. If not provided, a unique ID will be generated
+    Organization name
+    """
+
+    timestamp: str = pydantic.Field()
+    """
+    Creation timestamp
     """
 
     if IS_PYDANTIC_V2:
usecortex_ai/types/{scored_triplet_response.py → tenant_metadata_schema_info.py}
@@ -4,33 +4,26 @@ import typing
 
 import pydantic
 from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
-from .entity import Entity
-from .relation_evidence import RelationEvidence
 
 
-class ScoredTripletResponse(UniversalBaseModel):
+class TenantMetadataSchemaInfo(UniversalBaseModel):
     """
-    Individual scored triplet for entity-based search results
+    Summary of configured tenant metadata schema fields.
     """
 
-    source: Entity = pydantic.Field()
+    field_count: int = pydantic.Field()
     """
-    Source entity
+    Number of custom metadata fields configured
     """
 
-    target: Entity = pydantic.Field()
+    dense_embedding_fields: typing.Optional[typing.List[str]] = pydantic.Field(default=None)
     """
-    Target entity
+    Fields with dense embeddings enabled for semantic search
     """
 
-    relation: RelationEvidence = pydantic.Field()
+    sparse_embedding_fields: typing.Optional[typing.List[str]] = pydantic.Field(default=None)
     """
-    Relation between entities
-    """
-
-    relevancy_score: typing.Optional[float] = pydantic.Field(default=None)
-    """
-    Relevancy score from reranking
+    Fields with sparse embeddings enabled for keyword search
     """
 
     if IS_PYDANTIC_V2:
usecortex_ai/types/{tenant_create_data.py → tenant_stats_response.py}
@@ -4,27 +4,28 @@ import typing
 
 import pydantic
 from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .collection_stats import CollectionStats
 
 
-class TenantCreateData(UniversalBaseModel):
-    status: str = pydantic.Field()
+class TenantStatsResponse(UniversalBaseModel):
+    tenant_id: str = pydantic.Field()
     """
-    Current status of the tenant creation process
+    Tenant identifier
     """
 
-    tenant_id: str = pydantic.Field()
+    normal_collection: CollectionStats = pydantic.Field()
     """
-    Unique identifier assigned to the new tenant
+    Statistics for the normal (context) collection
     """
 
-    success: typing.Optional[bool] = pydantic.Field(default=None)
+    memory_collection: CollectionStats = pydantic.Field()
     """
-    Indicates whether the tenant creation was successful
+    Statistics for the memory collection
     """
 
     message: typing.Optional[str] = pydantic.Field(default=None)
     """
-    Response message describing the creation result
+    Summary message
     """
 
     if IS_PYDANTIC_V2:
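
A sketch of reading the new per-collection stats; the fields of `CollectionStats` itself are not part of this diff, so the objects are only printed as-is (import path assumed):

```python
from usecortex_ai.types.tenant_stats_response import TenantStatsResponse  # path assumed


def print_stats(stats: TenantStatsResponse) -> None:
    # CollectionStats' own fields are not shown in this diff, so just dump the objects.
    print(f"tenant {stats.tenant_id}")
    print("  context collection:", stats.normal_collection)
    print("  memory collection:", stats.memory_collection)
```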
usecortex_ai/types/{triple_with_evidence.py → triplet_with_evidence.py}
@@ -8,7 +8,11 @@ from .entity import Entity
 from .relation_evidence import RelationEvidence
 
 
-class TripleWithEvidence(UniversalBaseModel):
+class TripletWithEvidence(UniversalBaseModel):
+    """
+    Triple with multiple evidence items from different chunks
+    """
+
     source: Entity
     target: Entity
     relations: typing.List[RelationEvidence] = pydantic.Field()
usecortex_ai/types/user_assistant_pair.py
@@ -7,6 +7,10 @@ from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
 
 
 class UserAssistantPair(UniversalBaseModel):
+    """
+    Represents a user-assistant conversation pair.
+    """
+
     user: str = pydantic.Field()
     """
     User's message in the conversation
usecortex_ai/types/{search_chunk.py → vector_store_chunk.py}
@@ -4,10 +4,9 @@ import typing
 
 import pydantic
 from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
-from .extended_context import ExtendedContext
 
 
-class SearchChunk(UniversalBaseModel):
+class VectorStoreChunk(UniversalBaseModel):
     chunk_uuid: str = pydantic.Field()
     """
     Unique identifier for this content chunk
@@ -45,12 +44,12 @@ class SearchChunk(UniversalBaseModel):
 
     layout: typing.Optional[str] = pydantic.Field(default=None)
     """
-    Layout of the chunk in original document. You will generally receive a stringified dict with 2 keys, `offsets` and `page`(optional). Offsets will have `document_level_start_index` and `page_level_start_index`(optional)
+    Layout of the chunk in original document. You will generally receive a stringified dict with 2 keys, `offsets` and `page`(optional). Offsets will have `document_level_start_index` and `page_level_start_index`(optional)
     """
 
     relevancy_score: typing.Optional[float] = pydantic.Field(default=None)
     """
-    Score indicating how relevant this chunk is to your search query, with higher values indicating better matches
+    Score indicating how relevant this chunk is to your search query, with higher values indicating better matches
     """
 
     document_metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None)
@@ -63,16 +62,6 @@ class SearchChunk(UniversalBaseModel):
     Custom metadata associated with your tenant
     """
 
-    extra_context: typing.Optional[ExtendedContext] = pydantic.Field(default=None)
-    """
-    Additional context for this chunk
-    """
-
-    graph_triplet_ids: typing.Optional[typing.List[str]] = pydantic.Field(default=None)
-    """
-    List of group IDs for triplets linked to this chunk. Lookup triplet data in graph_relations.chunk_relations[group_id]
-    """
-
     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
     else:
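
With `extra_context` and `graph_triplet_ids` removed, consumers of search results work with the remaining fields. A hedged sketch of ranking chunks and decoding the stringified `layout` (import path assumed; real chunks would come from a search call):

```python
import json
import typing

from usecortex_ai.types.vector_store_chunk import VectorStoreChunk  # path assumed


def top_chunks(chunks: typing.List[VectorStoreChunk], k: int = 5) -> typing.List[VectorStoreChunk]:
    # Higher relevancy_score means a better match; treat missing scores as 0.
    return sorted(chunks, key=lambda c: c.relevancy_score or 0.0, reverse=True)[:k]


def chunk_offsets(chunk: VectorStoreChunk) -> typing.Optional[dict]:
    # layout is documented above as a stringified dict with `offsets` and optional `page`;
    # JSON encoding is assumed here.
    return json.loads(chunk.layout)["offsets"] if chunk.layout else None
```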
usecortex_ai/upload/__init__.py
@@ -2,3 +2,6 @@
 
 # isort: skip_file
 
+from .types import BodyUploadAppIngestionUploadAppPostAppSources
+
+__all__ = ["BodyUploadAppIngestionUploadAppPostAppSources"]
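
The re-export means the app-sources body type can be imported straight from the upload package; a minimal sketch (the shape of the type itself is defined in upload/types and not shown in this diff):

```python
# The name is now re-exported from the upload package root.
from usecortex_ai.upload import BodyUploadAppIngestionUploadAppPostAppSources
```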