llama-cloud 0.1.20__py3-none-any.whl → 0.1.21__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between versions as they appear in that registry.

Note: this release of llama-cloud has been flagged as potentially problematic.
Files changed (30)
  1. llama_cloud/__init__.py +2 -124
  2. llama_cloud/resources/__init__.py +2 -41
  3. llama_cloud/resources/data_sinks/__init__.py +2 -18
  4. llama_cloud/resources/data_sinks/client.py +94 -2
  5. llama_cloud/resources/data_sinks/types/__init__.py +2 -18
  6. llama_cloud/resources/data_sinks/types/data_sink_update_component.py +7 -65
  7. llama_cloud/resources/data_sources/__init__.py +2 -30
  8. llama_cloud/resources/data_sources/types/__init__.py +1 -28
  9. llama_cloud/resources/data_sources/types/data_source_update_component.py +23 -2
  10. llama_cloud/resources/pipelines/client.py +14 -0
  11. llama_cloud/types/__init__.py +4 -102
  12. llama_cloud/types/cloud_jira_data_source.py +4 -0
  13. llama_cloud/types/data_sink_component.py +7 -65
  14. llama_cloud/types/data_sink_create_component.py +7 -65
  15. llama_cloud/types/data_source_component.py +23 -2
  16. llama_cloud/types/data_source_create_component.py +23 -2
  17. llama_cloud/types/extract_config.py +3 -0
  18. llama_cloud/types/pipeline.py +2 -0
  19. llama_cloud/types/pipeline_data_source_component.py +23 -2
  20. llama_cloud/types/pipeline_status.py +17 -0
  21. llama_cloud/types/prompt_conf.py +1 -0
  22. llama_cloud/types/supported_llm_model.py +1 -2
  23. {llama_cloud-0.1.20.dist-info → llama_cloud-0.1.21.dist-info}/METADATA +5 -3
  24. {llama_cloud-0.1.20.dist-info → llama_cloud-0.1.21.dist-info}/RECORD +26 -29
  25. {llama_cloud-0.1.20.dist-info → llama_cloud-0.1.21.dist-info}/WHEEL +1 -1
  26. llama_cloud/resources/data_sources/types/data_source_update_component_one.py +0 -122
  27. llama_cloud/types/data_source_component_one.py +0 -122
  28. llama_cloud/types/data_source_create_component_one.py +0 -122
  29. llama_cloud/types/pipeline_data_source_component_one.py +0 -122
  30. {llama_cloud-0.1.20.dist-info → llama_cloud-0.1.21.dist-info}/LICENSE +0 -0
llama_cloud/resources/pipelines/client.py

@@ -1024,6 +1024,8 @@ class PipelinesClient:
         self,
         pipeline_id: str,
         *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
         dense_similarity_top_k: typing.Optional[int] = OMIT,
         dense_similarity_cutoff: typing.Optional[float] = OMIT,
         sparse_similarity_top_k: typing.Optional[int] = OMIT,
@@ -1043,6 +1045,10 @@
         Parameters:
             - pipeline_id: str.

+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
+
             - dense_similarity_top_k: typing.Optional[int].

             - dense_similarity_cutoff: typing.Optional[float].
@@ -1109,6 +1115,7 @@
         _response = self._client_wrapper.httpx_client.request(
             "POST",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/retrieve"),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder(_request),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
@@ -2633,6 +2640,8 @@ class AsyncPipelinesClient:
         self,
         pipeline_id: str,
         *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
         dense_similarity_top_k: typing.Optional[int] = OMIT,
         dense_similarity_cutoff: typing.Optional[float] = OMIT,
         sparse_similarity_top_k: typing.Optional[int] = OMIT,
@@ -2652,6 +2661,10 @@
         Parameters:
             - pipeline_id: str.

+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
+
             - dense_similarity_top_k: typing.Optional[int].

             - dense_similarity_cutoff: typing.Optional[float].
@@ -2718,6 +2731,7 @@
         _response = await self._client_wrapper.httpx_client.request(
             "POST",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/retrieve"),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder(_request),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
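Taken together, these hunks add optional project_id and organization_id query parameters to the sync and async retrieval endpoint; remove_none_from_dict drops them from the query string when unset, so existing call sites keep working. A minimal sketch of the new usage, assuming the Fern-generated method for POST api/v1/pipelines/{pipeline_id}/retrieve is pipelines.run_search; the IDs and body parameters shown are illustrative, not taken from this diff:

from llama_cloud.client import LlamaCloud

client = LlamaCloud(token="llx-...")  # placeholder API token

# project_id and organization_id are new in 0.1.21; both default to None,
# so they are omitted from the request unless explicitly set.
results = client.pipelines.run_search(
    pipeline_id="<pipeline-id>",          # hypothetical ID
    project_id="<project-id>",            # new: scope retrieval to a project
    organization_id="<organization-id>",  # new: scope retrieval to an organization
    query="What changed in 0.1.21?",
    dense_similarity_top_k=5,
)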
llama_cloud/types/__init__.py

@@ -67,56 +67,14 @@ from .configurable_transformation_definition import ConfigurableTransformationDefinition
 from .configurable_transformation_names import ConfigurableTransformationNames
 from .credit_type import CreditType
 from .data_sink import DataSink
-from .data_sink_component import (
-    DataSinkComponent,
-    DataSinkComponent_AzureAiSearch,
-    DataSinkComponent_Milvus,
-    DataSinkComponent_MongodbAtlas,
-    DataSinkComponent_Pinecone,
-    DataSinkComponent_Postgres,
-    DataSinkComponent_Qdrant,
-)
+from .data_sink_component import DataSinkComponent
 from .data_sink_create import DataSinkCreate
-from .data_sink_create_component import (
-    DataSinkCreateComponent,
-    DataSinkCreateComponent_AzureAiSearch,
-    DataSinkCreateComponent_Milvus,
-    DataSinkCreateComponent_MongodbAtlas,
-    DataSinkCreateComponent_Pinecone,
-    DataSinkCreateComponent_Postgres,
-    DataSinkCreateComponent_Qdrant,
-)
+from .data_sink_create_component import DataSinkCreateComponent
 from .data_sink_definition import DataSinkDefinition
 from .data_source import DataSource
 from .data_source_component import DataSourceComponent
-from .data_source_component_one import (
-    DataSourceComponentOne,
-    DataSourceComponentOne_AzureStorageBlob,
-    DataSourceComponentOne_Box,
-    DataSourceComponentOne_Confluence,
-    DataSourceComponentOne_GoogleDrive,
-    DataSourceComponentOne_Jira,
-    DataSourceComponentOne_MicrosoftOnedrive,
-    DataSourceComponentOne_MicrosoftSharepoint,
-    DataSourceComponentOne_NotionPage,
-    DataSourceComponentOne_S3,
-    DataSourceComponentOne_Slack,
-)
 from .data_source_create import DataSourceCreate
 from .data_source_create_component import DataSourceCreateComponent
-from .data_source_create_component_one import (
-    DataSourceCreateComponentOne,
-    DataSourceCreateComponentOne_AzureStorageBlob,
-    DataSourceCreateComponentOne_Box,
-    DataSourceCreateComponentOne_Confluence,
-    DataSourceCreateComponentOne_GoogleDrive,
-    DataSourceCreateComponentOne_Jira,
-    DataSourceCreateComponentOne_MicrosoftOnedrive,
-    DataSourceCreateComponentOne_MicrosoftSharepoint,
-    DataSourceCreateComponentOne_NotionPage,
-    DataSourceCreateComponentOne_S3,
-    DataSourceCreateComponentOne_Slack,
-)
 from .data_source_create_custom_metadata_value import DataSourceCreateCustomMetadataValue
 from .data_source_custom_metadata_value import DataSourceCustomMetadataValue
 from .data_source_definition import DataSourceDefinition
@@ -277,19 +235,6 @@ from .pipeline_create_embedding_config import (
 from .pipeline_create_transform_config import PipelineCreateTransformConfig
 from .pipeline_data_source import PipelineDataSource
 from .pipeline_data_source_component import PipelineDataSourceComponent
-from .pipeline_data_source_component_one import (
-    PipelineDataSourceComponentOne,
-    PipelineDataSourceComponentOne_AzureStorageBlob,
-    PipelineDataSourceComponentOne_Box,
-    PipelineDataSourceComponentOne_Confluence,
-    PipelineDataSourceComponentOne_GoogleDrive,
-    PipelineDataSourceComponentOne_Jira,
-    PipelineDataSourceComponentOne_MicrosoftOnedrive,
-    PipelineDataSourceComponentOne_MicrosoftSharepoint,
-    PipelineDataSourceComponentOne_NotionPage,
-    PipelineDataSourceComponentOne_S3,
-    PipelineDataSourceComponentOne_Slack,
-)
 from .pipeline_data_source_create import PipelineDataSourceCreate
 from .pipeline_data_source_custom_metadata_value import PipelineDataSourceCustomMetadataValue
 from .pipeline_data_source_status import PipelineDataSourceStatus
@@ -316,6 +261,7 @@ from .pipeline_file_update_dispatcher_config import PipelineFileUpdateDispatcherConfig
 from .pipeline_file_updater_config import PipelineFileUpdaterConfig
 from .pipeline_managed_ingestion_job_params import PipelineManagedIngestionJobParams
 from .pipeline_metadata_config import PipelineMetadataConfig
+from .pipeline_status import PipelineStatus
 from .pipeline_transform_config import (
     PipelineTransformConfig,
     PipelineTransformConfig_Advanced,
@@ -458,47 +404,13 @@ __all__ = [
     "CreditType",
     "DataSink",
     "DataSinkComponent",
-    "DataSinkComponent_AzureAiSearch",
-    "DataSinkComponent_Milvus",
-    "DataSinkComponent_MongodbAtlas",
-    "DataSinkComponent_Pinecone",
-    "DataSinkComponent_Postgres",
-    "DataSinkComponent_Qdrant",
     "DataSinkCreate",
     "DataSinkCreateComponent",
-    "DataSinkCreateComponent_AzureAiSearch",
-    "DataSinkCreateComponent_Milvus",
-    "DataSinkCreateComponent_MongodbAtlas",
-    "DataSinkCreateComponent_Pinecone",
-    "DataSinkCreateComponent_Postgres",
-    "DataSinkCreateComponent_Qdrant",
     "DataSinkDefinition",
     "DataSource",
     "DataSourceComponent",
-    "DataSourceComponentOne",
-    "DataSourceComponentOne_AzureStorageBlob",
-    "DataSourceComponentOne_Box",
-    "DataSourceComponentOne_Confluence",
-    "DataSourceComponentOne_GoogleDrive",
-    "DataSourceComponentOne_Jira",
-    "DataSourceComponentOne_MicrosoftOnedrive",
-    "DataSourceComponentOne_MicrosoftSharepoint",
-    "DataSourceComponentOne_NotionPage",
-    "DataSourceComponentOne_S3",
-    "DataSourceComponentOne_Slack",
     "DataSourceCreate",
     "DataSourceCreateComponent",
-    "DataSourceCreateComponentOne",
-    "DataSourceCreateComponentOne_AzureStorageBlob",
-    "DataSourceCreateComponentOne_Box",
-    "DataSourceCreateComponentOne_Confluence",
-    "DataSourceCreateComponentOne_GoogleDrive",
-    "DataSourceCreateComponentOne_Jira",
-    "DataSourceCreateComponentOne_MicrosoftOnedrive",
-    "DataSourceCreateComponentOne_MicrosoftSharepoint",
-    "DataSourceCreateComponentOne_NotionPage",
-    "DataSourceCreateComponentOne_S3",
-    "DataSourceCreateComponentOne_Slack",
     "DataSourceCreateCustomMetadataValue",
     "DataSourceCustomMetadataValue",
     "DataSourceDefinition",
@@ -649,17 +561,6 @@ __all__ = [
     "PipelineCreateTransformConfig",
     "PipelineDataSource",
     "PipelineDataSourceComponent",
-    "PipelineDataSourceComponentOne",
-    "PipelineDataSourceComponentOne_AzureStorageBlob",
-    "PipelineDataSourceComponentOne_Box",
-    "PipelineDataSourceComponentOne_Confluence",
-    "PipelineDataSourceComponentOne_GoogleDrive",
-    "PipelineDataSourceComponentOne_Jira",
-    "PipelineDataSourceComponentOne_MicrosoftOnedrive",
-    "PipelineDataSourceComponentOne_MicrosoftSharepoint",
-    "PipelineDataSourceComponentOne_NotionPage",
-    "PipelineDataSourceComponentOne_S3",
-    "PipelineDataSourceComponentOne_Slack",
     "PipelineDataSourceCreate",
     "PipelineDataSourceCustomMetadataValue",
     "PipelineDataSourceStatus",
@@ -684,6 +585,7 @@ __all__ = [
     "PipelineFileUpdaterConfig",
     "PipelineManagedIngestionJobParams",
     "PipelineMetadataConfig",
+    "PipelineStatus",
     "PipelineTransformConfig",
     "PipelineTransformConfig_Advanced",
     "PipelineTransformConfig_Auto",
llama_cloud/types/cloud_jira_data_source.py

@@ -15,6 +15,10 @@ except ImportError:


 class CloudJiraDataSource(pydantic.BaseModel):
+    """
+    Cloud Jira Data Source integrating JiraReader.
+    """
+
     supports_access_control: typing.Optional[bool]
     email: typing.Optional[str]
     api_token: typing.Optional[str]
llama_cloud/types/data_sink_component.py

@@ -1,11 +1,7 @@
 # This file was auto-generated by Fern from our API Definition.

-from __future__ import annotations
-
 import typing

-import typing_extensions
-
 from .cloud_azure_ai_search_vector_store import CloudAzureAiSearchVectorStore
 from .cloud_milvus_vector_store import CloudMilvusVectorStore
 from .cloud_mongo_db_atlas_vector_search import CloudMongoDbAtlasVectorSearch
@@ -13,66 +9,12 @@ from .cloud_pinecone_vector_store import CloudPineconeVectorStore
 from .cloud_postgres_vector_store import CloudPostgresVectorStore
 from .cloud_qdrant_vector_store import CloudQdrantVectorStore

-
-class DataSinkComponent_AzureAiSearch(CloudAzureAiSearchVectorStore):
-    type: typing_extensions.Literal["azure_ai_search"]
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-
-
-class DataSinkComponent_Milvus(CloudMilvusVectorStore):
-    type: typing_extensions.Literal["milvus"]
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-
-
-class DataSinkComponent_MongodbAtlas(CloudMongoDbAtlasVectorSearch):
-    type: typing_extensions.Literal["mongodb_atlas"]
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-
-
-class DataSinkComponent_Pinecone(CloudPineconeVectorStore):
-    type: typing_extensions.Literal["pinecone"]
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-
-
-class DataSinkComponent_Postgres(CloudPostgresVectorStore):
-    type: typing_extensions.Literal["postgres"]
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-
-
-class DataSinkComponent_Qdrant(CloudQdrantVectorStore):
-    type: typing_extensions.Literal["qdrant"]
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-
-
 DataSinkComponent = typing.Union[
-    DataSinkComponent_AzureAiSearch,
-    DataSinkComponent_Milvus,
-    DataSinkComponent_MongodbAtlas,
-    DataSinkComponent_Pinecone,
-    DataSinkComponent_Postgres,
-    DataSinkComponent_Qdrant,
+    typing.Dict[str, typing.Any],
+    CloudPineconeVectorStore,
+    CloudPostgresVectorStore,
+    CloudQdrantVectorStore,
+    CloudAzureAiSearchVectorStore,
+    CloudMongoDbAtlasVectorSearch,
+    CloudMilvusVectorStore,
 ]
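This release flattens the discriminated union: the generated DataSinkComponent_* wrapper classes (with their `type` literals) are deleted, and DataSinkComponent becomes a plain typing.Union over the concrete vector-store models, with typing.Dict[str, typing.Any] as an escape hatch. A sketch of what that means for calling code; the field names on CloudPineconeVectorStore are illustrative, not taken from this diff:

from llama_cloud import CloudPineconeVectorStore
from llama_cloud.types.data_sink_component import DataSinkComponent

# Pre-0.1.21 this required DataSinkComponent_Pinecone(type="pinecone", ...);
# now the concrete model, or even a raw dict, satisfies the union directly.
component: DataSinkComponent = CloudPineconeVectorStore(index_name="my-index")  # hypothetical field
raw: DataSinkComponent = {"index_name": "my-index"}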
llama_cloud/types/data_sink_create_component.py

@@ -1,11 +1,7 @@
 # This file was auto-generated by Fern from our API Definition.

-from __future__ import annotations
-
 import typing

-import typing_extensions
-
 from .cloud_azure_ai_search_vector_store import CloudAzureAiSearchVectorStore
 from .cloud_milvus_vector_store import CloudMilvusVectorStore
 from .cloud_mongo_db_atlas_vector_search import CloudMongoDbAtlasVectorSearch
@@ -13,66 +9,12 @@ from .cloud_pinecone_vector_store import CloudPineconeVectorStore
 from .cloud_postgres_vector_store import CloudPostgresVectorStore
 from .cloud_qdrant_vector_store import CloudQdrantVectorStore

-
-class DataSinkCreateComponent_AzureAiSearch(CloudAzureAiSearchVectorStore):
-    type: typing_extensions.Literal["azure_ai_search"]
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-
-
-class DataSinkCreateComponent_Milvus(CloudMilvusVectorStore):
-    type: typing_extensions.Literal["milvus"]
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-
-
-class DataSinkCreateComponent_MongodbAtlas(CloudMongoDbAtlasVectorSearch):
-    type: typing_extensions.Literal["mongodb_atlas"]
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-
-
-class DataSinkCreateComponent_Pinecone(CloudPineconeVectorStore):
-    type: typing_extensions.Literal["pinecone"]
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-
-
-class DataSinkCreateComponent_Postgres(CloudPostgresVectorStore):
-    type: typing_extensions.Literal["postgres"]
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-
-
-class DataSinkCreateComponent_Qdrant(CloudQdrantVectorStore):
-    type: typing_extensions.Literal["qdrant"]
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-
-
 DataSinkCreateComponent = typing.Union[
-    DataSinkCreateComponent_AzureAiSearch,
-    DataSinkCreateComponent_Milvus,
-    DataSinkCreateComponent_MongodbAtlas,
-    DataSinkCreateComponent_Pinecone,
-    DataSinkCreateComponent_Postgres,
-    DataSinkCreateComponent_Qdrant,
+    typing.Dict[str, typing.Any],
+    CloudPineconeVectorStore,
+    CloudPostgresVectorStore,
+    CloudQdrantVectorStore,
+    CloudAzureAiSearchVectorStore,
+    CloudMongoDbAtlasVectorSearch,
+    CloudMilvusVectorStore,
 ]
llama_cloud/types/data_source_component.py

@@ -2,6 +2,27 @@

 import typing

-from .data_source_component_one import DataSourceComponentOne
+from .cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
+from .cloud_box_data_source import CloudBoxDataSource
+from .cloud_confluence_data_source import CloudConfluenceDataSource
+from .cloud_google_drive_data_source import CloudGoogleDriveDataSource
+from .cloud_jira_data_source import CloudJiraDataSource
+from .cloud_notion_page_data_source import CloudNotionPageDataSource
+from .cloud_one_drive_data_source import CloudOneDriveDataSource
+from .cloud_s_3_data_source import CloudS3DataSource
+from .cloud_sharepoint_data_source import CloudSharepointDataSource
+from .cloud_slack_data_source import CloudSlackDataSource

-DataSourceComponent = typing.Union[typing.Dict[str, typing.Any], DataSourceComponentOne]
+DataSourceComponent = typing.Union[
+    typing.Dict[str, typing.Any],
+    CloudS3DataSource,
+    CloudAzStorageBlobDataSource,
+    CloudGoogleDriveDataSource,
+    CloudOneDriveDataSource,
+    CloudSharepointDataSource,
+    CloudSlackDataSource,
+    CloudNotionPageDataSource,
+    CloudConfluenceDataSource,
+    CloudJiraDataSource,
+    CloudBoxDataSource,
+]
llama_cloud/types/data_source_create_component.py

@@ -2,6 +2,27 @@

 import typing

-from .data_source_create_component_one import DataSourceCreateComponentOne
+from .cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
+from .cloud_box_data_source import CloudBoxDataSource
+from .cloud_confluence_data_source import CloudConfluenceDataSource
+from .cloud_google_drive_data_source import CloudGoogleDriveDataSource
+from .cloud_jira_data_source import CloudJiraDataSource
+from .cloud_notion_page_data_source import CloudNotionPageDataSource
+from .cloud_one_drive_data_source import CloudOneDriveDataSource
+from .cloud_s_3_data_source import CloudS3DataSource
+from .cloud_sharepoint_data_source import CloudSharepointDataSource
+from .cloud_slack_data_source import CloudSlackDataSource

-DataSourceCreateComponent = typing.Union[typing.Dict[str, typing.Any], DataSourceCreateComponentOne]
+DataSourceCreateComponent = typing.Union[
+    typing.Dict[str, typing.Any],
+    CloudS3DataSource,
+    CloudAzStorageBlobDataSource,
+    CloudGoogleDriveDataSource,
+    CloudOneDriveDataSource,
+    CloudSharepointDataSource,
+    CloudSlackDataSource,
+    CloudNotionPageDataSource,
+    CloudConfluenceDataSource,
+    CloudJiraDataSource,
+    CloudBoxDataSource,
+]
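The data-source unions get the same treatment: the *ComponentOne indirection is deleted outright (files 26 to 29 in the list above), and each union now names the ten Cloud*DataSource models directly. A sketch under the same caveat that the reader field shown is illustrative:

from llama_cloud import CloudS3DataSource
from llama_cloud.types.data_source_create_component import DataSourceCreateComponent

# Any of the listed Cloud* models, or a raw dict, now typechecks directly,
# with no DataSourceCreateComponentOne_* wrapper in between.
component: DataSourceCreateComponent = CloudS3DataSource(bucket="my-bucket")  # hypothetical field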
llama_cloud/types/extract_config.py

@@ -26,6 +26,9 @@ class ExtractConfig(pydantic.BaseModel):
     system_prompt: typing.Optional[str]
     use_reasoning: typing.Optional[bool] = pydantic.Field(description="Whether to use reasoning for the extraction.")
     cite_sources: typing.Optional[bool] = pydantic.Field(description="Whether to cite sources for the extraction.")
+    invalidate_cache: typing.Optional[bool] = pydantic.Field(
+        description="Whether to invalidate the cache for the extraction."
+    )

     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
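The new flag sits alongside the existing optional toggles, so constructing an ExtractConfig is unchanged apart from the extra field. A minimal sketch, assuming the usual top-level re-export:

from llama_cloud import ExtractConfig

# Ask the service to redo the extraction instead of serving cached results.
config = ExtractConfig(
    use_reasoning=True,
    cite_sources=True,
    invalidate_cache=True,  # new in 0.1.21
)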
llama_cloud/types/pipeline.py

@@ -10,6 +10,7 @@ from .llama_parse_parameters import LlamaParseParameters
 from .pipeline_configuration_hashes import PipelineConfigurationHashes
 from .pipeline_embedding_config import PipelineEmbeddingConfig
 from .pipeline_metadata_config import PipelineMetadataConfig
+from .pipeline_status import PipelineStatus
 from .pipeline_transform_config import PipelineTransformConfig
 from .pipeline_type import PipelineType
 from .preset_retrieval_params import PresetRetrievalParams
@@ -51,6 +52,7 @@ class Pipeline(pydantic.BaseModel):
     )
     llama_parse_parameters: typing.Optional[LlamaParseParameters]
     data_sink: typing.Optional[DataSink]
+    status: typing.Optional[PipelineStatus]
     metadata_config: typing.Optional[PipelineMetadataConfig]

     def json(self, **kwargs: typing.Any) -> str:
llama_cloud/types/pipeline_data_source_component.py

@@ -2,6 +2,27 @@

 import typing

-from .pipeline_data_source_component_one import PipelineDataSourceComponentOne
+from .cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
+from .cloud_box_data_source import CloudBoxDataSource
+from .cloud_confluence_data_source import CloudConfluenceDataSource
+from .cloud_google_drive_data_source import CloudGoogleDriveDataSource
+from .cloud_jira_data_source import CloudJiraDataSource
+from .cloud_notion_page_data_source import CloudNotionPageDataSource
+from .cloud_one_drive_data_source import CloudOneDriveDataSource
+from .cloud_s_3_data_source import CloudS3DataSource
+from .cloud_sharepoint_data_source import CloudSharepointDataSource
+from .cloud_slack_data_source import CloudSlackDataSource

-PipelineDataSourceComponent = typing.Union[typing.Dict[str, typing.Any], PipelineDataSourceComponentOne]
+PipelineDataSourceComponent = typing.Union[
+    typing.Dict[str, typing.Any],
+    CloudS3DataSource,
+    CloudAzStorageBlobDataSource,
+    CloudGoogleDriveDataSource,
+    CloudOneDriveDataSource,
+    CloudSharepointDataSource,
+    CloudSlackDataSource,
+    CloudNotionPageDataSource,
+    CloudConfluenceDataSource,
+    CloudJiraDataSource,
+    CloudBoxDataSource,
+]
llama_cloud/types/pipeline_status.py (new file)

@@ -0,0 +1,17 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class PipelineStatus(str, enum.Enum):
+    CREATED = "CREATED"
+    DELETING = "DELETING"
+
+    def visit(self, created: typing.Callable[[], T_Result], deleting: typing.Callable[[], T_Result]) -> T_Result:
+        if self is PipelineStatus.CREATED:
+            return created()
+        if self is PipelineStatus.DELETING:
+            return deleting()
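PipelineStatus follows the enum pattern Fern generates elsewhere in this package: a str-valued Enum with a visit() dispatcher that takes one callable per variant. Together with the new Pipeline.status field added above, callers might branch on it like this (the describe helper is ours, not part of the package):

from llama_cloud.types.pipeline_status import PipelineStatus

def describe(status: PipelineStatus) -> str:
    # visit() requires a handler for every variant, so a status added
    # in a future release surfaces as a type error at each call site.
    return status.visit(
        created=lambda: "pipeline is live",
        deleting=lambda: "pipeline is being torn down",
    )

print(describe(PipelineStatus.CREATED))  # -> pipeline is live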
llama_cloud/types/prompt_conf.py

@@ -22,6 +22,7 @@ class PromptConf(pydantic.BaseModel):
     cite_sources_prompt: typing.Optional[typing.Dict[str, str]] = pydantic.Field(
         description="The prompt to use for citing sources."
     )
+    scratchpad_prompt: typing.Optional[str] = pydantic.Field(description="The prompt to use for scratchpad.")

     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
llama_cloud/types/supported_llm_model.py

@@ -5,7 +5,6 @@ import typing

 from ..core.datetime_utils import serialize_datetime
 from .llm_model_data import LlmModelData
-from .supported_llm_model_names import SupportedLlmModelNames

 try:
     import pydantic
@@ -21,7 +20,7 @@ class SupportedLlmModel(pydantic.BaseModel):
     Response Schema for a supported eval LLM model.
     """

-    name: SupportedLlmModelNames = pydantic.Field(description="The name of the supported LLM model.")
+    name: str = pydantic.Field(description="The name of the supported LLM model.")
     enabled: typing.Optional[bool] = pydantic.Field(
         description="Whether the LLM model is enabled for use in LlamaCloud."
     )
{llama_cloud-0.1.20.dist-info → llama_cloud-0.1.21.dist-info}/METADATA

@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.3
 Name: llama-cloud
-Version: 0.1.20
+Version: 0.1.21
 Summary:
 License: MIT
 Author: Logan Markewich
@@ -13,6 +13,7 @@ Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Requires-Dist: certifi (>=2024.7.4)
 Requires-Dist: httpx (>=0.20.0)
 Requires-Dist: pydantic (>=1.10)
@@ -26,6 +27,7 @@ To publish:
 - update the version in `pyproject.toml`
 - run `poetry publish --build`

-Credentials:
+Setup credentials:
 - run `poetry config pypi-token.pypi <my-token>`
+- Get token form PyPi once logged in with credentials in [1Password](https://start.1password.com/open/i?a=32SA66TZ3JCRXOCMASLSDCT5TI&v=lhv7hvb5o46cwo257c3hviqkle&i=yvslwei7jtf6tgqamzcdantqi4&h=llamaindex.1password.com)