llama-cloud 0.1.19__py3-none-any.whl → 0.1.20__py3-none-any.whl

This diff represents the content of publicly available package versions released to a supported registry, and is provided for informational purposes only.

Note: this version of llama-cloud has been flagged as potentially problematic.

Files changed (52)
  1. llama_cloud/__init__.py +166 -26
  2. llama_cloud/resources/__init__.py +41 -2
  3. llama_cloud/resources/data_sinks/__init__.py +18 -2
  4. llama_cloud/resources/data_sinks/client.py +2 -94
  5. llama_cloud/resources/data_sinks/types/__init__.py +18 -2
  6. llama_cloud/resources/data_sinks/types/data_sink_update_component.py +65 -7
  7. llama_cloud/resources/data_sources/__init__.py +30 -2
  8. llama_cloud/resources/data_sources/types/__init__.py +28 -1
  9. llama_cloud/resources/data_sources/types/data_source_update_component.py +2 -23
  10. llama_cloud/resources/data_sources/types/data_source_update_component_one.py +122 -0
  11. llama_cloud/resources/files/client.py +18 -4
  12. llama_cloud/resources/parsing/client.py +8 -0
  13. llama_cloud/resources/pipelines/client.py +11 -11
  14. llama_cloud/types/__init__.py +146 -28
  15. llama_cloud/types/cloud_jira_data_source.py +0 -4
  16. llama_cloud/types/data_sink_component.py +65 -7
  17. llama_cloud/types/data_sink_create_component.py +65 -7
  18. llama_cloud/types/data_source_component.py +2 -23
  19. llama_cloud/types/data_source_component_one.py +122 -0
  20. llama_cloud/types/data_source_create_component.py +2 -23
  21. llama_cloud/types/data_source_create_component_one.py +122 -0
  22. llama_cloud/types/{base_prompt_template.py → data_source_update_dispatcher_config.py} +9 -7
  23. llama_cloud/types/{node_parser.py → delete_params.py} +7 -9
  24. llama_cloud/types/document_ingestion_job_params.py +43 -0
  25. llama_cloud/types/job_record.py +2 -2
  26. llama_cloud/types/job_record_parameters.py +111 -0
  27. llama_cloud/types/{page_splitter_node_parser.py → l_lama_parse_transform_config.py} +5 -10
  28. llama_cloud/types/legacy_parse_job_config.py +189 -0
  29. llama_cloud/types/llama_parse_parameters.py +1 -0
  30. llama_cloud/types/load_files_job_config.py +35 -0
  31. llama_cloud/types/parse_job_config.py +134 -0
  32. llama_cloud/types/pipeline.py +2 -4
  33. llama_cloud/types/pipeline_create.py +2 -2
  34. llama_cloud/types/pipeline_data_source_component.py +2 -23
  35. llama_cloud/types/pipeline_data_source_component_one.py +122 -0
  36. llama_cloud/types/pipeline_file_update_dispatcher_config.py +38 -0
  37. llama_cloud/types/{configured_transformation_item.py → pipeline_file_updater_config.py} +13 -12
  38. llama_cloud/types/pipeline_managed_ingestion_job_params.py +37 -0
  39. llama_cloud/types/pipeline_metadata_config.py +36 -0
  40. {llama_cloud-0.1.19.dist-info → llama_cloud-0.1.20.dist-info}/METADATA +4 -2
  41. {llama_cloud-0.1.19.dist-info → llama_cloud-0.1.20.dist-info}/RECORD +43 -40
  42. {llama_cloud-0.1.19.dist-info → llama_cloud-0.1.20.dist-info}/WHEEL +1 -1
  43. llama_cloud/types/character_splitter.py +0 -46
  44. llama_cloud/types/code_splitter.py +0 -50
  45. llama_cloud/types/configured_transformation_item_component.py +0 -22
  46. llama_cloud/types/llm.py +0 -60
  47. llama_cloud/types/markdown_element_node_parser.py +0 -51
  48. llama_cloud/types/markdown_node_parser.py +0 -52
  49. llama_cloud/types/pydantic_program_mode.py +0 -41
  50. llama_cloud/types/sentence_splitter.py +0 -50
  51. llama_cloud/types/token_text_splitter.py +0 -50
  52. {llama_cloud-0.1.19.dist-info → llama_cloud-0.1.20.dist-info}/LICENSE +0 -0
llama_cloud/types/data_source_component_one.py (new)
@@ -0,0 +1,122 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import typing_extensions
+
+from .cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
+from .cloud_box_data_source import CloudBoxDataSource
+from .cloud_confluence_data_source import CloudConfluenceDataSource
+from .cloud_google_drive_data_source import CloudGoogleDriveDataSource
+from .cloud_jira_data_source import CloudJiraDataSource
+from .cloud_notion_page_data_source import CloudNotionPageDataSource
+from .cloud_one_drive_data_source import CloudOneDriveDataSource
+from .cloud_s_3_data_source import CloudS3DataSource
+from .cloud_sharepoint_data_source import CloudSharepointDataSource
+from .cloud_slack_data_source import CloudSlackDataSource
+
+
+class DataSourceComponentOne_AzureStorageBlob(CloudAzStorageBlobDataSource):
+    type: typing_extensions.Literal["AZURE_STORAGE_BLOB"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class DataSourceComponentOne_Box(CloudBoxDataSource):
+    type: typing_extensions.Literal["BOX"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class DataSourceComponentOne_Confluence(CloudConfluenceDataSource):
+    type: typing_extensions.Literal["CONFLUENCE"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class DataSourceComponentOne_GoogleDrive(CloudGoogleDriveDataSource):
+    type: typing_extensions.Literal["GOOGLE_DRIVE"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class DataSourceComponentOne_Jira(CloudJiraDataSource):
+    type: typing_extensions.Literal["JIRA"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class DataSourceComponentOne_MicrosoftOnedrive(CloudOneDriveDataSource):
+    type: typing_extensions.Literal["MICROSOFT_ONEDRIVE"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class DataSourceComponentOne_MicrosoftSharepoint(CloudSharepointDataSource):
+    type: typing_extensions.Literal["MICROSOFT_SHAREPOINT"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class DataSourceComponentOne_NotionPage(CloudNotionPageDataSource):
+    type: typing_extensions.Literal["NOTION_PAGE"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class DataSourceComponentOne_S3(CloudS3DataSource):
+    type: typing_extensions.Literal["S3"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class DataSourceComponentOne_Slack(CloudSlackDataSource):
+    type: typing_extensions.Literal["SLACK"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+DataSourceComponentOne = typing.Union[
+    DataSourceComponentOne_AzureStorageBlob,
+    DataSourceComponentOne_Box,
+    DataSourceComponentOne_Confluence,
+    DataSourceComponentOne_GoogleDrive,
+    DataSourceComponentOne_Jira,
+    DataSourceComponentOne_MicrosoftOnedrive,
+    DataSourceComponentOne_MicrosoftSharepoint,
+    DataSourceComponentOne_NotionPage,
+    DataSourceComponentOne_S3,
+    DataSourceComponentOne_Slack,
+]
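
These new *_One modules are Fern-style tagged unions: each variant subclasses the matching data source model and pins a `type` literal, which is what lets pydantic resolve the right member. A minimal usage sketch, assuming a pydantic v1 runtime (matching the generated import shim) and an illustrative `bucket` field on the S3 model:

# Hypothetical sketch; the "bucket" field name is an assumption for illustration.
import pydantic  # assumes v1 semantics; with pydantic 2.x installed, use pydantic.v1

from llama_cloud.types.data_source_component_one import (
    DataSourceComponentOne,
    DataSourceComponentOne_S3,
)

# parse_obj_as tries the union members; only the variant whose `type`
# literal matches validates, so the payload lands on the S3 variant.
component = pydantic.parse_obj_as(
    DataSourceComponentOne,
    {"type": "S3", "bucket": "my-bucket"},
)
assert isinstance(component, DataSourceComponentOne_S3)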
llama_cloud/types/data_source_create_component.py
@@ -2,27 +2,6 @@

 import typing

-from .cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
-from .cloud_box_data_source import CloudBoxDataSource
-from .cloud_confluence_data_source import CloudConfluenceDataSource
-from .cloud_google_drive_data_source import CloudGoogleDriveDataSource
-from .cloud_jira_data_source import CloudJiraDataSource
-from .cloud_notion_page_data_source import CloudNotionPageDataSource
-from .cloud_one_drive_data_source import CloudOneDriveDataSource
-from .cloud_s_3_data_source import CloudS3DataSource
-from .cloud_sharepoint_data_source import CloudSharepointDataSource
-from .cloud_slack_data_source import CloudSlackDataSource
+from .data_source_create_component_one import DataSourceCreateComponentOne

-DataSourceCreateComponent = typing.Union[
-    typing.Dict[str, typing.Any],
-    CloudS3DataSource,
-    CloudAzStorageBlobDataSource,
-    CloudGoogleDriveDataSource,
-    CloudOneDriveDataSource,
-    CloudSharepointDataSource,
-    CloudSlackDataSource,
-    CloudNotionPageDataSource,
-    CloudConfluenceDataSource,
-    CloudJiraDataSource,
-    CloudBoxDataSource,
-]
+DataSourceCreateComponent = typing.Union[typing.Dict[str, typing.Any], DataSourceCreateComponentOne]
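
Note that the outer alias still admits a plain typing.Dict[str, typing.Any], so existing callers that pass raw dicts keep working; only the typed half of the union moved into the new tagged-union module. A hedged sketch of the two accepted shapes (the "bucket" field name is an assumption):

# Sketch only; "bucket" is an assumed field on the S3 data source model.
from llama_cloud.types.data_source_create_component import DataSourceCreateComponent
from llama_cloud.types.data_source_create_component_one import DataSourceCreateComponentOne_S3

raw: DataSourceCreateComponent = {"type": "S3", "bucket": "my-bucket"}
typed: DataSourceCreateComponent = DataSourceCreateComponentOne_S3(type="S3", bucket="my-bucket")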
llama_cloud/types/data_source_create_component_one.py (new)
@@ -0,0 +1,122 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import typing_extensions
+
+from .cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
+from .cloud_box_data_source import CloudBoxDataSource
+from .cloud_confluence_data_source import CloudConfluenceDataSource
+from .cloud_google_drive_data_source import CloudGoogleDriveDataSource
+from .cloud_jira_data_source import CloudJiraDataSource
+from .cloud_notion_page_data_source import CloudNotionPageDataSource
+from .cloud_one_drive_data_source import CloudOneDriveDataSource
+from .cloud_s_3_data_source import CloudS3DataSource
+from .cloud_sharepoint_data_source import CloudSharepointDataSource
+from .cloud_slack_data_source import CloudSlackDataSource
+
+
+class DataSourceCreateComponentOne_AzureStorageBlob(CloudAzStorageBlobDataSource):
+    type: typing_extensions.Literal["AZURE_STORAGE_BLOB"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class DataSourceCreateComponentOne_Box(CloudBoxDataSource):
+    type: typing_extensions.Literal["BOX"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class DataSourceCreateComponentOne_Confluence(CloudConfluenceDataSource):
+    type: typing_extensions.Literal["CONFLUENCE"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class DataSourceCreateComponentOne_GoogleDrive(CloudGoogleDriveDataSource):
+    type: typing_extensions.Literal["GOOGLE_DRIVE"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class DataSourceCreateComponentOne_Jira(CloudJiraDataSource):
+    type: typing_extensions.Literal["JIRA"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class DataSourceCreateComponentOne_MicrosoftOnedrive(CloudOneDriveDataSource):
+    type: typing_extensions.Literal["MICROSOFT_ONEDRIVE"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class DataSourceCreateComponentOne_MicrosoftSharepoint(CloudSharepointDataSource):
+    type: typing_extensions.Literal["MICROSOFT_SHAREPOINT"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class DataSourceCreateComponentOne_NotionPage(CloudNotionPageDataSource):
+    type: typing_extensions.Literal["NOTION_PAGE"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class DataSourceCreateComponentOne_S3(CloudS3DataSource):
+    type: typing_extensions.Literal["S3"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class DataSourceCreateComponentOne_Slack(CloudSlackDataSource):
+    type: typing_extensions.Literal["SLACK"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+DataSourceCreateComponentOne = typing.Union[
+    DataSourceCreateComponentOne_AzureStorageBlob,
+    DataSourceCreateComponentOne_Box,
+    DataSourceCreateComponentOne_Confluence,
+    DataSourceCreateComponentOne_GoogleDrive,
+    DataSourceCreateComponentOne_Jira,
+    DataSourceCreateComponentOne_MicrosoftOnedrive,
+    DataSourceCreateComponentOne_MicrosoftSharepoint,
+    DataSourceCreateComponentOne_NotionPage,
+    DataSourceCreateComponentOne_S3,
+    DataSourceCreateComponentOne_Slack,
+]
llama_cloud/types/{base_prompt_template.py → data_source_update_dispatcher_config.py}
@@ -4,6 +4,7 @@ import datetime as dt
 import typing

 from ..core.datetime_utils import serialize_datetime
+from .delete_params import DeleteParams

 try:
     import pydantic
@@ -14,13 +14,14 @@ except ImportError:
     import pydantic  # type: ignore


-class BasePromptTemplate(pydantic.BaseModel):
-    metadata: typing.Dict[str, typing.Any]
-    template_vars: typing.List[str]
-    kwargs: typing.Dict[str, str]
-    output_parser: typing.Any
-    template_var_mappings: typing.Optional[typing.Dict[str, typing.Any]]
-    function_mappings: typing.Optional[typing.Dict[str, typing.Optional[str]]]
+class DataSourceUpdateDispatcherConfig(pydantic.BaseModel):
+    """
+    Schema for the parameters of a data source dispatcher job.
+    """
+
+    should_delete: typing.Optional[bool]
+    custom_metadata: typing.Optional[typing.Dict[str, typing.Any]]
+    delete_info: typing.Optional[DeleteParams]

     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
llama_cloud/types/{node_parser.py → delete_params.py}
@@ -14,18 +14,16 @@ except ImportError:
     import pydantic  # type: ignore


-class NodeParser(pydantic.BaseModel):
+class DeleteParams(pydantic.BaseModel):
     """
-    Base interface for node parser.
+    Schema for the parameters of a delete job.
     """

-    include_metadata: typing.Optional[bool] = pydantic.Field(
-        description="Whether or not to consider metadata when splitting."
-    )
-    include_prev_next_rel: typing.Optional[bool] = pydantic.Field(description="Include prev/next node relationships.")
-    callback_manager: typing.Optional[typing.Any]
-    id_func: typing.Optional[str]
-    class_name: typing.Optional[str]
+    document_ids_to_delete: typing.Optional[typing.List[str]]
+    files_ids_to_delete: typing.Optional[typing.List[str]]
+    data_sources_ids_to_delete: typing.Optional[typing.List[str]]
+    embed_collection_name: typing.Optional[str]
+    data_sink_id: typing.Optional[str]

     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
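
DeleteParams is the shared payload for delete work across the new job configs (it reappears as delete_info below). All of its fields are optional and the frozen config makes instances immutable; a small sketch:

from llama_cloud.types.delete_params import DeleteParams

params = DeleteParams(document_ids_to_delete=["doc-1", "doc-2"])
# The overridden json()/dict() default to by_alias + exclude_unset,
# so the unset optional fields are dropped from the output.
print(params.json())  # {"document_ids_to_delete": ["doc-1", "doc-2"]}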
llama_cloud/types/document_ingestion_job_params.py (new)
@@ -0,0 +1,43 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .delete_params import DeleteParams
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class DocumentIngestionJobParams(pydantic.BaseModel):
+    """
+    Schema for the parameters of a document ingestion job.
+    """
+
+    custom_metadata: typing.Optional[typing.Dict[str, typing.Any]]
+    resource_info: typing.Optional[typing.Dict[str, typing.Any]]
+    should_delete: typing.Optional[bool]
+    document_ids: typing.Optional[typing.List[str]]
+    pipeline_file_id: typing.Optional[str]
+    delete_info: typing.Optional[DeleteParams]
+    is_new_file: typing.Optional[bool] = pydantic.Field(description="Whether the file is new")
+    page_count: typing.Optional[int]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
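
The new job params model composes DeleteParams directly, so a delete-and-reingest payload can be expressed as one typed object. A sketch (the ids are illustrative):

from llama_cloud.types.delete_params import DeleteParams
from llama_cloud.types.document_ingestion_job_params import DocumentIngestionJobParams

# All fields are optional, so a partial payload validates.
job = DocumentIngestionJobParams(
    pipeline_file_id="file-123",  # illustrative id
    should_delete=True,
    delete_info=DeleteParams(document_ids_to_delete=["doc-1"]),
)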
llama_cloud/types/job_record.py
@@ -5,6 +5,7 @@ import typing

 from ..core.datetime_utils import serialize_datetime
 from .job_names import JobNames
+from .job_record_parameters import JobRecordParameters
 from .status_enum import StatusEnum

 try:
@@ -25,7 +26,7 @@ class JobRecord(pydantic.BaseModel):
     partitions: typing.Dict[str, str] = pydantic.Field(
         description="The partitions for this execution. Used for determining where to save job output."
     )
-    parameters: typing.Optional[typing.Any]
+    parameters: typing.Optional[JobRecordParameters]
     session_id: typing.Optional[str]
     correlation_id: typing.Optional[str]
     parent_job_execution_id: typing.Optional[str]
@@ -40,7 +41,6 @@
     started_at: typing.Optional[dt.datetime]
     ended_at: typing.Optional[dt.datetime]
     updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
-    data: typing.Optional[typing.Any]

     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
llama_cloud/types/job_record_parameters.py (new)
@@ -0,0 +1,111 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import typing_extensions
+
+from .data_source_update_dispatcher_config import DataSourceUpdateDispatcherConfig
+from .document_ingestion_job_params import DocumentIngestionJobParams
+from .l_lama_parse_transform_config import LLamaParseTransformConfig
+from .legacy_parse_job_config import LegacyParseJobConfig
+from .load_files_job_config import LoadFilesJobConfig
+from .parse_job_config import ParseJobConfig
+from .pipeline_file_update_dispatcher_config import PipelineFileUpdateDispatcherConfig
+from .pipeline_file_updater_config import PipelineFileUpdaterConfig
+from .pipeline_managed_ingestion_job_params import PipelineManagedIngestionJobParams
+
+
+class JobRecordParameters_DataSourceUpdateDispatcher(DataSourceUpdateDispatcherConfig):
+    type: typing_extensions.Literal["data_source_update_dispatcher"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class JobRecordParameters_DocumentIngestion(DocumentIngestionJobParams):
+    type: typing_extensions.Literal["document_ingestion"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class JobRecordParameters_LegacyParse(LegacyParseJobConfig):
+    type: typing_extensions.Literal["legacy_parse"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class JobRecordParameters_LlamaParseTransform(LLamaParseTransformConfig):
+    type: typing_extensions.Literal["llama_parse_transform"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class JobRecordParameters_LoadFiles(LoadFilesJobConfig):
+    type: typing_extensions.Literal["load_files"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class JobRecordParameters_Parse(ParseJobConfig):
+    type: typing_extensions.Literal["parse"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class JobRecordParameters_PipelineFileUpdateDispatcher(PipelineFileUpdateDispatcherConfig):
+    type: typing_extensions.Literal["pipeline_file_update_dispatcher"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class JobRecordParameters_PipelineFileUpdater(PipelineFileUpdaterConfig):
+    type: typing_extensions.Literal["pipeline_file_updater"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class JobRecordParameters_PipelineManagedIngestion(PipelineManagedIngestionJobParams):
+    type: typing_extensions.Literal["pipeline_managed_ingestion"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+JobRecordParameters = typing.Union[
+    JobRecordParameters_DataSourceUpdateDispatcher,
+    JobRecordParameters_DocumentIngestion,
+    JobRecordParameters_LegacyParse,
+    JobRecordParameters_LlamaParseTransform,
+    JobRecordParameters_LoadFiles,
+    JobRecordParameters_Parse,
+    JobRecordParameters_PipelineFileUpdateDispatcher,
+    JobRecordParameters_PipelineFileUpdater,
+    JobRecordParameters_PipelineManagedIngestion,
+]
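
With JobRecord.parameters now typed as this union (see the job_record.py hunks above) instead of typing.Any, consumers can narrow on the variant class rather than probing untyped data; a sketch:

from llama_cloud.types.job_record_parameters import (
    JobRecordParameters,
    JobRecordParameters_DocumentIngestion,
)

def describe(parameters: JobRecordParameters) -> str:
    # Each variant carries its `type` literal, so isinstance checks narrow cleanly.
    if isinstance(parameters, JobRecordParameters_DocumentIngestion):
        return f"document ingestion of {parameters.document_ids or []}"
    return parameters.type

example = JobRecordParameters_DocumentIngestion(type="document_ingestion")
print(describe(example))  # document ingestion of []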
llama_cloud/types/{page_splitter_node_parser.py → l_lama_parse_transform_config.py}
@@ -14,19 +14,14 @@ except ImportError:
     import pydantic  # type: ignore


-class PageSplitterNodeParser(pydantic.BaseModel):
+class LLamaParseTransformConfig(pydantic.BaseModel):
     """
-    Split text into pages.
+    Schema for the parameters of llamaparse transform job.
     """

-    include_metadata: typing.Optional[bool] = pydantic.Field(
-        description="Whether or not to consider metadata when splitting."
-    )
-    include_prev_next_rel: typing.Optional[bool] = pydantic.Field(description="Include prev/next node relationships.")
-    callback_manager: typing.Optional[typing.Any]
-    id_func: typing.Optional[str]
-    page_separator: typing.Optional[str]
-    class_name: typing.Optional[str]
+    custom_metadata: typing.Optional[typing.Dict[str, typing.Any]]
+    resource_info: typing.Optional[typing.Dict[str, typing.Any]]
+    file_output: str = pydantic.Field(description="Whether to delete the files")

     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
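
Unlike most fields in these generated configs, file_output is a required str, so constructing LLamaParseTransformConfig without it raises a validation error. A closing sketch (the value is illustrative):

from llama_cloud.types.l_lama_parse_transform_config import LLamaParseTransformConfig

# file_output must be supplied; omitting it fails validation.
config = LLamaParseTransformConfig(file_output="s3://bucket/parsed-output")  # illustrative value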