llama-cloud 0.1.5__py3-none-any.whl → 0.1.7a1__py3-none-any.whl

This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.

Potentially problematic release: this version of llama-cloud might be problematic.

Files changed (129)
  1. llama_cloud/__init__.py +138 -2
  2. llama_cloud/client.py +15 -0
  3. llama_cloud/resources/__init__.py +17 -1
  4. llama_cloud/resources/chat_apps/__init__.py +2 -0
  5. llama_cloud/resources/chat_apps/client.py +620 -0
  6. llama_cloud/resources/data_sinks/client.py +2 -2
  7. llama_cloud/resources/data_sources/client.py +2 -2
  8. llama_cloud/resources/embedding_model_configs/client.py +4 -4
  9. llama_cloud/resources/files/__init__.py +2 -2
  10. llama_cloud/resources/files/client.py +21 -0
  11. llama_cloud/resources/files/types/__init__.py +2 -1
  12. llama_cloud/resources/files/types/file_create_permission_info_value.py +7 -0
  13. llama_cloud/resources/jobs/__init__.py +2 -0
  14. llama_cloud/resources/jobs/client.py +148 -0
  15. llama_cloud/resources/llama_extract/__init__.py +5 -0
  16. llama_cloud/resources/llama_extract/client.py +1038 -0
  17. llama_cloud/resources/llama_extract/types/__init__.py +6 -0
  18. llama_cloud/resources/llama_extract/types/extract_agent_create_data_schema_value.py +7 -0
  19. llama_cloud/resources/llama_extract/types/extract_agent_update_data_schema_value.py +7 -0
  20. llama_cloud/resources/organizations/client.py +14 -14
  21. llama_cloud/resources/parsing/client.py +480 -229
  22. llama_cloud/resources/pipelines/client.py +182 -126
  23. llama_cloud/resources/projects/client.py +210 -102
  24. llama_cloud/resources/reports/__init__.py +5 -0
  25. llama_cloud/resources/reports/client.py +1198 -0
  26. llama_cloud/resources/reports/types/__init__.py +7 -0
  27. llama_cloud/resources/reports/types/update_report_plan_api_v_1_reports_report_id_plan_patch_request_action.py +25 -0
  28. llama_cloud/resources/retrievers/__init__.py +2 -0
  29. llama_cloud/resources/retrievers/client.py +654 -0
  30. llama_cloud/types/__init__.py +124 -2
  31. llama_cloud/types/{chat_message.py → app_schema_chat_chat_message.py} +2 -2
  32. llama_cloud/types/chat_app.py +44 -0
  33. llama_cloud/types/chat_app_response.py +41 -0
  34. llama_cloud/types/cloud_az_storage_blob_data_source.py +1 -0
  35. llama_cloud/types/cloud_box_data_source.py +1 -0
  36. llama_cloud/types/cloud_confluence_data_source.py +1 -0
  37. llama_cloud/types/cloud_google_drive_data_source.py +1 -0
  38. llama_cloud/types/cloud_jira_data_source.py +1 -0
  39. llama_cloud/types/cloud_notion_page_data_source.py +1 -0
  40. llama_cloud/types/cloud_one_drive_data_source.py +1 -0
  41. llama_cloud/types/cloud_postgres_vector_store.py +1 -0
  42. llama_cloud/types/cloud_s_3_data_source.py +1 -0
  43. llama_cloud/types/cloud_sharepoint_data_source.py +1 -0
  44. llama_cloud/types/cloud_slack_data_source.py +1 -0
  45. llama_cloud/types/composite_retrieval_mode.py +21 -0
  46. llama_cloud/types/composite_retrieval_result.py +38 -0
  47. llama_cloud/types/composite_retrieved_text_node.py +42 -0
  48. llama_cloud/types/data_sink.py +1 -1
  49. llama_cloud/types/data_sink_create.py +1 -1
  50. llama_cloud/types/data_source.py +1 -1
  51. llama_cloud/types/data_source_create.py +1 -1
  52. llama_cloud/types/edit_suggestion.py +39 -0
  53. llama_cloud/types/eval_dataset_job_record.py +1 -0
  54. llama_cloud/types/extract_agent.py +45 -0
  55. llama_cloud/types/extract_agent_data_schema_value.py +5 -0
  56. llama_cloud/types/extract_config.py +40 -0
  57. llama_cloud/types/extract_job.py +35 -0
  58. llama_cloud/types/extract_job_create.py +40 -0
  59. llama_cloud/types/extract_job_create_data_schema_override_value.py +7 -0
  60. llama_cloud/types/extract_mode.py +17 -0
  61. llama_cloud/types/extract_resultset.py +46 -0
  62. llama_cloud/types/extract_resultset_data.py +11 -0
  63. llama_cloud/types/extract_resultset_data_item_value.py +7 -0
  64. llama_cloud/types/extract_resultset_data_zero_value.py +7 -0
  65. llama_cloud/types/extract_resultset_extraction_metadata_value.py +7 -0
  66. llama_cloud/types/file.py +3 -0
  67. llama_cloud/types/file_permission_info_value.py +5 -0
  68. llama_cloud/types/filter_condition.py +9 -1
  69. llama_cloud/types/filter_operator.py +4 -0
  70. llama_cloud/types/image_block.py +35 -0
  71. llama_cloud/types/input_message.py +1 -1
  72. llama_cloud/types/job_name_mapping.py +4 -0
  73. llama_cloud/types/job_names.py +89 -0
  74. llama_cloud/types/job_record.py +57 -0
  75. llama_cloud/types/job_record_with_usage_metrics.py +36 -0
  76. llama_cloud/types/llama_index_core_base_llms_types_chat_message.py +39 -0
  77. llama_cloud/types/llama_index_core_base_llms_types_chat_message_blocks_item.py +33 -0
  78. llama_cloud/types/llama_parse_parameters.py +15 -0
  79. llama_cloud/types/llm.py +1 -0
  80. llama_cloud/types/llm_model_data.py +1 -0
  81. llama_cloud/types/llm_parameters.py +1 -0
  82. llama_cloud/types/managed_ingestion_status.py +4 -0
  83. llama_cloud/types/managed_ingestion_status_response.py +1 -0
  84. llama_cloud/types/object_type.py +4 -0
  85. llama_cloud/types/organization.py +5 -0
  86. llama_cloud/types/paginated_jobs_history_with_metrics.py +35 -0
  87. llama_cloud/types/paginated_report_response.py +35 -0
  88. llama_cloud/types/parse_plan_level.py +21 -0
  89. llama_cloud/types/parsing_job_structured_result.py +32 -0
  90. llama_cloud/types/pipeline_create.py +3 -1
  91. llama_cloud/types/pipeline_data_source.py +1 -1
  92. llama_cloud/types/pipeline_file.py +3 -0
  93. llama_cloud/types/pipeline_file_permission_info_value.py +7 -0
  94. llama_cloud/types/playground_session.py +2 -2
  95. llama_cloud/types/preset_retrieval_params.py +1 -0
  96. llama_cloud/types/progress_event.py +44 -0
  97. llama_cloud/types/progress_event_status.py +33 -0
  98. llama_cloud/types/prompt_spec.py +2 -2
  99. llama_cloud/types/related_node_info.py +2 -2
  100. llama_cloud/types/related_node_info_node_type.py +7 -0
  101. llama_cloud/types/report.py +33 -0
  102. llama_cloud/types/report_block.py +34 -0
  103. llama_cloud/types/report_block_dependency.py +29 -0
  104. llama_cloud/types/report_create_response.py +31 -0
  105. llama_cloud/types/report_event_item.py +40 -0
  106. llama_cloud/types/report_event_item_event_data.py +45 -0
  107. llama_cloud/types/report_event_type.py +37 -0
  108. llama_cloud/types/report_metadata.py +39 -0
  109. llama_cloud/types/report_plan.py +36 -0
  110. llama_cloud/types/report_plan_block.py +36 -0
  111. llama_cloud/types/report_query.py +33 -0
  112. llama_cloud/types/report_response.py +41 -0
  113. llama_cloud/types/report_state.py +37 -0
  114. llama_cloud/types/report_state_event.py +38 -0
  115. llama_cloud/types/report_update_event.py +38 -0
  116. llama_cloud/types/retrieve_results.py +1 -1
  117. llama_cloud/types/retriever.py +45 -0
  118. llama_cloud/types/retriever_create.py +37 -0
  119. llama_cloud/types/retriever_pipeline.py +37 -0
  120. llama_cloud/types/status_enum.py +4 -0
  121. llama_cloud/types/supported_llm_model_names.py +4 -0
  122. llama_cloud/types/text_block.py +31 -0
  123. llama_cloud/types/text_node.py +13 -6
  124. llama_cloud/types/usage_metric_response.py +34 -0
  125. llama_cloud/types/user_job_record.py +32 -0
  126. {llama_cloud-0.1.5.dist-info → llama_cloud-0.1.7a1.dist-info}/METADATA +3 -1
  127. {llama_cloud-0.1.5.dist-info → llama_cloud-0.1.7a1.dist-info}/RECORD +129 -59
  128. {llama_cloud-0.1.5.dist-info → llama_cloud-0.1.7a1.dist-info}/WHEEL +1 -1
  129. {llama_cloud-0.1.5.dist-info → llama_cloud-0.1.7a1.dist-info}/LICENSE +0 -0
llama_cloud/types/llama_index_core_base_llms_types_chat_message.py ADDED
@@ -0,0 +1,39 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .llama_index_core_base_llms_types_chat_message_blocks_item import LlamaIndexCoreBaseLlmsTypesChatMessageBlocksItem
+from .message_role import MessageRole
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class LlamaIndexCoreBaseLlmsTypesChatMessage(pydantic.BaseModel):
+    """
+    Chat message.
+    """
+
+    role: typing.Optional[MessageRole]
+    additional_kwargs: typing.Optional[typing.Dict[str, typing.Any]]
+    blocks: typing.Optional[typing.List[LlamaIndexCoreBaseLlmsTypesChatMessageBlocksItem]]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
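The try/except at the top pins the model to the pydantic v1 API even when pydantic 2.x is installed (via the bundled pydantic.v1 shim), and json()/dict() default to by_alias and exclude_unset. A minimal usage sketch, assuming only that the module path matches file 76 in the list above:

```python
from llama_cloud.types.llama_index_core_base_llms_types_chat_message import (
    LlamaIndexCoreBaseLlmsTypesChatMessage,
)

# All fields are Optional, so an empty message validates; exclude_unset=True
# in the overridden json() keeps unset fields out of the serialized payload.
msg = LlamaIndexCoreBaseLlmsTypesChatMessage()
print(msg.json())  # {}
```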
llama_cloud/types/llama_index_core_base_llms_types_chat_message_blocks_item.py ADDED
@@ -0,0 +1,33 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import typing_extensions
+
+from .image_block import ImageBlock
+from .text_block import TextBlock
+
+
+class LlamaIndexCoreBaseLlmsTypesChatMessageBlocksItem_Image(ImageBlock):
+    block_type: typing_extensions.Literal["image"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class LlamaIndexCoreBaseLlmsTypesChatMessageBlocksItem_Text(TextBlock):
+    block_type: typing_extensions.Literal["text"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+LlamaIndexCoreBaseLlmsTypesChatMessageBlocksItem = typing.Union[
+    LlamaIndexCoreBaseLlmsTypesChatMessageBlocksItem_Image, LlamaIndexCoreBaseLlmsTypesChatMessageBlocksItem_Text
+]
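With smart_union on the parent model, the block_type Literal acts as a de facto discriminator during deserialization. A sketch; the "user" role value and the "text" field on TextBlock are assumptions, since message_role.py and text_block.py are not shown in this diff:

```python
from llama_cloud.types.llama_index_core_base_llms_types_chat_message import (
    LlamaIndexCoreBaseLlmsTypesChatMessage,
)

# "user" and the "text" field are assumed shapes (their modules aren't shown
# here); block_type="text" routes the dict to the _Text variant of the union.
msg = LlamaIndexCoreBaseLlmsTypesChatMessage.parse_obj(
    {"role": "user", "blocks": [{"block_type": "text", "text": "hello"}]}
)
assert msg.blocks is not None and msg.blocks[0].block_type == "text"
```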
llama_cloud/types/llama_parse_parameters.py CHANGED
@@ -27,15 +27,24 @@ class LlamaParseParameters(pydantic.BaseModel):
     disable_reconstruction: typing.Optional[bool]
     disable_image_extraction: typing.Optional[bool]
     invalidate_cache: typing.Optional[bool]
+    output_pdf_of_document: typing.Optional[bool]
     do_not_cache: typing.Optional[bool]
     fast_mode: typing.Optional[bool]
     skip_diagonal_text: typing.Optional[bool]
     gpt_4_o_mode: typing.Optional[bool] = pydantic.Field(alias="gpt4o_mode")
     gpt_4_o_api_key: typing.Optional[str] = pydantic.Field(alias="gpt4o_api_key")
     do_not_unroll_columns: typing.Optional[bool]
+    extract_layout: typing.Optional[bool]
+    html_make_all_elements_visible: typing.Optional[bool]
+    html_remove_navigation_elements: typing.Optional[bool]
+    html_remove_fixed_elements: typing.Optional[bool]
     guess_xlsx_sheet_name: typing.Optional[bool]
     page_separator: typing.Optional[str]
     bounding_box: typing.Optional[str]
+    bbox_top: typing.Optional[float]
+    bbox_right: typing.Optional[float]
+    bbox_bottom: typing.Optional[float]
+    bbox_left: typing.Optional[float]
     target_pages: typing.Optional[str]
     use_vendor_multimodal_model: typing.Optional[bool]
     vendor_multimodal_model_name: typing.Optional[str]
@@ -61,6 +70,12 @@ class LlamaParseParameters(pydantic.BaseModel):
     auto_mode_trigger_on_text_in_page: typing.Optional[str]
     auto_mode_trigger_on_table_in_page: typing.Optional[bool]
     auto_mode_trigger_on_image_in_page: typing.Optional[bool]
+    structured_output: typing.Optional[bool]
+    structured_output_json_schema: typing.Optional[str]
+    structured_output_json_schema_name: typing.Optional[str]
+    max_pages: typing.Optional[int]
+    max_pages_enforced: typing.Optional[int]
+    extract_charts: typing.Optional[bool]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
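The new structured-output and page-limit fields compose with the existing ones; a minimal sketch (the JSON schema string below is a made-up example, not part of the SDK):

```python
from llama_cloud.types.llama_parse_parameters import LlamaParseParameters

params = LlamaParseParameters(
    structured_output=True,
    # Hypothetical schema, purely for illustration:
    structured_output_json_schema='{"type": "object", "properties": {"title": {"type": "string"}}}',
    max_pages=10,
    extract_charts=True,
)
print(params.json())  # unset fields are omitted thanks to exclude_unset=True
```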
llama_cloud/types/llm.py CHANGED
@@ -44,6 +44,7 @@ class Llm(pydantic.BaseModel):
     output_parser: typing.Optional[typing.Any]
     pydantic_program_mode: typing.Optional[PydanticProgramMode]
     query_wrapper_prompt: typing.Optional[BasePromptTemplate]
+    class_name: typing.Optional[str]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
llama_cloud/types/llm_model_data.py CHANGED
@@ -22,6 +22,7 @@ class LlmModelData(pydantic.BaseModel):
     name: str = pydantic.Field(description="The name of the LLM model.")
     description: str = pydantic.Field(description="The description of the LLM model.")
     multi_modal: bool = pydantic.Field(description="Whether the model supports multi-modal image input")
+    model_name: typing.Optional[str]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
llama_cloud/types/llm_parameters.py CHANGED
@@ -21,6 +21,7 @@ class LlmParameters(pydantic.BaseModel):
     )
     system_prompt: typing.Optional[str]
     temperature: typing.Optional[float]
+    use_chain_of_thought_reasoning: typing.Optional[bool]
     class_name: typing.Optional[str]
 
     def json(self, **kwargs: typing.Any) -> str:
llama_cloud/types/managed_ingestion_status.py CHANGED
@@ -16,6 +16,7 @@ class ManagedIngestionStatus(str, enum.Enum):
     SUCCESS = "SUCCESS"
     ERROR = "ERROR"
     PARTIAL_SUCCESS = "PARTIAL_SUCCESS"
+    CANCELLED = "CANCELLED"
 
     def visit(
         self,
@@ -24,6 +25,7 @@ class ManagedIngestionStatus(str, enum.Enum):
         success: typing.Callable[[], T_Result],
         error: typing.Callable[[], T_Result],
         partial_success: typing.Callable[[], T_Result],
+        cancelled: typing.Callable[[], T_Result],
     ) -> T_Result:
         if self is ManagedIngestionStatus.NOT_STARTED:
             return not_started()
@@ -35,3 +37,5 @@ class ManagedIngestionStatus(str, enum.Enum):
             return error()
         if self is ManagedIngestionStatus.PARTIAL_SUCCESS:
             return partial_success()
+        if self is ManagedIngestionStatus.CANCELLED:
+            return cancelled()
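Note that visit() gained a required cancelled handler, so exhaustive call sites break until updated. The new member itself behaves like any other value of this str enum; a minimal sketch:

```python
from llama_cloud.types.managed_ingestion_status import ManagedIngestionStatus

# Lookup by value works because the enum subclasses str.
status = ManagedIngestionStatus("CANCELLED")
if status is ManagedIngestionStatus.CANCELLED:
    print("ingestion was cancelled")
```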
llama_cloud/types/managed_ingestion_status_response.py CHANGED
@@ -21,6 +21,7 @@ class ManagedIngestionStatusResponse(pydantic.BaseModel):
     deployment_date: typing.Optional[dt.datetime]
     status: ManagedIngestionStatus = pydantic.Field(description="Status of the ingestion.")
     error: typing.Optional[typing.List[IngestionErrorResponse]]
+    effective_at: typing.Optional[dt.datetime]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
llama_cloud/types/object_type.py CHANGED
@@ -11,6 +11,7 @@ class ObjectType(str, enum.Enum):
     TWO = "2"
     THREE = "3"
     FOUR = "4"
+    FIVE = "5"
 
     def visit(
         self,
@@ -18,6 +19,7 @@ class ObjectType(str, enum.Enum):
         two: typing.Callable[[], T_Result],
         three: typing.Callable[[], T_Result],
         four: typing.Callable[[], T_Result],
+        five: typing.Callable[[], T_Result],
     ) -> T_Result:
         if self is ObjectType.ONE:
             return one()
@@ -27,3 +29,5 @@ class ObjectType(str, enum.Enum):
             return three()
         if self is ObjectType.FOUR:
             return four()
+        if self is ObjectType.FIVE:
+            return five()
llama_cloud/types/organization.py CHANGED
@@ -4,6 +4,7 @@ import datetime as dt
 import typing
 
 from ..core.datetime_utils import serialize_datetime
+from .parse_plan_level import ParsePlanLevel
 
 try:
     import pydantic
@@ -23,6 +24,10 @@ class Organization(pydantic.BaseModel):
     created_at: typing.Optional[dt.datetime]
     updated_at: typing.Optional[dt.datetime]
     name: str = pydantic.Field(description="A name for the organization.")
+    stripe_customer_id: typing.Optional[str]
+    parse_plan_level: typing.Optional[ParsePlanLevel] = pydantic.Field(
+        description="Whether the organization is a Parse Premium customer."
+    )
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
llama_cloud/types/paginated_jobs_history_with_metrics.py ADDED
@@ -0,0 +1,35 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .job_record_with_usage_metrics import JobRecordWithUsageMetrics
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class PaginatedJobsHistoryWithMetrics(pydantic.BaseModel):
+    jobs: typing.List[JobRecordWithUsageMetrics]
+    total_count: int
+    limit: int
+    offset: int
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
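The jobs/total_count/limit/offset shape supports plain offset pagination. A sketch, where fetch_jobs_page stands in for whichever jobs-client method returns this model (a hypothetical callable; the real method lives in llama_cloud/resources/jobs/client.py, not shown here):

```python
from llama_cloud.types.paginated_jobs_history_with_metrics import (
    PaginatedJobsHistoryWithMetrics,
)

def iter_all_jobs(fetch_jobs_page, limit: int = 50):
    # fetch_jobs_page(limit=..., offset=...) -> PaginatedJobsHistoryWithMetrics
    offset = 0
    while True:
        page: PaginatedJobsHistoryWithMetrics = fetch_jobs_page(limit=limit, offset=offset)
        yield from page.jobs
        offset += limit
        if offset >= page.total_count:
            break
```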
llama_cloud/types/paginated_report_response.py ADDED
@@ -0,0 +1,35 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .report_response import ReportResponse
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class PaginatedReportResponse(pydantic.BaseModel):
+    report_responses: typing.List[ReportResponse]
+    limit: int
+    offset: int
+    total_count: int
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/parse_plan_level.py ADDED
@@ -0,0 +1,21 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class ParsePlanLevel(str, enum.Enum):
+    """
+    Enum for the Parse plan level.
+    """
+
+    DEFAULT = "DEFAULT"
+    PREMIUM = "PREMIUM"
+
+    def visit(self, default: typing.Callable[[], T_Result], premium: typing.Callable[[], T_Result]) -> T_Result:
+        if self is ParsePlanLevel.DEFAULT:
+            return default()
+        if self is ParsePlanLevel.PREMIUM:
+            return premium()
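The full visit() signature is visible here, so exhaustive handling is fully grounded:

```python
from llama_cloud.types.parse_plan_level import ParsePlanLevel

# visit() forces a handler per member, so adding a member later becomes
# a visible API change rather than a silently unhandled case.
label = ParsePlanLevel.PREMIUM.visit(
    default=lambda: "standard parsing plan",
    premium=lambda: "premium parsing plan",
)
print(label)  # premium parsing plan
```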
llama_cloud/types/parsing_job_structured_result.py ADDED
@@ -0,0 +1,32 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class ParsingJobStructuredResult(pydantic.BaseModel):
+    structured: typing.Any
+    job_metadata: typing.Any
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/pipeline_create.py CHANGED
@@ -41,7 +41,9 @@ class PipelineCreate(pydantic.BaseModel):
     eval_parameters: typing.Optional[EvalExecutionParams] = pydantic.Field(
         description="Eval parameters for the pipeline."
     )
-    llama_parse_parameters: typing.Optional[LlamaParseParameters]
+    llama_parse_parameters: typing.Optional[LlamaParseParameters] = pydantic.Field(
+        description="Settings that can be configured for how to use LlamaParse to parse files within a LlamaCloud pipeline."
+    )
     name: str
     pipeline_type: typing.Optional[PipelineType] = pydantic.Field(
         description="Type of pipeline. Either PLAYGROUND or MANAGED."
llama_cloud/types/pipeline_data_source.py CHANGED
@@ -28,7 +28,7 @@ class PipelineDataSource(pydantic.BaseModel):
     name: str = pydantic.Field(description="The name of the data source.")
     source_type: ConfigurableDataSourceNames
     custom_metadata: typing.Optional[typing.Dict[str, typing.Optional[PipelineDataSourceCustomMetadataValue]]]
-    component: PipelineDataSourceComponent
+    component: PipelineDataSourceComponent = pydantic.Field(description="Component that implements the data source")
     project_id: str
     data_source_id: str = pydantic.Field(description="The ID of the data source.")
     pipeline_id: str = pydantic.Field(description="The ID of the pipeline.")
llama_cloud/types/pipeline_file.py CHANGED
@@ -6,6 +6,7 @@ import typing
 from ..core.datetime_utils import serialize_datetime
 from .pipeline_file_config_hash_value import PipelineFileConfigHashValue
 from .pipeline_file_custom_metadata_value import PipelineFileCustomMetadataValue
+from .pipeline_file_permission_info_value import PipelineFilePermissionInfoValue
 from .pipeline_file_resource_info_value import PipelineFileResourceInfoValue
 
 try:
@@ -26,11 +27,13 @@ class PipelineFile(pydantic.BaseModel):
     created_at: typing.Optional[dt.datetime]
     updated_at: typing.Optional[dt.datetime]
     name: typing.Optional[str]
+    external_file_id: typing.Optional[str]
     file_size: typing.Optional[int]
     file_type: typing.Optional[str]
     project_id: str = pydantic.Field(description="The ID of the project that the file belongs to")
     last_modified_at: typing.Optional[dt.datetime]
     resource_info: typing.Optional[typing.Dict[str, typing.Optional[PipelineFileResourceInfoValue]]]
+    permission_info: typing.Optional[typing.Dict[str, typing.Optional[PipelineFilePermissionInfoValue]]]
     data_source_id: typing.Optional[str]
     file_id: typing.Optional[str]
     pipeline_id: str = pydantic.Field(description="The ID of the pipeline that the file is associated with")
llama_cloud/types/pipeline_file_permission_info_value.py ADDED
@@ -0,0 +1,7 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+PipelineFilePermissionInfoValue = typing.Union[
+    typing.Dict[str, typing.Any], typing.List[typing.Any], str, int, float, bool
+]
llama_cloud/types/playground_session.py CHANGED
@@ -4,7 +4,7 @@ import datetime as dt
 import typing
 
 from ..core.datetime_utils import serialize_datetime
-from .chat_message import ChatMessage
+from .app_schema_chat_chat_message import AppSchemaChatChatMessage
 from .llm_parameters import LlmParameters
 from .preset_retrieval_params import PresetRetrievalParams
 
@@ -33,7 +33,7 @@ class PlaygroundSession(pydantic.BaseModel):
     retrieval_params: typing.Optional[PresetRetrievalParams] = pydantic.Field(
         description="Preset retrieval parameters last used in this session."
     )
-    chat_messages: typing.Optional[typing.List[ChatMessage]] = pydantic.Field(
+    chat_messages: typing.Optional[typing.List[AppSchemaChatChatMessage]] = pydantic.Field(
         description="Chat message history for this session."
     )
llama_cloud/types/preset_retrieval_params.py CHANGED
@@ -22,6 +22,7 @@ class PresetRetrievalParams(pydantic.BaseModel):
     """
 
     dense_similarity_top_k: typing.Optional[int]
+    dense_similarity_cutoff: typing.Optional[float]
     sparse_similarity_top_k: typing.Optional[int]
     enable_reranking: typing.Optional[bool]
     rerank_top_n: typing.Optional[int]
llama_cloud/types/progress_event.py ADDED
@@ -0,0 +1,44 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .progress_event_status import ProgressEventStatus
+from .report_event_type import ReportEventType
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class ProgressEvent(pydantic.BaseModel):
+    """
+    Event for tracking progress of operations in workflows.
+    """
+
+    timestamp: typing.Optional[dt.datetime]
+    id: typing.Optional[str] = pydantic.Field(description="The ID of the event")
+    group_id: typing.Optional[str] = pydantic.Field(description="The ID of the group this event belongs to")
+    variant: ReportEventType
+    msg: str = pydantic.Field(description="The message to display to the user")
+    progress: typing.Optional[float]
+    status: typing.Optional[ProgressEventStatus] = pydantic.Field(description="Current status of the operation")
+    extra_detail: typing.Optional[typing.Dict[str, typing.Any]]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
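A small consumer sketch; the diff does not say whether progress is a 0-1 fraction or a percentage, so it is rendered raw here:

```python
from llama_cloud.types.progress_event import ProgressEvent

def describe(event: ProgressEvent) -> str:
    # status and progress are Optional, so guard before formatting.
    status = event.status.value if event.status is not None else "unknown"
    progress = "?" if event.progress is None else str(event.progress)
    return f"[{status}] {event.msg} (progress={progress})"
```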
llama_cloud/types/progress_event_status.py ADDED
@@ -0,0 +1,33 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class ProgressEventStatus(str, enum.Enum):
+    """
+    Current status of the operation
+    """
+
+    PENDING = "pending"
+    IN_PROGRESS = "in_progress"
+    COMPLETED = "completed"
+    ERROR = "error"
+
+    def visit(
+        self,
+        pending: typing.Callable[[], T_Result],
+        in_progress: typing.Callable[[], T_Result],
+        completed: typing.Callable[[], T_Result],
+        error: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is ProgressEventStatus.PENDING:
+            return pending()
+        if self is ProgressEventStatus.IN_PROGRESS:
+            return in_progress()
+        if self is ProgressEventStatus.COMPLETED:
+            return completed()
+        if self is ProgressEventStatus.ERROR:
+            return error()
llama_cloud/types/prompt_spec.py CHANGED
@@ -4,7 +4,7 @@ import datetime as dt
 import typing
 
 from ..core.datetime_utils import serialize_datetime
-from .chat_message import ChatMessage
+from .app_schema_chat_chat_message import AppSchemaChatChatMessage
 
 try:
     import pydantic
@@ -20,7 +20,7 @@ class PromptSpec(pydantic.BaseModel):
     prompt_class: str = pydantic.Field(description="The class of the prompt (PromptTemplate or ChatPromptTemplate).")
     prompt_type: str = pydantic.Field(description="The type of prompt.")
     template: typing.Optional[str]
-    message_templates: typing.Optional[typing.List[ChatMessage]]
+    message_templates: typing.Optional[typing.List[AppSchemaChatChatMessage]]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
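This hunk and the playground_session one above follow from the rename of chat_message.py to app_schema_chat_chat_message.py (file 31 in the list), which frees the old name for the new llama-index message model. Downstream imports need the matching one-line change; a migration sketch:

```python
# 0.1.5:
# from llama_cloud.types.chat_message import ChatMessage
# 0.1.7a1:
from llama_cloud.types.app_schema_chat_chat_message import AppSchemaChatChatMessage
```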
llama_cloud/types/related_node_info.py CHANGED
@@ -4,7 +4,7 @@ import datetime as dt
 import typing
 
 from ..core.datetime_utils import serialize_datetime
-from .object_type import ObjectType
+from .related_node_info_node_type import RelatedNodeInfoNodeType
 
 try:
     import pydantic
@@ -17,7 +17,7 @@ except ImportError:
 
 class RelatedNodeInfo(pydantic.BaseModel):
     node_id: str
-    node_type: typing.Optional[ObjectType]
+    node_type: typing.Optional[RelatedNodeInfoNodeType]
     metadata: typing.Optional[typing.Dict[str, typing.Any]]
     hash: typing.Optional[str]
     class_name: typing.Optional[str]
llama_cloud/types/related_node_info_node_type.py ADDED
@@ -0,0 +1,7 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+from .object_type import ObjectType
+
+RelatedNodeInfoNodeType = typing.Union[ObjectType, str]
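This union widens RelatedNodeInfo.node_type from ObjectType alone to ObjectType-or-string, so non-enumerated node types no longer fail validation; a minimal sketch:

```python
from llama_cloud.types.object_type import ObjectType
from llama_cloud.types.related_node_info import RelatedNodeInfo

# Both an enum member and an arbitrary string validate now:
a = RelatedNodeInfo(node_id="n1", node_type=ObjectType.ONE)
b = RelatedNodeInfo(node_id="n2", node_type="my-custom-node-type")
```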
llama_cloud/types/report.py ADDED
@@ -0,0 +1,33 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .report_block import ReportBlock
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class Report(pydantic.BaseModel):
+    id: str = pydantic.Field(description="The id of the report")
+    blocks: typing.Optional[typing.List[ReportBlock]] = pydantic.Field(description="The blocks of the report")
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
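Like the other generated models, Report sets Config.frozen = True, which under pydantic v1 semantics makes instances immutable (and hashable); a minimal sketch:

```python
from llama_cloud.types.report import Report

report = Report(id="report-123")  # blocks is Optional and may be omitted
try:
    report.id = "other-id"  # frozen models reject attribute assignment
except TypeError:
    print("Report is immutable; build a new instance instead.")
```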
llama_cloud/types/report_block.py ADDED
@@ -0,0 +1,34 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .text_node_with_score import TextNodeWithScore
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class ReportBlock(pydantic.BaseModel):
+    idx: int = pydantic.Field(description="The index of the block")
+    template: str = pydantic.Field(description="The content of the block")
+    sources: typing.Optional[typing.List[TextNodeWithScore]] = pydantic.Field(description="The sources for the block")
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/report_block_dependency.py ADDED
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class ReportBlockDependency(str, enum.Enum):
+    NONE = "none"
+    ALL = "all"
+    PREVIOUS = "previous"
+    NEXT = "next"
+
+    def visit(
+        self,
+        none: typing.Callable[[], T_Result],
+        all: typing.Callable[[], T_Result],
+        previous: typing.Callable[[], T_Result],
+        next: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is ReportBlockDependency.NONE:
+            return none()
+        if self is ReportBlockDependency.ALL:
+            return all()
+        if self is ReportBlockDependency.PREVIOUS:
+            return previous()
+        if self is ReportBlockDependency.NEXT:
+            return next()