llama-cloud 0.1.6__py3-none-any.whl → 0.1.7a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of llama-cloud might be problematic.

Files changed (173)
  1. llama_cloud/__init__.py +140 -6
  2. llama_cloud/client.py +15 -0
  3. llama_cloud/environment.py +1 -1
  4. llama_cloud/resources/__init__.py +15 -0
  5. llama_cloud/{types/token.py → resources/chat_apps/__init__.py} +0 -3
  6. llama_cloud/resources/chat_apps/client.py +620 -0
  7. llama_cloud/resources/data_sinks/client.py +12 -12
  8. llama_cloud/resources/data_sources/client.py +14 -14
  9. llama_cloud/resources/embedding_model_configs/client.py +20 -76
  10. llama_cloud/resources/evals/client.py +26 -36
  11. llama_cloud/resources/extraction/client.py +32 -32
  12. llama_cloud/resources/files/client.py +40 -44
  13. llama_cloud/resources/jobs/__init__.py +2 -0
  14. llama_cloud/resources/jobs/client.py +148 -0
  15. llama_cloud/resources/llama_extract/__init__.py +5 -0
  16. llama_cloud/resources/llama_extract/client.py +1038 -0
  17. llama_cloud/resources/llama_extract/types/__init__.py +6 -0
  18. llama_cloud/resources/llama_extract/types/extract_agent_create_data_schema_value.py +7 -0
  19. llama_cloud/resources/llama_extract/types/extract_agent_update_data_schema_value.py +7 -0
  20. llama_cloud/resources/organizations/client.py +66 -70
  21. llama_cloud/resources/parsing/client.py +448 -428
  22. llama_cloud/resources/pipelines/client.py +256 -344
  23. llama_cloud/resources/projects/client.py +34 -60
  24. llama_cloud/resources/reports/__init__.py +5 -0
  25. llama_cloud/resources/reports/client.py +1198 -0
  26. llama_cloud/resources/reports/types/__init__.py +7 -0
  27. llama_cloud/resources/reports/types/update_report_plan_api_v_1_reports_report_id_plan_patch_request_action.py +25 -0
  28. llama_cloud/resources/retrievers/__init__.py +2 -0
  29. llama_cloud/resources/retrievers/client.py +654 -0
  30. llama_cloud/types/__init__.py +128 -6
  31. llama_cloud/types/{chat_message.py → app_schema_chat_chat_message.py} +3 -3
  32. llama_cloud/types/azure_open_ai_embedding.py +6 -12
  33. llama_cloud/types/base_prompt_template.py +2 -6
  34. llama_cloud/types/bedrock_embedding.py +6 -12
  35. llama_cloud/types/character_splitter.py +2 -4
  36. llama_cloud/types/chat_app.py +44 -0
  37. llama_cloud/types/chat_app_response.py +41 -0
  38. llama_cloud/types/cloud_az_storage_blob_data_source.py +7 -15
  39. llama_cloud/types/cloud_box_data_source.py +6 -12
  40. llama_cloud/types/cloud_confluence_data_source.py +6 -6
  41. llama_cloud/types/cloud_document.py +1 -3
  42. llama_cloud/types/cloud_document_create.py +1 -3
  43. llama_cloud/types/cloud_jira_data_source.py +4 -6
  44. llama_cloud/types/cloud_notion_page_data_source.py +2 -2
  45. llama_cloud/types/cloud_one_drive_data_source.py +3 -5
  46. llama_cloud/types/cloud_postgres_vector_store.py +1 -0
  47. llama_cloud/types/cloud_s_3_data_source.py +4 -8
  48. llama_cloud/types/cloud_sharepoint_data_source.py +6 -8
  49. llama_cloud/types/cloud_slack_data_source.py +6 -6
  50. llama_cloud/types/code_splitter.py +1 -1
  51. llama_cloud/types/cohere_embedding.py +3 -7
  52. llama_cloud/types/composite_retrieval_mode.py +21 -0
  53. llama_cloud/types/composite_retrieval_result.py +38 -0
  54. llama_cloud/types/composite_retrieved_text_node.py +42 -0
  55. llama_cloud/types/data_sink.py +4 -4
  56. llama_cloud/types/data_sink_component.py +20 -0
  57. llama_cloud/types/data_source.py +5 -7
  58. llama_cloud/types/data_source_component.py +28 -0
  59. llama_cloud/types/data_source_create.py +1 -3
  60. llama_cloud/types/edit_suggestion.py +39 -0
  61. llama_cloud/types/embedding_model_config.py +2 -2
  62. llama_cloud/types/embedding_model_config_update.py +2 -4
  63. llama_cloud/types/eval_dataset.py +2 -2
  64. llama_cloud/types/eval_dataset_job_record.py +8 -13
  65. llama_cloud/types/eval_execution_params_override.py +2 -6
  66. llama_cloud/types/eval_question.py +2 -2
  67. llama_cloud/types/extract_agent.py +45 -0
  68. llama_cloud/types/extract_agent_data_schema_value.py +5 -0
  69. llama_cloud/types/extract_config.py +40 -0
  70. llama_cloud/types/extract_job.py +35 -0
  71. llama_cloud/types/extract_job_create.py +40 -0
  72. llama_cloud/types/extract_job_create_data_schema_override_value.py +7 -0
  73. llama_cloud/types/extract_mode.py +17 -0
  74. llama_cloud/types/extract_resultset.py +46 -0
  75. llama_cloud/types/extract_resultset_data.py +11 -0
  76. llama_cloud/types/extract_resultset_data_item_value.py +7 -0
  77. llama_cloud/types/extract_resultset_data_zero_value.py +7 -0
  78. llama_cloud/types/extract_resultset_extraction_metadata_value.py +7 -0
  79. llama_cloud/types/extraction_result.py +2 -2
  80. llama_cloud/types/extraction_schema.py +3 -5
  81. llama_cloud/types/file.py +9 -14
  82. llama_cloud/types/filter_condition.py +9 -1
  83. llama_cloud/types/filter_operator.py +6 -2
  84. llama_cloud/types/gemini_embedding.py +6 -10
  85. llama_cloud/types/hugging_face_inference_api_embedding.py +11 -27
  86. llama_cloud/types/hugging_face_inference_api_embedding_token.py +5 -0
  87. llama_cloud/types/image_block.py +35 -0
  88. llama_cloud/types/input_message.py +2 -4
  89. llama_cloud/types/job_names.py +89 -0
  90. llama_cloud/types/job_record.py +57 -0
  91. llama_cloud/types/job_record_with_usage_metrics.py +36 -0
  92. llama_cloud/types/llama_index_core_base_llms_types_chat_message.py +39 -0
  93. llama_cloud/types/llama_index_core_base_llms_types_chat_message_blocks_item.py +33 -0
  94. llama_cloud/types/llama_parse_parameters.py +4 -0
  95. llama_cloud/types/llm.py +3 -4
  96. llama_cloud/types/llm_model_data.py +1 -0
  97. llama_cloud/types/llm_parameters.py +3 -5
  98. llama_cloud/types/local_eval.py +8 -10
  99. llama_cloud/types/local_eval_results.py +1 -1
  100. llama_cloud/types/managed_ingestion_status.py +4 -0
  101. llama_cloud/types/managed_ingestion_status_response.py +4 -5
  102. llama_cloud/types/markdown_element_node_parser.py +3 -5
  103. llama_cloud/types/markdown_node_parser.py +1 -1
  104. llama_cloud/types/metadata_filter.py +2 -2
  105. llama_cloud/types/metadata_filter_value.py +5 -0
  106. llama_cloud/types/metric_result.py +3 -3
  107. llama_cloud/types/node_parser.py +1 -1
  108. llama_cloud/types/object_type.py +4 -0
  109. llama_cloud/types/open_ai_embedding.py +6 -12
  110. llama_cloud/types/organization.py +7 -2
  111. llama_cloud/types/page_splitter_node_parser.py +2 -2
  112. llama_cloud/types/paginated_jobs_history_with_metrics.py +35 -0
  113. llama_cloud/types/paginated_report_response.py +35 -0
  114. llama_cloud/types/parse_plan_level.py +21 -0
  115. llama_cloud/types/permission.py +3 -3
  116. llama_cloud/types/pipeline.py +7 -17
  117. llama_cloud/types/pipeline_configuration_hashes.py +3 -3
  118. llama_cloud/types/pipeline_create.py +8 -16
  119. llama_cloud/types/pipeline_data_source.py +7 -13
  120. llama_cloud/types/pipeline_data_source_component.py +28 -0
  121. llama_cloud/types/pipeline_data_source_create.py +1 -3
  122. llama_cloud/types/pipeline_deployment.py +4 -4
  123. llama_cloud/types/pipeline_file.py +13 -24
  124. llama_cloud/types/pipeline_file_create.py +1 -3
  125. llama_cloud/types/playground_session.py +4 -4
  126. llama_cloud/types/preset_retrieval_params.py +8 -14
  127. llama_cloud/types/presigned_url.py +1 -3
  128. llama_cloud/types/progress_event.py +44 -0
  129. llama_cloud/types/progress_event_status.py +33 -0
  130. llama_cloud/types/project.py +2 -2
  131. llama_cloud/types/prompt_mixin_prompts.py +1 -1
  132. llama_cloud/types/prompt_spec.py +3 -5
  133. llama_cloud/types/related_node_info.py +2 -2
  134. llama_cloud/types/related_node_info_node_type.py +7 -0
  135. llama_cloud/types/report.py +33 -0
  136. llama_cloud/types/report_block.py +34 -0
  137. llama_cloud/types/report_block_dependency.py +29 -0
  138. llama_cloud/types/report_create_response.py +31 -0
  139. llama_cloud/types/report_event_item.py +40 -0
  140. llama_cloud/types/report_event_item_event_data.py +45 -0
  141. llama_cloud/types/report_event_type.py +37 -0
  142. llama_cloud/types/report_metadata.py +39 -0
  143. llama_cloud/types/report_plan.py +36 -0
  144. llama_cloud/types/report_plan_block.py +36 -0
  145. llama_cloud/types/report_query.py +33 -0
  146. llama_cloud/types/report_response.py +41 -0
  147. llama_cloud/types/report_state.py +37 -0
  148. llama_cloud/types/report_state_event.py +38 -0
  149. llama_cloud/types/report_update_event.py +38 -0
  150. llama_cloud/types/retrieve_results.py +1 -1
  151. llama_cloud/types/retriever.py +45 -0
  152. llama_cloud/types/retriever_create.py +37 -0
  153. llama_cloud/types/retriever_pipeline.py +37 -0
  154. llama_cloud/types/role.py +3 -3
  155. llama_cloud/types/sentence_splitter.py +2 -4
  156. llama_cloud/types/status_enum.py +4 -0
  157. llama_cloud/types/supported_llm_model_names.py +4 -0
  158. llama_cloud/types/text_block.py +31 -0
  159. llama_cloud/types/text_node.py +15 -8
  160. llama_cloud/types/token_text_splitter.py +1 -1
  161. llama_cloud/types/usage_metric_response.py +34 -0
  162. llama_cloud/types/user_job_record.py +32 -0
  163. llama_cloud/types/user_organization.py +5 -9
  164. llama_cloud/types/user_organization_create.py +4 -4
  165. llama_cloud/types/user_organization_delete.py +2 -2
  166. llama_cloud/types/user_organization_role.py +2 -2
  167. llama_cloud/types/vertex_text_embedding.py +5 -9
  168. {llama_cloud-0.1.6.dist-info → llama_cloud-0.1.7a1.dist-info}/METADATA +2 -1
  169. llama_cloud-0.1.7a1.dist-info/RECORD +310 -0
  170. llama_cloud/types/value.py +0 -5
  171. llama_cloud-0.1.6.dist-info/RECORD +0 -241
  172. {llama_cloud-0.1.6.dist-info → llama_cloud-0.1.7a1.dist-info}/LICENSE +0 -0
  173. {llama_cloud-0.1.6.dist-info → llama_cloud-0.1.7a1.dist-info}/WHEEL +0 -0

llama_cloud/types/pipeline_data_source_create.py
@@ -20,9 +20,7 @@ class PipelineDataSourceCreate(pydantic.BaseModel):
     """
 
     data_source_id: str = pydantic.Field(description="The ID of the data source.")
-    sync_interval: typing.Optional[float] = pydantic.Field(
-        description="The interval at which the data source should be synced."
-    )
+    sync_interval: typing.Optional[float]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
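
The change drops the pydantic.Field description but keeps the field optional. A minimal usage sketch of the resulting model (the ID value is invented; this assumes the pydantic v1 semantics the generated models pin via their import shim, under which Optional fields default to None):

    from llama_cloud.types.pipeline_data_source_create import PipelineDataSourceCreate

    # sync_interval is left unset, so the exclude_unset default in json() omits it.
    payload = PipelineDataSourceCreate(data_source_id="ds_123")
    print(payload.json())  # -> {"data_source_id": "ds_123"}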

llama_cloud/types/pipeline_deployment.py
@@ -17,11 +17,11 @@ except ImportError:
 
 class PipelineDeployment(pydantic.BaseModel):
     id: str = pydantic.Field(description="Unique identifier")
-    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
-    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
+    created_at: typing.Optional[dt.datetime]
+    updated_at: typing.Optional[dt.datetime]
     status: ManagedIngestionStatus = pydantic.Field(description="Status of the pipeline deployment.")
-    started_at: typing.Optional[dt.datetime] = pydantic.Field(description="Time the pipeline deployment started.")
-    ended_at: typing.Optional[dt.datetime] = pydantic.Field(description="Time the pipeline deployment finished.")
+    started_at: typing.Optional[dt.datetime]
+    ended_at: typing.Optional[dt.datetime]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/pipeline_file.py
@@ -24,33 +24,22 @@ class PipelineFile(pydantic.BaseModel):
     """
 
     id: str = pydantic.Field(description="Unique identifier")
-    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
-    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
+    created_at: typing.Optional[dt.datetime]
+    updated_at: typing.Optional[dt.datetime]
     name: typing.Optional[str]
-    file_size: typing.Optional[int] = pydantic.Field(description="Size of the file in bytes")
-    file_type: typing.Optional[str] = pydantic.Field(description="File type (e.g. pdf, docx, etc.)")
+    external_file_id: typing.Optional[str]
+    file_size: typing.Optional[int]
+    file_type: typing.Optional[str]
     project_id: str = pydantic.Field(description="The ID of the project that the file belongs to")
-    last_modified_at: typing.Optional[dt.datetime] = pydantic.Field(description="The last modified time of the file")
-    resource_info: typing.Optional[typing.Dict[str, typing.Optional[PipelineFileResourceInfoValue]]] = pydantic.Field(
-        description="Resource information for the file"
-    )
-    permission_info: typing.Optional[
-        typing.Dict[str, typing.Optional[PipelineFilePermissionInfoValue]]
-    ] = pydantic.Field(description="Permission information for the file")
-    data_source_id: typing.Optional[str] = pydantic.Field(
-        description="The ID of the data source that the file belongs to"
-    )
-    file_id: typing.Optional[str] = pydantic.Field(description="The ID of the file")
+    last_modified_at: typing.Optional[dt.datetime]
+    resource_info: typing.Optional[typing.Dict[str, typing.Optional[PipelineFileResourceInfoValue]]]
+    permission_info: typing.Optional[typing.Dict[str, typing.Optional[PipelineFilePermissionInfoValue]]]
+    data_source_id: typing.Optional[str]
+    file_id: typing.Optional[str]
     pipeline_id: str = pydantic.Field(description="The ID of the pipeline that the file is associated with")
-    custom_metadata: typing.Optional[
-        typing.Dict[str, typing.Optional[PipelineFileCustomMetadataValue]]
-    ] = pydantic.Field(description="Custom metadata for the file")
-    config_hash: typing.Optional[typing.Dict[str, typing.Optional[PipelineFileConfigHashValue]]] = pydantic.Field(
-        description="Hashes for the configuration of the pipeline."
-    )
-    indexed_page_count: typing.Optional[int] = pydantic.Field(
-        description="The number of pages that have been indexed for this file"
-    )
+    custom_metadata: typing.Optional[typing.Dict[str, typing.Optional[PipelineFileCustomMetadataValue]]]
+    config_hash: typing.Optional[typing.Dict[str, typing.Optional[PipelineFileConfigHashValue]]]
+    indexed_page_count: typing.Optional[int]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/pipeline_file_create.py
@@ -21,9 +21,7 @@ class PipelineFileCreate(pydantic.BaseModel):
     """
 
     file_id: str = pydantic.Field(description="The ID of the file")
-    custom_metadata: typing.Optional[
-        typing.Dict[str, typing.Optional[PipelineFileCreateCustomMetadataValue]]
-    ] = pydantic.Field(description="Custom metadata for the file")
+    custom_metadata: typing.Optional[typing.Dict[str, typing.Optional[PipelineFileCreateCustomMetadataValue]]]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/playground_session.py
@@ -4,7 +4,7 @@ import datetime as dt
 import typing
 
 from ..core.datetime_utils import serialize_datetime
-from .chat_message import ChatMessage
+from .app_schema_chat_chat_message import AppSchemaChatChatMessage
 from .llm_parameters import LlmParameters
 from .preset_retrieval_params import PresetRetrievalParams
 
@@ -23,8 +23,8 @@ class PlaygroundSession(pydantic.BaseModel):
     """
 
     id: str = pydantic.Field(description="Unique identifier")
-    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
-    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
+    created_at: typing.Optional[dt.datetime]
+    updated_at: typing.Optional[dt.datetime]
     pipeline_id: str
     user_id: str
     llm_params_id: str
@@ -33,7 +33,7 @@ class PlaygroundSession(pydantic.BaseModel):
     retrieval_params: typing.Optional[PresetRetrievalParams] = pydantic.Field(
         description="Preset retrieval parameters last used in this session."
     )
-    chat_messages: typing.Optional[typing.List[ChatMessage]] = pydantic.Field(
+    chat_messages: typing.Optional[typing.List[AppSchemaChatChatMessage]] = pydantic.Field(
         description="Chat message history for this session."
     )
 

llama_cloud/types/preset_retrieval_params.py
@@ -21,20 +21,14 @@ class PresetRetrievalParams(pydantic.BaseModel):
     Schema for the search params for an retrieval execution that can be preset for a pipeline.
     """
 
-    dense_similarity_top_k: typing.Optional[int] = pydantic.Field(description="Number of nodes for dense retrieval.")
-    dense_similarity_cutoff: typing.Optional[float] = pydantic.Field(
-        description="Minimum similarity score wrt query for retrieval"
-    )
-    sparse_similarity_top_k: typing.Optional[int] = pydantic.Field(description="Number of nodes for sparse retrieval.")
-    enable_reranking: typing.Optional[bool] = pydantic.Field(description="Enable reranking for retrieval")
-    rerank_top_n: typing.Optional[int] = pydantic.Field(description="Number of reranked nodes for returning.")
-    alpha: typing.Optional[float] = pydantic.Field(
-        description="Alpha value for hybrid retrieval to determine the weights between dense and sparse retrieval. 0 is sparse retrieval and 1 is dense retrieval."
-    )
-    search_filters: typing.Optional[MetadataFilters] = pydantic.Field(description="Search filters for retrieval.")
-    files_top_k: typing.Optional[int] = pydantic.Field(
-        description="Number of files to retrieve (only for retrieval mode files_via_metadata and files_via_content)."
-    )
+    dense_similarity_top_k: typing.Optional[int]
+    dense_similarity_cutoff: typing.Optional[float]
+    sparse_similarity_top_k: typing.Optional[int]
+    enable_reranking: typing.Optional[bool]
+    rerank_top_n: typing.Optional[int]
+    alpha: typing.Optional[float]
+    search_filters: typing.Optional[MetadataFilters]
+    files_top_k: typing.Optional[int]
     retrieval_mode: typing.Optional[RetrievalMode] = pydantic.Field(description="The retrieval mode for the query.")
     retrieve_image_nodes: typing.Optional[bool] = pydantic.Field(description="Whether to retrieve image nodes.")
     class_name: typing.Optional[str]
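
The dropped description for alpha still defines its contract: 0 selects pure sparse retrieval, 1 pure dense. A hedged sketch of the convex combination this implies (the actual server-side fusion is not part of this diff):

    def hybrid_score(dense: float, sparse: float, alpha: float) -> float:
        # alpha weights dense vs. sparse: 1.0 -> dense only, 0.0 -> sparse only
        return alpha * dense + (1.0 - alpha) * sparse

    assert hybrid_score(0.9, 0.3, 1.0) == 0.9  # dense retrieval only
    assert hybrid_score(0.9, 0.3, 0.0) == 0.3  # sparse retrieval only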

llama_cloud/types/presigned_url.py
@@ -21,9 +21,7 @@ class PresignedUrl(pydantic.BaseModel):
 
     url: str = pydantic.Field(description="A presigned URL for IO operations against a private file")
     expires_at: dt.datetime = pydantic.Field(description="The time at which the presigned URL expires")
-    form_fields: typing.Optional[typing.Dict[str, typing.Optional[str]]] = pydantic.Field(
-        description="Form fields for a presigned POST request"
-    )
+    form_fields: typing.Optional[typing.Dict[str, typing.Optional[str]]]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/progress_event.py (new file)
@@ -0,0 +1,44 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .progress_event_status import ProgressEventStatus
+from .report_event_type import ReportEventType
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class ProgressEvent(pydantic.BaseModel):
+    """
+    Event for tracking progress of operations in workflows.
+    """
+
+    timestamp: typing.Optional[dt.datetime]
+    id: typing.Optional[str] = pydantic.Field(description="The ID of the event")
+    group_id: typing.Optional[str] = pydantic.Field(description="The ID of the group this event belongs to")
+    variant: ReportEventType
+    msg: str = pydantic.Field(description="The message to display to the user")
+    progress: typing.Optional[float]
+    status: typing.Optional[ProgressEventStatus] = pydantic.Field(description="Current status of the operation")
+    extra_detail: typing.Optional[typing.Dict[str, typing.Any]]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
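
Only variant and msg are required on the new model; everything else is optional, and Config.frozen makes instances immutable. An illustrative construction (field values invented):

    from llama_cloud.types.progress_event import ProgressEvent
    from llama_cloud.types.progress_event_status import ProgressEventStatus
    from llama_cloud.types.report_event_type import ReportEventType

    event = ProgressEvent(
        variant=ReportEventType.FILE_PROCESSING,
        msg="Parsing upload.pdf",
        progress=0.5,
        status=ProgressEventStatus.IN_PROGRESS,
    )
    # exclude_unset in json() drops the optional fields left unset above.
    print(event.json())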

llama_cloud/types/progress_event_status.py (new file)
@@ -0,0 +1,33 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class ProgressEventStatus(str, enum.Enum):
+    """
+    Current status of the operation
+    """
+
+    PENDING = "pending"
+    IN_PROGRESS = "in_progress"
+    COMPLETED = "completed"
+    ERROR = "error"
+
+    def visit(
+        self,
+        pending: typing.Callable[[], T_Result],
+        in_progress: typing.Callable[[], T_Result],
+        completed: typing.Callable[[], T_Result],
+        error: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is ProgressEventStatus.PENDING:
+            return pending()
+        if self is ProgressEventStatus.IN_PROGRESS:
+            return in_progress()
+        if self is ProgressEventStatus.COMPLETED:
+            return completed()
+        if self is ProgressEventStatus.ERROR:
+            return error()
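
The generated visit() method gives exhaustive callback-style dispatch over the enum, one callable per member:

    from llama_cloud.types.progress_event_status import ProgressEventStatus

    label = ProgressEventStatus.IN_PROGRESS.visit(
        pending=lambda: "queued",
        in_progress=lambda: "running",
        completed=lambda: "done",
        error=lambda: "failed",
    )
    assert label == "running"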

llama_cloud/types/project.py
@@ -21,8 +21,8 @@ class Project(pydantic.BaseModel):
 
     name: str
     id: str = pydantic.Field(description="Unique identifier")
-    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
-    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
+    created_at: typing.Optional[dt.datetime]
+    updated_at: typing.Optional[dt.datetime]
     ad_hoc_eval_dataset_id: typing.Optional[str]
     organization_id: str = pydantic.Field(description="The Organization ID the project is under.")
     is_default: typing.Optional[bool] = pydantic.Field(

llama_cloud/types/prompt_mixin_prompts.py
@@ -21,7 +21,7 @@ class PromptMixinPrompts(pydantic.BaseModel):
     """
 
     project_id: str = pydantic.Field(description="The ID of the project.")
-    id: typing.Optional[str] = pydantic.Field(description="The ID of the prompt set.")
+    id: typing.Optional[str]
     name: str = pydantic.Field(description="The name of the prompt set.")
     prompts: typing.List[PromptSpec] = pydantic.Field(description="The prompts.")
 

llama_cloud/types/prompt_spec.py
@@ -4,7 +4,7 @@ import datetime as dt
 import typing
 
 from ..core.datetime_utils import serialize_datetime
-from .chat_message import ChatMessage
+from .app_schema_chat_chat_message import AppSchemaChatChatMessage
 
 try:
     import pydantic
@@ -19,10 +19,8 @@ class PromptSpec(pydantic.BaseModel):
     prompt_key: str = pydantic.Field(description="The key of the prompt in the PromptMixin.")
     prompt_class: str = pydantic.Field(description="The class of the prompt (PromptTemplate or ChatPromptTemplate).")
     prompt_type: str = pydantic.Field(description="The type of prompt.")
-    template: typing.Optional[str] = pydantic.Field(description="The template of the prompt.")
-    message_templates: typing.Optional[typing.List[ChatMessage]] = pydantic.Field(
-        description="The chat message templates of the prompt."
-    )
+    template: typing.Optional[str]
+    message_templates: typing.Optional[typing.List[AppSchemaChatChatMessage]]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/related_node_info.py
@@ -4,7 +4,7 @@ import datetime as dt
 import typing
 
 from ..core.datetime_utils import serialize_datetime
-from .object_type import ObjectType
+from .related_node_info_node_type import RelatedNodeInfoNodeType
 
 try:
     import pydantic
@@ -17,7 +17,7 @@ except ImportError:
 
 class RelatedNodeInfo(pydantic.BaseModel):
     node_id: str
-    node_type: typing.Optional[ObjectType]
+    node_type: typing.Optional[RelatedNodeInfoNodeType]
     metadata: typing.Optional[typing.Dict[str, typing.Any]]
     hash: typing.Optional[str]
     class_name: typing.Optional[str]

llama_cloud/types/related_node_info_node_type.py (new file)
@@ -0,0 +1,7 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+from .object_type import ObjectType
+
+RelatedNodeInfoNodeType = typing.Union[ObjectType, str]
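
This alias widens RelatedNodeInfo.node_type from the ObjectType enum alone to enum-or-string, so node types outside the enum now validate. A small sketch (the string value is invented):

    from llama_cloud.types.related_node_info import RelatedNodeInfo

    # A plain string is accepted via typing.Union[ObjectType, str];
    # an ObjectType member still works as before.
    node = RelatedNodeInfo(node_id="node-1", node_type="custom_node_type")
    assert node.node_type == "custom_node_type"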

llama_cloud/types/report.py (new file)
@@ -0,0 +1,33 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .report_block import ReportBlock
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class Report(pydantic.BaseModel):
+    id: str = pydantic.Field(description="The id of the report")
+    blocks: typing.Optional[typing.List[ReportBlock]] = pydantic.Field(description="The blocks of the report")
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}

llama_cloud/types/report_block.py (new file)
@@ -0,0 +1,34 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .text_node_with_score import TextNodeWithScore
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class ReportBlock(pydantic.BaseModel):
+    idx: int = pydantic.Field(description="The index of the block")
+    template: str = pydantic.Field(description="The content of the block")
+    sources: typing.Optional[typing.List[TextNodeWithScore]] = pydantic.Field(description="The sources for the block")
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}

llama_cloud/types/report_block_dependency.py (new file)
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class ReportBlockDependency(str, enum.Enum):
+    NONE = "none"
+    ALL = "all"
+    PREVIOUS = "previous"
+    NEXT = "next"
+
+    def visit(
+        self,
+        none: typing.Callable[[], T_Result],
+        all: typing.Callable[[], T_Result],
+        previous: typing.Callable[[], T_Result],
+        next: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is ReportBlockDependency.NONE:
+            return none()
+        if self is ReportBlockDependency.ALL:
+            return all()
+        if self is ReportBlockDependency.PREVIOUS:
+            return previous()
+        if self is ReportBlockDependency.NEXT:
+            return next()

llama_cloud/types/report_create_response.py (new file)
@@ -0,0 +1,31 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class ReportCreateResponse(pydantic.BaseModel):
+    id: str = pydantic.Field(description="The id of the report")
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}

llama_cloud/types/report_event_item.py (new file)
@@ -0,0 +1,40 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .report_event_item_event_data import ReportEventItemEventData
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class ReportEventItem(pydantic.BaseModel):
+    """
+    From backend schema
+    """
+
+    id: str = pydantic.Field(description="The id of the event")
+    report_id: str = pydantic.Field(description="The id of the report")
+    event_type: str = pydantic.Field(description="The type of the event")
+    event_data: ReportEventItemEventData = pydantic.Field(description="The data for the event")
+    timestamp: dt.datetime = pydantic.Field(description="The timestamp for the event")
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}

llama_cloud/types/report_event_item_event_data.py (new file)
@@ -0,0 +1,45 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import typing_extensions
+
+from .progress_event import ProgressEvent
+from .report_state_event import ReportStateEvent
+from .report_update_event import ReportUpdateEvent
+
+
+class ReportEventItemEventData_Progress(ProgressEvent):
+    type: typing_extensions.Literal["progress"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class ReportEventItemEventData_ReportBlockUpdate(ReportUpdateEvent):
+    type: typing_extensions.Literal["report_block_update"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class ReportEventItemEventData_ReportStateUpdate(ReportStateEvent):
+    type: typing_extensions.Literal["report_state_update"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+ReportEventItemEventData = typing.Union[
+    ReportEventItemEventData_Progress,
+    ReportEventItemEventData_ReportBlockUpdate,
+    ReportEventItemEventData_ReportStateUpdate,
+]
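
Each variant subclasses its event model and adds a typing_extensions.Literal type tag, so the "type" field discriminates the union when event rows are materialized. A hedged sketch (payload values invented):

    from llama_cloud.types.report_event_item_event_data import (
        ReportEventItemEventData_Progress,
    )

    payload = {
        "type": "progress",
        "variant": "generate_block",  # coerced to ReportEventType, a str enum
        "msg": "Drafting section 2",
    }
    event = ReportEventItemEventData_Progress(**payload)
    assert event.type == "progress"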

llama_cloud/types/report_event_type.py (new file)
@@ -0,0 +1,37 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class ReportEventType(str, enum.Enum):
+    LOAD_TEMPLATE = "load_template"
+    EXTRACT_PLAN = "extract_plan"
+    SUMMARIZE = "summarize"
+    FILE_PROCESSING = "file_processing"
+    GENERATE_BLOCK = "generate_block"
+    EDITING = "editing"
+
+    def visit(
+        self,
+        load_template: typing.Callable[[], T_Result],
+        extract_plan: typing.Callable[[], T_Result],
+        summarize: typing.Callable[[], T_Result],
+        file_processing: typing.Callable[[], T_Result],
+        generate_block: typing.Callable[[], T_Result],
+        editing: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is ReportEventType.LOAD_TEMPLATE:
+            return load_template()
+        if self is ReportEventType.EXTRACT_PLAN:
+            return extract_plan()
+        if self is ReportEventType.SUMMARIZE:
+            return summarize()
+        if self is ReportEventType.FILE_PROCESSING:
+            return file_processing()
+        if self is ReportEventType.GENERATE_BLOCK:
+            return generate_block()
+        if self is ReportEventType.EDITING:
+            return editing()

llama_cloud/types/report_metadata.py (new file)
@@ -0,0 +1,39 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .report_state import ReportState
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class ReportMetadata(pydantic.BaseModel):
+    """
+    Used to update the metadata of a report.
+    """
+
+    id: str = pydantic.Field(description="The id of the report")
+    name: str = pydantic.Field(description="The name of the report")
+    report_metadata: typing.Dict[str, typing.Any] = pydantic.Field(description="The metadata for the report")
+    state: ReportState = pydantic.Field(description="The state of the report")
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}

llama_cloud/types/report_plan.py (new file)
@@ -0,0 +1,36 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .report_plan_block import ReportPlanBlock
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class ReportPlan(pydantic.BaseModel):
+    id: typing.Optional[str] = pydantic.Field(description="The id of the report plan")
+    blocks: typing.Optional[typing.List[ReportPlanBlock]] = pydantic.Field(description="The blocks of the report")
+    generated_at: typing.Optional[dt.datetime] = pydantic.Field(
+        description="The timestamp of when the plan was generated"
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}