llama-cloud 0.0.8__tar.gz → 0.0.9__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of llama-cloud might be problematic.
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/PKG-INFO +2 -1
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/__init__.py +16 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/client.py +3 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/__init__.py +13 -1
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/data_sinks/client.py +40 -8
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/data_sources/client.py +48 -12
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/data_sources/types/data_source_update_component_one.py +2 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/extraction/client.py +4 -20
- llama_cloud-0.0.9/llama_cloud/resources/organizations/client.py +786 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/pipelines/client.py +166 -10
- llama_cloud-0.0.9/llama_cloud/resources/projects/__init__.py +2 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/projects/client.py +28 -8
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/__init__.py +14 -0
- llama_cloud-0.0.9/llama_cloud/types/chat_params.py +38 -0
- llama_cloud-0.0.9/llama_cloud/types/cloud_jira_data_source.py +43 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/cloud_sharepoint_data_source.py +1 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/configurable_data_source_names.py +4 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/data_source_component_one.py +2 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/data_source_create_component_one.py +2 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/eval_dataset_job_record.py +1 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/extraction_schema.py +0 -1
- llama_cloud-0.0.9/llama_cloud/types/organization.py +38 -0
- llama_cloud-0.0.9/llama_cloud/types/organization_create.py +35 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/pipeline_data_source_component_one.py +2 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/preset_retrieval_params.py +5 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/project.py +1 -1
- llama_cloud-0.0.9/llama_cloud/types/retrieval_mode.py +29 -0
- llama_cloud-0.0.9/llama_cloud/types/user_organization.py +40 -0
- llama_cloud-0.0.9/llama_cloud/types/user_organization_create.py +36 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/pyproject.toml +1 -1
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/LICENSE +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/README.md +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/core/__init__.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/core/api_error.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/core/client_wrapper.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/core/datetime_utils.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/core/jsonable_encoder.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/core/remove_none_from_dict.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/environment.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/errors/__init__.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/errors/unprocessable_entity_error.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/component_definitions/__init__.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/component_definitions/client.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/data_sinks/__init__.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/data_sinks/types/__init__.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/data_sinks/types/data_sink_update_component.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/data_sinks/types/data_sink_update_component_one.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/data_sources/__init__.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/data_sources/types/__init__.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/data_sources/types/data_source_update_component.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/data_sources/types/data_source_update_custom_metadata_value.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/evals/__init__.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/evals/client.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/extraction/__init__.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/extraction/types/__init__.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/extraction/types/extraction_schema_update_data_schema_value.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/files/__init__.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/files/client.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/files/types/__init__.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/files/types/file_create_resource_info_value.py +0 -0
- {llama_cloud-0.0.8/llama_cloud/resources/parsing → llama_cloud-0.0.9/llama_cloud/resources/organizations}/__init__.py +0 -0
- {llama_cloud-0.0.8/llama_cloud/resources/projects → llama_cloud-0.0.9/llama_cloud/resources/parsing}/__init__.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/parsing/client.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/pipelines/__init__.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/pipelines/types/__init__.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/pipelines/types/pipeline_file_update_custom_metadata_value.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/azure_open_ai_embedding.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/base.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/base_prompt_template.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/bedrock_embedding.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/chat_message.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/cloud_az_storage_blob_data_source.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/cloud_azure_ai_search_vector_store.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/cloud_chroma_vector_store.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/cloud_document.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/cloud_document_create.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/cloud_notion_page_data_source.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/cloud_one_drive_data_source.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/cloud_pinecone_vector_store.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/cloud_postgres_vector_store.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/cloud_qdrant_vector_store.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/cloud_s_3_data_source.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/cloud_slack_data_source.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/cloud_weaviate_vector_store.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/code_splitter.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/cohere_embedding.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/configurable_data_sink_names.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/configurable_transformation_definition.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/configurable_transformation_names.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/configured_transformation_item.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/configured_transformation_item_component.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/configured_transformation_item_component_one.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/data_sink.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/data_sink_component.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/data_sink_component_one.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/data_sink_create.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/data_sink_create_component.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/data_sink_create_component_one.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/data_sink_definition.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/data_source.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/data_source_component.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/data_source_create.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/data_source_create_component.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/data_source_create_custom_metadata_value.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/data_source_custom_metadata_value.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/data_source_definition.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/eval_dataset.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/eval_dataset_job_params.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/eval_execution_params.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/eval_execution_params_override.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/eval_llm_model_data.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/eval_question.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/eval_question_create.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/eval_question_result.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/extraction_result.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/extraction_result_data_value.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/extraction_schema_data_schema_value.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/file.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/file_resource_info_value.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/filter_condition.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/filter_operator.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/gemini_embedding.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/html_node_parser.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/http_validation_error.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/hugging_face_inference_api_embedding.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/hugging_face_inference_api_embedding_token.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/json_node_parser.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/llama_parse_parameters.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/llama_parse_supported_file_extensions.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/llm.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/local_eval.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/local_eval_results.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/local_eval_sets.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/managed_ingestion_status.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/managed_ingestion_status_response.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/markdown_element_node_parser.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/markdown_node_parser.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/message_role.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/metadata_filter.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/metadata_filter_value.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/metadata_filters.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/metadata_filters_filters_item.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/metric_result.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/node_parser.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/object_type.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/open_ai_embedding.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/parser_languages.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/parsing_history_item.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/parsing_job.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/parsing_job_json_result.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/parsing_job_markdown_result.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/parsing_job_text_result.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/parsing_usage.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/pipeline.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/pipeline_create.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/pipeline_data_source.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/pipeline_data_source_component.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/pipeline_data_source_create.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/pipeline_data_source_custom_metadata_value.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/pipeline_deployment.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/pipeline_file.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/pipeline_file_create.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/pipeline_file_create_custom_metadata_value.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/pipeline_file_custom_metadata_value.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/pipeline_file_resource_info_value.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/pipeline_type.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/pooling.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/presigned_url.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/project_create.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/prompt_mixin_prompts.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/prompt_spec.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/pydantic_program_mode.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/related_node_info.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/retrieve_results.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/sentence_splitter.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/simple_file_node_parser.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/status_enum.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/supported_eval_llm_model.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/supported_eval_llm_model_names.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/text_node.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/text_node_relationships_value.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/text_node_with_score.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/token_text_splitter.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/transformation_category_names.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/validation_error.py +0 -0
- {llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/types/validation_error_loc_item.py +0 -0
{llama_cloud-0.0.8 → llama_cloud-0.0.9}/PKG-INFO +2 -1

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: llama-cloud
-Version: 0.0.8
+Version: 0.0.9
 Summary:
 Author: Logan Markewich
 Author-email: logan@runllama.ai
@@ -10,6 +10,7 @@ Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: httpx (>=0.20.0)
 Requires-Dist: pydantic (>=1.10)
 Description-Content-Type: text/markdown
{llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/__init__.py +16 -0

@@ -6,11 +6,13 @@ from .types import (
     BasePromptTemplate,
     BedrockEmbedding,
     ChatMessage,
+    ChatParams,
     CloudAzStorageBlobDataSource,
     CloudAzureAiSearchVectorStore,
     CloudChromaVectorStore,
     CloudDocument,
     CloudDocumentCreate,
+    CloudJiraDataSource,
     CloudNotionPageDataSource,
     CloudOneDriveDataSource,
     CloudPineconeVectorStore,
@@ -87,6 +89,8 @@ from .types import (
     NodeParser,
     ObjectType,
     OpenAiEmbedding,
+    Organization,
+    OrganizationCreate,
     ParserLanguages,
     ParsingHistoryItem,
     ParsingJob,
@@ -117,6 +121,7 @@ from .types import (
     PromptSpec,
     PydanticProgramMode,
     RelatedNodeInfo,
+    RetrievalMode,
     RetrieveResults,
     SentenceSplitter,
     SimpleFileNodeParser,
@@ -128,6 +133,8 @@ from .types import (
     TextNodeWithScore,
     TokenTextSplitter,
     TransformationCategoryNames,
+    UserOrganization,
+    UserOrganizationCreate,
     ValidationError,
     ValidationErrorLocItem,
 )
@@ -147,6 +154,7 @@ from .resources import (
     evals,
     extraction,
     files,
+    organizations,
     parsing,
     pipelines,
     projects,
@@ -159,11 +167,13 @@ __all__ = [
     "BasePromptTemplate",
     "BedrockEmbedding",
     "ChatMessage",
+    "ChatParams",
     "CloudAzStorageBlobDataSource",
     "CloudAzureAiSearchVectorStore",
     "CloudChromaVectorStore",
     "CloudDocument",
     "CloudDocumentCreate",
+    "CloudJiraDataSource",
     "CloudNotionPageDataSource",
     "CloudOneDriveDataSource",
     "CloudPineconeVectorStore",
@@ -248,6 +258,8 @@ __all__ = [
     "NodeParser",
     "ObjectType",
     "OpenAiEmbedding",
+    "Organization",
+    "OrganizationCreate",
     "ParserLanguages",
     "ParsingHistoryItem",
     "ParsingJob",
@@ -279,6 +291,7 @@ __all__ = [
     "PromptSpec",
     "PydanticProgramMode",
     "RelatedNodeInfo",
+    "RetrievalMode",
     "RetrieveResults",
     "SentenceSplitter",
     "SimpleFileNodeParser",
@@ -291,6 +304,8 @@ __all__ = [
     "TokenTextSplitter",
     "TransformationCategoryNames",
     "UnprocessableEntityError",
+    "UserOrganization",
+    "UserOrganizationCreate",
     "ValidationError",
     "ValidationErrorLocItem",
     "component_definitions",
@@ -299,6 +314,7 @@ __all__ = [
     "evals",
     "extraction",
     "files",
+    "organizations",
     "parsing",
     "pipelines",
     "projects",
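The net effect of these __init__.py changes is that the new 0.0.9 types are importable from the package root. A minimal, import-only sketch (nothing below calls the API):

from llama_cloud import (
    ChatParams,
    CloudJiraDataSource,
    Organization,
    OrganizationCreate,
    RetrievalMode,
    UserOrganization,
    UserOrganizationCreate,
)

# All seven names resolve after upgrading to 0.0.9; on 0.0.8 this import fails.
print(ChatParams, RetrievalMode)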
{llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/client.py +3 -0

@@ -12,6 +12,7 @@ from .resources.data_sources.client import AsyncDataSourcesClient, DataSourcesClient
 from .resources.evals.client import AsyncEvalsClient, EvalsClient
 from .resources.extraction.client import AsyncExtractionClient, ExtractionClient
 from .resources.files.client import AsyncFilesClient, FilesClient
+from .resources.organizations.client import AsyncOrganizationsClient, OrganizationsClient
 from .resources.parsing.client import AsyncParsingClient, ParsingClient
 from .resources.pipelines.client import AsyncPipelinesClient, PipelinesClient
 from .resources.projects.client import AsyncProjectsClient, ProjectsClient
@@ -34,6 +35,7 @@ class LlamaCloud:
         )
         self.data_sinks = DataSinksClient(client_wrapper=self._client_wrapper)
         self.data_sources = DataSourcesClient(client_wrapper=self._client_wrapper)
+        self.organizations = OrganizationsClient(client_wrapper=self._client_wrapper)
         self.projects = ProjectsClient(client_wrapper=self._client_wrapper)
         self.files = FilesClient(client_wrapper=self._client_wrapper)
         self.pipelines = PipelinesClient(client_wrapper=self._client_wrapper)
@@ -60,6 +62,7 @@ class AsyncLlamaCloud:
         )
         self.data_sinks = AsyncDataSinksClient(client_wrapper=self._client_wrapper)
         self.data_sources = AsyncDataSourcesClient(client_wrapper=self._client_wrapper)
+        self.organizations = AsyncOrganizationsClient(client_wrapper=self._client_wrapper)
         self.projects = AsyncProjectsClient(client_wrapper=self._client_wrapper)
         self.files = AsyncFilesClient(client_wrapper=self._client_wrapper)
         self.pipelines = AsyncPipelinesClient(client_wrapper=self._client_wrapper)
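In practice this means both the sync and async clients gain an organizations attribute. A minimal sketch, assuming the usual Fern-generated constructor LlamaCloud(token=...), which is not part of this diff:

from llama_cloud.client import LlamaCloud

client = LlamaCloud(token="YOUR_API_KEY")  # constructor assumed; not shown in this diff

# New in 0.0.9: an OrganizationsClient is attached alongside the existing resources.
assert hasattr(client, "organizations")
assert hasattr(client, "data_sinks") and hasattr(client, "data_sources")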
{llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/__init__.py +13 -1

@@ -1,6 +1,17 @@
 # This file was auto-generated by Fern from our API Definition.
 
-from . import component_definitions, data_sinks, data_sources, evals, extraction, files, parsing, pipelines, projects
+from . import (
+    component_definitions,
+    data_sinks,
+    data_sources,
+    evals,
+    extraction,
+    files,
+    organizations,
+    parsing,
+    pipelines,
+    projects,
+)
 from .data_sinks import DataSinkUpdateComponent, DataSinkUpdateComponentOne
 from .data_sources import DataSourceUpdateComponent, DataSourceUpdateComponentOne, DataSourceUpdateCustomMetadataValue
 from .extraction import ExtractionSchemaUpdateDataSchemaValue
@@ -22,6 +33,7 @@ __all__ = [
     "evals",
     "extraction",
     "files",
+    "organizations",
     "parsing",
     "pipelines",
     "projects",
{llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/data_sinks/client.py +40 -8

@@ -63,13 +63,21 @@ class DataSinksClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
-    def create_data_sink(self, *, project_id: typing.Optional[str] = None, request: DataSinkCreate) -> DataSink:
+    def create_data_sink(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        request: DataSinkCreate,
+    ) -> DataSink:
         """
         Create a new data sink.
 
         Parameters:
             - project_id: typing.Optional[str].
 
+            - organization_id: typing.Optional[str].
+
             - request: DataSinkCreate.
         ---
         from llama_cloud import ConfigurableDataSinkNames, DataSinkCreate
@@ -88,7 +96,7 @@ class DataSinksClient:
         _response = self._client_wrapper.httpx_client.request(
             "POST",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sinks"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder(request),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
@@ -103,7 +111,13 @@ class DataSinksClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
-    def upsert_data_sink(self, *, project_id: typing.Optional[str] = None, request: DataSinkCreate) -> DataSink:
+    def upsert_data_sink(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        request: DataSinkCreate,
+    ) -> DataSink:
         """
         Upserts a data sink.
         Updates if a data sink with the same name and project_id already exists. Otherwise, creates a new data sink.
@@ -111,6 +125,8 @@ class DataSinksClient:
         Parameters:
             - project_id: typing.Optional[str].
 
+            - organization_id: typing.Optional[str].
+
             - request: DataSinkCreate.
         ---
         from llama_cloud import ConfigurableDataSinkNames, DataSinkCreate
@@ -129,7 +145,7 @@ class DataSinksClient:
         _response = self._client_wrapper.httpx_client.request(
             "PUT",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sinks"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder(request),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
@@ -298,13 +314,21 @@ class AsyncDataSinksClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
-    async def create_data_sink(self, *, project_id: typing.Optional[str] = None, request: DataSinkCreate) -> DataSink:
+    async def create_data_sink(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        request: DataSinkCreate,
+    ) -> DataSink:
         """
         Create a new data sink.
 
         Parameters:
             - project_id: typing.Optional[str].
 
+            - organization_id: typing.Optional[str].
+
             - request: DataSinkCreate.
         ---
         from llama_cloud import ConfigurableDataSinkNames, DataSinkCreate
@@ -323,7 +347,7 @@ class AsyncDataSinksClient:
         _response = await self._client_wrapper.httpx_client.request(
             "POST",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sinks"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder(request),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
@@ -338,7 +362,13 @@ class AsyncDataSinksClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
-    async def upsert_data_sink(self, *, project_id: typing.Optional[str] = None, request: DataSinkCreate) -> DataSink:
+    async def upsert_data_sink(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        request: DataSinkCreate,
+    ) -> DataSink:
         """
         Upserts a data sink.
         Updates if a data sink with the same name and project_id already exists. Otherwise, creates a new data sink.
@@ -346,6 +376,8 @@ class AsyncDataSinksClient:
         Parameters:
             - project_id: typing.Optional[str].
 
+            - organization_id: typing.Optional[str].
+
             - request: DataSinkCreate.
         ---
         from llama_cloud import ConfigurableDataSinkNames, DataSinkCreate
@@ -364,7 +396,7 @@ class AsyncDataSinksClient:
         _response = await self._client_wrapper.httpx_client.request(
             "PUT",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sinks"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder(request),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
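For callers, the visible change is an optional organization_id keyword that is forwarded as a query parameter next to project_id. A rough sketch of the new call shape; the LlamaCloud constructor and the DataSinkCreate fields (name, sink_type) are assumptions based on the generated SDK and are not shown in this diff:

from llama_cloud import ConfigurableDataSinkNames, DataSinkCreate
from llama_cloud.client import LlamaCloud

client = LlamaCloud(token="YOUR_API_KEY")  # assumed constructor

# DataSinkCreate field names are assumed here; adjust to the actual model.
sink = DataSinkCreate(
    name="my-pinecone-sink",
    sink_type=ConfigurableDataSinkNames.PINECONE,
)

client.data_sinks.upsert_data_sink(
    project_id="<project-id>",            # optional, as in 0.0.8
    organization_id="<organization-id>",  # new optional query parameter in 0.0.9
    request=sink,
)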
{llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/data_sources/client.py +48 -12

@@ -32,13 +32,17 @@ class DataSourcesClient:
     def __init__(self, *, client_wrapper: SyncClientWrapper):
         self._client_wrapper = client_wrapper
 
-    def list_data_sources(self, *, project_id: typing.Optional[str] = None) -> typing.List[DataSource]:
+    def list_data_sources(
+        self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+    ) -> typing.List[DataSource]:
         """
         List data sources for a given project.
         If project_id is not provided, uses the default project.
 
         Parameters:
             - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud
 
@@ -50,7 +54,7 @@ class DataSourcesClient:
         _response = self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sources"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -64,13 +68,21 @@ class DataSourcesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
-    def create_data_source(self, *, project_id: typing.Optional[str] = None, request: DataSourceCreate) -> DataSource:
+    def create_data_source(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        request: DataSourceCreate,
+    ) -> DataSource:
         """
         Create a new data source.
 
         Parameters:
             - project_id: typing.Optional[str].
 
+            - organization_id: typing.Optional[str].
+
             - request: DataSourceCreate.
         ---
         from llama_cloud import ConfigurableDataSourceNames, DataSourceCreate
@@ -89,7 +101,7 @@ class DataSourcesClient:
         _response = self._client_wrapper.httpx_client.request(
             "POST",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sources"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder(request),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
@@ -104,7 +116,13 @@ class DataSourcesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
-    def upsert_data_source(self, *, project_id: typing.Optional[str] = None, request: DataSourceCreate) -> DataSource:
+    def upsert_data_source(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        request: DataSourceCreate,
+    ) -> DataSource:
         """
         Upserts a data source.
         Updates if a data source with the same name and project_id already exists. Otherwise, creates a new data source.
@@ -112,6 +130,8 @@ class DataSourcesClient:
         Parameters:
             - project_id: typing.Optional[str].
 
+            - organization_id: typing.Optional[str].
+
             - request: DataSourceCreate.
         ---
         from llama_cloud import ConfigurableDataSourceNames, DataSourceCreate
@@ -130,7 +150,7 @@ class DataSourcesClient:
         _response = self._client_wrapper.httpx_client.request(
             "PUT",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sources"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder(request),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
@@ -272,13 +292,17 @@ class AsyncDataSourcesClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
         self._client_wrapper = client_wrapper
 
-    async def list_data_sources(self, *, project_id: typing.Optional[str] = None) -> typing.List[DataSource]:
+    async def list_data_sources(
+        self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+    ) -> typing.List[DataSource]:
         """
         List data sources for a given project.
         If project_id is not provided, uses the default project.
 
         Parameters:
             - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud
 
@@ -290,7 +314,7 @@ class AsyncDataSourcesClient:
         _response = await self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sources"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -305,7 +329,11 @@ class AsyncDataSourcesClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
     async def create_data_source(
-        self, *, project_id: typing.Optional[str] = None, request: DataSourceCreate
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        request: DataSourceCreate,
     ) -> DataSource:
         """
         Create a new data source.
@@ -313,6 +341,8 @@ class AsyncDataSourcesClient:
         Parameters:
             - project_id: typing.Optional[str].
 
+            - organization_id: typing.Optional[str].
+
             - request: DataSourceCreate.
         ---
         from llama_cloud import ConfigurableDataSourceNames, DataSourceCreate
@@ -331,7 +361,7 @@ class AsyncDataSourcesClient:
         _response = await self._client_wrapper.httpx_client.request(
             "POST",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sources"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder(request),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
@@ -347,7 +377,11 @@ class AsyncDataSourcesClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
     async def upsert_data_source(
-        self, *, project_id: typing.Optional[str] = None, request: DataSourceCreate
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        request: DataSourceCreate,
     ) -> DataSource:
         """
         Upserts a data source.
@@ -356,6 +390,8 @@ class AsyncDataSourcesClient:
         Parameters:
             - project_id: typing.Optional[str].
 
+            - organization_id: typing.Optional[str].
+
             - request: DataSourceCreate.
         ---
         from llama_cloud import ConfigurableDataSourceNames, DataSourceCreate
@@ -374,7 +410,7 @@ class AsyncDataSourcesClient:
         _response = await self._client_wrapper.httpx_client.request(
             "PUT",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sources"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder(request),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
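The data source methods pick up the same optional organization_id keyword. A minimal sketch of the updated list call, again assuming the standard LlamaCloud(token=...) constructor (not part of this diff):

from llama_cloud.client import LlamaCloud

client = LlamaCloud(token="YOUR_API_KEY")  # assumed constructor

# project_id behaves as before; organization_id is the new, equally optional filter.
# Both are dropped from the query string when None, via remove_none_from_dict.
data_sources = client.data_sources.list_data_sources(
    project_id="<project-id>",
    organization_id="<organization-id>",
)
for source in data_sources:
    print(source.name)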
{llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/data_sources/types/data_source_update_component_one.py +2 -0

@@ -3,6 +3,7 @@
 import typing
 
 from ....types.cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
+from ....types.cloud_jira_data_source import CloudJiraDataSource
 from ....types.cloud_notion_page_data_source import CloudNotionPageDataSource
 from ....types.cloud_one_drive_data_source import CloudOneDriveDataSource
 from ....types.cloud_s_3_data_source import CloudS3DataSource
@@ -16,4 +17,5 @@ DataSourceUpdateComponentOne = typing.Union[
     CloudSharepointDataSource,
     CloudSlackDataSource,
     CloudNotionPageDataSource,
+    CloudJiraDataSource,
 ]
{llama_cloud-0.0.8 → llama_cloud-0.0.9}/llama_cloud/resources/extraction/client.py +4 -20

@@ -31,7 +31,7 @@ class ExtractionClient:
         self._client_wrapper = client_wrapper
 
     def infer_schema(
-        self, *, name: str, project_id: typing.Optional[str] = OMIT, file_ids: typing.List[str], openai_api_key: str
+        self, *, name: str, project_id: typing.Optional[str] = OMIT, file_ids: typing.List[str]
     ) -> ExtractionSchema:
         """
         Parameters:
@@ -40,8 +40,6 @@
             - project_id: typing.Optional[str]. The ID of the project that the extraction schema belongs to
 
             - file_ids: typing.List[str]. The IDs of the files that the extraction schema contains
-
-            - openai_api_key: str. The API key for the OpenAI API
         ---
         from llama_cloud.client import LlamaCloud
 
@@ -51,10 +49,9 @@ class ExtractionClient:
         client.extraction.infer_schema(
             name="string",
             file_ids=[],
-            openai_api_key="string",
         )
         """
-        _request: typing.Dict[str, typing.Any] = {"name": name, "file_ids": file_ids, "openai_api_key": openai_api_key}
+        _request: typing.Dict[str, typing.Any] = {"name": name, "file_ids": file_ids}
         if project_id is not OMIT:
             _request["project_id"] = project_id
         _response = self._client_wrapper.httpx_client.request(
@@ -138,15 +135,12 @@
         schema_id: str,
         *,
         data_schema: typing.Optional[typing.Dict[str, ExtractionSchemaUpdateDataSchemaValue]] = OMIT,
-        openai_api_key: typing.Optional[str] = OMIT,
     ) -> ExtractionSchema:
         """
         Parameters:
             - schema_id: str.
 
             - data_schema: typing.Optional[typing.Dict[str, ExtractionSchemaUpdateDataSchemaValue]]. The schema of the data
-
-            - openai_api_key: typing.Optional[str]. The API key for the OpenAI API
         ---
         from llama_cloud.client import LlamaCloud
 
@@ -160,8 +154,6 @@
         _request: typing.Dict[str, typing.Any] = {}
         if data_schema is not OMIT:
             _request["data_schema"] = data_schema
-        if openai_api_key is not OMIT:
-            _request["openai_api_key"] = openai_api_key
         _response = self._client_wrapper.httpx_client.request(
             "PUT",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/extraction/schemas/{schema_id}"),
@@ -342,7 +334,7 @@ class AsyncExtractionClient:
         self._client_wrapper = client_wrapper
 
     async def infer_schema(
-        self, *, name: str, project_id: typing.Optional[str] = OMIT, file_ids: typing.List[str], openai_api_key: str
+        self, *, name: str, project_id: typing.Optional[str] = OMIT, file_ids: typing.List[str]
    ) -> ExtractionSchema:
         """
         Parameters:
@@ -351,8 +343,6 @@
             - project_id: typing.Optional[str]. The ID of the project that the extraction schema belongs to
 
             - file_ids: typing.List[str]. The IDs of the files that the extraction schema contains
-
-            - openai_api_key: str. The API key for the OpenAI API
         ---
         from llama_cloud.client import AsyncLlamaCloud
 
@@ -362,10 +352,9 @@ class AsyncExtractionClient:
         await client.extraction.infer_schema(
             name="string",
             file_ids=[],
-            openai_api_key="string",
         )
         """
-        _request: typing.Dict[str, typing.Any] = {"name": name, "file_ids": file_ids, "openai_api_key": openai_api_key}
+        _request: typing.Dict[str, typing.Any] = {"name": name, "file_ids": file_ids}
         if project_id is not OMIT:
             _request["project_id"] = project_id
         _response = await self._client_wrapper.httpx_client.request(
@@ -449,15 +438,12 @@
         schema_id: str,
         *,
         data_schema: typing.Optional[typing.Dict[str, ExtractionSchemaUpdateDataSchemaValue]] = OMIT,
-        openai_api_key: typing.Optional[str] = OMIT,
     ) -> ExtractionSchema:
         """
         Parameters:
             - schema_id: str.
 
             - data_schema: typing.Optional[typing.Dict[str, ExtractionSchemaUpdateDataSchemaValue]]. The schema of the data
-
-            - openai_api_key: typing.Optional[str]. The API key for the OpenAI API
         ---
         from llama_cloud.client import AsyncLlamaCloud
 
@@ -471,8 +457,6 @@
         _request: typing.Dict[str, typing.Any] = {}
         if data_schema is not OMIT:
             _request["data_schema"] = data_schema
-        if openai_api_key is not OMIT:
-            _request["openai_api_key"] = openai_api_key
         _response = await self._client_wrapper.httpx_client.request(
             "PUT",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/extraction/schemas/{schema_id}"),
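After this change, infer_schema and the schema update call no longer accept openai_api_key; the field is simply dropped from the request body. A sketch of the 0.0.9 call, mirroring the docstring example in the diff above (constructor assumed as before):

from llama_cloud.client import LlamaCloud

client = LlamaCloud(token="YOUR_API_KEY")  # assumed constructor

# In 0.0.8 this call also required openai_api_key="..."; in 0.0.9 that keyword is gone.
schema = client.extraction.infer_schema(
    name="my-schema",
    file_ids=[],  # IDs of previously uploaded files
)
print(schema)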