cumulusci-plus 5.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of cumulusci-plus might be problematic.
- cumulusci/__about__.py +1 -0
- cumulusci/__init__.py +22 -0
- cumulusci/__main__.py +3 -0
- cumulusci/cli/__init__.py +0 -0
- cumulusci/cli/cci.py +244 -0
- cumulusci/cli/error.py +125 -0
- cumulusci/cli/flow.py +185 -0
- cumulusci/cli/logger.py +72 -0
- cumulusci/cli/org.py +692 -0
- cumulusci/cli/plan.py +181 -0
- cumulusci/cli/project.py +391 -0
- cumulusci/cli/robot.py +116 -0
- cumulusci/cli/runtime.py +190 -0
- cumulusci/cli/service.py +521 -0
- cumulusci/cli/task.py +295 -0
- cumulusci/cli/tests/__init__.py +0 -0
- cumulusci/cli/tests/test_cci.py +545 -0
- cumulusci/cli/tests/test_error.py +170 -0
- cumulusci/cli/tests/test_flow.py +276 -0
- cumulusci/cli/tests/test_logger.py +25 -0
- cumulusci/cli/tests/test_org.py +1438 -0
- cumulusci/cli/tests/test_plan.py +245 -0
- cumulusci/cli/tests/test_project.py +235 -0
- cumulusci/cli/tests/test_robot.py +177 -0
- cumulusci/cli/tests/test_runtime.py +197 -0
- cumulusci/cli/tests/test_service.py +853 -0
- cumulusci/cli/tests/test_task.py +266 -0
- cumulusci/cli/tests/test_ui.py +310 -0
- cumulusci/cli/tests/test_utils.py +122 -0
- cumulusci/cli/tests/utils.py +52 -0
- cumulusci/cli/ui.py +234 -0
- cumulusci/cli/utils.py +150 -0
- cumulusci/conftest.py +181 -0
- cumulusci/core/__init__.py +0 -0
- cumulusci/core/config/BaseConfig.py +5 -0
- cumulusci/core/config/BaseTaskFlowConfig.py +5 -0
- cumulusci/core/config/OrgConfig.py +5 -0
- cumulusci/core/config/ScratchOrgConfig.py +5 -0
- cumulusci/core/config/__init__.py +125 -0
- cumulusci/core/config/base_config.py +111 -0
- cumulusci/core/config/base_task_flow_config.py +82 -0
- cumulusci/core/config/marketing_cloud_service_config.py +83 -0
- cumulusci/core/config/oauth2_service_config.py +17 -0
- cumulusci/core/config/org_config.py +604 -0
- cumulusci/core/config/project_config.py +782 -0
- cumulusci/core/config/scratch_org_config.py +251 -0
- cumulusci/core/config/sfdx_org_config.py +220 -0
- cumulusci/core/config/tests/_test_config_backwards_compatibility.py +33 -0
- cumulusci/core/config/tests/test_config.py +1895 -0
- cumulusci/core/config/tests/test_config_expensive.py +839 -0
- cumulusci/core/config/tests/test_config_util.py +91 -0
- cumulusci/core/config/universal_config.py +88 -0
- cumulusci/core/config/util.py +18 -0
- cumulusci/core/datasets.py +303 -0
- cumulusci/core/debug.py +33 -0
- cumulusci/core/dependencies/__init__.py +55 -0
- cumulusci/core/dependencies/base.py +561 -0
- cumulusci/core/dependencies/dependencies.py +273 -0
- cumulusci/core/dependencies/github.py +177 -0
- cumulusci/core/dependencies/github_resolvers.py +244 -0
- cumulusci/core/dependencies/resolvers.py +580 -0
- cumulusci/core/dependencies/tests/__init__.py +0 -0
- cumulusci/core/dependencies/tests/conftest.py +385 -0
- cumulusci/core/dependencies/tests/test_dependencies.py +950 -0
- cumulusci/core/dependencies/tests/test_github.py +83 -0
- cumulusci/core/dependencies/tests/test_resolvers.py +1027 -0
- cumulusci/core/dependencies/utils.py +13 -0
- cumulusci/core/enums.py +11 -0
- cumulusci/core/exceptions.py +311 -0
- cumulusci/core/flowrunner.py +888 -0
- cumulusci/core/github.py +665 -0
- cumulusci/core/keychain/__init__.py +24 -0
- cumulusci/core/keychain/base_project_keychain.py +441 -0
- cumulusci/core/keychain/encrypted_file_project_keychain.py +945 -0
- cumulusci/core/keychain/environment_project_keychain.py +7 -0
- cumulusci/core/keychain/serialization.py +152 -0
- cumulusci/core/keychain/subprocess_keychain.py +24 -0
- cumulusci/core/keychain/tests/conftest.py +50 -0
- cumulusci/core/keychain/tests/test_base_project_keychain.py +299 -0
- cumulusci/core/keychain/tests/test_encrypted_file_project_keychain.py +1228 -0
- cumulusci/core/metadeploy/__init__.py +0 -0
- cumulusci/core/metadeploy/api.py +88 -0
- cumulusci/core/metadeploy/plans.py +25 -0
- cumulusci/core/metadeploy/tests/test_api.py +276 -0
- cumulusci/core/runtime.py +115 -0
- cumulusci/core/sfdx.py +162 -0
- cumulusci/core/source/__init__.py +16 -0
- cumulusci/core/source/github.py +50 -0
- cumulusci/core/source/local_folder.py +35 -0
- cumulusci/core/source_transforms/__init__.py +0 -0
- cumulusci/core/source_transforms/tests/test_transforms.py +1091 -0
- cumulusci/core/source_transforms/transforms.py +532 -0
- cumulusci/core/tasks.py +404 -0
- cumulusci/core/template_utils.py +59 -0
- cumulusci/core/tests/__init__.py +0 -0
- cumulusci/core/tests/cassettes/TestDatasetsE2E.test_datasets_e2e.yaml +215 -0
- cumulusci/core/tests/cassettes/TestDatasetsE2E.test_datasets_extract_standard_objects.yaml +199 -0
- cumulusci/core/tests/cassettes/TestDatasetsE2E.test_datasets_read_explicit_extract_declaration.yaml +3 -0
- cumulusci/core/tests/fake_remote_repo/cumulusci.yml +32 -0
- cumulusci/core/tests/fake_remote_repo/tasks/directory/example_2.py +6 -0
- cumulusci/core/tests/fake_remote_repo/tasks/example.py +43 -0
- cumulusci/core/tests/fake_remote_repo_2/cumulusci.yml +11 -0
- cumulusci/core/tests/fake_remote_repo_2/tasks/example_3.py +6 -0
- cumulusci/core/tests/test_datasets_e2e.py +386 -0
- cumulusci/core/tests/test_exceptions.py +11 -0
- cumulusci/core/tests/test_flowrunner.py +836 -0
- cumulusci/core/tests/test_github.py +942 -0
- cumulusci/core/tests/test_sfdx.py +138 -0
- cumulusci/core/tests/test_source.py +678 -0
- cumulusci/core/tests/test_tasks.py +262 -0
- cumulusci/core/tests/test_utils.py +141 -0
- cumulusci/core/tests/test_utils_merge_config.py +276 -0
- cumulusci/core/tests/test_versions.py +76 -0
- cumulusci/core/tests/untrusted_repo_child/cumulusci.yml +7 -0
- cumulusci/core/tests/untrusted_repo_child/tasks/untrusted_child.py +6 -0
- cumulusci/core/tests/untrusted_repo_parent/cumulusci.yml +26 -0
- cumulusci/core/tests/untrusted_repo_parent/tasks/untrusted_parent.py +6 -0
- cumulusci/core/tests/utils.py +116 -0
- cumulusci/core/tests/yaml/global.yaml +0 -0
- cumulusci/core/utils.py +402 -0
- cumulusci/core/versions.py +149 -0
- cumulusci/cumulusci.yml +1621 -0
- cumulusci/files/admin_profile.xml +20 -0
- cumulusci/files/delete_excludes.txt +424 -0
- cumulusci/files/templates/project/README.md +12 -0
- cumulusci/files/templates/project/cumulusci.yml +63 -0
- cumulusci/files/templates/project/dot-gitignore +60 -0
- cumulusci/files/templates/project/mapping.yml +45 -0
- cumulusci/files/templates/project/scratch_def.json +25 -0
- cumulusci/oauth/__init__.py +0 -0
- cumulusci/oauth/client.py +400 -0
- cumulusci/oauth/exceptions.py +9 -0
- cumulusci/oauth/salesforce.py +95 -0
- cumulusci/oauth/tests/__init__.py +0 -0
- cumulusci/oauth/tests/cassettes/test_get_device_code.yaml +22 -0
- cumulusci/oauth/tests/cassettes/test_get_device_oauth_token.yaml +74 -0
- cumulusci/oauth/tests/test_client.py +308 -0
- cumulusci/oauth/tests/test_salesforce.py +46 -0
- cumulusci/plugins/__init__.py +3 -0
- cumulusci/plugins/plugin_base.py +93 -0
- cumulusci/plugins/plugin_loader.py +59 -0
- cumulusci/robotframework/CumulusCI.py +340 -0
- cumulusci/robotframework/CumulusCI.robot +7 -0
- cumulusci/robotframework/Performance.py +165 -0
- cumulusci/robotframework/Salesforce.py +936 -0
- cumulusci/robotframework/Salesforce.robot +192 -0
- cumulusci/robotframework/SalesforceAPI.py +416 -0
- cumulusci/robotframework/SalesforcePlaywright.py +220 -0
- cumulusci/robotframework/SalesforcePlaywright.robot +40 -0
- cumulusci/robotframework/__init__.py +2 -0
- cumulusci/robotframework/base_library.py +39 -0
- cumulusci/robotframework/faker_mixin.py +89 -0
- cumulusci/robotframework/form_handlers.py +222 -0
- cumulusci/robotframework/javascript/cci_init.js +34 -0
- cumulusci/robotframework/javascript/cumulusci.js +4 -0
- cumulusci/robotframework/locator_manager.py +197 -0
- cumulusci/robotframework/locators_56.py +88 -0
- cumulusci/robotframework/locators_57.py +5 -0
- cumulusci/robotframework/pageobjects/BasePageObjects.py +433 -0
- cumulusci/robotframework/pageobjects/ObjectManagerPageObject.py +246 -0
- cumulusci/robotframework/pageobjects/PageObjectLibrary.py +45 -0
- cumulusci/robotframework/pageobjects/PageObjects.py +351 -0
- cumulusci/robotframework/pageobjects/__init__.py +12 -0
- cumulusci/robotframework/pageobjects/baseobjects.py +120 -0
- cumulusci/robotframework/perftests/short/collection_perf.robot +105 -0
- cumulusci/robotframework/tests/CustomObjectTestPage.py +10 -0
- cumulusci/robotframework/tests/FooTestPage.py +8 -0
- cumulusci/robotframework/tests/cumulusci/base.robot +40 -0
- cumulusci/robotframework/tests/cumulusci/bulkdata.robot +38 -0
- cumulusci/robotframework/tests/cumulusci/communities.robot +57 -0
- cumulusci/robotframework/tests/cumulusci/datagen.robot +84 -0
- cumulusci/robotframework/tests/salesforce/TestLibraryA.py +24 -0
- cumulusci/robotframework/tests/salesforce/TestLibraryB.py +20 -0
- cumulusci/robotframework/tests/salesforce/TestListener.py +93 -0
- cumulusci/robotframework/tests/salesforce/api.robot +178 -0
- cumulusci/robotframework/tests/salesforce/browsers.robot +143 -0
- cumulusci/robotframework/tests/salesforce/classic.robot +51 -0
- cumulusci/robotframework/tests/salesforce/create_contact.robot +59 -0
- cumulusci/robotframework/tests/salesforce/faker.robot +68 -0
- cumulusci/robotframework/tests/salesforce/forms.robot +172 -0
- cumulusci/robotframework/tests/salesforce/label_locator.robot +244 -0
- cumulusci/robotframework/tests/salesforce/labels.html +33 -0
- cumulusci/robotframework/tests/salesforce/locators.robot +149 -0
- cumulusci/robotframework/tests/salesforce/pageobjects/base_pageobjects.robot +100 -0
- cumulusci/robotframework/tests/salesforce/pageobjects/example_page_object.py +25 -0
- cumulusci/robotframework/tests/salesforce/pageobjects/listing_page.robot +115 -0
- cumulusci/robotframework/tests/salesforce/pageobjects/objectmanager.robot +74 -0
- cumulusci/robotframework/tests/salesforce/pageobjects/pageobjects.robot +171 -0
- cumulusci/robotframework/tests/salesforce/performance.robot +109 -0
- cumulusci/robotframework/tests/salesforce/playwright/javascript_keywords.robot +33 -0
- cumulusci/robotframework/tests/salesforce/playwright/open_test_browser.robot +48 -0
- cumulusci/robotframework/tests/salesforce/playwright/playwright.robot +24 -0
- cumulusci/robotframework/tests/salesforce/playwright/ui.robot +32 -0
- cumulusci/robotframework/tests/salesforce/populate.robot +89 -0
- cumulusci/robotframework/tests/salesforce/test_testlistener.py +37 -0
- cumulusci/robotframework/tests/salesforce/ui.robot +361 -0
- cumulusci/robotframework/tests/test_cumulusci_library.py +304 -0
- cumulusci/robotframework/tests/test_locator_manager.py +158 -0
- cumulusci/robotframework/tests/test_pageobjects.py +291 -0
- cumulusci/robotframework/tests/test_performance.py +38 -0
- cumulusci/robotframework/tests/test_salesforce.py +79 -0
- cumulusci/robotframework/tests/test_salesforce_locators.py +73 -0
- cumulusci/robotframework/tests/test_template_util.py +53 -0
- cumulusci/robotframework/tests/test_utils.py +106 -0
- cumulusci/robotframework/utils.py +283 -0
- cumulusci/salesforce_api/__init__.py +0 -0
- cumulusci/salesforce_api/exceptions.py +23 -0
- cumulusci/salesforce_api/filterable_objects.py +96 -0
- cumulusci/salesforce_api/mc_soap_envelopes.py +89 -0
- cumulusci/salesforce_api/metadata.py +721 -0
- cumulusci/salesforce_api/org_schema.py +571 -0
- cumulusci/salesforce_api/org_schema_models.py +226 -0
- cumulusci/salesforce_api/package_install.py +265 -0
- cumulusci/salesforce_api/package_zip.py +301 -0
- cumulusci/salesforce_api/rest_deploy.py +148 -0
- cumulusci/salesforce_api/retrieve_profile_api.py +301 -0
- cumulusci/salesforce_api/soap_envelopes.py +177 -0
- cumulusci/salesforce_api/tests/__init__.py +0 -0
- cumulusci/salesforce_api/tests/metadata_test_strings.py +24 -0
- cumulusci/salesforce_api/tests/test_metadata.py +1015 -0
- cumulusci/salesforce_api/tests/test_package_install.py +219 -0
- cumulusci/salesforce_api/tests/test_package_zip.py +380 -0
- cumulusci/salesforce_api/tests/test_rest_deploy.py +264 -0
- cumulusci/salesforce_api/tests/test_retrieve_profile_api.py +337 -0
- cumulusci/salesforce_api/tests/test_utils.py +124 -0
- cumulusci/salesforce_api/utils.py +51 -0
- cumulusci/schema/cumulusci.jsonschema.json +782 -0
- cumulusci/tasks/__init__.py +0 -0
- cumulusci/tasks/apex/__init__.py +0 -0
- cumulusci/tasks/apex/anon.py +157 -0
- cumulusci/tasks/apex/batch.py +180 -0
- cumulusci/tasks/apex/testrunner.py +835 -0
- cumulusci/tasks/apex/tests/cassettes/ManualEditTestApexIntegrationTests.test_run_tests__integration_test.yaml +703 -0
- cumulusci/tasks/apex/tests/test_apex_tasks.py +1558 -0
- cumulusci/tasks/base_source_control_task.py +17 -0
- cumulusci/tasks/bulkdata/__init__.py +15 -0
- cumulusci/tasks/bulkdata/base_generate_data_task.py +96 -0
- cumulusci/tasks/bulkdata/dates.py +97 -0
- cumulusci/tasks/bulkdata/delete.py +156 -0
- cumulusci/tasks/bulkdata/extract.py +441 -0
- cumulusci/tasks/bulkdata/extract_dataset_utils/calculate_dependencies.py +117 -0
- cumulusci/tasks/bulkdata/extract_dataset_utils/extract_yml.py +123 -0
- cumulusci/tasks/bulkdata/extract_dataset_utils/hardcoded_default_declarations.py +49 -0
- cumulusci/tasks/bulkdata/extract_dataset_utils/synthesize_extract_declarations.py +283 -0
- cumulusci/tasks/bulkdata/extract_dataset_utils/tests/test_extract_yml.py +142 -0
- cumulusci/tasks/bulkdata/extract_dataset_utils/tests/test_synthesize_extract_declarations.py +575 -0
- cumulusci/tasks/bulkdata/factory_utils.py +134 -0
- cumulusci/tasks/bulkdata/generate.py +4 -0
- cumulusci/tasks/bulkdata/generate_and_load_data.py +232 -0
- cumulusci/tasks/bulkdata/generate_and_load_data_from_yaml.py +19 -0
- cumulusci/tasks/bulkdata/generate_from_yaml.py +183 -0
- cumulusci/tasks/bulkdata/generate_mapping.py +434 -0
- cumulusci/tasks/bulkdata/generate_mapping_utils/dependency_map.py +169 -0
- cumulusci/tasks/bulkdata/generate_mapping_utils/extract_mapping_file_generator.py +45 -0
- cumulusci/tasks/bulkdata/generate_mapping_utils/generate_mapping_from_declarations.py +121 -0
- cumulusci/tasks/bulkdata/generate_mapping_utils/load_mapping_file_generator.py +127 -0
- cumulusci/tasks/bulkdata/generate_mapping_utils/mapping_generator_post_processes.py +53 -0
- cumulusci/tasks/bulkdata/generate_mapping_utils/mapping_transforms.py +139 -0
- cumulusci/tasks/bulkdata/generate_mapping_utils/tests/test_generate_extract_mapping_from_declarations.py +135 -0
- cumulusci/tasks/bulkdata/generate_mapping_utils/tests/test_generate_load_mapping_from_declarations.py +330 -0
- cumulusci/tasks/bulkdata/generate_mapping_utils/tests/test_mapping_generator_post_processes.py +60 -0
- cumulusci/tasks/bulkdata/generate_mapping_utils/tests/test_mapping_transforms.py +188 -0
- cumulusci/tasks/bulkdata/load.py +1196 -0
- cumulusci/tasks/bulkdata/mapping_parser.py +811 -0
- cumulusci/tasks/bulkdata/query_transformers.py +264 -0
- cumulusci/tasks/bulkdata/select_utils.py +792 -0
- cumulusci/tasks/bulkdata/snowfakery.py +753 -0
- cumulusci/tasks/bulkdata/snowfakery_utils/queue_manager.py +478 -0
- cumulusci/tasks/bulkdata/snowfakery_utils/snowfakery_run_until.py +141 -0
- cumulusci/tasks/bulkdata/snowfakery_utils/snowfakery_working_directory.py +53 -0
- cumulusci/tasks/bulkdata/snowfakery_utils/subtask_configurator.py +64 -0
- cumulusci/tasks/bulkdata/step.py +1242 -0
- cumulusci/tasks/bulkdata/tests/__init__.py +0 -0
- cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_random_strategy.yaml +147 -0
- cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_annoy_strategy.yaml +123 -0
- cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_select_and_insert_strategy.yaml +313 -0
- cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_select_and_insert_strategy_bulk.yaml +550 -0
- cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_strategy.yaml +175 -0
- cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_standard_strategy.yaml +147 -0
- cumulusci/tasks/bulkdata/tests/cassettes/TestSnowfakery.test_run_until_records_in_org__multiple_needed.yaml +69 -0
- cumulusci/tasks/bulkdata/tests/cassettes/TestSnowfakery.test_run_until_records_in_org__none_needed.yaml +22 -0
- cumulusci/tasks/bulkdata/tests/cassettes/TestSnowfakery.test_run_until_records_in_org__one_needed.yaml +24 -0
- cumulusci/tasks/bulkdata/tests/cassettes/TestSnowfakery.test_snowfakery_query_salesforce.yaml +25 -0
- cumulusci/tasks/bulkdata/tests/cassettes/TestUpdatesIntegrationTests.test_updates_task.yaml +80 -0
- cumulusci/tasks/bulkdata/tests/cassettes/TestUpsert.test_simple_upsert__rest.yaml +270 -0
- cumulusci/tasks/bulkdata/tests/cassettes/TestUpsert.test_upsert__rest.yaml +267 -0
- cumulusci/tasks/bulkdata/tests/cassettes/TestUpsert.test_upsert_complex_external_id_field__rest.yaml +369 -0
- cumulusci/tasks/bulkdata/tests/cassettes/TestUpsert.test_upsert_complex_external_id_field_rest__duplicate_error.yaml +204 -0
- cumulusci/tasks/bulkdata/tests/cassettes/TestUpsert.test_upsert_complex_fields__bulk.yaml +675 -0
- cumulusci/tasks/bulkdata/tests/dummy_data_factory.py +36 -0
- cumulusci/tasks/bulkdata/tests/integration_test_utils.py +49 -0
- cumulusci/tasks/bulkdata/tests/mapping-oid.yml +87 -0
- cumulusci/tasks/bulkdata/tests/mapping_after.yml +38 -0
- cumulusci/tasks/bulkdata/tests/mapping_poly.yml +34 -0
- cumulusci/tasks/bulkdata/tests/mapping_poly_incomplete.yml +20 -0
- cumulusci/tasks/bulkdata/tests/mapping_poly_wrong.yml +21 -0
- cumulusci/tasks/bulkdata/tests/mapping_select.yml +20 -0
- cumulusci/tasks/bulkdata/tests/mapping_select_invalid_strategy.yml +20 -0
- cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__invalid_number.yml +21 -0
- cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__invalid_strategy.yml +21 -0
- cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__non_float.yml +21 -0
- cumulusci/tasks/bulkdata/tests/mapping_select_missing_priority_fields.yml +22 -0
- cumulusci/tasks/bulkdata/tests/mapping_select_no_priority_fields.yml +18 -0
- cumulusci/tasks/bulkdata/tests/mapping_simple.yml +27 -0
- cumulusci/tasks/bulkdata/tests/mapping_v1.yml +28 -0
- cumulusci/tasks/bulkdata/tests/mapping_v2.yml +21 -0
- cumulusci/tasks/bulkdata/tests/mapping_v3.yml +32 -0
- cumulusci/tasks/bulkdata/tests/mapping_vanilla_sf.yml +69 -0
- cumulusci/tasks/bulkdata/tests/mock_data_factory_without_mapping.py +12 -0
- cumulusci/tasks/bulkdata/tests/person_accounts.yml +23 -0
- cumulusci/tasks/bulkdata/tests/person_accounts_minimal.yml +15 -0
- cumulusci/tasks/bulkdata/tests/recordtypes.yml +8 -0
- cumulusci/tasks/bulkdata/tests/recordtypes_2.yml +6 -0
- cumulusci/tasks/bulkdata/tests/recordtypes_with_ispersontype.yml +8 -0
- cumulusci/tasks/bulkdata/tests/snowfakery/child/child2.yml +3 -0
- cumulusci/tasks/bulkdata/tests/snowfakery/child.yml +4 -0
- cumulusci/tasks/bulkdata/tests/snowfakery/gen_npsp_standard_objects.recipe.yml +89 -0
- cumulusci/tasks/bulkdata/tests/snowfakery/include_parent.yml +3 -0
- cumulusci/tasks/bulkdata/tests/snowfakery/npsp_standard_objects_macros.yml +34 -0
- cumulusci/tasks/bulkdata/tests/snowfakery/options.recipe.yml +6 -0
- cumulusci/tasks/bulkdata/tests/snowfakery/query_snowfakery.recipe.yml +16 -0
- cumulusci/tasks/bulkdata/tests/snowfakery/sf_standard_object_macros.yml +83 -0
- cumulusci/tasks/bulkdata/tests/snowfakery/simple_snowfakery.load.yml +2 -0
- cumulusci/tasks/bulkdata/tests/snowfakery/simple_snowfakery.recipe.yml +13 -0
- cumulusci/tasks/bulkdata/tests/snowfakery/simple_snowfakery_2.load.yml +5 -0
- cumulusci/tasks/bulkdata/tests/snowfakery/simple_snowfakery_channels.load.yml +13 -0
- cumulusci/tasks/bulkdata/tests/snowfakery/simple_snowfakery_channels.recipe.yml +12 -0
- cumulusci/tasks/bulkdata/tests/snowfakery/simple_snowfakery_channels_2.load.yml +13 -0
- cumulusci/tasks/bulkdata/tests/snowfakery/unique_values.recipe.yml +4 -0
- cumulusci/tasks/bulkdata/tests/snowfakery/upsert.recipe.yml +23 -0
- cumulusci/tasks/bulkdata/tests/snowfakery/upsert_2.recipe.yml +29 -0
- cumulusci/tasks/bulkdata/tests/snowfakery/upsert_before.yml +10 -0
- cumulusci/tasks/bulkdata/tests/test_base_generate_data_tasks.py +61 -0
- cumulusci/tasks/bulkdata/tests/test_dates.py +99 -0
- cumulusci/tasks/bulkdata/tests/test_delete.py +404 -0
- cumulusci/tasks/bulkdata/tests/test_extract.py +1311 -0
- cumulusci/tasks/bulkdata/tests/test_factory_utils.py +55 -0
- cumulusci/tasks/bulkdata/tests/test_generate_and_load.py +252 -0
- cumulusci/tasks/bulkdata/tests/test_generate_from_snowfakery_task.py +343 -0
- cumulusci/tasks/bulkdata/tests/test_generatemapping.py +1039 -0
- cumulusci/tasks/bulkdata/tests/test_load.py +3175 -0
- cumulusci/tasks/bulkdata/tests/test_mapping_parser.py +1658 -0
- cumulusci/tasks/bulkdata/tests/test_query_db__joins_self_lookups.yml +12 -0
- cumulusci/tasks/bulkdata/tests/test_query_db_joins_lookups.yml +26 -0
- cumulusci/tasks/bulkdata/tests/test_query_db_joins_lookups_select.yml +48 -0
- cumulusci/tasks/bulkdata/tests/test_select.py +171 -0
- cumulusci/tasks/bulkdata/tests/test_select_utils.py +1057 -0
- cumulusci/tasks/bulkdata/tests/test_snowfakery.py +1153 -0
- cumulusci/tasks/bulkdata/tests/test_step.py +3957 -0
- cumulusci/tasks/bulkdata/tests/test_updates.py +513 -0
- cumulusci/tasks/bulkdata/tests/test_upsert.py +1015 -0
- cumulusci/tasks/bulkdata/tests/test_utils.py +158 -0
- cumulusci/tasks/bulkdata/tests/testdata.db +0 -0
- cumulusci/tasks/bulkdata/tests/update_describe.py +50 -0
- cumulusci/tasks/bulkdata/tests/update_person_accounts.yml +23 -0
- cumulusci/tasks/bulkdata/tests/utils.py +114 -0
- cumulusci/tasks/bulkdata/update_data.py +260 -0
- cumulusci/tasks/bulkdata/upsert_utils.py +130 -0
- cumulusci/tasks/bulkdata/utils.py +249 -0
- cumulusci/tasks/command.py +178 -0
- cumulusci/tasks/connectedapp.py +186 -0
- cumulusci/tasks/create_package_version.py +778 -0
- cumulusci/tasks/datadictionary.py +745 -0
- cumulusci/tasks/dx_convert_from.py +26 -0
- cumulusci/tasks/github/__init__.py +17 -0
- cumulusci/tasks/github/base.py +16 -0
- cumulusci/tasks/github/commit_status.py +13 -0
- cumulusci/tasks/github/merge.py +11 -0
- cumulusci/tasks/github/publish.py +11 -0
- cumulusci/tasks/github/pull_request.py +11 -0
- cumulusci/tasks/github/release.py +11 -0
- cumulusci/tasks/github/release_report.py +11 -0
- cumulusci/tasks/github/tag.py +11 -0
- cumulusci/tasks/github/tests/__init__.py +0 -0
- cumulusci/tasks/github/tests/test_util.py +202 -0
- cumulusci/tasks/github/tests/test_vcs_migration.py +44 -0
- cumulusci/tasks/github/tests/util_github_api.py +666 -0
- cumulusci/tasks/github/util.py +252 -0
- cumulusci/tasks/marketing_cloud/__init__.py +0 -0
- cumulusci/tasks/marketing_cloud/api.py +188 -0
- cumulusci/tasks/marketing_cloud/base.py +38 -0
- cumulusci/tasks/marketing_cloud/deploy.py +345 -0
- cumulusci/tasks/marketing_cloud/get_user_info.py +40 -0
- cumulusci/tasks/marketing_cloud/mc_constants.py +1 -0
- cumulusci/tasks/marketing_cloud/tests/__init__.py +0 -0
- cumulusci/tasks/marketing_cloud/tests/conftest.py +46 -0
- cumulusci/tasks/marketing_cloud/tests/expected-payload.json +110 -0
- cumulusci/tasks/marketing_cloud/tests/test_api.py +97 -0
- cumulusci/tasks/marketing_cloud/tests/test_api_soap_envelopes.py +145 -0
- cumulusci/tasks/marketing_cloud/tests/test_base.py +14 -0
- cumulusci/tasks/marketing_cloud/tests/test_deploy.py +400 -0
- cumulusci/tasks/marketing_cloud/tests/test_get_user_info.py +141 -0
- cumulusci/tasks/marketing_cloud/tests/validation-response.json +39 -0
- cumulusci/tasks/metadata/__init__.py +0 -0
- cumulusci/tasks/metadata/ee_src.py +94 -0
- cumulusci/tasks/metadata/managed_src.py +100 -0
- cumulusci/tasks/metadata/metadata_map.yml +868 -0
- cumulusci/tasks/metadata/modify.py +99 -0
- cumulusci/tasks/metadata/package.py +684 -0
- cumulusci/tasks/metadata/tests/__init__.py +0 -0
- cumulusci/tasks/metadata/tests/package_metadata/namespaced_report_folder/.hidden/.keep +0 -0
- cumulusci/tasks/metadata/tests/package_metadata/namespaced_report_folder/destructiveChanges.xml +9 -0
- cumulusci/tasks/metadata/tests/package_metadata/namespaced_report_folder/package.xml +9 -0
- cumulusci/tasks/metadata/tests/package_metadata/namespaced_report_folder/package_install_uninstall.xml +11 -0
- cumulusci/tasks/metadata/tests/package_metadata/namespaced_report_folder/reports/namespace__TestFolder/TestReport.report +3 -0
- cumulusci/tasks/metadata/tests/sample_package.xml +9 -0
- cumulusci/tasks/metadata/tests/test_ee_src.py +112 -0
- cumulusci/tasks/metadata/tests/test_managed_src.py +111 -0
- cumulusci/tasks/metadata/tests/test_modify.py +123 -0
- cumulusci/tasks/metadata/tests/test_package.py +476 -0
- cumulusci/tasks/metadata_etl/__init__.py +29 -0
- cumulusci/tasks/metadata_etl/base.py +436 -0
- cumulusci/tasks/metadata_etl/duplicate_rules.py +24 -0
- cumulusci/tasks/metadata_etl/field_sets.py +70 -0
- cumulusci/tasks/metadata_etl/help_text.py +92 -0
- cumulusci/tasks/metadata_etl/layouts.py +550 -0
- cumulusci/tasks/metadata_etl/objects.py +68 -0
- cumulusci/tasks/metadata_etl/permissions.py +167 -0
- cumulusci/tasks/metadata_etl/picklists.py +221 -0
- cumulusci/tasks/metadata_etl/remote_site_settings.py +99 -0
- cumulusci/tasks/metadata_etl/sharing.py +138 -0
- cumulusci/tasks/metadata_etl/tests/test_base.py +512 -0
- cumulusci/tasks/metadata_etl/tests/test_duplicate_rules.py +22 -0
- cumulusci/tasks/metadata_etl/tests/test_field_sets.py +156 -0
- cumulusci/tasks/metadata_etl/tests/test_help_text.py +387 -0
- cumulusci/tasks/metadata_etl/tests/test_ip_ranges.py +85 -0
- cumulusci/tasks/metadata_etl/tests/test_layouts.py +858 -0
- cumulusci/tasks/metadata_etl/tests/test_objects.py +236 -0
- cumulusci/tasks/metadata_etl/tests/test_permissions.py +223 -0
- cumulusci/tasks/metadata_etl/tests/test_picklists.py +547 -0
- cumulusci/tasks/metadata_etl/tests/test_remote_site_settings.py +46 -0
- cumulusci/tasks/metadata_etl/tests/test_sharing.py +333 -0
- cumulusci/tasks/metadata_etl/tests/test_value_sets.py +298 -0
- cumulusci/tasks/metadata_etl/value_sets.py +106 -0
- cumulusci/tasks/metadeploy.py +393 -0
- cumulusci/tasks/metaxml.py +88 -0
- cumulusci/tasks/preflight/__init__.py +0 -0
- cumulusci/tasks/preflight/dataset_load.py +49 -0
- cumulusci/tasks/preflight/licenses.py +86 -0
- cumulusci/tasks/preflight/packages.py +14 -0
- cumulusci/tasks/preflight/permsets.py +23 -0
- cumulusci/tasks/preflight/recordtypes.py +16 -0
- cumulusci/tasks/preflight/retrieve_tasks.py +30 -0
- cumulusci/tasks/preflight/settings.py +77 -0
- cumulusci/tasks/preflight/sobjects.py +202 -0
- cumulusci/tasks/preflight/tests/test_dataset_load.py +85 -0
- cumulusci/tasks/preflight/tests/test_licenses.py +174 -0
- cumulusci/tasks/preflight/tests/test_packages.py +14 -0
- cumulusci/tasks/preflight/tests/test_permset_preflights.py +51 -0
- cumulusci/tasks/preflight/tests/test_recordtypes.py +30 -0
- cumulusci/tasks/preflight/tests/test_retrieve_tasks.py +62 -0
- cumulusci/tasks/preflight/tests/test_settings.py +130 -0
- cumulusci/tasks/preflight/tests/test_sobjects.py +231 -0
- cumulusci/tasks/push/README.md +59 -0
- cumulusci/tasks/push/__init__.py +0 -0
- cumulusci/tasks/push/push_api.py +659 -0
- cumulusci/tasks/push/pushfails.py +136 -0
- cumulusci/tasks/push/tasks.py +476 -0
- cumulusci/tasks/push/tests/conftest.py +263 -0
- cumulusci/tasks/push/tests/test_push_api.py +951 -0
- cumulusci/tasks/push/tests/test_push_tasks.py +659 -0
- cumulusci/tasks/release_notes/README.md +63 -0
- cumulusci/tasks/release_notes/__init__.py +0 -0
- cumulusci/tasks/release_notes/exceptions.py +5 -0
- cumulusci/tasks/release_notes/generator.py +137 -0
- cumulusci/tasks/release_notes/parser.py +232 -0
- cumulusci/tasks/release_notes/provider.py +44 -0
- cumulusci/tasks/release_notes/task.py +300 -0
- cumulusci/tasks/release_notes/tests/__init__.py +0 -0
- cumulusci/tasks/release_notes/tests/change_notes/full/example1.md +17 -0
- cumulusci/tasks/release_notes/tests/change_notes/multi/1.txt +1 -0
- cumulusci/tasks/release_notes/tests/change_notes/multi/2.txt +1 -0
- cumulusci/tasks/release_notes/tests/change_notes/multi/3.txt +1 -0
- cumulusci/tasks/release_notes/tests/change_notes/single/1.txt +1 -0
- cumulusci/tasks/release_notes/tests/test_generator.py +582 -0
- cumulusci/tasks/release_notes/tests/test_parser.py +867 -0
- cumulusci/tasks/release_notes/tests/test_provider.py +512 -0
- cumulusci/tasks/release_notes/tests/test_task.py +461 -0
- cumulusci/tasks/release_notes/tests/utils.py +153 -0
- cumulusci/tasks/robotframework/__init__.py +3 -0
- cumulusci/tasks/robotframework/debugger/DebugListener.py +100 -0
- cumulusci/tasks/robotframework/debugger/__init__.py +10 -0
- cumulusci/tasks/robotframework/debugger/model.py +87 -0
- cumulusci/tasks/robotframework/debugger/ui.py +259 -0
- cumulusci/tasks/robotframework/libdoc.py +269 -0
- cumulusci/tasks/robotframework/robotframework.py +392 -0
- cumulusci/tasks/robotframework/stylesheet.css +130 -0
- cumulusci/tasks/robotframework/template.html +109 -0
- cumulusci/tasks/robotframework/tests/TestLibrary.py +18 -0
- cumulusci/tasks/robotframework/tests/TestPageObjects.py +31 -0
- cumulusci/tasks/robotframework/tests/TestResource.robot +8 -0
- cumulusci/tasks/robotframework/tests/failing_tests.robot +16 -0
- cumulusci/tasks/robotframework/tests/performance.robot +23 -0
- cumulusci/tasks/robotframework/tests/test_browser_proxies.py +137 -0
- cumulusci/tasks/robotframework/tests/test_debugger.py +360 -0
- cumulusci/tasks/robotframework/tests/test_robot_parallel.py +141 -0
- cumulusci/tasks/robotframework/tests/test_robotframework.py +860 -0
- cumulusci/tasks/salesforce/BaseRetrieveMetadata.py +58 -0
- cumulusci/tasks/salesforce/BaseSalesforceApiTask.py +45 -0
- cumulusci/tasks/salesforce/BaseSalesforceMetadataApiTask.py +18 -0
- cumulusci/tasks/salesforce/BaseSalesforceTask.py +4 -0
- cumulusci/tasks/salesforce/BaseUninstallMetadata.py +41 -0
- cumulusci/tasks/salesforce/CreateCommunity.py +124 -0
- cumulusci/tasks/salesforce/CreatePackage.py +29 -0
- cumulusci/tasks/salesforce/Deploy.py +240 -0
- cumulusci/tasks/salesforce/DeployBundles.py +88 -0
- cumulusci/tasks/salesforce/DescribeMetadataTypes.py +26 -0
- cumulusci/tasks/salesforce/EnsureRecordTypes.py +202 -0
- cumulusci/tasks/salesforce/GetInstalledPackages.py +8 -0
- cumulusci/tasks/salesforce/ListCommunities.py +40 -0
- cumulusci/tasks/salesforce/ListCommunityTemplates.py +19 -0
- cumulusci/tasks/salesforce/PublishCommunity.py +62 -0
- cumulusci/tasks/salesforce/RetrievePackaged.py +41 -0
- cumulusci/tasks/salesforce/RetrieveReportsAndDashboards.py +82 -0
- cumulusci/tasks/salesforce/RetrieveUnpackaged.py +36 -0
- cumulusci/tasks/salesforce/SOQLQuery.py +39 -0
- cumulusci/tasks/salesforce/UninstallLocal.py +15 -0
- cumulusci/tasks/salesforce/UninstallLocalBundles.py +28 -0
- cumulusci/tasks/salesforce/UninstallLocalNamespacedBundles.py +58 -0
- cumulusci/tasks/salesforce/UninstallPackage.py +32 -0
- cumulusci/tasks/salesforce/UninstallPackaged.py +56 -0
- cumulusci/tasks/salesforce/UpdateAdminProfile.py +8 -0
- cumulusci/tasks/salesforce/__init__.py +79 -0
- cumulusci/tasks/salesforce/activate_flow.py +74 -0
- cumulusci/tasks/salesforce/check_components.py +324 -0
- cumulusci/tasks/salesforce/composite.py +142 -0
- cumulusci/tasks/salesforce/create_permission_sets.py +35 -0
- cumulusci/tasks/salesforce/custom_settings.py +134 -0
- cumulusci/tasks/salesforce/custom_settings_wait.py +132 -0
- cumulusci/tasks/salesforce/enable_prediction.py +107 -0
- cumulusci/tasks/salesforce/insert_record.py +40 -0
- cumulusci/tasks/salesforce/install_package_version.py +242 -0
- cumulusci/tasks/salesforce/license_preflights.py +8 -0
- cumulusci/tasks/salesforce/network_member_group.py +178 -0
- cumulusci/tasks/salesforce/nonsourcetracking.py +228 -0
- cumulusci/tasks/salesforce/org_settings.py +193 -0
- cumulusci/tasks/salesforce/package_upload.py +328 -0
- cumulusci/tasks/salesforce/profiles.py +74 -0
- cumulusci/tasks/salesforce/promote_package_version.py +376 -0
- cumulusci/tasks/salesforce/retrieve_profile.py +195 -0
- cumulusci/tasks/salesforce/salesforce_files.py +244 -0
- cumulusci/tasks/salesforce/sourcetracking.py +507 -0
- cumulusci/tasks/salesforce/tests/__init__.py +3 -0
- cumulusci/tasks/salesforce/tests/test_CreateCommunity.py +278 -0
- cumulusci/tasks/salesforce/tests/test_CreatePackage.py +22 -0
- cumulusci/tasks/salesforce/tests/test_Deploy.py +470 -0
- cumulusci/tasks/salesforce/tests/test_DeployBundles.py +76 -0
- cumulusci/tasks/salesforce/tests/test_EnsureRecordTypes.py +345 -0
- cumulusci/tasks/salesforce/tests/test_ListCommunities.py +84 -0
- cumulusci/tasks/salesforce/tests/test_ListCommunityTemplates.py +49 -0
- cumulusci/tasks/salesforce/tests/test_PackageUpload.py +547 -0
- cumulusci/tasks/salesforce/tests/test_ProfileGrantAllAccess.py +699 -0
- cumulusci/tasks/salesforce/tests/test_PublishCommunity.py +181 -0
- cumulusci/tasks/salesforce/tests/test_RetrievePackaged.py +24 -0
- cumulusci/tasks/salesforce/tests/test_RetrieveReportsAndDashboards.py +56 -0
- cumulusci/tasks/salesforce/tests/test_RetrieveUnpackaged.py +21 -0
- cumulusci/tasks/salesforce/tests/test_SOQLQuery.py +30 -0
- cumulusci/tasks/salesforce/tests/test_UninstallLocal.py +15 -0
- cumulusci/tasks/salesforce/tests/test_UninstallLocalBundles.py +19 -0
- cumulusci/tasks/salesforce/tests/test_UninstallLocalNamespacedBundles.py +22 -0
- cumulusci/tasks/salesforce/tests/test_UninstallPackage.py +19 -0
- cumulusci/tasks/salesforce/tests/test_UninstallPackaged.py +66 -0
- cumulusci/tasks/salesforce/tests/test_UninstallPackagedIncremental.py +127 -0
- cumulusci/tasks/salesforce/tests/test_activate_flow.py +132 -0
- cumulusci/tasks/salesforce/tests/test_base_tasks.py +110 -0
- cumulusci/tasks/salesforce/tests/test_check_components.py +445 -0
- cumulusci/tasks/salesforce/tests/test_composite.py +250 -0
- cumulusci/tasks/salesforce/tests/test_create_permission_sets.py +41 -0
- cumulusci/tasks/salesforce/tests/test_custom_settings.py +227 -0
- cumulusci/tasks/salesforce/tests/test_custom_settings_wait.py +174 -0
- cumulusci/tasks/salesforce/tests/test_describemetadatatypes.py +18 -0
- cumulusci/tasks/salesforce/tests/test_enable_prediction.py +240 -0
- cumulusci/tasks/salesforce/tests/test_insert_record.py +110 -0
- cumulusci/tasks/salesforce/tests/test_install_package_version.py +464 -0
- cumulusci/tasks/salesforce/tests/test_network_member_group.py +444 -0
- cumulusci/tasks/salesforce/tests/test_nonsourcetracking.py +235 -0
- cumulusci/tasks/salesforce/tests/test_org_settings.py +407 -0
- cumulusci/tasks/salesforce/tests/test_profiles.py +202 -0
- cumulusci/tasks/salesforce/tests/test_retrieve_profile.py +287 -0
- cumulusci/tasks/salesforce/tests/test_salesforce_files.py +228 -0
- cumulusci/tasks/salesforce/tests/test_sourcetracking.py +350 -0
- cumulusci/tasks/salesforce/tests/test_trigger_handlers.py +300 -0
- cumulusci/tasks/salesforce/tests/test_update_dependencies.py +509 -0
- cumulusci/tasks/salesforce/tests/util.py +79 -0
- cumulusci/tasks/salesforce/trigger_handlers.py +119 -0
- cumulusci/tasks/salesforce/uninstall_packaged_incremental.py +136 -0
- cumulusci/tasks/salesforce/update_dependencies.py +290 -0
- cumulusci/tasks/salesforce/update_profile.py +339 -0
- cumulusci/tasks/salesforce/users/permsets.py +227 -0
- cumulusci/tasks/salesforce/users/photos.py +162 -0
- cumulusci/tasks/salesforce/users/tests/photo.mock.txt +1 -0
- cumulusci/tasks/salesforce/users/tests/test_permsets.py +950 -0
- cumulusci/tasks/salesforce/users/tests/test_photos.py +373 -0
- cumulusci/tasks/sample_data/capture_sample_data.py +77 -0
- cumulusci/tasks/sample_data/load_sample_data.py +85 -0
- cumulusci/tasks/sample_data/test_capture_sample_data.py +117 -0
- cumulusci/tasks/sample_data/test_load_sample_data.py +121 -0
- cumulusci/tasks/sfdx.py +83 -0
- cumulusci/tasks/tests/__init__.py +1 -0
- cumulusci/tasks/tests/conftest.py +30 -0
- cumulusci/tasks/tests/test_command.py +129 -0
- cumulusci/tasks/tests/test_connectedapp.py +236 -0
- cumulusci/tasks/tests/test_create_package_version.py +847 -0
- cumulusci/tasks/tests/test_datadictionary.py +1575 -0
- cumulusci/tasks/tests/test_dx_convert_from.py +60 -0
- cumulusci/tasks/tests/test_metadeploy.py +624 -0
- cumulusci/tasks/tests/test_metaxml.py +99 -0
- cumulusci/tasks/tests/test_promote_package_version.py +488 -0
- cumulusci/tasks/tests/test_pushfails.py +96 -0
- cumulusci/tasks/tests/test_salesforce.py +72 -0
- cumulusci/tasks/tests/test_sfdx.py +105 -0
- cumulusci/tasks/tests/test_util.py +207 -0
- cumulusci/tasks/util.py +261 -0
- cumulusci/tasks/vcs/__init__.py +19 -0
- cumulusci/tasks/vcs/commit_status.py +58 -0
- cumulusci/tasks/vcs/create_commit_status.py +37 -0
- cumulusci/tasks/vcs/download_extract.py +199 -0
- cumulusci/tasks/vcs/merge.py +298 -0
- cumulusci/tasks/vcs/publish.py +207 -0
- cumulusci/tasks/vcs/pull_request.py +9 -0
- cumulusci/tasks/vcs/release.py +134 -0
- cumulusci/tasks/vcs/release_report.py +105 -0
- cumulusci/tasks/vcs/tag.py +31 -0
- cumulusci/tasks/vcs/tests/github/test_commit_status.py +196 -0
- cumulusci/tasks/vcs/tests/github/test_download_extract.py +896 -0
- cumulusci/tasks/vcs/tests/github/test_merge.py +1118 -0
- cumulusci/tasks/vcs/tests/github/test_publish.py +823 -0
- cumulusci/tasks/vcs/tests/github/test_pull_request.py +29 -0
- cumulusci/tasks/vcs/tests/github/test_release.py +390 -0
- cumulusci/tasks/vcs/tests/github/test_release_report.py +109 -0
- cumulusci/tasks/vcs/tests/github/test_tag.py +90 -0
- cumulusci/tasks/vlocity/exceptions.py +2 -0
- cumulusci/tasks/vlocity/tests/test_vlocity.py +283 -0
- cumulusci/tasks/vlocity/vlocity.py +342 -0
- cumulusci/tests/__init__.py +1 -0
- cumulusci/tests/cassettes/GET_sobjects_Account_PersonAccount_describe.yaml +18 -0
- cumulusci/tests/cassettes/TestIntegrationInfrastructure.test_integration_tests.yaml +19 -0
- cumulusci/tests/pytest_plugins/pytest_sf_orgconnect.py +307 -0
- cumulusci/tests/pytest_plugins/pytest_sf_vcr.py +275 -0
- cumulusci/tests/pytest_plugins/pytest_sf_vcr_serializer.py +160 -0
- cumulusci/tests/pytest_plugins/pytest_typeguard.py +5 -0
- cumulusci/tests/pytest_plugins/test_vcr_string_compressor.py +49 -0
- cumulusci/tests/pytest_plugins/vcr_string_compressor.py +97 -0
- cumulusci/tests/shared_cassettes/GET_sobjects_Account_describe.yaml +18 -0
- cumulusci/tests/shared_cassettes/GET_sobjects_Case_describe.yaml +18 -0
- cumulusci/tests/shared_cassettes/GET_sobjects_Contact_describe.yaml +4838 -0
- cumulusci/tests/shared_cassettes/GET_sobjects_Custom__c_describe.yaml +242 -0
- cumulusci/tests/shared_cassettes/GET_sobjects_Event_describe.yaml +19 -0
- cumulusci/tests/shared_cassettes/GET_sobjects_Global_describe.yaml +1338 -0
- cumulusci/tests/shared_cassettes/GET_sobjects_Lead_describe.yaml +18 -0
- cumulusci/tests/shared_cassettes/GET_sobjects_OpportunityContactRole_describe.yaml +34 -0
- cumulusci/tests/shared_cassettes/GET_sobjects_Opportunity_describe.yaml +1261 -0
- cumulusci/tests/shared_cassettes/GET_sobjects_Organization.yaml +49 -0
- cumulusci/tests/shared_cassettes/vcr_string_templates/batchInfoList_xml.tpl +15 -0
- cumulusci/tests/shared_cassettes/vcr_string_templates/batchInfo_xml.tpl +13 -0
- cumulusci/tests/shared_cassettes/vcr_string_templates/jobInfo_insert_xml.tpl +24 -0
- cumulusci/tests/shared_cassettes/vcr_string_templates/jobInfo_upsert_xml.tpl +25 -0
- cumulusci/tests/test_entry_points.py +20 -0
- cumulusci/tests/test_integration_infrastructure.py +131 -0
- cumulusci/tests/test_main.py +9 -0
- cumulusci/tests/test_schema.py +32 -0
- cumulusci/tests/test_utils.py +657 -0
- cumulusci/tests/test_vcr_serializer.py +134 -0
- cumulusci/tests/uncompressed_cassette.yaml +83 -0
- cumulusci/tests/util.py +344 -0
- cumulusci/utils/__init__.py +731 -0
- cumulusci/utils/classutils.py +9 -0
- cumulusci/utils/collections.py +32 -0
- cumulusci/utils/deprecation.py +11 -0
- cumulusci/utils/encryption.py +31 -0
- cumulusci/utils/fileutils.py +295 -0
- cumulusci/utils/git.py +142 -0
- cumulusci/utils/http/multi_request.py +214 -0
- cumulusci/utils/http/requests_utils.py +103 -0
- cumulusci/utils/http/tests/cassettes/ManualEditTestCompositeParallelSalesforce.test_http_headers.yaml +32 -0
- cumulusci/utils/http/tests/cassettes/TestCompositeParallelSalesforce.test_composite_parallel_salesforce.yaml +65 -0
- cumulusci/utils/http/tests/cassettes/TestCompositeParallelSalesforce.test_errors.yaml +24 -0
- cumulusci/utils/http/tests/cassettes/TestCompositeParallelSalesforce.test_reference_ids.yaml +49 -0
- cumulusci/utils/http/tests/test_multi_request.py +255 -0
- cumulusci/utils/iterators.py +21 -0
- cumulusci/utils/logging.py +128 -0
- cumulusci/utils/metaprogramming.py +10 -0
- cumulusci/utils/options.py +138 -0
- cumulusci/utils/parallel/queries_in_parallel/run_queries_in_parallel.py +29 -0
- cumulusci/utils/parallel/queries_in_parallel/tests/test_run_queries_in_parallel.py +50 -0
- cumulusci/utils/parallel/task_worker_queues/parallel_worker.py +238 -0
- cumulusci/utils/parallel/task_worker_queues/parallel_worker_queue.py +243 -0
- cumulusci/utils/parallel/task_worker_queues/tests/test_parallel_worker.py +353 -0
- cumulusci/utils/salesforce/count_sobjects.py +46 -0
- cumulusci/utils/salesforce/soql.py +17 -0
- cumulusci/utils/salesforce/tests/cassettes/ManualEdit_TestCountSObjects.test_count_sobjects__network_errors.yaml +23 -0
- cumulusci/utils/salesforce/tests/cassettes/TestCountSObjects.test_count_sobjects__errors.yaml +33 -0
- cumulusci/utils/salesforce/tests/cassettes/TestCountSObjects.test_count_sobjects_simple.yaml +29 -0
- cumulusci/utils/salesforce/tests/test_count_sobjects.py +29 -0
- cumulusci/utils/salesforce/tests/test_soql.py +30 -0
- cumulusci/utils/tests/cassettes/ManualEditTestDescribeOrg.test_minimal_schema.yaml +36 -0
- cumulusci/utils/tests/cassettes/ManualEdit_test_describe_to_sql.yaml +191 -0
- cumulusci/utils/tests/test_fileutils.py +284 -0
- cumulusci/utils/tests/test_git.py +85 -0
- cumulusci/utils/tests/test_logging.py +70 -0
- cumulusci/utils/tests/test_option_parsing.py +188 -0
- cumulusci/utils/tests/test_org_schema.py +691 -0
- cumulusci/utils/tests/test_org_schema_models.py +79 -0
- cumulusci/utils/tests/test_waiting.py +25 -0
- cumulusci/utils/version_strings.py +391 -0
- cumulusci/utils/waiting.py +42 -0
- cumulusci/utils/xml/__init__.py +91 -0
- cumulusci/utils/xml/metadata_tree.py +299 -0
- cumulusci/utils/xml/robot_xml.py +114 -0
- cumulusci/utils/xml/salesforce_encoding.py +100 -0
- cumulusci/utils/xml/test/test_metadata_tree.py +251 -0
- cumulusci/utils/xml/test/test_salesforce_encoding.py +173 -0
- cumulusci/utils/yaml/cumulusci_yml.py +401 -0
- cumulusci/utils/yaml/model_parser.py +156 -0
- cumulusci/utils/yaml/safer_loader.py +74 -0
- cumulusci/utils/yaml/tests/bad_cci.yml +5 -0
- cumulusci/utils/yaml/tests/cassettes/TestCumulusciYml.test_validate_url__with_errors.yaml +20 -0
- cumulusci/utils/yaml/tests/test_cumulusci_yml.py +286 -0
- cumulusci/utils/yaml/tests/test_model_parser.py +175 -0
- cumulusci/utils/yaml/tests/test_safer_loader.py +88 -0
- cumulusci/utils/ziputils.py +61 -0
- cumulusci/vcs/base.py +143 -0
- cumulusci/vcs/bootstrap.py +272 -0
- cumulusci/vcs/github/__init__.py +24 -0
- cumulusci/vcs/github/adapter.py +689 -0
- cumulusci/vcs/github/release_notes/generator.py +219 -0
- cumulusci/vcs/github/release_notes/parser.py +151 -0
- cumulusci/vcs/github/release_notes/provider.py +143 -0
- cumulusci/vcs/github/service.py +569 -0
- cumulusci/vcs/github/tests/test_adapter.py +138 -0
- cumulusci/vcs/github/tests/test_service.py +408 -0
- cumulusci/vcs/models.py +586 -0
- cumulusci/vcs/tests/conftest.py +41 -0
- cumulusci/vcs/tests/dummy_service.py +241 -0
- cumulusci/vcs/tests/test_vcs_base.py +687 -0
- cumulusci/vcs/tests/test_vcs_bootstrap.py +727 -0
- cumulusci/vcs/utils/__init__.py +31 -0
- cumulusci/vcs/vcs_source.py +287 -0
- cumulusci_plus-5.0.0.dist-info/METADATA +145 -0
- cumulusci_plus-5.0.0.dist-info/RECORD +744 -0
- cumulusci_plus-5.0.0.dist-info/WHEEL +4 -0
- cumulusci_plus-5.0.0.dist-info/entry_points.txt +3 -0
- cumulusci_plus-5.0.0.dist-info/licenses/AUTHORS.rst +41 -0
- cumulusci_plus-5.0.0.dist-info/licenses/LICENSE +30 -0
@@ -0,0 +1,1242 @@
import csv
import io
import json
import os
import pathlib
import tempfile
import time
from abc import ABCMeta, abstractmethod
from contextlib import contextmanager
from itertools import tee
from typing import Any, Dict, List, NamedTuple, Optional, Union
from urllib.parse import quote

import requests
import salesforce_bulk

from cumulusci.core.enums import StrEnum
from cumulusci.core.exceptions import BulkDataException
from cumulusci.core.utils import process_bool_arg
from cumulusci.tasks.bulkdata.select_utils import (
    SelectOperationExecutor,
    SelectRecordRetrievalMode,
    SelectStrategy,
    split_and_filter_fields,
)
from cumulusci.tasks.bulkdata.utils import DataApi, iterate_in_chunks
from cumulusci.utils.classutils import namedtuple_as_simple_dict
from cumulusci.utils.xml import lxml_parse_string

DEFAULT_BULK_BATCH_SIZE = 10_000
DEFAULT_REST_BATCH_SIZE = 200
MAX_REST_BATCH_SIZE = 200
HIGH_PRIORITY_VALUE = 3
LOW_PRIORITY_VALUE = 0.5
csv.field_size_limit(2**27)  # 128 MB


class DataOperationType(StrEnum):
    """Enum defining the API data operation requested."""

    INSERT = "insert"
    UPDATE = "update"
    DELETE = "delete"
    HARD_DELETE = "hardDelete"
    QUERY = "query"
    UPSERT = "upsert"
    ETL_UPSERT = "etl_upsert"
    SMART_UPSERT = "smart_upsert"  # currently undocumented
    SELECT = "select"


class DataOperationStatus(StrEnum):
    """Enum defining outcome values for a data operation."""

    SUCCESS = "Success"
    ROW_FAILURE = "Row failure"
    JOB_FAILURE = "Job failure"
    IN_PROGRESS = "In progress"
    ABORTED = "Aborted"


class DataOperationResult(NamedTuple):
    id: str
    success: bool
    error: str
    created: Optional[bool] = None


class DataOperationJobResult(NamedTuple):
    status: DataOperationStatus
    job_errors: List[str]
    records_processed: int
    total_row_errors: int = 0

    def simplify(self):
        return namedtuple_as_simple_dict(self)


@contextmanager
def download_file(uri, bulk_api, *, chunk_size=8192):
    """Download the Bulk API result file for a single batch,
    and remove it when the context manager exits."""
    try:
        (handle, path) = tempfile.mkstemp(text=False)
        resp = requests.get(uri, headers=bulk_api.headers(), stream=True)
        resp.raise_for_status()
        f = os.fdopen(handle, "wb")
        for chunk in resp.iter_content(chunk_size=chunk_size):  # VCR needs a chunk_size
            # specific chunk_size seems to make no measurable perf difference
            f.write(chunk)

        f.close()
        with open(path, "r", newline="", encoding="utf-8") as f:
            yield f
    finally:
        pathlib.Path(path).unlink()


class BulkJobMixin:
    """Provides mixin utilities for classes that manage Bulk API jobs."""

    def _job_state_from_batches(self, job_id):
        """Query for batches under job_id and return overall status
        inferred from batch-level status values."""
        uri = f"{self.bulk.endpoint}/job/{job_id}/batch"
        response = requests.get(uri, headers=self.bulk.headers())
        response.raise_for_status()
        return self._parse_job_state(response.content)

    def _parse_job_state(self, xml: str):
        """Parse the Bulk API return value and generate a summary status record for the job."""
        tree = lxml_parse_string(xml)
        statuses = [el.text for el in tree.iterfind(".//{%s}state" % self.bulk.jobNS)]
        state_messages = [
            el.text for el in tree.iterfind(".//{%s}stateMessage" % self.bulk.jobNS)
        ]

        # Get how many total records failed across all the batches.
        failures = tree.findall(".//{%s}numberRecordsFailed" % self.bulk.jobNS)
        record_failure_count = sum([int(failure.text) for failure in (failures or [])])

        # Get how many total records processed across all the batches.
        processed = tree.findall(".//{%s}numberRecordsProcessed" % self.bulk.jobNS)
        records_processed_count = sum(
            [int(processed.text) for processed in (processed or [])]
        )
        # FIXME: "Not Processed" to be expected for original batch with PK Chunking Query
        # PK Chunking is not currently supported.
        if "Not Processed" in statuses:
            return DataOperationJobResult(
                DataOperationStatus.ABORTED,
                [],
                records_processed_count,
                record_failure_count,
            )
        elif "InProgress" in statuses or "Queued" in statuses:
            return DataOperationJobResult(
                DataOperationStatus.IN_PROGRESS,
                [],
                records_processed_count,
                record_failure_count,
            )
        elif "Failed" in statuses:
            return DataOperationJobResult(
                DataOperationStatus.JOB_FAILURE,
                state_messages,
                records_processed_count,
                record_failure_count,
            )

        # All the records submitted in this job failed.
        if record_failure_count:
            return DataOperationJobResult(
                DataOperationStatus.ROW_FAILURE,
                [],
                records_processed_count,
                record_failure_count,
            )

        return DataOperationJobResult(
            DataOperationStatus.SUCCESS,
            [],
            records_processed_count,
            record_failure_count,
        )

    def _wait_for_job(self, job_id):
        """Wait for the given job to enter a completed state (success or failure)."""
        while True:
            job_status = self.bulk.job_status(job_id)
            self.logger.info(
                f"Waiting for job {job_id} ({job_status['numberBatchesCompleted']}/{job_status['numberBatchesTotal']} batches complete)"
            )
            result = self._job_state_from_batches(job_id)
            if result.status is not DataOperationStatus.IN_PROGRESS:
                break

            time.sleep(10)
        plural_errors = "Errors" if result.total_row_errors != 1 else "Error"
        errors = (
            f": {result.total_row_errors} {plural_errors}"
            if result.total_row_errors
            else ""
        )
        self.logger.info(
            f"Job {job_id} finished with result: {result.status.value}{errors}"
        )
        if result.status is DataOperationStatus.JOB_FAILURE:
            for state_message in result.job_errors:
                self.logger.error(f"Batch failure message: {state_message}")

        return result


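# Illustrative example (editor's addition, not part of the diffed file):
# _parse_job_state() collapses the batch list returned by the Bulk API into a
# single DataOperationJobResult. For instance, a batch-info response whose
# <state> elements are ["Completed", "Completed"] and whose
# numberRecordsFailed values are 0 and 3 contains none of "Not Processed",
# "InProgress", "Queued" or "Failed", so the non-zero failure count maps to:
#     DataOperationJobResult(DataOperationStatus.ROW_FAILURE, [], processed, 3)
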
class BaseDataOperation(metaclass=ABCMeta):
    """Abstract base class for all data operations (queries and DML)."""

    def __init__(self, *, sobject, operation, api_options, context):
        self.sobject = sobject
        self.operation = operation
        self.api_options = api_options
        self.context = context
        self.bulk = context.bulk
        self.sf = context.sf
        self.logger = context.logger
        self.job_result = None


class BaseQueryOperation(BaseDataOperation, metaclass=ABCMeta):
    """Abstract base class for query operations in all APIs."""

    def __init__(self, *, sobject, api_options, context, query):
        super().__init__(
            sobject=sobject,
            operation=DataOperationType.QUERY,
            api_options=api_options,
            context=context,
        )
        self.soql = query

    def __enter__(self):
        self.query()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        pass

    @abstractmethod
    def query(self):
        """Execute requested query and block until results are available."""
        pass

    @abstractmethod
    def get_results(self):
        """Return a generator of rows from the query."""
        pass


class BulkApiQueryOperation(BaseQueryOperation, BulkJobMixin):
    """Operation class for Bulk API query jobs."""

    def query(self):
        self.job_id = self.bulk.create_query_job(self.sobject, contentType="CSV")
        self.logger.info(f"Created Bulk API query job {self.job_id}")
        self.batch_id = self.bulk.query(self.job_id, self.soql)

        self.job_result = self._wait_for_job(self.job_id)
        self.bulk.close_job(self.job_id)

    def get_results(self):
        # FIXME: For PK Chunking, need to get new batch Ids
        # and retrieve their results. Original batch will not be processed.

        result_ids = self.bulk.get_query_batch_result_ids(
            self.batch_id, job_id=self.job_id
        )
        for result_id in result_ids:
            uri = f"{self.bulk.endpoint}/job/{self.job_id}/batch/{self.batch_id}/result/{result_id}"

            with download_file(uri, self.bulk) as f:
                reader = csv.reader(f)
                self.headers = next(reader)
                if "Records not found for this query" in self.headers:
                    return

                yield from reader


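# Illustrative sketch (editor's addition, not part of the diffed file):
# BaseQueryOperation's context-manager protocol means query operations are
# typically driven like this, where `task` stands in for any context object
# exposing .bulk, .sf and .logger, as BaseDataOperation.__init__ expects:
#
#     op = BulkApiQueryOperation(
#         sobject="Contact",
#         api_options={},
#         context=task,
#         query="SELECT Id, LastName FROM Contact",
#     )
#     with op:  # __enter__ runs query(), which submits and waits for the job
#         for row in op.get_results():  # each row is a list of column values
#             ...
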
class RestApiQueryOperation(BaseQueryOperation):
    """Operation class for REST API query jobs."""

    def __init__(self, *, sobject, fields, api_options, context, query):
        super().__init__(
            sobject=sobject, api_options=api_options, context=context, query=query
        )
        self.fields = fields

    def query(self):
        self.response = self.sf.query(self.soql)
        self.job_result = DataOperationJobResult(
            DataOperationStatus.SUCCESS, [], self.response["totalSize"], 0
        )

    def get_results(self):
        def convert(rec):
            return [str(rec[f]) if rec[f] is not None else "" for f in self.fields]

        while True:
            yield from (convert(rec) for rec in self.response["records"])
            if not self.response["done"]:
                self.response = self.sf.query_more(
                    self.response["nextRecordsUrl"], identifier_is_url=True
                )
            else:
                return


class BaseDmlOperation(BaseDataOperation, metaclass=ABCMeta):
    """Abstract base class for DML operations in all APIs."""

    def __init__(self, *, sobject, operation, api_options, context, fields):
        super().__init__(
            sobject=sobject,
            operation=operation,
            api_options=api_options,
            context=context,
        )
        self.fields = fields

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.end()

    def start(self):
        """Perform any required setup, such as job initialization, for the operation."""
        pass

    @abstractmethod
    def get_prev_record_values(self, records):
        """Get the previous records values in case of UPSERT and UPDATE to prepare for rollback"""
        pass

    @abstractmethod
    def select_records(self, records):
        """Perform the requested DML operation on the supplied row iterator."""
        pass

    @abstractmethod
    def load_records(self, records):
        """Perform the requested DML operation on the supplied row iterator."""
        pass

    def end(self):
        """Perform any required teardown for the operation before results are returned."""
        pass

    @abstractmethod
    def get_results(self):
        """Return a generator of DataOperationResult objects."""
        pass


class BulkApiDmlOperation(BaseDmlOperation, BulkJobMixin):
    """Operation class for all DML operations run using the Bulk API."""

    def __init__(
        self,
        *,
        sobject,
        operation,
        api_options,
        context,
        fields,
        selection_strategy=SelectStrategy.STANDARD,
        selection_filter=None,
        selection_priority_fields=None,
        content_type=None,
        threshold=None,
    ):
        super().__init__(
            sobject=sobject,
            operation=operation,
            api_options=api_options,
            context=context,
            fields=fields,
        )
        self.api_options = api_options.copy()
        self.api_options["batch_size"] = (
            self.api_options.get("batch_size") or DEFAULT_BULK_BATCH_SIZE
        )
        self.csv_buff = io.StringIO(newline="")
        self.csv_writer = csv.writer(self.csv_buff, quoting=csv.QUOTE_ALL)

        self.select_operation_executor = SelectOperationExecutor(selection_strategy)
        self.selection_filter = selection_filter
        self.weights = assign_weights(
            priority_fields=selection_priority_fields, fields=fields
        )
        self.content_type = content_type if content_type else "CSV"
        self.threshold = threshold

    def start(self):
        self.job_id = self.bulk.create_job(
            self.sobject,
            self.operation.value,
            contentType=self.content_type,
            concurrency=self.api_options.get("bulk_mode", "Parallel"),
            external_id_name=self.api_options.get("update_key"),
        )

    def end(self):
        self.bulk.close_job(self.job_id)
        if not self.job_result:
            self.job_result = self._wait_for_job(self.job_id)

    def get_prev_record_values(self, records):
        """Get the previous values of the records based on the update key
        to ensure rollback can be performed"""
        # Function to be called only for UPSERT and UPDATE
        assert self.operation in [DataOperationType.UPSERT, DataOperationType.UPDATE]

        self.logger.info(f"Retrieving Previous Record Values of {self.sobject}")
        prev_record_values = []
        relevant_fields = set(self.fields + ["Id"])

        # Set update key
        update_key = (
            self.api_options.get("update_key")
            if self.operation == DataOperationType.UPSERT
            else "Id"
        )

        for count, batch in enumerate(
            self._batch(records, self.api_options["batch_size"])
        ):
            self.context.logger.info(f"Querying batch {count + 1}")

            # Extract update key values from the batch
            update_key_values = [
                rec[update_key]
                for rec in csv.DictReader([line.decode("utf-8") for line in batch])
            ]

            # Construct the SOQL query
            query_fields = ", ".join(relevant_fields)
            query_values = ", ".join(f"'{value}'" for value in update_key_values)
            query = f"SELECT {query_fields} FROM {self.sobject} WHERE {update_key} IN ({query_values})"

            # Execute the query using Bulk API
            job_id = self.bulk.create_query_job(self.sobject, contentType="JSON")
            batch_id = self.bulk.query(job_id, query)
            self.bulk.wait_for_batch(job_id, batch_id)
            self.bulk.close_job(job_id)
            results = self.bulk.get_all_results_for_query_batch(batch_id)

            # Extract relevant fields from results and append to the respective lists
            for result in results:
                result = json.load(salesforce_bulk.util.IteratorBytesIO(result))
                prev_record_values.extend(
                    [[res[key] for key in relevant_fields] for res in result]
                )

        self.logger.info("Done")
        return prev_record_values, tuple(relevant_fields)

    def load_records(self, records):
        self.batch_ids = []

        batch_size = self.api_options["batch_size"]
        for count, csv_batch in enumerate(self._batch(records, batch_size)):
            self.context.logger.info(f"Uploading batch {count + 1}")
            self.batch_ids.append(self.bulk.post_batch(self.job_id, iter(csv_batch)))

    def select_records(self, records):
        """Executes a SOQL query to select records and adds them to results"""

        self.select_results = []  # Store selected records
        query_records = []
        # Create a copy of the generator using tee
        records, records_copy = tee(records)
        # Count total number of records to fetch using the copy
        total_num_records = sum(1 for _ in records_copy)

        # In the case that records are zero, return success
        if total_num_records == 0:
            self.logger.info(f"No records present for {self.sobject}")
            self.job_result = DataOperationJobResult(
                status=DataOperationStatus.SUCCESS,
                job_errors=[],
                records_processed=0,
                total_row_errors=0,
            )
            return

        limit_clause = self._determine_limit_clause(total_num_records=total_num_records)

        # Generate and execute SOQL query
        # (not passing offset as it is not supported in Bulk)
        (
            select_query,
            query_fields,
        ) = self.select_operation_executor.select_generate_query(
            sobject=self.sobject,
            fields=self.fields,
            user_filter=self.selection_filter if self.selection_filter else None,
            limit=limit_clause,
            offset=None,
        )

        # Execute the main select query using Bulk API
        self.logger.info("Retrieving records from org...")
        select_query_records = self._execute_select_query(
            select_query=select_query, query_fields=query_fields
        )
        self.logger.info(f"Retrieved {len(select_query_records)} from org")

        query_records.extend(select_query_records)
        # Post-process the query results
        (
            selected_records,
            insert_records,
            error_message,
        ) = self.select_operation_executor.select_post_process(
            load_records=records,
            query_records=query_records,
            fields=self.fields,
            num_records=total_num_records,
            sobject=self.sobject,
            weights=self.weights,
            threshold=self.threshold,
        )

        # Log the number of selected and prepared for insertion records
        num_selected = sum(1 for record in selected_records if record)
        num_prepared = len(insert_records) if insert_records else 0

        self.logger.info(
            f"{num_selected} records selected."
            + (
                f" {num_prepared} records prepared for insertion."
                if num_prepared > 0
                else ""
            )
        )

        if insert_records:
            self._process_insert_records(insert_records, selected_records)

        if not error_message:
            self.select_results.extend(selected_records)

        # Update job result based on selection outcome
        self.job_result = DataOperationJobResult(
            status=(
                DataOperationStatus.SUCCESS
                if len(self.select_results)
                else DataOperationStatus.JOB_FAILURE
            ),
            job_errors=[error_message] if error_message else [],
            records_processed=len(self.select_results),
            total_row_errors=0,
        )

    def _process_insert_records(self, insert_records, selected_records):
        """Processes and inserts records if necessary."""
        insert_fields, _ = split_and_filter_fields(fields=self.fields)
        insert_step = BulkApiDmlOperation(
            sobject=self.sobject,
            operation=DataOperationType.INSERT,
            api_options=self.api_options,
            context=self.context,
            fields=insert_fields,
        )
        insert_step.start()
        insert_step.load_records(insert_records)
        insert_step.end()
        # Retrieve insert results
        insert_results = []
        for batch_id in insert_step.batch_ids:
            try:
                results_url = f"{insert_step.bulk.endpoint}/job/{insert_step.job_id}/batch/{batch_id}/result"
                # Download entire result file to a temporary file first
                # to avoid the server dropping connections
                with download_file(results_url, insert_step.bulk) as f:
                    self.logger.info(f"Downloaded results for batch {batch_id}")
                    reader = csv.reader(f)
                    next(reader)  # Skip header row
                    for row in reader:
                        success = process_bool_arg(row[1])
                        created = process_bool_arg(row[2])
                        insert_results.append(
                            {"id": row[0], "success": success, "created": created}
                        )
            except Exception as e:
                raise BulkDataException(
                    f"Failed to download results for batch {batch_id} ({str(e)})"
                )

        insert_index = 0
        for idx, record in enumerate(selected_records):
            if record is None:
                selected_records[idx] = insert_results[insert_index]
                insert_index += 1

    def _determine_limit_clause(self, total_num_records):
        """Determines the LIMIT clause based on the retrieval mode."""
        if (
            self.select_operation_executor.retrieval_mode
            == SelectRecordRetrievalMode.ALL
        ):
            return None
        elif (
            self.select_operation_executor.retrieval_mode
            == SelectRecordRetrievalMode.MATCH
        ):
            return total_num_records

    def _execute_select_query(self, select_query: str, query_fields: List[str]):
        """Executes the select Bulk API query, retrieves results in JSON, and converts to CSV format if needed."""
        self.batch_id = self.bulk.query(self.job_id, select_query)
        self.bulk.wait_for_batch(self.job_id, self.batch_id)
        result_ids = self.bulk.get_query_batch_result_ids(
            self.batch_id, job_id=self.job_id
        )
        select_query_records = []

        for result_id in result_ids:
            # Modify URI to request JSON format
            uri = f"{self.bulk.endpoint}/job/{self.job_id}/batch/{self.batch_id}/result/{result_id}?format=json"
            # Download JSON data
            with download_file(uri, self.bulk) as f:
                data = json.load(f)
                # Get headers from fields, expanding nested structures for TYPEOF results
                self.headers = query_fields

                # Convert each record to a flat row
                for record in data:
                    flat_record = flatten_record(record, self.headers)
                    select_query_records.append(flat_record)

        return select_query_records

    def _batch(self, records, n, char_limit=10000000):
        """Given an iterator of records, yields batches of
        records serialized in .csv format.

        Batches adhere to the following, in order of precedence:
        (1) They do not exceed the given character limit
        (2) They do not contain more than n records per batch
        """
        serialized_csv_fields = self._serialize_csv_record(self.fields)
        len_csv_fields = len(serialized_csv_fields)

        # append fields to first row
        batch = [serialized_csv_fields]
        current_chars = len_csv_fields
        for record in records:
            serialized_record = self._serialize_csv_record(record)
            # Does the next record put us over the character limit?
            if len(serialized_record) + current_chars > char_limit:
                yield batch
                batch = [serialized_csv_fields]
                current_chars = len_csv_fields

            batch.append(serialized_record)
            current_chars += len(serialized_record)

            # yield batch if we're at desired size
            # -1 due to first row being field names
            if len(batch) - 1 == n:
                yield batch
                batch = [serialized_csv_fields]
                current_chars = len_csv_fields

        # give back anything leftover
        if len(batch) > 1:
            yield batch

    def _serialize_csv_record(self, record):
        """Given a list of strings (record) return
        the corresponding record serialized in .csv format"""
        self.csv_writer.writerow(record)
        serialized = self.csv_buff.getvalue().encode("utf-8")
        # flush buffer
        self.csv_buff.truncate(0)
        self.csv_buff.seek(0)

        return serialized

    def get_results(self):
        """
        Retrieves and processes the results of a Bulk API operation.
        """

        if self.operation is DataOperationType.QUERY:
            yield from self._get_query_results()
        else:
            yield from self._get_batch_results()

    def _get_query_results(self):
        """Handles results for QUERY (select) operations"""
        for row in self.select_results:
            success = process_bool_arg(row["success"])
            created = process_bool_arg(row["created"])
            yield DataOperationResult(
                row["id"] if success else "",
                success,
                "",
                created,
            )

    def _get_batch_results(self):
        """Handles results for other DataOperationTypes (insert, update, etc.)"""
        for batch_id in self.batch_ids:
            try:
                results_url = (
                    f"{self.bulk.endpoint}/job/{self.job_id}/batch/{batch_id}/result"
                )
                # Download entire result file to a temporary file first
                # to avoid the server dropping connections
                with download_file(results_url, self.bulk) as f:
                    self.logger.info(f"Downloaded results for batch {batch_id}")
                    yield from self._parse_batch_results(f)

            except Exception as e:
                raise BulkDataException(
                    f"Failed to download results for batch {batch_id} ({str(e)})"
                )

    def _parse_batch_results(self, f):
        """Parses batch results from the downloaded file"""
        reader = csv.reader(f)
        next(reader)  # Skip header row

        for row in reader:
            success = process_bool_arg(row[1])
            created = process_bool_arg(row[2])
            yield DataOperationResult(
                row[0] if success else None,
                success,
                row[3] if not success else None,
                created,
            )

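The _batch helper above serializes rows to CSV and cuts batches on two limits, in order of precedence: the character limit, then the record count n (the repeated header row is not counted toward n). The following simplified, self-contained sketch mirrors that batching rule on plain strings; it is illustrative only and is not part of the packaged module.

def batch_lines(lines, n, char_limit=10_000_000):
    """Yield lists of lines, each holding at most n lines and char_limit characters."""
    batch, chars = [], 0
    for line in lines:
        # Character limit takes precedence: start a new batch before overflowing it
        if batch and chars + len(line) > char_limit:
            yield batch
            batch, chars = [], 0
        batch.append(line)
        chars += len(line)
        # Then the record-count limit
        if len(batch) == n:
            yield batch
            batch, chars = [], 0
    if batch:
        yield batch

# Example: five rows with n=2 produce batches of sizes 2, 2, 1
print([len(b) for b in batch_lines([f"row{i}\n" for i in range(5)], n=2)])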
class RestApiDmlOperation(BaseDmlOperation):
    """Operation class for all DML operations run using the REST API."""

    def __init__(
        self,
        *,
        sobject,
        operation,
        api_options,
        context,
        fields,
        selection_strategy=SelectStrategy.STANDARD,
        selection_filter=None,
        selection_priority_fields=None,
        content_type=None,
        threshold=None,
    ):
        super().__init__(
            sobject=sobject,
            operation=operation,
            api_options=api_options,
            context=context,
            fields=fields,
        )

        # Because we send values in JSON, we must convert Booleans and nulls
        describe = {
            field["name"]: field
            for field in getattr(context.sf, sobject).describe()["fields"]
        }
        self.boolean_fields = [
            f for f in fields if "." not in f and describe[f]["type"] == "boolean"
        ]
        self.api_options = api_options.copy()
        self.api_options["batch_size"] = (
            self.api_options.get("batch_size") or DEFAULT_REST_BATCH_SIZE
        )
        self.api_options["batch_size"] = min(
            self.api_options["batch_size"], MAX_REST_BATCH_SIZE
        )

        self.select_operation_executor = SelectOperationExecutor(selection_strategy)
        self.selection_filter = selection_filter
        self.weights = assign_weights(
            priority_fields=selection_priority_fields, fields=fields
        )
        self.content_type = content_type
        self.threshold = threshold

    def _record_to_json(self, rec):
        result = dict(zip(self.fields, rec))
        for boolean_field in self.boolean_fields:
            try:
                result[boolean_field] = process_bool_arg(result[boolean_field] or False)
            except TypeError as e:
                raise BulkDataException(e)

        # Remove empty fields (different semantics in REST API)
        # We do this for insert only - on update, any fields set to `null`
        # are meant to be blanked out.
        if self.operation is DataOperationType.INSERT:
            result = {
                k: result[k]
                for k in result
                if result[k] is not None and result[k] != ""
            }
        elif self.operation in (DataOperationType.UPDATE, DataOperationType.UPSERT):
            result = {k: (result[k] if result[k] != "" else None) for k in result}

        result["attributes"] = {"type": self.sobject}
        return result

    def get_prev_record_values(self, records):
        """Get the previous values of the records based on the update key
        to ensure rollback can be performed"""
        # Function to be called only for UPSERT and UPDATE
        assert self.operation in [DataOperationType.UPSERT, DataOperationType.UPDATE]

        self.logger.info(f"Retrieving Previous Record Values of {self.sobject}")
        prev_record_values = []
        relevant_fields = set(self.fields + ["Id"])

        # Set update key
        update_key = (
            self.api_options.get("update_key")
            if self.operation == DataOperationType.UPSERT
            else "Id"
        )

        for chunk in iterate_in_chunks(self.api_options.get("batch_size"), records):
            update_key_values = tuple(
                filter(None, (self._record_to_json(rec)[update_key] for rec in chunk))
            )

            # Construct the query string
            query_fields = ", ".join(relevant_fields)
            query = f"SELECT {query_fields} FROM {self.sobject} WHERE {update_key} IN {update_key_values}"

            # Execute the query
            results = self.sf.query(query)

            # Extract relevant fields from results and extend the list
            prev_record_values.extend(
                [[res[key] for key in relevant_fields] for res in results["records"]]
            )

        self.logger.info("Done")
        return prev_record_values, tuple(relevant_fields)

    def load_records(self, records):
        """Load, update, upsert or delete records into the org"""

        self.results = []
        method = {
            DataOperationType.INSERT: "POST",
            DataOperationType.UPDATE: "PATCH",
            DataOperationType.DELETE: "DELETE",
            DataOperationType.UPSERT: "PATCH",
        }[self.operation]

        update_key = self.api_options.get("update_key")
        for chunk in iterate_in_chunks(self.api_options.get("batch_size"), records):
            if self.operation is DataOperationType.DELETE:
                url_string = "?ids=" + ",".join(
                    self._record_to_json(rec)["Id"] for rec in chunk
                )
                json = None
            else:
                if update_key:
                    assert self.operation == DataOperationType.UPSERT
                    url_string = f"/{self.sobject}/{update_key}"
                else:
                    url_string = ""
                json = {
                    "allOrNone": False,
                    "records": [self._record_to_json(rec) for rec in chunk],
                }

            self.results.extend(
                self.sf.restful(
                    f"composite/sobjects{url_string}", method=method, json=json
                )
            )

        row_errors = len([res for res in self.results if not res["success"]])
        self.job_result = DataOperationJobResult(
            (
                DataOperationStatus.SUCCESS
                if not row_errors
                else DataOperationStatus.ROW_FAILURE
            ),
            [],
            len(self.results),
            row_errors,
        )

    def select_records(self, records):
        """Executes a SOQL query to select records and adds them to results"""

        self.results = []
        query_records = []

        # Create a copy of the generator using tee
        records, records_copy = tee(records)

        # Count total number of records to fetch using the copy
        total_num_records = sum(1 for _ in records_copy)

        # In the case that records are zero, return success
        if total_num_records == 0:
            self.logger.info(f"No records present for {self.sobject}")
            self.job_result = DataOperationJobResult(
                status=DataOperationStatus.SUCCESS,
                job_errors=[],
                records_processed=0,
                total_row_errors=0,
            )
            return

        # Set LIMIT condition
        limit_clause = self._determine_limit_clause(total_num_records)

        # Generate the SOQL query based on the selection strategy
        (
            select_query,
            query_fields,
        ) = self.select_operation_executor.select_generate_query(
            sobject=self.sobject,
            fields=self.fields,
            user_filter=self.selection_filter or None,
            limit=limit_clause,
            offset=None,
        )

        # Execute the query and gather the records
        self.logger.info("Retrieving records from org...")
        query_records = self._execute_soql_query(select_query, query_fields)
        self.logger.info(f"Retrieved {len(query_records)} from org")

        # Post-process the query results for this batch
        (
            selected_records,
            insert_records,
            error_message,
        ) = self.select_operation_executor.select_post_process(
            load_records=records,
            query_records=query_records,
            fields=self.fields,
            num_records=total_num_records,
            sobject=self.sobject,
            weights=self.weights,
            threshold=self.threshold,
        )

        # Log the number of selected and prepared for insertion records
        num_selected = sum(1 for record in selected_records if record)
        num_prepared = len(insert_records) if insert_records else 0

        self.logger.info(
            f"{num_selected} records selected."
            + (
                f" {num_prepared} records prepared for insertion."
                if num_prepared > 0
                else ""
            )
        )

        if insert_records:
            self._process_insert_records(insert_records, selected_records)

        if not error_message:
            # Add selected records from this batch to the overall results
            self.results.extend(selected_records)

        # Update the job result based on the overall selection outcome
        self._update_job_result(error_message)

    def _determine_limit_clause(self, total_num_records):
        """Determines the LIMIT clause based on the retrieval mode."""
        if (
            self.select_operation_executor.retrieval_mode
            == SelectRecordRetrievalMode.ALL
        ):
            return None
        elif (
            self.select_operation_executor.retrieval_mode
            == SelectRecordRetrievalMode.MATCH
        ):
            return total_num_records

    def _execute_soql_query(self, select_query, query_fields):
        """Executes the SOQL query and returns the flattened records."""
        query_records = []
        response = self.sf.restful(f"query/?q={quote(select_query)}", method="GET")
        query_records.extend(self._flatten_response_records(response, query_fields))

        while not response["done"]:
            response = self.sf.query_more(
                response["nextRecordsUrl"], identifier_is_url=True
            )
            query_records.extend(self._flatten_response_records(response, query_fields))

        return query_records

    def _flatten_response_records(self, response, query_fields):
        """Flattens the response records and returns them as a list."""
        return [flatten_record(record, query_fields) for record in response["records"]]

    def _process_insert_records(self, insert_records, selected_records):
        """Processes and inserts records if necessary."""
        insert_fields, _ = split_and_filter_fields(fields=self.fields)
        insert_step = RestApiDmlOperation(
            sobject=self.sobject,
            operation=DataOperationType.INSERT,
            api_options=self.api_options,
            context=self.context,
            fields=insert_fields,
        )
        insert_step.start()
        insert_step.load_records(insert_records)
        insert_step.end()
        insert_results = insert_step.results

        insert_index = 0
        for idx, record in enumerate(selected_records):
            if record is None:
                selected_records[idx] = insert_results[insert_index]
                insert_index += 1

    def _update_job_result(self, error_message):
        """Updates the job result based on the selection outcome."""
        self.job_result = DataOperationJobResult(
            status=(
                DataOperationStatus.SUCCESS
                if len(self.results)
                else DataOperationStatus.JOB_FAILURE
            ),
            job_errors=[error_message] if error_message else [],
            records_processed=len(self.results),
            total_row_errors=0,
        )

    def get_results(self):
        """Return a generator of DataOperationResult objects."""

        def _convert(res):
            # TODO: make DataOperationResult handle this error variant
            if res.get("errors"):
                errors = "\n".join(
                    f"{e['statusCode']}: {e['message']} ({','.join(e['fields'])})"
                    for e in res["errors"]
                )
            else:
                errors = ""

            if self.operation == DataOperationType.INSERT:
                created = True
            elif self.operation == DataOperationType.UPDATE:
                created = False
            else:
                created = res.get("created")

            return DataOperationResult(res.get("id"), res["success"], errors, created)

        yield from (_convert(res) for res in self.results)

def get_query_operation(
    *,
    sobject: str,
    fields: List[str],
    api_options: Dict,
    context: Any,
    query: str,
    api: Optional[DataApi] = DataApi.SMART,
) -> BaseQueryOperation:
    """Create an appropriate QueryOperation instance for the given parameters, selecting
    between REST and Bulk APIs based upon volume (Bulk > 2000 records) if DataApi.SMART
    is provided."""

    # The Record Count endpoint requires API 40.0. REST Collections requires 42.0.
    api_version = float(context.sf.sf_version)
    if api_version < 42.0 and api is not DataApi.BULK:
        api = DataApi.BULK

    if api in (DataApi.SMART, None):
        record_count_response = context.sf.restful(
            f"limits/recordCount?sObjects={sobject}"
        )
        sobject_map = {
            entry["name"]: entry["count"] for entry in record_count_response["sObjects"]
        }
        api = (
            DataApi.BULK
            if sobject in sobject_map and sobject_map[sobject] >= 2000
            else DataApi.REST
        )

    if api is DataApi.BULK:
        return BulkApiQueryOperation(
            sobject=sobject, api_options=api_options, context=context, query=query
        )
    elif api is DataApi.REST:
        return RestApiQueryOperation(
            sobject=sobject,
            api_options=api_options,
            context=context,
            query=query,
            fields=fields,
        )
    else:
        raise AssertionError(f"Unknown API: {api}")

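For orientation, a minimal usage sketch of get_query_operation follows. It assumes a task-style context object exposing sf, bulk, and logger (as the operation classes above expect) and assumes the module is importable as cumulusci.tasks.bulkdata.step, the path used by upstream CumulusCI; both are assumptions rather than guarantees about this distribution, and the snippet needs a connected org to run.

# Illustrative sketch only; `task` stands in for a CumulusCI task/context
# that provides .sf, .bulk, and .logger as used by the classes above.
from cumulusci.tasks.bulkdata.step import DataApi, get_query_operation

op = get_query_operation(
    sobject="Contact",
    fields=["Id", "LastName"],
    api_options={},
    context=task,
    query="SELECT Id, LastName FROM Contact",
    api=DataApi.SMART,  # picks REST or Bulk from the org's record counts
)
op.query()  # runs the job and blocks until results are available
for row in op.get_results():
    print(row)  # each row is a list of field values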
def get_dml_operation(
    *,
    sobject: str,
    operation: DataOperationType,
    fields: List[str],
    api_options: Dict,
    context: Any,
    volume: int,
    api: Optional[DataApi] = DataApi.SMART,
    selection_strategy: SelectStrategy = SelectStrategy.STANDARD,
    selection_filter: Union[str, None] = None,
    selection_priority_fields: Union[dict, None] = None,
    content_type: Union[str, None] = None,
    threshold: Union[float, None] = None,
) -> BaseDmlOperation:
    """Create an appropriate DmlOperation instance for the given parameters, selecting
    between REST and Bulk APIs based upon volume (Bulk used at volumes over 2000 records,
    or if the operation is HARD_DELETE, which is only available for Bulk)."""

    context.logger.debug(f"Creating {operation} Operation for {sobject} using {api}")
    assert isinstance(operation, DataOperationType)

    # REST Collections requires 42.0.
    api_version = float(context.sf.sf_version)
    if api_version < 42.0 and api is not DataApi.BULK:
        api = DataApi.BULK

    if api in (DataApi.SMART, None):
        api = (
            DataApi.BULK
            if volume >= 2000 or operation is DataOperationType.HARD_DELETE
            else DataApi.REST
        )

    if api is DataApi.BULK:
        api_class = BulkApiDmlOperation
    elif api is DataApi.REST:
        api_class = RestApiDmlOperation
    else:
        raise AssertionError(f"Unknown API: {api}")

    return api_class(
        sobject=sobject,
        operation=operation,
        api_options=api_options,
        context=context,
        fields=fields,
        selection_strategy=selection_strategy,
        selection_filter=selection_filter,
        selection_priority_fields=selection_priority_fields,
        content_type=content_type,
        threshold=threshold,
    )

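A matching sketch for get_dml_operation, under the same assumptions about the context object and import path. The with-statement leans on BaseDmlOperation.__enter__/__exit__ defined above, which call start() and end() around the load.

# Illustrative sketch only; `task` is the same assumed context as above.
from cumulusci.tasks.bulkdata.step import (
    DataApi,
    DataOperationType,
    get_dml_operation,
)

step = get_dml_operation(
    sobject="Account",
    operation=DataOperationType.INSERT,
    fields=["Name"],
    api_options={"batch_size": 200},
    context=task,
    volume=2,  # small volume, so SMART routing falls through to REST
    api=DataApi.SMART,
)
with step:  # __enter__ calls start(); __exit__ calls end()
    step.load_records(iter([["Acme"], ["Globex"]]))
for result in step.get_results():
    print(result)  # DataOperationResult per input row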
def extract_flattened_headers(query_fields):
    """Extract headers from query fields, including handling of TYPEOF fields."""
    headers = []

    for field in query_fields:
        if isinstance(field, dict):
            # Handle TYPEOF / polymorphic fields
            for lookup, references in field.items():
                # Assuming each reference is a list of dictionaries
                for ref_type in references:
                    for ref_obj, ref_fields in ref_type.items():
                        for nested_field in ref_fields:
                            headers.append(
                                f"{lookup}.{ref_obj}.{nested_field}"
                            )  # Flatten the structure
        else:
            # Regular fields
            headers.append(field)

    return headers

def flatten_record(record, headers):
    """Flatten each record to match headers, handling nested fields."""
    flat_record = []

    for field in headers:
        components = field.split(".")
        value = ""

        # Handle lookup fields with two or three components
        if len(components) >= 2:
            lookup_field = components[0]
            lookup = record.get(lookup_field, None)

            # Check if lookup field exists in the record
            if lookup is None:
                value = ""
            else:
                if len(components) == 2:
                    # Handle fields with two components: {lookup}.{ref_field}
                    ref_field = components[1]
                    value = lookup.get(ref_field, "")
                elif len(components) == 3:
                    # Handle fields with three components: {lookup}.{ref_obj}.{ref_field}
                    ref_obj, ref_field = components[1], components[2]
                    # Check if the type matches the specified ref_obj
                    if lookup.get("attributes", {}).get("type") == ref_obj:
                        value = lookup.get(ref_field, "")
                    else:
                        value = ""

        else:
            # Regular fields or non-polymorphic fields
            value = record.get(field, "")

        # Set None values to empty string
        if value is None:
            value = ""
        elif not isinstance(value, str):
            value = str(value)

        # Append the resolved value to the flattened record
        flat_record.append(value)

    return flat_record

def assign_weights(
    priority_fields: Union[Dict[str, str], None], fields: List[str]
) -> list:
    # If priority_fields is None or an empty dictionary, set all weights to 1
    if not priority_fields:
        return [1] * len(fields)

    # Initialize the weight list with LOW_PRIORITY_VALUE
    weights = [LOW_PRIORITY_VALUE] * len(fields)

    # Iterate over the fields and assign weights based on priority_fields
    for i, field in enumerate(fields):
        if field in priority_fields:
            # Set weight to HIGH_PRIORITY_VALUE if field is in priority_fields
            weights[i] = HIGH_PRIORITY_VALUE

    return weights
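flatten_record resolves dotted headers against the nested lookup dictionaries that the JSON query results return, and assign_weights marks priority fields with the module's HIGH_PRIORITY_VALUE constant. A small self-contained example of flatten_record's behaviour, assuming the function defined above is in scope:

# Missing or null lookups collapse to empty strings; scalars are stringified.
record = {
    "attributes": {"type": "Contact"},
    "LastName": "Lovelace",
    "Account": {"attributes": {"type": "Account"}, "Name": "Acme"},
    "Owner": None,
}
headers = ["LastName", "Account.Name", "Owner.Alias"]

assert flatten_record(record, headers) == ["Lovelace", "Acme", ""]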