cumulusci-plus 5.0.21__py3-none-any.whl → 5.0.43__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cumulusci/__about__.py +1 -1
- cumulusci/cli/logger.py +2 -2
- cumulusci/cli/service.py +20 -0
- cumulusci/cli/task.py +19 -3
- cumulusci/cli/tests/test_error.py +3 -1
- cumulusci/cli/tests/test_flow.py +279 -2
- cumulusci/cli/tests/test_org.py +5 -0
- cumulusci/cli/tests/test_service.py +15 -12
- cumulusci/cli/tests/test_task.py +122 -2
- cumulusci/cli/tests/utils.py +1 -4
- cumulusci/core/config/__init__.py +1 -0
- cumulusci/core/config/base_task_flow_config.py +26 -1
- cumulusci/core/config/org_config.py +2 -1
- cumulusci/core/config/project_config.py +14 -20
- cumulusci/core/config/scratch_org_config.py +12 -0
- cumulusci/core/config/tests/test_config.py +1 -0
- cumulusci/core/config/tests/test_config_expensive.py +9 -3
- cumulusci/core/config/universal_config.py +3 -4
- cumulusci/core/dependencies/base.py +5 -1
- cumulusci/core/dependencies/dependencies.py +1 -1
- cumulusci/core/dependencies/github.py +1 -2
- cumulusci/core/dependencies/resolvers.py +1 -1
- cumulusci/core/dependencies/tests/test_dependencies.py +1 -1
- cumulusci/core/dependencies/tests/test_resolvers.py +1 -1
- cumulusci/core/flowrunner.py +90 -6
- cumulusci/core/github.py +1 -1
- cumulusci/core/sfdx.py +3 -1
- cumulusci/core/source_transforms/tests/test_transforms.py +1 -1
- cumulusci/core/source_transforms/transforms.py +1 -1
- cumulusci/core/tasks.py +13 -2
- cumulusci/core/tests/test_flowrunner.py +100 -0
- cumulusci/core/tests/test_tasks.py +65 -0
- cumulusci/core/utils.py +3 -1
- cumulusci/core/versions.py +1 -1
- cumulusci/cumulusci.yml +73 -1
- cumulusci/oauth/client.py +1 -1
- cumulusci/plugins/plugin_base.py +5 -3
- cumulusci/robotframework/pageobjects/ObjectManagerPageObject.py +1 -1
- cumulusci/salesforce_api/rest_deploy.py +1 -1
- cumulusci/schema/cumulusci.jsonschema.json +69 -0
- cumulusci/tasks/apex/anon.py +1 -1
- cumulusci/tasks/apex/testrunner.py +421 -144
- cumulusci/tasks/apex/tests/test_apex_tasks.py +917 -1
- cumulusci/tasks/bulkdata/extract.py +0 -1
- cumulusci/tasks/bulkdata/extract_dataset_utils/extract_yml.py +1 -1
- cumulusci/tasks/bulkdata/extract_dataset_utils/synthesize_extract_declarations.py +1 -1
- cumulusci/tasks/bulkdata/extract_dataset_utils/tests/test_extract_yml.py +1 -1
- cumulusci/tasks/bulkdata/generate_and_load_data.py +136 -12
- cumulusci/tasks/bulkdata/mapping_parser.py +139 -44
- cumulusci/tasks/bulkdata/select_utils.py +1 -1
- cumulusci/tasks/bulkdata/snowfakery.py +100 -25
- cumulusci/tasks/bulkdata/tests/test_generate_and_load.py +159 -0
- cumulusci/tasks/bulkdata/tests/test_load.py +0 -2
- cumulusci/tasks/bulkdata/tests/test_mapping_parser.py +763 -1
- cumulusci/tasks/bulkdata/tests/test_select_utils.py +46 -0
- cumulusci/tasks/bulkdata/tests/test_snowfakery.py +133 -0
- cumulusci/tasks/create_package_version.py +190 -16
- cumulusci/tasks/datadictionary.py +1 -1
- cumulusci/tasks/metadata_etl/__init__.py +2 -0
- cumulusci/tasks/metadata_etl/applications.py +256 -0
- cumulusci/tasks/metadata_etl/base.py +7 -3
- cumulusci/tasks/metadata_etl/layouts.py +1 -1
- cumulusci/tasks/metadata_etl/permissions.py +1 -1
- cumulusci/tasks/metadata_etl/remote_site_settings.py +2 -2
- cumulusci/tasks/metadata_etl/tests/test_applications.py +710 -0
- cumulusci/tasks/push/README.md +15 -17
- cumulusci/tasks/release_notes/README.md +13 -13
- cumulusci/tasks/release_notes/generator.py +13 -8
- cumulusci/tasks/robotframework/tests/test_robotframework.py +6 -1
- cumulusci/tasks/salesforce/Deploy.py +53 -2
- cumulusci/tasks/salesforce/SfPackageCommands.py +363 -0
- cumulusci/tasks/salesforce/__init__.py +1 -0
- cumulusci/tasks/salesforce/assign_ps_psg.py +448 -0
- cumulusci/tasks/salesforce/composite.py +1 -1
- cumulusci/tasks/salesforce/custom_settings_wait.py +1 -1
- cumulusci/tasks/salesforce/enable_prediction.py +5 -1
- cumulusci/tasks/salesforce/getPackageVersion.py +89 -0
- cumulusci/tasks/salesforce/insert_record.py +18 -19
- cumulusci/tasks/salesforce/sourcetracking.py +1 -1
- cumulusci/tasks/salesforce/tests/test_Deploy.py +316 -1
- cumulusci/tasks/salesforce/tests/test_SfPackageCommands.py +554 -0
- cumulusci/tasks/salesforce/tests/test_assign_ps_psg.py +1055 -0
- cumulusci/tasks/salesforce/tests/test_enable_prediction.py +4 -2
- cumulusci/tasks/salesforce/tests/test_getPackageVersion.py +651 -0
- cumulusci/tasks/salesforce/tests/test_update_dependencies.py +1 -1
- cumulusci/tasks/salesforce/tests/test_update_external_auth_identity_provider.py +927 -0
- cumulusci/tasks/salesforce/tests/test_update_external_credential.py +1427 -0
- cumulusci/tasks/salesforce/tests/test_update_named_credential.py +1042 -0
- cumulusci/tasks/salesforce/tests/test_update_record.py +512 -0
- cumulusci/tasks/salesforce/update_dependencies.py +2 -2
- cumulusci/tasks/salesforce/update_external_auth_identity_provider.py +551 -0
- cumulusci/tasks/salesforce/update_external_credential.py +647 -0
- cumulusci/tasks/salesforce/update_named_credential.py +441 -0
- cumulusci/tasks/salesforce/update_profile.py +17 -13
- cumulusci/tasks/salesforce/update_record.py +217 -0
- cumulusci/tasks/salesforce/users/permsets.py +62 -5
- cumulusci/tasks/salesforce/users/tests/test_permsets.py +237 -11
- cumulusci/tasks/sfdmu/__init__.py +0 -0
- cumulusci/tasks/sfdmu/sfdmu.py +376 -0
- cumulusci/tasks/sfdmu/tests/__init__.py +1 -0
- cumulusci/tasks/sfdmu/tests/test_runner.py +212 -0
- cumulusci/tasks/sfdmu/tests/test_sfdmu.py +1012 -0
- cumulusci/tasks/tests/test_create_package_version.py +716 -1
- cumulusci/tasks/tests/test_util.py +42 -0
- cumulusci/tasks/util.py +37 -1
- cumulusci/tasks/utility/copyContents.py +402 -0
- cumulusci/tasks/utility/credentialManager.py +302 -0
- cumulusci/tasks/utility/directoryRecreator.py +30 -0
- cumulusci/tasks/utility/env_management.py +1 -1
- cumulusci/tasks/utility/secretsToEnv.py +135 -0
- cumulusci/tasks/utility/tests/test_copyContents.py +1719 -0
- cumulusci/tasks/utility/tests/test_credentialManager.py +1150 -0
- cumulusci/tasks/utility/tests/test_directoryRecreator.py +439 -0
- cumulusci/tasks/utility/tests/test_secretsToEnv.py +1118 -0
- cumulusci/tests/test_integration_infrastructure.py +3 -1
- cumulusci/tests/test_utils.py +70 -6
- cumulusci/utils/__init__.py +54 -9
- cumulusci/utils/classutils.py +5 -2
- cumulusci/utils/http/tests/cassettes/ManualEditTestCompositeParallelSalesforce.test_http_headers.yaml +31 -30
- cumulusci/utils/options.py +23 -1
- cumulusci/utils/parallel/task_worker_queues/parallel_worker.py +1 -1
- cumulusci/utils/yaml/cumulusci_yml.py +8 -3
- cumulusci/utils/yaml/model_parser.py +2 -2
- cumulusci/utils/yaml/tests/test_cumulusci_yml.py +1 -1
- cumulusci/utils/yaml/tests/test_model_parser.py +3 -3
- cumulusci/vcs/base.py +23 -15
- cumulusci/vcs/bootstrap.py +5 -4
- cumulusci/vcs/utils/list_modified_files.py +189 -0
- cumulusci/vcs/utils/tests/test_list_modified_files.py +588 -0
- {cumulusci_plus-5.0.21.dist-info → cumulusci_plus-5.0.43.dist-info}/METADATA +11 -10
- {cumulusci_plus-5.0.21.dist-info → cumulusci_plus-5.0.43.dist-info}/RECORD +135 -104
- {cumulusci_plus-5.0.21.dist-info → cumulusci_plus-5.0.43.dist-info}/WHEEL +1 -1
- {cumulusci_plus-5.0.21.dist-info → cumulusci_plus-5.0.43.dist-info}/entry_points.txt +0 -0
- {cumulusci_plus-5.0.21.dist-info → cumulusci_plus-5.0.43.dist-info}/licenses/AUTHORS.rst +0 -0
- {cumulusci_plus-5.0.21.dist-info → cumulusci_plus-5.0.43.dist-info}/licenses/LICENSE +0 -0

cumulusci/tasks/bulkdata/tests/test_select_utils.py

@@ -1,3 +1,5 @@
+import sys
+
 import pytest
 
 from cumulusci.tasks.bulkdata.select_utils import (
@@ -618,6 +620,14 @@ def test_vectorize_records_mixed_numerical_boolean_categorical():
     not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE,
     reason="requires optional dependencies for annoy",
 )
+@pytest.mark.skipif(
+    sys.platform == "darwin" and sys.version_info[:2] in [(3, 11), (3, 13)],
+    reason="Annoy library has known compatibility issues on macOS with Python 3.11 and 3.13",
+)
+@pytest.mark.skipif(
+    sys.platform == "linux" and sys.version_info[:2] == (3, 12),
+    reason="Annoy library has known compatibility issues on Linux with Python 3.12",
+)
 def test_annoy_post_process():
     # Test data
     load_records = [["Alice", "Engineer"], ["Bob", "Doctor"]]
@@ -648,6 +658,14 @@ def test_annoy_post_process():
     not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE,
     reason="requires optional dependencies for annoy",
 )
+@pytest.mark.skipif(
+    sys.platform == "darwin" and sys.version_info[:2] in [(3, 11), (3, 13)],
+    reason="Annoy library has known compatibility issues on macOS with Python 3.11 and 3.13",
+)
+@pytest.mark.skipif(
+    sys.platform == "linux" and sys.version_info[:2] == (3, 12),
+    reason="Annoy library has known compatibility issues on Linux with Python 3.12",
+)
 def test_annoy_post_process__insert_records():
     # Test data
     load_records = [["Alice", "Engineer"], ["Bob", "Doctor"]]
@@ -689,6 +707,18 @@ def test_annoy_post_process__insert_records():
         assert insert_record in [["Alice", "Engineer"], ["Bob", "Doctor"]]


+@pytest.mark.skipif(
+    not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE,
+    reason="requires optional dependencies for annoy",
+)
+@pytest.mark.skipif(
+    sys.platform == "darwin" and sys.version_info[:2] in [(3, 11), (3, 13)],
+    reason="Annoy library has known compatibility issues on macOS with Python 3.11 and 3.13",
+)
+@pytest.mark.skipif(
+    sys.platform == "linux" and sys.version_info[:2] == (3, 12),
+    reason="Annoy library has known compatibility issues on Linux with Python 3.12",
+)
 def test_annoy_post_process__no_query_records():
     # Test data
     load_records = [["Alice", "Engineer"], ["Bob", "Doctor"]]
@@ -721,6 +751,14 @@ def test_annoy_post_process__no_query_records():
     not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE,
     reason="requires optional dependencies for annoy",
 )
+@pytest.mark.skipif(
+    sys.platform == "darwin" and sys.version_info[:2] in [(3, 11), (3, 13)],
+    reason="Annoy library has known compatibility issues on macOS with Python 3.11 and 3.13",
+)
+@pytest.mark.skipif(
+    sys.platform == "linux" and sys.version_info[:2] == (3, 12),
+    reason="Annoy library has known compatibility issues on Linux with Python 3.12",
+)
 def test_annoy_post_process__insert_records_with_polymorphic_fields():
     # Test data
     load_records = [
@@ -778,6 +816,14 @@ def test_annoy_post_process__insert_records_with_polymorphic_fields():
     not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE,
     reason="requires optional dependencies for annoy",
 )
+@pytest.mark.skipif(
+    sys.platform == "darwin" and sys.version_info[:2] in [(3, 11), (3, 13)],
+    reason="Annoy library has known compatibility issues on macOS with Python 3.11 and 3.13",
+)
+@pytest.mark.skipif(
+    sys.platform == "linux" and sys.version_info[:2] == (3, 12),
+    reason="Annoy library has known compatibility issues on Linux with Python 3.12",
+)
 def test_single_record_match_annoy_post_process():
     # Mock data where only the first query record matches the first load record
     load_records = [["Alice", "Engineer"], ["Bob", "Doctor"]]

cumulusci/tasks/bulkdata/tests/test_snowfakery.py

@@ -824,6 +824,139 @@ class TestSnowfakery:
             "Account",
         }

+    @mock.patch(
+        "cumulusci.tasks.bulkdata.snowfakery.Snowfakery._run_generate_and_load_subtask"
+    )
+    def test_validate_only_mode(self, mock_subtask, create_task):
+        """Test that validate_only mode validates without loading data"""
+        from cumulusci.tasks.bulkdata.mapping_parser import ValidationResult
+
+        # Mock subtask return value
+        validation_result = ValidationResult()
+        mock_subtask.return_value = {"validation_result": validation_result}
+
+        task = create_task(
+            Snowfakery,
+            {
+                "recipe": sample_yaml,
+                "validate_only": True,
+            },
+        )
+
+        task()
+
+        # Verify subtask was called with validate_only=True
+        mock_subtask.assert_called_once()
+        call_args = mock_subtask.call_args
+        assert call_args.kwargs.get("validate_only")
+
+        # Verify return values contain validation_result
+        assert "validation_result" in task.return_values
+        assert task.return_values["validation_result"] == validation_result
+
+    @mock.patch(
+        "cumulusci.tasks.bulkdata.snowfakery.Snowfakery._run_generate_and_load_subtask"
+    )
+    def test_validate_only_with_errors(self, mock_subtask, create_task):
+        """Test that validate_only mode returns errors without raising exception"""
+        from cumulusci.tasks.bulkdata.mapping_parser import ValidationResult
+
+        # Mock ValidationResult with errors
+        validation_result = ValidationResult()
+        validation_result.add_error("Test error: Field does not exist")
+        validation_result.add_warning("Test warning: Field has no permissions")
+        mock_subtask.return_value = {"validation_result": validation_result}
+
+        task = create_task(
+            Snowfakery,
+            {
+                "recipe": sample_yaml,
+                "validate_only": True,
+            },
+        )
+
+        # Should not raise exception even with errors
+        task()
+
+        # Verify subtask was called
+        mock_subtask.assert_called_once()
+
+        # Verify return values contain validation_result with errors
+        assert "validation_result" in task.return_values
+        assert task.return_values["validation_result"].has_errors()
+        assert len(task.return_values["validation_result"].errors) == 1
+        assert len(task.return_values["validation_result"].warnings) == 1
+
+    def test_validate_only_false_loads_data(self, mock_load_data, create_task):
+        """Test that validate_only=False performs normal data loading"""
+        task = create_task(
+            Snowfakery,
+            {
+                "recipe": sample_yaml,
+                "validate_only": False,
+            },
+        )
+
+        task()
+
+        # Verify load WAS called
+        assert len(mock_load_data.mock_calls) > 0
+
+        # Verify return values do not contain validation_result
+        assert "validation_result" not in task.return_values
+
+    @mock.patch(
+        "cumulusci.tasks.bulkdata.snowfakery.Snowfakery._run_generate_and_load_subtask"
+    )
+    def test_validate_only_with_working_directory(self, mock_subtask, snowfakery):
+        """Test that validate_only respects working_directory option"""
+        from cumulusci.tasks.bulkdata.mapping_parser import ValidationResult
+
+        validation_result = ValidationResult()
+        mock_subtask.return_value = {"validation_result": validation_result}
+
+        with TemporaryDirectory() as t:
+            working_dir = Path(t) / "snowfakery_validation"
+            task = snowfakery(
+                recipe=sample_yaml,
+                validate_only=True,
+                working_directory=str(working_dir),
+            )
+
+            task()
+
+            # Verify subtask was called
+            mock_subtask.assert_called_once()
+
+            # Verify working directory was created
+            assert working_dir.exists()
+
+            # Verify return values contain validation_result
+            assert "validation_result" in task.return_values
+
+    @mock.patch(
+        "cumulusci.tasks.bulkdata.snowfakery.Snowfakery._run_generate_and_load_subtask"
+    )
+    def test_validate_only_skips_channels_and_queues(self, mock_subtask, create_task):
+        """Test that validate_only does not set up channels and queues"""
+        from cumulusci.tasks.bulkdata.mapping_parser import ValidationResult
+
+        validation_result = ValidationResult()
+        mock_subtask.return_value = {"validation_result": validation_result}
+
+        task = create_task(
+            Snowfakery,
+            {
+                "recipe": sample_yaml,
+                "validate_only": True,
+            },
+        )
+
+        task()
+
+        # Verify queue_manager was never created
+        assert not hasattr(task, "queue_manager")
+
     @mock.patch("cumulusci.tasks.bulkdata.snowfakery.MIN_PORTION_SIZE", 2)
     def test_serial_mode(self, mock_load_data, create_task):
         task = create_task(

cumulusci/tasks/create_package_version.py

@@ -3,9 +3,9 @@ import io
 import json
 import pathlib
 import zipfile
-from typing import List, Optional
+from typing import Dict, List, Optional, Union

-from pydantic import BaseModel, validator
+from pydantic.v1 import BaseModel, validator
 from simple_salesforce.exceptions import SalesforceMalformedRequest

 from cumulusci.core.config.util import get_devhub_config
@@ -29,7 +29,7 @@ from cumulusci.core.exceptions import (
     VcsException,
 )
 from cumulusci.core.sfdx import convert_sfdx_source
-from cumulusci.core.utils import process_bool_arg
+from cumulusci.core.utils import process_bool_arg, process_list_arg
 from cumulusci.core.versions import PackageType, PackageVersionNumber, VersionTypeEnum
 from cumulusci.salesforce_api.package_zip import (
     BasePackageZipBuilder,
@@ -38,6 +38,10 @@ from cumulusci.salesforce_api.package_zip import (
 from cumulusci.salesforce_api.utils import get_simple_salesforce_connection
 from cumulusci.tasks.salesforce.BaseSalesforceApiTask import BaseSalesforceApiTask
 from cumulusci.tasks.salesforce.org_settings import build_settings_package
+from cumulusci.tasks.utility.copyContents import (
+    clean_temp_directory,
+    consolidate_metadata,
+)
 from cumulusci.utils.salesforce.soql import (
     format_subscriber_package_version_where_clause,
 )
@@ -68,6 +72,11 @@ class PackageConfig(BaseModel):
     version_name: str
     version_base: Optional[str] = None
     version_type: VersionTypeEnum = VersionTypeEnum.minor
+    apex_test_access: Optional[dict[str, list[str]]] = None
+    package_metadata_access: Optional[dict[str, list[str]]] = None
+    unpackaged_metadata_path: Optional[
+        Union[str, List[str], Dict[str, Union[str, List[str]]]]
+    ] = None

     @validator("org_dependent")
     def org_dependent_must_be_unlocked(cls, v, values):
@@ -87,6 +96,20 @@ class PackageConfig(BaseModel):
             raise ValueError("Only managed packages can have an uninstall script.")
         return v

+    @validator("apex_test_access")
+    def apex_test_access_must_be_managed(cls, v, values):
+        if v and values["package_type"] != PackageTypeEnum.managed:
+            raise ValueError(
+                "Only managed packages can have Apex Test Access. Assign permission sets and permission set licenses to the user in context when your Apex tests run at package version creation."
+            )
+        return v
+
+    @validator("package_metadata_access")
+    def package_metadata_access_must_be_managed(cls, v, values):
+        if v and values["package_type"] != PackageTypeEnum.managed:
+            raise ValueError("Only managed packages can have Package Metadata Access.")
+        return v
+

 class CreatePackageVersion(BaseSalesforceApiTask):
     """Creates a new second-generation package version.
@@ -114,15 +137,21 @@ class CreatePackageVersion(BaseSalesforceApiTask):
         "version_base": {
             "description": "The version number to use as a base before incrementing. "
             "Optional; defaults to the highest existing version number of this package. "
-            "Can be set to ``latest_vcs_release`` to use the version of the most recent release published to
+            "Can be set to ``latest_vcs_release`` to use the version of the most recent release published to VCS."
+            "If version_number is set, version_base and version_type will be ignored"
         },
         "version_type": {
             "description": "The part of the version number to increment. "
             "Options are major, minor, patch, build. Defaults to build"
+            "If version_number is set, version_base and version_type will be ignored"
+        },
+        "version_number": {
+            "description": "Set a fixed version number, if not using version_base and version_type"
         },
         "skip_validation": {
             "description": "If true, skip validation of the package version. Default: false. "
             "Skipping validation creates packages more quickly, but they cannot be promoted for release."
+            "And package version is created without reference to dependencies."
         },
         "org_dependent": {
             "description": "If true, create an org-dependent unlocked package. Default: false."
@@ -156,6 +185,17 @@ class CreatePackageVersion(BaseSalesforceApiTask):
             "description": "If True, create unlocked packages for unpackaged metadata in this project and dependencies. "
             "Defaults to False."
         },
+        "dependencies": {
+            "description": "The list of dependencies to use when creating the package version. Defaults to None."
+            "If not provided, the dependencies will be resolved using the resolution_strategy."
+            "The format should be a pcakge version Ids i.e '04t...,04t...'"
+        },
+        "async_validation": {
+            "description": "If True, validate the package version asynchronously. Defaults to False. You also can't specify both skip validation and async validation at the same time."
+        },
+        "is_dev_use_pkg_zip_requested": {
+            "description": "If True, request a dev use package zip. Defaults to False. If true, a downloadable package zip file containing package metadata is generated when a new package version is created."
+        },
     }

     def _init_options(self, kwargs):
@@ -190,16 +230,40 @@ class CreatePackageVersion(BaseSalesforceApiTask):
             version_name=self.options.get("version_name") or "Release",
             version_base=self.options.get("version_base"),
             version_type=self.options.get("version_type") or VersionTypeEnum("build"),
+            apex_test_access=self.project_config.project__package__apex_test_access,
+            package_metadata_access=self.project_config.project__package__package_metadata_access,
+            unpackaged_metadata_path=self.project_config.project__package__unpackaged_metadata_path,
         )
         self.options["skip_validation"] = process_bool_arg(
             self.options.get("skip_validation") or False
         )
+        self.options["async_validation"] = process_bool_arg(
+            self.options.get("async_validation") or False
+        )
+        self.options["is_dev_use_pkg_zip_requested"] = process_bool_arg(
+            self.options.get("is_dev_use_pkg_zip_requested") or False
+        )
         self.options["force_upload"] = process_bool_arg(
             self.options.get("force_upload") or False
         )
         self.options["create_unlocked_dependency_packages"] = process_bool_arg(
             self.options.get("create_unlocked_dependency_packages") or False
         )
+        self.options["version_number"] = (
+            PackageVersionNumber.parse(
+                self.options.get("version_number"), package_type=PackageType.SECOND_GEN
+            )
+            if self.options.get("version_number")
+            else None
+        )
+        self.options["dependencies"] = (
+            [
+                {"subscriberPackageVersionId": x}
+                for x in process_list_arg(self.options.get("dependencies"))
+            ]
+            if self.options.get("dependencies")
+            else None
+        )

     def _init_task(self):
         self.tooling = get_simple_salesforce_connection(
@@ -223,13 +287,7 @@ class CreatePackageVersion(BaseSalesforceApiTask):
         self.return_values["package_id"] = self.package_id

         # submit request to create package version
-        options = {
-            "package_type": self.package_config.package_type.value,
-            "namespace_inject": self.package_config.namespace,
-            "namespaced_org": self.package_config.namespace is not None,
-        }
-        if "static_resource_path" in self.options:
-            options["static_resource_path"] = self.options["static_resource_path"]
+        options = self._get_package_zip_builder_options()

         package_zip_builder = None
         with convert_sfdx_source(
@@ -292,6 +350,20 @@ class CreatePackageVersion(BaseSalesforceApiTask):
         self.logger.info(f"  Version Number: {self.return_values['version_number']}")
         self.logger.info(f"  Dependencies: {self.return_values['dependencies']}")

+    def _get_package_zip_builder_options(self):
+        if not self.package_config:
+            return {}
+
+        options = {
+            "package_type": self.package_config.package_type.value,
+            "namespace_inject": self.package_config.namespace,
+            "namespaced_org": self.package_config.namespace is not None,
+        }
+        if "static_resource_path" in self.options:
+            options["static_resource_path"] = self.options["static_resource_path"]
+
+        return options
+
     def _get_or_create_package(self, package_config: PackageConfig):
         """Find or create the Package2

@@ -384,9 +456,13 @@ class CreatePackageVersion(BaseSalesforceApiTask):
             return res["records"][0]["Id"]

         # Create the package descriptor
-        version_number = self._get_base_version_number(
+        version_number = self.options.get(
+            "version_number"
+        ) or self._get_base_version_number(
             package_config.version_base, package_id
-        ).increment(package_config.version_type)
+        ).increment(
+            package_config.version_type
+        )

         package_descriptor = {
             "id": package_id,
@@ -403,6 +479,57 @@ class CreatePackageVersion(BaseSalesforceApiTask):
         if package_config.uninstall_script:
             package_descriptor["uninstallScript"] = package_config.uninstall_script

+        if package_config.apex_test_access:
+            if "permission_set_names" in package_config.apex_test_access:
+                perm_sets = package_config.apex_test_access["permission_set_names"]
+                if isinstance(perm_sets, str):
+                    perm_sets = perm_sets.split(",")
+
+                package_descriptor["permissionSetNames"] = [
+                    s.strip() for s in perm_sets
+                ]
+            if "permission_set_license_names" in package_config.apex_test_access:
+                psl = package_config.apex_test_access[
+                    "permission_set_license_names"
+                ]
+                if isinstance(psl, str):
+                    psl = psl.split(",")
+                package_descriptor["permissionSetLicenseDeveloperNames"] = [
+                    s.strip() for s in psl
+                ]
+
+        if package_config.package_metadata_access:
+            if "permission_set_names" in package_config.package_metadata_access:
+                perm_sets = package_config.package_metadata_access[
+                    "permission_set_names"
+                ]
+                if isinstance(perm_sets, str):
+                    perm_sets = perm_sets.split(",")
+
+                if isinstance(perm_sets, list):
+                    package_descriptor["packageMetadataPermissionSetNames"] = [
+                        s.strip() for s in perm_sets
+                    ]
+
+            if (
+                "permission_set_license_names"
+                in package_config.package_metadata_access
+            ):
+                psl = package_config.package_metadata_access[
+                    "permission_set_license_names"
+                ]
+                if isinstance(psl, str):
+                    psl = psl.split(",")
+                if isinstance(psl, list):
+                    package_descriptor[
+                        "packageMetadataPermissionSetLicenseNames"
+                    ] = [s.strip() for s in psl]
+
+        if package_config.unpackaged_metadata_path:
+            self._get_unpackaged_metadata_path(
+                package_config.unpackaged_metadata_path, version_info
+            )
+
         # Add org shape
         with open(self.org_config.config_file, "r") as f:
             scratch_org_def = json.load(f)
@@ -442,7 +569,9 @@ class CreatePackageVersion(BaseSalesforceApiTask):
             and not is_dependency
         ):
             self.logger.info("Determining dependencies for package")
-            dependencies = self._get_dependencies()
+            dependencies = (
+                self.options.get("dependencies") or self._get_dependencies()
+            )
             if dependencies:
                 package_descriptor["dependencies"] = dependencies

@@ -458,12 +587,25 @@ class CreatePackageVersion(BaseSalesforceApiTask):
         )
         request = {
             "Package2Id": package_id,
-            "SkipValidation": skip_validation,
             "Tag": f"hash:{package_hash}",
             "VersionInfo": version_info,
-            "CalculateCodeCoverage": not skip_validation,
+            "IsDevUsePkgZipRequested": self.options.get("is_dev_use_pkg_zip_requested"),
         }

+        if self.options.get("async_validation") is not True:
+            request.update(
+                {
+                    "SkipValidation": skip_validation,
+                    "CalculateCodeCoverage": not skip_validation,
+                }
+            )
+        else:
+            request.update(
+                {
+                    "AsyncValidation": True,
+                }
+            )
+
         install_key = self.options.get("install_key")
         if install_key:
             request["InstallKey"] = install_key
@@ -785,3 +927,35 @@ class CreatePackageVersion(BaseSalesforceApiTask):
             ]

         return []
+
+    def _get_unpackaged_metadata_path(
+        self,
+        metadata_path: Union[str, List[str], Dict[str, Union[str, List[str]]]],
+        version_info: zipfile.ZipFile,
+    ) -> zipfile.ZipFile:
+
+        final_metadata_path, file_count = consolidate_metadata(
+            metadata_path, self.project_config.repo_root, logger=self.logger
+        )
+
+        if file_count == 0:
+            return version_info
+
+        # Use the consolidated temp directory with convert_sfdx_source
+        with convert_sfdx_source(
+            final_metadata_path, "unpackaged-metadata-package", self.logger
+        ) as src_path:
+            unpackaged_metadata_zip_builder = MetadataPackageZipBuilder(
+                path=src_path,
+                name="unpackaged-metadata-package",
+                context=self.context,
+                options=self._get_package_zip_builder_options(),
+            )
+            version_info.writestr(
+                "unpackaged-metadata-package.zip",
+                unpackaged_metadata_zip_builder.as_bytes(),
+            )
+
+        clean_temp_directory(final_metadata_path)
+
+        return version_info

cumulusci/tasks/tests/test_create_package_version.py

@@ -5,7 +5,7 @@ from typing import List, Optional, Union
 from unittest.mock import Mock
 from zipfile import ZipFile

-from pydantic import BaseModel
+from pydantic.v1 import BaseModel

 from cumulusci.core.config.project_config import BaseProjectConfig
 from cumulusci.core.dependencies import parse_dependencies

cumulusci/tasks/metadata_etl/__init__.py

@@ -6,6 +6,7 @@ from cumulusci.tasks.metadata_etl.base import (
     MetadataOperation,
     UpdateMetadataFirstChildTextTask,
 )
+from cumulusci.tasks.metadata_etl.applications import AddProfileActionOverrides
 from cumulusci.tasks.metadata_etl.duplicate_rules import SetDuplicateRuleStatus
 from cumulusci.tasks.metadata_etl.layouts import AddRelatedLists
 from cumulusci.tasks.metadata_etl.objects import SetObjectSettings
@@ -18,6 +19,7 @@ flake8 = (
     BaseMetadataSynthesisTask,
     BaseMetadataTransformTask,
     MetadataSingleEntityTransformTask,
+    AddProfileActionOverrides,
     AddRelatedLists,
     AddPermissionSetPermissions,
     AddValueSetEntries,