cumulusci-plus 5.0.19__py3-none-any.whl → 5.0.35__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (123)
  1. cumulusci/__about__.py +1 -1
  2. cumulusci/cli/logger.py +2 -2
  3. cumulusci/cli/service.py +20 -0
  4. cumulusci/cli/task.py +17 -0
  5. cumulusci/cli/tests/test_error.py +3 -1
  6. cumulusci/cli/tests/test_flow.py +279 -2
  7. cumulusci/cli/tests/test_service.py +15 -12
  8. cumulusci/cli/tests/test_task.py +88 -2
  9. cumulusci/cli/tests/utils.py +1 -4
  10. cumulusci/core/config/base_task_flow_config.py +26 -1
  11. cumulusci/core/config/project_config.py +2 -20
  12. cumulusci/core/config/tests/test_config_expensive.py +9 -3
  13. cumulusci/core/config/universal_config.py +3 -4
  14. cumulusci/core/dependencies/base.py +1 -1
  15. cumulusci/core/dependencies/dependencies.py +1 -1
  16. cumulusci/core/dependencies/github.py +1 -2
  17. cumulusci/core/dependencies/resolvers.py +1 -1
  18. cumulusci/core/dependencies/tests/test_dependencies.py +1 -1
  19. cumulusci/core/dependencies/tests/test_resolvers.py +1 -1
  20. cumulusci/core/flowrunner.py +90 -6
  21. cumulusci/core/github.py +1 -1
  22. cumulusci/core/sfdx.py +3 -1
  23. cumulusci/core/source_transforms/tests/test_transforms.py +1 -1
  24. cumulusci/core/source_transforms/transforms.py +1 -1
  25. cumulusci/core/tasks.py +13 -2
  26. cumulusci/core/tests/test_flowrunner.py +100 -0
  27. cumulusci/core/tests/test_tasks.py +65 -0
  28. cumulusci/core/utils.py +3 -1
  29. cumulusci/core/versions.py +1 -1
  30. cumulusci/cumulusci.yml +55 -0
  31. cumulusci/oauth/client.py +1 -1
  32. cumulusci/plugins/plugin_base.py +5 -3
  33. cumulusci/robotframework/pageobjects/ObjectManagerPageObject.py +1 -1
  34. cumulusci/salesforce_api/rest_deploy.py +1 -1
  35. cumulusci/schema/cumulusci.jsonschema.json +64 -0
  36. cumulusci/tasks/apex/anon.py +1 -1
  37. cumulusci/tasks/apex/testrunner.py +416 -142
  38. cumulusci/tasks/apex/tests/test_apex_tasks.py +917 -1
  39. cumulusci/tasks/bulkdata/extract.py +0 -1
  40. cumulusci/tasks/bulkdata/extract_dataset_utils/extract_yml.py +1 -1
  41. cumulusci/tasks/bulkdata/extract_dataset_utils/synthesize_extract_declarations.py +1 -1
  42. cumulusci/tasks/bulkdata/extract_dataset_utils/tests/test_extract_yml.py +1 -1
  43. cumulusci/tasks/bulkdata/generate_and_load_data.py +136 -12
  44. cumulusci/tasks/bulkdata/mapping_parser.py +139 -44
  45. cumulusci/tasks/bulkdata/select_utils.py +1 -1
  46. cumulusci/tasks/bulkdata/snowfakery.py +100 -25
  47. cumulusci/tasks/bulkdata/tests/test_generate_and_load.py +159 -0
  48. cumulusci/tasks/bulkdata/tests/test_load.py +0 -2
  49. cumulusci/tasks/bulkdata/tests/test_mapping_parser.py +763 -1
  50. cumulusci/tasks/bulkdata/tests/test_select_utils.py +26 -0
  51. cumulusci/tasks/bulkdata/tests/test_snowfakery.py +133 -0
  52. cumulusci/tasks/create_package_version.py +190 -16
  53. cumulusci/tasks/datadictionary.py +1 -1
  54. cumulusci/tasks/metadata_etl/base.py +7 -3
  55. cumulusci/tasks/metadata_etl/layouts.py +1 -1
  56. cumulusci/tasks/metadata_etl/permissions.py +1 -1
  57. cumulusci/tasks/metadata_etl/remote_site_settings.py +2 -2
  58. cumulusci/tasks/push/README.md +15 -17
  59. cumulusci/tasks/release_notes/README.md +13 -13
  60. cumulusci/tasks/release_notes/generator.py +13 -8
  61. cumulusci/tasks/robotframework/tests/test_robotframework.py +6 -1
  62. cumulusci/tasks/salesforce/Deploy.py +53 -2
  63. cumulusci/tasks/salesforce/SfPackageCommands.py +363 -0
  64. cumulusci/tasks/salesforce/__init__.py +1 -0
  65. cumulusci/tasks/salesforce/assign_ps_psg.py +448 -0
  66. cumulusci/tasks/salesforce/composite.py +1 -1
  67. cumulusci/tasks/salesforce/custom_settings_wait.py +1 -1
  68. cumulusci/tasks/salesforce/enable_prediction.py +5 -1
  69. cumulusci/tasks/salesforce/getPackageVersion.py +89 -0
  70. cumulusci/tasks/salesforce/profiles.py +13 -9
  71. cumulusci/tasks/salesforce/sourcetracking.py +1 -1
  72. cumulusci/tasks/salesforce/tests/test_Deploy.py +316 -1
  73. cumulusci/tasks/salesforce/tests/test_SfPackageCommands.py +554 -0
  74. cumulusci/tasks/salesforce/tests/test_assign_ps_psg.py +1055 -0
  75. cumulusci/tasks/salesforce/tests/test_getPackageVersion.py +651 -0
  76. cumulusci/tasks/salesforce/tests/test_profiles.py +43 -3
  77. cumulusci/tasks/salesforce/tests/test_update_dependencies.py +1 -1
  78. cumulusci/tasks/salesforce/tests/test_update_external_credential.py +912 -0
  79. cumulusci/tasks/salesforce/tests/test_update_named_credential.py +1042 -0
  80. cumulusci/tasks/salesforce/update_dependencies.py +2 -2
  81. cumulusci/tasks/salesforce/update_external_credential.py +562 -0
  82. cumulusci/tasks/salesforce/update_named_credential.py +441 -0
  83. cumulusci/tasks/salesforce/update_profile.py +17 -13
  84. cumulusci/tasks/salesforce/users/permsets.py +62 -5
  85. cumulusci/tasks/salesforce/users/tests/test_permsets.py +237 -11
  86. cumulusci/tasks/sfdmu/__init__.py +0 -0
  87. cumulusci/tasks/sfdmu/sfdmu.py +363 -0
  88. cumulusci/tasks/sfdmu/tests/__init__.py +1 -0
  89. cumulusci/tasks/sfdmu/tests/test_runner.py +212 -0
  90. cumulusci/tasks/sfdmu/tests/test_sfdmu.py +1012 -0
  91. cumulusci/tasks/tests/test_create_package_version.py +716 -1
  92. cumulusci/tasks/tests/test_util.py +42 -0
  93. cumulusci/tasks/util.py +37 -1
  94. cumulusci/tasks/utility/copyContents.py +402 -0
  95. cumulusci/tasks/utility/credentialManager.py +256 -0
  96. cumulusci/tasks/utility/directoryRecreator.py +30 -0
  97. cumulusci/tasks/utility/env_management.py +1 -1
  98. cumulusci/tasks/utility/secretsToEnv.py +135 -0
  99. cumulusci/tasks/utility/tests/test_copyContents.py +1719 -0
  100. cumulusci/tasks/utility/tests/test_credentialManager.py +564 -0
  101. cumulusci/tasks/utility/tests/test_directoryRecreator.py +439 -0
  102. cumulusci/tasks/utility/tests/test_secretsToEnv.py +1091 -0
  103. cumulusci/tests/test_integration_infrastructure.py +3 -1
  104. cumulusci/tests/test_utils.py +70 -6
  105. cumulusci/utils/__init__.py +54 -9
  106. cumulusci/utils/classutils.py +5 -2
  107. cumulusci/utils/http/tests/cassettes/ManualEditTestCompositeParallelSalesforce.test_http_headers.yaml +31 -30
  108. cumulusci/utils/options.py +23 -1
  109. cumulusci/utils/parallel/task_worker_queues/parallel_worker.py +1 -1
  110. cumulusci/utils/yaml/cumulusci_yml.py +7 -3
  111. cumulusci/utils/yaml/model_parser.py +2 -2
  112. cumulusci/utils/yaml/tests/test_cumulusci_yml.py +1 -1
  113. cumulusci/utils/yaml/tests/test_model_parser.py +3 -3
  114. cumulusci/vcs/base.py +23 -15
  115. cumulusci/vcs/bootstrap.py +5 -4
  116. cumulusci/vcs/utils/list_modified_files.py +189 -0
  117. cumulusci/vcs/utils/tests/test_list_modified_files.py +588 -0
  118. {cumulusci_plus-5.0.19.dist-info → cumulusci_plus-5.0.35.dist-info}/METADATA +12 -10
  119. {cumulusci_plus-5.0.19.dist-info → cumulusci_plus-5.0.35.dist-info}/RECORD +123 -98
  120. {cumulusci_plus-5.0.19.dist-info → cumulusci_plus-5.0.35.dist-info}/WHEEL +0 -0
  121. {cumulusci_plus-5.0.19.dist-info → cumulusci_plus-5.0.35.dist-info}/entry_points.txt +0 -0
  122. {cumulusci_plus-5.0.19.dist-info → cumulusci_plus-5.0.35.dist-info}/licenses/AUTHORS.rst +0 -0
  123. {cumulusci_plus-5.0.19.dist-info → cumulusci_plus-5.0.35.dist-info}/licenses/LICENSE +0 -0
@@ -1,3 +1,5 @@
+import sys
+
 import pytest
 
 from cumulusci.tasks.bulkdata.select_utils import (
@@ -618,6 +620,10 @@ def test_vectorize_records_mixed_numerical_boolean_categorical():
     not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE,
     reason="requires optional dependencies for annoy",
 )
+@pytest.mark.skipif(
+    sys.platform == "darwin" and sys.version_info[:2] in [(3, 11), (3, 13)],
+    reason="Annoy library has known compatibility issues on macOS with Python 3.11 and 3.13",
+)
 def test_annoy_post_process():
     # Test data
     load_records = [["Alice", "Engineer"], ["Bob", "Doctor"]]
@@ -648,6 +654,10 @@ def test_annoy_post_process():
     not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE,
     reason="requires optional dependencies for annoy",
 )
+@pytest.mark.skipif(
+    sys.platform == "darwin" and sys.version_info[:2] in [(3, 11), (3, 13)],
+    reason="Annoy library has known compatibility issues on macOS with Python 3.11 and 3.13",
+)
 def test_annoy_post_process__insert_records():
     # Test data
     load_records = [["Alice", "Engineer"], ["Bob", "Doctor"]]
@@ -689,6 +699,14 @@ def test_annoy_post_process__insert_records():
         assert insert_record in [["Alice", "Engineer"], ["Bob", "Doctor"]]
 
 
+@pytest.mark.skipif(
+    not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE,
+    reason="requires optional dependencies for annoy",
+)
+@pytest.mark.skipif(
+    sys.platform == "darwin" and sys.version_info[:2] in [(3, 11), (3, 13)],
+    reason="Annoy library has known compatibility issues on macOS with Python 3.11 and 3.13",
+)
 def test_annoy_post_process__no_query_records():
     # Test data
     load_records = [["Alice", "Engineer"], ["Bob", "Doctor"]]
@@ -721,6 +739,10 @@ def test_annoy_post_process__no_query_records():
     not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE,
     reason="requires optional dependencies for annoy",
 )
+@pytest.mark.skipif(
+    sys.platform == "darwin" and sys.version_info[:2] in [(3, 11), (3, 13)],
+    reason="Annoy library has known compatibility issues on macOS with Python 3.11 and 3.13",
+)
 def test_annoy_post_process__insert_records_with_polymorphic_fields():
     # Test data
     load_records = [
@@ -778,6 +800,10 @@ def test_annoy_post_process__insert_records_with_polymorphic_fields():
     not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE,
     reason="requires optional dependencies for annoy",
 )
+@pytest.mark.skipif(
+    sys.platform == "darwin" and sys.version_info[:2] in [(3, 11), (3, 13)],
+    reason="Annoy library has known compatibility issues on macOS with Python 3.11 and 3.13",
+)
 def test_single_record_match_annoy_post_process():
     # Mock data where only the first query record matches the first load record
     load_records = [["Alice", "Engineer"], ["Bob", "Doctor"]]
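Each hunk above repeats the same macOS/Python-version guard before an Annoy test. As a side note on the pattern (not part of the package), the condition could be captured once as a reusable pytest marker; the `annoy_macos_skip` name below is hypothetical:

```python
# Hypothetical refactoring sketch; the marker name is not from cumulusci-plus.
import sys

import pytest

# Same condition as the repeated skipif decorators in the hunks above.
annoy_macos_skip = pytest.mark.skipif(
    sys.platform == "darwin" and sys.version_info[:2] in [(3, 11), (3, 13)],
    reason="Annoy library has known compatibility issues on macOS with Python 3.11 and 3.13",
)


@annoy_macos_skip
def test_annoy_example():
    # Placeholder body; the real tests exercise the Annoy post-processing helpers.
    assert True
```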
@@ -824,6 +824,139 @@ class TestSnowfakery:
             "Account",
         }

+    @mock.patch(
+        "cumulusci.tasks.bulkdata.snowfakery.Snowfakery._run_generate_and_load_subtask"
+    )
+    def test_validate_only_mode(self, mock_subtask, create_task):
+        """Test that validate_only mode validates without loading data"""
+        from cumulusci.tasks.bulkdata.mapping_parser import ValidationResult
+
+        # Mock subtask return value
+        validation_result = ValidationResult()
+        mock_subtask.return_value = {"validation_result": validation_result}
+
+        task = create_task(
+            Snowfakery,
+            {
+                "recipe": sample_yaml,
+                "validate_only": True,
+            },
+        )
+
+        task()
+
+        # Verify subtask was called with validate_only=True
+        mock_subtask.assert_called_once()
+        call_args = mock_subtask.call_args
+        assert call_args.kwargs.get("validate_only")
+
+        # Verify return values contain validation_result
+        assert "validation_result" in task.return_values
+        assert task.return_values["validation_result"] == validation_result
+
+    @mock.patch(
+        "cumulusci.tasks.bulkdata.snowfakery.Snowfakery._run_generate_and_load_subtask"
+    )
+    def test_validate_only_with_errors(self, mock_subtask, create_task):
+        """Test that validate_only mode returns errors without raising exception"""
+        from cumulusci.tasks.bulkdata.mapping_parser import ValidationResult
+
+        # Mock ValidationResult with errors
+        validation_result = ValidationResult()
+        validation_result.add_error("Test error: Field does not exist")
+        validation_result.add_warning("Test warning: Field has no permissions")
+        mock_subtask.return_value = {"validation_result": validation_result}
+
+        task = create_task(
+            Snowfakery,
+            {
+                "recipe": sample_yaml,
+                "validate_only": True,
+            },
+        )
+
+        # Should not raise exception even with errors
+        task()
+
+        # Verify subtask was called
+        mock_subtask.assert_called_once()
+
+        # Verify return values contain validation_result with errors
+        assert "validation_result" in task.return_values
+        assert task.return_values["validation_result"].has_errors()
+        assert len(task.return_values["validation_result"].errors) == 1
+        assert len(task.return_values["validation_result"].warnings) == 1
+
+    def test_validate_only_false_loads_data(self, mock_load_data, create_task):
+        """Test that validate_only=False performs normal data loading"""
+        task = create_task(
+            Snowfakery,
+            {
+                "recipe": sample_yaml,
+                "validate_only": False,
+            },
+        )
+
+        task()
+
+        # Verify load WAS called
+        assert len(mock_load_data.mock_calls) > 0
+
+        # Verify return values do not contain validation_result
+        assert "validation_result" not in task.return_values
+
+    @mock.patch(
+        "cumulusci.tasks.bulkdata.snowfakery.Snowfakery._run_generate_and_load_subtask"
+    )
+    def test_validate_only_with_working_directory(self, mock_subtask, snowfakery):
+        """Test that validate_only respects working_directory option"""
+        from cumulusci.tasks.bulkdata.mapping_parser import ValidationResult
+
+        validation_result = ValidationResult()
+        mock_subtask.return_value = {"validation_result": validation_result}
+
+        with TemporaryDirectory() as t:
+            working_dir = Path(t) / "snowfakery_validation"
+            task = snowfakery(
+                recipe=sample_yaml,
+                validate_only=True,
+                working_directory=str(working_dir),
+            )
+
+            task()
+
+            # Verify subtask was called
+            mock_subtask.assert_called_once()
+
+            # Verify working directory was created
+            assert working_dir.exists()
+
+            # Verify return values contain validation_result
+            assert "validation_result" in task.return_values
+
+    @mock.patch(
+        "cumulusci.tasks.bulkdata.snowfakery.Snowfakery._run_generate_and_load_subtask"
+    )
+    def test_validate_only_skips_channels_and_queues(self, mock_subtask, create_task):
+        """Test that validate_only does not set up channels and queues"""
+        from cumulusci.tasks.bulkdata.mapping_parser import ValidationResult
+
+        validation_result = ValidationResult()
+        mock_subtask.return_value = {"validation_result": validation_result}
+
+        task = create_task(
+            Snowfakery,
+            {
+                "recipe": sample_yaml,
+                "validate_only": True,
+            },
+        )
+
+        task()
+
+        # Verify queue_manager was never created
+        assert not hasattr(task, "queue_manager")
+
     @mock.patch("cumulusci.tasks.bulkdata.snowfakery.MIN_PORTION_SIZE", 2)
     def test_serial_mode(self, mock_load_data, create_task):
         task = create_task(
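The new tests above rely on a small ValidationResult interface imported from cumulusci.tasks.bulkdata.mapping_parser (add_error, add_warning, has_errors, plus errors and warnings collections). A minimal stand-in that satisfies the same interface, for illustration only; the real class may differ in detail:

```python
# Stand-in sketch of the ValidationResult interface exercised by the tests above.
from dataclasses import dataclass, field
from typing import List


@dataclass
class ValidationResult:
    errors: List[str] = field(default_factory=list)
    warnings: List[str] = field(default_factory=list)

    def add_error(self, message: str) -> None:
        self.errors.append(message)

    def add_warning(self, message: str) -> None:
        self.warnings.append(message)

    def has_errors(self) -> bool:
        return bool(self.errors)


result = ValidationResult()
result.add_error("Test error: Field does not exist")
result.add_warning("Test warning: Field has no permissions")
assert result.has_errors() and len(result.warnings) == 1
```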
@@ -3,9 +3,9 @@ import io
 import json
 import pathlib
 import zipfile
-from typing import List, Optional
+from typing import Dict, List, Optional, Union
 
-from pydantic import BaseModel, validator
+from pydantic.v1 import BaseModel, validator
 from simple_salesforce.exceptions import SalesforceMalformedRequest
 
 from cumulusci.core.config.util import get_devhub_config
@@ -29,7 +29,7 @@ from cumulusci.core.exceptions import (
     VcsException,
 )
 from cumulusci.core.sfdx import convert_sfdx_source
-from cumulusci.core.utils import process_bool_arg
+from cumulusci.core.utils import process_bool_arg, process_list_arg
 from cumulusci.core.versions import PackageType, PackageVersionNumber, VersionTypeEnum
 from cumulusci.salesforce_api.package_zip import (
     BasePackageZipBuilder,
@@ -38,6 +38,10 @@ from cumulusci.salesforce_api.package_zip import (
 from cumulusci.salesforce_api.utils import get_simple_salesforce_connection
 from cumulusci.tasks.salesforce.BaseSalesforceApiTask import BaseSalesforceApiTask
 from cumulusci.tasks.salesforce.org_settings import build_settings_package
+from cumulusci.tasks.utility.copyContents import (
+    clean_temp_directory,
+    consolidate_metadata,
+)
 from cumulusci.utils.salesforce.soql import (
     format_subscriber_package_version_where_clause,
 )
@@ -68,6 +72,11 @@ class PackageConfig(BaseModel):
     version_name: str
     version_base: Optional[str] = None
     version_type: VersionTypeEnum = VersionTypeEnum.minor
+    apex_test_access: Optional[dict[str, list[str]]] = None
+    package_metadata_access: Optional[dict[str, list[str]]] = None
+    unpackaged_metadata_path: Optional[
+        Union[str, List[str], Dict[str, Union[str, List[str]]]]
+    ] = None
 
     @validator("org_dependent")
     def org_dependent_must_be_unlocked(cls, v, values):
@@ -87,6 +96,20 @@ class PackageConfig(BaseModel):
             raise ValueError("Only managed packages can have an uninstall script.")
         return v
 
+    @validator("apex_test_access")
+    def apex_test_access_must_be_managed(cls, v, values):
+        if v and values["package_type"] != PackageTypeEnum.managed:
+            raise ValueError(
+                "Only managed packages can have Apex Test Access. Assign permission sets and permission set licenses to the user in context when your Apex tests run at package version creation."
+            )
+        return v
+
+    @validator("package_metadata_access")
+    def package_metadata_access_must_be_managed(cls, v, values):
+        if v and values["package_type"] != PackageTypeEnum.managed:
+            raise ValueError("Only managed packages can have Package Metadata Access.")
+        return v
+
 
 class CreatePackageVersion(BaseSalesforceApiTask):
     """Creates a new second-generation package version.
@@ -114,15 +137,21 @@ class CreatePackageVersion(BaseSalesforceApiTask):
         "version_base": {
             "description": "The version number to use as a base before incrementing. "
            "Optional; defaults to the highest existing version number of this package. "
-            "Can be set to ``latest_vcs_release`` to use the version of the most recent release published to GitHub."
+            "Can be set to ``latest_vcs_release`` to use the version of the most recent release published to VCS."
+            "If version_number is set, version_base and version_type will be ignored"
        },
        "version_type": {
            "description": "The part of the version number to increment. "
            "Options are major, minor, patch, build. Defaults to build"
+            "If version_number is set, version_base and version_type will be ignored"
+        },
+        "version_number": {
+            "description": "Set a fixed version number, if not using version_base and version_type"
        },
        "skip_validation": {
            "description": "If true, skip validation of the package version. Default: false. "
            "Skipping validation creates packages more quickly, but they cannot be promoted for release."
+            "And package version is created without reference to dependencies."
        },
        "org_dependent": {
            "description": "If true, create an org-dependent unlocked package. Default: false."
@@ -156,6 +185,17 @@ class CreatePackageVersion(BaseSalesforceApiTask):
            "description": "If True, create unlocked packages for unpackaged metadata in this project and dependencies. "
            "Defaults to False."
        },
+        "dependencies": {
+            "description": "The list of dependencies to use when creating the package version. Defaults to None."
+            "If not provided, the dependencies will be resolved using the resolution_strategy."
+            "The format should be a pcakge version Ids i.e '04t...,04t...'"
+        },
+        "async_validation": {
+            "description": "If True, validate the package version asynchronously. Defaults to False. You also can't specify both skip validation and async validation at the same time."
+        },
+        "is_dev_use_pkg_zip_requested": {
+            "description": "If True, request a dev use package zip. Defaults to False. If true, a downloadable package zip file containing package metadata is generated when a new package version is created."
+        },
     }
 
     def _init_options(self, kwargs):
@@ -190,16 +230,40 @@ class CreatePackageVersion(BaseSalesforceApiTask):
             version_name=self.options.get("version_name") or "Release",
             version_base=self.options.get("version_base"),
             version_type=self.options.get("version_type") or VersionTypeEnum("build"),
+            apex_test_access=self.project_config.project__package__apex_test_access,
+            package_metadata_access=self.project_config.project__package__package_metadata_access,
+            unpackaged_metadata_path=self.project_config.project__package__unpackaged_metadata_path,
         )
         self.options["skip_validation"] = process_bool_arg(
             self.options.get("skip_validation") or False
         )
+        self.options["async_validation"] = process_bool_arg(
+            self.options.get("async_validation") or False
+        )
+        self.options["is_dev_use_pkg_zip_requested"] = process_bool_arg(
+            self.options.get("is_dev_use_pkg_zip_requested") or False
+        )
         self.options["force_upload"] = process_bool_arg(
             self.options.get("force_upload") or False
         )
         self.options["create_unlocked_dependency_packages"] = process_bool_arg(
             self.options.get("create_unlocked_dependency_packages") or False
         )
+        self.options["version_number"] = (
+            PackageVersionNumber.parse(
+                self.options.get("version_number"), package_type=PackageType.SECOND_GEN
+            )
+            if self.options.get("version_number")
+            else None
+        )
+        self.options["dependencies"] = (
+            [
+                {"subscriberPackageVersionId": x}
+                for x in process_list_arg(self.options.get("dependencies"))
+            ]
+            if self.options.get("dependencies")
+            else None
+        )
 
     def _init_task(self):
         self.tooling = get_simple_salesforce_connection(
@@ -223,13 +287,7 @@ class CreatePackageVersion(BaseSalesforceApiTask):
         self.return_values["package_id"] = self.package_id
 
         # submit request to create package version
-        options = {
-            "package_type": self.package_config.package_type.value,
-            "namespace_inject": self.package_config.namespace,
-            "namespaced_org": self.package_config.namespace is not None,
-        }
-        if "static_resource_path" in self.options:
-            options["static_resource_path"] = self.options["static_resource_path"]
+        options = self._get_package_zip_builder_options()
 
         package_zip_builder = None
         with convert_sfdx_source(
@@ -292,6 +350,20 @@ class CreatePackageVersion(BaseSalesforceApiTask):
         self.logger.info(f" Version Number: {self.return_values['version_number']}")
         self.logger.info(f" Dependencies: {self.return_values['dependencies']}")
 
+    def _get_package_zip_builder_options(self):
+        if not self.package_config:
+            return {}
+
+        options = {
+            "package_type": self.package_config.package_type.value,
+            "namespace_inject": self.package_config.namespace,
+            "namespaced_org": self.package_config.namespace is not None,
+        }
+        if "static_resource_path" in self.options:
+            options["static_resource_path"] = self.options["static_resource_path"]
+
+        return options
+
     def _get_or_create_package(self, package_config: PackageConfig):
         """Find or create the Package2
 
@@ -384,9 +456,13 @@ class CreatePackageVersion(BaseSalesforceApiTask):
             return res["records"][0]["Id"]
 
         # Create the package descriptor
-        version_number = self._get_base_version_number(
+        version_number = self.options.get(
+            "version_number"
+        ) or self._get_base_version_number(
             package_config.version_base, package_id
-        ).increment(package_config.version_type)
+        ).increment(
+            package_config.version_type
+        )
 
         package_descriptor = {
             "id": package_id,
@@ -403,6 +479,57 @@ class CreatePackageVersion(BaseSalesforceApiTask):
         if package_config.uninstall_script:
             package_descriptor["uninstallScript"] = package_config.uninstall_script
 
+        if package_config.apex_test_access:
+            if "permission_set_names" in package_config.apex_test_access:
+                perm_sets = package_config.apex_test_access["permission_set_names"]
+                if isinstance(perm_sets, str):
+                    perm_sets = perm_sets.split(",")
+
+                package_descriptor["permissionSetNames"] = [
+                    s.strip() for s in perm_sets
+                ]
+            if "permission_set_license_names" in package_config.apex_test_access:
+                psl = package_config.apex_test_access[
+                    "permission_set_license_names"
+                ]
+                if isinstance(psl, str):
+                    psl = psl.split(",")
+                package_descriptor["permissionSetLicenseDeveloperNames"] = [
+                    s.strip() for s in psl
+                ]
+
+        if package_config.package_metadata_access:
+            if "permission_set_names" in package_config.package_metadata_access:
+                perm_sets = package_config.package_metadata_access[
+                    "permission_set_names"
+                ]
+                if isinstance(perm_sets, str):
+                    perm_sets = perm_sets.split(",")
+
+                if isinstance(perm_sets, list):
+                    package_descriptor["packageMetadataPermissionSetNames"] = [
+                        s.strip() for s in perm_sets
+                    ]
+
+            if (
+                "permission_set_license_names"
+                in package_config.package_metadata_access
+            ):
+                psl = package_config.package_metadata_access[
+                    "permission_set_license_names"
+                ]
+                if isinstance(psl, str):
+                    psl = psl.split(",")
+                if isinstance(psl, list):
+                    package_descriptor[
+                        "packageMetadataPermissionSetLicenseNames"
+                    ] = [s.strip() for s in psl]
+
+        if package_config.unpackaged_metadata_path:
+            self._get_unpackaged_metadata_path(
+                package_config.unpackaged_metadata_path, version_info
+            )
+
         # Add org shape
         with open(self.org_config.config_file, "r") as f:
             scratch_org_def = json.load(f)
@@ -442,7 +569,9 @@ class CreatePackageVersion(BaseSalesforceApiTask):
             and not is_dependency
         ):
             self.logger.info("Determining dependencies for package")
-            dependencies = self._get_dependencies()
+            dependencies = (
+                self.options.get("dependencies") or self._get_dependencies()
+            )
             if dependencies:
                 package_descriptor["dependencies"] = dependencies
 
@@ -458,12 +587,25 @@ class CreatePackageVersion(BaseSalesforceApiTask):
         )
         request = {
             "Package2Id": package_id,
-            "SkipValidation": skip_validation,
             "Tag": f"hash:{package_hash}",
             "VersionInfo": version_info,
-            "CalculateCodeCoverage": not skip_validation,
+            "IsDevUsePkgZipRequested": self.options.get("is_dev_use_pkg_zip_requested"),
         }
 
+        if self.options.get("async_validation") is not True:
+            request.update(
+                {
+                    "SkipValidation": skip_validation,
+                    "CalculateCodeCoverage": not skip_validation,
+                }
+            )
+        else:
+            request.update(
+                {
+                    "AsyncValidation": True,
+                }
+            )
+
         install_key = self.options.get("install_key")
         if install_key:
             request["InstallKey"] = install_key
@@ -785,3 +927,35 @@ class CreatePackageVersion(BaseSalesforceApiTask):
             ]
 
         return []
+
+    def _get_unpackaged_metadata_path(
+        self,
+        metadata_path: Union[str, List[str], Dict[str, Union[str, List[str]]]],
+        version_info: zipfile.ZipFile,
+    ) -> zipfile.ZipFile:
+
+        final_metadata_path, file_count = consolidate_metadata(
+            metadata_path, self.project_config.repo_root, logger=self.logger
+        )
+
+        if file_count == 0:
+            return version_info
+
+        # Use the consolidated temp directory with convert_sfdx_source
+        with convert_sfdx_source(
+            final_metadata_path, "unpackaged-metadata-package", self.logger
+        ) as src_path:
+            unpackaged_metadata_zip_builder = MetadataPackageZipBuilder(
+                path=src_path,
+                name="unpackaged-metadata-package",
+                context=self.context,
+                options=self._get_package_zip_builder_options(),
+            )
+            version_info.writestr(
+                "unpackaged-metadata-package.zip",
+                unpackaged_metadata_zip_builder.as_bytes(),
+            )
+
+        clean_temp_directory(final_metadata_path)
+
+        return version_info
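The new task options above (version_number, dependencies, async_validation, is_dev_use_pkg_zip_requested) are normalized in _init_options before being used. A short sketch of that normalization, mirroring the helpers used in the diff; the literal option values here are placeholders:

```python
# Illustration of how the new CreatePackageVersion options are normalized;
# the option values below are placeholders, not real package version Ids.
from cumulusci.core.utils import process_bool_arg, process_list_arg
from cumulusci.core.versions import PackageType, PackageVersionNumber

options = {
    "version_number": "2.1.0.5",  # overrides version_base/version_type when set
    "dependencies": "04tAAA000000001,04tAAA000000002",  # placeholder 04t Ids
    "async_validation": "true",  # cannot be combined with skip_validation
    "is_dev_use_pkg_zip_requested": False,
}

options["async_validation"] = process_bool_arg(options.get("async_validation") or False)
options["is_dev_use_pkg_zip_requested"] = process_bool_arg(
    options.get("is_dev_use_pkg_zip_requested") or False
)
options["version_number"] = (
    PackageVersionNumber.parse(
        options.get("version_number"), package_type=PackageType.SECOND_GEN
    )
    if options.get("version_number")
    else None
)
options["dependencies"] = (
    [
        {"subscriberPackageVersionId": x}
        for x in process_list_arg(options.get("dependencies"))
    ]
    if options.get("dependencies")
    else None
)
```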
@@ -5,7 +5,7 @@ from typing import List, Optional, Union
 from unittest.mock import Mock
 from zipfile import ZipFile
 
-from pydantic import BaseModel
+from pydantic.v1 import BaseModel
 
 from cumulusci.core.config.project_config import BaseProjectConfig
 from cumulusci.core.dependencies import parse_dependencies
@@ -7,7 +7,11 @@ from cumulusci.core.config import TaskConfig
 from cumulusci.core.enums import StrEnum
 from cumulusci.core.exceptions import CumulusCIException, TaskOptionsError
 from cumulusci.core.tasks import BaseSalesforceTask
-from cumulusci.core.utils import process_bool_arg, process_list_arg, determine_managed_mode
+from cumulusci.core.utils import (
+    determine_managed_mode,
+    process_bool_arg,
+    process_list_arg,
+)
 from cumulusci.salesforce_api.metadata import ApiRetrieveUnpackaged
 from cumulusci.tasks.metadata.package import PackageXmlGenerator
 from cumulusci.utils import inject_namespace
@@ -74,8 +78,8 @@ class BaseMetadataETLTask(BaseSalesforceTask, metaclass=ABCMeta):
                 self.options["namespaced_org"] or False
             )
         else:
-            self.options["namespaced_org"] = (
-                bool(namespace) and namespace == getattr(self.org_config, 'namespace', None)
+            self.options["namespaced_org"] = bool(namespace) and namespace == getattr(
+                self.org_config, "namespace", None
             )
 
     def _inject_namespace(self, text):
@@ -1,6 +1,6 @@
 from typing import List, Optional
 
-from pydantic import BaseModel, root_validator
+from pydantic.v1 import BaseModel, root_validator
 from typing_extensions import Literal
 
 from cumulusci.core.exceptions import TaskOptionsError
@@ -1,6 +1,6 @@
 import typing as T
 
-import pydantic
+import pydantic.v1 as pydantic
 
 from cumulusci.core.exceptions import TaskOptionsError
 from cumulusci.tasks.metadata_etl import MetadataSingleEntityTransformTask
@@ -1,7 +1,7 @@
 from typing import List, Optional
 
-import pydantic
-from pydantic import BaseModel
+import pydantic.v1 as pydantic
+from pydantic.v1 import BaseModel
 
 from cumulusci.core.exceptions import TaskOptionsError
 from cumulusci.tasks.metadata_etl.base import BaseMetadataSynthesisTask
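The pydantic import changes above (and in create_package_version.py earlier) switch to Pydantic's v1 compatibility namespace so the existing v1-style models keep working when Pydantic 2 is installed. A minimal sketch of the pattern; the model below is hypothetical and not from the package:

```python
# Pydantic 2 ships the v1 API under the pydantic.v1 namespace, so v1-style
# validators continue to work unchanged after swapping the import.
from pydantic.v1 import BaseModel, validator


class PackageIdModel(BaseModel):  # hypothetical model for illustration
    package_id: str

    @validator("package_id")
    def must_look_like_a_package2_id(cls, v):
        if not v.startswith("0Ho"):
            raise ValueError("expected a Package2 Id starting with 0Ho")
        return v


PackageIdModel(package_id="0Ho000000000001")  # placeholder Id
```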
@@ -1,21 +1,20 @@
 # Push Upgrade API Scripts
 
-These scripts are designed to work with the Salesforce Push Upgrade API (in Pilot in Winter 16) which exposes new objects via the Tooling API that allow interacting with push upgrades in a packaging org. The main purpose of these scripts is to use the Push Upgrade API to automate push upgrades through Jenkins.
+These scripts are designed to work with the Salesforce Push Upgrade API (in Pilot in Winter 16) which exposes new objects via the Tooling API that allow interacting with push upgrades in a packaging org. The main purpose of these scripts is to use the Push Upgrade API to automate push upgrades through Jenkins.
 
 # push_api.py - Python Wrapper for Push Upgrade API
 
-This python file provides wrapper classes around the Tooling API objects and abstracts interaction with them and their related data to make writing scripts easier. All the other scripts in this directory use the SalesforcePushApi wrapper to interact with the Tooling API.
+This python file provides wrapper classes around the Tooling API objects and abstracts interaction with them and their related data to make writing scripts easier. All the other scripts in this directory use the SalesforcePushApi wrapper to interact with the Tooling API.
 
 Initializing the SalesforcePushApi wrapper can be done with the following python code:
 
     push_api = SalesforcePushApi(sf_user, sf_pass, sf_serverurl)
 
 You can also pass two optional keyword args to the initialization to control the wrapper's behavior
-
-* **lazy**: A list of objects that should be lazily looked up. Currently, the only implementations for this are 'jobs' and 'subscribers'. If either are included in the list, they will be looked up on demand when needed by a referenced object. For example, if you are querying all jobs and subscribers is not set to lazy, all subscribers will first be retrieved. If lazy is enabled, subscriber orgs will only be retrieved when trying to resolve references for a particular job. Generally, if you have a lot of subscribers and only expect your script to need to lookup a small number of them, enabling lazy for subscribers will reduce api calls and cause the script to run faster.
 
-* **default_where**: A dictionary with Push Upgrade API objects as key and a value containing a SOQL WHERE clause statement which is applied to all queries against the object to effectively set the universe for a given object. For example:
-
+- **lazy**: A list of objects that should be lazily looked up. Currently, the only implementations for this are 'jobs' and 'subscribers'. If either are included in the list, they will be looked up on demand when needed by a referenced object. For example, if you are querying all jobs and subscribers is not set to lazy, all subscribers will first be retrieved. If lazy is enabled, subscriber orgs will only be retrieved when trying to resolve references for a particular job. Generally, if you have a lot of subscribers and only expect your script to need to lookup a small number of them, enabling lazy for subscribers will reduce api calls and cause the script to run faster.
+
+- **default_where**: A dictionary with Push Upgrade API objects as key and a value containing a SOQL WHERE clause statement which is applied to all queries against the object to effectively set the universe for a given object. For example:
 
     default_where = {'PackageSubscriber': "OrgType = 'Sandbox'"}
 
 In the example above, the wrapper would never return a PackageSubscriber which is not a Sandbox org.
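For context, the README text above describes combining the initialization call with the two optional keyword arguments. The sketch below simply restates the README's own example with placeholder credentials; it does not verify the current SalesforcePushApi constructor signature:

```python
# Follows the README's own example; argument list and keyword names come from
# the README text above, credential values are placeholders.
from cumulusci.tasks.push.push_api import SalesforcePushApi

sf_user = "admin@packaging.example"  # placeholder packaging org username
sf_pass = "password+securitytoken"   # placeholder password + security token
sf_serverurl = "https://login.salesforce.com"

push_api = SalesforcePushApi(
    sf_user,
    sf_pass,
    sf_serverurl,
    lazy=["subscribers"],  # look up subscriber orgs only on demand
    default_where={"PackageSubscriber": "OrgType = 'Sandbox'"},
)
```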
@@ -24,22 +23,22 @@ In the example above, the wrapper would never return a PackageSubscriber which i
 
 ## Common Environment Variables
 
-The push scripts are all designed to receive their arguments via environment variables. The following are common amongst all of the Push Scripts
+The push scripts are all designed to receive their arguments via environment variables. The following are common amongst all of the Push Scripts
 
-* **SF_USERNAME**: The Salesforce username for the packaging org
-* **SF_PASSWORD**: The Salesforce password and security token for the packaging org
-* **SF_SERVERURL**: The login url for the Salesforce packaging org.
+- **SF_USERNAME**: The Salesforce username for the packaging org
+- **SF_PASSWORD**: The Salesforce password and security token for the packaging org
+- **SF_SERVERURL**: The login url for the Salesforce packaging org.
 
 ## get_version_id.py
 
-Takes a namespace and version string and looks up the given version. Returns the version's Salesforce Id.
+Takes a namespace and version string and looks up the given version. Returns the version's Salesforce Id.
 
 The script handles parsing the version number string into a SOQL query against the MetadataPackageVersion object with the correct MajorVersion, MinorVersion, PatchVersion, ReleaseState, and BuildNumber (i.e. Beta number).
 
 ### Required Environment Variables
 
-* **NAMESPACE**: The Package's namespace prefix
-* **VERSION_NUMBER**: The version number string.
+- **NAMESPACE**: The Package's namespace prefix
+- **VERSION_NUMBER**: The version number string.
 
 ## orgs_for_push.py
 
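Since the push scripts read all of their arguments from environment variables, a hypothetical way to drive get_version_id.py from Python is shown below; the variable names come from the README above, while the values and the assumption that the script is run from its own directory are placeholders:

```python
# Hypothetical driver for get_version_id.py; values are placeholders and the
# script is assumed to be invoked from cumulusci/tasks/push/.
import os
import subprocess

env = dict(os.environ)
env.update(
    {
        "SF_USERNAME": "admin@packaging.example",
        "SF_PASSWORD": "password+securitytoken",
        "SF_SERVERURL": "https://login.salesforce.com",
        "NAMESPACE": "mynamespace",
        "VERSION_NUMBER": "1.2",
    }
)
subprocess.run(["python", "get_version_id.py"], env=env, check=True)
```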
@@ -47,13 +46,12 @@ Takes a MetadataPackageVersion Id and optionally a where clause to filter Subscr
 
 ### Required Environment Variables
 
-* **VERSION**: The MetadataPackageVersion Id of the version you want to push upgrade. This is used to look for all users not on the version or a newer version
+- **VERSION**: The MetadataPackageVersion Id of the version you want to push upgrade. This is used to look for all users not on the version or a newer version
 
 ### Optional Environment Variables
 
-* **SUBSCRIBER_WHERE**: An extra filter to be applied to all Subscriber queries. For example, setting this to OrgType = 'Sandbox' would find all Sandbox orgs eligible for push upgrade to the specified version
+- **SUBSCRIBER_WHERE**: An extra filter to be applied to all Subscriber queries. For example, setting this to OrgType = 'Sandbox' would find all Sandbox orgs eligible for push upgrade to the specified version
 
 ## failed_orgs_for_push.py
 
-Takes a PackagePushRequest Id and optionally a where clause to filter Subscribers and returns a list of OrgId's one per line for all orgs which failed the
-
+Takes a PackagePushRequest Id and optionally a where clause to filter Subscribers and returns a list of OrgId's one per line for all orgs which failed the