sf-decomposer 4.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +9 -0
- package/README.md +341 -0
- package/lib/commands/decomposer/decompose.d.ts +17 -0
- package/lib/commands/decomposer/decompose.js +70 -0
- package/lib/commands/decomposer/decompose.js.map +1 -0
- package/lib/commands/decomposer/recompose.d.ts +16 -0
- package/lib/commands/decomposer/recompose.js +64 -0
- package/lib/commands/decomposer/recompose.js.map +1 -0
- package/lib/helpers/constants.d.ts +5 -0
- package/lib/helpers/constants.js +7 -0
- package/lib/helpers/constants.js.map +1 -0
- package/lib/hooks/scopedPostRetrieve.d.ts +12 -0
- package/lib/hooks/scopedPostRetrieve.js +51 -0
- package/lib/hooks/scopedPostRetrieve.js.map +1 -0
- package/lib/index.d.ts +2 -0
- package/lib/index.js +2 -0
- package/lib/index.js.map +1 -0
- package/lib/metadata/getPackageDirectories.d.ts +1 -0
- package/lib/metadata/getPackageDirectories.js +50 -0
- package/lib/metadata/getPackageDirectories.js.map +1 -0
- package/lib/metadata/getRegistryValuesBySuffix.d.ts +9 -0
- package/lib/metadata/getRegistryValuesBySuffix.js +38 -0
- package/lib/metadata/getRegistryValuesBySuffix.js.map +1 -0
- package/lib/metadata/getUniqueIdElements.d.ts +1 -0
- package/lib/metadata/getUniqueIdElements.js +13 -0
- package/lib/metadata/getUniqueIdElements.js.map +1 -0
- package/lib/metadata/uniqueIdElements.json +66 -0
- package/lib/service/checkLogforErrors.d.ts +2 -0
- package/lib/service/checkLogforErrors.js +29 -0
- package/lib/service/checkLogforErrors.js.map +1 -0
- package/lib/service/decomposeFileHandler.d.ts +7 -0
- package/lib/service/decomposeFileHandler.js +74 -0
- package/lib/service/decomposeFileHandler.js.map +1 -0
- package/lib/service/moveFiles.d.ts +1 -0
- package/lib/service/moveFiles.js +17 -0
- package/lib/service/moveFiles.js.map +1 -0
- package/lib/service/recomposeFileHandler.d.ts +6 -0
- package/lib/service/recomposeFileHandler.js +82 -0
- package/lib/service/recomposeFileHandler.js.map +1 -0
- package/lib/service/renameBotVersionFiles.d.ts +1 -0
- package/lib/service/renameBotVersionFiles.js +23 -0
- package/lib/service/renameBotVersionFiles.js.map +1 -0
- package/messages/decomposer.decompose.md +36 -0
- package/messages/decomposer.recompose.md +30 -0
- package/oclif.lock +12481 -0
- package/oclif.manifest.json +166 -0
- package/package.json +202 -0
package/LICENSE.md
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2024 Matthew Carvin
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
|
6
|
+
|
|
7
|
+
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
|
8
|
+
|
|
9
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,341 @@
|
|
|
1
|
+
# sf-decomposer
|
|
2
|
+
|
|
3
|
+
[](https://www.npmjs.com/package/sf-decomposer) [](https://npmjs.org/package/sf-decomposer) [](https://raw.githubusercontent.com/mcarvin8/sf-decomposer/main/LICENSE.md)
|
|
4
|
+
|
|
5
|
+
The `sf-decomposer` is a Salesforce plugin to read the original metadata files (XML) and create smaller, more manageable files for version control. The inverse function (`recompose`) will recreate metadata files for deployments.
|
|
6
|
+
|
|
7
|
+
This plugin requires [git](https://git-scm.com/downloads) to be installed and that it can be called using the command `git`.
|
|
8
|
+
|
|
9
|
+
This will parse and retain the following in the original XMLs:
|
|
10
|
+
|
|
11
|
+
- Character Data (CDATA)
|
|
12
|
+
- Comments
|
|
13
|
+
- Attributes
|
|
14
|
+
|
|
15
|
+
The decomposed file format can be XML, JSON, or YAML. Based on testing, XML and YAML handle CDATA formatting better than JSON.
|
|
16
|
+
|
|
17
|
+
**DISCLAIMERS:**
|
|
18
|
+
|
|
19
|
+
- You must update the `.forceignore` to have the Salesforce CLI ignore the decomposed files created by this plugin. See section `Ignore Files`. Updates to the `.gitignore` are optional and can be updated based on what you want staged in your repo.
|
|
20
|
+
- It is highly recommended that you extensively test this plugin in a sandbox environment on the metadata types you wish to use this tool for.
|
|
21
|
+
- Do not change your production/QA pipelines until you have tested this and are happy with the results.
|
|
22
|
+
- Confirm your deployment pipelines are stable prior to implementing this plugin.
|
|
23
|
+
|
|
24
|
+
## Install
|
|
25
|
+
|
|
26
|
+
```bash
|
|
27
|
+
sf plugins install sf-decomposer@x.y.z
|
|
28
|
+
```
|
|
29
|
+
|
|
30
|
+
## Commands
|
|
31
|
+
|
|
32
|
+
The `sf-decomposer` supports 2 commands:
|
|
33
|
+
|
|
34
|
+
- `sf decomposer decompose`
|
|
35
|
+
- `sf decomposer recompose`
|
|
36
|
+
|
|
37
|
+
Both commands need to be run somewhere inside your Salesforce DX git repository, whether in the root folder (recommended) or in a subfolder. This plugin will determine the root folder of this repository and read the `sfdx-project.json` file in the root folder. All package directories listed in the `sfdx-project.json` file will be processed when running this plugin.
|
|
38
|
+
|
|
39
|
+
## `sf decomposer decompose`
|
|
40
|
+
|
|
41
|
+
Decomposes the original metadata files into smaller files for version control. Excluding custom labels, the smaller files will be placed into new sub-directories:
|
|
42
|
+
|
|
43
|
+
<img src="https://raw.githubusercontent.com/mcarvin8/sf-decomposer/main/.github/images/decomposed-perm-set.png">
|
|
44
|
+
|
|
45
|
+
<br>
|
|
46
|
+
|
|
47
|
+
Custom Labels will be decomposed directly in the root labels folder:
|
|
48
|
+
|
|
49
|
+
<img src="https://raw.githubusercontent.com/mcarvin8/sf-decomposer/main/.github/images/decomposed-labels.png">
|
|
50
|
+
|
|
51
|
+
<br>
|
|
52
|
+
|
|
53
|
+
Unique ID elements are used to name decomposed files for nested elements. The default unique ID elements for all metadata types are `<fullName>` and `<name>`. In this example XML below, the `<fullName>` tag is included in the nested element and its contents (`quoteAuto`) will be used to name the decomposed file.
|
|
54
|
+
|
|
55
|
+
```xml
|
|
56
|
+
<labels>
|
|
57
|
+
<fullName>quoteAuto</fullName>
|
|
58
|
+
<value>This is an automatically generated quote.</value>
|
|
59
|
+
<language>en_US</language>
|
|
60
|
+
<protected>false</protected>
|
|
61
|
+
<shortDescription>Automatic Quote</shortDescription>
|
|
62
|
+
</labels>
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
If the default unique ID elements are not found in the nested element, the plugin will look for any other metadata specific unique ID elements (see `CONTRIBUTING` section for more information).
|
|
66
|
+
|
|
67
|
+
If a unique ID element is not found in the nested element, the short SHA-256 hash of the element contents will be used to name the decomposed file, as shown below.
|
|
68
|
+
|
|
69
|
+
It's recommended to add the `--prepurge` flag to the `decompose` command to remove pre-existing decomposed files that may conflict with newer decomposed files due to different SHA hashes.
|
|
70
|
+
|
|
71
|
+
Using the `--format` flag, you can set the desired file type for the decomposed files to XML (default), YAML, or JSON. **Note**: The `--format` flag for the recompose command must match what you selected for the decompose `--format`.
|
|
72
|
+
|
|
73
|
+
<img src="https://raw.githubusercontent.com/mcarvin8/sf-decomposer/main/.github/images/decomposed-apps-hashes.png">
|
|
74
|
+
|
|
75
|
+
<br>
|
|
76
|
+
|
|
77
|
+
```
|
|
78
|
+
USAGE
|
|
79
|
+
$ sf decomposer decompose -m <value> -f <value> [--prepurge --postpurge --debug --json]
|
|
80
|
+
|
|
81
|
+
FLAGS
|
|
82
|
+
-m, --metadata-type=<value> The metadata suffix to process, such as 'flow', 'labels', etc. You can provide this flag multiple times to process multiple metadata types at once.
|
|
83
|
+
-f, --format=<value> [default: 'xml'] The file type for the decomposed files.
|
|
84
|
+
--prepurge [default: false] If provided, purge directories of pre-existing decomposed files.
|
|
85
|
+
--postpurge [default: false] If provided, purge the original files after decomposing them.
|
|
86
|
+
--debug [default: false] If provided, log debugging results to a text file (disassemble.log).
|
|
87
|
+
|
|
88
|
+
GLOBAL FLAGS
|
|
89
|
+
--json Format output as json.
|
|
90
|
+
|
|
91
|
+
DESCRIPTION
|
|
92
|
+
This command will read all of the original metadata files and separate them into smaller files in each package directory.
|
|
93
|
+
|
|
94
|
+
These smaller decomposed files can be XMLs, YAMLs, or JSONs.
|
|
95
|
+
|
|
96
|
+
You should run this after retrieving metadata from an org.
|
|
97
|
+
|
|
98
|
+
EXAMPLES
|
|
99
|
+
Decompose all flows:
|
|
100
|
+
|
|
101
|
+
$ sf decomposer decompose -m "flow" -f "xml" --prepurge --postpurge --debug
|
|
102
|
+
|
|
103
|
+
Decompose all flows and custom labels:
|
|
104
|
+
|
|
105
|
+
$ sf decomposer decompose -m "flow" -m "labels" -f "xml" --prepurge --postpurge --debug
|
|
106
|
+
|
|
107
|
+
```
|
|
108
|
+
|
|
109
|
+
## `sf decomposer recompose`
|
|
110
|
+
|
|
111
|
+
Reads all of the files created by the decompose command and recreates metadata files suitable for deployments.
|
|
112
|
+
|
|
113
|
+
Ensure the `--format` flag of the recompose command matches the file format selected for the `--format` flag in the decompose command. File formats for the decomposed files can be XML (default), YAML, or JSON.
|
|
114
|
+
|
|
115
|
+
This command will always create XMLs as its output format.
|
|
116
|
+
|
|
117
|
+
```
|
|
118
|
+
USAGE
|
|
119
|
+
$ sf decomposer recompose -m <value> -f <value> [--postpurge --debug --json]
|
|
120
|
+
|
|
121
|
+
FLAGS
|
|
122
|
+
-m, --metadata-type=<value> The metadata suffix to process, such as 'flow', 'labels', etc. You can provide this flag multiple times to process multiple metadata types at once.
|
|
123
|
+
-f, --format=<value> [default: 'xml'] The file format for the decomposed files.
|
|
124
|
+
--postpurge [default: false] If provided, purge the decomposed files after recomposing them.
|
|
125
|
+
--debug [default: false] If provided, log debugging results to a text file (disassemble.log).
|
|
126
|
+
|
|
127
|
+
GLOBAL FLAGS
|
|
128
|
+
--json Format output as json.
|
|
129
|
+
|
|
130
|
+
DESCRIPTION
|
|
131
|
+
This command will read all of the decomposed files and recreate deployment compatible metadata files in each package directory.
|
|
132
|
+
|
|
133
|
+
You should run this before you deploy the metadata to an org.
|
|
134
|
+
|
|
135
|
+
EXAMPLES
|
|
136
|
+
Recompose all flows:
|
|
137
|
+
|
|
138
|
+
$ sf decomposer recompose -m "flow" -f "xml" --postpurge --debug
|
|
139
|
+
|
|
140
|
+
Recompose all flows and custom labels:
|
|
141
|
+
|
|
142
|
+
$ sf decomposer recompose -m "flow" -m "labels" -f "xml" --postpurge --debug
|
|
143
|
+
|
|
144
|
+
```
|
|
145
|
+
|
|
146
|
+
## Supported Metadata
|
|
147
|
+
|
|
148
|
+
All parent metadata types imported from this plugin's version of @salesforce/source-deploy-retrieve (SDR) toolkit are supported except for certain types.
|
|
149
|
+
|
|
150
|
+
The `--metadata-type`/`-m` flag should be the metadata's `"suffix"` value as listed in the [metadataRegistry.json](https://github.com/forcedotcom/source-deploy-retrieve/blob/main/src/registry/metadataRegistry.json).
|
|
151
|
+
|
|
152
|
+
The suffix is this part of the original meta file name - `labels` is the suffix in `*.labels-meta.xml`.
|
|
153
|
+
|
|
154
|
+
Here are some examples:
|
|
155
|
+
|
|
156
|
+
- Custom Labels (`--metadata-type "labels"`)
|
|
157
|
+
- Workflows (`--metadata-type "workflow"`)
|
|
158
|
+
- Profiles (`--metadata-type "profile"`)
|
|
159
|
+
- Permission Sets (`--metadata-type "permissionset"`)
|
|
160
|
+
- Flows (`--metadata-type "flow"`)
|
|
161
|
+
- Matching Rules (`--metadata-type "matchingRule"`)
|
|
162
|
+
- Assignment Rules (`--metadata-type "assignmentRules"`)
|
|
163
|
+
- Escalation Rules (`--metadata-type "escalationRules"`)
|
|
164
|
+
- Sharing Rules (`--metadata-type "sharingRules"`)
|
|
165
|
+
- Auto Response Rules (`--metadata-type "autoResponseRules"`)
|
|
166
|
+
- Global Value Set Translation (`--metadata-type "globalValueSetTranslation"`)
|
|
167
|
+
- Standard Value Set Translation (`--metadata-type "standardValueSetTranslation"`)
|
|
168
|
+
- Translations (`--metadata-type "translation"`)
|
|
169
|
+
- Standard Value Sets (`--metadata-type "standardValueSet"`)
|
|
170
|
+
- Global Value Sets (`--metadata-type "globalValueSet"`)
|
|
171
|
+
- AI Scoring Model Definition (`--metadata-type "aiScoringModelDefinition"`)
|
|
172
|
+
- Decision Matrix Definition (`--metadata-type "decisionMatrixDefinition"`)
|
|
173
|
+
- Bot (`--metadata-type "bot"`)
|
|
174
|
+
- **NOTE**: Running "bot" will also decompose and recompose Bot Version meta files
|
|
175
|
+
- The `botVersion` meta suffix will be blocked from running directly
|
|
176
|
+
- Marketing App Extension (`--metadata-type "marketingappextension"`)
|
|
177
|
+
|
|
178
|
+
### Exceptions
|
|
179
|
+
|
|
180
|
+
`botVersion` is blocked from being run directly. Please use the `bot` meta suffix to decompose and recompose bots and bot versions.
|
|
181
|
+
|
|
182
|
+
```
|
|
183
|
+
Error (1): `botVersion` suffix should not be used. Please use `bot` to decompose/recompose bot and bot version files.
|
|
184
|
+
```
|
|
185
|
+
|
|
186
|
+
Custom Objects are not supported by this plugin.
|
|
187
|
+
|
|
188
|
+
```
|
|
189
|
+
Error (1): Custom Objects are not supported by this plugin.
|
|
190
|
+
```
|
|
191
|
+
|
|
192
|
+
Metadata types such as Apex Classes, Apex Components, Triggers, etc. with certain SDR adapter strategies (`matchingContentFile`, `digitalExperience`, `mixedContent`, `bundle`) are not supported by this plugin.
|
|
193
|
+
|
|
194
|
+
```
|
|
195
|
+
Error (1): Metadata types with [matchingContentFile, digitalExperience, mixedContent, bundle] strategies are not supported by this plugin.
|
|
196
|
+
```
|
|
197
|
+
|
|
198
|
+
Children metadata types (ex: custom fields) are not supported and will result in this general error:
|
|
199
|
+
|
|
200
|
+
```
|
|
201
|
+
Error (1): Metadata type not found for the given suffix: field.
|
|
202
|
+
```
|
|
203
|
+
|
|
204
|
+
### Issues
|
|
205
|
+
|
|
206
|
+
Please create "Issues" in this repository if you experience problems decomposing and recomposing specific metadata types or if this plugin's version of SDR needs to be updated to account for new metadata types.
|
|
207
|
+
|
|
208
|
+
## Warnings and Logging
|
|
209
|
+
|
|
210
|
+
The package used to decompose and recompose XMLs, `xml-disassembler`, will log errors, and optionally debugging statements, to a log file, `disassemble.log`. This log will be created in the working directory every time this plugin runs. If there were no XML decomposing/recomposing errors, this log will simply be empty.
|
|
211
|
+
|
|
212
|
+
By default, this package will only log errors to the file. This plugin will print `xml-disassembler` errors as warnings in the command terminal to allow all other files to be processed.
|
|
213
|
+
|
|
214
|
+
These warnings when running `decompose` and `recompose` commands will look as such:
|
|
215
|
+
|
|
216
|
+
```
|
|
217
|
+
Warning: [2024-04-08T19:27:43.622] [ERROR] default - C:\Users\matth\Documents\sf-decomposer\test\baselines\flows\Get_Info\actionCalls\Get_Info.actionCalls-meta.xml was unabled to be parsed and will not be processed. Confirm formatting and try again.
|
|
218
|
+
```
|
|
219
|
+
|
|
220
|
+
To add additional debugging statements to the log file, provide the `--debug` flag to either command to generate additional logging statements to `disassemble.log`.
|
|
221
|
+
|
|
222
|
+
General debugging statements in the log file will look like:
|
|
223
|
+
|
|
224
|
+
```
|
|
225
|
+
[2024-03-30T14:28:37.959] [DEBUG] default - Created disassembled file: mock\no-nested-elements\HR_Admin\HR_Admin.permissionset-meta.xml
|
|
226
|
+
```
|
|
227
|
+
|
|
228
|
+
Recommend adding the `disassemble.log` to your `.gitignore` file.
|
|
229
|
+
|
|
230
|
+
## Hook
|
|
231
|
+
|
|
232
|
+
A post-retrieve hook has been configured if you elect to use it. The post-retrieve hook will automatically decompose the desired metadata types after every Salesforce CLI retrieval if you create this file in the root of your repo: `.sfdecomposer.config.json`
|
|
233
|
+
|
|
234
|
+
The `.sfdecomposer.config.json` should look like this:
|
|
235
|
+
|
|
236
|
+
```json
|
|
237
|
+
{
|
|
238
|
+
"metadataSuffixes": "labels,workflow,profile",
|
|
239
|
+
"prePurge": true,
|
|
240
|
+
"postPurge": true,
|
|
241
|
+
"decomposedFormat": "xml"
|
|
242
|
+
}
|
|
243
|
+
```
|
|
244
|
+
|
|
245
|
+
- `metadataSuffixes` is required and should be a comma-separated string of metadata suffixes to decompose automatically after retrievals.
|
|
246
|
+
- `prePurge` is optional and should be a boolean. If true, this will delete any existing decomposed files before decomposing the files. If you do not provide this, the default will be `false`.
|
|
247
|
+
- `postPurge` is optional and should be a boolean. If true, this will delete the retrieval file after decomposing it. If you do not provide this, the default will be `false`.
|
|
248
|
+
- `decomposedFormat` is optional and should be either `xml`, `json`, or `yaml`, depending on what file format you want the decomposed files created as. If you do not provide this, the default will be `xml`.
|
|
249
|
+
|
|
250
|
+
If the `.sfdecomposer.config.json` file isn't found, the hook will be skipped.
|
|
251
|
+
|
|
252
|
+
**NOTE:** In order to avoid errors during the retrieval, you must configure your `.forceignore` file to have the Salesforce CLI ignore the decomposed files. See section below.
|
|
253
|
+
|
|
254
|
+
## Ignore Files
|
|
255
|
+
|
|
256
|
+
The `.gitignore` and `.forceignore` files in your repository should be updated based on the metadata types you wish to decompose.
|
|
257
|
+
|
|
258
|
+
Reference the below examples:
|
|
259
|
+
|
|
260
|
+
### `.gitignore` updates
|
|
261
|
+
|
|
262
|
+
Git should ignore the recomposed files.
|
|
263
|
+
|
|
264
|
+
```
|
|
265
|
+
# Ignore recomposed files
|
|
266
|
+
**/permissionsets/*.permissionset-meta.xml
|
|
267
|
+
**/profiles/*.profile-meta.xml
|
|
268
|
+
**/labels/CustomLabels.labels-meta.xml
|
|
269
|
+
**/workflows/*.workflow-meta.xml
|
|
270
|
+
**/flows/*.flow-meta.xml
|
|
271
|
+
**/matchingRules/*.matchingRule-meta.xml
|
|
272
|
+
**/assignmentRules/*.assignmentRules-meta.xml
|
|
273
|
+
**/escalationRules/*.escalationRules-meta.xml
|
|
274
|
+
**/sharingRules/*.sharingRules-meta.xml
|
|
275
|
+
**/autoResponseRules/*.autoResponseRules-meta.xml
|
|
276
|
+
**/globalValueSetTranslations/*.globalValueSetTranslation-meta.xml
|
|
277
|
+
**/standardValueSetTranslations/*.standardValueSetTranslation-meta.xml
|
|
278
|
+
**/translations/*.translation-meta.xml
|
|
279
|
+
**/globalValueSets/*.globalValueSet-meta.xml
|
|
280
|
+
**/standardValueSets/*.standardValueSet-meta.xml
|
|
281
|
+
**/decisionMatrixDefinition/*.decisionMatrixDefinition-meta.xml
|
|
282
|
+
**/aiScoringModelDefinitions/*.aiScoringModelDefinition-meta.xml
|
|
283
|
+
**/bots/*/*.botVersion-meta.xml
|
|
284
|
+
**/bots/*/*.bot-meta.xml
|
|
285
|
+
**/marketingappextensions/*.marketingappextension-meta.xml
|
|
286
|
+
```
|
|
287
|
+
|
|
288
|
+
Git should also ignore the log created by the `xml-disassembler` package (see previous section).
|
|
289
|
+
|
|
290
|
+
```
|
|
291
|
+
disassemble.log
|
|
292
|
+
```
|
|
293
|
+
|
|
294
|
+
### `.forceignore` updates
|
|
295
|
+
|
|
296
|
+
The Salesforce CLI should ignore the decomposed files and should allow the recomposed files. Update based on the decomposed file format you are using (`.xml`, `.json`, or `.yaml`).
|
|
297
|
+
|
|
298
|
+
```
|
|
299
|
+
# Ignore decomposed files
|
|
300
|
+
**/profiles/**/*.xml
|
|
301
|
+
**/permissionsets/**/*.xml
|
|
302
|
+
**/labels/*.xml
|
|
303
|
+
**/workflows/**/*.xml
|
|
304
|
+
**/flows/**/*.xml
|
|
305
|
+
**/matchingRules/**/*.xml
|
|
306
|
+
**/assignmentRules/**/*.xml
|
|
307
|
+
**/escalationRules/**/*.xml
|
|
308
|
+
**/sharingRules/**/*.xml
|
|
309
|
+
**/autoResponseRules/**/*.xml
|
|
310
|
+
**/globalValueSetTranslations/**/*.xml
|
|
311
|
+
**/standardValueSetTranslations/**/*.xml
|
|
312
|
+
**/translations/**/*.xml
|
|
313
|
+
**/globalValueSets/**/*.xml
|
|
314
|
+
**/standardValueSets/**/*.xml
|
|
315
|
+
**/decisionMatrixDefinition/**/*.xml
|
|
316
|
+
**/aiScoringModelDefinitions/**/*.xml
|
|
317
|
+
**/bots/**/*.xml
|
|
318
|
+
**/marketingappextensions/**/*.xml
|
|
319
|
+
|
|
320
|
+
# Allow the recomposed files
|
|
321
|
+
!**/permissionsets/*.permissionset-meta.xml
|
|
322
|
+
!**/labels/CustomLabels.labels-meta.xml
|
|
323
|
+
!**/workflows/*.workflow-meta.xml
|
|
324
|
+
!**/profiles/*.profile-meta.xml
|
|
325
|
+
!**/flows/*.flow-meta.xml
|
|
326
|
+
!**/matchingRules/*.matchingRule-meta.xml
|
|
327
|
+
!**/assignmentRules/*.assignmentRules-meta.xml
|
|
328
|
+
!**/escalationRules/*.escalationRules-meta.xml
|
|
329
|
+
!**/sharingRules/*.sharingRules-meta.xml
|
|
330
|
+
!**/autoResponseRules/*.autoResponseRules-meta.xml
|
|
331
|
+
!**/globalValueSetTranslations/*.globalValueSetTranslation-meta.xml
|
|
332
|
+
!**/standardValueSetTranslations/*.standardValueSetTranslation-meta.xml
|
|
333
|
+
!**/translations/*.translation-meta.xml
|
|
334
|
+
!**/globalValueSets/*.globalValueSet-meta.xml
|
|
335
|
+
!**/standardValueSets/*.standardValueSet-meta.xml
|
|
336
|
+
!**/decisionMatrixDefinition/*.decisionMatrixDefinition-meta.xml
|
|
337
|
+
!**/aiScoringModelDefinitions/*.aiScoringModelDefinition-meta.xml
|
|
338
|
+
!**/bots/*/*.botVersion-meta.xml
|
|
339
|
+
!**/bots/*/*.bot-meta.xml
|
|
340
|
+
!**/marketingappextensions/*.marketingappextension-meta.xml
|
|
341
|
+
```
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import { SfCommand } from '@salesforce/sf-plugins-core';
|
|
2
|
+
export type DecomposerDecomposeResult = {
|
|
3
|
+
metadata: string[];
|
|
4
|
+
};
|
|
5
|
+
export default class DecomposerDecompose extends SfCommand<DecomposerDecomposeResult> {
|
|
6
|
+
static readonly summary: string;
|
|
7
|
+
static readonly description: string;
|
|
8
|
+
static readonly examples: string[];
|
|
9
|
+
static readonly flags: {
|
|
10
|
+
'metadata-type': import("@oclif/core/lib/interfaces/parser.js").OptionFlag<string[], import("@oclif/core/lib/interfaces/parser.js").CustomOptions>;
|
|
11
|
+
prepurge: import("@oclif/core/lib/interfaces/parser.js").BooleanFlag<boolean>;
|
|
12
|
+
postpurge: import("@oclif/core/lib/interfaces/parser.js").BooleanFlag<boolean>;
|
|
13
|
+
debug: import("@oclif/core/lib/interfaces/parser.js").BooleanFlag<boolean>;
|
|
14
|
+
format: import("@oclif/core/lib/interfaces/parser.js").OptionFlag<string, import("@oclif/core/lib/interfaces/parser.js").CustomOptions>;
|
|
15
|
+
};
|
|
16
|
+
run(): Promise<DecomposerDecomposeResult>;
|
|
17
|
+
}
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
/* eslint-disable no-await-in-loop */
|
|
3
|
+
import { SfCommand, Flags } from '@salesforce/sf-plugins-core';
|
|
4
|
+
import { Messages } from '@salesforce/core';
|
|
5
|
+
import { LOG_FILE, DECOMPOSED_FILE_TYPES } from '../../helpers/constants.js';
|
|
6
|
+
import { decomposeFileHandler } from '../../service/decomposeFileHandler.js';
|
|
7
|
+
import { getRegistryValuesBySuffix } from '../../metadata/getRegistryValuesBySuffix.js';
|
|
8
|
+
import { readOriginalLogFile, checkLogForErrors } from '../../service/checkLogforErrors.js';
|
|
9
|
+
Messages.importMessagesDirectoryFromMetaUrl(import.meta.url);
|
|
10
|
+
const messages = Messages.loadMessages('sf-decomposer', 'decomposer.decompose');
|
|
11
|
+
export default class DecomposerDecompose extends SfCommand {
|
|
12
|
+
static summary = messages.getMessage('summary');
|
|
13
|
+
static description = messages.getMessage('description');
|
|
14
|
+
static examples = messages.getMessages('examples');
|
|
15
|
+
static flags = {
|
|
16
|
+
'metadata-type': Flags.string({
|
|
17
|
+
summary: messages.getMessage('flags.metadata-type.summary'),
|
|
18
|
+
char: 'm',
|
|
19
|
+
multiple: true,
|
|
20
|
+
required: true,
|
|
21
|
+
}),
|
|
22
|
+
prepurge: Flags.boolean({
|
|
23
|
+
summary: messages.getMessage('flags.prepurge.summary'),
|
|
24
|
+
required: false,
|
|
25
|
+
default: false,
|
|
26
|
+
}),
|
|
27
|
+
postpurge: Flags.boolean({
|
|
28
|
+
summary: messages.getMessage('flags.postpurge.summary'),
|
|
29
|
+
required: false,
|
|
30
|
+
default: false,
|
|
31
|
+
}),
|
|
32
|
+
debug: Flags.boolean({
|
|
33
|
+
summary: messages.getMessage('flags.debug.summary'),
|
|
34
|
+
required: false,
|
|
35
|
+
default: false,
|
|
36
|
+
}),
|
|
37
|
+
format: Flags.string({
|
|
38
|
+
summary: messages.getMessage('flags.format.summary'),
|
|
39
|
+
char: 'f',
|
|
40
|
+
required: true,
|
|
41
|
+
multiple: false,
|
|
42
|
+
default: 'xml',
|
|
43
|
+
options: DECOMPOSED_FILE_TYPES,
|
|
44
|
+
}),
|
|
45
|
+
};
|
|
46
|
+
async run() {
|
|
47
|
+
const { flags } = await this.parse(DecomposerDecompose);
|
|
48
|
+
const metadataTypes = flags['metadata-type'];
|
|
49
|
+
const prepurge = flags['prepurge'];
|
|
50
|
+
const postpurge = flags['postpurge'];
|
|
51
|
+
const debug = flags['debug'];
|
|
52
|
+
const format = flags['format'];
|
|
53
|
+
for (const metadataType of metadataTypes) {
|
|
54
|
+
const metaAttributes = await getRegistryValuesBySuffix(metadataType, 'decompose');
|
|
55
|
+
const currentLogFile = await readOriginalLogFile(LOG_FILE);
|
|
56
|
+
await decomposeFileHandler(metaAttributes, prepurge, postpurge, debug, format);
|
|
57
|
+
const decomposeErrors = await checkLogForErrors(LOG_FILE, currentLogFile);
|
|
58
|
+
if (decomposeErrors.length > 0) {
|
|
59
|
+
decomposeErrors.forEach((error) => {
|
|
60
|
+
this.warn(error);
|
|
61
|
+
});
|
|
62
|
+
}
|
|
63
|
+
this.log(`All metadata files have been decomposed for the metadata type: ${metadataType}`);
|
|
64
|
+
}
|
|
65
|
+
return {
|
|
66
|
+
metadata: metadataTypes,
|
|
67
|
+
};
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
//# sourceMappingURL=decompose.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"decompose.js","sourceRoot":"","sources":["../../../src/commands/decomposer/decompose.ts"],"names":[],"mappings":"AAAA,YAAY,CAAC;AACb,qCAAqC;AAErC,OAAO,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,6BAA6B,CAAC;AAC/D,OAAO,EAAE,QAAQ,EAAE,MAAM,kBAAkB,CAAC;AAE5C,OAAO,EAAE,QAAQ,EAAE,qBAAqB,EAAE,MAAM,4BAA4B,CAAC;AAC7E,OAAO,EAAE,oBAAoB,EAAE,MAAM,uCAAuC,CAAC;AAC7E,OAAO,EAAE,yBAAyB,EAAE,MAAM,6CAA6C,CAAC;AACxF,OAAO,EAAE,mBAAmB,EAAE,iBAAiB,EAAE,MAAM,oCAAoC,CAAC;AAE5F,QAAQ,CAAC,kCAAkC,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAC7D,MAAM,QAAQ,GAAG,QAAQ,CAAC,YAAY,CAAC,eAAe,EAAE,sBAAsB,CAAC,CAAC;AAMhF,MAAM,CAAC,OAAO,OAAO,mBAAoB,SAAQ,SAAoC;IAC5E,MAAM,CAAU,OAAO,GAAG,QAAQ,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC;IACzD,MAAM,CAAU,WAAW,GAAG,QAAQ,CAAC,UAAU,CAAC,aAAa,CAAC,CAAC;IACjE,MAAM,CAAU,QAAQ,GAAG,QAAQ,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;IAE5D,MAAM,CAAU,KAAK,GAAG;QAC7B,eAAe,EAAE,KAAK,CAAC,MAAM,CAAC;YAC5B,OAAO,EAAE,QAAQ,CAAC,UAAU,CAAC,6BAA6B,CAAC;YAC3D,IAAI,EAAE,GAAG;YACT,QAAQ,EAAE,IAAI;YACd,QAAQ,EAAE,IAAI;SACf,CAAC;QACF,QAAQ,EAAE,KAAK,CAAC,OAAO,CAAC;YACtB,OAAO,EAAE,QAAQ,CAAC,UAAU,CAAC,wBAAwB,CAAC;YACtD,QAAQ,EAAE,KAAK;YACf,OAAO,EAAE,KAAK;SACf,CAAC;QACF,SAAS,EAAE,KAAK,CAAC,OAAO,CAAC;YACvB,OAAO,EAAE,QAAQ,CAAC,UAAU,CAAC,yBAAyB,CAAC;YACvD,QAAQ,EAAE,KAAK;YACf,OAAO,EAAE,KAAK;SACf,CAAC;QACF,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC;YACnB,OAAO,EAAE,QAAQ,CAAC,UAAU,CAAC,qBAAqB,CAAC;YACnD,QAAQ,EAAE,KAAK;YACf,OAAO,EAAE,KAAK;SACf,CAAC;QACF,MAAM,EAAE,KAAK,CAAC,MAAM,CAAC;YACnB,OAAO,EAAE,QAAQ,CAAC,UAAU,CAAC,sBAAsB,CAAC;YACpD,IAAI,EAAE,GAAG;YACT,QAAQ,EAAE,IAAI;YACd,QAAQ,EAAE,KAAK;YACf,OAAO,EAAE,KAAK;YACd,OAAO,EAAE,qBAAqB;SAC/B,CAAC;KACH,CAAC;IAEK,KAAK,CAAC,GAAG;QACd,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,IAAI,CAAC,KAAK,CAAC,mBAAmB,CAAC,CAAC;QACxD,MAAM,aAAa,GAAG,KAAK,CAAC,eAAe,CAAC,CAAC;QAC7C,MAAM,QAAQ,GAAG,KAAK,CAAC,UAAU,CAAC,CAAC;QACnC,MAAM,SAAS,GAAG,KAAK,CAAC,WAAW,CAAC,CAAC;QACrC,MAAM,KAAK,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QAC7B,MAAM,MAAM,GAAG,KAAK,CAAC,QAAQ,CAAC,CAAC;QAC/B,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE,CAAC;YACzC,MAAM,cAAc,GAAG,MAAM,yBAA
yB,CAAC,YAAY,EAAE,WAAW,CAAC,CAAC;YAElF,MAAM,cAAc,GAAG,MAAM,mBAAmB,CAAC,QAAQ,CAAC,CAAC;YAC3D,MAAM,oBAAoB,CAAC,cAAc,EAAE,QAAQ,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;YAC/E,MAAM,eAAe,GAAG,MAAM,iBAAiB,CAAC,QAAQ,EAAE,cAAc,CAAC,CAAC;YAC1E,IAAI,eAAe,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBAC/B,eAAe,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,EAAE;oBAChC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;gBACnB,CAAC,CAAC,CAAC;YACL,CAAC;YACD,IAAI,CAAC,GAAG,CAAC,kEAAkE,YAAY,EAAE,CAAC,CAAC;QAC7F,CAAC;QACD,OAAO;YACL,QAAQ,EAAE,aAAa;SACxB,CAAC;IACJ,CAAC"}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { SfCommand } from '@salesforce/sf-plugins-core';
|
|
2
|
+
export type DecomposerRecomposeResult = {
|
|
3
|
+
metadata: string[];
|
|
4
|
+
};
|
|
5
|
+
export default class DecomposerRecompose extends SfCommand<DecomposerRecomposeResult> {
|
|
6
|
+
static readonly summary: string;
|
|
7
|
+
static readonly description: string;
|
|
8
|
+
static readonly examples: string[];
|
|
9
|
+
static readonly flags: {
|
|
10
|
+
'metadata-type': import("@oclif/core/lib/interfaces/parser.js").OptionFlag<string[], import("@oclif/core/lib/interfaces/parser.js").CustomOptions>;
|
|
11
|
+
postpurge: import("@oclif/core/lib/interfaces/parser.js").BooleanFlag<boolean>;
|
|
12
|
+
debug: import("@oclif/core/lib/interfaces/parser.js").BooleanFlag<boolean>;
|
|
13
|
+
format: import("@oclif/core/lib/interfaces/parser.js").OptionFlag<string, import("@oclif/core/lib/interfaces/parser.js").CustomOptions>;
|
|
14
|
+
};
|
|
15
|
+
run(): Promise<DecomposerRecomposeResult>;
|
|
16
|
+
}
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
'use strict';
/* eslint-disable no-await-in-loop */
import { SfCommand, Flags } from '@salesforce/sf-plugins-core';
import { Messages } from '@salesforce/core';
import { LOG_FILE, DECOMPOSED_FILE_TYPES } from '../../helpers/constants.js';
import { recomposeFileHandler } from '../../service/recomposeFileHandler.js';
import { getRegistryValuesBySuffix } from '../../metadata/getRegistryValuesBySuffix.js';
import { readOriginalLogFile, checkLogForErrors } from '../../service/checkLogforErrors.js';
Messages.importMessagesDirectoryFromMetaUrl(import.meta.url);
const messages = Messages.loadMessages('sf-decomposer', 'decomposer.recompose');
/**
 * `decomposer recompose` command: for each requested metadata type,
 * rebuilds the original metadata files from their decomposed parts and
 * surfaces any new errors written to the disassemble log.
 */
export default class DecomposerRecompose extends SfCommand {
    static summary = messages.getMessage('summary');
    static description = messages.getMessage('description');
    static examples = messages.getMessages('examples');
    static flags = {
        'metadata-type': Flags.string({
            char: 'm',
            summary: messages.getMessage('flags.metadata-type.summary'),
            required: true,
            multiple: true,
        }),
        postpurge: Flags.boolean({
            summary: messages.getMessage('flags.postpurge.summary'),
            default: false,
            required: false,
        }),
        debug: Flags.boolean({
            summary: messages.getMessage('flags.debug.summary'),
            default: false,
            required: false,
        }),
        format: Flags.string({
            char: 'f',
            summary: messages.getMessage('flags.format.summary'),
            required: true,
            multiple: false,
            default: 'xml',
            options: DECOMPOSED_FILE_TYPES,
        }),
    };
    async run() {
        const { flags } = await this.parse(DecomposerRecompose);
        const { postpurge, debug, format } = flags;
        const metadataTypes = flags['metadata-type'];
        for (const metadataType of metadataTypes) {
            const metaAttributes = await getRegistryValuesBySuffix(metadataType, 'recompose');
            // Snapshot the log before the run so only newly-appended errors are reported.
            const previousLogContents = await readOriginalLogFile(LOG_FILE);
            await recomposeFileHandler(metaAttributes, postpurge, debug, format);
            const newErrors = await checkLogForErrors(LOG_FILE, previousLogContents);
            for (const error of newErrors) {
                this.warn(error);
            }
            this.log(`All metadata files have been recomposed for the metadata type: ${metadataType}`);
        }
        return {
            metadata: metadataTypes,
        };
    }
}
//# sourceMappingURL=recompose.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"recompose.js","sourceRoot":"","sources":["../../../src/commands/decomposer/recompose.ts"],"names":[],"mappings":"AAAA,YAAY,CAAC;AACb,qCAAqC;AAErC,OAAO,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,6BAA6B,CAAC;AAC/D,OAAO,EAAE,QAAQ,EAAE,MAAM,kBAAkB,CAAC;AAE5C,OAAO,EAAE,QAAQ,EAAE,qBAAqB,EAAE,MAAM,4BAA4B,CAAC;AAC7E,OAAO,EAAE,oBAAoB,EAAE,MAAM,uCAAuC,CAAC;AAC7E,OAAO,EAAE,yBAAyB,EAAE,MAAM,6CAA6C,CAAC;AACxF,OAAO,EAAE,mBAAmB,EAAE,iBAAiB,EAAE,MAAM,oCAAoC,CAAC;AAE5F,QAAQ,CAAC,kCAAkC,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAC7D,MAAM,QAAQ,GAAG,QAAQ,CAAC,YAAY,CAAC,eAAe,EAAE,sBAAsB,CAAC,CAAC;AAMhF,MAAM,CAAC,OAAO,OAAO,mBAAoB,SAAQ,SAAoC;IAC5E,MAAM,CAAU,OAAO,GAAG,QAAQ,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC;IACzD,MAAM,CAAU,WAAW,GAAG,QAAQ,CAAC,UAAU,CAAC,aAAa,CAAC,CAAC;IACjE,MAAM,CAAU,QAAQ,GAAG,QAAQ,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;IAE5D,MAAM,CAAU,KAAK,GAAG;QAC7B,eAAe,EAAE,KAAK,CAAC,MAAM,CAAC;YAC5B,OAAO,EAAE,QAAQ,CAAC,UAAU,CAAC,6BAA6B,CAAC;YAC3D,IAAI,EAAE,GAAG;YACT,QAAQ,EAAE,IAAI;YACd,QAAQ,EAAE,IAAI;SACf,CAAC;QACF,SAAS,EAAE,KAAK,CAAC,OAAO,CAAC;YACvB,OAAO,EAAE,QAAQ,CAAC,UAAU,CAAC,yBAAyB,CAAC;YACvD,QAAQ,EAAE,KAAK;YACf,OAAO,EAAE,KAAK;SACf,CAAC;QACF,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC;YACnB,OAAO,EAAE,QAAQ,CAAC,UAAU,CAAC,qBAAqB,CAAC;YACnD,QAAQ,EAAE,KAAK;YACf,OAAO,EAAE,KAAK;SACf,CAAC;QACF,MAAM,EAAE,KAAK,CAAC,MAAM,CAAC;YACnB,OAAO,EAAE,QAAQ,CAAC,UAAU,CAAC,sBAAsB,CAAC;YACpD,IAAI,EAAE,GAAG;YACT,QAAQ,EAAE,IAAI;YACd,QAAQ,EAAE,KAAK;YACf,OAAO,EAAE,KAAK;YACd,OAAO,EAAE,qBAAqB;SAC/B,CAAC;KACH,CAAC;IAEK,KAAK,CAAC,GAAG;QACd,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,IAAI,CAAC,KAAK,CAAC,mBAAmB,CAAC,CAAC;QACxD,MAAM,aAAa,GAAG,KAAK,CAAC,eAAe,CAAC,CAAC;QAC7C,MAAM,SAAS,GAAG,KAAK,CAAC,WAAW,CAAC,CAAC;QACrC,MAAM,KAAK,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QAC7B,MAAM,MAAM,GAAG,KAAK,CAAC,QAAQ,CAAC,CAAC;QAC/B,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE,CAAC;YACzC,MAAM,cAAc,GAAG,MAAM,yBAAyB,CAAC,YAAY,EAAE,WAAW,CAAC,CAAC;YAElF,MAAM,cAAc,GAAG,MAAM,mBAAmB,CAAC,QAAQ,CAAC,CAAC;YAC3D,MAAM,oBAAoB,CAAC,cAAc,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;YACrE,MAAM,eAAe,GAAG,MAAM,
iBAAiB,CAAC,QAAQ,EAAE,cAAc,CAAC,CAAC;YAC1E,IAAI,eAAe,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBAC/B,eAAe,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,EAAE;oBAChC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;gBACnB,CAAC,CAAC,CAAC;YACL,CAAC;YACD,IAAI,CAAC,GAAG,CAAC,kEAAkE,YAAY,EAAE,CAAC,CAAC;QAC7F,CAAC;QACD,OAAO;YACL,QAAQ,EAAE,aAAa;SACxB,CAAC;IACJ,CAAC"}
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
/** File name of the sfdx project configuration file. */
export declare const SFDX_PROJECT_FILE_NAME = "sfdx-project.json";
/** File name of the combined custom labels metadata file. */
export declare const CUSTOM_LABELS_FILE = "CustomLabels.labels-meta.xml";
/** Comma-separated default element names used as unique IDs (value set in constants.js). */
export declare const DEFAULT_UNIQUE_ID_ELEMENTS: string;
/** Log file checked for errors after decompose/recompose runs. */
export declare const LOG_FILE = "disassemble.log";
/** Allowed output formats for decomposed files (value set in constants.js). */
export declare const DECOMPOSED_FILE_TYPES: string[];
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
'use strict';
// Shared constant values used across the decomposer commands and services.
export const SFDX_PROJECT_FILE_NAME = 'sfdx-project.json';
export const CUSTOM_LABELS_FILE = 'CustomLabels.labels-meta.xml';
// Default element names used to uniquely identify nested metadata entries.
export const DEFAULT_UNIQUE_ID_ELEMENTS = 'fullName,name';
// Log file scanned for errors after decompose/recompose runs.
export const LOG_FILE = 'disassemble.log';
// File formats a metadata file may be decomposed into (used as flag options).
export const DECOMPOSED_FILE_TYPES = ['xml', 'json', 'yaml'];
//# sourceMappingURL=constants.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/helpers/constants.ts"],"names":[],"mappings":"AAAA,YAAY,CAAC;AACb,MAAM,CAAC,MAAM,sBAAsB,GAAG,mBAAmB,CAAC;AAC1D,MAAM,CAAC,MAAM,kBAAkB,GAAG,8BAA8B,CAAC;AACjE,MAAM,CAAC,MAAM,0BAA0B,GAAW,eAAe,CAAC;AAClE,MAAM,CAAC,MAAM,QAAQ,GAAG,iBAAiB,CAAC;AAC1C,MAAM,CAAC,MAAM,qBAAqB,GAAa,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC"}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import { Command, Hook, Config } from '@oclif/core';
import { ScopedPostRetrieve } from '@salesforce/source-deploy-retrieve';
/** Signature of an oclif lifecycle hook bound to the hook context. */
type HookFunction = (this: Hook.Context, options: HookOptions) => Promise<void>;
/** Options oclif passes to the scoped post-retrieve hook. */
type HookOptions = {
    Command: Command;
    argv: string[];
    commandId: string;
    // Retrieve result; may be absent when no retrieve completed.
    result?: ScopedPostRetrieve;
    config: Config;
};
/**
 * Hook run after a scoped retrieve; decomposes retrieved metadata according
 * to the repo's .sfdecomposer.config.json (see scopedPostRetrieve.js).
 */
export declare const scopedPostRetrieve: HookFunction;
export {};
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
import { readFile } from 'node:fs/promises';
|
|
3
|
+
import { resolve } from 'node:path';
|
|
4
|
+
import { simpleGit } from 'simple-git';
|
|
5
|
+
import DecomposerDecompose from '../commands/decomposer/decompose.js';
|
|
6
|
+
export const scopedPostRetrieve = async function (options) {
|
|
7
|
+
if (!options.result?.retrieveResult.response.status) {
|
|
8
|
+
return;
|
|
9
|
+
}
|
|
10
|
+
let configFile;
|
|
11
|
+
const gitOptions = {
|
|
12
|
+
baseDir: process.cwd(),
|
|
13
|
+
binary: 'git',
|
|
14
|
+
maxConcurrentProcesses: 6,
|
|
15
|
+
trimmed: true,
|
|
16
|
+
};
|
|
17
|
+
const git = simpleGit(gitOptions);
|
|
18
|
+
const repoRoot = (await git.revparse('--show-toplevel')).trim();
|
|
19
|
+
const configPath = resolve(repoRoot, '.sfdecomposer.config.json');
|
|
20
|
+
try {
|
|
21
|
+
const jsonString = await readFile(configPath, 'utf-8');
|
|
22
|
+
configFile = JSON.parse(jsonString);
|
|
23
|
+
}
|
|
24
|
+
catch (error) {
|
|
25
|
+
return;
|
|
26
|
+
}
|
|
27
|
+
const metadataTypes = configFile.metadataSuffixes || '.';
|
|
28
|
+
const format = configFile.decomposedFormat || 'xml';
|
|
29
|
+
const prepurge = configFile.prePurge || false;
|
|
30
|
+
const postpurge = configFile.postPurge || false;
|
|
31
|
+
if (metadataTypes.trim() === '.') {
|
|
32
|
+
return;
|
|
33
|
+
}
|
|
34
|
+
const metadataTypesArray = metadataTypes.split(',');
|
|
35
|
+
const commandArgs = [];
|
|
36
|
+
for (const metadataType of metadataTypesArray) {
|
|
37
|
+
const sanitizedMetadataType = metadataType.replace(/,/g, '');
|
|
38
|
+
commandArgs.push('--metadata-type');
|
|
39
|
+
commandArgs.push(sanitizedMetadataType);
|
|
40
|
+
}
|
|
41
|
+
commandArgs.push('--format');
|
|
42
|
+
commandArgs.push(format);
|
|
43
|
+
if (prepurge) {
|
|
44
|
+
commandArgs.push('--prepurge');
|
|
45
|
+
}
|
|
46
|
+
if (postpurge) {
|
|
47
|
+
commandArgs.push('--postpurge');
|
|
48
|
+
}
|
|
49
|
+
await DecomposerDecompose.run(commandArgs);
|
|
50
|
+
};
|
|
51
|
+
//# sourceMappingURL=scopedPostRetrieve.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"scopedPostRetrieve.js","sourceRoot":"","sources":["../../src/hooks/scopedPostRetrieve.ts"],"names":[],"mappings":"AAAA,YAAY,CAAC;AAEb,OAAO,EAAE,QAAQ,EAAE,MAAM,kBAAkB,CAAC;AAC5C,OAAO,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AAGpC,OAAO,EAAE,SAAS,EAA+B,MAAM,YAAY,CAAC;AACpE,OAAO,mBAAmB,MAAM,qCAAqC,CAAC;AAmBtE,MAAM,CAAC,MAAM,kBAAkB,GAAiB,KAAK,WAAW,OAAO;IACrE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,cAAc,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC;QACpD,OAAO;IACT,CAAC;IACD,IAAI,UAAsB,CAAC;IAC3B,MAAM,UAAU,GAA8B;QAC5C,OAAO,EAAE,OAAO,CAAC,GAAG,EAAE;QACtB,MAAM,EAAE,KAAK;QACb,sBAAsB,EAAE,CAAC;QACzB,OAAO,EAAE,IAAI;KACd,CAAC;IAEF,MAAM,GAAG,GAAc,SAAS,CAAC,UAAU,CAAC,CAAC;IAC7C,MAAM,QAAQ,GAAG,CAAC,MAAM,GAAG,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC;IAChE,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,EAAE,2BAA2B,CAAC,CAAC;IAElE,IAAI,CAAC;QACH,MAAM,UAAU,GAAW,MAAM,QAAQ,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC/D,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,CAAe,CAAC;IACpD,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,OAAO;IACT,CAAC;IAED,MAAM,aAAa,GAAW,UAAU,CAAC,gBAAgB,IAAI,GAAG,CAAC;IACjE,MAAM,MAAM,GAAW,UAAU,CAAC,gBAAgB,IAAI,KAAK,CAAC;IAC5D,MAAM,QAAQ,GAAY,UAAU,CAAC,QAAQ,IAAI,KAAK,CAAC;IACvD,MAAM,SAAS,GAAY,UAAU,CAAC,SAAS,IAAI,KAAK,CAAC;IAEzD,IAAI,aAAa,CAAC,IAAI,EAAE,KAAK,GAAG,EAAE,CAAC;QACjC,OAAO;IACT,CAAC;IAED,MAAM,kBAAkB,GAAa,aAAa,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAE9D,MAAM,WAAW,GAAa,EAAE,CAAC;IACjC,KAAK,MAAM,YAAY,IAAI,kBAAkB,EAAE,CAAC;QAC9C,MAAM,qBAAqB,GAAG,YAAY,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;QAC7D,WAAW,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;QACpC,WAAW,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC;IAC1C,CAAC;IACD,WAAW,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;IAC7B,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IACzB,IAAI,QAAQ,EAAE,CAAC;QACb,WAAW,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;IACjC,CAAC;IACD,IAAI,SAAS,EAAE,CAAC;QACd,WAAW,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;IAClC,CAAC;IACD,MAAM,mBAAmB,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC;AAC7C,CAAC,CAAC"}
|
package/lib/index.d.ts
ADDED
package/lib/index.js
ADDED
package/lib/index.js.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,eAAe,EAAE,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
/**
 * Returns package directory paths for the given metadata directory.
 * NOTE(review): implementation not visible in this declaration file —
 * presumably reads sfdx-project.json; confirm against getPackageDirectories.js.
 */
export declare function getPackageDirectories(metaDirectory: string): Promise<string[]>;
|