@ds-sfdc/sfparty 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +28 -0
- package/README.md +18 -0
- package/index.js +535 -0
- package/lib/fileUtils.js +165 -0
- package/lib/label/combine.js +203 -0
- package/lib/label/definition.js +12 -0
- package/lib/label/split.js +213 -0
- package/lib/permset/combine.js +286 -0
- package/lib/permset/definition.js +74 -0
- package/lib/permset/split.js +287 -0
- package/lib/profile/combine.js +309 -0
- package/lib/profile/definition.js +55 -0
- package/lib/profile/split.js +983 -0
- package/lib/workflow/combine.js +330 -0
- package/lib/workflow/definition.js +55 -0
- package/lib/workflow/split.js +278 -0
- package/nodemon.json +4 -0
- package/package.json +51 -0
- package/sfdx-project.json +11 -0
- package/tests/root.spec.js +14 -0
|
@@ -0,0 +1,330 @@
|
|
|
1
|
+
import path from 'path'
|
|
2
|
+
import logUpdate from 'log-update'
|
|
3
|
+
import chalk from 'chalk'
|
|
4
|
+
import convertHrtime from 'convert-hrtime'
|
|
5
|
+
import cliSpinners from 'cli-spinners'
|
|
6
|
+
import * as xml2js from 'xml2js'
|
|
7
|
+
import * as fileUtils from '../fileUtils.js'
|
|
8
|
+
|
|
9
|
+
const spinner = cliSpinners['dots']
|
|
10
|
+
|
|
11
|
+
export class Combine {
|
|
12
|
+
#type = undefined
|
|
13
|
+
#root = undefined
|
|
14
|
+
#spinnerMessage = ''
|
|
15
|
+
#startTime = 0
|
|
16
|
+
#fileName = {
|
|
17
|
+
fullName: undefined,
|
|
18
|
+
shortName: undefined,
|
|
19
|
+
}
|
|
20
|
+
#errorMessage = ''
|
|
21
|
+
#frameIndex = 0
|
|
22
|
+
#types = []
|
|
23
|
+
#fileStats = {
|
|
24
|
+
atime: undefined,
|
|
25
|
+
mtime: undefined
|
|
26
|
+
}
|
|
27
|
+
#json = {}
|
|
28
|
+
|
|
29
|
+
constructor(config) {
|
|
30
|
+
this.metadataDefinition = config.metadataDefinition
|
|
31
|
+
this.sourceDir = config.sourceDir
|
|
32
|
+
this.targetDir = config.targetDir
|
|
33
|
+
this.metaDir = config.metaDir
|
|
34
|
+
this.sequence = config.sequence
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
get metadataDefinition() {
|
|
38
|
+
return this._metadataDefinition
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
set metadataDefinition(definition) {
|
|
42
|
+
this._metadataDefinition = definition
|
|
43
|
+
this.#type = definition.filetype
|
|
44
|
+
this.#root = definition.root
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
get metaDir() {
|
|
48
|
+
return this._metaDir
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
set metaDir(metaDir) {
|
|
52
|
+
this._metaDir = metaDir
|
|
53
|
+
this.#fileName.fullName = path.join(
|
|
54
|
+
this.targetDir,
|
|
55
|
+
metaDir.split(path.sep).pop() + `.${this.#type}-meta.xml`
|
|
56
|
+
)
|
|
57
|
+
this.#fileName.shortName = metaDir.split(path.sep).pop()
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
get sequence() {
|
|
61
|
+
if (global.process.current > this._sequence) {
|
|
62
|
+
return global.process.current
|
|
63
|
+
} else {
|
|
64
|
+
return this._sequence
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
set sequence(sequence) {
|
|
69
|
+
this._sequence = sequence
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
combine() {
|
|
73
|
+
return new Promise((resolve, reject) => {
|
|
74
|
+
const that = this
|
|
75
|
+
if (!fileUtils.directoryExists(that.sourceDir)) reject(`Path does not exist: ${that.sourceDir}`)
|
|
76
|
+
|
|
77
|
+
if (that.metadataDefinition.directories !== undefined) {
|
|
78
|
+
that.#types = that.#types.concat(that.metadataDefinition.directories)
|
|
79
|
+
}
|
|
80
|
+
if (that.metadataDefinition.singleFiles !== undefined) {
|
|
81
|
+
that.#types = that.#types.concat(that.metadataDefinition.singleFiles)
|
|
82
|
+
}
|
|
83
|
+
if (that.metadataDefinition.main !== undefined) {
|
|
84
|
+
that.#types = that.#types.concat(that.metadataDefinition.main)
|
|
85
|
+
}
|
|
86
|
+
that.#types.sort((a, b) => {
|
|
87
|
+
if (a == '$') return -1
|
|
88
|
+
if (a < b) return -1
|
|
89
|
+
if (a > b) return 1
|
|
90
|
+
return 0
|
|
91
|
+
})
|
|
92
|
+
|
|
93
|
+
that.#types.forEach(key => {
|
|
94
|
+
that.#json[key] = undefined
|
|
95
|
+
})
|
|
96
|
+
|
|
97
|
+
getXML(that)
|
|
98
|
+
|
|
99
|
+
saveXML(that)
|
|
100
|
+
resolve(true)
|
|
101
|
+
})
|
|
102
|
+
|
|
103
|
+
function getXML(that) {
|
|
104
|
+
that.#startTime = process.hrtime.bigint()
|
|
105
|
+
that.#spinnerMessage = `[%1] of ${global.processed.total} - ${that.#root}: [%4]${chalk.yellowBright('[%5]')}[%2][%3]`
|
|
106
|
+
|
|
107
|
+
that.#types.forEach(key => {
|
|
108
|
+
// display message
|
|
109
|
+
logUpdate(that.#spinnerMessage
|
|
110
|
+
.replace('[%1]', that.sequence.toString().padStart(global.processed.total.toString().length, ' '))
|
|
111
|
+
.replace('[%2]', `\n${chalk.magentaBright(nextFrame(that))} ${key}`)
|
|
112
|
+
.replace('[%3]', `${that.#errorMessage}`)
|
|
113
|
+
.replace('[%4]', `${global.statusLevel.working} `)
|
|
114
|
+
.replace('[%5]', `${that.#fileName.shortName} `)
|
|
115
|
+
)
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
if (that.metadataDefinition.main.includes(key)) {
|
|
119
|
+
// TODO process main
|
|
120
|
+
const fileObj = {
|
|
121
|
+
shortName: 'Main',
|
|
122
|
+
fullName: path.join(that.sourceDir, that.metaDir, `main.${global.format}`),
|
|
123
|
+
}
|
|
124
|
+
processFile(that, key, fileObj)
|
|
125
|
+
} else if (that.metadataDefinition.singleFiles.includes(key)) {
|
|
126
|
+
// TODO process single file
|
|
127
|
+
} else if (that.metadataDefinition.directories.includes(key)) {
|
|
128
|
+
processDirectory(that, key)
|
|
129
|
+
} else {
|
|
130
|
+
global.logger.warn(`Unexpected metadata type: ${chalk.redBright(key)}`)
|
|
131
|
+
}
|
|
132
|
+
})
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
function processDirectory(that, key) {
|
|
136
|
+
// Process the directory sourceDir/metaDir/key
|
|
137
|
+
const currentDir = path.join(that.sourceDir, that.metaDir, key)
|
|
138
|
+
// ensure the directory exists
|
|
139
|
+
if (fileUtils.directoryExists(currentDir)) {
|
|
140
|
+
const fileList = fileUtils.getFiles(currentDir, global.format)
|
|
141
|
+
fileList.sort() // process files alphabetically
|
|
142
|
+
that.#json[key] = []
|
|
143
|
+
|
|
144
|
+
// iterate over fileList
|
|
145
|
+
fileList.forEach((file, index) => {
|
|
146
|
+
logUpdate(that.#spinnerMessage
|
|
147
|
+
.replace('[%1]', that.sequence.toString().padStart(global.processed.total.toString().length, ' '))
|
|
148
|
+
.replace('[%2]', `\n${chalk.magentaBright(nextFrame(that))} ${key} - ${index + 1} of ${fileList.length} - ${chalk.magentaBright(file)}`)
|
|
149
|
+
.replace('[%3]', `${that.#errorMessage}`)
|
|
150
|
+
.replace('[%4]', `${global.statusLevel.working} `)
|
|
151
|
+
.replace('[%5]', `${that.#fileName.shortName} `)
|
|
152
|
+
)
|
|
153
|
+
|
|
154
|
+
const fileObj = {
|
|
155
|
+
shortName: file,
|
|
156
|
+
fullName: path.join(that.sourceDir, that.metaDir, key, file),
|
|
157
|
+
}
|
|
158
|
+
processFile(that, key, fileObj)
|
|
159
|
+
})
|
|
160
|
+
|
|
161
|
+
}
|
|
162
|
+
return true
|
|
163
|
+
}
|
|
164
|
+
|
|
165
|
+
function processFile(that, key, fileObj = undefined) {
|
|
166
|
+
if (
|
|
167
|
+
fileObj === undefined ||
|
|
168
|
+
typeof fileObj != 'object' ||
|
|
169
|
+
fileObj.shortName === undefined ||
|
|
170
|
+
fileObj.fullName === undefined
|
|
171
|
+
) {
|
|
172
|
+
that.#errorMessage += `\n${global.statusLevel.warn} Invalid file information passed ${chalk.redBright(fileObj)}`
|
|
173
|
+
return false
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
if (fileUtils.fileExists(fileObj.fullName)) {
|
|
177
|
+
let result = fileUtils.readPartFile(fileObj.fullName)
|
|
178
|
+
result = sortAndArrange(that, result, key)
|
|
179
|
+
if (Array.isArray(that.#json[key])) {
|
|
180
|
+
that.#json[key].push(result[key])
|
|
181
|
+
} else {
|
|
182
|
+
that.#json[key] = result[key][key][key]
|
|
183
|
+
}
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
updateFileStats(that, fileUtils.fileInfo(fileObj.fullName).stats)
|
|
187
|
+
// genericXML(that, key, fileObj.fullName)
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
function updateFileStats(that, stats) {
|
|
191
|
+
if (that.#fileStats.atime === undefined || stats.atime > that.#fileStats.atime) {
|
|
192
|
+
that.#fileStats.atime = stats.atime
|
|
193
|
+
}
|
|
194
|
+
|
|
195
|
+
if (that.#fileStats.mtime === undefined || stats.mtime > that.#fileStats.mtime) {
|
|
196
|
+
that.#fileStats.mtime = stats.mtime
|
|
197
|
+
}
|
|
198
|
+
}
|
|
199
|
+
|
|
200
|
+
function saveXML(that) {
|
|
201
|
+
const builder = new xml2js.Builder(
|
|
202
|
+
{
|
|
203
|
+
cdata: false,
|
|
204
|
+
rootName: that.#root,
|
|
205
|
+
xmldec: { 'version': '1.0', 'encoding': 'UTF-8' }
|
|
206
|
+
}
|
|
207
|
+
)
|
|
208
|
+
fileUtils.createDirectory(that.targetDir)
|
|
209
|
+
|
|
210
|
+
Object.keys(that.#json).forEach(key => {
|
|
211
|
+
if (that.#json[key] === undefined) delete that.#json[key]
|
|
212
|
+
})
|
|
213
|
+
const xml = builder.buildObject(that.#json)
|
|
214
|
+
|
|
215
|
+
fileUtils.writeFile(
|
|
216
|
+
that.#fileName.fullName,
|
|
217
|
+
xml,
|
|
218
|
+
that.#fileStats.atime,
|
|
219
|
+
that.#fileStats.mtime
|
|
220
|
+
)
|
|
221
|
+
|
|
222
|
+
// display the finish message
|
|
223
|
+
finishMessage(that)
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
function finishMessage(that) {
|
|
227
|
+
let executionTime = getTimeDiff(BigInt(that.#startTime))
|
|
228
|
+
let durationMessage = `${executionTime.seconds}.${executionTime.milliseconds}s`
|
|
229
|
+
let stateIcon = (that.#errorMessage == '') ? global.statusLevel.success : global.statusLevel.fail
|
|
230
|
+
|
|
231
|
+
logUpdate(that.#spinnerMessage
|
|
232
|
+
.replace('[%1]', that.sequence.toString().padStart(global.processed.total.toString().length, ' '))
|
|
233
|
+
.replace('[%2]', `. Processed in ${durationMessage}.`)
|
|
234
|
+
.replace('[%3]', `${that.#errorMessage}`)
|
|
235
|
+
.replace('[%4]', `${stateIcon} `)
|
|
236
|
+
.replace('[%5]', that.#fileName.shortName)
|
|
237
|
+
)
|
|
238
|
+
logUpdate.done()
|
|
239
|
+
}
|
|
240
|
+
|
|
241
|
+
function nextFrame(that) {
|
|
242
|
+
return spinner.frames[that.#frameIndex = ++that.#frameIndex % spinner.frames.length]
|
|
243
|
+
}
|
|
244
|
+
|
|
245
|
+
// end of functions
|
|
246
|
+
// end of combine
|
|
247
|
+
}
|
|
248
|
+
|
|
249
|
+
// end of class
|
|
250
|
+
}
|
|
251
|
+
|
|
252
|
+
/**
 * Sort an array of objects in place by the given property name.
 *
 * Non-array inputs are returned untouched. Property values are compared with
 * the relational operators (`<`/`>`), matching JavaScript's default mixed-type
 * comparison semantics.
 *
 * @param {Array|*} json - array to sort (mutated), or any other value
 * @param {string} key - property name to compare elements by
 * @returns {Array|*} the same value that was passed in
 */
function sortJSON(json, key) {
    if (!Array.isArray(json)) return json

    const byKey = (left, right) => {
        if (left[key] < right[key]) return -1
        return left[key] > right[key] ? 1 : 0
    }
    json.sort(byKey)
    return json
}
|
|
262
|
+
|
|
263
|
+
/**
 * Recursively normalize a part-file JSON fragment: arrange each object's keys
 * into the configured xmlOrder and sort arrays by their section's sortKey.
 *
 * @param {object} that - Combine instance (reads metadataDefinition.sortKeys)
 * @param {object|Array} json - fragment to normalize (mutated in place)
 * @param {string} [key] - metadata section name this fragment belongs to
 * @param {boolean} [topLevel=true] - when true, the result is wrapped as
 *   `{ [key]: json }` instead of being returned bare
 * @returns {object} the normalized fragment, wrapped when topLevel is true
 */
function sortAndArrange(that, json, key = undefined, topLevel = true) {
    // sort and order keys
    const sortKey = that.metadataDefinition.sortKeys[key]
    let jsonResult = {}
    json = arrangeKeys(that, json, key)
    json = sortJSON(json, sortKey)

    Object.keys(json).forEach((subKey, index, thisObj) => {
        if (typeof json[subKey] == 'object') {
            if (!Array.isArray(json[subKey])) {
                // call recursively on object
                // NOTE(review): this recursion omits topLevel, so the default
                // `true` applies and the nested object gets re-wrapped under
                // its own key. processFile in Combine unwraps this with
                // `result[key][key][key]` — the two are coupled; confirm
                // before passing `false` here.
                json[subKey] = sortAndArrange(that, json[subKey], subKey)
            } else {
                // iterate array for objects
                json[subKey].forEach((arrItem, index) => {
                    if (typeof arrItem == 'object') {
                        json[subKey][index] = sortAndArrange(that, json[subKey][index], subKey, false)
                    }
                })

            }
        }
    })

    // we need to include the key into the json for the top-level only
    if (topLevel) {
        jsonResult[key] = json
    } else {
        jsonResult = json
    }

    return jsonResult
}
|
|
296
|
+
|
|
297
|
+
/**
 * Rebuild `json` so its own keys appear in canonical order: keys listed in
 * `metadataDefinition.xmlOrder[key]` come first (in their listed order), and
 * all remaining keys follow alphabetically. Values are copied by reference
 * into a new object.
 *
 * Fix: the original used the magic sentinel 99 for "not listed", which
 * silently misorders any xmlOrder list with 99 or more entries;
 * Number.MAX_SAFE_INTEGER removes that cap while preserving behavior for
 * every shorter list.
 *
 * @param {object} that - Combine instance (reads metadataDefinition.xmlOrder)
 * @param {object} json - object whose keys are to be reordered
 * @param {string} [key] - metadata section name used to look up the order list
 * @returns {object} a new object with the same values and reordered keys
 */
function arrangeKeys(that, json, key = undefined) {
    const order = that.metadataDefinition.xmlOrder?.[key]
    const NOT_LISTED = Number.MAX_SAFE_INTEGER

    const orderedKeys = Object.keys(json).sort((a, b) => {
        if (order !== undefined) {
            let aIndex = order.indexOf(a)
            let bIndex = order.indexOf(b)
            // keys absent from the explicit order sort after all listed keys
            if (aIndex == -1) aIndex = NOT_LISTED
            if (bIndex == -1) bIndex = NOT_LISTED

            if (aIndex < bIndex && aIndex != NOT_LISTED) return -1
            if (aIndex > bIndex && bIndex != NOT_LISTED) return 1
        }
        // alphabetical fallback for unlisted keys
        if (a < b) return -1
        if (a > b) return 1
        return 0
    })

    return orderedKeys.reduce((accumulator, sortedKey) => {
        accumulator[sortedKey] = json[sortedKey]
        return accumulator
    }, {})
}
|
|
322
|
+
|
|
323
|
+
/**
 * Convert the difference between two process.hrtime.bigint() readings into a
 * convert-hrtime result with rounded fields for display.
 *
 * @param {bigint|number|string} startTime - earlier reading (BigInt-coercible)
 * @param {bigint} [endTime] - later reading; defaults to "now"
 * @returns {object} convert-hrtime object with `seconds` and `milliseconds`
 *   rounded; `milliseconds` is forced to at least 1 for any non-zero duration
 */
function getTimeDiff(startTime, endTime = process.hrtime.bigint()) {
    const diff = BigInt(endTime) - BigInt(startTime)
    let executionTime = convertHrtime(diff)
    executionTime.seconds = Math.round(executionTime.seconds)
    // NOTE(review): convert-hrtime's `milliseconds` is the total duration in
    // ms, so dividing by 1000 yields seconds again — the "seconds.milliseconds"
    // display this feeds may have intended `milliseconds % 1000`; confirm.
    executionTime.milliseconds = Math.round(executionTime.milliseconds / 1000)
    // never display 0ms for a duration that actually took time
    if (executionTime.milliseconds == 0 && executionTime.nanoseconds > 0) executionTime.milliseconds = 1
    return executionTime
}
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
// Metadata definition for the Salesforce Workflow type. Consumed by the Split
// and Combine classes (via config.metadataDefinition) to decide which XML
// sections become directories of part files, how array sections are sorted,
// and in what order keys are emitted.
export const metadataDefinition = {
    // Salesforce reference documentation for the Workflow metadata type
    metaUrl: 'https://developer.salesforce.com/docs/atlas.en-us.api_meta.meta/api_meta/meta_workflow.htm',
    // file suffix: *.workflow-meta.xml
    filetype: 'workflow',
    // XML root element name
    root: 'Workflow',
    // sections stored in the single main.<format> part file;
    // '$' is xml2js's attribute bag (holds the root xmlns)
    main: [
        '$',
    ],
    // sections stored as one standalone part file each (none for Workflow)
    singleFiles: [
    ],
    // repeating sections: each array element becomes its own part file
    // inside a directory of the same name
    directories: [
        'alerts',
        'fieldUpdates',
        'flowActions',
        'knowledgePublishes',
        'outboundMessages',
        'rules',
        'tasks',
    ],
    // property used to sort each array section and to name its part files;
    // keys listed here are also kept as arrays during XML→JSON conversion
    sortKeys: {
        'alerts': 'fullName',
        'fieldUpdates': 'fullName',
        'flowActions': 'label',
        'knowledgePublishes': 'label',
        'outboundMessages': 'fullName',
        'rules': 'fullName',
        'tasks': 'fullName',
        'recipients': 'type',
        'flowInputs': 'name',
        'criteriaItems': 'field',
        'actions': 'name',
        'workflowTimeTriggers': 'offsetFromField',
    },
    // key emission order used when splitting (keySort); unlisted keys go last
    keyOrder: {
        'alerts': ['fullName', 'description', 'template', 'protected', 'senderType', 'senderAddress', 'ccEmails', 'recipients'],
        'fieldUpdates': ['fullName', 'name', 'description', 'field', 'notifyAssignee', 'protected', 'reevaluateOnChange', 'targetObject', 'operation', 'formula', 'literalValue', 'lookupValueType', 'lookupValue'],
        'flowActions': ['fullName', 'label', 'flow', 'description', 'language', 'protected', 'flowInputs'],
        'knowledgePublishes': ['label', 'description', 'action', 'language', 'protected'],
        'outboundMessages': ['fullName', 'name', 'description', 'endpointUrl', 'apiVersion', 'integrationUser', 'includeSessionId', 'protected', 'useDeadLetterQueue', 'fields'],
        'rules': ['fullName', 'description', 'triggerType', 'active', 'booleanFilter', 'formula', 'criteriaItems', 'actions', 'workflowTimeTriggers'],
        'tasks': ['fullName'],
        'recipients': ['type', 'field', 'recipient'],
        'flowInputs': ['name', 'value'],
        'criteriaItems': ['field', 'operation', 'value', 'valueField'],
        'actions': ['name', 'type'],
        'workflowTimeTriggers': ['offsetFromField', 'timeLength', 'workflowTimeTriggerUnit', 'actions'],
    },
    // key order used when recombining (arrangeKeys); keys listed here come
    // first, everything else falls back to alphabetical
    xmlOrder: {
        'alerts': ['fullName'],
        'fieldUpdates': ['fullName'],
        'flowActions': ['fullName'],
        'rules': ['fullName'],
        'outboundMessages': ['fullName'],
        'tasks': ['fullName'],
    }
}
|
|
@@ -0,0 +1,278 @@
|
|
|
1
|
+
'use strict'
|
|
2
|
+
|
|
3
|
+
import path from 'path'
|
|
4
|
+
import fs from 'fs'
|
|
5
|
+
import os from 'os'
|
|
6
|
+
import { readFile } from 'fs'
|
|
7
|
+
import { Parser } from 'xml2js'
|
|
8
|
+
import logUpdate from 'log-update'
|
|
9
|
+
import chalk from 'chalk'
|
|
10
|
+
import convertHrtime from 'convert-hrtime'
|
|
11
|
+
import cliSpinners from 'cli-spinners'
|
|
12
|
+
import * as fileUtils from '../fileUtils.js'
|
|
13
|
+
|
|
14
|
+
const spinner = cliSpinners['dots']
|
|
15
|
+
|
|
16
|
+
/**
 * Splits a single `<name>.<type>-meta.xml` file into per-section part files.
 *
 * Parses the XML with xml2js, normalizes the JSON (collapsing single-element
 * arrays, sorting sections, ordering keys), then writes one part file per
 * array element into `targetDir/<shortName>/<section>/`, plus a
 * `main.<format>` file. Relies on app-wide state: `global.processed`,
 * `global.statusLevel`, `global.format`, `global.logger`.
 */
export class Split {
    // metadata file type (e.g. 'workflow'), from metadataDefinition.filetype
    #type = undefined
    // XML root element name (e.g. 'Workflow'), from metadataDefinition.root
    #root = undefined
    // NOTE(review): never assigned or read in this file — possibly vestigial
    #xmlns = undefined
    // input file name: fullName is the file name, shortName strips the suffix
    #fileName = {
        fullName: undefined,
        shortName: undefined,
    }
    // parsed + normalized JSON tree of the whole metadata file
    #json = undefined
    // accumulated warnings, appended to the spinner output via [%3]
    #errorMessage = ''
    // current spinner animation frame index
    #index = 0
    // process.hrtime.bigint() reading taken when processJSON starts
    #startTime = 0
    // template string with [%1]..[%4] placeholders, filled on each spinner tick
    #spinnerMessage = ''

    /**
     * @param {object} config
     * @param {object} config.metadataDefinition - per-type definition (see definition.js)
     * @param {string} config.sourceDir - source metadata directory
     * @param {string} config.targetDir - directory to write part files into
     * @param {string} config.metaFilePath - path of the XML file to split
     * @param {number} config.sequence - display sequence number for progress output
     */
    constructor(config) {
        this.metadataDefinition = config.metadataDefinition
        this.sourceDir = config.sourceDir
        this.targetDir = config.targetDir
        this.metaFilePath = config.metaFilePath
        this.sequence = config.sequence
    }

    get metadataDefinition() {
        return this._metadataDefinition
    }

    // Setter also caches the file type and XML root from the definition.
    set metadataDefinition(definition) {
        this._metadataDefinition = definition
        this.#type = definition.filetype
        this.#root = definition.root
    }

    get metaFilePath() {
        return this._metaFilePath
    }

    // Setter validates the path and derives the file's full/short names.
    // NOTE(review): throws a plain string, not an Error — callers catching
    // Error instances will miss it; confirm before relying on catch typing.
    set metaFilePath(value) {
        value = value.trim()
        if (value === '') {
            throw 'The file path cannot be empty'
        }
        this._metaFilePath = value
        this.#fileName.fullName = fileUtils.fileInfo(value).filename
        this.#fileName.shortName = fileUtils.fileInfo(value).filename.replace(`.${this.#type}-meta.xml`, '')
    }

    /**
     * Split the metadata file into part files.
     * @returns {Promise<boolean>} resolves true on success, false when the
     *   XML root is invalid; exits the process on missing input or parse failure.
     */
    split() {
        const that = this
        return new Promise((resolve, reject) => {
            if (!that.#fileName || !that.sourceDir || !that.targetDir || !that.metaFilePath) {
                global.logger.error('Invalid information passed to split')
                process.exit(1)
            }
            if (!fileUtils.fileExists(that.metaFilePath)) {
                global.logger.error(`file not found: ${that.metaFilePath}`)
                process.exit(1)
            }

            // part files go under targetDir/<shortName>/
            that.targetDir = path.join(that.targetDir, that.#fileName.shortName)
            let parser = new Parser()
            // Adapt the callback-style readFile/parseString pair to a Promise.
            // NOTE(review): the readFile `err` argument is ignored — a read
            // failure surfaces indirectly as a parse failure below; confirm
            // that is acceptable.
            const getJSON = new Promise((resolve, reject) => {
                readFile(that.metaFilePath, function (err, data) {
                    parser.parseString(data, function (err, result) {
                        if (result) {
                            resolve(result)
                        } else {
                            global.logger.error(`error converting xml to json: ${that.metaFilePath}`)
                            process.exit(1)
                        }
                    })
                })
            })
            getJSON.then((result) => {
                // Upgrade the namespace and, as a side effect, verify the
                // expected root element exists (a TypeError here means it doesn't).
                try {
                    result[that.#root]['$'].xmlns = result[that.#root]['$'].xmlns.replace('http:', 'https:')
                } catch (error) {
                    global.logger.error(`${that.#fileName.fullName} has an invalid XML root`)
                    resolve(false)
                    return
                }

                // modify the json to remove unwanted arrays
                that.#json = transformJSON(that, result, that.#root)
                fileUtils.deleteDirectory(that.targetDir, true) // recursive delete existing directory
                fileUtils.createDirectory(that.targetDir) // create directory

                try {
                    processJSON(that, that.#json[that.#root], that.targetDir)
                    completeFile(that)
                } catch (error) {
                    console.log(that.#fileName.shortName)
                    global.logger.error(error)
                }

                resolve(true)
            })
        })

        // The helpers below are function declarations hoisted above the
        // `return`, so they are available inside the Promise callbacks.

        // Walk the top-level sections, writing directory sections to disk and
        // finishing with the main part file.
        function processJSON(that, json, baseDir) {
            that.#startTime = process.hrtime.bigint()
            that.#spinnerMessage = `[%1] of ${global.processed.total} - Workflow: [%4]${chalk.yellowBright(that.#fileName.shortName)}[%2][%3]`

            let targetDir = baseDir
            Object.keys(json).forEach(key => {
                that.sequence = global.processed.current
                logUpdate(that.#spinnerMessage
                    .replace('[%1]', that.sequence.toString().padStart(global.processed.total.toString().length, ' '))
                    .replace('[%2]', `\n${chalk.magentaBright(nextFrame(that))} ${key}`)
                    .replace('[%3]', `${that.#errorMessage}`)
                    .replace('[%4]', `${global.statusLevel.working} `)
                )

                if (that.metadataDefinition.directories.includes(key)) {
                    targetDir = path.join(baseDir, key)
                    fileUtils.createDirectory(targetDir) // create directory
                    if (Array.isArray(json[key])) {
                        processDirectory(that, json[key], key, targetDir)
                    }
                } else if (that.metadataDefinition.singleFiles.includes(key)) {
                    console.log(key, 'single file')
                } else if (that.metadataDefinition.main.includes(key)) {
                    // Main will get processed in its own call
                } else {
                    console.log(key, 'unknown')
                }
            })

            Main(that)
        }

        // Write the main.<format> part file containing the name plus every
        // section listed in metadataDefinition.main.
        function Main(that) {
            let fileName = path.join(that.targetDir, `main.${global.format}`)
            let mainInfo = {}
            mainInfo.name = that.#fileName.shortName
            that.metadataDefinition.main.forEach(key => {
                that.sequence = global.processed.current
                logUpdate(that.#spinnerMessage
                    .replace('[%1]', that.sequence.toString().padStart(global.processed.total.toString().length, ' '))
                    .replace('[%2]', `\n${chalk.magentaBright(nextFrame(that))} ${key}`)
                    .replace('[%3]', `${that.#errorMessage}`)
                    .replace('[%4]', `${global.statusLevel.working} `)
                )

                if (that.#json[that.#root][key] !== undefined) {
                    mainInfo[key] = that.#json[that.#root][key]
                }
            })

            fileUtils.savePartFile(mainInfo, fileName, global.format)
        }

        // Advance and return the next spinner animation frame.
        function nextFrame(that) {
            return spinner.frames[that.#index = ++that.#index % spinner.frames.length]
        }

        // Render the final spinner line with duration and success/fail icon.
        function completeFile(that) {
            let executionTime = getTimeDiff(BigInt(that.#startTime))
            let durationMessage = `${executionTime.seconds}.${executionTime.milliseconds}s`
            let stateIcon = (that.#errorMessage == '') ? global.statusLevel.success : global.statusLevel.fail
            logUpdate(that.#spinnerMessage
                .replace('[%1]', that.sequence.toString().padStart(global.processed.total.toString().length, ' '))
                .replace('[%2]', `. Processed in ${durationMessage}.`)
                .replace('[%3]', `${that.#errorMessage}`)
                .replace('[%4]', `${stateIcon} `)
            )
            logUpdate.done()
        }
    }
}
|
|
185
|
+
|
|
186
|
+
/**
 * Write each element of a repeating metadata section to its own part file
 * under baseDir. Files are named after the element's sort-key value with the
 * configured global format extension.
 *
 * @param {object} that - Split instance (reads metadataDefinition.sortKeys)
 * @param {Array<object>} json - array of section elements to write
 * @param {string} key - section name, used to look up its sort key
 * @param {string} baseDir - directory the part files are written into
 */
function processDirectory(that, json, key, baseDir) {
    const sortKey = that.metadataDefinition.sortKeys[key]
    for (const arrItem of json) {
        const fileName = path.join(baseDir, `${arrItem[sortKey]}.${global.format}`)
        fileUtils.savePartFile(arrItem, fileName, global.format)
    }
}
|
|
193
|
+
|
|
194
|
+
/**
 * Normalize the raw xml2js parse result: collapse single-element leaf arrays
 * to scalars (except for the repeating sections that must stay arrays), then
 * sort each top-level section and arrange its member keys.
 *
 * Improvements: removed a try/catch that only rethrew (dead code), and
 * hoisted the sortKeys membership test into a Set instead of rebuilding
 * `Object.keys(...).includes` on every replacer invocation.
 *
 * @param {object} that - Split instance (reads metadataDefinition.sortKeys)
 * @param {object} result - raw xml2js parse output (replaced, not mutated)
 * @param {string} rootTag - XML root element name (e.g. 'Workflow')
 * @returns {object} the normalized JSON tree
 */
function transformJSON(that, result, rootTag) {
    // Sections listed in sortKeys repeat and must remain arrays.
    const arrayKeys = new Set(Object.keys(that.metadataDefinition.sortKeys))

    // Round-trip through JSON.stringify so the replacer visits every value:
    // xml2json collapses one-element arrays and 'true'/'false' strings.
    const jsonString = JSON.stringify(result, (name, value) =>
        arrayKeys.has(name) ? value : xml2json(value)
    )
    result = JSON.parse(jsonString)

    // Sort each top-level section and order its member keys per keyOrder.
    Object.keys(result[rootTag]).forEach(key => {
        result[rootTag][key] = keySort(that, key, result[rootTag][key])
    })

    return result
}
|
|
214
|
+
|
|
215
|
+
/**
 * Recursively normalize a repeating metadata section: sort the array of
 * elements by the section's sortKey, rebuild each element with its keys in
 * the configured keyOrder, and recurse into nested objects/arrays.
 * Values that are not arrays (or have no sortKey configured) pass through.
 *
 * Fix: the original key comparator returned 1 whenever `a` was missing from
 * keyOrder — regardless of `b` — so a key that WAS listed could sort after an
 * unlisted one, and two unlisted keys compared inconsistently. Listed keys now
 * always precede unlisted ones (matching arrangeKeys in combine.js), and
 * unlisted keys fall back to a stable alphabetical order.
 *
 * @param {object} that - Split instance (reads metadataDefinition.keyOrder/sortKeys)
 * @param {string} key - section name for the keyOrder/sortKeys lookup
 * @param {object|Array} json - fragment to normalize (array is mutated in place)
 * @returns {object|Array} the normalized fragment
 */
function keySort(that, key, json) {
    const keyOrder = that.metadataDefinition.keyOrder[key]
    const sortKey = that.metadataDefinition.sortKeys[key]

    if (Array.isArray(json) && sortKey !== undefined) {
        // sort elements by their sortKey property
        json.sort((a, b) => {
            if (a[sortKey] < b[sortKey]) return -1
            if (a[sortKey] > b[sortKey]) return 1
            return 0
        })

        // rebuild each element with its keys in keyOrder; unlisted keys go
        // last, alphabetically
        json.forEach(function (part, index) {
            this[index] = Object.keys(this[index])
                .sort((a, b) => {
                    const aIndex = keyOrder.indexOf(a)
                    const bIndex = keyOrder.indexOf(b)
                    if (aIndex === -1 && bIndex === -1) {
                        return a < b ? -1 : (a > b ? 1 : 0)
                    }
                    if (aIndex === -1) return 1
                    if (bIndex === -1) return -1
                    return aIndex - bIndex
                })
                .reduce((accumulator, jsonKey) => {
                    accumulator[jsonKey] = this[index][jsonKey]
                    return accumulator
                }, {})
        }, json)

        // recurse into nested objects and arrays
        json.forEach(arrayItem => {
            Object.keys(arrayItem).forEach(jsonKey => {
                if (typeof arrayItem[jsonKey] == 'object') {
                    arrayItem[jsonKey] = keySort(that, jsonKey, arrayItem[jsonKey])
                }
            })
        })

    }

    return json
}
|
|
259
|
+
|
|
260
|
+
/**
 * Collapse an xml2js leaf value: a one-element array becomes its single entry
 * as a trimmed string, and the literal strings 'true'/'false' become real
 * booleans. Every other value is returned unchanged.
 *
 * @param {*} currentValue - value visited by the JSON.stringify replacer
 * @returns {*} the collapsed scalar, a boolean, or the original value
 */
function xml2json(currentValue) {
    const isSingleton = Array.isArray(currentValue) && currentValue.length == 1
    if (isSingleton) {
        currentValue = currentValue[0].toString().trim()
    }
    // loose comparison kept deliberately, matching the original semantics
    if (currentValue == 'true') return true
    if (currentValue == 'false') return false
    return currentValue
}
|
|
270
|
+
|
|
271
|
+
/**
 * Measure the elapsed time between two process.hrtime.bigint() readings.
 * Returns the convert-hrtime result with `seconds` and `milliseconds`
 * rounded, forcing at least 1 in the milliseconds field for any non-zero
 * duration so it never displays as 0.
 *
 * @param {bigint|number|string} startTime - earlier reading (BigInt-coercible)
 * @param {bigint} [endTime] - later reading; defaults to "now"
 * @returns {object} convert-hrtime object with rounded display fields
 */
function getTimeDiff(startTime, endTime = process.hrtime.bigint()) {
    const elapsed = convertHrtime(BigInt(endTime) - BigInt(startTime))
    elapsed.seconds = Math.round(elapsed.seconds)
    elapsed.milliseconds = Math.round(elapsed.milliseconds / 1000)
    if (elapsed.milliseconds == 0 && elapsed.nanoseconds > 0) {
        elapsed.milliseconds = 1
    }
    return elapsed
}
|
package/nodemon.json
ADDED