@transcend-io/cli 8.0.1 → 8.0.2

This diff shows the contents of the publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
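
The same changeset can be reproduced locally with npm's built-in diff command (available since npm 7); the invocation below is a minimal sketch assuming only the package name and versions shown above:

    npm diff --diff=@transcend-io/cli@8.0.1 --diff=@transcend-io/cli@8.0.2

As the file list below suggests, much of the churn is mechanical: many of the 143 entries are renames of what look like content-hashed chunk-*.cjs and impl-*.cjs bundle outputs under dist/, each with only a small line delta.
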
Files changed (143)
  1. package/dist/bin/bash-complete.cjs +1 -1
  2. package/dist/bin/cli.cjs +1 -1
  3. package/dist/bin/deprecated-command.cjs +2 -2
  4. package/dist/{chunk-WRJVOXRS.cjs → chunk-3PX3HVEO.cjs} +2 -2
  5. package/dist/{chunk-WRJVOXRS.cjs.map → chunk-3PX3HVEO.cjs.map} +1 -1
  6. package/dist/{chunk-DBYKJZEA.cjs → chunk-44J2TVZU.cjs} +2 -2
  7. package/dist/{chunk-DBYKJZEA.cjs.map → chunk-44J2TVZU.cjs.map} +1 -1
  8. package/dist/chunk-7UL54E7X.cjs +2 -0
  9. package/dist/{chunk-MBB6OD4E.cjs.map → chunk-7UL54E7X.cjs.map} +1 -1
  10. package/dist/chunk-A4OQQVQT.cjs +3 -0
  11. package/dist/chunk-A4OQQVQT.cjs.map +1 -0
  12. package/dist/chunk-BPX6MSMC.cjs +2 -0
  13. package/dist/chunk-BPX6MSMC.cjs.map +1 -0
  14. package/dist/{chunk-7NYZMFPT.cjs → chunk-FNBADTTI.cjs} +19 -19
  15. package/dist/{chunk-7NYZMFPT.cjs.map → chunk-FNBADTTI.cjs.map} +1 -1
  16. package/dist/{chunk-56TB3NTQ.cjs → chunk-N7QF44VW.cjs} +2 -2
  17. package/dist/{chunk-56TB3NTQ.cjs.map → chunk-N7QF44VW.cjs.map} +1 -1
  18. package/dist/chunk-PQTI6C4Q.cjs +12 -0
  19. package/dist/chunk-PQTI6C4Q.cjs.map +1 -0
  20. package/dist/chunk-UONUI4AB.cjs +2920 -0
  21. package/dist/chunk-UONUI4AB.cjs.map +1 -0
  22. package/dist/chunk-W3S3ULXN.cjs +2 -0
  23. package/dist/chunk-W3S3ULXN.cjs.map +1 -0
  24. package/dist/{chunk-KS2REJYA.cjs → chunk-XARKMCNP.cjs} +4 -4
  25. package/dist/chunk-XARKMCNP.cjs.map +1 -0
  26. package/dist/{chunk-6IHSPKMP.cjs → chunk-XPKFB4IL.cjs} +2 -2
  27. package/dist/{chunk-6IHSPKMP.cjs.map → chunk-XPKFB4IL.cjs.map} +1 -1
  28. package/dist/{chunk-WLUTU3MY.cjs → chunk-YKZKPWDF.cjs} +2 -2
  29. package/dist/{chunk-WLUTU3MY.cjs.map → chunk-YKZKPWDF.cjs.map} +1 -1
  30. package/dist/chunk-YMXQXAUZ.cjs +2 -0
  31. package/dist/{chunk-PSRKLYGK.cjs.map → chunk-YMXQXAUZ.cjs.map} +1 -1
  32. package/dist/{chunk-PNSZQIYV.cjs → chunk-YP2PMUM5.cjs} +2 -2
  33. package/dist/{chunk-PNSZQIYV.cjs.map → chunk-YP2PMUM5.cjs.map} +1 -1
  34. package/dist/{impl-57LDSC2M.cjs → impl-2BXBJZII.cjs} +2 -2
  35. package/dist/{impl-57LDSC2M.cjs.map → impl-2BXBJZII.cjs.map} +1 -1
  36. package/dist/{impl-RGYCC7MP.cjs → impl-2SHN3CZX.cjs} +2 -2
  37. package/dist/{impl-RGYCC7MP.cjs.map → impl-2SHN3CZX.cjs.map} +1 -1
  38. package/dist/{impl-5KEI5AJG.cjs → impl-334G4VTL.cjs} +2 -2
  39. package/dist/{impl-5KEI5AJG.cjs.map → impl-334G4VTL.cjs.map} +1 -1
  40. package/dist/{impl-RXVSZRCO.cjs → impl-37NGCBEQ.cjs} +2 -2
  41. package/dist/{impl-RXVSZRCO.cjs.map → impl-37NGCBEQ.cjs.map} +1 -1
  42. package/dist/{impl-AHLMRZNM.cjs → impl-5IS3UVQQ.cjs} +2 -2
  43. package/dist/{impl-AHLMRZNM.cjs.map → impl-5IS3UVQQ.cjs.map} +1 -1
  44. package/dist/{impl-UMRC5P5T.cjs → impl-5IZQDLG7.cjs} +2 -2
  45. package/dist/{impl-UMRC5P5T.cjs.map → impl-5IZQDLG7.cjs.map} +1 -1
  46. package/dist/impl-7MIPQI3E.cjs +2 -0
  47. package/dist/impl-7MIPQI3E.cjs.map +1 -0
  48. package/dist/impl-AJMAFFJG.cjs +2 -0
  49. package/dist/{impl-FP3D5M5B.cjs.map → impl-AJMAFFJG.cjs.map} +1 -1
  50. package/dist/{impl-XW7JECCZ.cjs → impl-BL64FHKY.cjs} +2 -2
  51. package/dist/{impl-XW7JECCZ.cjs.map → impl-BL64FHKY.cjs.map} +1 -1
  52. package/dist/{impl-QJJNBKJR.cjs → impl-BXNGIHN2.cjs} +2 -2
  53. package/dist/{impl-QJJNBKJR.cjs.map → impl-BXNGIHN2.cjs.map} +1 -1
  54. package/dist/impl-DDHHIRXW.cjs +12 -0
  55. package/dist/impl-DDHHIRXW.cjs.map +1 -0
  56. package/dist/impl-DFDEITPV.cjs +7 -0
  57. package/dist/impl-DFDEITPV.cjs.map +1 -0
  58. package/dist/{impl-TJP4ZLHS.cjs → impl-DZSW2V2J.cjs} +2 -2
  59. package/dist/{impl-TJP4ZLHS.cjs.map → impl-DZSW2V2J.cjs.map} +1 -1
  60. package/dist/impl-FC7ICRDM.cjs +2 -0
  61. package/dist/{impl-YUYWOPRG.cjs.map → impl-FC7ICRDM.cjs.map} +1 -1
  62. package/dist/{impl-T4BXYX2K.cjs → impl-G2MAA4QD.cjs} +2 -2
  63. package/dist/{impl-T4BXYX2K.cjs.map → impl-G2MAA4QD.cjs.map} +1 -1
  64. package/dist/{impl-GP2DMI4O.cjs → impl-GIZXIRD5.cjs} +2 -2
  65. package/dist/{impl-GP2DMI4O.cjs.map → impl-GIZXIRD5.cjs.map} +1 -1
  66. package/dist/{impl-UG3FWG22.cjs → impl-H3RZXUMP.cjs} +2 -2
  67. package/dist/{impl-UG3FWG22.cjs.map → impl-H3RZXUMP.cjs.map} +1 -1
  68. package/dist/impl-HG3PK5DG.cjs +2 -0
  69. package/dist/{impl-HVKSG236.cjs.map → impl-HG3PK5DG.cjs.map} +1 -1
  70. package/dist/impl-IAZCCRHG.cjs +2 -0
  71. package/dist/{impl-GPTJLZHD.cjs.map → impl-IAZCCRHG.cjs.map} +1 -1
  72. package/dist/{impl-LZUXHWDU.cjs → impl-K3VBUCUB.cjs} +2 -2
  73. package/dist/{impl-LZUXHWDU.cjs.map → impl-K3VBUCUB.cjs.map} +1 -1
  74. package/dist/impl-KMLDAWB5.cjs +6 -0
  75. package/dist/impl-KMLDAWB5.cjs.map +1 -0
  76. package/dist/impl-LDJGDZNJ.cjs +2 -0
  77. package/dist/impl-LDJGDZNJ.cjs.map +1 -0
  78. package/dist/impl-LLCE3XUF.cjs +6 -0
  79. package/dist/impl-LLCE3XUF.cjs.map +1 -0
  80. package/dist/{impl-G24XCV65.cjs → impl-MVGWE3OY.cjs} +2 -2
  81. package/dist/{impl-G24XCV65.cjs.map → impl-MVGWE3OY.cjs.map} +1 -1
  82. package/dist/{impl-PAXESZFC.cjs → impl-N56GZUUE.cjs} +2 -2
  83. package/dist/{impl-PAXESZFC.cjs.map → impl-N56GZUUE.cjs.map} +1 -1
  84. package/dist/{impl-HQLA253Z.cjs → impl-NRVKZ6QP.cjs} +2 -2
  85. package/dist/{impl-HQLA253Z.cjs.map → impl-NRVKZ6QP.cjs.map} +1 -1
  86. package/dist/{impl-L2XUXKHL.cjs → impl-O7M62JQO.cjs} +2 -2
  87. package/dist/{impl-L2XUXKHL.cjs.map → impl-O7M62JQO.cjs.map} +1 -1
  88. package/dist/{impl-SW44TCHM.cjs → impl-OXCKKL3L.cjs} +2 -2
  89. package/dist/{impl-SW44TCHM.cjs.map → impl-OXCKKL3L.cjs.map} +1 -1
  90. package/dist/{impl-JUUYZHGL.cjs → impl-OYQLTGBY.cjs} +2 -2
  91. package/dist/{impl-JUUYZHGL.cjs.map → impl-OYQLTGBY.cjs.map} +1 -1
  92. package/dist/{impl-TLDBJN7P.cjs → impl-PNDNQD25.cjs} +2 -2
  93. package/dist/{impl-TLDBJN7P.cjs.map → impl-PNDNQD25.cjs.map} +1 -1
  94. package/dist/{impl-4VCIYJCG.cjs → impl-Q2JAFPV5.cjs} +2 -2
  95. package/dist/{impl-4VCIYJCG.cjs.map → impl-Q2JAFPV5.cjs.map} +1 -1
  96. package/dist/{impl-W6OLYSNF.cjs → impl-Q6LOHZB6.cjs} +2 -2
  97. package/dist/{impl-W6OLYSNF.cjs.map → impl-Q6LOHZB6.cjs.map} +1 -1
  98. package/dist/{impl-AHNQLAH5.cjs → impl-QS77O4PW.cjs} +2 -2
  99. package/dist/{impl-AHNQLAH5.cjs.map → impl-QS77O4PW.cjs.map} +1 -1
  100. package/dist/{impl-MLEPL5MZ.cjs → impl-TC6DE6BU.cjs} +5 -5
  101. package/dist/impl-TC6DE6BU.cjs.map +1 -0
  102. package/dist/{impl-RPRRJOI3.cjs → impl-W6WGF5YD.cjs} +2 -2
  103. package/dist/{impl-RPRRJOI3.cjs.map → impl-W6WGF5YD.cjs.map} +1 -1
  104. package/dist/{impl-C53IQ5HC.cjs → impl-WHZWURMO.cjs} +2 -2
  105. package/dist/{impl-C53IQ5HC.cjs.map → impl-WHZWURMO.cjs.map} +1 -1
  106. package/dist/{impl-WSRMGN2H.cjs → impl-WWQBCKZV.cjs} +2 -2
  107. package/dist/{impl-WSRMGN2H.cjs.map → impl-WWQBCKZV.cjs.map} +1 -1
  108. package/dist/{impl-IKTY3PZ3.cjs → impl-XWRII7ET.cjs} +2 -2
  109. package/dist/{impl-IKTY3PZ3.cjs.map → impl-XWRII7ET.cjs.map} +1 -1
  110. package/dist/index.cjs +3 -3
  111. package/dist/index.cjs.map +1 -1
  112. package/dist/index.d.cts +1 -1
  113. package/package.json +4 -2
  114. package/dist/chunk-7G4SCKPN.cjs +0 -2
  115. package/dist/chunk-7G4SCKPN.cjs.map +0 -1
  116. package/dist/chunk-AMO6I4AO.cjs +0 -2
  117. package/dist/chunk-AMO6I4AO.cjs.map +0 -1
  118. package/dist/chunk-JMBVOY56.cjs +0 -3
  119. package/dist/chunk-JMBVOY56.cjs.map +0 -1
  120. package/dist/chunk-KS2REJYA.cjs.map +0 -1
  121. package/dist/chunk-MBB6OD4E.cjs +0 -2
  122. package/dist/chunk-PSRKLYGK.cjs +0 -2
  123. package/dist/chunk-PTQHBKJU.cjs +0 -12
  124. package/dist/chunk-PTQHBKJU.cjs.map +0 -1
  125. package/dist/chunk-UPUHHIXZ.cjs +0 -2920
  126. package/dist/chunk-UPUHHIXZ.cjs.map +0 -1
  127. package/dist/impl-5BNP4JYG.cjs +0 -2
  128. package/dist/impl-5BNP4JYG.cjs.map +0 -1
  129. package/dist/impl-5MGEJPTO.cjs +0 -12
  130. package/dist/impl-5MGEJPTO.cjs.map +0 -1
  131. package/dist/impl-DSRH7IN2.cjs +0 -2
  132. package/dist/impl-DSRH7IN2.cjs.map +0 -1
  133. package/dist/impl-FP3D5M5B.cjs +0 -2
  134. package/dist/impl-GPTJLZHD.cjs +0 -2
  135. package/dist/impl-HVKSG236.cjs +0 -2
  136. package/dist/impl-IVDJEVS4.cjs +0 -6
  137. package/dist/impl-IVDJEVS4.cjs.map +0 -1
  138. package/dist/impl-MLEPL5MZ.cjs.map +0 -1
  139. package/dist/impl-QW6NAOEK.cjs +0 -7
  140. package/dist/impl-QW6NAOEK.cjs.map +0 -1
  141. package/dist/impl-VF3LT3IJ.cjs +0 -6
  142. package/dist/impl-VF3LT3IJ.cjs.map +0 -1
  143. package/dist/impl-YUYWOPRG.cjs +0 -2
package/dist/impl-DSRH7IN2.cjs
@@ -1,2 +0,0 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkPSRKLYGKcjs = require('./chunk-PSRKLYGK.cjs');var _chunkLR3CPNDMcjs = require('./chunk-LR3CPNDM.cjs');require('./chunk-MBB6OD4E.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkUPUHHIXZcjs = require('./chunk-UPUHHIXZ.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');require('./chunk-6IHSPKMP.cjs');var _chunkDBYKJZEAcjs = require('./chunk-DBYKJZEA.cjs');require('./chunk-Q7I37FJV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function Z({file:l,fileTarget:f,transcendUrl:p,auth:g,sombraAuth:v,cronDataSiloId:N,targetDataSiloId:R,actions:S,skipRequestCount:h,pageLimit:s,chunkSize:r}){h&&_chunkZUNVPK23cjs.a.info(_colors2.default.yellow("Skipping request count as requested. This may help speed up the call.")),(Number.isNaN(r)||r<=0||r%s!==0)&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Invalid chunk size: "${r}". Must be a positive integer that is a multiple of ${s}.`)),this.process.exit(1)),_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);let j=_chunkUPUHHIXZcjs.Wd.call(void 0, p,g),{baseName:O,extension:A}=_chunkLR3CPNDMcjs.d.call(void 0, l),{baseName:D,extension:k}=_chunkLR3CPNDMcjs.d.call(void 0, f),$=0,b=0,d=0;await _chunkPSRKLYGKcjs.g.call(void 0, {dataSiloId:N,auth:g,sombraAuth:v,actions:S,apiPageSize:s,savePageSize:r,onSave:async n=>{$+=n.length;let E=n.map(t=>t.requestId),G=_chunkDBYKJZEAcjs.j.call(void 0, E),M=_chunkDBYKJZEAcjs.b.call(void 0, G,s),y=await _chunkUPUHHIXZcjs.b.call(void 0, M,async t=>(_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Fetching target identifiers for ${t.length} requests`)),(await _chunkUPUHHIXZcjs.ve.call(void 0, j,s*2,{requestIds:t,dataSiloIds:[R]})).map(({fileName:I,remoteId:C})=>{if(!C)throw new Error(`Failed to find remoteId for ${I}`);return{RecordId:C,Object:_optionalChain([I, 'access', _ => _.replace, 'call', _2 => _2(".json",""), 'access', _3 => _3.split, 'call', _4 => _4("/"), 'access', _5 => _5.pop, 'call', _6 => _6(), 'optionalAccess', _7 => _7.replace, 'call', _8 => _8(" Information","")]),Comment:"Customer data deletion request submitted via transcend.io"}})),{concurrency:1});b+=y.flat().length;let Q=_chunkDBYKJZEAcjs.j.call(void 0, n.map(t=>Object.keys(t)).flat()),T=`${O}-${d}${A}`,V=`${D}-${d}${k}`;_chunkLR3CPNDMcjs.c.call(void 0, T,n,Q),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully wrote ${n.length} identifiers to file "${l}"`));let o=y.flat(),z=_chunkDBYKJZEAcjs.j.call(void 0, o.map(t=>Object.keys(t)).flat());_chunkLR3CPNDMcjs.c.call(void 0, V,o,z),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully wrote ${o.length} identifiers to file "${f}"`)),_chunkZUNVPK23cjs.a.info(_colors2.default.blue(`Processed chunk of ${_chunkDBYKJZEAcjs.b.length} identifiers, found ${o.length} target 
identifiers`)),d+=1},transcendUrl:p,skipRequestCount:h}),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully wrote ${$} identifiers to file "${l}"`)),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully wrote ${b} identifiers to file "${f}"`))}exports.pullProfiles = Z;
- //# sourceMappingURL=impl-DSRH7IN2.cjs.map
package/dist/impl-DSRH7IN2.cjs.map
@@ -1 +0,0 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-DSRH7IN2.cjs","../src/commands/request/cron/pull-profiles/impl.ts"],"names":["pullProfiles","file","fileTarget","transcendUrl","auth","sombraAuth","cronDataSiloId","targetDataSiloId","actions","skipRequestCount","pageLimit","chunkSize","logger","colors","doneInputValidation","client","buildTranscendGraphQLClient","baseName","extension","parseFilePath","baseNameTarget","extensionTarget","allIdentifiersCount","allTargetIdentifiersCount","fileCount","pullChunkedCustomSiloOutstandingIdentifiers","chunkToSave","requestIds","d","uniqueRequestIds","uniq_default","chunkedRequestIds","chunk_default","results","map","fetchRequestFilesForRequest","fileName","remoteId"],"mappings":"AAAA,quBAAwC,wDAAgD,gCAA6B,wDAAyC,wDAAyD,wDAAyC,gCAA6B,wDAAgD,gCAA6B,gFCEvV,MA8BnB,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,cAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,OAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,SAAA,CAAAC,CAAAA,CACA,SAAA,CAAAC,CACF,CAAA,CACe,CACXF,CAAAA,EACFG,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,MAAA,CACL,uEACF,CACF,CAAA,CAAA,CAIA,MAAA,CAAO,KAAA,CAAMF,CAAS,CAAA,EACtBA,CAAAA,EAAa,CAAA,EACbA,CAAAA,CAAYD,CAAAA,GAAc,CAAA,CAAA,EAAA,CAE1BE,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,qBAAA,EAAwBF,CAAS,CAAA,oDAAA,EAAuDD,CAAS,CAAA,CAAA,CACnG,CACF,CAAA,CACA,IAAA,CAAK,OAAA,CAAQ,IAAA,CAAK,CAAC,CAAA,CAAA,CAGrBI,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAGrC,IAAMC,CAAAA,CAASC,kCAAAA,CAA4Bb,CAAcC,CAAI,CAAA,CACvD,CAAE,QAAA,CAAAa,CAAAA,CAAU,SAAA,CAAAC,CAAU,CAAA,CAAIC,iCAAAA,CAAkB,CAAA,CAC5C,CAAE,QAAA,CAAUC,CAAAA,CAAgB,SAAA,CAAWC,CAAgB,CAAA,CAC3DF,iCAAAA,CAAwB,CAAA,CAEtBG,CAAAA,CAAsB,CAAA,CACtBC,CAAAA,CAA4B,CAAA,CAC5BC,CAAAA,CAAY,CAAA,CAmFhB,MAAMC,iCAAAA,CACJ,UAAA,CAAYnB,CAAAA,CACZ,IAAA,CAAAF,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,OAAA,CAAAG,CAAAA,CACA,WAAA,CAAaE,CAAAA,CACb,YAAA,CAAcC,CAAAA,CACd,MAAA,CAxFa,MACbe,CAAAA,EACkB,CAElBJ,CAAAA,EAAuBI,CAAAA,CAAY,MAAA,CAGnC,IAAMC,CAAAA,CAAaD,CAAAA,CAAY,GAAA,CAAKE,CAAAA,EAAMA,CAAAA,CAAE,SAAmB,CAAA,CACzDC,CAAAA,CAAmBC,iCAAAA,CAAe,CAAA,CAGlCC,CAAAA,CAAoBC,iCAAAA,CAAMH,CAAkBnB,CAAS,CAAA,CACrDuB,CAAAA,CAAU,MAAMC,iCAAAA,CACpBH,CACA,MAAOJ,CAAAA,EAAAA,CACLf,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,OAAA,CACL,CAAA,gCAAA,EAAmCc,CAAAA,CAAW,MAAM,CAAA,SAAA,CACtD,CACF,CAAA,CAAA,CACgB,MAAMQ,kCAAAA,CACpBpB,CACAL,CAAAA,CAAY,CAAA,CACZ,CACE,UAAA,CAAAiB,CAAAA,CACA,WAAA,CAAa,CAACpB,CAAgB,CAChC,CACF,CAAA,CAAA,CACe,GAAA,CAAI,CAAC,CAAE,QAAA,CAAA6B,CAAAA,CAAU,QAAA,CAAAC,CAAS,CAAA,CAAA,EAAM,CAC7C,EAAA,CAAI,CAACA,CAAAA,CACH,MAAM,IAAI,KAAA,CAAM,CAAA,4BAAA,EAA+BD,CAAQ,CAAA,CAAA","file":"/home/runner/work/cli/cli/dist/impl-DSRH7IN2.cjs","sourcesContent":[null,"import type { RequestAction } from '@transcend-io/privacy-types';\nimport { logger } from '../../../../logger';\nimport colors from 'colors';\nimport { uniq, chunk } from 'lodash-es';\nimport { map } from '../../../../lib/bluebird-replace';\nimport {\n buildTranscendGraphQLClient,\n fetchRequestFilesForRequest,\n} from '../../../../lib/graphql';\nimport type { LocalContext } from '../../../../context';\nimport {\n parseFilePath,\n pullChunkedCustomSiloOutstandingIdentifiers,\n writeCsv,\n type CsvFormattedIdentifier,\n} from '../../../../lib/cron';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\n\nexport interface PullProfilesCommandFlags {\n file: string;\n fileTarget: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n cronDataSiloId: string;\n 
targetDataSiloId: string;\n actions: RequestAction[];\n skipRequestCount: boolean;\n pageLimit: number;\n chunkSize: number;\n}\n\nexport async function pullProfiles(\n this: LocalContext,\n {\n file,\n fileTarget,\n transcendUrl,\n auth,\n sombraAuth,\n cronDataSiloId,\n targetDataSiloId,\n actions,\n skipRequestCount,\n pageLimit,\n chunkSize,\n }: PullProfilesCommandFlags,\n): Promise<void> {\n if (skipRequestCount) {\n logger.info(\n colors.yellow(\n 'Skipping request count as requested. This may help speed up the call.',\n ),\n );\n }\n\n if (\n Number.isNaN(chunkSize) ||\n chunkSize <= 0 ||\n chunkSize % pageLimit !== 0\n ) {\n logger.error(\n colors.red(\n `Invalid chunk size: \"${chunkSize}\". Must be a positive integer that is a multiple of ${pageLimit}.`,\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n // Create GraphQL client to connect to Transcend backend\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const { baseName, extension } = parseFilePath(file);\n const { baseName: baseNameTarget, extension: extensionTarget } =\n parseFilePath(fileTarget);\n\n let allIdentifiersCount = 0;\n let allTargetIdentifiersCount = 0;\n let fileCount = 0;\n // Create onSave callback to handle chunked processing\n const onSave = async (\n chunkToSave: CsvFormattedIdentifier[],\n ): Promise<void> => {\n // Add to all identifiers\n allIdentifiersCount += chunkToSave.length;\n\n // Get unique request IDs from this chunk\n const requestIds = chunkToSave.map((d) => d.requestId as string);\n const uniqueRequestIds = uniq(requestIds);\n\n // Pull down target identifiers for this chunk\n const chunkedRequestIds = chunk(uniqueRequestIds, pageLimit);\n const results = await map(\n chunkedRequestIds,\n async (requestIds) => {\n logger.info(\n colors.magenta(\n `Fetching target identifiers for ${requestIds.length} requests`,\n ),\n );\n const results = await fetchRequestFilesForRequest(\n client,\n pageLimit * 2,\n {\n requestIds,\n dataSiloIds: [targetDataSiloId],\n },\n );\n return results.map(({ fileName, remoteId }) => {\n if (!remoteId) {\n throw new Error(`Failed to find remoteId for ${fileName}`);\n }\n return {\n RecordId: remoteId,\n Object: fileName\n .replace('.json', '')\n .split('/')\n .pop()\n ?.replace(' Information', ''),\n Comment:\n 'Customer data deletion request submitted via transcend.io',\n };\n });\n },\n // We are grabbing all the request files for the 'pageLimit' # of requests at a time\n {\n concurrency: 1,\n },\n );\n\n allTargetIdentifiersCount += results.flat().length;\n\n // Write the identifiers and target identifiers to CSV\n const headers = uniq(chunkToSave.map((d) => Object.keys(d)).flat());\n const numberedFileName = `${baseName}-${fileCount}${extension}`;\n const numberedFileNameTarget = `${baseNameTarget}-${fileCount}${extensionTarget}`;\n writeCsv(numberedFileName, chunkToSave, headers);\n logger.info(\n colors.green(\n `Successfully wrote ${chunkToSave.length} identifiers to file \"${file}\"`,\n ),\n );\n\n const targetIdentifiers = results.flat();\n const headers2 = uniq(targetIdentifiers.map((d) => Object.keys(d)).flat());\n writeCsv(numberedFileNameTarget, targetIdentifiers, headers2);\n logger.info(\n colors.green(\n `Successfully wrote ${targetIdentifiers.length} identifiers to file \"${fileTarget}\"`,\n ),\n );\n\n logger.info(\n colors.blue(\n `Processed chunk of ${chunk.length} identifiers, found ${targetIdentifiers.length} target identifiers`,\n ),\n );\n fileCount += 1;\n };\n\n // Pull down 
outstanding identifiers using the new chunked function\n await pullChunkedCustomSiloOutstandingIdentifiers({\n dataSiloId: cronDataSiloId,\n auth,\n sombraAuth,\n actions,\n apiPageSize: pageLimit,\n savePageSize: chunkSize,\n onSave,\n transcendUrl,\n skipRequestCount,\n });\n\n logger.info(\n colors.green(\n `Successfully wrote ${allIdentifiersCount} identifiers to file \"${file}\"`,\n ),\n );\n logger.info(\n colors.green(\n `Successfully wrote ${allTargetIdentifiersCount} identifiers to file \"${fileTarget}\"`,\n ),\n );\n}\n"]}
package/dist/impl-FP3D5M5B.cjs
@@ -1,2 +0,0 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true});require('./chunk-PSRKLYGK.cjs');var _chunkLR3CPNDMcjs = require('./chunk-LR3CPNDM.cjs');var _chunkPTQHBKJUcjs = require('./chunk-PTQHBKJU.cjs');require('./chunk-7G4SCKPN.cjs');require('./chunk-MBB6OD4E.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');require('./chunk-QEM6S2W7.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkUPUHHIXZcjs = require('./chunk-UPUHHIXZ.cjs');require('./chunk-ZUNVPK23.cjs');require('./chunk-6IHSPKMP.cjs');require('./chunk-DBYKJZEA.cjs');require('./chunk-Q7I37FJV.cjs');async function S({auth:m,partition:c,sombraAuth:p,file:f,transcendUrl:l,timestampBefore:e,timestampAfter:r,identifiers:n=[],concurrency:g}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);let u=await _chunkUPUHHIXZcjs.Xd.call(void 0, l,m,p),C=await _chunkPTQHBKJUcjs.j.call(void 0, u,{partition:c,filterBy:{...e?{timestampBefore:e.toISOString()}:{},...r?{timestampAfter:r.toISOString()}:{},...n.length>0?{identifiers:n}:{}},limit:g});_chunkLR3CPNDMcjs.c.call(void 0, f,C.map(t=>({...t,purposes:JSON.stringify(t.purposes),...t.purposes})))}exports.pullConsentPreferences = S;
- //# sourceMappingURL=impl-FP3D5M5B.cjs.map
package/dist/impl-GPTJLZHD.cjs
@@ -1,2 +0,0 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunk7G4SCKPNcjs = require('./chunk-7G4SCKPN.cjs');var _chunkMBB6OD4Ecjs = require('./chunk-MBB6OD4E.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');require('./chunk-UPUHHIXZ.cjs');require('./chunk-ZUNVPK23.cjs');require('./chunk-6IHSPKMP.cjs');require('./chunk-DBYKJZEA.cjs');require('./chunk-Q7I37FJV.cjs');async function g({base64EncryptionKey:t,base64SigningKey:s,partition:i,file:a,consentUrl:c,concurrency:p}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);let m=_chunkMBB6OD4Ecjs.q.call(void 0, a,_chunk7G4SCKPNcjs.b);await _chunk7G4SCKPNcjs.f.call(void 0, {base64EncryptionKey:t,base64SigningKey:s,preferences:m,partition:i,concurrency:p,transcendUrl:c})}exports.uploadConsentPreferences = g;
- //# sourceMappingURL=impl-GPTJLZHD.cjs.map
package/dist/impl-HVKSG236.cjs
@@ -1,2 +0,0 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunkJMBVOY56cjs = require('./chunk-JMBVOY56.cjs');require('./chunk-LR3CPNDM.cjs');require('./chunk-MBB6OD4E.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');require('./chunk-UPUHHIXZ.cjs');require('./chunk-ZUNVPK23.cjs');require('./chunk-6IHSPKMP.cjs');require('./chunk-DBYKJZEA.cjs');require('./chunk-Q7I37FJV.cjs');async function u({auth:r,transcendUrl:i,file:e,enricherId:o,concurrency:s,markSilent:a,sombraAuth:m}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit),await _chunkJMBVOY56cjs.d.call(void 0, {file:e,transcendUrl:i,enricherId:o,concurrency:s,markSilent:a,auth:r,sombraAuth:m})}exports.pushIdentifiers = u;
- //# sourceMappingURL=impl-HVKSG236.cjs.map
package/dist/impl-IVDJEVS4.cjs
@@ -1,6 +0,0 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkSW5CIF5Dcjs = require('./chunk-SW5CIF5D.cjs');var _chunkPNSZQIYVcjs = require('./chunk-PNSZQIYV.cjs');var _chunkAMO6I4AOcjs = require('./chunk-AMO6I4AO.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');require('./chunk-QEM6S2W7.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkUPUHHIXZcjs = require('./chunk-UPUHHIXZ.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunk6IHSPKMPcjs = require('./chunk-6IHSPKMP.cjs');require('./chunk-DBYKJZEA.cjs');var _chunkQ7I37FJVcjs = require('./chunk-Q7I37FJV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _path = require('path');var _fs = require('fs'); var _fs2 = _interopRequireDefault(_fs);async function Y({auth:L,resources:g=_chunkPNSZQIYVcjs.h,file:e,transcendUrl:p,dataSiloIds:d=[],integrationNames:T=[],trackerStatuses:h=_chunkPNSZQIYVcjs.i,pageSize:y,skipDatapoints:S,skipSubDatapoints:$,includeGuessedCategories:A,debug:s}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);let i=await _chunkAMO6I4AOcjs.b.call(void 0, L),C=g.includes("all")?Object.values(_chunkQ7I37FJVcjs.d):g;if(typeof i=="string"){try{let r=_chunkUPUHHIXZcjs.Wd.call(void 0, p,i),t=await _chunkUPUHHIXZcjs.He.call(void 0, r,{dataSiloIds:d,integrationNames:T,resources:C,pageSize:y,debug:s,skipDatapoints:S,skipSubDatapoints:$,includeGuessedCategories:A,trackerStatuses:h});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing configuration to file "${e}"...`)),_chunkSW5CIF5Dcjs.e.call(void 0, e,t)}catch(r){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error occurred syncing the schema: ${s?r.stack:r.message}`)),this.process.exit(1)}_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully synced yaml file to disk at ${e}! View at ${_chunk6IHSPKMPcjs.c}`))}else{if(!_fs2.default.lstatSync(e).isDirectory())throw new Error("File is expected to be a folder when passing in a list of API keys to pull from. e.g. --file=./working/");let r=[];await _chunkUPUHHIXZcjs.a.call(void 0, i,async(t,_)=>{let a=`[${_+1}/${i.length}][${t.organizationName}] `;_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`~~~
-
- ${a}Attempting to pull configuration...
-
- ~~~`));let k=_chunkUPUHHIXZcjs.Wd.call(void 0, p,t.apiKey);try{let c=await _chunkUPUHHIXZcjs.He.call(void 0, k,{dataSiloIds:d,integrationNames:T,resources:C,pageSize:y,debug:s,skipDatapoints:S,skipSubDatapoints:$,includeGuessedCategories:A,trackerStatuses:h}),w=_path.join.call(void 0, e,`${t.organizationName}.yml`);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing configuration to file "${w}"...`)),_chunkSW5CIF5Dcjs.e.call(void 0, w,c),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`${a}Successfully pulled configuration!`))}catch(c){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`${a}Failed to sync configuration. - ${c.message}`)),r.push(t.organizationName)}}),r.length>0&&(_chunkZUNVPK23cjs.a.info(_colors2.default.red(`Sync encountered errors for "${r.join(",")}". View output above for more information, or check out ${_chunk6IHSPKMPcjs.c}`)),this.process.exit(1))}}exports.pull = Y;
- //# sourceMappingURL=impl-IVDJEVS4.cjs.map
package/dist/impl-IVDJEVS4.cjs.map
@@ -1 +0,0 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-IVDJEVS4.cjs","../src/commands/inventory/pull/impl.ts"],"names":["pull","auth","resources","DEFAULT_TRANSCEND_PULL_RESOURCES","file","transcendUrl","dataSiloIds","integrationNames","trackerStatuses","DEFAULT_CONSENT_TRACKER_STATUSES","pageSize","skipDatapoints","skipSubDatapoints","includeGuessedCategories","debug","doneInputValidation","apiKeyOrList","validateTranscendAuth","resourcesToPull","TranscendPullResource","client","buildTranscendGraphQLClient","configuration","pullTranscendConfiguration","logger","colors","writeTranscendYaml","err"],"mappings":"AAAA,iOAAwC,wDAAgD,wDAAyC,wDAAyC,gCAA6B,gCAA6B,wDAAyD,wDAAyC,wDAAyC,gCAA6B,wDAAyC,gFCSla,4BAEE,gEACN,MA0Bf,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,SAAA,CAAAC,CAAAA,CAAYC,mBAAAA,CACZ,IAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CAAAA,CAAc,CAAC,CAAA,CACf,gBAAA,CAAAC,CAAAA,CAAmB,CAAC,CAAA,CACpB,eAAA,CAAAC,CAAAA,CAAkBC,mBAAAA,CAClB,QAAA,CAAAC,CAAAA,CACA,cAAA,CAAAC,CAAAA,CACA,iBAAA,CAAAC,CAAAA,CACA,wBAAA,CAAAC,CAAAA,CACA,KAAA,CAAAC,CACF,CAAA,CACe,CACfC,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAGrC,IAAMC,CAAAA,CAAe,MAAMC,iCAAAA,CAA0B,CAAA,CAE/CC,CAAAA,CAA2ChB,CAAAA,CAAU,QAAA,CAAS,KAAK,CAAA,CACrE,MAAA,CAAO,MAAA,CAAOiB,mBAAqB,CAAA,CAClCjB,CAAAA,CAGL,EAAA,CAAI,OAAOc,CAAAA,EAAiB,QAAA,CAAU,CACpC,GAAI,CAEF,IAAMI,CAAAA,CAASC,kCAAAA,CAA4BhB,CAAcW,CAAY,CAAA,CAE/DM,CAAAA,CAAgB,MAAMC,kCAAAA,CAA2BH,CAAQ,CAC7D,WAAA,CAAAd,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,SAAA,CAAWW,CAAAA,CACX,QAAA,CAAAR,CAAAA,CACA,KAAA,CAAAI,CAAAA,CACA,cAAA,CAAAH,CAAAA,CACA,iBAAA,CAAAC,CAAAA,CACA,wBAAA,CAAAC,CAAAA,CACA,eAAA,CAAAL,CACF,CAAC,CAAA,CAEDgB,mBAAAA,CAAO,IAAA,CAAKC,gBAAAA,CAAO,OAAA,CAAQ,CAAA,+BAAA,EAAkCrB,CAAI,CAAA,IAAA,CAAM,CAAC,CAAA,CACxEsB,iCAAAA,CAAmBtB,CAAMkB,CAAa,CACxC,CAAA,KAAA,CAASK,CAAAA,CAAK,CACZH,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,sCAAA,EACEX,CAAAA,CAAQa,CAAAA,CAAI,KAAA,CAAQA,CAAAA,CAAI,OAC1B,CAAA,CAAA;AA0BA;AAAgB;AAAA;AAiD1B,GAAA","file":"/home/runner/work/cli/cli/dist/impl-IVDJEVS4.cjs","sourcesContent":[null,"import { ConsentTrackerStatus } from '@transcend-io/privacy-types';\nimport type { LocalContext } from '../../../context';\nimport { TranscendPullResource } from '../../../enums';\nimport {\n DEFAULT_CONSENT_TRACKER_STATUSES,\n DEFAULT_TRANSCEND_PULL_RESOURCES,\n} from './command';\n\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { mapSeries } from '../../../lib/bluebird-replace';\nimport { join } from 'node:path';\nimport fs from 'node:fs';\nimport {\n buildTranscendGraphQLClient,\n pullTranscendConfiguration,\n} from '../../../lib/graphql';\n\nimport { writeTranscendYaml } from '../../../lib/readTranscendYaml';\nimport { ADMIN_DASH_INTEGRATIONS } from '../../../constants';\nimport { validateTranscendAuth } from '../../../lib/api-keys';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface PullCommandFlags {\n auth: string;\n resources?: (TranscendPullResource | 'all')[];\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n integrationNames?: string[];\n trackerStatuses?: ConsentTrackerStatus[];\n pageSize: number;\n skipDatapoints: boolean;\n skipSubDatapoints: boolean;\n includeGuessedCategories: boolean;\n debug: boolean;\n}\n\nexport async function pull(\n this: LocalContext,\n {\n auth,\n resources = DEFAULT_TRANSCEND_PULL_RESOURCES,\n file,\n transcendUrl,\n dataSiloIds = [],\n integrationNames = [],\n trackerStatuses = DEFAULT_CONSENT_TRACKER_STATUSES,\n pageSize,\n 
skipDatapoints,\n skipSubDatapoints,\n includeGuessedCategories,\n debug,\n }: PullCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n const resourcesToPull: TranscendPullResource[] = resources.includes('all')\n ? Object.values(TranscendPullResource)\n : (resources as TranscendPullResource[]);\n\n // Sync to Disk\n if (typeof apiKeyOrList === 'string') {\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKeyOrList);\n\n const configuration = await pullTranscendConfiguration(client, {\n dataSiloIds,\n integrationNames,\n resources: resourcesToPull,\n pageSize,\n debug,\n skipDatapoints,\n skipSubDatapoints,\n includeGuessedCategories,\n trackerStatuses,\n });\n\n logger.info(colors.magenta(`Writing configuration to file \"${file}\"...`));\n writeTranscendYaml(file, configuration);\n } catch (err) {\n logger.error(\n colors.red(\n `An error occurred syncing the schema: ${\n debug ? err.stack : err.message\n }`,\n ),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced yaml file to disk at ${file}! View at ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n } else {\n if (!fs.lstatSync(file).isDirectory()) {\n throw new Error(\n 'File is expected to be a folder when passing in a list of API keys to pull from. e.g. --file=./working/',\n );\n }\n\n const encounteredErrors: string[] = [];\n await mapSeries(apiKeyOrList, async (apiKey, ind) => {\n const prefix = `[${ind + 1}/${apiKeyOrList.length}][${\n apiKey.organizationName\n }] `;\n logger.info(\n colors.magenta(\n `~~~\\n\\n${prefix}Attempting to pull configuration...\\n\\n~~~`,\n ),\n );\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKey.apiKey);\n\n try {\n const configuration = await pullTranscendConfiguration(client, {\n dataSiloIds,\n integrationNames,\n resources: resourcesToPull,\n pageSize,\n debug,\n skipDatapoints,\n skipSubDatapoints,\n includeGuessedCategories,\n trackerStatuses,\n });\n\n const filePath = join(file, `${apiKey.organizationName}.yml`);\n logger.info(\n colors.magenta(`Writing configuration to file \"${filePath}\"...`),\n );\n writeTranscendYaml(filePath, configuration);\n\n logger.info(\n colors.green(`${prefix}Successfully pulled configuration!`),\n );\n } catch (err) {\n logger.error(\n colors.red(`${prefix}Failed to sync configuration. - ${err.message}`),\n );\n encounteredErrors.push(apiKey.organizationName);\n }\n });\n\n if (encounteredErrors.length > 0) {\n logger.info(\n colors.red(\n `Sync encountered errors for \"${encounteredErrors.join(\n ',',\n )}\". View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n }\n}\n"]}
package/dist/impl-MLEPL5MZ.cjs.map
@@ -1 +0,0 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-MLEPL5MZ.cjs","../src/commands/consent/pull-consent-metrics/impl.ts"],"names":["pullConsentMetrics","auth","start","end","folder","bin","transcendUrl","parsedBin","ConsentManagerMetricBin","logger","colors"],"mappings":"AAAA,iOAAwC,gCAA6B,wDAAyC,wDAAyC,gCAA6B,gCAA6B,wDAAyC,gCAA6B,gCAA6B,wDAAyD,wDAAyC,wDAAyC,gCAA6B,gCAA6B,gFCEte,4BAEE,gEACqB,MAoB1C,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,KAAA,CAAAC,CAAAA,CACA,GAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,GAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CACF,CAAA,CACe,CAEf,IAAMC,CAAAA,CAAYF,CAAAA,CACb,MAAA,CAAO,MAAA,CAAOG,oBAAuB,CAAA,CAAE,QAAA,CAASD,CAAS,CAAA,EAAA,CAC5DE,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,2CAAA,EAA8CL,CAAG,CAAA;AAAA;AAAA,EACzB,MAAA,CAAO,MAAA,CAAOG,oBAAuB,CAAA,CAAE,IAAA,CAC3D,CAAA;AAAA,CACF,CAAC,CAAA,CAAA;AA+GD;AAAgB;AAAA;AAAA,GAAA","file":"/home/runner/work/cli/cli/dist/impl-MLEPL5MZ.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { mapSeries } from '../../../lib/bluebird-replace';\nimport { join } from 'node:path';\nimport fs, { existsSync, mkdirSync } from 'node:fs';\nimport {\n buildTranscendGraphQLClient,\n ConsentManagerMetricBin,\n} from '../../../lib/graphql';\nimport { validateTranscendAuth } from '../../../lib/api-keys';\nimport { ADMIN_DASH_INTEGRATIONS } from '../../../constants';\nimport { pullConsentManagerMetrics } from '../../../lib/consent-manager';\nimport { writeCsv } from '../../../lib/cron';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface PullConsentMetricsCommandFlags {\n auth: string;\n start: Date;\n end?: Date;\n folder: string;\n bin: string;\n transcendUrl: string;\n}\n\nexport async function pullConsentMetrics(\n this: LocalContext,\n {\n auth,\n start,\n end,\n folder,\n bin,\n transcendUrl,\n }: PullConsentMetricsCommandFlags,\n): Promise<void> {\n // Validate bin\n const parsedBin = bin as ConsentManagerMetricBin;\n if (!Object.values(ConsentManagerMetricBin).includes(parsedBin)) {\n logger.error(\n colors.red(\n `Failed to parse argument \"bin\" with value \"${bin}\"\\n` +\n `Expected one of: \\n${Object.values(ConsentManagerMetricBin).join(\n '\\n',\n )}`,\n ),\n );\n this.process.exit(1);\n }\n\n // Parse the dates\n const startDate = new Date(start);\n const endDate = end ? new Date(end) : new Date();\n if (Number.isNaN(startDate.getTime())) {\n logger.error(\n colors.red(\n `Start date provided is invalid date. Got --start=\"${start}\" expected --start=\"01/01/2023\"`,\n ),\n );\n this.process.exit(1);\n }\n if (Number.isNaN(endDate.getTime())) {\n logger.error(\n colors.red(\n `End date provided is invalid date. Got --end=\"${end}\" expected --end=\"01/01/2023\"`,\n ),\n );\n this.process.exit(1);\n }\n if (startDate > endDate) {\n logger.error(\n colors.red(\n `Got a start date \"${startDate.toISOString()}\" that was larger than the end date \"${endDate.toISOString()}\". ` +\n 'Start date must be before end date.',\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Ensure folder either does not exist or is not a file\n if (fs.existsSync(folder) && !fs.lstatSync(folder).isDirectory()) {\n logger.error(\n colors.red(\n 'The provided argument \"folder\" was passed a file. 
expected: folder=\"./consent-metrics/\"',\n ),\n );\n this.process.exit(1);\n }\n\n // Create the folder if it does not exist\n if (!existsSync(folder)) {\n mkdirSync(folder);\n }\n\n logger.info(\n colors.magenta(\n `Pulling consent metrics from start=${startDate.toString()} to end=${endDate.toISOString()} with bin size \"${bin}\"`,\n ),\n );\n\n // Sync to Disk\n if (typeof apiKeyOrList === 'string') {\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKeyOrList);\n\n // Pull the metrics\n const configuration = await pullConsentManagerMetrics(client, {\n bin: parsedBin,\n start: startDate,\n end: endDate,\n });\n\n // Write to file\n Object.entries(configuration).forEach(([metricName, metrics]) => {\n metrics.forEach(({ points, name }) => {\n const file = join(folder, `${metricName}_${name}.csv`);\n logger.info(\n colors.magenta(`Writing configuration to file \"${file}\"...`),\n );\n writeCsv(\n file,\n points.map(({ key, value }) => ({\n timestamp: key,\n value,\n })),\n );\n });\n });\n } catch (err) {\n logger.error(\n colors.red(`An error occurred syncing the schema: ${err.message}`),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced consent metrics to disk in folder \"${folder}\"! View at ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n } else {\n const encounteredErrors: string[] = [];\n await mapSeries(apiKeyOrList, async (apiKey, ind) => {\n const prefix = `[${ind + 1}/${apiKeyOrList.length}][${\n apiKey.organizationName\n }] `;\n logger.info(\n colors.magenta(\n `~~~\\n\\n${prefix}Attempting to pull consent metrics...\\n\\n~~~`,\n ),\n );\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKey.apiKey);\n\n try {\n const configuration = await pullConsentManagerMetrics(client, {\n bin: parsedBin,\n start: startDate,\n end: endDate,\n });\n\n // ensure folder exists for that organization\n const subFolder = join(folder, apiKey.organizationName);\n if (!existsSync(subFolder)) {\n mkdirSync(subFolder);\n }\n\n // Write to file\n Object.entries(configuration).forEach(([metricName, metrics]) => {\n metrics.forEach(({ points, name }) => {\n const file = join(subFolder, `${metricName}_${name}.csv`);\n logger.info(\n colors.magenta(`Writing configuration to file \"${file}\"...`),\n );\n writeCsv(\n file,\n points.map(({ key, value }) => ({\n timestamp: key,\n value,\n })),\n );\n });\n });\n\n logger.info(\n colors.green(`${prefix}Successfully pulled configuration!`),\n );\n } catch (err) {\n logger.error(colors.red(`${prefix}Failed to sync configuration.`));\n encounteredErrors.push(apiKey.organizationName);\n }\n });\n\n if (encounteredErrors.length > 0) {\n logger.info(\n colors.red(\n `Sync encountered errors for \"${encounteredErrors.join(\n ',',\n )}\". View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n }\n}\n"]}
package/dist/impl-QW6NAOEK.cjs
@@ -1,7 +0,0 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkUPUHHIXZcjs = require('./chunk-UPUHHIXZ.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkDBYKJZEAcjs = require('./chunk-DBYKJZEA.cjs');var _chunkQ7I37FJVcjs = require('./chunk-Q7I37FJV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _got = require('got'); var _got2 = _interopRequireDefault(_got);var B=({hostname:t,auth:e})=>_got2.default.extend({prefixUrl:`https://${t}`,headers:{accept:"application/json","content-type":"application/json",authorization:`Bearer ${e}`}});var _yargsparser = require('yargs-parser'); var _yargsparser2 = _interopRequireDefault(_yargsparser);var ve=Object.values(_chunkQ7I37FJVcjs.b);var _fs = require('fs'); var _fs2 = _interopRequireDefault(_fs);var w=({assessment:t,index:e,total:s,wrap:r=!0})=>{let n="";(e===0||r)&&(n=`[
- `);let m=JSON.stringify(t),o=s&&e<s-1&&!r?",":"";return n=`${n+m+o}
- `,(s&&e===s-1||r)&&(n+=`
- ]`),n};var J=({file:t,assessment:e,index:s,total:r})=>{_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing enriched assessment ${s+1} of ${r} to file "${t}"...`)),s===0?_fs2.default.writeFileSync(t,w({assessment:e,index:s,total:r,wrap:!1})):_fs2.default.appendFileSync(t,w({assessment:e,index:s,total:r,wrap:!1}))};var _typeutils = require('@transcend-io/type-utils');var _privacytypes = require('@transcend-io/privacy-types');var K=async({oneTrust:t})=>{let e=0,s=1,r=0,n=[];for(;e<s;){let{body:m}=await t.get(`api/assessment/v2/assessments?page=${e}&size=2000`),{page:o,content:u}=_typeutils.decodeCodec.call(void 0, _privacytypes.OneTrustGetListOfAssessmentsResponse,m);n.push(..._nullishCoalesce(u, () => ([]))),e===0&&(s=_nullishCoalesce(_optionalChain([o, 'optionalAccess', _2 => _2.totalPages]), () => (0)),r=_nullishCoalesce(_optionalChain([o, 'optionalAccess', _3 => _3.totalElements]), () => (0))),e+=1,_chunkZUNVPK23cjs.a.info(`Fetched ${n.length} of ${r} assessments.`)}return n};var W=async({oneTrust:t,assessmentId:e})=>{let{body:s}=await t.get(`api/assessment/v2/assessments/${e}/export?ExcludeSkippedQuestions=false`);return _typeutils.decodeCodec.call(void 0, _privacytypes.OneTrustGetAssessmentResponse,s)};var H=async({oneTrust:t,riskId:e})=>{let{body:s}=await t.get(`api/risk/v2/risks/${e}`);return _typeutils.decodeCodec.call(void 0, _privacytypes.OneTrustGetRiskResponse,s)};var k=async({oneTrust:t,userId:e})=>{let{body:s}=await t.get(`api/scim/v2/Users/${e}`);return _typeutils.decodeCodec.call(void 0, _privacytypes.OneTrustGetUserResponse,s)};var z=({assessment:t,assessmentDetails:e,riskDetails:s,creatorDetails:r,approversDetails:n,respondentsDetails:m})=>{let o=_chunkDBYKJZEAcjs.e.call(void 0, s,"id"),{sections:u,createdBy:h,...g}=e,O=u.map(i=>{let{questions:A,...$}=i,x=A.map(E=>{let{risks:U,...G}=E,v=(_nullishCoalesce(U, () => ([]))).map(y=>{let S=o[y.riskId];return{...y,...S,level:y.level,impactLevel:_nullishCoalesce(y.impactLevel, () => (0))}});return{...G,risks:v}});return{...$,questions:x}}),p={...h,active:_nullishCoalesce(_optionalChain([r, 'optionalAccess', _4 => _4.active]), () => (!1)),userType:_nullishCoalesce(_optionalChain([r, 'optionalAccess', _5 => _5.userType]), () => ("Internal")),emails:_nullishCoalesce(_optionalChain([r, 'optionalAccess', _6 => _6.emails]), () => ([])),title:_nullishCoalesce(_optionalChain([r, 'optionalAccess', _7 => _7.title]), () => (null)),givenName:_nullishCoalesce(_optionalChain([r, 'optionalAccess', _8 => _8.name, 'access', _9 => _9.givenName]), () => (null)),familyName:_nullishCoalesce(_optionalChain([r, 'optionalAccess', _10 => _10.name, 'access', _11 => _11.familyName]), () => (null))},d=_chunkDBYKJZEAcjs.e.call(void 0, n,"id"),l=e.approvers.flatMap(i=>d[i.id]?[{...i,approver:{...i.approver,active:d[i.id].active,userType:d[i.id].userType,emails:d[i.id].emails,title:d[i.id].title,givenName:_nullishCoalesce(d[i.id].name.givenName, () => (null)),familyName:_nullishCoalesce(d[i.id].name.familyName, () => (null))}}]:[]),T=_chunkDBYKJZEAcjs.e.call(void 0, m,"id"),C=e.respondents.filter(i=>!i.name.includes("@")).flatMap(i=>T[i.id]?[{...i,active:T[i.id].active,userType:T[i.id].userType,emails:T[i.id].emails,title:T[i.id].title,givenName:_nullishCoalesce(T[i.id].name.givenName, () => (null)),familyName:_nullishCoalesce(T[i.id].name.familyName, () => (null))}]:[]);return{...t,...g,approvers:l,respondents:C,createdBy:p,sections:O}};var F=async({transcend:t,assessment:e,total:s,index:r})=>{_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing 
enriched assessment ${r+1} ${s?`of ${s} `:" "}to Transcend...`));let m={json:w({assessment:e,index:r,total:s})};try{await _chunkUPUHHIXZcjs.cc.call(void 0, t,_chunkUPUHHIXZcjs.ha,{input:m})}catch (e2){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to sync assessment ${r+1} ${s?`of ${s} `:" "}to Transcend.
- Assessment Title: ${e.name}. Template Title: ${e.template.name}
- `))}};var Y=async({oneTrust:t,file:e,dryRun:s,transcend:r})=>{_chunkZUNVPK23cjs.a.info("Getting list of all assessments from OneTrust...");let n=await K({oneTrust:t}),m={},o=5,u=Array.from({length:Math.ceil(n.length/o)},(h,g)=>n.slice(g*o,(g+1)*o));await _chunkUPUHHIXZcjs.a.call(void 0, u,async(h,g)=>{let O=[];await _chunkUPUHHIXZcjs.b.call(void 0, h,async(p,d)=>{let l=o*g+d+1;_chunkZUNVPK23cjs.a.info(`[assessment ${l} of ${n.length}]: fetching details...`);let{templateName:T,assessmentId:C}=p,i=await W({oneTrust:t,assessmentId:C}),A=i.createdBy.id,$=m[A];if(!$){_chunkZUNVPK23cjs.a.info(`[assessment ${l} of ${n.length}]: fetching creator...`);try{$=await k({oneTrust:t,userId:A}),m[A]=$}catch (e3){_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`[assessment ${l} of ${n.length}]: failed to fetch form creator. creatorId: ${A}. Assessment Title: ${p.name}. Template Title: ${T}`))}}let{approvers:x}=i,E=[];x.length>0&&(_chunkZUNVPK23cjs.a.info(`[assessment ${l} of ${n.length}]: fetching approvers...`),E=await _chunkUPUHHIXZcjs.b.call(void 0, x.map(({id:c})=>c),async c=>{try{let f=m[c];return f||(f=await k({oneTrust:t,userId:c}),m[c]=f),[f]}catch (e4){return _chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`[assessment ${l} of ${n.length}]: failed to fetch a form approver. approverId: ${c}. Assessment Title: ${p.name}. Template Title: ${T}`)),[]}},{concurrency:5}));let{respondents:U}=i,G=U.filter(c=>!c.name.includes("@")),v=[];G.length>0&&(_chunkZUNVPK23cjs.a.info(`[assessment ${l} of ${n.length}]: fetching respondents...`),v=await _chunkUPUHHIXZcjs.b.call(void 0, G.map(({id:c})=>c),async c=>{try{let f=m[c];return f||(f=await k({oneTrust:t,userId:c}),m[c]=f),[f]}catch (e5){return _chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`[assessment ${l} of ${n.length}]: failed to fetch a respondent. respondentId: ${c}. Assessment Title: ${p.name}. 
Template Title: ${T}`)),[]}},{concurrency:5}));let y=[],S=_chunkDBYKJZEAcjs.j.call(void 0, i.sections.flatMap(c=>c.questions.flatMap(f=>(_nullishCoalesce(f.risks, () => ([]))).flatMap(ee=>ee.riskId))));S.length>0&&(_chunkZUNVPK23cjs.a.info(`[assessment ${l} of ${n.length}]: fetching risks...`),y=await _chunkUPUHHIXZcjs.b.call(void 0, S,c=>H({oneTrust:t,riskId:c}),{concurrency:5}));let X=z({assessment:p,assessmentDetails:i,riskDetails:y,creatorDetails:$,approversDetails:E.flat(),respondentsDetails:v.flat()});O.push(X)},{concurrency:o}),await _chunkUPUHHIXZcjs.a.call(void 0, O,async(p,d)=>{let l=g*o+d;s&&e?J({assessment:p,index:l,total:n.length,file:e}):r&&await F({assessment:p,transcend:r,total:n.length,index:l})})})};var _JSONStream = require('JSONStream'); var _JSONStream2 = _interopRequireDefault(_JSONStream);var Z=({transcend:t,file:e})=>(_chunkZUNVPK23cjs.a.info(`Getting list of all assessments from file ${e}...`),new Promise((s,r)=>{let n=_fs.createReadStream.call(void 0, e,{encoding:"utf-8",highWaterMark:65536}),m=_JSONStream2.default.parse("*"),o=0;n.pipe(m),m.on("data",async u=>{try{m.pause();let h=_typeutils.decodeCodec.call(void 0, _privacytypes.OneTrustEnrichedAssessment,u);await F({assessment:h,transcend:t,index:o}),o+=1,m.resume()}catch(h){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to parse the assessment ${o} from file '${e}': ${h.message}.`))}}),m.on("end",()=>{_chunkZUNVPK23cjs.a.info(`Finished processing ${o} assessments from file ${e}`),s()}),m.on("error",u=>{_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Error parsing file '${e}': ${u.message}`)),r(u)}),n.on("error",u=>{_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Error reading file '${e}': ${u.message}`)),r(u)})}));async function Ds({hostname:t,oneTrustAuth:e,source:s,transcendAuth:r,transcendUrl:n,resource:m,file:o,dryRun:u,debug:h}){if(!u&&!r)throw new Error('Must specify a "transcendAuth" parameter to sync resources to Transcend. e.g. --transcendAuth=${TRANSCEND_API_KEY}');if(u&&!o)throw new Error('Must set a "file" parameter when "dryRun" is "true". e.g. --file=./oneTrustAssessments.json');if(o){let p=o.split(".");if(p.length<2)throw new Error('The "file" parameter has an invalid format. Expected a path with extensions. e.g. --file=./pathToFile.json.');if(p.at(-1)!=="json")throw new Error(`Expected the format of the "file" parameters '${o}' to be 'json', but got '${p.at(-1)}'.`)}if(s==="oneTrust"){if(!t)throw new Error('Missing required parameter "hostname". e.g. --hostname=customer.my.onetrust.com');if(!e)throw new Error('Missing required parameter "oneTrustAuth". e.g. --oneTrustAuth=$ONE_TRUST_AUTH_TOKEN')}else{if(!o)throw new Error('Must specify a "file" parameter to read the OneTrust assessments from. e.g. --source=./oneTrustAssessments.json');if(u)throw new Error('Cannot read and write to a file simultaneously. Emit the "source" parameter or set it to oneTrust if "dryRun" is enabled.')}_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);let g=t&&e?B({hostname:t,auth:e}):void 0,O=n&&r?_chunkUPUHHIXZcjs.Wd.call(void 0, n,r):void 0;try{m==="assessments"&&(s==="oneTrust"&&g?await Y({oneTrust:g,file:o,dryRun:u,...O&&{transcend:O}}):s==="file"&&o&&O&&await Z({file:o,transcend:O}))}catch(p){throw new Error(`An error occurred syncing the resource ${m} from OneTrust: ${h?p.stack:p.message}`)}_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully synced OneTrust ${m} to ${u?`disk at "${o}"`:"Transcend"}!`))}exports.syncOt = Ds;
- //# sourceMappingURL=impl-QW6NAOEK.cjs.map
package/dist/impl-QW6NAOEK.cjs.map
@@ -1 +0,0 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-QW6NAOEK.cjs","../src/commands/migration/sync-ot/impl.ts","../src/lib/oneTrust/createOneTrustGotInstance.ts","../src/lib/oneTrust/helpers/oneTrustAssessmentToJson.ts","../src/lib/oneTrust/endpoints/getListOfOneTrustAssessments.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentToTranscend.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentsFromOneTrust.ts"],"names":["createOneTrustGotInstance","hostname","auth","got"],"mappings":"AAAA,y0BAAwC,wDAAwE,wDAAoC,wDAAgD,wDAAyC,gFCE1N,oECFM,IAQZA,CAAAA,CAA4B,CAAC,CACxC,QAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CACF,CAAA,CAAA,EAMEC,aAAAA,CAAI,MAAA,CAAO,CACT,SAAA,CAAW,CAAA,QAAA,EAAWF,CAAQ,CAAA,CAAA;ACiBwB;AAIzC;ACrCW,CAAA;ACyEiE,mBAAA;ACC3B","file":"/home/runner/work/cli/cli/dist/impl-QW6NAOEK.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { createOneTrustGotInstance } from '../../../lib/oneTrust';\nimport {\n OneTrustFileFormat,\n OneTrustPullResource,\n OneTrustPullSource,\n} from '../../../enums';\nimport { buildTranscendGraphQLClient } from '../../../lib/graphql';\nimport {\n syncOneTrustAssessmentsFromFile,\n syncOneTrustAssessmentsFromOneTrust,\n} from '../../../lib/oneTrust/helpers';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n// Command flag interface\nexport interface SyncOtCommandFlags {\n hostname?: string;\n oneTrustAuth?: string;\n source: OneTrustPullSource;\n transcendAuth?: string;\n transcendUrl: string;\n file?: string;\n resource: OneTrustPullResource;\n dryRun: boolean;\n debug: boolean;\n}\n\n// Command implementation\nexport async function syncOt(\n this: LocalContext,\n {\n hostname,\n oneTrustAuth,\n source,\n transcendAuth,\n transcendUrl,\n resource,\n file,\n dryRun,\n debug,\n }: SyncOtCommandFlags,\n): Promise<void> {\n // Must be able to authenticate to transcend to sync resources to it\n if (!dryRun && !transcendAuth) {\n throw new Error(\n // eslint-disable-next-line no-template-curly-in-string\n 'Must specify a \"transcendAuth\" parameter to sync resources to Transcend. e.g. --transcendAuth=${TRANSCEND_API_KEY}',\n );\n }\n\n // If trying to sync to disk, must specify a file path\n if (dryRun && !file) {\n throw new Error(\n 'Must set a \"file\" parameter when \"dryRun\" is \"true\". e.g. --file=./oneTrustAssessments.json',\n );\n }\n\n if (file) {\n const splitFile = file.split('.');\n if (splitFile.length < 2) {\n throw new Error(\n 'The \"file\" parameter has an invalid format. Expected a path with extensions. e.g. --file=./pathToFile.json.',\n );\n }\n if (splitFile.at(-1) !== OneTrustFileFormat.Json) {\n throw new Error(\n `Expected the format of the \"file\" parameters '${file}' to be '${\n OneTrustFileFormat.Json\n }', but got '${splitFile.at(-1)}'.`,\n );\n }\n }\n\n // if reading assessments from a OneTrust\n if (source === OneTrustPullSource.OneTrust) {\n // must specify the OneTrust hostname\n if (!hostname) {\n throw new Error(\n 'Missing required parameter \"hostname\". e.g. --hostname=customer.my.onetrust.com',\n );\n }\n // must specify the OneTrust auth\n if (!oneTrustAuth) {\n throw new Error(\n 'Missing required parameter \"oneTrustAuth\". e.g. --oneTrustAuth=$ONE_TRUST_AUTH_TOKEN',\n );\n }\n } else {\n // if reading the assessments from a file, must specify a file to read from\n if (!file) {\n throw new Error(\n 'Must specify a \"file\" parameter to read the OneTrust assessments from. 
e.g. --source=./oneTrustAssessments.json',\n );\n }\n\n // Cannot try reading from file and save assessments to a file simultaneously\n if (dryRun) {\n throw new Error(\n 'Cannot read and write to a file simultaneously.' +\n ` Emit the \"source\" parameter or set it to ${OneTrustPullSource.OneTrust} if \"dryRun\" is enabled.`,\n );\n }\n }\n\n doneInputValidation(this.process.exit);\n\n // instantiate a client to talk to OneTrust\n const oneTrust =\n hostname && oneTrustAuth\n ? createOneTrustGotInstance({\n hostname,\n auth: oneTrustAuth,\n })\n : undefined;\n\n // instantiate a client to talk to Transcend\n const transcend =\n transcendUrl && transcendAuth\n ? buildTranscendGraphQLClient(transcendUrl, transcendAuth)\n : undefined;\n\n try {\n if (resource === OneTrustPullResource.Assessments) {\n if (source === OneTrustPullSource.OneTrust && oneTrust) {\n await syncOneTrustAssessmentsFromOneTrust({\n oneTrust,\n file,\n dryRun,\n ...(transcend && { transcend }),\n });\n } else if (source === OneTrustPullSource.File && file && transcend) {\n await syncOneTrustAssessmentsFromFile({ file, transcend });\n }\n }\n } catch (err) {\n throw new Error(\n `An error occurred syncing the resource ${resource} from OneTrust: ${\n debug ? err.stack : err.message\n }`,\n );\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced OneTrust ${resource} to ${\n dryRun ? `disk at \"${file}\"` : 'Transcend'\n }!`,\n ),\n );\n}\n","import got, { Got } from 'got';\n\n/**\n * Instantiate an instance of got that is capable of making requests to OneTrust\n *\n * @param param - information about the OneTrust URL\n * @returns The instance of got that is capable of making requests to the customer ingress\n */\nexport const createOneTrustGotInstance = ({\n hostname,\n auth,\n}: {\n /** Hostname of the OneTrust API */\n hostname: string;\n /** The OAuth access token */\n auth: string;\n}): Got =>\n got.extend({\n prefixUrl: `https://${hostname}`,\n headers: {\n accept: 'application/json',\n 'content-type': 'application/json',\n authorization: `Bearer ${auth}`,\n },\n });\n","import { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\n\n/**\n * Converts the assessment into a json entry.\n *\n * @param param - information about the assessment and amount of entries\n * @returns a stringified json entry ready to be appended to a file\n */\nexport const oneTrustAssessmentToJson = ({\n assessment,\n index,\n total,\n wrap = true,\n}: {\n /** The assessment to convert */\n assessment: OneTrustEnrichedAssessment;\n /** The position of the assessment in the final Json object */\n index: number;\n /** The total amount of the assessments in the final Json object */\n total?: number;\n /** Whether to wrap every entry in brackets */\n wrap?: boolean;\n}): string => {\n let jsonEntry = '';\n // start with an opening bracket\n if (index === 0 || wrap) {\n jsonEntry = '[\\n';\n }\n\n const stringifiedAssessment = JSON.stringify(assessment);\n\n // Add comma for all items except the last one\n const comma = total && index < total - 1 && !wrap ? 
',' : '';\n\n // write to file\n jsonEntry = `${jsonEntry + stringifiedAssessment + comma}\\n`;\n\n // end with closing bracket\n if ((total && index === total - 1) || wrap) {\n jsonEntry += '\\n]';\n }\n\n return jsonEntry;\n};\n","import { Got } from 'got';\nimport { logger } from '../../../logger';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport {\n OneTrustAssessment,\n OneTrustGetListOfAssessmentsResponse,\n} from '@transcend-io/privacy-types';\n\n/**\n * Fetch a list of all assessments from the OneTrust client.\n * ref: https://developer.onetrust.com/onetrust/reference/getallassessmentbasicdetailsusingget\n *\n * @param param - the information about the OneTrust client\n * @returns a list of OneTrustAssessment\n */\nexport const getListOfOneTrustAssessments = async ({\n oneTrust,\n}: {\n /** The OneTrust client instance */\n oneTrust: Got;\n}): Promise<OneTrustAssessment[]> => {\n let currentPage = 0;\n let totalPages = 1;\n let totalElements = 0;\n\n const allAssessments: OneTrustAssessment[] = [];\n\n while (currentPage < totalPages) {\n const { body } = await oneTrust.get(\n `api/assessment/v2/assessments?page=${currentPage}&size=2000`,\n );\n\n const { page, content } = decodeCodec(\n OneTrustGetListOfAssessmentsResponse,\n body,\n );\n allAssessments.push(...(content ?? []));\n if (currentPage === 0) {\n totalPages = page?.totalPages ?? 0;\n totalElements = page?.totalElements ?? 0;\n }\n currentPage += 1;\n\n // log progress\n logger.info(\n `Fetched ${allAssessments.length} of ${totalElements} assessments.`,\n );\n }\n\n return allAssessments;\n};\n","import { logger } from '../../../logger';\nimport colors from 'colors';\nimport { GraphQLClient } from 'graphql-request';\nimport {\n IMPORT_ONE_TRUST_ASSESSMENT_FORMS,\n makeGraphQLRequest,\n} from '../../graphql';\nimport { ImportOnetrustAssessmentsInput } from '../../../codecs';\nimport { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\nimport { oneTrustAssessmentToJson } from './oneTrustAssessmentToJson';\n\nexport interface AssessmentForm {\n /** ID of Assessment Form */\n id: string;\n /** Title of Assessment Form */\n name: string;\n}\n\n/**\n * Write the assessment to a Transcend instance.\n *\n *\n * @param param - information about the assessment and Transcend instance to write to\n */\nexport const syncOneTrustAssessmentToTranscend = async ({\n transcend,\n assessment,\n total,\n index,\n}: {\n /** the Transcend client instance */\n transcend: GraphQLClient;\n /** the assessment to sync to Transcend */\n assessment: OneTrustEnrichedAssessment;\n /** The index of the assessment being written to the file */\n index: number;\n /** The total amount of assessments that we will write */\n total?: number;\n}): Promise<void> => {\n logger.info(\n colors.magenta(\n `Writing enriched assessment ${index + 1} ${\n total ? 
`of ${total} ` : ' '\n }to Transcend...`,\n ),\n );\n\n // convert the OneTrust assessment object into a json record\n const json = oneTrustAssessmentToJson({\n assessment,\n index,\n total,\n });\n\n // transform the json record into a valid input to the mutation\n const input: ImportOnetrustAssessmentsInput = {\n json,\n };\n\n try {\n await makeGraphQLRequest<{\n /** the importOneTrustAssessmentForms mutation */\n importOneTrustAssessmentForms: {\n /** Created Assessment Forms */\n assessmentForms: AssessmentForm[];\n };\n }>(transcend, IMPORT_ONE_TRUST_ASSESSMENT_FORMS, {\n input,\n });\n } catch (e) {\n logger.error(\n colors.red(\n `Failed to sync assessment ${index + 1} ${\n total ? `of ${total} ` : ' '\n }to Transcend.\\n` +\n `\\tAssessment Title: ${assessment.name}. Template Title: ${assessment.template.name}\\n`,\n ),\n );\n }\n};\n","import type { Got } from 'got';\nimport colors from 'colors';\nimport {\n getListOfOneTrustAssessments,\n getOneTrustAssessment,\n getOneTrustRisk,\n getOneTrustUser,\n} from '../endpoints';\nimport { mapSeries, map } from '../../bluebird-replace';\nimport { logger } from '../../../logger';\nimport {\n OneTrustAssessmentQuestion,\n OneTrustAssessmentSection,\n OneTrustEnrichedAssessment,\n OneTrustGetRiskResponse,\n OneTrustGetUserResponse,\n} from '@transcend-io/privacy-types';\nimport { uniq } from 'lodash-es';\nimport { enrichOneTrustAssessment } from './enrichOneTrustAssessment';\nimport { syncOneTrustAssessmentToDisk } from './syncOneTrustAssessmentToDisk';\nimport { GraphQLClient } from 'graphql-request';\nimport { syncOneTrustAssessmentToTranscend } from './syncOneTrustAssessmentToTranscend';\n\nexport interface AssessmentForm {\n /** ID of Assessment Form */\n id: string;\n /** Title of Assessment Form */\n name: string;\n}\n\n/**\n * Reads all the assessments from a OneTrust instance and syncs them to Transcend or to Disk.\n *\n * @param param - the information about the assessment, its OneTrust source, and destination (disk or Transcend)\n */\nexport const syncOneTrustAssessmentsFromOneTrust = async ({\n oneTrust,\n file,\n dryRun,\n transcend,\n}: {\n /** the OneTrust client instance */\n oneTrust: Got;\n /** the Transcend client instance */\n transcend?: GraphQLClient;\n /** Whether to write to file instead of syncing to Transcend */\n dryRun: boolean;\n /** the path to the file in case dryRun is true */\n file?: string;\n}): Promise<void> => {\n // fetch the list of all assessments in the OneTrust organization\n logger.info('Getting list of all assessments from OneTrust...');\n const assessments = await getListOfOneTrustAssessments({ oneTrust });\n\n // a cache of OneTrust users so we avoid requesting already fetched users\n const oneTrustCachedUsers: Record<string, OneTrustGetUserResponse> = {};\n\n // split all assessments in batches, so we can process some of steps in parallel\n const BATCH_SIZE = 5;\n const assessmentBatches = Array.from(\n {\n length: Math.ceil(assessments.length / BATCH_SIZE),\n },\n (_, i) => assessments.slice(i * BATCH_SIZE, (i + 1) * BATCH_SIZE),\n );\n\n // process each batch and sync the batch right away so it's garbage collected and we don't run out of memory\n await mapSeries(assessmentBatches, async (assessmentBatch, batch) => {\n const batchEnrichedAssessments: OneTrustEnrichedAssessment[] = [];\n\n // fetch assessment details from OneTrust in parallel\n await map(\n assessmentBatch,\n async (assessment, index) => {\n const assessmentNumber = BATCH_SIZE * batch + index + 1;\n logger.info(\n `[assessment 
${assessmentNumber} of ${assessments.length}]: fetching details...`,\n );\n const { templateName, assessmentId } = assessment;\n const assessmentDetails = await getOneTrustAssessment({\n oneTrust,\n assessmentId,\n });\n // fetch assessment's creator information\n const creatorId = assessmentDetails.createdBy.id;\n let creator = oneTrustCachedUsers[creatorId];\n if (!creator) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching creator...`,\n );\n try {\n creator = await getOneTrustUser({\n oneTrust,\n userId: creatorId,\n });\n oneTrustCachedUsers[creatorId] = creator;\n } catch (e) {\n logger.warn(\n colors.yellow(\n `[assessment ${assessmentNumber} of ${assessments.length}]: failed to fetch form creator.` +\n `\\tcreatorId: ${creatorId}. Assessment Title: ${assessment.name}. Template Title: ${templateName}`,\n ),\n );\n }\n }\n\n // fetch assessment approvers information\n const { approvers } = assessmentDetails;\n let approversDetails: OneTrustGetUserResponse[][] = [];\n if (approvers.length > 0) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching approvers...`,\n );\n approversDetails = await map(\n approvers.map(({ id }) => id),\n async (userId) => {\n try {\n let approver = oneTrustCachedUsers[userId];\n if (!approver) {\n approver = await getOneTrustUser({ oneTrust, userId });\n oneTrustCachedUsers[userId] = approver;\n }\n return [approver];\n } catch (e) {\n logger.warn(\n colors.yellow(\n `[assessment ${assessmentNumber} of ${assessments.length}]: failed to fetch a form approver.` +\n `\\tapproverId: ${userId}. Assessment Title: ${assessment.name}. Template Title: ${templateName}`,\n ),\n );\n return [];\n }\n },\n { concurrency: 5 },\n );\n }\n\n // fetch assessment internal respondents information\n const { respondents } = assessmentDetails;\n // if a user is an internal respondents, their 'name' field can't be an email.\n const internalRespondents = respondents.filter(\n (r) => !r.name.includes('@'),\n );\n let respondentsDetails: OneTrustGetUserResponse[][] = [];\n if (internalRespondents.length > 0) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching respondents...`,\n );\n respondentsDetails = await map(\n internalRespondents.map(({ id }) => id),\n async (userId) => {\n try {\n let respondent = oneTrustCachedUsers[userId];\n if (!respondent) {\n respondent = await getOneTrustUser({ oneTrust, userId });\n oneTrustCachedUsers[userId] = respondent;\n }\n return [respondent];\n } catch (e) {\n logger.warn(\n colors.yellow(\n `[assessment ${assessmentNumber} of ${assessments.length}]: failed to fetch a respondent.` +\n `\\trespondentId: ${userId}. Assessment Title: ${assessment.name}. Template Title: ${templateName}`,\n ),\n );\n return [];\n }\n },\n { concurrency: 5 },\n );\n }\n\n // fetch assessment risk information\n let riskDetails: OneTrustGetRiskResponse[] = [];\n const riskIds = uniq(\n assessmentDetails.sections.flatMap((s: OneTrustAssessmentSection) =>\n s.questions.flatMap((q: OneTrustAssessmentQuestion) =>\n (q.risks ?? 
[]).flatMap((r) => r.riskId),\n ),\n ),\n );\n if (riskIds.length > 0) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching risks...`,\n );\n riskDetails = await map(\n riskIds,\n (riskId) => getOneTrustRisk({ oneTrust, riskId: riskId as string }),\n {\n concurrency: 5,\n },\n );\n }\n\n // enrich the assessments with user and risk details\n const enrichedAssessment = enrichOneTrustAssessment({\n assessment,\n assessmentDetails,\n riskDetails,\n creatorDetails: creator,\n approversDetails: approversDetails.flat(),\n respondentsDetails: respondentsDetails.flat(),\n });\n\n batchEnrichedAssessments.push(enrichedAssessment);\n },\n { concurrency: BATCH_SIZE },\n );\n\n // sync assessments in series to avoid concurrency bugs\n await mapSeries(\n batchEnrichedAssessments,\n async (enrichedAssessment, index) => {\n // the assessment's global index takes its batch into consideration\n const globalIndex = batch * BATCH_SIZE + index;\n\n if (dryRun && file) {\n // sync to file\n syncOneTrustAssessmentToDisk({\n assessment: enrichedAssessment,\n index: globalIndex,\n total: assessments.length,\n file,\n });\n } else if (transcend) {\n // sync to transcend\n await syncOneTrustAssessmentToTranscend({\n assessment: enrichedAssessment,\n transcend,\n total: assessments.length,\n index: globalIndex,\n });\n }\n },\n );\n });\n};\n"]}
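Note: the deleted source map above inlines the original TypeScript for the migration sync-ot command (OneTrust client construction, assessment pagination, batching, and the Transcend sync). For orientation while skimming the diff, the client and pagination pattern it carries reduces to roughly the sketch below; `listAssessments` is an illustrative name and plain `JSON.parse` stands in for the package's `decodeCodec` validation, so this is not the published API.

```ts
import got, { Got } from 'got';

// Build a got instance scoped to a OneTrust tenant, as in createOneTrustGotInstance.
const createOneTrustClient = (hostname: string, auth: string): Got =>
  got.extend({
    prefixUrl: `https://${hostname}`,
    headers: {
      accept: 'application/json',
      'content-type': 'application/json',
      authorization: `Bearer ${auth}`,
    },
  });

// Walk the paginated assessments endpoint until totalPages is exhausted,
// mirroring the currentPage/totalPages loop in getListOfOneTrustAssessments.
async function listAssessments(oneTrust: Got): Promise<unknown[]> {
  const all: unknown[] = [];
  let currentPage = 0;
  let totalPages = 1;
  while (currentPage < totalPages) {
    const { body } = await oneTrust.get(
      `api/assessment/v2/assessments?page=${currentPage}&size=2000`,
    );
    // the real implementation validates this response with decodeCodec
    const { page, content } = JSON.parse(body);
    all.push(...(content ?? []));
    if (currentPage === 0) {
      totalPages = page?.totalPages ?? 0;
    }
    currentPage += 1;
  }
  return all;
}
```

Usage would look like `await listAssessments(createOneTrustClient('customer.my.onetrust.com', token))`; the CLI then enriches each assessment in batches of 5 before syncing to disk or Transcend, as shown in the embedded source.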
@@ -1,6 +0,0 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkIBTP5OXEcjs = require('./chunk-IBTP5OXE.cjs');var _chunkTD7ADMVOcjs = require('./chunk-TD7ADMVO.cjs');var _chunkSW5CIF5Dcjs = require('./chunk-SW5CIF5D.cjs');var _chunkAMO6I4AOcjs = require('./chunk-AMO6I4AO.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');require('./chunk-QEM6S2W7.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkUPUHHIXZcjs = require('./chunk-UPUHHIXZ.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunk6IHSPKMPcjs = require('./chunk-6IHSPKMP.cjs');require('./chunk-DBYKJZEA.cjs');require('./chunk-Q7I37FJV.cjs');var _fs = require('fs');var _path = require('path');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function N({transcendUrl:i,auth:c,pageSize:h,publishToPrivacyCenter:y,contents:l,deleteExtraAttributeValues:m=!1,classifyService:f=!1}){let u=_chunkUPUHHIXZcjs.Wd.call(void 0, i,c);try{return!await _chunkUPUHHIXZcjs.Td.call(void 0, l,u,{pageSize:h,publishToPrivacyCenter:y,classifyService:f,deleteExtraAttributeValues:m})}catch(o){return _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An unexpected error occurred syncing the schema: ${o.message}`)),!1}}async function U({file:i="./transcend.yml",transcendUrl:c,auth:h,variables:y,pageSize:l,publishToPrivacyCenter:m,classifyService:f,deleteExtraAttributeValues:u}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);let o=await _chunkAMO6I4AOcjs.b.call(void 0, h),E=_chunkTD7ADMVOcjs.a.call(void 0, y),p;if(Array.isArray(o)&&_fs.lstatSync.call(void 0, i).isDirectory()?p=_chunkAMO6I4AOcjs.c.call(void 0, i).map(e=>_path.join.call(void 0, i,e)):p=i.split(","),p.length<1)throw new Error("No file specified!");let s=p.map(e=>{_fs.existsSync.call(void 0, e)?_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Reading file "${e}"...`)):(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`The file path does not exist on disk: ${e}. You can specify the filepath using --file=./examples/transcend.yml`)),this.process.exit(1));try{let r=_chunkSW5CIF5Dcjs.d.call(void 0, e,E);return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully read in "${e}"`)),{content:r,name:e.split("/").pop().replace(".yml","")}}catch(r){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`The shape of your yaml file is invalid with the following errors: ${r.message}`)),this.process.exit(1)}});if(typeof o=="string"){let[e,...r]=s.map(({content:d})=>d),$=_chunkIBTP5OXEcjs.a.call(void 0, e,...r);await N({transcendUrl:c,auth:o,contents:$,publishToPrivacyCenter:m,deleteExtraAttributeValues:u,pageSize:l,classifyService:!!f})||(_chunkZUNVPK23cjs.a.info(_colors2.default.red(`Sync encountered errors. 
View output above for more information, or check out ${_chunk6IHSPKMPcjs.c}`)),this.process.exit(1))}else{if(s.length!==1&&s.length!==o.length)throw new Error(`Expected list of yml files to be equal to the list of API keys.Got ${s.length} YML file${s.length===1?"":"s"} and ${o.length} API key${o.length===1?"":"s"}`);let e=[];await _chunkUPUHHIXZcjs.a.call(void 0, o,async(r,$)=>{let a=`[${$+1}/${o.length}][${r.organizationName}] `;_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`~~~
-
- ${a}Attempting to push configuration...
-
- ~~~`));let d=s.length===1?s[0].content:_optionalChain([s, 'access', _ => _.find, 'call', _2 => _2(z=>z.name===r.organizationName), 'optionalAccess', _3 => _3.content]);if(!d){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`${a}Failed to find transcend.yml file for organization: "${r.organizationName}".`)),e.push(r.organizationName);return}await N({transcendUrl:c,auth:r.apiKey,contents:d,pageSize:l,publishToPrivacyCenter:m,deleteExtraAttributeValues:u,classifyService:f})?_chunkZUNVPK23cjs.a.info(_colors2.default.green(`${a}Successfully pushed configuration!`)):(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`${a}Failed to sync configuration.`)),e.push(r.organizationName))}),e.length>0&&(_chunkZUNVPK23cjs.a.info(_colors2.default.red(`Sync encountered errors for "${e.join(",")}". View output above for more information, or check out ${_chunk6IHSPKMPcjs.c}`)),this.process.exit(1))}_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully synced yaml file to Transcend! View at ${_chunk6IHSPKMPcjs.c}`))}exports.push = U;
- //# sourceMappingURL=impl-VF3LT3IJ.cjs.map
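Note: the removed bundle above is the minified inventory push command; its readable source follows in the next source map. One detail that is hard to see in the minified form is how the `--file` flag is resolved. A simplified sketch, with `resolveFileList` as an illustrative name and `readdirSync` standing in for the package's internal `listFiles` helper:

```ts
import { lstatSync, readdirSync } from 'node:fs';
import { join } from 'node:path';

// When a list of API keys is supplied and --file points at a directory,
// every file inside it is pushed; otherwise the flag is treated as a
// comma-separated list of yml paths.
function resolveFileList(file: string, multipleApiKeys: boolean): string[] {
  if (multipleApiKeys && lstatSync(file).isDirectory()) {
    return readdirSync(file).map((name) => join(file, name));
  }
  return file.split(',');
}

// e.g. resolveFileList('./configs', true)    -> ['configs/org-a.yml', ...]
//      resolveFileList('a.yml,b.yml', false) -> ['a.yml', 'b.yml']
```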
@@ -1 +0,0 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-VF3LT3IJ.cjs","../src/commands/inventory/push/impl.ts"],"names":["syncConfiguration","transcendUrl","auth","pageSize","publishToPrivacyCenter","contents","deleteExtraAttributeValues","classifyService","client","buildTranscendGraphQLClient","syncConfigurationToTranscend","err","logger","colors"],"mappings":"AAAA,quBAAwC,wDAAyC,wDAAyC,wDAAgD,wDAAyC,gCAA6B,gCAA6B,wDAAyD,wDAAyC,wDAAyC,gCAA6B,gCAA6B,wBCI5a,4BACjB,gFAEF,MAmBnB,SAAeA,CAAAA,CAAkB,CAC/B,YAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,QAAA,CAAAC,CAAAA,CACA,sBAAA,CAAAC,CAAAA,CACA,QAAA,CAAAC,CAAAA,CACA,0BAAA,CAAAC,CAAAA,CAA6B,CAAA,CAAA,CAC7B,eAAA,CAAAC,CAAAA,CAAkB,CAAA,CACpB,CAAA,CAeqB,CACnB,IAAMC,CAAAA,CAASC,kCAAAA,CAA4BR,CAAcC,CAAI,CAAA,CAG7D,GAAI,CAWF,MAAO,CAVkB,MAAMQ,kCAAAA,CAC7BL,CACAG,CAAAA,CACA,CACE,QAAA,CAAAL,CAAAA,CACA,sBAAA,CAAAC,CAAAA,CACA,eAAA,CAAAG,CAAAA,CACA,0BAAA,CAAAD,CACF,CACF,CAEF,CAAA,KAAA,CAASK,CAAAA,CAAK,CACZ,OAAAC,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,iDAAA,EAAoDF,CAAAA,CAAI,OAAO,CAAA,CAAA;AAsI7D;AAAgB;AAAA;AA8D1B,GAAA","file":"/home/runner/work/cli/cli/dist/impl-VF3LT3IJ.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\n\nimport { logger } from '../../../logger';\nimport { mapSeries } from '../../../lib/bluebird-replace';\nimport { existsSync, lstatSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { readTranscendYaml } from '../../../lib/readTranscendYaml';\nimport colors from 'colors';\nimport {\n buildTranscendGraphQLClient,\n syncConfigurationToTranscend,\n} from '../../../lib/graphql';\n\nimport { ADMIN_DASH_INTEGRATIONS } from '../../../constants';\nimport { TranscendInput } from '../../../codecs';\nimport { validateTranscendAuth, listFiles } from '../../../lib/api-keys';\nimport { mergeTranscendInputs } from '../../../lib/mergeTranscendInputs';\nimport { parseVariablesFromString } from '../../../lib/helpers/parseVariablesFromString';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n/**\n * Sync configuration to Transcend\n *\n * @param options - Options\n * @returns True if synced successfully, false if error occurs\n */\nasync function syncConfiguration({\n transcendUrl,\n auth,\n pageSize,\n publishToPrivacyCenter,\n contents,\n deleteExtraAttributeValues = false,\n classifyService = false,\n}: {\n /** Transcend YAML */\n contents: TranscendInput;\n /** Transcend URL */\n transcendUrl: string;\n /** API key */\n auth: string;\n /** Page size */\n pageSize: number;\n /** Skip privacy center publish step */\n publishToPrivacyCenter: boolean;\n /** classify data flow service if missing */\n classifyService?: boolean;\n /** Delete attributes when syncing */\n deleteExtraAttributeValues?: boolean;\n}): Promise<boolean> {\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Sync to Transcend\n try {\n const encounteredError = await syncConfigurationToTranscend(\n contents,\n client,\n {\n pageSize,\n publishToPrivacyCenter,\n classifyService,\n deleteExtraAttributeValues,\n },\n );\n return !encounteredError;\n } catch (err) {\n logger.error(\n colors.red(\n `An unexpected error occurred syncing the schema: ${err.message}`,\n ),\n );\n return false;\n }\n}\n\nexport interface PushCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n pageSize: number;\n variables: string;\n publishToPrivacyCenter: boolean;\n classifyService: boolean;\n deleteExtraAttributeValues: boolean;\n}\n\nexport async function push(\n this: 
LocalContext,\n {\n file = './transcend.yml',\n transcendUrl,\n auth,\n variables,\n pageSize,\n publishToPrivacyCenter,\n classifyService,\n deleteExtraAttributeValues,\n }: PushCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Parse out the variables\n const vars = parseVariablesFromString(variables);\n\n // check if we are being passed a list of API keys and a list of files\n let fileList: string[];\n if (Array.isArray(apiKeyOrList) && lstatSync(file).isDirectory()) {\n fileList = listFiles(file).map((filePath) => join(file, filePath));\n } else {\n fileList = file.split(',');\n }\n\n // Ensure at least one file is parsed\n if (fileList.length < 1) {\n throw new Error('No file specified!');\n }\n\n // eslint-disable-next-line array-callback-return,consistent-return\n const transcendInputs = fileList.map((filePath) => {\n // Ensure yaml file exists on disk\n if (!existsSync(filePath)) {\n logger.error(\n colors.red(\n `The file path does not exist on disk: ${filePath}. You can specify the filepath using --file=./examples/transcend.yml`,\n ),\n );\n this.process.exit(1);\n } else {\n logger.info(colors.magenta(`Reading file \"${filePath}\"...`));\n }\n\n try {\n // Read in the yaml file and validate it's shape\n const newContents = readTranscendYaml(filePath, vars);\n logger.info(colors.green(`Successfully read in \"${filePath}\"`));\n return {\n content: newContents,\n name: filePath.split('/').pop()!.replace('.yml', ''),\n };\n } catch (err) {\n logger.error(\n colors.red(\n `The shape of your yaml file is invalid with the following errors: ${err.message}`,\n ),\n );\n this.process.exit(1);\n }\n });\n\n // process a single API key\n if (typeof apiKeyOrList === 'string') {\n // if passed multiple inputs, merge them together\n const [base, ...rest] = transcendInputs.map(({ content }) => content);\n const contents = mergeTranscendInputs(base, ...rest);\n\n // sync the configuration\n const success = await syncConfiguration({\n transcendUrl,\n auth: apiKeyOrList,\n contents,\n publishToPrivacyCenter,\n deleteExtraAttributeValues,\n pageSize,\n classifyService: !!classifyService,\n });\n\n // exist with error code\n if (!success) {\n logger.info(\n colors.red(\n `Sync encountered errors. View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n } else {\n // if passed multiple inputs, expect them to be one per instance\n if (\n transcendInputs.length !== 1 &&\n transcendInputs.length !== apiKeyOrList.length\n ) {\n throw new Error(\n 'Expected list of yml files to be equal to the list of API keys.' +\n `Got ${transcendInputs.length} YML file${\n transcendInputs.length === 1 ? '' : 's'\n } and ${apiKeyOrList.length} API key${\n apiKeyOrList.length === 1 ? '' : 's'\n }`,\n );\n }\n\n const encounteredErrors: string[] = [];\n await mapSeries(apiKeyOrList, async (apiKey, ind) => {\n const prefix = `[${ind + 1}/${apiKeyOrList.length}][${\n apiKey.organizationName\n }] `;\n logger.info(\n colors.magenta(\n `~~~\\n\\n${prefix}Attempting to push configuration...\\n\\n~~~`,\n ),\n );\n\n // use the merged contents if 1 yml passed, else use the contents that map to that organization\n const useContents =\n transcendInputs.length === 1\n ? 
transcendInputs[0].content\n : transcendInputs.find(\n (input) => input.name === apiKey.organizationName,\n )?.content;\n\n // Throw error if cannot find a yml file matching that organization name\n if (!useContents) {\n logger.error(\n colors.red(\n `${prefix}Failed to find transcend.yml file for organization: \"${apiKey.organizationName}\".`,\n ),\n );\n encounteredErrors.push(apiKey.organizationName);\n return;\n }\n\n const success = await syncConfiguration({\n transcendUrl,\n auth: apiKey.apiKey,\n contents: useContents,\n pageSize,\n publishToPrivacyCenter,\n deleteExtraAttributeValues,\n classifyService,\n });\n\n if (success) {\n logger.info(\n colors.green(`${prefix}Successfully pushed configuration!`),\n );\n } else {\n logger.error(colors.red(`${prefix}Failed to sync configuration.`));\n encounteredErrors.push(apiKey.organizationName);\n }\n });\n\n if (encounteredErrors.length > 0) {\n logger.info(\n colors.red(\n `Sync encountered errors for \"${encounteredErrors.join(\n ',',\n )}\". View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced yaml file to Transcend! View at ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n}\n"]}
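Note: the inlined push source above fans a single transcend.yml out to every API key, but with multiple yml files it matches each API key to the file named after its organization. A minimal standalone sketch of that selection logic (the interfaces are illustrative stand-ins for the package's internal codecs):

```ts
interface ApiKey {
  /** Organization the key belongs to */
  organizationName: string;
  /** The API key value */
  apiKey: string;
}

interface ParsedYml {
  /** File name without the .yml extension */
  name: string;
  /** Parsed contents of the yml file */
  content: unknown;
}

// With exactly one yml file, its merged contents apply to every key;
// otherwise pick the file whose name matches the key's organization.
function pickContents(inputs: ParsedYml[], key: ApiKey): unknown | undefined {
  return inputs.length === 1
    ? inputs[0].content
    : inputs.find((input) => input.name === key.organizationName)?.content;
}
```

When no match is found, the command logs the organization and records it as a failed sync rather than throwing, as the source above shows.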
@@ -1,2 +0,0 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunkPSRKLYGKcjs = require('./chunk-PSRKLYGK.cjs');require('./chunk-LR3CPNDM.cjs');require('./chunk-MBB6OD4E.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');require('./chunk-UPUHHIXZ.cjs');require('./chunk-ZUNVPK23.cjs');require('./chunk-6IHSPKMP.cjs');require('./chunk-DBYKJZEA.cjs');require('./chunk-Q7I37FJV.cjs');async function d({file:i,transcendUrl:o,auth:r,sombraAuth:n,dataSiloId:s}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit),await _chunkPSRKLYGKcjs.e.call(void 0, {file:i,transcendUrl:o,auth:r,sombraAuth:n,dataSiloId:s})}exports.markIdentifiersCompleted = d;
- //# sourceMappingURL=impl-YUYWOPRG.cjs.map
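Note: the removed impl-YUYWOPRG.cjs above is a thin wrapper for the markIdentifiersCompleted command. Deminified it has roughly the shape below; `doneInputValidation` matches the pattern used by the other commands in this diff, while `markIdentifiersCompletedHelper` is a hypothetical stand-in for the minified chunk export, whose real name is not visible here.

```ts
// Reconstructed shape only; both helpers are declared as stubs because the
// minified chunk names do not reveal the real exports.
declare function doneInputValidation(exit: (code?: number) => never): void;
declare function markIdentifiersCompletedHelper(options: {
  file: string;
  transcendUrl: string;
  auth: string;
  sombraAuth?: string;
  dataSiloId?: string;
}): Promise<void>;

export async function markIdentifiersCompleted(
  this: { process: { exit: (code?: number) => never } },
  flags: {
    file: string;
    transcendUrl: string;
    auth: string;
    sombraAuth?: string;
    dataSiloId?: string;
  },
): Promise<void> {
  // signal that CLI flag validation has finished, then delegate to the helper
  doneInputValidation(this.process.exit);
  await markIdentifiersCompletedHelper(flags);
}
```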