@transcend-io/cli 8.37.1 → 8.37.2

This diff shows the published contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
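A diff like this can be reproduced locally with the npm CLI (npm 7 and later ship an npm diff subcommand). A minimal sketch in TypeScript, assuming only that npm is on PATH; the script name and buffer size are illustrative:

// reproduce-diff.ts — hypothetical helper, not part of @transcend-io/cli
import { execSync } from 'node:child_process';

const output = execSync(
  'npm diff --diff=@transcend-io/cli@8.37.1 --diff=@transcend-io/cli@8.37.2',
  { encoding: 'utf8', maxBuffer: 64 * 1024 * 1024 }, // the minified bundles are large
);
console.log(output);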
Files changed (119)
  1. package/dist/{api-keys-Bb2BbZQe.cjs → api-keys-Bvt2HbSv.cjs} +2 -2
  2. package/dist/{api-keys-Bb2BbZQe.cjs.map → api-keys-Bvt2HbSv.cjs.map} +1 -1
  3. package/dist/{app-C9jD-f87.cjs → app-CdWFyBYu.cjs} +18 -18
  4. package/dist/{app-C9jD-f87.cjs.map → app-CdWFyBYu.cjs.map} +1 -1
  5. package/dist/bin/bash-complete.cjs +1 -1
  6. package/dist/bin/cli.cjs +1 -1
  7. package/dist/bin/deprecated-command.cjs +1 -1
  8. package/dist/{code-scanning-4d0zlFxk.cjs → code-scanning-BZzwKEfY.cjs} +2 -2
  9. package/dist/{code-scanning-4d0zlFxk.cjs.map → code-scanning-BZzwKEfY.cjs.map} +1 -1
  10. package/dist/{command-XJ7XPQ04.cjs → command-DNcjQs8y.cjs} +2 -2
  11. package/dist/{command-XJ7XPQ04.cjs.map → command-DNcjQs8y.cjs.map} +1 -1
  12. package/dist/{consent-manager-CCyvzvY5.cjs → consent-manager-oip5m3XC.cjs} +2 -2
  13. package/dist/{consent-manager-CCyvzvY5.cjs.map → consent-manager-oip5m3XC.cjs.map} +1 -1
  14. package/dist/{constants-wkuhlP8d.cjs → constants-K6pQQtc7.cjs} +2 -2
  15. package/dist/{constants-wkuhlP8d.cjs.map → constants-K6pQQtc7.cjs.map} +1 -1
  16. package/dist/{cron-DfEGA7Rf.cjs → cron-lijiEqFA.cjs} +2 -2
  17. package/dist/{cron-DfEGA7Rf.cjs.map → cron-lijiEqFA.cjs.map} +1 -1
  18. package/dist/{data-inventory-C1eqZk1M.cjs → data-inventory-BKAQGjFN.cjs} +2 -2
  19. package/dist/{data-inventory-C1eqZk1M.cjs.map → data-inventory-BKAQGjFN.cjs.map} +1 -1
  20. package/dist/{dataFlowsToDataSilos-DXlFFHMV.cjs → dataFlowsToDataSilos-CnvG2jqy.cjs} +2 -2
  21. package/dist/{dataFlowsToDataSilos-DXlFFHMV.cjs.map → dataFlowsToDataSilos-CnvG2jqy.cjs.map} +1 -1
  22. package/dist/{impl-BOEoFzcB.cjs → impl-1-1sg4WF.cjs} +2 -2
  23. package/dist/{impl-BOEoFzcB.cjs.map → impl-1-1sg4WF.cjs.map} +1 -1
  24. package/dist/{impl-DwWoAbT_.cjs → impl-57HOh2c3.cjs} +2 -2
  25. package/dist/{impl-DwWoAbT_.cjs.map → impl-57HOh2c3.cjs.map} +1 -1
  26. package/dist/{impl-B_2CdctV.cjs → impl-58WnFNmn.cjs} +2 -2
  27. package/dist/{impl-B_2CdctV.cjs.map → impl-58WnFNmn.cjs.map} +1 -1
  28. package/dist/{impl-BOEjB3fo.cjs → impl-B4OVz7FC.cjs} +2 -2
  29. package/dist/{impl-BOEjB3fo.cjs.map → impl-B4OVz7FC.cjs.map} +1 -1
  30. package/dist/{impl-BfC5CRRX.cjs → impl-BFRrE04X.cjs} +2 -2
  31. package/dist/{impl-BfC5CRRX.cjs.map → impl-BFRrE04X.cjs.map} +1 -1
  32. package/dist/{impl-DqfyWyoV.cjs → impl-BSS_avMv.cjs} +2 -2
  33. package/dist/{impl-DqfyWyoV.cjs.map → impl-BSS_avMv.cjs.map} +1 -1
  34. package/dist/{impl-DNRsFfbU.cjs → impl-BVmw0mE4.cjs} +2 -2
  35. package/dist/{impl-DNRsFfbU.cjs.map → impl-BVmw0mE4.cjs.map} +1 -1
  36. package/dist/{impl-C6JjApDI.cjs → impl-Bf_hLViY.cjs} +2 -2
  37. package/dist/{impl-C6JjApDI.cjs.map → impl-Bf_hLViY.cjs.map} +1 -1
  38. package/dist/{impl-DxhyqjcY.cjs → impl-BkEg-Nm6.cjs} +2 -2
  39. package/dist/{impl-DxhyqjcY.cjs.map → impl-BkEg-Nm6.cjs.map} +1 -1
  40. package/dist/{impl-7LAuV25D.cjs → impl-Bmln6D88.cjs} +2 -2
  41. package/dist/{impl-7LAuV25D.cjs.map → impl-Bmln6D88.cjs.map} +1 -1
  42. package/dist/{impl-DhbV3bBZ.cjs → impl-BqIqzp40.cjs} +2 -2
  43. package/dist/{impl-DhbV3bBZ.cjs.map → impl-BqIqzp40.cjs.map} +1 -1
  44. package/dist/{impl-u8o3S8w2.cjs → impl-BszlCtcR.cjs} +2 -2
  45. package/dist/{impl-u8o3S8w2.cjs.map → impl-BszlCtcR.cjs.map} +1 -1
  46. package/dist/{impl-DetfC7CT.cjs → impl-BtuKKdl3.cjs} +2 -2
  47. package/dist/{impl-DetfC7CT.cjs.map → impl-BtuKKdl3.cjs.map} +1 -1
  48. package/dist/{impl-_QrpPIPw.cjs → impl-C2e4xVvX.cjs} +2 -2
  49. package/dist/{impl-_QrpPIPw.cjs.map → impl-C2e4xVvX.cjs.map} +1 -1
  50. package/dist/{impl-CyzGdwB1.cjs → impl-C65nk0G8.cjs} +2 -2
  51. package/dist/{impl-CyzGdwB1.cjs.map → impl-C65nk0G8.cjs.map} +1 -1
  52. package/dist/{impl-DjP2MJNK.cjs → impl-CCdxbRmg.cjs} +2 -2
  53. package/dist/{impl-DjP2MJNK.cjs.map → impl-CCdxbRmg.cjs.map} +1 -1
  54. package/dist/{impl-DmXYpp-M.cjs → impl-CMwmo2vR.cjs} +2 -2
  55. package/dist/{impl-DmXYpp-M.cjs.map → impl-CMwmo2vR.cjs.map} +1 -1
  56. package/dist/{impl-CR-wyJSg.cjs → impl-Cb64HwGx.cjs} +2 -2
  57. package/dist/{impl-CR-wyJSg.cjs.map → impl-Cb64HwGx.cjs.map} +1 -1
  58. package/dist/{impl-CV3axMeT.cjs → impl-CdfA8kxo.cjs} +2 -2
  59. package/dist/{impl-CV3axMeT.cjs.map → impl-CdfA8kxo.cjs.map} +1 -1
  60. package/dist/{impl-C-aKX3zu.cjs → impl-CkfOZzpI.cjs} +2 -2
  61. package/dist/{impl-C-aKX3zu.cjs.map → impl-CkfOZzpI.cjs.map} +1 -1
  62. package/dist/{impl-CKYwKeLz.cjs → impl-ClujxTb8.cjs} +2 -2
  63. package/dist/{impl-CKYwKeLz.cjs.map → impl-ClujxTb8.cjs.map} +1 -1
  64. package/dist/{impl-B04CctrY.cjs → impl-D-IWtHQi.cjs} +2 -2
  65. package/dist/{impl-B04CctrY.cjs.map → impl-D-IWtHQi.cjs.map} +1 -1
  66. package/dist/{impl-BGoAnVJu.cjs → impl-D9-ZQmJB.cjs} +2 -2
  67. package/dist/{impl-BGoAnVJu.cjs.map → impl-D9-ZQmJB.cjs.map} +1 -1
  68. package/dist/{impl-CmEsmnYZ.cjs → impl-DGel0ZLe.cjs} +2 -2
  69. package/dist/{impl-CmEsmnYZ.cjs.map → impl-DGel0ZLe.cjs.map} +1 -1
  70. package/dist/{impl-DHuguAlW.cjs → impl-DL2j8g1C.cjs} +2 -2
  71. package/dist/{impl-DHuguAlW.cjs.map → impl-DL2j8g1C.cjs.map} +1 -1
  72. package/dist/{impl-LgUGDTQK.cjs → impl-DSNgFKP_.cjs} +2 -2
  73. package/dist/{impl-LgUGDTQK.cjs.map → impl-DSNgFKP_.cjs.map} +1 -1
  74. package/dist/{impl-Dzq0t6mX.cjs → impl-DU85U1jO.cjs} +2 -2
  75. package/dist/{impl-Dzq0t6mX.cjs.map → impl-DU85U1jO.cjs.map} +1 -1
  76. package/dist/{impl-C4q9xHFr.cjs → impl-DV5f54rm.cjs} +2 -2
  77. package/dist/{impl-C4q9xHFr.cjs.map → impl-DV5f54rm.cjs.map} +1 -1
  78. package/dist/{impl-CLcnbVfj.cjs → impl-DXKJH0AZ.cjs} +2 -2
  79. package/dist/{impl-CLcnbVfj.cjs.map → impl-DXKJH0AZ.cjs.map} +1 -1
  80. package/dist/{impl-6TmoWv0o.cjs → impl-DbxzDk8h.cjs} +2 -2
  81. package/dist/{impl-6TmoWv0o.cjs.map → impl-DbxzDk8h.cjs.map} +1 -1
  82. package/dist/{impl-XyWPUpvw.cjs → impl-DhnCAbU-.cjs} +2 -2
  83. package/dist/{impl-XyWPUpvw.cjs.map → impl-DhnCAbU-.cjs.map} +1 -1
  84. package/dist/{impl-kxwq3OMk.cjs → impl-Dj2fTDNO.cjs} +2 -2
  85. package/dist/{impl-kxwq3OMk.cjs.map → impl-Dj2fTDNO.cjs.map} +1 -1
  86. package/dist/{impl-Cp7-Tctr.cjs → impl-KAorCmlT.cjs} +2 -2
  87. package/dist/{impl-Cp7-Tctr.cjs.map → impl-KAorCmlT.cjs.map} +1 -1
  88. package/dist/{impl-CnRqR4kw.cjs → impl-LMp29vxd.cjs} +2 -2
  89. package/dist/{impl-CnRqR4kw.cjs.map → impl-LMp29vxd.cjs.map} +1 -1
  90. package/dist/{impl-DC_YquN8.cjs → impl-MrsSr72p.cjs} +2 -2
  91. package/dist/{impl-DC_YquN8.cjs.map → impl-MrsSr72p.cjs.map} +1 -1
  92. package/dist/{impl-CgKn47V9.cjs → impl-SZp3iTUp.cjs} +2 -2
  93. package/dist/{impl-CgKn47V9.cjs.map → impl-SZp3iTUp.cjs.map} +1 -1
  94. package/dist/{impl-DrJj-l3s.cjs → impl-W6jE_UV0.cjs} +2 -2
  95. package/dist/{impl-DrJj-l3s.cjs.map → impl-W6jE_UV0.cjs.map} +1 -1
  96. package/dist/{impl-Dp3-sA6b.cjs → impl-XwC7A99P.cjs} +2 -2
  97. package/dist/{impl-Dp3-sA6b.cjs.map → impl-XwC7A99P.cjs.map} +1 -1
  98. package/dist/{impl-DQ8rr7Fv.cjs → impl-ebVxRYAc.cjs} +2 -2
  99. package/dist/{impl-DQ8rr7Fv.cjs.map → impl-ebVxRYAc.cjs.map} +1 -1
  100. package/dist/{impl-CsKfLxov.cjs → impl-k61p_VQY.cjs} +2 -2
  101. package/dist/{impl-CsKfLxov.cjs.map → impl-k61p_VQY.cjs.map} +1 -1
  102. package/dist/{impl-CqadSQOh.cjs → impl-kMebV10f.cjs} +2 -2
  103. package/dist/{impl-CqadSQOh.cjs.map → impl-kMebV10f.cjs.map} +1 -1
  104. package/dist/{impl-Dvoj_snk.cjs → impl-oYFKp06U.cjs} +2 -2
  105. package/dist/{impl-Dvoj_snk.cjs.map → impl-oYFKp06U.cjs.map} +1 -1
  106. package/dist/index.cjs +1 -1
  107. package/dist/index.d.cts +9 -9
  108. package/dist/{manual-enrichment-CzTpv-mM.cjs → manual-enrichment-C6h9gjY1.cjs} +2 -2
  109. package/dist/{manual-enrichment-CzTpv-mM.cjs.map → manual-enrichment-C6h9gjY1.cjs.map} +1 -1
  110. package/dist/{pooling-DA_LwUEp.cjs → pooling-DLEGcLtt.cjs} +5 -5
  111. package/dist/pooling-DLEGcLtt.cjs.map +1 -0
  112. package/dist/{preference-management-aOhuZCuE.cjs → preference-management-B36PQuMK.cjs} +2 -2
  113. package/dist/{preference-management-aOhuZCuE.cjs.map → preference-management-B36PQuMK.cjs.map} +1 -1
  114. package/dist/{syncConfigurationToTranscend-DuTZKIG8.cjs → syncConfigurationToTranscend-DKliAJhK.cjs} +2 -2
  115. package/dist/{syncConfigurationToTranscend-DuTZKIG8.cjs.map → syncConfigurationToTranscend-DKliAJhK.cjs.map} +1 -1
  116. package/dist/{uploadConsents-C9Pv8Awr.cjs → uploadConsents-MtgCk8B0.cjs} +2 -2
  117. package/dist/{uploadConsents-C9Pv8Awr.cjs.map → uploadConsents-MtgCk8B0.cjs.map} +1 -1
  118. package/package.json +1 -1
  119. package/dist/pooling-DA_LwUEp.cjs.map +0 -1
package/dist/cron-DfEGA7Rf.cjs → package/dist/cron-lijiEqFA.cjs
@@ -1,2 +1,2 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./constants-wkuhlP8d.cjs`),n=require(`./syncConfigurationToTranscend-DuTZKIG8.cjs`),r=require(`./logger-DQwEYtSS.cjs`);let i=require(`@transcend-io/privacy-types`),a=require(`@transcend-io/type-utils`),o=require(`colors`);o=e.t(o);let s=require(`io-ts`);s=e.t(s);let c=require(`cli-progress`);c=e.t(c);const l=s.type({identifier:s.string,type:s.string,coreIdentifier:s.string,dataSiloId:s.string,requestId:s.string,nonce:s.string,requestCreatedAt:s.string,daysUntilOverdue:s.number,attributes:s.array(s.type({key:s.string,values:s.array(s.string)}))});async function u(e,{dataSiloId:t,limit:n=100,offset:r=0,requestType:i}){try{let o=await e.get(`v1/data-silo/${t}/pending-requests/${i}`,{searchParams:{offset:r,limit:n}}).json(),{items:c}=(0,a.decodeCodec)(s.type({items:s.array(l)}),o);return c}catch(e){throw Error(`Received an error from server: ${e?.response?.body||e?.message}`)}}const d=s.type({nonce:s.string,identifier:s.string});async function f(e,{nonce:t,identifier:n}){try{return await e.put(`v1/data-silo`,{headers:{"x-transcend-nonce":t},json:{profiles:[{profileId:n}]}}),!0}catch(e){if(e.response?.statusCode===409)return!1;throw Error(`Received an error from server: ${e?.response?.body||e?.message}`)}}async function p({file:e,dataSiloId:i,auth:a,sombraAuth:s,concurrency:l=100,transcendUrl:u=t.a,sleepSeconds:p=10}){let m=await n.ei(u,a,s);r.t.info(o.default.magenta(`Reading "${e}" from disk`));let h=n.oi(e,d);r.t.info(o.default.magenta(`Notifying Transcend for data silo "${i}" marking "${h.length}" identifiers as completed.`));let g=new Date().getTime(),_=new c.default.SingleBar({},c.default.Presets.shades_classic),v=0,y=0,b=0;_.start(h.length,0);let x=n.Ns(h,l),S=x.length;await n.Es(x,async(e,t)=>{r.t.info(o.default.blue(`Processing chunk ${t+1}/${S} (${n.Ns.length} items)`)),await n.Ts(e,async e=>{try{await f(m,e)?v+=1:y+=1}catch(t){r.t.error(o.default.red(`Error notifying Transcend for identifier "${e.identifier}" - ${t?.message}`)),b+=1}_.update(v+y)}),p>0&&t<S-1&&(r.t.info(o.default.yellow(`Sleeping for ${p}s before next chunk...`)),await new Promise(e=>{setTimeout(e,p*1e3)}))}),_.stop();let C=new Date().getTime()-g;if(r.t.info(o.default.green(`Successfully notified Transcend for ${v} identifiers in "${C/1e3}" seconds!`)),y&&r.t.info(o.default.magenta(`There were ${y} identifiers that were not in a state to be updated.They likely have already been resolved.`)),b)throw r.t.error(o.default.red(`There were ${b} identifiers that failed to be updated. 
Please review the logs for more information.`)),Error(`Failed to update all identifiers`);return h.length}async function m({requestIds:e,dataSiloId:a,auth:s,concurrency:l=100,status:u=i.RequestDataSiloStatus.Resolved,transcendUrl:d=t.a}){let f=n.ti(d,s),p=new Date().getTime(),m=new c.default.SingleBar({},c.default.Presets.shades_classic);r.t.info(o.default.magenta(`Notifying Transcend for data silo "${a}" marking "${e.length}" requests as completed.`));let h=0;m.start(e.length,0),await n.Ts(e,async e=>{let t=await n.Gn(f,{requestId:e,dataSiloId:a});try{await n.i(f,n.to,{requestDataSiloId:t.id,status:u})}catch(e){if(!e.message.includes(`Client error: Request must be active:`)&&!e.message.includes(`Failed to find RequestDataSilo`))throw e}h+=1,m.update(h)},{concurrency:l}),m.stop();let g=new Date().getTime()-p;return r.t.info(o.default.green(`Successfully notified Transcend in "${g/1e3}" seconds!`)),e.length}async function h({dataSiloId:e,auth:i,sombraAuth:a,actions:s,apiPageSize:l=100,savePageSize:d=1e3,onSave:f,transcendUrl:p=t.a,skipRequestCount:m=!1}){if(d%l!==0)throw Error(`savePageSize must be a multiple of apiPageSize. savePageSize: ${d}, apiPageSize: ${l}`);let h=await n.ei(p,i,a),g=n.ti(p,i),_=0;m||(_=await n.Wn(g,{dataSiloId:e})),r.t.info(o.default.magenta(`Pulling ${m?`all`:_} outstanding request identifiers for data silo: "${e}" for requests of types "${s.join(`", "`)}"`));let v=new Date().getTime(),y=new c.default.SingleBar({},c.default.Presets.shades_classic),b=new Set,x=[],S=[];m||y.start(_,0),await n.Es(s,async t=>{let n=0,i=!0;for(;i;){let a=await u(h,{dataSiloId:e,limit:l,offset:n,requestType:t}),s=a.map(e=>(b.add(e.requestId),{...e,action:t})),c=s.map(({attributes:e,...t})=>({...t,...e.reduce((e,t)=>Object.assign(e,{[t.key]:t.values.join(`,`)}),{})}));x.push(...s),S.push(...c),S.length>=d&&(await f(S),S=[]),i=a.length===l,n+=l,m?r.t.info(o.default.magenta(`Pulled ${a.length} outstanding identifiers for ${b.size} requests`)):y.update(b.size)}}),S.length>0&&await f(S),m||y.stop();let C=new Date().getTime()-v;return r.t.info(o.default.green(`Successfully pulled ${x.length} outstanding identifiers from ${b.size} requests in "${C/1e3}" seconds!`)),{identifiers:x}}Object.defineProperty(exports,`a`,{enumerable:!0,get:function(){return f}}),Object.defineProperty(exports,`i`,{enumerable:!0,get:function(){return d}}),Object.defineProperty(exports,`n`,{enumerable:!0,get:function(){return m}}),Object.defineProperty(exports,`o`,{enumerable:!0,get:function(){return l}}),Object.defineProperty(exports,`r`,{enumerable:!0,get:function(){return p}}),Object.defineProperty(exports,`s`,{enumerable:!0,get:function(){return u}}),Object.defineProperty(exports,`t`,{enumerable:!0,get:function(){return h}});
- //# sourceMappingURL=cron-DfEGA7Rf.cjs.map
+ const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./constants-K6pQQtc7.cjs`),n=require(`./syncConfigurationToTranscend-DKliAJhK.cjs`),r=require(`./logger-DQwEYtSS.cjs`);let i=require(`@transcend-io/privacy-types`),a=require(`@transcend-io/type-utils`),o=require(`colors`);o=e.t(o);let s=require(`io-ts`);s=e.t(s);let c=require(`cli-progress`);c=e.t(c);const l=s.type({identifier:s.string,type:s.string,coreIdentifier:s.string,dataSiloId:s.string,requestId:s.string,nonce:s.string,requestCreatedAt:s.string,daysUntilOverdue:s.number,attributes:s.array(s.type({key:s.string,values:s.array(s.string)}))});async function u(e,{dataSiloId:t,limit:n=100,offset:r=0,requestType:i}){try{let o=await e.get(`v1/data-silo/${t}/pending-requests/${i}`,{searchParams:{offset:r,limit:n}}).json(),{items:c}=(0,a.decodeCodec)(s.type({items:s.array(l)}),o);return c}catch(e){throw Error(`Received an error from server: ${e?.response?.body||e?.message}`)}}const d=s.type({nonce:s.string,identifier:s.string});async function f(e,{nonce:t,identifier:n}){try{return await e.put(`v1/data-silo`,{headers:{"x-transcend-nonce":t},json:{profiles:[{profileId:n}]}}),!0}catch(e){if(e.response?.statusCode===409)return!1;throw Error(`Received an error from server: ${e?.response?.body||e?.message}`)}}async function p({file:e,dataSiloId:i,auth:a,sombraAuth:s,concurrency:l=100,transcendUrl:u=t.a,sleepSeconds:p=10}){let m=await n.ei(u,a,s);r.t.info(o.default.magenta(`Reading "${e}" from disk`));let h=n.oi(e,d);r.t.info(o.default.magenta(`Notifying Transcend for data silo "${i}" marking "${h.length}" identifiers as completed.`));let g=new Date().getTime(),_=new c.default.SingleBar({},c.default.Presets.shades_classic),v=0,y=0,b=0;_.start(h.length,0);let x=n.Ns(h,l),S=x.length;await n.Es(x,async(e,t)=>{r.t.info(o.default.blue(`Processing chunk ${t+1}/${S} (${n.Ns.length} items)`)),await n.Ts(e,async e=>{try{await f(m,e)?v+=1:y+=1}catch(t){r.t.error(o.default.red(`Error notifying Transcend for identifier "${e.identifier}" - ${t?.message}`)),b+=1}_.update(v+y)}),p>0&&t<S-1&&(r.t.info(o.default.yellow(`Sleeping for ${p}s before next chunk...`)),await new Promise(e=>{setTimeout(e,p*1e3)}))}),_.stop();let C=new Date().getTime()-g;if(r.t.info(o.default.green(`Successfully notified Transcend for ${v} identifiers in "${C/1e3}" seconds!`)),y&&r.t.info(o.default.magenta(`There were ${y} identifiers that were not in a state to be updated.They likely have already been resolved.`)),b)throw r.t.error(o.default.red(`There were ${b} identifiers that failed to be updated. 
Please review the logs for more information.`)),Error(`Failed to update all identifiers`);return h.length}async function m({requestIds:e,dataSiloId:a,auth:s,concurrency:l=100,status:u=i.RequestDataSiloStatus.Resolved,transcendUrl:d=t.a}){let f=n.ti(d,s),p=new Date().getTime(),m=new c.default.SingleBar({},c.default.Presets.shades_classic);r.t.info(o.default.magenta(`Notifying Transcend for data silo "${a}" marking "${e.length}" requests as completed.`));let h=0;m.start(e.length,0),await n.Ts(e,async e=>{let t=await n.Gn(f,{requestId:e,dataSiloId:a});try{await n.i(f,n.to,{requestDataSiloId:t.id,status:u})}catch(e){if(!e.message.includes(`Client error: Request must be active:`)&&!e.message.includes(`Failed to find RequestDataSilo`))throw e}h+=1,m.update(h)},{concurrency:l}),m.stop();let g=new Date().getTime()-p;return r.t.info(o.default.green(`Successfully notified Transcend in "${g/1e3}" seconds!`)),e.length}async function h({dataSiloId:e,auth:i,sombraAuth:a,actions:s,apiPageSize:l=100,savePageSize:d=1e3,onSave:f,transcendUrl:p=t.a,skipRequestCount:m=!1}){if(d%l!==0)throw Error(`savePageSize must be a multiple of apiPageSize. savePageSize: ${d}, apiPageSize: ${l}`);let h=await n.ei(p,i,a),g=n.ti(p,i),_=0;m||(_=await n.Wn(g,{dataSiloId:e})),r.t.info(o.default.magenta(`Pulling ${m?`all`:_} outstanding request identifiers for data silo: "${e}" for requests of types "${s.join(`", "`)}"`));let v=new Date().getTime(),y=new c.default.SingleBar({},c.default.Presets.shades_classic),b=new Set,x=[],S=[];m||y.start(_,0),await n.Es(s,async t=>{let n=0,i=!0;for(;i;){let a=await u(h,{dataSiloId:e,limit:l,offset:n,requestType:t}),s=a.map(e=>(b.add(e.requestId),{...e,action:t})),c=s.map(({attributes:e,...t})=>({...t,...e.reduce((e,t)=>Object.assign(e,{[t.key]:t.values.join(`,`)}),{})}));x.push(...s),S.push(...c),S.length>=d&&(await f(S),S=[]),i=a.length===l,n+=l,m?r.t.info(o.default.magenta(`Pulled ${a.length} outstanding identifiers for ${b.size} requests`)):y.update(b.size)}}),S.length>0&&await f(S),m||y.stop();let C=new Date().getTime()-v;return r.t.info(o.default.green(`Successfully pulled ${x.length} outstanding identifiers from ${b.size} requests in "${C/1e3}" seconds!`)),{identifiers:x}}Object.defineProperty(exports,`a`,{enumerable:!0,get:function(){return f}}),Object.defineProperty(exports,`i`,{enumerable:!0,get:function(){return d}}),Object.defineProperty(exports,`n`,{enumerable:!0,get:function(){return m}}),Object.defineProperty(exports,`o`,{enumerable:!0,get:function(){return l}}),Object.defineProperty(exports,`r`,{enumerable:!0,get:function(){return p}}),Object.defineProperty(exports,`s`,{enumerable:!0,get:function(){return u}}),Object.defineProperty(exports,`t`,{enumerable:!0,get:function(){return h}});
+ //# sourceMappingURL=cron-lijiEqFA.cjs.map
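The hunk above changes nothing but two hashed require paths (the constants and syncConfigurationToTranscend chunks) and the sourceMappingURL comment. That is why almost every chunk in this release is renamed: each chunk requires its siblings by content-hashed filename, so renaming one chunk rewrites the require paths of its dependents, which changes their bytes and therefore their hashes as well. A minimal sketch of that cascade, assuming a generic content hash (the hashName helper below is hypothetical, not the bundler's actual naming algorithm):

// chunk-naming.ts — illustrative sketch of how content-hash renames cascade
import { createHash } from 'node:crypto';

/** Build a hypothetical content-hashed chunk name, e.g. "cron-lijiEqFA.cjs". */
function hashName(base: string, contents: string): string {
  const hash = createHash('sha256').update(contents).digest('base64url').slice(0, 8);
  return `${base}-${hash}.cjs`;
}

// Changing the constants chunk renames it...
const constantsOld = hashName('constants', 'module.exports.a = "v1";');
const constantsNew = hashName('constants', 'module.exports.a = "v2";');
// ...which rewrites the require path inside cron, renaming cron too.
const cronOld = hashName('cron', `require('./${constantsOld}');`);
const cronNew = hashName('cron', `require('./${constantsNew}');`);
console.log(cronOld !== cronNew); // true — the rename cascades to dependents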
package/dist/cron-DfEGA7Rf.cjs.map → package/dist/cron-lijiEqFA.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"cron-DfEGA7Rf.cjs","names":["t","t","DEFAULT_TRANSCEND_API","createSombraGotInstance","readCsv","cliProgress","chunk","map","mapSeries","RequestDataSiloStatus","DEFAULT_TRANSCEND_API","buildTranscendGraphQLClient","cliProgress","map","fetchRequestDataSilo","makeGraphQLRequest","CHANGE_REQUEST_DATA_SILO_STATUS","DEFAULT_TRANSCEND_API","createSombraGotInstance","buildTranscendGraphQLClient","fetchRequestDataSiloActiveCount","cliProgress","mapSeries"],"sources":["../src/lib/cron/pullCronPageOfIdentifiers.ts","../src/lib/cron/markCronIdentifierCompleted.ts","../src/lib/cron/pushCronIdentifiersFromCsv.ts","../src/lib/cron/markRequestDataSiloIdsCompleted.ts","../src/lib/cron/pullChunkedCustomSiloOutstandingIdentifiers.ts"],"sourcesContent":["import * as t from 'io-ts';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { RequestAction } from '@transcend-io/privacy-types';\nimport type { Got } from 'got';\n\nexport const CronIdentifier = t.type({\n /** The identifier value */\n identifier: t.string,\n /** The type of identifier */\n type: t.string,\n /** The core identifier of the request */\n coreIdentifier: t.string,\n /** The ID of the underlying data silo */\n dataSiloId: t.string,\n /** The ID of the underlying request */\n requestId: t.string,\n /** The request nonce */\n nonce: t.string,\n /** The time the request was created */\n requestCreatedAt: t.string,\n /** The number of days until the request is overdue */\n daysUntilOverdue: t.number,\n /** Request attributes */\n attributes: t.array(\n t.type({\n key: t.string,\n values: t.array(t.string),\n }),\n ),\n});\n\n/** Type override */\nexport type CronIdentifier = t.TypeOf<typeof CronIdentifier>;\n\n/**\n * Pull a offset of identifiers for a cron job\n *\n * @see https://docs.transcend.io/docs/api-reference/GET/v1/data-silo/(id)/pending-requests/(type)\n * @param sombra - Sombra instance configured to make requests\n * @param options - Additional options\n * @returns Successfully submitted request\n */\nexport async function pullCronPageOfIdentifiers(\n sombra: Got,\n {\n dataSiloId,\n limit = 100,\n offset = 0,\n requestType,\n }: {\n /** Data Silo ID */\n dataSiloId: string;\n /** Type of request */\n requestType: RequestAction;\n /** Number of identifiers to pull in */\n limit?: number;\n /** Page to pull in */\n offset?: number;\n },\n): Promise<CronIdentifier[]> {\n try {\n // Make the GraphQL request\n const response = await sombra\n .get(`v1/data-silo/${dataSiloId}/pending-requests/${requestType}`, {\n searchParams: {\n offset,\n limit,\n },\n })\n .json();\n\n const { items } = decodeCodec(\n t.type({\n items: t.array(CronIdentifier),\n }),\n response,\n );\n return items;\n } catch (err) {\n throw new Error(\n `Received an error from server: ${err?.response?.body || err?.message}`,\n );\n }\n}\n","import type { Got } from 'got';\nimport * as t from 'io-ts';\n\n/**\n * Minimal set required to mark as completed\n */\nexport const CronIdentifierPush = t.type({\n nonce: t.string,\n identifier: t.string,\n});\n\n/** Type override */\nexport type CronIdentifierPush = t.TypeOf<typeof CronIdentifierPush>;\n\n/**\n * Mark an identifier output by the cron job as completed.\n *\n * @see https://docs.transcend.io/docs/api-reference/PUT/v1/data-silo\n * @param sombra - Sombra instance configured to make requests\n * @param options - Additional options\n * @returns Successfully submitted request, false if not in a state to update\n */\nexport async function markCronIdentifierCompleted(\n sombra: Got,\n { 
nonce, identifier }: CronIdentifierPush,\n): Promise<boolean> {\n try {\n // Make the GraphQL request\n await sombra.put('v1/data-silo', {\n headers: {\n 'x-transcend-nonce': nonce,\n },\n json: {\n profiles: [\n {\n profileId: identifier,\n },\n ],\n },\n });\n return true;\n } catch (err) {\n // handle gracefully\n if (err.response?.statusCode === 409) {\n return false;\n }\n throw new Error(\n `Received an error from server: ${err?.response?.body || err?.message}`,\n );\n }\n}\n","import { map, mapSeries } from '../bluebird';\nimport { chunk } from 'lodash-es';\nimport { createSombraGotInstance } from '../graphql';\nimport colors from 'colors';\nimport {\n markCronIdentifierCompleted,\n CronIdentifierPush,\n} from './markCronIdentifierCompleted';\nimport cliProgress from 'cli-progress';\nimport { logger } from '../../logger';\nimport { readCsv } from '../requests';\nimport { DEFAULT_TRANSCEND_API } from '../../constants';\n\n/**\n * Given a CSV of cron job outputs, mark all requests as completed in Transcend\n *\n * @param options - Options\n * @returns Number of items marked as completed\n */\nexport async function pushCronIdentifiersFromCsv({\n file,\n dataSiloId,\n auth,\n sombraAuth,\n concurrency = 100,\n transcendUrl = DEFAULT_TRANSCEND_API,\n sleepSeconds = 10,\n}: {\n /** CSV file path */\n file: string;\n /** Transcend API key authentication */\n auth: string;\n /** Data Silo ID to pull down jobs for */\n dataSiloId: string;\n /** Upload concurrency */\n concurrency?: number;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Sombra API key authentication */\n sombraAuth?: string;\n /** Sleep time in seconds between chunks of concurrent calls */\n sleepSeconds?: number;\n}): Promise<number> {\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n\n // Read from CSV\n logger.info(colors.magenta(`Reading \"${file}\" from disk`));\n const activeResults = readCsv(file, CronIdentifierPush);\n\n // Notify Transcend\n logger.info(\n colors.magenta(\n `Notifying Transcend for data silo \"${dataSiloId}\" marking \"${activeResults.length}\" identifiers as completed.`,\n ),\n );\n\n // Time duration\n const t0 = new Date().getTime();\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n let successCount = 0;\n let failureCount = 0;\n let errorCount = 0;\n progressBar.start(activeResults.length, 0);\n\n // Process in chunks with sleep intervals\n const chunks = chunk(activeResults, concurrency);\n const totalChunks = chunks.length;\n const processChunk = async (\n items: CronIdentifierPush[],\n chunkIndex: number,\n ): Promise<void> => {\n logger.info(\n colors.blue(\n `Processing chunk ${chunkIndex + 1}/${totalChunks} (${\n chunk.length\n } items)`,\n ),\n );\n\n // Process the items of the chunk concurrently\n await map(items, async (identifier) => {\n try {\n const success = await markCronIdentifierCompleted(sombra, identifier);\n if (success) {\n successCount += 1;\n } else {\n failureCount += 1;\n }\n } catch (e) {\n logger.error(\n colors.red(\n `Error notifying Transcend for identifier \"${identifier.identifier}\" - ${e?.message}`,\n ),\n );\n errorCount += 1;\n }\n progressBar.update(successCount + failureCount);\n });\n\n // Sleep between chunks (except for the last chunk)\n if (sleepSeconds > 0 && chunkIndex < totalChunks - 1) {\n logger.info(\n colors.yellow(`Sleeping for 
${sleepSeconds}s before next chunk...`),\n );\n\n await new Promise((resolve) => {\n setTimeout(resolve, sleepSeconds * 1000);\n });\n }\n };\n\n // Process all chunks sequentially\n await mapSeries(chunks, processChunk);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully notified Transcend for ${successCount} identifiers in \"${\n totalTime / 1000\n }\" seconds!`,\n ),\n );\n if (failureCount) {\n logger.info(\n colors.magenta(\n `There were ${failureCount} identifiers that were not in a state to be updated.` +\n 'They likely have already been resolved.',\n ),\n );\n }\n if (errorCount) {\n logger.error(\n colors.red(\n `There were ${errorCount} identifiers that failed to be updated. Please review the logs for more information.`,\n ),\n );\n throw new Error('Failed to update all identifiers');\n }\n return activeResults.length;\n}\n","import { map } from '../bluebird';\nimport colors from 'colors';\nimport { logger } from '../../logger';\nimport {\n CHANGE_REQUEST_DATA_SILO_STATUS,\n fetchRequestDataSilo,\n makeGraphQLRequest,\n buildTranscendGraphQLClient,\n} from '../graphql';\nimport cliProgress from 'cli-progress';\nimport { DEFAULT_TRANSCEND_API } from '../../constants';\nimport { RequestDataSiloStatus } from '@transcend-io/privacy-types';\n\n/**\n * Given a CSV of Request IDs, mark associated RequestDataSilos as completed\n *\n * @param options - Options\n * @returns Number of items marked as completed\n */\nexport async function markRequestDataSiloIdsCompleted({\n requestIds,\n dataSiloId,\n auth,\n concurrency = 100,\n status = RequestDataSiloStatus.Resolved,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** The list of request ids to mark as completed */\n requestIds: string[];\n /** Transcend API key authentication */\n auth: string;\n /** Data Silo ID to pull down jobs for */\n dataSiloId: string;\n /** Status to update requests to */\n status?: RequestDataSiloStatus;\n /** Upload concurrency */\n concurrency?: number;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n}): Promise<number> {\n // Find all requests made before createdAt that are in a removing data state\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Time duration\n const t0 = new Date().getTime();\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Notify Transcend\n logger.info(\n colors.magenta(\n `Notifying Transcend for data silo \"${dataSiloId}\" marking \"${requestIds.length}\" requests as completed.`,\n ),\n );\n\n let total = 0;\n progressBar.start(requestIds.length, 0);\n await map(\n requestIds,\n async (requestId) => {\n const requestDataSilo = await fetchRequestDataSilo(client, {\n requestId,\n dataSiloId,\n });\n\n try {\n await makeGraphQLRequest<{\n /** Whether we successfully uploaded the results */\n success: boolean;\n }>(client, CHANGE_REQUEST_DATA_SILO_STATUS, {\n requestDataSiloId: requestDataSilo.id,\n status,\n });\n } catch (err) {\n if (\n !err.message.includes('Client error: Request must be active:') &&\n !err.message.includes('Failed to find RequestDataSilo')\n ) {\n throw err;\n }\n }\n\n total += 1;\n progressBar.update(total);\n },\n { concurrency },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully notified Transcend in \"${totalTime / 1000}\" 
seconds!`,\n ),\n );\n return requestIds.length;\n}\n","import {\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchRequestDataSiloActiveCount,\n} from '../graphql';\nimport colors from 'colors';\nimport cliProgress from 'cli-progress';\nimport {\n pullCronPageOfIdentifiers,\n CronIdentifier,\n} from './pullCronPageOfIdentifiers';\nimport { RequestAction } from '@transcend-io/privacy-types';\n\nimport { logger } from '../../logger';\nimport { DEFAULT_TRANSCEND_API } from '../../constants';\nimport { mapSeries } from '../bluebird';\n\n/**\n * A CSV formatted identifier\n */\nexport type CsvFormattedIdentifier = {\n [k in string]: string | null | boolean | number;\n};\n\nexport interface CronIdentifierWithAction extends CronIdentifier {\n /** The request action that the identifier relates to */\n action: RequestAction;\n}\n\n/**\n * Pull the set of identifiers outstanding for a cron or AVC integration\n *\n * This function is designed to be used in a loop, and will call the onSave callback\n * with a chunk of identifiers when the savePageSize is reached.\n *\n * @param options - Options\n * @returns The identifiers and identifiers formatted for CSV\n */\nexport async function pullChunkedCustomSiloOutstandingIdentifiers({\n dataSiloId,\n auth,\n sombraAuth,\n actions,\n apiPageSize = 100,\n savePageSize = 1000,\n onSave,\n transcendUrl = DEFAULT_TRANSCEND_API,\n skipRequestCount = false,\n}: {\n /** Transcend API key authentication */\n auth: string;\n /** Data Silo ID to pull down jobs for */\n dataSiloId: string;\n /** The request actions to fetch */\n actions: RequestAction[];\n /** How many identifiers to pull in a single call to the backend */\n apiPageSize: number;\n /** How many identifiers to save at a time (usually to a CSV file, should be a multiple of apiPageSize) */\n savePageSize: number;\n /** Callback function called when a chunk of identifiers is ready to be saved */\n onSave: (chunk: CsvFormattedIdentifier[]) => Promise<void>;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Sombra API key authentication */\n sombraAuth?: string;\n /** Skip request count */\n skipRequestCount?: boolean;\n}): Promise<{\n /** Raw Identifiers */\n identifiers: CronIdentifierWithAction[];\n}> {\n // Validate savePageSize\n if (savePageSize % apiPageSize !== 0) {\n throw new Error(\n `savePageSize must be a multiple of apiPageSize. savePageSize: ${savePageSize}, apiPageSize: ${apiPageSize}`,\n );\n }\n\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n\n // Create GraphQL client to connect to Transcend backend\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n let totalRequestCount = 0;\n if (!skipRequestCount) {\n totalRequestCount = await fetchRequestDataSiloActiveCount(client, {\n dataSiloId,\n });\n }\n\n logger.info(\n colors.magenta(\n `Pulling ${\n skipRequestCount ? 
'all' : totalRequestCount\n } outstanding request identifiers ` +\n `for data silo: \"${dataSiloId}\" for requests of types \"${actions.join(\n '\", \"',\n )}\"`,\n ),\n );\n\n // Time duration\n const t0 = new Date().getTime();\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n const foundRequestIds = new Set<string>();\n\n // identifiers found in total\n const identifiers: CronIdentifierWithAction[] = [];\n // current chunk of identifiers to be saved\n let currentChunk: CsvFormattedIdentifier[] = [];\n\n // map over each action\n if (!skipRequestCount) {\n progressBar.start(totalRequestCount, 0);\n }\n await mapSeries(actions, async (action) => {\n let offset = 0;\n let shouldContinue = true;\n\n // Fetch a page of identifiers\n while (shouldContinue) {\n const pageIdentifiers = await pullCronPageOfIdentifiers(sombra, {\n dataSiloId,\n limit: apiPageSize,\n offset,\n requestType: action,\n });\n\n const identifiersWithAction: CronIdentifierWithAction[] =\n pageIdentifiers.map((identifier) => {\n foundRequestIds.add(identifier.requestId);\n return {\n ...identifier,\n action,\n };\n });\n\n const csvFormattedIdentifiers = identifiersWithAction.map(\n ({ attributes, ...identifier }) => ({\n ...identifier,\n ...attributes.reduce(\n (acc, val) =>\n Object.assign(acc, {\n [val.key]: val.values.join(','),\n }),\n {},\n ),\n }),\n );\n\n identifiers.push(...identifiersWithAction);\n currentChunk.push(...csvFormattedIdentifiers);\n\n // Check if we've reached the savePageSize and call the onSave callback\n if (currentChunk.length >= savePageSize) {\n await onSave(currentChunk);\n currentChunk = [];\n }\n\n shouldContinue = pageIdentifiers.length === apiPageSize;\n offset += apiPageSize;\n if (!skipRequestCount) {\n progressBar.update(foundRequestIds.size);\n } else {\n logger.info(\n colors.magenta(\n `Pulled ${pageIdentifiers.length} outstanding identifiers for ${foundRequestIds.size} requests`,\n ),\n );\n }\n }\n });\n\n // Save any remaining identifiers in the current chunk\n if (currentChunk.length > 0) {\n await onSave(currentChunk);\n }\n\n if (!skipRequestCount) {\n progressBar.stop();\n }\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled ${identifiers.length} outstanding identifiers from ${\n foundRequestIds.size\n } requests in \"${totalTime / 1000}\" seconds!`,\n ),\n );\n\n return { identifiers 
};\n}\n"],"mappings":"iWAKA,MAAa,EAAiBA,EAAE,KAAK,CAEnC,WAAYA,EAAE,OAEd,KAAMA,EAAE,OAER,eAAgBA,EAAE,OAElB,WAAYA,EAAE,OAEd,UAAWA,EAAE,OAEb,MAAOA,EAAE,OAET,iBAAkBA,EAAE,OAEpB,iBAAkBA,EAAE,OAEpB,WAAYA,EAAE,MACZA,EAAE,KAAK,CACL,IAAKA,EAAE,OACP,OAAQA,EAAE,MAAMA,EAAE,OAAO,CAC1B,CAAC,CACH,CACF,CAAC,CAaF,eAAsB,EACpB,EACA,CACE,aACA,QAAQ,IACR,SAAS,EACT,eAWyB,CAC3B,GAAI,CAEF,IAAM,EAAW,MAAM,EACpB,IAAI,gBAAgB,EAAW,oBAAoB,IAAe,CACjE,aAAc,CACZ,SACA,QACD,CACF,CAAC,CACD,MAAM,CAEH,CAAE,UAAA,EAAA,EAAA,aACNA,EAAE,KAAK,CACL,MAAOA,EAAE,MAAM,EAAe,CAC/B,CAAC,CACF,EACD,CACD,OAAO,QACA,EAAK,CACZ,MAAU,MACR,kCAAkC,GAAK,UAAU,MAAQ,GAAK,UAC/D,EC3EL,MAAa,EAAqBC,EAAE,KAAK,CACvC,MAAOA,EAAE,OACT,WAAYA,EAAE,OACf,CAAC,CAaF,eAAsB,EACpB,EACA,CAAE,QAAO,cACS,CAClB,GAAI,CAcF,OAZA,MAAM,EAAO,IAAI,eAAgB,CAC/B,QAAS,CACP,oBAAqB,EACtB,CACD,KAAM,CACJ,SAAU,CACR,CACE,UAAW,EACZ,CACF,CACF,CACF,CAAC,CACK,SACA,EAAK,CAEZ,GAAI,EAAI,UAAU,aAAe,IAC/B,MAAO,GAET,MAAU,MACR,kCAAkC,GAAK,UAAU,MAAQ,GAAK,UAC/D,EC7BL,eAAsB,EAA2B,CAC/C,OACA,aACA,OACA,aACA,cAAc,IACd,eAAeC,EAAAA,EACf,eAAe,IAgBG,CAElB,IAAM,EAAS,MAAMC,EAAAA,GAAwB,EAAc,EAAM,EAAW,CAG5E,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,YAAY,EAAK,aAAa,CAAC,CAC1D,IAAM,EAAgBC,EAAAA,GAAQ,EAAM,EAAmB,CAGvD,EAAA,EAAO,KACL,EAAA,QAAO,QACL,sCAAsC,EAAW,aAAa,EAAc,OAAO,6BACpF,CACF,CAGD,IAAM,EAAK,IAAI,MAAM,CAAC,SAAS,CAEzB,EAAc,IAAIC,EAAAA,QAAY,UAClC,EAAE,CACFA,EAAAA,QAAY,QAAQ,eACrB,CAEG,EAAe,EACf,EAAe,EACf,EAAa,EACjB,EAAY,MAAM,EAAc,OAAQ,EAAE,CAG1C,IAAM,EAASC,EAAAA,GAAM,EAAe,EAAY,CAC1C,EAAc,EAAO,OA8C3B,MAAME,EAAAA,GAAU,EA7CK,MACnB,EACA,IACkB,CAClB,EAAA,EAAO,KACL,EAAA,QAAO,KACL,oBAAoB,EAAa,EAAE,GAAG,EAAY,IAChDF,EAAAA,GAAM,OACP,SACF,CACF,CAGD,MAAMC,EAAAA,GAAI,EAAO,KAAO,IAAe,CACrC,GAAI,CACc,MAAM,EAA4B,EAAQ,EAAW,CAEnE,GAAgB,EAEhB,GAAgB,QAEX,EAAG,CACV,EAAA,EAAO,MACL,EAAA,QAAO,IACL,6CAA6C,EAAW,WAAW,MAAM,GAAG,UAC7E,CACF,CACD,GAAc,EAEhB,EAAY,OAAO,EAAe,EAAa,EAC/C,CAGE,EAAe,GAAK,EAAa,EAAc,IACjD,EAAA,EAAO,KACL,EAAA,QAAO,OAAO,gBAAgB,EAAa,wBAAwB,CACpE,CAED,MAAM,IAAI,QAAS,GAAY,CAC7B,WAAW,EAAS,EAAe,IAAK,EACxC,GAK+B,CAErC,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EAiBvB,GAfA,EAAA,EAAO,KACL,EAAA,QAAO,MACL,uCAAuC,EAAa,mBAClD,EAAY,IACb,YACF,CACF,CACG,GACF,EAAA,EAAO,KACL,EAAA,QAAO,QACL,cAAc,EAAa,6FAE5B,CACF,CAEC,EAMF,MALA,EAAA,EAAO,MACL,EAAA,QAAO,IACL,cAAc,EAAW,sFAC1B,CACF,CACS,MAAM,mCAAmC,CAErD,OAAO,EAAc,OChIvB,eAAsB,EAAgC,CACpD,aACA,aACA,OACA,cAAc,IACd,SAASE,EAAAA,sBAAsB,SAC/B,eAAeC,EAAAA,GAcG,CAElB,IAAM,EAASC,EAAAA,GAA4B,EAAc,EAAK,CAGxD,EAAK,IAAI,MAAM,CAAC,SAAS,CAEzB,EAAc,IAAIC,EAAAA,QAAY,UAClC,EAAE,CACFA,EAAAA,QAAY,QAAQ,eACrB,CAGD,EAAA,EAAO,KACL,EAAA,QAAO,QACL,sCAAsC,EAAW,aAAa,EAAW,OAAO,0BACjF,CACF,CAED,IAAI,EAAQ,EACZ,EAAY,MAAM,EAAW,OAAQ,EAAE,CACvC,MAAMC,EAAAA,GACJ,EACA,KAAO,IAAc,CACnB,IAAM,EAAkB,MAAMC,EAAAA,GAAqB,EAAQ,CACzD,YACA,aACD,CAAC,CAEF,GAAI,CACF,MAAMC,EAAAA,EAGH,EAAQC,EAAAA,GAAiC,CAC1C,kBAAmB,EAAgB,GACnC,SACD,CAAC,OACK,EAAK,CACZ,GACE,CAAC,EAAI,QAAQ,SAAS,wCAAwC,EAC9D,CAAC,EAAI,QAAQ,SAAS,iCAAiC,CAEvD,MAAM,EAIV,GAAS,EACT,EAAY,OAAO,EAAM,EAE3B,CAAE,cAAa,CAChB,CAED,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EAOvB,OALA,EAAA,EAAO,KACL,EAAA,QAAO,MACL,uCAAuC,EAAY,IAAK,YACzD,CACF,CACM,EAAW,OC9DpB,eAAsB,EAA4C,CAChE,aACA,OACA,aACA,UACA,cAAc,IACd,eAAe,IACf,SACA,eAAeC,EAAAA,EACf,mBAAmB,IAuBlB,CAED,GAAI,EAAe,IAAgB,EACjC,MAAU,MACR,iEAAiE,EAAa,iBAAiB,IAChG,CAIH,IAAM,EAAS,MAAMC,EAAAA,GAAwB,EAAc,EAAM,EAAW,CAGtE,EAASC,EAAAA,GAA4B,EAAc,EAAK,CAE1D,EAAoB,EACnB,IACH,EAAoB,MAAMC,EAAAA,GAAgC,EAAQ,CAChE,aACD,CAAC,EAGJ,EAAA,EAAO,KACL,EAAA,QAAO,QACL,WACE,EAAmB,MAAQ,EAC5B,mDACoB,EAAW,2BAA2B,EAAQ,KAC/D,OACD,CAAC,GACL,CACF,CAGD,IAAM,EAAK,IAAI,MAAM,CAAC,SAAS
,CAEzB,EAAc,IAAIC,EAAAA,QAAY,UAClC,EAAE,CACFA,EAAAA,QAAY,QAAQ,eACrB,CACK,EAAkB,IAAI,IAGtB,EAA0C,EAAE,CAE9C,EAAyC,EAAE,CAG1C,GACH,EAAY,MAAM,EAAmB,EAAE,CAEzC,MAAMC,EAAAA,GAAU,EAAS,KAAO,IAAW,CACzC,IAAI,EAAS,EACT,EAAiB,GAGrB,KAAO,GAAgB,CACrB,IAAM,EAAkB,MAAM,EAA0B,EAAQ,CAC9D,aACA,MAAO,EACP,SACA,YAAa,EACd,CAAC,CAEI,EACJ,EAAgB,IAAK,IACnB,EAAgB,IAAI,EAAW,UAAU,CAClC,CACL,GAAG,EACH,SACD,EACD,CAEE,EAA0B,EAAsB,KACnD,CAAE,aAAY,GAAG,MAAkB,CAClC,GAAG,EACH,GAAG,EAAW,QACX,EAAK,IACJ,OAAO,OAAO,EAAK,EAChB,EAAI,KAAM,EAAI,OAAO,KAAK,IAAI,CAChC,CAAC,CACJ,EAAE,CACH,CACF,EACF,CAED,EAAY,KAAK,GAAG,EAAsB,CAC1C,EAAa,KAAK,GAAG,EAAwB,CAGzC,EAAa,QAAU,IACzB,MAAM,EAAO,EAAa,CAC1B,EAAe,EAAE,EAGnB,EAAiB,EAAgB,SAAW,EAC5C,GAAU,EACL,EAGH,EAAA,EAAO,KACL,EAAA,QAAO,QACL,UAAU,EAAgB,OAAO,+BAA+B,EAAgB,KAAK,WACtF,CACF,CAND,EAAY,OAAO,EAAgB,KAAK,GAS5C,CAGE,EAAa,OAAS,GACxB,MAAM,EAAO,EAAa,CAGvB,GACH,EAAY,MAAM,CAGpB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EAUvB,OARA,EAAA,EAAO,KACL,EAAA,QAAO,MACL,uBAAuB,EAAY,OAAO,gCACxC,EAAgB,KACjB,gBAAgB,EAAY,IAAK,YACnC,CACF,CAEM,CAAE,cAAa"}
+ {"version":3,"file":"cron-lijiEqFA.cjs","names":["t","t","DEFAULT_TRANSCEND_API","createSombraGotInstance","readCsv","cliProgress","chunk","map","mapSeries","RequestDataSiloStatus","DEFAULT_TRANSCEND_API","buildTranscendGraphQLClient","cliProgress","map","fetchRequestDataSilo","makeGraphQLRequest","CHANGE_REQUEST_DATA_SILO_STATUS","DEFAULT_TRANSCEND_API","createSombraGotInstance","buildTranscendGraphQLClient","fetchRequestDataSiloActiveCount","cliProgress","mapSeries"],"sources":["../src/lib/cron/pullCronPageOfIdentifiers.ts","../src/lib/cron/markCronIdentifierCompleted.ts","../src/lib/cron/pushCronIdentifiersFromCsv.ts","../src/lib/cron/markRequestDataSiloIdsCompleted.ts","../src/lib/cron/pullChunkedCustomSiloOutstandingIdentifiers.ts"],"sourcesContent":["import * as t from 'io-ts';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { RequestAction } from '@transcend-io/privacy-types';\nimport type { Got } from 'got';\n\nexport const CronIdentifier = t.type({\n /** The identifier value */\n identifier: t.string,\n /** The type of identifier */\n type: t.string,\n /** The core identifier of the request */\n coreIdentifier: t.string,\n /** The ID of the underlying data silo */\n dataSiloId: t.string,\n /** The ID of the underlying request */\n requestId: t.string,\n /** The request nonce */\n nonce: t.string,\n /** The time the request was created */\n requestCreatedAt: t.string,\n /** The number of days until the request is overdue */\n daysUntilOverdue: t.number,\n /** Request attributes */\n attributes: t.array(\n t.type({\n key: t.string,\n values: t.array(t.string),\n }),\n ),\n});\n\n/** Type override */\nexport type CronIdentifier = t.TypeOf<typeof CronIdentifier>;\n\n/**\n * Pull a offset of identifiers for a cron job\n *\n * @see https://docs.transcend.io/docs/api-reference/GET/v1/data-silo/(id)/pending-requests/(type)\n * @param sombra - Sombra instance configured to make requests\n * @param options - Additional options\n * @returns Successfully submitted request\n */\nexport async function pullCronPageOfIdentifiers(\n sombra: Got,\n {\n dataSiloId,\n limit = 100,\n offset = 0,\n requestType,\n }: {\n /** Data Silo ID */\n dataSiloId: string;\n /** Type of request */\n requestType: RequestAction;\n /** Number of identifiers to pull in */\n limit?: number;\n /** Page to pull in */\n offset?: number;\n },\n): Promise<CronIdentifier[]> {\n try {\n // Make the GraphQL request\n const response = await sombra\n .get(`v1/data-silo/${dataSiloId}/pending-requests/${requestType}`, {\n searchParams: {\n offset,\n limit,\n },\n })\n .json();\n\n const { items } = decodeCodec(\n t.type({\n items: t.array(CronIdentifier),\n }),\n response,\n );\n return items;\n } catch (err) {\n throw new Error(\n `Received an error from server: ${err?.response?.body || err?.message}`,\n );\n }\n}\n","import type { Got } from 'got';\nimport * as t from 'io-ts';\n\n/**\n * Minimal set required to mark as completed\n */\nexport const CronIdentifierPush = t.type({\n nonce: t.string,\n identifier: t.string,\n});\n\n/** Type override */\nexport type CronIdentifierPush = t.TypeOf<typeof CronIdentifierPush>;\n\n/**\n * Mark an identifier output by the cron job as completed.\n *\n * @see https://docs.transcend.io/docs/api-reference/PUT/v1/data-silo\n * @param sombra - Sombra instance configured to make requests\n * @param options - Additional options\n * @returns Successfully submitted request, false if not in a state to update\n */\nexport async function markCronIdentifierCompleted(\n sombra: Got,\n { 
nonce, identifier }: CronIdentifierPush,\n): Promise<boolean> {\n try {\n // Make the GraphQL request\n await sombra.put('v1/data-silo', {\n headers: {\n 'x-transcend-nonce': nonce,\n },\n json: {\n profiles: [\n {\n profileId: identifier,\n },\n ],\n },\n });\n return true;\n } catch (err) {\n // handle gracefully\n if (err.response?.statusCode === 409) {\n return false;\n }\n throw new Error(\n `Received an error from server: ${err?.response?.body || err?.message}`,\n );\n }\n}\n","import { map, mapSeries } from '../bluebird';\nimport { chunk } from 'lodash-es';\nimport { createSombraGotInstance } from '../graphql';\nimport colors from 'colors';\nimport {\n markCronIdentifierCompleted,\n CronIdentifierPush,\n} from './markCronIdentifierCompleted';\nimport cliProgress from 'cli-progress';\nimport { logger } from '../../logger';\nimport { readCsv } from '../requests';\nimport { DEFAULT_TRANSCEND_API } from '../../constants';\n\n/**\n * Given a CSV of cron job outputs, mark all requests as completed in Transcend\n *\n * @param options - Options\n * @returns Number of items marked as completed\n */\nexport async function pushCronIdentifiersFromCsv({\n file,\n dataSiloId,\n auth,\n sombraAuth,\n concurrency = 100,\n transcendUrl = DEFAULT_TRANSCEND_API,\n sleepSeconds = 10,\n}: {\n /** CSV file path */\n file: string;\n /** Transcend API key authentication */\n auth: string;\n /** Data Silo ID to pull down jobs for */\n dataSiloId: string;\n /** Upload concurrency */\n concurrency?: number;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Sombra API key authentication */\n sombraAuth?: string;\n /** Sleep time in seconds between chunks of concurrent calls */\n sleepSeconds?: number;\n}): Promise<number> {\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n\n // Read from CSV\n logger.info(colors.magenta(`Reading \"${file}\" from disk`));\n const activeResults = readCsv(file, CronIdentifierPush);\n\n // Notify Transcend\n logger.info(\n colors.magenta(\n `Notifying Transcend for data silo \"${dataSiloId}\" marking \"${activeResults.length}\" identifiers as completed.`,\n ),\n );\n\n // Time duration\n const t0 = new Date().getTime();\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n let successCount = 0;\n let failureCount = 0;\n let errorCount = 0;\n progressBar.start(activeResults.length, 0);\n\n // Process in chunks with sleep intervals\n const chunks = chunk(activeResults, concurrency);\n const totalChunks = chunks.length;\n const processChunk = async (\n items: CronIdentifierPush[],\n chunkIndex: number,\n ): Promise<void> => {\n logger.info(\n colors.blue(\n `Processing chunk ${chunkIndex + 1}/${totalChunks} (${\n chunk.length\n } items)`,\n ),\n );\n\n // Process the items of the chunk concurrently\n await map(items, async (identifier) => {\n try {\n const success = await markCronIdentifierCompleted(sombra, identifier);\n if (success) {\n successCount += 1;\n } else {\n failureCount += 1;\n }\n } catch (e) {\n logger.error(\n colors.red(\n `Error notifying Transcend for identifier \"${identifier.identifier}\" - ${e?.message}`,\n ),\n );\n errorCount += 1;\n }\n progressBar.update(successCount + failureCount);\n });\n\n // Sleep between chunks (except for the last chunk)\n if (sleepSeconds > 0 && chunkIndex < totalChunks - 1) {\n logger.info(\n colors.yellow(`Sleeping for 
${sleepSeconds}s before next chunk...`),\n );\n\n await new Promise((resolve) => {\n setTimeout(resolve, sleepSeconds * 1000);\n });\n }\n };\n\n // Process all chunks sequentially\n await mapSeries(chunks, processChunk);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully notified Transcend for ${successCount} identifiers in \"${\n totalTime / 1000\n }\" seconds!`,\n ),\n );\n if (failureCount) {\n logger.info(\n colors.magenta(\n `There were ${failureCount} identifiers that were not in a state to be updated.` +\n 'They likely have already been resolved.',\n ),\n );\n }\n if (errorCount) {\n logger.error(\n colors.red(\n `There were ${errorCount} identifiers that failed to be updated. Please review the logs for more information.`,\n ),\n );\n throw new Error('Failed to update all identifiers');\n }\n return activeResults.length;\n}\n","import { map } from '../bluebird';\nimport colors from 'colors';\nimport { logger } from '../../logger';\nimport {\n CHANGE_REQUEST_DATA_SILO_STATUS,\n fetchRequestDataSilo,\n makeGraphQLRequest,\n buildTranscendGraphQLClient,\n} from '../graphql';\nimport cliProgress from 'cli-progress';\nimport { DEFAULT_TRANSCEND_API } from '../../constants';\nimport { RequestDataSiloStatus } from '@transcend-io/privacy-types';\n\n/**\n * Given a CSV of Request IDs, mark associated RequestDataSilos as completed\n *\n * @param options - Options\n * @returns Number of items marked as completed\n */\nexport async function markRequestDataSiloIdsCompleted({\n requestIds,\n dataSiloId,\n auth,\n concurrency = 100,\n status = RequestDataSiloStatus.Resolved,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** The list of request ids to mark as completed */\n requestIds: string[];\n /** Transcend API key authentication */\n auth: string;\n /** Data Silo ID to pull down jobs for */\n dataSiloId: string;\n /** Status to update requests to */\n status?: RequestDataSiloStatus;\n /** Upload concurrency */\n concurrency?: number;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n}): Promise<number> {\n // Find all requests made before createdAt that are in a removing data state\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Time duration\n const t0 = new Date().getTime();\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Notify Transcend\n logger.info(\n colors.magenta(\n `Notifying Transcend for data silo \"${dataSiloId}\" marking \"${requestIds.length}\" requests as completed.`,\n ),\n );\n\n let total = 0;\n progressBar.start(requestIds.length, 0);\n await map(\n requestIds,\n async (requestId) => {\n const requestDataSilo = await fetchRequestDataSilo(client, {\n requestId,\n dataSiloId,\n });\n\n try {\n await makeGraphQLRequest<{\n /** Whether we successfully uploaded the results */\n success: boolean;\n }>(client, CHANGE_REQUEST_DATA_SILO_STATUS, {\n requestDataSiloId: requestDataSilo.id,\n status,\n });\n } catch (err) {\n if (\n !err.message.includes('Client error: Request must be active:') &&\n !err.message.includes('Failed to find RequestDataSilo')\n ) {\n throw err;\n }\n }\n\n total += 1;\n progressBar.update(total);\n },\n { concurrency },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully notified Transcend in \"${totalTime / 1000}\" 
seconds!`,\n ),\n );\n return requestIds.length;\n}\n","import {\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchRequestDataSiloActiveCount,\n} from '../graphql';\nimport colors from 'colors';\nimport cliProgress from 'cli-progress';\nimport {\n pullCronPageOfIdentifiers,\n CronIdentifier,\n} from './pullCronPageOfIdentifiers';\nimport { RequestAction } from '@transcend-io/privacy-types';\n\nimport { logger } from '../../logger';\nimport { DEFAULT_TRANSCEND_API } from '../../constants';\nimport { mapSeries } from '../bluebird';\n\n/**\n * A CSV formatted identifier\n */\nexport type CsvFormattedIdentifier = {\n [k in string]: string | null | boolean | number;\n};\n\nexport interface CronIdentifierWithAction extends CronIdentifier {\n /** The request action that the identifier relates to */\n action: RequestAction;\n}\n\n/**\n * Pull the set of identifiers outstanding for a cron or AVC integration\n *\n * This function is designed to be used in a loop, and will call the onSave callback\n * with a chunk of identifiers when the savePageSize is reached.\n *\n * @param options - Options\n * @returns The identifiers and identifiers formatted for CSV\n */\nexport async function pullChunkedCustomSiloOutstandingIdentifiers({\n dataSiloId,\n auth,\n sombraAuth,\n actions,\n apiPageSize = 100,\n savePageSize = 1000,\n onSave,\n transcendUrl = DEFAULT_TRANSCEND_API,\n skipRequestCount = false,\n}: {\n /** Transcend API key authentication */\n auth: string;\n /** Data Silo ID to pull down jobs for */\n dataSiloId: string;\n /** The request actions to fetch */\n actions: RequestAction[];\n /** How many identifiers to pull in a single call to the backend */\n apiPageSize: number;\n /** How many identifiers to save at a time (usually to a CSV file, should be a multiple of apiPageSize) */\n savePageSize: number;\n /** Callback function called when a chunk of identifiers is ready to be saved */\n onSave: (chunk: CsvFormattedIdentifier[]) => Promise<void>;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Sombra API key authentication */\n sombraAuth?: string;\n /** Skip request count */\n skipRequestCount?: boolean;\n}): Promise<{\n /** Raw Identifiers */\n identifiers: CronIdentifierWithAction[];\n}> {\n // Validate savePageSize\n if (savePageSize % apiPageSize !== 0) {\n throw new Error(\n `savePageSize must be a multiple of apiPageSize. savePageSize: ${savePageSize}, apiPageSize: ${apiPageSize}`,\n );\n }\n\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n\n // Create GraphQL client to connect to Transcend backend\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n let totalRequestCount = 0;\n if (!skipRequestCount) {\n totalRequestCount = await fetchRequestDataSiloActiveCount(client, {\n dataSiloId,\n });\n }\n\n logger.info(\n colors.magenta(\n `Pulling ${\n skipRequestCount ? 
'all' : totalRequestCount\n } outstanding request identifiers ` +\n `for data silo: \"${dataSiloId}\" for requests of types \"${actions.join(\n '\", \"',\n )}\"`,\n ),\n );\n\n // Time duration\n const t0 = new Date().getTime();\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n const foundRequestIds = new Set<string>();\n\n // identifiers found in total\n const identifiers: CronIdentifierWithAction[] = [];\n // current chunk of identifiers to be saved\n let currentChunk: CsvFormattedIdentifier[] = [];\n\n // map over each action\n if (!skipRequestCount) {\n progressBar.start(totalRequestCount, 0);\n }\n await mapSeries(actions, async (action) => {\n let offset = 0;\n let shouldContinue = true;\n\n // Fetch a page of identifiers\n while (shouldContinue) {\n const pageIdentifiers = await pullCronPageOfIdentifiers(sombra, {\n dataSiloId,\n limit: apiPageSize,\n offset,\n requestType: action,\n });\n\n const identifiersWithAction: CronIdentifierWithAction[] =\n pageIdentifiers.map((identifier) => {\n foundRequestIds.add(identifier.requestId);\n return {\n ...identifier,\n action,\n };\n });\n\n const csvFormattedIdentifiers = identifiersWithAction.map(\n ({ attributes, ...identifier }) => ({\n ...identifier,\n ...attributes.reduce(\n (acc, val) =>\n Object.assign(acc, {\n [val.key]: val.values.join(','),\n }),\n {},\n ),\n }),\n );\n\n identifiers.push(...identifiersWithAction);\n currentChunk.push(...csvFormattedIdentifiers);\n\n // Check if we've reached the savePageSize and call the onSave callback\n if (currentChunk.length >= savePageSize) {\n await onSave(currentChunk);\n currentChunk = [];\n }\n\n shouldContinue = pageIdentifiers.length === apiPageSize;\n offset += apiPageSize;\n if (!skipRequestCount) {\n progressBar.update(foundRequestIds.size);\n } else {\n logger.info(\n colors.magenta(\n `Pulled ${pageIdentifiers.length} outstanding identifiers for ${foundRequestIds.size} requests`,\n ),\n );\n }\n }\n });\n\n // Save any remaining identifiers in the current chunk\n if (currentChunk.length > 0) {\n await onSave(currentChunk);\n }\n\n if (!skipRequestCount) {\n progressBar.stop();\n }\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled ${identifiers.length} outstanding identifiers from ${\n foundRequestIds.size\n } requests in \"${totalTime / 1000}\" seconds!`,\n ),\n );\n\n return { identifiers 
};\n}\n"],"mappings":"iWAKA,MAAa,EAAiBA,EAAE,KAAK,CAEnC,WAAYA,EAAE,OAEd,KAAMA,EAAE,OAER,eAAgBA,EAAE,OAElB,WAAYA,EAAE,OAEd,UAAWA,EAAE,OAEb,MAAOA,EAAE,OAET,iBAAkBA,EAAE,OAEpB,iBAAkBA,EAAE,OAEpB,WAAYA,EAAE,MACZA,EAAE,KAAK,CACL,IAAKA,EAAE,OACP,OAAQA,EAAE,MAAMA,EAAE,OAAO,CAC1B,CAAC,CACH,CACF,CAAC,CAaF,eAAsB,EACpB,EACA,CACE,aACA,QAAQ,IACR,SAAS,EACT,eAWyB,CAC3B,GAAI,CAEF,IAAM,EAAW,MAAM,EACpB,IAAI,gBAAgB,EAAW,oBAAoB,IAAe,CACjE,aAAc,CACZ,SACA,QACD,CACF,CAAC,CACD,MAAM,CAEH,CAAE,UAAA,EAAA,EAAA,aACNA,EAAE,KAAK,CACL,MAAOA,EAAE,MAAM,EAAe,CAC/B,CAAC,CACF,EACD,CACD,OAAO,QACA,EAAK,CACZ,MAAU,MACR,kCAAkC,GAAK,UAAU,MAAQ,GAAK,UAC/D,EC3EL,MAAa,EAAqBC,EAAE,KAAK,CACvC,MAAOA,EAAE,OACT,WAAYA,EAAE,OACf,CAAC,CAaF,eAAsB,EACpB,EACA,CAAE,QAAO,cACS,CAClB,GAAI,CAcF,OAZA,MAAM,EAAO,IAAI,eAAgB,CAC/B,QAAS,CACP,oBAAqB,EACtB,CACD,KAAM,CACJ,SAAU,CACR,CACE,UAAW,EACZ,CACF,CACF,CACF,CAAC,CACK,SACA,EAAK,CAEZ,GAAI,EAAI,UAAU,aAAe,IAC/B,MAAO,GAET,MAAU,MACR,kCAAkC,GAAK,UAAU,MAAQ,GAAK,UAC/D,EC7BL,eAAsB,EAA2B,CAC/C,OACA,aACA,OACA,aACA,cAAc,IACd,eAAeC,EAAAA,EACf,eAAe,IAgBG,CAElB,IAAM,EAAS,MAAMC,EAAAA,GAAwB,EAAc,EAAM,EAAW,CAG5E,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,YAAY,EAAK,aAAa,CAAC,CAC1D,IAAM,EAAgBC,EAAAA,GAAQ,EAAM,EAAmB,CAGvD,EAAA,EAAO,KACL,EAAA,QAAO,QACL,sCAAsC,EAAW,aAAa,EAAc,OAAO,6BACpF,CACF,CAGD,IAAM,EAAK,IAAI,MAAM,CAAC,SAAS,CAEzB,EAAc,IAAIC,EAAAA,QAAY,UAClC,EAAE,CACFA,EAAAA,QAAY,QAAQ,eACrB,CAEG,EAAe,EACf,EAAe,EACf,EAAa,EACjB,EAAY,MAAM,EAAc,OAAQ,EAAE,CAG1C,IAAM,EAASC,EAAAA,GAAM,EAAe,EAAY,CAC1C,EAAc,EAAO,OA8C3B,MAAME,EAAAA,GAAU,EA7CK,MACnB,EACA,IACkB,CAClB,EAAA,EAAO,KACL,EAAA,QAAO,KACL,oBAAoB,EAAa,EAAE,GAAG,EAAY,IAChDF,EAAAA,GAAM,OACP,SACF,CACF,CAGD,MAAMC,EAAAA,GAAI,EAAO,KAAO,IAAe,CACrC,GAAI,CACc,MAAM,EAA4B,EAAQ,EAAW,CAEnE,GAAgB,EAEhB,GAAgB,QAEX,EAAG,CACV,EAAA,EAAO,MACL,EAAA,QAAO,IACL,6CAA6C,EAAW,WAAW,MAAM,GAAG,UAC7E,CACF,CACD,GAAc,EAEhB,EAAY,OAAO,EAAe,EAAa,EAC/C,CAGE,EAAe,GAAK,EAAa,EAAc,IACjD,EAAA,EAAO,KACL,EAAA,QAAO,OAAO,gBAAgB,EAAa,wBAAwB,CACpE,CAED,MAAM,IAAI,QAAS,GAAY,CAC7B,WAAW,EAAS,EAAe,IAAK,EACxC,GAK+B,CAErC,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EAiBvB,GAfA,EAAA,EAAO,KACL,EAAA,QAAO,MACL,uCAAuC,EAAa,mBAClD,EAAY,IACb,YACF,CACF,CACG,GACF,EAAA,EAAO,KACL,EAAA,QAAO,QACL,cAAc,EAAa,6FAE5B,CACF,CAEC,EAMF,MALA,EAAA,EAAO,MACL,EAAA,QAAO,IACL,cAAc,EAAW,sFAC1B,CACF,CACS,MAAM,mCAAmC,CAErD,OAAO,EAAc,OChIvB,eAAsB,EAAgC,CACpD,aACA,aACA,OACA,cAAc,IACd,SAASE,EAAAA,sBAAsB,SAC/B,eAAeC,EAAAA,GAcG,CAElB,IAAM,EAASC,EAAAA,GAA4B,EAAc,EAAK,CAGxD,EAAK,IAAI,MAAM,CAAC,SAAS,CAEzB,EAAc,IAAIC,EAAAA,QAAY,UAClC,EAAE,CACFA,EAAAA,QAAY,QAAQ,eACrB,CAGD,EAAA,EAAO,KACL,EAAA,QAAO,QACL,sCAAsC,EAAW,aAAa,EAAW,OAAO,0BACjF,CACF,CAED,IAAI,EAAQ,EACZ,EAAY,MAAM,EAAW,OAAQ,EAAE,CACvC,MAAMC,EAAAA,GACJ,EACA,KAAO,IAAc,CACnB,IAAM,EAAkB,MAAMC,EAAAA,GAAqB,EAAQ,CACzD,YACA,aACD,CAAC,CAEF,GAAI,CACF,MAAMC,EAAAA,EAGH,EAAQC,EAAAA,GAAiC,CAC1C,kBAAmB,EAAgB,GACnC,SACD,CAAC,OACK,EAAK,CACZ,GACE,CAAC,EAAI,QAAQ,SAAS,wCAAwC,EAC9D,CAAC,EAAI,QAAQ,SAAS,iCAAiC,CAEvD,MAAM,EAIV,GAAS,EACT,EAAY,OAAO,EAAM,EAE3B,CAAE,cAAa,CAChB,CAED,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EAOvB,OALA,EAAA,EAAO,KACL,EAAA,QAAO,MACL,uCAAuC,EAAY,IAAK,YACzD,CACF,CACM,EAAW,OC9DpB,eAAsB,EAA4C,CAChE,aACA,OACA,aACA,UACA,cAAc,IACd,eAAe,IACf,SACA,eAAeC,EAAAA,EACf,mBAAmB,IAuBlB,CAED,GAAI,EAAe,IAAgB,EACjC,MAAU,MACR,iEAAiE,EAAa,iBAAiB,IAChG,CAIH,IAAM,EAAS,MAAMC,EAAAA,GAAwB,EAAc,EAAM,EAAW,CAGtE,EAASC,EAAAA,GAA4B,EAAc,EAAK,CAE1D,EAAoB,EACnB,IACH,EAAoB,MAAMC,EAAAA,GAAgC,EAAQ,CAChE,aACD,CAAC,EAGJ,EAAA,EAAO,KACL,EAAA,QAAO,QACL,WACE,EAAmB,MAAQ,EAC5B,mDACoB,EAAW,2BAA2B,EAAQ,KAC/D,OACD,CAAC,GACL,CACF,CAGD,IAAM,EAAK,IAAI,MAAM,CAAC,SAAS
,CAEzB,EAAc,IAAIC,EAAAA,QAAY,UAClC,EAAE,CACFA,EAAAA,QAAY,QAAQ,eACrB,CACK,EAAkB,IAAI,IAGtB,EAA0C,EAAE,CAE9C,EAAyC,EAAE,CAG1C,GACH,EAAY,MAAM,EAAmB,EAAE,CAEzC,MAAMC,EAAAA,GAAU,EAAS,KAAO,IAAW,CACzC,IAAI,EAAS,EACT,EAAiB,GAGrB,KAAO,GAAgB,CACrB,IAAM,EAAkB,MAAM,EAA0B,EAAQ,CAC9D,aACA,MAAO,EACP,SACA,YAAa,EACd,CAAC,CAEI,EACJ,EAAgB,IAAK,IACnB,EAAgB,IAAI,EAAW,UAAU,CAClC,CACL,GAAG,EACH,SACD,EACD,CAEE,EAA0B,EAAsB,KACnD,CAAE,aAAY,GAAG,MAAkB,CAClC,GAAG,EACH,GAAG,EAAW,QACX,EAAK,IACJ,OAAO,OAAO,EAAK,EAChB,EAAI,KAAM,EAAI,OAAO,KAAK,IAAI,CAChC,CAAC,CACJ,EAAE,CACH,CACF,EACF,CAED,EAAY,KAAK,GAAG,EAAsB,CAC1C,EAAa,KAAK,GAAG,EAAwB,CAGzC,EAAa,QAAU,IACzB,MAAM,EAAO,EAAa,CAC1B,EAAe,EAAE,EAGnB,EAAiB,EAAgB,SAAW,EAC5C,GAAU,EACL,EAGH,EAAA,EAAO,KACL,EAAA,QAAO,QACL,UAAU,EAAgB,OAAO,+BAA+B,EAAgB,KAAK,WACtF,CACF,CAND,EAAY,OAAO,EAAgB,KAAK,GAS5C,CAGE,EAAa,OAAS,GACxB,MAAM,EAAO,EAAa,CAGvB,GACH,EAAY,MAAM,CAGpB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EAUvB,OARA,EAAA,EAAO,KACL,EAAA,QAAO,MACL,uBAAuB,EAAY,OAAO,gCACxC,EAAgB,KACjB,gBAAgB,EAAY,IAAK,YACnC,CACF,CAEM,CAAE,cAAa"}
@@ -1,4 +1,4 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./constants-wkuhlP8d.cjs`),n=require(`./syncConfigurationToTranscend-DuTZKIG8.cjs`),r=require(`./logger-DQwEYtSS.cjs`);let i=require(`@transcend-io/privacy-types`),a=require(`colors`);a=e.t(a);let o=require(`graphql-request`),s=require(`cli-progress`);s=e.t(s);async function c(e,{dataSiloIds:t=[],includeGuessedCategories:c,includeAttributes:l,parentCategories:u=[],subCategories:d=[],pageSize:f=1e3}={}){let p=[],m=new Date().getTime(),h=new s.default.SingleBar({},s.default.Presets.shades_classic),g={...u.length>0?{category:u}:{},...d.length>0?{subCategoryIds:d}:{},...u.length+d.length>0&&!c?{status:i.SubDataPointDataSubCategoryGuessStatus.Approved}:{},...t.length>0?{dataSilos:t}:{}},{subDataPoints:{totalCount:_}}=await n.i(e,n.Ss,{filterBy:g});r.t.info(a.default.magenta(`[Step 1/3] Pulling in all subdatapoints`)),h.start(_,0);let v=0,y=!1,b,x=0;do try{let{subDataPoints:{nodes:t}}=await n.i(e,o.gql`
+ const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./constants-K6pQQtc7.cjs`),n=require(`./syncConfigurationToTranscend-DKliAJhK.cjs`),r=require(`./logger-DQwEYtSS.cjs`);let i=require(`@transcend-io/privacy-types`),a=require(`colors`);a=e.t(a);let o=require(`graphql-request`),s=require(`cli-progress`);s=e.t(s);async function c(e,{dataSiloIds:t=[],includeGuessedCategories:c,includeAttributes:l,parentCategories:u=[],subCategories:d=[],pageSize:f=1e3}={}){let p=[],m=new Date().getTime(),h=new s.default.SingleBar({},s.default.Presets.shades_classic),g={...u.length>0?{category:u}:{},...d.length>0?{subCategoryIds:d}:{},...u.length+d.length>0&&!c?{status:i.SubDataPointDataSubCategoryGuessStatus.Approved}:{},...t.length>0?{dataSilos:t}:{}},{subDataPoints:{totalCount:_}}=await n.i(e,n.Ss,{filterBy:g});r.t.info(a.default.magenta(`[Step 1/3] Pulling in all subdatapoints`)),h.start(_,0);let v=0,y=!1,b,x=0;do try{let{subDataPoints:{nodes:t}}=await n.i(e,o.gql`
  query TranscendCliSubDataPointCsvExport(
  $filterBy: SubDataPointFiltersInput
  $first: Int!
@@ -72,4 +72,4 @@ const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./constants-wkuhlP8d.cjs`),n=
  }
  }
  `,{first:u,offset:y,filterBy:{...m}});v=t[t.length-1]?.id,d.push(...t),_=t.length===u,g+=t.length,y+=t.length,p.update(g)}catch(e){throw r.t.error(a.default.red(`An error fetching subdatapoints for cursor ${v} and offset ${y}`)),e}while(_);p.stop();let b=new Date().getTime()-f,x=n.ks(d,`name`);return r.t.info(a.default.green(`Successfully pulled in ${x.length} subdatapoints in ${b/1e3} seconds!`)),x}Object.defineProperty(exports,`n`,{enumerable:!0,get:function(){return d}}),Object.defineProperty(exports,`t`,{enumerable:!0,get:function(){return f}});
- //# sourceMappingURL=data-inventory-C1eqZk1M.cjs.map
+ //# sourceMappingURL=data-inventory-BKAQGjFN.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"data-inventory-C1eqZk1M.cjs","names":["cliProgress","SubDataPointDataSubCategoryGuessStatus","makeGraphQLRequest","SUB_DATA_POINTS_COUNT","sortBy","chunk","mapSeries","DATAPOINT_EXPORT","DATA_SILO_EXPORT","keyBy","uniq","cliProgress","makeGraphQLRequest","ENTRY_COUNT","sortBy"],"sources":["../src/lib/data-inventory/pullAllDatapoints.ts","../src/lib/data-inventory/pullUnstructuredSubDataPointRecommendations.ts"],"sourcesContent":["/* eslint-disable max-lines */\nimport { keyBy, uniq, chunk, sortBy } from 'lodash-es';\nimport {\n type DataCategoryType,\n SubDataPointDataSubCategoryGuessStatus,\n} from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport { gql } from 'graphql-request';\nimport colors from 'colors';\nimport type { GraphQLClient } from 'graphql-request';\nimport {\n DATAPOINT_EXPORT,\n DATA_SILO_EXPORT,\n type DataSiloAttributeValue,\n SUB_DATA_POINTS_COUNT,\n makeGraphQLRequest,\n} from '../graphql';\nimport { logger } from '../../logger';\nimport type { DataCategoryInput, ProcessingPurposeInput } from '../../codecs';\nimport { mapSeries } from '../bluebird';\n\nexport interface DataSiloCsvPreview {\n /** ID of dataSilo */\n id: string;\n /** Name of dataSilo */\n title: string;\n}\n\nexport interface DataPointCsvPreview {\n /** ID of dataPoint */\n id: string;\n /** The path to this data point */\n path: string[];\n /** Description */\n description: {\n /** Default message */\n defaultMessage: string;\n };\n /** Name */\n name: string;\n}\n\nexport interface SubDataPointCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Name (or key) of the subdatapoint */\n name: string;\n /** The description */\n description?: string;\n /** Personal data category */\n categories: DataCategoryInput[];\n /** Data point ID */\n dataPointId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** The processing purpose for this sub datapoint */\n purposes: ProcessingPurposeInput[];\n /** Attribute attached to subdatapoint */\n attributeValues?: DataSiloAttributeValue[];\n /** Data category guesses that are output by the classifier */\n pendingCategoryGuesses?: {\n /** Data category being guessed */\n category: DataCategoryInput;\n /** Status of guess */\n status: SubDataPointDataSubCategoryGuessStatus;\n /** classifier version that produced the guess */\n classifierVersion: number;\n }[];\n}\n\nexport interface DatapointFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Whether to include guessed categories, defaults to only approved categories */\n includeGuessedCategories?: boolean;\n /** Whether or not to include attributes */\n includeAttributes?: boolean;\n /** Parent categories to filter down for */\n parentCategories?: DataCategoryType[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n}\n\n/**\n * Pull subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The subdatapoints\n */\nasync function pullSubDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<SubDataPointCsvPreview[]> {\n const subDataPoints: SubDataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a 
new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(parentCategories.length > 0 ? { category: parentCategories } : {}),\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n // if parentCategories or subCategories and not includeGuessedCategories\n ...(parentCategories.length + subCategories.length > 0 &&\n !includeGuessedCategories\n ? // then only show data points with approved data categories\n { status: SubDataPointDataSubCategoryGuessStatus.Approved }\n : {}),\n ...(dataSiloIds.length > 0 ? { dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n subDataPoints: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** Count */\n totalCount: number;\n };\n }>(client, SUB_DATA_POINTS_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n subDataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** List of matches */\n nodes: SubDataPointCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliSubDataPointCsvExport(\n $filterBy: SubDataPointFiltersInput\n $first: Int!\n $offset: Int!\n ) {\n subDataPoints(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n name\n description\n dataPointId\n dataSiloId\n purposes {\n name\n purpose\n }\n categories {\n name\n category\n }\n ${\n includeGuessedCategories\n ? `pendingCategoryGuesses {\n category {\n name\n category\n }\n status\n classifierVersion\n }`\n : ''\n }\n ${\n includeAttributes\n ? `attributeValues {\n attributeKey {\n name\n }\n name\n }`\n : ''\n }\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n // TODO: https://transcend.height.app/T-40484 - add cursor support\n // ...(cursor ? 
{ cursor: { id: cursor } } : {}),\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n subDataPoints.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(subDataPoints, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n\n/**\n * Pull datapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints\n */\nasync function pullDatapoints(\n client: GraphQLClient,\n {\n dataPointIds = [],\n pageSize = 100,\n }: {\n /** IDs of data points to filter down */\n dataPointIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataPointCsvPreview[]> {\n const dataPoints: DataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 2/3] Fetching metadata for ${dataPointIds.length} datapoints`,\n ),\n );\n\n // Group by 100\n const dataPointsGrouped = chunk(dataPointIds, pageSize);\n\n progressBar.start(dataPointIds.length, 0);\n let total = 0;\n await mapSeries(dataPointsGrouped, async (dataPointIdsGroup) => {\n try {\n const {\n dataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataPoints: {\n /** List of matches */\n nodes: DataPointCsvPreview[];\n };\n }>(client, DATAPOINT_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataPointIdsGroup,\n },\n });\n\n dataPoints.push(...nodes);\n total += dataPointIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for IDs ${dataPointIdsGroup.join(\n ', ',\n )}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataPoints.length} dataPoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataPoints;\n}\n\n/**\n * Pull data silo information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The data silos\n */\nasync function pullDataSilos(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n pageSize = 100,\n }: {\n /** IDs of data silos to filter down */\n dataSiloIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataSiloCsvPreview[]> {\n const dataSilos: DataSiloCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 3/3] Fetching metadata for ${dataSiloIds.length} data silos`,\n ),\n );\n\n // Group by 100\n const dataSilosGrouped = chunk(dataSiloIds, pageSize);\n\n progressBar.start(dataSiloIds.length, 0);\n let total = 0;\n await mapSeries(dataSilosGrouped, async 
(dataSiloIdsGroup) => {\n try {\n const {\n dataSilos: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataSilos: {\n /** List of matches */\n nodes: DataSiloCsvPreview[];\n };\n }>(client, DATA_SILO_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataSiloIdsGroup,\n },\n });\n\n dataSilos.push(...nodes);\n total += dataSiloIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching data silos for IDs ${dataSiloIdsGroup.join(', ')}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataSilos.length} data silos in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataSilos;\n}\n\n/**\n * Pull all datapoints from the data inventory.\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints and data silos\n */\nexport async function pullAllDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<\n (SubDataPointCsvPreview & {\n /** Data point information */\n dataPoint: DataPointCsvPreview;\n /** Data silo information */\n dataSilo: DataSiloCsvPreview;\n })[]\n> {\n // Subdatapoint information\n const subDatapoints = await pullSubDatapoints(client, {\n dataSiloIds,\n includeGuessedCategories,\n includeAttributes,\n parentCategories,\n subCategories,\n pageSize,\n });\n\n // The datapoint ids to grab\n const dataPointIds = uniq(subDatapoints.map((point) => point.dataPointId));\n const dataPoints = await pullDatapoints(client, {\n dataPointIds,\n });\n const dataPointById = keyBy(dataPoints, 'id');\n\n // The data silo IDs to grab\n const allDataSiloIds = uniq(subDatapoints.map((point) => point.dataSiloId));\n const dataSilos = await pullDataSilos(client, {\n dataSiloIds: allDataSiloIds,\n });\n const dataSiloById = keyBy(dataSilos, 'id');\n\n return subDatapoints.map((subDataPoint) => ({\n ...subDataPoint,\n dataPoint: dataPointById[subDataPoint.dataPointId],\n dataSilo: dataSiloById[subDataPoint.dataSiloId],\n }));\n}\n/* eslint-enable max-lines */\n","import type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport { gql, type GraphQLClient } from 'graphql-request';\nimport { sortBy } from 'lodash-es';\nimport type { DataCategoryInput } from '../../codecs';\nimport { ENTRY_COUNT, makeGraphQLRequest } from '../graphql';\nimport { logger } from '../../logger';\n\ninterface UnstructuredSubDataPointRecommendationCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Entry or Named Entity recognized by the classifier */\n name: string;\n /** Context snippet including entry */\n contextSnippet: string;\n /** Scanned object ID */\n scannedObjectId: string;\n /** Scanned object path ID */\n scannedObjectPathId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** Personal data category */\n dataSubCategory: DataCategoryInput;\n /** Classification Status */\n status: UnstructuredSubDataPointRecommendationStatus;\n /** Confidence */\n confidence: number;\n /** Classification method */\n classificationMethod: string;\n /** Classifier version */\n classifierVersion: string;\n}\n\ninterface 
EntryFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Parent categories to filter down for */\n status?: UnstructuredSubDataPointRecommendationStatus[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n /** Include entry and snippet */\n includeEncryptedSnippets?: boolean;\n /** Include encryptedSamplesS3Key */\n includeEncryptedSamplesS3Key?: boolean;\n}\n/**\n * Pull unstructured subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @param options.dataSiloIds - IDs of data silos to filter down\n * @param options.status - Parent categories to filter down for\n * @param options.subCategories - Sub categories to filter down for\n * @param options.includeEncryptedSnippets - Include entry and snippet\n * @param options.includeEncryptedSamplesS3Key - Include encryptedSamplesS3Key\n * @param options.pageSize - Page size to pull in\n * @returns A promise that resolves to an array of unstructured subdatapoint recommendations\n */\nexport async function pullUnstructuredSubDataPointRecommendations(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n status,\n subCategories = [],\n includeEncryptedSnippets,\n pageSize = 100,\n }: EntryFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<UnstructuredSubDataPointRecommendationCsvPreview[]> {\n const unstructuredSubDataPointRecommendations: UnstructuredSubDataPointRecommendationCsvPreview[] =\n [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n ...(status ? { status } : {}),\n ...(dataSiloIds.length > 0 ? { dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n unstructuredSubDataPointRecommendations: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** Count */\n totalCount: number;\n };\n }>(client, ENTRY_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n unstructuredSubDataPointRecommendations: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** List of matches */\n nodes: UnstructuredSubDataPointRecommendationCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliUnstructuredSubDataPointRecommendationCsvExport(\n $filterBy: UnstructuredSubDataPointRecommendationsFilterInput\n $first: Int!\n $offset: Int!\n ) {\n unstructuredSubDataPointRecommendations(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n dataSiloId\n scannedObjectPathId\n scannedObjectId\n ${includeEncryptedSnippets ? 'name' : ''}\n ${includeEncryptedSnippets ? 
'contextSnippet' : ''}\n dataSubCategory {\n name\n category\n }\n status\n confidence\n classificationMethod\n classifierVersion\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n unstructuredSubDataPointRecommendations.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(unstructuredSubDataPointRecommendations, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n"],"mappings":"wTA0FA,eAAe,EACb,EACA,CACE,cAAc,EAAE,CAChB,2BACA,oBACA,mBAAmB,EAAE,CACrB,gBAAgB,EAAE,CAClB,WAAW,KAIT,EAAE,CAC6B,CACnC,IAAM,EAA0C,EAAE,CAG5C,EAAK,IAAI,MAAM,CAAC,SAAS,CAGzB,EAAc,IAAIA,EAAAA,QAAY,UAClC,EAAE,CACFA,EAAAA,QAAY,QAAQ,eACrB,CAGK,EAAW,CACf,GAAI,EAAiB,OAAS,EAAI,CAAE,SAAU,EAAkB,CAAG,EAAE,CACrE,GAAI,EAAc,OAAS,EAAI,CAAE,eAAgB,EAAe,CAAG,EAAE,CAErE,GAAI,EAAiB,OAAS,EAAc,OAAS,GACrD,CAAC,EAEG,CAAE,OAAQC,EAAAA,uCAAuC,SAAU,CAC3D,EAAE,CACN,GAAI,EAAY,OAAS,EAAI,CAAE,UAAW,EAAa,CAAG,EAAE,CAC7D,CAGK,CACJ,cAAe,CAAE,eACf,MAAMC,EAAAA,EAMP,EAAQC,EAAAA,GAAuB,CAChC,WACD,CAAC,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,0CAA0C,CAAC,CAEtE,EAAY,MAAM,EAAY,EAAE,CAChC,IAAI,EAAQ,EACR,EAAiB,GACjB,EACA,EAAS,EACb,EACE,IAAI,CACF,GAAM,CACJ,cAAe,CAAE,UACf,MAAMD,EAAAA,EAOR,EACA,EAAA,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;kBA2BO,EACI;;;;;;;mBAQA,GACL;kBAEC,EACI;;;;;mBAMA,GACL;;;;UAKT,CACE,MAAO,EACP,SACA,SAAU,CACR,GAAG,EAGJ,CACF,CACF,CAED,EAAS,EAAM,EAAM,OAAS,IAAI,GAClC,EAAc,KAAK,GAAG,EAAM,CAC5B,EAAiB,EAAM,SAAW,EAClC,GAAS,EAAM,OACf,GAAU,EAAM,OAChB,EAAY,OAAO,EAAM,OAClB,EAAK,CAMZ,MALA,EAAA,EAAO,MACL,EAAA,QAAO,IACL,8CAA8C,EAAO,cAAc,IACpE,CACF,CACK,QAED,GAET,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EAEjB,EAASE,EAAAA,GAAO,EAAe,OAAO,CAS5C,OAPA,EAAA,EAAO,KACL,EAAA,QAAO,MACL,0BAA0B,EAAO,OAAO,oBACtC,EAAY,IACb,WACF,CACF,CACM,EAUT,eAAe,EACb,EACA,CACE,eAAe,EAAE,CACjB,WAAW,KAOmB,CAChC,IAAM,EAAoC,EAAE,CAGtC,EAAK,IAAI,MAAM,CAAC,SAAS,CAGzB,EAAc,IAAIJ,EAAAA,QAAY,UAClC,EAAE,CACFA,EAAAA,QAAY,QAAQ,eACrB,CAED,EAAA,EAAO,KACL,EAAA,QAAO,QACL,oCAAoC,EAAa,OAAO,aACzD,CACF,CAGD,IAAM,EAAoBK,EAAAA,GAAM,EAAc,EAAS,CAEvD,EAAY,MAAM,EAAa,OAAQ,EAAE,CACzC,IAAI,EAAQ,EACZ,MAAMC,EAAAA,GAAU,EAAmB,KAAO,IAAsB,CAC9D,GAAI,CACF,GAAM,CACJ,WAAY,CAAE,UACZ,MAAMJ,EAAAA,EAMP,EAAQK,EAAAA,GAAkB,CAC3B,MAAO,EACP,SAAU,CACR,IAAK,EACN,CACF,CAAC,CAEF,EAAW,KAAK,GAAG,EAAM,CACzB,GAAS,EAAkB,OAC3B,EAAY,OAAO,EAAM,OAClB,EAAK,CAQZ,MAPA,EAAA,EAAO,MACL,EAAA,QAAO,IACL,2CAA2C,EAAkB,KAC3D,KACD,GACF,CACF,CACK,IAER,CAEF,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EASvB,OAPA,EAAA,EAAO,KACL,EAAA,QAAO,MACL,0BAA0B,EAAW,OAAO,iBAC1C,EAAY,IACb,WACF,CACF,CACM,EAUT,eAAe,EACb,EACA,CACE,cAAc,EAAE,CAChB,WAAW,KAOkB,CAC/B,IAAM,EAAkC,EAAE,CAGpC,EAAK,IAAI,MAAM,CAAC,SAAS,CAGzB,EAAc,IAAIP,EAAAA,QAAY,UAClC,EAAE,CACFA,EAAAA,QAAY,QAAQ,eACrB,CAED,EAAA,EAAO,KACL,EAAA,QAAO,QACL,oCAAoC,EAAY,OAAO,aACxD,CACF,CAGD,IAAM,EAAmBK,EAAAA,GAAM,EAAa,EAAS,CAErD,EAAY,MAAM,EAAY,OAAQ,EAAE,CACxC,IAAI,EAAQ,EACZ,MAAMC,EAAAA,GAAU,EAAkB,KAAO,IAAqB,CAC5D,GAAI,CACF,GAAM,CACJ,UAAW,CAAE,UACX,MAAMJ,EAAAA,EAMP,EAAQM,EAAAA,GAAkB,CAC3B,MAAO,EACP,SAAU,CACR,IAAK,EACN,CACF,CAAC,CAEF,EAAU,KA
AK,GAAG,EAAM,CACxB,GAAS,EAAiB,OAC1B,EAAY,OAAO,EAAM,OAClB,EAAK,CAMZ,MALA,EAAA,EAAO,MACL,EAAA,QAAO,IACL,wCAAwC,EAAiB,KAAK,KAAK,GACpE,CACF,CACK,IAER,CAEF,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EASvB,OAPA,EAAA,EAAO,KACL,EAAA,QAAO,MACL,0BAA0B,EAAU,OAAO,iBACzC,EAAY,IACb,WACF,CACF,CACM,EAUT,eAAsB,EACpB,EACA,CACE,cAAc,EAAE,CAChB,2BACA,oBACA,mBAAmB,EAAE,CACrB,gBAAgB,EAAE,CAClB,WAAW,KAIT,EAAE,CAQN,CAEA,IAAM,EAAgB,MAAM,EAAkB,EAAQ,CACpD,cACA,2BACA,oBACA,mBACA,gBACA,WACD,CAAC,CAOI,EAAgBC,EAAAA,EAHH,MAAM,EAAe,EAAQ,CAC9C,aAFmBC,EAAAA,GAAK,EAAc,IAAK,GAAU,EAAM,YAAY,CAAC,CAGzE,CAAC,CACsC,KAAK,CAOvC,EAAeD,EAAAA,EAHH,MAAM,EAAc,EAAQ,CAC5C,YAFqBC,EAAAA,GAAK,EAAc,IAAK,GAAU,EAAM,WAAW,CAAC,CAG1E,CAAC,CACoC,KAAK,CAE3C,OAAO,EAAc,IAAK,IAAkB,CAC1C,GAAG,EACH,UAAW,EAAc,EAAa,aACtC,SAAU,EAAa,EAAa,YACrC,EAAE,CCxaL,eAAsB,EACpB,EACA,CACE,cAAc,EAAE,CAChB,SACA,gBAAgB,EAAE,CAClB,2BACA,WAAW,KAIT,EAAE,CACuD,CAC7D,IAAM,EACJ,EAAE,CAGE,EAAK,IAAI,MAAM,CAAC,SAAS,CAGzB,EAAc,IAAIC,EAAAA,QAAY,UAClC,EAAE,CACFA,EAAAA,QAAY,QAAQ,eACrB,CAGK,EAAW,CACf,GAAI,EAAc,OAAS,EAAI,CAAE,eAAgB,EAAe,CAAG,EAAE,CACrE,GAAI,EAAS,CAAE,SAAQ,CAAG,EAAE,CAC5B,GAAI,EAAY,OAAS,EAAI,CAAE,UAAW,EAAa,CAAG,EAAE,CAC7D,CAGK,CACJ,wCAAyC,CAAE,eACzC,MAAMC,EAAAA,EAMP,EAAQC,EAAAA,GAAa,CACtB,WACD,CAAC,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,0CAA0C,CAAC,CAEtE,EAAY,MAAM,EAAY,EAAE,CAChC,IAAI,EAAQ,EACR,EAAiB,GACjB,EACA,EAAS,EACb,EACE,IAAI,CACF,GAAM,CACJ,wCAAyC,CAAE,UACzC,MAAMD,EAAAA,EAOR,EACA,EAAA,GAAG;;;;;;;;;;;;;;;;;kBAiBO,EAA2B,OAAS,GAAG;kBACvC,EAA2B,iBAAmB,GAAG;;;;;;;;;;;;UAa3D,CACE,MAAO,EACP,SACA,SAAU,CACR,GAAG,EACJ,CACF,CACF,CAED,EAAS,EAAM,EAAM,OAAS,IAAI,GAClC,EAAwC,KAAK,GAAG,EAAM,CACtD,EAAiB,EAAM,SAAW,EAClC,GAAS,EAAM,OACf,GAAU,EAAM,OAChB,EAAY,OAAO,EAAM,OAClB,EAAK,CAMZ,MALA,EAAA,EAAO,MACL,EAAA,QAAO,IACL,8CAA8C,EAAO,cAAc,IACpE,CACF,CACK,QAED,GAET,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EAEjB,EAASE,EAAAA,GAAO,EAAyC,OAAO,CAStE,OAPA,EAAA,EAAO,KACL,EAAA,QAAO,MACL,0BAA0B,EAAO,OAAO,oBACtC,EAAY,IACb,WACF,CACF,CACM"}
+ {"version":3,"file":"data-inventory-BKAQGjFN.cjs","names":["cliProgress","SubDataPointDataSubCategoryGuessStatus","makeGraphQLRequest","SUB_DATA_POINTS_COUNT","sortBy","chunk","mapSeries","DATAPOINT_EXPORT","DATA_SILO_EXPORT","keyBy","uniq","cliProgress","makeGraphQLRequest","ENTRY_COUNT","sortBy"],"sources":["../src/lib/data-inventory/pullAllDatapoints.ts","../src/lib/data-inventory/pullUnstructuredSubDataPointRecommendations.ts"],"sourcesContent":["/* eslint-disable max-lines */\nimport { keyBy, uniq, chunk, sortBy } from 'lodash-es';\nimport {\n type DataCategoryType,\n SubDataPointDataSubCategoryGuessStatus,\n} from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport { gql } from 'graphql-request';\nimport colors from 'colors';\nimport type { GraphQLClient } from 'graphql-request';\nimport {\n DATAPOINT_EXPORT,\n DATA_SILO_EXPORT,\n type DataSiloAttributeValue,\n SUB_DATA_POINTS_COUNT,\n makeGraphQLRequest,\n} from '../graphql';\nimport { logger } from '../../logger';\nimport type { DataCategoryInput, ProcessingPurposeInput } from '../../codecs';\nimport { mapSeries } from '../bluebird';\n\nexport interface DataSiloCsvPreview {\n /** ID of dataSilo */\n id: string;\n /** Name of dataSilo */\n title: string;\n}\n\nexport interface DataPointCsvPreview {\n /** ID of dataPoint */\n id: string;\n /** The path to this data point */\n path: string[];\n /** Description */\n description: {\n /** Default message */\n defaultMessage: string;\n };\n /** Name */\n name: string;\n}\n\nexport interface SubDataPointCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Name (or key) of the subdatapoint */\n name: string;\n /** The description */\n description?: string;\n /** Personal data category */\n categories: DataCategoryInput[];\n /** Data point ID */\n dataPointId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** The processing purpose for this sub datapoint */\n purposes: ProcessingPurposeInput[];\n /** Attribute attached to subdatapoint */\n attributeValues?: DataSiloAttributeValue[];\n /** Data category guesses that are output by the classifier */\n pendingCategoryGuesses?: {\n /** Data category being guessed */\n category: DataCategoryInput;\n /** Status of guess */\n status: SubDataPointDataSubCategoryGuessStatus;\n /** classifier version that produced the guess */\n classifierVersion: number;\n }[];\n}\n\nexport interface DatapointFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Whether to include guessed categories, defaults to only approved categories */\n includeGuessedCategories?: boolean;\n /** Whether or not to include attributes */\n includeAttributes?: boolean;\n /** Parent categories to filter down for */\n parentCategories?: DataCategoryType[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n}\n\n/**\n * Pull subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The subdatapoints\n */\nasync function pullSubDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<SubDataPointCsvPreview[]> {\n const subDataPoints: SubDataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a 
new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(parentCategories.length > 0 ? { category: parentCategories } : {}),\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n // if parentCategories or subCategories and not includeGuessedCategories\n ...(parentCategories.length + subCategories.length > 0 &&\n !includeGuessedCategories\n ? // then only show data points with approved data categories\n { status: SubDataPointDataSubCategoryGuessStatus.Approved }\n : {}),\n ...(dataSiloIds.length > 0 ? { dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n subDataPoints: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** Count */\n totalCount: number;\n };\n }>(client, SUB_DATA_POINTS_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n subDataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** List of matches */\n nodes: SubDataPointCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliSubDataPointCsvExport(\n $filterBy: SubDataPointFiltersInput\n $first: Int!\n $offset: Int!\n ) {\n subDataPoints(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n name\n description\n dataPointId\n dataSiloId\n purposes {\n name\n purpose\n }\n categories {\n name\n category\n }\n ${\n includeGuessedCategories\n ? `pendingCategoryGuesses {\n category {\n name\n category\n }\n status\n classifierVersion\n }`\n : ''\n }\n ${\n includeAttributes\n ? `attributeValues {\n attributeKey {\n name\n }\n name\n }`\n : ''\n }\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n // TODO: https://transcend.height.app/T-40484 - add cursor support\n // ...(cursor ? 
{ cursor: { id: cursor } } : {}),\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n subDataPoints.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(subDataPoints, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n\n/**\n * Pull datapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints\n */\nasync function pullDatapoints(\n client: GraphQLClient,\n {\n dataPointIds = [],\n pageSize = 100,\n }: {\n /** IDs of data points to filter down */\n dataPointIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataPointCsvPreview[]> {\n const dataPoints: DataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 2/3] Fetching metadata for ${dataPointIds.length} datapoints`,\n ),\n );\n\n // Group by 100\n const dataPointsGrouped = chunk(dataPointIds, pageSize);\n\n progressBar.start(dataPointIds.length, 0);\n let total = 0;\n await mapSeries(dataPointsGrouped, async (dataPointIdsGroup) => {\n try {\n const {\n dataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataPoints: {\n /** List of matches */\n nodes: DataPointCsvPreview[];\n };\n }>(client, DATAPOINT_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataPointIdsGroup,\n },\n });\n\n dataPoints.push(...nodes);\n total += dataPointIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for IDs ${dataPointIdsGroup.join(\n ', ',\n )}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataPoints.length} dataPoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataPoints;\n}\n\n/**\n * Pull data silo information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The data silos\n */\nasync function pullDataSilos(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n pageSize = 100,\n }: {\n /** IDs of data silos to filter down */\n dataSiloIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataSiloCsvPreview[]> {\n const dataSilos: DataSiloCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 3/3] Fetching metadata for ${dataSiloIds.length} data silos`,\n ),\n );\n\n // Group by 100\n const dataSilosGrouped = chunk(dataSiloIds, pageSize);\n\n progressBar.start(dataSiloIds.length, 0);\n let total = 0;\n await mapSeries(dataSilosGrouped, async 
(dataSiloIdsGroup) => {\n try {\n const {\n dataSilos: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataSilos: {\n /** List of matches */\n nodes: DataSiloCsvPreview[];\n };\n }>(client, DATA_SILO_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataSiloIdsGroup,\n },\n });\n\n dataSilos.push(...nodes);\n total += dataSiloIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching data silos for IDs ${dataSiloIdsGroup.join(', ')}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataSilos.length} data silos in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataSilos;\n}\n\n/**\n * Pull all datapoints from the data inventory.\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints and data silos\n */\nexport async function pullAllDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<\n (SubDataPointCsvPreview & {\n /** Data point information */\n dataPoint: DataPointCsvPreview;\n /** Data silo information */\n dataSilo: DataSiloCsvPreview;\n })[]\n> {\n // Subdatapoint information\n const subDatapoints = await pullSubDatapoints(client, {\n dataSiloIds,\n includeGuessedCategories,\n includeAttributes,\n parentCategories,\n subCategories,\n pageSize,\n });\n\n // The datapoint ids to grab\n const dataPointIds = uniq(subDatapoints.map((point) => point.dataPointId));\n const dataPoints = await pullDatapoints(client, {\n dataPointIds,\n });\n const dataPointById = keyBy(dataPoints, 'id');\n\n // The data silo IDs to grab\n const allDataSiloIds = uniq(subDatapoints.map((point) => point.dataSiloId));\n const dataSilos = await pullDataSilos(client, {\n dataSiloIds: allDataSiloIds,\n });\n const dataSiloById = keyBy(dataSilos, 'id');\n\n return subDatapoints.map((subDataPoint) => ({\n ...subDataPoint,\n dataPoint: dataPointById[subDataPoint.dataPointId],\n dataSilo: dataSiloById[subDataPoint.dataSiloId],\n }));\n}\n/* eslint-enable max-lines */\n","import type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport { gql, type GraphQLClient } from 'graphql-request';\nimport { sortBy } from 'lodash-es';\nimport type { DataCategoryInput } from '../../codecs';\nimport { ENTRY_COUNT, makeGraphQLRequest } from '../graphql';\nimport { logger } from '../../logger';\n\ninterface UnstructuredSubDataPointRecommendationCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Entry or Named Entity recognized by the classifier */\n name: string;\n /** Context snippet including entry */\n contextSnippet: string;\n /** Scanned object ID */\n scannedObjectId: string;\n /** Scanned object path ID */\n scannedObjectPathId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** Personal data category */\n dataSubCategory: DataCategoryInput;\n /** Classification Status */\n status: UnstructuredSubDataPointRecommendationStatus;\n /** Confidence */\n confidence: number;\n /** Classification method */\n classificationMethod: string;\n /** Classifier version */\n classifierVersion: string;\n}\n\ninterface 
EntryFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Parent categories to filter down for */\n status?: UnstructuredSubDataPointRecommendationStatus[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n /** Include entry and snippet */\n includeEncryptedSnippets?: boolean;\n /** Include encryptedSamplesS3Key */\n includeEncryptedSamplesS3Key?: boolean;\n}\n/**\n * Pull unstructured subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @param options.dataSiloIds - IDs of data silos to filter down\n * @param options.status - Parent categories to filter down for\n * @param options.subCategories - Sub categories to filter down for\n * @param options.includeEncryptedSnippets - Include entry and snippet\n * @param options.includeEncryptedSamplesS3Key - Include encryptedSamplesS3Key\n * @param options.pageSize - Page size to pull in\n * @returns A promise that resolves to an array of unstructured subdatapoint recommendations\n */\nexport async function pullUnstructuredSubDataPointRecommendations(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n status,\n subCategories = [],\n includeEncryptedSnippets,\n pageSize = 100,\n }: EntryFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<UnstructuredSubDataPointRecommendationCsvPreview[]> {\n const unstructuredSubDataPointRecommendations: UnstructuredSubDataPointRecommendationCsvPreview[] =\n [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n ...(status ? { status } : {}),\n ...(dataSiloIds.length > 0 ? { dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n unstructuredSubDataPointRecommendations: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** Count */\n totalCount: number;\n };\n }>(client, ENTRY_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n unstructuredSubDataPointRecommendations: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** List of matches */\n nodes: UnstructuredSubDataPointRecommendationCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliUnstructuredSubDataPointRecommendationCsvExport(\n $filterBy: UnstructuredSubDataPointRecommendationsFilterInput\n $first: Int!\n $offset: Int!\n ) {\n unstructuredSubDataPointRecommendations(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n dataSiloId\n scannedObjectPathId\n scannedObjectId\n ${includeEncryptedSnippets ? 'name' : ''}\n ${includeEncryptedSnippets ? 
'contextSnippet' : ''}\n dataSubCategory {\n name\n category\n }\n status\n confidence\n classificationMethod\n classifierVersion\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n unstructuredSubDataPointRecommendations.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(unstructuredSubDataPointRecommendations, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n"],"mappings":"wTA0FA,eAAe,EACb,EACA,CACE,cAAc,EAAE,CAChB,2BACA,oBACA,mBAAmB,EAAE,CACrB,gBAAgB,EAAE,CAClB,WAAW,KAIT,EAAE,CAC6B,CACnC,IAAM,EAA0C,EAAE,CAG5C,EAAK,IAAI,MAAM,CAAC,SAAS,CAGzB,EAAc,IAAIA,EAAAA,QAAY,UAClC,EAAE,CACFA,EAAAA,QAAY,QAAQ,eACrB,CAGK,EAAW,CACf,GAAI,EAAiB,OAAS,EAAI,CAAE,SAAU,EAAkB,CAAG,EAAE,CACrE,GAAI,EAAc,OAAS,EAAI,CAAE,eAAgB,EAAe,CAAG,EAAE,CAErE,GAAI,EAAiB,OAAS,EAAc,OAAS,GACrD,CAAC,EAEG,CAAE,OAAQC,EAAAA,uCAAuC,SAAU,CAC3D,EAAE,CACN,GAAI,EAAY,OAAS,EAAI,CAAE,UAAW,EAAa,CAAG,EAAE,CAC7D,CAGK,CACJ,cAAe,CAAE,eACf,MAAMC,EAAAA,EAMP,EAAQC,EAAAA,GAAuB,CAChC,WACD,CAAC,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,0CAA0C,CAAC,CAEtE,EAAY,MAAM,EAAY,EAAE,CAChC,IAAI,EAAQ,EACR,EAAiB,GACjB,EACA,EAAS,EACb,EACE,IAAI,CACF,GAAM,CACJ,cAAe,CAAE,UACf,MAAMD,EAAAA,EAOR,EACA,EAAA,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;kBA2BO,EACI;;;;;;;mBAQA,GACL;kBAEC,EACI;;;;;mBAMA,GACL;;;;UAKT,CACE,MAAO,EACP,SACA,SAAU,CACR,GAAG,EAGJ,CACF,CACF,CAED,EAAS,EAAM,EAAM,OAAS,IAAI,GAClC,EAAc,KAAK,GAAG,EAAM,CAC5B,EAAiB,EAAM,SAAW,EAClC,GAAS,EAAM,OACf,GAAU,EAAM,OAChB,EAAY,OAAO,EAAM,OAClB,EAAK,CAMZ,MALA,EAAA,EAAO,MACL,EAAA,QAAO,IACL,8CAA8C,EAAO,cAAc,IACpE,CACF,CACK,QAED,GAET,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EAEjB,EAASE,EAAAA,GAAO,EAAe,OAAO,CAS5C,OAPA,EAAA,EAAO,KACL,EAAA,QAAO,MACL,0BAA0B,EAAO,OAAO,oBACtC,EAAY,IACb,WACF,CACF,CACM,EAUT,eAAe,EACb,EACA,CACE,eAAe,EAAE,CACjB,WAAW,KAOmB,CAChC,IAAM,EAAoC,EAAE,CAGtC,EAAK,IAAI,MAAM,CAAC,SAAS,CAGzB,EAAc,IAAIJ,EAAAA,QAAY,UAClC,EAAE,CACFA,EAAAA,QAAY,QAAQ,eACrB,CAED,EAAA,EAAO,KACL,EAAA,QAAO,QACL,oCAAoC,EAAa,OAAO,aACzD,CACF,CAGD,IAAM,EAAoBK,EAAAA,GAAM,EAAc,EAAS,CAEvD,EAAY,MAAM,EAAa,OAAQ,EAAE,CACzC,IAAI,EAAQ,EACZ,MAAMC,EAAAA,GAAU,EAAmB,KAAO,IAAsB,CAC9D,GAAI,CACF,GAAM,CACJ,WAAY,CAAE,UACZ,MAAMJ,EAAAA,EAMP,EAAQK,EAAAA,GAAkB,CAC3B,MAAO,EACP,SAAU,CACR,IAAK,EACN,CACF,CAAC,CAEF,EAAW,KAAK,GAAG,EAAM,CACzB,GAAS,EAAkB,OAC3B,EAAY,OAAO,EAAM,OAClB,EAAK,CAQZ,MAPA,EAAA,EAAO,MACL,EAAA,QAAO,IACL,2CAA2C,EAAkB,KAC3D,KACD,GACF,CACF,CACK,IAER,CAEF,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EASvB,OAPA,EAAA,EAAO,KACL,EAAA,QAAO,MACL,0BAA0B,EAAW,OAAO,iBAC1C,EAAY,IACb,WACF,CACF,CACM,EAUT,eAAe,EACb,EACA,CACE,cAAc,EAAE,CAChB,WAAW,KAOkB,CAC/B,IAAM,EAAkC,EAAE,CAGpC,EAAK,IAAI,MAAM,CAAC,SAAS,CAGzB,EAAc,IAAIP,EAAAA,QAAY,UAClC,EAAE,CACFA,EAAAA,QAAY,QAAQ,eACrB,CAED,EAAA,EAAO,KACL,EAAA,QAAO,QACL,oCAAoC,EAAY,OAAO,aACxD,CACF,CAGD,IAAM,EAAmBK,EAAAA,GAAM,EAAa,EAAS,CAErD,EAAY,MAAM,EAAY,OAAQ,EAAE,CACxC,IAAI,EAAQ,EACZ,MAAMC,EAAAA,GAAU,EAAkB,KAAO,IAAqB,CAC5D,GAAI,CACF,GAAM,CACJ,UAAW,CAAE,UACX,MAAMJ,EAAAA,EAMP,EAAQM,EAAAA,GAAkB,CAC3B,MAAO,EACP,SAAU,CACR,IAAK,EACN,CACF,CAAC,CAEF,EAAU,KA
AK,GAAG,EAAM,CACxB,GAAS,EAAiB,OAC1B,EAAY,OAAO,EAAM,OAClB,EAAK,CAMZ,MALA,EAAA,EAAO,MACL,EAAA,QAAO,IACL,wCAAwC,EAAiB,KAAK,KAAK,GACpE,CACF,CACK,IAER,CAEF,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EASvB,OAPA,EAAA,EAAO,KACL,EAAA,QAAO,MACL,0BAA0B,EAAU,OAAO,iBACzC,EAAY,IACb,WACF,CACF,CACM,EAUT,eAAsB,EACpB,EACA,CACE,cAAc,EAAE,CAChB,2BACA,oBACA,mBAAmB,EAAE,CACrB,gBAAgB,EAAE,CAClB,WAAW,KAIT,EAAE,CAQN,CAEA,IAAM,EAAgB,MAAM,EAAkB,EAAQ,CACpD,cACA,2BACA,oBACA,mBACA,gBACA,WACD,CAAC,CAOI,EAAgBC,EAAAA,EAHH,MAAM,EAAe,EAAQ,CAC9C,aAFmBC,EAAAA,GAAK,EAAc,IAAK,GAAU,EAAM,YAAY,CAAC,CAGzE,CAAC,CACsC,KAAK,CAOvC,EAAeD,EAAAA,EAHH,MAAM,EAAc,EAAQ,CAC5C,YAFqBC,EAAAA,GAAK,EAAc,IAAK,GAAU,EAAM,WAAW,CAAC,CAG1E,CAAC,CACoC,KAAK,CAE3C,OAAO,EAAc,IAAK,IAAkB,CAC1C,GAAG,EACH,UAAW,EAAc,EAAa,aACtC,SAAU,EAAa,EAAa,YACrC,EAAE,CCxaL,eAAsB,EACpB,EACA,CACE,cAAc,EAAE,CAChB,SACA,gBAAgB,EAAE,CAClB,2BACA,WAAW,KAIT,EAAE,CACuD,CAC7D,IAAM,EACJ,EAAE,CAGE,EAAK,IAAI,MAAM,CAAC,SAAS,CAGzB,EAAc,IAAIC,EAAAA,QAAY,UAClC,EAAE,CACFA,EAAAA,QAAY,QAAQ,eACrB,CAGK,EAAW,CACf,GAAI,EAAc,OAAS,EAAI,CAAE,eAAgB,EAAe,CAAG,EAAE,CACrE,GAAI,EAAS,CAAE,SAAQ,CAAG,EAAE,CAC5B,GAAI,EAAY,OAAS,EAAI,CAAE,UAAW,EAAa,CAAG,EAAE,CAC7D,CAGK,CACJ,wCAAyC,CAAE,eACzC,MAAMC,EAAAA,EAMP,EAAQC,EAAAA,GAAa,CACtB,WACD,CAAC,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,0CAA0C,CAAC,CAEtE,EAAY,MAAM,EAAY,EAAE,CAChC,IAAI,EAAQ,EACR,EAAiB,GACjB,EACA,EAAS,EACb,EACE,IAAI,CACF,GAAM,CACJ,wCAAyC,CAAE,UACzC,MAAMD,EAAAA,EAOR,EACA,EAAA,GAAG;;;;;;;;;;;;;;;;;kBAiBO,EAA2B,OAAS,GAAG;kBACvC,EAA2B,iBAAmB,GAAG;;;;;;;;;;;;UAa3D,CACE,MAAO,EACP,SACA,SAAU,CACR,GAAG,EACJ,CACF,CACF,CAED,EAAS,EAAM,EAAM,OAAS,IAAI,GAClC,EAAwC,KAAK,GAAG,EAAM,CACtD,EAAiB,EAAM,SAAW,EAClC,GAAS,EAAM,OACf,GAAU,EAAM,OAChB,EAAY,OAAO,EAAM,OAClB,EAAK,CAMZ,MALA,EAAA,EAAO,MACL,EAAA,QAAO,IACL,8CAA8C,EAAO,cAAc,IACpE,CACF,CACK,QAED,GAET,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EAEjB,EAASE,EAAAA,GAAO,EAAyC,OAAO,CAStE,OAPA,EAAA,EAAO,KACL,EAAA,QAAO,MACL,0BAA0B,EAAO,OAAO,oBACtC,EAAY,IACb,WACF,CACF,CACM"}
@@ -1,2 +1,2 @@
- const e=require(`./syncConfigurationToTranscend-DuTZKIG8.cjs`);var t=e.Fs(function(t){return e.Os(e.Ps(t,1,e.Ms,!0))}),n=t;function r(e,{adTechPurposes:t=[`SaleOfInfo`],serviceToTitle:r,serviceToSupportedIntegration:i}){let a=[],o=[],s={};e.forEach(e=>{let{service:r,attributes:i=[]}=e;if(!r||r===`internalService`)return;let c=i.find(e=>e.key===`Found on Domain`);c&&(s[r]||(s[r]=[]),s[r].push(...c.values.map(e=>e.replace(`https://`,``).replace(`http://`,``))),s[r]=[...new Set(s[r])]),n(e.trackingPurposes,t).length>0?(o.push(r),a.includes(r)&&(a=a.filter(e=>e!==r))):o.includes(r)||a.push(r)});let c=[...new Set(o)].map(e=>({title:r[e],...i[e]?{integrationName:e}:{integrationName:`promptAPerson`,"outer-type":e},attributes:[{key:`Tech Type`,values:[`Ad Tech`]},{key:`Found On Domain`,values:s[e]||[]}]}));return{siteTechDataSilos:[...new Set(a)].map(e=>({title:r[e],...i[e]?{integrationName:e}:{integrationName:`promptAPerson`,outerType:e},attributes:[{key:`Tech Type`,values:[`Site Tech`]},{key:`Found On Domain`,values:s[e]||[]}]})),adTechDataSilos:c}}Object.defineProperty(exports,`t`,{enumerable:!0,get:function(){return r}});
- //# sourceMappingURL=dataFlowsToDataSilos-DXlFFHMV.cjs.map
+ const e=require(`./syncConfigurationToTranscend-DKliAJhK.cjs`);var t=e.Fs(function(t){return e.Os(e.Ps(t,1,e.Ms,!0))}),n=t;function r(e,{adTechPurposes:t=[`SaleOfInfo`],serviceToTitle:r,serviceToSupportedIntegration:i}){let a=[],o=[],s={};e.forEach(e=>{let{service:r,attributes:i=[]}=e;if(!r||r===`internalService`)return;let c=i.find(e=>e.key===`Found on Domain`);c&&(s[r]||(s[r]=[]),s[r].push(...c.values.map(e=>e.replace(`https://`,``).replace(`http://`,``))),s[r]=[...new Set(s[r])]),n(e.trackingPurposes,t).length>0?(o.push(r),a.includes(r)&&(a=a.filter(e=>e!==r))):o.includes(r)||a.push(r)});let c=[...new Set(o)].map(e=>({title:r[e],...i[e]?{integrationName:e}:{integrationName:`promptAPerson`,"outer-type":e},attributes:[{key:`Tech Type`,values:[`Ad Tech`]},{key:`Found On Domain`,values:s[e]||[]}]}));return{siteTechDataSilos:[...new Set(a)].map(e=>({title:r[e],...i[e]?{integrationName:e}:{integrationName:`promptAPerson`,outerType:e},attributes:[{key:`Tech Type`,values:[`Site Tech`]},{key:`Found On Domain`,values:s[e]||[]}]})),adTechDataSilos:c}}Object.defineProperty(exports,`t`,{enumerable:!0,get:function(){return r}});
+ //# sourceMappingURL=dataFlowsToDataSilos-CnvG2jqy.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"dataFlowsToDataSilos-DXlFFHMV.cjs","names":["baseRest","baseUniq","baseFlatten","isArrayLikeObject","union"],"sources":["../node_modules/.pnpm/lodash-es@4.17.21/node_modules/lodash-es/union.js","../src/lib/consent-manager/dataFlowsToDataSilos.ts"],"sourcesContent":["import baseFlatten from './_baseFlatten.js';\nimport baseRest from './_baseRest.js';\nimport baseUniq from './_baseUniq.js';\nimport isArrayLikeObject from './isArrayLikeObject.js';\n\n/**\n * Creates an array of unique values, in order, from all given arrays using\n * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)\n * for equality comparisons.\n *\n * @static\n * @memberOf _\n * @since 0.1.0\n * @category Array\n * @param {...Array} [arrays] The arrays to inspect.\n * @returns {Array} Returns the new array of combined values.\n * @example\n *\n * _.union([2], [1, 2]);\n * // => [2, 1]\n */\nvar union = baseRest(function(arrays) {\n return baseUniq(baseFlatten(arrays, 1, isArrayLikeObject, true));\n});\n\nexport default union;\n","import { DataFlowInput, DataSiloInput } from '../../codecs';\nimport { union } from 'lodash-es';\nimport { IndexedCatalogs } from '../graphql';\n\n/**\n * Convert data flow configurations into a set of data silo configurations\n *\n * @param inputs - Data flow input to convert to data silos\n * @param options - Additional options\n * @returns Business entity configuration input\n */\nexport function dataFlowsToDataSilos(\n inputs: DataFlowInput[],\n {\n adTechPurposes = ['SaleOfInfo'],\n serviceToTitle,\n serviceToSupportedIntegration,\n }: IndexedCatalogs & {\n /** List of purposes that are considered \"Ad Tech\" */\n adTechPurposes?: string[];\n },\n): {\n /** List of data silo configurations for site-tech services */\n siteTechDataSilos: DataSiloInput[];\n /** List of data silo configurations for ad-tech services */\n adTechDataSilos: DataSiloInput[];\n} {\n // List of site tech integrations\n let siteTechIntegrations: string[] = [];\n\n // List of ad tech integrations\n const adTechIntegrations: string[] = [];\n\n // Mapping from service name to list of\n const serviceToFoundOnDomain: { [k in string]: string[] } = {};\n\n // iterate over each flow\n inputs.forEach((flow) => {\n // process data flows with services\n const { service, attributes = [] } = flow;\n if (!service || service === 'internalService') {\n return;\n }\n\n // create mapping to found on domain\n const foundOnDomain = attributes.find(\n (attr) => attr.key === 'Found on Domain',\n );\n\n // Create a list of all domains where the data flow was found\n if (foundOnDomain) {\n if (!serviceToFoundOnDomain[service]) {\n serviceToFoundOnDomain[service] = [];\n }\n serviceToFoundOnDomain[service]!.push(\n ...foundOnDomain.values.map((v) =>\n v.replace('https://', '').replace('http://', ''),\n ),\n );\n serviceToFoundOnDomain[service] = [\n ...new Set(serviceToFoundOnDomain[service]),\n ];\n }\n\n // Keep track of ad tech\n if (union(flow.trackingPurposes, adTechPurposes).length > 0) {\n // add service to ad tech list\n adTechIntegrations.push(service);\n\n // remove from site tech list\n if (siteTechIntegrations.includes(service)) {\n siteTechIntegrations = siteTechIntegrations.filter(\n (s) => s !== service,\n );\n }\n } else if (!adTechIntegrations.includes(service)) {\n // add to site tech list\n siteTechIntegrations.push(service);\n }\n });\n\n // create the list of ad tech integrations\n const adTechDataSilos = [...new Set(adTechIntegrations)].map((service) => ({\n title: 
serviceToTitle[service],\n ...(serviceToSupportedIntegration[service]\n ? { integrationName: service }\n : { integrationName: 'promptAPerson', 'outer-type': service }),\n attributes: [\n {\n key: 'Tech Type',\n values: ['Ad Tech'],\n },\n {\n key: 'Found On Domain',\n values: serviceToFoundOnDomain[service] || [],\n },\n ],\n }));\n\n // create the list of site tech integrations\n const siteTechDataSilos = [...new Set(siteTechIntegrations)].map(\n (service) => ({\n title: serviceToTitle[service],\n ...(serviceToSupportedIntegration[service]\n ? { integrationName: service }\n : { integrationName: 'promptAPerson', outerType: service }),\n attributes: [\n {\n key: 'Tech Type',\n values: ['Site Tech'],\n },\n {\n key: 'Found On Domain',\n values: serviceToFoundOnDomain[service] || [],\n },\n ],\n }),\n );\n\n return {\n siteTechDataSilos,\n adTechDataSilos,\n };\n}\n"],"x_google_ignoreList":[0],"mappings":"+DAqBA,IAAI,EAAQA,EAAAA,GAAS,SAAS,EAAQ,CACpC,OAAOC,EAAAA,GAASC,EAAAA,GAAY,EAAQ,EAAGC,EAAAA,GAAmB,GAAK,CAAC,EAChE,CAEF,EAAe,ECdf,SAAgB,EACd,EACA,CACE,iBAAiB,CAAC,aAAa,CAC/B,iBACA,iCAUF,CAEA,IAAI,EAAiC,EAAE,CAGjC,EAA+B,EAAE,CAGjC,EAAsD,EAAE,CAG9D,EAAO,QAAS,GAAS,CAEvB,GAAM,CAAE,UAAS,aAAa,EAAE,EAAK,EACrC,GAAI,CAAC,GAAW,IAAY,kBAC1B,OAIF,IAAM,EAAgB,EAAW,KAC9B,GAAS,EAAK,MAAQ,kBACxB,CAGG,IACG,EAAuB,KAC1B,EAAuB,GAAW,EAAE,EAEtC,EAAuB,GAAU,KAC/B,GAAG,EAAc,OAAO,IAAK,GAC3B,EAAE,QAAQ,WAAY,GAAG,CAAC,QAAQ,UAAW,GAAG,CACjD,CACF,CACD,EAAuB,GAAW,CAChC,GAAG,IAAI,IAAI,EAAuB,GAAS,CAC5C,EAICC,EAAM,EAAK,iBAAkB,EAAe,CAAC,OAAS,GAExD,EAAmB,KAAK,EAAQ,CAG5B,EAAqB,SAAS,EAAQ,GACxC,EAAuB,EAAqB,OACzC,GAAM,IAAM,EACd,GAEO,EAAmB,SAAS,EAAQ,EAE9C,EAAqB,KAAK,EAAQ,EAEpC,CAGF,IAAM,EAAkB,CAAC,GAAG,IAAI,IAAI,EAAmB,CAAC,CAAC,IAAK,IAAa,CACzE,MAAO,EAAe,GACtB,GAAI,EAA8B,GAC9B,CAAE,gBAAiB,EAAS,CAC5B,CAAE,gBAAiB,gBAAiB,aAAc,EAAS,CAC/D,WAAY,CACV,CACE,IAAK,YACL,OAAQ,CAAC,UAAU,CACpB,CACD,CACE,IAAK,kBACL,OAAQ,EAAuB,IAAY,EAAE,CAC9C,CACF,CACF,EAAE,CAsBH,MAAO,CACL,kBApBwB,CAAC,GAAG,IAAI,IAAI,EAAqB,CAAC,CAAC,IAC1D,IAAa,CACZ,MAAO,EAAe,GACtB,GAAI,EAA8B,GAC9B,CAAE,gBAAiB,EAAS,CAC5B,CAAE,gBAAiB,gBAAiB,UAAW,EAAS,CAC5D,WAAY,CACV,CACE,IAAK,YACL,OAAQ,CAAC,YAAY,CACtB,CACD,CACE,IAAK,kBACL,OAAQ,EAAuB,IAAY,EAAE,CAC9C,CACF,CACF,EACF,CAIC,kBACD"}
+ {"version":3,"file":"dataFlowsToDataSilos-CnvG2jqy.cjs","names":["baseRest","baseUniq","baseFlatten","isArrayLikeObject","union"],"sources":["../node_modules/.pnpm/lodash-es@4.17.21/node_modules/lodash-es/union.js","../src/lib/consent-manager/dataFlowsToDataSilos.ts"],"sourcesContent":["import baseFlatten from './_baseFlatten.js';\nimport baseRest from './_baseRest.js';\nimport baseUniq from './_baseUniq.js';\nimport isArrayLikeObject from './isArrayLikeObject.js';\n\n/**\n * Creates an array of unique values, in order, from all given arrays using\n * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)\n * for equality comparisons.\n *\n * @static\n * @memberOf _\n * @since 0.1.0\n * @category Array\n * @param {...Array} [arrays] The arrays to inspect.\n * @returns {Array} Returns the new array of combined values.\n * @example\n *\n * _.union([2], [1, 2]);\n * // => [2, 1]\n */\nvar union = baseRest(function(arrays) {\n return baseUniq(baseFlatten(arrays, 1, isArrayLikeObject, true));\n});\n\nexport default union;\n","import { DataFlowInput, DataSiloInput } from '../../codecs';\nimport { union } from 'lodash-es';\nimport { IndexedCatalogs } from '../graphql';\n\n/**\n * Convert data flow configurations into a set of data silo configurations\n *\n * @param inputs - Data flow input to convert to data silos\n * @param options - Additional options\n * @returns Business entity configuration input\n */\nexport function dataFlowsToDataSilos(\n inputs: DataFlowInput[],\n {\n adTechPurposes = ['SaleOfInfo'],\n serviceToTitle,\n serviceToSupportedIntegration,\n }: IndexedCatalogs & {\n /** List of purposes that are considered \"Ad Tech\" */\n adTechPurposes?: string[];\n },\n): {\n /** List of data silo configurations for site-tech services */\n siteTechDataSilos: DataSiloInput[];\n /** List of data silo configurations for ad-tech services */\n adTechDataSilos: DataSiloInput[];\n} {\n // List of site tech integrations\n let siteTechIntegrations: string[] = [];\n\n // List of ad tech integrations\n const adTechIntegrations: string[] = [];\n\n // Mapping from service name to list of\n const serviceToFoundOnDomain: { [k in string]: string[] } = {};\n\n // iterate over each flow\n inputs.forEach((flow) => {\n // process data flows with services\n const { service, attributes = [] } = flow;\n if (!service || service === 'internalService') {\n return;\n }\n\n // create mapping to found on domain\n const foundOnDomain = attributes.find(\n (attr) => attr.key === 'Found on Domain',\n );\n\n // Create a list of all domains where the data flow was found\n if (foundOnDomain) {\n if (!serviceToFoundOnDomain[service]) {\n serviceToFoundOnDomain[service] = [];\n }\n serviceToFoundOnDomain[service]!.push(\n ...foundOnDomain.values.map((v) =>\n v.replace('https://', '').replace('http://', ''),\n ),\n );\n serviceToFoundOnDomain[service] = [\n ...new Set(serviceToFoundOnDomain[service]),\n ];\n }\n\n // Keep track of ad tech\n if (union(flow.trackingPurposes, adTechPurposes).length > 0) {\n // add service to ad tech list\n adTechIntegrations.push(service);\n\n // remove from site tech list\n if (siteTechIntegrations.includes(service)) {\n siteTechIntegrations = siteTechIntegrations.filter(\n (s) => s !== service,\n );\n }\n } else if (!adTechIntegrations.includes(service)) {\n // add to site tech list\n siteTechIntegrations.push(service);\n }\n });\n\n // create the list of ad tech integrations\n const adTechDataSilos = [...new Set(adTechIntegrations)].map((service) => ({\n title: 
serviceToTitle[service],\n ...(serviceToSupportedIntegration[service]\n ? { integrationName: service }\n : { integrationName: 'promptAPerson', 'outer-type': service }),\n attributes: [\n {\n key: 'Tech Type',\n values: ['Ad Tech'],\n },\n {\n key: 'Found On Domain',\n values: serviceToFoundOnDomain[service] || [],\n },\n ],\n }));\n\n // create the list of site tech integrations\n const siteTechDataSilos = [...new Set(siteTechIntegrations)].map(\n (service) => ({\n title: serviceToTitle[service],\n ...(serviceToSupportedIntegration[service]\n ? { integrationName: service }\n : { integrationName: 'promptAPerson', outerType: service }),\n attributes: [\n {\n key: 'Tech Type',\n values: ['Site Tech'],\n },\n {\n key: 'Found On Domain',\n values: serviceToFoundOnDomain[service] || [],\n },\n ],\n }),\n );\n\n return {\n siteTechDataSilos,\n adTechDataSilos,\n };\n}\n"],"x_google_ignoreList":[0],"mappings":"+DAqBA,IAAI,EAAQA,EAAAA,GAAS,SAAS,EAAQ,CACpC,OAAOC,EAAAA,GAASC,EAAAA,GAAY,EAAQ,EAAGC,EAAAA,GAAmB,GAAK,CAAC,EAChE,CAEF,EAAe,ECdf,SAAgB,EACd,EACA,CACE,iBAAiB,CAAC,aAAa,CAC/B,iBACA,iCAUF,CAEA,IAAI,EAAiC,EAAE,CAGjC,EAA+B,EAAE,CAGjC,EAAsD,EAAE,CAG9D,EAAO,QAAS,GAAS,CAEvB,GAAM,CAAE,UAAS,aAAa,EAAE,EAAK,EACrC,GAAI,CAAC,GAAW,IAAY,kBAC1B,OAIF,IAAM,EAAgB,EAAW,KAC9B,GAAS,EAAK,MAAQ,kBACxB,CAGG,IACG,EAAuB,KAC1B,EAAuB,GAAW,EAAE,EAEtC,EAAuB,GAAU,KAC/B,GAAG,EAAc,OAAO,IAAK,GAC3B,EAAE,QAAQ,WAAY,GAAG,CAAC,QAAQ,UAAW,GAAG,CACjD,CACF,CACD,EAAuB,GAAW,CAChC,GAAG,IAAI,IAAI,EAAuB,GAAS,CAC5C,EAICC,EAAM,EAAK,iBAAkB,EAAe,CAAC,OAAS,GAExD,EAAmB,KAAK,EAAQ,CAG5B,EAAqB,SAAS,EAAQ,GACxC,EAAuB,EAAqB,OACzC,GAAM,IAAM,EACd,GAEO,EAAmB,SAAS,EAAQ,EAE9C,EAAqB,KAAK,EAAQ,EAEpC,CAGF,IAAM,EAAkB,CAAC,GAAG,IAAI,IAAI,EAAmB,CAAC,CAAC,IAAK,IAAa,CACzE,MAAO,EAAe,GACtB,GAAI,EAA8B,GAC9B,CAAE,gBAAiB,EAAS,CAC5B,CAAE,gBAAiB,gBAAiB,aAAc,EAAS,CAC/D,WAAY,CACV,CACE,IAAK,YACL,OAAQ,CAAC,UAAU,CACpB,CACD,CACE,IAAK,kBACL,OAAQ,EAAuB,IAAY,EAAE,CAC9C,CACF,CACF,EAAE,CAsBH,MAAO,CACL,kBApBwB,CAAC,GAAG,IAAI,IAAI,EAAqB,CAAC,CAAC,IAC1D,IAAa,CACZ,MAAO,EAAe,GACtB,GAAI,EAA8B,GAC9B,CAAE,gBAAiB,EAAS,CAC5B,CAAE,gBAAiB,gBAAiB,UAAW,EAAS,CAC5D,WAAY,CACV,CACE,IAAK,YACL,OAAQ,CAAC,YAAY,CACtB,CACD,CACE,IAAK,kBACL,OAAQ,EAAuB,IAAY,EAAE,CAC9C,CACF,CACF,EACF,CAIC,kBACD"}
@@ -1,4 +1,4 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./constants-wkuhlP8d.cjs`),n=require(`./syncConfigurationToTranscend-DuTZKIG8.cjs`),r=require(`./enums-BZulhPFa.cjs`),i=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const a=require(`./done-input-validation-DGckEJ5a.cjs`);let o=require(`@transcend-io/privacy-types`),s=require(`@transcend-io/type-utils`),c=require(`node:fs`);c=e.t(c);let l=require(`colors`);l=e.t(l);let u=require(`yargs-parser`);u=e.t(u);let d=require(`got`);d=e.t(d);let f=require(`JSONStream`);f=e.t(f);const p=({hostname:e,auth:t})=>d.default.extend({prefixUrl:`https://${e}`,headers:{accept:`application/json`,"content-type":`application/json`,authorization:`Bearer ${t}`}}),m=Object.values(r.n),h=({assessment:e,index:t,total:n,wrap:r=!0})=>{let i=``;(t===0||r)&&(i=`[
+ const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./constants-K6pQQtc7.cjs`),n=require(`./syncConfigurationToTranscend-DKliAJhK.cjs`),r=require(`./enums-BZulhPFa.cjs`),i=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const a=require(`./done-input-validation-DGckEJ5a.cjs`);let o=require(`@transcend-io/privacy-types`),s=require(`@transcend-io/type-utils`),c=require(`node:fs`);c=e.t(c);let l=require(`colors`);l=e.t(l);let u=require(`yargs-parser`);u=e.t(u);let d=require(`got`);d=e.t(d);let f=require(`JSONStream`);f=e.t(f);const p=({hostname:e,auth:t})=>d.default.extend({prefixUrl:`https://${e}`,headers:{accept:`application/json`,"content-type":`application/json`,authorization:`Bearer ${t}`}}),m=Object.values(r.n),h=({assessment:e,index:t,total:n,wrap:r=!0})=>{let i=``;(t===0||r)&&(i=`[
  `);let a=JSON.stringify(e),o=n&&t<n-1&&!r?`,`:``;return i=`${i+a+o}\n`,(n&&t===n-1||r)&&(i+=`
  ]`),i},g=({file:e,assessment:t,index:n,total:r})=>{i.t.info(l.default.magenta(`Writing enriched assessment ${n+1} of ${r} to file "${e}"...`)),n===0?c.default.writeFileSync(e,h({assessment:t,index:n,total:r,wrap:!1})):c.default.appendFileSync(e,h({assessment:t,index:n,total:r,wrap:!1}))},_=async({oneTrust:e})=>{let t=0,n=1,r=0,a=[];for(;t<n;){let{body:c}=await e.get(`api/assessment/v2/assessments?page=${t}&size=2000`),{page:l,content:u}=(0,s.decodeCodec)(o.OneTrustGetListOfAssessmentsResponse,c);a.push(...u??[]),t===0&&(n=l?.totalPages??0,r=l?.totalElements??0),t+=1,i.t.info(`Fetched ${a.length} of ${r} assessments.`)}return a},v=async({oneTrust:e,assessmentId:t})=>{let{body:n}=await e.get(`api/assessment/v2/assessments/${t}/export?ExcludeSkippedQuestions=false`);return(0,s.decodeCodec)(o.OneTrustGetAssessmentResponse,n)},y=async({oneTrust:e,riskId:t})=>{let{body:n}=await e.get(`api/risk/v2/risks/${t}`);return(0,s.decodeCodec)(o.OneTrustGetRiskResponse,n)},b=async({oneTrust:e,userId:t})=>{let{body:n}=await e.get(`api/scim/v2/Users/${t}`);return(0,s.decodeCodec)(o.OneTrustGetUserResponse,n)},x=({assessment:e,assessmentDetails:n,riskDetails:r,creatorDetails:i,approversDetails:a,respondentsDetails:o})=>{let s=t.g(r,`id`),{sections:c,createdBy:l,...u}=n,d=c.map(e=>{let{questions:t,...n}=e,r=t.map(e=>{let{risks:t,...n}=e,r=(t??[]).map(e=>{let t=s[e.riskId];return{...e,...t,level:e.level,impactLevel:e.impactLevel??0}});return{...n,risks:r}});return{...n,questions:r}}),f={...l,active:i?.active??!1,userType:i?.userType??`Internal`,emails:i?.emails??[],title:i?.title??null,givenName:i?.name.givenName??null,familyName:i?.name.familyName??null},p=t.g(a,`id`),m=n.approvers.flatMap(e=>p[e.id]?[{...e,approver:{...e.approver,active:p[e.id].active,userType:p[e.id].userType,emails:p[e.id].emails,title:p[e.id].title,givenName:p[e.id].name.givenName??null,familyName:p[e.id].name.familyName??null}}]:[]),h=t.g(o,`id`),g=n.respondents.filter(e=>!e.name.includes(`@`)).flatMap(e=>h[e.id]?[{...e,active:h[e.id].active,userType:h[e.id].userType,emails:h[e.id].emails,title:h[e.id].title,givenName:h[e.id].name.givenName??null,familyName:h[e.id].name.familyName??null}]:[]);return{...e,...u,approvers:m,respondents:g,createdBy:f,sections:d}},S=async({transcend:e,assessment:t,total:r,index:a})=>{i.t.info(l.default.magenta(`Writing enriched assessment ${a+1} ${r?`of ${r} `:` `}to Transcend...`));let o={json:h({assessment:t,index:a,total:r})};try{await n.i(e,n.bo,{input:o})}catch{i.t.error(l.default.red(`Failed to sync assessment ${a+1} ${r?`of ${r} `:` `}to Transcend.\n\tAssessment Title: ${t.name}. Template Title: ${t.template.name}\n`))}},C=async({oneTrust:e,file:t,dryRun:r,transcend:a})=>{i.t.info(`Getting list of all assessments from OneTrust...`);let o=await _({oneTrust:e}),s={};await n.Es(Array.from({length:Math.ceil(o.length/5)},(e,t)=>o.slice(t*5,(t+1)*5)),async(c,u)=>{let d=[];await n.Ts(c,async(t,r)=>{let a=5*u+r+1;i.t.info(`[assessment ${a} of ${o.length}]: fetching details...`);let{templateName:c,assessmentId:f}=t,p=await v({oneTrust:e,assessmentId:f}),m=p.createdBy.id,h=s[m];if(!h){i.t.info(`[assessment ${a} of ${o.length}]: fetching creator...`);try{h=await b({oneTrust:e,userId:m}),s[m]=h}catch{i.t.warn(l.default.yellow(`[assessment ${a} of ${o.length}]: failed to fetch form creator.\tcreatorId: ${m}. Assessment Title: ${t.name}. 
Template Title: ${c}`))}}let{approvers:g}=p,_=[];g.length>0&&(i.t.info(`[assessment ${a} of ${o.length}]: fetching approvers...`),_=await n.Ts(g.map(({id:e})=>e),async n=>{try{let t=s[n];return t||(t=await b({oneTrust:e,userId:n}),s[n]=t),[t]}catch{return i.t.warn(l.default.yellow(`[assessment ${a} of ${o.length}]: failed to fetch a form approver.\tapproverId: ${n}. Assessment Title: ${t.name}. Template Title: ${c}`)),[]}},{concurrency:5}));let{respondents:S}=p,C=S.filter(e=>!e.name.includes(`@`)),w=[];C.length>0&&(i.t.info(`[assessment ${a} of ${o.length}]: fetching respondents...`),w=await n.Ts(C.map(({id:e})=>e),async n=>{try{let t=s[n];return t||(t=await b({oneTrust:e,userId:n}),s[n]=t),[t]}catch{return i.t.warn(l.default.yellow(`[assessment ${a} of ${o.length}]: failed to fetch a respondent.\trespondentId: ${n}. Assessment Title: ${t.name}. Template Title: ${c}`)),[]}},{concurrency:5}));let T=[],E=n.Ds(p.sections.flatMap(e=>e.questions.flatMap(e=>(e.risks??[]).flatMap(e=>e.riskId))));E.length>0&&(i.t.info(`[assessment ${a} of ${o.length}]: fetching risks...`),T=await n.Ts(E,t=>y({oneTrust:e,riskId:t}),{concurrency:5}));let D=x({assessment:t,assessmentDetails:p,riskDetails:T,creatorDetails:h,approversDetails:_.flat(),respondentsDetails:w.flat()});d.push(D)},{concurrency:5}),await n.Es(d,async(e,n)=>{let i=u*5+n;r&&t?g({assessment:e,index:i,total:o.length,file:t}):a&&await S({assessment:e,transcend:a,total:o.length,index:i})})})},w=({transcend:e,file:t})=>(i.t.info(`Getting list of all assessments from file ${t}...`),new Promise((n,r)=>{let a=(0,c.createReadStream)(t,{encoding:`utf-8`,highWaterMark:64*1024}),u=f.default.parse(`*`),d=0;a.pipe(u),u.on(`data`,async n=>{try{u.pause(),await S({assessment:(0,s.decodeCodec)(o.OneTrustEnrichedAssessment,n),transcend:e,index:d}),d+=1,u.resume()}catch(e){i.t.error(l.default.red(`Failed to parse the assessment ${d} from file '${t}': ${e.message}.`))}}),u.on(`end`,()=>{i.t.info(`Finished processing ${d} assessments from file ${t}`),n()}),u.on(`error`,e=>{i.t.error(l.default.red(`Error parsing file '${t}': ${e.message}`)),r(e)}),a.on(`error`,e=>{i.t.error(l.default.red(`Error reading file '${t}': ${e.message}`)),r(e)})}));async function T({hostname:e,oneTrustAuth:t,source:o,transcendAuth:s,transcendUrl:c,resource:u,file:d,dryRun:f,debug:m}){if(!f&&!s)throw Error('Must specify a "transcendAuth" parameter to sync resources to Transcend. e.g. --transcendAuth=${TRANSCEND_API_KEY}');if(f&&!d)throw Error(`Must set a "file" parameter when "dryRun" is "true". e.g. --file=./oneTrustAssessments.json`);if(d){let e=d.split(`.`);if(e.length<2)throw Error(`The "file" parameter has an invalid format. Expected a path with extensions. e.g. --file=./pathToFile.json.`);if(e.at(-1)!==r.t.Json)throw Error(`Expected the format of the "file" parameters '${d}' to be '${r.t.Json}', but got '${e.at(-1)}'.`)}if(o===r.r.OneTrust){if(!e)throw Error(`Missing required parameter "hostname". e.g. --hostname=customer.my.onetrust.com`);if(!t)throw Error(`Missing required parameter "oneTrustAuth". e.g. --oneTrustAuth=$ONE_TRUST_AUTH_TOKEN`)}else{if(!d)throw Error(`Must specify a "file" parameter to read the OneTrust assessments from. e.g. --source=./oneTrustAssessments.json`);if(f)throw Error(`Cannot read and write to a file simultaneously. 
Emit the "source" parameter or set it to ${r.r.OneTrust} if "dryRun" is enabled.`)}a.t(this.process.exit);let h=e&&t?p({hostname:e,auth:t}):void 0,g=c&&s?n.ti(c,s):void 0;try{u===r.n.Assessments&&(o===r.r.OneTrust&&h?await C({oneTrust:h,file:d,dryRun:f,...g&&{transcend:g}}):o===r.r.File&&d&&g&&await w({file:d,transcend:g}))}catch(e){throw Error(`An error occurred syncing the resource ${u} from OneTrust: ${m?e.stack:e.message}`)}i.t.info(l.default.green(`Successfully synced OneTrust ${u} to ${f?`disk at "${d}"`:`Transcend`}!`))}exports.syncOt=T;
- //# sourceMappingURL=impl-BOEoFzcB.cjs.map
+ //# sourceMappingURL=impl-1-1sg4WF.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-BOEoFzcB.cjs","names":["OneTrustPullResource","OneTrustGetListOfAssessmentsResponse","OneTrustGetAssessmentResponse","OneTrustGetRiskResponse","OneTrustGetUserResponse","keyBy","makeGraphQLRequest","IMPORT_ONE_TRUST_ASSESSMENT_FORMS","mapSeries","map","uniq","OneTrustEnrichedAssessment","OneTrustFileFormat","OneTrustPullSource","buildTranscendGraphQLClient","OneTrustPullResource"],"sources":["../src/lib/oneTrust/createOneTrustGotInstance.ts","../src/lib/oneTrust/helpers/parseCliSyncOtArguments.ts","../src/lib/oneTrust/helpers/oneTrustAssessmentToJson.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentToDisk.ts","../src/lib/oneTrust/endpoints/getListOfOneTrustAssessments.ts","../src/lib/oneTrust/endpoints/getOneTrustAssessment.ts","../src/lib/oneTrust/endpoints/getOneTrustRisk.ts","../src/lib/oneTrust/endpoints/getOneTrustUser.ts","../src/lib/oneTrust/helpers/enrichOneTrustAssessment.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentToTranscend.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentsFromOneTrust.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentsFromFile.ts","../src/commands/migration/sync-ot/impl.ts"],"sourcesContent":["import got, { Got } from 'got';\n\n/**\n * Instantiate an instance of got that is capable of making requests to OneTrust\n *\n * @param param - information about the OneTrust URL\n * @returns The instance of got that is capable of making requests to the customer ingress\n */\nexport const createOneTrustGotInstance = ({\n hostname,\n auth,\n}: {\n /** Hostname of the OneTrust API */\n hostname: string;\n /** The OAuth access token */\n auth: string;\n}): Got =>\n got.extend({\n prefixUrl: `https://${hostname}`,\n headers: {\n accept: 'application/json',\n 'content-type': 'application/json',\n authorization: `Bearer ${auth}`,\n },\n });\n","import { logger } from '../../../logger';\nimport colors from 'colors';\nimport yargs from 'yargs-parser';\nimport {\n OneTrustFileFormat,\n OneTrustPullResource,\n OneTrustPullSource,\n} from '../../../enums';\n\nconst VALID_RESOURCES = Object.values(OneTrustPullResource);\n\ninterface OneTrustCliArguments {\n /** The name of the file to write the resources to */\n file: string;\n /** The OneTrust hostname to send the requests to */\n hostname?: string;\n /** The OAuth Bearer token used to authenticate the requests to OneTrust */\n oneTrustAuth?: string;\n /** The Transcend API key to authenticate the requests to Transcend */\n transcendAuth: string;\n /** The Transcend URL where to forward requests */\n transcendUrl: string;\n /** The resource to pull from OneTrust */\n resource: OneTrustPullResource;\n /** Whether to enable debugging while reporting errors */\n debug: boolean;\n /** Whether to export the resource into a file rather than push to transcend */\n dryRun: boolean;\n /** Where to read the OneTrust resource from */\n source: OneTrustPullSource;\n}\n\n/**\n * Parse the command line arguments\n *\n * @returns the parsed arguments\n */\nexport const parseCliSyncOtArguments = (): OneTrustCliArguments => {\n const {\n file,\n hostname,\n oneTrustAuth,\n resource,\n debug,\n dryRun,\n transcendAuth,\n transcendUrl,\n source,\n } = yargs(process.argv.slice(2), {\n string: [\n 'file',\n 'hostname',\n 'oneTrustAuth',\n 'resource',\n 'dryRun',\n 'transcendAuth',\n 'transcendUrl',\n 'source',\n ],\n boolean: ['debug', 'dryRun'],\n default: {\n resource: OneTrustPullResource.Assessments,\n debug: false,\n dryRun: false,\n transcendUrl: 'https://api.transcend.io',\n source: 
OneTrustPullSource.OneTrust,\n },\n });\n\n // Must be able to authenticate to transcend to sync resources to it\n if (!dryRun && !transcendAuth) {\n logger.error(\n colors.red(\n // eslint-disable-next-line no-template-curly-in-string\n 'Must specify a \"transcendAuth\" parameter to sync resources to Transcend. e.g. --transcendAuth=${TRANSCEND_API_KEY}',\n ),\n );\n return process.exit(1);\n }\n if (!dryRun && !transcendUrl) {\n logger.error(\n colors.red(\n // eslint-disable-next-line max-len\n 'Must specify a \"transcendUrl\" parameter to sync resources to Transcend. e.g. --transcendUrl=https://api.transcend.io',\n ),\n );\n return process.exit(1);\n }\n\n // If trying to sync to disk, must specify a file path\n if (dryRun && !file) {\n logger.error(\n colors.red(\n 'Must set a \"file\" parameter when \"dryRun\" is \"true\". e.g. --file=./oneTrustAssessments.json',\n ),\n );\n return process.exit(1);\n }\n\n if (file) {\n const splitFile = file.split('.');\n if (splitFile.length < 2) {\n logger.error(\n colors.red(\n 'The \"file\" parameter has an invalid format. Expected a path with extensions. e.g. --file=./pathToFile.json.',\n ),\n );\n return process.exit(1);\n }\n if (splitFile.at(-1) !== OneTrustFileFormat.Json) {\n logger.error(\n colors.red(\n `Expected the format of the \"file\" parameters '${file}' to be '${\n OneTrustFileFormat.Json\n }', but got '${splitFile.at(-1)}'.`,\n ),\n );\n return process.exit(1);\n }\n }\n\n // if reading assessments from a OneTrust\n if (source === OneTrustPullSource.OneTrust) {\n // must specify the OneTrust hostname\n if (!hostname) {\n logger.error(\n colors.red(\n 'Missing required parameter \"hostname\". e.g. --hostname=customer.my.onetrust.com',\n ),\n );\n return process.exit(1);\n }\n // must specify the OneTrust auth\n if (!oneTrustAuth) {\n logger.error(\n colors.red(\n 'Missing required parameter \"oneTrustAuth\". e.g. --oneTrustAuth=$ONE_TRUST_AUTH_TOKEN',\n ),\n );\n return process.exit(1);\n }\n } else {\n // if reading the assessments from a file, must specify a file to read from\n if (!file) {\n logger.error(\n colors.red(\n 'Must specify a \"file\" parameter to read the OneTrust assessments from. e.g. --source=./oneTrustAssessments.json',\n ),\n );\n return process.exit(1);\n }\n\n // Cannot try reading from file and save assessments to a file simultaneously\n if (dryRun) {\n logger.error(\n colors.red(\n 'Cannot read and write to a file simultaneously.' +\n ` Emit the \"source\" parameter or set it to ${OneTrustPullSource.OneTrust} if \"dryRun\" is enabled.`,\n ),\n );\n return process.exit(1);\n }\n }\n\n if (!VALID_RESOURCES.includes(resource)) {\n logger.error(\n colors.red(\n `Received invalid resource value: \"${resource}\". 
Allowed: ${VALID_RESOURCES.join(\n ',',\n )}`,\n ),\n );\n return process.exit(1);\n }\n\n return {\n file,\n ...(hostname && { hostname }),\n ...(oneTrustAuth && { oneTrustAuth }),\n resource,\n debug,\n dryRun,\n transcendAuth,\n transcendUrl,\n source,\n };\n};\n","import { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\n\n/**\n * Converts the assessment into a json entry.\n *\n * @param param - information about the assessment and amount of entries\n * @returns a stringified json entry ready to be appended to a file\n */\nexport const oneTrustAssessmentToJson = ({\n assessment,\n index,\n total,\n wrap = true,\n}: {\n /** The assessment to convert */\n assessment: OneTrustEnrichedAssessment;\n /** The position of the assessment in the final Json object */\n index: number;\n /** The total amount of the assessments in the final Json object */\n total?: number;\n /** Whether to wrap every entry in brackets */\n wrap?: boolean;\n}): string => {\n let jsonEntry = '';\n // start with an opening bracket\n if (index === 0 || wrap) {\n jsonEntry = '[\\n';\n }\n\n const stringifiedAssessment = JSON.stringify(assessment);\n\n // Add comma for all items except the last one\n const comma = total && index < total - 1 && !wrap ? ',' : '';\n\n // write to file\n jsonEntry = `${jsonEntry + stringifiedAssessment + comma}\\n`;\n\n // end with closing bracket\n if ((total && index === total - 1) || wrap) {\n jsonEntry += '\\n]';\n }\n\n return jsonEntry;\n};\n","import { logger } from '../../../logger';\nimport colors from 'colors';\nimport fs from 'node:fs';\nimport { oneTrustAssessmentToJson } from './oneTrustAssessmentToJson';\nimport { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\n\n/**\n * Write the assessment to disk at the specified file path.\n *\n *\n * @param param - information about the assessment to write\n */\nexport const syncOneTrustAssessmentToDisk = ({\n file,\n assessment,\n index,\n total,\n}: {\n /** The file path to write the assessment to */\n file: string;\n /** The basic assessment */\n assessment: OneTrustEnrichedAssessment;\n /** The index of the assessment being written to the file */\n index: number;\n /** The total amount of assessments that we will write */\n total: number;\n}): void => {\n logger.info(\n colors.magenta(\n `Writing enriched assessment ${\n index + 1\n } of ${total} to file \"${file}\"...`,\n ),\n );\n\n if (index === 0) {\n fs.writeFileSync(\n file,\n oneTrustAssessmentToJson({\n assessment,\n index,\n total,\n wrap: false,\n }),\n );\n } else {\n fs.appendFileSync(\n file,\n oneTrustAssessmentToJson({\n assessment,\n index,\n total,\n wrap: false,\n }),\n );\n }\n};\n","import { Got } from 'got';\nimport { logger } from '../../../logger';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport {\n OneTrustAssessment,\n OneTrustGetListOfAssessmentsResponse,\n} from '@transcend-io/privacy-types';\n\n/**\n * Fetch a list of all assessments from the OneTrust client.\n * ref: https://developer.onetrust.com/onetrust/reference/getallassessmentbasicdetailsusingget\n *\n * @param param - the information about the OneTrust client\n * @returns a list of OneTrustAssessment\n */\nexport const getListOfOneTrustAssessments = async ({\n oneTrust,\n}: {\n /** The OneTrust client instance */\n oneTrust: Got;\n}): Promise<OneTrustAssessment[]> => {\n let currentPage = 0;\n let totalPages = 1;\n let totalElements = 0;\n\n const allAssessments: OneTrustAssessment[] = [];\n\n while (currentPage < totalPages) {\n const { body } = 
await oneTrust.get(\n `api/assessment/v2/assessments?page=${currentPage}&size=2000`,\n );\n\n const { page, content } = decodeCodec(\n OneTrustGetListOfAssessmentsResponse,\n body,\n );\n allAssessments.push(...(content ?? []));\n if (currentPage === 0) {\n totalPages = page?.totalPages ?? 0;\n totalElements = page?.totalElements ?? 0;\n }\n currentPage += 1;\n\n // log progress\n logger.info(\n `Fetched ${allAssessments.length} of ${totalElements} assessments.`,\n );\n }\n\n return allAssessments;\n};\n","import { Got } from 'got';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { OneTrustGetAssessmentResponse } from '@transcend-io/privacy-types';\n\n/**\n * Retrieve details about a particular assessment.\n * ref: https://developer.onetrust.com/onetrust/reference/exportassessmentusingget\n *\n * @param param - the information about the OneTrust client and assessment to retrieve\n * @returns details about the assessment\n */\nexport const getOneTrustAssessment = async ({\n oneTrust,\n assessmentId,\n}: {\n /** The OneTrust client instance */\n oneTrust: Got;\n /** The ID of the assessment to retrieve */\n assessmentId: string;\n}): Promise<OneTrustGetAssessmentResponse> => {\n const { body } = await oneTrust.get(\n `api/assessment/v2/assessments/${assessmentId}/export?ExcludeSkippedQuestions=false`,\n );\n\n return decodeCodec(OneTrustGetAssessmentResponse, body);\n};\n","import { Got } from 'got';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { OneTrustGetRiskResponse } from '@transcend-io/privacy-types';\n\n/**\n * Retrieve details about a particular risk.\n * ref: https://developer.onetrust.com/onetrust/reference/getriskusingget\n *\n * @param param - the information about the OneTrust client and risk to retrieve\n * @returns the OneTrust risk\n */\nexport const getOneTrustRisk = async ({\n oneTrust,\n riskId,\n}: {\n /** The OneTrust client instance */\n oneTrust: Got;\n /** The ID of the OneTrust risk to retrieve */\n riskId: string;\n}): Promise<OneTrustGetRiskResponse> => {\n const { body } = await oneTrust.get(`api/risk/v2/risks/${riskId}`);\n\n return decodeCodec(OneTrustGetRiskResponse, body);\n};\n","import { Got } from 'got';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { OneTrustGetUserResponse } from '@transcend-io/privacy-types';\n\n/**\n * Retrieve details about a particular user.\n * ref: https://developer.onetrust.com/onetrust/reference/getriskusingget\n *\n * @param param - the information about the OneTrust client and risk to retrieve\n * @returns the OneTrust risk\n */\nexport const getOneTrustUser = async ({\n oneTrust,\n userId,\n}: {\n /** The OneTrust client instance */\n oneTrust: Got;\n /** The ID of the OneTrust user to retrieve */\n userId: string;\n}): Promise<OneTrustGetUserResponse> => {\n const { body } = await oneTrust.get(`api/scim/v2/Users/${userId}`);\n\n return decodeCodec(OneTrustGetUserResponse, body);\n};\n","import {\n OneTrustAssessment,\n OneTrustEnrichedAssessment,\n OneTrustGetAssessmentResponse,\n OneTrustGetRiskResponse,\n OneTrustGetUserResponse,\n} from '@transcend-io/privacy-types';\nimport { keyBy } from 'lodash-es';\n\n/**\n * Merge the assessment, assessmentDetails, and riskDetails into one object.\n *\n * @param param - the assessment and risk information\n * @returns the assessment enriched with details and risk information\n */\nexport const enrichOneTrustAssessment = ({\n assessment,\n assessmentDetails,\n riskDetails,\n creatorDetails,\n approversDetails,\n 
respondentsDetails,\n}: {\n /** The OneTrust risk details */\n riskDetails: OneTrustGetRiskResponse[];\n /** The OneTrust assessment as returned from Get List of Assessments endpoint */\n assessment: OneTrustAssessment;\n /** The OneTrust assessment details */\n assessmentDetails: OneTrustGetAssessmentResponse;\n /** The OneTrust assessment creator details */\n creatorDetails: OneTrustGetUserResponse;\n /** The OneTrust assessment approvers details */\n approversDetails: OneTrustGetUserResponse[];\n /** The OneTrust assessment internal respondents details */\n respondentsDetails: OneTrustGetUserResponse[];\n}): OneTrustEnrichedAssessment => {\n const riskDetailsById = keyBy(riskDetails, 'id');\n const { sections, createdBy, ...restAssessmentDetails } = assessmentDetails;\n const sectionsWithEnrichedRisk = sections.map((section) => {\n const { questions, ...restSection } = section;\n const enrichedQuestions = questions.map((question) => {\n const { risks, ...restQuestion } = question;\n const enrichedRisks = (risks ?? []).map((risk) => {\n const details = riskDetailsById[risk.riskId];\n return {\n ...risk,\n ...details,\n level: risk.level,\n impactLevel: risk.impactLevel ?? 0,\n };\n });\n return {\n ...restQuestion,\n risks: enrichedRisks,\n };\n });\n return {\n ...restSection,\n questions: enrichedQuestions,\n };\n });\n\n // grab creator details\n const enrichedCreatedBy = {\n ...createdBy,\n active: creatorDetails?.active ?? false,\n userType: creatorDetails?.userType ?? 'Internal',\n emails: creatorDetails?.emails ?? [],\n title: creatorDetails?.title ?? null,\n givenName: creatorDetails?.name.givenName ?? null,\n familyName: creatorDetails?.name.familyName ?? null,\n };\n\n // grab approvers details\n const approverDetailsById = keyBy(approversDetails, 'id');\n const enrichedApprovers = assessmentDetails.approvers.flatMap(\n (originalApprover) =>\n approverDetailsById[originalApprover.id]\n ? [\n {\n ...originalApprover,\n approver: {\n ...originalApprover.approver,\n active: approverDetailsById[originalApprover.id].active,\n userType: approverDetailsById[originalApprover.id].userType,\n emails: approverDetailsById[originalApprover.id].emails,\n title: approverDetailsById[originalApprover.id].title,\n givenName:\n approverDetailsById[originalApprover.id].name.givenName ??\n null,\n familyName:\n approverDetailsById[originalApprover.id].name.familyName ??\n null,\n },\n },\n ]\n : [],\n );\n\n // grab respondents details\n const respondentsDetailsById = keyBy(respondentsDetails, 'id');\n const enrichedRespondents = assessmentDetails.respondents\n .filter((r) => !r.name.includes('@')) // search only internal respondents\n .flatMap((respondent) =>\n respondentsDetailsById[respondent.id]\n ? [\n {\n ...respondent,\n active: respondentsDetailsById[respondent.id].active,\n userType: respondentsDetailsById[respondent.id].userType,\n emails: respondentsDetailsById[respondent.id].emails,\n title: respondentsDetailsById[respondent.id].title,\n givenName:\n respondentsDetailsById[respondent.id].name.givenName ?? null,\n familyName:\n respondentsDetailsById[respondent.id].name.familyName ?? 
null,\n },\n ]\n : [],\n );\n\n // combine everything into a single enriched assessment\n return {\n ...assessment,\n ...restAssessmentDetails,\n approvers: enrichedApprovers,\n respondents: enrichedRespondents,\n createdBy: enrichedCreatedBy,\n sections: sectionsWithEnrichedRisk,\n };\n};\n","import { logger } from '../../../logger';\nimport colors from 'colors';\nimport { GraphQLClient } from 'graphql-request';\nimport {\n IMPORT_ONE_TRUST_ASSESSMENT_FORMS,\n makeGraphQLRequest,\n} from '../../graphql';\nimport { ImportOnetrustAssessmentsInput } from '../../../codecs';\nimport { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\nimport { oneTrustAssessmentToJson } from './oneTrustAssessmentToJson';\n\nexport interface AssessmentForm {\n /** ID of Assessment Form */\n id: string;\n /** Title of Assessment Form */\n name: string;\n}\n\n/**\n * Write the assessment to a Transcend instance.\n *\n *\n * @param param - information about the assessment and Transcend instance to write to\n */\nexport const syncOneTrustAssessmentToTranscend = async ({\n transcend,\n assessment,\n total,\n index,\n}: {\n /** the Transcend client instance */\n transcend: GraphQLClient;\n /** the assessment to sync to Transcend */\n assessment: OneTrustEnrichedAssessment;\n /** The index of the assessment being written to the file */\n index: number;\n /** The total amount of assessments that we will write */\n total?: number;\n}): Promise<void> => {\n logger.info(\n colors.magenta(\n `Writing enriched assessment ${index + 1} ${\n total ? `of ${total} ` : ' '\n }to Transcend...`,\n ),\n );\n\n // convert the OneTrust assessment object into a json record\n const json = oneTrustAssessmentToJson({\n assessment,\n index,\n total,\n });\n\n // transform the json record into a valid input to the mutation\n const input: ImportOnetrustAssessmentsInput = {\n json,\n };\n\n try {\n await makeGraphQLRequest<{\n /** the importOneTrustAssessmentForms mutation */\n importOneTrustAssessmentForms: {\n /** Created Assessment Forms */\n assessmentForms: AssessmentForm[];\n };\n }>(transcend, IMPORT_ONE_TRUST_ASSESSMENT_FORMS, {\n input,\n });\n } catch (e) {\n logger.error(\n colors.red(\n `Failed to sync assessment ${index + 1} ${\n total ? `of ${total} ` : ' '\n }to Transcend.\\n` +\n `\\tAssessment Title: ${assessment.name}. 
Template Title: ${assessment.template.name}\\n`,\n ),\n );\n }\n};\n","import type { Got } from 'got';\nimport colors from 'colors';\nimport {\n getListOfOneTrustAssessments,\n getOneTrustAssessment,\n getOneTrustRisk,\n getOneTrustUser,\n} from '../endpoints';\nimport { mapSeries, map } from '../../bluebird';\nimport { logger } from '../../../logger';\nimport {\n OneTrustAssessmentQuestion,\n OneTrustAssessmentSection,\n OneTrustEnrichedAssessment,\n OneTrustGetRiskResponse,\n OneTrustGetUserResponse,\n} from '@transcend-io/privacy-types';\nimport { uniq } from 'lodash-es';\nimport { enrichOneTrustAssessment } from './enrichOneTrustAssessment';\nimport { syncOneTrustAssessmentToDisk } from './syncOneTrustAssessmentToDisk';\nimport { GraphQLClient } from 'graphql-request';\nimport { syncOneTrustAssessmentToTranscend } from './syncOneTrustAssessmentToTranscend';\n\nexport interface AssessmentForm {\n /** ID of Assessment Form */\n id: string;\n /** Title of Assessment Form */\n name: string;\n}\n\n/**\n * Reads all the assessments from a OneTrust instance and syncs them to Transcend or to Disk.\n *\n * @param param - the information about the assessment, its OneTrust source, and destination (disk or Transcend)\n */\nexport const syncOneTrustAssessmentsFromOneTrust = async ({\n oneTrust,\n file,\n dryRun,\n transcend,\n}: {\n /** the OneTrust client instance */\n oneTrust: Got;\n /** the Transcend client instance */\n transcend?: GraphQLClient;\n /** Whether to write to file instead of syncing to Transcend */\n dryRun: boolean;\n /** the path to the file in case dryRun is true */\n file?: string;\n}): Promise<void> => {\n // fetch the list of all assessments in the OneTrust organization\n logger.info('Getting list of all assessments from OneTrust...');\n const assessments = await getListOfOneTrustAssessments({ oneTrust });\n\n // a cache of OneTrust users so we avoid requesting already fetched users\n const oneTrustCachedUsers: Record<string, OneTrustGetUserResponse> = {};\n\n // split all assessments in batches, so we can process some of steps in parallel\n const BATCH_SIZE = 5;\n const assessmentBatches = Array.from(\n {\n length: Math.ceil(assessments.length / BATCH_SIZE),\n },\n (_, i) => assessments.slice(i * BATCH_SIZE, (i + 1) * BATCH_SIZE),\n );\n\n // process each batch and sync the batch right away so it's garbage collected and we don't run out of memory\n await mapSeries(assessmentBatches, async (assessmentBatch, batch) => {\n const batchEnrichedAssessments: OneTrustEnrichedAssessment[] = [];\n\n // fetch assessment details from OneTrust in parallel\n await map(\n assessmentBatch,\n async (assessment, index) => {\n const assessmentNumber = BATCH_SIZE * batch + index + 1;\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching details...`,\n );\n const { templateName, assessmentId } = assessment;\n const assessmentDetails = await getOneTrustAssessment({\n oneTrust,\n assessmentId,\n });\n // fetch assessment's creator information\n const creatorId = assessmentDetails.createdBy.id;\n let creator = oneTrustCachedUsers[creatorId];\n if (!creator) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching creator...`,\n );\n try {\n creator = await getOneTrustUser({\n oneTrust,\n userId: creatorId,\n });\n oneTrustCachedUsers[creatorId] = creator;\n } catch (e) {\n logger.warn(\n colors.yellow(\n `[assessment ${assessmentNumber} of ${assessments.length}]: failed to fetch form creator.` +\n `\\tcreatorId: ${creatorId}. 
Assessment Title: ${assessment.name}. Template Title: ${templateName}`,\n ),\n );\n }\n }\n\n // fetch assessment approvers information\n const { approvers } = assessmentDetails;\n let approversDetails: OneTrustGetUserResponse[][] = [];\n if (approvers.length > 0) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching approvers...`,\n );\n approversDetails = await map(\n approvers.map(({ id }) => id),\n async (userId) => {\n try {\n let approver = oneTrustCachedUsers[userId];\n if (!approver) {\n approver = await getOneTrustUser({ oneTrust, userId });\n oneTrustCachedUsers[userId] = approver;\n }\n return [approver];\n } catch (e) {\n logger.warn(\n colors.yellow(\n `[assessment ${assessmentNumber} of ${assessments.length}]: failed to fetch a form approver.` +\n `\\tapproverId: ${userId}. Assessment Title: ${assessment.name}. Template Title: ${templateName}`,\n ),\n );\n return [];\n }\n },\n { concurrency: 5 },\n );\n }\n\n // fetch assessment internal respondents information\n const { respondents } = assessmentDetails;\n // if a user is an internal respondents, their 'name' field can't be an email.\n const internalRespondents = respondents.filter(\n (r) => !r.name.includes('@'),\n );\n let respondentsDetails: OneTrustGetUserResponse[][] = [];\n if (internalRespondents.length > 0) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching respondents...`,\n );\n respondentsDetails = await map(\n internalRespondents.map(({ id }) => id),\n async (userId) => {\n try {\n let respondent = oneTrustCachedUsers[userId];\n if (!respondent) {\n respondent = await getOneTrustUser({ oneTrust, userId });\n oneTrustCachedUsers[userId] = respondent;\n }\n return [respondent];\n } catch (e) {\n logger.warn(\n colors.yellow(\n `[assessment ${assessmentNumber} of ${assessments.length}]: failed to fetch a respondent.` +\n `\\trespondentId: ${userId}. Assessment Title: ${assessment.name}. Template Title: ${templateName}`,\n ),\n );\n return [];\n }\n },\n { concurrency: 5 },\n );\n }\n\n // fetch assessment risk information\n let riskDetails: OneTrustGetRiskResponse[] = [];\n const riskIds = uniq(\n assessmentDetails.sections.flatMap((s: OneTrustAssessmentSection) =>\n s.questions.flatMap((q: OneTrustAssessmentQuestion) =>\n (q.risks ?? 
[]).flatMap((r) => r.riskId),\n ),\n ),\n );\n if (riskIds.length > 0) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching risks...`,\n );\n riskDetails = await map(\n riskIds,\n (riskId) => getOneTrustRisk({ oneTrust, riskId: riskId as string }),\n {\n concurrency: 5,\n },\n );\n }\n\n // enrich the assessments with user and risk details\n const enrichedAssessment = enrichOneTrustAssessment({\n assessment,\n assessmentDetails,\n riskDetails,\n creatorDetails: creator,\n approversDetails: approversDetails.flat(),\n respondentsDetails: respondentsDetails.flat(),\n });\n\n batchEnrichedAssessments.push(enrichedAssessment);\n },\n { concurrency: BATCH_SIZE },\n );\n\n // sync assessments in series to avoid concurrency bugs\n await mapSeries(\n batchEnrichedAssessments,\n async (enrichedAssessment, index) => {\n // the assessment's global index takes its batch into consideration\n const globalIndex = batch * BATCH_SIZE + index;\n\n if (dryRun && file) {\n // sync to file\n syncOneTrustAssessmentToDisk({\n assessment: enrichedAssessment,\n index: globalIndex,\n total: assessments.length,\n file,\n });\n } else if (transcend) {\n // sync to transcend\n await syncOneTrustAssessmentToTranscend({\n assessment: enrichedAssessment,\n transcend,\n total: assessments.length,\n index: globalIndex,\n });\n }\n },\n );\n });\n};\n","import { decodeCodec } from '@transcend-io/type-utils';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport JSONStream from 'JSONStream';\n\nimport { createReadStream } from 'node:fs';\nimport { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\nimport { syncOneTrustAssessmentToTranscend } from './syncOneTrustAssessmentToTranscend';\nimport { GraphQLClient } from 'graphql-request';\n\n/**\n * Reads assessments from a file and syncs them to Transcend.\n *\n * @param param - the information about the source file and Transcend instance to write them to.\n */\nexport const syncOneTrustAssessmentsFromFile = ({\n transcend,\n file,\n}: {\n /** the Transcend client instance */\n transcend: GraphQLClient;\n /** The name of the file from which to read the OneTrust assessments */\n file: string;\n}): Promise<void> => {\n logger.info(`Getting list of all assessments from file ${file}...`);\n\n return new Promise((resolve, reject) => {\n // Create a readable stream from the file\n const fileStream = createReadStream(file, {\n encoding: 'utf-8',\n highWaterMark: 64 * 1024, // 64KB chunks\n });\n\n // Create a JSONStream parser to parse the array of OneTrust assessments from the file\n const parser = JSONStream.parse('*'); // '*' matches each element in the root array\n\n let index = 0;\n\n // Pipe the file stream into the JSON parser\n fileStream.pipe(parser);\n\n // Handle each parsed assessment object\n parser.on('data', async (assessment) => {\n try {\n // Pause the stream while processing to avoid overwhelming memory\n parser.pause();\n\n // Decode and validate the assessment\n const parsedAssessment = decodeCodec(\n OneTrustEnrichedAssessment,\n assessment,\n );\n\n // Sync the assessment to transcend\n await syncOneTrustAssessmentToTranscend({\n assessment: parsedAssessment,\n transcend,\n index,\n });\n\n index += 1;\n\n // Resume the stream after processing\n parser.resume();\n } catch (e) {\n // if failed to parse a line, report error and continue\n logger.error(\n colors.red(\n `Failed to parse the assessment ${index} from file '${file}': ${e.message}.`,\n ),\n );\n }\n });\n\n // Handle completion\n 
parser.on('end', () => {\n logger.info(`Finished processing ${index} assessments from file ${file}`);\n resolve();\n });\n\n // Handle stream or parsing errors\n parser.on('error', (error) => {\n logger.error(\n colors.red(`Error parsing file '${file}': ${error.message}`),\n );\n reject(error);\n });\n\n fileStream.on('error', (error) => {\n logger.error(\n colors.red(`Error reading file '${file}': ${error.message}`),\n );\n reject(error);\n });\n });\n};\n","import type { LocalContext } from '../../../context';\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { createOneTrustGotInstance } from '../../../lib/oneTrust';\nimport {\n OneTrustFileFormat,\n OneTrustPullResource,\n OneTrustPullSource,\n} from '../../../enums';\nimport { buildTranscendGraphQLClient } from '../../../lib/graphql';\nimport {\n syncOneTrustAssessmentsFromFile,\n syncOneTrustAssessmentsFromOneTrust,\n} from '../../../lib/oneTrust/helpers';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n// Command flag interface\nexport interface SyncOtCommandFlags {\n hostname?: string;\n oneTrustAuth?: string;\n source: OneTrustPullSource;\n transcendAuth?: string;\n transcendUrl: string;\n file?: string;\n resource: OneTrustPullResource;\n dryRun: boolean;\n debug: boolean;\n}\n\n// Command implementation\nexport async function syncOt(\n this: LocalContext,\n {\n hostname,\n oneTrustAuth,\n source,\n transcendAuth,\n transcendUrl,\n resource,\n file,\n dryRun,\n debug,\n }: SyncOtCommandFlags,\n): Promise<void> {\n // Must be able to authenticate to transcend to sync resources to it\n if (!dryRun && !transcendAuth) {\n throw new Error(\n // eslint-disable-next-line no-template-curly-in-string\n 'Must specify a \"transcendAuth\" parameter to sync resources to Transcend. e.g. --transcendAuth=${TRANSCEND_API_KEY}',\n );\n }\n\n // If trying to sync to disk, must specify a file path\n if (dryRun && !file) {\n throw new Error(\n 'Must set a \"file\" parameter when \"dryRun\" is \"true\". e.g. --file=./oneTrustAssessments.json',\n );\n }\n\n if (file) {\n const splitFile = file.split('.');\n if (splitFile.length < 2) {\n throw new Error(\n 'The \"file\" parameter has an invalid format. Expected a path with extensions. e.g. --file=./pathToFile.json.',\n );\n }\n if (splitFile.at(-1) !== OneTrustFileFormat.Json) {\n throw new Error(\n `Expected the format of the \"file\" parameters '${file}' to be '${\n OneTrustFileFormat.Json\n }', but got '${splitFile.at(-1)}'.`,\n );\n }\n }\n\n // if reading assessments from a OneTrust\n if (source === OneTrustPullSource.OneTrust) {\n // must specify the OneTrust hostname\n if (!hostname) {\n throw new Error(\n 'Missing required parameter \"hostname\". e.g. --hostname=customer.my.onetrust.com',\n );\n }\n // must specify the OneTrust auth\n if (!oneTrustAuth) {\n throw new Error(\n 'Missing required parameter \"oneTrustAuth\". e.g. --oneTrustAuth=$ONE_TRUST_AUTH_TOKEN',\n );\n }\n } else {\n // if reading the assessments from a file, must specify a file to read from\n if (!file) {\n throw new Error(\n 'Must specify a \"file\" parameter to read the OneTrust assessments from. e.g. --source=./oneTrustAssessments.json',\n );\n }\n\n // Cannot try reading from file and save assessments to a file simultaneously\n if (dryRun) {\n throw new Error(\n 'Cannot read and write to a file simultaneously.' 
+\n ` Emit the \"source\" parameter or set it to ${OneTrustPullSource.OneTrust} if \"dryRun\" is enabled.`,\n );\n }\n }\n\n doneInputValidation(this.process.exit);\n\n // instantiate a client to talk to OneTrust\n const oneTrust =\n hostname && oneTrustAuth\n ? createOneTrustGotInstance({\n hostname,\n auth: oneTrustAuth,\n })\n : undefined;\n\n // instantiate a client to talk to Transcend\n const transcend =\n transcendUrl && transcendAuth\n ? buildTranscendGraphQLClient(transcendUrl, transcendAuth)\n : undefined;\n\n try {\n if (resource === OneTrustPullResource.Assessments) {\n if (source === OneTrustPullSource.OneTrust && oneTrust) {\n await syncOneTrustAssessmentsFromOneTrust({\n oneTrust,\n file,\n dryRun,\n ...(transcend && { transcend }),\n });\n } else if (source === OneTrustPullSource.File && file && transcend) {\n await syncOneTrustAssessmentsFromFile({ file, transcend });\n }\n }\n } catch (err) {\n throw new Error(\n `An error occurred syncing the resource ${resource} from OneTrust: ${\n debug ? err.stack : err.message\n }`,\n );\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced OneTrust ${resource} to ${\n dryRun ? `disk at \"${file}\"` : 'Transcend'\n }!`,\n ),\n );\n}\n"],"mappings":"ijBAQA,MAAa,GAA6B,CACxC,WACA,UAOA,EAAA,QAAI,OAAO,CACT,UAAW,WAAW,IACtB,QAAS,CACP,OAAQ,mBACR,eAAgB,mBAChB,cAAe,UAAU,IAC1B,CACF,CAAC,CCfE,EAAkB,OAAO,OAAOA,EAAAA,EAAqB,CCD9C,GAA4B,CACvC,aACA,QACA,QACA,OAAO,MAUK,CACZ,IAAI,EAAY,IAEZ,IAAU,GAAK,KACjB,EAAY;GAGd,IAAM,EAAwB,KAAK,UAAU,EAAW,CAGlD,EAAQ,GAAS,EAAQ,EAAQ,GAAK,CAAC,EAAO,IAAM,GAU1D,MAPA,GAAY,GAAG,EAAY,EAAwB,EAAM,KAGpD,GAAS,IAAU,EAAQ,GAAM,KACpC,GAAa;IAGR,GC9BI,GAAgC,CAC3C,OACA,aACA,QACA,WAUU,CACV,EAAA,EAAO,KACL,EAAA,QAAO,QACL,+BACE,EAAQ,EACT,MAAM,EAAM,YAAY,EAAK,MAC/B,CACF,CAEG,IAAU,EACZ,EAAA,QAAG,cACD,EACA,EAAyB,CACvB,aACA,QACA,QACA,KAAM,GACP,CAAC,CACH,CAED,EAAA,QAAG,eACD,EACA,EAAyB,CACvB,aACA,QACA,QACA,KAAM,GACP,CAAC,CACH,ECvCQ,EAA+B,MAAO,CACjD,cAImC,CACnC,IAAI,EAAc,EACd,EAAa,EACb,EAAgB,EAEd,EAAuC,EAAE,CAE/C,KAAO,EAAc,GAAY,CAC/B,GAAM,CAAE,QAAS,MAAM,EAAS,IAC9B,sCAAsC,EAAY,YACnD,CAEK,CAAE,OAAM,YAAA,EAAA,EAAA,aACZC,EAAAA,qCACA,EACD,CACD,EAAe,KAAK,GAAI,GAAW,EAAE,CAAE,CACnC,IAAgB,IAClB,EAAa,GAAM,YAAc,EACjC,EAAgB,GAAM,eAAiB,GAEzC,GAAe,EAGf,EAAA,EAAO,KACL,WAAW,EAAe,OAAO,MAAM,EAAc,eACtD,CAGH,OAAO,GCtCI,EAAwB,MAAO,CAC1C,WACA,kBAM4C,CAC5C,GAAM,CAAE,QAAS,MAAM,EAAS,IAC9B,iCAAiC,EAAa,uCAC/C,CAED,OAAA,EAAA,EAAA,aAAmBC,EAAAA,8BAA+B,EAAK,ECb5C,EAAkB,MAAO,CACpC,WACA,YAMsC,CACtC,GAAM,CAAE,QAAS,MAAM,EAAS,IAAI,qBAAqB,IAAS,CAElE,OAAA,EAAA,EAAA,aAAmBC,EAAAA,wBAAyB,EAAK,ECXtC,EAAkB,MAAO,CACpC,WACA,YAMsC,CACtC,GAAM,CAAE,QAAS,MAAM,EAAS,IAAI,qBAAqB,IAAS,CAElE,OAAA,EAAA,EAAA,aAAmBC,EAAAA,wBAAyB,EAAK,ECPtC,GAA4B,CACvC,aACA,oBACA,cACA,iBACA,mBACA,wBAcgC,CAChC,IAAM,EAAkBC,EAAAA,EAAM,EAAa,KAAK,CAC1C,CAAE,WAAU,YAAW,GAAG,GAA0B,EACpD,EAA2B,EAAS,IAAK,GAAY,CACzD,GAAM,CAAE,YAAW,GAAG,GAAgB,EAChC,EAAoB,EAAU,IAAK,GAAa,CACpD,GAAM,CAAE,QAAO,GAAG,GAAiB,EAC7B,GAAiB,GAAS,EAAE,EAAE,IAAK,GAAS,CAChD,IAAM,EAAU,EAAgB,EAAK,QACrC,MAAO,CACL,GAAG,EACH,GAAG,EACH,MAAO,EAAK,MACZ,YAAa,EAAK,aAAe,EAClC,EACD,CACF,MAAO,CACL,GAAG,EACH,MAAO,EACR,EACD,CACF,MAAO,CACL,GAAG,EACH,UAAW,EACZ,EACD,CAGI,EAAoB,CACxB,GAAG,EACH,OAAQ,GAAgB,QAAU,GAClC,SAAU,GAAgB,UAAY,WACtC,OAAQ,GAAgB,QAAU,EAAE,CACpC,MAAO,GAAgB,OAAS,KAChC,UAAW,GAAgB,KAAK,WAAa,KAC7C,WAAY,GAAgB,KAAK,YAAc,KAChD,CAGK,EAAsBA,EAAAA,EAAM,EAAkB,KAAK,CACnD,EAAoB,EAAkB,UAAU,QACnD,GACC,EAAoB,EAAiB,IACjC,CACE,CACE,GAAG,EACH,SAAU,CACR,GAAG,EAAiB,SACpB,OAAQ,EAAoB,EAAiB,IAAI,OACjD,SAAU,EAAoB,EAAiB,IAAI,SACnD,OAAQ,EAAoB,EAAiB,IAAI,OACjD,MAAO,
EAAoB,EAAiB,IAAI,MAChD,UACE,EAAoB,EAAiB,IAAI,KAAK,WAC9C,KACF,WACE,EAAoB,EAAiB,IAAI,KAAK,YAC9C,KACH,CACF,CACF,CACD,EAAE,CACT,CAGK,EAAyBA,EAAAA,EAAM,EAAoB,KAAK,CACxD,EAAsB,EAAkB,YAC3C,OAAQ,GAAM,CAAC,EAAE,KAAK,SAAS,IAAI,CAAC,CACpC,QAAS,GACR,EAAuB,EAAW,IAC9B,CACE,CACE,GAAG,EACH,OAAQ,EAAuB,EAAW,IAAI,OAC9C,SAAU,EAAuB,EAAW,IAAI,SAChD,OAAQ,EAAuB,EAAW,IAAI,OAC9C,MAAO,EAAuB,EAAW,IAAI,MAC7C,UACE,EAAuB,EAAW,IAAI,KAAK,WAAa,KAC1D,WACE,EAAuB,EAAW,IAAI,KAAK,YAAc,KAC5D,CACF,CACD,EAAE,CACP,CAGH,MAAO,CACL,GAAG,EACH,GAAG,EACH,UAAW,EACX,YAAa,EACb,UAAW,EACX,SAAU,EACX,ECzGU,EAAoC,MAAO,CACtD,YACA,aACA,QACA,WAUmB,CACnB,EAAA,EAAO,KACL,EAAA,QAAO,QACL,+BAA+B,EAAQ,EAAE,GACvC,EAAQ,MAAM,EAAM,GAAK,IAC1B,iBACF,CACF,CAUD,IAAM,EAAwC,CAC5C,KARW,EAAyB,CACpC,aACA,QACA,QACD,CAAC,CAKD,CAED,GAAI,CACF,MAAMC,EAAAA,EAMH,EAAWC,EAAAA,GAAmC,CAC/C,QACD,CAAC,MACQ,CACV,EAAA,EAAO,MACL,EAAA,QAAO,IACL,6BAA6B,EAAQ,EAAE,GACrC,EAAQ,MAAM,EAAM,GAAK,IAC1B,qCACwB,EAAW,KAAK,oBAAoB,EAAW,SAAS,KAAK,IACvF,CACF,GC1CQ,EAAsC,MAAO,CACxD,WACA,OACA,SACA,eAUmB,CAEnB,EAAA,EAAO,KAAK,mDAAmD,CAC/D,IAAM,EAAc,MAAM,EAA6B,CAAE,WAAU,CAAC,CAG9D,EAA+D,EAAE,CAYvE,MAAMC,EAAAA,GARoB,MAAM,KAC9B,CACE,OAAQ,KAAK,KAAK,EAAY,OAAS,EAAW,CACnD,EACA,EAAG,IAAM,EAAY,MAAM,EAAI,GAAa,EAAI,GAAK,EAAW,CAClE,CAGkC,MAAO,EAAiB,IAAU,CACnE,IAAM,EAAyD,EAAE,CAGjE,MAAMC,EAAAA,GACJ,EACA,MAAO,EAAY,IAAU,CAC3B,IAAM,EAAmB,EAAa,EAAQ,EAAQ,EACtD,EAAA,EAAO,KACL,eAAe,EAAiB,MAAM,EAAY,OAAO,wBAC1D,CACD,GAAM,CAAE,eAAc,gBAAiB,EACjC,EAAoB,MAAM,EAAsB,CACpD,WACA,eACD,CAAC,CAEI,EAAY,EAAkB,UAAU,GAC1C,EAAU,EAAoB,GAClC,GAAI,CAAC,EAAS,CACZ,EAAA,EAAO,KACL,eAAe,EAAiB,MAAM,EAAY,OAAO,wBAC1D,CACD,GAAI,CACF,EAAU,MAAM,EAAgB,CAC9B,WACA,OAAQ,EACT,CAAC,CACF,EAAoB,GAAa,OACvB,CACV,EAAA,EAAO,KACL,EAAA,QAAO,OACL,eAAe,EAAiB,MAAM,EAAY,OAAO,+CACvC,EAAU,sBAAsB,EAAW,KAAK,oBAAoB,IACvF,CACF,EAKL,GAAM,CAAE,aAAc,EAClB,EAAgD,EAAE,CAClD,EAAU,OAAS,IACrB,EAAA,EAAO,KACL,eAAe,EAAiB,MAAM,EAAY,OAAO,0BAC1D,CACD,EAAmB,MAAMA,EAAAA,GACvB,EAAU,KAAK,CAAE,QAAS,EAAG,CAC7B,KAAO,IAAW,CAChB,GAAI,CACF,IAAI,EAAW,EAAoB,GAKnC,OAJK,IACH,EAAW,MAAM,EAAgB,CAAE,WAAU,SAAQ,CAAC,CACtD,EAAoB,GAAU,GAEzB,CAAC,EAAS,MACP,CAOV,OANA,EAAA,EAAO,KACL,EAAA,QAAO,OACL,eAAe,EAAiB,MAAM,EAAY,OAAO,mDACtC,EAAO,sBAAsB,EAAW,KAAK,oBAAoB,IACrF,CACF,CACM,EAAE,GAGb,CAAE,YAAa,EAAG,CACnB,EAIH,GAAM,CAAE,eAAgB,EAElB,EAAsB,EAAY,OACrC,GAAM,CAAC,EAAE,KAAK,SAAS,IAAI,CAC7B,CACG,EAAkD,EAAE,CACpD,EAAoB,OAAS,IAC/B,EAAA,EAAO,KACL,eAAe,EAAiB,MAAM,EAAY,OAAO,4BAC1D,CACD,EAAqB,MAAMA,EAAAA,GACzB,EAAoB,KAAK,CAAE,QAAS,EAAG,CACvC,KAAO,IAAW,CAChB,GAAI,CACF,IAAI,EAAa,EAAoB,GAKrC,OAJK,IACH,EAAa,MAAM,EAAgB,CAAE,WAAU,SAAQ,CAAC,CACxD,EAAoB,GAAU,GAEzB,CAAC,EAAW,MACT,CAOV,OANA,EAAA,EAAO,KACL,EAAA,QAAO,OACL,eAAe,EAAiB,MAAM,EAAY,OAAO,kDACpC,EAAO,sBAAsB,EAAW,KAAK,oBAAoB,IACvF,CACF,CACM,EAAE,GAGb,CAAE,YAAa,EAAG,CACnB,EAIH,IAAI,EAAyC,EAAE,CACzC,EAAUC,EAAAA,GACd,EAAkB,SAAS,QAAS,GAClC,EAAE,UAAU,QAAS,IAClB,EAAE,OAAS,EAAE,EAAE,QAAS,GAAM,EAAE,OAAO,CACzC,CACF,CACF,CACG,EAAQ,OAAS,IACnB,EAAA,EAAO,KACL,eAAe,EAAiB,MAAM,EAAY,OAAO,sBAC1D,CACD,EAAc,MAAMD,EAAAA,GAClB,EACC,GAAW,EAAgB,CAAE,WAAkB,SAAkB,CAAC,CACnE,CACE,YAAa,EACd,CACF,EAIH,IAAM,EAAqB,EAAyB,CAClD,aACA,oBACA,cACA,eAAgB,EAChB,iBAAkB,EAAiB,MAAM,CACzC,mBAAoB,EAAmB,MAAM,CAC9C,CAAC,CAEF,EAAyB,KAAK,EAAmB,EAEnD,CAAE,YAAa,EAAY,CAC5B,CAGD,MAAMD,EAAAA,GACJ,EACA,MAAO,EAAoB,IAAU,CAEnC,IAAM,EAAc,EAAQ,EAAa,EAErC,GAAU,EAEZ,EAA6B,CAC3B,WAAY,EACZ,MAAO,EACP,MAAO,EAAY,OACnB,OACD,CAAC,CACO,GAET,MAAM,EAAkC,CACtC,WAAY,EACZ,YACA,MAAO,EAAY,OACnB,MAAO,EACR,CAAC,EAGP,EACD,EC5NS,GAAmC,CAC9C,YACA,WAOA,EAAA,EAAO,KAAK,6CAA6C,EAAK,KAAK,CAE5D,IAAI,SAAS,EAAS,IAAW,CAEtC,IAAM,GAAA,EAAA,EAAA,kBAA8B,EAAM,CACxC,SAAU,QACV,cAA
e,GAAK,KACrB,CAAC,CAGI,EAAS,EAAA,QAAW,MAAM,IAAI,CAEhC,EAAQ,EAGZ,EAAW,KAAK,EAAO,CAGvB,EAAO,GAAG,OAAQ,KAAO,IAAe,CACtC,GAAI,CAEF,EAAO,OAAO,CASd,MAAM,EAAkC,CACtC,YAAA,EAAA,EAAA,aANAG,EAAAA,2BACA,EACD,CAKC,YACA,QACD,CAAC,CAEF,GAAS,EAGT,EAAO,QAAQ,OACR,EAAG,CAEV,EAAA,EAAO,MACL,EAAA,QAAO,IACL,kCAAkC,EAAM,cAAc,EAAK,KAAK,EAAE,QAAQ,GAC3E,CACF,GAEH,CAGF,EAAO,GAAG,UAAa,CACrB,EAAA,EAAO,KAAK,uBAAuB,EAAM,yBAAyB,IAAO,CACzE,GAAS,EACT,CAGF,EAAO,GAAG,QAAU,GAAU,CAC5B,EAAA,EAAO,MACL,EAAA,QAAO,IAAI,uBAAuB,EAAK,KAAK,EAAM,UAAU,CAC7D,CACD,EAAO,EAAM,EACb,CAEF,EAAW,GAAG,QAAU,GAAU,CAChC,EAAA,EAAO,MACL,EAAA,QAAO,IAAI,uBAAuB,EAAK,KAAK,EAAM,UAAU,CAC7D,CACD,EAAO,EAAM,EACb,EACF,EChEJ,eAAsB,EAEpB,CACE,WACA,eACA,SACA,gBACA,eACA,WACA,OACA,SACA,SAEa,CAEf,GAAI,CAAC,GAAU,CAAC,EACd,MAAU,MAER,qHACD,CAIH,GAAI,GAAU,CAAC,EACb,MAAU,MACR,8FACD,CAGH,GAAI,EAAM,CACR,IAAM,EAAY,EAAK,MAAM,IAAI,CACjC,GAAI,EAAU,OAAS,EACrB,MAAU,MACR,8GACD,CAEH,GAAI,EAAU,GAAG,GAAG,GAAKC,EAAAA,EAAmB,KAC1C,MAAU,MACR,iDAAiD,EAAK,WACpDA,EAAAA,EAAmB,KACpB,cAAc,EAAU,GAAG,GAAG,CAAC,IACjC,CAKL,GAAI,IAAWC,EAAAA,EAAmB,SAAU,CAE1C,GAAI,CAAC,EACH,MAAU,MACR,kFACD,CAGH,GAAI,CAAC,EACH,MAAU,MACR,uFACD,KAEE,CAEL,GAAI,CAAC,EACH,MAAU,MACR,kHACD,CAIH,GAAI,EACF,MAAU,MACR,4FAC+CA,EAAAA,EAAmB,SAAS,0BAC5E,CAIL,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EACJ,GAAY,EACR,EAA0B,CACxB,WACA,KAAM,EACP,CAAC,CACF,IAAA,GAGA,EACJ,GAAgB,EACZC,EAAAA,GAA4B,EAAc,EAAc,CACxD,IAAA,GAEN,GAAI,CACE,IAAaC,EAAAA,EAAqB,cAChC,IAAWF,EAAAA,EAAmB,UAAY,EAC5C,MAAM,EAAoC,CACxC,WACA,OACA,SACA,GAAI,GAAa,CAAE,YAAW,CAC/B,CAAC,CACO,IAAWA,EAAAA,EAAmB,MAAQ,GAAQ,GACvD,MAAM,EAAgC,CAAE,OAAM,YAAW,CAAC,QAGvD,EAAK,CACZ,MAAU,MACR,0CAA0C,EAAS,kBACjD,EAAQ,EAAI,MAAQ,EAAI,UAE3B,CAIH,EAAA,EAAO,KACL,EAAA,QAAO,MACL,gCAAgC,EAAS,MACvC,EAAS,YAAY,EAAK,GAAK,YAChC,GACF,CACF"}
+ {"version":3,"file":"impl-1-1sg4WF.cjs","names":["OneTrustPullResource","OneTrustGetListOfAssessmentsResponse","OneTrustGetAssessmentResponse","OneTrustGetRiskResponse","OneTrustGetUserResponse","keyBy","makeGraphQLRequest","IMPORT_ONE_TRUST_ASSESSMENT_FORMS","mapSeries","map","uniq","OneTrustEnrichedAssessment","OneTrustFileFormat","OneTrustPullSource","buildTranscendGraphQLClient","OneTrustPullResource"],"sources":["../src/lib/oneTrust/createOneTrustGotInstance.ts","../src/lib/oneTrust/helpers/parseCliSyncOtArguments.ts","../src/lib/oneTrust/helpers/oneTrustAssessmentToJson.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentToDisk.ts","../src/lib/oneTrust/endpoints/getListOfOneTrustAssessments.ts","../src/lib/oneTrust/endpoints/getOneTrustAssessment.ts","../src/lib/oneTrust/endpoints/getOneTrustRisk.ts","../src/lib/oneTrust/endpoints/getOneTrustUser.ts","../src/lib/oneTrust/helpers/enrichOneTrustAssessment.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentToTranscend.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentsFromOneTrust.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentsFromFile.ts","../src/commands/migration/sync-ot/impl.ts"],"sourcesContent":["import got, { Got } from 'got';\n\n/**\n * Instantiate an instance of got that is capable of making requests to OneTrust\n *\n * @param param - information about the OneTrust URL\n * @returns The instance of got that is capable of making requests to the customer ingress\n */\nexport const createOneTrustGotInstance = ({\n hostname,\n auth,\n}: {\n /** Hostname of the OneTrust API */\n hostname: string;\n /** The OAuth access token */\n auth: string;\n}): Got =>\n got.extend({\n prefixUrl: `https://${hostname}`,\n headers: {\n accept: 'application/json',\n 'content-type': 'application/json',\n authorization: `Bearer ${auth}`,\n },\n });\n","import { logger } from '../../../logger';\nimport colors from 'colors';\nimport yargs from 'yargs-parser';\nimport {\n OneTrustFileFormat,\n OneTrustPullResource,\n OneTrustPullSource,\n} from '../../../enums';\n\nconst VALID_RESOURCES = Object.values(OneTrustPullResource);\n\ninterface OneTrustCliArguments {\n /** The name of the file to write the resources to */\n file: string;\n /** The OneTrust hostname to send the requests to */\n hostname?: string;\n /** The OAuth Bearer token used to authenticate the requests to OneTrust */\n oneTrustAuth?: string;\n /** The Transcend API key to authenticate the requests to Transcend */\n transcendAuth: string;\n /** The Transcend URL where to forward requests */\n transcendUrl: string;\n /** The resource to pull from OneTrust */\n resource: OneTrustPullResource;\n /** Whether to enable debugging while reporting errors */\n debug: boolean;\n /** Whether to export the resource into a file rather than push to transcend */\n dryRun: boolean;\n /** Where to read the OneTrust resource from */\n source: OneTrustPullSource;\n}\n\n/**\n * Parse the command line arguments\n *\n * @returns the parsed arguments\n */\nexport const parseCliSyncOtArguments = (): OneTrustCliArguments => {\n const {\n file,\n hostname,\n oneTrustAuth,\n resource,\n debug,\n dryRun,\n transcendAuth,\n transcendUrl,\n source,\n } = yargs(process.argv.slice(2), {\n string: [\n 'file',\n 'hostname',\n 'oneTrustAuth',\n 'resource',\n 'dryRun',\n 'transcendAuth',\n 'transcendUrl',\n 'source',\n ],\n boolean: ['debug', 'dryRun'],\n default: {\n resource: OneTrustPullResource.Assessments,\n debug: false,\n dryRun: false,\n transcendUrl: 'https://api.transcend.io',\n source: 
OneTrustPullSource.OneTrust,\n },\n });\n\n // Must be able to authenticate to transcend to sync resources to it\n if (!dryRun && !transcendAuth) {\n logger.error(\n colors.red(\n // eslint-disable-next-line no-template-curly-in-string\n 'Must specify a \"transcendAuth\" parameter to sync resources to Transcend. e.g. --transcendAuth=${TRANSCEND_API_KEY}',\n ),\n );\n return process.exit(1);\n }\n if (!dryRun && !transcendUrl) {\n logger.error(\n colors.red(\n // eslint-disable-next-line max-len\n 'Must specify a \"transcendUrl\" parameter to sync resources to Transcend. e.g. --transcendUrl=https://api.transcend.io',\n ),\n );\n return process.exit(1);\n }\n\n // If trying to sync to disk, must specify a file path\n if (dryRun && !file) {\n logger.error(\n colors.red(\n 'Must set a \"file\" parameter when \"dryRun\" is \"true\". e.g. --file=./oneTrustAssessments.json',\n ),\n );\n return process.exit(1);\n }\n\n if (file) {\n const splitFile = file.split('.');\n if (splitFile.length < 2) {\n logger.error(\n colors.red(\n 'The \"file\" parameter has an invalid format. Expected a path with extensions. e.g. --file=./pathToFile.json.',\n ),\n );\n return process.exit(1);\n }\n if (splitFile.at(-1) !== OneTrustFileFormat.Json) {\n logger.error(\n colors.red(\n `Expected the format of the \"file\" parameters '${file}' to be '${\n OneTrustFileFormat.Json\n }', but got '${splitFile.at(-1)}'.`,\n ),\n );\n return process.exit(1);\n }\n }\n\n // if reading assessments from a OneTrust\n if (source === OneTrustPullSource.OneTrust) {\n // must specify the OneTrust hostname\n if (!hostname) {\n logger.error(\n colors.red(\n 'Missing required parameter \"hostname\". e.g. --hostname=customer.my.onetrust.com',\n ),\n );\n return process.exit(1);\n }\n // must specify the OneTrust auth\n if (!oneTrustAuth) {\n logger.error(\n colors.red(\n 'Missing required parameter \"oneTrustAuth\". e.g. --oneTrustAuth=$ONE_TRUST_AUTH_TOKEN',\n ),\n );\n return process.exit(1);\n }\n } else {\n // if reading the assessments from a file, must specify a file to read from\n if (!file) {\n logger.error(\n colors.red(\n 'Must specify a \"file\" parameter to read the OneTrust assessments from. e.g. --source=./oneTrustAssessments.json',\n ),\n );\n return process.exit(1);\n }\n\n // Cannot try reading from file and save assessments to a file simultaneously\n if (dryRun) {\n logger.error(\n colors.red(\n 'Cannot read and write to a file simultaneously.' +\n ` Emit the \"source\" parameter or set it to ${OneTrustPullSource.OneTrust} if \"dryRun\" is enabled.`,\n ),\n );\n return process.exit(1);\n }\n }\n\n if (!VALID_RESOURCES.includes(resource)) {\n logger.error(\n colors.red(\n `Received invalid resource value: \"${resource}\". 
Allowed: ${VALID_RESOURCES.join(\n ',',\n )}`,\n ),\n );\n return process.exit(1);\n }\n\n return {\n file,\n ...(hostname && { hostname }),\n ...(oneTrustAuth && { oneTrustAuth }),\n resource,\n debug,\n dryRun,\n transcendAuth,\n transcendUrl,\n source,\n };\n};\n","import { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\n\n/**\n * Converts the assessment into a json entry.\n *\n * @param param - information about the assessment and amount of entries\n * @returns a stringified json entry ready to be appended to a file\n */\nexport const oneTrustAssessmentToJson = ({\n assessment,\n index,\n total,\n wrap = true,\n}: {\n /** The assessment to convert */\n assessment: OneTrustEnrichedAssessment;\n /** The position of the assessment in the final Json object */\n index: number;\n /** The total amount of the assessments in the final Json object */\n total?: number;\n /** Whether to wrap every entry in brackets */\n wrap?: boolean;\n}): string => {\n let jsonEntry = '';\n // start with an opening bracket\n if (index === 0 || wrap) {\n jsonEntry = '[\\n';\n }\n\n const stringifiedAssessment = JSON.stringify(assessment);\n\n // Add comma for all items except the last one\n const comma = total && index < total - 1 && !wrap ? ',' : '';\n\n // write to file\n jsonEntry = `${jsonEntry + stringifiedAssessment + comma}\\n`;\n\n // end with closing bracket\n if ((total && index === total - 1) || wrap) {\n jsonEntry += '\\n]';\n }\n\n return jsonEntry;\n};\n","import { logger } from '../../../logger';\nimport colors from 'colors';\nimport fs from 'node:fs';\nimport { oneTrustAssessmentToJson } from './oneTrustAssessmentToJson';\nimport { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\n\n/**\n * Write the assessment to disk at the specified file path.\n *\n *\n * @param param - information about the assessment to write\n */\nexport const syncOneTrustAssessmentToDisk = ({\n file,\n assessment,\n index,\n total,\n}: {\n /** The file path to write the assessment to */\n file: string;\n /** The basic assessment */\n assessment: OneTrustEnrichedAssessment;\n /** The index of the assessment being written to the file */\n index: number;\n /** The total amount of assessments that we will write */\n total: number;\n}): void => {\n logger.info(\n colors.magenta(\n `Writing enriched assessment ${\n index + 1\n } of ${total} to file \"${file}\"...`,\n ),\n );\n\n if (index === 0) {\n fs.writeFileSync(\n file,\n oneTrustAssessmentToJson({\n assessment,\n index,\n total,\n wrap: false,\n }),\n );\n } else {\n fs.appendFileSync(\n file,\n oneTrustAssessmentToJson({\n assessment,\n index,\n total,\n wrap: false,\n }),\n );\n }\n};\n","import { Got } from 'got';\nimport { logger } from '../../../logger';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport {\n OneTrustAssessment,\n OneTrustGetListOfAssessmentsResponse,\n} from '@transcend-io/privacy-types';\n\n/**\n * Fetch a list of all assessments from the OneTrust client.\n * ref: https://developer.onetrust.com/onetrust/reference/getallassessmentbasicdetailsusingget\n *\n * @param param - the information about the OneTrust client\n * @returns a list of OneTrustAssessment\n */\nexport const getListOfOneTrustAssessments = async ({\n oneTrust,\n}: {\n /** The OneTrust client instance */\n oneTrust: Got;\n}): Promise<OneTrustAssessment[]> => {\n let currentPage = 0;\n let totalPages = 1;\n let totalElements = 0;\n\n const allAssessments: OneTrustAssessment[] = [];\n\n while (currentPage < totalPages) {\n const { body } = 
await oneTrust.get(\n `api/assessment/v2/assessments?page=${currentPage}&size=2000`,\n );\n\n const { page, content } = decodeCodec(\n OneTrustGetListOfAssessmentsResponse,\n body,\n );\n allAssessments.push(...(content ?? []));\n if (currentPage === 0) {\n totalPages = page?.totalPages ?? 0;\n totalElements = page?.totalElements ?? 0;\n }\n currentPage += 1;\n\n // log progress\n logger.info(\n `Fetched ${allAssessments.length} of ${totalElements} assessments.`,\n );\n }\n\n return allAssessments;\n};\n","import { Got } from 'got';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { OneTrustGetAssessmentResponse } from '@transcend-io/privacy-types';\n\n/**\n * Retrieve details about a particular assessment.\n * ref: https://developer.onetrust.com/onetrust/reference/exportassessmentusingget\n *\n * @param param - the information about the OneTrust client and assessment to retrieve\n * @returns details about the assessment\n */\nexport const getOneTrustAssessment = async ({\n oneTrust,\n assessmentId,\n}: {\n /** The OneTrust client instance */\n oneTrust: Got;\n /** The ID of the assessment to retrieve */\n assessmentId: string;\n}): Promise<OneTrustGetAssessmentResponse> => {\n const { body } = await oneTrust.get(\n `api/assessment/v2/assessments/${assessmentId}/export?ExcludeSkippedQuestions=false`,\n );\n\n return decodeCodec(OneTrustGetAssessmentResponse, body);\n};\n","import { Got } from 'got';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { OneTrustGetRiskResponse } from '@transcend-io/privacy-types';\n\n/**\n * Retrieve details about a particular risk.\n * ref: https://developer.onetrust.com/onetrust/reference/getriskusingget\n *\n * @param param - the information about the OneTrust client and risk to retrieve\n * @returns the OneTrust risk\n */\nexport const getOneTrustRisk = async ({\n oneTrust,\n riskId,\n}: {\n /** The OneTrust client instance */\n oneTrust: Got;\n /** The ID of the OneTrust risk to retrieve */\n riskId: string;\n}): Promise<OneTrustGetRiskResponse> => {\n const { body } = await oneTrust.get(`api/risk/v2/risks/${riskId}`);\n\n return decodeCodec(OneTrustGetRiskResponse, body);\n};\n","import { Got } from 'got';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { OneTrustGetUserResponse } from '@transcend-io/privacy-types';\n\n/**\n * Retrieve details about a particular user.\n * ref: https://developer.onetrust.com/onetrust/reference/getriskusingget\n *\n * @param param - the information about the OneTrust client and risk to retrieve\n * @returns the OneTrust risk\n */\nexport const getOneTrustUser = async ({\n oneTrust,\n userId,\n}: {\n /** The OneTrust client instance */\n oneTrust: Got;\n /** The ID of the OneTrust user to retrieve */\n userId: string;\n}): Promise<OneTrustGetUserResponse> => {\n const { body } = await oneTrust.get(`api/scim/v2/Users/${userId}`);\n\n return decodeCodec(OneTrustGetUserResponse, body);\n};\n","import {\n OneTrustAssessment,\n OneTrustEnrichedAssessment,\n OneTrustGetAssessmentResponse,\n OneTrustGetRiskResponse,\n OneTrustGetUserResponse,\n} from '@transcend-io/privacy-types';\nimport { keyBy } from 'lodash-es';\n\n/**\n * Merge the assessment, assessmentDetails, and riskDetails into one object.\n *\n * @param param - the assessment and risk information\n * @returns the assessment enriched with details and risk information\n */\nexport const enrichOneTrustAssessment = ({\n assessment,\n assessmentDetails,\n riskDetails,\n creatorDetails,\n approversDetails,\n 
respondentsDetails,\n}: {\n /** The OneTrust risk details */\n riskDetails: OneTrustGetRiskResponse[];\n /** The OneTrust assessment as returned from Get List of Assessments endpoint */\n assessment: OneTrustAssessment;\n /** The OneTrust assessment details */\n assessmentDetails: OneTrustGetAssessmentResponse;\n /** The OneTrust assessment creator details */\n creatorDetails: OneTrustGetUserResponse;\n /** The OneTrust assessment approvers details */\n approversDetails: OneTrustGetUserResponse[];\n /** The OneTrust assessment internal respondents details */\n respondentsDetails: OneTrustGetUserResponse[];\n}): OneTrustEnrichedAssessment => {\n const riskDetailsById = keyBy(riskDetails, 'id');\n const { sections, createdBy, ...restAssessmentDetails } = assessmentDetails;\n const sectionsWithEnrichedRisk = sections.map((section) => {\n const { questions, ...restSection } = section;\n const enrichedQuestions = questions.map((question) => {\n const { risks, ...restQuestion } = question;\n const enrichedRisks = (risks ?? []).map((risk) => {\n const details = riskDetailsById[risk.riskId];\n return {\n ...risk,\n ...details,\n level: risk.level,\n impactLevel: risk.impactLevel ?? 0,\n };\n });\n return {\n ...restQuestion,\n risks: enrichedRisks,\n };\n });\n return {\n ...restSection,\n questions: enrichedQuestions,\n };\n });\n\n // grab creator details\n const enrichedCreatedBy = {\n ...createdBy,\n active: creatorDetails?.active ?? false,\n userType: creatorDetails?.userType ?? 'Internal',\n emails: creatorDetails?.emails ?? [],\n title: creatorDetails?.title ?? null,\n givenName: creatorDetails?.name.givenName ?? null,\n familyName: creatorDetails?.name.familyName ?? null,\n };\n\n // grab approvers details\n const approverDetailsById = keyBy(approversDetails, 'id');\n const enrichedApprovers = assessmentDetails.approvers.flatMap(\n (originalApprover) =>\n approverDetailsById[originalApprover.id]\n ? [\n {\n ...originalApprover,\n approver: {\n ...originalApprover.approver,\n active: approverDetailsById[originalApprover.id].active,\n userType: approverDetailsById[originalApprover.id].userType,\n emails: approverDetailsById[originalApprover.id].emails,\n title: approverDetailsById[originalApprover.id].title,\n givenName:\n approverDetailsById[originalApprover.id].name.givenName ??\n null,\n familyName:\n approverDetailsById[originalApprover.id].name.familyName ??\n null,\n },\n },\n ]\n : [],\n );\n\n // grab respondents details\n const respondentsDetailsById = keyBy(respondentsDetails, 'id');\n const enrichedRespondents = assessmentDetails.respondents\n .filter((r) => !r.name.includes('@')) // search only internal respondents\n .flatMap((respondent) =>\n respondentsDetailsById[respondent.id]\n ? [\n {\n ...respondent,\n active: respondentsDetailsById[respondent.id].active,\n userType: respondentsDetailsById[respondent.id].userType,\n emails: respondentsDetailsById[respondent.id].emails,\n title: respondentsDetailsById[respondent.id].title,\n givenName:\n respondentsDetailsById[respondent.id].name.givenName ?? null,\n familyName:\n respondentsDetailsById[respondent.id].name.familyName ?? 
null,\n },\n ]\n : [],\n );\n\n // combine everything into a single enriched assessment\n return {\n ...assessment,\n ...restAssessmentDetails,\n approvers: enrichedApprovers,\n respondents: enrichedRespondents,\n createdBy: enrichedCreatedBy,\n sections: sectionsWithEnrichedRisk,\n };\n};\n","import { logger } from '../../../logger';\nimport colors from 'colors';\nimport { GraphQLClient } from 'graphql-request';\nimport {\n IMPORT_ONE_TRUST_ASSESSMENT_FORMS,\n makeGraphQLRequest,\n} from '../../graphql';\nimport { ImportOnetrustAssessmentsInput } from '../../../codecs';\nimport { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\nimport { oneTrustAssessmentToJson } from './oneTrustAssessmentToJson';\n\nexport interface AssessmentForm {\n /** ID of Assessment Form */\n id: string;\n /** Title of Assessment Form */\n name: string;\n}\n\n/**\n * Write the assessment to a Transcend instance.\n *\n *\n * @param param - information about the assessment and Transcend instance to write to\n */\nexport const syncOneTrustAssessmentToTranscend = async ({\n transcend,\n assessment,\n total,\n index,\n}: {\n /** the Transcend client instance */\n transcend: GraphQLClient;\n /** the assessment to sync to Transcend */\n assessment: OneTrustEnrichedAssessment;\n /** The index of the assessment being written to the file */\n index: number;\n /** The total amount of assessments that we will write */\n total?: number;\n}): Promise<void> => {\n logger.info(\n colors.magenta(\n `Writing enriched assessment ${index + 1} ${\n total ? `of ${total} ` : ' '\n }to Transcend...`,\n ),\n );\n\n // convert the OneTrust assessment object into a json record\n const json = oneTrustAssessmentToJson({\n assessment,\n index,\n total,\n });\n\n // transform the json record into a valid input to the mutation\n const input: ImportOnetrustAssessmentsInput = {\n json,\n };\n\n try {\n await makeGraphQLRequest<{\n /** the importOneTrustAssessmentForms mutation */\n importOneTrustAssessmentForms: {\n /** Created Assessment Forms */\n assessmentForms: AssessmentForm[];\n };\n }>(transcend, IMPORT_ONE_TRUST_ASSESSMENT_FORMS, {\n input,\n });\n } catch (e) {\n logger.error(\n colors.red(\n `Failed to sync assessment ${index + 1} ${\n total ? `of ${total} ` : ' '\n }to Transcend.\\n` +\n `\\tAssessment Title: ${assessment.name}. 
Template Title: ${assessment.template.name}\\n`,\n ),\n );\n }\n};\n","import type { Got } from 'got';\nimport colors from 'colors';\nimport {\n getListOfOneTrustAssessments,\n getOneTrustAssessment,\n getOneTrustRisk,\n getOneTrustUser,\n} from '../endpoints';\nimport { mapSeries, map } from '../../bluebird';\nimport { logger } from '../../../logger';\nimport {\n OneTrustAssessmentQuestion,\n OneTrustAssessmentSection,\n OneTrustEnrichedAssessment,\n OneTrustGetRiskResponse,\n OneTrustGetUserResponse,\n} from '@transcend-io/privacy-types';\nimport { uniq } from 'lodash-es';\nimport { enrichOneTrustAssessment } from './enrichOneTrustAssessment';\nimport { syncOneTrustAssessmentToDisk } from './syncOneTrustAssessmentToDisk';\nimport { GraphQLClient } from 'graphql-request';\nimport { syncOneTrustAssessmentToTranscend } from './syncOneTrustAssessmentToTranscend';\n\nexport interface AssessmentForm {\n /** ID of Assessment Form */\n id: string;\n /** Title of Assessment Form */\n name: string;\n}\n\n/**\n * Reads all the assessments from a OneTrust instance and syncs them to Transcend or to Disk.\n *\n * @param param - the information about the assessment, its OneTrust source, and destination (disk or Transcend)\n */\nexport const syncOneTrustAssessmentsFromOneTrust = async ({\n oneTrust,\n file,\n dryRun,\n transcend,\n}: {\n /** the OneTrust client instance */\n oneTrust: Got;\n /** the Transcend client instance */\n transcend?: GraphQLClient;\n /** Whether to write to file instead of syncing to Transcend */\n dryRun: boolean;\n /** the path to the file in case dryRun is true */\n file?: string;\n}): Promise<void> => {\n // fetch the list of all assessments in the OneTrust organization\n logger.info('Getting list of all assessments from OneTrust...');\n const assessments = await getListOfOneTrustAssessments({ oneTrust });\n\n // a cache of OneTrust users so we avoid requesting already fetched users\n const oneTrustCachedUsers: Record<string, OneTrustGetUserResponse> = {};\n\n // split all assessments in batches, so we can process some of steps in parallel\n const BATCH_SIZE = 5;\n const assessmentBatches = Array.from(\n {\n length: Math.ceil(assessments.length / BATCH_SIZE),\n },\n (_, i) => assessments.slice(i * BATCH_SIZE, (i + 1) * BATCH_SIZE),\n );\n\n // process each batch and sync the batch right away so it's garbage collected and we don't run out of memory\n await mapSeries(assessmentBatches, async (assessmentBatch, batch) => {\n const batchEnrichedAssessments: OneTrustEnrichedAssessment[] = [];\n\n // fetch assessment details from OneTrust in parallel\n await map(\n assessmentBatch,\n async (assessment, index) => {\n const assessmentNumber = BATCH_SIZE * batch + index + 1;\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching details...`,\n );\n const { templateName, assessmentId } = assessment;\n const assessmentDetails = await getOneTrustAssessment({\n oneTrust,\n assessmentId,\n });\n // fetch assessment's creator information\n const creatorId = assessmentDetails.createdBy.id;\n let creator = oneTrustCachedUsers[creatorId];\n if (!creator) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching creator...`,\n );\n try {\n creator = await getOneTrustUser({\n oneTrust,\n userId: creatorId,\n });\n oneTrustCachedUsers[creatorId] = creator;\n } catch (e) {\n logger.warn(\n colors.yellow(\n `[assessment ${assessmentNumber} of ${assessments.length}]: failed to fetch form creator.` +\n `\\tcreatorId: ${creatorId}. 
Assessment Title: ${assessment.name}. Template Title: ${templateName}`,\n ),\n );\n }\n }\n\n // fetch assessment approvers information\n const { approvers } = assessmentDetails;\n let approversDetails: OneTrustGetUserResponse[][] = [];\n if (approvers.length > 0) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching approvers...`,\n );\n approversDetails = await map(\n approvers.map(({ id }) => id),\n async (userId) => {\n try {\n let approver = oneTrustCachedUsers[userId];\n if (!approver) {\n approver = await getOneTrustUser({ oneTrust, userId });\n oneTrustCachedUsers[userId] = approver;\n }\n return [approver];\n } catch (e) {\n logger.warn(\n colors.yellow(\n `[assessment ${assessmentNumber} of ${assessments.length}]: failed to fetch a form approver.` +\n `\\tapproverId: ${userId}. Assessment Title: ${assessment.name}. Template Title: ${templateName}`,\n ),\n );\n return [];\n }\n },\n { concurrency: 5 },\n );\n }\n\n // fetch assessment internal respondents information\n const { respondents } = assessmentDetails;\n // if a user is an internal respondents, their 'name' field can't be an email.\n const internalRespondents = respondents.filter(\n (r) => !r.name.includes('@'),\n );\n let respondentsDetails: OneTrustGetUserResponse[][] = [];\n if (internalRespondents.length > 0) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching respondents...`,\n );\n respondentsDetails = await map(\n internalRespondents.map(({ id }) => id),\n async (userId) => {\n try {\n let respondent = oneTrustCachedUsers[userId];\n if (!respondent) {\n respondent = await getOneTrustUser({ oneTrust, userId });\n oneTrustCachedUsers[userId] = respondent;\n }\n return [respondent];\n } catch (e) {\n logger.warn(\n colors.yellow(\n `[assessment ${assessmentNumber} of ${assessments.length}]: failed to fetch a respondent.` +\n `\\trespondentId: ${userId}. Assessment Title: ${assessment.name}. Template Title: ${templateName}`,\n ),\n );\n return [];\n }\n },\n { concurrency: 5 },\n );\n }\n\n // fetch assessment risk information\n let riskDetails: OneTrustGetRiskResponse[] = [];\n const riskIds = uniq(\n assessmentDetails.sections.flatMap((s: OneTrustAssessmentSection) =>\n s.questions.flatMap((q: OneTrustAssessmentQuestion) =>\n (q.risks ?? 
[]).flatMap((r) => r.riskId),\n ),\n ),\n );\n if (riskIds.length > 0) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching risks...`,\n );\n riskDetails = await map(\n riskIds,\n (riskId) => getOneTrustRisk({ oneTrust, riskId: riskId as string }),\n {\n concurrency: 5,\n },\n );\n }\n\n // enrich the assessments with user and risk details\n const enrichedAssessment = enrichOneTrustAssessment({\n assessment,\n assessmentDetails,\n riskDetails,\n creatorDetails: creator,\n approversDetails: approversDetails.flat(),\n respondentsDetails: respondentsDetails.flat(),\n });\n\n batchEnrichedAssessments.push(enrichedAssessment);\n },\n { concurrency: BATCH_SIZE },\n );\n\n // sync assessments in series to avoid concurrency bugs\n await mapSeries(\n batchEnrichedAssessments,\n async (enrichedAssessment, index) => {\n // the assessment's global index takes its batch into consideration\n const globalIndex = batch * BATCH_SIZE + index;\n\n if (dryRun && file) {\n // sync to file\n syncOneTrustAssessmentToDisk({\n assessment: enrichedAssessment,\n index: globalIndex,\n total: assessments.length,\n file,\n });\n } else if (transcend) {\n // sync to transcend\n await syncOneTrustAssessmentToTranscend({\n assessment: enrichedAssessment,\n transcend,\n total: assessments.length,\n index: globalIndex,\n });\n }\n },\n );\n });\n};\n","import { decodeCodec } from '@transcend-io/type-utils';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport JSONStream from 'JSONStream';\n\nimport { createReadStream } from 'node:fs';\nimport { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\nimport { syncOneTrustAssessmentToTranscend } from './syncOneTrustAssessmentToTranscend';\nimport { GraphQLClient } from 'graphql-request';\n\n/**\n * Reads assessments from a file and syncs them to Transcend.\n *\n * @param param - the information about the source file and Transcend instance to write them to.\n */\nexport const syncOneTrustAssessmentsFromFile = ({\n transcend,\n file,\n}: {\n /** the Transcend client instance */\n transcend: GraphQLClient;\n /** The name of the file from which to read the OneTrust assessments */\n file: string;\n}): Promise<void> => {\n logger.info(`Getting list of all assessments from file ${file}...`);\n\n return new Promise((resolve, reject) => {\n // Create a readable stream from the file\n const fileStream = createReadStream(file, {\n encoding: 'utf-8',\n highWaterMark: 64 * 1024, // 64KB chunks\n });\n\n // Create a JSONStream parser to parse the array of OneTrust assessments from the file\n const parser = JSONStream.parse('*'); // '*' matches each element in the root array\n\n let index = 0;\n\n // Pipe the file stream into the JSON parser\n fileStream.pipe(parser);\n\n // Handle each parsed assessment object\n parser.on('data', async (assessment) => {\n try {\n // Pause the stream while processing to avoid overwhelming memory\n parser.pause();\n\n // Decode and validate the assessment\n const parsedAssessment = decodeCodec(\n OneTrustEnrichedAssessment,\n assessment,\n );\n\n // Sync the assessment to transcend\n await syncOneTrustAssessmentToTranscend({\n assessment: parsedAssessment,\n transcend,\n index,\n });\n\n index += 1;\n\n // Resume the stream after processing\n parser.resume();\n } catch (e) {\n // if failed to parse a line, report error and continue\n logger.error(\n colors.red(\n `Failed to parse the assessment ${index} from file '${file}': ${e.message}.`,\n ),\n );\n }\n });\n\n // Handle completion\n 
parser.on('end', () => {\n logger.info(`Finished processing ${index} assessments from file ${file}`);\n resolve();\n });\n\n // Handle stream or parsing errors\n parser.on('error', (error) => {\n logger.error(\n colors.red(`Error parsing file '${file}': ${error.message}`),\n );\n reject(error);\n });\n\n fileStream.on('error', (error) => {\n logger.error(\n colors.red(`Error reading file '${file}': ${error.message}`),\n );\n reject(error);\n });\n });\n};\n","import type { LocalContext } from '../../../context';\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { createOneTrustGotInstance } from '../../../lib/oneTrust';\nimport {\n OneTrustFileFormat,\n OneTrustPullResource,\n OneTrustPullSource,\n} from '../../../enums';\nimport { buildTranscendGraphQLClient } from '../../../lib/graphql';\nimport {\n syncOneTrustAssessmentsFromFile,\n syncOneTrustAssessmentsFromOneTrust,\n} from '../../../lib/oneTrust/helpers';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n// Command flag interface\nexport interface SyncOtCommandFlags {\n hostname?: string;\n oneTrustAuth?: string;\n source: OneTrustPullSource;\n transcendAuth?: string;\n transcendUrl: string;\n file?: string;\n resource: OneTrustPullResource;\n dryRun: boolean;\n debug: boolean;\n}\n\n// Command implementation\nexport async function syncOt(\n this: LocalContext,\n {\n hostname,\n oneTrustAuth,\n source,\n transcendAuth,\n transcendUrl,\n resource,\n file,\n dryRun,\n debug,\n }: SyncOtCommandFlags,\n): Promise<void> {\n // Must be able to authenticate to transcend to sync resources to it\n if (!dryRun && !transcendAuth) {\n throw new Error(\n // eslint-disable-next-line no-template-curly-in-string\n 'Must specify a \"transcendAuth\" parameter to sync resources to Transcend. e.g. --transcendAuth=${TRANSCEND_API_KEY}',\n );\n }\n\n // If trying to sync to disk, must specify a file path\n if (dryRun && !file) {\n throw new Error(\n 'Must set a \"file\" parameter when \"dryRun\" is \"true\". e.g. --file=./oneTrustAssessments.json',\n );\n }\n\n if (file) {\n const splitFile = file.split('.');\n if (splitFile.length < 2) {\n throw new Error(\n 'The \"file\" parameter has an invalid format. Expected a path with extensions. e.g. --file=./pathToFile.json.',\n );\n }\n if (splitFile.at(-1) !== OneTrustFileFormat.Json) {\n throw new Error(\n `Expected the format of the \"file\" parameters '${file}' to be '${\n OneTrustFileFormat.Json\n }', but got '${splitFile.at(-1)}'.`,\n );\n }\n }\n\n // if reading assessments from a OneTrust\n if (source === OneTrustPullSource.OneTrust) {\n // must specify the OneTrust hostname\n if (!hostname) {\n throw new Error(\n 'Missing required parameter \"hostname\". e.g. --hostname=customer.my.onetrust.com',\n );\n }\n // must specify the OneTrust auth\n if (!oneTrustAuth) {\n throw new Error(\n 'Missing required parameter \"oneTrustAuth\". e.g. --oneTrustAuth=$ONE_TRUST_AUTH_TOKEN',\n );\n }\n } else {\n // if reading the assessments from a file, must specify a file to read from\n if (!file) {\n throw new Error(\n 'Must specify a \"file\" parameter to read the OneTrust assessments from. e.g. --source=./oneTrustAssessments.json',\n );\n }\n\n // Cannot try reading from file and save assessments to a file simultaneously\n if (dryRun) {\n throw new Error(\n 'Cannot read and write to a file simultaneously.' 
+\n ` Emit the \"source\" parameter or set it to ${OneTrustPullSource.OneTrust} if \"dryRun\" is enabled.`,\n );\n }\n }\n\n doneInputValidation(this.process.exit);\n\n // instantiate a client to talk to OneTrust\n const oneTrust =\n hostname && oneTrustAuth\n ? createOneTrustGotInstance({\n hostname,\n auth: oneTrustAuth,\n })\n : undefined;\n\n // instantiate a client to talk to Transcend\n const transcend =\n transcendUrl && transcendAuth\n ? buildTranscendGraphQLClient(transcendUrl, transcendAuth)\n : undefined;\n\n try {\n if (resource === OneTrustPullResource.Assessments) {\n if (source === OneTrustPullSource.OneTrust && oneTrust) {\n await syncOneTrustAssessmentsFromOneTrust({\n oneTrust,\n file,\n dryRun,\n ...(transcend && { transcend }),\n });\n } else if (source === OneTrustPullSource.File && file && transcend) {\n await syncOneTrustAssessmentsFromFile({ file, transcend });\n }\n }\n } catch (err) {\n throw new Error(\n `An error occurred syncing the resource ${resource} from OneTrust: ${\n debug ? err.stack : err.message\n }`,\n );\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced OneTrust ${resource} to ${\n dryRun ? `disk at \"${file}\"` : 'Transcend'\n }!`,\n ),\n );\n}\n"],"mappings":"ijBAQA,MAAa,GAA6B,CACxC,WACA,UAOA,EAAA,QAAI,OAAO,CACT,UAAW,WAAW,IACtB,QAAS,CACP,OAAQ,mBACR,eAAgB,mBAChB,cAAe,UAAU,IAC1B,CACF,CAAC,CCfE,EAAkB,OAAO,OAAOA,EAAAA,EAAqB,CCD9C,GAA4B,CACvC,aACA,QACA,QACA,OAAO,MAUK,CACZ,IAAI,EAAY,IAEZ,IAAU,GAAK,KACjB,EAAY;GAGd,IAAM,EAAwB,KAAK,UAAU,EAAW,CAGlD,EAAQ,GAAS,EAAQ,EAAQ,GAAK,CAAC,EAAO,IAAM,GAU1D,MAPA,GAAY,GAAG,EAAY,EAAwB,EAAM,KAGpD,GAAS,IAAU,EAAQ,GAAM,KACpC,GAAa;IAGR,GC9BI,GAAgC,CAC3C,OACA,aACA,QACA,WAUU,CACV,EAAA,EAAO,KACL,EAAA,QAAO,QACL,+BACE,EAAQ,EACT,MAAM,EAAM,YAAY,EAAK,MAC/B,CACF,CAEG,IAAU,EACZ,EAAA,QAAG,cACD,EACA,EAAyB,CACvB,aACA,QACA,QACA,KAAM,GACP,CAAC,CACH,CAED,EAAA,QAAG,eACD,EACA,EAAyB,CACvB,aACA,QACA,QACA,KAAM,GACP,CAAC,CACH,ECvCQ,EAA+B,MAAO,CACjD,cAImC,CACnC,IAAI,EAAc,EACd,EAAa,EACb,EAAgB,EAEd,EAAuC,EAAE,CAE/C,KAAO,EAAc,GAAY,CAC/B,GAAM,CAAE,QAAS,MAAM,EAAS,IAC9B,sCAAsC,EAAY,YACnD,CAEK,CAAE,OAAM,YAAA,EAAA,EAAA,aACZC,EAAAA,qCACA,EACD,CACD,EAAe,KAAK,GAAI,GAAW,EAAE,CAAE,CACnC,IAAgB,IAClB,EAAa,GAAM,YAAc,EACjC,EAAgB,GAAM,eAAiB,GAEzC,GAAe,EAGf,EAAA,EAAO,KACL,WAAW,EAAe,OAAO,MAAM,EAAc,eACtD,CAGH,OAAO,GCtCI,EAAwB,MAAO,CAC1C,WACA,kBAM4C,CAC5C,GAAM,CAAE,QAAS,MAAM,EAAS,IAC9B,iCAAiC,EAAa,uCAC/C,CAED,OAAA,EAAA,EAAA,aAAmBC,EAAAA,8BAA+B,EAAK,ECb5C,EAAkB,MAAO,CACpC,WACA,YAMsC,CACtC,GAAM,CAAE,QAAS,MAAM,EAAS,IAAI,qBAAqB,IAAS,CAElE,OAAA,EAAA,EAAA,aAAmBC,EAAAA,wBAAyB,EAAK,ECXtC,EAAkB,MAAO,CACpC,WACA,YAMsC,CACtC,GAAM,CAAE,QAAS,MAAM,EAAS,IAAI,qBAAqB,IAAS,CAElE,OAAA,EAAA,EAAA,aAAmBC,EAAAA,wBAAyB,EAAK,ECPtC,GAA4B,CACvC,aACA,oBACA,cACA,iBACA,mBACA,wBAcgC,CAChC,IAAM,EAAkBC,EAAAA,EAAM,EAAa,KAAK,CAC1C,CAAE,WAAU,YAAW,GAAG,GAA0B,EACpD,EAA2B,EAAS,IAAK,GAAY,CACzD,GAAM,CAAE,YAAW,GAAG,GAAgB,EAChC,EAAoB,EAAU,IAAK,GAAa,CACpD,GAAM,CAAE,QAAO,GAAG,GAAiB,EAC7B,GAAiB,GAAS,EAAE,EAAE,IAAK,GAAS,CAChD,IAAM,EAAU,EAAgB,EAAK,QACrC,MAAO,CACL,GAAG,EACH,GAAG,EACH,MAAO,EAAK,MACZ,YAAa,EAAK,aAAe,EAClC,EACD,CACF,MAAO,CACL,GAAG,EACH,MAAO,EACR,EACD,CACF,MAAO,CACL,GAAG,EACH,UAAW,EACZ,EACD,CAGI,EAAoB,CACxB,GAAG,EACH,OAAQ,GAAgB,QAAU,GAClC,SAAU,GAAgB,UAAY,WACtC,OAAQ,GAAgB,QAAU,EAAE,CACpC,MAAO,GAAgB,OAAS,KAChC,UAAW,GAAgB,KAAK,WAAa,KAC7C,WAAY,GAAgB,KAAK,YAAc,KAChD,CAGK,EAAsBA,EAAAA,EAAM,EAAkB,KAAK,CACnD,EAAoB,EAAkB,UAAU,QACnD,GACC,EAAoB,EAAiB,IACjC,CACE,CACE,GAAG,EACH,SAAU,CACR,GAAG,EAAiB,SACpB,OAAQ,EAAoB,EAAiB,IAAI,OACjD,SAAU,EAAoB,EAAiB,IAAI,SACnD,OAAQ,EAAoB,EAAiB,IAAI,OACjD,MAAO,
EAAoB,EAAiB,IAAI,MAChD,UACE,EAAoB,EAAiB,IAAI,KAAK,WAC9C,KACF,WACE,EAAoB,EAAiB,IAAI,KAAK,YAC9C,KACH,CACF,CACF,CACD,EAAE,CACT,CAGK,EAAyBA,EAAAA,EAAM,EAAoB,KAAK,CACxD,EAAsB,EAAkB,YAC3C,OAAQ,GAAM,CAAC,EAAE,KAAK,SAAS,IAAI,CAAC,CACpC,QAAS,GACR,EAAuB,EAAW,IAC9B,CACE,CACE,GAAG,EACH,OAAQ,EAAuB,EAAW,IAAI,OAC9C,SAAU,EAAuB,EAAW,IAAI,SAChD,OAAQ,EAAuB,EAAW,IAAI,OAC9C,MAAO,EAAuB,EAAW,IAAI,MAC7C,UACE,EAAuB,EAAW,IAAI,KAAK,WAAa,KAC1D,WACE,EAAuB,EAAW,IAAI,KAAK,YAAc,KAC5D,CACF,CACD,EAAE,CACP,CAGH,MAAO,CACL,GAAG,EACH,GAAG,EACH,UAAW,EACX,YAAa,EACb,UAAW,EACX,SAAU,EACX,ECzGU,EAAoC,MAAO,CACtD,YACA,aACA,QACA,WAUmB,CACnB,EAAA,EAAO,KACL,EAAA,QAAO,QACL,+BAA+B,EAAQ,EAAE,GACvC,EAAQ,MAAM,EAAM,GAAK,IAC1B,iBACF,CACF,CAUD,IAAM,EAAwC,CAC5C,KARW,EAAyB,CACpC,aACA,QACA,QACD,CAAC,CAKD,CAED,GAAI,CACF,MAAMC,EAAAA,EAMH,EAAWC,EAAAA,GAAmC,CAC/C,QACD,CAAC,MACQ,CACV,EAAA,EAAO,MACL,EAAA,QAAO,IACL,6BAA6B,EAAQ,EAAE,GACrC,EAAQ,MAAM,EAAM,GAAK,IAC1B,qCACwB,EAAW,KAAK,oBAAoB,EAAW,SAAS,KAAK,IACvF,CACF,GC1CQ,EAAsC,MAAO,CACxD,WACA,OACA,SACA,eAUmB,CAEnB,EAAA,EAAO,KAAK,mDAAmD,CAC/D,IAAM,EAAc,MAAM,EAA6B,CAAE,WAAU,CAAC,CAG9D,EAA+D,EAAE,CAYvE,MAAMC,EAAAA,GARoB,MAAM,KAC9B,CACE,OAAQ,KAAK,KAAK,EAAY,OAAS,EAAW,CACnD,EACA,EAAG,IAAM,EAAY,MAAM,EAAI,GAAa,EAAI,GAAK,EAAW,CAClE,CAGkC,MAAO,EAAiB,IAAU,CACnE,IAAM,EAAyD,EAAE,CAGjE,MAAMC,EAAAA,GACJ,EACA,MAAO,EAAY,IAAU,CAC3B,IAAM,EAAmB,EAAa,EAAQ,EAAQ,EACtD,EAAA,EAAO,KACL,eAAe,EAAiB,MAAM,EAAY,OAAO,wBAC1D,CACD,GAAM,CAAE,eAAc,gBAAiB,EACjC,EAAoB,MAAM,EAAsB,CACpD,WACA,eACD,CAAC,CAEI,EAAY,EAAkB,UAAU,GAC1C,EAAU,EAAoB,GAClC,GAAI,CAAC,EAAS,CACZ,EAAA,EAAO,KACL,eAAe,EAAiB,MAAM,EAAY,OAAO,wBAC1D,CACD,GAAI,CACF,EAAU,MAAM,EAAgB,CAC9B,WACA,OAAQ,EACT,CAAC,CACF,EAAoB,GAAa,OACvB,CACV,EAAA,EAAO,KACL,EAAA,QAAO,OACL,eAAe,EAAiB,MAAM,EAAY,OAAO,+CACvC,EAAU,sBAAsB,EAAW,KAAK,oBAAoB,IACvF,CACF,EAKL,GAAM,CAAE,aAAc,EAClB,EAAgD,EAAE,CAClD,EAAU,OAAS,IACrB,EAAA,EAAO,KACL,eAAe,EAAiB,MAAM,EAAY,OAAO,0BAC1D,CACD,EAAmB,MAAMA,EAAAA,GACvB,EAAU,KAAK,CAAE,QAAS,EAAG,CAC7B,KAAO,IAAW,CAChB,GAAI,CACF,IAAI,EAAW,EAAoB,GAKnC,OAJK,IACH,EAAW,MAAM,EAAgB,CAAE,WAAU,SAAQ,CAAC,CACtD,EAAoB,GAAU,GAEzB,CAAC,EAAS,MACP,CAOV,OANA,EAAA,EAAO,KACL,EAAA,QAAO,OACL,eAAe,EAAiB,MAAM,EAAY,OAAO,mDACtC,EAAO,sBAAsB,EAAW,KAAK,oBAAoB,IACrF,CACF,CACM,EAAE,GAGb,CAAE,YAAa,EAAG,CACnB,EAIH,GAAM,CAAE,eAAgB,EAElB,EAAsB,EAAY,OACrC,GAAM,CAAC,EAAE,KAAK,SAAS,IAAI,CAC7B,CACG,EAAkD,EAAE,CACpD,EAAoB,OAAS,IAC/B,EAAA,EAAO,KACL,eAAe,EAAiB,MAAM,EAAY,OAAO,4BAC1D,CACD,EAAqB,MAAMA,EAAAA,GACzB,EAAoB,KAAK,CAAE,QAAS,EAAG,CACvC,KAAO,IAAW,CAChB,GAAI,CACF,IAAI,EAAa,EAAoB,GAKrC,OAJK,IACH,EAAa,MAAM,EAAgB,CAAE,WAAU,SAAQ,CAAC,CACxD,EAAoB,GAAU,GAEzB,CAAC,EAAW,MACT,CAOV,OANA,EAAA,EAAO,KACL,EAAA,QAAO,OACL,eAAe,EAAiB,MAAM,EAAY,OAAO,kDACpC,EAAO,sBAAsB,EAAW,KAAK,oBAAoB,IACvF,CACF,CACM,EAAE,GAGb,CAAE,YAAa,EAAG,CACnB,EAIH,IAAI,EAAyC,EAAE,CACzC,EAAUC,EAAAA,GACd,EAAkB,SAAS,QAAS,GAClC,EAAE,UAAU,QAAS,IAClB,EAAE,OAAS,EAAE,EAAE,QAAS,GAAM,EAAE,OAAO,CACzC,CACF,CACF,CACG,EAAQ,OAAS,IACnB,EAAA,EAAO,KACL,eAAe,EAAiB,MAAM,EAAY,OAAO,sBAC1D,CACD,EAAc,MAAMD,EAAAA,GAClB,EACC,GAAW,EAAgB,CAAE,WAAkB,SAAkB,CAAC,CACnE,CACE,YAAa,EACd,CACF,EAIH,IAAM,EAAqB,EAAyB,CAClD,aACA,oBACA,cACA,eAAgB,EAChB,iBAAkB,EAAiB,MAAM,CACzC,mBAAoB,EAAmB,MAAM,CAC9C,CAAC,CAEF,EAAyB,KAAK,EAAmB,EAEnD,CAAE,YAAa,EAAY,CAC5B,CAGD,MAAMD,EAAAA,GACJ,EACA,MAAO,EAAoB,IAAU,CAEnC,IAAM,EAAc,EAAQ,EAAa,EAErC,GAAU,EAEZ,EAA6B,CAC3B,WAAY,EACZ,MAAO,EACP,MAAO,EAAY,OACnB,OACD,CAAC,CACO,GAET,MAAM,EAAkC,CACtC,WAAY,EACZ,YACA,MAAO,EAAY,OACnB,MAAO,EACR,CAAC,EAGP,EACD,EC5NS,GAAmC,CAC9C,YACA,WAOA,EAAA,EAAO,KAAK,6CAA6C,EAAK,KAAK,CAE5D,IAAI,SAAS,EAAS,IAAW,CAEtC,IAAM,GAAA,EAAA,EAAA,kBAA8B,EAAM,CACxC,SAAU,QACV,cAA
e,GAAK,KACrB,CAAC,CAGI,EAAS,EAAA,QAAW,MAAM,IAAI,CAEhC,EAAQ,EAGZ,EAAW,KAAK,EAAO,CAGvB,EAAO,GAAG,OAAQ,KAAO,IAAe,CACtC,GAAI,CAEF,EAAO,OAAO,CASd,MAAM,EAAkC,CACtC,YAAA,EAAA,EAAA,aANAG,EAAAA,2BACA,EACD,CAKC,YACA,QACD,CAAC,CAEF,GAAS,EAGT,EAAO,QAAQ,OACR,EAAG,CAEV,EAAA,EAAO,MACL,EAAA,QAAO,IACL,kCAAkC,EAAM,cAAc,EAAK,KAAK,EAAE,QAAQ,GAC3E,CACF,GAEH,CAGF,EAAO,GAAG,UAAa,CACrB,EAAA,EAAO,KAAK,uBAAuB,EAAM,yBAAyB,IAAO,CACzE,GAAS,EACT,CAGF,EAAO,GAAG,QAAU,GAAU,CAC5B,EAAA,EAAO,MACL,EAAA,QAAO,IAAI,uBAAuB,EAAK,KAAK,EAAM,UAAU,CAC7D,CACD,EAAO,EAAM,EACb,CAEF,EAAW,GAAG,QAAU,GAAU,CAChC,EAAA,EAAO,MACL,EAAA,QAAO,IAAI,uBAAuB,EAAK,KAAK,EAAM,UAAU,CAC7D,CACD,EAAO,EAAM,EACb,EACF,EChEJ,eAAsB,EAEpB,CACE,WACA,eACA,SACA,gBACA,eACA,WACA,OACA,SACA,SAEa,CAEf,GAAI,CAAC,GAAU,CAAC,EACd,MAAU,MAER,qHACD,CAIH,GAAI,GAAU,CAAC,EACb,MAAU,MACR,8FACD,CAGH,GAAI,EAAM,CACR,IAAM,EAAY,EAAK,MAAM,IAAI,CACjC,GAAI,EAAU,OAAS,EACrB,MAAU,MACR,8GACD,CAEH,GAAI,EAAU,GAAG,GAAG,GAAKC,EAAAA,EAAmB,KAC1C,MAAU,MACR,iDAAiD,EAAK,WACpDA,EAAAA,EAAmB,KACpB,cAAc,EAAU,GAAG,GAAG,CAAC,IACjC,CAKL,GAAI,IAAWC,EAAAA,EAAmB,SAAU,CAE1C,GAAI,CAAC,EACH,MAAU,MACR,kFACD,CAGH,GAAI,CAAC,EACH,MAAU,MACR,uFACD,KAEE,CAEL,GAAI,CAAC,EACH,MAAU,MACR,kHACD,CAIH,GAAI,EACF,MAAU,MACR,4FAC+CA,EAAAA,EAAmB,SAAS,0BAC5E,CAIL,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EACJ,GAAY,EACR,EAA0B,CACxB,WACA,KAAM,EACP,CAAC,CACF,IAAA,GAGA,EACJ,GAAgB,EACZC,EAAAA,GAA4B,EAAc,EAAc,CACxD,IAAA,GAEN,GAAI,CACE,IAAaC,EAAAA,EAAqB,cAChC,IAAWF,EAAAA,EAAmB,UAAY,EAC5C,MAAM,EAAoC,CACxC,WACA,OACA,SACA,GAAI,GAAa,CAAE,YAAW,CAC/B,CAAC,CACO,IAAWA,EAAAA,EAAmB,MAAQ,GAAQ,GACvD,MAAM,EAAgC,CAAE,OAAM,YAAW,CAAC,QAGvD,EAAK,CACZ,MAAU,MACR,0CAA0C,EAAS,kBACjD,EAAQ,EAAI,MAAQ,EAAI,UAE3B,CAIH,EAAA,EAAO,KACL,EAAA,QAAO,MACL,gCAAgC,EAAS,MACvC,EAAS,YAAY,EAAK,GAAK,YAChC,GACF,CACF"}