@transcend-io/cli 8.25.3 → 8.26.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (126)
  1. package/README.md +53 -13
  2. package/dist/bin/bash-complete.cjs +1 -1
  3. package/dist/bin/cli.cjs +1 -1
  4. package/dist/bin/deprecated-command.cjs +2 -2
  5. package/dist/{chunk-HPERBM2R.cjs → chunk-2MHJ5LZ7.cjs} +2 -2
  6. package/dist/{chunk-HPERBM2R.cjs.map → chunk-2MHJ5LZ7.cjs.map} +1 -1
  7. package/dist/{chunk-EDVVHIFK.cjs → chunk-5TKJLZA2.cjs} +2 -2
  8. package/dist/{chunk-EDVVHIFK.cjs.map → chunk-5TKJLZA2.cjs.map} +1 -1
  9. package/dist/{chunk-FIBHN6CY.cjs → chunk-AZ4GTUNR.cjs} +22 -22
  10. package/dist/chunk-AZ4GTUNR.cjs.map +1 -0
  11. package/dist/{chunk-HJTYLFGJ.cjs → chunk-FTXFBDFW.cjs} +2 -2
  12. package/dist/{chunk-HJTYLFGJ.cjs.map → chunk-FTXFBDFW.cjs.map} +1 -1
  13. package/dist/{chunk-GLXV5XPP.cjs → chunk-K65NYGJM.cjs} +165 -162
  14. package/dist/chunk-K65NYGJM.cjs.map +1 -0
  15. package/dist/{chunk-FGBTRT4J.cjs → chunk-KOXU6674.cjs} +2 -2
  16. package/dist/{chunk-FGBTRT4J.cjs.map → chunk-KOXU6674.cjs.map} +1 -1
  17. package/dist/{chunk-GDPRENOQ.cjs → chunk-L32BTN2M.cjs} +4 -4
  18. package/dist/{chunk-GDPRENOQ.cjs.map → chunk-L32BTN2M.cjs.map} +1 -1
  19. package/dist/{chunk-DV57HBVO.cjs → chunk-L5QUUSVB.cjs} +4 -4
  20. package/dist/{chunk-DV57HBVO.cjs.map → chunk-L5QUUSVB.cjs.map} +1 -1
  21. package/dist/{chunk-UMAN3VT7.cjs → chunk-QAKUAPPO.cjs} +2 -2
  22. package/dist/{chunk-UMAN3VT7.cjs.map → chunk-QAKUAPPO.cjs.map} +1 -1
  23. package/dist/chunk-SONNKP33.cjs +12 -0
  24. package/dist/chunk-SONNKP33.cjs.map +1 -0
  25. package/dist/{chunk-C3ZJMA3A.cjs → chunk-U2XRCT7A.cjs} +2 -2
  26. package/dist/{chunk-C3ZJMA3A.cjs.map → chunk-U2XRCT7A.cjs.map} +1 -1
  27. package/dist/{chunk-7YR2TG7U.cjs → chunk-VEU5LPA6.cjs} +2 -2
  28. package/dist/{chunk-7YR2TG7U.cjs.map → chunk-VEU5LPA6.cjs.map} +1 -1
  29. package/dist/{chunk-5MIDKBL7.cjs → chunk-WGCFVEJZ.cjs} +2 -2
  30. package/dist/{chunk-5MIDKBL7.cjs.map → chunk-WGCFVEJZ.cjs.map} +1 -1
  31. package/dist/{chunk-G6QNUBBK.cjs → chunk-XN6WUCF4.cjs} +2 -2
  32. package/dist/{chunk-G6QNUBBK.cjs.map → chunk-XN6WUCF4.cjs.map} +1 -1
  33. package/dist/{impl-GRQJMVJE.cjs → impl-2CF2PKFH.cjs} +2 -2
  34. package/dist/{impl-GRQJMVJE.cjs.map → impl-2CF2PKFH.cjs.map} +1 -1
  35. package/dist/{impl-DRHYZQSB.cjs → impl-2XEHL54N.cjs} +2 -2
  36. package/dist/{impl-DRHYZQSB.cjs.map → impl-2XEHL54N.cjs.map} +1 -1
  37. package/dist/{impl-FJO7BPTB.cjs → impl-35DXHKNE.cjs} +2 -2
  38. package/dist/{impl-FJO7BPTB.cjs.map → impl-35DXHKNE.cjs.map} +1 -1
  39. package/dist/{impl-QYNAMOYS.cjs → impl-3BLJB5VJ.cjs} +2 -2
  40. package/dist/{impl-QYNAMOYS.cjs.map → impl-3BLJB5VJ.cjs.map} +1 -1
  41. package/dist/impl-3TTJGFRK.cjs +2 -0
  42. package/dist/{impl-O742JPYW.cjs.map → impl-3TTJGFRK.cjs.map} +1 -1
  43. package/dist/impl-43MGE5WS.cjs +2 -0
  44. package/dist/{impl-C3XI35SQ.cjs.map → impl-43MGE5WS.cjs.map} +1 -1
  45. package/dist/{impl-T7EXOBCA.cjs → impl-4GZIUG3H.cjs} +3 -3
  46. package/dist/{impl-T7EXOBCA.cjs.map → impl-4GZIUG3H.cjs.map} +1 -1
  47. package/dist/{impl-4MW5YXMP.cjs → impl-4VNXB52F.cjs} +2 -2
  48. package/dist/{impl-4MW5YXMP.cjs.map → impl-4VNXB52F.cjs.map} +1 -1
  49. package/dist/{impl-5FBCF5HY.cjs → impl-5PPLTGRY.cjs} +2 -2
  50. package/dist/{impl-5FBCF5HY.cjs.map → impl-5PPLTGRY.cjs.map} +1 -1
  51. package/dist/{impl-BPFDJMJY.cjs → impl-7TIKA2O6.cjs} +2 -2
  52. package/dist/{impl-BPFDJMJY.cjs.map → impl-7TIKA2O6.cjs.map} +1 -1
  53. package/dist/{impl-X6RMXDJS.cjs → impl-BEVLBZ2F.cjs} +2 -2
  54. package/dist/{impl-X6RMXDJS.cjs.map → impl-BEVLBZ2F.cjs.map} +1 -1
  55. package/dist/{impl-QTTAMUHI.cjs → impl-CCY7BINI.cjs} +2 -2
  56. package/dist/{impl-QTTAMUHI.cjs.map → impl-CCY7BINI.cjs.map} +1 -1
  57. package/dist/{impl-22JGFXXK.cjs → impl-DEQW3OGQ.cjs} +2 -2
  58. package/dist/{impl-22JGFXXK.cjs.map → impl-DEQW3OGQ.cjs.map} +1 -1
  59. package/dist/{impl-IQVMZJD7.cjs → impl-F2UNSOIO.cjs} +2 -2
  60. package/dist/{impl-IQVMZJD7.cjs.map → impl-F2UNSOIO.cjs.map} +1 -1
  61. package/dist/{impl-M6JBOLVE.cjs → impl-H3J7N77A.cjs} +2 -2
  62. package/dist/{impl-M6JBOLVE.cjs.map → impl-H3J7N77A.cjs.map} +1 -1
  63. package/dist/{impl-YM2OKSJ4.cjs → impl-HZJIVXWT.cjs} +2 -2
  64. package/dist/{impl-YM2OKSJ4.cjs.map → impl-HZJIVXWT.cjs.map} +1 -1
  65. package/dist/{impl-DVNUPZK7.cjs → impl-IQCQDDW7.cjs} +2 -2
  66. package/dist/{impl-DVNUPZK7.cjs.map → impl-IQCQDDW7.cjs.map} +1 -1
  67. package/dist/{impl-I6NMF5QP.cjs → impl-J3FUD3MS.cjs} +5 -5
  68. package/dist/{impl-I6NMF5QP.cjs.map → impl-J3FUD3MS.cjs.map} +1 -1
  69. package/dist/{impl-KJQDZ5CD.cjs → impl-JHP4RJZ4.cjs} +2 -2
  70. package/dist/{impl-KJQDZ5CD.cjs.map → impl-JHP4RJZ4.cjs.map} +1 -1
  71. package/dist/{impl-XWVPG2AO.cjs → impl-JK3TTCKR.cjs} +2 -2
  72. package/dist/{impl-XWVPG2AO.cjs.map → impl-JK3TTCKR.cjs.map} +1 -1
  73. package/dist/{impl-XUBKDJNI.cjs → impl-JYAKR5RR.cjs} +2 -2
  74. package/dist/{impl-XUBKDJNI.cjs.map → impl-JYAKR5RR.cjs.map} +1 -1
  75. package/dist/{impl-JTLAUUYX.cjs → impl-K6MYBWAY.cjs} +2 -2
  76. package/dist/{impl-JTLAUUYX.cjs.map → impl-K6MYBWAY.cjs.map} +1 -1
  77. package/dist/{impl-VINV2XOZ.cjs → impl-KZZC7P5D.cjs} +2 -2
  78. package/dist/{impl-VINV2XOZ.cjs.map → impl-KZZC7P5D.cjs.map} +1 -1
  79. package/dist/{impl-PVIVOTA4.cjs → impl-L74WP6LX.cjs} +2 -2
  80. package/dist/{impl-PVIVOTA4.cjs.map → impl-L74WP6LX.cjs.map} +1 -1
  81. package/dist/{impl-2HC6MNNN.cjs → impl-LUKG7ED6.cjs} +2 -2
  82. package/dist/{impl-2HC6MNNN.cjs.map → impl-LUKG7ED6.cjs.map} +1 -1
  83. package/dist/{impl-52NIQWXG.cjs → impl-MENWC3FS.cjs} +2 -2
  84. package/dist/{impl-52NIQWXG.cjs.map → impl-MENWC3FS.cjs.map} +1 -1
  85. package/dist/{impl-BA6B2C6M.cjs → impl-PK3DXVLW.cjs} +2 -2
  86. package/dist/{impl-BA6B2C6M.cjs.map → impl-PK3DXVLW.cjs.map} +1 -1
  87. package/dist/impl-QVM227RD.cjs +2 -0
  88. package/dist/impl-QVM227RD.cjs.map +1 -0
  89. package/dist/{impl-4CRBCTLC.cjs → impl-S2UBMTKQ.cjs} +2 -2
  90. package/dist/{impl-4CRBCTLC.cjs.map → impl-S2UBMTKQ.cjs.map} +1 -1
  91. package/dist/{impl-PAKRFLP6.cjs → impl-S6GUFHKS.cjs} +2 -2
  92. package/dist/{impl-PAKRFLP6.cjs.map → impl-S6GUFHKS.cjs.map} +1 -1
  93. package/dist/{impl-BGBHVFXR.cjs → impl-SAWOUYGV.cjs} +2 -2
  94. package/dist/{impl-BGBHVFXR.cjs.map → impl-SAWOUYGV.cjs.map} +1 -1
  95. package/dist/{impl-IXVIQGXK.cjs → impl-SBIOFGRE.cjs} +2 -2
  96. package/dist/{impl-IXVIQGXK.cjs.map → impl-SBIOFGRE.cjs.map} +1 -1
  97. package/dist/{impl-M7XLIXX7.cjs → impl-UR7MDG5O.cjs} +2 -2
  98. package/dist/{impl-M7XLIXX7.cjs.map → impl-UR7MDG5O.cjs.map} +1 -1
  99. package/dist/{impl-MSDEMRBZ.cjs → impl-UTDGC4UZ.cjs} +2 -2
  100. package/dist/{impl-MSDEMRBZ.cjs.map → impl-UTDGC4UZ.cjs.map} +1 -1
  101. package/dist/{impl-UDUTUFUT.cjs → impl-UTJTENWY.cjs} +2 -2
  102. package/dist/{impl-UDUTUFUT.cjs.map → impl-UTJTENWY.cjs.map} +1 -1
  103. package/dist/{impl-2YWGSU4V.cjs → impl-VEW5WAK7.cjs} +3 -3
  104. package/dist/{impl-2YWGSU4V.cjs.map → impl-VEW5WAK7.cjs.map} +1 -1
  105. package/dist/{impl-RVJLQVKI.cjs → impl-W33BLTPN.cjs} +2 -2
  106. package/dist/{impl-RVJLQVKI.cjs.map → impl-W33BLTPN.cjs.map} +1 -1
  107. package/dist/{impl-6BPL3Z3G.cjs → impl-WLYI6G4A.cjs} +4 -4
  108. package/dist/{impl-6BPL3Z3G.cjs.map → impl-WLYI6G4A.cjs.map} +1 -1
  109. package/dist/{impl-5Z557CRG.cjs → impl-X5JUJPG5.cjs} +2 -2
  110. package/dist/{impl-5Z557CRG.cjs.map → impl-X5JUJPG5.cjs.map} +1 -1
  111. package/dist/{impl-XCXP4S3J.cjs → impl-XJX7GD3G.cjs} +2 -2
  112. package/dist/{impl-XCXP4S3J.cjs.map → impl-XJX7GD3G.cjs.map} +1 -1
  113. package/dist/{impl-5H7LSGMO.cjs → impl-YZ6LZ2LI.cjs} +2 -2
  114. package/dist/{impl-5H7LSGMO.cjs.map → impl-YZ6LZ2LI.cjs.map} +1 -1
  115. package/dist/index.cjs +3 -3
  116. package/dist/index.cjs.map +1 -1
  117. package/dist/index.d.cts +21 -4
  118. package/package.json +2 -1
  119. package/dist/chunk-FIBHN6CY.cjs.map +0 -1
  120. package/dist/chunk-GLXV5XPP.cjs.map +0 -1
  121. package/dist/chunk-WO3KAASS.cjs +0 -12
  122. package/dist/chunk-WO3KAASS.cjs.map +0 -1
  123. package/dist/impl-C3XI35SQ.cjs +0 -2
  124. package/dist/impl-LVGKFMBD.cjs +0 -2
  125. package/dist/impl-LVGKFMBD.cjs.map +0 -1
  126. package/dist/impl-O742JPYW.cjs +0 -2
package/dist/{impl-X6RMXDJS.cjs → impl-BEVLBZ2F.cjs}
@@ -1,2 +1,2 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkFGBTRT4Jcjs = require('./chunk-FGBTRT4J.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkGLXV5XPPcjs = require('./chunk-GLXV5XPP.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkHJTYLFGJcjs = require('./chunk-HJTYLFGJ.cjs');require('./chunk-Q7I37FJV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _bluebird = require('bluebird');async function _({file:l,fileTarget:f,transcendUrl:p,auth:g,sombraAuth:x,cronDataSiloId:v,targetDataSiloId:N,actions:R,skipRequestCount:h,pageLimit:r,chunkSize:s}){h&&_chunkZUNVPK23cjs.a.info(_colors2.default.yellow("Skipping request count as requested. This may help speed up the call.")),(Number.isNaN(s)||s<=0||s%r!==0)&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Invalid chunk size: "${s}". Must be a positive integer that is a multiple of ${r}.`)),this.process.exit(1)),_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);let S=_chunkGLXV5XPPcjs.wc.call(void 0, p,g),{baseName:j,extension:O}=_chunkGLXV5XPPcjs.ng.call(void 0, l),{baseName:A,extension:D}=_chunkGLXV5XPPcjs.ng.call(void 0, f),$=0,b=0,d=0;await _chunkFGBTRT4Jcjs.g.call(void 0, {dataSiloId:v,auth:g,sombraAuth:x,actions:R,apiPageSize:r,savePageSize:s,onSave:async o=>{$+=o.length;let k=o.map(t=>t.requestId),E=_chunkHJTYLFGJcjs.j.call(void 0, k),G=_chunkHJTYLFGJcjs.b.call(void 0, E,r),y=await _bluebird.map.call(void 0, G,async t=>(_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Fetching target identifiers for ${t.length} requests`)),(await _chunkGLXV5XPPcjs.Id.call(void 0, S,r*2,{requestIds:t,dataSiloIds:[N]})).map(({fileName:I,remoteId:C})=>{if(!C)throw new Error(`Failed to find remoteId for ${I}`);return{RecordId:C,Object:_optionalChain([I, 'access', _2 => _2.replace, 'call', _3 => _3(".json",""), 'access', _4 => _4.split, 'call', _5 => _5("/"), 'access', _6 => _6.pop, 'call', _7 => _7(), 'optionalAccess', _8 => _8.replace, 'call', _9 => _9(" Information","")]),Comment:"Customer data deletion request submitted via transcend.io"}})),{concurrency:1});b+=y.flat().length;let L=_chunkHJTYLFGJcjs.j.call(void 0, o.map(t=>Object.keys(t)).flat()),M=`${j}-${d}${O}`,Q=`${A}-${d}${D}`;await _chunkGLXV5XPPcjs.og.call(void 0, M,o,L),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully wrote ${o.length} identifiers to file "${l}"`));let n=y.flat(),T=_chunkHJTYLFGJcjs.j.call(void 0, n.map(t=>Object.keys(t)).flat());await _chunkGLXV5XPPcjs.og.call(void 0, Q,n,T),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully wrote ${n.length} identifiers to file "${f}"`)),_chunkZUNVPK23cjs.a.info(_colors2.default.blue(`Processed chunk of ${_chunkHJTYLFGJcjs.b.length} identifiers, found ${n.length} target 
identifiers`)),d+=1},transcendUrl:p,skipRequestCount:h}),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully wrote ${$} identifiers to file "${l}"`)),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully wrote ${b} identifiers to file "${f}"`))}exports.pullProfiles = _;
- //# sourceMappingURL=impl-X6RMXDJS.cjs.map
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkKOXU6674cjs = require('./chunk-KOXU6674.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkK65NYGJMcjs = require('./chunk-K65NYGJM.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkFTXFBDFWcjs = require('./chunk-FTXFBDFW.cjs');require('./chunk-Q7I37FJV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _bluebird = require('bluebird');async function _({file:l,fileTarget:f,transcendUrl:p,auth:g,sombraAuth:x,cronDataSiloId:v,targetDataSiloId:N,actions:R,skipRequestCount:h,pageLimit:r,chunkSize:s}){h&&_chunkZUNVPK23cjs.a.info(_colors2.default.yellow("Skipping request count as requested. This may help speed up the call.")),(Number.isNaN(s)||s<=0||s%r!==0)&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Invalid chunk size: "${s}". Must be a positive integer that is a multiple of ${r}.`)),this.process.exit(1)),_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);let S=_chunkK65NYGJMcjs.wc.call(void 0, p,g),{baseName:j,extension:O}=_chunkK65NYGJMcjs.pg.call(void 0, l),{baseName:A,extension:D}=_chunkK65NYGJMcjs.pg.call(void 0, f),$=0,b=0,d=0;await _chunkKOXU6674cjs.g.call(void 0, {dataSiloId:v,auth:g,sombraAuth:x,actions:R,apiPageSize:r,savePageSize:s,onSave:async o=>{$+=o.length;let k=o.map(t=>t.requestId),E=_chunkFTXFBDFWcjs.j.call(void 0, k),G=_chunkFTXFBDFWcjs.b.call(void 0, E,r),y=await _bluebird.map.call(void 0, G,async t=>(_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Fetching target identifiers for ${t.length} requests`)),(await _chunkK65NYGJMcjs.Id.call(void 0, S,r*2,{requestIds:t,dataSiloIds:[N]})).map(({fileName:I,remoteId:C})=>{if(!C)throw new Error(`Failed to find remoteId for ${I}`);return{RecordId:C,Object:_optionalChain([I, 'access', _2 => _2.replace, 'call', _3 => _3(".json",""), 'access', _4 => _4.split, 'call', _5 => _5("/"), 'access', _6 => _6.pop, 'call', _7 => _7(), 'optionalAccess', _8 => _8.replace, 'call', _9 => _9(" Information","")]),Comment:"Customer data deletion request submitted via transcend.io"}})),{concurrency:1});b+=y.flat().length;let L=_chunkFTXFBDFWcjs.j.call(void 0, o.map(t=>Object.keys(t)).flat()),M=`${j}-${d}${O}`,Q=`${A}-${d}${D}`;await _chunkK65NYGJMcjs.qg.call(void 0, M,o,L),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully wrote ${o.length} identifiers to file "${l}"`));let n=y.flat(),T=_chunkFTXFBDFWcjs.j.call(void 0, n.map(t=>Object.keys(t)).flat());await _chunkK65NYGJMcjs.qg.call(void 0, Q,n,T),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully wrote ${n.length} identifiers to file "${f}"`)),_chunkZUNVPK23cjs.a.info(_colors2.default.blue(`Processed chunk of ${_chunkFTXFBDFWcjs.b.length} identifiers, found ${n.length} target 
identifiers`)),d+=1},transcendUrl:p,skipRequestCount:h}),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully wrote ${$} identifiers to file "${l}"`)),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully wrote ${b} identifiers to file "${f}"`))}exports.pullProfiles = _;
+ //# sourceMappingURL=impl-BEVLBZ2F.cjs.map
package/dist/{impl-X6RMXDJS.cjs.map → impl-BEVLBZ2F.cjs.map}
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-X6RMXDJS.cjs","../src/commands/request/cron/pull-profiles/impl.ts"],"names":["pullProfiles","file","fileTarget","transcendUrl","auth","sombraAuth","cronDataSiloId","targetDataSiloId","actions","skipRequestCount","pageLimit","chunkSize","logger","colors","doneInputValidation","client","buildTranscendGraphQLClient","baseName","extension","parseFilePath","baseNameTarget","extensionTarget","allIdentifiersCount","allTargetIdentifiersCount","fileCount","pullChunkedCustomSiloOutstandingIdentifiers","chunkToSave","requestIds","d","uniqueRequestIds","uniq_default","chunkedRequestIds","chunk_default","results","map","fetchRequestFilesForRequest","fileName","remoteId"],"mappings":"AAAA,quBAAwC,wDAAyC,wDAAkE,gCAA6B,wDAAyC,wDAAgD,gCAA6B,gFCEnR,oCAEC,MA2BpB,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,cAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,OAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,SAAA,CAAAC,CAAAA,CACA,SAAA,CAAAC,CACF,CAAA,CACe,CACXF,CAAAA,EACFG,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,MAAA,CACL,uEACF,CACF,CAAA,CAAA,CAIA,MAAA,CAAO,KAAA,CAAMF,CAAS,CAAA,EACtBA,CAAAA,EAAa,CAAA,EACbA,CAAAA,CAAYD,CAAAA,GAAc,CAAA,CAAA,EAAA,CAE1BE,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,qBAAA,EAAwBF,CAAS,CAAA,oDAAA,EAAuDD,CAAS,CAAA,CAAA,CACnG,CACF,CAAA,CACA,IAAA,CAAK,OAAA,CAAQ,IAAA,CAAK,CAAC,CAAA,CAAA,CAGrBI,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAGrC,IAAMC,CAAAA,CAASC,kCAAAA,CAA4Bb,CAAcC,CAAI,CAAA,CACvD,CAAE,QAAA,CAAAa,CAAAA,CAAU,SAAA,CAAAC,CAAU,CAAA,CAAIC,kCAAAA,CAAkB,CAAA,CAC5C,CAAE,QAAA,CAAUC,CAAAA,CAAgB,SAAA,CAAWC,CAAgB,CAAA,CAC3DF,kCAAAA,CAAwB,CAAA,CAEtBG,CAAAA,CAAsB,CAAA,CACtBC,CAAAA,CAA4B,CAAA,CAC5BC,CAAAA,CAAY,CAAA,CAmFhB,MAAMC,iCAAAA,CACJ,UAAA,CAAYnB,CAAAA,CACZ,IAAA,CAAAF,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,OAAA,CAAAG,CAAAA,CACA,WAAA,CAAaE,CAAAA,CACb,YAAA,CAAcC,CAAAA,CACd,MAAA,CAxFa,MACbe,CAAAA,EACkB,CAElBJ,CAAAA,EAAuBI,CAAAA,CAAY,MAAA,CAGnC,IAAMC,CAAAA,CAAaD,CAAAA,CAAY,GAAA,CAAKE,CAAAA,EAAMA,CAAAA,CAAE,SAAmB,CAAA,CACzDC,CAAAA,CAAmBC,iCAAAA,CAAe,CAAA,CAGlCC,CAAAA,CAAoBC,iCAAAA,CAAMH,CAAkBnB,CAAS,CAAA,CACrDuB,CAAAA,CAAU,MAAMC,2BAAAA,CACpBH,CACA,MAAOJ,CAAAA,EAAAA,CACLf,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,OAAA,CACL,CAAA,gCAAA,EAAmCc,CAAAA,CAAW,MAAM,CAAA,SAAA,CACtD,CACF,CAAA,CAAA,CACgB,MAAMQ,kCAAAA,CACpBpB,CACAL,CAAAA,CAAY,CAAA,CACZ,CACE,UAAA,CAAAiB,CAAAA,CACA,WAAA,CAAa,CAACpB,CAAgB,CAChC,CACF,CAAA,CAAA,CACe,GAAA,CAAI,CAAC,CAAE,QAAA,CAAA6B,CAAAA,CAAU,QAAA,CAAAC,CAAS,CAAA,CAAA,EAAM,CAC7C,EAAA,CAAI,CAACA,CAAAA,CACH,MAAM,IAAI,KAAA,CAAM,CAAA,4BAAA,EAA+BD,CAAQ,CAAA,CAAA","file":"/home/runner/work/cli/cli/dist/impl-X6RMXDJS.cjs","sourcesContent":[null,"import type { RequestAction } from '@transcend-io/privacy-types';\nimport { logger } from '../../../../logger';\nimport colors from 'colors';\nimport { uniq, chunk } from 'lodash-es';\nimport { map } from 'bluebird';\nimport {\n buildTranscendGraphQLClient,\n fetchRequestFilesForRequest,\n} from '../../../../lib/graphql';\nimport type { LocalContext } from '../../../../context';\nimport {\n pullChunkedCustomSiloOutstandingIdentifiers,\n type CsvFormattedIdentifier,\n} from '../../../../lib/cron';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\nimport { parseFilePath, writeLargeCsv } from '../../../../lib/helpers';\n\nexport interface PullProfilesCommandFlags {\n file: string;\n fileTarget: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n cronDataSiloId: string;\n 
targetDataSiloId: string;\n actions: RequestAction[];\n skipRequestCount: boolean;\n pageLimit: number;\n chunkSize: number;\n}\n\nexport async function pullProfiles(\n this: LocalContext,\n {\n file,\n fileTarget,\n transcendUrl,\n auth,\n sombraAuth,\n cronDataSiloId,\n targetDataSiloId,\n actions,\n skipRequestCount,\n pageLimit,\n chunkSize,\n }: PullProfilesCommandFlags,\n): Promise<void> {\n if (skipRequestCount) {\n logger.info(\n colors.yellow(\n 'Skipping request count as requested. This may help speed up the call.',\n ),\n );\n }\n\n if (\n Number.isNaN(chunkSize) ||\n chunkSize <= 0 ||\n chunkSize % pageLimit !== 0\n ) {\n logger.error(\n colors.red(\n `Invalid chunk size: \"${chunkSize}\". Must be a positive integer that is a multiple of ${pageLimit}.`,\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n // Create GraphQL client to connect to Transcend backend\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const { baseName, extension } = parseFilePath(file);\n const { baseName: baseNameTarget, extension: extensionTarget } =\n parseFilePath(fileTarget);\n\n let allIdentifiersCount = 0;\n let allTargetIdentifiersCount = 0;\n let fileCount = 0;\n // Create onSave callback to handle chunked processing\n const onSave = async (\n chunkToSave: CsvFormattedIdentifier[],\n ): Promise<void> => {\n // Add to all identifiers\n allIdentifiersCount += chunkToSave.length;\n\n // Get unique request IDs from this chunk\n const requestIds = chunkToSave.map((d) => d.requestId as string);\n const uniqueRequestIds = uniq(requestIds);\n\n // Pull down target identifiers for this chunk\n const chunkedRequestIds = chunk(uniqueRequestIds, pageLimit);\n const results = await map(\n chunkedRequestIds,\n async (requestIds) => {\n logger.info(\n colors.magenta(\n `Fetching target identifiers for ${requestIds.length} requests`,\n ),\n );\n const results = await fetchRequestFilesForRequest(\n client,\n pageLimit * 2,\n {\n requestIds,\n dataSiloIds: [targetDataSiloId],\n },\n );\n return results.map(({ fileName, remoteId }) => {\n if (!remoteId) {\n throw new Error(`Failed to find remoteId for ${fileName}`);\n }\n return {\n RecordId: remoteId,\n Object: fileName\n .replace('.json', '')\n .split('/')\n .pop()\n ?.replace(' Information', ''),\n Comment:\n 'Customer data deletion request submitted via transcend.io',\n };\n });\n },\n // We are grabbing all the request files for the 'pageLimit' # of requests at a time\n {\n concurrency: 1,\n },\n );\n\n allTargetIdentifiersCount += results.flat().length;\n\n // Write the identifiers and target identifiers to CSV\n const headers = uniq(chunkToSave.map((d) => Object.keys(d)).flat());\n const numberedFileName = `${baseName}-${fileCount}${extension}`;\n const numberedFileNameTarget = `${baseNameTarget}-${fileCount}${extensionTarget}`;\n await writeLargeCsv(numberedFileName, chunkToSave, headers);\n logger.info(\n colors.green(\n `Successfully wrote ${chunkToSave.length} identifiers to file \"${file}\"`,\n ),\n );\n\n const targetIdentifiers = results.flat();\n const headers2 = uniq(targetIdentifiers.map((d) => Object.keys(d)).flat());\n await writeLargeCsv(numberedFileNameTarget, targetIdentifiers, headers2);\n logger.info(\n colors.green(\n `Successfully wrote ${targetIdentifiers.length} identifiers to file \"${fileTarget}\"`,\n ),\n );\n\n logger.info(\n colors.blue(\n `Processed chunk of ${chunk.length} identifiers, found ${targetIdentifiers.length} target identifiers`,\n ),\n );\n fileCount += 1;\n };\n\n 
// Pull down outstanding identifiers using the new chunked function\n await pullChunkedCustomSiloOutstandingIdentifiers({\n dataSiloId: cronDataSiloId,\n auth,\n sombraAuth,\n actions,\n apiPageSize: pageLimit,\n savePageSize: chunkSize,\n onSave,\n transcendUrl,\n skipRequestCount,\n });\n\n logger.info(\n colors.green(\n `Successfully wrote ${allIdentifiersCount} identifiers to file \"${file}\"`,\n ),\n );\n logger.info(\n colors.green(\n `Successfully wrote ${allTargetIdentifiersCount} identifiers to file \"${fileTarget}\"`,\n ),\n );\n}\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-BEVLBZ2F.cjs","../src/commands/request/cron/pull-profiles/impl.ts"],"names":["pullProfiles","file","fileTarget","transcendUrl","auth","sombraAuth","cronDataSiloId","targetDataSiloId","actions","skipRequestCount","pageLimit","chunkSize","logger","colors","doneInputValidation","client","buildTranscendGraphQLClient","baseName","extension","parseFilePath","baseNameTarget","extensionTarget","allIdentifiersCount","allTargetIdentifiersCount","fileCount","pullChunkedCustomSiloOutstandingIdentifiers","chunkToSave","requestIds","d","uniqueRequestIds","uniq_default","chunkedRequestIds","chunk_default","results","map","fetchRequestFilesForRequest","fileName","remoteId"],"mappings":"AAAA,quBAAwC,wDAAyC,wDAAkE,gCAA6B,wDAAyC,wDAAgD,gCAA6B,gFCEnR,oCAEC,MA2BpB,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,cAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,OAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,SAAA,CAAAC,CAAAA,CACA,SAAA,CAAAC,CACF,CAAA,CACe,CACXF,CAAAA,EACFG,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,MAAA,CACL,uEACF,CACF,CAAA,CAAA,CAIA,MAAA,CAAO,KAAA,CAAMF,CAAS,CAAA,EACtBA,CAAAA,EAAa,CAAA,EACbA,CAAAA,CAAYD,CAAAA,GAAc,CAAA,CAAA,EAAA,CAE1BE,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,qBAAA,EAAwBF,CAAS,CAAA,oDAAA,EAAuDD,CAAS,CAAA,CAAA,CACnG,CACF,CAAA,CACA,IAAA,CAAK,OAAA,CAAQ,IAAA,CAAK,CAAC,CAAA,CAAA,CAGrBI,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAGrC,IAAMC,CAAAA,CAASC,kCAAAA,CAA4Bb,CAAcC,CAAI,CAAA,CACvD,CAAE,QAAA,CAAAa,CAAAA,CAAU,SAAA,CAAAC,CAAU,CAAA,CAAIC,kCAAAA,CAAkB,CAAA,CAC5C,CAAE,QAAA,CAAUC,CAAAA,CAAgB,SAAA,CAAWC,CAAgB,CAAA,CAC3DF,kCAAAA,CAAwB,CAAA,CAEtBG,CAAAA,CAAsB,CAAA,CACtBC,CAAAA,CAA4B,CAAA,CAC5BC,CAAAA,CAAY,CAAA,CAmFhB,MAAMC,iCAAAA,CACJ,UAAA,CAAYnB,CAAAA,CACZ,IAAA,CAAAF,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,OAAA,CAAAG,CAAAA,CACA,WAAA,CAAaE,CAAAA,CACb,YAAA,CAAcC,CAAAA,CACd,MAAA,CAxFa,MACbe,CAAAA,EACkB,CAElBJ,CAAAA,EAAuBI,CAAAA,CAAY,MAAA,CAGnC,IAAMC,CAAAA,CAAaD,CAAAA,CAAY,GAAA,CAAKE,CAAAA,EAAMA,CAAAA,CAAE,SAAmB,CAAA,CACzDC,CAAAA,CAAmBC,iCAAAA,CAAe,CAAA,CAGlCC,CAAAA,CAAoBC,iCAAAA,CAAMH,CAAkBnB,CAAS,CAAA,CACrDuB,CAAAA,CAAU,MAAMC,2BAAAA,CACpBH,CACA,MAAOJ,CAAAA,EAAAA,CACLf,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,OAAA,CACL,CAAA,gCAAA,EAAmCc,CAAAA,CAAW,MAAM,CAAA,SAAA,CACtD,CACF,CAAA,CAAA,CACgB,MAAMQ,kCAAAA,CACpBpB,CACAL,CAAAA,CAAY,CAAA,CACZ,CACE,UAAA,CAAAiB,CAAAA,CACA,WAAA,CAAa,CAACpB,CAAgB,CAChC,CACF,CAAA,CAAA,CACe,GAAA,CAAI,CAAC,CAAE,QAAA,CAAA6B,CAAAA,CAAU,QAAA,CAAAC,CAAS,CAAA,CAAA,EAAM,CAC7C,EAAA,CAAI,CAACA,CAAAA,CACH,MAAM,IAAI,KAAA,CAAM,CAAA,4BAAA,EAA+BD,CAAQ,CAAA,CAAA","file":"/home/runner/work/cli/cli/dist/impl-BEVLBZ2F.cjs","sourcesContent":[null,"import type { RequestAction } from '@transcend-io/privacy-types';\nimport { logger } from '../../../../logger';\nimport colors from 'colors';\nimport { uniq, chunk } from 'lodash-es';\nimport { map } from 'bluebird';\nimport {\n buildTranscendGraphQLClient,\n fetchRequestFilesForRequest,\n} from '../../../../lib/graphql';\nimport type { LocalContext } from '../../../../context';\nimport {\n pullChunkedCustomSiloOutstandingIdentifiers,\n type CsvFormattedIdentifier,\n} from '../../../../lib/cron';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\nimport { parseFilePath, writeLargeCsv } from '../../../../lib/helpers';\n\nexport interface PullProfilesCommandFlags {\n file: string;\n fileTarget: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n cronDataSiloId: string;\n 
targetDataSiloId: string;\n actions: RequestAction[];\n skipRequestCount: boolean;\n pageLimit: number;\n chunkSize: number;\n}\n\nexport async function pullProfiles(\n this: LocalContext,\n {\n file,\n fileTarget,\n transcendUrl,\n auth,\n sombraAuth,\n cronDataSiloId,\n targetDataSiloId,\n actions,\n skipRequestCount,\n pageLimit,\n chunkSize,\n }: PullProfilesCommandFlags,\n): Promise<void> {\n if (skipRequestCount) {\n logger.info(\n colors.yellow(\n 'Skipping request count as requested. This may help speed up the call.',\n ),\n );\n }\n\n if (\n Number.isNaN(chunkSize) ||\n chunkSize <= 0 ||\n chunkSize % pageLimit !== 0\n ) {\n logger.error(\n colors.red(\n `Invalid chunk size: \"${chunkSize}\". Must be a positive integer that is a multiple of ${pageLimit}.`,\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n // Create GraphQL client to connect to Transcend backend\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const { baseName, extension } = parseFilePath(file);\n const { baseName: baseNameTarget, extension: extensionTarget } =\n parseFilePath(fileTarget);\n\n let allIdentifiersCount = 0;\n let allTargetIdentifiersCount = 0;\n let fileCount = 0;\n // Create onSave callback to handle chunked processing\n const onSave = async (\n chunkToSave: CsvFormattedIdentifier[],\n ): Promise<void> => {\n // Add to all identifiers\n allIdentifiersCount += chunkToSave.length;\n\n // Get unique request IDs from this chunk\n const requestIds = chunkToSave.map((d) => d.requestId as string);\n const uniqueRequestIds = uniq(requestIds);\n\n // Pull down target identifiers for this chunk\n const chunkedRequestIds = chunk(uniqueRequestIds, pageLimit);\n const results = await map(\n chunkedRequestIds,\n async (requestIds) => {\n logger.info(\n colors.magenta(\n `Fetching target identifiers for ${requestIds.length} requests`,\n ),\n );\n const results = await fetchRequestFilesForRequest(\n client,\n pageLimit * 2,\n {\n requestIds,\n dataSiloIds: [targetDataSiloId],\n },\n );\n return results.map(({ fileName, remoteId }) => {\n if (!remoteId) {\n throw new Error(`Failed to find remoteId for ${fileName}`);\n }\n return {\n RecordId: remoteId,\n Object: fileName\n .replace('.json', '')\n .split('/')\n .pop()\n ?.replace(' Information', ''),\n Comment:\n 'Customer data deletion request submitted via transcend.io',\n };\n });\n },\n // We are grabbing all the request files for the 'pageLimit' # of requests at a time\n {\n concurrency: 1,\n },\n );\n\n allTargetIdentifiersCount += results.flat().length;\n\n // Write the identifiers and target identifiers to CSV\n const headers = uniq(chunkToSave.map((d) => Object.keys(d)).flat());\n const numberedFileName = `${baseName}-${fileCount}${extension}`;\n const numberedFileNameTarget = `${baseNameTarget}-${fileCount}${extensionTarget}`;\n await writeLargeCsv(numberedFileName, chunkToSave, headers);\n logger.info(\n colors.green(\n `Successfully wrote ${chunkToSave.length} identifiers to file \"${file}\"`,\n ),\n );\n\n const targetIdentifiers = results.flat();\n const headers2 = uniq(targetIdentifiers.map((d) => Object.keys(d)).flat());\n await writeLargeCsv(numberedFileNameTarget, targetIdentifiers, headers2);\n logger.info(\n colors.green(\n `Successfully wrote ${targetIdentifiers.length} identifiers to file \"${fileTarget}\"`,\n ),\n );\n\n logger.info(\n colors.blue(\n `Processed chunk of ${chunk.length} identifiers, found ${targetIdentifiers.length} target identifiers`,\n ),\n );\n fileCount += 1;\n };\n\n 
// Pull down outstanding identifiers using the new chunked function\n await pullChunkedCustomSiloOutstandingIdentifiers({\n dataSiloId: cronDataSiloId,\n auth,\n sombraAuth,\n actions,\n apiPageSize: pageLimit,\n savePageSize: chunkSize,\n onSave,\n transcendUrl,\n skipRequestCount,\n });\n\n logger.info(\n colors.green(\n `Successfully wrote ${allIdentifiersCount} identifiers to file \"${file}\"`,\n ),\n );\n logger.info(\n colors.green(\n `Successfully wrote ${allTargetIdentifiersCount} identifiers to file \"${fileTarget}\"`,\n ),\n );\n}\n"]}
package/dist/{impl-QTTAMUHI.cjs → impl-CCY7BINI.cjs}
@@ -1,2 +1,2 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkDV57HBVOcjs = require('./chunk-DV57HBVO.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkGLXV5XPPcjs = require('./chunk-GLXV5XPP.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');require('./chunk-HJTYLFGJ.cjs');require('./chunk-Q7I37FJV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function q(){let e=Number(process.env.WORKER_ID||"0");_chunkZUNVPK23cjs.a.info(`[w${e}] ready pid=${process.pid}`),_optionalChain([process, 'access', _ => _.send, 'optionalCall', _2 => _2({type:"ready"})]),process.on("message",async o=>{if(!o||typeof o!="object"||(o.type==="shutdown"&&process.exit(0),o.type!=="task"))return;let{filePath:t,options:p}=o.payload,{outputDir:u,clearOutputDir:i}=p;try{_chunkZUNVPK23cjs.a.info(`[w${e}] processing ${t}`),await _chunkGLXV5XPPcjs.cg.call(void 0, {filePath:t,outputDir:u,clearOutputDir:i,onProgress:(s,n)=>_optionalChain([process, 'access', _3 => _3.send, 'optionalCall', _4 => _4({type:"progress",payload:{filePath:t,processed:s,total:n}})])}),_optionalChain([process, 'access', _5 => _5.send, 'optionalCall', _6 => _6({type:"result",payload:{ok:!0,filePath:t}})])}catch(s){let n=_chunkGLXV5XPPcjs.Wf.call(void 0, s);_chunkZUNVPK23cjs.a.error(`[w${e}] ERROR ${t}: ${s.stack||n}`),_optionalChain([process, 'access', _7 => _7.send, 'optionalCall', _8 => _8({type:"result",payload:{ok:!1,filePath:t,error:n}})])}}),await new Promise(()=>{})}function R(e){return _chunkDV57HBVOcjs.d.call(void 0, e)}function $(e){return _chunkDV57HBVOcjs.e.call(void 0, e)}var v={renderHeader:R,renderWorkers:$};function O(){return typeof __filename<"u"?__filename:process.argv[1]}async function N(e){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);let{directory:o,outputDir:t,clearOutputDir:p,concurrency:u,viewerMode:i}=e,s=_chunkGLXV5XPPcjs.bg.call(void 0, o,this),{poolSize:n,cpuCount:d}=_chunkDV57HBVOcjs.a.call(void 0, u,s.length);_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Converting ${s.length} Parquet file(s) \u2192 CSV with pool size ${n} (CPU=${d})`));let w=s.map(r=>({filePath:r,options:{outputDir:t,clearOutputDir:p}})),b={nextTask:()=>w.shift(),taskLabel:r=>r.filePath,initTotals:()=>({}),initSlotProgress:()=>{},onProgress:r=>r,onResult:(r,l)=>({totals:r,ok:!!l.ok}),postProcess:async()=>{}};await _chunkDV57HBVOcjs.f.call(void 0, {title:`Parquet \u2192 CSV - ${o}`,baseDir:o||t||process.cwd(),childFlag:_chunkDV57HBVOcjs.b,childModulePath:O(),poolSize:n,cpuCount:d,filesTotal:s.length,hooks:b,viewerMode:i,render:r=>_chunkDV57HBVOcjs.c.call(void 0, r,v,i),extraKeyHandler:({logsBySlot:r,repaint:l,setPaused:D})=>_chunkDV57HBVOcjs.g.call(void 0, {logsBySlot:r,repaint:l,setPaused:D})})}process.argv.includes(_chunkDV57HBVOcjs.b)&&q().catch(e=>{_chunkZUNVPK23cjs.a.error(e),process.exit(1)});exports.parquetToCsv 
= N;
- //# sourceMappingURL=impl-QTTAMUHI.cjs.map
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkL5QUUSVBcjs = require('./chunk-L5QUUSVB.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkK65NYGJMcjs = require('./chunk-K65NYGJM.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');require('./chunk-FTXFBDFW.cjs');require('./chunk-Q7I37FJV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function q(){let e=Number(process.env.WORKER_ID||"0");_chunkZUNVPK23cjs.a.info(`[w${e}] ready pid=${process.pid}`),_optionalChain([process, 'access', _ => _.send, 'optionalCall', _2 => _2({type:"ready"})]),process.on("message",async o=>{if(!o||typeof o!="object"||(o.type==="shutdown"&&process.exit(0),o.type!=="task"))return;let{filePath:t,options:p}=o.payload,{outputDir:u,clearOutputDir:i}=p;try{_chunkZUNVPK23cjs.a.info(`[w${e}] processing ${t}`),await _chunkK65NYGJMcjs.cg.call(void 0, {filePath:t,outputDir:u,clearOutputDir:i,onProgress:(s,n)=>_optionalChain([process, 'access', _3 => _3.send, 'optionalCall', _4 => _4({type:"progress",payload:{filePath:t,processed:s,total:n}})])}),_optionalChain([process, 'access', _5 => _5.send, 'optionalCall', _6 => _6({type:"result",payload:{ok:!0,filePath:t}})])}catch(s){let n=_chunkK65NYGJMcjs.Wf.call(void 0, s);_chunkZUNVPK23cjs.a.error(`[w${e}] ERROR ${t}: ${s.stack||n}`),_optionalChain([process, 'access', _7 => _7.send, 'optionalCall', _8 => _8({type:"result",payload:{ok:!1,filePath:t,error:n}})])}}),await new Promise(()=>{})}function R(e){return _chunkL5QUUSVBcjs.d.call(void 0, e)}function $(e){return _chunkL5QUUSVBcjs.e.call(void 0, e)}var v={renderHeader:R,renderWorkers:$};function O(){return typeof __filename<"u"?__filename:process.argv[1]}async function N(e){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);let{directory:o,outputDir:t,clearOutputDir:p,concurrency:u,viewerMode:i}=e,s=_chunkK65NYGJMcjs.bg.call(void 0, o,this),{poolSize:n,cpuCount:d}=_chunkL5QUUSVBcjs.a.call(void 0, u,s.length);_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Converting ${s.length} Parquet file(s) \u2192 CSV with pool size ${n} (CPU=${d})`));let w=s.map(r=>({filePath:r,options:{outputDir:t,clearOutputDir:p}})),b={nextTask:()=>w.shift(),taskLabel:r=>r.filePath,initTotals:()=>({}),initSlotProgress:()=>{},onProgress:r=>r,onResult:(r,l)=>({totals:r,ok:!!l.ok}),postProcess:async()=>{}};await _chunkL5QUUSVBcjs.f.call(void 0, {title:`Parquet \u2192 CSV - ${o}`,baseDir:o||t||process.cwd(),childFlag:_chunkL5QUUSVBcjs.b,childModulePath:O(),poolSize:n,cpuCount:d,filesTotal:s.length,hooks:b,viewerMode:i,render:r=>_chunkL5QUUSVBcjs.c.call(void 0, r,v,i),extraKeyHandler:({logsBySlot:r,repaint:l,setPaused:D})=>_chunkL5QUUSVBcjs.g.call(void 0, {logsBySlot:r,repaint:l,setPaused:D})})}process.argv.includes(_chunkL5QUUSVBcjs.b)&&q().catch(e=>{_chunkZUNVPK23cjs.a.error(e),process.exit(1)});exports.parquetToCsv 
= N;
+ //# sourceMappingURL=impl-CCY7BINI.cjs.map
package/dist/{impl-QTTAMUHI.cjs.map → impl-CCY7BINI.cjs.map}
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-QTTAMUHI.cjs","../src/commands/admin/parquet-to-csv/impl.ts","../src/commands/admin/parquet-to-csv/worker.ts"],"names":["runChild","workerId","logger"],"mappings":"AAAA,quBAAkF,wDAAyC,wDAA0D,gCAA6B,wDAAoC,gCAA6B,gCAA6B,gFCC7R,MCgCnB,SAAsBA,CAAAA,CAAAA,CAA0B,CAC9C,IAAMC,CAAAA,CAAW,MAAA,CAAO,OAAA,CAAQ,GAAA,CAAI,SAAA,EAAa,GAAG,CAAA,CACpDC,mBAAAA,CAAO,IAAA,CAAK,CAAA,EAAA,EAAKD,CAAQ,CAAA,YAAA,EAAe,OAAA,CAAQ,GAAG,CAAA,CAAA","file":"/home/runner/work/cli/cli/dist/impl-QTTAMUHI.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { collectParquetFilesOrExit } from '../../../lib/helpers';\nimport {\n computePoolSize,\n createExtraKeyHandler,\n CHILD_FLAG,\n type PoolHooks,\n runPool,\n dashboardPlugin,\n} from '../../../lib/pooling';\nimport {\n runChild,\n type ParquetProgress,\n type ParquetResult,\n type ParquetTask,\n} from './worker';\nimport { parquetToCsvPlugin } from './ui';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n/**\n * Returns the current module's path so the worker pool knows what file to re-exec.\n * In Node ESM, __filename is undefined, so we fall back to argv[1].\n *\n * @returns The current module's path.\n */\nfunction getCurrentModulePath(): string {\n if (typeof __filename !== 'undefined') {\n return __filename as unknown as string;\n }\n return process.argv[1];\n}\n\n/** No custom totals for the header; the runner’s built-ins suffice. */\ntype Totals = Record<string, never>;\n\nexport type ParquetToCsvCommandFlags = {\n directory: string;\n outputDir?: string;\n clearOutputDir: boolean;\n concurrency?: number;\n viewerMode: boolean;\n};\n\n/**\n * Convert all Parquet files in a directory to CSV, in parallel.\n *\n * @param flags - The command flags.\n */\nexport async function parquetToCsv(\n this: LocalContext,\n flags: ParquetToCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const { directory, outputDir, clearOutputDir, concurrency, viewerMode } =\n flags;\n\n /* 1) Discover .parquet inputs */\n const files = collectParquetFilesOrExit(directory, this);\n\n /* 2) Size the pool */\n const { poolSize, cpuCount } = computePoolSize(concurrency, files.length);\n\n logger.info(\n colors.green(\n `Converting ${files.length} Parquet file(s) → CSV with pool size ${poolSize} (CPU=${cpuCount})`,\n ),\n );\n\n /* 3) Build FIFO queue of tasks (one per file) */\n const queue = files.map<ParquetTask>((filePath) => ({\n filePath,\n options: { outputDir, clearOutputDir },\n }));\n\n /* 4) Pool hooks */\n const hooks: PoolHooks<ParquetTask, ParquetProgress, ParquetResult, Totals> =\n {\n nextTask: () => queue.shift(),\n taskLabel: (t) => t.filePath,\n initTotals: () => ({} as Totals),\n initSlotProgress: () => undefined,\n onProgress: (totals) => totals,\n onResult: (totals, res) => ({ totals, ok: !!res.ok }),\n postProcess: async () => {\n // nothing special post-run\n },\n };\n\n /* 5) Launch the pool runner with custom dashboard plugin */\n await runPool({\n title: `Parquet → CSV - ${directory}`,\n baseDir: directory || outputDir || process.cwd(),\n childFlag: CHILD_FLAG,\n childModulePath: getCurrentModulePath(),\n poolSize,\n cpuCount,\n filesTotal: files.length,\n hooks,\n viewerMode,\n render: (input) => dashboardPlugin(input, parquetToCsvPlugin, viewerMode),\n extraKeyHandler: ({ logsBySlot, repaint, setPaused }) =>\n createExtraKeyHandler({ 
logsBySlot, repaint, setPaused }),\n });\n}\n\n/* -------------------------------------------------------------------------------------------------\n * If invoked directly as a child process, enter worker loop\n * ------------------------------------------------------------------------------------------------- */\nif (process.argv.includes(CHILD_FLAG)) {\n runChild().catch((err) => {\n logger.error(err);\n process.exit(1);\n });\n}\n","import { parquetToCsvOneFile, extractErrorMessage } from '../../../lib/helpers';\nimport type { ToWorker } from '../../../lib/pooling';\nimport { logger } from '../../../logger';\n\nexport type ParquetTask = {\n /** Absolute path of the Parquet file to convert. */\n filePath: string;\n options: {\n /** Optional directory where CSV output files should be written. */\n outputDir?: string;\n /** Whether to clear any pre-existing output before writing new ones. */\n clearOutputDir: boolean;\n };\n};\n\nexport type ParquetProgress = {\n /** File being processed by the worker. */\n filePath: string;\n /** Rows processed so far. */\n processed: number;\n /** Optional known total rows (not always available). */\n total?: number;\n};\n\nexport type ParquetResult = {\n ok: boolean;\n filePath: string;\n error?: string;\n};\n\n/**\n * Worker loop: convert a single Parquet file to one or more CSV files.\n */\nexport async function runChild(): Promise<void> {\n const workerId = Number(process.env.WORKER_ID || '0');\n logger.info(`[w${workerId}] ready pid=${process.pid}`);\n process.send?.({ type: 'ready' });\n\n process.on('message', async (msg: ToWorker<ParquetTask>) => {\n if (!msg || typeof msg !== 'object') return;\n\n if (msg.type === 'shutdown') {\n process.exit(0);\n }\n if (msg.type !== 'task') return;\n\n const { filePath, options } = msg.payload;\n const { outputDir, clearOutputDir } = options;\n\n try {\n logger.info(`[w${workerId}] processing ${filePath}`);\n await parquetToCsvOneFile({\n filePath,\n outputDir,\n clearOutputDir,\n onProgress: (processed, total) =>\n process.send?.({\n type: 'progress',\n payload: { filePath, processed, total },\n }),\n });\n\n process.send?.({\n type: 'result',\n payload: { ok: true, filePath },\n });\n } catch (err) {\n const message = extractErrorMessage(err);\n logger.error(`[w${workerId}] ERROR ${filePath}: ${err.stack || message}`);\n process.send?.({\n type: 'result',\n payload: { ok: false, filePath, error: message },\n });\n }\n });\n\n // keep alive until shutdown\n await new Promise<never>(() => {\n // Do nothing\n });\n}\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-CCY7BINI.cjs","../src/commands/admin/parquet-to-csv/impl.ts","../src/commands/admin/parquet-to-csv/worker.ts"],"names":["runChild","workerId","logger"],"mappings":"AAAA,quBAAkF,wDAAyC,wDAA0D,gCAA6B,wDAAoC,gCAA6B,gCAA6B,gFCC7R,MCgCnB,SAAsBA,CAAAA,CAAAA,CAA0B,CAC9C,IAAMC,CAAAA,CAAW,MAAA,CAAO,OAAA,CAAQ,GAAA,CAAI,SAAA,EAAa,GAAG,CAAA,CACpDC,mBAAAA,CAAO,IAAA,CAAK,CAAA,EAAA,EAAKD,CAAQ,CAAA,YAAA,EAAe,OAAA,CAAQ,GAAG,CAAA,CAAA","file":"/home/runner/work/cli/cli/dist/impl-CCY7BINI.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { collectParquetFilesOrExit } from '../../../lib/helpers';\nimport {\n computePoolSize,\n createExtraKeyHandler,\n CHILD_FLAG,\n type PoolHooks,\n runPool,\n dashboardPlugin,\n} from '../../../lib/pooling';\nimport {\n runChild,\n type ParquetProgress,\n type ParquetResult,\n type ParquetTask,\n} from './worker';\nimport { parquetToCsvPlugin } from './ui';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n/**\n * Returns the current module's path so the worker pool knows what file to re-exec.\n * In Node ESM, __filename is undefined, so we fall back to argv[1].\n *\n * @returns The current module's path.\n */\nfunction getCurrentModulePath(): string {\n if (typeof __filename !== 'undefined') {\n return __filename as unknown as string;\n }\n return process.argv[1];\n}\n\n/** No custom totals for the header; the runner’s built-ins suffice. */\ntype Totals = Record<string, never>;\n\nexport type ParquetToCsvCommandFlags = {\n directory: string;\n outputDir?: string;\n clearOutputDir: boolean;\n concurrency?: number;\n viewerMode: boolean;\n};\n\n/**\n * Convert all Parquet files in a directory to CSV, in parallel.\n *\n * @param flags - The command flags.\n */\nexport async function parquetToCsv(\n this: LocalContext,\n flags: ParquetToCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const { directory, outputDir, clearOutputDir, concurrency, viewerMode } =\n flags;\n\n /* 1) Discover .parquet inputs */\n const files = collectParquetFilesOrExit(directory, this);\n\n /* 2) Size the pool */\n const { poolSize, cpuCount } = computePoolSize(concurrency, files.length);\n\n logger.info(\n colors.green(\n `Converting ${files.length} Parquet file(s) → CSV with pool size ${poolSize} (CPU=${cpuCount})`,\n ),\n );\n\n /* 3) Build FIFO queue of tasks (one per file) */\n const queue = files.map<ParquetTask>((filePath) => ({\n filePath,\n options: { outputDir, clearOutputDir },\n }));\n\n /* 4) Pool hooks */\n const hooks: PoolHooks<ParquetTask, ParquetProgress, ParquetResult, Totals> =\n {\n nextTask: () => queue.shift(),\n taskLabel: (t) => t.filePath,\n initTotals: () => ({} as Totals),\n initSlotProgress: () => undefined,\n onProgress: (totals) => totals,\n onResult: (totals, res) => ({ totals, ok: !!res.ok }),\n postProcess: async () => {\n // nothing special post-run\n },\n };\n\n /* 5) Launch the pool runner with custom dashboard plugin */\n await runPool({\n title: `Parquet → CSV - ${directory}`,\n baseDir: directory || outputDir || process.cwd(),\n childFlag: CHILD_FLAG,\n childModulePath: getCurrentModulePath(),\n poolSize,\n cpuCount,\n filesTotal: files.length,\n hooks,\n viewerMode,\n render: (input) => dashboardPlugin(input, parquetToCsvPlugin, viewerMode),\n extraKeyHandler: ({ logsBySlot, repaint, setPaused }) =>\n createExtraKeyHandler({ 
logsBySlot, repaint, setPaused }),\n });\n}\n\n/* -------------------------------------------------------------------------------------------------\n * If invoked directly as a child process, enter worker loop\n * ------------------------------------------------------------------------------------------------- */\nif (process.argv.includes(CHILD_FLAG)) {\n runChild().catch((err) => {\n logger.error(err);\n process.exit(1);\n });\n}\n","import { parquetToCsvOneFile, extractErrorMessage } from '../../../lib/helpers';\nimport type { ToWorker } from '../../../lib/pooling';\nimport { logger } from '../../../logger';\n\nexport type ParquetTask = {\n /** Absolute path of the Parquet file to convert. */\n filePath: string;\n options: {\n /** Optional directory where CSV output files should be written. */\n outputDir?: string;\n /** Whether to clear any pre-existing output before writing new ones. */\n clearOutputDir: boolean;\n };\n};\n\nexport type ParquetProgress = {\n /** File being processed by the worker. */\n filePath: string;\n /** Rows processed so far. */\n processed: number;\n /** Optional known total rows (not always available). */\n total?: number;\n};\n\nexport type ParquetResult = {\n ok: boolean;\n filePath: string;\n error?: string;\n};\n\n/**\n * Worker loop: convert a single Parquet file to one or more CSV files.\n */\nexport async function runChild(): Promise<void> {\n const workerId = Number(process.env.WORKER_ID || '0');\n logger.info(`[w${workerId}] ready pid=${process.pid}`);\n process.send?.({ type: 'ready' });\n\n process.on('message', async (msg: ToWorker<ParquetTask>) => {\n if (!msg || typeof msg !== 'object') return;\n\n if (msg.type === 'shutdown') {\n process.exit(0);\n }\n if (msg.type !== 'task') return;\n\n const { filePath, options } = msg.payload;\n const { outputDir, clearOutputDir } = options;\n\n try {\n logger.info(`[w${workerId}] processing ${filePath}`);\n await parquetToCsvOneFile({\n filePath,\n outputDir,\n clearOutputDir,\n onProgress: (processed, total) =>\n process.send?.({\n type: 'progress',\n payload: { filePath, processed, total },\n }),\n });\n\n process.send?.({\n type: 'result',\n payload: { ok: true, filePath },\n });\n } catch (err) {\n const message = extractErrorMessage(err);\n logger.error(`[w${workerId}] ERROR ${filePath}: ${err.stack || message}`);\n process.send?.({\n type: 'result',\n payload: { ok: false, filePath, error: message },\n });\n }\n });\n\n // keep alive until shutdown\n await new Promise<never>(() => {\n // Do nothing\n });\n}\n"]}
package/dist/{impl-22JGFXXK.cjs → impl-DEQW3OGQ.cjs}
@@ -1,2 +1,2 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkHPERBM2Rcjs = require('./chunk-HPERBM2R.cjs');var _chunkRE5YALEOcjs = require('./chunk-RE5YALEO.cjs');var _chunkEDVVHIFKcjs = require('./chunk-EDVVHIFK.cjs');require('./chunk-5UBGZNDC.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkGLXV5XPPcjs = require('./chunk-GLXV5XPP.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkHJTYLFGJcjs = require('./chunk-HJTYLFGJ.cjs');require('./chunk-Q7I37FJV.cjs');var _path = require('path');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _fs = require('fs');async function J({auth:F,dataFlowsYmlFolder:r,output:x,ignoreYmls:C=[],transcendUrl:N}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit),(!_fs.existsSync.call(void 0, r)||!_fs.lstatSync.call(void 0, r).isDirectory())&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Folder does not exist: "${r}"`)),this.process.exit(1));let k=C.map(t=>t.split(".")[0]),l=_chunkEDVVHIFKcjs.c.call(void 0, r).map(t=>{let{"data-flows":o=[]}=_chunkRE5YALEOcjs.d.call(void 0, _path.join.call(void 0, r,t)),{adTechDataSilos:m,siteTechDataSilos:i}=_chunkHPERBM2Rcjs.a.call(void 0, o,{serviceToSupportedIntegration:d,serviceToTitle:u});return{adTechDataSilos:m,siteTechDataSilos:i,organizationName:t.split(".")[0]}}),a={};l.forEach(({adTechDataSilos:t,siteTechDataSilos:o,organizationName:m})=>{[...t,...o].forEach(e=>{let n=e["outer-type"]||e.integrationName;a[n]||(a[n]=[]),a[n].push(m),a[n]=[...new Set(a[n])]})});let p=[...new Set(l.map(({adTechDataSilos:t})=>t.map(o=>o["outer-type"]||o.integrationName)).flat())],f=_chunkHJTYLFGJcjs.c.call(void 0, [...new Set(l.map(({siteTechDataSilos:t})=>t.map(o=>o["outer-type"]||o.integrationName)).flat())],p),s={};l.forEach(({adTechDataSilos:t,siteTechDataSilos:o})=>{[...t,...o].forEach(i=>{let e=i["outer-type"]||i.integrationName,n=_optionalChain([i, 'access', _ => _.attributes, 'optionalAccess', _2 => _2.find, 'call', _3 => _3(E=>E.key==="Found On Domain")]);s[e]||(s[e]=[]),s[e].push(..._optionalChain([n, 'optionalAccess', _4 => _4.values])||[]),s[e]=[...new Set(s[e])]})});let A=_chunkGLXV5XPPcjs.wc.call(void 0, N,F),{serviceToTitle:u,serviceToSupportedIntegration:d}=await _chunkGLXV5XPPcjs.ud.call(void 0, A),h=[...p,...f].map(t=>({title:u[t],...d[t]?{integrationName:t}:{integrationName:"promptAPerson","outer-type":t},attributes:[{key:"Tech Type",values:["Ad Tech"]},{key:"Business Units",values:_chunkHJTYLFGJcjs.c.call(void 0, a[t]||[],k)},{key:"Found On Domain",values:s[t]||[]}]}));_chunkZUNVPK23cjs.a.log(`Total Services: ${h.length}`),_chunkZUNVPK23cjs.a.log(`Ad Tech Services: ${p.length}`),_chunkZUNVPK23cjs.a.log(`Site Tech Services: ${f.length}`),_chunkRE5YALEOcjs.e.call(void 0, x,{"data-silos":h})}exports.deriveDataSilosFromDataFlowsCrossInstance = J;
- //# sourceMappingURL=impl-22JGFXXK.cjs.map
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunk2MHJ5LZ7cjs = require('./chunk-2MHJ5LZ7.cjs');var _chunkRE5YALEOcjs = require('./chunk-RE5YALEO.cjs');var _chunk5TKJLZA2cjs = require('./chunk-5TKJLZA2.cjs');require('./chunk-5UBGZNDC.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkK65NYGJMcjs = require('./chunk-K65NYGJM.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkFTXFBDFWcjs = require('./chunk-FTXFBDFW.cjs');require('./chunk-Q7I37FJV.cjs');var _path = require('path');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _fs = require('fs');async function J({auth:F,dataFlowsYmlFolder:r,output:x,ignoreYmls:C=[],transcendUrl:N}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit),(!_fs.existsSync.call(void 0, r)||!_fs.lstatSync.call(void 0, r).isDirectory())&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Folder does not exist: "${r}"`)),this.process.exit(1));let k=C.map(t=>t.split(".")[0]),l=_chunk5TKJLZA2cjs.c.call(void 0, r).map(t=>{let{"data-flows":o=[]}=_chunkRE5YALEOcjs.d.call(void 0, _path.join.call(void 0, r,t)),{adTechDataSilos:m,siteTechDataSilos:i}=_chunk2MHJ5LZ7cjs.a.call(void 0, o,{serviceToSupportedIntegration:d,serviceToTitle:u});return{adTechDataSilos:m,siteTechDataSilos:i,organizationName:t.split(".")[0]}}),a={};l.forEach(({adTechDataSilos:t,siteTechDataSilos:o,organizationName:m})=>{[...t,...o].forEach(e=>{let n=e["outer-type"]||e.integrationName;a[n]||(a[n]=[]),a[n].push(m),a[n]=[...new Set(a[n])]})});let p=[...new Set(l.map(({adTechDataSilos:t})=>t.map(o=>o["outer-type"]||o.integrationName)).flat())],f=_chunkFTXFBDFWcjs.c.call(void 0, [...new Set(l.map(({siteTechDataSilos:t})=>t.map(o=>o["outer-type"]||o.integrationName)).flat())],p),s={};l.forEach(({adTechDataSilos:t,siteTechDataSilos:o})=>{[...t,...o].forEach(i=>{let e=i["outer-type"]||i.integrationName,n=_optionalChain([i, 'access', _ => _.attributes, 'optionalAccess', _2 => _2.find, 'call', _3 => _3(E=>E.key==="Found On Domain")]);s[e]||(s[e]=[]),s[e].push(..._optionalChain([n, 'optionalAccess', _4 => _4.values])||[]),s[e]=[...new Set(s[e])]})});let A=_chunkK65NYGJMcjs.wc.call(void 0, N,F),{serviceToTitle:u,serviceToSupportedIntegration:d}=await _chunkK65NYGJMcjs.ud.call(void 0, A),h=[...p,...f].map(t=>({title:u[t],...d[t]?{integrationName:t}:{integrationName:"promptAPerson","outer-type":t},attributes:[{key:"Tech Type",values:["Ad Tech"]},{key:"Business Units",values:_chunkFTXFBDFWcjs.c.call(void 0, a[t]||[],k)},{key:"Found On Domain",values:s[t]||[]}]}));_chunkZUNVPK23cjs.a.log(`Total Services: ${h.length}`),_chunkZUNVPK23cjs.a.log(`Ad Tech Services: ${p.length}`),_chunkZUNVPK23cjs.a.log(`Site Tech Services: ${f.length}`),_chunkRE5YALEOcjs.e.call(void 0, x,{"data-silos":h})}exports.deriveDataSilosFromDataFlowsCrossInstance = J;
+ //# sourceMappingURL=impl-DEQW3OGQ.cjs.map
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-22JGFXXK.cjs","../src/commands/inventory/derive-data-silos-from-data-flows-cross-instance/impl.ts"],"names":["deriveDataSilosFromDataFlowsCrossInstance","auth","dataFlowsYmlFolder","output","ignoreYmls","transcendUrl","doneInputValidation","existsSync","lstatSync","logger","colors","instancesToIgnore","x","dataSiloInputs","listFiles","directory","dataFlows","readTranscendYaml","join","adTechDataSilos","siteTechDataSilos","dataFlowsToDataSilos","serviceToSupportedIntegration","serviceToTitle","serviceToInstance","organizationName","dataSilo","service","adTechIntegrations","silo","siteTechIntegrations","difference_default","serviceToFoundOnDomain","foundOnDomain","attr","client","buildTranscendGraphQLClient","fetchAndIndexCatalogs","dataSilos"],"mappings":"AAAA,quBAAwC,wDAAgD,wDAAyC,gCAA6B,wDAAyC,wDAAkD,gCAA6B,wDAAyC,wDAAyC,gCAA6B,4BCKhX,gFAEF,wBAImB,MAgBtC,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,kBAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CAAa,CAAC,CAAA,CACd,YAAA,CAAAC,CACF,CAAA,CACe,CACfC,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAAA,CAInC,CAACC,4BAAAA,CAA6B,CAAA,EAC9B,CAACC,2BAAAA,CAA4B,CAAA,CAAE,WAAA,CAAY,CAAA,CAAA,EAAA,CAE3CC,mBAAAA,CAAO,KAAA,CAAMC,gBAAAA,CAAO,GAAA,CAAI,CAAA,wBAAA,EAA2BR,CAAkB,CAAA,CAAA,CAAG,CAAC,CAAA,CACzE,IAAA,CAAK,OAAA,CAAQ,IAAA,CAAK,CAAC,CAAA,CAAA,CAIrB,IAAMS,CAAAA,CAAoBP,CAAAA,CAAW,GAAA,CAAKQ,CAAAA,EAAMA,CAAAA,CAAE,KAAA,CAAM,GAAG,CAAA,CAAE,CAAC,CAAC,CAAA,CAGzDC,CAAAA,CAAiBC,iCAAAA,CAA4B,CAAA,CAAE,GAAA,CAAKC,CAAAA,EAAc,CAEtE,GAAM,CAAE,YAAA,CAAcC,CAAAA,CAAY,CAAC,CAAE,CAAA,CAAIC,iCAAAA,wBACvCC,CAAKhB,CAAoBa,CAAS,CACpC,CAAA,CAGM,CAAE,eAAA,CAAAI,CAAAA,CAAiB,iBAAA,CAAAC,CAAkB,CAAA,CAAIC,iCAAAA,CAC7CL,CACA,CACE,6BAAA,CAAAM,CAAAA,CACA,cAAA,CAAAC,CACF,CACF,CAAA,CAEA,MAAO,CACL,eAAA,CAAAJ,CAAAA,CACA,iBAAA,CAAAC,CAAAA,CACA,gBAAA,CAAkBL,CAAAA,CAAU,KAAA,CAAM,GAAG,CAAA,CAAE,CAAC,CAC1C,CACF,CAAC,CAAA,CAGKS,CAAAA,CAAiD,CAAC,CAAA,CACxDX,CAAAA,CAAe,OAAA,CACb,CAAC,CAAE,eAAA,CAAAM,CAAAA,CAAiB,iBAAA,CAAAC,CAAAA,CAAmB,gBAAA,CAAAK,CAAiB,CAAA,CAAA,EAAM,CACvC,CAAC,GAAGN,CAAAA,CAAiB,GAAGC,CAAiB,CAAA,CACjD,OAAA,CAASM,CAAAA,EAAa,CACjC,IAAMC,CAAAA,CAAUD,CAAAA,CAAS,YAAY,CAAA,EAAKA,CAAAA,CAAS,eAAA,CAE9CF,CAAAA,CAAkBG,CAAO,CAAA,EAAA,CAC5BH,CAAAA,CAAkBG,CAAO,CAAA,CAAI,CAAC,CAAA,CAAA,CAEhCH,CAAAA,CAAkBG,CAAO,CAAA,CAAG,IAAA,CAAKF,CAAgB,CAAA,CACjDD,CAAAA,CAAkBG,CAAO,CAAA,CAAI,CAAC,GAAG,IAAI,GAAA,CAAIH,CAAAA,CAAkBG,CAAO,CAAC,CAAC,CACtE,CAAC,CACH,CACF,CAAA,CAGA,IAAMC,CAAAA,CAAqB,CACzB,GAAG,IAAI,GAAA,CACLf,CAAAA,CACG,GAAA,CAAI,CAAC,CAAE,eAAA,CAAAM,CAAgB,CAAA,CAAA,EACtBA,CAAAA,CAAgB,GAAA,CACbU,CAAAA,EAASA,CAAAA,CAAK,YAAY,CAAA,EAAKA,CAAAA,CAAK,eACvC,CACF,CAAA,CACC,IAAA,CAAK,CACV,CACF,CAAA,CAGMC,CAAAA,CAAuBC,iCAAAA,CAEzB,GAAG,IAAI,GAAA,CACLlB,CAAAA,CACG,GAAA,CAAI,CAAC,CAAE,iBAAA,CAAAO,CAAkB,CAAA,CAAA,EACxBA,CAAAA,CAAkB,GAAA,CACfS,CAAAA,EAASA,CAAAA,CAAK,YAAY,CAAA,EAAKA,CAAAA,CAAK,eACvC,CACF,CAAA,CACC,IAAA,CAAK,CACV,CACF,CAAA,CACAD,CACF,CAAA,CAGMI,CAAAA,CAAsD,CAAC,CAAA,CAC7DnB,CAAAA,CAAe,OAAA,CAAQ,CAAC,CAAE,eAAA,CAAAM,CAAAA,CAAiB,iBAAA,CAAAC,CAAkB,CAAA,CAAA,EAAM,CAC5C,CAAC,GAAGD,CAAAA,CAAiB,GAAGC,CAAiB,CAAA,CACjD,OAAA,CAASM,CAAAA,EAAa,CACjC,IAAMC,CAAAA,CAAUD,CAAAA,CAAS,YAAY,CAAA,EAAKA,CAAAA,CAAS,eAAA,CAC7CO,CAAAA,iBAAgBP,CAAAA,mBAAS,UAAA,6BAAY,IAAA,mBACxCQ,CAAAA,EAASA,CAAAA,CAAK,GAAA,GAAQ,iBACzB,GAAA,CAEKF,CAAAA,CAAuBL,CAAO,CAAA,EAAA,CACjCK,CAAAA,CAAuBL,CAAO,CAAA,CAAI,CAAC,CAAA,CAAA,CAErCK,CAAAA,CAAuBL,CAAO,CAAA,CAAG,IAAA,CAAK,mBAAIM,CAAAA,6BAAe,QAAA,EAAU,CAAC,CAAE,CAAA,CACtED,CAAAA,CAAuBL,CAAO,CAAA,CAAI,CAChC,GAAG,IAAI,GAAA,CA
AIK,CAAAA,CAAuBL,CAAO,CAAC,CAC5C,CACF,CAAC,CACH,CAAC,CAAA,CAGD,IAAMQ,CAAAA,CAASC,kCAAAA,CAA4B/B,CAAcJ,CAAI,CAAA,CACvD,CAAE,cAAA,CAAAsB,CAAAA,CAAgB,6BAAA,CAAAD,CAA8B,CAAA,CACpD,MAAMe,kCAAAA,CAA4B,CAAA,CAG9BC,CAAAA,CAAY,CAAC,GAAGV,CAAAA,CAAoB,GAAGE,CAAoB,CAAA,CAAE,GAAA,CAChEH,CAAAA,EAAAA,CAAa,CACZ,KAAA,CAAOJ,CAAAA,CAAeI,CAAO,CAAA,CAC7B,GAAIL,CAAAA,CAA8BK,CAAO,CAAA,CACrC,CAAE,eAAA,CAAiBA,CAAQ,CAAA,CAC3B,CAAE,eAAA,CAAiB,eAAA,CAAiB,YAAA,CAAcA,CAAQ,CAAA,CAC9D,UAAA,CAAY,CACV,CACE,GAAA,CAAK,WAAA,CACL,MAAA,CAAQ,CAAC,SAAS,CACpB,CAAA,CACA,CACE,GAAA,CAAK,gBAAA,CACL,MAAA,CAAQI,iCAAAA,CACNP,CAAkBG,CAAO,CAAA,EAAK,CAAC,CAAA,CAC/BhB,CACF,CACF,CAAA,CACA,CACE,GAAA,CAAK,iBAAA,CACL,MAAA,CAAQqB,CAAAA,CAAuBL,CAAO,CAAA,EAAK,CAAC,CAC9C,CACF,CACF,CAAA,CACF,CAAA,CAGAlB,mBAAAA,CAAO,GAAA,CAAI,CAAA,gBAAA,EAAmB6B,CAAAA,CAAU,MAAM,CAAA,CAAA","file":"/home/runner/work/cli/cli/dist/impl-22JGFXXK.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport {\n fetchAndIndexCatalogs,\n buildTranscendGraphQLClient,\n} from '../../../lib/graphql';\nimport { join } from 'node:path';\nimport { difference } from 'lodash-es';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { dataFlowsToDataSilos } from '../../../lib/consent-manager/dataFlowsToDataSilos';\nimport { DataFlowInput } from '../../../codecs';\nimport { existsSync, lstatSync } from 'node:fs';\nimport { listFiles } from '../../../lib/api-keys';\nimport {\n readTranscendYaml,\n writeTranscendYaml,\n} from '../../../lib/readTranscendYaml';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface DeriveDataSilosFromDataFlowsCrossInstanceCommandFlags {\n auth: string;\n dataFlowsYmlFolder: string;\n output: string;\n ignoreYmls?: string[];\n transcendUrl: string;\n}\n\nexport async function deriveDataSilosFromDataFlowsCrossInstance(\n this: LocalContext,\n {\n auth,\n dataFlowsYmlFolder,\n output,\n ignoreYmls = [],\n transcendUrl,\n }: DeriveDataSilosFromDataFlowsCrossInstanceCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Ensure folder is passed\n if (\n !existsSync(dataFlowsYmlFolder) ||\n !lstatSync(dataFlowsYmlFolder).isDirectory()\n ) {\n logger.error(colors.red(`Folder does not exist: \"${dataFlowsYmlFolder}\"`));\n this.process.exit(1);\n }\n\n // Ignore the data flows in these yml files\n const instancesToIgnore = ignoreYmls.map((x) => x.split('.')[0]);\n\n // Map over each data flow yml file and convert to data silo configurations\n const dataSiloInputs = listFiles(dataFlowsYmlFolder).map((directory) => {\n // read in the data flows for a specific instance\n const { 'data-flows': dataFlows = [] } = readTranscendYaml(\n join(dataFlowsYmlFolder, directory),\n );\n\n // map the data flows to data silos\n const { adTechDataSilos, siteTechDataSilos } = dataFlowsToDataSilos(\n dataFlows as DataFlowInput[],\n {\n serviceToSupportedIntegration,\n serviceToTitle,\n },\n );\n\n return {\n adTechDataSilos,\n siteTechDataSilos,\n organizationName: directory.split('.')[0],\n };\n });\n\n // Mapping from service name to instances that have that service\n const serviceToInstance: { [k in string]: string[] } = {};\n dataSiloInputs.forEach(\n ({ adTechDataSilos, siteTechDataSilos, organizationName }) => {\n const allDataSilos = [...adTechDataSilos, ...siteTechDataSilos];\n allDataSilos.forEach((dataSilo) => {\n const service = dataSilo['outer-type'] || dataSilo.integrationName;\n // create mapping to instance\n if (!serviceToInstance[service]) 
{\n serviceToInstance[service] = [];\n }\n serviceToInstance[service]!.push(organizationName);\n serviceToInstance[service] = [...new Set(serviceToInstance[service])];\n });\n },\n );\n\n // List of ad tech integrations\n const adTechIntegrations = [\n ...new Set(\n dataSiloInputs\n .map(({ adTechDataSilos }) =>\n adTechDataSilos.map(\n (silo) => silo['outer-type'] || silo.integrationName,\n ),\n )\n .flat(),\n ),\n ];\n\n // List of site tech integrations\n const siteTechIntegrations = difference(\n [\n ...new Set(\n dataSiloInputs\n .map(({ siteTechDataSilos }) =>\n siteTechDataSilos.map(\n (silo) => silo['outer-type'] || silo.integrationName,\n ),\n )\n .flat(),\n ),\n ],\n adTechIntegrations,\n );\n\n // Mapping from service name to list of\n const serviceToFoundOnDomain: { [k in string]: string[] } = {};\n dataSiloInputs.forEach(({ adTechDataSilos, siteTechDataSilos }) => {\n const allDataSilos = [...adTechDataSilos, ...siteTechDataSilos];\n allDataSilos.forEach((dataSilo) => {\n const service = dataSilo['outer-type'] || dataSilo.integrationName;\n const foundOnDomain = dataSilo.attributes?.find(\n (attr) => attr.key === 'Found On Domain',\n );\n // create mapping to instance\n if (!serviceToFoundOnDomain[service]) {\n serviceToFoundOnDomain[service] = [];\n }\n serviceToFoundOnDomain[service]!.push(...(foundOnDomain?.values || []));\n serviceToFoundOnDomain[service] = [\n ...new Set(serviceToFoundOnDomain[service]),\n ];\n });\n });\n\n // Fetch all integrations in the catalog\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const { serviceToTitle, serviceToSupportedIntegration } =\n await fetchAndIndexCatalogs(client);\n\n // construct the aggregated data silo inputs\n const dataSilos = [...adTechIntegrations, ...siteTechIntegrations].map(\n (service) => ({\n title: serviceToTitle[service],\n ...(serviceToSupportedIntegration[service]\n ? { integrationName: service }\n : { integrationName: 'promptAPerson', 'outer-type': service }),\n attributes: [\n {\n key: 'Tech Type',\n values: ['Ad Tech'],\n },\n {\n key: 'Business Units',\n values: difference(\n serviceToInstance[service] || [],\n instancesToIgnore,\n ),\n },\n {\n key: 'Found On Domain',\n values: serviceToFoundOnDomain[service] || [],\n },\n ],\n }),\n );\n\n // Log output\n logger.log(`Total Services: ${dataSilos.length}`);\n logger.log(`Ad Tech Services: ${adTechIntegrations.length}`);\n logger.log(`Site Tech Services: ${siteTechIntegrations.length}`);\n\n // Write to yaml\n writeTranscendYaml(output, {\n 'data-silos': dataSilos,\n });\n}\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-DEQW3OGQ.cjs","../src/commands/inventory/derive-data-silos-from-data-flows-cross-instance/impl.ts"],"names":["deriveDataSilosFromDataFlowsCrossInstance","auth","dataFlowsYmlFolder","output","ignoreYmls","transcendUrl","doneInputValidation","existsSync","lstatSync","logger","colors","instancesToIgnore","x","dataSiloInputs","listFiles","directory","dataFlows","readTranscendYaml","join","adTechDataSilos","siteTechDataSilos","dataFlowsToDataSilos","serviceToSupportedIntegration","serviceToTitle","serviceToInstance","organizationName","dataSilo","service","adTechIntegrations","silo","siteTechIntegrations","difference_default","serviceToFoundOnDomain","foundOnDomain","attr","client","buildTranscendGraphQLClient","fetchAndIndexCatalogs","dataSilos"],"mappings":"AAAA,quBAAwC,wDAAgD,wDAAyC,gCAA6B,wDAAyC,wDAAkD,gCAA6B,wDAAyC,wDAAyC,gCAA6B,4BCKhX,gFAEF,wBAImB,MAgBtC,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,kBAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CAAa,CAAC,CAAA,CACd,YAAA,CAAAC,CACF,CAAA,CACe,CACfC,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAAA,CAInC,CAACC,4BAAAA,CAA6B,CAAA,EAC9B,CAACC,2BAAAA,CAA4B,CAAA,CAAE,WAAA,CAAY,CAAA,CAAA,EAAA,CAE3CC,mBAAAA,CAAO,KAAA,CAAMC,gBAAAA,CAAO,GAAA,CAAI,CAAA,wBAAA,EAA2BR,CAAkB,CAAA,CAAA,CAAG,CAAC,CAAA,CACzE,IAAA,CAAK,OAAA,CAAQ,IAAA,CAAK,CAAC,CAAA,CAAA,CAIrB,IAAMS,CAAAA,CAAoBP,CAAAA,CAAW,GAAA,CAAKQ,CAAAA,EAAMA,CAAAA,CAAE,KAAA,CAAM,GAAG,CAAA,CAAE,CAAC,CAAC,CAAA,CAGzDC,CAAAA,CAAiBC,iCAAAA,CAA4B,CAAA,CAAE,GAAA,CAAKC,CAAAA,EAAc,CAEtE,GAAM,CAAE,YAAA,CAAcC,CAAAA,CAAY,CAAC,CAAE,CAAA,CAAIC,iCAAAA,wBACvCC,CAAKhB,CAAoBa,CAAS,CACpC,CAAA,CAGM,CAAE,eAAA,CAAAI,CAAAA,CAAiB,iBAAA,CAAAC,CAAkB,CAAA,CAAIC,iCAAAA,CAC7CL,CACA,CACE,6BAAA,CAAAM,CAAAA,CACA,cAAA,CAAAC,CACF,CACF,CAAA,CAEA,MAAO,CACL,eAAA,CAAAJ,CAAAA,CACA,iBAAA,CAAAC,CAAAA,CACA,gBAAA,CAAkBL,CAAAA,CAAU,KAAA,CAAM,GAAG,CAAA,CAAE,CAAC,CAC1C,CACF,CAAC,CAAA,CAGKS,CAAAA,CAAiD,CAAC,CAAA,CACxDX,CAAAA,CAAe,OAAA,CACb,CAAC,CAAE,eAAA,CAAAM,CAAAA,CAAiB,iBAAA,CAAAC,CAAAA,CAAmB,gBAAA,CAAAK,CAAiB,CAAA,CAAA,EAAM,CACvC,CAAC,GAAGN,CAAAA,CAAiB,GAAGC,CAAiB,CAAA,CACjD,OAAA,CAASM,CAAAA,EAAa,CACjC,IAAMC,CAAAA,CAAUD,CAAAA,CAAS,YAAY,CAAA,EAAKA,CAAAA,CAAS,eAAA,CAE9CF,CAAAA,CAAkBG,CAAO,CAAA,EAAA,CAC5BH,CAAAA,CAAkBG,CAAO,CAAA,CAAI,CAAC,CAAA,CAAA,CAEhCH,CAAAA,CAAkBG,CAAO,CAAA,CAAG,IAAA,CAAKF,CAAgB,CAAA,CACjDD,CAAAA,CAAkBG,CAAO,CAAA,CAAI,CAAC,GAAG,IAAI,GAAA,CAAIH,CAAAA,CAAkBG,CAAO,CAAC,CAAC,CACtE,CAAC,CACH,CACF,CAAA,CAGA,IAAMC,CAAAA,CAAqB,CACzB,GAAG,IAAI,GAAA,CACLf,CAAAA,CACG,GAAA,CAAI,CAAC,CAAE,eAAA,CAAAM,CAAgB,CAAA,CAAA,EACtBA,CAAAA,CAAgB,GAAA,CACbU,CAAAA,EAASA,CAAAA,CAAK,YAAY,CAAA,EAAKA,CAAAA,CAAK,eACvC,CACF,CAAA,CACC,IAAA,CAAK,CACV,CACF,CAAA,CAGMC,CAAAA,CAAuBC,iCAAAA,CAEzB,GAAG,IAAI,GAAA,CACLlB,CAAAA,CACG,GAAA,CAAI,CAAC,CAAE,iBAAA,CAAAO,CAAkB,CAAA,CAAA,EACxBA,CAAAA,CAAkB,GAAA,CACfS,CAAAA,EAASA,CAAAA,CAAK,YAAY,CAAA,EAAKA,CAAAA,CAAK,eACvC,CACF,CAAA,CACC,IAAA,CAAK,CACV,CACF,CAAA,CACAD,CACF,CAAA,CAGMI,CAAAA,CAAsD,CAAC,CAAA,CAC7DnB,CAAAA,CAAe,OAAA,CAAQ,CAAC,CAAE,eAAA,CAAAM,CAAAA,CAAiB,iBAAA,CAAAC,CAAkB,CAAA,CAAA,EAAM,CAC5C,CAAC,GAAGD,CAAAA,CAAiB,GAAGC,CAAiB,CAAA,CACjD,OAAA,CAASM,CAAAA,EAAa,CACjC,IAAMC,CAAAA,CAAUD,CAAAA,CAAS,YAAY,CAAA,EAAKA,CAAAA,CAAS,eAAA,CAC7CO,CAAAA,iBAAgBP,CAAAA,mBAAS,UAAA,6BAAY,IAAA,mBACxCQ,CAAAA,EAASA,CAAAA,CAAK,GAAA,GAAQ,iBACzB,GAAA,CAEKF,CAAAA,CAAuBL,CAAO,CAAA,EAAA,CACjCK,CAAAA,CAAuBL,CAAO,CAAA,CAAI,CAAC,CAAA,CAAA,CAErCK,CAAAA,CAAuBL,CAAO,CAAA,CAAG,IAAA,CAAK,mBAAIM,CAAAA,6BAAe,QAAA,EAAU,CAAC,CAAE,CAAA,CACtED,CAAAA,CAAuBL,CAAO,CAAA,CAAI,CAChC,GAAG,IAAI,GAAA,CA
AIK,CAAAA,CAAuBL,CAAO,CAAC,CAC5C,CACF,CAAC,CACH,CAAC,CAAA,CAGD,IAAMQ,CAAAA,CAASC,kCAAAA,CAA4B/B,CAAcJ,CAAI,CAAA,CACvD,CAAE,cAAA,CAAAsB,CAAAA,CAAgB,6BAAA,CAAAD,CAA8B,CAAA,CACpD,MAAMe,kCAAAA,CAA4B,CAAA,CAG9BC,CAAAA,CAAY,CAAC,GAAGV,CAAAA,CAAoB,GAAGE,CAAoB,CAAA,CAAE,GAAA,CAChEH,CAAAA,EAAAA,CAAa,CACZ,KAAA,CAAOJ,CAAAA,CAAeI,CAAO,CAAA,CAC7B,GAAIL,CAAAA,CAA8BK,CAAO,CAAA,CACrC,CAAE,eAAA,CAAiBA,CAAQ,CAAA,CAC3B,CAAE,eAAA,CAAiB,eAAA,CAAiB,YAAA,CAAcA,CAAQ,CAAA,CAC9D,UAAA,CAAY,CACV,CACE,GAAA,CAAK,WAAA,CACL,MAAA,CAAQ,CAAC,SAAS,CACpB,CAAA,CACA,CACE,GAAA,CAAK,gBAAA,CACL,MAAA,CAAQI,iCAAAA,CACNP,CAAkBG,CAAO,CAAA,EAAK,CAAC,CAAA,CAC/BhB,CACF,CACF,CAAA,CACA,CACE,GAAA,CAAK,iBAAA,CACL,MAAA,CAAQqB,CAAAA,CAAuBL,CAAO,CAAA,EAAK,CAAC,CAC9C,CACF,CACF,CAAA,CACF,CAAA,CAGAlB,mBAAAA,CAAO,GAAA,CAAI,CAAA,gBAAA,EAAmB6B,CAAAA,CAAU,MAAM,CAAA,CAAA","file":"/home/runner/work/cli/cli/dist/impl-DEQW3OGQ.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport {\n fetchAndIndexCatalogs,\n buildTranscendGraphQLClient,\n} from '../../../lib/graphql';\nimport { join } from 'node:path';\nimport { difference } from 'lodash-es';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { dataFlowsToDataSilos } from '../../../lib/consent-manager/dataFlowsToDataSilos';\nimport { DataFlowInput } from '../../../codecs';\nimport { existsSync, lstatSync } from 'node:fs';\nimport { listFiles } from '../../../lib/api-keys';\nimport {\n readTranscendYaml,\n writeTranscendYaml,\n} from '../../../lib/readTranscendYaml';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface DeriveDataSilosFromDataFlowsCrossInstanceCommandFlags {\n auth: string;\n dataFlowsYmlFolder: string;\n output: string;\n ignoreYmls?: string[];\n transcendUrl: string;\n}\n\nexport async function deriveDataSilosFromDataFlowsCrossInstance(\n this: LocalContext,\n {\n auth,\n dataFlowsYmlFolder,\n output,\n ignoreYmls = [],\n transcendUrl,\n }: DeriveDataSilosFromDataFlowsCrossInstanceCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Ensure folder is passed\n if (\n !existsSync(dataFlowsYmlFolder) ||\n !lstatSync(dataFlowsYmlFolder).isDirectory()\n ) {\n logger.error(colors.red(`Folder does not exist: \"${dataFlowsYmlFolder}\"`));\n this.process.exit(1);\n }\n\n // Ignore the data flows in these yml files\n const instancesToIgnore = ignoreYmls.map((x) => x.split('.')[0]);\n\n // Map over each data flow yml file and convert to data silo configurations\n const dataSiloInputs = listFiles(dataFlowsYmlFolder).map((directory) => {\n // read in the data flows for a specific instance\n const { 'data-flows': dataFlows = [] } = readTranscendYaml(\n join(dataFlowsYmlFolder, directory),\n );\n\n // map the data flows to data silos\n const { adTechDataSilos, siteTechDataSilos } = dataFlowsToDataSilos(\n dataFlows as DataFlowInput[],\n {\n serviceToSupportedIntegration,\n serviceToTitle,\n },\n );\n\n return {\n adTechDataSilos,\n siteTechDataSilos,\n organizationName: directory.split('.')[0],\n };\n });\n\n // Mapping from service name to instances that have that service\n const serviceToInstance: { [k in string]: string[] } = {};\n dataSiloInputs.forEach(\n ({ adTechDataSilos, siteTechDataSilos, organizationName }) => {\n const allDataSilos = [...adTechDataSilos, ...siteTechDataSilos];\n allDataSilos.forEach((dataSilo) => {\n const service = dataSilo['outer-type'] || dataSilo.integrationName;\n // create mapping to instance\n if (!serviceToInstance[service]) 
{\n serviceToInstance[service] = [];\n }\n serviceToInstance[service]!.push(organizationName);\n serviceToInstance[service] = [...new Set(serviceToInstance[service])];\n });\n },\n );\n\n // List of ad tech integrations\n const adTechIntegrations = [\n ...new Set(\n dataSiloInputs\n .map(({ adTechDataSilos }) =>\n adTechDataSilos.map(\n (silo) => silo['outer-type'] || silo.integrationName,\n ),\n )\n .flat(),\n ),\n ];\n\n // List of site tech integrations\n const siteTechIntegrations = difference(\n [\n ...new Set(\n dataSiloInputs\n .map(({ siteTechDataSilos }) =>\n siteTechDataSilos.map(\n (silo) => silo['outer-type'] || silo.integrationName,\n ),\n )\n .flat(),\n ),\n ],\n adTechIntegrations,\n );\n\n // Mapping from service name to list of\n const serviceToFoundOnDomain: { [k in string]: string[] } = {};\n dataSiloInputs.forEach(({ adTechDataSilos, siteTechDataSilos }) => {\n const allDataSilos = [...adTechDataSilos, ...siteTechDataSilos];\n allDataSilos.forEach((dataSilo) => {\n const service = dataSilo['outer-type'] || dataSilo.integrationName;\n const foundOnDomain = dataSilo.attributes?.find(\n (attr) => attr.key === 'Found On Domain',\n );\n // create mapping to instance\n if (!serviceToFoundOnDomain[service]) {\n serviceToFoundOnDomain[service] = [];\n }\n serviceToFoundOnDomain[service]!.push(...(foundOnDomain?.values || []));\n serviceToFoundOnDomain[service] = [\n ...new Set(serviceToFoundOnDomain[service]),\n ];\n });\n });\n\n // Fetch all integrations in the catalog\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const { serviceToTitle, serviceToSupportedIntegration } =\n await fetchAndIndexCatalogs(client);\n\n // construct the aggregated data silo inputs\n const dataSilos = [...adTechIntegrations, ...siteTechIntegrations].map(\n (service) => ({\n title: serviceToTitle[service],\n ...(serviceToSupportedIntegration[service]\n ? { integrationName: service }\n : { integrationName: 'promptAPerson', 'outer-type': service }),\n attributes: [\n {\n key: 'Tech Type',\n values: ['Ad Tech'],\n },\n {\n key: 'Business Units',\n values: difference(\n serviceToInstance[service] || [],\n instancesToIgnore,\n ),\n },\n {\n key: 'Found On Domain',\n values: serviceToFoundOnDomain[service] || [],\n },\n ],\n }),\n );\n\n // Log output\n logger.log(`Total Services: ${dataSilos.length}`);\n logger.log(`Ad Tech Services: ${adTechIntegrations.length}`);\n logger.log(`Site Tech Services: ${siteTechIntegrations.length}`);\n\n // Write to yaml\n writeTranscendYaml(output, {\n 'data-silos': dataSilos,\n });\n}\n"]}
@@ -1,2 +1,2 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkFGBTRT4Jcjs = require('./chunk-FGBTRT4J.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkGLXV5XPPcjs = require('./chunk-GLXV5XPP.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');require('./chunk-HJTYLFGJ.cjs');require('./chunk-Q7I37FJV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _iots = require('io-ts'); var t = _interopRequireWildcard(_iots);var c=t.type({"Request Id":t.string});async function q({auth:i,dataSiloId:m,file:o,transcendUrl:n}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Reading "${o}" from disk`));let d=_chunkGLXV5XPPcjs.rc.call(void 0, o,c);await _chunkFGBTRT4Jcjs.f.call(void 0, {requestIds:d.map(l=>l["Request Id"]),transcendUrl:n,auth:i,dataSiloId:m})}exports.markRequestDataSilosCompleted = q;
- //# sourceMappingURL=impl-IQVMZJD7.cjs.map
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkKOXU6674cjs = require('./chunk-KOXU6674.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkK65NYGJMcjs = require('./chunk-K65NYGJM.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');require('./chunk-FTXFBDFW.cjs');require('./chunk-Q7I37FJV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _iots = require('io-ts'); var t = _interopRequireWildcard(_iots);var c=t.type({"Request Id":t.string});async function q({auth:i,dataSiloId:m,file:o,transcendUrl:n}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Reading "${o}" from disk`));let d=_chunkK65NYGJMcjs.rc.call(void 0, o,c);await _chunkKOXU6674cjs.f.call(void 0, {requestIds:d.map(l=>l["Request Id"]),transcendUrl:n,auth:i,dataSiloId:m})}exports.markRequestDataSilosCompleted = q;
+ //# sourceMappingURL=impl-F2UNSOIO.cjs.map
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-IQVMZJD7.cjs","../src/commands/request/system/mark-request-data-silos-completed/impl.ts"],"names":["RequestIdRow","markRequestDataSilosCompleted","auth","dataSiloId","file","transcendUrl","doneInputValidation","logger","colors","activeResults","readCsv","markRequestDataSiloIdsCompleted","request"],"mappings":"AAAA,mfAAwC,wDAAyC,wDAA0C,gCAA6B,wDAAyC,gCAA6B,gCAA6B,gFCCxO,qEACA,IAObA,CAAAA,CAAiB,CAAA,CAAA,IAAA,CAAK,CAC1B,YAAA,CAAgB,CAAA,CAAA,MAClB,CAAC,CAAA,CASD,MAAA,SAAsBC,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CACF,CAAA,CACe,CACfC,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAErCC,mBAAAA,CAAO,IAAA,CAAKC,gBAAAA,CAAO,OAAA,CAAQ,CAAA,SAAA,EAAYJ,CAAI,CAAA,WAAA,CAAa,CAAC,CAAA,CACzD,IAAMK,CAAAA,CAAgBC,kCAAAA,CAAQN,CAAMJ,CAAY,CAAA,CAEhD,MAAMW,iCAAAA,CACJ,UAAA,CAAYF,CAAAA,CAAc,GAAA,CAAKG,CAAAA,EAAYA,CAAAA,CAAQ,YAAY,CAAC,CAAA,CAChE,YAAA,CAAAP,CAAAA,CACA,IAAA,CAAAH,CAAAA,CACA,UAAA,CAAAC,CACF,CAAC,CACH,CAAA,0CAAA","file":"/home/runner/work/cli/cli/dist/impl-IQVMZJD7.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../../context';\nimport colors from 'colors';\nimport * as t from 'io-ts';\n\nimport { logger } from '../../../../logger';\nimport { markRequestDataSiloIdsCompleted } from '../../../../lib/cron';\nimport { readCsv } from '../../../../lib/requests';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\n\nconst RequestIdRow = t.type({\n 'Request Id': t.string,\n});\n\nexport interface MarkRequestDataSilosCompletedCommandFlags {\n auth: string;\n dataSiloId: string;\n file: string;\n transcendUrl: string;\n}\n\nexport async function markRequestDataSilosCompleted(\n this: LocalContext,\n {\n auth,\n dataSiloId,\n file,\n transcendUrl,\n }: MarkRequestDataSilosCompletedCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n logger.info(colors.magenta(`Reading \"${file}\" from disk`));\n const activeResults = readCsv(file, RequestIdRow);\n\n await markRequestDataSiloIdsCompleted({\n requestIds: activeResults.map((request) => request['Request Id']),\n transcendUrl,\n auth,\n dataSiloId,\n });\n}\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-F2UNSOIO.cjs","../src/commands/request/system/mark-request-data-silos-completed/impl.ts"],"names":["RequestIdRow","markRequestDataSilosCompleted","auth","dataSiloId","file","transcendUrl","doneInputValidation","logger","colors","activeResults","readCsv","markRequestDataSiloIdsCompleted","request"],"mappings":"AAAA,mfAAwC,wDAAyC,wDAA0C,gCAA6B,wDAAyC,gCAA6B,gCAA6B,gFCCxO,qEACA,IAObA,CAAAA,CAAiB,CAAA,CAAA,IAAA,CAAK,CAC1B,YAAA,CAAgB,CAAA,CAAA,MAClB,CAAC,CAAA,CASD,MAAA,SAAsBC,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CACF,CAAA,CACe,CACfC,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAErCC,mBAAAA,CAAO,IAAA,CAAKC,gBAAAA,CAAO,OAAA,CAAQ,CAAA,SAAA,EAAYJ,CAAI,CAAA,WAAA,CAAa,CAAC,CAAA,CACzD,IAAMK,CAAAA,CAAgBC,kCAAAA,CAAQN,CAAMJ,CAAY,CAAA,CAEhD,MAAMW,iCAAAA,CACJ,UAAA,CAAYF,CAAAA,CAAc,GAAA,CAAKG,CAAAA,EAAYA,CAAAA,CAAQ,YAAY,CAAC,CAAA,CAChE,YAAA,CAAAP,CAAAA,CACA,IAAA,CAAAH,CAAAA,CACA,UAAA,CAAAC,CACF,CAAC,CACH,CAAA,0CAAA","file":"/home/runner/work/cli/cli/dist/impl-F2UNSOIO.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../../context';\nimport colors from 'colors';\nimport * as t from 'io-ts';\n\nimport { logger } from '../../../../logger';\nimport { markRequestDataSiloIdsCompleted } from '../../../../lib/cron';\nimport { readCsv } from '../../../../lib/requests';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\n\nconst RequestIdRow = t.type({\n 'Request Id': t.string,\n});\n\nexport interface MarkRequestDataSilosCompletedCommandFlags {\n auth: string;\n dataSiloId: string;\n file: string;\n transcendUrl: string;\n}\n\nexport async function markRequestDataSilosCompleted(\n this: LocalContext,\n {\n auth,\n dataSiloId,\n file,\n transcendUrl,\n }: MarkRequestDataSilosCompletedCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n logger.info(colors.magenta(`Reading \"${file}\" from disk`));\n const activeResults = readCsv(file, RequestIdRow);\n\n await markRequestDataSiloIdsCompleted({\n requestIds: activeResults.map((request) => request['Request Id']),\n transcendUrl,\n auth,\n dataSiloId,\n });\n}\n"]}
@@ -1,2 +1,2 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunkFGBTRT4Jcjs = require('./chunk-FGBTRT4J.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');require('./chunk-GLXV5XPP.cjs');require('./chunk-LCDYXJN6.cjs');require('./chunk-ZUNVPK23.cjs');require('./chunk-HJTYLFGJ.cjs');require('./chunk-Q7I37FJV.cjs');async function d({file:i,transcendUrl:o,auth:r,sombraAuth:n,dataSiloId:s}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit),await _chunkFGBTRT4Jcjs.e.call(void 0, {file:i,transcendUrl:o,auth:r,sombraAuth:n,dataSiloId:s})}exports.markIdentifiersCompleted = d;
- //# sourceMappingURL=impl-M6JBOLVE.cjs.map
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunkKOXU6674cjs = require('./chunk-KOXU6674.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');require('./chunk-K65NYGJM.cjs');require('./chunk-LCDYXJN6.cjs');require('./chunk-ZUNVPK23.cjs');require('./chunk-FTXFBDFW.cjs');require('./chunk-Q7I37FJV.cjs');async function d({file:i,transcendUrl:o,auth:r,sombraAuth:n,dataSiloId:s}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit),await _chunkKOXU6674cjs.e.call(void 0, {file:i,transcendUrl:o,auth:r,sombraAuth:n,dataSiloId:s})}exports.markIdentifiersCompleted = d;
+ //# sourceMappingURL=impl-H3J7N77A.cjs.map
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-M6JBOLVE.cjs","../src/commands/request/cron/mark-identifiers-completed/impl.ts"],"names":["markIdentifiersCompleted","file","transcendUrl","auth","sombraAuth","dataSiloId","doneInputValidation","pushCronIdentifiersFromCsv"],"mappings":"AAAA,iIAAwC,wDAAyC,gCAA6B,gCAA6B,gCAA6B,gCAA6B,gCAA6B,MCYlO,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CACF,CAAA,CACe,CACfC,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAErC,MAAMC,iCAAAA,CACJ,IAAA,CAAAN,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CACF,CAAC,CACH,CAAA,qCAAA","file":"/home/runner/work/cli/cli/dist/impl-M6JBOLVE.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../../context';\nimport { pushCronIdentifiersFromCsv } from '../../../../lib/cron';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\n\nexport interface MarkIdentifiersCompletedCommandFlags {\n file: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n dataSiloId: string;\n}\n\nexport async function markIdentifiersCompleted(\n this: LocalContext,\n {\n file,\n transcendUrl,\n auth,\n sombraAuth,\n dataSiloId,\n }: MarkIdentifiersCompletedCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await pushCronIdentifiersFromCsv({\n file,\n transcendUrl,\n auth,\n sombraAuth,\n dataSiloId,\n });\n}\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-H3J7N77A.cjs","../src/commands/request/cron/mark-identifiers-completed/impl.ts"],"names":["markIdentifiersCompleted","file","transcendUrl","auth","sombraAuth","dataSiloId","doneInputValidation","pushCronIdentifiersFromCsv"],"mappings":"AAAA,iIAAwC,wDAAyC,gCAA6B,gCAA6B,gCAA6B,gCAA6B,gCAA6B,MCYlO,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CACF,CAAA,CACe,CACfC,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAErC,MAAMC,iCAAAA,CACJ,IAAA,CAAAN,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CACF,CAAC,CACH,CAAA,qCAAA","file":"/home/runner/work/cli/cli/dist/impl-H3J7N77A.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../../context';\nimport { pushCronIdentifiersFromCsv } from '../../../../lib/cron';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\n\nexport interface MarkIdentifiersCompletedCommandFlags {\n file: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n dataSiloId: string;\n}\n\nexport async function markIdentifiersCompleted(\n this: LocalContext,\n {\n file,\n transcendUrl,\n auth,\n sombraAuth,\n dataSiloId,\n }: MarkIdentifiersCompletedCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await pushCronIdentifiersFromCsv({\n file,\n transcendUrl,\n auth,\n sombraAuth,\n dataSiloId,\n });\n}\n"]}
@@ -1,2 +1,2 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkGDPRENOQcjs = require('./chunk-GDPRENOQ.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkGLXV5XPPcjs = require('./chunk-GLXV5XPP.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkHJTYLFGJcjs = require('./chunk-HJTYLFGJ.cjs');require('./chunk-Q7I37FJV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function v({auth:y,file:a,transcendUrl:C,dataSiloIds:f,includeAttributes:D,includeGuessedCategories:P,parentCategories:b,subCategories:h=[]}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);try{let o=_chunkGLXV5XPPcjs.wc.call(void 0, C,y),$=await _chunkGDPRENOQcjs.a.call(void 0, o,{dataSiloIds:f,includeGuessedCategories:P,parentCategories:b,includeAttributes:D,subCategories:h});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing datapoints to file "${a}"...`));let s=[],j=$.map(t=>{let n={"Property ID":t.id,"Data Silo":t.dataSilo.title,Object:t.dataPoint.name,"Object Path":t.dataPoint.path.join("."),Property:t.name,"Property Description":t.description,"Data Categories":t.categories.map(e=>`${e.category}:${e.name}`).join(", "),"Guessed Category":_optionalChain([t, 'access', _ => _.pendingCategoryGuesses, 'optionalAccess', _2 => _2[0]])?`${t.pendingCategoryGuesses[0].category.category}:${t.pendingCategoryGuesses[0].category.name}`:"","Processing Purposes":t.purposes.map(e=>`${e.purpose}:${e.name}`).join(", "),...Object.entries(_chunkHJTYLFGJcjs.d.call(void 0, t.attributeValues||[],({attributeKey:e})=>e.name)).reduce((e,[x,A])=>(e[x]=A.map(G=>G.name).join(","),e),{})};return s=_chunkHJTYLFGJcjs.j.call(void 0, [...s,...Object.keys(n)]),n});await _chunkGLXV5XPPcjs.og.call(void 0, a,j,s)}catch(o){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error occurred syncing the datapoints: ${o.message}`)),this.process.exit(1)}_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully synced datapoints to disk at ${a}! View at ${_chunkHJTYLFGJcjs.q}`))}exports.pullDatapoints = v;
- //# sourceMappingURL=impl-YM2OKSJ4.cjs.map
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkL32BTN2Mcjs = require('./chunk-L32BTN2M.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkK65NYGJMcjs = require('./chunk-K65NYGJM.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkFTXFBDFWcjs = require('./chunk-FTXFBDFW.cjs');require('./chunk-Q7I37FJV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function v({auth:y,file:a,transcendUrl:C,dataSiloIds:f,includeAttributes:D,includeGuessedCategories:P,parentCategories:b,subCategories:h=[]}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);try{let o=_chunkK65NYGJMcjs.wc.call(void 0, C,y),$=await _chunkL32BTN2Mcjs.a.call(void 0, o,{dataSiloIds:f,includeGuessedCategories:P,parentCategories:b,includeAttributes:D,subCategories:h});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing datapoints to file "${a}"...`));let s=[],j=$.map(t=>{let n={"Property ID":t.id,"Data Silo":t.dataSilo.title,Object:t.dataPoint.name,"Object Path":t.dataPoint.path.join("."),Property:t.name,"Property Description":t.description,"Data Categories":t.categories.map(e=>`${e.category}:${e.name}`).join(", "),"Guessed Category":_optionalChain([t, 'access', _ => _.pendingCategoryGuesses, 'optionalAccess', _2 => _2[0]])?`${t.pendingCategoryGuesses[0].category.category}:${t.pendingCategoryGuesses[0].category.name}`:"","Processing Purposes":t.purposes.map(e=>`${e.purpose}:${e.name}`).join(", "),...Object.entries(_chunkFTXFBDFWcjs.d.call(void 0, t.attributeValues||[],({attributeKey:e})=>e.name)).reduce((e,[x,A])=>(e[x]=A.map(G=>G.name).join(","),e),{})};return s=_chunkFTXFBDFWcjs.j.call(void 0, [...s,...Object.keys(n)]),n});await _chunkK65NYGJMcjs.qg.call(void 0, a,j,s)}catch(o){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error occurred syncing the datapoints: ${o.message}`)),this.process.exit(1)}_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully synced datapoints to disk at ${a}! View at ${_chunkFTXFBDFWcjs.q}`))}exports.pullDatapoints = v;
+ //# sourceMappingURL=impl-HZJIVXWT.cjs.map
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-YM2OKSJ4.cjs","../src/commands/inventory/pull-datapoints/impl.ts"],"names":["pullDatapoints","auth","file","transcendUrl","dataSiloIds","includeAttributes","includeGuessedCategories","parentCategories","subCategories","doneInputValidation","client","buildTranscendGraphQLClient","dataPoints","pullAllDatapoints","logger","colors","headers","inputs","point","result","category"],"mappings":"AAAA,quBAAwC,wDAAyC,wDAAkD,gCAA6B,wDAAyC,wDAAuD,gCAA6B,gFCI1Q,MAmBnB,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CAAAA,CACA,iBAAA,CAAAC,CAAAA,CACA,wBAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,aAAA,CAAAC,CAAAA,CAAgB,CAAC,CACnB,CAAA,CACe,CACfC,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAErC,GAAI,CAEF,IAAMC,CAAAA,CAASC,kCAAAA,CAA4BR,CAAcF,CAAI,CAAA,CAEvDW,CAAAA,CAAa,MAAMC,iCAAAA,CAAkBH,CAAQ,CACjD,WAAA,CAAAN,CAAAA,CACA,wBAAA,CAAAE,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,iBAAA,CAAAF,CAAAA,CACA,aAAA,CAAAG,CACF,CAAC,CAAA,CAEDM,mBAAAA,CAAO,IAAA,CAAKC,gBAAAA,CAAO,OAAA,CAAQ,CAAA,4BAAA,EAA+Bb,CAAI,CAAA,IAAA,CAAM,CAAC,CAAA,CACrE,IAAIc,CAAAA,CAAoB,CAAC,CAAA,CACnBC,CAAAA,CAASL,CAAAA,CAAW,GAAA,CAAKM,CAAAA,EAAU,CACvC,IAAMC,CAAAA,CAAS,CACb,aAAA,CAAeD,CAAAA,CAAM,EAAA,CACrB,WAAA,CAAaA,CAAAA,CAAM,QAAA,CAAS,KAAA,CAC5B,MAAA,CAAQA,CAAAA,CAAM,SAAA,CAAU,IAAA,CACxB,aAAA,CAAeA,CAAAA,CAAM,SAAA,CAAU,IAAA,CAAK,IAAA,CAAK,GAAG,CAAA,CAC5C,QAAA,CAAUA,CAAAA,CAAM,IAAA,CAChB,sBAAA,CAAwBA,CAAAA,CAAM,WAAA,CAC9B,iBAAA,CAAmBA,CAAAA,CAAM,UAAA,CACtB,GAAA,CAAKE,CAAAA,EAAa,CAAA,EAAA","file":"/home/runner/work/cli/cli/dist/impl-YM2OKSJ4.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport { uniq, groupBy } from 'lodash-es';\n\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { buildTranscendGraphQLClient } from '../../../lib/graphql';\nimport { ADMIN_DASH_DATAPOINTS } from '../../../constants';\nimport { pullAllDatapoints } from '../../../lib/data-inventory';\nimport { DataCategoryType } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { writeLargeCsv } from '../../../lib/helpers';\n\nexport interface PullDatapointsCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n includeAttributes: boolean;\n includeGuessedCategories: boolean;\n parentCategories?: DataCategoryType[];\n subCategories?: string[];\n}\n\nexport async function pullDatapoints(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n dataSiloIds,\n includeAttributes,\n includeGuessedCategories,\n parentCategories,\n subCategories = [],\n }: PullDatapointsCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const dataPoints = await pullAllDatapoints(client, {\n dataSiloIds,\n includeGuessedCategories,\n parentCategories,\n includeAttributes,\n subCategories, // TODO: https://transcend.height.app/T-40482 - do by name not ID\n });\n\n logger.info(colors.magenta(`Writing datapoints to file \"${file}\"...`));\n let headers: string[] = [];\n const inputs = dataPoints.map((point) => {\n const result = {\n 'Property ID': point.id,\n 'Data Silo': point.dataSilo.title,\n Object: point.dataPoint.name,\n 'Object Path': point.dataPoint.path.join('.'),\n Property: point.name,\n 'Property Description': point.description,\n 'Data Categories': point.categories\n .map((category) 
=> `${category.category}:${category.name}`)\n .join(', '),\n 'Guessed Category': point.pendingCategoryGuesses?.[0]\n ? `${point.pendingCategoryGuesses![0]!.category.category}:${\n point.pendingCategoryGuesses![0]!.category.name\n }`\n : '',\n 'Processing Purposes': point.purposes\n .map((purpose) => `${purpose.purpose}:${purpose.name}`)\n .join(', '),\n ...Object.entries(\n groupBy(\n point.attributeValues || [],\n ({ attributeKey }) => attributeKey.name,\n ),\n ).reduce((acc, [key, values]) => {\n acc[key] = values.map((value) => value.name).join(',');\n return acc;\n }, {} as Record<string, string>),\n };\n headers = uniq([...headers, ...Object.keys(result)]);\n return result;\n });\n await writeLargeCsv(file, inputs, headers);\n } catch (err) {\n logger.error(\n colors.red(`An error occurred syncing the datapoints: ${err.message}`),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced datapoints to disk at ${file}! View at ${ADMIN_DASH_DATAPOINTS}`,\n ),\n );\n}\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-HZJIVXWT.cjs","../src/commands/inventory/pull-datapoints/impl.ts"],"names":["pullDatapoints","auth","file","transcendUrl","dataSiloIds","includeAttributes","includeGuessedCategories","parentCategories","subCategories","doneInputValidation","client","buildTranscendGraphQLClient","dataPoints","pullAllDatapoints","logger","colors","headers","inputs","point","result","category"],"mappings":"AAAA,quBAAwC,wDAAyC,wDAAkD,gCAA6B,wDAAyC,wDAAuD,gCAA6B,gFCI1Q,MAmBnB,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CAAAA,CACA,iBAAA,CAAAC,CAAAA,CACA,wBAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,aAAA,CAAAC,CAAAA,CAAgB,CAAC,CACnB,CAAA,CACe,CACfC,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAErC,GAAI,CAEF,IAAMC,CAAAA,CAASC,kCAAAA,CAA4BR,CAAcF,CAAI,CAAA,CAEvDW,CAAAA,CAAa,MAAMC,iCAAAA,CAAkBH,CAAQ,CACjD,WAAA,CAAAN,CAAAA,CACA,wBAAA,CAAAE,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,iBAAA,CAAAF,CAAAA,CACA,aAAA,CAAAG,CACF,CAAC,CAAA,CAEDM,mBAAAA,CAAO,IAAA,CAAKC,gBAAAA,CAAO,OAAA,CAAQ,CAAA,4BAAA,EAA+Bb,CAAI,CAAA,IAAA,CAAM,CAAC,CAAA,CACrE,IAAIc,CAAAA,CAAoB,CAAC,CAAA,CACnBC,CAAAA,CAASL,CAAAA,CAAW,GAAA,CAAKM,CAAAA,EAAU,CACvC,IAAMC,CAAAA,CAAS,CACb,aAAA,CAAeD,CAAAA,CAAM,EAAA,CACrB,WAAA,CAAaA,CAAAA,CAAM,QAAA,CAAS,KAAA,CAC5B,MAAA,CAAQA,CAAAA,CAAM,SAAA,CAAU,IAAA,CACxB,aAAA,CAAeA,CAAAA,CAAM,SAAA,CAAU,IAAA,CAAK,IAAA,CAAK,GAAG,CAAA,CAC5C,QAAA,CAAUA,CAAAA,CAAM,IAAA,CAChB,sBAAA,CAAwBA,CAAAA,CAAM,WAAA,CAC9B,iBAAA,CAAmBA,CAAAA,CAAM,UAAA,CACtB,GAAA,CAAKE,CAAAA,EAAa,CAAA,EAAA","file":"/home/runner/work/cli/cli/dist/impl-HZJIVXWT.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport { uniq, groupBy } from 'lodash-es';\n\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { buildTranscendGraphQLClient } from '../../../lib/graphql';\nimport { ADMIN_DASH_DATAPOINTS } from '../../../constants';\nimport { pullAllDatapoints } from '../../../lib/data-inventory';\nimport { DataCategoryType } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { writeLargeCsv } from '../../../lib/helpers';\n\nexport interface PullDatapointsCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n includeAttributes: boolean;\n includeGuessedCategories: boolean;\n parentCategories?: DataCategoryType[];\n subCategories?: string[];\n}\n\nexport async function pullDatapoints(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n dataSiloIds,\n includeAttributes,\n includeGuessedCategories,\n parentCategories,\n subCategories = [],\n }: PullDatapointsCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const dataPoints = await pullAllDatapoints(client, {\n dataSiloIds,\n includeGuessedCategories,\n parentCategories,\n includeAttributes,\n subCategories, // TODO: https://transcend.height.app/T-40482 - do by name not ID\n });\n\n logger.info(colors.magenta(`Writing datapoints to file \"${file}\"...`));\n let headers: string[] = [];\n const inputs = dataPoints.map((point) => {\n const result = {\n 'Property ID': point.id,\n 'Data Silo': point.dataSilo.title,\n Object: point.dataPoint.name,\n 'Object Path': point.dataPoint.path.join('.'),\n Property: point.name,\n 'Property Description': point.description,\n 'Data Categories': point.categories\n .map((category) 
=> `${category.category}:${category.name}`)\n .join(', '),\n 'Guessed Category': point.pendingCategoryGuesses?.[0]\n ? `${point.pendingCategoryGuesses![0]!.category.category}:${\n point.pendingCategoryGuesses![0]!.category.name\n }`\n : '',\n 'Processing Purposes': point.purposes\n .map((purpose) => `${purpose.purpose}:${purpose.name}`)\n .join(', '),\n ...Object.entries(\n groupBy(\n point.attributeValues || [],\n ({ attributeKey }) => attributeKey.name,\n ),\n ).reduce((acc, [key, values]) => {\n acc[key] = values.map((value) => value.name).join(',');\n return acc;\n }, {} as Record<string, string>),\n };\n headers = uniq([...headers, ...Object.keys(result)]);\n return result;\n });\n await writeLargeCsv(file, inputs, headers);\n } catch (err) {\n logger.error(\n colors.red(`An error occurred syncing the datapoints: ${err.message}`),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced datapoints to disk at ${file}! View at ${ADMIN_DASH_DATAPOINTS}`,\n ),\n );\n}\n"]}
@@ -1,2 +1,2 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkGDPRENOQcjs = require('./chunk-GDPRENOQ.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkGLXV5XPPcjs = require('./chunk-GLXV5XPP.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkHJTYLFGJcjs = require('./chunk-HJTYLFGJ.cjs');require('./chunk-Q7I37FJV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function j({auth:p,file:o,transcendUrl:f,dataSiloIds:g,subCategories:C,status:S,includeEncryptedSnippets:a}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);try{let i=_chunkGLXV5XPPcjs.wc.call(void 0, f,p),b=await _chunkGDPRENOQcjs.b.call(void 0, i,{dataSiloIds:g,subCategories:C,status:S,includeEncryptedSnippets:a});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing unstructured discovery files to file "${o}"...`));let s=[],h=b.map(t=>{let n={"Entry ID":t.id,"Data Silo ID":t.dataSiloId,"Object Path ID":t.scannedObjectPathId,"Object ID":t.scannedObjectId,...a?{Entry:t.name,"Context Snippet":t.contextSnippet}:{},"Data Category":`${t.dataSubCategory.category}:${t.dataSubCategory.name}`,"Classification Status":t.status,"Confidence Score":t.confidence,"Classification Method":t.classificationMethod,"Classifier Version":t.classifierVersion};return s=_chunkHJTYLFGJcjs.j.call(void 0, [...s,...Object.keys(n)]),n});await _chunkGLXV5XPPcjs.og.call(void 0, o,h,s)}catch(i){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error occurred syncing the unstructured discovery files: ${i.message}`)),this.process.exit(1)}_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully synced unstructured discovery files to disk at ${o}!`))}exports.pullUnstructuredDiscoveryFiles = j;
- //# sourceMappingURL=impl-DVNUPZK7.cjs.map
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkL32BTN2Mcjs = require('./chunk-L32BTN2M.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkK65NYGJMcjs = require('./chunk-K65NYGJM.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkFTXFBDFWcjs = require('./chunk-FTXFBDFW.cjs');require('./chunk-Q7I37FJV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function j({auth:p,file:o,transcendUrl:f,dataSiloIds:g,subCategories:C,status:S,includeEncryptedSnippets:a}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);try{let i=_chunkK65NYGJMcjs.wc.call(void 0, f,p),b=await _chunkL32BTN2Mcjs.b.call(void 0, i,{dataSiloIds:g,subCategories:C,status:S,includeEncryptedSnippets:a});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing unstructured discovery files to file "${o}"...`));let s=[],h=b.map(t=>{let n={"Entry ID":t.id,"Data Silo ID":t.dataSiloId,"Object Path ID":t.scannedObjectPathId,"Object ID":t.scannedObjectId,...a?{Entry:t.name,"Context Snippet":t.contextSnippet}:{},"Data Category":`${t.dataSubCategory.category}:${t.dataSubCategory.name}`,"Classification Status":t.status,"Confidence Score":t.confidence,"Classification Method":t.classificationMethod,"Classifier Version":t.classifierVersion};return s=_chunkFTXFBDFWcjs.j.call(void 0, [...s,...Object.keys(n)]),n});await _chunkK65NYGJMcjs.qg.call(void 0, o,h,s)}catch(i){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error occurred syncing the unstructured discovery files: ${i.message}`)),this.process.exit(1)}_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully synced unstructured discovery files to disk at ${o}!`))}exports.pullUnstructuredDiscoveryFiles = j;
+ //# sourceMappingURL=impl-IQCQDDW7.cjs.map
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-DVNUPZK7.cjs","../src/commands/inventory/pull-unstructured-discovery-files/impl.ts"],"names":["pullUnstructuredDiscoveryFiles","auth","file","transcendUrl","dataSiloIds","subCategories","status","includeEncryptedSnippets","doneInputValidation","client","buildTranscendGraphQLClient","entries","pullUnstructuredSubDataPointRecommendations","logger","colors","headers","inputs","entry","result"],"mappings":"AAAA,iOAAwC,wDAAyC,wDAAkD,gCAA6B,wDAAyC,wDAAyC,gCAA6B,gFCE5P,MAkBnB,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CAAAA,CACA,aAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,wBAAA,CAAAC,CACF,CAAA,CACe,CACfC,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAErC,GAAI,CAEF,IAAMC,CAAAA,CAASC,kCAAAA,CAA4BP,CAAcF,CAAI,CAAA,CAEvDU,CAAAA,CAAU,MAAMC,iCAAAA,CAA4CH,CAAQ,CACxE,WAAA,CAAAL,CAAAA,CACA,aAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,wBAAA,CAAAC,CACF,CAAC,CAAA,CAEDM,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,OAAA,CACL,CAAA,8CAAA,EAAiDZ,CAAI,CAAA,IAAA,CACvD,CACF,CAAA,CACA,IAAIa,CAAAA,CAAoB,CAAC,CAAA,CACnBC,CAAAA,CAASL,CAAAA,CAAQ,GAAA,CAAKM,CAAAA,EAAU,CACpC,IAAMC,CAAAA,CAAS,CACb,UAAA,CAAYD,CAAAA,CAAM,EAAA,CAClB,cAAA,CAAgBA,CAAAA,CAAM,UAAA,CACtB,gBAAA,CAAkBA,CAAAA,CAAM,mBAAA,CACxB,WAAA,CAAaA,CAAAA,CAAM,eAAA,CACnB,GAAIV,CAAAA,CACA,CAAE,KAAA,CAAOU,CAAAA,CAAM,IAAA,CAAM,iBAAA,CAAmBA,CAAAA,CAAM,cAAe,CAAA,CAC7D,CAAC,CAAA,CACL,eAAA,CAAiB,CAAA,EAAA","file":"/home/runner/work/cli/cli/dist/impl-DVNUPZK7.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { uniq } from 'lodash-es';\nimport { pullUnstructuredSubDataPointRecommendations } from '../../../lib/data-inventory';\nimport { buildTranscendGraphQLClient } from '../../../lib/graphql';\nimport { logger } from '../../../logger';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { writeLargeCsv } from '../../../lib/helpers';\n\nexport interface PullUnstructuredDiscoveryFilesCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n subCategories?: string[];\n status?: UnstructuredSubDataPointRecommendationStatus[];\n includeEncryptedSnippets: boolean;\n}\n\nexport async function pullUnstructuredDiscoveryFiles(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n dataSiloIds,\n subCategories,\n status,\n includeEncryptedSnippets,\n }: PullUnstructuredDiscoveryFilesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const entries = await pullUnstructuredSubDataPointRecommendations(client, {\n dataSiloIds,\n subCategories, // TODO: https://transcend.height.app/T-40482 - do by name not ID\n status,\n includeEncryptedSnippets,\n });\n\n logger.info(\n colors.magenta(\n `Writing unstructured discovery files to file \"${file}\"...`,\n ),\n );\n let headers: string[] = [];\n const inputs = entries.map((entry) => {\n const result = {\n 'Entry ID': entry.id,\n 'Data Silo ID': entry.dataSiloId,\n 'Object Path ID': entry.scannedObjectPathId,\n 'Object ID': entry.scannedObjectId,\n ...(includeEncryptedSnippets\n ? 
{ Entry: entry.name, 'Context Snippet': entry.contextSnippet }\n : {}),\n 'Data Category': `${entry.dataSubCategory.category}:${entry.dataSubCategory.name}`,\n 'Classification Status': entry.status,\n 'Confidence Score': entry.confidence,\n 'Classification Method': entry.classificationMethod,\n 'Classifier Version': entry.classifierVersion,\n };\n headers = uniq([...headers, ...Object.keys(result)]);\n return result;\n });\n await writeLargeCsv(file, inputs, headers);\n } catch (err) {\n logger.error(\n colors.red(\n `An error occurred syncing the unstructured discovery files: ${err.message}`,\n ),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced unstructured discovery files to disk at ${file}!`,\n ),\n );\n}\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-IQCQDDW7.cjs","../src/commands/inventory/pull-unstructured-discovery-files/impl.ts"],"names":["pullUnstructuredDiscoveryFiles","auth","file","transcendUrl","dataSiloIds","subCategories","status","includeEncryptedSnippets","doneInputValidation","client","buildTranscendGraphQLClient","entries","pullUnstructuredSubDataPointRecommendations","logger","colors","headers","inputs","entry","result"],"mappings":"AAAA,iOAAwC,wDAAyC,wDAAkD,gCAA6B,wDAAyC,wDAAyC,gCAA6B,gFCE5P,MAkBnB,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CAAAA,CACA,aAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,wBAAA,CAAAC,CACF,CAAA,CACe,CACfC,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAErC,GAAI,CAEF,IAAMC,CAAAA,CAASC,kCAAAA,CAA4BP,CAAcF,CAAI,CAAA,CAEvDU,CAAAA,CAAU,MAAMC,iCAAAA,CAA4CH,CAAQ,CACxE,WAAA,CAAAL,CAAAA,CACA,aAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,wBAAA,CAAAC,CACF,CAAC,CAAA,CAEDM,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,OAAA,CACL,CAAA,8CAAA,EAAiDZ,CAAI,CAAA,IAAA,CACvD,CACF,CAAA,CACA,IAAIa,CAAAA,CAAoB,CAAC,CAAA,CACnBC,CAAAA,CAASL,CAAAA,CAAQ,GAAA,CAAKM,CAAAA,EAAU,CACpC,IAAMC,CAAAA,CAAS,CACb,UAAA,CAAYD,CAAAA,CAAM,EAAA,CAClB,cAAA,CAAgBA,CAAAA,CAAM,UAAA,CACtB,gBAAA,CAAkBA,CAAAA,CAAM,mBAAA,CACxB,WAAA,CAAaA,CAAAA,CAAM,eAAA,CACnB,GAAIV,CAAAA,CACA,CAAE,KAAA,CAAOU,CAAAA,CAAM,IAAA,CAAM,iBAAA,CAAmBA,CAAAA,CAAM,cAAe,CAAA,CAC7D,CAAC,CAAA,CACL,eAAA,CAAiB,CAAA,EAAA","file":"/home/runner/work/cli/cli/dist/impl-IQCQDDW7.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { uniq } from 'lodash-es';\nimport { pullUnstructuredSubDataPointRecommendations } from '../../../lib/data-inventory';\nimport { buildTranscendGraphQLClient } from '../../../lib/graphql';\nimport { logger } from '../../../logger';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { writeLargeCsv } from '../../../lib/helpers';\n\nexport interface PullUnstructuredDiscoveryFilesCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n subCategories?: string[];\n status?: UnstructuredSubDataPointRecommendationStatus[];\n includeEncryptedSnippets: boolean;\n}\n\nexport async function pullUnstructuredDiscoveryFiles(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n dataSiloIds,\n subCategories,\n status,\n includeEncryptedSnippets,\n }: PullUnstructuredDiscoveryFilesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const entries = await pullUnstructuredSubDataPointRecommendations(client, {\n dataSiloIds,\n subCategories, // TODO: https://transcend.height.app/T-40482 - do by name not ID\n status,\n includeEncryptedSnippets,\n });\n\n logger.info(\n colors.magenta(\n `Writing unstructured discovery files to file \"${file}\"...`,\n ),\n );\n let headers: string[] = [];\n const inputs = entries.map((entry) => {\n const result = {\n 'Entry ID': entry.id,\n 'Data Silo ID': entry.dataSiloId,\n 'Object Path ID': entry.scannedObjectPathId,\n 'Object ID': entry.scannedObjectId,\n ...(includeEncryptedSnippets\n ? 
{ Entry: entry.name, 'Context Snippet': entry.contextSnippet }\n : {}),\n 'Data Category': `${entry.dataSubCategory.category}:${entry.dataSubCategory.name}`,\n 'Classification Status': entry.status,\n 'Confidence Score': entry.confidence,\n 'Classification Method': entry.classificationMethod,\n 'Classifier Version': entry.classifierVersion,\n };\n headers = uniq([...headers, ...Object.keys(result)]);\n return result;\n });\n await writeLargeCsv(file, inputs, headers);\n } catch (err) {\n logger.error(\n colors.red(\n `An error occurred syncing the unstructured discovery files: ${err.message}`,\n ),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced unstructured discovery files to disk at ${file}!`,\n ),\n );\n}\n"]}
@@ -1,9 +1,9 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunk5MIDKBL7cjs = require('./chunk-5MIDKBL7.cjs');require('./chunk-G6QNUBBK.cjs');var _chunkEDVVHIFKcjs = require('./chunk-EDVVHIFK.cjs');require('./chunk-5UBGZNDC.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkGLXV5XPPcjs = require('./chunk-GLXV5XPP.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkHJTYLFGJcjs = require('./chunk-HJTYLFGJ.cjs');require('./chunk-Q7I37FJV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _bluebird = require('bluebird');var _path = require('path');var _fs = require('fs'); var _fs2 = _interopRequireDefault(_fs);async function X({auth:E,start:C,end:f,folder:r,bin:p,transcendUrl:N}){let l=p;Object.values(_chunkGLXV5XPPcjs.Sc).includes(l)||(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to parse argument "bin" with value "${p}"
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkWGCFVEJZcjs = require('./chunk-WGCFVEJZ.cjs');require('./chunk-XN6WUCF4.cjs');var _chunk5TKJLZA2cjs = require('./chunk-5TKJLZA2.cjs');require('./chunk-5UBGZNDC.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkK65NYGJMcjs = require('./chunk-K65NYGJM.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkFTXFBDFWcjs = require('./chunk-FTXFBDFW.cjs');require('./chunk-Q7I37FJV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _bluebird = require('bluebird');var _path = require('path');var _fs = require('fs'); var _fs2 = _interopRequireDefault(_fs);async function X({auth:E,start:C,end:f,folder:r,bin:p,transcendUrl:N}){let l=p;Object.values(_chunkK65NYGJMcjs.Sc).includes(l)||(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to parse argument "bin" with value "${p}"
  Expected one of:
- ${Object.values(_chunkGLXV5XPPcjs.Sc).join(`
- `)}`)),this.process.exit(1));let i=new Date(C),o=f?new Date(f):new Date;Number.isNaN(i.getTime())&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Start date provided is invalid date. Got --start="${C}" expected --start="01/01/2023"`)),this.process.exit(1)),Number.isNaN(o.getTime())&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`End date provided is invalid date. Got --end="${f}" expected --end="01/01/2023"`)),this.process.exit(1)),i>o&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Got a start date "${i.toISOString()}" that was larger than the end date "${o.toISOString()}". Start date must be before end date.`)),this.process.exit(1)),_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);let m=await _chunkEDVVHIFKcjs.b.call(void 0, E);if(_fs2.default.existsSync(r)&&!_fs2.default.lstatSync(r).isDirectory()&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red('The provided argument "folder" was passed a file. expected: folder="./consent-metrics/"')),this.process.exit(1)),_fs.existsSync.call(void 0, r)||_fs.mkdirSync.call(void 0, r),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Pulling consent metrics from start=${i.toString()} to end=${o.toISOString()} with bin size "${p}"`)),typeof m=="string"){try{let n=_chunkGLXV5XPPcjs.wc.call(void 0, N,m),s=await _chunk5MIDKBL7cjs.d.call(void 0, n,{bin:l,start:i,end:o});await _bluebird.map.call(void 0, Object.entries(s),async([g,c])=>{await _bluebird.map.call(void 0, c,async({points:u,name:d})=>{let a=_path.join.call(void 0, r,`${g}_${d}.csv`);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing configuration to file "${a}"...`)),await _chunkGLXV5XPPcjs.mg.call(void 0, a,u.map(({key:h,value:$})=>({timestamp:h,value:$})))},{concurrency:5})},{concurrency:5})}catch(n){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error occurred syncing the schema: ${n.message}`)),this.process.exit(1)}_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully synced consent metrics to disk in folder "${r}"! View at ${_chunkHJTYLFGJcjs.p}`))}else{let n=[];await _bluebird.mapSeries.call(void 0, m,async(s,g)=>{let c=`[${g+1}/${m.length}][${s.organizationName}] `;_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`~~~
+ ${Object.values(_chunkK65NYGJMcjs.Sc).join(`
+ `)}`)),this.process.exit(1));let i=new Date(C),o=f?new Date(f):new Date;Number.isNaN(i.getTime())&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Start date provided is invalid date. Got --start="${C}" expected --start="01/01/2023"`)),this.process.exit(1)),Number.isNaN(o.getTime())&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`End date provided is invalid date. Got --end="${f}" expected --end="01/01/2023"`)),this.process.exit(1)),i>o&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Got a start date "${i.toISOString()}" that was larger than the end date "${o.toISOString()}". Start date must be before end date.`)),this.process.exit(1)),_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);let m=await _chunk5TKJLZA2cjs.b.call(void 0, E);if(_fs2.default.existsSync(r)&&!_fs2.default.lstatSync(r).isDirectory()&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red('The provided argument "folder" was passed a file. expected: folder="./consent-metrics/"')),this.process.exit(1)),_fs.existsSync.call(void 0, r)||_fs.mkdirSync.call(void 0, r),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Pulling consent metrics from start=${i.toString()} to end=${o.toISOString()} with bin size "${p}"`)),typeof m=="string"){try{let n=_chunkK65NYGJMcjs.wc.call(void 0, N,m),s=await _chunkWGCFVEJZcjs.d.call(void 0, n,{bin:l,start:i,end:o});await _bluebird.map.call(void 0, Object.entries(s),async([g,c])=>{await _bluebird.map.call(void 0, c,async({points:u,name:d})=>{let a=_path.join.call(void 0, r,`${g}_${d}.csv`);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing configuration to file "${a}"...`)),await _chunkK65NYGJMcjs.og.call(void 0, a,u.map(({key:h,value:$})=>({timestamp:h,value:$})))},{concurrency:5})},{concurrency:5})}catch(n){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error occurred syncing the schema: ${n.message}`)),this.process.exit(1)}_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully synced consent metrics to disk in folder "${r}"! View at ${_chunkFTXFBDFWcjs.p}`))}else{let n=[];await _bluebird.mapSeries.call(void 0, m,async(s,g)=>{let c=`[${g+1}/${m.length}][${s.organizationName}] `;_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`~~~
 
  ${c}Attempting to pull consent metrics...
 
- ~~~`));let u=_chunkGLXV5XPPcjs.wc.call(void 0, N,s.apiKey);try{let d=await _chunk5MIDKBL7cjs.d.call(void 0, u,{bin:l,start:i,end:o}),a=_path.join.call(void 0, r,s.organizationName);_fs.existsSync.call(void 0, a)||_fs.mkdirSync.call(void 0, a),Object.entries(d).forEach(([h,$])=>{$.forEach(({points:F,name:G})=>{let D=_path.join.call(void 0, a,`${h}_${G}.csv`);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing configuration to file "${D}"...`)),_chunkGLXV5XPPcjs.mg.call(void 0, D,F.map(({key:z,value:L})=>({timestamp:z,value:L})))})}),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`${c}Successfully pulled configuration!`))}catch (e2){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`${c}Failed to sync configuration.`)),n.push(s.organizationName)}}),n.length>0&&(_chunkZUNVPK23cjs.a.info(_colors2.default.red(`Sync encountered errors for "${n.join(",")}". View output above for more information, or check out ${_chunkHJTYLFGJcjs.p}`)),this.process.exit(1))}}exports.pullConsentMetrics = X;
- //# sourceMappingURL=impl-I6NMF5QP.cjs.map
+ ~~~`));let u=_chunkK65NYGJMcjs.wc.call(void 0, N,s.apiKey);try{let d=await _chunkWGCFVEJZcjs.d.call(void 0, u,{bin:l,start:i,end:o}),a=_path.join.call(void 0, r,s.organizationName);_fs.existsSync.call(void 0, a)||_fs.mkdirSync.call(void 0, a),Object.entries(d).forEach(([h,$])=>{$.forEach(({points:F,name:G})=>{let D=_path.join.call(void 0, a,`${h}_${G}.csv`);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing configuration to file "${D}"...`)),_chunkK65NYGJMcjs.og.call(void 0, D,F.map(({key:z,value:L})=>({timestamp:z,value:L})))})}),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`${c}Successfully pulled configuration!`))}catch (e2){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`${c}Failed to sync configuration.`)),n.push(s.organizationName)}}),n.length>0&&(_chunkZUNVPK23cjs.a.info(_colors2.default.red(`Sync encountered errors for "${n.join(",")}". View output above for more information, or check out ${_chunkFTXFBDFWcjs.p}`)),this.process.exit(1))}}exports.pullConsentMetrics = X;
+ //# sourceMappingURL=impl-J3FUD3MS.cjs.map
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-I6NMF5QP.cjs","../src/commands/consent/pull-consent-metrics/impl.ts"],"names":["pullConsentMetrics","auth","start","end","folder","bin","transcendUrl","parsedBin","ConsentManagerMetricBin","logger","colors"],"mappings":"AAAA,iOAAwC,gCAA6B,wDAAyC,gCAA6B,wDAAyC,wDAA0D,gCAA6B,wDAAyC,wDAAyC,gCAA6B,gFCEvW,oCACY,4BACV,gEACqB,MAoB1C,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,KAAA,CAAAC,CAAAA,CACA,GAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,GAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CACF,CAAA,CACe,CAEf,IAAMC,CAAAA,CAAYF,CAAAA,CACb,MAAA,CAAO,MAAA,CAAOG,oBAAuB,CAAA,CAAE,QAAA,CAASD,CAAS,CAAA,EAAA,CAC5DE,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,2CAAA,EAA8CL,CAAG,CAAA;AAAA;AAAA,EACzB,MAAA,CAAO,MAAA,CAAOG,oBAAuB,CAAA,CAAE,IAAA,CAC3D,CAAA;AAAA,CACF,CAAC,CAAA,CAAA;AAyHD;AAAgB;AAAA;AAAA,GAAA","file":"/home/runner/work/cli/cli/dist/impl-I6NMF5QP.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { map, mapSeries } from 'bluebird';\nimport { join } from 'node:path';\nimport fs, { existsSync, mkdirSync } from 'node:fs';\nimport {\n buildTranscendGraphQLClient,\n ConsentManagerMetricBin,\n} from '../../../lib/graphql';\nimport { validateTranscendAuth } from '../../../lib/api-keys';\nimport { ADMIN_DASH_INTEGRATIONS } from '../../../constants';\nimport { pullConsentManagerMetrics } from '../../../lib/consent-manager';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { writeCsv } from '../../../lib/helpers';\n\nexport interface PullConsentMetricsCommandFlags {\n auth: string;\n start: Date;\n end?: Date;\n folder: string;\n bin: string;\n transcendUrl: string;\n}\n\nexport async function pullConsentMetrics(\n this: LocalContext,\n {\n auth,\n start,\n end,\n folder,\n bin,\n transcendUrl,\n }: PullConsentMetricsCommandFlags,\n): Promise<void> {\n // Validate bin\n const parsedBin = bin as ConsentManagerMetricBin;\n if (!Object.values(ConsentManagerMetricBin).includes(parsedBin)) {\n logger.error(\n colors.red(\n `Failed to parse argument \"bin\" with value \"${bin}\"\\n` +\n `Expected one of: \\n${Object.values(ConsentManagerMetricBin).join(\n '\\n',\n )}`,\n ),\n );\n this.process.exit(1);\n }\n\n // Parse the dates\n const startDate = new Date(start);\n const endDate = end ? new Date(end) : new Date();\n if (Number.isNaN(startDate.getTime())) {\n logger.error(\n colors.red(\n `Start date provided is invalid date. Got --start=\"${start}\" expected --start=\"01/01/2023\"`,\n ),\n );\n this.process.exit(1);\n }\n if (Number.isNaN(endDate.getTime())) {\n logger.error(\n colors.red(\n `End date provided is invalid date. Got --end=\"${end}\" expected --end=\"01/01/2023\"`,\n ),\n );\n this.process.exit(1);\n }\n if (startDate > endDate) {\n logger.error(\n colors.red(\n `Got a start date \"${startDate.toISOString()}\" that was larger than the end date \"${endDate.toISOString()}\". ` +\n 'Start date must be before end date.',\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Ensure folder either does not exist or is not a file\n if (fs.existsSync(folder) && !fs.lstatSync(folder).isDirectory()) {\n logger.error(\n colors.red(\n 'The provided argument \"folder\" was passed a file. 
expected: folder=\"./consent-metrics/\"',\n ),\n );\n this.process.exit(1);\n }\n\n // Create the folder if it does not exist\n if (!existsSync(folder)) {\n mkdirSync(folder);\n }\n\n logger.info(\n colors.magenta(\n `Pulling consent metrics from start=${startDate.toString()} to end=${endDate.toISOString()} with bin size \"${bin}\"`,\n ),\n );\n\n // Sync to Disk\n if (typeof apiKeyOrList === 'string') {\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKeyOrList);\n\n // Pull the metrics\n const configuration = await pullConsentManagerMetrics(client, {\n bin: parsedBin,\n start: startDate,\n end: endDate,\n });\n\n // Write to file\n await map(\n Object.entries(configuration),\n async ([metricName, metrics]) => {\n await map(\n metrics,\n async ({ points, name }) => {\n const file = join(folder, `${metricName}_${name}.csv`);\n logger.info(\n colors.magenta(`Writing configuration to file \"${file}\"...`),\n );\n await writeCsv(\n file,\n points.map(({ key, value }) => ({\n timestamp: key,\n value,\n })),\n );\n },\n {\n concurrency: 5,\n },\n );\n },\n { concurrency: 5 },\n );\n } catch (err) {\n logger.error(\n colors.red(`An error occurred syncing the schema: ${err.message}`),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced consent metrics to disk in folder \"${folder}\"! View at ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n } else {\n const encounteredErrors: string[] = [];\n await mapSeries(apiKeyOrList, async (apiKey, ind) => {\n const prefix = `[${ind + 1}/${apiKeyOrList.length}][${\n apiKey.organizationName\n }] `;\n logger.info(\n colors.magenta(\n `~~~\\n\\n${prefix}Attempting to pull consent metrics...\\n\\n~~~`,\n ),\n );\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKey.apiKey);\n\n try {\n const configuration = await pullConsentManagerMetrics(client, {\n bin: parsedBin,\n start: startDate,\n end: endDate,\n });\n\n // ensure folder exists for that organization\n const subFolder = join(folder, apiKey.organizationName);\n if (!existsSync(subFolder)) {\n mkdirSync(subFolder);\n }\n\n // Write to file\n Object.entries(configuration).forEach(([metricName, metrics]) => {\n metrics.forEach(({ points, name }) => {\n const file = join(subFolder, `${metricName}_${name}.csv`);\n logger.info(\n colors.magenta(`Writing configuration to file \"${file}\"...`),\n );\n writeCsv(\n file,\n points.map(({ key, value }) => ({\n timestamp: key,\n value,\n })),\n );\n });\n });\n\n logger.info(\n colors.green(`${prefix}Successfully pulled configuration!`),\n );\n } catch (err) {\n logger.error(colors.red(`${prefix}Failed to sync configuration.`));\n encounteredErrors.push(apiKey.organizationName);\n }\n });\n\n if (encounteredErrors.length > 0) {\n logger.info(\n colors.red(\n `Sync encountered errors for \"${encounteredErrors.join(\n ',',\n )}\". View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n }\n}\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-J3FUD3MS.cjs","../src/commands/consent/pull-consent-metrics/impl.ts"],"names":["pullConsentMetrics","auth","start","end","folder","bin","transcendUrl","parsedBin","ConsentManagerMetricBin","logger","colors"],"mappings":"AAAA,iOAAwC,gCAA6B,wDAAyC,gCAA6B,wDAAyC,wDAA0D,gCAA6B,wDAAyC,wDAAyC,gCAA6B,gFCEvW,oCACY,4BACV,gEACqB,MAoB1C,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,KAAA,CAAAC,CAAAA,CACA,GAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,GAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CACF,CAAA,CACe,CAEf,IAAMC,CAAAA,CAAYF,CAAAA,CACb,MAAA,CAAO,MAAA,CAAOG,oBAAuB,CAAA,CAAE,QAAA,CAASD,CAAS,CAAA,EAAA,CAC5DE,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,2CAAA,EAA8CL,CAAG,CAAA;AAAA;AAAA,EACzB,MAAA,CAAO,MAAA,CAAOG,oBAAuB,CAAA,CAAE,IAAA,CAC3D,CAAA;AAAA,CACF,CAAC,CAAA,CAAA;AAyHD;AAAgB;AAAA;AAAA,GAAA","file":"/home/runner/work/cli/cli/dist/impl-J3FUD3MS.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { map, mapSeries } from 'bluebird';\nimport { join } from 'node:path';\nimport fs, { existsSync, mkdirSync } from 'node:fs';\nimport {\n buildTranscendGraphQLClient,\n ConsentManagerMetricBin,\n} from '../../../lib/graphql';\nimport { validateTranscendAuth } from '../../../lib/api-keys';\nimport { ADMIN_DASH_INTEGRATIONS } from '../../../constants';\nimport { pullConsentManagerMetrics } from '../../../lib/consent-manager';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { writeCsv } from '../../../lib/helpers';\n\nexport interface PullConsentMetricsCommandFlags {\n auth: string;\n start: Date;\n end?: Date;\n folder: string;\n bin: string;\n transcendUrl: string;\n}\n\nexport async function pullConsentMetrics(\n this: LocalContext,\n {\n auth,\n start,\n end,\n folder,\n bin,\n transcendUrl,\n }: PullConsentMetricsCommandFlags,\n): Promise<void> {\n // Validate bin\n const parsedBin = bin as ConsentManagerMetricBin;\n if (!Object.values(ConsentManagerMetricBin).includes(parsedBin)) {\n logger.error(\n colors.red(\n `Failed to parse argument \"bin\" with value \"${bin}\"\\n` +\n `Expected one of: \\n${Object.values(ConsentManagerMetricBin).join(\n '\\n',\n )}`,\n ),\n );\n this.process.exit(1);\n }\n\n // Parse the dates\n const startDate = new Date(start);\n const endDate = end ? new Date(end) : new Date();\n if (Number.isNaN(startDate.getTime())) {\n logger.error(\n colors.red(\n `Start date provided is invalid date. Got --start=\"${start}\" expected --start=\"01/01/2023\"`,\n ),\n );\n this.process.exit(1);\n }\n if (Number.isNaN(endDate.getTime())) {\n logger.error(\n colors.red(\n `End date provided is invalid date. Got --end=\"${end}\" expected --end=\"01/01/2023\"`,\n ),\n );\n this.process.exit(1);\n }\n if (startDate > endDate) {\n logger.error(\n colors.red(\n `Got a start date \"${startDate.toISOString()}\" that was larger than the end date \"${endDate.toISOString()}\". ` +\n 'Start date must be before end date.',\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Ensure folder either does not exist or is not a file\n if (fs.existsSync(folder) && !fs.lstatSync(folder).isDirectory()) {\n logger.error(\n colors.red(\n 'The provided argument \"folder\" was passed a file. 
expected: folder=\"./consent-metrics/\"',\n ),\n );\n this.process.exit(1);\n }\n\n // Create the folder if it does not exist\n if (!existsSync(folder)) {\n mkdirSync(folder);\n }\n\n logger.info(\n colors.magenta(\n `Pulling consent metrics from start=${startDate.toString()} to end=${endDate.toISOString()} with bin size \"${bin}\"`,\n ),\n );\n\n // Sync to Disk\n if (typeof apiKeyOrList === 'string') {\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKeyOrList);\n\n // Pull the metrics\n const configuration = await pullConsentManagerMetrics(client, {\n bin: parsedBin,\n start: startDate,\n end: endDate,\n });\n\n // Write to file\n await map(\n Object.entries(configuration),\n async ([metricName, metrics]) => {\n await map(\n metrics,\n async ({ points, name }) => {\n const file = join(folder, `${metricName}_${name}.csv`);\n logger.info(\n colors.magenta(`Writing configuration to file \"${file}\"...`),\n );\n await writeCsv(\n file,\n points.map(({ key, value }) => ({\n timestamp: key,\n value,\n })),\n );\n },\n {\n concurrency: 5,\n },\n );\n },\n { concurrency: 5 },\n );\n } catch (err) {\n logger.error(\n colors.red(`An error occurred syncing the schema: ${err.message}`),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced consent metrics to disk in folder \"${folder}\"! View at ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n } else {\n const encounteredErrors: string[] = [];\n await mapSeries(apiKeyOrList, async (apiKey, ind) => {\n const prefix = `[${ind + 1}/${apiKeyOrList.length}][${\n apiKey.organizationName\n }] `;\n logger.info(\n colors.magenta(\n `~~~\\n\\n${prefix}Attempting to pull consent metrics...\\n\\n~~~`,\n ),\n );\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKey.apiKey);\n\n try {\n const configuration = await pullConsentManagerMetrics(client, {\n bin: parsedBin,\n start: startDate,\n end: endDate,\n });\n\n // ensure folder exists for that organization\n const subFolder = join(folder, apiKey.organizationName);\n if (!existsSync(subFolder)) {\n mkdirSync(subFolder);\n }\n\n // Write to file\n Object.entries(configuration).forEach(([metricName, metrics]) => {\n metrics.forEach(({ points, name }) => {\n const file = join(subFolder, `${metricName}_${name}.csv`);\n logger.info(\n colors.magenta(`Writing configuration to file \"${file}\"...`),\n );\n writeCsv(\n file,\n points.map(({ key, value }) => ({\n timestamp: key,\n value,\n })),\n );\n });\n });\n\n logger.info(\n colors.green(`${prefix}Successfully pulled configuration!`),\n );\n } catch (err) {\n logger.error(colors.red(`${prefix}Failed to sync configuration.`));\n encounteredErrors.push(apiKey.organizationName);\n }\n });\n\n if (encounteredErrors.length > 0) {\n logger.info(\n colors.red(\n `Sync encountered errors for \"${encounteredErrors.join(\n ',',\n )}\". View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n }\n}\n"]}
@@ -1,2 +1,2 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkGLXV5XPPcjs = require('./chunk-GLXV5XPP.cjs');require('./chunk-LCDYXJN6.cjs');require('./chunk-ZUNVPK23.cjs');require('./chunk-HJTYLFGJ.cjs');require('./chunk-Q7I37FJV.cjs');async function c({auth:i,transcendUrl:n,identifierNames:r,actions:o=[]}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit),await _chunkGLXV5XPPcjs.Qf.call(void 0, {requestActions:o,transcendUrl:n,auth:i,identifierNames:r})}exports.rejectUnverifiedIdentifiers = c;
- //# sourceMappingURL=impl-KJQDZ5CD.cjs.map
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkK65NYGJMcjs = require('./chunk-K65NYGJM.cjs');require('./chunk-LCDYXJN6.cjs');require('./chunk-ZUNVPK23.cjs');require('./chunk-FTXFBDFW.cjs');require('./chunk-Q7I37FJV.cjs');async function c({auth:i,transcendUrl:n,identifierNames:r,actions:o=[]}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit),await _chunkK65NYGJMcjs.Qf.call(void 0, {requestActions:o,transcendUrl:n,auth:i,identifierNames:r})}exports.rejectUnverifiedIdentifiers = c;
+ //# sourceMappingURL=impl-JHP4RJZ4.cjs.map
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-KJQDZ5CD.cjs","../src/commands/request/reject-unverified-identifiers/impl.ts"],"names":["rejectUnverifiedIdentifiers","auth","transcendUrl","identifierNames","actions","doneInputValidation","removeUnverifiedRequestIdentifiers"],"mappings":"AAAA,iIAAwC,wDAA0C,gCAA6B,gCAA6B,gCAA6B,gCAA6B,MCYtM,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,eAAA,CAAAC,CAAAA,CACA,OAAA,CAAAC,CAAAA,CAAU,CAAC,CACb,CAAA,CACe,CACfC,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAErC,MAAMC,kCAAAA,CACJ,cAAA,CAAgBF,CAAAA,CAChB,YAAA,CAAAF,CAAAA,CACA,IAAA,CAAAD,CAAAA,CACA,eAAA,CAAAE,CACF,CAAC,CACH,CAAA,wCAAA","file":"/home/runner/work/cli/cli/dist/impl-KJQDZ5CD.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport { removeUnverifiedRequestIdentifiers } from '../../../lib/requests';\nimport type { RequestAction } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface RejectUnverifiedIdentifiersCommandFlags {\n auth: string;\n identifierNames: string[];\n actions?: RequestAction[];\n transcendUrl: string;\n}\n\nexport async function rejectUnverifiedIdentifiers(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n identifierNames,\n actions = [],\n }: RejectUnverifiedIdentifiersCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await removeUnverifiedRequestIdentifiers({\n requestActions: actions,\n transcendUrl,\n auth,\n identifierNames,\n });\n}\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-JHP4RJZ4.cjs","../src/commands/request/reject-unverified-identifiers/impl.ts"],"names":["rejectUnverifiedIdentifiers","auth","transcendUrl","identifierNames","actions","doneInputValidation","removeUnverifiedRequestIdentifiers"],"mappings":"AAAA,iIAAwC,wDAA0C,gCAA6B,gCAA6B,gCAA6B,gCAA6B,MCYtM,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,eAAA,CAAAC,CAAAA,CACA,OAAA,CAAAC,CAAAA,CAAU,CAAC,CACb,CAAA,CACe,CACfC,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAErC,MAAMC,kCAAAA,CACJ,cAAA,CAAgBF,CAAAA,CAChB,YAAA,CAAAF,CAAAA,CACA,IAAA,CAAAD,CAAAA,CACA,eAAA,CAAAE,CACF,CAAC,CACH,CAAA,wCAAA","file":"/home/runner/work/cli/cli/dist/impl-JHP4RJZ4.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport { removeUnverifiedRequestIdentifiers } from '../../../lib/requests';\nimport type { RequestAction } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface RejectUnverifiedIdentifiersCommandFlags {\n auth: string;\n identifierNames: string[];\n actions?: RequestAction[];\n transcendUrl: string;\n}\n\nexport async function rejectUnverifiedIdentifiers(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n identifierNames,\n actions = [],\n }: RejectUnverifiedIdentifiersCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await removeUnverifiedRequestIdentifiers({\n requestActions: actions,\n transcendUrl,\n auth,\n identifierNames,\n });\n}\n"]}
@@ -1,2 +1,2 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkGLXV5XPPcjs = require('./chunk-GLXV5XPP.cjs');require('./chunk-LCDYXJN6.cjs');require('./chunk-ZUNVPK23.cjs');require('./chunk-HJTYLFGJ.cjs');require('./chunk-Q7I37FJV.cjs');async function d({auth:o,actions:r,origins:i,silentModeBefore:n,createdAtBefore:s,createdAtAfter:a,transcendUrl:c,concurrency:p}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit),await _chunkGLXV5XPPcjs.Cf.call(void 0, {transcendUrl:c,requestActions:r,auth:o,requestOrigins:i,concurrency:p,silentModeBefore:n,createdAtBefore:s,createdAtAfter:a})}exports.approve = d;
- //# sourceMappingURL=impl-XWVPG2AO.cjs.map
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkK65NYGJMcjs = require('./chunk-K65NYGJM.cjs');require('./chunk-LCDYXJN6.cjs');require('./chunk-ZUNVPK23.cjs');require('./chunk-FTXFBDFW.cjs');require('./chunk-Q7I37FJV.cjs');async function d({auth:o,actions:r,origins:i,silentModeBefore:n,createdAtBefore:s,createdAtAfter:a,transcendUrl:c,concurrency:p}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit),await _chunkK65NYGJMcjs.Cf.call(void 0, {transcendUrl:c,requestActions:r,auth:o,requestOrigins:i,concurrency:p,silentModeBefore:n,createdAtBefore:s,createdAtAfter:a})}exports.approve = d;
+ //# sourceMappingURL=impl-JK3TTCKR.cjs.map
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-XWVPG2AO.cjs","../src/commands/request/approve/impl.ts"],"names":["approve","auth","actions","origins","silentModeBefore","createdAtBefore","createdAtAfter","transcendUrl","concurrency","doneInputValidation","approvePrivacyRequests"],"mappings":"AAAA,iIAAwC,wDAA0C,gCAA6B,gCAA6B,gCAA6B,gCAA6B,MCiBtM,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,OAAA,CAAAC,CAAAA,CACA,OAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,eAAA,CAAAC,CAAAA,CACA,cAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CACF,CAAA,CACe,CACfC,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAErC,MAAMC,kCAAAA,CACJ,YAAA,CAAAH,CAAAA,CACA,cAAA,CAAgBL,CAAAA,CAChB,IAAA,CAAAD,CAAAA,CACA,cAAA,CAAgBE,CAAAA,CAChB,WAAA,CAAAK,CAAAA,CACA,gBAAA,CAAAJ,CAAAA,CACA,eAAA,CAAAC,CAAAA,CACA,cAAA,CAAAC,CACF,CAAC,CACH,CAAA,oBAAA","file":"/home/runner/work/cli/cli/dist/impl-XWVPG2AO.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\n\nimport { RequestAction, RequestOrigin } from '@transcend-io/privacy-types';\nimport { approvePrivacyRequests } from '../../../lib/requests';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface ApproveCommandFlags {\n auth: string;\n actions: RequestAction[];\n origins?: RequestOrigin[];\n silentModeBefore?: Date;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n transcendUrl: string;\n concurrency: number;\n}\n\nexport async function approve(\n this: LocalContext,\n {\n auth,\n actions,\n origins,\n silentModeBefore,\n createdAtBefore,\n createdAtAfter,\n transcendUrl,\n concurrency,\n }: ApproveCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await approvePrivacyRequests({\n transcendUrl,\n requestActions: actions,\n auth,\n requestOrigins: origins,\n concurrency,\n silentModeBefore,\n createdAtBefore,\n createdAtAfter,\n });\n}\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-JK3TTCKR.cjs","../src/commands/request/approve/impl.ts"],"names":["approve","auth","actions","origins","silentModeBefore","createdAtBefore","createdAtAfter","transcendUrl","concurrency","doneInputValidation","approvePrivacyRequests"],"mappings":"AAAA,iIAAwC,wDAA0C,gCAA6B,gCAA6B,gCAA6B,gCAA6B,MCiBtM,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,OAAA,CAAAC,CAAAA,CACA,OAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,eAAA,CAAAC,CAAAA,CACA,cAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CACF,CAAA,CACe,CACfC,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAErC,MAAMC,kCAAAA,CACJ,YAAA,CAAAH,CAAAA,CACA,cAAA,CAAgBL,CAAAA,CAChB,IAAA,CAAAD,CAAAA,CACA,cAAA,CAAgBE,CAAAA,CAChB,WAAA,CAAAK,CAAAA,CACA,gBAAA,CAAAJ,CAAAA,CACA,eAAA,CAAAC,CAAAA,CACA,cAAA,CAAAC,CACF,CAAC,CACH,CAAA,oBAAA","file":"/home/runner/work/cli/cli/dist/impl-JK3TTCKR.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\n\nimport { RequestAction, RequestOrigin } from '@transcend-io/privacy-types';\nimport { approvePrivacyRequests } from '../../../lib/requests';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface ApproveCommandFlags {\n auth: string;\n actions: RequestAction[];\n origins?: RequestOrigin[];\n silentModeBefore?: Date;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n transcendUrl: string;\n concurrency: number;\n}\n\nexport async function approve(\n this: LocalContext,\n {\n auth,\n actions,\n origins,\n silentModeBefore,\n createdAtBefore,\n createdAtAfter,\n transcendUrl,\n concurrency,\n }: ApproveCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await approvePrivacyRequests({\n transcendUrl,\n requestActions: actions,\n auth,\n requestOrigins: origins,\n concurrency,\n silentModeBefore,\n createdAtBefore,\n createdAtAfter,\n });\n}\n"]}
@@ -1,2 +1,2 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkDV57HBVOcjs = require('./chunk-DV57HBVO.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkGLXV5XPPcjs = require('./chunk-GLXV5XPP.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');require('./chunk-HJTYLFGJ.cjs');require('./chunk-Q7I37FJV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _path = require('path');var _fs = require('fs');function H(r,e){r||(_chunkZUNVPK23cjs.a.error(_colors2.default.red("A --directory must be provided.")),e.process.exit(1));let n=[];try{n=_fs.readdirSync.call(void 0, r).filter(s=>s.endsWith(".csv")).map(s=>_path.join.call(void 0, r,s)).filter(s=>{try{return _fs.statSync.call(void 0, s).isFile()}catch (e2){return!1}})}catch(i){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to read directory: ${r}`)),_chunkZUNVPK23cjs.a.error(_colors2.default.red(i.message)),e.process.exit(1)}return n.length===0&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`No CSV files found in directory: ${r}`)),e.process.exit(1)),n}var _promises = require('fs/promises');var _promises3 = require('stream/promises');var _stream = require('stream');var _events = require('events');var _csvparse = require('csv-parse');var _fastcsv = require('fast-csv'); var K = _interopRequireWildcard(_fastcsv);function M(r,e){let n=_fs.createWriteStream.call(void 0, r),i=K.format({headers:e,writeHeaders:!0,objectMode:!0});return i.pipe(n),{async write(s){i.write(s)||await _events.once.call(void 0, i,"drain")},async end(){let s=Promise.all([_events.once.call(void 0, n,"finish")]);i.end(),await s}}}function me(r){return String(r).padStart(4,"0")}function fe(r){return Buffer.byteLength(Object.values(r).map(e=>e==null?"":String(e)).join(","),"utf8")}async function N(r){let{filePath:e,outputDir:n,clearOutputDir:i,chunkSizeMB:s,onProgress:f,reportEveryMs:g=500}=r,{size:l}=await _promises.stat.call(void 0, e),u=0;_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Chunking ${e} into ~${s}MB files...`));let y=Math.floor(s*1024*1024),C=_path.basename.call(void 0, e,".csv"),d=n||_path.dirname.call(void 0, e);if(_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Output directory: ${d}`)),await _promises.mkdir.call(void 0, d,{recursive:!0}),i){_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`Clearing output directory: ${d}`));let a=await _promises.readdir.call(void 0, d);await Promise.all(a.filter(m=>m.startsWith(`${C}_chunk_`)&&m.endsWith(".csv")).map(m=>_promises.unlink.call(void 0, _path.join.call(void 0, d,m))))}let t=null,h=null,c=0,w=1,P=0,G=new (0, 
_csvparse.Parser)({columns:!1,skip_empty_lines:!0}),O=0,b=0,S=()=>{let a=b>0?O/b:0,m=a>0?Math.max(c,Math.ceil(l/a)):void 0;f(c,m),u=Date.now()};S();let p=null,$=()=>_path.join.call(void 0, d,`${C}_chunk_${me(w)}.csv`),U=new (0, _stream.Transform)({objectMode:!0,async transform(a,m,T){try{if(!t){t=a.slice(0),h=t.length,p=M($(),t),T();return}h!==null&&a.length!==h&&_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`Row has ${a.length} cols; expected ${h}`)),c+=1,c%25e4===0&&f(c);let v=Object.fromEntries(t.map((Q,X)=>[Q,a[X]])),D=fe(v);O+=D,b+=1,Date.now()-u>=g&&S(),p&&P>0&&P+D>y&&(await p.end(),w+=1,P=0,_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Rolling to chunk ${w} after ${c.toLocaleString()} rows.`)),p=M($(),t)),p||(p=M($(),t)),await p.write(v),P+=D,T()}catch(v){T(v)}},async flush(a){try{p&&(await p.end(),p=null),S(),a()}catch(m){a(m)}}}),J=_fs.createReadStream.call(void 0, e);await _promises3.pipeline.call(void 0, J,G,U),f(c),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Chunked ${e} into ${w} file(s); processed ${c.toLocaleString()} rows.`))}async function A(){let r=Number(process.env.WORKER_ID||"0");_chunkZUNVPK23cjs.a.info(`[w${r}] ready pid=${process.pid}`),_optionalChain([process, 'access', _2 => _2.send, 'optionalCall', _3 => _3({type:"ready"})]),process.on("message",async e=>{if(!e||typeof e!="object"||(e.type==="shutdown"&&process.exit(0),e.type!=="task"))return;let{filePath:n,options:i}=e.payload,{outputDir:s,clearOutputDir:f,chunkSizeMB:g}=i;try{await N({filePath:n,outputDir:s,clearOutputDir:f,chunkSizeMB:g,onProgress:(l,u)=>_optionalChain([process, 'access', _4 => _4.send, 'optionalCall', _5 => _5({type:"progress",payload:{filePath:n,processed:l,total:u}})])}),_optionalChain([process, 'access', _6 => _6.send, 'optionalCall', _7 => _7({type:"result",payload:{ok:!0,filePath:n}})])}catch(l){let u=_chunkGLXV5XPPcjs.Wf.call(void 0, l);_chunkZUNVPK23cjs.a.error(`[w${r}] ERROR ${n}: ${u}`),_optionalChain([process, 'access', _8 => _8.send, 'optionalCall', _9 => _9({type:"result",payload:{ok:!1,filePath:n,error:u}})])}}),await new Promise(()=>{})}function de(r){return _chunkDV57HBVOcjs.d.call(void 0, r)}function he(r){return _chunkDV57HBVOcjs.e.call(void 0, r)}var q={renderHeader:de,renderWorkers:he};function ke(){return typeof __filename<"u"?__filename:process.argv[1]}async function Ue(r){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);let{directory:e,outputDir:n,clearOutputDir:i,chunkSizeMB:s,concurrency:f,viewerMode:g}=r,l=H(e,this),{poolSize:u,cpuCount:y}=_chunkDV57HBVOcjs.a.call(void 0, f,l.length);_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Chunking ${l.length} CSV file(s) with pool size ${u} (CPU=${y})`));let C=l.map(t=>({filePath:t,options:{outputDir:n,clearOutputDir:i,chunkSizeMB:s}})),d={nextTask:()=>C.shift(),taskLabel:t=>t.filePath,initTotals:()=>({}),initSlotProgress:()=>{},onProgress:t=>t,onResult:(t,h)=>({totals:t,ok:!!h.ok}),postProcess:async()=>{}};await _chunkDV57HBVOcjs.f.call(void 0, {title:`Chunk CSV - ${e}`,baseDir:e||n||process.cwd(),childFlag:_chunkDV57HBVOcjs.b,childModulePath:ke(),poolSize:u,cpuCount:y,filesTotal:l.length,hooks:d,viewerMode:g,render:t=>_chunkDV57HBVOcjs.c.call(void 0, t,q,g),extraKeyHandler:({logsBySlot:t,repaint:h,setPaused:c})=>_chunkDV57HBVOcjs.g.call(void 0, {logsBySlot:t,repaint:h,setPaused:c})})}process.argv.includes(_chunkDV57HBVOcjs.b)&&A().catch(r=>{_chunkZUNVPK23cjs.a.error(r),process.exit(1)});exports.chunkCsv = Ue;
- //# sourceMappingURL=impl-XUBKDJNI.cjs.map
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkL5QUUSVBcjs = require('./chunk-L5QUUSVB.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunkK65NYGJMcjs = require('./chunk-K65NYGJM.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');require('./chunk-FTXFBDFW.cjs');require('./chunk-Q7I37FJV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _path = require('path');var _fs = require('fs');function H(r,e){r||(_chunkZUNVPK23cjs.a.error(_colors2.default.red("A --directory must be provided.")),e.process.exit(1));let n=[];try{n=_fs.readdirSync.call(void 0, r).filter(s=>s.endsWith(".csv")).map(s=>_path.join.call(void 0, r,s)).filter(s=>{try{return _fs.statSync.call(void 0, s).isFile()}catch (e2){return!1}})}catch(i){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to read directory: ${r}`)),_chunkZUNVPK23cjs.a.error(_colors2.default.red(i.message)),e.process.exit(1)}return n.length===0&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`No CSV files found in directory: ${r}`)),e.process.exit(1)),n}var _promises = require('fs/promises');var _promises3 = require('stream/promises');var _stream = require('stream');var _events = require('events');var _csvparse = require('csv-parse');var _fastcsv = require('fast-csv'); var K = _interopRequireWildcard(_fastcsv);function M(r,e){let n=_fs.createWriteStream.call(void 0, r),i=K.format({headers:e,writeHeaders:!0,objectMode:!0});return i.pipe(n),{async write(s){i.write(s)||await _events.once.call(void 0, i,"drain")},async end(){let s=Promise.all([_events.once.call(void 0, n,"finish")]);i.end(),await s}}}function me(r){return String(r).padStart(4,"0")}function fe(r){return Buffer.byteLength(Object.values(r).map(e=>e==null?"":String(e)).join(","),"utf8")}async function N(r){let{filePath:e,outputDir:n,clearOutputDir:i,chunkSizeMB:s,onProgress:f,reportEveryMs:g=500}=r,{size:l}=await _promises.stat.call(void 0, e),u=0;_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Chunking ${e} into ~${s}MB files...`));let y=Math.floor(s*1024*1024),C=_path.basename.call(void 0, e,".csv"),d=n||_path.dirname.call(void 0, e);if(_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Output directory: ${d}`)),await _promises.mkdir.call(void 0, d,{recursive:!0}),i){_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`Clearing output directory: ${d}`));let a=await _promises.readdir.call(void 0, d);await Promise.all(a.filter(m=>m.startsWith(`${C}_chunk_`)&&m.endsWith(".csv")).map(m=>_promises.unlink.call(void 0, _path.join.call(void 0, d,m))))}let t=null,h=null,c=0,w=1,P=0,G=new (0, 
_csvparse.Parser)({columns:!1,skip_empty_lines:!0}),O=0,b=0,S=()=>{let a=b>0?O/b:0,m=a>0?Math.max(c,Math.ceil(l/a)):void 0;f(c,m),u=Date.now()};S();let p=null,$=()=>_path.join.call(void 0, d,`${C}_chunk_${me(w)}.csv`),U=new (0, _stream.Transform)({objectMode:!0,async transform(a,m,T){try{if(!t){t=a.slice(0),h=t.length,p=M($(),t),T();return}h!==null&&a.length!==h&&_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`Row has ${a.length} cols; expected ${h}`)),c+=1,c%25e4===0&&f(c);let v=Object.fromEntries(t.map((Q,X)=>[Q,a[X]])),D=fe(v);O+=D,b+=1,Date.now()-u>=g&&S(),p&&P>0&&P+D>y&&(await p.end(),w+=1,P=0,_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Rolling to chunk ${w} after ${c.toLocaleString()} rows.`)),p=M($(),t)),p||(p=M($(),t)),await p.write(v),P+=D,T()}catch(v){T(v)}},async flush(a){try{p&&(await p.end(),p=null),S(),a()}catch(m){a(m)}}}),J=_fs.createReadStream.call(void 0, e);await _promises3.pipeline.call(void 0, J,G,U),f(c),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Chunked ${e} into ${w} file(s); processed ${c.toLocaleString()} rows.`))}async function A(){let r=Number(process.env.WORKER_ID||"0");_chunkZUNVPK23cjs.a.info(`[w${r}] ready pid=${process.pid}`),_optionalChain([process, 'access', _2 => _2.send, 'optionalCall', _3 => _3({type:"ready"})]),process.on("message",async e=>{if(!e||typeof e!="object"||(e.type==="shutdown"&&process.exit(0),e.type!=="task"))return;let{filePath:n,options:i}=e.payload,{outputDir:s,clearOutputDir:f,chunkSizeMB:g}=i;try{await N({filePath:n,outputDir:s,clearOutputDir:f,chunkSizeMB:g,onProgress:(l,u)=>_optionalChain([process, 'access', _4 => _4.send, 'optionalCall', _5 => _5({type:"progress",payload:{filePath:n,processed:l,total:u}})])}),_optionalChain([process, 'access', _6 => _6.send, 'optionalCall', _7 => _7({type:"result",payload:{ok:!0,filePath:n}})])}catch(l){let u=_chunkK65NYGJMcjs.Wf.call(void 0, l);_chunkZUNVPK23cjs.a.error(`[w${r}] ERROR ${n}: ${u}`),_optionalChain([process, 'access', _8 => _8.send, 'optionalCall', _9 => _9({type:"result",payload:{ok:!1,filePath:n,error:u}})])}}),await new Promise(()=>{})}function de(r){return _chunkL5QUUSVBcjs.d.call(void 0, r)}function he(r){return _chunkL5QUUSVBcjs.e.call(void 0, r)}var q={renderHeader:de,renderWorkers:he};function ke(){return typeof __filename<"u"?__filename:process.argv[1]}async function Ue(r){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);let{directory:e,outputDir:n,clearOutputDir:i,chunkSizeMB:s,concurrency:f,viewerMode:g}=r,l=H(e,this),{poolSize:u,cpuCount:y}=_chunkL5QUUSVBcjs.a.call(void 0, f,l.length);_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Chunking ${l.length} CSV file(s) with pool size ${u} (CPU=${y})`));let C=l.map(t=>({filePath:t,options:{outputDir:n,clearOutputDir:i,chunkSizeMB:s}})),d={nextTask:()=>C.shift(),taskLabel:t=>t.filePath,initTotals:()=>({}),initSlotProgress:()=>{},onProgress:t=>t,onResult:(t,h)=>({totals:t,ok:!!h.ok}),postProcess:async()=>{}};await _chunkL5QUUSVBcjs.f.call(void 0, {title:`Chunk CSV - ${e}`,baseDir:e||n||process.cwd(),childFlag:_chunkL5QUUSVBcjs.b,childModulePath:ke(),poolSize:u,cpuCount:y,filesTotal:l.length,hooks:d,viewerMode:g,render:t=>_chunkL5QUUSVBcjs.c.call(void 0, t,q,g),extraKeyHandler:({logsBySlot:t,repaint:h,setPaused:c})=>_chunkL5QUUSVBcjs.g.call(void 0, {logsBySlot:t,repaint:h,setPaused:c})})}process.argv.includes(_chunkL5QUUSVBcjs.b)&&A().catch(r=>{_chunkZUNVPK23cjs.a.error(r),process.exit(1)});exports.chunkCsv = Ue;
+ //# sourceMappingURL=impl-JYAKR5RR.cjs.map
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-XUBKDJNI.cjs","../src/commands/admin/chunk-csv/impl.ts","../src/lib/helpers/collectCsvFilesOrExit.ts"],"names":["collectCsvFilesOrExit","directory","localContext","logger","colors","files","readdirSync","f","join","p","statSync","err"],"mappings":"AAAA,u/BAAkF,wDAAyC,wDAA0C,gCAA6B,wDAAyC,gCAA6B,gCAA6B,gFCClR,4BCDE,wBACiB,SAatBA,CAAAA,CACdC,CAAAA,CACAC,CAAAA,CACU,CACLD,CAAAA,EAAAA,CACHE,mBAAAA,CAAO,KAAA,CAAMC,gBAAAA,CAAO,GAAA,CAAI,iCAAiC,CAAC,CAAA,CAC1DF,CAAAA,CAAa,OAAA,CAAQ,IAAA,CAAK,CAAC,CAAA,CAAA,CAG7B,IAAIG,CAAAA,CAAkB,CAAC,CAAA,CACvB,GAAI,CAEFA,CAAAA,CADgBC,6BAAAA,CAAqB,CAAA,CAElC,MAAA,CAAQC,CAAAA,EAAMA,CAAAA,CAAE,QAAA,CAAS,MAAM,CAAC,CAAA,CAChC,GAAA,CAAKA,CAAAA,EAAMC,wBAAAA,CAAKP,CAAWM,CAAC,CAAC,CAAA,CAC7B,MAAA,CAAQE,CAAAA,EAAM,CACb,GAAI,CACF,OAAOC,0BAAAA,CAAU,CAAA,CAAE,MAAA,CAAO,CAC5B,CAAA,UAAQ,CACN,MAAO,CAAA,CACT,CACF,CAAC,CACL,CAAA,KAAA,CAASC,CAAAA,CAAK,CACZR,mBAAAA,CAAO,KAAA,CAAMC,gBAAAA,CAAO,GAAA,CAAI,CAAA,0BAAA,EAA6BH,CAAS,CAAA,CAAA","file":"/home/runner/work/cli/cli/dist/impl-XUBKDJNI.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { collectCsvFilesOrExit } from '../../../lib/helpers/collectCsvFilesOrExit';\nimport {\n computePoolSize,\n createExtraKeyHandler,\n CHILD_FLAG,\n type PoolHooks,\n runPool,\n dashboardPlugin,\n} from '../../../lib/pooling';\nimport {\n runChild,\n type ChunkProgress,\n type ChunkResult,\n type ChunkTask,\n} from './worker';\nimport { chunkCsvPlugin } from './ui';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n/**\n * Returns the current module's path so the worker pool knows what file to re-exec.\n * In Node ESM, __filename is undefined, so we fall back to argv[1].\n *\n * @returns The current module's path as a string\n */\nfunction getCurrentModulePath(): string {\n if (typeof __filename !== 'undefined') {\n return __filename as unknown as string;\n }\n return process.argv[1];\n}\n\n/**\n * Totals aggregate for this command.\n * We don’t need custom counters since the runner already tracks\n * completed/failed counts in its header — so we just use an empty record.\n */\ntype Totals = Record<string, never>;\n\n/**\n * CLI flags accepted by the `chunk-csv` command.\n *\n * These are passed down from the CLI parser into the parent process.\n */\nexport type ChunkCsvCommandFlags = {\n directory: string;\n outputDir?: string;\n clearOutputDir: boolean;\n chunkSizeMB: number;\n concurrency?: number;\n viewerMode: boolean;\n};\n\n/**\n * Parent entrypoint for chunking many CSVs in parallel using the worker pool runner.\n *\n * Lifecycle:\n * 1) Discover CSV inputs (exit if none).\n * 2) Compute pool size (CPU-count heuristic or --concurrency).\n * 3) Build a FIFO queue of `ChunkTask`s.\n * 4) Define pool hooks to drive task assignment, progress, and result handling.\n * 5) Launch the pool with `runPool`, rendering via the `chunkCsvPlugin`.\n *\n * @param this - Bound CLI context (provides process exit + logging).\n * @param flags - CLI options for the run.\n */\nexport async function chunkCsv(\n this: LocalContext,\n flags: ChunkCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const {\n directory,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n concurrency,\n viewerMode,\n } = flags;\n\n /* 1) Discover CSV inputs */\n const files = collectCsvFilesOrExit(directory, this);\n\n /* 2) Size the pool */\n const { 
poolSize, cpuCount } = computePoolSize(concurrency, files.length);\n\n logger.info(\n colors.green(\n `Chunking ${files.length} CSV file(s) with pool size ${poolSize} (CPU=${cpuCount})`,\n ),\n );\n\n /* 3) Prepare a simple FIFO queue of tasks (one per file). */\n const queue = files.map<ChunkTask>((filePath) => ({\n filePath,\n options: { outputDir, clearOutputDir, chunkSizeMB },\n }));\n\n /* 4) Define pool hooks to adapt runner to this command. */\n const hooks: PoolHooks<ChunkTask, ChunkProgress, ChunkResult, Totals> = {\n nextTask: () => queue.shift(),\n taskLabel: (t) => t.filePath,\n initTotals: () => ({} as Totals),\n initSlotProgress: () => undefined,\n onProgress: (totals) => totals,\n onResult: (totals, res) => ({ totals, ok: !!res.ok }),\n // postProcess receives log context when viewerMode=true — we don’t need it here.\n postProcess: async () => {\n // nothing extra for chunk-csv\n },\n };\n\n /* 5) Launch the pool runner with our hooks and custom dashboard plugin. */\n await runPool({\n title: `Chunk CSV - ${directory}`,\n baseDir: directory || outputDir || process.cwd(),\n childFlag: CHILD_FLAG,\n childModulePath: getCurrentModulePath(),\n poolSize,\n cpuCount,\n filesTotal: files.length,\n hooks,\n viewerMode,\n render: (input) => dashboardPlugin(input, chunkCsvPlugin, viewerMode),\n extraKeyHandler: ({ logsBySlot, repaint, setPaused }) =>\n createExtraKeyHandler({\n logsBySlot,\n repaint,\n setPaused,\n }),\n });\n}\n\n/* -------------------------------------------------------------------------------------------------\n * If invoked directly as a child process, enter worker loop\n * ------------------------------------------------------------------------------------------------- */\nif (process.argv.includes(CHILD_FLAG)) {\n runChild().catch((err) => {\n logger.error(err);\n process.exit(1);\n });\n}\n","import { join } from 'node:path';\nimport { readdirSync, statSync } from 'node:fs';\nimport colors from 'colors';\nimport { logger } from '../../logger';\nimport type { LocalContext } from '../../context';\n\n/**\n * Validate flags and collect CSV file paths from a directory.\n * On validation error, the provided `exit` function is called.\n *\n * @param directory - the directory containing CSV files\n * @param localContext - the context of the command, used for logging and exit\n * @returns an array of valid CSV file paths\n */\nexport function collectCsvFilesOrExit(\n directory: string | undefined,\n localContext: LocalContext,\n): string[] {\n if (!directory) {\n logger.error(colors.red('A --directory must be provided.'));\n localContext.process.exit(1);\n }\n\n let files: string[] = [];\n try {\n const entries = readdirSync(directory);\n files = entries\n .filter((f) => f.endsWith('.csv'))\n .map((f) => join(directory, f))\n .filter((p) => {\n try {\n return statSync(p).isFile();\n } catch {\n return false;\n }\n });\n } catch (err) {\n logger.error(colors.red(`Failed to read directory: ${directory}`));\n logger.error(colors.red((err as Error).message));\n localContext.process.exit(1);\n }\n\n if (files.length === 0) {\n logger.error(colors.red(`No CSV files found in directory: ${directory}`));\n localContext.process.exit(1);\n }\n\n return files;\n}\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-JYAKR5RR.cjs","../src/commands/admin/chunk-csv/impl.ts","../src/lib/helpers/collectCsvFilesOrExit.ts"],"names":["collectCsvFilesOrExit","directory","localContext","logger","colors","files","readdirSync","f","join","p","statSync","err"],"mappings":"AAAA,u/BAAkF,wDAAyC,wDAA0C,gCAA6B,wDAAyC,gCAA6B,gCAA6B,gFCClR,4BCDE,wBACiB,SAatBA,CAAAA,CACdC,CAAAA,CACAC,CAAAA,CACU,CACLD,CAAAA,EAAAA,CACHE,mBAAAA,CAAO,KAAA,CAAMC,gBAAAA,CAAO,GAAA,CAAI,iCAAiC,CAAC,CAAA,CAC1DF,CAAAA,CAAa,OAAA,CAAQ,IAAA,CAAK,CAAC,CAAA,CAAA,CAG7B,IAAIG,CAAAA,CAAkB,CAAC,CAAA,CACvB,GAAI,CAEFA,CAAAA,CADgBC,6BAAAA,CAAqB,CAAA,CAElC,MAAA,CAAQC,CAAAA,EAAMA,CAAAA,CAAE,QAAA,CAAS,MAAM,CAAC,CAAA,CAChC,GAAA,CAAKA,CAAAA,EAAMC,wBAAAA,CAAKP,CAAWM,CAAC,CAAC,CAAA,CAC7B,MAAA,CAAQE,CAAAA,EAAM,CACb,GAAI,CACF,OAAOC,0BAAAA,CAAU,CAAA,CAAE,MAAA,CAAO,CAC5B,CAAA,UAAQ,CACN,MAAO,CAAA,CACT,CACF,CAAC,CACL,CAAA,KAAA,CAASC,CAAAA,CAAK,CACZR,mBAAAA,CAAO,KAAA,CAAMC,gBAAAA,CAAO,GAAA,CAAI,CAAA,0BAAA,EAA6BH,CAAS,CAAA,CAAA","file":"/home/runner/work/cli/cli/dist/impl-JYAKR5RR.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { collectCsvFilesOrExit } from '../../../lib/helpers/collectCsvFilesOrExit';\nimport {\n computePoolSize,\n createExtraKeyHandler,\n CHILD_FLAG,\n type PoolHooks,\n runPool,\n dashboardPlugin,\n} from '../../../lib/pooling';\nimport {\n runChild,\n type ChunkProgress,\n type ChunkResult,\n type ChunkTask,\n} from './worker';\nimport { chunkCsvPlugin } from './ui';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n/**\n * Returns the current module's path so the worker pool knows what file to re-exec.\n * In Node ESM, __filename is undefined, so we fall back to argv[1].\n *\n * @returns The current module's path as a string\n */\nfunction getCurrentModulePath(): string {\n if (typeof __filename !== 'undefined') {\n return __filename as unknown as string;\n }\n return process.argv[1];\n}\n\n/**\n * Totals aggregate for this command.\n * We don’t need custom counters since the runner already tracks\n * completed/failed counts in its header — so we just use an empty record.\n */\ntype Totals = Record<string, never>;\n\n/**\n * CLI flags accepted by the `chunk-csv` command.\n *\n * These are passed down from the CLI parser into the parent process.\n */\nexport type ChunkCsvCommandFlags = {\n directory: string;\n outputDir?: string;\n clearOutputDir: boolean;\n chunkSizeMB: number;\n concurrency?: number;\n viewerMode: boolean;\n};\n\n/**\n * Parent entrypoint for chunking many CSVs in parallel using the worker pool runner.\n *\n * Lifecycle:\n * 1) Discover CSV inputs (exit if none).\n * 2) Compute pool size (CPU-count heuristic or --concurrency).\n * 3) Build a FIFO queue of `ChunkTask`s.\n * 4) Define pool hooks to drive task assignment, progress, and result handling.\n * 5) Launch the pool with `runPool`, rendering via the `chunkCsvPlugin`.\n *\n * @param this - Bound CLI context (provides process exit + logging).\n * @param flags - CLI options for the run.\n */\nexport async function chunkCsv(\n this: LocalContext,\n flags: ChunkCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const {\n directory,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n concurrency,\n viewerMode,\n } = flags;\n\n /* 1) Discover CSV inputs */\n const files = collectCsvFilesOrExit(directory, this);\n\n /* 2) Size the pool */\n const { 
poolSize, cpuCount } = computePoolSize(concurrency, files.length);\n\n logger.info(\n colors.green(\n `Chunking ${files.length} CSV file(s) with pool size ${poolSize} (CPU=${cpuCount})`,\n ),\n );\n\n /* 3) Prepare a simple FIFO queue of tasks (one per file). */\n const queue = files.map<ChunkTask>((filePath) => ({\n filePath,\n options: { outputDir, clearOutputDir, chunkSizeMB },\n }));\n\n /* 4) Define pool hooks to adapt runner to this command. */\n const hooks: PoolHooks<ChunkTask, ChunkProgress, ChunkResult, Totals> = {\n nextTask: () => queue.shift(),\n taskLabel: (t) => t.filePath,\n initTotals: () => ({} as Totals),\n initSlotProgress: () => undefined,\n onProgress: (totals) => totals,\n onResult: (totals, res) => ({ totals, ok: !!res.ok }),\n // postProcess receives log context when viewerMode=true — we don’t need it here.\n postProcess: async () => {\n // nothing extra for chunk-csv\n },\n };\n\n /* 5) Launch the pool runner with our hooks and custom dashboard plugin. */\n await runPool({\n title: `Chunk CSV - ${directory}`,\n baseDir: directory || outputDir || process.cwd(),\n childFlag: CHILD_FLAG,\n childModulePath: getCurrentModulePath(),\n poolSize,\n cpuCount,\n filesTotal: files.length,\n hooks,\n viewerMode,\n render: (input) => dashboardPlugin(input, chunkCsvPlugin, viewerMode),\n extraKeyHandler: ({ logsBySlot, repaint, setPaused }) =>\n createExtraKeyHandler({\n logsBySlot,\n repaint,\n setPaused,\n }),\n });\n}\n\n/* -------------------------------------------------------------------------------------------------\n * If invoked directly as a child process, enter worker loop\n * ------------------------------------------------------------------------------------------------- */\nif (process.argv.includes(CHILD_FLAG)) {\n runChild().catch((err) => {\n logger.error(err);\n process.exit(1);\n });\n}\n","import { join } from 'node:path';\nimport { readdirSync, statSync } from 'node:fs';\nimport colors from 'colors';\nimport { logger } from '../../logger';\nimport type { LocalContext } from '../../context';\n\n/**\n * Validate flags and collect CSV file paths from a directory.\n * On validation error, the provided `exit` function is called.\n *\n * @param directory - the directory containing CSV files\n * @param localContext - the context of the command, used for logging and exit\n * @returns an array of valid CSV file paths\n */\nexport function collectCsvFilesOrExit(\n directory: string | undefined,\n localContext: LocalContext,\n): string[] {\n if (!directory) {\n logger.error(colors.red('A --directory must be provided.'));\n localContext.process.exit(1);\n }\n\n let files: string[] = [];\n try {\n const entries = readdirSync(directory);\n files = entries\n .filter((f) => f.endsWith('.csv'))\n .map((f) => join(directory, f))\n .filter((p) => {\n try {\n return statSync(p).isFile();\n } catch {\n return false;\n }\n });\n } catch (err) {\n logger.error(colors.red(`Failed to read directory: ${directory}`));\n logger.error(colors.red((err as Error).message));\n localContext.process.exit(1);\n }\n\n if (files.length === 0) {\n logger.error(colors.red(`No CSV files found in directory: ${directory}`));\n localContext.process.exit(1);\n }\n\n return files;\n}\n"]}