@transcend-io/cli 8.0.6 → 8.0.7

This diff shows the content changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (109)
  1. package/dist/bin/bash-complete.cjs +1 -1
  2. package/dist/bin/cli.cjs +1 -1
  3. package/dist/bin/deprecated-command.cjs +2 -2
  4. package/dist/{chunk-DCBE53LG.cjs → chunk-2VX4QD73.cjs} +3 -3
  5. package/dist/{chunk-DCBE53LG.cjs.map → chunk-2VX4QD73.cjs.map} +1 -1
  6. package/dist/{chunk-JZEDU4GZ.cjs → chunk-4GRQMYW2.cjs} +2 -2
  7. package/dist/{chunk-JZEDU4GZ.cjs.map → chunk-4GRQMYW2.cjs.map} +1 -1
  8. package/dist/{chunk-MFIMY5RY.cjs → chunk-E57K4DGX.cjs} +2 -2
  9. package/dist/{chunk-MFIMY5RY.cjs.map → chunk-E57K4DGX.cjs.map} +1 -1
  10. package/dist/{chunk-IU4E6MQW.cjs → chunk-GT7Q27PI.cjs} +2 -2
  11. package/dist/{chunk-IU4E6MQW.cjs.map → chunk-GT7Q27PI.cjs.map} +1 -1
  12. package/dist/{chunk-5Y7FIOUT.cjs → chunk-H6YI2HZS.cjs} +2 -2
  13. package/dist/{chunk-5Y7FIOUT.cjs.map → chunk-H6YI2HZS.cjs.map} +1 -1
  14. package/dist/{chunk-KU3UCRAF.cjs → chunk-HBKA5Z5A.cjs} +2 -2
  15. package/dist/{chunk-KU3UCRAF.cjs.map → chunk-HBKA5Z5A.cjs.map} +1 -1
  16. package/dist/{chunk-NAGRUVXK.cjs → chunk-K2E6TCB5.cjs} +2 -2
  17. package/dist/{chunk-NAGRUVXK.cjs.map → chunk-K2E6TCB5.cjs.map} +1 -1
  18. package/dist/{chunk-OKLX7XKW.cjs → chunk-NKFAEQ2Q.cjs} +2 -2
  19. package/dist/{chunk-OKLX7XKW.cjs.map → chunk-NKFAEQ2Q.cjs.map} +1 -1
  20. package/dist/{chunk-LPUZGMB6.cjs → chunk-OCNPZWYT.cjs} +4 -4
  21. package/dist/{chunk-LPUZGMB6.cjs.map → chunk-OCNPZWYT.cjs.map} +1 -1
  22. package/dist/chunk-RSN3DUTP.cjs +2920 -0
  23. package/dist/{chunk-7O5DWH3M.cjs.map → chunk-RSN3DUTP.cjs.map} +1 -1
  24. package/dist/{chunk-DTGC4M5U.cjs → chunk-S3HIPD4J.cjs} +2 -2
  25. package/dist/{chunk-DTGC4M5U.cjs.map → chunk-S3HIPD4J.cjs.map} +1 -1
  26. package/dist/{chunk-YMH24XQT.cjs → chunk-SQVECTFA.cjs} +19 -19
  27. package/dist/{chunk-YMH24XQT.cjs.map → chunk-SQVECTFA.cjs.map} +1 -1
  28. package/dist/{impl-WCRE3QMZ.cjs → impl-2GZWVX2N.cjs} +2 -2
  29. package/dist/{impl-WCRE3QMZ.cjs.map → impl-2GZWVX2N.cjs.map} +1 -1
  30. package/dist/{impl-4WFQNFXM.cjs → impl-2NI344VN.cjs} +2 -2
  31. package/dist/{impl-4WFQNFXM.cjs.map → impl-2NI344VN.cjs.map} +1 -1
  32. package/dist/{impl-LAYRDHOS.cjs → impl-5XZA6XDY.cjs} +2 -2
  33. package/dist/{impl-LAYRDHOS.cjs.map → impl-5XZA6XDY.cjs.map} +1 -1
  34. package/dist/{impl-S3QSYERT.cjs → impl-A7WOM6B2.cjs} +2 -2
  35. package/dist/{impl-S3QSYERT.cjs.map → impl-A7WOM6B2.cjs.map} +1 -1
  36. package/dist/{impl-GNNZ6ISS.cjs → impl-ACT4HEIF.cjs} +2 -2
  37. package/dist/{impl-GNNZ6ISS.cjs.map → impl-ACT4HEIF.cjs.map} +1 -1
  38. package/dist/{impl-2XFKDVXC.cjs → impl-ASZALW6O.cjs} +2 -2
  39. package/dist/{impl-2XFKDVXC.cjs.map → impl-ASZALW6O.cjs.map} +1 -1
  40. package/dist/{impl-YYLZH24L.cjs → impl-B4RGQNKY.cjs} +4 -4
  41. package/dist/{impl-YYLZH24L.cjs.map → impl-B4RGQNKY.cjs.map} +1 -1
  42. package/dist/{impl-DUKSVNOM.cjs → impl-BRA6TEVA.cjs} +2 -2
  43. package/dist/{impl-DUKSVNOM.cjs.map → impl-BRA6TEVA.cjs.map} +1 -1
  44. package/dist/{impl-IN2PQA3H.cjs → impl-CWWICDPC.cjs} +2 -2
  45. package/dist/{impl-IN2PQA3H.cjs.map → impl-CWWICDPC.cjs.map} +1 -1
  46. package/dist/{impl-LWPYKGCZ.cjs → impl-E7JWHVRJ.cjs} +2 -2
  47. package/dist/{impl-LWPYKGCZ.cjs.map → impl-E7JWHVRJ.cjs.map} +1 -1
  48. package/dist/{impl-SZM2IXSY.cjs → impl-ERCHNSZD.cjs} +3 -3
  49. package/dist/{impl-SZM2IXSY.cjs.map → impl-ERCHNSZD.cjs.map} +1 -1
  50. package/dist/{impl-TEADN2ZL.cjs → impl-EU6DPMR5.cjs} +5 -5
  51. package/dist/{impl-TEADN2ZL.cjs.map → impl-EU6DPMR5.cjs.map} +1 -1
  52. package/dist/{impl-OZ6XHZRW.cjs → impl-GS4CVUAP.cjs} +2 -2
  53. package/dist/{impl-OZ6XHZRW.cjs.map → impl-GS4CVUAP.cjs.map} +1 -1
  54. package/dist/{impl-5IVCXXAL.cjs → impl-GTJ5DPQ4.cjs} +2 -2
  55. package/dist/{impl-5IVCXXAL.cjs.map → impl-GTJ5DPQ4.cjs.map} +1 -1
  56. package/dist/{impl-VZDR2CDX.cjs → impl-HHPNH3QN.cjs} +2 -2
  57. package/dist/{impl-VZDR2CDX.cjs.map → impl-HHPNH3QN.cjs.map} +1 -1
  58. package/dist/{impl-I6ETF7X7.cjs → impl-JYM2T6AQ.cjs} +2 -2
  59. package/dist/{impl-I6ETF7X7.cjs.map → impl-JYM2T6AQ.cjs.map} +1 -1
  60. package/dist/{impl-XXGHYIS2.cjs → impl-K6UD53QP.cjs} +2 -2
  61. package/dist/{impl-XXGHYIS2.cjs.map → impl-K6UD53QP.cjs.map} +1 -1
  62. package/dist/{impl-5VNZMRQP.cjs → impl-KHR6CYYW.cjs} +2 -2
  63. package/dist/{impl-5VNZMRQP.cjs.map → impl-KHR6CYYW.cjs.map} +1 -1
  64. package/dist/{impl-3JDGUUXS.cjs → impl-KQNFC333.cjs} +2 -2
  65. package/dist/{impl-3JDGUUXS.cjs.map → impl-KQNFC333.cjs.map} +1 -1
  66. package/dist/{impl-VRTPR7ZC.cjs → impl-LTMYU2AY.cjs} +2 -2
  67. package/dist/{impl-VRTPR7ZC.cjs.map → impl-LTMYU2AY.cjs.map} +1 -1
  68. package/dist/{impl-LO7NADSE.cjs → impl-M6XVZEZC.cjs} +2 -2
  69. package/dist/{impl-LO7NADSE.cjs.map → impl-M6XVZEZC.cjs.map} +1 -1
  70. package/dist/{impl-DGMMPU6J.cjs → impl-MM7Z76YA.cjs} +2 -2
  71. package/dist/{impl-DGMMPU6J.cjs.map → impl-MM7Z76YA.cjs.map} +1 -1
  72. package/dist/{impl-2WH2UTIZ.cjs → impl-MNZZSLBI.cjs} +2 -2
  73. package/dist/{impl-2WH2UTIZ.cjs.map → impl-MNZZSLBI.cjs.map} +1 -1
  74. package/dist/{impl-OMOQK2HQ.cjs → impl-N2ANLQOJ.cjs} +6 -6
  75. package/dist/{impl-OMOQK2HQ.cjs.map → impl-N2ANLQOJ.cjs.map} +1 -1
  76. package/dist/{impl-EJMVBV6T.cjs → impl-NRS5JFKV.cjs} +2 -2
  77. package/dist/{impl-EJMVBV6T.cjs.map → impl-NRS5JFKV.cjs.map} +1 -1
  78. package/dist/{impl-7U3M4RGF.cjs → impl-NWMCEWUU.cjs} +2 -2
  79. package/dist/{impl-7U3M4RGF.cjs.map → impl-NWMCEWUU.cjs.map} +1 -1
  80. package/dist/{impl-ZXCX5CQQ.cjs → impl-P23VFJZF.cjs} +2 -2
  81. package/dist/{impl-ZXCX5CQQ.cjs.map → impl-P23VFJZF.cjs.map} +1 -1
  82. package/dist/{impl-B3OMYCGQ.cjs → impl-QC2QXWOW.cjs} +2 -2
  83. package/dist/{impl-B3OMYCGQ.cjs.map → impl-QC2QXWOW.cjs.map} +1 -1
  84. package/dist/{impl-JA34WWJ5.cjs → impl-R3CY7ZIW.cjs} +2 -2
  85. package/dist/{impl-JA34WWJ5.cjs.map → impl-R3CY7ZIW.cjs.map} +1 -1
  86. package/dist/{impl-7JEVVESR.cjs → impl-SVCHBYPB.cjs} +2 -2
  87. package/dist/{impl-7JEVVESR.cjs.map → impl-SVCHBYPB.cjs.map} +1 -1
  88. package/dist/{impl-ZY3HLGMV.cjs → impl-T5AW675H.cjs} +2 -2
  89. package/dist/{impl-ZY3HLGMV.cjs.map → impl-T5AW675H.cjs.map} +1 -1
  90. package/dist/{impl-TVRDO556.cjs → impl-TGY32SCN.cjs} +2 -2
  91. package/dist/{impl-TVRDO556.cjs.map → impl-TGY32SCN.cjs.map} +1 -1
  92. package/dist/{impl-X7QVQTTU.cjs → impl-U6XFUPLF.cjs} +2 -2
  93. package/dist/{impl-X7QVQTTU.cjs.map → impl-U6XFUPLF.cjs.map} +1 -1
  94. package/dist/impl-UFXTQLXO.cjs +2 -0
  95. package/dist/{impl-IKL2VVT5.cjs.map → impl-UFXTQLXO.cjs.map} +1 -1
  96. package/dist/{impl-SVLQQPHG.cjs → impl-UHENI4EC.cjs} +2 -2
  97. package/dist/{impl-SVLQQPHG.cjs.map → impl-UHENI4EC.cjs.map} +1 -1
  98. package/dist/{impl-ZZL5FMCH.cjs → impl-UOAWNR34.cjs} +3 -3
  99. package/dist/{impl-ZZL5FMCH.cjs.map → impl-UOAWNR34.cjs.map} +1 -1
  100. package/dist/{impl-Z3TEIP7I.cjs → impl-VUQ6T7ON.cjs} +2 -2
  101. package/dist/{impl-Z3TEIP7I.cjs.map → impl-VUQ6T7ON.cjs.map} +1 -1
  102. package/dist/{impl-22FRQBKA.cjs → impl-W4I4GTBC.cjs} +2 -2
  103. package/dist/{impl-22FRQBKA.cjs.map → impl-W4I4GTBC.cjs.map} +1 -1
  104. package/dist/index.cjs +3 -3
  105. package/dist/index.cjs.map +1 -1
  106. package/dist/index.d.cts +447 -429
  107. package/package.json +1 -1
  108. package/dist/chunk-7O5DWH3M.cjs +0 -2920
  109. package/dist/impl-IKL2VVT5.cjs +0 -2
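
Taken together, nearly all of these changes are mechanical: the bundler regenerated its content-hashed chunk filenames, so each `chunk-*`/`impl-*` file differs only in renamed `require()` paths and `sourceMappingURL` comments, alongside the `package.json` version bump and the regenerated `index.d.cts`. One way to check that the hunks below contain no substantive change is to normalize the hashes before comparing. The TypeScript sketch below is illustrative only and not part of the package; the hash pattern and file paths are assumptions based on the file list above.

```ts
// Illustrative only (not part of @transcend-io/cli): normalize content-hashed
// chunk filenames so two extracted package versions can be compared for
// substantive differences. Assumes hashes are 8 uppercase alphanumeric
// characters, matching the names above (e.g. chunk-GT7Q27PI, impl-ERCHNSZD).
import { readFileSync } from 'node:fs';

const HASH_PATTERN = /\b(chunk|impl)-[A-Z0-9]{8}\b/g;

export function normalizeChunkHashes(source: string): string {
  return source.replace(HASH_PATTERN, '$1-XXXXXXXX');
}

// Usage: the tarball paths are hypothetical locations of the unpacked versions.
const before = normalizeChunkHashes(
  readFileSync('cli-8.0.6/package/dist/index.cjs', 'utf-8'),
);
const after = normalizeChunkHashes(
  readFileSync('cli-8.0.7/package/dist/index.cjs', 'utf-8'),
);
console.log(before === after ? 'only hash renames' : 'substantive change');
```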
package/dist/{chunk-IU4E6MQW.cjs → chunk-GT7Q27PI.cjs}
@@ -1,4 +1,4 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkOKLX7XKWcjs = require('./chunk-OKLX7XKW.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _fs = require('fs');var _typeutils = require('@transcend-io/type-utils');var _privacytypes = require('@transcend-io/privacy-types');var N=/target ('|")(.*?)('|")/,O=/pod ('|")(.*?)('|")(, ('|")~> (.+?)('|")|)/,y={supportedFiles:["Podfile"],ignoreDirs:["Pods"],scanFunction:e=>{let n=_fs.readFileSync.call(void 0, e,"utf-8"),o=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(N,"g"),matches:["quote1","name","quote2"]},n),c=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(O,"g"),matches:["quote1","name","quote2","extra","quote3","version","quote4"]},n);return o.map((t,p)=>({name:t.name,type:_privacytypes.CodePackageType.CocoaPods,softwareDevelopmentKits:c.filter(r=>r.matchIndex>t.matchIndex&&(!o[p+1]||r.matchIndex<o[p+1].matchIndex)).map(r=>({name:r.name,version:r.version}))}))}};var _path = require('path');var K=/implementation( *)('|")(.+?):(.+?):(.+?|)('|")/,M=/apply plugin: *('|")(.+?)(:(.+?)|)('|")/,j=/implementation group:( *)('|")(.+?)('|"),( *)name:( *)('|")(.+?)('|"),( *)version:( *)('|")(.+?)('|")/,L=/applicationId( *)"(.+?)"/,S={supportedFiles:["build.gradle**"],ignoreDirs:["gradle-app.setting","gradle-wrapper.jar","gradle-wrapper.properties"],scanFunction:e=>{let n=_fs.readFileSync.call(void 0, e,"utf-8"),o=_path.dirname.call(void 0, e),c=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(K,"g"),matches:["space","quote1","name","path","version","quote2"]},n),a=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(M,"g"),matches:["quote1","name","group","version","quote2"]},n),t=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(j,"g"),matches:["space1","quote1","group","quote2","space2","space3","quote3","name","quote4","space4","space5","quote5","version","quote6"]},n),p=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(L,"g"),matches:["space","name"]},n);if(p.length>1)throw new Error(`Expected only one applicationId per file: ${e}`);return[{name:_optionalChain([p, 'access', _2 => _2[0], 'optionalAccess', _3 => _3.name])||o.split("/").pop(),softwareDevelopmentKits:[...c,...t,...a].map(r=>({name:r.name,version:r.version||void 0}))}]}};var x={supportedFiles:["package.json"],ignoreDirs:["node_modules","serverless-build","lambda-build"],scanFunction:e=>{let n=_fs.readFileSync.call(void 0, e,"utf-8"),o=_path.dirname.call(void 0, 
e),c=JSON.parse(n),{name:a,description:t,dependencies:p={},devDependencies:r={},optionalDependencies:i={}}=c;return[{name:a||o.split("/").pop(),description:t,softwareDevelopmentKits:[...Object.entries(p).map(([s,m])=>({name:s,version:typeof m=="string"?m:void 0})),...Object.entries(r).map(([s,m])=>({name:s,version:typeof m=="string"?m:void 0,isDevDependency:!0})),...Object.entries(i).map(([s,m])=>({name:s,version:typeof m=="string"?m:void 0}))]}]}};var V=/(.+?)(=+)(.+)/,H=/name *= *('|")(.+?)('|")/,Q=/description *= *('|")(.+?)('|")/,P={supportedFiles:["requirements.txt"],ignoreDirs:["build","lib","lib64"],scanFunction:e=>{let n=_fs.readFileSync.call(void 0, e,"utf-8"),o=_path.dirname.call(void 0, e),a=_chunkOKLX7XKWcjs.c.call(void 0, o).find(s=>s==="setup.py"),t=a?_fs.readFileSync.call(void 0, _path.join.call(void 0, o,a),"utf-8"):void 0,p=t?(H.exec(t)||[])[2]:void 0,r=t?(Q.exec(t)||[])[2]:void 0,i=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(V,"g"),matches:["name","equals","version"]},n);return[{name:p||o.split("/").pop(),description:r||void 0,type:_privacytypes.CodePackageType.RequirementsTxt,softwareDevelopmentKits:i.map(s=>({name:s.name,version:s.version}))}]}};var ee=/gem *('|")(.+?)('|")(, *('|")(.+?)('|")|)/,ne=/spec\.name *= *('|")(.+?)('|")/,oe=/spec\.description *= *('|")(.+?)('|")/,te=/spec\.summary *= *('|")(.+?)('|")/,k={supportedFiles:["Gemfile"],ignoreDirs:["bin"],scanFunction:e=>{let n=_fs.readFileSync.call(void 0, e,"utf-8"),o=_path.dirname.call(void 0, e),a=_chunkOKLX7XKWcjs.c.call(void 0, o).find(s=>s===".gemspec"),t=a?_fs.readFileSync.call(void 0, a,"utf-8"):void 0,p=t?(ne.exec(t)||[])[2]:void 0,r=t?(oe.exec(t)||te.exec(t)||[])[1]:void 0,i=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(ee,"g"),matches:["quote1","name","quote2","hasVersion","quote3","version","quote4"]},n);return[{name:p||o.split("/").pop(),description:r||void 0,type:_privacytypes.CodePackageType.RequirementsTxt,softwareDevelopmentKits:i.map(s=>({name:s.name,version:s.version}))}]}};var _jsyaml = require('js-yaml'); var _jsyaml2 = _interopRequireDefault(_jsyaml);function pe(e){return e.split(`
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkNKFAEQ2Qcjs = require('./chunk-NKFAEQ2Q.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _fs = require('fs');var _typeutils = require('@transcend-io/type-utils');var _privacytypes = require('@transcend-io/privacy-types');var N=/target ('|")(.*?)('|")/,O=/pod ('|")(.*?)('|")(, ('|")~> (.+?)('|")|)/,y={supportedFiles:["Podfile"],ignoreDirs:["Pods"],scanFunction:e=>{let n=_fs.readFileSync.call(void 0, e,"utf-8"),o=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(N,"g"),matches:["quote1","name","quote2"]},n),c=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(O,"g"),matches:["quote1","name","quote2","extra","quote3","version","quote4"]},n);return o.map((t,p)=>({name:t.name,type:_privacytypes.CodePackageType.CocoaPods,softwareDevelopmentKits:c.filter(r=>r.matchIndex>t.matchIndex&&(!o[p+1]||r.matchIndex<o[p+1].matchIndex)).map(r=>({name:r.name,version:r.version}))}))}};var _path = require('path');var K=/implementation( *)('|")(.+?):(.+?):(.+?|)('|")/,M=/apply plugin: *('|")(.+?)(:(.+?)|)('|")/,j=/implementation group:( *)('|")(.+?)('|"),( *)name:( *)('|")(.+?)('|"),( *)version:( *)('|")(.+?)('|")/,L=/applicationId( *)"(.+?)"/,S={supportedFiles:["build.gradle**"],ignoreDirs:["gradle-app.setting","gradle-wrapper.jar","gradle-wrapper.properties"],scanFunction:e=>{let n=_fs.readFileSync.call(void 0, e,"utf-8"),o=_path.dirname.call(void 0, e),c=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(K,"g"),matches:["space","quote1","name","path","version","quote2"]},n),a=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(M,"g"),matches:["quote1","name","group","version","quote2"]},n),t=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(j,"g"),matches:["space1","quote1","group","quote2","space2","space3","quote3","name","quote4","space4","space5","quote5","version","quote6"]},n),p=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(L,"g"),matches:["space","name"]},n);if(p.length>1)throw new Error(`Expected only one applicationId per file: ${e}`);return[{name:_optionalChain([p, 'access', _2 => _2[0], 'optionalAccess', _3 => _3.name])||o.split("/").pop(),softwareDevelopmentKits:[...c,...t,...a].map(r=>({name:r.name,version:r.version||void 0}))}]}};var x={supportedFiles:["package.json"],ignoreDirs:["node_modules","serverless-build","lambda-build"],scanFunction:e=>{let n=_fs.readFileSync.call(void 0, e,"utf-8"),o=_path.dirname.call(void 0, 
e),c=JSON.parse(n),{name:a,description:t,dependencies:p={},devDependencies:r={},optionalDependencies:i={}}=c;return[{name:a||o.split("/").pop(),description:t,softwareDevelopmentKits:[...Object.entries(p).map(([s,m])=>({name:s,version:typeof m=="string"?m:void 0})),...Object.entries(r).map(([s,m])=>({name:s,version:typeof m=="string"?m:void 0,isDevDependency:!0})),...Object.entries(i).map(([s,m])=>({name:s,version:typeof m=="string"?m:void 0}))]}]}};var V=/(.+?)(=+)(.+)/,H=/name *= *('|")(.+?)('|")/,Q=/description *= *('|")(.+?)('|")/,P={supportedFiles:["requirements.txt"],ignoreDirs:["build","lib","lib64"],scanFunction:e=>{let n=_fs.readFileSync.call(void 0, e,"utf-8"),o=_path.dirname.call(void 0, e),a=_chunkNKFAEQ2Qcjs.c.call(void 0, o).find(s=>s==="setup.py"),t=a?_fs.readFileSync.call(void 0, _path.join.call(void 0, o,a),"utf-8"):void 0,p=t?(H.exec(t)||[])[2]:void 0,r=t?(Q.exec(t)||[])[2]:void 0,i=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(V,"g"),matches:["name","equals","version"]},n);return[{name:p||o.split("/").pop(),description:r||void 0,type:_privacytypes.CodePackageType.RequirementsTxt,softwareDevelopmentKits:i.map(s=>({name:s.name,version:s.version}))}]}};var ee=/gem *('|")(.+?)('|")(, *('|")(.+?)('|")|)/,ne=/spec\.name *= *('|")(.+?)('|")/,oe=/spec\.description *= *('|")(.+?)('|")/,te=/spec\.summary *= *('|")(.+?)('|")/,k={supportedFiles:["Gemfile"],ignoreDirs:["bin"],scanFunction:e=>{let n=_fs.readFileSync.call(void 0, e,"utf-8"),o=_path.dirname.call(void 0, e),a=_chunkNKFAEQ2Qcjs.c.call(void 0, o).find(s=>s===".gemspec"),t=a?_fs.readFileSync.call(void 0, a,"utf-8"):void 0,p=t?(ne.exec(t)||[])[2]:void 0,r=t?(oe.exec(t)||te.exec(t)||[])[1]:void 0,i=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(ee,"g"),matches:["quote1","name","quote2","hasVersion","quote3","version","quote4"]},n);return[{name:p||o.split("/").pop(),description:r||void 0,type:_privacytypes.CodePackageType.RequirementsTxt,softwareDevelopmentKits:i.map(s=>({name:s.name,version:s.version}))}]}};var _jsyaml = require('js-yaml'); var _jsyaml2 = _interopRequireDefault(_jsyaml);function pe(e){return e.split(`
  `).map(n=>{let o=n.indexOf("#");return o>-1&&!n.substring(0,o).includes('"')&&!n.substring(0,o).includes("'")?n.substring(0,o).trim():n}).filter(n=>n.length>0).join(`
  `)}var v={supportedFiles:["pubspec.yml"],ignoreDirs:["build"],scanFunction:e=>{let n=_path.dirname.call(void 0, e),o=_fs.readFileSync.call(void 0, e,"utf-8"),{name:c,description:a,dev_dependencies:t={},dependencies:p={}}=_jsyaml2.default.load(pe(o));return[{name:c||n.split("/").pop(),description:a,type:_privacytypes.CodePackageType.RequirementsTxt,softwareDevelopmentKits:[...Object.entries(p).map(([r,i])=>({name:r,version:typeof i=="string"?i:typeof i=="number"?i.toString():_optionalChain([i, 'optionalAccess', _4 => _4.sdk])})),...Object.entries(t).map(([r,i])=>({name:r,version:typeof i=="string"?i:typeof i=="number"?i.toString():_optionalChain([i, 'optionalAccess', _5 => _5.sdk]),isDevDependency:!0}))]}]}};var I={supportedFiles:["composer.json"],ignoreDirs:["vendor","node_modules","cache","build","dist"],scanFunction:e=>{let n=_fs.readFileSync.call(void 0, e,"utf-8"),o=_path.dirname.call(void 0, e),c=JSON.parse(n),{name:a,description:t,require:p={},"require-dev":r={}}=c;return[{name:a||o.split("/").pop(),description:t,softwareDevelopmentKits:[...Object.entries(p).map(([i,s])=>({name:i,version:typeof s=="string"?s:void 0})),...Object.entries(r).map(([i,s])=>({name:i,version:typeof s=="string"?s:void 0,isDevDependency:!0}))]}]}};var _iots = require('io-ts'); var d = _interopRequireWildcard(_iots);var le=d.type({pins:d.array(d.type({identity:d.string,kind:d.string,location:d.string,state:d.type({revision:d.string,version:d.string})})),version:d.number}),D={supportedFiles:["Package.resolved"],ignoreDirs:[],scanFunction:e=>{let n=_fs.readFileSync.call(void 0, e,"utf-8"),o=_typeutils.decodeCodec.call(void 0, le,n);return[{name:_path.dirname.call(void 0, e).split("/").pop()||"",type:_privacytypes.CodePackageType.CocoaPods,softwareDevelopmentKits:o.pins.map(c=>({name:c.identity,version:c.state.version}))}]}};var gn={cocoaPods:y,gradle:S,javascriptPackageJson:x,pythonRequirementsTxt:P,gemfile:k,pubspec:v,swift:D},_={[_privacytypes.CodePackageType.CocoaPods]:y,[_privacytypes.CodePackageType.Gradle]:S,[_privacytypes.CodePackageType.PackageJson]:x,[_privacytypes.CodePackageType.RequirementsTxt]:P,[_privacytypes.CodePackageType.Gemfile]:k,[_privacytypes.CodePackageType.Pubspec]:v,[_privacytypes.CodePackageType.ComposerJson]:I,[_privacytypes.CodePackageType.Swift]:D};var _fastglob = require('fast-glob'); var _fastglob2 = _interopRequireDefault(_fastglob);var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function Sn({scanPath:e,ignoreDirs:n=[],repositoryName:o}){return(await Promise.all(_typeutils.getEntries.call(void 0, _).map(async([a,t])=>{let{ignoreDirs:p,supportedFiles:r,scanFunction:i}=t,s=[...n,...p].filter(m=>m.length>0);try{let m=await _fastglob2.default.call(void 0, `${e}/**/${r.join("|")}`,{ignore:s.map(g=>`${e}/**/${g}`),unique:!0,onlyFiles:!0});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Scanning: ${m.length} files of type ${a}`));let C=m.map(g=>i(g).map(q=>({...q,relativePath:g.replace(`${e}/`,"")}))).flat();return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Found: ${C.length} packages and ${C.map(({softwareDevelopmentKits:g=[]})=>g).flat().length} sdks`)),C.map(g=>({...g,type:a,repositoryName:o}))}catch(m){throw new Error(`Error scanning globs ${r} with error: ${m}`)}}))).flat()}exports.a = gn; exports.b = Sn;
- //# sourceMappingURL=chunk-IU4E6MQW.cjs.map
+ //# sourceMappingURL=chunk-GT7Q27PI.cjs.map
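
The only differences in this chunk are renamed chunk references (`chunk-OKLX7XKW` → `chunk-NKFAEQ2Q`) and the updated `sourceMappingURL`. For readability, here is a self-contained sketch of the Podfile-scanning logic the minified code implements; the original uses `findAllWithRegex` from `@transcend-io/type-utils`, and the `findAll` helper below is an assumed stand-in that records each match's offset.

```ts
// Self-contained sketch of the Podfile scanner in this chunk. Regexes are
// copied from the chunk; findAll and PodMatch are assumed stand-ins.
const POD_TARGET_REGEX = /target ('|")(.*?)('|")/g;
const POD_PACKAGE_REGEX = /pod ('|")(.*?)('|")(, ('|")~> (.+?)('|")|)/g;

interface PodMatch {
  name: string;
  version?: string;
  matchIndex: number;
}

function findAll(
  regex: RegExp,
  nameGroup: number,
  versionGroup: number | undefined,
  contents: string,
): PodMatch[] {
  return [...contents.matchAll(regex)].map((m) => ({
    name: m[nameGroup] ?? '',
    version: versionGroup === undefined ? undefined : m[versionGroup],
    matchIndex: m.index ?? 0,
  }));
}

// Each pod is attributed to the nearest preceding `target` block: its offset
// must fall between this target's offset and the next target's offset.
function scanPodfile(contents: string): { name: string; pods: PodMatch[] }[] {
  const targets = findAll(POD_TARGET_REGEX, 2, undefined, contents);
  const pods = findAll(POD_PACKAGE_REGEX, 2, 6, contents);
  return targets.map((target, ind) => ({
    name: target.name,
    pods: pods.filter(
      (pod) =>
        pod.matchIndex > target.matchIndex &&
        (!targets[ind + 1] || pod.matchIndex < targets[ind + 1].matchIndex),
    ),
  }));
}

// Example: one target with one versioned pod.
console.log(scanPodfile(`target 'MyApp'\n  pod 'Firebase', '~> 10.0'\nend\n`));
```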
package/dist/{chunk-IU4E6MQW.cjs.map → chunk-GT7Q27PI.cjs.map}
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-IU4E6MQW.cjs","../src/lib/code-scanning/integrations/cocoaPods.ts","../src/lib/code-scanning/integrations/gradle.ts","../src/lib/code-scanning/integrations/pubspec.ts","../src/lib/code-scanning/constants.ts"],"names":["POD_TARGET_REGEX","POD_PACKAGE_REGEX","cocoaPods","filePath","fileContents","readFileSync","targets","findAllWithRegex","packages","target","ind","CodePackageType","pkg","GRADLE_IMPLEMENTATION_REGEX","GRADLE_PLUGIN_REGEX","GRADLE_IMPLEMENTATION_GROUP_REGEX","GRADLE_APPLICATION_NAME_REGEX","gradle","directory","dirname","targetPlugins","targetGroups","applications"],"mappings":"AAAA,u/BAAwC,wDAAyC,wBCApD,qDAGI,2DACD,IAE1BA,CAAAA,CAAmB,wBAAA,CACnBC,CAAAA,CAAoB,4CAAA,CAEbC,CAAAA,CAAgC,CAC3C,cAAA,CAAgB,CAAC,SAAS,CAAA,CAC1B,UAAA,CAAY,CAAC,MAAM,CAAA,CACnB,YAAA,CAAeC,CAAAA,EAAa,CAC1B,IAAMC,CAAAA,CAAeC,8BAAAA,CAAaF,CAAU,OAAO,CAAA,CAE7CG,CAAAA,CAAUC,yCAAAA,CAEZ,KAAA,CAAO,IAAI,MAAA,CAAOP,CAAAA,CAAkB,GAAG,CAAA,CACvC,OAAA,CAAS,CAAC,QAAA,CAAU,MAAA,CAAQ,QAAQ,CACtC,CAAA,CACAI,CACF,CAAA,CACMI,CAAAA,CAAWD,yCAAAA,CAEb,KAAA,CAAO,IAAI,MAAA,CAAON,CAAAA,CAAmB,GAAG,CAAA,CACxC,OAAA,CAAS,CACP,QAAA,CACA,MAAA,CACA,QAAA,CACA,OAAA,CACA,QAAA,CACA,SAAA,CACA,QACF,CACF,CAAA,CACAG,CACF,CAAA,CAiBA,OAf+BE,CAAAA,CAAQ,GAAA,CAAI,CAACG,CAAAA,CAAQC,CAAAA,CAAAA,EAAAA,CAAS,CAC3D,IAAA,CAAMD,CAAAA,CAAO,IAAA,CACb,IAAA,CAAME,6BAAAA,CAAgB,SAAA,CACtB,uBAAA,CAAyBH,CAAAA,CACtB,MAAA,CACEI,CAAAA,EACCA,CAAAA,CAAI,UAAA,CAAaH,CAAAA,CAAO,UAAA,EAAA,CACvB,CAACH,CAAAA,CAAQI,CAAAA,CAAM,CAAC,CAAA,EAAKE,CAAAA,CAAI,UAAA,CAAaN,CAAAA,CAAQI,CAAAA,CAAM,CAAC,CAAA,CAAE,UAAA,CAC5D,CAAA,CACC,GAAA,CAAKE,CAAAA,EAAAA,CAAS,CACb,IAAA,CAAMA,CAAAA,CAAI,IAAA,CACV,OAAA,CAASA,CAAAA,CAAI,OACf,CAAA,CAAE,CACN,CAAA,CAAE,CAGJ,CACF,CAAA,CCvDA,4BAGwB,IAElBC,CAAAA,CACJ,gDAAA,CACIC,CAAAA,CAAsB,yCAAA,CACtBC,CAAAA,CACJ,uGAAA,CACIC,CAAAA,CAAgC,0BAAA,CAYzBC,CAAAA,CAA6B,CACxC,cAAA,CAAgB,CAAC,gBAAgB,CAAA,CACjC,UAAA,CAAY,CACV,oBAAA,CACA,oBAAA,CACA,2BACF,CAAA,CACA,YAAA,CAAed,CAAAA,EAAa,CAC1B,IAAMC,CAAAA,CAAeC,8BAAAA,CAAaF,CAAU,OAAO,CAAA,CAC7Ce,CAAAA,CAAYC,2BAAAA,CAAgB,CAAA,CAE5Bb,CAAAA,CAAUC,yCAAAA,CAEZ,KAAA,CAAO,IAAI,MAAA,CAAOM,CAAAA,CAA6B,GAAG,CAAA,CAClD,OAAA,CAAS,CAAC,OAAA,CAAS,QAAA,CAAU,MAAA,CAAQ,MAAA,CAAQ,SAAA,CAAW,QAAQ,CAClE,CAAA,CACAT,CACF,CAAA,CACMgB,CAAAA,CAAgBb,yCAAAA,CAElB,KAAA,CAAO,IAAI,MAAA,CAAOO,CAAAA,CAAqB,GAAG,CAAA,CAC1C,OAAA,CAAS,CAAC,QAAA,CAAU,MAAA,CAAQ,OAAA,CAAS,SAAA,CAAW,QAAQ,CAC1D,CAAA,CACAV,CACF,CAAA,CACMiB,CAAAA,CAAed,yCAAAA,CAEjB,KAAA,CAAO,IAAI,MAAA,CAAOQ,CAAAA,CAAmC,GAAG,CAAA,CACxD,OAAA,CAAS,CACP,QAAA,CACA,QAAA,CACA,OAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,MAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,SAAA,CACA,QACF,CACF,CAAA,CACAX,CACF,CAAA,CACMkB,CAAAA,CAAef,yCAAAA,CAEjB,KAAA,CAAO,IAAI,MAAA,CAAOS,CAAAA,CAA+B,GAAG,CAAA,CACpD,OAAA,CAAS,CAAC,OAAA,CAAS,MAAM,CAC3B,CAAA,CACAZ,CACF,CAAA,CACA,EAAA,CAAIkB,CAAAA,CAAa,MAAA,CAAS,CAAA,CACxB,MAAM,IAAI,KAAA,CAAM,CAAA,0CAAA,EAA6CnB,CAAQ,CAAA,CAAA;AC/CjE;ACVR","file":"/home/runner/work/cli/cli/dist/chunk-IU4E6MQW.cjs","sourcesContent":[null,"import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { CodePackageSdk } from '../../../codecs';\nimport { findAllWithRegex } from '@transcend-io/type-utils';\nimport { CodePackageType } from '@transcend-io/privacy-types';\n\nconst POD_TARGET_REGEX = /target ('|\")(.*?)('|\")/;\nconst POD_PACKAGE_REGEX = /pod ('|\")(.*?)('|\")(, ('|\")~> (.+?)('|\")|)/;\n\nexport const cocoaPods: CodeScanningConfig = {\n supportedFiles: ['Podfile'],\n ignoreDirs: ['Pods'],\n 
scanFunction: (filePath) => {\n const fileContents = readFileSync(filePath, 'utf-8');\n\n const targets = findAllWithRegex(\n {\n value: new RegExp(POD_TARGET_REGEX, 'g'),\n matches: ['quote1', 'name', 'quote2'],\n },\n fileContents,\n );\n const packages = findAllWithRegex(\n {\n value: new RegExp(POD_PACKAGE_REGEX, 'g'),\n matches: [\n 'quote1',\n 'name',\n 'quote2',\n 'extra',\n 'quote3',\n 'version',\n 'quote4',\n ],\n },\n fileContents,\n );\n\n const deps: CodePackageSdk[] = targets.map((target, ind) => ({\n name: target.name,\n type: CodePackageType.CocoaPods,\n softwareDevelopmentKits: packages\n .filter(\n (pkg) =>\n pkg.matchIndex > target.matchIndex &&\n (!targets[ind + 1] || pkg.matchIndex < targets[ind + 1].matchIndex),\n )\n .map((pkg) => ({\n name: pkg.name,\n version: pkg.version,\n })),\n }));\n\n return deps;\n },\n};\n","import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { findAllWithRegex } from '@transcend-io/type-utils';\nimport { dirname } from 'node:path';\n\nconst GRADLE_IMPLEMENTATION_REGEX =\n /implementation( *)('|\")(.+?):(.+?):(.+?|)('|\")/;\nconst GRADLE_PLUGIN_REGEX = /apply plugin: *('|\")(.+?)(:(.+?)|)('|\")/;\nconst GRADLE_IMPLEMENTATION_GROUP_REGEX =\n /implementation group:( *)('|\")(.+?)('|\"),( *)name:( *)('|\")(.+?)('|\"),( *)version:( *)('|\")(.+?)('|\")/;\nconst GRADLE_APPLICATION_NAME_REGEX = /applicationId( *)\"(.+?)\"/;\n\n/**\n * So far, there are three ways of defining dependencies that is supported\n * implementation group: 'org.eclipse.jdt', name: 'org.eclipse.jdt.core', version: '3.28.0'\n * or\n * implementation 'com.google.firebase:firebase-analytics:18.0.0'\n * or\n * apply plugin: 'com.google.gms.google-services'\n *\n * single and double quotes are both recognized\n */\nexport const gradle: CodeScanningConfig = {\n supportedFiles: ['build.gradle**'],\n ignoreDirs: [\n 'gradle-app.setting',\n 'gradle-wrapper.jar',\n 'gradle-wrapper.properties',\n ],\n scanFunction: (filePath) => {\n const fileContents = readFileSync(filePath, 'utf-8');\n const directory = dirname(filePath);\n\n const targets = findAllWithRegex(\n {\n value: new RegExp(GRADLE_IMPLEMENTATION_REGEX, 'g'),\n matches: ['space', 'quote1', 'name', 'path', 'version', 'quote2'],\n },\n fileContents,\n );\n const targetPlugins = findAllWithRegex(\n {\n value: new RegExp(GRADLE_PLUGIN_REGEX, 'g'),\n matches: ['quote1', 'name', 'group', 'version', 'quote2'],\n },\n fileContents,\n );\n const targetGroups = findAllWithRegex(\n {\n value: new RegExp(GRADLE_IMPLEMENTATION_GROUP_REGEX, 'g'),\n matches: [\n 'space1',\n 'quote1',\n 'group',\n 'quote2',\n 'space2',\n 'space3',\n 'quote3',\n 'name',\n 'quote4',\n 'space4',\n 'space5',\n 'quote5',\n 'version',\n 'quote6',\n ],\n },\n fileContents,\n );\n const applications = findAllWithRegex(\n {\n value: new RegExp(GRADLE_APPLICATION_NAME_REGEX, 'g'),\n matches: ['space', 'name'],\n },\n fileContents,\n );\n if (applications.length > 1) {\n throw new Error(`Expected only one applicationId per file: ${filePath}`);\n }\n\n return [\n {\n name: applications[0]?.name || directory.split('/').pop()!,\n softwareDevelopmentKits: [\n ...targets,\n ...targetGroups,\n ...targetPlugins,\n ].map((target) => ({\n name: target.name,\n version: target.version || undefined,\n })),\n },\n ];\n },\n};\n","import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { CodePackageType } from '@transcend-io/privacy-types';\nimport yaml from 'js-yaml';\nimport { dirname } from 
'node:path';\n\n/**\n * Remove YAML comments from a string\n *\n * @param yamlString - YAML string\n * @returns String without comments\n */\nfunction removeYAMLComments(yamlString: string): string {\n return yamlString\n .split('\\n')\n .map((line) => {\n // Remove inline comments\n const commentIndex = line.indexOf('#');\n if (commentIndex > -1) {\n // Check if '#' is not inside a string\n if (\n !line.substring(0, commentIndex).includes('\"') &&\n !line.substring(0, commentIndex).includes(\"'\")\n ) {\n return line.substring(0, commentIndex).trim();\n }\n }\n return line;\n })\n .filter((line) => line.length > 0)\n .join('\\n');\n}\n\nexport const pubspec: CodeScanningConfig = {\n supportedFiles: ['pubspec.yml'],\n ignoreDirs: ['build'],\n scanFunction: (filePath) => {\n const directory = dirname(filePath);\n const fileContents = readFileSync(filePath, 'utf-8');\n const {\n name,\n description,\n dev_dependencies = {},\n dependencies = {},\n } = yaml.load(removeYAMLComments(fileContents)) as {\n /** Name */\n name?: string;\n /** Description */\n description?: string;\n /** Dev dependencies */\n dev_dependencies?: { [k in string]: number | Record<string, string> };\n /** Dependencies */\n dependencies?: { [k in string]: number | Record<string, string> };\n };\n return [\n {\n name: name || directory.split('/').pop()!,\n description,\n type: CodePackageType.RequirementsTxt,\n softwareDevelopmentKits: [\n ...Object.entries(dependencies).map(([name, version]) => ({\n name,\n version:\n typeof version === 'string'\n ? version\n : typeof version === 'number'\n ? version.toString()\n : version?.sdk,\n })),\n ...Object.entries(dev_dependencies).map(([name, version]) => ({\n name,\n version:\n typeof version === 'string'\n ? version\n : typeof version === 'number'\n ? version.toString()\n : version?.sdk,\n isDevDependency: true,\n })),\n ],\n },\n ];\n },\n};\n","import { CodeScanningConfig } from './types';\nimport {\n cocoaPods,\n gradle,\n javascriptPackageJson,\n gemfile,\n composerJson,\n pubspec,\n swift,\n pythonRequirementsTxt,\n} from './integrations';\nimport { CodePackageType } from '@transcend-io/privacy-types';\n\n/**\n * @deprecated TODO: https://transcend.height.app/T-32325 - use code scanning instead\n */\nexport const SILO_DISCOVERY_CONFIGS: {\n [k in string]: CodeScanningConfig;\n} = {\n cocoaPods,\n gradle,\n javascriptPackageJson,\n pythonRequirementsTxt,\n gemfile,\n pubspec,\n swift,\n};\n\nexport const CODE_SCANNING_CONFIGS: {\n [k in CodePackageType]: CodeScanningConfig;\n} = {\n [CodePackageType.CocoaPods]: cocoaPods,\n [CodePackageType.Gradle]: gradle,\n [CodePackageType.PackageJson]: javascriptPackageJson,\n [CodePackageType.RequirementsTxt]: pythonRequirementsTxt,\n [CodePackageType.Gemfile]: gemfile,\n [CodePackageType.Pubspec]: pubspec,\n [CodePackageType.ComposerJson]: composerJson,\n [CodePackageType.Swift]: swift,\n};\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-GT7Q27PI.cjs","../src/lib/code-scanning/integrations/cocoaPods.ts","../src/lib/code-scanning/integrations/gradle.ts","../src/lib/code-scanning/integrations/pubspec.ts","../src/lib/code-scanning/constants.ts"],"names":["POD_TARGET_REGEX","POD_PACKAGE_REGEX","cocoaPods","filePath","fileContents","readFileSync","targets","findAllWithRegex","packages","target","ind","CodePackageType","pkg","GRADLE_IMPLEMENTATION_REGEX","GRADLE_PLUGIN_REGEX","GRADLE_IMPLEMENTATION_GROUP_REGEX","GRADLE_APPLICATION_NAME_REGEX","gradle","directory","dirname","targetPlugins","targetGroups","applications"],"mappings":"AAAA,u/BAAwC,wDAAyC,wBCApD,qDAGI,2DACD,IAE1BA,CAAAA,CAAmB,wBAAA,CACnBC,CAAAA,CAAoB,4CAAA,CAEbC,CAAAA,CAAgC,CAC3C,cAAA,CAAgB,CAAC,SAAS,CAAA,CAC1B,UAAA,CAAY,CAAC,MAAM,CAAA,CACnB,YAAA,CAAeC,CAAAA,EAAa,CAC1B,IAAMC,CAAAA,CAAeC,8BAAAA,CAAaF,CAAU,OAAO,CAAA,CAE7CG,CAAAA,CAAUC,yCAAAA,CAEZ,KAAA,CAAO,IAAI,MAAA,CAAOP,CAAAA,CAAkB,GAAG,CAAA,CACvC,OAAA,CAAS,CAAC,QAAA,CAAU,MAAA,CAAQ,QAAQ,CACtC,CAAA,CACAI,CACF,CAAA,CACMI,CAAAA,CAAWD,yCAAAA,CAEb,KAAA,CAAO,IAAI,MAAA,CAAON,CAAAA,CAAmB,GAAG,CAAA,CACxC,OAAA,CAAS,CACP,QAAA,CACA,MAAA,CACA,QAAA,CACA,OAAA,CACA,QAAA,CACA,SAAA,CACA,QACF,CACF,CAAA,CACAG,CACF,CAAA,CAiBA,OAf+BE,CAAAA,CAAQ,GAAA,CAAI,CAACG,CAAAA,CAAQC,CAAAA,CAAAA,EAAAA,CAAS,CAC3D,IAAA,CAAMD,CAAAA,CAAO,IAAA,CACb,IAAA,CAAME,6BAAAA,CAAgB,SAAA,CACtB,uBAAA,CAAyBH,CAAAA,CACtB,MAAA,CACEI,CAAAA,EACCA,CAAAA,CAAI,UAAA,CAAaH,CAAAA,CAAO,UAAA,EAAA,CACvB,CAACH,CAAAA,CAAQI,CAAAA,CAAM,CAAC,CAAA,EAAKE,CAAAA,CAAI,UAAA,CAAaN,CAAAA,CAAQI,CAAAA,CAAM,CAAC,CAAA,CAAE,UAAA,CAC5D,CAAA,CACC,GAAA,CAAKE,CAAAA,EAAAA,CAAS,CACb,IAAA,CAAMA,CAAAA,CAAI,IAAA,CACV,OAAA,CAASA,CAAAA,CAAI,OACf,CAAA,CAAE,CACN,CAAA,CAAE,CAGJ,CACF,CAAA,CCvDA,4BAGwB,IAElBC,CAAAA,CACJ,gDAAA,CACIC,CAAAA,CAAsB,yCAAA,CACtBC,CAAAA,CACJ,uGAAA,CACIC,CAAAA,CAAgC,0BAAA,CAYzBC,CAAAA,CAA6B,CACxC,cAAA,CAAgB,CAAC,gBAAgB,CAAA,CACjC,UAAA,CAAY,CACV,oBAAA,CACA,oBAAA,CACA,2BACF,CAAA,CACA,YAAA,CAAed,CAAAA,EAAa,CAC1B,IAAMC,CAAAA,CAAeC,8BAAAA,CAAaF,CAAU,OAAO,CAAA,CAC7Ce,CAAAA,CAAYC,2BAAAA,CAAgB,CAAA,CAE5Bb,CAAAA,CAAUC,yCAAAA,CAEZ,KAAA,CAAO,IAAI,MAAA,CAAOM,CAAAA,CAA6B,GAAG,CAAA,CAClD,OAAA,CAAS,CAAC,OAAA,CAAS,QAAA,CAAU,MAAA,CAAQ,MAAA,CAAQ,SAAA,CAAW,QAAQ,CAClE,CAAA,CACAT,CACF,CAAA,CACMgB,CAAAA,CAAgBb,yCAAAA,CAElB,KAAA,CAAO,IAAI,MAAA,CAAOO,CAAAA,CAAqB,GAAG,CAAA,CAC1C,OAAA,CAAS,CAAC,QAAA,CAAU,MAAA,CAAQ,OAAA,CAAS,SAAA,CAAW,QAAQ,CAC1D,CAAA,CACAV,CACF,CAAA,CACMiB,CAAAA,CAAed,yCAAAA,CAEjB,KAAA,CAAO,IAAI,MAAA,CAAOQ,CAAAA,CAAmC,GAAG,CAAA,CACxD,OAAA,CAAS,CACP,QAAA,CACA,QAAA,CACA,OAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,MAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,SAAA,CACA,QACF,CACF,CAAA,CACAX,CACF,CAAA,CACMkB,CAAAA,CAAef,yCAAAA,CAEjB,KAAA,CAAO,IAAI,MAAA,CAAOS,CAAAA,CAA+B,GAAG,CAAA,CACpD,OAAA,CAAS,CAAC,OAAA,CAAS,MAAM,CAC3B,CAAA,CACAZ,CACF,CAAA,CACA,EAAA,CAAIkB,CAAAA,CAAa,MAAA,CAAS,CAAA,CACxB,MAAM,IAAI,KAAA,CAAM,CAAA,0CAAA,EAA6CnB,CAAQ,CAAA,CAAA;AC/CjE;ACVR","file":"/home/runner/work/cli/cli/dist/chunk-GT7Q27PI.cjs","sourcesContent":[null,"import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { CodePackageSdk } from '../../../codecs';\nimport { findAllWithRegex } from '@transcend-io/type-utils';\nimport { CodePackageType } from '@transcend-io/privacy-types';\n\nconst POD_TARGET_REGEX = /target ('|\")(.*?)('|\")/;\nconst POD_PACKAGE_REGEX = /pod ('|\")(.*?)('|\")(, ('|\")~> (.+?)('|\")|)/;\n\nexport const cocoaPods: CodeScanningConfig = {\n supportedFiles: ['Podfile'],\n ignoreDirs: ['Pods'],\n 
scanFunction: (filePath) => {\n const fileContents = readFileSync(filePath, 'utf-8');\n\n const targets = findAllWithRegex(\n {\n value: new RegExp(POD_TARGET_REGEX, 'g'),\n matches: ['quote1', 'name', 'quote2'],\n },\n fileContents,\n );\n const packages = findAllWithRegex(\n {\n value: new RegExp(POD_PACKAGE_REGEX, 'g'),\n matches: [\n 'quote1',\n 'name',\n 'quote2',\n 'extra',\n 'quote3',\n 'version',\n 'quote4',\n ],\n },\n fileContents,\n );\n\n const deps: CodePackageSdk[] = targets.map((target, ind) => ({\n name: target.name,\n type: CodePackageType.CocoaPods,\n softwareDevelopmentKits: packages\n .filter(\n (pkg) =>\n pkg.matchIndex > target.matchIndex &&\n (!targets[ind + 1] || pkg.matchIndex < targets[ind + 1].matchIndex),\n )\n .map((pkg) => ({\n name: pkg.name,\n version: pkg.version,\n })),\n }));\n\n return deps;\n },\n};\n","import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { findAllWithRegex } from '@transcend-io/type-utils';\nimport { dirname } from 'node:path';\n\nconst GRADLE_IMPLEMENTATION_REGEX =\n /implementation( *)('|\")(.+?):(.+?):(.+?|)('|\")/;\nconst GRADLE_PLUGIN_REGEX = /apply plugin: *('|\")(.+?)(:(.+?)|)('|\")/;\nconst GRADLE_IMPLEMENTATION_GROUP_REGEX =\n /implementation group:( *)('|\")(.+?)('|\"),( *)name:( *)('|\")(.+?)('|\"),( *)version:( *)('|\")(.+?)('|\")/;\nconst GRADLE_APPLICATION_NAME_REGEX = /applicationId( *)\"(.+?)\"/;\n\n/**\n * So far, there are three ways of defining dependencies that is supported\n * implementation group: 'org.eclipse.jdt', name: 'org.eclipse.jdt.core', version: '3.28.0'\n * or\n * implementation 'com.google.firebase:firebase-analytics:18.0.0'\n * or\n * apply plugin: 'com.google.gms.google-services'\n *\n * single and double quotes are both recognized\n */\nexport const gradle: CodeScanningConfig = {\n supportedFiles: ['build.gradle**'],\n ignoreDirs: [\n 'gradle-app.setting',\n 'gradle-wrapper.jar',\n 'gradle-wrapper.properties',\n ],\n scanFunction: (filePath) => {\n const fileContents = readFileSync(filePath, 'utf-8');\n const directory = dirname(filePath);\n\n const targets = findAllWithRegex(\n {\n value: new RegExp(GRADLE_IMPLEMENTATION_REGEX, 'g'),\n matches: ['space', 'quote1', 'name', 'path', 'version', 'quote2'],\n },\n fileContents,\n );\n const targetPlugins = findAllWithRegex(\n {\n value: new RegExp(GRADLE_PLUGIN_REGEX, 'g'),\n matches: ['quote1', 'name', 'group', 'version', 'quote2'],\n },\n fileContents,\n );\n const targetGroups = findAllWithRegex(\n {\n value: new RegExp(GRADLE_IMPLEMENTATION_GROUP_REGEX, 'g'),\n matches: [\n 'space1',\n 'quote1',\n 'group',\n 'quote2',\n 'space2',\n 'space3',\n 'quote3',\n 'name',\n 'quote4',\n 'space4',\n 'space5',\n 'quote5',\n 'version',\n 'quote6',\n ],\n },\n fileContents,\n );\n const applications = findAllWithRegex(\n {\n value: new RegExp(GRADLE_APPLICATION_NAME_REGEX, 'g'),\n matches: ['space', 'name'],\n },\n fileContents,\n );\n if (applications.length > 1) {\n throw new Error(`Expected only one applicationId per file: ${filePath}`);\n }\n\n return [\n {\n name: applications[0]?.name || directory.split('/').pop()!,\n softwareDevelopmentKits: [\n ...targets,\n ...targetGroups,\n ...targetPlugins,\n ].map((target) => ({\n name: target.name,\n version: target.version || undefined,\n })),\n },\n ];\n },\n};\n","import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { CodePackageType } from '@transcend-io/privacy-types';\nimport yaml from 'js-yaml';\nimport { dirname } from 
'node:path';\n\n/**\n * Remove YAML comments from a string\n *\n * @param yamlString - YAML string\n * @returns String without comments\n */\nfunction removeYAMLComments(yamlString: string): string {\n return yamlString\n .split('\\n')\n .map((line) => {\n // Remove inline comments\n const commentIndex = line.indexOf('#');\n if (commentIndex > -1) {\n // Check if '#' is not inside a string\n if (\n !line.substring(0, commentIndex).includes('\"') &&\n !line.substring(0, commentIndex).includes(\"'\")\n ) {\n return line.substring(0, commentIndex).trim();\n }\n }\n return line;\n })\n .filter((line) => line.length > 0)\n .join('\\n');\n}\n\nexport const pubspec: CodeScanningConfig = {\n supportedFiles: ['pubspec.yml'],\n ignoreDirs: ['build'],\n scanFunction: (filePath) => {\n const directory = dirname(filePath);\n const fileContents = readFileSync(filePath, 'utf-8');\n const {\n name,\n description,\n dev_dependencies = {},\n dependencies = {},\n } = yaml.load(removeYAMLComments(fileContents)) as {\n /** Name */\n name?: string;\n /** Description */\n description?: string;\n /** Dev dependencies */\n dev_dependencies?: { [k in string]: number | Record<string, string> };\n /** Dependencies */\n dependencies?: { [k in string]: number | Record<string, string> };\n };\n return [\n {\n name: name || directory.split('/').pop()!,\n description,\n type: CodePackageType.RequirementsTxt,\n softwareDevelopmentKits: [\n ...Object.entries(dependencies).map(([name, version]) => ({\n name,\n version:\n typeof version === 'string'\n ? version\n : typeof version === 'number'\n ? version.toString()\n : version?.sdk,\n })),\n ...Object.entries(dev_dependencies).map(([name, version]) => ({\n name,\n version:\n typeof version === 'string'\n ? version\n : typeof version === 'number'\n ? version.toString()\n : version?.sdk,\n isDevDependency: true,\n })),\n ],\n },\n ];\n },\n};\n","import { CodeScanningConfig } from './types';\nimport {\n cocoaPods,\n gradle,\n javascriptPackageJson,\n gemfile,\n composerJson,\n pubspec,\n swift,\n pythonRequirementsTxt,\n} from './integrations';\nimport { CodePackageType } from '@transcend-io/privacy-types';\n\n/**\n * @deprecated TODO: https://transcend.height.app/T-32325 - use code scanning instead\n */\nexport const SILO_DISCOVERY_CONFIGS: {\n [k in string]: CodeScanningConfig;\n} = {\n cocoaPods,\n gradle,\n javascriptPackageJson,\n pythonRequirementsTxt,\n gemfile,\n pubspec,\n swift,\n};\n\nexport const CODE_SCANNING_CONFIGS: {\n [k in CodePackageType]: CodeScanningConfig;\n} = {\n [CodePackageType.CocoaPods]: cocoaPods,\n [CodePackageType.Gradle]: gradle,\n [CodePackageType.PackageJson]: javascriptPackageJson,\n [CodePackageType.RequirementsTxt]: pythonRequirementsTxt,\n [CodePackageType.Gemfile]: gemfile,\n [CodePackageType.Pubspec]: pubspec,\n [CodePackageType.ComposerJson]: composerJson,\n [CodePackageType.Swift]: swift,\n};\n"]}
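
The sourcemap hunk changes only the generated-file references (`chunk-IU4E6MQW.cjs` → `chunk-GT7Q27PI.cjs` in `sources` and `file`); the embedded TypeScript sources are identical. One detail from those sources worth reading in isolation is how the pubspec scanner normalizes dependency versions, which YAML may parse as a string, a number, or an object with an `sdk` key. A standalone sketch, with `PubspecVersion` as an assumed name for that union:

```ts
// Extracted sketch of the version normalization in the embedded pubspec
// scanner sources above; PubspecVersion is an assumed name for the union
// of shapes YAML can produce for a dependency value.
type PubspecVersion = string | number | { sdk?: string } | undefined;

function normalizeVersion(version: PubspecVersion): string | undefined {
  return typeof version === 'string'
    ? version
    : typeof version === 'number'
      ? version.toString()
      : version?.sdk;
}

console.log(normalizeVersion({ sdk: 'flutter' })); // 'flutter'
console.log(normalizeVersion(3.19)); // '3.19'
console.log(normalizeVersion('^1.2.0')); // '^1.2.0'
```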
package/dist/{chunk-5Y7FIOUT.cjs → chunk-H6YI2HZS.cjs}
@@ -1,4 +1,4 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } }var _chunkMFIMY5RYcjs = require('./chunk-MFIMY5RY.cjs');var _chunkQ7I37FJVcjs = require('./chunk-Q7I37FJV.cjs');var _core = require('@stricli/core');var _privacytypes = require('@transcend-io/privacy-types');function h(e){if(!/^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test(e))throw new Error(`Invalid UUID format: ${e}`);return e}function a(e){try{return new URL(e).toString().replace(/\/$/,"")}catch (e2){throw new Error(`Invalid URL format: ${e}`)}}function b(e){return e.split(",").map(r=>r.trim()).filter(r=>r.length>0)}function T(e){let r=new Date(e);if(Number.isNaN(r.getTime()))throw new TypeError(`Invalid date: ${e}. Try using the ISO 8601 format (YYYY-MM-DDTHH:MM:SS.SSSZ)`);return r}var l=({scopes:e,requiresSiloScope:r=!1})=>{let t={kind:"parsed",parse:String,brief:"The Transcend API key."};return r&&(t.brief+=" This key must be associated with the data silo(s) being operated on."),e==="Varies"?{...t,brief:`${t.brief} The scopes required will vary depending on the operation performed. If in doubt, the ${_privacytypes.TRANSCEND_SCOPES[_privacytypes.ScopeName.FullAdmin].title} scope will always work.`}:e.length===0?{...t,brief:`${t.brief} No scopes are required for this command.`}:{...t,brief:`${t.brief} Requires scopes: ${e.map(p=>`"${_privacytypes.TRANSCEND_SCOPES[p].title}"`).join(", ")}`}},d= exports.e =(e=_chunkMFIMY5RYcjs.r)=>({kind:"parsed",parse:a,brief:"URL of the Transcend backend. Use https://api.us.transcend.io for US hosting",default:e}),A= exports.f =(e=_chunkMFIMY5RYcjs.s)=>({kind:"parsed",parse:a,brief:"URL of the Transcend consent backend. Use https://consent.us.transcend.io for US hosting",default:e}),U= exports.g =()=>({kind:"parsed",parse:String,brief:"The Sombra internal key, use for additional authentication when self-hosting Sombra",optional:!0});var g=["dataSilos","enrichers","templates","apiKeys"],N= exports.i =Object.values(_privacytypes.ConsentTrackerStatus),L= exports.j =_core.buildCommand.call(void 0, {loader:async()=>{let{pull:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-SZM2IXSY.cjs")));return e},parameters:{flags:{auth:l({scopes:"Varies"}),resources:{kind:"enum",values:["all",...Object.values(_chunkQ7I37FJVcjs.d)],brief:`The different resource types to pull in. Defaults to ${g.join(",")}.`,variadic:",",optional:!0},file:{kind:"parsed",parse:String,brief:"Path to the YAML file to pull into",default:"./transcend.yml"},transcendUrl:d(),dataSiloIds:{kind:"parsed",parse:String,variadic:",",brief:"The UUIDs of the data silos that should be pulled into the YAML file",optional:!0},integrationNames:{kind:"parsed",parse:String,variadic:",",brief:"The types of integrations to pull down",optional:!0},trackerStatuses:{kind:"enum",values:Object.values(_privacytypes.ConsentTrackerStatus),variadic:",",brief:"The statuses of consent manager trackers to pull down. 
Defaults to all statuses.",optional:!0},pageSize:{kind:"parsed",parse:_core.numberParser,brief:"The page size to use when paginating over the API",default:"50"},skipDatapoints:{kind:"boolean",brief:"When true, skip pulling in datapoints alongside data silo resource",default:!1},skipSubDatapoints:{kind:"boolean",brief:"When true, skip pulling in subDatapoints alongside data silo resource",default:!1},includeGuessedCategories:{kind:"boolean",brief:"When true, included guessed data categories that came from the content classifier",default:!1},debug:{kind:"boolean",brief:"Set to true to include debug logs while pulling the configuration",default:!1}}},docs:{brief:"Pull metadata from Transcend into transcend.yml",fullDescription:`Generates a transcend.yml by pulling the configuration from your Transcend instance.
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } }var _chunkE57K4DGXcjs = require('./chunk-E57K4DGX.cjs');var _chunkQ7I37FJVcjs = require('./chunk-Q7I37FJV.cjs');var _core = require('@stricli/core');var _privacytypes = require('@transcend-io/privacy-types');function h(e){if(!/^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test(e))throw new Error(`Invalid UUID format: ${e}`);return e}function a(e){try{return new URL(e).toString().replace(/\/$/,"")}catch (e2){throw new Error(`Invalid URL format: ${e}`)}}function b(e){return e.split(",").map(r=>r.trim()).filter(r=>r.length>0)}function T(e){let r=new Date(e);if(Number.isNaN(r.getTime()))throw new TypeError(`Invalid date: ${e}. Try using the ISO 8601 format (YYYY-MM-DDTHH:MM:SS.SSSZ)`);return r}var l=({scopes:e,requiresSiloScope:r=!1})=>{let t={kind:"parsed",parse:String,brief:"The Transcend API key."};return r&&(t.brief+=" This key must be associated with the data silo(s) being operated on."),e==="Varies"?{...t,brief:`${t.brief} The scopes required will vary depending on the operation performed. If in doubt, the ${_privacytypes.TRANSCEND_SCOPES[_privacytypes.ScopeName.FullAdmin].title} scope will always work.`}:e.length===0?{...t,brief:`${t.brief} No scopes are required for this command.`}:{...t,brief:`${t.brief} Requires scopes: ${e.map(p=>`"${_privacytypes.TRANSCEND_SCOPES[p].title}"`).join(", ")}`}},d= exports.e =(e=_chunkE57K4DGXcjs.r)=>({kind:"parsed",parse:a,brief:"URL of the Transcend backend. Use https://api.us.transcend.io for US hosting",default:e}),A= exports.f =(e=_chunkE57K4DGXcjs.s)=>({kind:"parsed",parse:a,brief:"URL of the Transcend consent backend. Use https://consent.us.transcend.io for US hosting",default:e}),U= exports.g =()=>({kind:"parsed",parse:String,brief:"The Sombra internal key, use for additional authentication when self-hosting Sombra",optional:!0});var g=["dataSilos","enrichers","templates","apiKeys"],N= exports.i =Object.values(_privacytypes.ConsentTrackerStatus),L= exports.j =_core.buildCommand.call(void 0, {loader:async()=>{let{pull:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-ERCHNSZD.cjs")));return e},parameters:{flags:{auth:l({scopes:"Varies"}),resources:{kind:"enum",values:["all",...Object.values(_chunkQ7I37FJVcjs.d)],brief:`The different resource types to pull in. Defaults to ${g.join(",")}.`,variadic:",",optional:!0},file:{kind:"parsed",parse:String,brief:"Path to the YAML file to pull into",default:"./transcend.yml"},transcendUrl:d(),dataSiloIds:{kind:"parsed",parse:String,variadic:",",brief:"The UUIDs of the data silos that should be pulled into the YAML file",optional:!0},integrationNames:{kind:"parsed",parse:String,variadic:",",brief:"The types of integrations to pull down",optional:!0},trackerStatuses:{kind:"enum",values:Object.values(_privacytypes.ConsentTrackerStatus),variadic:",",brief:"The statuses of consent manager trackers to pull down. 
Defaults to all statuses.",optional:!0},pageSize:{kind:"parsed",parse:_core.numberParser,brief:"The page size to use when paginating over the API",default:"50"},skipDatapoints:{kind:"boolean",brief:"When true, skip pulling in datapoints alongside data silo resource",default:!1},skipSubDatapoints:{kind:"boolean",brief:"When true, skip pulling in subDatapoints alongside data silo resource",default:!1},includeGuessedCategories:{kind:"boolean",brief:"When true, included guessed data categories that came from the content classifier",default:!1},debug:{kind:"boolean",brief:"Set to true to include debug logs while pulling the configuration",default:!1}}},docs:{brief:"Pull metadata from Transcend into transcend.yml",fullDescription:`Generates a transcend.yml by pulling the configuration from your Transcend instance.
 
  The API key needs various scopes depending on the resources being pulled (see the CLI's README for more details).
 
@@ -6,4 +6,4 @@ This command can be helpful if you are looking to:
 
  - Copy your data into another instance
  - Generate a transcend.yml file as a starting point to maintain parts of your data inventory in code.`}});exports.a = h; exports.b = b; exports.c = T; exports.d = l; exports.e = d; exports.f = A; exports.g = U; exports.h = g; exports.i = N; exports.j = L;
- //# sourceMappingURL=chunk-5Y7FIOUT.cjs.map
+ //# sourceMappingURL=chunk-H6YI2HZS.cjs.map
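
As before, this hunk contains only renamed references (`chunk-MFIMY5RY` → `chunk-E57K4DGX`, `impl-SZM2IXSY` → `impl-ERCHNSZD`) and an updated `sourceMappingURL`. The minified parser `a` is `urlParser` from the embedded sources in the next hunk; copied out for readability:

```ts
// Copied from the embedded sources in the sourcemap hunk below: validates a
// URL flag and strips a single trailing slash so backend URLs compare cleanly.
export function urlParser(input: string): string {
  try {
    const url = new URL(input);
    return url.toString().replace(/\/$/, '');
  } catch {
    throw new Error(`Invalid URL format: ${input}`);
  }
}

urlParser('https://api.us.transcend.io/'); // => 'https://api.us.transcend.io'
```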
package/dist/{chunk-5Y7FIOUT.cjs.map → chunk-H6YI2HZS.cjs.map}
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-5Y7FIOUT.cjs","../src/commands/inventory/pull/command.ts","../src/lib/cli/parsers.ts"],"names":["uuidParser","input"],"mappings":"AAAA,mZAA+C,wDAAyC,qCCA7C,2DACN,SCMrBA,CAAAA,CAAWC,CAAAA,CAAuB,CAGhD,EAAA,CAAI,CADF,4EAAA,CACa,IAAA,CAAKA,CAAK,CAAA,CACvB,MAAM,IAAI,KAAA,CAAM,CAAA,qBAAA,EAAwBA,CAAK,CAAA,CAAA;AD0F5B;AAAA;AAAA;AAAA;AAAA;AAAA;AASpB,qGAAA","file":"/home/runner/work/cli/cli/dist/chunk-5Y7FIOUT.cjs","sourcesContent":[null,"import { buildCommand, numberParser } from '@stricli/core';\nimport { ConsentTrackerStatus } from '@transcend-io/privacy-types';\nimport {\n createAuthParameter,\n createTranscendUrlParameter,\n} from '../../../lib/cli/common-parameters';\nimport { TranscendPullResource } from '../../../enums';\n\nexport const DEFAULT_TRANSCEND_PULL_RESOURCES = [\n TranscendPullResource.DataSilos,\n TranscendPullResource.Enrichers,\n TranscendPullResource.Templates,\n TranscendPullResource.ApiKeys,\n];\n\nexport const DEFAULT_CONSENT_TRACKER_STATUSES =\n Object.values(ConsentTrackerStatus);\n\nexport const pullCommand = buildCommand({\n loader: async () => {\n const { pull } = await import('./impl');\n return pull;\n },\n parameters: {\n flags: {\n auth: createAuthParameter({\n scopes: 'Varies',\n }),\n resources: {\n kind: 'enum',\n values: ['all', ...Object.values(TranscendPullResource)],\n brief: `The different resource types to pull in. Defaults to ${DEFAULT_TRANSCEND_PULL_RESOURCES.join(\n ',',\n )}.`,\n variadic: ',',\n optional: true,\n },\n file: {\n kind: 'parsed',\n parse: String,\n brief: 'Path to the YAML file to pull into',\n default: './transcend.yml',\n },\n transcendUrl: createTranscendUrlParameter(),\n dataSiloIds: {\n kind: 'parsed',\n parse: String,\n variadic: ',',\n brief:\n 'The UUIDs of the data silos that should be pulled into the YAML file',\n optional: true,\n },\n integrationNames: {\n kind: 'parsed',\n parse: String,\n variadic: ',',\n brief: 'The types of integrations to pull down',\n optional: true,\n },\n trackerStatuses: {\n kind: 'enum',\n values: Object.values(ConsentTrackerStatus),\n variadic: ',',\n brief:\n 'The statuses of consent manager trackers to pull down. 
Defaults to all statuses.',\n optional: true,\n },\n pageSize: {\n kind: 'parsed',\n parse: numberParser,\n brief: 'The page size to use when paginating over the API',\n default: '50',\n },\n skipDatapoints: {\n kind: 'boolean',\n brief:\n 'When true, skip pulling in datapoints alongside data silo resource',\n default: false,\n },\n skipSubDatapoints: {\n kind: 'boolean',\n brief:\n 'When true, skip pulling in subDatapoints alongside data silo resource',\n default: false,\n },\n includeGuessedCategories: {\n kind: 'boolean',\n brief:\n 'When true, included guessed data categories that came from the content classifier',\n default: false,\n },\n debug: {\n kind: 'boolean',\n brief:\n 'Set to true to include debug logs while pulling the configuration',\n default: false,\n },\n },\n },\n docs: {\n brief: 'Pull metadata from Transcend into transcend.yml',\n fullDescription: `Generates a transcend.yml by pulling the configuration from your Transcend instance.\n\nThe API key needs various scopes depending on the resources being pulled (see the CLI's README for more details).\n\nThis command can be helpful if you are looking to:\n\n- Copy your data into another instance\n- Generate a transcend.yml file as a starting point to maintain parts of your data inventory in code.`,\n },\n});\n","/**\n * Validates and returns a UUID string.\n *\n * @param input - The input string to validate as UUID\n * @returns The validated UUID string\n * @throws Error if input is not a valid UUID\n */\nexport function uuidParser(input: string): string {\n const uuidRegex =\n /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;\n if (!uuidRegex.test(input)) {\n throw new Error(`Invalid UUID format: ${input}`);\n }\n return input;\n}\n\n/**\n * Validates and returns a URL string.\n *\n * @param input - The input string to validate as URL\n * @returns The validated URL string\n * @throws Error if input is not a valid URL\n */\nexport function urlParser(input: string): string {\n try {\n const url = new URL(input);\n return url.toString().replace(/\\/$/, '');\n } catch {\n throw new Error(`Invalid URL format: ${input}`);\n }\n}\n\n/**\n * Parse a comma-separated string to array.\n * NOTE: Prefer using `variadic` for list arguments instead of this function. This should only be used for arguments which have a default value.\n *\n * @param input - The comma-separated string to parse\n * @returns Array of trimmed, non-empty strings\n */\nexport function arrayParser(input: string): string[] {\n return input\n .split(',')\n .map((s) => s.trim())\n .filter((s) => s.length > 0);\n}\n\n/**\n * Parse a date string to a Date object.\n *\n * @param input - The date string to parse\n * @returns The parsed Date object\n * @throws TypeError if input is not a valid date\n */\nexport function dateParser(input: string): Date {\n const date = new Date(input);\n if (Number.isNaN(date.getTime())) {\n throw new TypeError(\n `Invalid date: ${input}. Try using the ISO 8601 format (YYYY-MM-DDTHH:MM:SS.SSSZ)`,\n );\n }\n return date;\n}\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-H6YI2HZS.cjs","../src/commands/inventory/pull/command.ts","../src/lib/cli/parsers.ts"],"names":["uuidParser","input"],"mappings":"AAAA,mZAA+C,wDAAyC,qCCA7C,2DACN,SCMrBA,CAAAA,CAAWC,CAAAA,CAAuB,CAGhD,EAAA,CAAI,CADF,4EAAA,CACa,IAAA,CAAKA,CAAK,CAAA,CACvB,MAAM,IAAI,KAAA,CAAM,CAAA,qBAAA,EAAwBA,CAAK,CAAA,CAAA;AD0F5B;AAAA;AAAA;AAAA;AAAA;AAAA;AASpB,qGAAA","file":"/home/runner/work/cli/cli/dist/chunk-H6YI2HZS.cjs","sourcesContent":[null,"import { buildCommand, numberParser } from '@stricli/core';\nimport { ConsentTrackerStatus } from '@transcend-io/privacy-types';\nimport {\n createAuthParameter,\n createTranscendUrlParameter,\n} from '../../../lib/cli/common-parameters';\nimport { TranscendPullResource } from '../../../enums';\n\nexport const DEFAULT_TRANSCEND_PULL_RESOURCES = [\n TranscendPullResource.DataSilos,\n TranscendPullResource.Enrichers,\n TranscendPullResource.Templates,\n TranscendPullResource.ApiKeys,\n];\n\nexport const DEFAULT_CONSENT_TRACKER_STATUSES =\n Object.values(ConsentTrackerStatus);\n\nexport const pullCommand = buildCommand({\n loader: async () => {\n const { pull } = await import('./impl');\n return pull;\n },\n parameters: {\n flags: {\n auth: createAuthParameter({\n scopes: 'Varies',\n }),\n resources: {\n kind: 'enum',\n values: ['all', ...Object.values(TranscendPullResource)],\n brief: `The different resource types to pull in. Defaults to ${DEFAULT_TRANSCEND_PULL_RESOURCES.join(\n ',',\n )}.`,\n variadic: ',',\n optional: true,\n },\n file: {\n kind: 'parsed',\n parse: String,\n brief: 'Path to the YAML file to pull into',\n default: './transcend.yml',\n },\n transcendUrl: createTranscendUrlParameter(),\n dataSiloIds: {\n kind: 'parsed',\n parse: String,\n variadic: ',',\n brief:\n 'The UUIDs of the data silos that should be pulled into the YAML file',\n optional: true,\n },\n integrationNames: {\n kind: 'parsed',\n parse: String,\n variadic: ',',\n brief: 'The types of integrations to pull down',\n optional: true,\n },\n trackerStatuses: {\n kind: 'enum',\n values: Object.values(ConsentTrackerStatus),\n variadic: ',',\n brief:\n 'The statuses of consent manager trackers to pull down. 
Defaults to all statuses.',\n optional: true,\n },\n pageSize: {\n kind: 'parsed',\n parse: numberParser,\n brief: 'The page size to use when paginating over the API',\n default: '50',\n },\n skipDatapoints: {\n kind: 'boolean',\n brief:\n 'When true, skip pulling in datapoints alongside data silo resource',\n default: false,\n },\n skipSubDatapoints: {\n kind: 'boolean',\n brief:\n 'When true, skip pulling in subDatapoints alongside data silo resource',\n default: false,\n },\n includeGuessedCategories: {\n kind: 'boolean',\n brief:\n 'When true, included guessed data categories that came from the content classifier',\n default: false,\n },\n debug: {\n kind: 'boolean',\n brief:\n 'Set to true to include debug logs while pulling the configuration',\n default: false,\n },\n },\n },\n docs: {\n brief: 'Pull metadata from Transcend into transcend.yml',\n fullDescription: `Generates a transcend.yml by pulling the configuration from your Transcend instance.\n\nThe API key needs various scopes depending on the resources being pulled (see the CLI's README for more details).\n\nThis command can be helpful if you are looking to:\n\n- Copy your data into another instance\n- Generate a transcend.yml file as a starting point to maintain parts of your data inventory in code.`,\n },\n});\n","/**\n * Validates and returns a UUID string.\n *\n * @param input - The input string to validate as UUID\n * @returns The validated UUID string\n * @throws Error if input is not a valid UUID\n */\nexport function uuidParser(input: string): string {\n const uuidRegex =\n /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;\n if (!uuidRegex.test(input)) {\n throw new Error(`Invalid UUID format: ${input}`);\n }\n return input;\n}\n\n/**\n * Validates and returns a URL string.\n *\n * @param input - The input string to validate as URL\n * @returns The validated URL string\n * @throws Error if input is not a valid URL\n */\nexport function urlParser(input: string): string {\n try {\n const url = new URL(input);\n return url.toString().replace(/\\/$/, '');\n } catch {\n throw new Error(`Invalid URL format: ${input}`);\n }\n}\n\n/**\n * Parse a comma-separated string to array.\n * NOTE: Prefer using `variadic` for list arguments instead of this function. This should only be used for arguments which have a default value.\n *\n * @param input - The comma-separated string to parse\n * @returns Array of trimmed, non-empty strings\n */\nexport function arrayParser(input: string): string[] {\n return input\n .split(',')\n .map((s) => s.trim())\n .filter((s) => s.length > 0);\n}\n\n/**\n * Parse a date string to a Date object.\n *\n * @param input - The date string to parse\n * @returns The parsed Date object\n * @throws TypeError if input is not a valid date\n */\nexport function dateParser(input: string): Date {\n const date = new Date(input);\n if (Number.isNaN(date.getTime())) {\n throw new TypeError(\n `Invalid date: ${input}. Try using the ISO 8601 format (YYYY-MM-DDTHH:MM:SS.SSSZ)`,\n );\n }\n return date;\n}\n"]}
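The source map above embeds the full source of src/lib/cli/parsers.ts in its sourcesContent. As a quick orientation for readers skimming the diff, here is a minimal usage sketch of those validators. The function bodies are real (see the embedded source); the import path and sample inputs are illustrative assumptions, since the package does not document these as public exports.

// Minimal usage sketch of the parsers shown in the map above (TypeScript).
// The import path is an assumption; adjust to wherever the module lives.
import { uuidParser, urlParser, arrayParser, dateParser } from './src/lib/cli/parsers';

uuidParser('123e4567-e89b-12d3-a456-426614174000'); // returns the input unchanged
urlParser('https://api.transcend.io/'); // 'https://api.transcend.io' (trailing slash stripped)
arrayParser('a, b, , c'); // ['a', 'b', 'c'] (values trimmed, empties dropped)
dateParser('2024-01-01T00:00:00.000Z'); // returns a valid Date instance
// dateParser('not-a-date') throws a TypeError suggesting the ISO 8601 format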
@@ -1,2 +1,2 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunk7O5DWH3Mcjs = require('./chunk-7O5DWH3M.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkMFIMY5RYcjs = require('./chunk-MFIMY5RY.cjs');var _iots = require('io-ts'); var e = _interopRequireWildcard(_iots); var S = _interopRequireWildcard(_iots);var _typeutils = require('@transcend-io/type-utils');var X=e.type({identifier:e.string,type:e.string,coreIdentifier:e.string,dataSiloId:e.string,requestId:e.string,nonce:e.string,requestCreatedAt:e.string,daysUntilOverdue:e.number,attributes:e.array(e.type({key:e.string,values:e.array(e.string)}))});async function U(r,{dataSiloId:s,limit:c=100,offset:n=0,requestType:a}){try{let i=await r.get(`v1/data-silo/${s}/pending-requests/${a}`,{searchParams:{offset:n,limit:c}}).json(),{items:f}=_typeutils.decodeCodec.call(void 0, e.type({items:e.array(X)}),i);return f}catch(i){throw new Error(`Received an error from server: ${_optionalChain([i, 'optionalAccess', _2 => _2.response, 'optionalAccess', _3 => _3.body])||_optionalChain([i, 'optionalAccess', _4 => _4.message])}`)}}var L=S.type({nonce:S.string,identifier:S.string});async function B(r,{nonce:s,identifier:c}){try{return await r.put("v1/data-silo",{headers:{"x-transcend-nonce":s},json:{profiles:[{profileId:c}]}}),!0}catch(n){if(_optionalChain([n, 'access', _5 => _5.response, 'optionalAccess', _6 => _6.statusCode])===409)return!1;throw new Error(`Received an error from server: ${_optionalChain([n, 'optionalAccess', _7 => _7.response, 'optionalAccess', _8 => _8.body])||_optionalChain([n, 'optionalAccess', _9 => _9.message])}`)}}var _bluebird = require('bluebird');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _cliprogress = require('cli-progress'); var _cliprogress2 = _interopRequireDefault(_cliprogress);async function ge({file:r,dataSiloId:s,auth:c,sombraAuth:n,concurrency:a=100,transcendUrl:i=_chunkMFIMY5RYcjs.r,sleepSeconds:f=10}){let y=await _chunk7O5DWH3Mcjs.wc.call(void 0, i,c,n);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Reading "${r}" from disk`));let o=_chunk7O5DWH3Mcjs.qc.call(void 0, r,L);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Notifying Transcend for data silo "${s}" marking "${o.length}" identifiers as completed.`));let C=new Date().getTime(),w=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),d=0,u=0,l=0;w.start(o.length,0);let m=_chunkMFIMY5RYcjs.b.call(void 0, o,a),I=m.length;await _bluebird.mapSeries.call(void 0, m,async($,P)=>{_chunkZUNVPK23cjs.a.info(_colors2.default.blue(`Processing chunk ${P+1}/${I} 
(${_chunkMFIMY5RYcjs.b.length} items)`)),await _bluebird.map.call(void 0, $,async h=>{try{await B(y,h)?d+=1:u+=1}catch(b){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Error notifying Transcend for identifier "${h.identifier}" - ${_optionalChain([b, 'optionalAccess', _10 => _10.message])}`)),l+=1}w.update(d+u)}),f>0&&P<I-1&&(_chunkZUNVPK23cjs.a.info(_colors2.default.yellow(`Sleeping for ${f}s before next chunk...`)),await new Promise(h=>{setTimeout(h,f*1e3)}))}),w.stop();let D=new Date().getTime()-C;if(_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully notified Transcend for ${d} identifiers in "${D/1e3}" seconds!`)),u&&_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`There were ${u} identifiers that were not in a state to be updated.They likely have already been resolved.`)),l)throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`There were ${l} identifiers that failed to be updated. Please review the logs for more information.`)),new Error("Failed to update all identifiers");return o.length}var _privacytypes = require('@transcend-io/privacy-types');async function Se({requestIds:r,dataSiloId:s,auth:c,concurrency:n=100,status:a=_privacytypes.RequestDataSiloStatus.Resolved,transcendUrl:i=_chunkMFIMY5RYcjs.r}){let f=_chunk7O5DWH3Mcjs.vc.call(void 0, i,c),y=new Date().getTime(),o=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Notifying Transcend for data silo "${s}" marking "${r.length}" requests as completed.`));let C=0;o.start(r.length,0),await _bluebird.map.call(void 0, r,async u=>{let l=await _chunk7O5DWH3Mcjs.Bd.call(void 0, f,{requestId:u,dataSiloId:s});try{await _chunk7O5DWH3Mcjs.Wf.call(void 0, f,_chunk7O5DWH3Mcjs.ua,{requestDataSiloId:l.id,status:a})}catch(m){if(!m.message.includes("Client error: Request must be active:"))throw m}C+=1,o.update(C)},{concurrency:n}),o.stop();let d=new Date().getTime()-y;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully notified Transcend in "${d/1e3}" seconds!`)),r.length}async function _e({dataSiloId:r,auth:s,sombraAuth:c,actions:n,apiPageSize:a=100,savePageSize:i=1e3,onSave:f,transcendUrl:y=_chunkMFIMY5RYcjs.r,skipRequestCount:o=!1}){if(i%a!==0)throw new Error(`savePageSize must be a multiple of apiPageSize. 
savePageSize: ${i}, apiPageSize: ${a}`);let C=await _chunk7O5DWH3Mcjs.wc.call(void 0, y,s,c),w=_chunk7O5DWH3Mcjs.vc.call(void 0, y,s),d=0;o||(d=await _chunk7O5DWH3Mcjs.Cd.call(void 0, w,{dataSiloId:r})),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Pulling ${o?"all":d} outstanding request identifiers for data silo: "${r}" for requests of types "${n.join('", "')}"`));let u=new Date().getTime(),l=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),m=new Set,I=[],g=[];o||l.start(d,0),await _bluebird.mapSeries.call(void 0, n,async $=>{let P=0,h=!0;for(;h;){let b=await U(C,{dataSiloId:r,limit:a,offset:P,requestType:$}),k=b.map(A=>(m.add(A.requestId),{...A,action:$})),J=k.map(({attributes:A,...K})=>({...K,...A.reduce((M,E)=>Object.assign(M,{[E.key]:E.values.join(",")}),{})}));I.push(...k),g.push(...J),g.length>=i&&(await f(g),g=[]),h=b.length===a,P+=a,o?_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Pulled ${b.length} outstanding identifiers for ${m.size} requests`)):l.update(m.size)}}),g.length>0&&await f(g),o||l.stop();let D=new Date().getTime()-u;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled ${I.length} outstanding identifiers from ${m.size} requests in "${D/1e3}" seconds!`)),{identifiers:I}}exports.a = X; exports.b = U; exports.c = L; exports.d = B; exports.e = ge; exports.f = Se; exports.g = _e;
- //# sourceMappingURL=chunk-KU3UCRAF.cjs.map
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkRSN3DUTPcjs = require('./chunk-RSN3DUTP.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkE57K4DGXcjs = require('./chunk-E57K4DGX.cjs');var _iots = require('io-ts'); var e = _interopRequireWildcard(_iots); var S = _interopRequireWildcard(_iots);var _typeutils = require('@transcend-io/type-utils');var X=e.type({identifier:e.string,type:e.string,coreIdentifier:e.string,dataSiloId:e.string,requestId:e.string,nonce:e.string,requestCreatedAt:e.string,daysUntilOverdue:e.number,attributes:e.array(e.type({key:e.string,values:e.array(e.string)}))});async function U(r,{dataSiloId:s,limit:c=100,offset:n=0,requestType:a}){try{let i=await r.get(`v1/data-silo/${s}/pending-requests/${a}`,{searchParams:{offset:n,limit:c}}).json(),{items:f}=_typeutils.decodeCodec.call(void 0, e.type({items:e.array(X)}),i);return f}catch(i){throw new Error(`Received an error from server: ${_optionalChain([i, 'optionalAccess', _2 => _2.response, 'optionalAccess', _3 => _3.body])||_optionalChain([i, 'optionalAccess', _4 => _4.message])}`)}}var L=S.type({nonce:S.string,identifier:S.string});async function B(r,{nonce:s,identifier:c}){try{return await r.put("v1/data-silo",{headers:{"x-transcend-nonce":s},json:{profiles:[{profileId:c}]}}),!0}catch(n){if(_optionalChain([n, 'access', _5 => _5.response, 'optionalAccess', _6 => _6.statusCode])===409)return!1;throw new Error(`Received an error from server: ${_optionalChain([n, 'optionalAccess', _7 => _7.response, 'optionalAccess', _8 => _8.body])||_optionalChain([n, 'optionalAccess', _9 => _9.message])}`)}}var _bluebird = require('bluebird');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _cliprogress = require('cli-progress'); var _cliprogress2 = _interopRequireDefault(_cliprogress);async function ge({file:r,dataSiloId:s,auth:c,sombraAuth:n,concurrency:a=100,transcendUrl:i=_chunkE57K4DGXcjs.r,sleepSeconds:f=10}){let y=await _chunkRSN3DUTPcjs.wc.call(void 0, i,c,n);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Reading "${r}" from disk`));let o=_chunkRSN3DUTPcjs.qc.call(void 0, r,L);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Notifying Transcend for data silo "${s}" marking "${o.length}" identifiers as completed.`));let C=new Date().getTime(),w=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),d=0,u=0,l=0;w.start(o.length,0);let m=_chunkE57K4DGXcjs.b.call(void 0, o,a),I=m.length;await _bluebird.mapSeries.call(void 0, m,async($,P)=>{_chunkZUNVPK23cjs.a.info(_colors2.default.blue(`Processing chunk ${P+1}/${I} 
(${_chunkE57K4DGXcjs.b.length} items)`)),await _bluebird.map.call(void 0, $,async h=>{try{await B(y,h)?d+=1:u+=1}catch(b){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Error notifying Transcend for identifier "${h.identifier}" - ${_optionalChain([b, 'optionalAccess', _10 => _10.message])}`)),l+=1}w.update(d+u)}),f>0&&P<I-1&&(_chunkZUNVPK23cjs.a.info(_colors2.default.yellow(`Sleeping for ${f}s before next chunk...`)),await new Promise(h=>{setTimeout(h,f*1e3)}))}),w.stop();let D=new Date().getTime()-C;if(_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully notified Transcend for ${d} identifiers in "${D/1e3}" seconds!`)),u&&_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`There were ${u} identifiers that were not in a state to be updated.They likely have already been resolved.`)),l)throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`There were ${l} identifiers that failed to be updated. Please review the logs for more information.`)),new Error("Failed to update all identifiers");return o.length}var _privacytypes = require('@transcend-io/privacy-types');async function Se({requestIds:r,dataSiloId:s,auth:c,concurrency:n=100,status:a=_privacytypes.RequestDataSiloStatus.Resolved,transcendUrl:i=_chunkE57K4DGXcjs.r}){let f=_chunkRSN3DUTPcjs.vc.call(void 0, i,c),y=new Date().getTime(),o=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Notifying Transcend for data silo "${s}" marking "${r.length}" requests as completed.`));let C=0;o.start(r.length,0),await _bluebird.map.call(void 0, r,async u=>{let l=await _chunkRSN3DUTPcjs.Cd.call(void 0, f,{requestId:u,dataSiloId:s});try{await _chunkRSN3DUTPcjs.Xf.call(void 0, f,_chunkRSN3DUTPcjs.ua,{requestDataSiloId:l.id,status:a})}catch(m){if(!m.message.includes("Client error: Request must be active:"))throw m}C+=1,o.update(C)},{concurrency:n}),o.stop();let d=new Date().getTime()-y;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully notified Transcend in "${d/1e3}" seconds!`)),r.length}async function _e({dataSiloId:r,auth:s,sombraAuth:c,actions:n,apiPageSize:a=100,savePageSize:i=1e3,onSave:f,transcendUrl:y=_chunkE57K4DGXcjs.r,skipRequestCount:o=!1}){if(i%a!==0)throw new Error(`savePageSize must be a multiple of apiPageSize. 
savePageSize: ${i}, apiPageSize: ${a}`);let C=await _chunkRSN3DUTPcjs.wc.call(void 0, y,s,c),w=_chunkRSN3DUTPcjs.vc.call(void 0, y,s),d=0;o||(d=await _chunkRSN3DUTPcjs.Dd.call(void 0, w,{dataSiloId:r})),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Pulling ${o?"all":d} outstanding request identifiers for data silo: "${r}" for requests of types "${n.join('", "')}"`));let u=new Date().getTime(),l=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),m=new Set,I=[],g=[];o||l.start(d,0),await _bluebird.mapSeries.call(void 0, n,async $=>{let P=0,h=!0;for(;h;){let b=await U(C,{dataSiloId:r,limit:a,offset:P,requestType:$}),k=b.map(A=>(m.add(A.requestId),{...A,action:$})),J=k.map(({attributes:A,...K})=>({...K,...A.reduce((M,E)=>Object.assign(M,{[E.key]:E.values.join(",")}),{})}));I.push(...k),g.push(...J),g.length>=i&&(await f(g),g=[]),h=b.length===a,P+=a,o?_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Pulled ${b.length} outstanding identifiers for ${m.size} requests`)):l.update(m.size)}}),g.length>0&&await f(g),o||l.stop();let D=new Date().getTime()-u;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled ${I.length} outstanding identifiers from ${m.size} requests in "${D/1e3}" seconds!`)),{identifiers:I}}exports.a = X; exports.b = U; exports.c = L; exports.d = B; exports.e = ge; exports.f = Se; exports.g = _e;
+ //# sourceMappingURL=chunk-HBKA5Z5A.cjs.map
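For orientation: the chunk renamed above carries the cron notify-completed helpers. De-minified, the upload helper (local name B, exported as d) reads roughly as below. The descriptive names are invented; the endpoint, header, payload shape, and 409 handling mirror the minified code.

// Hedged de-minified sketch of the `B` helper in the chunk above (TypeScript).
import type { Got } from 'got';

async function markIdentifierCompleted(
  sombra: Got,
  { nonce, identifier }: { nonce: string; identifier: string },
): Promise<boolean> {
  try {
    await sombra.put('v1/data-silo', {
      headers: { 'x-transcend-nonce': nonce },
      json: { profiles: [{ profileId: identifier }] },
    });
    return true; // successfully marked completed
  } catch (err) {
    // A 409 means the identifier was already resolved; callers count it as a skip
    if ((err as any)?.response?.statusCode === 409) {
      return false;
    }
    throw new Error(
      `Received an error from server: ${(err as any)?.response?.body || (err as any)?.message}`,
    );
  }
}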
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-KU3UCRAF.cjs","../src/lib/cron/pullCronPageOfIdentifiers.ts"],"names":["CronIdentifier","pullCronPageOfIdentifiers","sombra","dataSiloId","limit","offset","requestType","response"],"mappings":"AAAA,u/BAAyF,wDAAyC,wDAAgD,6GCA/J,qDACS,IAIfA,CAAAA,CAAmB,CAAA,CAAA,IAAA,CAAK,CAEnC,UAAA,CAAc,CAAA,CAAA,MAAA,CAEd,IAAA,CAAQ,CAAA,CAAA,MAAA,CAER,cAAA,CAAkB,CAAA,CAAA,MAAA,CAElB,UAAA,CAAc,CAAA,CAAA,MAAA,CAEd,SAAA,CAAa,CAAA,CAAA,MAAA,CAEb,KAAA,CAAS,CAAA,CAAA,MAAA,CAET,gBAAA,CAAoB,CAAA,CAAA,MAAA,CAEpB,gBAAA,CAAoB,CAAA,CAAA,MAAA,CAEpB,UAAA,CAAc,CAAA,CAAA,KAAA,CACV,CAAA,CAAA,IAAA,CAAK,CACL,GAAA,CAAO,CAAA,CAAA,MAAA,CACP,MAAA,CAAU,CAAA,CAAA,KAAA,CAAQ,CAAA,CAAA,MAAM,CAC1B,CAAC,CACH,CACF,CAAC,CAAA,CAaD,MAAA,SAAsBC,CAAAA,CACpBC,CAAAA,CACA,CACE,UAAA,CAAAC,CAAAA,CACA,KAAA,CAAAC,CAAAA,CAAQ,GAAA,CACR,MAAA,CAAAC,CAAAA,CAAS,CAAA,CACT,WAAA,CAAAC,CACF,CAAA,CAU2B,CAC3B,GAAI,CAEF,IAAMC,CAAAA,CAAW,MAAML,CAAAA,CACpB,GAAA,CAAI,CAAA,aAAA,EAAgBC,CAAU,CAAA,kBAAA,EAAqBG,CAAW,CAAA,CAAA","file":"/home/runner/work/cli/cli/dist/chunk-KU3UCRAF.cjs","sourcesContent":[null,"import * as t from 'io-ts';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { RequestAction } from '@transcend-io/privacy-types';\nimport type { Got } from 'got';\n\nexport const CronIdentifier = t.type({\n /** The identifier value */\n identifier: t.string,\n /** The type of identifier */\n type: t.string,\n /** The core identifier of the request */\n coreIdentifier: t.string,\n /** The ID of the underlying data silo */\n dataSiloId: t.string,\n /** The ID of the underlying request */\n requestId: t.string,\n /** The request nonce */\n nonce: t.string,\n /** The time the request was created */\n requestCreatedAt: t.string,\n /** The number of days until the request is overdue */\n daysUntilOverdue: t.number,\n /** Request attributes */\n attributes: t.array(\n t.type({\n key: t.string,\n values: t.array(t.string),\n }),\n ),\n});\n\n/** Type override */\nexport type CronIdentifier = t.TypeOf<typeof CronIdentifier>;\n\n/**\n * Pull a offset of identifiers for a cron job\n *\n * @see https://docs.transcend.io/docs/api-reference/GET/v1/data-silo/(id)/pending-requests/(type)\n * @param sombra - Sombra instance configured to make requests\n * @param options - Additional options\n * @returns Successfully submitted request\n */\nexport async function pullCronPageOfIdentifiers(\n sombra: Got,\n {\n dataSiloId,\n limit = 100,\n offset = 0,\n requestType,\n }: {\n /** Data Silo ID */\n dataSiloId: string;\n /** Type of request */\n requestType: RequestAction;\n /** Number of identifiers to pull in */\n limit?: number;\n /** Page to pull in */\n offset?: number;\n },\n): Promise<CronIdentifier[]> {\n try {\n // Make the GraphQL request\n const response = await sombra\n .get(`v1/data-silo/${dataSiloId}/pending-requests/${requestType}`, {\n searchParams: {\n offset,\n limit,\n },\n })\n .json();\n\n const { items } = decodeCodec(\n t.type({\n items: t.array(CronIdentifier),\n }),\n response,\n );\n return items;\n } catch (err) {\n throw new Error(\n `Received an error from server: ${err?.response?.body || err?.message}`,\n );\n }\n}\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-HBKA5Z5A.cjs","../src/lib/cron/pullCronPageOfIdentifiers.ts"],"names":["CronIdentifier","pullCronPageOfIdentifiers","sombra","dataSiloId","limit","offset","requestType","response"],"mappings":"AAAA,u/BAAyF,wDAAyC,wDAAgD,6GCA/J,qDACS,IAIfA,CAAAA,CAAmB,CAAA,CAAA,IAAA,CAAK,CAEnC,UAAA,CAAc,CAAA,CAAA,MAAA,CAEd,IAAA,CAAQ,CAAA,CAAA,MAAA,CAER,cAAA,CAAkB,CAAA,CAAA,MAAA,CAElB,UAAA,CAAc,CAAA,CAAA,MAAA,CAEd,SAAA,CAAa,CAAA,CAAA,MAAA,CAEb,KAAA,CAAS,CAAA,CAAA,MAAA,CAET,gBAAA,CAAoB,CAAA,CAAA,MAAA,CAEpB,gBAAA,CAAoB,CAAA,CAAA,MAAA,CAEpB,UAAA,CAAc,CAAA,CAAA,KAAA,CACV,CAAA,CAAA,IAAA,CAAK,CACL,GAAA,CAAO,CAAA,CAAA,MAAA,CACP,MAAA,CAAU,CAAA,CAAA,KAAA,CAAQ,CAAA,CAAA,MAAM,CAC1B,CAAC,CACH,CACF,CAAC,CAAA,CAaD,MAAA,SAAsBC,CAAAA,CACpBC,CAAAA,CACA,CACE,UAAA,CAAAC,CAAAA,CACA,KAAA,CAAAC,CAAAA,CAAQ,GAAA,CACR,MAAA,CAAAC,CAAAA,CAAS,CAAA,CACT,WAAA,CAAAC,CACF,CAAA,CAU2B,CAC3B,GAAI,CAEF,IAAMC,CAAAA,CAAW,MAAML,CAAAA,CACpB,GAAA,CAAI,CAAA,aAAA,EAAgBC,CAAU,CAAA,kBAAA,EAAqBG,CAAW,CAAA,CAAA","file":"/home/runner/work/cli/cli/dist/chunk-HBKA5Z5A.cjs","sourcesContent":[null,"import * as t from 'io-ts';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { RequestAction } from '@transcend-io/privacy-types';\nimport type { Got } from 'got';\n\nexport const CronIdentifier = t.type({\n /** The identifier value */\n identifier: t.string,\n /** The type of identifier */\n type: t.string,\n /** The core identifier of the request */\n coreIdentifier: t.string,\n /** The ID of the underlying data silo */\n dataSiloId: t.string,\n /** The ID of the underlying request */\n requestId: t.string,\n /** The request nonce */\n nonce: t.string,\n /** The time the request was created */\n requestCreatedAt: t.string,\n /** The number of days until the request is overdue */\n daysUntilOverdue: t.number,\n /** Request attributes */\n attributes: t.array(\n t.type({\n key: t.string,\n values: t.array(t.string),\n }),\n ),\n});\n\n/** Type override */\nexport type CronIdentifier = t.TypeOf<typeof CronIdentifier>;\n\n/**\n * Pull a offset of identifiers for a cron job\n *\n * @see https://docs.transcend.io/docs/api-reference/GET/v1/data-silo/(id)/pending-requests/(type)\n * @param sombra - Sombra instance configured to make requests\n * @param options - Additional options\n * @returns Successfully submitted request\n */\nexport async function pullCronPageOfIdentifiers(\n sombra: Got,\n {\n dataSiloId,\n limit = 100,\n offset = 0,\n requestType,\n }: {\n /** Data Silo ID */\n dataSiloId: string;\n /** Type of request */\n requestType: RequestAction;\n /** Number of identifiers to pull in */\n limit?: number;\n /** Page to pull in */\n offset?: number;\n },\n): Promise<CronIdentifier[]> {\n try {\n // Make the GraphQL request\n const response = await sombra\n .get(`v1/data-silo/${dataSiloId}/pending-requests/${requestType}`, {\n searchParams: {\n offset,\n limit,\n },\n })\n .json();\n\n const { items } = decodeCodec(\n t.type({\n items: t.array(CronIdentifier),\n }),\n response,\n );\n return items;\n } catch (err) {\n throw new Error(\n `Received an error from server: ${err?.response?.body || err?.message}`,\n );\n }\n}\n"]}
@@ -1,3 +1,3 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkLR3CPNDMcjs = require('./chunk-LR3CPNDM.cjs');var _chunk7O5DWH3Mcjs = require('./chunk-7O5DWH3M.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkMFIMY5RYcjs = require('./chunk-MFIMY5RY.cjs');var _privacytypes = require('@transcend-io/privacy-types');var _bluebird = require('bluebird');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function te({file:u,auth:s,sombraAuth:g,requestActions:f=[],concurrency:p=100,transcendUrl:n=_chunkMFIMY5RYcjs.r}){let o=_chunk7O5DWH3Mcjs.vc.call(void 0, n,s),e=await _chunk7O5DWH3Mcjs.wc.call(void 0, n,s,g);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Pulling manual enrichment requests, filtered for actions: ${f.join(",")}`));let d=await _chunk7O5DWH3Mcjs.jd.call(void 0, o,{actions:f,statuses:[_privacytypes.RequestStatus.Enriching]}),i=[];await _bluebird.map.call(void 0, d,async t=>{let c=await _chunk7O5DWH3Mcjs.ed.call(void 0, o,{requestId:t.id});if(c.filter(({status:m})=>m==="ACTION_REQUIRED")){let m=await _chunk7O5DWH3Mcjs.hd.call(void 0, o,e,{requestId:t.id});i.push({...t,requestIdentifiers:m,requestEnrichers:c})}},{concurrency:p});let l=i.map(({attributeValues:t,requestIdentifiers:c,requestEnrichers:I,...m})=>({...m,...Object.entries(_chunkMFIMY5RYcjs.d.call(void 0, c,"name")).reduce((w,[P,$])=>Object.assign(w,{[P]:$.map(({value:C})=>C).join(",")}),{}),...Object.entries(_chunkMFIMY5RYcjs.d.call(void 0, t,"attributeKey.name")).reduce((w,[P,$])=>Object.assign(w,{[P]:$.map(({name:C})=>C).join(",")}),{})})),a=_chunkMFIMY5RYcjs.j.call(void 0, l.map(t=>Object.keys(t)).flat());return _chunkLR3CPNDMcjs.c.call(void 0, u,l,a),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully wrote ${i.length} requests to file "${u}"`)),i}var _iots = require('io-ts'); var R = _interopRequireWildcard(_iots);var T="https://app.transcend.io/privacy-requests/incoming-requests/",F= exports.b =R.record(R.string,R.string);async function M(u,{id:s,...g},f,p){if(!s){let e=`Request ID must be provided to enricher request.${p?` Found error in row: ${p}`:""}`;throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(e)),new Error(e)}let n=s.toLowerCase(),o=Object.entries(g).reduce((e,[d,i])=>_chunkMFIMY5RYcjs.j.call(void 0, _chunk7O5DWH3Mcjs.nc.call(void 0, i)).length===0?e:Object.assign(e,{[d]:_chunkMFIMY5RYcjs.j.call(void 0, _chunk7O5DWH3Mcjs.nc.call(void 0, i)).map(a=>({value:d==="email"?a.toLowerCase():a}))}),{});try{return await u.post("v1/enrich-identifiers",{headers:{"x-transcend-request-id":n,"x-transcend-enricher-id":f},json:{enrichedIdentifiers:o}}).json(),_chunkZUNVPK23cjs.a.error(_colors2.default.green(`Successfully enriched request: ${T}${n}`)),!0}catch(e){if(typeof e.response.body=="string"&&e.response.body.includes("Cannot update a resolved RequestEnricher"))return _chunkZUNVPK23cjs.a.warn(_colors2.default.magenta(`Skipped enrichment for request: ${T}${n}, request is no longer in the enriching phase.`)),!1;throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to enricher identifiers for request with id: ${T}${n} - ${e.message} - 
${e.response.body}`)),e}}async function ge({file:u,auth:s,sombraAuth:g,enricherId:f,markSilent:p,concurrency:n=100,transcendUrl:o=_chunkMFIMY5RYcjs.r}){let e=await _chunk7O5DWH3Mcjs.wc.call(void 0, o,s,g),d=_chunk7O5DWH3Mcjs.vc.call(void 0, o,s);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Reading "${u}" from disk`));let i=_chunk7O5DWH3Mcjs.qc.call(void 0, u,F);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Enriching "${i.length}" privacy requests.`));let l=0,a=0,t=0;if(await _bluebird.map.call(void 0, i,async(c,I)=>{try{p&&(await _chunk7O5DWH3Mcjs.Wf.call(void 0, d,_chunk7O5DWH3Mcjs.Y,{input:{id:c.id,isSilent:!0}}),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Mark request as silent mode - ${c.id}`))),await M(e,c,f,I)?l+=1:a+=1}catch (e2){t+=1}},{concurrency:n}),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully notified Transcend!
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkLR3CPNDMcjs = require('./chunk-LR3CPNDM.cjs');var _chunkRSN3DUTPcjs = require('./chunk-RSN3DUTP.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkE57K4DGXcjs = require('./chunk-E57K4DGX.cjs');var _privacytypes = require('@transcend-io/privacy-types');var _bluebird = require('bluebird');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function te({file:u,auth:s,sombraAuth:g,requestActions:f=[],concurrency:p=100,transcendUrl:n=_chunkE57K4DGXcjs.r}){let o=_chunkRSN3DUTPcjs.vc.call(void 0, n,s),e=await _chunkRSN3DUTPcjs.wc.call(void 0, n,s,g);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Pulling manual enrichment requests, filtered for actions: ${f.join(",")}`));let d=await _chunkRSN3DUTPcjs.kd.call(void 0, o,{actions:f,statuses:[_privacytypes.RequestStatus.Enriching]}),i=[];await _bluebird.map.call(void 0, d,async t=>{let c=await _chunkRSN3DUTPcjs.fd.call(void 0, o,{requestId:t.id});if(c.filter(({status:m})=>m==="ACTION_REQUIRED")){let m=await _chunkRSN3DUTPcjs.id.call(void 0, o,e,{requestId:t.id});i.push({...t,requestIdentifiers:m,requestEnrichers:c})}},{concurrency:p});let l=i.map(({attributeValues:t,requestIdentifiers:c,requestEnrichers:I,...m})=>({...m,...Object.entries(_chunkE57K4DGXcjs.d.call(void 0, c,"name")).reduce((w,[P,$])=>Object.assign(w,{[P]:$.map(({value:C})=>C).join(",")}),{}),...Object.entries(_chunkE57K4DGXcjs.d.call(void 0, t,"attributeKey.name")).reduce((w,[P,$])=>Object.assign(w,{[P]:$.map(({name:C})=>C).join(",")}),{})})),a=_chunkE57K4DGXcjs.j.call(void 0, l.map(t=>Object.keys(t)).flat());return _chunkLR3CPNDMcjs.c.call(void 0, u,l,a),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully wrote ${i.length} requests to file "${u}"`)),i}var _iots = require('io-ts'); var R = _interopRequireWildcard(_iots);var T="https://app.transcend.io/privacy-requests/incoming-requests/",F= exports.b =R.record(R.string,R.string);async function M(u,{id:s,...g},f,p){if(!s){let e=`Request ID must be provided to enricher request.${p?` Found error in row: ${p}`:""}`;throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(e)),new Error(e)}let n=s.toLowerCase(),o=Object.entries(g).reduce((e,[d,i])=>_chunkE57K4DGXcjs.j.call(void 0, _chunkRSN3DUTPcjs.nc.call(void 0, i)).length===0?e:Object.assign(e,{[d]:_chunkE57K4DGXcjs.j.call(void 0, _chunkRSN3DUTPcjs.nc.call(void 0, i)).map(a=>({value:d==="email"?a.toLowerCase():a}))}),{});try{return await u.post("v1/enrich-identifiers",{headers:{"x-transcend-request-id":n,"x-transcend-enricher-id":f},json:{enrichedIdentifiers:o}}).json(),_chunkZUNVPK23cjs.a.error(_colors2.default.green(`Successfully enriched request: ${T}${n}`)),!0}catch(e){if(typeof e.response.body=="string"&&e.response.body.includes("Cannot update a resolved RequestEnricher"))return _chunkZUNVPK23cjs.a.warn(_colors2.default.magenta(`Skipped enrichment for request: ${T}${n}, request is no longer in the enriching phase.`)),!1;throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to enricher identifiers for request with id: ${T}${n} - ${e.message} - 
${e.response.body}`)),e}}async function ge({file:u,auth:s,sombraAuth:g,enricherId:f,markSilent:p,concurrency:n=100,transcendUrl:o=_chunkE57K4DGXcjs.r}){let e=await _chunkRSN3DUTPcjs.wc.call(void 0, o,s,g),d=_chunkRSN3DUTPcjs.vc.call(void 0, o,s);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Reading "${u}" from disk`));let i=_chunkRSN3DUTPcjs.qc.call(void 0, u,F);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Enriching "${i.length}" privacy requests.`));let l=0,a=0,t=0;if(await _bluebird.map.call(void 0, i,async(c,I)=>{try{p&&(await _chunkRSN3DUTPcjs.Xf.call(void 0, d,_chunkRSN3DUTPcjs.Y,{input:{id:c.id,isSilent:!0}}),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Mark request as silent mode - ${c.id}`))),await M(e,c,f,I)?l+=1:a+=1}catch (e2){t+=1}},{concurrency:n}),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully notified Transcend!
  Success count: ${l}.`)),a>0&&_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Skipped count: ${a}.`)),t>0)throw _chunkZUNVPK23cjs.a.info(_colors2.default.red(`Error Count: ${t}.`)),new Error(`Failed to enrich: ${t} requests.`);return i.length}exports.a = te; exports.b = F; exports.c = M; exports.d = ge;
- //# sourceMappingURL=chunk-NAGRUVXK.cjs.map
+ //# sourceMappingURL=chunk-K2E6TCB5.cjs.map
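The manual-enrichment chunk renamed above pushes enriched identifiers through Sombra. Stripped of minification, the core call inside the per-row helper (local name M, exported as c) is roughly the following; the variable names are invented, while the endpoint and headers appear verbatim in the code.

// Hedged sketch of the enrichment POST (TypeScript); `sombra` is a Got
// instance and the other variables stand in for values derived from a CSV row.
await sombra
  .post('v1/enrich-identifiers', {
    headers: {
      'x-transcend-request-id': requestId.toLowerCase(), // request IDs are lowercased first
      'x-transcend-enricher-id': enricherId,
    },
    // per the minified code, shape is { [identifierName]: [{ value: string }] }
    json: { enrichedIdentifiers },
  })
  .json();
// A response body containing "Cannot update a resolved RequestEnricher" is
// treated as a skip rather than a failure.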
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-NAGRUVXK.cjs","../src/lib/manual-enrichment/pullManualEnrichmentIdentifiersToCsv.ts","../src/lib/manual-enrichment/pushManualEnrichmentIdentifiersFromCsv.ts"],"names":["pullManualEnrichmentIdentifiersToCsv","file","auth","sombraAuth","requestActions","concurrency","transcendUrl","DEFAULT_TRANSCEND_API","client","buildTranscendGraphQLClient","sombra","createSombraGotInstance","logger","colors"],"mappings":"AAAA,mfAAwC,wDAAyG,wDAAyC,wDAAuD,2DCApM,oCACzB,gFACD,MA6BnB,SAAsBA,EAAAA,CAAqC,CACzD,IAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,cAAA,CAAAC,CAAAA,CAAiB,CAAC,CAAA,CAClB,WAAA,CAAAC,CAAAA,CAAc,GAAA,CACd,YAAA,CAAAC,CAAAA,CAAeC,mBACjB,CAAA,CAa6C,CAE3C,IAAMC,CAAAA,CAASC,kCAAAA,CAA4BH,CAAcJ,CAAI,CAAA,CACvDQ,CAAAA,CAAS,MAAMC,kCAAAA,CAAwBL,CAAcJ,CAAAA,CAAMC,CAAU,CAAA,CAE3ES,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,OAAA,CACL,CAAA,0DAAA,EAA6DT,CAAAA,CAAe,IAAA,CAC1E,GACF,CAAC,CAAA,CAAA;ACuDP,gBAAA","file":"/home/runner/work/cli/cli/dist/chunk-NAGRUVXK.cjs","sourcesContent":[null,"import { RequestAction, RequestStatus } from '@transcend-io/privacy-types';\nimport { map } from 'bluebird';\nimport colors from 'colors';\nimport { groupBy, uniq } from 'lodash-es';\nimport { DEFAULT_TRANSCEND_API } from '../../constants';\nimport { writeCsv } from '../cron/writeCsv';\nimport {\n PrivacyRequest,\n RequestEnricher,\n RequestIdentifier,\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchAllRequestEnrichers,\n fetchAllRequestIdentifiers,\n fetchAllRequests,\n} from '../graphql';\nimport { logger } from '../../logger';\n\nexport interface PrivacyRequestWithIdentifiers extends PrivacyRequest {\n /** Request Enrichers */\n requestEnrichers: RequestEnricher[];\n /** Request Identifiers */\n requestIdentifiers: RequestIdentifier[];\n}\n\n/**\n * Pull the set of manual enrichment jobs to CSV\n *\n * @param options - Options\n * @returns List of requests with identifiers\n */\nexport async function pullManualEnrichmentIdentifiersToCsv({\n file,\n auth,\n sombraAuth,\n requestActions = [],\n concurrency = 100,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** CSV file path */\n file: string;\n /** Transcend API key authentication */\n auth: string;\n /** Sombra API key */\n sombraAuth?: string;\n /** Concurrency */\n concurrency?: number;\n /** The request actions to fetch */\n requestActions?: RequestAction[];\n /** API URL for Transcend backend */\n transcendUrl?: string;\n}): Promise<PrivacyRequestWithIdentifiers[]> {\n // Find all requests made before createdAt that are in a removing data state\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n\n logger.info(\n colors.magenta(\n `Pulling manual enrichment requests, filtered for actions: ${requestActions.join(\n ',',\n )}`,\n ),\n );\n\n // Pull all privacy requests\n const allRequests = await fetchAllRequests(client, {\n actions: requestActions,\n statuses: [RequestStatus.Enriching],\n });\n\n // Requests to save\n const savedRequests: PrivacyRequestWithIdentifiers[] = [];\n\n // Filter down requests to what is needed\n await map(\n allRequests,\n async (request) => {\n // Fetch enrichers\n const requestEnrichers = await fetchAllRequestEnrichers(client, {\n requestId: request.id,\n });\n\n // Check if manual enrichment exists for that request\n const hasManualEnrichment = requestEnrichers.filter(\n ({ status }) => status === 'ACTION_REQUIRED',\n );\n\n // Save request to 
queue\n if (hasManualEnrichment) {\n const requestIdentifiers = await fetchAllRequestIdentifiers(\n client,\n sombra,\n {\n requestId: request.id,\n },\n );\n savedRequests.push({\n ...request,\n requestIdentifiers,\n requestEnrichers,\n });\n }\n },\n {\n concurrency,\n },\n );\n\n const data = savedRequests.map(\n ({\n attributeValues,\n requestIdentifiers,\n requestEnrichers, // eslint-disable-line @typescript-eslint/no-unused-vars\n ...request\n }) => ({\n ...request,\n // flatten identifiers\n ...Object.entries(groupBy(requestIdentifiers, 'name')).reduce(\n (acc, [key, values]) =>\n Object.assign(acc, {\n [key]: values.map(({ value }) => value).join(','),\n }),\n {},\n ),\n // flatten attributes\n ...Object.entries(groupBy(attributeValues, 'attributeKey.name')).reduce(\n (acc, [key, values]) =>\n Object.assign(acc, {\n [key]: values.map(({ name }) => name).join(','),\n }),\n {},\n ),\n }),\n );\n\n // Write out to CSV\n const headers = uniq(data.map((d) => Object.keys(d)).flat());\n writeCsv(file, data, headers);\n\n logger.info(\n colors.green(\n `Successfully wrote ${savedRequests.length} requests to file \"${file}\"`,\n ),\n );\n\n return savedRequests;\n}\n","import colors from 'colors';\nimport { map } from 'bluebird';\nimport { logger } from '../../logger';\nimport {\n UPDATE_PRIVACY_REQUEST,\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n makeGraphQLRequest,\n} from '../graphql';\nimport {\n enrichPrivacyRequest,\n EnrichPrivacyRequest,\n} from './enrichPrivacyRequest';\nimport { readCsv } from '../requests';\nimport { DEFAULT_TRANSCEND_API } from '../../constants';\n\n/**\n * Push a CSV of enriched requests back into Transcend\n *\n * @param options - Options\n * @returns Number of items processed\n */\nexport async function pushManualEnrichmentIdentifiersFromCsv({\n file,\n auth,\n sombraAuth,\n enricherId,\n markSilent,\n concurrency = 100,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** CSV file path */\n file: string;\n /** Transcend API key authentication */\n auth: string;\n /** ID of enricher being uploaded to */\n enricherId: string;\n /** Sombra API key authentication */\n sombraAuth?: string;\n /** Concurrency */\n concurrency?: number;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Mark requests in silent mode before enriching */\n markSilent?: boolean;\n}): Promise<number> {\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Read from CSV\n logger.info(colors.magenta(`Reading \"${file}\" from disk`));\n const activeResults = readCsv(file, EnrichPrivacyRequest);\n\n // Notify Transcend\n logger.info(\n colors.magenta(`Enriching \"${activeResults.length}\" privacy requests.`),\n );\n\n let successCount = 0;\n let skippedCount = 0;\n let errorCount = 0;\n\n await map(\n activeResults,\n async (request, index) => {\n try {\n // Mark requests in silent mode before a certain date\n if (markSilent) {\n await makeGraphQLRequest(client, UPDATE_PRIVACY_REQUEST, {\n input: {\n id: request.id,\n isSilent: true,\n },\n });\n\n logger.info(\n colors.magenta(`Mark request as silent mode - ${request.id}`),\n );\n }\n\n const result = await enrichPrivacyRequest(\n sombra,\n request,\n enricherId,\n index,\n );\n if (result) {\n successCount += 1;\n } else {\n skippedCount += 1;\n }\n } catch (err) {\n errorCount += 1;\n }\n },\n { concurrency },\n );\n\n logger.info(\n colors.green(\n 
`Successfully notified Transcend! \\n Success count: ${successCount}.`,\n ),\n );\n\n if (skippedCount > 0) {\n logger.info(colors.magenta(`Skipped count: ${skippedCount}.`));\n }\n\n if (errorCount > 0) {\n logger.info(colors.red(`Error Count: ${errorCount}.`));\n throw new Error(`Failed to enrich: ${errorCount} requests.`);\n }\n\n return activeResults.length;\n}\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-K2E6TCB5.cjs","../src/lib/manual-enrichment/pullManualEnrichmentIdentifiersToCsv.ts","../src/lib/manual-enrichment/pushManualEnrichmentIdentifiersFromCsv.ts"],"names":["pullManualEnrichmentIdentifiersToCsv","file","auth","sombraAuth","requestActions","concurrency","transcendUrl","DEFAULT_TRANSCEND_API","client","buildTranscendGraphQLClient","sombra","createSombraGotInstance","logger","colors"],"mappings":"AAAA,mfAAwC,wDAAyG,wDAAyC,wDAAuD,2DCApM,oCACzB,gFACD,MA6BnB,SAAsBA,EAAAA,CAAqC,CACzD,IAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,cAAA,CAAAC,CAAAA,CAAiB,CAAC,CAAA,CAClB,WAAA,CAAAC,CAAAA,CAAc,GAAA,CACd,YAAA,CAAAC,CAAAA,CAAeC,mBACjB,CAAA,CAa6C,CAE3C,IAAMC,CAAAA,CAASC,kCAAAA,CAA4BH,CAAcJ,CAAI,CAAA,CACvDQ,CAAAA,CAAS,MAAMC,kCAAAA,CAAwBL,CAAcJ,CAAAA,CAAMC,CAAU,CAAA,CAE3ES,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,OAAA,CACL,CAAA,0DAAA,EAA6DT,CAAAA,CAAe,IAAA,CAC1E,GACF,CAAC,CAAA,CAAA;ACuDP,gBAAA","file":"/home/runner/work/cli/cli/dist/chunk-K2E6TCB5.cjs","sourcesContent":[null,"import { RequestAction, RequestStatus } from '@transcend-io/privacy-types';\nimport { map } from 'bluebird';\nimport colors from 'colors';\nimport { groupBy, uniq } from 'lodash-es';\nimport { DEFAULT_TRANSCEND_API } from '../../constants';\nimport { writeCsv } from '../cron/writeCsv';\nimport {\n PrivacyRequest,\n RequestEnricher,\n RequestIdentifier,\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchAllRequestEnrichers,\n fetchAllRequestIdentifiers,\n fetchAllRequests,\n} from '../graphql';\nimport { logger } from '../../logger';\n\nexport interface PrivacyRequestWithIdentifiers extends PrivacyRequest {\n /** Request Enrichers */\n requestEnrichers: RequestEnricher[];\n /** Request Identifiers */\n requestIdentifiers: RequestIdentifier[];\n}\n\n/**\n * Pull the set of manual enrichment jobs to CSV\n *\n * @param options - Options\n * @returns List of requests with identifiers\n */\nexport async function pullManualEnrichmentIdentifiersToCsv({\n file,\n auth,\n sombraAuth,\n requestActions = [],\n concurrency = 100,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** CSV file path */\n file: string;\n /** Transcend API key authentication */\n auth: string;\n /** Sombra API key */\n sombraAuth?: string;\n /** Concurrency */\n concurrency?: number;\n /** The request actions to fetch */\n requestActions?: RequestAction[];\n /** API URL for Transcend backend */\n transcendUrl?: string;\n}): Promise<PrivacyRequestWithIdentifiers[]> {\n // Find all requests made before createdAt that are in a removing data state\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n\n logger.info(\n colors.magenta(\n `Pulling manual enrichment requests, filtered for actions: ${requestActions.join(\n ',',\n )}`,\n ),\n );\n\n // Pull all privacy requests\n const allRequests = await fetchAllRequests(client, {\n actions: requestActions,\n statuses: [RequestStatus.Enriching],\n });\n\n // Requests to save\n const savedRequests: PrivacyRequestWithIdentifiers[] = [];\n\n // Filter down requests to what is needed\n await map(\n allRequests,\n async (request) => {\n // Fetch enrichers\n const requestEnrichers = await fetchAllRequestEnrichers(client, {\n requestId: request.id,\n });\n\n // Check if manual enrichment exists for that request\n const hasManualEnrichment = requestEnrichers.filter(\n ({ status }) => status === 'ACTION_REQUIRED',\n );\n\n // Save request to 
queue\n if (hasManualEnrichment) {\n const requestIdentifiers = await fetchAllRequestIdentifiers(\n client,\n sombra,\n {\n requestId: request.id,\n },\n );\n savedRequests.push({\n ...request,\n requestIdentifiers,\n requestEnrichers,\n });\n }\n },\n {\n concurrency,\n },\n );\n\n const data = savedRequests.map(\n ({\n attributeValues,\n requestIdentifiers,\n requestEnrichers, // eslint-disable-line @typescript-eslint/no-unused-vars\n ...request\n }) => ({\n ...request,\n // flatten identifiers\n ...Object.entries(groupBy(requestIdentifiers, 'name')).reduce(\n (acc, [key, values]) =>\n Object.assign(acc, {\n [key]: values.map(({ value }) => value).join(','),\n }),\n {},\n ),\n // flatten attributes\n ...Object.entries(groupBy(attributeValues, 'attributeKey.name')).reduce(\n (acc, [key, values]) =>\n Object.assign(acc, {\n [key]: values.map(({ name }) => name).join(','),\n }),\n {},\n ),\n }),\n );\n\n // Write out to CSV\n const headers = uniq(data.map((d) => Object.keys(d)).flat());\n writeCsv(file, data, headers);\n\n logger.info(\n colors.green(\n `Successfully wrote ${savedRequests.length} requests to file \"${file}\"`,\n ),\n );\n\n return savedRequests;\n}\n","import colors from 'colors';\nimport { map } from 'bluebird';\nimport { logger } from '../../logger';\nimport {\n UPDATE_PRIVACY_REQUEST,\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n makeGraphQLRequest,\n} from '../graphql';\nimport {\n enrichPrivacyRequest,\n EnrichPrivacyRequest,\n} from './enrichPrivacyRequest';\nimport { readCsv } from '../requests';\nimport { DEFAULT_TRANSCEND_API } from '../../constants';\n\n/**\n * Push a CSV of enriched requests back into Transcend\n *\n * @param options - Options\n * @returns Number of items processed\n */\nexport async function pushManualEnrichmentIdentifiersFromCsv({\n file,\n auth,\n sombraAuth,\n enricherId,\n markSilent,\n concurrency = 100,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** CSV file path */\n file: string;\n /** Transcend API key authentication */\n auth: string;\n /** ID of enricher being uploaded to */\n enricherId: string;\n /** Sombra API key authentication */\n sombraAuth?: string;\n /** Concurrency */\n concurrency?: number;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Mark requests in silent mode before enriching */\n markSilent?: boolean;\n}): Promise<number> {\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Read from CSV\n logger.info(colors.magenta(`Reading \"${file}\" from disk`));\n const activeResults = readCsv(file, EnrichPrivacyRequest);\n\n // Notify Transcend\n logger.info(\n colors.magenta(`Enriching \"${activeResults.length}\" privacy requests.`),\n );\n\n let successCount = 0;\n let skippedCount = 0;\n let errorCount = 0;\n\n await map(\n activeResults,\n async (request, index) => {\n try {\n // Mark requests in silent mode before a certain date\n if (markSilent) {\n await makeGraphQLRequest(client, UPDATE_PRIVACY_REQUEST, {\n input: {\n id: request.id,\n isSilent: true,\n },\n });\n\n logger.info(\n colors.magenta(`Mark request as silent mode - ${request.id}`),\n );\n }\n\n const result = await enrichPrivacyRequest(\n sombra,\n request,\n enricherId,\n index,\n );\n if (result) {\n successCount += 1;\n } else {\n skippedCount += 1;\n }\n } catch (err) {\n errorCount += 1;\n }\n },\n { concurrency },\n );\n\n logger.info(\n colors.green(\n 
`Successfully notified Transcend! \\n Success count: ${successCount}.`,\n ),\n );\n\n if (skippedCount > 0) {\n logger.info(colors.magenta(`Skipped count: ${skippedCount}.`));\n }\n\n if (errorCount > 0) {\n logger.info(colors.red(`Error Count: ${errorCount}.`));\n throw new Error(`Failed to enrich: ${errorCount} requests.`);\n }\n\n return activeResults.length;\n}\n"]}
@@ -1,2 +1,2 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkQEM6S2W7cjs = require('./chunk-QEM6S2W7.cjs');var _chunk7O5DWH3Mcjs = require('./chunk-7O5DWH3M.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkMFIMY5RYcjs = require('./chunk-MFIMY5RY.cjs');var _bluebird = require('bluebird');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function H({email:r,password:a,scopes:f,apiKeyTitle:t,parentOrganizationId:o,deleteExistingApiKey:l=!0,createNewApiKey:I=!0,transcendUrl:x=_chunkMFIMY5RYcjs.r}){let s=await _chunk7O5DWH3Mcjs.uc.call(void 0, x,{});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("Logging in using email and password."));let{roles:d,loginCookie:k}=await _chunk7O5DWH3Mcjs.Ed.call(void 0, s,{email:r,password:a});_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully logged in and found ${d.length} role${d.length===1?"":"s"}!`));let P=o?d.filter(e=>e.organization.id===o||e.organization.parentOrganizationId===o):d;s.setHeaders({Cookie:k});let m=[],c=[];return _chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Generating API keys with title: ${t}, scopes: ${f.join(",")}.`)),await _bluebird.mapSeries.call(void 0, P,async e=>{try{await _chunk7O5DWH3Mcjs.Fd.call(void 0, s,{roleId:e.id,email:r}),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Checking if API key already exists in organization "${e.organization.name}" with title: "${t}".`));let[g]=await _chunk7O5DWH3Mcjs.nd.call(void 0, s,[t]);if(g&&l)_chunkZUNVPK23cjs.a.info(_colors2.default.yellow(`Deleting existing API key in "${e.organization.name}" with title: "${t}".`)),await _chunk7O5DWH3Mcjs.Hd.call(void 0, s,g.id),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully deleted API key in "${e.organization.name}" with title: "${t}".`));else if(g)throw new Error(`API key already exists with title: "${t}"`);if(I){_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Creating API key in "${e.organization.name}" with title: "${t}".`));let{apiKey:K}=await _chunk7O5DWH3Mcjs.Gd.call(void 0, s,{title:t,scopes:f});m.push({organizationName:e.organization.name,organizationId:e.organization.id,apiKey:K}),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully created API key in "${e.organization.name}" with title: "${t}".`))}else m.push({organizationName:e.organization.name,organizationId:e.organization.id,apiKey:""})}catch(g){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to create API key in organization "${e.organization.name}"! - ${g.message}`)),c.push({organizationName:e.organization.name,organizationId:e.organization.id,error:g.message})}}),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully created ${m.length} API key${m.length===1?"":"s"}`)),c.length>0&&_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to create ${c.length} API key${c.length===1?"":"s"}!`)),{errors:c,apiKeys:m}}var _typeutils = require('@transcend-io/type-utils');var _iots = require('io-ts'); var S = _interopRequireWildcard(_iots);var _fs = require('fs');function T(r){return r||(_chunkZUNVPK23cjs.a.error(_colors2.default.red("A Transcend API key must be provided. 
You can specify using --auth=$TRANSCEND_API_KEY")),process.exit(1)),_fs.existsSync.call(void 0, r)?_typeutils.decodeCodec.call(void 0, S.array(_chunkQEM6S2W7cjs.ha),_fs.readFileSync.call(void 0, r,"utf-8")):r}function Z(r,a,f=!1){if(!_fs.existsSync.call(void 0, r))return[];let t=_fs.readdirSync.call(void 0, r).filter(o=>a?a.filter(l=>o.endsWith(l)).length:!0).filter(o=>o.indexOf(".")>0);return f?t.map(o=>o.replace(/\.[^/.]+$/,"")):t}var _path = require('path');function te(r){return _fs.readdirSync.call(void 0, r).filter(a=>_fs.statSync.call(void 0, _path.join.call(void 0, r,a)).isDirectory())}exports.a = H; exports.b = T; exports.c = Z; exports.d = te;
- //# sourceMappingURL=chunk-OKLX7XKW.cjs.map
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkQEM6S2W7cjs = require('./chunk-QEM6S2W7.cjs');var _chunkRSN3DUTPcjs = require('./chunk-RSN3DUTP.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkE57K4DGXcjs = require('./chunk-E57K4DGX.cjs');var _bluebird = require('bluebird');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function H({email:r,password:a,scopes:f,apiKeyTitle:t,parentOrganizationId:o,deleteExistingApiKey:l=!0,createNewApiKey:I=!0,transcendUrl:x=_chunkE57K4DGXcjs.r}){let s=await _chunkRSN3DUTPcjs.uc.call(void 0, x,{});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("Logging in using email and password."));let{roles:d,loginCookie:k}=await _chunkRSN3DUTPcjs.Fd.call(void 0, s,{email:r,password:a});_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully logged in and found ${d.length} role${d.length===1?"":"s"}!`));let P=o?d.filter(e=>e.organization.id===o||e.organization.parentOrganizationId===o):d;s.setHeaders({Cookie:k});let m=[],c=[];return _chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Generating API keys with title: ${t}, scopes: ${f.join(",")}.`)),await _bluebird.mapSeries.call(void 0, P,async e=>{try{await _chunkRSN3DUTPcjs.Gd.call(void 0, s,{roleId:e.id,email:r}),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Checking if API key already exists in organization "${e.organization.name}" with title: "${t}".`));let[g]=await _chunkRSN3DUTPcjs.od.call(void 0, s,[t]);if(g&&l)_chunkZUNVPK23cjs.a.info(_colors2.default.yellow(`Deleting existing API key in "${e.organization.name}" with title: "${t}".`)),await _chunkRSN3DUTPcjs.Id.call(void 0, s,g.id),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully deleted API key in "${e.organization.name}" with title: "${t}".`));else if(g)throw new Error(`API key already exists with title: "${t}"`);if(I){_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Creating API key in "${e.organization.name}" with title: "${t}".`));let{apiKey:K}=await _chunkRSN3DUTPcjs.Hd.call(void 0, s,{title:t,scopes:f});m.push({organizationName:e.organization.name,organizationId:e.organization.id,apiKey:K}),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully created API key in "${e.organization.name}" with title: "${t}".`))}else m.push({organizationName:e.organization.name,organizationId:e.organization.id,apiKey:""})}catch(g){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to create API key in organization "${e.organization.name}"! - ${g.message}`)),c.push({organizationName:e.organization.name,organizationId:e.organization.id,error:g.message})}}),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully created ${m.length} API key${m.length===1?"":"s"}`)),c.length>0&&_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to create ${c.length} API key${c.length===1?"":"s"}!`)),{errors:c,apiKeys:m}}var _typeutils = require('@transcend-io/type-utils');var _iots = require('io-ts'); var S = _interopRequireWildcard(_iots);var _fs = require('fs');function T(r){return r||(_chunkZUNVPK23cjs.a.error(_colors2.default.red("A Transcend API key must be provided. 
You can specify using --auth=$TRANSCEND_API_KEY")),process.exit(1)),_fs.existsSync.call(void 0, r)?_typeutils.decodeCodec.call(void 0, S.array(_chunkQEM6S2W7cjs.ha),_fs.readFileSync.call(void 0, r,"utf-8")):r}function Z(r,a,f=!1){if(!_fs.existsSync.call(void 0, r))return[];let t=_fs.readdirSync.call(void 0, r).filter(o=>a?a.filter(l=>o.endsWith(l)).length:!0).filter(o=>o.indexOf(".")>0);return f?t.map(o=>o.replace(/\.[^/.]+$/,"")):t}var _path = require('path');function te(r){return _fs.readdirSync.call(void 0, r).filter(a=>_fs.statSync.call(void 0, _path.join.call(void 0, r,a)).isDirectory())}exports.a = H; exports.b = T; exports.c = Z; exports.d = te;
+ //# sourceMappingURL=chunk-NKFAEQ2Q.cjs.map
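The chunk renamed above implements the cross-account API key generator (local name H, exported as a): it logs in with email and password, assumes each role in series, optionally deletes any existing key with the same title, and creates a fresh one per organization. Judging from its destructured options, an invocation would look roughly like this; the credentials and scope list are placeholders.

// Hypothetical invocation sketch (TypeScript); option names come from the
// destructured parameters visible in the chunk above.
const { apiKeys, errors } = await generateCrossAccountApiKeys({
  email: 'admin@example.com', // placeholder
  password: process.env.TRANSCEND_PASSWORD!, // placeholder
  scopes, // placeholder ScopeName[] from @transcend-io/privacy-types
  apiKeyTitle: 'CLI Key',
  deleteExistingApiKey: true, // replace any existing key with this title
  createNewApiKey: true, // set false to only delete old keys
});
// `apiKeys` collects one entry per organization role; `errors` records per-org failures.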
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-OKLX7XKW.cjs","../src/lib/api-keys/generateCrossAccountApiKeys.ts"],"names":["generateCrossAccountApiKeys","email","password","scopes","apiKeyTitle","parentOrganizationId","deleteExistingApiKey","createNewApiKey","transcendUrl","DEFAULT_TRANSCEND_API","client","buildTranscendGraphQLClientGeneric","logger","colors","roles","loginCookie","loginUser","filteredRoles","role","results","errors","mapSeries","assumeRole","apiKeyWithTitle","fetchAllApiKeys","deleteApiKey","apiKey","createApiKey","err"],"mappings":"AAAA,mfAAyC,wDAAkF,wDAAyC,wDAAyC,oCCAnL,gFAUP,MAoBnB,SAAsBA,CAAAA,CAA4B,CAChD,KAAA,CAAAC,CAAAA,CACA,QAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CAAAA,CACA,oBAAA,CAAAC,CAAAA,CACA,oBAAA,CAAAC,CAAAA,CAAuB,CAAA,CAAA,CACvB,eAAA,CAAAC,CAAAA,CAAkB,CAAA,CAAA,CAClB,YAAA,CAAAC,CAAAA,CAAeC,mBACjB,CAAA,CAsBG,CAED,IAAMC,CAAAA,CAAS,MAAMC,kCAAAA,CAAmCH,CAAc,CAAC,CAAC,CAAA,CAGxEI,mBAAAA,CAAO,IAAA,CAAKC,gBAAAA,CAAO,OAAA,CAAQ,sCAAsC,CAAC,CAAA,CAClE,GAAM,CAAE,KAAA,CAAAC,CAAAA,CAAO,WAAA,CAAAC,CAAY,CAAA,CAAI,MAAMC,kCAAAA,CAAUN,CAAQ,CAAE,KAAA,CAAAT,CAAAA,CAAO,QAAA,CAAAC,CAAS,CAAC,CAAA,CAC1EU,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,KAAA,CACL,CAAA,iCAAA,EAAoCC,CAAAA,CAAM,MAAM,CAAA,KAAA,EAC9CA,CAAAA,CAAM,MAAA,GAAW,CAAA,CAAI,EAAA,CAAK,GAC5B,CAAA,CAAA,CACF,CACF,CAAA,CAGA,IAAMG,CAAAA,CAAgBZ,CAAAA,CAClBS,CAAAA,CAAM,MAAA,CACHI,CAAAA,EACCA,CAAAA,CAAK,YAAA,CAAa,EAAA,GAAOb,CAAAA,EACzBa,CAAAA,CAAK,YAAA,CAAa,oBAAA,GAAyBb,CAC/C,CAAA,CACAS,CAAAA,CAGJJ,CAAAA,CAAO,UAAA,CAAW,CAChB,MAAA,CAAQK,CACV,CAAC,CAAA,CAGD,IAAMI,CAAAA,CAA0B,CAAC,CAAA,CAC3BC,CAAAA,CAAgC,CAAC,CAAA,CAGvC,OAAAR,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,OAAA,CACL,CAAA,gCAAA,EAAmCT,CAAW,CAAA,UAAA,EAAaD,CAAAA,CAAO,IAAA,CAChE,GACF,CAAC,CAAA,CAAA,CACH,CACF,CAAA,CAGA,MAAMkB,iCAAAA,CAAUJ,CAAe,MAAOC,CAAAA,EAAS,CAC7C,GAAI,CAEF,MAAMI,kCAAAA,CAAWZ,CAAQ,CAAE,MAAA,CAAQQ,CAAAA,CAAK,EAAA,CAAI,KAAA,CAAAjB,CAAM,CAAC,CAAA,CAGnDW,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,OAAA,CACL,CAAA,oDAAA,EAAuDK,CAAAA,CAAK,YAAA,CAAa,IAAI,CAAA,eAAA,EAAkBd,CAAW,CAAA,EAAA,CAC5G,CACF,CAAA,CAGA,GAAM,CAACmB,CAAe,CAAA,CAAI,MAAMC,kCAAAA,CAAgBd,CAAQ,CAACN,CAAW,CAAC,CAAA,CACrE,EAAA,CAAImB,CAAAA,EAAmBjB,CAAAA,CACrBM,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,MAAA,CACL,CAAA,8BAAA,EAAiCK,CAAAA,CAAK,YAAA,CAAa,IAAI,CAAA,eAAA,EAAkBd,CAAW,CAAA,EAAA,CACtF,CACF,CAAA,CACA,MAAMqB,kCAAAA,CAAaf,CAAQa,CAAAA,CAAgB,EAAE,CAAA,CAC7CX,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,KAAA,CACL,CAAA,iCAAA,EAAoCK,CAAAA,CAAK,YAAA,CAAa,IAAI,CAAA,eAAA,EAAkBd,CAAW,CAAA,EAAA,CACzF,CACF,CAAA,CAAA,KAAA,EAAA,CACSmB,CAAAA,CAET,MAAM,IAAI,KAAA,CAAM,CAAA,oCAAA,EAAuCnB,CAAW,CAAA,CAAA,CAAG,CAAA,CAIvE,EAAA,CAAIG,CAAAA,CAAiB,CACnBK,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,OAAA,CACL,CAAA,qBAAA,EAAwBK,CAAAA,CAAK,YAAA,CAAa,IAAI,CAAA,eAAA,EAAkBd,CAAW,CAAA,EAAA,CAC7E,CACF,CAAA,CACA,GAAM,CAAE,MAAA,CAAAsB,CAAO,CAAA,CAAI,MAAMC,kCAAAA,CAAajB,CAAQ,CAC5C,KAAA,CAAON,CAAAA,CACP,MAAA,CAAAD,CACF,CAAC,CAAA,CACDgB,CAAAA,CAAQ,IAAA,CAAK,CACX,gBAAA,CAAkBD,CAAAA,CAAK,YAAA,CAAa,IAAA,CACpC,cAAA,CAAgBA,CAAAA,CAAK,YAAA,CAAa,EAAA,CAClC,MAAA,CAAAQ,CACF,CAAC,CAAA,CACDd,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,KAAA,CACL,CAAA,iCAAA,EAAoCK,CAAAA,CAAK,YAAA,CAAa,IAAI,CAAA,eAAA,EAAkBd,CAAW,CAAA,EAAA,CACzF,CACF,CACF,CAAA,KAEEe,CAAAA,CAAQ,IAAA,CAAK,CACX,gBAAA,CAAkBD,CAAAA,CAAK,YAAA,CAAa,IAAA,CACpC,cAAA,CAAgBA,CAAAA,CAAK,YAAA,CAAa,EAAA,CAClC,MAAA,CAAQ,EACV,CAAC,CAEL,CAAA,KAAA,CAASU,CAAAA,CAAK,CACZhB,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,0CAAA,EAA6CK,CAAAA,CAAK,YAAA,CAAa,IAAI,CAAA,KAAA,EAAQU,CAAAA,CAAI,OAAO,CAAA,CAAA","file":"/home/runner/work/
cli/cli/dist/chunk-OKLX7XKW.cjs","sourcesContent":[null,"import { mapSeries } from 'bluebird';\nimport {\n buildTranscendGraphQLClientGeneric,\n loginUser,\n createApiKey,\n fetchAllApiKeys,\n deleteApiKey,\n assumeRole,\n} from '../graphql';\nimport { ScopeName } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { StoredApiKey } from '../../codecs';\nimport { logger } from '../../logger';\nimport { DEFAULT_TRANSCEND_API } from '../../constants';\n\nexport interface ApiKeyGenerateError {\n /** Name of instance */\n organizationName: string;\n /** Error */\n error: string;\n /** Organization ID API key is for */\n organizationId: string;\n}\n\n/**\n * Generate API keys across multiple transcend accounts\n *\n * @param options - Options\n * @returns Number of API keys created\n */\nexport async function generateCrossAccountApiKeys({\n email,\n password,\n scopes,\n apiKeyTitle,\n parentOrganizationId,\n deleteExistingApiKey = true,\n createNewApiKey = true,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** Email address of user generating API keys */\n email: string;\n /** Password of user generating API keys */\n password: string;\n /** Filter for organizations that match this parent organization ID */\n parentOrganizationId?: string;\n /** Title of the API create to create */\n apiKeyTitle: string;\n /** Title of the API create to create */\n scopes: ScopeName[];\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** When true delete existing API keys with that title, if set to false an API key exists with that title, an error is thrown */\n deleteExistingApiKey?: boolean;\n /** When true, generate new API keys, otherwise only will delete past API keys */\n createNewApiKey?: boolean;\n}): Promise<{\n /** Successfully generated */\n apiKeys: StoredApiKey[];\n /** Error results */\n errors: ApiKeyGenerateError[];\n}> {\n // Create GraphQL client\n const client = await buildTranscendGraphQLClientGeneric(transcendUrl, {});\n\n // Login the user\n logger.info(colors.magenta('Logging in using email and password.'));\n const { roles, loginCookie } = await loginUser(client, { email, password });\n logger.info(\n colors.green(\n `Successfully logged in and found ${roles.length} role${\n roles.length === 1 ? '' : 's'\n }!`,\n ),\n );\n\n // Filter down by parentOrganizationId\n const filteredRoles = parentOrganizationId\n ? 
roles.filter(\n (role) =>\n role.organization.id === parentOrganizationId ||\n role.organization.parentOrganizationId === parentOrganizationId,\n )\n : roles;\n\n // Save cookie to call route subsequent times\n client.setHeaders({\n Cookie: loginCookie,\n });\n\n // Save the resulting API keys\n const results: StoredApiKey[] = [];\n const errors: ApiKeyGenerateError[] = [];\n\n // Generate API keys\n logger.info(\n colors.magenta(\n `Generating API keys with title: ${apiKeyTitle}, scopes: ${scopes.join(\n ',',\n )}.`,\n ),\n );\n\n // Map over each role\n await mapSeries(filteredRoles, async (role) => {\n try {\n // Log into the other instance\n await assumeRole(client, { roleId: role.id, email });\n\n // Grab API keys with that title\n logger.info(\n colors.magenta(\n `Checking if API key already exists in organization \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n\n // Delete existing API key\n const [apiKeyWithTitle] = await fetchAllApiKeys(client, [apiKeyTitle]);\n if (apiKeyWithTitle && deleteExistingApiKey) {\n logger.info(\n colors.yellow(\n `Deleting existing API key in \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n await deleteApiKey(client, apiKeyWithTitle.id);\n logger.info(\n colors.green(\n `Successfully deleted API key in \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n } else if (apiKeyWithTitle) {\n // throw error if one exists but not configured to delete\n throw new Error(`API key already exists with title: \"${apiKeyTitle}\"`);\n }\n\n // Create the API key\n if (createNewApiKey) {\n logger.info(\n colors.magenta(\n `Creating API key in \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n const { apiKey } = await createApiKey(client, {\n title: apiKeyTitle,\n scopes,\n });\n results.push({\n organizationName: role.organization.name,\n organizationId: role.organization.id,\n apiKey,\n });\n logger.info(\n colors.green(\n `Successfully created API key in \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n } else {\n // Delete only\n results.push({\n organizationName: role.organization.name,\n organizationId: role.organization.id,\n apiKey: '',\n });\n }\n } catch (err) {\n logger.error(\n colors.red(\n `Failed to create API key in organization \"${role.organization.name}\"! - ${err.message}`,\n ),\n );\n errors.push({\n organizationName: role.organization.name,\n organizationId: role.organization.id,\n error: err.message,\n });\n }\n });\n logger.info(\n colors.green(\n `Successfully created ${results.length} API key${\n results.length === 1 ? '' : 's'\n }`,\n ),\n );\n\n if (errors.length > 0) {\n logger.error(\n colors.red(\n `Failed to create ${errors.length} API key${\n errors.length === 1 ? '' : 's'\n }!`,\n ),\n );\n }\n\n return { errors, apiKeys: results };\n}\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-NKFAEQ2Q.cjs","../src/lib/api-keys/generateCrossAccountApiKeys.ts"],"names":["generateCrossAccountApiKeys","email","password","scopes","apiKeyTitle","parentOrganizationId","deleteExistingApiKey","createNewApiKey","transcendUrl","DEFAULT_TRANSCEND_API","client","buildTranscendGraphQLClientGeneric","logger","colors","roles","loginCookie","loginUser","filteredRoles","role","results","errors","mapSeries","assumeRole","apiKeyWithTitle","fetchAllApiKeys","deleteApiKey","apiKey","createApiKey","err"],"mappings":"AAAA,mfAAyC,wDAAkF,wDAAyC,wDAAyC,oCCAnL,gFAUP,MAoBnB,SAAsBA,CAAAA,CAA4B,CAChD,KAAA,CAAAC,CAAAA,CACA,QAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CAAAA,CACA,oBAAA,CAAAC,CAAAA,CACA,oBAAA,CAAAC,CAAAA,CAAuB,CAAA,CAAA,CACvB,eAAA,CAAAC,CAAAA,CAAkB,CAAA,CAAA,CAClB,YAAA,CAAAC,CAAAA,CAAeC,mBACjB,CAAA,CAsBG,CAED,IAAMC,CAAAA,CAAS,MAAMC,kCAAAA,CAAmCH,CAAc,CAAC,CAAC,CAAA,CAGxEI,mBAAAA,CAAO,IAAA,CAAKC,gBAAAA,CAAO,OAAA,CAAQ,sCAAsC,CAAC,CAAA,CAClE,GAAM,CAAE,KAAA,CAAAC,CAAAA,CAAO,WAAA,CAAAC,CAAY,CAAA,CAAI,MAAMC,kCAAAA,CAAUN,CAAQ,CAAE,KAAA,CAAAT,CAAAA,CAAO,QAAA,CAAAC,CAAS,CAAC,CAAA,CAC1EU,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,KAAA,CACL,CAAA,iCAAA,EAAoCC,CAAAA,CAAM,MAAM,CAAA,KAAA,EAC9CA,CAAAA,CAAM,MAAA,GAAW,CAAA,CAAI,EAAA,CAAK,GAC5B,CAAA,CAAA,CACF,CACF,CAAA,CAGA,IAAMG,CAAAA,CAAgBZ,CAAAA,CAClBS,CAAAA,CAAM,MAAA,CACHI,CAAAA,EACCA,CAAAA,CAAK,YAAA,CAAa,EAAA,GAAOb,CAAAA,EACzBa,CAAAA,CAAK,YAAA,CAAa,oBAAA,GAAyBb,CAC/C,CAAA,CACAS,CAAAA,CAGJJ,CAAAA,CAAO,UAAA,CAAW,CAChB,MAAA,CAAQK,CACV,CAAC,CAAA,CAGD,IAAMI,CAAAA,CAA0B,CAAC,CAAA,CAC3BC,CAAAA,CAAgC,CAAC,CAAA,CAGvC,OAAAR,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,OAAA,CACL,CAAA,gCAAA,EAAmCT,CAAW,CAAA,UAAA,EAAaD,CAAAA,CAAO,IAAA,CAChE,GACF,CAAC,CAAA,CAAA,CACH,CACF,CAAA,CAGA,MAAMkB,iCAAAA,CAAUJ,CAAe,MAAOC,CAAAA,EAAS,CAC7C,GAAI,CAEF,MAAMI,kCAAAA,CAAWZ,CAAQ,CAAE,MAAA,CAAQQ,CAAAA,CAAK,EAAA,CAAI,KAAA,CAAAjB,CAAM,CAAC,CAAA,CAGnDW,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,OAAA,CACL,CAAA,oDAAA,EAAuDK,CAAAA,CAAK,YAAA,CAAa,IAAI,CAAA,eAAA,EAAkBd,CAAW,CAAA,EAAA,CAC5G,CACF,CAAA,CAGA,GAAM,CAACmB,CAAe,CAAA,CAAI,MAAMC,kCAAAA,CAAgBd,CAAQ,CAACN,CAAW,CAAC,CAAA,CACrE,EAAA,CAAImB,CAAAA,EAAmBjB,CAAAA,CACrBM,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,MAAA,CACL,CAAA,8BAAA,EAAiCK,CAAAA,CAAK,YAAA,CAAa,IAAI,CAAA,eAAA,EAAkBd,CAAW,CAAA,EAAA,CACtF,CACF,CAAA,CACA,MAAMqB,kCAAAA,CAAaf,CAAQa,CAAAA,CAAgB,EAAE,CAAA,CAC7CX,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,KAAA,CACL,CAAA,iCAAA,EAAoCK,CAAAA,CAAK,YAAA,CAAa,IAAI,CAAA,eAAA,EAAkBd,CAAW,CAAA,EAAA,CACzF,CACF,CAAA,CAAA,KAAA,EAAA,CACSmB,CAAAA,CAET,MAAM,IAAI,KAAA,CAAM,CAAA,oCAAA,EAAuCnB,CAAW,CAAA,CAAA,CAAG,CAAA,CAIvE,EAAA,CAAIG,CAAAA,CAAiB,CACnBK,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,OAAA,CACL,CAAA,qBAAA,EAAwBK,CAAAA,CAAK,YAAA,CAAa,IAAI,CAAA,eAAA,EAAkBd,CAAW,CAAA,EAAA,CAC7E,CACF,CAAA,CACA,GAAM,CAAE,MAAA,CAAAsB,CAAO,CAAA,CAAI,MAAMC,kCAAAA,CAAajB,CAAQ,CAC5C,KAAA,CAAON,CAAAA,CACP,MAAA,CAAAD,CACF,CAAC,CAAA,CACDgB,CAAAA,CAAQ,IAAA,CAAK,CACX,gBAAA,CAAkBD,CAAAA,CAAK,YAAA,CAAa,IAAA,CACpC,cAAA,CAAgBA,CAAAA,CAAK,YAAA,CAAa,EAAA,CAClC,MAAA,CAAAQ,CACF,CAAC,CAAA,CACDd,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,KAAA,CACL,CAAA,iCAAA,EAAoCK,CAAAA,CAAK,YAAA,CAAa,IAAI,CAAA,eAAA,EAAkBd,CAAW,CAAA,EAAA,CACzF,CACF,CACF,CAAA,KAEEe,CAAAA,CAAQ,IAAA,CAAK,CACX,gBAAA,CAAkBD,CAAAA,CAAK,YAAA,CAAa,IAAA,CACpC,cAAA,CAAgBA,CAAAA,CAAK,YAAA,CAAa,EAAA,CAClC,MAAA,CAAQ,EACV,CAAC,CAEL,CAAA,KAAA,CAASU,CAAAA,CAAK,CACZhB,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,0CAAA,EAA6CK,CAAAA,CAAK,YAAA,CAAa,IAAI,CAAA,KAAA,EAAQU,CAAAA,CAAI,OAAO,CAAA,CAAA","file":"/home/runner/work/
cli/cli/dist/chunk-NKFAEQ2Q.cjs","sourcesContent":[null,"import { mapSeries } from 'bluebird';\nimport {\n buildTranscendGraphQLClientGeneric,\n loginUser,\n createApiKey,\n fetchAllApiKeys,\n deleteApiKey,\n assumeRole,\n} from '../graphql';\nimport { ScopeName } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { StoredApiKey } from '../../codecs';\nimport { logger } from '../../logger';\nimport { DEFAULT_TRANSCEND_API } from '../../constants';\n\nexport interface ApiKeyGenerateError {\n /** Name of instance */\n organizationName: string;\n /** Error */\n error: string;\n /** Organization ID API key is for */\n organizationId: string;\n}\n\n/**\n * Generate API keys across multiple transcend accounts\n *\n * @param options - Options\n * @returns Number of API keys created\n */\nexport async function generateCrossAccountApiKeys({\n email,\n password,\n scopes,\n apiKeyTitle,\n parentOrganizationId,\n deleteExistingApiKey = true,\n createNewApiKey = true,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** Email address of user generating API keys */\n email: string;\n /** Password of user generating API keys */\n password: string;\n /** Filter for organizations that match this parent organization ID */\n parentOrganizationId?: string;\n /** Title of the API create to create */\n apiKeyTitle: string;\n /** Title of the API create to create */\n scopes: ScopeName[];\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** When true delete existing API keys with that title, if set to false an API key exists with that title, an error is thrown */\n deleteExistingApiKey?: boolean;\n /** When true, generate new API keys, otherwise only will delete past API keys */\n createNewApiKey?: boolean;\n}): Promise<{\n /** Successfully generated */\n apiKeys: StoredApiKey[];\n /** Error results */\n errors: ApiKeyGenerateError[];\n}> {\n // Create GraphQL client\n const client = await buildTranscendGraphQLClientGeneric(transcendUrl, {});\n\n // Login the user\n logger.info(colors.magenta('Logging in using email and password.'));\n const { roles, loginCookie } = await loginUser(client, { email, password });\n logger.info(\n colors.green(\n `Successfully logged in and found ${roles.length} role${\n roles.length === 1 ? '' : 's'\n }!`,\n ),\n );\n\n // Filter down by parentOrganizationId\n const filteredRoles = parentOrganizationId\n ? 
roles.filter(\n (role) =>\n role.organization.id === parentOrganizationId ||\n role.organization.parentOrganizationId === parentOrganizationId,\n )\n : roles;\n\n // Save cookie to call route subsequent times\n client.setHeaders({\n Cookie: loginCookie,\n });\n\n // Save the resulting API keys\n const results: StoredApiKey[] = [];\n const errors: ApiKeyGenerateError[] = [];\n\n // Generate API keys\n logger.info(\n colors.magenta(\n `Generating API keys with title: ${apiKeyTitle}, scopes: ${scopes.join(\n ',',\n )}.`,\n ),\n );\n\n // Map over each role\n await mapSeries(filteredRoles, async (role) => {\n try {\n // Log into the other instance\n await assumeRole(client, { roleId: role.id, email });\n\n // Grab API keys with that title\n logger.info(\n colors.magenta(\n `Checking if API key already exists in organization \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n\n // Delete existing API key\n const [apiKeyWithTitle] = await fetchAllApiKeys(client, [apiKeyTitle]);\n if (apiKeyWithTitle && deleteExistingApiKey) {\n logger.info(\n colors.yellow(\n `Deleting existing API key in \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n await deleteApiKey(client, apiKeyWithTitle.id);\n logger.info(\n colors.green(\n `Successfully deleted API key in \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n } else if (apiKeyWithTitle) {\n // throw error if one exists but not configured to delete\n throw new Error(`API key already exists with title: \"${apiKeyTitle}\"`);\n }\n\n // Create the API key\n if (createNewApiKey) {\n logger.info(\n colors.magenta(\n `Creating API key in \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n const { apiKey } = await createApiKey(client, {\n title: apiKeyTitle,\n scopes,\n });\n results.push({\n organizationName: role.organization.name,\n organizationId: role.organization.id,\n apiKey,\n });\n logger.info(\n colors.green(\n `Successfully created API key in \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n } else {\n // Delete only\n results.push({\n organizationName: role.organization.name,\n organizationId: role.organization.id,\n apiKey: '',\n });\n }\n } catch (err) {\n logger.error(\n colors.red(\n `Failed to create API key in organization \"${role.organization.name}\"! - ${err.message}`,\n ),\n );\n errors.push({\n organizationName: role.organization.name,\n organizationId: role.organization.id,\n error: err.message,\n });\n }\n });\n logger.info(\n colors.green(\n `Successfully created ${results.length} API key${\n results.length === 1 ? '' : 's'\n }`,\n ),\n );\n\n if (errors.length > 0) {\n logger.error(\n colors.red(\n `Failed to create ${errors.length} API key${\n errors.length === 1 ? '' : 's'\n }!`,\n ),\n );\n }\n\n return { errors, apiKeys: results };\n}\n"]}
@@ -1,4 +1,4 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunk7O5DWH3Mcjs = require('./chunk-7O5DWH3M.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkMFIMY5RYcjs = require('./chunk-MFIMY5RY.cjs');var _privacytypes = require('@transcend-io/privacy-types');var _cliprogress = require('cli-progress'); var _cliprogress2 = _interopRequireDefault(_cliprogress);var _graphqlrequest = require('graphql-request');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _bluebird = require('bluebird');async function q(u,{dataSiloIds:e=[],includeGuessedCategories:l,includeAttributes:a,parentCategories:c=[],subCategories:t=[],pageSize:p=1e3}={}){let n=[],m=new Date().getTime(),d=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),s={...c.length>0?{category:c}:{},...t.length>0?{subCategoryIds:t}:{},...c.length+t.length>0&&!l?{status:_privacytypes.SubDataPointDataSubCategoryGuessStatus.Approved}:{},...e.length>0?{dataSilos:e}:{}},{subDataPoints:{totalCount:o}}=await _chunk7O5DWH3Mcjs.Wf.call(void 0, u,_chunk7O5DWH3Mcjs.d,{filterBy:s});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("[Step 1/3] Pulling in all subdatapoints")),d.start(o,0);let y=0,D=!1,r,b=0;do try{let{subDataPoints:{nodes:P}}=await _chunk7O5DWH3Mcjs.Wf.call(void 0, u,_graphqlrequest.gql`
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkRSN3DUTPcjs = require('./chunk-RSN3DUTP.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkE57K4DGXcjs = require('./chunk-E57K4DGX.cjs');var _privacytypes = require('@transcend-io/privacy-types');var _cliprogress = require('cli-progress'); var _cliprogress2 = _interopRequireDefault(_cliprogress);var _graphqlrequest = require('graphql-request');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _bluebird = require('bluebird');async function q(u,{dataSiloIds:e=[],includeGuessedCategories:l,includeAttributes:a,parentCategories:c=[],subCategories:t=[],pageSize:p=1e3}={}){let n=[],m=new Date().getTime(),d=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),s={...c.length>0?{category:c}:{},...t.length>0?{subCategoryIds:t}:{},...c.length+t.length>0&&!l?{status:_privacytypes.SubDataPointDataSubCategoryGuessStatus.Approved}:{},...e.length>0?{dataSilos:e}:{}},{subDataPoints:{totalCount:o}}=await _chunkRSN3DUTPcjs.Xf.call(void 0, u,_chunkRSN3DUTPcjs.d,{filterBy:s});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("[Step 1/3] Pulling in all subdatapoints")),d.start(o,0);let y=0,D=!1,r,b=0;do try{let{subDataPoints:{nodes:P}}=await _chunkRSN3DUTPcjs.Xf.call(void 0, u,_graphqlrequest.gql`
  query TranscendCliSubDataPointCsvExport(
  $filterBy: SubDataPointFiltersInput
  $first: Int!
@@ -41,7 +41,7 @@
  }
  }
  }
- `,{first:p,offset:b,filterBy:{...s}});r=_optionalChain([P, 'access', _2 => _2[P.length-1], 'optionalAccess', _3 => _3.id]),n.push(...P),D=P.length===p,y+=P.length,b+=P.length,d.update(y)}catch(P){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${r} and offset ${b}`)),P}while(D);d.stop();let C=new Date().getTime()-m,g=_chunkMFIMY5RYcjs.g.call(void 0, n,"name");return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${g.length} subdatapoints in ${C/1e3} seconds!`)),g}async function F(u,{dataPointIds:e=[],pageSize:l=100}){let a=[],c=new Date().getTime(),t=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`[Step 2/3] Fetching metadata for ${e.length} datapoints`));let p=_chunkMFIMY5RYcjs.b.call(void 0, e,l);t.start(e.length,0);let n=0;await _bluebird.mapSeries.call(void 0, p,async s=>{try{let{dataPoints:{nodes:o}}=await _chunk7O5DWH3Mcjs.Wf.call(void 0, u,_chunk7O5DWH3Mcjs.g,{first:l,filterBy:{ids:s}});a.push(...o),n+=s.length,t.update(n)}catch(o){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for IDs ${s.join(", ")}`)),o}}),t.stop();let d=new Date().getTime()-c;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${a.length} dataPoints in ${d/1e3} seconds!`)),a}async function Q(u,{dataSiloIds:e=[],pageSize:l=100}){let a=[],c=new Date().getTime(),t=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`[Step 3/3] Fetching metadata for ${e.length} data silos`));let p=_chunkMFIMY5RYcjs.b.call(void 0, e,l);t.start(e.length,0);let n=0;await _bluebird.mapSeries.call(void 0, p,async s=>{try{let{dataSilos:{nodes:o}}=await _chunk7O5DWH3Mcjs.Wf.call(void 0, u,_chunk7O5DWH3Mcjs.j,{first:l,filterBy:{ids:s}});a.push(...o),n+=s.length,t.update(n)}catch(o){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching data silos for IDs ${s.join(", ")}`)),o}}),t.stop();let d=new Date().getTime()-c;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${a.length} data silos in ${d/1e3} seconds!`)),a}async function Y(u,{dataSiloIds:e=[],includeGuessedCategories:l,includeAttributes:a,parentCategories:c=[],subCategories:t=[],pageSize:p=1e3}={}){let n=await q(u,{dataSiloIds:e,includeGuessedCategories:l,includeAttributes:a,parentCategories:c,subCategories:t,pageSize:p}),m=_chunkMFIMY5RYcjs.j.call(void 0, n.map(r=>r.dataPointId)),d=await F(u,{dataPointIds:m}),s=_chunkMFIMY5RYcjs.e.call(void 0, d,"id"),o=_chunkMFIMY5RYcjs.j.call(void 0, n.map(r=>r.dataSiloId)),y=await Q(u,{dataSiloIds:o}),D=_chunkMFIMY5RYcjs.e.call(void 0, y,"id");return n.map(r=>({...r,dataPoint:s[r.dataPointId],dataSilo:D[r.dataSiloId]}))}async function nt(u,{dataSiloIds:e=[],status:l,subCategories:a=[],includeEncryptedSnippets:c,pageSize:t=100}={}){let p=[],n=new Date().getTime(),m=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),d={...a.length>0?{subCategoryIds:a}:{},...l?{status:l}:{},...e.length>0?{dataSilos:e}:{}},{unstructuredSubDataPointRecommendations:{totalCount:s}}=await _chunk7O5DWH3Mcjs.Wf.call(void 0, u,_chunk7O5DWH3Mcjs.h,{filterBy:d});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("[Step 1/3] Pulling in all subdatapoints")),m.start(s,0);let o=0,y=!1,D,r=0;do try{let{unstructuredSubDataPointRecommendations:{nodes:g}}=await _chunk7O5DWH3Mcjs.Wf.call(void 0, u,_graphqlrequest.gql`
+ `,{first:p,offset:b,filterBy:{...s}});r=_optionalChain([P, 'access', _2 => _2[P.length-1], 'optionalAccess', _3 => _3.id]),n.push(...P),D=P.length===p,y+=P.length,b+=P.length,d.update(y)}catch(P){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${r} and offset ${b}`)),P}while(D);d.stop();let C=new Date().getTime()-m,g=_chunkE57K4DGXcjs.g.call(void 0, n,"name");return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${g.length} subdatapoints in ${C/1e3} seconds!`)),g}async function F(u,{dataPointIds:e=[],pageSize:l=100}){let a=[],c=new Date().getTime(),t=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`[Step 2/3] Fetching metadata for ${e.length} datapoints`));let p=_chunkE57K4DGXcjs.b.call(void 0, e,l);t.start(e.length,0);let n=0;await _bluebird.mapSeries.call(void 0, p,async s=>{try{let{dataPoints:{nodes:o}}=await _chunkRSN3DUTPcjs.Xf.call(void 0, u,_chunkRSN3DUTPcjs.g,{first:l,filterBy:{ids:s}});a.push(...o),n+=s.length,t.update(n)}catch(o){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for IDs ${s.join(", ")}`)),o}}),t.stop();let d=new Date().getTime()-c;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${a.length} dataPoints in ${d/1e3} seconds!`)),a}async function Q(u,{dataSiloIds:e=[],pageSize:l=100}){let a=[],c=new Date().getTime(),t=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`[Step 3/3] Fetching metadata for ${e.length} data silos`));let p=_chunkE57K4DGXcjs.b.call(void 0, e,l);t.start(e.length,0);let n=0;await _bluebird.mapSeries.call(void 0, p,async s=>{try{let{dataSilos:{nodes:o}}=await _chunkRSN3DUTPcjs.Xf.call(void 0, u,_chunkRSN3DUTPcjs.j,{first:l,filterBy:{ids:s}});a.push(...o),n+=s.length,t.update(n)}catch(o){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching data silos for IDs ${s.join(", ")}`)),o}}),t.stop();let d=new Date().getTime()-c;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${a.length} data silos in ${d/1e3} seconds!`)),a}async function Y(u,{dataSiloIds:e=[],includeGuessedCategories:l,includeAttributes:a,parentCategories:c=[],subCategories:t=[],pageSize:p=1e3}={}){let n=await q(u,{dataSiloIds:e,includeGuessedCategories:l,includeAttributes:a,parentCategories:c,subCategories:t,pageSize:p}),m=_chunkE57K4DGXcjs.j.call(void 0, n.map(r=>r.dataPointId)),d=await F(u,{dataPointIds:m}),s=_chunkE57K4DGXcjs.e.call(void 0, d,"id"),o=_chunkE57K4DGXcjs.j.call(void 0, n.map(r=>r.dataSiloId)),y=await Q(u,{dataSiloIds:o}),D=_chunkE57K4DGXcjs.e.call(void 0, y,"id");return n.map(r=>({...r,dataPoint:s[r.dataPointId],dataSilo:D[r.dataSiloId]}))}async function nt(u,{dataSiloIds:e=[],status:l,subCategories:a=[],includeEncryptedSnippets:c,pageSize:t=100}={}){let p=[],n=new Date().getTime(),m=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),d={...a.length>0?{subCategoryIds:a}:{},...l?{status:l}:{},...e.length>0?{dataSilos:e}:{}},{unstructuredSubDataPointRecommendations:{totalCount:s}}=await _chunkRSN3DUTPcjs.Xf.call(void 0, u,_chunkRSN3DUTPcjs.h,{filterBy:d});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("[Step 1/3] Pulling in all subdatapoints")),m.start(s,0);let o=0,y=!1,D,r=0;do try{let{unstructuredSubDataPointRecommendations:{nodes:g}}=await _chunkRSN3DUTPcjs.Xf.call(void 0, u,_graphqlrequest.gql`
  query TranscendCliUnstructuredSubDataPointRecommendationCsvExport(
  $filterBy: UnstructuredSubDataPointRecommendationsFilterInput
  $first: Int!
@@ -71,5 +71,5 @@
  }
  }
  }
- `,{first:t,offset:r,filterBy:{...d}});D=_optionalChain([g, 'access', _4 => _4[g.length-1], 'optionalAccess', _5 => _5.id]),p.push(...g),y=g.length===t,o+=g.length,r+=g.length,m.update(o)}catch(g){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${D} and offset ${r}`)),g}while(y);m.stop();let T=new Date().getTime()-n,C=_chunkMFIMY5RYcjs.g.call(void 0, p,"name");return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${C.length} subdatapoints in ${T/1e3} seconds!`)),C}exports.a = Y; exports.b = nt;
- //# sourceMappingURL=chunk-LPUZGMB6.cjs.map
+ `,{first:t,offset:r,filterBy:{...d}});D=_optionalChain([g, 'access', _4 => _4[g.length-1], 'optionalAccess', _5 => _5.id]),p.push(...g),y=g.length===t,o+=g.length,r+=g.length,m.update(o)}catch(g){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${D} and offset ${r}`)),g}while(y);m.stop();let T=new Date().getTime()-n,C=_chunkE57K4DGXcjs.g.call(void 0, p,"name");return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${C.length} subdatapoints in ${T/1e3} seconds!`)),C}exports.a = Y; exports.b = nt;
+ //# sourceMappingURL=chunk-OCNPZWYT.cjs.map
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-LPUZGMB6.cjs","../src/lib/data-inventory/pullAllDatapoints.ts","../src/lib/data-inventory/pullUnstructuredSubDataPointRecommendations.ts"],"names":["pullSubDatapoints","client","dataSiloIds","includeGuessedCategories","includeAttributes","parentCategories","subCategories","pageSize","subDataPoints","t0","progressBar","cliProgress","filterBy","SubDataPointDataSubCategoryGuessStatus","totalCount","makeGraphQLRequest","SUB_DATA_POINTS_COUNT","logger","colors","total","shouldContinue","cursor","offset","nodes","gql","err"],"mappings":"AAAA,quBAAqE,wDAAyC,wDAA8D,2DCKrK,qGACiB,iDACJ,gFACD,oCAWO,MAuE1B,SAAeA,CAAAA,CACbC,CAAAA,CACA,CACE,WAAA,CAAAC,CAAAA,CAAc,CAAC,CAAA,CACf,wBAAA,CAAAC,CAAAA,CACA,iBAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CAAmB,CAAC,CAAA,CACpB,aAAA,CAAAC,CAAAA,CAAgB,CAAC,CAAA,CACjB,QAAA,CAAAC,CAAAA,CAAW,GACb,CAAA,CAGI,CAAC,CAAA,CAC8B,CACnC,IAAMC,CAAAA,CAA0C,CAAC,CAAA,CAG3CC,CAAAA,CAAK,IAAI,IAAA,CAAK,CAAA,CAAE,OAAA,CAAQ,CAAA,CAGxBC,CAAAA,CAAc,IAAIC,qBAAAA,CAAY,SAAA,CAClC,CAAC,CAAA,CACDA,qBAAAA,CAAY,OAAA,CAAQ,cACtB,CAAA,CAGMC,CAAAA,CAAW,CACf,GAAIP,CAAAA,CAAiB,MAAA,CAAS,CAAA,CAAI,CAAE,QAAA,CAAUA,CAAiB,CAAA,CAAI,CAAC,CAAA,CACpE,GAAIC,CAAAA,CAAc,MAAA,CAAS,CAAA,CAAI,CAAE,cAAA,CAAgBA,CAAc,CAAA,CAAI,CAAC,CAAA,CAEpE,GAAID,CAAAA,CAAiB,MAAA,CAASC,CAAAA,CAAc,MAAA,CAAS,CAAA,EACrD,CAACH,CAAAA,CAEG,CAAE,MAAA,CAAQU,oDAAAA,CAAuC,QAAS,CAAA,CAC1D,CAAC,CAAA,CACL,GAAIX,CAAAA,CAAY,MAAA,CAAS,CAAA,CAAI,CAAE,SAAA,CAAWA,CAAY,CAAA,CAAI,CAAC,CAC7D,CAAA,CAGM,CACJ,aAAA,CAAe,CAAE,UAAA,CAAAY,CAAW,CAC9B,CAAA,CAAI,MAAMC,kCAAAA,CAMPd,CAAQe,mBAAAA,CAAuB,CAChC,QAAA,CAAAJ,CACF,CAAC,CAAA,CAEDK,mBAAAA,CAAO,IAAA,CAAKC,gBAAAA,CAAO,OAAA,CAAQ,yCAAyC,CAAC,CAAA,CAErER,CAAAA,CAAY,KAAA,CAAMI,CAAAA,CAAY,CAAC,CAAA,CAC/B,IAAIK,CAAAA,CAAQ,CAAA,CACRC,CAAAA,CAAiB,CAAA,CAAA,CACjBC,CAAAA,CACAC,CAAAA,CAAS,CAAA,CACb,GACE,GAAI,CACF,GAAM,CACJ,aAAA,CAAe,CAAE,KAAA,CAAAC,CAAM,CACzB,CAAA,CAAI,MAAMR,kCAAAA,CAORd,CACAuB,mBAAAA,CAAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAA,EA2BUrB,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAQA,EACN,CAAA;AAAA,gBAAA,EAEEC,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAMA,EACN,CAAA;AAAA;AAAA;AAAA;AAAA,QAAA,CAAA,CAKR,CACE,KAAA,CAAOG,CAAAA,CACP,MAAA,CAAAe,CAAAA,CACA,QAAA,CAAU,CACR,GAAGV,CAGL,CACF,CACF,CAAA,CAEAS,CAAAA,iBAASE,CAAAA,qBAAMA,CAAAA,CAAM,MAAA,CAAS,CAAC,CAAA,6BAAG,IAAA,CAClCf,CAAAA,CAAc,IAAA,CAAK,GAAGe,CAAK,CAAA,CAC3BH,CAAAA,CAAiBG,CAAAA,CAAM,MAAA,GAAWhB,CAAAA,CAClCY,CAAAA,EAASI,CAAAA,CAAM,MAAA,CACfD,CAAAA,EAAUC,CAAAA,CAAM,MAAA,CAChBb,CAAAA,CAAY,MAAA,CAAOS,CAAK,CAC1B,CAAA,KAAA,CAASM,CAAAA,CAAK,CACZ,MAAAR,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,2CAAA,EAA8CG,CAAM,CAAA,YAAA,EAAeC,CAAM,CAAA,CAAA;AC7G3E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBgD,gBAAA;AACU,gBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA+BiB,QAAA","file":"/home/runner/work/cli/cli/dist/chunk-LPUZGMB6.cjs","sourcesContent":[null,"/* eslint-disable max-lines */\nimport { keyBy, uniq, chunk, sortBy } from 'lodash-es';\nimport {\n type DataCategoryType,\n SubDataPointDataSubCategoryGuessStatus,\n} from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport { gql } from 'graphql-request';\nimport colors from 'colors';\nimport type { GraphQLClient } from 'graphql-request';\nimport {\n DATAPOINT_EXPORT,\n DATA_SILO_EXPORT,\n type DataSiloAttributeValue,\n SUB_DATA_POINTS_COUNT,\n 
makeGraphQLRequest,\n} from '../graphql';\nimport { logger } from '../../logger';\nimport type { DataCategoryInput, ProcessingPurposeInput } from '../../codecs';\nimport { mapSeries } from 'bluebird';\n\nexport interface DataSiloCsvPreview {\n /** ID of dataSilo */\n id: string;\n /** Name of dataSilo */\n title: string;\n}\n\nexport interface DataPointCsvPreview {\n /** ID of dataPoint */\n id: string;\n /** The path to this data point */\n path: string[];\n /** Description */\n description: {\n /** Default message */\n defaultMessage: string;\n };\n /** Name */\n name: string;\n}\n\nexport interface SubDataPointCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Name (or key) of the subdatapoint */\n name: string;\n /** The description */\n description?: string;\n /** Personal data category */\n categories: DataCategoryInput[];\n /** Data point ID */\n dataPointId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** The processing purpose for this sub datapoint */\n purposes: ProcessingPurposeInput[];\n /** Attribute attached to subdatapoint */\n attributeValues?: DataSiloAttributeValue[];\n /** Data category guesses that are output by the classifier */\n pendingCategoryGuesses?: {\n /** Data category being guessed */\n category: DataCategoryInput;\n /** Status of guess */\n status: SubDataPointDataSubCategoryGuessStatus;\n /** classifier version that produced the guess */\n classifierVersion: number;\n }[];\n}\n\nexport interface DatapointFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Whether to include guessed categories, defaults to only approved categories */\n includeGuessedCategories?: boolean;\n /** Whether or not to include attributes */\n includeAttributes?: boolean;\n /** Parent categories to filter down for */\n parentCategories?: DataCategoryType[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n}\n\n/**\n * Pull subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The subdatapoints\n */\nasync function pullSubDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<SubDataPointCsvPreview[]> {\n const subDataPoints: SubDataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(parentCategories.length > 0 ? { category: parentCategories } : {}),\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n // if parentCategories or subCategories and not includeGuessedCategories\n ...(parentCategories.length + subCategories.length > 0 &&\n !includeGuessedCategories\n ? // then only show data points with approved data categories\n { status: SubDataPointDataSubCategoryGuessStatus.Approved }\n : {}),\n ...(dataSiloIds.length > 0 ? 
{ dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n subDataPoints: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** Count */\n totalCount: number;\n };\n }>(client, SUB_DATA_POINTS_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n subDataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** List of matches */\n nodes: SubDataPointCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliSubDataPointCsvExport(\n $filterBy: SubDataPointFiltersInput\n $first: Int!\n $offset: Int!\n ) {\n subDataPoints(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n name\n description\n dataPointId\n dataSiloId\n purposes {\n name\n purpose\n }\n categories {\n name\n category\n }\n ${\n includeGuessedCategories\n ? `pendingCategoryGuesses {\n category {\n name\n category\n }\n status\n classifierVersion\n }`\n : ''\n }\n ${\n includeAttributes\n ? `attributeValues {\n attributeKey {\n name\n }\n name\n }`\n : ''\n }\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n // TODO: https://transcend.height.app/T-40484 - add cursor support\n // ...(cursor ? { cursor: { id: cursor } } : {}),\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n subDataPoints.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(subDataPoints, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n\n/**\n * Pull datapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints\n */\nasync function pullDatapoints(\n client: GraphQLClient,\n {\n dataPointIds = [],\n pageSize = 100,\n }: {\n /** IDs of data points to filter down */\n dataPointIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataPointCsvPreview[]> {\n const dataPoints: DataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 2/3] Fetching metadata for ${dataPointIds.length} datapoints`,\n ),\n );\n\n // Group by 100\n const dataPointsGrouped = chunk(dataPointIds, pageSize);\n\n progressBar.start(dataPointIds.length, 0);\n let total = 0;\n await mapSeries(dataPointsGrouped, async (dataPointIdsGroup) => {\n try {\n const {\n dataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataPoints: {\n /** List of matches */\n nodes: DataPointCsvPreview[];\n };\n }>(client, DATAPOINT_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataPointIdsGroup,\n },\n });\n\n dataPoints.push(...nodes);\n 
total += dataPointIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for IDs ${dataPointIdsGroup.join(\n ', ',\n )}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataPoints.length} dataPoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataPoints;\n}\n\n/**\n * Pull data silo information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The data silos\n */\nasync function pullDataSilos(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n pageSize = 100,\n }: {\n /** IDs of data silos to filter down */\n dataSiloIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataSiloCsvPreview[]> {\n const dataSilos: DataSiloCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 3/3] Fetching metadata for ${dataSiloIds.length} data silos`,\n ),\n );\n\n // Group by 100\n const dataSilosGrouped = chunk(dataSiloIds, pageSize);\n\n progressBar.start(dataSiloIds.length, 0);\n let total = 0;\n await mapSeries(dataSilosGrouped, async (dataSiloIdsGroup) => {\n try {\n const {\n dataSilos: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataSilos: {\n /** List of matches */\n nodes: DataSiloCsvPreview[];\n };\n }>(client, DATA_SILO_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataSiloIdsGroup,\n },\n });\n\n dataSilos.push(...nodes);\n total += dataSiloIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching data silos for IDs ${dataSiloIdsGroup.join(', ')}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataSilos.length} data silos in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataSilos;\n}\n\n/**\n * Pull all datapoints from the data inventory.\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints and data silos\n */\nexport async function pullAllDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<\n (SubDataPointCsvPreview & {\n /** Data point information */\n dataPoint: DataPointCsvPreview;\n /** Data silo information */\n dataSilo: DataSiloCsvPreview;\n })[]\n> {\n // Subdatapoint information\n const subDatapoints = await pullSubDatapoints(client, {\n dataSiloIds,\n includeGuessedCategories,\n includeAttributes,\n parentCategories,\n subCategories,\n pageSize,\n });\n\n // The datapoint ids to grab\n const dataPointIds = uniq(subDatapoints.map((point) => point.dataPointId));\n const dataPoints = await pullDatapoints(client, {\n dataPointIds,\n });\n const dataPointById = keyBy(dataPoints, 'id');\n\n // The data silo IDs to grab\n const allDataSiloIds = uniq(subDatapoints.map((point) => point.dataSiloId));\n const dataSilos = await pullDataSilos(client, {\n 
dataSiloIds: allDataSiloIds,\n });\n const dataSiloById = keyBy(dataSilos, 'id');\n\n return subDatapoints.map((subDataPoint) => ({\n ...subDataPoint,\n dataPoint: dataPointById[subDataPoint.dataPointId],\n dataSilo: dataSiloById[subDataPoint.dataSiloId],\n }));\n}\n/* eslint-enable max-lines */\n","import type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport { gql, type GraphQLClient } from 'graphql-request';\nimport { sortBy } from 'lodash-es';\nimport type { DataCategoryInput } from '../../codecs';\nimport { ENTRY_COUNT, makeGraphQLRequest } from '../graphql';\nimport { logger } from '../../logger';\n\ninterface UnstructuredSubDataPointRecommendationCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Entry or Named Entity recognized by the classifier */\n name: string;\n /** Context snippet including entry */\n contextSnippet: string;\n /** Scanned object ID */\n scannedObjectId: string;\n /** Scanned object path ID */\n scannedObjectPathId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** Personal data category */\n dataSubCategory: DataCategoryInput;\n /** Classification Status */\n status: UnstructuredSubDataPointRecommendationStatus;\n /** Confidence */\n confidence: number;\n /** Classification method */\n classificationMethod: string;\n /** Classifier version */\n classifierVersion: string;\n}\n\ninterface EntryFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Parent categories to filter down for */\n status?: UnstructuredSubDataPointRecommendationStatus[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n /** Include entry and snippet */\n includeEncryptedSnippets?: boolean;\n /** Include encryptedSamplesS3Key */\n includeEncryptedSamplesS3Key?: boolean;\n}\n/**\n * Pull unstructured subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @param options.dataSiloIds - IDs of data silos to filter down\n * @param options.status - Parent categories to filter down for\n * @param options.subCategories - Sub categories to filter down for\n * @param options.includeEncryptedSnippets - Include entry and snippet\n * @param options.includeEncryptedSamplesS3Key - Include encryptedSamplesS3Key\n * @param options.pageSize - Page size to pull in\n * @returns A promise that resolves to an array of unstructured subdatapoint recommendations\n */\nexport async function pullUnstructuredSubDataPointRecommendations(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n status,\n subCategories = [],\n includeEncryptedSnippets,\n pageSize = 100,\n }: EntryFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<UnstructuredSubDataPointRecommendationCsvPreview[]> {\n const unstructuredSubDataPointRecommendations: UnstructuredSubDataPointRecommendationCsvPreview[] =\n [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n ...(status ? { status } : {}),\n ...(dataSiloIds.length > 0 ? 
{ dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n unstructuredSubDataPointRecommendations: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** Count */\n totalCount: number;\n };\n }>(client, ENTRY_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n unstructuredSubDataPointRecommendations: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** List of matches */\n nodes: UnstructuredSubDataPointRecommendationCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliUnstructuredSubDataPointRecommendationCsvExport(\n $filterBy: UnstructuredSubDataPointRecommendationsFilterInput\n $first: Int!\n $offset: Int!\n ) {\n unstructuredSubDataPointRecommendations(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n dataSiloId\n scannedObjectPathId\n scannedObjectId\n ${includeEncryptedSnippets ? 'name' : ''}\n ${includeEncryptedSnippets ? 'contextSnippet' : ''}\n dataSubCategory {\n name\n category\n }\n status\n confidence\n classificationMethod\n classifierVersion\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n unstructuredSubDataPointRecommendations.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(unstructuredSubDataPointRecommendations, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-OCNPZWYT.cjs","../src/lib/data-inventory/pullAllDatapoints.ts","../src/lib/data-inventory/pullUnstructuredSubDataPointRecommendations.ts"],"names":["pullSubDatapoints","client","dataSiloIds","includeGuessedCategories","includeAttributes","parentCategories","subCategories","pageSize","subDataPoints","t0","progressBar","cliProgress","filterBy","SubDataPointDataSubCategoryGuessStatus","totalCount","makeGraphQLRequest","SUB_DATA_POINTS_COUNT","logger","colors","total","shouldContinue","cursor","offset","nodes","gql","err"],"mappings":"AAAA,quBAAqE,wDAAyC,wDAA8D,2DCKrK,qGACiB,iDACJ,gFACD,oCAWO,MAuE1B,SAAeA,CAAAA,CACbC,CAAAA,CACA,CACE,WAAA,CAAAC,CAAAA,CAAc,CAAC,CAAA,CACf,wBAAA,CAAAC,CAAAA,CACA,iBAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CAAmB,CAAC,CAAA,CACpB,aAAA,CAAAC,CAAAA,CAAgB,CAAC,CAAA,CACjB,QAAA,CAAAC,CAAAA,CAAW,GACb,CAAA,CAGI,CAAC,CAAA,CAC8B,CACnC,IAAMC,CAAAA,CAA0C,CAAC,CAAA,CAG3CC,CAAAA,CAAK,IAAI,IAAA,CAAK,CAAA,CAAE,OAAA,CAAQ,CAAA,CAGxBC,CAAAA,CAAc,IAAIC,qBAAAA,CAAY,SAAA,CAClC,CAAC,CAAA,CACDA,qBAAAA,CAAY,OAAA,CAAQ,cACtB,CAAA,CAGMC,CAAAA,CAAW,CACf,GAAIP,CAAAA,CAAiB,MAAA,CAAS,CAAA,CAAI,CAAE,QAAA,CAAUA,CAAiB,CAAA,CAAI,CAAC,CAAA,CACpE,GAAIC,CAAAA,CAAc,MAAA,CAAS,CAAA,CAAI,CAAE,cAAA,CAAgBA,CAAc,CAAA,CAAI,CAAC,CAAA,CAEpE,GAAID,CAAAA,CAAiB,MAAA,CAASC,CAAAA,CAAc,MAAA,CAAS,CAAA,EACrD,CAACH,CAAAA,CAEG,CAAE,MAAA,CAAQU,oDAAAA,CAAuC,QAAS,CAAA,CAC1D,CAAC,CAAA,CACL,GAAIX,CAAAA,CAAY,MAAA,CAAS,CAAA,CAAI,CAAE,SAAA,CAAWA,CAAY,CAAA,CAAI,CAAC,CAC7D,CAAA,CAGM,CACJ,aAAA,CAAe,CAAE,UAAA,CAAAY,CAAW,CAC9B,CAAA,CAAI,MAAMC,kCAAAA,CAMPd,CAAQe,mBAAAA,CAAuB,CAChC,QAAA,CAAAJ,CACF,CAAC,CAAA,CAEDK,mBAAAA,CAAO,IAAA,CAAKC,gBAAAA,CAAO,OAAA,CAAQ,yCAAyC,CAAC,CAAA,CAErER,CAAAA,CAAY,KAAA,CAAMI,CAAAA,CAAY,CAAC,CAAA,CAC/B,IAAIK,CAAAA,CAAQ,CAAA,CACRC,CAAAA,CAAiB,CAAA,CAAA,CACjBC,CAAAA,CACAC,CAAAA,CAAS,CAAA,CACb,GACE,GAAI,CACF,GAAM,CACJ,aAAA,CAAe,CAAE,KAAA,CAAAC,CAAM,CACzB,CAAA,CAAI,MAAMR,kCAAAA,CAORd,CACAuB,mBAAAA,CAAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAA,EA2BUrB,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAQA,EACN,CAAA;AAAA,gBAAA,EAEEC,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAMA,EACN,CAAA;AAAA;AAAA;AAAA;AAAA,QAAA,CAAA,CAKR,CACE,KAAA,CAAOG,CAAAA,CACP,MAAA,CAAAe,CAAAA,CACA,QAAA,CAAU,CACR,GAAGV,CAGL,CACF,CACF,CAAA,CAEAS,CAAAA,iBAASE,CAAAA,qBAAMA,CAAAA,CAAM,MAAA,CAAS,CAAC,CAAA,6BAAG,IAAA,CAClCf,CAAAA,CAAc,IAAA,CAAK,GAAGe,CAAK,CAAA,CAC3BH,CAAAA,CAAiBG,CAAAA,CAAM,MAAA,GAAWhB,CAAAA,CAClCY,CAAAA,EAASI,CAAAA,CAAM,MAAA,CACfD,CAAAA,EAAUC,CAAAA,CAAM,MAAA,CAChBb,CAAAA,CAAY,MAAA,CAAOS,CAAK,CAC1B,CAAA,KAAA,CAASM,CAAAA,CAAK,CACZ,MAAAR,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,2CAAA,EAA8CG,CAAM,CAAA,YAAA,EAAeC,CAAM,CAAA,CAAA;AC7G3E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBgD,gBAAA;AACU,gBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA+BiB,QAAA","file":"/home/runner/work/cli/cli/dist/chunk-OCNPZWYT.cjs","sourcesContent":[null,"/* eslint-disable max-lines */\nimport { keyBy, uniq, chunk, sortBy } from 'lodash-es';\nimport {\n type DataCategoryType,\n SubDataPointDataSubCategoryGuessStatus,\n} from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport { gql } from 'graphql-request';\nimport colors from 'colors';\nimport type { GraphQLClient } from 'graphql-request';\nimport {\n DATAPOINT_EXPORT,\n DATA_SILO_EXPORT,\n type DataSiloAttributeValue,\n SUB_DATA_POINTS_COUNT,\n 
makeGraphQLRequest,\n} from '../graphql';\nimport { logger } from '../../logger';\nimport type { DataCategoryInput, ProcessingPurposeInput } from '../../codecs';\nimport { mapSeries } from 'bluebird';\n\nexport interface DataSiloCsvPreview {\n /** ID of dataSilo */\n id: string;\n /** Name of dataSilo */\n title: string;\n}\n\nexport interface DataPointCsvPreview {\n /** ID of dataPoint */\n id: string;\n /** The path to this data point */\n path: string[];\n /** Description */\n description: {\n /** Default message */\n defaultMessage: string;\n };\n /** Name */\n name: string;\n}\n\nexport interface SubDataPointCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Name (or key) of the subdatapoint */\n name: string;\n /** The description */\n description?: string;\n /** Personal data category */\n categories: DataCategoryInput[];\n /** Data point ID */\n dataPointId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** The processing purpose for this sub datapoint */\n purposes: ProcessingPurposeInput[];\n /** Attribute attached to subdatapoint */\n attributeValues?: DataSiloAttributeValue[];\n /** Data category guesses that are output by the classifier */\n pendingCategoryGuesses?: {\n /** Data category being guessed */\n category: DataCategoryInput;\n /** Status of guess */\n status: SubDataPointDataSubCategoryGuessStatus;\n /** classifier version that produced the guess */\n classifierVersion: number;\n }[];\n}\n\nexport interface DatapointFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Whether to include guessed categories, defaults to only approved categories */\n includeGuessedCategories?: boolean;\n /** Whether or not to include attributes */\n includeAttributes?: boolean;\n /** Parent categories to filter down for */\n parentCategories?: DataCategoryType[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n}\n\n/**\n * Pull subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The subdatapoints\n */\nasync function pullSubDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<SubDataPointCsvPreview[]> {\n const subDataPoints: SubDataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(parentCategories.length > 0 ? { category: parentCategories } : {}),\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n // if parentCategories or subCategories and not includeGuessedCategories\n ...(parentCategories.length + subCategories.length > 0 &&\n !includeGuessedCategories\n ? // then only show data points with approved data categories\n { status: SubDataPointDataSubCategoryGuessStatus.Approved }\n : {}),\n ...(dataSiloIds.length > 0 ? 
{ dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n subDataPoints: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** Count */\n totalCount: number;\n };\n }>(client, SUB_DATA_POINTS_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n subDataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** List of matches */\n nodes: SubDataPointCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliSubDataPointCsvExport(\n $filterBy: SubDataPointFiltersInput\n $first: Int!\n $offset: Int!\n ) {\n subDataPoints(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n name\n description\n dataPointId\n dataSiloId\n purposes {\n name\n purpose\n }\n categories {\n name\n category\n }\n ${\n includeGuessedCategories\n ? `pendingCategoryGuesses {\n category {\n name\n category\n }\n status\n classifierVersion\n }`\n : ''\n }\n ${\n includeAttributes\n ? `attributeValues {\n attributeKey {\n name\n }\n name\n }`\n : ''\n }\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n // TODO: https://transcend.height.app/T-40484 - add cursor support\n // ...(cursor ? { cursor: { id: cursor } } : {}),\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n subDataPoints.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(subDataPoints, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n\n/**\n * Pull datapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints\n */\nasync function pullDatapoints(\n client: GraphQLClient,\n {\n dataPointIds = [],\n pageSize = 100,\n }: {\n /** IDs of data points to filter down */\n dataPointIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataPointCsvPreview[]> {\n const dataPoints: DataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 2/3] Fetching metadata for ${dataPointIds.length} datapoints`,\n ),\n );\n\n // Group by 100\n const dataPointsGrouped = chunk(dataPointIds, pageSize);\n\n progressBar.start(dataPointIds.length, 0);\n let total = 0;\n await mapSeries(dataPointsGrouped, async (dataPointIdsGroup) => {\n try {\n const {\n dataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataPoints: {\n /** List of matches */\n nodes: DataPointCsvPreview[];\n };\n }>(client, DATAPOINT_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataPointIdsGroup,\n },\n });\n\n dataPoints.push(...nodes);\n 
total += dataPointIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for IDs ${dataPointIdsGroup.join(\n ', ',\n )}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataPoints.length} dataPoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataPoints;\n}\n\n/**\n * Pull data silo information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The data silos\n */\nasync function pullDataSilos(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n pageSize = 100,\n }: {\n /** IDs of data silos to filter down */\n dataSiloIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataSiloCsvPreview[]> {\n const dataSilos: DataSiloCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 3/3] Fetching metadata for ${dataSiloIds.length} data silos`,\n ),\n );\n\n // Group by 100\n const dataSilosGrouped = chunk(dataSiloIds, pageSize);\n\n progressBar.start(dataSiloIds.length, 0);\n let total = 0;\n await mapSeries(dataSilosGrouped, async (dataSiloIdsGroup) => {\n try {\n const {\n dataSilos: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataSilos: {\n /** List of matches */\n nodes: DataSiloCsvPreview[];\n };\n }>(client, DATA_SILO_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataSiloIdsGroup,\n },\n });\n\n dataSilos.push(...nodes);\n total += dataSiloIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching data silos for IDs ${dataSiloIdsGroup.join(', ')}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataSilos.length} data silos in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataSilos;\n}\n\n/**\n * Pull all datapoints from the data inventory.\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints and data silos\n */\nexport async function pullAllDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<\n (SubDataPointCsvPreview & {\n /** Data point information */\n dataPoint: DataPointCsvPreview;\n /** Data silo information */\n dataSilo: DataSiloCsvPreview;\n })[]\n> {\n // Subdatapoint information\n const subDatapoints = await pullSubDatapoints(client, {\n dataSiloIds,\n includeGuessedCategories,\n includeAttributes,\n parentCategories,\n subCategories,\n pageSize,\n });\n\n // The datapoint ids to grab\n const dataPointIds = uniq(subDatapoints.map((point) => point.dataPointId));\n const dataPoints = await pullDatapoints(client, {\n dataPointIds,\n });\n const dataPointById = keyBy(dataPoints, 'id');\n\n // The data silo IDs to grab\n const allDataSiloIds = uniq(subDatapoints.map((point) => point.dataSiloId));\n const dataSilos = await pullDataSilos(client, {\n 

/**
 * Pull all datapoints from the data inventory.
 *
 * @param client - Client to use for the request
 * @param options - Options
 * @returns The datapoints and data silos
 */
export async function pullAllDatapoints(
  client: GraphQLClient,
  {
    dataSiloIds = [],
    includeGuessedCategories,
    includeAttributes,
    parentCategories = [],
    subCategories = [],
    pageSize = 1000,
  }: DatapointFilterOptions & {
    /** Page size to pull in */
    pageSize?: number;
  } = {},
): Promise<
  (SubDataPointCsvPreview & {
    /** Data point information */
    dataPoint: DataPointCsvPreview;
    /** Data silo information */
    dataSilo: DataSiloCsvPreview;
  })[]
> {
  // Subdatapoint information
  const subDatapoints = await pullSubDatapoints(client, {
    dataSiloIds,
    includeGuessedCategories,
    includeAttributes,
    parentCategories,
    subCategories,
    pageSize,
  });

  // The datapoint IDs to grab
  const dataPointIds = uniq(subDatapoints.map((point) => point.dataPointId));
  const dataPoints = await pullDatapoints(client, {
    dataPointIds,
  });
  const dataPointById = keyBy(dataPoints, 'id');

  // The data silo IDs to grab
  const allDataSiloIds = uniq(subDatapoints.map((point) => point.dataSiloId));
  const dataSilos = await pullDataSilos(client, {
    dataSiloIds: allDataSiloIds,
  });
  const dataSiloById = keyBy(dataSilos, 'id');

  // Join each subdatapoint with its parent datapoint and data silo
  return subDatapoints.map((subDataPoint) => ({
    ...subDataPoint,
    dataPoint: dataPointById[subDataPoint.dataPointId],
    dataSilo: dataSiloById[subDataPoint.dataSiloId],
  }));
}
/* eslint-enable max-lines */
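A minimal usage sketch for the exporter above, assuming an ESM context with top-level await. The endpoint URL and API key shown here are illustrative placeholders, not values taken from this package:

import { GraphQLClient } from 'graphql-request';

// Hypothetical client setup: the URL and token are placeholder assumptions
const client = new GraphQLClient('https://api.transcend.io/graphql', {
  headers: { authorization: 'Bearer <TRANSCEND_API_KEY>' },
});

// Pull every subdatapoint, joined to its datapoint and data silo metadata
const rows = await pullAllDatapoints(client, {
  dataSiloIds: [], // empty list -> no data silo filter
  includeGuessedCategories: true,
  includeAttributes: false,
  pageSize: 1000,
});
console.log(rows[0]?.dataPoint, rows[0]?.dataSilo);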

import type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';
import cliProgress from 'cli-progress';
import colors from 'colors';
import { gql, type GraphQLClient } from 'graphql-request';
import { sortBy } from 'lodash-es';
import type { DataCategoryInput } from '../../codecs';
import { ENTRY_COUNT, makeGraphQLRequest } from '../graphql';
import { logger } from '../../logger';

interface UnstructuredSubDataPointRecommendationCsvPreview {
  /** ID of subDatapoint */
  id: string;
  /** Entry or Named Entity recognized by the classifier */
  name: string;
  /** Context snippet including entry */
  contextSnippet: string;
  /** Scanned object ID */
  scannedObjectId: string;
  /** Scanned object path ID */
  scannedObjectPathId: string;
  /** The data silo ID */
  dataSiloId: string;
  /** Personal data category */
  dataSubCategory: DataCategoryInput;
  /** Classification Status */
  status: UnstructuredSubDataPointRecommendationStatus;
  /** Confidence */
  confidence: number;
  /** Classification method */
  classificationMethod: string;
  /** Classifier version */
  classifierVersion: string;
}

interface EntryFilterOptions {
  /** IDs of data silos to filter down */
  dataSiloIds?: string[];
  /** Statuses to filter down for */
  status?: UnstructuredSubDataPointRecommendationStatus[];
  /** Sub categories to filter down for */
  subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID
  /** Include entry and snippet */
  includeEncryptedSnippets?: boolean;
  /** Include encryptedSamplesS3Key */
  includeEncryptedSamplesS3Key?: boolean;
}

/**
 * Pull unstructured subdatapoint information
 *
 * @param client - Client to use for the request
 * @param options - Options
 * @param options.dataSiloIds - IDs of data silos to filter down
 * @param options.status - Statuses to filter down for
 * @param options.subCategories - Sub categories to filter down for
 * @param options.includeEncryptedSnippets - Include entry and snippet
 * @param options.includeEncryptedSamplesS3Key - Include encryptedSamplesS3Key
 * @param options.pageSize - Page size to pull in
 * @returns A promise that resolves to an array of unstructured subdatapoint recommendations
 */
export async function pullUnstructuredSubDataPointRecommendations(
  client: GraphQLClient,
  {
    dataSiloIds = [],
    status,
    subCategories = [],
    includeEncryptedSnippets,
    pageSize = 100,
  }: EntryFilterOptions & {
    /** Page size to pull in */
    pageSize?: number;
  } = {},
): Promise<UnstructuredSubDataPointRecommendationCsvPreview[]> {
  const unstructuredSubDataPointRecommendations: UnstructuredSubDataPointRecommendationCsvPreview[] =
    [];

  // Time duration
  const t0 = new Date().getTime();

  // create a new progress bar instance and use shades_classic theme
  const progressBar = new cliProgress.SingleBar(
    {},
    cliProgress.Presets.shades_classic,
  );

  // Filters
  const filterBy = {
    ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),
    ...(status ? { status } : {}),
    ...(dataSiloIds.length > 0 ? { dataSilos: dataSiloIds } : {}),
  };

  // Fetch the total count of matching entries
  const {
    unstructuredSubDataPointRecommendations: { totalCount },
  } = await makeGraphQLRequest<{
    /** Query response */
    unstructuredSubDataPointRecommendations: {
      /** Count */
      totalCount: number;
    };
  }>(client, ENTRY_COUNT, {
    filterBy,
  });

  logger.info(
    colors.magenta('Pulling in all unstructured subdatapoint recommendations'),
  );

  progressBar.start(totalCount, 0);
  let total = 0;
  let shouldContinue = false;
  let cursor: string | undefined;
  let offset = 0;
  do {
    try {
      const {
        unstructuredSubDataPointRecommendations: { nodes },
      } = await makeGraphQLRequest<{
        /** Query response */
        unstructuredSubDataPointRecommendations: {
          /** List of matches */
          nodes: UnstructuredSubDataPointRecommendationCsvPreview[];
        };
      }>(
        client,
        gql`
          query TranscendCliUnstructuredSubDataPointRecommendationCsvExport(
            $filterBy: UnstructuredSubDataPointRecommendationsFilterInput
            $first: Int!
            $offset: Int!
          ) {
            unstructuredSubDataPointRecommendations(
              filterBy: $filterBy
              first: $first
              offset: $offset
              useMaster: false
            ) {
              nodes {
                id
                dataSiloId
                scannedObjectPathId
                scannedObjectId
                ${includeEncryptedSnippets ? 'name' : ''}
                ${includeEncryptedSnippets ? 'contextSnippet' : ''}
                dataSubCategory {
                  name
                  category
                }
                status
                confidence
                classificationMethod
                classifierVersion
              }
            }
          }
        `,
        {
          first: pageSize,
          offset,
          filterBy: {
            ...filterBy,
          },
        },
      );

      cursor = nodes[nodes.length - 1]?.id as string;
      unstructuredSubDataPointRecommendations.push(...nodes);
      shouldContinue = nodes.length === pageSize;
      total += nodes.length;
      offset += nodes.length;
      progressBar.update(total);
    } catch (err) {
      logger.error(
        colors.red(
          `An error occurred while fetching entries for cursor ${cursor} and offset ${offset}`,
        ),
      );
      throw err;
    }
  } while (shouldContinue);

  progressBar.stop();
  const t1 = new Date().getTime();
  const totalTime = t1 - t0;

  const sorted = sortBy(unstructuredSubDataPointRecommendations, 'name');

  logger.info(
    colors.green(
      `Successfully pulled in ${sorted.length} unstructured subdatapoint recommendations in ${
        totalTime / 1000
      } seconds!`,
    ),
  );
  return sorted;
}
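As with the structured export, a short usage sketch (assuming a `client` built as in the earlier example; the data silo ID below is a placeholder, not a real value):

// Hypothetical invocation of the unstructured export above
const recommendations = await pullUnstructuredSubDataPointRecommendations(
  client,
  {
    dataSiloIds: ['<data-silo-id>'], // placeholder ID
    includeEncryptedSnippets: true, // also select `name` and `contextSnippet`
    pageSize: 100,
  },
);
console.log(`Exported ${recommendations.length} recommendations`);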