@transcend-io/cli 10.0.1 → 10.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (404)
  1. package/README.md +16 -5
  2. package/dist/{app-BfTrk2nc.mjs → app-C1m4rExX.mjs} +21 -21
  3. package/dist/{app-BfTrk2nc.mjs.map → app-C1m4rExX.mjs.map} +1 -1
  4. package/dist/approvePrivacyRequests-BIHcACAj.mjs +2 -0
  5. package/dist/approvePrivacyRequests-BIHcACAj.mjs.map +1 -0
  6. package/dist/bin/bash-complete.mjs +1 -1
  7. package/dist/bin/cli.mjs +1 -1
  8. package/dist/bin/deprecated-command.mjs +1 -1
  9. package/dist/buildXdiSyncEndpoint-CBbcir-p.mjs +9 -0
  10. package/dist/buildXdiSyncEndpoint-CBbcir-p.mjs.map +1 -0
  11. package/dist/bulkRestartRequests-ByH7TjH2.mjs +2 -0
  12. package/dist/bulkRestartRequests-ByH7TjH2.mjs.map +1 -0
  13. package/dist/bulkRetryEnrichers-DuYXD-64.mjs +2 -0
  14. package/dist/bulkRetryEnrichers-DuYXD-64.mjs.map +1 -0
  15. package/dist/cancelPrivacyRequests-DMgQOffA.mjs +2 -0
  16. package/dist/cancelPrivacyRequests-DMgQOffA.mjs.map +1 -0
  17. package/dist/{codecs-BE3Wmoh8.mjs → codecs-CeDPaLYa.mjs} +1 -1
  18. package/dist/{codecs-BE3Wmoh8.mjs.map → codecs-CeDPaLYa.mjs.map} +1 -1
  19. package/dist/collectCsvFilesOrExit-CbtyKAzu.mjs +2 -0
  20. package/dist/collectCsvFilesOrExit-CbtyKAzu.mjs.map +1 -0
  21. package/dist/collectParquetFilesOrExit-BJiAyaQ5.mjs +2 -0
  22. package/dist/collectParquetFilesOrExit-BJiAyaQ5.mjs.map +1 -0
  23. package/dist/{command-BXxoAjFo.mjs → command-DnoHX-eW.mjs} +2 -2
  24. package/dist/{command-BXxoAjFo.mjs.map → command-DnoHX-eW.mjs.map} +1 -1
  25. package/dist/commands/admin/chunk-csv/worker.d.mts +48 -0
  26. package/dist/commands/admin/chunk-csv/worker.d.mts.map +1 -0
  27. package/dist/commands/admin/chunk-csv/worker.mjs +2 -0
  28. package/dist/commands/admin/chunk-csv/worker.mjs.map +1 -0
  29. package/dist/commands/admin/parquet-to-csv/worker.d.mts +25 -0
  30. package/dist/commands/admin/parquet-to-csv/worker.d.mts.map +1 -0
  31. package/dist/commands/admin/parquet-to-csv/worker.mjs +2 -0
  32. package/dist/commands/admin/parquet-to-csv/worker.mjs.map +1 -0
  33. package/dist/{consentManagersToBusinessEntities-BDgOFga7.mjs → consentManagersToBusinessEntities-BdKDganK.mjs} +2 -2
  34. package/dist/{consentManagersToBusinessEntities-BDgOFga7.mjs.map → consentManagersToBusinessEntities-BdKDganK.mjs.map} +1 -1
  35. package/dist/{constants-AFtS5Nad.mjs → constants-BmwXDQu9.mjs} +2 -2
  36. package/dist/{constants-AFtS5Nad.mjs.map → constants-BmwXDQu9.mjs.map} +1 -1
  37. package/dist/{constants-lIvXgkdp.mjs → constants-ClkQQhJs.mjs} +1 -1
  38. package/dist/{constants-lIvXgkdp.mjs.map → constants-ClkQQhJs.mjs.map} +1 -1
  39. package/dist/constants-muOBBQA_.mjs +2 -0
  40. package/dist/constants-muOBBQA_.mjs.map +1 -0
  41. package/dist/{context-CdSyuBlf.mjs → context-bkKpii_t.mjs} +1 -1
  42. package/dist/{context-CdSyuBlf.mjs.map → context-bkKpii_t.mjs.map} +1 -1
  43. package/dist/createExtraKeyHandler-srtG2U7q.mjs +14 -0
  44. package/dist/createExtraKeyHandler-srtG2U7q.mjs.map +1 -0
  45. package/dist/{dataFlowsToDataSilos-NhvBw1iy.mjs → dataFlowsToDataSilos-Ca2DtTsd.mjs} +1 -1
  46. package/dist/dataFlowsToDataSilos-Ca2DtTsd.mjs.map +1 -0
  47. package/dist/{done-input-validation-DLR0-MJ7.mjs → done-input-validation-BcNBxhEs.mjs} +1 -1
  48. package/dist/{done-input-validation-DLR0-MJ7.mjs.map → done-input-validation-BcNBxhEs.mjs.map} +1 -1
  49. package/dist/downloadPrivacyRequestFiles-kKhGnFmx.mjs +2 -0
  50. package/dist/downloadPrivacyRequestFiles-kKhGnFmx.mjs.map +1 -0
  51. package/dist/{extractClientError-DPjv09EH.mjs → extractClientError-i-Tw_az7.mjs} +1 -1
  52. package/dist/{extractClientError-DPjv09EH.mjs.map → extractClientError-i-Tw_az7.mjs.map} +1 -1
  53. package/dist/fetchAllRequests-CHHdyb4Q.mjs +2 -0
  54. package/dist/fetchAllRequests-CHHdyb4Q.mjs.map +1 -0
  55. package/dist/generateCrossAccountApiKeys-C7yH3Rbi.mjs +2 -0
  56. package/dist/generateCrossAccountApiKeys-C7yH3Rbi.mjs.map +1 -0
  57. package/dist/impl-3VLH9aat.mjs +2 -0
  58. package/dist/impl-3VLH9aat.mjs.map +1 -0
  59. package/dist/{impl-Rt3C_fDF.mjs → impl-3sDUDXru.mjs} +2 -2
  60. package/dist/{impl-Rt3C_fDF.mjs.map → impl-3sDUDXru.mjs.map} +1 -1
  61. package/dist/{impl-Cgg_bv7j.mjs → impl-6mCOBlSD.mjs} +2 -2
  62. package/dist/{impl-Cgg_bv7j.mjs.map → impl-6mCOBlSD.mjs.map} +1 -1
  63. package/dist/impl-84ylH4aO.mjs +2 -0
  64. package/dist/{impl-CMmyv1cl.mjs.map → impl-84ylH4aO.mjs.map} +1 -1
  65. package/dist/impl-B62XN4tV.mjs +2 -0
  66. package/dist/impl-B62XN4tV.mjs.map +1 -0
  67. package/dist/{impl-DGiPB5Vq2.mjs → impl-B9BsXBxS.mjs} +2 -2
  68. package/dist/impl-B9BsXBxS.mjs.map +1 -0
  69. package/dist/{impl-KDuBh4bu2.mjs → impl-BNqmxytJ.mjs} +2 -2
  70. package/dist/impl-BNqmxytJ.mjs.map +1 -0
  71. package/dist/{impl-CqXFyvgV2.mjs → impl-BYf4MpWP.mjs} +2 -2
  72. package/dist/impl-BYf4MpWP.mjs.map +1 -0
  73. package/dist/impl-BaC9iEO_.mjs +2 -0
  74. package/dist/impl-BaC9iEO_.mjs.map +1 -0
  75. package/dist/impl-BhDS0QIt.mjs +2 -0
  76. package/dist/impl-BhDS0QIt.mjs.map +1 -0
  77. package/dist/{impl-CNez1OAw.mjs → impl-BjCQSRLu.mjs} +2 -2
  78. package/dist/{impl-CNez1OAw.mjs.map → impl-BjCQSRLu.mjs.map} +1 -1
  79. package/dist/impl-BjIylEKQ.mjs +4 -0
  80. package/dist/impl-BjIylEKQ.mjs.map +1 -0
  81. package/dist/impl-BwrEi3s7.mjs +2 -0
  82. package/dist/impl-BwrEi3s7.mjs.map +1 -0
  83. package/dist/impl-C4AI1Fsj.mjs +3 -0
  84. package/dist/impl-C4AI1Fsj.mjs.map +1 -0
  85. package/dist/{impl-fqOKTw5J.mjs → impl-CCAeEeMR.mjs} +2 -2
  86. package/dist/{impl-fqOKTw5J.mjs.map → impl-CCAeEeMR.mjs.map} +1 -1
  87. package/dist/{impl-P_NDC3cX.mjs → impl-CFI5y5U-.mjs} +2 -2
  88. package/dist/{impl-P_NDC3cX.mjs.map → impl-CFI5y5U-.mjs.map} +1 -1
  89. package/dist/{impl-BOUm7wly2.mjs → impl-CIfRN0ux.mjs} +2 -2
  90. package/dist/impl-CIfRN0ux.mjs.map +1 -0
  91. package/dist/impl-CLznNZ5F.mjs +2 -0
  92. package/dist/impl-CLznNZ5F.mjs.map +1 -0
  93. package/dist/impl-CUdo0Jyh.mjs +2 -0
  94. package/dist/impl-CUdo0Jyh.mjs.map +1 -0
  95. package/dist/{impl-MpkLBntW.mjs → impl-Cmj1Vi5Q.mjs} +2 -2
  96. package/dist/{impl-MpkLBntW.mjs.map → impl-Cmj1Vi5Q.mjs.map} +1 -1
  97. package/dist/impl-Cw3_0zqC.mjs +2 -0
  98. package/dist/impl-Cw3_0zqC.mjs.map +1 -0
  99. package/dist/{impl-D-cp0CYr.mjs → impl-CzvCA0Ev.mjs} +2 -2
  100. package/dist/{impl-D-cp0CYr.mjs.map → impl-CzvCA0Ev.mjs.map} +1 -1
  101. package/dist/{impl-CSChmq_t2.mjs → impl-D1DmW5-P.mjs} +2 -2
  102. package/dist/impl-D1DmW5-P.mjs.map +1 -0
  103. package/dist/{impl-CCUsnhoW2.mjs → impl-D41c_KGj.mjs} +2 -2
  104. package/dist/impl-D41c_KGj.mjs.map +1 -0
  105. package/dist/impl-DEpCg7UP.mjs +2 -0
  106. package/dist/{impl-tbGnvKFm.mjs.map → impl-DEpCg7UP.mjs.map} +1 -1
  107. package/dist/{impl-DGuwD_qz.mjs → impl-DHOh4ypd.mjs} +2 -2
  108. package/dist/{impl-DGuwD_qz.mjs.map → impl-DHOh4ypd.mjs.map} +1 -1
  109. package/dist/{impl-D9NjIwEi2.mjs → impl-DJg0Ibxs.mjs} +2 -2
  110. package/dist/impl-DJg0Ibxs.mjs.map +1 -0
  111. package/dist/impl-DUdbbIpf.mjs +2 -0
  112. package/dist/impl-DUdbbIpf.mjs.map +1 -0
  113. package/dist/impl-DXHqqWJb.mjs +7 -0
  114. package/dist/impl-DXHqqWJb.mjs.map +1 -0
  115. package/dist/impl-DXaA3sMt.mjs +2 -0
  116. package/dist/impl-DXaA3sMt.mjs.map +1 -0
  117. package/dist/impl-Dl4RcPKp.mjs +2 -0
  118. package/dist/{impl-DEWXA_QC.mjs.map → impl-Dl4RcPKp.mjs.map} +1 -1
  119. package/dist/{impl-c7rUQYDc2.mjs → impl-DvAwxl6Z.mjs} +2 -2
  120. package/dist/impl-DvAwxl6Z.mjs.map +1 -0
  121. package/dist/{impl-JThkrXiI2.mjs → impl-GRdcDZQ4.mjs} +2 -2
  122. package/dist/impl-GRdcDZQ4.mjs.map +1 -0
  123. package/dist/{impl-CNykdy3e2.mjs → impl-UMb9wjra.mjs} +2 -2
  124. package/dist/impl-UMb9wjra.mjs.map +1 -0
  125. package/dist/impl-Yq33AV90.mjs +2 -0
  126. package/dist/impl-Yq33AV90.mjs.map +1 -0
  127. package/dist/impl-aGDJJgGc.mjs +2 -0
  128. package/dist/impl-aGDJJgGc.mjs.map +1 -0
  129. package/dist/impl-fZQxhZRu.mjs +12 -0
  130. package/dist/impl-fZQxhZRu.mjs.map +1 -0
  131. package/dist/{impl-BUC4ZelU.mjs → impl-gitQPEo3.mjs} +2 -2
  132. package/dist/{impl-BUC4ZelU.mjs.map → impl-gitQPEo3.mjs.map} +1 -1
  133. package/dist/impl-i-vquwbD.mjs +2 -0
  134. package/dist/{impl-C2o0eDzJ.mjs.map → impl-i-vquwbD.mjs.map} +1 -1
  135. package/dist/impl-iteb85IZ.mjs +4 -0
  136. package/dist/impl-iteb85IZ.mjs.map +1 -0
  137. package/dist/impl-tYtVXUz2.mjs +2 -0
  138. package/dist/impl-tYtVXUz2.mjs.map +1 -0
  139. package/dist/{impl-DGzvE8aJ.mjs → impl-wcRtA0L3.mjs} +2 -2
  140. package/dist/{impl-DGzvE8aJ.mjs.map → impl-wcRtA0L3.mjs.map} +1 -1
  141. package/dist/impl-xtlx25UP.mjs +2 -0
  142. package/dist/impl-xtlx25UP.mjs.map +1 -0
  143. package/dist/impl-yMumZUUX.mjs +2 -0
  144. package/dist/impl-yMumZUUX.mjs.map +1 -0
  145. package/dist/index.d.mts +1135 -5125
  146. package/dist/index.d.mts.map +1 -1
  147. package/dist/index.mjs +4 -78
  148. package/dist/index.mjs.map +1 -1
  149. package/dist/{inquirer-BgNcicZ4.mjs → inquirer-BqZXFEt1.mjs} +2 -2
  150. package/dist/{inquirer-BgNcicZ4.mjs.map → inquirer-BqZXFEt1.mjs.map} +1 -1
  151. package/dist/{listFiles-qzyQMaYH.mjs → listFiles-D2wMHnEr.mjs} +1 -1
  152. package/dist/{listFiles-qzyQMaYH.mjs.map → listFiles-D2wMHnEr.mjs.map} +1 -1
  153. package/dist/{logger-B-LXIf3U.mjs → logger-Bj782ZYD.mjs} +1 -1
  154. package/dist/{logger-B-LXIf3U.mjs.map → logger-Bj782ZYD.mjs.map} +1 -1
  155. package/dist/markRequestDataSiloIdsCompleted-BaVxVfDe.mjs +2 -0
  156. package/dist/markRequestDataSiloIdsCompleted-BaVxVfDe.mjs.map +1 -0
  157. package/dist/markSilentPrivacyRequests-miaumnaC.mjs +2 -0
  158. package/dist/markSilentPrivacyRequests-miaumnaC.mjs.map +1 -0
  159. package/dist/notifyPrivacyRequestsAdditionalTime-BUdhSCNL.mjs +2 -0
  160. package/dist/notifyPrivacyRequestsAdditionalTime-BUdhSCNL.mjs.map +1 -0
  161. package/dist/parquetToCsvOneFile-B84XXInh.mjs +6 -0
  162. package/dist/parquetToCsvOneFile-B84XXInh.mjs.map +1 -0
  163. package/dist/parseAttributesFromString-D1Yl0xwT.mjs +2 -0
  164. package/dist/{parseAttributesFromString-CZStzJc0.mjs.map → parseAttributesFromString-D1Yl0xwT.mjs.map} +1 -1
  165. package/dist/parseVariablesFromString-BeKOGw5n.mjs +3 -0
  166. package/dist/parseVariablesFromString-BeKOGw5n.mjs.map +1 -0
  167. package/dist/pullAllDatapoints-Bbmky50p.mjs +45 -0
  168. package/dist/pullAllDatapoints-Bbmky50p.mjs.map +1 -0
  169. package/dist/pullChunkedCustomSiloOutstandingIdentifiers-BW5Vws25.mjs +2 -0
  170. package/dist/pullChunkedCustomSiloOutstandingIdentifiers-BW5Vws25.mjs.map +1 -0
  171. package/dist/pullConsentManagerMetrics-zKgjc3Ap.mjs +2 -0
  172. package/dist/pullConsentManagerMetrics-zKgjc3Ap.mjs.map +1 -0
  173. package/dist/pullManualEnrichmentIdentifiersToCsv-kpGy9H7T.mjs +2 -0
  174. package/dist/pullManualEnrichmentIdentifiersToCsv-kpGy9H7T.mjs.map +1 -0
  175. package/dist/pullTranscendConfiguration-DjOELnPo.mjs +58 -0
  176. package/dist/pullTranscendConfiguration-DjOELnPo.mjs.map +1 -0
  177. package/dist/{pullUnstructuredSubDataPointRecommendations-DZd2q6S2.mjs → pullUnstructuredSubDataPointRecommendations-D0z-vPgq.mjs} +4 -4
  178. package/dist/pullUnstructuredSubDataPointRecommendations-D0z-vPgq.mjs.map +1 -0
  179. package/dist/pushCronIdentifiersFromCsv-BZRA1n_8.mjs +2 -0
  180. package/dist/pushCronIdentifiersFromCsv-BZRA1n_8.mjs.map +1 -0
  181. package/dist/pushManualEnrichmentIdentifiersFromCsv-DXqf8WWy.mjs +2 -0
  182. package/dist/pushManualEnrichmentIdentifiersFromCsv-DXqf8WWy.mjs.map +1 -0
  183. package/dist/{readCsv-CyOL7eCc.mjs → readCsv-C4TyEs-r.mjs} +1 -1
  184. package/dist/{readCsv-CyOL7eCc.mjs.map → readCsv-C4TyEs-r.mjs.map} +1 -1
  185. package/dist/{readTranscendYaml-D-J1ilS0.mjs → readTranscendYaml-DVkQL2SC.mjs} +2 -2
  186. package/dist/{readTranscendYaml-D-J1ilS0.mjs.map → readTranscendYaml-DVkQL2SC.mjs.map} +1 -1
  187. package/dist/removeUnverifiedRequestIdentifiers-BxWSsJit.mjs +2 -0
  188. package/dist/removeUnverifiedRequestIdentifiers-BxWSsJit.mjs.map +1 -0
  189. package/dist/{request-CAsR6CMY.mjs → request-DfkRPQFr.mjs} +1 -1
  190. package/dist/{request-CAsR6CMY.mjs.map → request-DfkRPQFr.mjs.map} +1 -1
  191. package/dist/retryRequestDataSilos-BVrJz_GC.mjs +2 -0
  192. package/dist/retryRequestDataSilos-BVrJz_GC.mjs.map +1 -0
  193. package/dist/skipPreflightJobs-CYuoMG3z.mjs +2 -0
  194. package/dist/skipPreflightJobs-CYuoMG3z.mjs.map +1 -0
  195. package/dist/skipRequestDataSilos-BNspAsjR.mjs +2 -0
  196. package/dist/skipRequestDataSilos-BNspAsjR.mjs.map +1 -0
  197. package/dist/streamPrivacyRequestsToCsv-PoyTmQd6.mjs +2 -0
  198. package/dist/streamPrivacyRequestsToCsv-PoyTmQd6.mjs.map +1 -0
  199. package/dist/syncCodePackages-CAk_Hjyl.mjs +2 -0
  200. package/dist/syncCodePackages-CAk_Hjyl.mjs.map +1 -0
  201. package/dist/updateConsentManagerVersionToLatest-lAw3E1wm.mjs +2 -0
  202. package/dist/updateConsentManagerVersionToLatest-lAw3E1wm.mjs.map +1 -0
  203. package/dist/uploadConsents-BzmWrNc1.mjs +2 -0
  204. package/dist/uploadConsents-BzmWrNc1.mjs.map +1 -0
  205. package/dist/uploadCookiesFromCsv-TH10UBgw.mjs +2 -0
  206. package/dist/uploadCookiesFromCsv-TH10UBgw.mjs.map +1 -0
  207. package/dist/uploadDataFlowsFromCsv-DUSFCae9.mjs +2 -0
  208. package/dist/uploadDataFlowsFromCsv-DUSFCae9.mjs.map +1 -0
  209. package/dist/uploadPrivacyRequestsFromCsv-sKSFfE6q.mjs +2 -0
  210. package/dist/uploadPrivacyRequestsFromCsv-sKSFfE6q.mjs.map +1 -0
  211. package/dist/{validateTranscendAuth-1W1IylqE.mjs → validateTranscendAuth-Cuh2Qfdl.mjs} +2 -2
  212. package/dist/{validateTranscendAuth-1W1IylqE.mjs.map → validateTranscendAuth-Cuh2Qfdl.mjs.map} +1 -1
  213. package/dist/{writeCsv-B51ulrVl.mjs → writeCsv-C4pjXGsD.mjs} +1 -1
  214. package/dist/{writeCsv-B51ulrVl.mjs.map → writeCsv-C4pjXGsD.mjs.map} +1 -1
  215. package/package.json +13 -10
  216. package/dist/RateCounter-DFL_mnk2.mjs +0 -2
  217. package/dist/RateCounter-DFL_mnk2.mjs.map +0 -1
  218. package/dist/RequestDataSilo-_Iv44M9u.mjs +0 -51
  219. package/dist/RequestDataSilo-_Iv44M9u.mjs.map +0 -1
  220. package/dist/approvePrivacyRequests-CWGZR2N6.mjs +0 -2
  221. package/dist/approvePrivacyRequests-CWGZR2N6.mjs.map +0 -1
  222. package/dist/assessment-BDywVaGR.mjs +0 -284
  223. package/dist/assessment-BDywVaGR.mjs.map +0 -1
  224. package/dist/bluebird-CUitXgsY.mjs +0 -2
  225. package/dist/bluebird-CUitXgsY.mjs.map +0 -1
  226. package/dist/buildXdiSyncEndpoint-Cb-pvpak.mjs +0 -9
  227. package/dist/buildXdiSyncEndpoint-Cb-pvpak.mjs.map +0 -1
  228. package/dist/bulkRestartRequests-CKF_xpN0.mjs +0 -2
  229. package/dist/bulkRestartRequests-CKF_xpN0.mjs.map +0 -1
  230. package/dist/bulkRetryEnrichers-B-Szmin-.mjs +0 -2
  231. package/dist/bulkRetryEnrichers-B-Szmin-.mjs.map +0 -1
  232. package/dist/cancelPrivacyRequests-DNiL13E_.mjs +0 -2
  233. package/dist/cancelPrivacyRequests-DNiL13E_.mjs.map +0 -1
  234. package/dist/codecs-Dx_vGxsl.mjs +0 -2
  235. package/dist/codecs-Dx_vGxsl.mjs.map +0 -1
  236. package/dist/constants-CeMiHaHx.mjs +0 -2
  237. package/dist/constants-CeMiHaHx.mjs.map +0 -1
  238. package/dist/createExtraKeyHandler-tubeaEjA.mjs +0 -23
  239. package/dist/createExtraKeyHandler-tubeaEjA.mjs.map +0 -1
  240. package/dist/createPreferenceAccessTokens-DqmFctn3.mjs +0 -10
  241. package/dist/createPreferenceAccessTokens-DqmFctn3.mjs.map +0 -1
  242. package/dist/createSombraGotInstance-D1Il9zUE.mjs +0 -10
  243. package/dist/createSombraGotInstance-D1Il9zUE.mjs.map +0 -1
  244. package/dist/dataFlowsToDataSilos-NhvBw1iy.mjs.map +0 -1
  245. package/dist/dataSilo-DrFetFXw.mjs +0 -302
  246. package/dist/dataSilo-DrFetFXw.mjs.map +0 -1
  247. package/dist/dataSubject-y_aXI0pa.mjs +0 -92
  248. package/dist/dataSubject-y_aXI0pa.mjs.map +0 -1
  249. package/dist/downloadPrivacyRequestFiles-DlpgxqHF.mjs +0 -2
  250. package/dist/downloadPrivacyRequestFiles-DlpgxqHF.mjs.map +0 -1
  251. package/dist/extractErrorMessage-CPnTsT1S.mjs +0 -2
  252. package/dist/extractErrorMessage-CPnTsT1S.mjs.map +0 -1
  253. package/dist/fetchAllActions-BJsPdnxy.mjs +0 -832
  254. package/dist/fetchAllActions-BJsPdnxy.mjs.map +0 -1
  255. package/dist/fetchAllDataFlows-D248lO6_.mjs +0 -2
  256. package/dist/fetchAllDataFlows-D248lO6_.mjs.map +0 -1
  257. package/dist/fetchAllPreferenceTopics-ForE9GpZ.mjs +0 -36
  258. package/dist/fetchAllPreferenceTopics-ForE9GpZ.mjs.map +0 -1
  259. package/dist/fetchAllPurposes-ZdkO2fMp.mjs +0 -29
  260. package/dist/fetchAllPurposes-ZdkO2fMp.mjs.map +0 -1
  261. package/dist/fetchAllPurposesAndPreferences-DD6OyA5t.mjs +0 -2
  262. package/dist/fetchAllPurposesAndPreferences-DD6OyA5t.mjs.map +0 -1
  263. package/dist/fetchAllRequestEnrichers-CK-kk5eg.mjs +0 -42
  264. package/dist/fetchAllRequestEnrichers-CK-kk5eg.mjs.map +0 -1
  265. package/dist/fetchAllRequestIdentifiers-DrFFOt0m.mjs +0 -10
  266. package/dist/fetchAllRequestIdentifiers-DrFFOt0m.mjs.map +0 -1
  267. package/dist/fetchAllRequests-DNQQsY4s.mjs +0 -2
  268. package/dist/fetchAllRequests-DNQQsY4s.mjs.map +0 -1
  269. package/dist/fetchApiKeys-DjOr44xA.mjs +0 -33
  270. package/dist/fetchApiKeys-DjOr44xA.mjs.map +0 -1
  271. package/dist/fetchCatalogs-BM4FCbcS.mjs +0 -12
  272. package/dist/fetchCatalogs-BM4FCbcS.mjs.map +0 -1
  273. package/dist/fetchConsentManagerId-CFkg3-RS.mjs +0 -321
  274. package/dist/fetchConsentManagerId-CFkg3-RS.mjs.map +0 -1
  275. package/dist/fetchIdentifiers-pjQV4vUg.mjs +0 -54
  276. package/dist/fetchIdentifiers-pjQV4vUg.mjs.map +0 -1
  277. package/dist/fetchRequestDataSilo-P4yA7Lyc.mjs +0 -2
  278. package/dist/fetchRequestDataSilo-P4yA7Lyc.mjs.map +0 -1
  279. package/dist/fetchRequestFilesForRequest-BbxrEKFK.mjs +0 -33
  280. package/dist/fetchRequestFilesForRequest-BbxrEKFK.mjs.map +0 -1
  281. package/dist/generateCrossAccountApiKeys-Bxc_dzMG.mjs +0 -33
  282. package/dist/generateCrossAccountApiKeys-Bxc_dzMG.mjs.map +0 -1
  283. package/dist/impl-4ltdSmpl2.mjs +0 -4
  284. package/dist/impl-4ltdSmpl2.mjs.map +0 -1
  285. package/dist/impl-B19fH75P.mjs +0 -12
  286. package/dist/impl-B19fH75P.mjs.map +0 -1
  287. package/dist/impl-BBMjv5YQ.mjs +0 -2
  288. package/dist/impl-BBMjv5YQ.mjs.map +0 -1
  289. package/dist/impl-BKH3QRLi.mjs +0 -3
  290. package/dist/impl-BKH3QRLi.mjs.map +0 -1
  291. package/dist/impl-BOUm7wly2.mjs.map +0 -1
  292. package/dist/impl-BhTCp0kg.mjs +0 -2
  293. package/dist/impl-BhTCp0kg.mjs.map +0 -1
  294. package/dist/impl-BlHU1bbJ2.mjs +0 -2
  295. package/dist/impl-BlHU1bbJ2.mjs.map +0 -1
  296. package/dist/impl-BwjguKHC.mjs +0 -4
  297. package/dist/impl-BwjguKHC.mjs.map +0 -1
  298. package/dist/impl-C2o0eDzJ.mjs +0 -2
  299. package/dist/impl-C8HKnjw82.mjs +0 -2
  300. package/dist/impl-C8HKnjw82.mjs.map +0 -1
  301. package/dist/impl-CCUsnhoW2.mjs.map +0 -1
  302. package/dist/impl-CCc-wXqD.mjs +0 -2
  303. package/dist/impl-CCc-wXqD.mjs.map +0 -1
  304. package/dist/impl-CMmyv1cl.mjs +0 -2
  305. package/dist/impl-CNykdy3e2.mjs.map +0 -1
  306. package/dist/impl-CSChmq_t2.mjs.map +0 -1
  307. package/dist/impl-Ce9K4OCp.mjs +0 -2
  308. package/dist/impl-Ce9K4OCp.mjs.map +0 -1
  309. package/dist/impl-ChCqHkOc2.mjs +0 -2
  310. package/dist/impl-ChCqHkOc2.mjs.map +0 -1
  311. package/dist/impl-CqEwwWeD.mjs +0 -2
  312. package/dist/impl-CqEwwWeD.mjs.map +0 -1
  313. package/dist/impl-CqXFyvgV2.mjs.map +0 -1
  314. package/dist/impl-CxLSJk2P.mjs +0 -2
  315. package/dist/impl-CxLSJk2P.mjs.map +0 -1
  316. package/dist/impl-CzU9WTiW.mjs +0 -2
  317. package/dist/impl-CzU9WTiW.mjs.map +0 -1
  318. package/dist/impl-D9NjIwEi2.mjs.map +0 -1
  319. package/dist/impl-DEWXA_QC.mjs +0 -2
  320. package/dist/impl-DGiPB5Vq2.mjs.map +0 -1
  321. package/dist/impl-DTp9OQIZ.mjs +0 -7
  322. package/dist/impl-DTp9OQIZ.mjs.map +0 -1
  323. package/dist/impl-DhscnXSw.mjs +0 -2
  324. package/dist/impl-DhscnXSw.mjs.map +0 -1
  325. package/dist/impl-Dk7MdX-1.mjs +0 -2
  326. package/dist/impl-Dk7MdX-1.mjs.map +0 -1
  327. package/dist/impl-DsNPvet4.mjs +0 -2
  328. package/dist/impl-DsNPvet4.mjs.map +0 -1
  329. package/dist/impl-DxUFb0vv.mjs +0 -2
  330. package/dist/impl-DxUFb0vv.mjs.map +0 -1
  331. package/dist/impl-JThkrXiI2.mjs.map +0 -1
  332. package/dist/impl-KDuBh4bu2.mjs.map +0 -1
  333. package/dist/impl-c7rUQYDc2.mjs.map +0 -1
  334. package/dist/impl-oiBTZqQS2.mjs +0 -2
  335. package/dist/impl-oiBTZqQS2.mjs.map +0 -1
  336. package/dist/impl-tbGnvKFm.mjs +0 -2
  337. package/dist/makeGraphQLRequest-Cq26A_Lq.mjs +0 -2
  338. package/dist/makeGraphQLRequest-Cq26A_Lq.mjs.map +0 -1
  339. package/dist/markRequestDataSiloIdsCompleted-DzqJ5MNY.mjs +0 -2
  340. package/dist/markRequestDataSiloIdsCompleted-DzqJ5MNY.mjs.map +0 -1
  341. package/dist/markSilentPrivacyRequests-BKQUu6Ep.mjs +0 -2
  342. package/dist/markSilentPrivacyRequests-BKQUu6Ep.mjs.map +0 -1
  343. package/dist/mergeTranscendInputs-DGC4xUGu.mjs +0 -2
  344. package/dist/mergeTranscendInputs-DGC4xUGu.mjs.map +0 -1
  345. package/dist/notifyPrivacyRequestsAdditionalTime-TEHAJe4C.mjs +0 -2
  346. package/dist/notifyPrivacyRequestsAdditionalTime-TEHAJe4C.mjs.map +0 -1
  347. package/dist/package-C4J38oR1.mjs +0 -2
  348. package/dist/package-C4J38oR1.mjs.map +0 -1
  349. package/dist/parquetToCsvOneFile-DZVKXrjn.mjs +0 -6
  350. package/dist/parquetToCsvOneFile-DZVKXrjn.mjs.map +0 -1
  351. package/dist/parseAttributesFromString-CZStzJc0.mjs +0 -2
  352. package/dist/pullAllDatapoints-Cntwuzw7.mjs +0 -45
  353. package/dist/pullAllDatapoints-Cntwuzw7.mjs.map +0 -1
  354. package/dist/pullChunkedCustomSiloOutstandingIdentifiers-BT-GZpT1.mjs +0 -2
  355. package/dist/pullChunkedCustomSiloOutstandingIdentifiers-BT-GZpT1.mjs.map +0 -1
  356. package/dist/pullConsentManagerMetrics-FnhPEszu.mjs +0 -2
  357. package/dist/pullConsentManagerMetrics-FnhPEszu.mjs.map +0 -1
  358. package/dist/pullManualEnrichmentIdentifiersToCsv-B_4REnga.mjs +0 -2
  359. package/dist/pullManualEnrichmentIdentifiersToCsv-B_4REnga.mjs.map +0 -1
  360. package/dist/pullTranscendConfiguration-CqsgEf9A.mjs +0 -80
  361. package/dist/pullTranscendConfiguration-CqsgEf9A.mjs.map +0 -1
  362. package/dist/pullUnstructuredSubDataPointRecommendations-DZd2q6S2.mjs.map +0 -1
  363. package/dist/pushCronIdentifiersFromCsv-D2saGR5i.mjs +0 -2
  364. package/dist/pushCronIdentifiersFromCsv-D2saGR5i.mjs.map +0 -1
  365. package/dist/pushManualEnrichmentIdentifiersFromCsv-DOvAzMyt.mjs +0 -2
  366. package/dist/pushManualEnrichmentIdentifiersFromCsv-DOvAzMyt.mjs.map +0 -1
  367. package/dist/removeUnverifiedRequestIdentifiers-ChlwRmhd.mjs +0 -35
  368. package/dist/removeUnverifiedRequestIdentifiers-ChlwRmhd.mjs.map +0 -1
  369. package/dist/retryRequestDataSilos-DnwXA1YZ.mjs +0 -2
  370. package/dist/retryRequestDataSilos-DnwXA1YZ.mjs.map +0 -1
  371. package/dist/skipPreflightJobs-jK5lNlmv.mjs +0 -2
  372. package/dist/skipPreflightJobs-jK5lNlmv.mjs.map +0 -1
  373. package/dist/skipRequestDataSilos-DQGroOos.mjs +0 -2
  374. package/dist/skipRequestDataSilos-DQGroOos.mjs.map +0 -1
  375. package/dist/splitCsvToList-BRq_CIfd.mjs +0 -2
  376. package/dist/splitCsvToList-BRq_CIfd.mjs.map +0 -1
  377. package/dist/streamPrivacyRequestsToCsv-BK07Bm-T.mjs +0 -2
  378. package/dist/streamPrivacyRequestsToCsv-BK07Bm-T.mjs.map +0 -1
  379. package/dist/syncCodePackages-F-97FNjo.mjs +0 -232
  380. package/dist/syncCodePackages-F-97FNjo.mjs.map +0 -1
  381. package/dist/syncCookies-BxY36BeJ.mjs +0 -2
  382. package/dist/syncCookies-BxY36BeJ.mjs.map +0 -1
  383. package/dist/syncDataFlows-Cx5LZCen.mjs +0 -2
  384. package/dist/syncDataFlows-Cx5LZCen.mjs.map +0 -1
  385. package/dist/syncTemplates-BrH7Yr0V.mjs +0 -23
  386. package/dist/syncTemplates-BrH7Yr0V.mjs.map +0 -1
  387. package/dist/time-Bl_c3W8U.mjs +0 -2
  388. package/dist/time-Bl_c3W8U.mjs.map +0 -1
  389. package/dist/types-B4CVJCpj.mjs +0 -2
  390. package/dist/types-B4CVJCpj.mjs.map +0 -1
  391. package/dist/updateConsentManagerVersionToLatest-C221vAAw.mjs +0 -2
  392. package/dist/updateConsentManagerVersionToLatest-C221vAAw.mjs.map +0 -1
  393. package/dist/uploadConsents-BbR7_sSt.mjs +0 -2
  394. package/dist/uploadConsents-BbR7_sSt.mjs.map +0 -1
  395. package/dist/uploadCookiesFromCsv-roHWekOP.mjs +0 -2
  396. package/dist/uploadCookiesFromCsv-roHWekOP.mjs.map +0 -1
  397. package/dist/uploadDataFlowsFromCsv-DcTbrsv2.mjs +0 -2
  398. package/dist/uploadDataFlowsFromCsv-DcTbrsv2.mjs.map +0 -1
  399. package/dist/uploadPrivacyRequestsFromCsv-BUGTS-pY.mjs +0 -17
  400. package/dist/uploadPrivacyRequestsFromCsv-BUGTS-pY.mjs.map +0 -1
  401. package/dist/uploadSiloDiscoveryResults-D2fK92WR.mjs +0 -20
  402. package/dist/uploadSiloDiscoveryResults-D2fK92WR.mjs.map +0 -1
  403. package/dist/withPreferenceRetry-xLMZyTq9.mjs +0 -2
  404. package/dist/withPreferenceRetry-xLMZyTq9.mjs.map +0 -1
package/dist/{impl-CNez1OAw.mjs → impl-BjCQSRLu.mjs}
@@ -1,5 +1,5 @@
- import{t as e}from"./logger-B-LXIf3U.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";import n from"node:fs";import r from"node:path";import i from"colors";import a from"fast-glob";import{spawn as o}from"node:child_process";function s(e,t,r){return new Promise((i,a)=>{let o=n.createReadStream(e),s=Buffer.alloc(0),c=t.length,l=0;o.on(`data`,e=>{let n=typeof e==`string`?Buffer.from(e):e;if(r){let e=r-l;if(e<=0){o.destroy(),i(!1);return}n.length>e&&(n=n.subarray(0,e)),l+=n.length}let a=s.length?Buffer.concat([s,n]):n;if(a.toString(`utf8`).toLowerCase().includes(t.toString(`utf8`))){o.destroy(),i(!0);return}s=c>1?Buffer.from(a.subarray(Math.max(0,a.length-(c-1)))):Buffer.alloc(0)}),o.on(`error`,a),o.on(`close`,()=>i(!1)),o.on(`end`,()=>i(!1))})}async function c(e,t,n){let r=0,i=Array.from({length:Math.min(t,e.length)},async()=>{for(;;){let t=r;if(r+=1,t>=e.length)return;await n(e[t])}});await Promise.all(i)}function l(e,t){return new Promise((n,r)=>{let i=o(e,[`-noheader`,`-batch`,`-cmd`,t],{stdio:[`ignore`,`pipe`,`pipe`]}),a=``,s=``;i.stdout.on(`data`,e=>{a+=String(e)}),i.stderr.on(`data`,e=>{s+=String(e)}),i.on(`error`,r),i.on(`close`,e=>{e===0?n(a):r(Error(`duckdb exited ${e}: ${s}`))})})}async function u(e,t){return(await l(e,[`SELECT column_name`,`FROM parquet_schema('${t.replace(/'/g,`''`)}')`,`WHERE lower(column_type) LIKE '%varchar%'`,` OR lower(column_type) LIKE '%string%';`].join(`
+ import{t as e}from"./logger-Bj782ZYD.mjs";import{t}from"./done-input-validation-BcNBxhEs.mjs";import n from"node:fs";import r from"node:path";import i from"colors";import a from"fast-glob";import{spawn as o}from"node:child_process";function s(e,t,r){return new Promise((i,a)=>{let o=n.createReadStream(e),s=Buffer.alloc(0),c=t.length,l=0;o.on(`data`,e=>{let n=typeof e==`string`?Buffer.from(e):e;if(r){let e=r-l;if(e<=0){o.destroy(),i(!1);return}n.length>e&&(n=n.subarray(0,e)),l+=n.length}let a=s.length?Buffer.concat([s,n]):n;if(a.toString(`utf8`).toLowerCase().includes(t.toString(`utf8`))){o.destroy(),i(!0);return}s=c>1?Buffer.from(a.subarray(Math.max(0,a.length-(c-1)))):Buffer.alloc(0)}),o.on(`error`,a),o.on(`close`,()=>i(!1)),o.on(`end`,()=>i(!1))})}async function c(e,t,n){let r=0,i=Array.from({length:Math.min(t,e.length)},async()=>{for(;;){let t=r;if(r+=1,t>=e.length)return;await n(e[t])}});await Promise.all(i)}function l(e,t){return new Promise((n,r)=>{let i=o(e,[`-noheader`,`-batch`,`-cmd`,t],{stdio:[`ignore`,`pipe`,`pipe`]}),a=``,s=``;i.stdout.on(`data`,e=>{a+=String(e)}),i.stderr.on(`data`,e=>{s+=String(e)}),i.on(`error`,r),i.on(`close`,e=>{e===0?n(a):r(Error(`duckdb exited ${e}: ${s}`))})})}async function u(e,t){return(await l(e,[`SELECT column_name`,`FROM parquet_schema('${t.replace(/'/g,`''`)}')`,`WHERE lower(column_type) LIKE '%varchar%'`,` OR lower(column_type) LIKE '%string%';`].join(`
  `))).split(`
  `).map(e=>e.trim()).filter(Boolean)}async function d(e,t,n){let r=await u(e,t);if(r.length===0)return!1;let i=t.replace(/'/g,`''`),a=r.map(e=>`"${e.replace(/"/g,`""`)}" = '${n.replace(/'/g,`''`)}'`).join(` OR `);return(await l(e,[`SELECT 1 AS hit FROM read_parquet('${i}')`,`WHERE ${a}`,`LIMIT 1;`].join(`
  `))).trim().length>0}async function f(n){t(this.process.exit);let{needle:o,root:l,exts:u,noParquet:f,concurrency:p,maxBytes:m}=n,h=r.resolve(l),g=new Set(u.split(`,`).map(e=>e.trim().replace(/^\./,``).toLowerCase()).filter(Boolean)),_=Array.from(g).map(e=>`**/*.${e}`);e.info(i.green(`Searching for "${o}" in ${h} (exts: ${[...g].join(`, `)})`));let v=await a(_,{cwd:h,absolute:!0,onlyFiles:!0,followSymbolicLinks:!1,suppressErrors:!0}),y=Buffer.from(o.toLowerCase(),`utf8`),b=[];if(await c(v,p,async e=>{try{await s(e,y,m)&&(b.push(e),this.process.stdout.write(`${e}\n`))}catch{}}),!f){let t=await a([`**/*.parquet`],{cwd:h,absolute:!0,onlyFiles:!0,followSymbolicLinks:!1,suppressErrors:!0});t.length>0&&(e.info(i.green(`Scanning ${t.length} parquet file(s) via DuckDB...`)),await c(t,Math.max(2,Math.floor(p/4)),async e=>{try{await d(`duckdb`,e,o)&&(b.push(e),this.process.stdout.write(`${e}\n`))}catch{}}))}e.info(i.green(`Done. Found ${b.length} matching file(s).`))}export{f as findTextInFolder};
- //# sourceMappingURL=impl-CNez1OAw.mjs.map
+ //# sourceMappingURL=impl-BjCQSRLu.mjs.map
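
In this pair of hunks only the hashed chunk references change: the re-bundled `logger` and `done-input-validation` chunks are re-pointed and the `sourceMappingURL` follows the new hash, while the `find-text-in-folder` implementation is otherwise identical. Its core is a boundary-carry streaming search, recoverable verbatim from the `sourcesContent` of the map diffed below; a readable sketch:

```ts
import fs from 'node:fs';

/**
 * Stream through a file, checking case-insensitively whether it
 * contains the needle. The last (needle.length - 1) bytes of each
 * chunk are carried into the next one, so matches that straddle a
 * chunk boundary are still found.
 */
export function fileContainsExactBytes(
  filePath: string,
  needle: Buffer, // pre-lowercased needle bytes
  maxBytes?: number, // optional per-file scan budget
): Promise<boolean> {
  return new Promise<boolean>((resolve, reject) => {
    const stream = fs.createReadStream(filePath);
    let carry = Buffer.alloc(0);
    let seen = 0;

    stream.on('data', (raw) => {
      let chunk = typeof raw === 'string' ? Buffer.from(raw) : raw;

      // Enforce the optional byte budget per file
      if (maxBytes) {
        const remaining = maxBytes - seen;
        if (remaining <= 0) {
          stream.destroy();
          resolve(false);
          return;
        }
        if (chunk.length > remaining) chunk = chunk.subarray(0, remaining);
        seen += chunk.length;
      }

      const buf = carry.length ? Buffer.concat([carry, chunk]) : chunk;
      if (buf.toString('utf8').toLowerCase().includes(needle.toString('utf8'))) {
        stream.destroy();
        resolve(true);
        return;
      }

      // Keep the last n-1 bytes to catch boundary matches
      carry =
        needle.length > 1
          ? Buffer.from(buf.subarray(Math.max(0, buf.length - (needle.length - 1))))
          : Buffer.alloc(0);
    });

    stream.on('error', reject);
    stream.on('close', () => resolve(false));
    stream.on('end', () => resolve(false));
  });
}
```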
package/dist/{impl-CNez1OAw.mjs.map → impl-BjCQSRLu.mjs.map}
@@ -1 +1 @@
- {"version":3,"file":"impl-CNez1OAw.mjs","names":["fg"],"sources":["../src/commands/admin/find-text-in-folder/impl.ts"],"sourcesContent":["import { spawn } from 'node:child_process';\nimport fs from 'node:fs';\nimport path from 'node:path';\n\nimport colors from 'colors';\nimport fg from 'fast-glob';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { logger } from '../../../logger.js';\n\n/** CLI flags accepted by the `find-text-in-folder` command. */\nexport type FindTextInFolderCommandFlags = {\n /** The text string to search for */\n needle: string;\n /** Root directory to search */\n root: string;\n /** Comma-separated file extensions */\n exts: string;\n /** Skip parquet file scanning */\n noParquet: boolean;\n /** Max concurrent file scans */\n concurrency: number;\n /** Stop scanning each file after N bytes */\n maxBytes?: number;\n};\n\n/**\n * Streams through a file checking if it contains the needle (case-insensitive).\n *\n * @param filePath - Absolute path to the file to scan\n * @param needle - Lowercased needle as a Buffer\n * @param maxBytes - Optional byte limit per file\n * @returns Whether the file contains the needle\n */\nexport function fileContainsExactBytes(\n filePath: string,\n needle: Buffer,\n maxBytes?: number,\n): Promise<boolean> {\n return new Promise<boolean>((resolve, reject) => {\n const stream = fs.createReadStream(filePath);\n let carry = Buffer.alloc(0);\n const n = needle.length;\n let seen = 0;\n\n stream.on('data', (raw) => {\n let chunk = typeof raw === 'string' ? Buffer.from(raw) : raw;\n\n if (maxBytes) {\n const remaining = maxBytes - seen;\n if (remaining <= 0) {\n stream.destroy();\n resolve(false);\n return;\n }\n if (chunk.length > remaining) {\n chunk = chunk.subarray(0, remaining);\n }\n seen += chunk.length;\n }\n\n const buf = carry.length ? 
Buffer.concat([carry, chunk]) : chunk;\n const haystack = buf.toString('utf8').toLowerCase();\n if (haystack.includes(needle.toString('utf8'))) {\n stream.destroy();\n resolve(true);\n return;\n }\n\n // Keep last n-1 bytes to catch boundary matches\n if (n > 1) {\n carry = Buffer.from(buf.subarray(Math.max(0, buf.length - (n - 1))));\n } else {\n carry = Buffer.alloc(0);\n }\n });\n\n stream.on('error', reject);\n stream.on('close', () => resolve(false));\n stream.on('end', () => resolve(false));\n });\n}\n\n/**\n * Run async workers over items with bounded concurrency.\n *\n * @param items - Array of items to process\n * @param limit - Maximum concurrent workers\n * @param worker - Async function to run per item\n * @returns Resolves when all items are processed\n */\nasync function runWithConcurrency<T>(\n items: T[],\n limit: number,\n worker: (item: T) => Promise<void>,\n): Promise<void> {\n let idx = 0;\n const runners = Array.from({ length: Math.min(limit, items.length) }, async () => {\n // eslint-disable-next-line no-constant-condition\n while (true) {\n const current = idx;\n idx += 1;\n if (current >= items.length) return;\n await worker(items[current]);\n }\n });\n await Promise.all(runners);\n}\n\n/**\n * Execute a DuckDB query and return stdout.\n *\n * @param duckdbPath - Path to the duckdb binary\n * @param sql - SQL query to execute\n * @returns The stdout output from duckdb\n */\nfunction duckdbQuery(duckdbPath: string, sql: string): Promise<string> {\n return new Promise<string>((resolve, reject) => {\n const child = spawn(duckdbPath, ['-noheader', '-batch', '-cmd', sql], {\n stdio: ['ignore', 'pipe', 'pipe'],\n });\n\n let stdout = '';\n let stderr = '';\n child.stdout.on('data', (d) => {\n stdout += String(d);\n });\n child.stderr.on('data', (d) => {\n stderr += String(d);\n });\n\n child.on('error', reject);\n child.on('close', (code) => {\n if (code === 0) resolve(stdout);\n else reject(new Error(`duckdb exited ${code}: ${stderr}`));\n });\n });\n}\n\n/**\n * Get all VARCHAR/STRING column names from a parquet file.\n *\n * @param duckdbPath - Path to the duckdb binary\n * @param filePath - Absolute path to the parquet file\n * @returns Array of string column names\n */\nasync function duckdbGetParquetStringColumns(\n duckdbPath: string,\n filePath: string,\n): Promise<string[]> {\n const escaped = filePath.replace(/'/g, \"''\");\n const sql = [\n 'SELECT column_name',\n `FROM parquet_schema('${escaped}')`,\n \"WHERE lower(column_type) LIKE '%varchar%'\",\n \" OR lower(column_type) LIKE '%string%';\",\n ].join('\\n');\n\n const out = await duckdbQuery(duckdbPath, sql);\n return out\n .split('\\n')\n .map((l) => l.trim())\n .filter(Boolean);\n}\n\n/**\n * Check if any string column in a parquet file contains the needle value.\n *\n * @param duckdbPath - Path to the duckdb binary\n * @param filePath - Absolute path to the parquet file\n * @param needle - The string to search for (exact equality per column)\n * @returns Whether any row/column matches\n */\nasync function parquetFileHasExactString(\n duckdbPath: string,\n filePath: string,\n needle: string,\n): Promise<boolean> {\n const cols = await duckdbGetParquetStringColumns(duckdbPath, filePath);\n if (cols.length === 0) return false;\n\n const escaped = filePath.replace(/'/g, \"''\");\n const orChain = cols\n .map((c) => `\"${c.replace(/\"/g, '\"\"')}\" = '${needle.replace(/'/g, \"''\")}'`)\n .join(' OR ');\n\n const sql = [\n `SELECT 1 AS hit FROM read_parquet('${escaped}')`,\n `WHERE ${orChain}`,\n 'LIMIT 1;',\n 
].join('\\n');\n\n const out = await duckdbQuery(duckdbPath, sql);\n return out.trim().length > 0;\n}\n\n/**\n * Entrypoint for the `admin find-text-in-folder` command.\n *\n * Searches a folder of files for a given text string. Useful for finding\n * a needle in a haystack across many large files (multi-GB CSVs, JSON\n * dumps, log archives). Files are streamed so memory stays flat.\n *\n * @param this - Bound CLI context\n * @param flags - CLI flags for the run\n */\nexport async function findTextInFolder(\n this: LocalContext,\n flags: FindTextInFolderCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const { needle, root, exts, noParquet, concurrency, maxBytes } = flags;\n const rootAbs = path.resolve(root);\n\n const extSet = new Set(\n exts\n .split(',')\n .map((x) => x.trim().replace(/^\\./, '').toLowerCase())\n .filter(Boolean),\n );\n const patterns = Array.from(extSet).map((e) => `**/*.${e}`);\n\n logger.info(\n colors.green(`Searching for \"${needle}\" in ${rootAbs} (exts: ${[...extSet].join(', ')})`),\n );\n\n const normalFiles = await fg(patterns, {\n cwd: rootAbs,\n absolute: true,\n onlyFiles: true,\n followSymbolicLinks: false,\n suppressErrors: true,\n });\n\n const needleBuf = Buffer.from(needle.toLowerCase(), 'utf8');\n const hits: string[] = [];\n\n await runWithConcurrency(normalFiles, concurrency, async (file) => {\n try {\n const ok = await fileContainsExactBytes(file, needleBuf, maxBytes);\n if (ok) {\n hits.push(file);\n this.process.stdout.write(`${file}\\n`);\n }\n } catch {\n // ignore unreadable files\n }\n });\n\n if (!noParquet) {\n const parquetFiles = await fg(['**/*.parquet'], {\n cwd: rootAbs,\n absolute: true,\n onlyFiles: true,\n followSymbolicLinks: false,\n suppressErrors: true,\n });\n\n if (parquetFiles.length > 0) {\n logger.info(colors.green(`Scanning ${parquetFiles.length} parquet file(s) via DuckDB...`));\n\n await runWithConcurrency(\n parquetFiles,\n Math.max(2, Math.floor(concurrency / 4)),\n async (file) => {\n try {\n const ok = await parquetFileHasExactString('duckdb', file, needle);\n if (ok) {\n hits.push(file);\n this.process.stdout.write(`${file}\\n`);\n }\n } catch {\n // ignore parquet read issues\n }\n },\n );\n }\n }\n\n logger.info(colors.green(`Done. 
Found ${hits.length} matching file(s).`));\n}\n"],"mappings":"wOAmCA,SAAgB,EACd,EACA,EACA,EACkB,CAClB,OAAO,IAAI,SAAkB,EAAS,IAAW,CAC/C,IAAM,EAAS,EAAG,iBAAiB,EAAS,CACxC,EAAQ,OAAO,MAAM,EAAE,CACrB,EAAI,EAAO,OACb,EAAO,EAEX,EAAO,GAAG,OAAS,GAAQ,CACzB,IAAI,EAAQ,OAAO,GAAQ,SAAW,OAAO,KAAK,EAAI,CAAG,EAEzD,GAAI,EAAU,CACZ,IAAM,EAAY,EAAW,EAC7B,GAAI,GAAa,EAAG,CAClB,EAAO,SAAS,CAChB,EAAQ,GAAM,CACd,OAEE,EAAM,OAAS,IACjB,EAAQ,EAAM,SAAS,EAAG,EAAU,EAEtC,GAAQ,EAAM,OAGhB,IAAM,EAAM,EAAM,OAAS,OAAO,OAAO,CAAC,EAAO,EAAM,CAAC,CAAG,EAE3D,GADiB,EAAI,SAAS,OAAO,CAAC,aAAa,CACtC,SAAS,EAAO,SAAS,OAAO,CAAC,CAAE,CAC9C,EAAO,SAAS,CAChB,EAAQ,GAAK,CACb,OAIF,AAGE,EAHE,EAAI,EACE,OAAO,KAAK,EAAI,SAAS,KAAK,IAAI,EAAG,EAAI,QAAU,EAAI,GAAG,CAAC,CAAC,CAE5D,OAAO,MAAM,EAAE,EAEzB,CAEF,EAAO,GAAG,QAAS,EAAO,CAC1B,EAAO,GAAG,YAAe,EAAQ,GAAM,CAAC,CACxC,EAAO,GAAG,UAAa,EAAQ,GAAM,CAAC,EACtC,CAWJ,eAAe,EACb,EACA,EACA,EACe,CACf,IAAI,EAAM,EACJ,EAAU,MAAM,KAAK,CAAE,OAAQ,KAAK,IAAI,EAAO,EAAM,OAAO,CAAE,CAAE,SAAY,CAEhF,OAAa,CACX,IAAM,EAAU,EAEhB,GADA,GAAO,EACH,GAAW,EAAM,OAAQ,OAC7B,MAAM,EAAO,EAAM,GAAS,GAE9B,CACF,MAAM,QAAQ,IAAI,EAAQ,CAU5B,SAAS,EAAY,EAAoB,EAA8B,CACrE,OAAO,IAAI,SAAiB,EAAS,IAAW,CAC9C,IAAM,EAAQ,EAAM,EAAY,CAAC,YAAa,SAAU,OAAQ,EAAI,CAAE,CACpE,MAAO,CAAC,SAAU,OAAQ,OAAO,CAClC,CAAC,CAEE,EAAS,GACT,EAAS,GACb,EAAM,OAAO,GAAG,OAAS,GAAM,CAC7B,GAAU,OAAO,EAAE,EACnB,CACF,EAAM,OAAO,GAAG,OAAS,GAAM,CAC7B,GAAU,OAAO,EAAE,EACnB,CAEF,EAAM,GAAG,QAAS,EAAO,CACzB,EAAM,GAAG,QAAU,GAAS,CACtB,IAAS,EAAG,EAAQ,EAAO,CAC1B,EAAW,MAAM,iBAAiB,EAAK,IAAI,IAAS,CAAC,EAC1D,EACF,CAUJ,eAAe,EACb,EACA,EACmB,CAUnB,OADY,MAAM,EAAY,EAPlB,CACV,qBACA,wBAHc,EAAS,QAAQ,KAAM,KAAK,CAGV,IAChC,4CACA,4CACD,CAAC,KAAK;EAAK,CAEkC,EAE3C,MAAM;EAAK,CACX,IAAK,GAAM,EAAE,MAAM,CAAC,CACpB,OAAO,QAAQ,CAWpB,eAAe,EACb,EACA,EACA,EACkB,CAClB,IAAM,EAAO,MAAM,EAA8B,EAAY,EAAS,CACtE,GAAI,EAAK,SAAW,EAAG,MAAO,GAE9B,IAAM,EAAU,EAAS,QAAQ,KAAM,KAAK,CACtC,EAAU,EACb,IAAK,GAAM,IAAI,EAAE,QAAQ,KAAM,KAAK,CAAC,OAAO,EAAO,QAAQ,KAAM,KAAK,CAAC,GAAG,CAC1E,KAAK,OAAO,CASf,OADY,MAAM,EAAY,EANlB,CACV,sCAAsC,EAAQ,IAC9C,SAAS,IACT,WACD,CAAC,KAAK;EAAK,CAEkC,EACnC,MAAM,CAAC,OAAS,EAa7B,eAAsB,EAEpB,EACe,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CAAE,SAAQ,OAAM,OAAM,YAAW,cAAa,YAAa,EAC3D,EAAU,EAAK,QAAQ,EAAK,CAE5B,EAAS,IAAI,IACjB,EACG,MAAM,IAAI,CACV,IAAK,GAAM,EAAE,MAAM,CAAC,QAAQ,MAAO,GAAG,CAAC,aAAa,CAAC,CACrD,OAAO,QAAQ,CACnB,CACK,EAAW,MAAM,KAAK,EAAO,CAAC,IAAK,GAAM,QAAQ,IAAI,CAE3D,EAAO,KACL,EAAO,MAAM,kBAAkB,EAAO,OAAO,EAAQ,UAAU,CAAC,GAAG,EAAO,CAAC,KAAK,KAAK,CAAC,GAAG,CAC1F,CAED,IAAM,EAAc,MAAMA,EAAG,EAAU,CACrC,IAAK,EACL,SAAU,GACV,UAAW,GACX,oBAAqB,GACrB,eAAgB,GACjB,CAAC,CAEI,EAAY,OAAO,KAAK,EAAO,aAAa,CAAE,OAAO,CACrD,EAAiB,EAAE,CAczB,GAZA,MAAM,EAAmB,EAAa,EAAa,KAAO,IAAS,CACjE,GAAI,CACS,MAAM,EAAuB,EAAM,EAAW,EAAS,GAEhE,EAAK,KAAK,EAAK,CACf,KAAK,QAAQ,OAAO,MAAM,GAAG,EAAK,IAAI,OAElC,IAGR,CAEE,CAAC,EAAW,CACd,IAAM,EAAe,MAAMA,EAAG,CAAC,eAAe,CAAE,CAC9C,IAAK,EACL,SAAU,GACV,UAAW,GACX,oBAAqB,GACrB,eAAgB,GACjB,CAAC,CAEE,EAAa,OAAS,IACxB,EAAO,KAAK,EAAO,MAAM,YAAY,EAAa,OAAO,gCAAgC,CAAC,CAE1F,MAAM,EACJ,EACA,KAAK,IAAI,EAAG,KAAK,MAAM,EAAc,EAAE,CAAC,CACxC,KAAO,IAAS,CACd,GAAI,CACS,MAAM,EAA0B,SAAU,EAAM,EAAO,GAEhE,EAAK,KAAK,EAAK,CACf,KAAK,QAAQ,OAAO,MAAM,GAAG,EAAK,IAAI,OAElC,IAIX,EAIL,EAAO,KAAK,EAAO,MAAM,eAAe,EAAK,OAAO,oBAAoB,CAAC"}
+ {"version":3,"file":"impl-BjCQSRLu.mjs","names":["fg"],"sources":["../src/commands/admin/find-text-in-folder/impl.ts"],"sourcesContent":["import { spawn } from 'node:child_process';\nimport fs from 'node:fs';\nimport path from 'node:path';\n\nimport colors from 'colors';\nimport fg from 'fast-glob';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { logger } from '../../../logger.js';\n\n/** CLI flags accepted by the `find-text-in-folder` command. */\nexport type FindTextInFolderCommandFlags = {\n /** The text string to search for */\n needle: string;\n /** Root directory to search */\n root: string;\n /** Comma-separated file extensions */\n exts: string;\n /** Skip parquet file scanning */\n noParquet: boolean;\n /** Max concurrent file scans */\n concurrency: number;\n /** Stop scanning each file after N bytes */\n maxBytes?: number;\n};\n\n/**\n * Streams through a file checking if it contains the needle (case-insensitive).\n *\n * @param filePath - Absolute path to the file to scan\n * @param needle - Lowercased needle as a Buffer\n * @param maxBytes - Optional byte limit per file\n * @returns Whether the file contains the needle\n */\nexport function fileContainsExactBytes(\n filePath: string,\n needle: Buffer,\n maxBytes?: number,\n): Promise<boolean> {\n return new Promise<boolean>((resolve, reject) => {\n const stream = fs.createReadStream(filePath);\n let carry = Buffer.alloc(0);\n const n = needle.length;\n let seen = 0;\n\n stream.on('data', (raw) => {\n let chunk = typeof raw === 'string' ? Buffer.from(raw) : raw;\n\n if (maxBytes) {\n const remaining = maxBytes - seen;\n if (remaining <= 0) {\n stream.destroy();\n resolve(false);\n return;\n }\n if (chunk.length > remaining) {\n chunk = chunk.subarray(0, remaining);\n }\n seen += chunk.length;\n }\n\n const buf = carry.length ? 
Buffer.concat([carry, chunk]) : chunk;\n const haystack = buf.toString('utf8').toLowerCase();\n if (haystack.includes(needle.toString('utf8'))) {\n stream.destroy();\n resolve(true);\n return;\n }\n\n // Keep last n-1 bytes to catch boundary matches\n if (n > 1) {\n carry = Buffer.from(buf.subarray(Math.max(0, buf.length - (n - 1))));\n } else {\n carry = Buffer.alloc(0);\n }\n });\n\n stream.on('error', reject);\n stream.on('close', () => resolve(false));\n stream.on('end', () => resolve(false));\n });\n}\n\n/**\n * Run async workers over items with bounded concurrency.\n *\n * @param items - Array of items to process\n * @param limit - Maximum concurrent workers\n * @param worker - Async function to run per item\n * @returns Resolves when all items are processed\n */\nasync function runWithConcurrency<T>(\n items: T[],\n limit: number,\n worker: (item: T) => Promise<void>,\n): Promise<void> {\n let idx = 0;\n const runners = Array.from({ length: Math.min(limit, items.length) }, async () => {\n // eslint-disable-next-line no-constant-condition\n while (true) {\n const current = idx;\n idx += 1;\n if (current >= items.length) return;\n await worker(items[current]);\n }\n });\n await Promise.all(runners);\n}\n\n/**\n * Execute a DuckDB query and return stdout.\n *\n * @param duckdbPath - Path to the duckdb binary\n * @param sql - SQL query to execute\n * @returns The stdout output from duckdb\n */\nfunction duckdbQuery(duckdbPath: string, sql: string): Promise<string> {\n return new Promise<string>((resolve, reject) => {\n const child = spawn(duckdbPath, ['-noheader', '-batch', '-cmd', sql], {\n stdio: ['ignore', 'pipe', 'pipe'],\n });\n\n let stdout = '';\n let stderr = '';\n child.stdout.on('data', (d) => {\n stdout += String(d);\n });\n child.stderr.on('data', (d) => {\n stderr += String(d);\n });\n\n child.on('error', reject);\n child.on('close', (code) => {\n if (code === 0) resolve(stdout);\n else reject(new Error(`duckdb exited ${code}: ${stderr}`));\n });\n });\n}\n\n/**\n * Get all VARCHAR/STRING column names from a parquet file.\n *\n * @param duckdbPath - Path to the duckdb binary\n * @param filePath - Absolute path to the parquet file\n * @returns Array of string column names\n */\nasync function duckdbGetParquetStringColumns(\n duckdbPath: string,\n filePath: string,\n): Promise<string[]> {\n const escaped = filePath.replace(/'/g, \"''\");\n const sql = [\n 'SELECT column_name',\n `FROM parquet_schema('${escaped}')`,\n \"WHERE lower(column_type) LIKE '%varchar%'\",\n \" OR lower(column_type) LIKE '%string%';\",\n ].join('\\n');\n\n const out = await duckdbQuery(duckdbPath, sql);\n return out\n .split('\\n')\n .map((l) => l.trim())\n .filter(Boolean);\n}\n\n/**\n * Check if any string column in a parquet file contains the needle value.\n *\n * @param duckdbPath - Path to the duckdb binary\n * @param filePath - Absolute path to the parquet file\n * @param needle - The string to search for (exact equality per column)\n * @returns Whether any row/column matches\n */\nasync function parquetFileHasExactString(\n duckdbPath: string,\n filePath: string,\n needle: string,\n): Promise<boolean> {\n const cols = await duckdbGetParquetStringColumns(duckdbPath, filePath);\n if (cols.length === 0) return false;\n\n const escaped = filePath.replace(/'/g, \"''\");\n const orChain = cols\n .map((c) => `\"${c.replace(/\"/g, '\"\"')}\" = '${needle.replace(/'/g, \"''\")}'`)\n .join(' OR ');\n\n const sql = [\n `SELECT 1 AS hit FROM read_parquet('${escaped}')`,\n `WHERE ${orChain}`,\n 'LIMIT 1;',\n 
].join('\\n');\n\n const out = await duckdbQuery(duckdbPath, sql);\n return out.trim().length > 0;\n}\n\n/**\n * Entrypoint for the `admin find-text-in-folder` command.\n *\n * Searches a folder of files for a given text string. Useful for finding\n * a needle in a haystack across many large files (multi-GB CSVs, JSON\n * dumps, log archives). Files are streamed so memory stays flat.\n *\n * @param this - Bound CLI context\n * @param flags - CLI flags for the run\n */\nexport async function findTextInFolder(\n this: LocalContext,\n flags: FindTextInFolderCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const { needle, root, exts, noParquet, concurrency, maxBytes } = flags;\n const rootAbs = path.resolve(root);\n\n const extSet = new Set(\n exts\n .split(',')\n .map((x) => x.trim().replace(/^\\./, '').toLowerCase())\n .filter(Boolean),\n );\n const patterns = Array.from(extSet).map((e) => `**/*.${e}`);\n\n logger.info(\n colors.green(`Searching for \"${needle}\" in ${rootAbs} (exts: ${[...extSet].join(', ')})`),\n );\n\n const normalFiles = await fg(patterns, {\n cwd: rootAbs,\n absolute: true,\n onlyFiles: true,\n followSymbolicLinks: false,\n suppressErrors: true,\n });\n\n const needleBuf = Buffer.from(needle.toLowerCase(), 'utf8');\n const hits: string[] = [];\n\n await runWithConcurrency(normalFiles, concurrency, async (file) => {\n try {\n const ok = await fileContainsExactBytes(file, needleBuf, maxBytes);\n if (ok) {\n hits.push(file);\n this.process.stdout.write(`${file}\\n`);\n }\n } catch {\n // ignore unreadable files\n }\n });\n\n if (!noParquet) {\n const parquetFiles = await fg(['**/*.parquet'], {\n cwd: rootAbs,\n absolute: true,\n onlyFiles: true,\n followSymbolicLinks: false,\n suppressErrors: true,\n });\n\n if (parquetFiles.length > 0) {\n logger.info(colors.green(`Scanning ${parquetFiles.length} parquet file(s) via DuckDB...`));\n\n await runWithConcurrency(\n parquetFiles,\n Math.max(2, Math.floor(concurrency / 4)),\n async (file) => {\n try {\n const ok = await parquetFileHasExactString('duckdb', file, needle);\n if (ok) {\n hits.push(file);\n this.process.stdout.write(`${file}\\n`);\n }\n } catch {\n // ignore parquet read issues\n }\n },\n );\n }\n }\n\n logger.info(colors.green(`Done. 
Found ${hits.length} matching file(s).`));\n}\n"],"mappings":"wOAmCA,SAAgB,EACd,EACA,EACA,EACkB,CAClB,OAAO,IAAI,SAAkB,EAAS,IAAW,CAC/C,IAAM,EAAS,EAAG,iBAAiB,EAAS,CACxC,EAAQ,OAAO,MAAM,EAAE,CACrB,EAAI,EAAO,OACb,EAAO,EAEX,EAAO,GAAG,OAAS,GAAQ,CACzB,IAAI,EAAQ,OAAO,GAAQ,SAAW,OAAO,KAAK,EAAI,CAAG,EAEzD,GAAI,EAAU,CACZ,IAAM,EAAY,EAAW,EAC7B,GAAI,GAAa,EAAG,CAClB,EAAO,SAAS,CAChB,EAAQ,GAAM,CACd,OAEE,EAAM,OAAS,IACjB,EAAQ,EAAM,SAAS,EAAG,EAAU,EAEtC,GAAQ,EAAM,OAGhB,IAAM,EAAM,EAAM,OAAS,OAAO,OAAO,CAAC,EAAO,EAAM,CAAC,CAAG,EAE3D,GADiB,EAAI,SAAS,OAAO,CAAC,aAAa,CACtC,SAAS,EAAO,SAAS,OAAO,CAAC,CAAE,CAC9C,EAAO,SAAS,CAChB,EAAQ,GAAK,CACb,OAIF,AAGE,EAHE,EAAI,EACE,OAAO,KAAK,EAAI,SAAS,KAAK,IAAI,EAAG,EAAI,QAAU,EAAI,GAAG,CAAC,CAAC,CAE5D,OAAO,MAAM,EAAE,EAEzB,CAEF,EAAO,GAAG,QAAS,EAAO,CAC1B,EAAO,GAAG,YAAe,EAAQ,GAAM,CAAC,CACxC,EAAO,GAAG,UAAa,EAAQ,GAAM,CAAC,EACtC,CAWJ,eAAe,EACb,EACA,EACA,EACe,CACf,IAAI,EAAM,EACJ,EAAU,MAAM,KAAK,CAAE,OAAQ,KAAK,IAAI,EAAO,EAAM,OAAO,CAAE,CAAE,SAAY,CAEhF,OAAa,CACX,IAAM,EAAU,EAEhB,GADA,GAAO,EACH,GAAW,EAAM,OAAQ,OAC7B,MAAM,EAAO,EAAM,GAAS,GAE9B,CACF,MAAM,QAAQ,IAAI,EAAQ,CAU5B,SAAS,EAAY,EAAoB,EAA8B,CACrE,OAAO,IAAI,SAAiB,EAAS,IAAW,CAC9C,IAAM,EAAQ,EAAM,EAAY,CAAC,YAAa,SAAU,OAAQ,EAAI,CAAE,CACpE,MAAO,CAAC,SAAU,OAAQ,OAAO,CAClC,CAAC,CAEE,EAAS,GACT,EAAS,GACb,EAAM,OAAO,GAAG,OAAS,GAAM,CAC7B,GAAU,OAAO,EAAE,EACnB,CACF,EAAM,OAAO,GAAG,OAAS,GAAM,CAC7B,GAAU,OAAO,EAAE,EACnB,CAEF,EAAM,GAAG,QAAS,EAAO,CACzB,EAAM,GAAG,QAAU,GAAS,CACtB,IAAS,EAAG,EAAQ,EAAO,CAC1B,EAAW,MAAM,iBAAiB,EAAK,IAAI,IAAS,CAAC,EAC1D,EACF,CAUJ,eAAe,EACb,EACA,EACmB,CAUnB,OADY,MAAM,EAAY,EAPlB,CACV,qBACA,wBAHc,EAAS,QAAQ,KAAM,KAAK,CAGV,IAChC,4CACA,4CACD,CAAC,KAAK;EAAK,CAEkC,EAE3C,MAAM;EAAK,CACX,IAAK,GAAM,EAAE,MAAM,CAAC,CACpB,OAAO,QAAQ,CAWpB,eAAe,EACb,EACA,EACA,EACkB,CAClB,IAAM,EAAO,MAAM,EAA8B,EAAY,EAAS,CACtE,GAAI,EAAK,SAAW,EAAG,MAAO,GAE9B,IAAM,EAAU,EAAS,QAAQ,KAAM,KAAK,CACtC,EAAU,EACb,IAAK,GAAM,IAAI,EAAE,QAAQ,KAAM,KAAK,CAAC,OAAO,EAAO,QAAQ,KAAM,KAAK,CAAC,GAAG,CAC1E,KAAK,OAAO,CASf,OADY,MAAM,EAAY,EANlB,CACV,sCAAsC,EAAQ,IAC9C,SAAS,IACT,WACD,CAAC,KAAK;EAAK,CAEkC,EACnC,MAAM,CAAC,OAAS,EAa7B,eAAsB,EAEpB,EACe,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CAAE,SAAQ,OAAM,OAAM,YAAW,cAAa,YAAa,EAC3D,EAAU,EAAK,QAAQ,EAAK,CAE5B,EAAS,IAAI,IACjB,EACG,MAAM,IAAI,CACV,IAAK,GAAM,EAAE,MAAM,CAAC,QAAQ,MAAO,GAAG,CAAC,aAAa,CAAC,CACrD,OAAO,QAAQ,CACnB,CACK,EAAW,MAAM,KAAK,EAAO,CAAC,IAAK,GAAM,QAAQ,IAAI,CAE3D,EAAO,KACL,EAAO,MAAM,kBAAkB,EAAO,OAAO,EAAQ,UAAU,CAAC,GAAG,EAAO,CAAC,KAAK,KAAK,CAAC,GAAG,CAC1F,CAED,IAAM,EAAc,MAAMA,EAAG,EAAU,CACrC,IAAK,EACL,SAAU,GACV,UAAW,GACX,oBAAqB,GACrB,eAAgB,GACjB,CAAC,CAEI,EAAY,OAAO,KAAK,EAAO,aAAa,CAAE,OAAO,CACrD,EAAiB,EAAE,CAczB,GAZA,MAAM,EAAmB,EAAa,EAAa,KAAO,IAAS,CACjE,GAAI,CACS,MAAM,EAAuB,EAAM,EAAW,EAAS,GAEhE,EAAK,KAAK,EAAK,CACf,KAAK,QAAQ,OAAO,MAAM,GAAG,EAAK,IAAI,OAElC,IAGR,CAEE,CAAC,EAAW,CACd,IAAM,EAAe,MAAMA,EAAG,CAAC,eAAe,CAAE,CAC9C,IAAK,EACL,SAAU,GACV,UAAW,GACX,oBAAqB,GACrB,eAAgB,GACjB,CAAC,CAEE,EAAa,OAAS,IACxB,EAAO,KAAK,EAAO,MAAM,YAAY,EAAa,OAAO,gCAAgC,CAAC,CAE1F,MAAM,EACJ,EACA,KAAK,IAAI,EAAG,KAAK,MAAM,EAAc,EAAE,CAAC,CACxC,KAAO,IAAS,CACd,GAAI,CACS,MAAM,EAA0B,SAAU,EAAM,EAAO,GAEhE,EAAK,KAAK,EAAK,CACf,KAAK,QAAQ,OAAO,MAAM,GAAG,EAAK,IAAI,OAElC,IAIX,EAIL,EAAO,KAAK,EAAO,MAAM,eAAe,EAAK,OAAO,oBAAoB,CAAC"}
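
The same `sourcesContent` also shows how the command fans scans out: a shared-cursor worker pool caps in-flight file reads, and the DuckDB parquet probes run at a quarter of that limit. Extracted for reference:

```ts
/**
 * Run an async worker over items with at most `limit` in flight.
 * Each runner repeatedly claims the next index from a shared cursor
 * until the array is exhausted.
 */
async function runWithConcurrency<T>(
  items: T[],
  limit: number,
  worker: (item: T) => Promise<void>,
): Promise<void> {
  let idx = 0;
  const runners = Array.from({ length: Math.min(limit, items.length) }, async () => {
    while (true) {
      const current = idx;
      idx += 1;
      if (current >= items.length) return;
      await worker(items[current]);
    }
  });
  await Promise.all(runners);
}
```

Per the minified chunk above, plain files are scanned with `runWithConcurrency(normalFiles, concurrency, ...)` while parquet files use `Math.max(2, Math.floor(concurrency / 4))`, keeping the heavier DuckDB child processes from saturating the machine.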
package/dist/impl-BjIylEKQ.mjs
@@ -0,0 +1,4 @@
+ import{n as e,r as t,t as n}from"./enums-CyFTrzXY.mjs";import{t as r}from"./logger-Bj782ZYD.mjs";import{t as i}from"./done-input-validation-BcNBxhEs.mjs";import{OneTrustEnrichedAssessment as a,OneTrustGetAssessmentResponse as o,OneTrustGetListOfAssessmentsResponse as s,OneTrustGetRiskResponse as c,OneTrustGetUserResponse as l}from"@transcend-io/privacy-types";import{keyBy as u,uniq as d}from"lodash-es";import{decodeCodec as f}from"@transcend-io/type-utils";import p,{createReadStream as m}from"node:fs";import h from"colors";import{IMPORT_ONE_TRUST_ASSESSMENT_FORMS as g,buildTranscendGraphQLClient as _,makeGraphQLRequest as v}from"@transcend-io/sdk";import{map as y,mapSeries as b}from"@transcend-io/utils";import x from"JSONStream";import S from"got";const C=({assessment:e,index:t,total:n,wrap:r=!0})=>{let i=``;(t===0||r)&&(i=`[
+ `);let a=JSON.stringify(e),o=n&&t<n-1&&!r?`,`:``;return i=`${i+a+o}\n`,(n&&t===n-1||r)&&(i+=`
+ ]`),i},w=({file:e,assessment:t,index:n,total:i})=>{r.info(h.magenta(`Writing enriched assessment ${n+1} of ${i} to file "${e}"...`)),n===0?p.writeFileSync(e,C({assessment:t,index:n,total:i,wrap:!1})):p.appendFileSync(e,C({assessment:t,index:n,total:i,wrap:!1}))},T=async({oneTrust:e})=>{let t=0,n=1,i=0,a=[];for(;t<n;){let{body:o}=await e.get(`api/assessment/v2/assessments?page=${t}&size=2000`),{page:c,content:l}=f(s,o);a.push(...l??[]),t===0&&(n=c?.totalPages??0,i=c?.totalElements??0),t+=1,r.info(`Fetched ${a.length} of ${i} assessments.`)}return a},E=async({oneTrust:e,assessmentId:t})=>{let{body:n}=await e.get(`api/assessment/v2/assessments/${t}/export?ExcludeSkippedQuestions=false`);return f(o,n)},D=async({oneTrust:e,riskId:t})=>{let{body:n}=await e.get(`api/risk/v2/risks/${t}`);return f(c,n)},O=async({oneTrust:e,userId:t})=>{let{body:n}=await e.get(`api/scim/v2/Users/${t}`);return f(l,n)},k=({assessment:e,assessmentDetails:t,riskDetails:n,creatorDetails:r,approversDetails:i,respondentsDetails:a})=>{let o=u(n,`id`),{sections:s,createdBy:c,...l}=t,d=s.map(e=>{let{questions:t,...n}=e,r=t.map(e=>{let{risks:t,...n}=e,r=(t??[]).map(e=>{let t=o[e.riskId];return{...e,...t,level:e.level,impactLevel:e.impactLevel??0}});return{...n,risks:r}});return{...n,questions:r}}),f={...c,active:r?.active??!1,userType:r?.userType??`Internal`,emails:r?.emails??[],title:r?.title??null,givenName:r?.name.givenName??null,familyName:r?.name.familyName??null},p=u(i,`id`),m=t.approvers.flatMap(e=>p[e.id]?[{...e,approver:{...e.approver,active:p[e.id].active,userType:p[e.id].userType,emails:p[e.id].emails,title:p[e.id].title,givenName:p[e.id].name.givenName??null,familyName:p[e.id].name.familyName??null}}]:[]),h=u(a,`id`),g=t.respondents.filter(e=>!e.name.includes(`@`)).flatMap(e=>h[e.id]?[{...e,active:h[e.id].active,userType:h[e.id].userType,emails:h[e.id].emails,title:h[e.id].title,givenName:h[e.id].name.givenName??null,familyName:h[e.id].name.familyName??null}]:[]);return{...e,...l,approvers:m,respondents:g,createdBy:f,sections:d}},A=async({transcend:e,assessment:t,total:n,index:i})=>{r.info(h.magenta(`Writing enriched assessment ${i+1} ${n?`of ${n} `:` `}to Transcend...`));let a={json:C({assessment:t,index:i,total:n})};try{await v(e,g,{variables:{input:a},logger:r})}catch(e){r.error(h.red(`Failed to sync assessment ${i+1} ${n?`of ${n} `:` `}to Transcend.\n\tAssessment Title: ${t.name}. Template Title: ${t.template.name}\n`),e)}},j=async({oneTrust:e,file:t,dryRun:n,transcend:i})=>{r.info(`Getting list of all assessments from OneTrust...`);let a=await T({oneTrust:e}),o={};await b(Array.from({length:Math.ceil(a.length/5)},(e,t)=>a.slice(t*5,(t+1)*5)),async(s,c)=>{let l=[];await y(s,async(t,n)=>{let i=5*c+n+1;r.info(`[assessment ${i} of ${a.length}]: fetching details...`);let{templateName:s,assessmentId:u}=t,f=await E({oneTrust:e,assessmentId:u}),p=f.createdBy.id,m=o[p];if(!m){r.info(`[assessment ${i} of ${a.length}]: fetching creator...`);try{m=await O({oneTrust:e,userId:p}),o[p]=m}catch(e){r.warn(h.yellow(`[assessment ${i} of ${a.length}]: failed to fetch form creator.\tcreatorId: ${p}. Assessment Title: ${t.name}. Template Title: ${s}`),e)}}let{approvers:g}=f,_=[];g.length>0&&(r.info(`[assessment ${i} of ${a.length}]: fetching approvers...`),_=await y(g.map(({id:e})=>e),async n=>{try{let t=o[n];return t||(t=await O({oneTrust:e,userId:n}),o[n]=t),[t]}catch(e){return r.warn(h.yellow(`[assessment ${i} of ${a.length}]: failed to fetch a form approver.\tapproverId: ${n}. Assessment Title: ${t.name}. 
Template Title: ${s}`),e),[]}},{concurrency:5}));let{respondents:v}=f,b=v.filter(e=>!e.name.includes(`@`)),x=[];b.length>0&&(r.info(`[assessment ${i} of ${a.length}]: fetching respondents...`),x=await y(b.map(({id:e})=>e),async n=>{try{let t=o[n];return t||(t=await O({oneTrust:e,userId:n}),o[n]=t),[t]}catch(e){return r.warn(h.yellow(`[assessment ${i} of ${a.length}]: failed to fetch a respondent.\trespondentId: ${n}. Assessment Title: ${t.name}. Template Title: ${s}`),e),[]}},{concurrency:5}));let S=[],C=d(f.sections.flatMap(e=>e.questions.flatMap(e=>(e.risks??[]).flatMap(e=>e.riskId))));C.length>0&&(r.info(`[assessment ${i} of ${a.length}]: fetching risks...`),S=await y(C,t=>D({oneTrust:e,riskId:t}),{concurrency:5}));let w=k({assessment:t,assessmentDetails:f,riskDetails:S,creatorDetails:m,approversDetails:_.flat(),respondentsDetails:x.flat()});l.push(w)},{concurrency:5}),await b(l,async(e,r)=>{let o=c*5+r;n&&t?w({assessment:e,index:o,total:a.length,file:t}):i&&await A({assessment:e,transcend:i,total:a.length,index:o})})})},M=({transcend:e,file:t})=>(r.info(`Getting list of all assessments from file ${t}...`),new Promise((n,i)=>{let o=m(t,{encoding:`utf-8`,highWaterMark:64*1024}),s=x.parse(`*`),c=0;o.pipe(s),s.on(`data`,async n=>{try{s.pause(),await A({assessment:f(a,n),transcend:e,index:c}),c+=1,s.resume()}catch(e){r.error(h.red(`Failed to parse the assessment ${c} from file '${t}': ${e.message}.`))}}),s.on(`end`,()=>{r.info(`Finished processing ${c} assessments from file ${t}`),n()}),s.on(`error`,e=>{r.error(h.red(`Error parsing file '${t}': ${e.message}`)),i(e)}),o.on(`error`,e=>{r.error(h.red(`Error reading file '${t}': ${e.message}`)),i(e)})})),N=({hostname:e,auth:t})=>S.extend({prefixUrl:`https://${e}`,headers:{accept:`application/json`,"content-type":`application/json`,authorization:`Bearer ${t}`}});async function P({hostname:a,oneTrustAuth:o,source:s,transcendAuth:c,transcendUrl:l,resource:u,file:d,dryRun:f,debug:p}){if(!f&&!c)throw Error('Must specify a "transcendAuth" parameter to sync resources to Transcend. e.g. --transcendAuth=${TRANSCEND_API_KEY}');if(f&&!d)throw Error(`Must set a "file" parameter when "dryRun" is "true". e.g. --file=./oneTrustAssessments.json`);if(d){let e=d.split(`.`);if(e.length<2)throw Error(`The "file" parameter has an invalid format. Expected a path with extensions. e.g. --file=./pathToFile.json.`);if(e.at(-1)!==n.Json)throw Error(`Expected the format of the "file" parameters '${d}' to be '${n.Json}', but got '${e.at(-1)}'.`)}if(s===t.OneTrust){if(!a)throw Error(`Missing required parameter "hostname". e.g. --hostname=customer.my.onetrust.com`);if(!o)throw Error(`Missing required parameter "oneTrustAuth". e.g. --oneTrustAuth=$ONE_TRUST_AUTH_TOKEN`)}else{if(!d)throw Error(`Must specify a "file" parameter to read the OneTrust assessments from. e.g. --source=./oneTrustAssessments.json`);if(f)throw Error(`Cannot read and write to a file simultaneously. Emit the "source" parameter or set it to ${t.OneTrust} if "dryRun" is enabled.`)}i(this.process.exit);let m=a&&o?N({hostname:a,auth:o}):void 0,g=l&&c?_(l,c):void 0;try{u===e.Assessments&&(s===t.OneTrust&&m?await j({oneTrust:m,file:d,dryRun:f,...g&&{transcend:g}}):s===t.File&&d&&g&&await M({file:d,transcend:g}))}catch(e){throw Error(`An error occurred syncing the resource ${u} from OneTrust: ${p?e.stack:e.message}`)}r.info(h.green(`Successfully synced OneTrust ${u} to ${f?`disk at "${d}"`:`Transcend`}!`))}export{P as syncOt};
+ //# sourceMappingURL=impl-BjIylEKQ.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"impl-BjIylEKQ.mjs","names":[],"sources":["../src/lib/oneTrust/helpers/oneTrustAssessmentToJson.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentToDisk.ts","../src/lib/oneTrust/endpoints/getListOfOneTrustAssessments.ts","../src/lib/oneTrust/endpoints/getOneTrustAssessment.ts","../src/lib/oneTrust/endpoints/getOneTrustRisk.ts","../src/lib/oneTrust/endpoints/getOneTrustUser.ts","../src/lib/oneTrust/helpers/enrichOneTrustAssessment.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentToTranscend.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentsFromOneTrust.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentsFromFile.ts","../src/lib/oneTrust/createOneTrustGotInstance.ts","../src/commands/migration/sync-ot/impl.ts"],"sourcesContent":["import { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\n\n/**\n * Converts the assessment into a json entry.\n *\n * @param param - information about the assessment and amount of entries\n * @returns a stringified json entry ready to be appended to a file\n */\nexport const oneTrustAssessmentToJson = ({\n assessment,\n index,\n total,\n wrap = true,\n}: {\n /** The assessment to convert */\n assessment: OneTrustEnrichedAssessment;\n /** The position of the assessment in the final Json object */\n index: number;\n /** The total amount of the assessments in the final Json object */\n total?: number;\n /** Whether to wrap every entry in brackets */\n wrap?: boolean;\n}): string => {\n let jsonEntry = '';\n // start with an opening bracket\n if (index === 0 || wrap) {\n jsonEntry = '[\\n';\n }\n\n const stringifiedAssessment = JSON.stringify(assessment);\n\n // Add comma for all items except the last one\n const comma = total && index < total - 1 && !wrap ? ',' : '';\n\n // write to file\n jsonEntry = `${jsonEntry + stringifiedAssessment + comma}\\n`;\n\n // end with closing bracket\n if ((total && index === total - 1) || wrap) {\n jsonEntry += '\\n]';\n }\n\n return jsonEntry;\n};\n","import fs from 'node:fs';\n\nimport { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\nimport colors from 'colors';\n\nimport { logger } from '../../../logger.js';\nimport { oneTrustAssessmentToJson } from './oneTrustAssessmentToJson.js';\n\n/**\n * Write the assessment to disk at the specified file path.\n *\n *\n * @param param - information about the assessment to write\n */\nexport const syncOneTrustAssessmentToDisk = ({\n file,\n assessment,\n index,\n total,\n}: {\n /** The file path to write the assessment to */\n file: string;\n /** The basic assessment */\n assessment: OneTrustEnrichedAssessment;\n /** The index of the assessment being written to the file */\n index: number;\n /** The total amount of assessments that we will write */\n total: number;\n}): void => {\n logger.info(\n colors.magenta(`Writing enriched assessment ${index + 1} of ${total} to file \"${file}\"...`),\n );\n\n if (index === 0) {\n fs.writeFileSync(\n file,\n oneTrustAssessmentToJson({\n assessment,\n index,\n total,\n wrap: false,\n }),\n );\n } else {\n fs.appendFileSync(\n file,\n oneTrustAssessmentToJson({\n assessment,\n index,\n total,\n wrap: false,\n }),\n );\n }\n};\n","import {\n OneTrustAssessment,\n OneTrustGetListOfAssessmentsResponse,\n} from '@transcend-io/privacy-types';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { Got } from 'got';\n\nimport { logger } from '../../../logger.js';\n\n/**\n * Fetch a list of all assessments from the OneTrust client.\n * ref: 
https://developer.onetrust.com/onetrust/reference/getallassessmentbasicdetailsusingget\n *\n * @param param - the information about the OneTrust client\n * @returns a list of OneTrustAssessment\n */\nexport const getListOfOneTrustAssessments = async ({\n oneTrust,\n}: {\n /** The OneTrust client instance */\n oneTrust: Got;\n}): Promise<OneTrustAssessment[]> => {\n let currentPage = 0;\n let totalPages = 1;\n let totalElements = 0;\n\n const allAssessments: OneTrustAssessment[] = [];\n\n while (currentPage < totalPages) {\n const { body } = await oneTrust.get(\n `api/assessment/v2/assessments?page=${currentPage}&size=2000`,\n );\n\n const { page, content } = decodeCodec(OneTrustGetListOfAssessmentsResponse, body);\n allAssessments.push(...(content ?? []));\n if (currentPage === 0) {\n totalPages = page?.totalPages ?? 0;\n totalElements = page?.totalElements ?? 0;\n }\n currentPage += 1;\n\n // log progress\n logger.info(`Fetched ${allAssessments.length} of ${totalElements} assessments.`);\n }\n\n return allAssessments;\n};\n","import { OneTrustGetAssessmentResponse } from '@transcend-io/privacy-types';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { Got } from 'got';\n\n/**\n * Retrieve details about a particular assessment.\n * ref: https://developer.onetrust.com/onetrust/reference/exportassessmentusingget\n *\n * @param param - the information about the OneTrust client and assessment to retrieve\n * @returns details about the assessment\n */\nexport const getOneTrustAssessment = async ({\n oneTrust,\n assessmentId,\n}: {\n /** The OneTrust client instance */\n oneTrust: Got;\n /** The ID of the assessment to retrieve */\n assessmentId: string;\n}): Promise<OneTrustGetAssessmentResponse> => {\n const { body } = await oneTrust.get(\n `api/assessment/v2/assessments/${assessmentId}/export?ExcludeSkippedQuestions=false`,\n );\n\n return decodeCodec(OneTrustGetAssessmentResponse, body);\n};\n","import { OneTrustGetRiskResponse } from '@transcend-io/privacy-types';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { Got } from 'got';\n\n/**\n * Retrieve details about a particular risk.\n * ref: https://developer.onetrust.com/onetrust/reference/getriskusingget\n *\n * @param param - the information about the OneTrust client and risk to retrieve\n * @returns the OneTrust risk\n */\nexport const getOneTrustRisk = async ({\n oneTrust,\n riskId,\n}: {\n /** The OneTrust client instance */\n oneTrust: Got;\n /** The ID of the OneTrust risk to retrieve */\n riskId: string;\n}): Promise<OneTrustGetRiskResponse> => {\n const { body } = await oneTrust.get(`api/risk/v2/risks/${riskId}`);\n\n return decodeCodec(OneTrustGetRiskResponse, body);\n};\n","import { OneTrustGetUserResponse } from '@transcend-io/privacy-types';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { Got } from 'got';\n\n/**\n * Retrieve details about a particular user.\n * ref: https://developer.onetrust.com/onetrust/reference/getriskusingget\n *\n * @param param - the information about the OneTrust client and risk to retrieve\n * @returns the OneTrust risk\n */\nexport const getOneTrustUser = async ({\n oneTrust,\n userId,\n}: {\n /** The OneTrust client instance */\n oneTrust: Got;\n /** The ID of the OneTrust user to retrieve */\n userId: string;\n}): Promise<OneTrustGetUserResponse> => {\n const { body } = await oneTrust.get(`api/scim/v2/Users/${userId}`);\n\n return decodeCodec(OneTrustGetUserResponse, body);\n};\n","import {\n OneTrustAssessment,\n OneTrustEnrichedAssessment,\n 
OneTrustGetAssessmentResponse,\n OneTrustGetRiskResponse,\n OneTrustGetUserResponse,\n} from '@transcend-io/privacy-types';\nimport { keyBy } from 'lodash-es';\n\n/**\n * Merge the assessment, assessmentDetails, and riskDetails into one object.\n *\n * @param param - the assessment and risk information\n * @returns the assessment enriched with details and risk information\n */\nexport const enrichOneTrustAssessment = ({\n assessment,\n assessmentDetails,\n riskDetails,\n creatorDetails,\n approversDetails,\n respondentsDetails,\n}: {\n /** The OneTrust risk details */\n riskDetails: OneTrustGetRiskResponse[];\n /** The OneTrust assessment as returned from Get List of Assessments endpoint */\n assessment: OneTrustAssessment;\n /** The OneTrust assessment details */\n assessmentDetails: OneTrustGetAssessmentResponse;\n /** The OneTrust assessment creator details */\n creatorDetails: OneTrustGetUserResponse;\n /** The OneTrust assessment approvers details */\n approversDetails: OneTrustGetUserResponse[];\n /** The OneTrust assessment internal respondents details */\n respondentsDetails: OneTrustGetUserResponse[];\n}): OneTrustEnrichedAssessment => {\n const riskDetailsById = keyBy(riskDetails, 'id');\n const { sections, createdBy, ...restAssessmentDetails } = assessmentDetails;\n const sectionsWithEnrichedRisk = sections.map((section) => {\n const { questions, ...restSection } = section;\n const enrichedQuestions = questions.map((question) => {\n const { risks, ...restQuestion } = question;\n const enrichedRisks = (risks ?? []).map((risk) => {\n const details = riskDetailsById[risk.riskId];\n return {\n ...risk,\n ...details,\n level: risk.level,\n impactLevel: risk.impactLevel ?? 0,\n };\n });\n return {\n ...restQuestion,\n risks: enrichedRisks,\n };\n });\n return {\n ...restSection,\n questions: enrichedQuestions,\n };\n });\n\n // grab creator details\n const enrichedCreatedBy = {\n ...createdBy,\n active: creatorDetails?.active ?? false,\n userType: creatorDetails?.userType ?? 'Internal',\n emails: creatorDetails?.emails ?? [],\n title: creatorDetails?.title ?? null,\n givenName: creatorDetails?.name.givenName ?? null,\n familyName: creatorDetails?.name.familyName ?? null,\n };\n\n // grab approvers details\n const approverDetailsById = keyBy(approversDetails, 'id');\n const enrichedApprovers = assessmentDetails.approvers.flatMap((originalApprover) =>\n approverDetailsById[originalApprover.id]\n ? [\n {\n ...originalApprover,\n approver: {\n ...originalApprover.approver,\n active: approverDetailsById[originalApprover.id].active,\n userType: approverDetailsById[originalApprover.id].userType,\n emails: approverDetailsById[originalApprover.id].emails,\n title: approverDetailsById[originalApprover.id].title,\n givenName: approverDetailsById[originalApprover.id].name.givenName ?? null,\n familyName: approverDetailsById[originalApprover.id].name.familyName ?? null,\n },\n },\n ]\n : [],\n );\n\n // grab respondents details\n const respondentsDetailsById = keyBy(respondentsDetails, 'id');\n const enrichedRespondents = assessmentDetails.respondents\n .filter((r) => !r.name.includes('@')) // search only internal respondents\n .flatMap((respondent) =>\n respondentsDetailsById[respondent.id]\n ? 
[\n {\n ...respondent,\n active: respondentsDetailsById[respondent.id].active,\n userType: respondentsDetailsById[respondent.id].userType,\n emails: respondentsDetailsById[respondent.id].emails,\n title: respondentsDetailsById[respondent.id].title,\n givenName: respondentsDetailsById[respondent.id].name.givenName ?? null,\n familyName: respondentsDetailsById[respondent.id].name.familyName ?? null,\n },\n ]\n : [],\n );\n\n // combine everything into a single enriched assessment\n return {\n ...assessment,\n ...restAssessmentDetails,\n approvers: enrichedApprovers,\n respondents: enrichedRespondents,\n createdBy: enrichedCreatedBy,\n sections: sectionsWithEnrichedRisk,\n };\n};\n","import { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\nimport { makeGraphQLRequest, IMPORT_ONE_TRUST_ASSESSMENT_FORMS } from '@transcend-io/sdk';\nimport colors from 'colors';\nimport { GraphQLClient } from 'graphql-request';\n\nimport { ImportOnetrustAssessmentsInput } from '../../../codecs.js';\nimport { logger } from '../../../logger.js';\nimport { oneTrustAssessmentToJson } from './oneTrustAssessmentToJson.js';\n\nexport interface AssessmentForm {\n /** ID of Assessment Form */\n id: string;\n /** Title of Assessment Form */\n name: string;\n}\n\n/**\n * Write the assessment to a Transcend instance.\n *\n *\n * @param param - information about the assessment and Transcend instance to write to\n */\nexport const syncOneTrustAssessmentToTranscend = async ({\n transcend,\n assessment,\n total,\n index,\n}: {\n /** the Transcend client instance */\n transcend: GraphQLClient;\n /** the assessment to sync to Transcend */\n assessment: OneTrustEnrichedAssessment;\n /** The index of the assessment being written to the file */\n index: number;\n /** The total amount of assessments that we will write */\n total?: number;\n}): Promise<void> => {\n logger.info(\n colors.magenta(\n `Writing enriched assessment ${index + 1} ${total ? `of ${total} ` : ' '}to Transcend...`,\n ),\n );\n\n // convert the OneTrust assessment object into a json record\n const json = oneTrustAssessmentToJson({\n assessment,\n index,\n total,\n });\n\n // transform the json record into a valid input to the mutation\n const input: ImportOnetrustAssessmentsInput = {\n json,\n };\n\n try {\n await makeGraphQLRequest<{\n /** the importOneTrustAssessmentForms mutation */\n importOneTrustAssessmentForms: {\n /** Created Assessment Forms */\n assessmentForms: AssessmentForm[];\n };\n }>(transcend, IMPORT_ONE_TRUST_ASSESSMENT_FORMS, {\n variables: { input },\n logger,\n });\n } catch (error) {\n logger.error(\n colors.red(\n `Failed to sync assessment ${index + 1} ${total ? `of ${total} ` : ' '}to Transcend.\\n` +\n `\\tAssessment Title: ${assessment.name}. 
Template Title: ${assessment.template.name}\\n`,\n ),\n error,\n );\n }\n};\n","import {\n OneTrustAssessmentQuestion,\n OneTrustAssessmentSection,\n OneTrustEnrichedAssessment,\n OneTrustGetRiskResponse,\n OneTrustGetUserResponse,\n} from '@transcend-io/privacy-types';\nimport { mapSeries, map } from '@transcend-io/utils';\nimport colors from 'colors';\nimport type { Got } from 'got';\nimport { GraphQLClient } from 'graphql-request';\nimport { uniq } from 'lodash-es';\n\nimport { logger } from '../../../logger.js';\nimport {\n getListOfOneTrustAssessments,\n getOneTrustAssessment,\n getOneTrustRisk,\n getOneTrustUser,\n} from '../endpoints/index.js';\nimport { enrichOneTrustAssessment } from './enrichOneTrustAssessment.js';\nimport { syncOneTrustAssessmentToDisk } from './syncOneTrustAssessmentToDisk.js';\nimport { syncOneTrustAssessmentToTranscend } from './syncOneTrustAssessmentToTranscend.js';\n\nexport interface AssessmentForm {\n /** ID of Assessment Form */\n id: string;\n /** Title of Assessment Form */\n name: string;\n}\n\n/**\n * Reads all the assessments from a OneTrust instance and syncs them to Transcend or to Disk.\n *\n * @param param - the information about the assessment, its OneTrust source, and destination (disk or Transcend)\n */\nexport const syncOneTrustAssessmentsFromOneTrust = async ({\n oneTrust,\n file,\n dryRun,\n transcend,\n}: {\n /** the OneTrust client instance */\n oneTrust: Got;\n /** the Transcend client instance */\n transcend?: GraphQLClient;\n /** Whether to write to file instead of syncing to Transcend */\n dryRun: boolean;\n /** the path to the file in case dryRun is true */\n file?: string;\n}): Promise<void> => {\n // fetch the list of all assessments in the OneTrust organization\n logger.info('Getting list of all assessments from OneTrust...');\n const assessments = await getListOfOneTrustAssessments({ oneTrust });\n\n // a cache of OneTrust users so we avoid requesting already fetched users\n const oneTrustCachedUsers: Record<string, OneTrustGetUserResponse> = {};\n\n // split all assessments in batches, so we can process some of steps in parallel\n const BATCH_SIZE = 5;\n const assessmentBatches = Array.from(\n {\n length: Math.ceil(assessments.length / BATCH_SIZE),\n },\n (_, i) => assessments.slice(i * BATCH_SIZE, (i + 1) * BATCH_SIZE),\n );\n\n // process each batch and sync the batch right away so it's garbage collected and we don't run out of memory\n await mapSeries(assessmentBatches, async (assessmentBatch, batch) => {\n const batchEnrichedAssessments: OneTrustEnrichedAssessment[] = [];\n\n // fetch assessment details from OneTrust in parallel\n await map(\n assessmentBatch,\n async (assessment, index) => {\n const assessmentNumber = BATCH_SIZE * batch + index + 1;\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching details...`,\n );\n const { templateName, assessmentId } = assessment;\n const assessmentDetails = await getOneTrustAssessment({\n oneTrust,\n assessmentId,\n });\n // fetch assessment's creator information\n const creatorId = assessmentDetails.createdBy.id;\n let creator = oneTrustCachedUsers[creatorId];\n if (!creator) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching creator...`,\n );\n try {\n creator = await getOneTrustUser({\n oneTrust,\n userId: creatorId,\n });\n oneTrustCachedUsers[creatorId] = creator;\n } catch (error) {\n logger.warn(\n colors.yellow(\n `[assessment ${assessmentNumber} of ${assessments.length}]: failed to fetch form creator.` 
+\n `\\tcreatorId: ${creatorId}. Assessment Title: ${assessment.name}. Template Title: ${templateName}`,\n ),\n error,\n );\n }\n }\n\n // fetch assessment approvers information\n const { approvers } = assessmentDetails;\n let approversDetails: OneTrustGetUserResponse[][] = [];\n if (approvers.length > 0) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching approvers...`,\n );\n approversDetails = await map(\n approvers.map(({ id }) => id),\n async (userId) => {\n try {\n let approver = oneTrustCachedUsers[userId];\n if (!approver) {\n approver = await getOneTrustUser({ oneTrust, userId });\n oneTrustCachedUsers[userId] = approver;\n }\n return [approver];\n } catch (error) {\n logger.warn(\n colors.yellow(\n `[assessment ${assessmentNumber} of ${assessments.length}]: failed to fetch a form approver.` +\n `\\tapproverId: ${userId}. Assessment Title: ${assessment.name}. Template Title: ${templateName}`,\n ),\n error,\n );\n return [];\n }\n },\n { concurrency: 5 },\n );\n }\n\n // fetch assessment internal respondents information\n const { respondents } = assessmentDetails;\n // if a user is an internal respondents, their 'name' field can't be an email.\n const internalRespondents = respondents.filter((r) => !r.name.includes('@'));\n let respondentsDetails: OneTrustGetUserResponse[][] = [];\n if (internalRespondents.length > 0) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching respondents...`,\n );\n respondentsDetails = await map(\n internalRespondents.map(({ id }) => id),\n async (userId) => {\n try {\n let respondent = oneTrustCachedUsers[userId];\n if (!respondent) {\n respondent = await getOneTrustUser({ oneTrust, userId });\n oneTrustCachedUsers[userId] = respondent;\n }\n return [respondent];\n } catch (error) {\n logger.warn(\n colors.yellow(\n `[assessment ${assessmentNumber} of ${assessments.length}]: failed to fetch a respondent.` +\n `\\trespondentId: ${userId}. Assessment Title: ${assessment.name}. Template Title: ${templateName}`,\n ),\n error,\n );\n return [];\n }\n },\n { concurrency: 5 },\n );\n }\n\n // fetch assessment risk information\n let riskDetails: OneTrustGetRiskResponse[] = [];\n const riskIds = uniq(\n assessmentDetails.sections.flatMap((s: OneTrustAssessmentSection) =>\n s.questions.flatMap((q: OneTrustAssessmentQuestion) =>\n (q.risks ?? 
[]).flatMap((r) => r.riskId),\n ),\n ),\n );\n if (riskIds.length > 0) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching risks...`,\n );\n riskDetails = await map(\n riskIds,\n (riskId) => getOneTrustRisk({ oneTrust, riskId: riskId as string }),\n {\n concurrency: 5,\n },\n );\n }\n\n // enrich the assessments with user and risk details\n const enrichedAssessment = enrichOneTrustAssessment({\n assessment,\n assessmentDetails,\n riskDetails,\n creatorDetails: creator,\n approversDetails: approversDetails.flat(),\n respondentsDetails: respondentsDetails.flat(),\n });\n\n batchEnrichedAssessments.push(enrichedAssessment);\n },\n { concurrency: BATCH_SIZE },\n );\n\n // sync assessments in series to avoid concurrency bugs\n await mapSeries(batchEnrichedAssessments, async (enrichedAssessment, index) => {\n // the assessment's global index takes its batch into consideration\n const globalIndex = batch * BATCH_SIZE + index;\n\n if (dryRun && file) {\n // sync to file\n syncOneTrustAssessmentToDisk({\n assessment: enrichedAssessment,\n index: globalIndex,\n total: assessments.length,\n file,\n });\n } else if (transcend) {\n // sync to transcend\n await syncOneTrustAssessmentToTranscend({\n assessment: enrichedAssessment,\n transcend,\n total: assessments.length,\n index: globalIndex,\n });\n }\n });\n });\n};\n","import { createReadStream } from 'node:fs';\n\nimport { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport colors from 'colors';\nimport { GraphQLClient } from 'graphql-request';\nimport JSONStream from 'JSONStream';\n\nimport { logger } from '../../../logger.js';\nimport { syncOneTrustAssessmentToTranscend } from './syncOneTrustAssessmentToTranscend.js';\n\n/**\n * Reads assessments from a file and syncs them to Transcend.\n *\n * @param param - the information about the source file and Transcend instance to write them to.\n */\nexport const syncOneTrustAssessmentsFromFile = ({\n transcend,\n file,\n}: {\n /** the Transcend client instance */\n transcend: GraphQLClient;\n /** The name of the file from which to read the OneTrust assessments */\n file: string;\n}): Promise<void> => {\n logger.info(`Getting list of all assessments from file ${file}...`);\n\n return new Promise((resolve, reject) => {\n // Create a readable stream from the file\n const fileStream = createReadStream(file, {\n encoding: 'utf-8',\n highWaterMark: 64 * 1024, // 64KB chunks\n });\n\n // Create a JSONStream parser to parse the array of OneTrust assessments from the file\n const parser = JSONStream.parse('*'); // '*' matches each element in the root array\n\n let index = 0;\n\n // Pipe the file stream into the JSON parser\n fileStream.pipe(parser);\n\n // Handle each parsed assessment object\n parser.on('data', async (assessment) => {\n try {\n // Pause the stream while processing to avoid overwhelming memory\n parser.pause();\n\n // Decode and validate the assessment\n const parsedAssessment = decodeCodec(OneTrustEnrichedAssessment, assessment);\n\n // Sync the assessment to transcend\n await syncOneTrustAssessmentToTranscend({\n assessment: parsedAssessment,\n transcend,\n index,\n });\n\n index += 1;\n\n // Resume the stream after processing\n parser.resume();\n } catch (e) {\n // if failed to parse a line, report error and continue\n logger.error(\n colors.red(`Failed to parse the assessment ${index} from file '${file}': ${e.message}.`),\n );\n }\n });\n\n // Handle completion\n parser.on('end', 
() => {\n logger.info(`Finished processing ${index} assessments from file ${file}`);\n resolve();\n });\n\n // Handle stream or parsing errors\n parser.on('error', (error) => {\n logger.error(colors.red(`Error parsing file '${file}': ${error.message}`));\n reject(error);\n });\n\n fileStream.on('error', (error) => {\n logger.error(colors.red(`Error reading file '${file}': ${error.message}`));\n reject(error);\n });\n });\n};\n","import got, { Got } from 'got';\n\n/**\n * Instantiate an instance of got that is capable of making requests to OneTrust\n *\n * @param param - information about the OneTrust URL\n * @returns The instance of got that is capable of making requests to the customer ingress\n */\nexport const createOneTrustGotInstance = ({\n hostname,\n auth,\n}: {\n /** Hostname of the OneTrust API */\n hostname: string;\n /** The OAuth access token */\n auth: string;\n}): Got =>\n got.extend({\n prefixUrl: `https://${hostname}`,\n headers: {\n accept: 'application/json',\n 'content-type': 'application/json',\n authorization: `Bearer ${auth}`,\n },\n });\n","import { buildTranscendGraphQLClient } from '@transcend-io/sdk';\nimport colors from 'colors';\n\nimport type { LocalContext } from '../../../context.js';\nimport { OneTrustFileFormat, OneTrustPullResource, OneTrustPullSource } from '../../../enums.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport {\n syncOneTrustAssessmentsFromFile,\n syncOneTrustAssessmentsFromOneTrust,\n} from '../../../lib/oneTrust/helpers/index.js';\nimport { createOneTrustGotInstance } from '../../../lib/oneTrust/index.js';\nimport { logger } from '../../../logger.js';\n\n// Command flag interface\nexport interface SyncOtCommandFlags {\n hostname?: string;\n oneTrustAuth?: string;\n source: OneTrustPullSource;\n transcendAuth?: string;\n transcendUrl: string;\n file?: string;\n resource: OneTrustPullResource;\n dryRun: boolean;\n debug: boolean;\n}\n\n// Command implementation\nexport async function syncOt(\n this: LocalContext,\n {\n hostname,\n oneTrustAuth,\n source,\n transcendAuth,\n transcendUrl,\n resource,\n file,\n dryRun,\n debug,\n }: SyncOtCommandFlags,\n): Promise<void> {\n // Must be able to authenticate to transcend to sync resources to it\n if (!dryRun && !transcendAuth) {\n throw new Error(\n // eslint-disable-next-line no-template-curly-in-string\n 'Must specify a \"transcendAuth\" parameter to sync resources to Transcend. e.g. --transcendAuth=${TRANSCEND_API_KEY}',\n );\n }\n\n // If trying to sync to disk, must specify a file path\n if (dryRun && !file) {\n throw new Error(\n 'Must set a \"file\" parameter when \"dryRun\" is \"true\". e.g. --file=./oneTrustAssessments.json',\n );\n }\n\n if (file) {\n const splitFile = file.split('.');\n if (splitFile.length < 2) {\n throw new Error(\n 'The \"file\" parameter has an invalid format. Expected a path with extensions. e.g. --file=./pathToFile.json.',\n );\n }\n if (splitFile.at(-1) !== OneTrustFileFormat.Json) {\n throw new Error(\n `Expected the format of the \"file\" parameters '${file}' to be '${\n OneTrustFileFormat.Json\n }', but got '${splitFile.at(-1)}'.`,\n );\n }\n }\n\n // if reading assessments from a OneTrust\n if (source === OneTrustPullSource.OneTrust) {\n // must specify the OneTrust hostname\n if (!hostname) {\n throw new Error(\n 'Missing required parameter \"hostname\". e.g. 
--hostname=customer.my.onetrust.com',\n );\n }\n // must specify the OneTrust auth\n if (!oneTrustAuth) {\n throw new Error(\n 'Missing required parameter \"oneTrustAuth\". e.g. --oneTrustAuth=$ONE_TRUST_AUTH_TOKEN',\n );\n }\n } else {\n // if reading the assessments from a file, must specify a file to read from\n if (!file) {\n throw new Error(\n 'Must specify a \"file\" parameter to read the OneTrust assessments from. e.g. --source=./oneTrustAssessments.json',\n );\n }\n\n // Cannot try reading from file and save assessments to a file simultaneously\n if (dryRun) {\n throw new Error(\n 'Cannot read and write to a file simultaneously.' +\n ` Emit the \"source\" parameter or set it to ${OneTrustPullSource.OneTrust} if \"dryRun\" is enabled.`,\n );\n }\n }\n\n doneInputValidation(this.process.exit);\n\n // instantiate a client to talk to OneTrust\n const oneTrust =\n hostname && oneTrustAuth\n ? createOneTrustGotInstance({\n hostname,\n auth: oneTrustAuth,\n })\n : undefined;\n\n // instantiate a client to talk to Transcend\n const transcend =\n transcendUrl && transcendAuth\n ? buildTranscendGraphQLClient(transcendUrl, transcendAuth)\n : undefined;\n\n try {\n if (resource === OneTrustPullResource.Assessments) {\n if (source === OneTrustPullSource.OneTrust && oneTrust) {\n await syncOneTrustAssessmentsFromOneTrust({\n oneTrust,\n file,\n dryRun,\n ...(transcend && { transcend }),\n });\n } else if (source === OneTrustPullSource.File && file && transcend) {\n await syncOneTrustAssessmentsFromFile({ file, transcend });\n }\n }\n } catch (err) {\n throw new Error(\n `An error occurred syncing the resource ${resource} from OneTrust: ${\n debug ? err.stack : err.message\n }`,\n );\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced OneTrust ${resource} to ${dryRun ? 
`disk at \"${file}\"` : 'Transcend'}!`,\n ),\n );\n}\n"],"mappings":"svBAQA,MAAa,GAA4B,CACvC,aACA,QACA,QACA,OAAO,MAUK,CACZ,IAAI,EAAY,IAEZ,IAAU,GAAK,KACjB,EAAY;GAGd,IAAM,EAAwB,KAAK,UAAU,EAAW,CAGlD,EAAQ,GAAS,EAAQ,EAAQ,GAAK,CAAC,EAAO,IAAM,GAU1D,MAPA,GAAY,GAAG,EAAY,EAAwB,EAAM,KAGpD,GAAS,IAAU,EAAQ,GAAM,KACpC,GAAa;IAGR,GC5BI,GAAgC,CAC3C,OACA,aACA,QACA,WAUU,CACV,EAAO,KACL,EAAO,QAAQ,+BAA+B,EAAQ,EAAE,MAAM,EAAM,YAAY,EAAK,MAAM,CAC5F,CAEG,IAAU,EACZ,EAAG,cACD,EACA,EAAyB,CACvB,aACA,QACA,QACA,KAAM,GACP,CAAC,CACH,CAED,EAAG,eACD,EACA,EAAyB,CACvB,aACA,QACA,QACA,KAAM,GACP,CAAC,CACH,ECpCQ,EAA+B,MAAO,CACjD,cAImC,CACnC,IAAI,EAAc,EACd,EAAa,EACb,EAAgB,EAEd,EAAuC,EAAE,CAE/C,KAAO,EAAc,GAAY,CAC/B,GAAM,CAAE,QAAS,MAAM,EAAS,IAC9B,sCAAsC,EAAY,YACnD,CAEK,CAAE,OAAM,WAAY,EAAY,EAAsC,EAAK,CACjF,EAAe,KAAK,GAAI,GAAW,EAAE,CAAE,CACnC,IAAgB,IAClB,EAAa,GAAM,YAAc,EACjC,EAAgB,GAAM,eAAiB,GAEzC,GAAe,EAGf,EAAO,KAAK,WAAW,EAAe,OAAO,MAAM,EAAc,eAAe,CAGlF,OAAO,GClCI,EAAwB,MAAO,CAC1C,WACA,kBAM4C,CAC5C,GAAM,CAAE,QAAS,MAAM,EAAS,IAC9B,iCAAiC,EAAa,uCAC/C,CAED,OAAO,EAAY,EAA+B,EAAK,ECb5C,EAAkB,MAAO,CACpC,WACA,YAMsC,CACtC,GAAM,CAAE,QAAS,MAAM,EAAS,IAAI,qBAAqB,IAAS,CAElE,OAAO,EAAY,EAAyB,EAAK,ECXtC,EAAkB,MAAO,CACpC,WACA,YAMsC,CACtC,GAAM,CAAE,QAAS,MAAM,EAAS,IAAI,qBAAqB,IAAS,CAElE,OAAO,EAAY,EAAyB,EAAK,ECPtC,GAA4B,CACvC,aACA,oBACA,cACA,iBACA,mBACA,wBAcgC,CAChC,IAAM,EAAkB,EAAM,EAAa,KAAK,CAC1C,CAAE,WAAU,YAAW,GAAG,GAA0B,EACpD,EAA2B,EAAS,IAAK,GAAY,CACzD,GAAM,CAAE,YAAW,GAAG,GAAgB,EAChC,EAAoB,EAAU,IAAK,GAAa,CACpD,GAAM,CAAE,QAAO,GAAG,GAAiB,EAC7B,GAAiB,GAAS,EAAE,EAAE,IAAK,GAAS,CAChD,IAAM,EAAU,EAAgB,EAAK,QACrC,MAAO,CACL,GAAG,EACH,GAAG,EACH,MAAO,EAAK,MACZ,YAAa,EAAK,aAAe,EAClC,EACD,CACF,MAAO,CACL,GAAG,EACH,MAAO,EACR,EACD,CACF,MAAO,CACL,GAAG,EACH,UAAW,EACZ,EACD,CAGI,EAAoB,CACxB,GAAG,EACH,OAAQ,GAAgB,QAAU,GAClC,SAAU,GAAgB,UAAY,WACtC,OAAQ,GAAgB,QAAU,EAAE,CACpC,MAAO,GAAgB,OAAS,KAChC,UAAW,GAAgB,KAAK,WAAa,KAC7C,WAAY,GAAgB,KAAK,YAAc,KAChD,CAGK,EAAsB,EAAM,EAAkB,KAAK,CACnD,EAAoB,EAAkB,UAAU,QAAS,GAC7D,EAAoB,EAAiB,IACjC,CACE,CACE,GAAG,EACH,SAAU,CACR,GAAG,EAAiB,SACpB,OAAQ,EAAoB,EAAiB,IAAI,OACjD,SAAU,EAAoB,EAAiB,IAAI,SACnD,OAAQ,EAAoB,EAAiB,IAAI,OACjD,MAAO,EAAoB,EAAiB,IAAI,MAChD,UAAW,EAAoB,EAAiB,IAAI,KAAK,WAAa,KACtE,WAAY,EAAoB,EAAiB,IAAI,KAAK,YAAc,KACzE,CACF,CACF,CACD,EAAE,CACP,CAGK,EAAyB,EAAM,EAAoB,KAAK,CACxD,EAAsB,EAAkB,YAC3C,OAAQ,GAAM,CAAC,EAAE,KAAK,SAAS,IAAI,CAAC,CACpC,QAAS,GACR,EAAuB,EAAW,IAC9B,CACE,CACE,GAAG,EACH,OAAQ,EAAuB,EAAW,IAAI,OAC9C,SAAU,EAAuB,EAAW,IAAI,SAChD,OAAQ,EAAuB,EAAW,IAAI,OAC9C,MAAO,EAAuB,EAAW,IAAI,MAC7C,UAAW,EAAuB,EAAW,IAAI,KAAK,WAAa,KACnE,WAAY,EAAuB,EAAW,IAAI,KAAK,YAAc,KACtE,CACF,CACD,EAAE,CACP,CAGH,MAAO,CACL,GAAG,EACH,GAAG,EACH,UAAW,EACX,YAAa,EACb,UAAW,EACX,SAAU,EACX,ECpGU,EAAoC,MAAO,CACtD,YACA,aACA,QACA,WAUmB,CACnB,EAAO,KACL,EAAO,QACL,+BAA+B,EAAQ,EAAE,GAAG,EAAQ,MAAM,EAAM,GAAK,IAAI,iBAC1E,CACF,CAUD,IAAM,EAAwC,CAC5C,KARW,EAAyB,CACpC,aACA,QACA,QACD,CAAC,CAKD,CAED,GAAI,CACF,MAAM,EAMH,EAAW,EAAmC,CAC/C,UAAW,CAAE,QAAO,CACpB,SACD,CAAC,OACK,EAAO,CACd,EAAO,MACL,EAAO,IACL,6BAA6B,EAAQ,EAAE,GAAG,EAAQ,MAAM,EAAM,GAAK,IAAI,qCAC9C,EAAW,KAAK,oBAAoB,EAAW,SAAS,KAAK,IACvF,CACD,EACD,GCrCQ,EAAsC,MAAO,CACxD,WACA,OACA,SACA,eAUmB,CAEnB,EAAO,KAAK,mDAAmD,CAC/D,IAAM,EAAc,MAAM,EAA6B,CAAE,WAAU,CAAC,CAG9D,EAA+D,EAAE,CAYvE,MAAM,EARoB,MAAM,KAC9B,CACE,OAAQ,KAAK,KAAK,EAAY,OAAS,EAAW,CACnD,EACA,EAAG,IAAM,EAAY,MAAM,EAAI,GAAa,EAAI,GAAK,EAAW,CAClE,CAGkC,MAAO,EAAiB,IAAU,CACnE,IAAM,EAAyD,EAAE,CAGjE,MAAM,EACJ,EACA,MAAO,EAAY,IAAU,CAC3B,IAAM,EAAmB,EAAa,EAAQ,EAAQ,EACtD,EAAO,KACL,eAAe,EAAiB,MAAM,EAAY,OAAO,wBAC1D,CACD,GAAM,CAAE,eAAc,gBAAiB,EACjC,EAAoB,MAAM,EAAsB,CACpD,WACA,eACD,CAAC,CAEI,EAAY,EAAkB,UAAU,
GAC1C,EAAU,EAAoB,GAClC,GAAI,CAAC,EAAS,CACZ,EAAO,KACL,eAAe,EAAiB,MAAM,EAAY,OAAO,wBAC1D,CACD,GAAI,CACF,EAAU,MAAM,EAAgB,CAC9B,WACA,OAAQ,EACT,CAAC,CACF,EAAoB,GAAa,QAC1B,EAAO,CACd,EAAO,KACL,EAAO,OACL,eAAe,EAAiB,MAAM,EAAY,OAAO,+CACvC,EAAU,sBAAsB,EAAW,KAAK,oBAAoB,IACvF,CACD,EACD,EAKL,GAAM,CAAE,aAAc,EAClB,EAAgD,EAAE,CAClD,EAAU,OAAS,IACrB,EAAO,KACL,eAAe,EAAiB,MAAM,EAAY,OAAO,0BAC1D,CACD,EAAmB,MAAM,EACvB,EAAU,KAAK,CAAE,QAAS,EAAG,CAC7B,KAAO,IAAW,CAChB,GAAI,CACF,IAAI,EAAW,EAAoB,GAKnC,OAJK,IACH,EAAW,MAAM,EAAgB,CAAE,WAAU,SAAQ,CAAC,CACtD,EAAoB,GAAU,GAEzB,CAAC,EAAS,OACV,EAAO,CAQd,OAPA,EAAO,KACL,EAAO,OACL,eAAe,EAAiB,MAAM,EAAY,OAAO,mDACtC,EAAO,sBAAsB,EAAW,KAAK,oBAAoB,IACrF,CACD,EACD,CACM,EAAE,GAGb,CAAE,YAAa,EAAG,CACnB,EAIH,GAAM,CAAE,eAAgB,EAElB,EAAsB,EAAY,OAAQ,GAAM,CAAC,EAAE,KAAK,SAAS,IAAI,CAAC,CACxE,EAAkD,EAAE,CACpD,EAAoB,OAAS,IAC/B,EAAO,KACL,eAAe,EAAiB,MAAM,EAAY,OAAO,4BAC1D,CACD,EAAqB,MAAM,EACzB,EAAoB,KAAK,CAAE,QAAS,EAAG,CACvC,KAAO,IAAW,CAChB,GAAI,CACF,IAAI,EAAa,EAAoB,GAKrC,OAJK,IACH,EAAa,MAAM,EAAgB,CAAE,WAAU,SAAQ,CAAC,CACxD,EAAoB,GAAU,GAEzB,CAAC,EAAW,OACZ,EAAO,CAQd,OAPA,EAAO,KACL,EAAO,OACL,eAAe,EAAiB,MAAM,EAAY,OAAO,kDACpC,EAAO,sBAAsB,EAAW,KAAK,oBAAoB,IACvF,CACD,EACD,CACM,EAAE,GAGb,CAAE,YAAa,EAAG,CACnB,EAIH,IAAI,EAAyC,EAAE,CACzC,EAAU,EACd,EAAkB,SAAS,QAAS,GAClC,EAAE,UAAU,QAAS,IAClB,EAAE,OAAS,EAAE,EAAE,QAAS,GAAM,EAAE,OAAO,CACzC,CACF,CACF,CACG,EAAQ,OAAS,IACnB,EAAO,KACL,eAAe,EAAiB,MAAM,EAAY,OAAO,sBAC1D,CACD,EAAc,MAAM,EAClB,EACC,GAAW,EAAgB,CAAE,WAAkB,SAAkB,CAAC,CACnE,CACE,YAAa,EACd,CACF,EAIH,IAAM,EAAqB,EAAyB,CAClD,aACA,oBACA,cACA,eAAgB,EAChB,iBAAkB,EAAiB,MAAM,CACzC,mBAAoB,EAAmB,MAAM,CAC9C,CAAC,CAEF,EAAyB,KAAK,EAAmB,EAEnD,CAAE,YAAa,EAAY,CAC5B,CAGD,MAAM,EAAU,EAA0B,MAAO,EAAoB,IAAU,CAE7E,IAAM,EAAc,EAAQ,EAAa,EAErC,GAAU,EAEZ,EAA6B,CAC3B,WAAY,EACZ,MAAO,EACP,MAAO,EAAY,OACnB,OACD,CAAC,CACO,GAET,MAAM,EAAkC,CACtC,WAAY,EACZ,YACA,MAAO,EAAY,OACnB,MAAO,EACR,CAAC,EAEJ,EACF,EC1NS,GAAmC,CAC9C,YACA,WAOA,EAAO,KAAK,6CAA6C,EAAK,KAAK,CAE5D,IAAI,SAAS,EAAS,IAAW,CAEtC,IAAM,EAAa,EAAiB,EAAM,CACxC,SAAU,QACV,cAAe,GAAK,KACrB,CAAC,CAGI,EAAS,EAAW,MAAM,IAAI,CAEhC,EAAQ,EAGZ,EAAW,KAAK,EAAO,CAGvB,EAAO,GAAG,OAAQ,KAAO,IAAe,CACtC,GAAI,CAEF,EAAO,OAAO,CAMd,MAAM,EAAkC,CACtC,WAJuB,EAAY,EAA4B,EAAW,CAK1E,YACA,QACD,CAAC,CAEF,GAAS,EAGT,EAAO,QAAQ,OACR,EAAG,CAEV,EAAO,MACL,EAAO,IAAI,kCAAkC,EAAM,cAAc,EAAK,KAAK,EAAE,QAAQ,GAAG,CACzF,GAEH,CAGF,EAAO,GAAG,UAAa,CACrB,EAAO,KAAK,uBAAuB,EAAM,yBAAyB,IAAO,CACzE,GAAS,EACT,CAGF,EAAO,GAAG,QAAU,GAAU,CAC5B,EAAO,MAAM,EAAO,IAAI,uBAAuB,EAAK,KAAK,EAAM,UAAU,CAAC,CAC1E,EAAO,EAAM,EACb,CAEF,EAAW,GAAG,QAAU,GAAU,CAChC,EAAO,MAAM,EAAO,IAAI,uBAAuB,EAAK,KAAK,EAAM,UAAU,CAAC,CAC1E,EAAO,EAAM,EACb,EACF,EC9ES,GAA6B,CACxC,WACA,UAOA,EAAI,OAAO,CACT,UAAW,WAAW,IACtB,QAAS,CACP,OAAQ,mBACR,eAAgB,mBAChB,cAAe,UAAU,IAC1B,CACF,CAAC,CCGJ,eAAsB,EAEpB,CACE,WACA,eACA,SACA,gBACA,eACA,WACA,OACA,SACA,SAEa,CAEf,GAAI,CAAC,GAAU,CAAC,EACd,MAAU,MAER,qHACD,CAIH,GAAI,GAAU,CAAC,EACb,MAAU,MACR,8FACD,CAGH,GAAI,EAAM,CACR,IAAM,EAAY,EAAK,MAAM,IAAI,CACjC,GAAI,EAAU,OAAS,EACrB,MAAU,MACR,8GACD,CAEH,GAAI,EAAU,GAAG,GAAG,GAAK,EAAmB,KAC1C,MAAU,MACR,iDAAiD,EAAK,WACpD,EAAmB,KACpB,cAAc,EAAU,GAAG,GAAG,CAAC,IACjC,CAKL,GAAI,IAAW,EAAmB,SAAU,CAE1C,GAAI,CAAC,EACH,MAAU,MACR,kFACD,CAGH,GAAI,CAAC,EACH,MAAU,MACR,uFACD,KAEE,CAEL,GAAI,CAAC,EACH,MAAU,MACR,kHACD,CAIH,GAAI,EACF,MAAU,MACR,4FAC+C,EAAmB,SAAS,0BAC5E,CAIL,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EACJ,GAAY,EACR,EAA0B,CACxB,WACA,KAAM,EACP,CAAC,CACF,IAAA,GAGA,EACJ,GAAgB,EACZ,EAA4B,EAAc,EAAc,CACxD,IAAA,GAEN,GAAI,CACE,IAAa,EAAqB,cAChC,IAAW,EAAmB,UAAY,EAC5C,MAAM,EAAoC,CACxC,WACA,OACA,SACA,GAAI,GAAa,CAAE,YAAW,CAC/B,CAAC,CACO,IAAW,EAAmB,MAAQ,GAAQ,GACvD,MAAM,E
AAgC,CAAE,OAAM,YAAW,CAAC,QAGvD,EAAK,CACZ,MAAU,MACR,0CAA0C,EAAS,kBACjD,EAAQ,EAAI,MAAQ,EAAI,UAE3B,CAIH,EAAO,KACL,EAAO,MACL,gCAAgC,EAAS,MAAM,EAAS,YAAY,EAAK,GAAK,YAAY,GAC3F,CACF"}
@@ -0,0 +1,2 @@
+ import{o as e}from"./enums-CyFTrzXY.mjs";import{r as t}from"./constants-muOBBQA_.mjs";import{n,t as r}from"./command-DnoHX-eW.mjs";import{t as i}from"./logger-Bj782ZYD.mjs";import{a}from"./readTranscendYaml-DVkQL2SC.mjs";import{n as o}from"./pullTranscendConfiguration-DjOELnPo.mjs";import{t as s}from"./validateTranscendAuth-Cuh2Qfdl.mjs";import{t as c}from"./done-input-validation-BcNBxhEs.mjs";import l from"node:fs";import{join as u}from"node:path";import d from"colors";import{buildTranscendGraphQLClient as f}from"@transcend-io/sdk";import{mapSeries as p}from"@transcend-io/utils";async function m({auth:m,resources:h=n,file:g,transcendUrl:_,dataSiloIds:v=[],integrationNames:y=[],trackerStatuses:b=r,pageSize:x,skipDatapoints:S,skipSubDatapoints:C,includeGuessedCategories:w,debug:T}){c(this.process.exit);let E=await s(m),D=h.includes(`all`)?Object.values(e):h;if(typeof E==`string`){try{let e=await o(f(_,E),{dataSiloIds:v,integrationNames:y,resources:D,pageSize:x,debug:T,skipDatapoints:S,skipSubDatapoints:C,includeGuessedCategories:w,trackerStatuses:b});i.info(d.magenta(`Writing configuration to file "${g}"...`)),a(g,e)}catch(e){i.error(d.red(`An error occurred syncing the schema: ${T?e.stack:e.message}`)),this.process.exit(1)}i.info(d.green(`Successfully synced yaml file to disk at ${g}! View at ${t}`))}else{if(!l.lstatSync(g).isDirectory())throw Error(`File is expected to be a folder when passing in a list of API keys to pull from. e.g. --file=./working/`);let e=[];await p(E,async(t,n)=>{let r=`[${n+1}/${E.length}][${t.organizationName}] `;i.info(d.magenta(`~~~\n\n${r}Attempting to pull configuration...\n\n~~~`));let s=f(_,t.apiKey);try{let e=await o(s,{dataSiloIds:v,integrationNames:y,resources:D,pageSize:x,debug:T,skipDatapoints:S,skipSubDatapoints:C,includeGuessedCategories:w,trackerStatuses:b}),n=u(g,`${t.organizationName}.yml`);i.info(d.magenta(`Writing configuration to file "${n}"...`)),a(n,e),i.info(d.green(`${r}Successfully pulled configuration!`))}catch(n){i.error(d.red(`${r}Failed to sync configuration. - ${n.message}`)),e.push(t.organizationName)}}),e.length>0&&(i.info(d.red(`Sync encountered errors for "${e.join(`,`)}". View output above for more information, or check out ${t}`)),this.process.exit(1))}}export{m as pull};
+ //# sourceMappingURL=impl-BwrEi3s7.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"impl-BwrEi3s7.mjs","names":[],"sources":["../src/commands/inventory/pull/impl.ts"],"sourcesContent":["import fs from 'node:fs';\nimport { join } from 'node:path';\n\nimport { ConsentTrackerStatus } from '@transcend-io/privacy-types';\nimport { buildTranscendGraphQLClient } from '@transcend-io/sdk';\nimport { mapSeries } from '@transcend-io/utils';\nimport colors from 'colors';\n\nimport { ADMIN_DASH_INTEGRATIONS } from '../../../constants.js';\nimport type { LocalContext } from '../../../context.js';\nimport { TranscendPullResource } from '../../../enums.js';\nimport { validateTranscendAuth } from '../../../lib/api-keys/index.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { pullTranscendConfiguration } from '../../../lib/graphql/index.js';\nimport { writeTranscendYaml } from '../../../lib/readTranscendYaml.js';\nimport { logger } from '../../../logger.js';\nimport { DEFAULT_CONSENT_TRACKER_STATUSES, DEFAULT_TRANSCEND_PULL_RESOURCES } from './command.js';\n\nexport interface PullCommandFlags {\n auth: string;\n resources?: (TranscendPullResource | 'all')[];\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n integrationNames?: string[];\n trackerStatuses?: ConsentTrackerStatus[];\n pageSize: number;\n skipDatapoints: boolean;\n skipSubDatapoints: boolean;\n includeGuessedCategories: boolean;\n debug: boolean;\n}\n\nexport async function pull(\n this: LocalContext,\n {\n auth,\n resources = DEFAULT_TRANSCEND_PULL_RESOURCES,\n file,\n transcendUrl,\n dataSiloIds = [],\n integrationNames = [],\n trackerStatuses = DEFAULT_CONSENT_TRACKER_STATUSES,\n pageSize,\n skipDatapoints,\n skipSubDatapoints,\n includeGuessedCategories,\n debug,\n }: PullCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n const resourcesToPull: TranscendPullResource[] = resources.includes('all')\n ? Object.values(TranscendPullResource)\n : (resources as TranscendPullResource[]);\n\n // Sync to Disk\n if (typeof apiKeyOrList === 'string') {\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKeyOrList);\n\n const configuration = await pullTranscendConfiguration(client, {\n dataSiloIds,\n integrationNames,\n resources: resourcesToPull,\n pageSize,\n debug,\n skipDatapoints,\n skipSubDatapoints,\n includeGuessedCategories,\n trackerStatuses,\n });\n\n logger.info(colors.magenta(`Writing configuration to file \"${file}\"...`));\n writeTranscendYaml(file, configuration);\n } catch (err) {\n logger.error(\n colors.red(`An error occurred syncing the schema: ${debug ? err.stack : err.message}`),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced yaml file to disk at ${file}! View at ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n } else {\n if (!fs.lstatSync(file).isDirectory()) {\n throw new Error(\n 'File is expected to be a folder when passing in a list of API keys to pull from. e.g. 
--file=./working/',\n );\n }\n\n const encounteredErrors: string[] = [];\n await mapSeries(apiKeyOrList, async (apiKey, ind) => {\n const prefix = `[${ind + 1}/${apiKeyOrList.length}][${apiKey.organizationName}] `;\n logger.info(colors.magenta(`~~~\\n\\n${prefix}Attempting to pull configuration...\\n\\n~~~`));\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKey.apiKey);\n\n try {\n const configuration = await pullTranscendConfiguration(client, {\n dataSiloIds,\n integrationNames,\n resources: resourcesToPull,\n pageSize,\n debug,\n skipDatapoints,\n skipSubDatapoints,\n includeGuessedCategories,\n trackerStatuses,\n });\n\n const filePath = join(file, `${apiKey.organizationName}.yml`);\n logger.info(colors.magenta(`Writing configuration to file \"${filePath}\"...`));\n writeTranscendYaml(filePath, configuration);\n\n logger.info(colors.green(`${prefix}Successfully pulled configuration!`));\n } catch (err) {\n logger.error(colors.red(`${prefix}Failed to sync configuration. - ${err.message}`));\n encounteredErrors.push(apiKey.organizationName);\n }\n });\n\n if (encounteredErrors.length > 0) {\n logger.info(\n colors.red(\n `Sync encountered errors for \"${encounteredErrors.join(\n ',',\n )}\". View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n }\n}\n"],"mappings":"2kBAiCA,eAAsB,EAEpB,CACE,OACA,YAAY,EACZ,OACA,eACA,cAAc,EAAE,CAChB,mBAAmB,EAAE,CACrB,kBAAkB,EAClB,WACA,iBACA,oBACA,2BACA,SAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAAe,MAAM,EAAsB,EAAK,CAEhD,EAA2C,EAAU,SAAS,MAAM,CACtE,OAAO,OAAO,EAAsB,CACnC,EAGL,GAAI,OAAO,GAAiB,SAAU,CACpC,GAAI,CAIF,IAAM,EAAgB,MAAM,EAFb,EAA4B,EAAc,EAAa,CAEP,CAC7D,cACA,mBACA,UAAW,EACX,WACA,QACA,iBACA,oBACA,2BACA,kBACD,CAAC,CAEF,EAAO,KAAK,EAAO,QAAQ,kCAAkC,EAAK,MAAM,CAAC,CACzE,EAAmB,EAAM,EAAc,OAChC,EAAK,CACZ,EAAO,MACL,EAAO,IAAI,yCAAyC,EAAQ,EAAI,MAAQ,EAAI,UAAU,CACvF,CACD,KAAK,QAAQ,KAAK,EAAE,CAItB,EAAO,KACL,EAAO,MACL,4CAA4C,EAAK,YAAY,IAC9D,CACF,KACI,CACL,GAAI,CAAC,EAAG,UAAU,EAAK,CAAC,aAAa,CACnC,MAAU,MACR,0GACD,CAGH,IAAM,EAA8B,EAAE,CACtC,MAAM,EAAU,EAAc,MAAO,EAAQ,IAAQ,CACnD,IAAM,EAAS,IAAI,EAAM,EAAE,GAAG,EAAa,OAAO,IAAI,EAAO,iBAAiB,IAC9E,EAAO,KAAK,EAAO,QAAQ,UAAU,EAAO,4CAA4C,CAAC,CAGzF,IAAM,EAAS,EAA4B,EAAc,EAAO,OAAO,CAEvE,GAAI,CACF,IAAM,EAAgB,MAAM,EAA2B,EAAQ,CAC7D,cACA,mBACA,UAAW,EACX,WACA,QACA,iBACA,oBACA,2BACA,kBACD,CAAC,CAEI,EAAW,EAAK,EAAM,GAAG,EAAO,iBAAiB,MAAM,CAC7D,EAAO,KAAK,EAAO,QAAQ,kCAAkC,EAAS,MAAM,CAAC,CAC7E,EAAmB,EAAU,EAAc,CAE3C,EAAO,KAAK,EAAO,MAAM,GAAG,EAAO,oCAAoC,CAAC,OACjE,EAAK,CACZ,EAAO,MAAM,EAAO,IAAI,GAAG,EAAO,kCAAkC,EAAI,UAAU,CAAC,CACnF,EAAkB,KAAK,EAAO,iBAAiB,GAEjD,CAEE,EAAkB,OAAS,IAC7B,EAAO,KACL,EAAO,IACL,gCAAgC,EAAkB,KAChD,IACD,CAAC,0DAA0D,IAC7D,CACF,CAED,KAAK,QAAQ,KAAK,EAAE"}
@@ -0,0 +1,3 @@
+ import{r as e}from"./constants-muOBBQA_.mjs";import{t}from"./logger-Bj782ZYD.mjs";import{a as n}from"./writeCsv-C4pjXGsD.mjs";import{t as r}from"./pullConsentManagerMetrics-zKgjc3Ap.mjs";import{t as i}from"./validateTranscendAuth-Cuh2Qfdl.mjs";import{t as a}from"./done-input-validation-BcNBxhEs.mjs";import o,{existsSync as s,mkdirSync as c}from"node:fs";import{join as l}from"node:path";import u from"colors";import{ConsentManagerMetricBin as d,buildTranscendGraphQLClient as f}from"@transcend-io/sdk";import{map as p,mapSeries as m}from"@transcend-io/utils";async function h({auth:h,start:g,end:_,folder:v,bin:y,transcendUrl:b}){let x=y;Object.values(d).includes(x)||(t.error(u.red(`Failed to parse argument "bin" with value "${y}"\nExpected one of: \n${Object.values(d).join(`
+ `)}`)),this.process.exit(1));let S=new Date(g),C=_?new Date(_):new Date;Number.isNaN(S.getTime())&&(t.error(u.red(`Start date provided is invalid date. Got --start="${g}" expected --start="01/01/2023"`)),this.process.exit(1)),Number.isNaN(C.getTime())&&(t.error(u.red(`End date provided is invalid date. Got --end="${_}" expected --end="01/01/2023"`)),this.process.exit(1)),S>C&&(t.error(u.red(`Got a start date "${S.toISOString()}" that was larger than the end date "${C.toISOString()}". Start date must be before end date.`)),this.process.exit(1)),a(this.process.exit);let w=await i(h);if(o.existsSync(v)&&!o.lstatSync(v).isDirectory()&&(t.error(u.red(`The provided argument "folder" was passed a file. expected: folder="./consent-metrics/"`)),this.process.exit(1)),s(v)||c(v),t.info(u.magenta(`Pulling consent metrics from start=${S.toString()} to end=${C.toISOString()} with bin size "${y}"`)),typeof w==`string`){try{let e=await r(f(b,w),{bin:x,start:S,end:C});await p(Object.entries(e),async([e,r])=>{await p(r,async({points:r,name:i})=>{let a=l(v,`${e}_${i}.csv`);t.info(u.magenta(`Writing configuration to file "${a}"...`)),await n(a,r.map(({key:e,value:t})=>({timestamp:e,value:t})))},{concurrency:5})},{concurrency:5})}catch(e){t.error(u.red(`An error occurred syncing the schema: ${e.message}`)),this.process.exit(1)}t.info(u.green(`Successfully synced consent metrics to disk in folder "${v}"! View at ${e}`))}else{let i=[];await m(w,async(e,a)=>{let o=`[${a+1}/${w.length}][${e.organizationName}] `;t.info(u.magenta(`~~~\n\n${o}Attempting to pull consent metrics...\n\n~~~`));let d=f(b,e.apiKey);try{let i=await r(d,{bin:x,start:S,end:C}),a=l(v,e.organizationName);s(a)||c(a),Object.entries(i).forEach(([e,r])=>{r.forEach(({points:r,name:i})=>{let o=l(a,`${e}_${i}.csv`);t.info(u.magenta(`Writing configuration to file "${o}"...`)),n(o,r.map(({key:e,value:t})=>({timestamp:e,value:t})))})}),t.info(u.green(`${o}Successfully pulled configuration!`))}catch(n){t.error(u.red(`${o}Failed to sync configuration.`),n),i.push(e.organizationName)}}),i.length>0&&(t.info(u.red(`Sync encountered errors for "${i.join(`,`)}". View output above for more information, or check out ${e}`)),this.process.exit(1))}}export{h as pullConsentMetrics};
+ //# sourceMappingURL=impl-C4AI1Fsj.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"impl-C4AI1Fsj.mjs","names":[],"sources":["../src/commands/consent/pull-consent-metrics/impl.ts"],"sourcesContent":["import fs, { existsSync, mkdirSync } from 'node:fs';\nimport { join } from 'node:path';\n\nimport { buildTranscendGraphQLClient, ConsentManagerMetricBin } from '@transcend-io/sdk';\nimport { map, mapSeries } from '@transcend-io/utils';\nimport colors from 'colors';\n\nimport { ADMIN_DASH_INTEGRATIONS } from '../../../constants.js';\nimport type { LocalContext } from '../../../context.js';\nimport { validateTranscendAuth } from '../../../lib/api-keys/index.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { pullConsentManagerMetrics } from '../../../lib/consent-manager/index.js';\nimport { writeCsv } from '../../../lib/helpers/index.js';\nimport { logger } from '../../../logger.js';\n\nexport interface PullConsentMetricsCommandFlags {\n auth: string;\n start: Date;\n end?: Date;\n folder: string;\n bin: string;\n transcendUrl: string;\n}\n\nexport async function pullConsentMetrics(\n this: LocalContext,\n { auth, start, end, folder, bin, transcendUrl }: PullConsentMetricsCommandFlags,\n): Promise<void> {\n // Validate bin\n const parsedBin = bin as ConsentManagerMetricBin;\n if (!Object.values(ConsentManagerMetricBin).includes(parsedBin)) {\n logger.error(\n colors.red(\n `Failed to parse argument \"bin\" with value \"${bin}\"\\n` +\n `Expected one of: \\n${Object.values(ConsentManagerMetricBin).join('\\n')}`,\n ),\n );\n this.process.exit(1);\n }\n\n // Parse the dates\n const startDate = new Date(start);\n const endDate = end ? new Date(end) : new Date();\n if (Number.isNaN(startDate.getTime())) {\n logger.error(\n colors.red(\n `Start date provided is invalid date. Got --start=\"${start}\" expected --start=\"01/01/2023\"`,\n ),\n );\n this.process.exit(1);\n }\n if (Number.isNaN(endDate.getTime())) {\n logger.error(\n colors.red(\n `End date provided is invalid date. Got --end=\"${end}\" expected --end=\"01/01/2023\"`,\n ),\n );\n this.process.exit(1);\n }\n if (startDate > endDate) {\n logger.error(\n colors.red(\n `Got a start date \"${startDate.toISOString()}\" that was larger than the end date \"${endDate.toISOString()}\". ` +\n 'Start date must be before end date.',\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Ensure folder either does not exist or is not a file\n if (fs.existsSync(folder) && !fs.lstatSync(folder).isDirectory()) {\n logger.error(\n colors.red(\n 'The provided argument \"folder\" was passed a file. 
expected: folder=\"./consent-metrics/\"',\n ),\n );\n this.process.exit(1);\n }\n\n // Create the folder if it does not exist\n if (!existsSync(folder)) {\n mkdirSync(folder);\n }\n\n logger.info(\n colors.magenta(\n `Pulling consent metrics from start=${startDate.toString()} to end=${endDate.toISOString()} with bin size \"${bin}\"`,\n ),\n );\n\n // Sync to Disk\n if (typeof apiKeyOrList === 'string') {\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKeyOrList);\n\n // Pull the metrics\n const configuration = await pullConsentManagerMetrics(client, {\n bin: parsedBin,\n start: startDate,\n end: endDate,\n });\n\n // Write to file\n await map(\n Object.entries(configuration),\n async ([metricName, metrics]) => {\n await map(\n metrics,\n async ({ points, name }) => {\n const file = join(folder, `${metricName}_${name}.csv`);\n logger.info(colors.magenta(`Writing configuration to file \"${file}\"...`));\n await writeCsv(\n file,\n points.map(({ key, value }: { key: string; value: string }) => ({\n timestamp: key,\n value,\n })),\n );\n },\n {\n concurrency: 5,\n },\n );\n },\n { concurrency: 5 },\n );\n } catch (err) {\n logger.error(colors.red(`An error occurred syncing the schema: ${err.message}`));\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced consent metrics to disk in folder \"${folder}\"! View at ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n } else {\n const encounteredErrors: string[] = [];\n await mapSeries(apiKeyOrList, async (apiKey, ind) => {\n const prefix = `[${ind + 1}/${apiKeyOrList.length}][${apiKey.organizationName}] `;\n logger.info(colors.magenta(`~~~\\n\\n${prefix}Attempting to pull consent metrics...\\n\\n~~~`));\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKey.apiKey);\n\n try {\n const configuration = await pullConsentManagerMetrics(client, {\n bin: parsedBin,\n start: startDate,\n end: endDate,\n });\n\n // ensure folder exists for that organization\n const subFolder = join(folder, apiKey.organizationName);\n if (!existsSync(subFolder)) {\n mkdirSync(subFolder);\n }\n\n // Write to file\n Object.entries(configuration).forEach(([metricName, metrics]) => {\n metrics.forEach(({ points, name }) => {\n const file = join(subFolder, `${metricName}_${name}.csv`);\n logger.info(colors.magenta(`Writing configuration to file \"${file}\"...`));\n writeCsv(\n file,\n points.map(({ key, value }: { key: string; value: string }) => ({\n timestamp: key,\n value,\n })),\n );\n });\n });\n\n logger.info(colors.green(`${prefix}Successfully pulled configuration!`));\n } catch (err) {\n logger.error(colors.red(`${prefix}Failed to sync configuration.`), err);\n encounteredErrors.push(apiKey.organizationName);\n }\n });\n\n if (encounteredErrors.length > 0) {\n logger.info(\n colors.red(\n `Sync encountered errors for \"${encounteredErrors.join(\n ',',\n )}\". 
View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n }\n}\n"],"mappings":"ijBAwBA,eAAsB,EAEpB,CAAE,OAAM,QAAO,MAAK,SAAQ,MAAK,gBAClB,CAEf,IAAM,EAAY,EACb,OAAO,OAAO,EAAwB,CAAC,SAAS,EAAU,GAC7D,EAAO,MACL,EAAO,IACL,8CAA8C,EAAI,wBAC1B,OAAO,OAAO,EAAwB,CAAC,KAAK;EAAK,GAC1E,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAItB,IAAM,EAAY,IAAI,KAAK,EAAM,CAC3B,EAAU,EAAM,IAAI,KAAK,EAAI,CAAG,IAAI,KACtC,OAAO,MAAM,EAAU,SAAS,CAAC,GACnC,EAAO,MACL,EAAO,IACL,qDAAqD,EAAM,iCAC5D,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAElB,OAAO,MAAM,EAAQ,SAAS,CAAC,GACjC,EAAO,MACL,EAAO,IACL,iDAAiD,EAAI,+BACtD,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAElB,EAAY,IACd,EAAO,MACL,EAAO,IACL,qBAAqB,EAAU,aAAa,CAAC,uCAAuC,EAAQ,aAAa,CAAC,wCAE3G,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAAe,MAAM,EAAsB,EAAK,CAwBtD,GArBI,EAAG,WAAW,EAAO,EAAI,CAAC,EAAG,UAAU,EAAO,CAAC,aAAa,GAC9D,EAAO,MACL,EAAO,IACL,0FACD,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAIjB,EAAW,EAAO,EACrB,EAAU,EAAO,CAGnB,EAAO,KACL,EAAO,QACL,sCAAsC,EAAU,UAAU,CAAC,UAAU,EAAQ,aAAa,CAAC,kBAAkB,EAAI,GAClH,CACF,CAGG,OAAO,GAAiB,SAAU,CACpC,GAAI,CAKF,IAAM,EAAgB,MAAM,EAHb,EAA4B,EAAc,EAAa,CAGR,CAC5D,IAAK,EACL,MAAO,EACP,IAAK,EACN,CAAC,CAGF,MAAM,EACJ,OAAO,QAAQ,EAAc,CAC7B,MAAO,CAAC,EAAY,KAAa,CAC/B,MAAM,EACJ,EACA,MAAO,CAAE,SAAQ,UAAW,CAC1B,IAAM,EAAO,EAAK,EAAQ,GAAG,EAAW,GAAG,EAAK,MAAM,CACtD,EAAO,KAAK,EAAO,QAAQ,kCAAkC,EAAK,MAAM,CAAC,CACzE,MAAM,EACJ,EACA,EAAO,KAAK,CAAE,MAAK,YAA6C,CAC9D,UAAW,EACX,QACD,EAAE,CACJ,EAEH,CACE,YAAa,EACd,CACF,EAEH,CAAE,YAAa,EAAG,CACnB,OACM,EAAK,CACZ,EAAO,MAAM,EAAO,IAAI,yCAAyC,EAAI,UAAU,CAAC,CAChF,KAAK,QAAQ,KAAK,EAAE,CAItB,EAAO,KACL,EAAO,MACL,0DAA0D,EAAO,aAAa,IAC/E,CACF,KACI,CACL,IAAM,EAA8B,EAAE,CACtC,MAAM,EAAU,EAAc,MAAO,EAAQ,IAAQ,CACnD,IAAM,EAAS,IAAI,EAAM,EAAE,GAAG,EAAa,OAAO,IAAI,EAAO,iBAAiB,IAC9E,EAAO,KAAK,EAAO,QAAQ,UAAU,EAAO,8CAA8C,CAAC,CAG3F,IAAM,EAAS,EAA4B,EAAc,EAAO,OAAO,CAEvE,GAAI,CACF,IAAM,EAAgB,MAAM,EAA0B,EAAQ,CAC5D,IAAK,EACL,MAAO,EACP,IAAK,EACN,CAAC,CAGI,EAAY,EAAK,EAAQ,EAAO,iBAAiB,CAClD,EAAW,EAAU,EACxB,EAAU,EAAU,CAItB,OAAO,QAAQ,EAAc,CAAC,SAAS,CAAC,EAAY,KAAa,CAC/D,EAAQ,SAAS,CAAE,SAAQ,UAAW,CACpC,IAAM,EAAO,EAAK,EAAW,GAAG,EAAW,GAAG,EAAK,MAAM,CACzD,EAAO,KAAK,EAAO,QAAQ,kCAAkC,EAAK,MAAM,CAAC,CACzE,EACE,EACA,EAAO,KAAK,CAAE,MAAK,YAA6C,CAC9D,UAAW,EACX,QACD,EAAE,CACJ,EACD,EACF,CAEF,EAAO,KAAK,EAAO,MAAM,GAAG,EAAO,oCAAoC,CAAC,OACjE,EAAK,CACZ,EAAO,MAAM,EAAO,IAAI,GAAG,EAAO,+BAA+B,CAAE,EAAI,CACvE,EAAkB,KAAK,EAAO,iBAAiB,GAEjD,CAEE,EAAkB,OAAS,IAC7B,EAAO,KACL,EAAO,IACL,gCAAgC,EAAkB,KAChD,IACD,CAAC,0DAA0D,IAC7D,CACF,CAED,KAAK,QAAQ,KAAK,EAAE"}
@@ -1,2 +1,2 @@
- import{t as e}from"./pushCronIdentifiersFromCsv-D2saGR5i.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({file:n,transcendUrl:r,auth:i,sombraAuth:a,dataSiloId:o}){t(this.process.exit),await e({file:n,transcendUrl:r,auth:i,sombraAuth:a,dataSiloId:o})}export{n as markIdentifiersCompleted};
- //# sourceMappingURL=impl-fqOKTw5J.mjs.map
+ import{t as e}from"./pushCronIdentifiersFromCsv-BZRA1n_8.mjs";import{t}from"./done-input-validation-BcNBxhEs.mjs";async function n({file:n,transcendUrl:r,auth:i,sombraAuth:a,dataSiloId:o}){t(this.process.exit),await e({file:n,transcendUrl:r,auth:i,sombraAuth:a,dataSiloId:o})}export{n as markIdentifiersCompleted};
+ //# sourceMappingURL=impl-CCAeEeMR.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-fqOKTw5J.mjs","names":[],"sources":["../src/commands/request/cron/mark-identifiers-completed/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../../context.js';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation.js';\nimport { pushCronIdentifiersFromCsv } from '../../../../lib/cron/index.js';\n\nexport interface MarkIdentifiersCompletedCommandFlags {\n file: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n dataSiloId: string;\n}\n\nexport async function markIdentifiersCompleted(\n this: LocalContext,\n { file, transcendUrl, auth, sombraAuth, dataSiloId }: MarkIdentifiersCompletedCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await pushCronIdentifiersFromCsv({\n file,\n transcendUrl,\n auth,\n sombraAuth,\n dataSiloId,\n });\n}\n"],"mappings":"kHAYA,eAAsB,EAEpB,CAAE,OAAM,eAAc,OAAM,aAAY,cACzB,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAM,EAA2B,CAC/B,OACA,eACA,OACA,aACA,aACD,CAAC"}
+ {"version":3,"file":"impl-CCAeEeMR.mjs","names":[],"sources":["../src/commands/request/cron/mark-identifiers-completed/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../../context.js';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation.js';\nimport { pushCronIdentifiersFromCsv } from '../../../../lib/cron/index.js';\n\nexport interface MarkIdentifiersCompletedCommandFlags {\n file: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n dataSiloId: string;\n}\n\nexport async function markIdentifiersCompleted(\n this: LocalContext,\n { file, transcendUrl, auth, sombraAuth, dataSiloId }: MarkIdentifiersCompletedCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await pushCronIdentifiersFromCsv({\n file,\n transcendUrl,\n auth,\n sombraAuth,\n dataSiloId,\n });\n}\n"],"mappings":"kHAYA,eAAsB,EAEpB,CAAE,OAAM,eAAc,OAAM,aAAY,cACzB,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAM,EAA2B,CAC/B,OACA,eACA,OACA,aACA,aACD,CAAC"}
@@ -1,2 +1,2 @@
1
- import{t as e}from"./logger-B-LXIf3U.mjs";import{t}from"./pullChunkedCustomSiloOutstandingIdentifiers-BT-GZpT1.mjs";import{i as n,s as r}from"./writeCsv-B51ulrVl.mjs";import{t as i}from"./done-input-validation-DLR0-MJ7.mjs";import{uniq as a}from"lodash-es";import o from"colors";async function s({file:s,transcendUrl:c,auth:l,sombraAuth:u,dataSiloId:d,actions:f,pageLimit:p,skipRequestCount:m,chunkSize:h}){m&&e.info(o.yellow(`Skipping request count as requested. This may help speed up the call.`)),(Number.isNaN(h)||h<=0||h%p!==0)&&(e.error(o.red(`Invalid chunk size: "${h}". Must be a positive integer that is a multiple of ${p}.`)),this.process.exit(1)),i(this.process.exit);let{baseName:g,extension:_}=n(s),v=0;await t({transcendUrl:c,apiPageSize:p,savePageSize:h,onSave:async t=>{let n=`${g}-${v}${_}`;return e.info(o.blue(`Saving ${t.length} identifiers to file "${n}"`)),await r(n,t,a(t.map(e=>Object.keys(e)).flat())),e.info(o.green(`Successfully wrote ${t.length} identifiers to file "${n}"`)),v+=1,Promise.resolve()},actions:f,auth:l,sombraAuth:u,dataSiloId:d,skipRequestCount:m})}export{s as pullIdentifiers};
2
- //# sourceMappingURL=impl-P_NDC3cX.mjs.map
1
+ import{t as e}from"./logger-Bj782ZYD.mjs";import{t}from"./pullChunkedCustomSiloOutstandingIdentifiers-BW5Vws25.mjs";import{i as n,s as r}from"./writeCsv-C4pjXGsD.mjs";import{t as i}from"./done-input-validation-BcNBxhEs.mjs";import{uniq as a}from"lodash-es";import o from"colors";async function s({file:s,transcendUrl:c,auth:l,sombraAuth:u,dataSiloId:d,actions:f,pageLimit:p,skipRequestCount:m,chunkSize:h}){m&&e.info(o.yellow(`Skipping request count as requested. This may help speed up the call.`)),(Number.isNaN(h)||h<=0||h%p!==0)&&(e.error(o.red(`Invalid chunk size: "${h}". Must be a positive integer that is a multiple of ${p}.`)),this.process.exit(1)),i(this.process.exit);let{baseName:g,extension:_}=n(s),v=0;await t({transcendUrl:c,apiPageSize:p,savePageSize:h,onSave:async t=>{let n=`${g}-${v}${_}`;return e.info(o.blue(`Saving ${t.length} identifiers to file "${n}"`)),await r(n,t,a(t.map(e=>Object.keys(e)).flat())),e.info(o.green(`Successfully wrote ${t.length} identifiers to file "${n}"`)),v+=1,Promise.resolve()},actions:f,auth:l,sombraAuth:u,dataSiloId:d,skipRequestCount:m})}export{s as pullIdentifiers};
2
+ //# sourceMappingURL=impl-CFI5y5U-.mjs.map
@@ -1 +1 @@
1
- {"version":3,"file":"impl-P_NDC3cX.mjs","names":[],"sources":["../src/commands/request/cron/pull-identifiers/impl.ts"],"sourcesContent":["import { RequestAction } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { uniq } from 'lodash-es';\n\nimport type { LocalContext } from '../../../../context.js';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation.js';\nimport {\n CsvFormattedIdentifier,\n pullChunkedCustomSiloOutstandingIdentifiers,\n} from '../../../../lib/cron/index.js';\nimport { parseFilePath, writeLargeCsv } from '../../../../lib/helpers/index.js';\nimport { logger } from '../../../../logger.js';\n\nexport interface PullIdentifiersCommandFlags {\n file: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n dataSiloId: string;\n actions: RequestAction[];\n pageLimit: number;\n skipRequestCount: boolean;\n chunkSize: number;\n}\n\nexport async function pullIdentifiers(\n this: LocalContext,\n {\n file,\n transcendUrl,\n auth,\n sombraAuth,\n dataSiloId,\n actions,\n pageLimit,\n skipRequestCount,\n chunkSize,\n }: PullIdentifiersCommandFlags,\n): Promise<void> {\n if (skipRequestCount) {\n logger.info(\n colors.yellow('Skipping request count as requested. This may help speed up the call.'),\n );\n }\n\n if (Number.isNaN(chunkSize) || chunkSize <= 0 || chunkSize % pageLimit !== 0) {\n logger.error(\n colors.red(\n `Invalid chunk size: \"${chunkSize}\". Must be a positive integer that is a multiple of ${pageLimit}.`,\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n const { baseName, extension } = parseFilePath(file);\n let fileCount = 0;\n\n const onSave = async (chunk: CsvFormattedIdentifier[]): Promise<void> => {\n const numberedFileName = `${baseName}-${fileCount}${extension}`;\n logger.info(colors.blue(`Saving ${chunk.length} identifiers to file \"${numberedFileName}\"`));\n\n const headers = uniq(chunk.map((d) => Object.keys(d)).flat());\n await writeLargeCsv(numberedFileName, chunk, headers);\n logger.info(\n colors.green(`Successfully wrote ${chunk.length} identifiers to file \"${numberedFileName}\"`),\n );\n fileCount += 1;\n return Promise.resolve();\n };\n\n // Pull down outstanding identifiers\n await pullChunkedCustomSiloOutstandingIdentifiers({\n transcendUrl,\n apiPageSize: pageLimit,\n savePageSize: chunkSize,\n onSave,\n actions,\n auth,\n sombraAuth,\n dataSiloId,\n skipRequestCount,\n });\n}\n"],"mappings":"uRAyBA,eAAsB,EAEpB,CACE,OACA,eACA,OACA,aACA,aACA,UACA,YACA,mBACA,aAEa,CACX,GACF,EAAO,KACL,EAAO,OAAO,wEAAwE,CACvF,EAGC,OAAO,MAAM,EAAU,EAAI,GAAa,GAAK,EAAY,IAAc,KACzE,EAAO,MACL,EAAO,IACL,wBAAwB,EAAU,sDAAsD,EAAU,GACnG,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CAAE,WAAU,aAAc,EAAc,EAAK,CAC/C,EAAY,EAgBhB,MAAM,EAA4C,CAChD,eACA,YAAa,EACb,aAAc,EACd,OAlBa,KAAO,IAAmD,CACvE,IAAM,EAAmB,GAAG,EAAS,GAAG,IAAY,IASpD,OARA,EAAO,KAAK,EAAO,KAAK,UAAU,EAAM,OAAO,wBAAwB,EAAiB,GAAG,CAAC,CAG5F,MAAM,EAAc,EAAkB,EADtB,EAAK,EAAM,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CACR,CACrD,EAAO,KACL,EAAO,MAAM,sBAAsB,EAAM,OAAO,wBAAwB,EAAiB,GAAG,CAC7F,CACD,GAAa,EACN,QAAQ,SAAS,EASxB,UACA,OACA,aACA,aACA,mBACD,CAAC"}
1
+ {"version":3,"file":"impl-CFI5y5U-.mjs","names":[],"sources":["../src/commands/request/cron/pull-identifiers/impl.ts"],"sourcesContent":["import { RequestAction } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { uniq } from 'lodash-es';\n\nimport type { LocalContext } from '../../../../context.js';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation.js';\nimport {\n CsvFormattedIdentifier,\n pullChunkedCustomSiloOutstandingIdentifiers,\n} from '../../../../lib/cron/index.js';\nimport { parseFilePath, writeLargeCsv } from '../../../../lib/helpers/index.js';\nimport { logger } from '../../../../logger.js';\n\nexport interface PullIdentifiersCommandFlags {\n file: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n dataSiloId: string;\n actions: RequestAction[];\n pageLimit: number;\n skipRequestCount: boolean;\n chunkSize: number;\n}\n\nexport async function pullIdentifiers(\n this: LocalContext,\n {\n file,\n transcendUrl,\n auth,\n sombraAuth,\n dataSiloId,\n actions,\n pageLimit,\n skipRequestCount,\n chunkSize,\n }: PullIdentifiersCommandFlags,\n): Promise<void> {\n if (skipRequestCount) {\n logger.info(\n colors.yellow('Skipping request count as requested. This may help speed up the call.'),\n );\n }\n\n if (Number.isNaN(chunkSize) || chunkSize <= 0 || chunkSize % pageLimit !== 0) {\n logger.error(\n colors.red(\n `Invalid chunk size: \"${chunkSize}\". Must be a positive integer that is a multiple of ${pageLimit}.`,\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n const { baseName, extension } = parseFilePath(file);\n let fileCount = 0;\n\n const onSave = async (chunk: CsvFormattedIdentifier[]): Promise<void> => {\n const numberedFileName = `${baseName}-${fileCount}${extension}`;\n logger.info(colors.blue(`Saving ${chunk.length} identifiers to file \"${numberedFileName}\"`));\n\n const headers = uniq(chunk.map((d) => Object.keys(d)).flat());\n await writeLargeCsv(numberedFileName, chunk, headers);\n logger.info(\n colors.green(`Successfully wrote ${chunk.length} identifiers to file \"${numberedFileName}\"`),\n );\n fileCount += 1;\n return Promise.resolve();\n };\n\n // Pull down outstanding identifiers\n await pullChunkedCustomSiloOutstandingIdentifiers({\n transcendUrl,\n apiPageSize: pageLimit,\n savePageSize: chunkSize,\n onSave,\n actions,\n auth,\n sombraAuth,\n dataSiloId,\n skipRequestCount,\n });\n}\n"],"mappings":"uRAyBA,eAAsB,EAEpB,CACE,OACA,eACA,OACA,aACA,aACA,UACA,YACA,mBACA,aAEa,CACX,GACF,EAAO,KACL,EAAO,OAAO,wEAAwE,CACvF,EAGC,OAAO,MAAM,EAAU,EAAI,GAAa,GAAK,EAAY,IAAc,KACzE,EAAO,MACL,EAAO,IACL,wBAAwB,EAAU,sDAAsD,EAAU,GACnG,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CAAE,WAAU,aAAc,EAAc,EAAK,CAC/C,EAAY,EAgBhB,MAAM,EAA4C,CAChD,eACA,YAAa,EACb,aAAc,EACd,OAlBa,KAAO,IAAmD,CACvE,IAAM,EAAmB,GAAG,EAAS,GAAG,IAAY,IASpD,OARA,EAAO,KAAK,EAAO,KAAK,UAAU,EAAM,OAAO,wBAAwB,EAAiB,GAAG,CAAC,CAG5F,MAAM,EAAc,EAAkB,EADtB,EAAK,EAAM,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CACR,CACrD,EAAO,KACL,EAAO,MAAM,sBAAsB,EAAM,OAAO,wBAAwB,EAAiB,GAAG,CAC7F,CACD,GAAa,EACN,QAAQ,SAAS,EASxB,UACA,OACA,aACA,aACA,mBACD,CAAC"}
@@ -1,2 +1,2 @@
1
- import{t as e}from"./logger-B-LXIf3U.mjs";import{t}from"./streamPrivacyRequestsToCsv-BK07Bm-T.mjs";import{t as n}from"./done-input-validation-DLR0-MJ7.mjs";import r from"colors";async function i({auth:i,transcendUrl:a,file:o,pageLimit:s,concurrency:c,actions:l,sombraAuth:u,skipRequestIdentifiers:d,statuses:f,createdAtBefore:p,createdAtAfter:m,updatedAtBefore:h,updatedAtAfter:g,showTests:_}){n(this.process.exit);let{filePaths:v,totalCount:y}=await t({transcendUrl:a,concurrency:c,pageLimit:s,actions:l,statuses:f,auth:i,sombraAuth:u,skipRequestIdentifiers:d,createdAtBefore:p,createdAtAfter:m,updatedAtBefore:h,updatedAtAfter:g,isTest:_,file:o});e.info(r.green(`Successfully wrote ${y} requests to ${v.length} file(s): ${v.join(`, `)}`))}export{i as _export};
2
- //# sourceMappingURL=impl-BOUm7wly.mjs.map
1
+ import{t as e}from"./logger-Bj782ZYD.mjs";import{t}from"./streamPrivacyRequestsToCsv-PoyTmQd6.mjs";import{t as n}from"./done-input-validation-BcNBxhEs.mjs";import r from"colors";async function i({auth:i,transcendUrl:a,file:o,pageLimit:s,concurrency:c,actions:l,sombraAuth:u,skipRequestIdentifiers:d,statuses:f,createdAtBefore:p,createdAtAfter:m,updatedAtBefore:h,updatedAtAfter:g,showTests:_}){n(this.process.exit);let{filePaths:v,totalCount:y}=await t({transcendUrl:a,concurrency:c,pageLimit:s,actions:l,statuses:f,auth:i,sombraAuth:u,skipRequestIdentifiers:d,createdAtBefore:p,createdAtAfter:m,updatedAtBefore:h,updatedAtAfter:g,isTest:_,file:o});e.info(r.green(`Successfully wrote ${y} requests to ${v.length} file(s): ${v.join(`, `)}`))}export{i as _export};
2
+ //# sourceMappingURL=impl-CIfRN0ux.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"impl-CIfRN0ux.mjs","names":[],"sources":["../src/commands/request/export/impl.ts"],"sourcesContent":["import type { RequestAction, RequestStatus } from '@transcend-io/privacy-types';\nimport colors from 'colors';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { streamPrivacyRequestsToCsv } from '../../../lib/requests/index.js';\nimport { logger } from '../../../logger.js';\n\nexport interface ExportCommandFlags {\n auth: string;\n sombraAuth?: string;\n actions?: RequestAction[];\n statuses?: RequestStatus[];\n transcendUrl: string;\n file: string;\n concurrency: number;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n updatedAtBefore?: Date;\n updatedAtAfter?: Date;\n showTests?: boolean;\n skipRequestIdentifiers?: boolean;\n pageLimit: number;\n}\n\n// `export` is a reserved keyword, so we need to prefix it with an underscore\n// eslint-disable-next-line no-underscore-dangle\nexport async function _export(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n file,\n pageLimit,\n concurrency,\n actions,\n sombraAuth,\n skipRequestIdentifiers,\n statuses,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n showTests,\n }: ExportCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const { filePaths, totalCount } = await streamPrivacyRequestsToCsv({\n transcendUrl,\n concurrency,\n pageLimit,\n actions,\n statuses,\n auth,\n sombraAuth,\n skipRequestIdentifiers,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n isTest: showTests,\n file,\n });\n\n logger.info(\n colors.green(\n `Successfully wrote ${totalCount} requests to ` +\n `${filePaths.length} file(s): ${filePaths.join(', ')}`,\n ),\n );\n}\n"],"mappings":"kLA2BA,eAAsB,EAEpB,CACE,OACA,eACA,OACA,YACA,cACA,UACA,aACA,yBACA,WACA,kBACA,iBACA,kBACA,iBACA,aAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CAAE,YAAW,cAAe,MAAM,EAA2B,CACjE,eACA,cACA,YACA,UACA,WACA,OACA,aACA,yBACA,kBACA,iBACA,kBACA,iBACA,OAAQ,EACR,OACD,CAAC,CAEF,EAAO,KACL,EAAO,MACL,sBAAsB,EAAW,eAC5B,EAAU,OAAO,YAAY,EAAU,KAAK,KAAK,GACvD,CACF"}
@@ -0,0 +1,2 @@
1
+ import{t as e}from"./logger-Bj782ZYD.mjs";import{r as t,t as n}from"./writeCsv-C4pjXGsD.mjs";import{t as r}from"./done-input-validation-BcNBxhEs.mjs";import i from"colors";import{buildTranscendGraphQLClient as a,createSombraGotInstance as o,fetchAllIdentifiers as s,fetchAllPurposesAndPreferences as c,fetchConsentPreferences as l,fetchConsentPreferencesChunked as u,transformPreferenceRecordToCsv as d}from"@transcend-io/sdk";const f=[`userId`,`timestamp`,`partition`,`decryptionStatus`,`updatedAt`,`usp`,`gpp`,`tcf`,`airgapVersion`,`metadata`,`metadataTimestamp`];async function p({auth:p,partition:m,sombraAuth:h,file:g,transcendUrl:_,timestampBefore:v,timestampAfter:y,updatedBefore:b,updatedAfter:x,identifiers:S=[],concurrency:C,shouldChunk:w,windowConcurrency:T,maxChunks:E,exportIdentifiersWithDelimiter:D,maxLookbackDays:O}){r(this.process.exit);let k=await o(_,p,{logger:e,sombraApiKey:h,sombraUrl:process.env.SOMBRA_URL}),A=a(_,p),j=S.map(e=>{if(!e.includes(`:`))return{name:`email`,value:e};let[t,n]=e.split(`:`);return{name:t,value:n}}),M={...v?{timestampBefore:v.toISOString()}:{},...y?{timestampAfter:y.toISOString()}:{},...x||b?{system:{...b?{updatedBefore:b.toISOString()}:{},...x?{updatedAfter:x.toISOString()}:{}}}:{},...j.length>0?{identifiers:j}:{}};e.info(`Fetching consent preferences from partition ${m}, using mode=${w?`chunked-stream`:`paged-stream`}...`),e.info(i.magenta(`Preparing CSV at: ${g}`));let[N,P]=await Promise.all([c(A,{logger:e}),s(A,{logger:e})]),F=P.map(e=>e.name),I=Array.from(new Set(N.flatMap(e=>e.topics?.map(t=>`${e.trackingType}_${t.slug}`)??[]))).sort((e,t)=>e.localeCompare(t)),L=Array.from(new Set(N.map(e=>e.trackingType))).sort((e,t)=>e.localeCompare(t)),R=[...f,...F,...L,...I],z=null,B=!1,V=e=>{if(!e||e.length===0)return;let r=e.map(e=>d(e,D));if(!B){let e=Object.keys(r[0]??{}),n=new Set;z=[...R,...e].filter(e=>e===void 0||n.has(e)?!1:(n.add(e),!0)),t(g,z),B=!0}n(g,r,z)};if(w){await u(k,{partition:m,filterBy:M,limit:C,windowConcurrency:T,maxChunks:E,maxLookbackDays:O,logger:e,onItems:e=>V(e)}),e.info(i.green(`Finished writing CSV to ${g}`));return}await l(k,{partition:m,filterBy:M,limit:C,logger:e,onItems:e=>V(e)}),e.info(i.green(`Finished writing CSV to ${g}`))}export{p as pullConsentPreferences};
2
+ //# sourceMappingURL=impl-CLznNZ5F.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"impl-CLznNZ5F.mjs","names":[],"sources":["../src/commands/consent/pull-consent-preferences/impl.ts"],"sourcesContent":["import type { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\nimport {\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchAllIdentifiers,\n fetchAllPurposesAndPreferences,\n fetchConsentPreferences,\n fetchConsentPreferencesChunked,\n transformPreferenceRecordToCsv,\n type PreferenceIdentifier,\n} from '@transcend-io/sdk';\nimport colors from 'colors';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { initCsvFile, appendCsvRowsOrdered } from '../../../lib/helpers/index.js';\nimport { logger } from '../../../logger.js';\n\n// Known “core” columns your transformer usually produces up front.\n// Leave this list conservative; we’ll still union with transformer keys.\nconst CORE_COLS = [\n 'userId',\n 'timestamp',\n 'partition',\n 'decryptionStatus',\n 'updatedAt',\n 'usp',\n 'gpp',\n 'tcf',\n 'airgapVersion',\n 'metadata',\n 'metadataTimestamp',\n];\n\nexport interface PullConsentPreferencesCommandFlags {\n auth: string;\n partition: string;\n sombraAuth?: string;\n file: string;\n transcendUrl: string;\n timestampBefore?: Date;\n exportIdentifiersWithDelimiter: string;\n timestampAfter?: Date;\n updatedBefore?: Date;\n updatedAfter?: Date;\n identifiers?: string[];\n concurrency: number;\n shouldChunk: boolean;\n windowConcurrency: number;\n maxChunks: number;\n maxLookbackDays: number;\n}\n\nexport async function pullConsentPreferences(\n this: LocalContext,\n {\n auth,\n partition,\n sombraAuth,\n file,\n transcendUrl,\n timestampBefore,\n timestampAfter,\n updatedBefore,\n updatedAfter,\n identifiers = [],\n concurrency,\n shouldChunk,\n windowConcurrency,\n maxChunks,\n exportIdentifiersWithDelimiter,\n maxLookbackDays,\n }: PullConsentPreferencesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, {\n logger,\n sombraApiKey: sombraAuth,\n sombraUrl: process.env.SOMBRA_URL,\n });\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Identifiers are key:value, parse to PreferenceIdentifier[]\n const parsedIdentifiers = identifiers.map((identifier): PreferenceIdentifier => {\n if (!identifier.includes(':')) {\n return {\n name: 'email',\n value: identifier,\n };\n }\n const [name, value] = identifier.split(':');\n return { name, value };\n });\n\n // Build filter\n const filterBy = {\n ...(timestampBefore ? { timestampBefore: timestampBefore.toISOString() } : {}),\n ...(timestampAfter ? { timestampAfter: timestampAfter.toISOString() } : {}),\n ...(updatedAfter || updatedBefore\n ? {\n system: {\n ...(updatedBefore ? { updatedBefore: updatedBefore.toISOString() } : {}),\n ...(updatedAfter ? { updatedAfter: updatedAfter.toISOString() } : {}),\n },\n }\n : {}),\n ...(parsedIdentifiers.length > 0 ? { identifiers: parsedIdentifiers } : {}),\n };\n\n logger.info(\n `Fetching consent preferences from partition ${partition}, using mode=${\n shouldChunk ? 
'chunked-stream' : 'paged-stream'\n }...`,\n );\n\n logger.info(colors.magenta(`Preparing CSV at: ${file}`));\n\n // Fetch full sets (purposes+topics, identifiers) to ensure header completeness\n const [purposesWithTopics, allIdentifiers] = await Promise.all([\n fetchAllPurposesAndPreferences(client, { logger }),\n fetchAllIdentifiers(client, { logger }),\n ]);\n\n // Identifier columns: exactly the identifier names\n const identifierCols = allIdentifiers.map((i) => i.name);\n\n // Preference topic columns: topic names (de-duped)\n const topicCols = Array.from(\n new Set(\n purposesWithTopics.flatMap((p) => p.topics?.map((t) => `${p.trackingType}_${t.slug}`) ?? []),\n ),\n ).sort((a, b) => a.localeCompare(b));\n\n // Some setups also want a per-purpose boolean column (e.g., “Email”, “Sms”).\n // If your transformer includes those, list them here, derived from purposes:\n const purposeCols = Array.from(new Set(purposesWithTopics.map((p) => p.trackingType))).sort(\n (a, b) => a.localeCompare(b),\n );\n\n // Build the complete header skeleton.\n // We’ll still union with the first transformed row’s keys to be safe.\n const completeHeadersList = [...CORE_COLS, ...identifierCols, ...purposeCols, ...topicCols];\n\n // Lazily initialize CSV header order from the first transformed row.\n let headerOrder: string[] | null = null;\n let wroteHeader = false;\n const writeRows = (items: PreferenceQueryResponseItem[]): void => {\n if (!items || items.length === 0) return;\n const rows = items.map((row) =>\n transformPreferenceRecordToCsv(row, exportIdentifiersWithDelimiter),\n );\n if (!wroteHeader) {\n const firstKeys = Object.keys(rows[0] ?? {});\n const seen = new Set<string>();\n headerOrder = [...completeHeadersList, ...firstKeys].filter((k) => {\n if (k === undefined) return false;\n if (seen.has(k)) return false;\n seen.add(k);\n return true;\n });\n initCsvFile(file, headerOrder);\n wroteHeader = true;\n }\n appendCsvRowsOrdered(file, rows, headerOrder!);\n };\n\n if (shouldChunk) {\n // Stream via chunked fetcher with page callback\n await fetchConsentPreferencesChunked(sombra, {\n partition,\n filterBy,\n limit: concurrency,\n windowConcurrency,\n maxChunks,\n maxLookbackDays,\n logger,\n onItems: (items) => writeRows(items),\n });\n\n logger.info(colors.green(`Finished writing CSV to ${file}`));\n return;\n }\n\n // Non-chunked path: still stream page-by-page via onItems (no in-memory accumulation)\n await fetchConsentPreferences(sombra, {\n partition,\n filterBy,\n limit: concurrency, // page size (API max 50 enforced internally)\n logger,\n onItems: (items) => writeRows(items),\n });\n\n logger.info(colors.green(`Finished writing CSV to 
${file}`));\n}\n"],"mappings":"2aAoBA,MAAM,EAAY,CAChB,SACA,YACA,YACA,mBACA,YACA,MACA,MACA,MACA,gBACA,WACA,oBACD,CAqBD,eAAsB,EAEpB,CACE,OACA,YACA,aACA,OACA,eACA,kBACA,iBACA,gBACA,eACA,cAAc,EAAE,CAChB,cACA,cACA,oBACA,YACA,iCACA,mBAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAAS,MAAM,EAAwB,EAAc,EAAM,CAC/D,SACA,aAAc,EACd,UAAW,QAAQ,IAAI,WACxB,CAAC,CACI,EAAS,EAA4B,EAAc,EAAK,CAGxD,EAAoB,EAAY,IAAK,GAAqC,CAC9E,GAAI,CAAC,EAAW,SAAS,IAAI,CAC3B,MAAO,CACL,KAAM,QACN,MAAO,EACR,CAEH,GAAM,CAAC,EAAM,GAAS,EAAW,MAAM,IAAI,CAC3C,MAAO,CAAE,OAAM,QAAO,EACtB,CAGI,EAAW,CACf,GAAI,EAAkB,CAAE,gBAAiB,EAAgB,aAAa,CAAE,CAAG,EAAE,CAC7E,GAAI,EAAiB,CAAE,eAAgB,EAAe,aAAa,CAAE,CAAG,EAAE,CAC1E,GAAI,GAAgB,EAChB,CACE,OAAQ,CACN,GAAI,EAAgB,CAAE,cAAe,EAAc,aAAa,CAAE,CAAG,EAAE,CACvE,GAAI,EAAe,CAAE,aAAc,EAAa,aAAa,CAAE,CAAG,EAAE,CACrE,CACF,CACD,EAAE,CACN,GAAI,EAAkB,OAAS,EAAI,CAAE,YAAa,EAAmB,CAAG,EAAE,CAC3E,CAED,EAAO,KACL,+CAA+C,EAAU,eACvD,EAAc,iBAAmB,eAClC,KACF,CAED,EAAO,KAAK,EAAO,QAAQ,qBAAqB,IAAO,CAAC,CAGxD,GAAM,CAAC,EAAoB,GAAkB,MAAM,QAAQ,IAAI,CAC7D,EAA+B,EAAQ,CAAE,SAAQ,CAAC,CAClD,EAAoB,EAAQ,CAAE,SAAQ,CAAC,CACxC,CAAC,CAGI,EAAiB,EAAe,IAAK,GAAM,EAAE,KAAK,CAGlD,EAAY,MAAM,KACtB,IAAI,IACF,EAAmB,QAAS,GAAM,EAAE,QAAQ,IAAK,GAAM,GAAG,EAAE,aAAa,GAAG,EAAE,OAAO,EAAI,EAAE,CAAC,CAC7F,CACF,CAAC,MAAM,EAAG,IAAM,EAAE,cAAc,EAAE,CAAC,CAI9B,EAAc,MAAM,KAAK,IAAI,IAAI,EAAmB,IAAK,GAAM,EAAE,aAAa,CAAC,CAAC,CAAC,MACpF,EAAG,IAAM,EAAE,cAAc,EAAE,CAC7B,CAIK,EAAsB,CAAC,GAAG,EAAW,GAAG,EAAgB,GAAG,EAAa,GAAG,EAAU,CAGvF,EAA+B,KAC/B,EAAc,GACZ,EAAa,GAA+C,CAChE,GAAI,CAAC,GAAS,EAAM,SAAW,EAAG,OAClC,IAAM,EAAO,EAAM,IAAK,GACtB,EAA+B,EAAK,EAA+B,CACpE,CACD,GAAI,CAAC,EAAa,CAChB,IAAM,EAAY,OAAO,KAAK,EAAK,IAAM,EAAE,CAAC,CACtC,EAAO,IAAI,IACjB,EAAc,CAAC,GAAG,EAAqB,GAAG,EAAU,CAAC,OAAQ,GACvD,IAAM,IAAA,IACN,EAAK,IAAI,EAAE,CAAS,IACxB,EAAK,IAAI,EAAE,CACJ,IACP,CACF,EAAY,EAAM,EAAY,CAC9B,EAAc,GAEhB,EAAqB,EAAM,EAAM,EAAa,EAGhD,GAAI,EAAa,CAEf,MAAM,EAA+B,EAAQ,CAC3C,YACA,WACA,MAAO,EACP,oBACA,YACA,kBACA,SACA,QAAU,GAAU,EAAU,EAAM,CACrC,CAAC,CAEF,EAAO,KAAK,EAAO,MAAM,2BAA2B,IAAO,CAAC,CAC5D,OAIF,MAAM,EAAwB,EAAQ,CACpC,YACA,WACA,MAAO,EACP,SACA,QAAU,GAAU,EAAU,EAAM,CACrC,CAAC,CAEF,EAAO,KAAK,EAAO,MAAM,2BAA2B,IAAO,CAAC"}
@@ -0,0 +1,2 @@
1
+ import{r as e}from"./constants-muOBBQA_.mjs";import{t}from"./logger-Bj782ZYD.mjs";import{r as n}from"./readTranscendYaml-DVkQL2SC.mjs";import{n as r,r as i,t as a}from"./parseVariablesFromString-BeKOGw5n.mjs";import{t as o}from"./validateTranscendAuth-Cuh2Qfdl.mjs";import{t as s}from"./listFiles-D2wMHnEr.mjs";import{t as c}from"./done-input-validation-BcNBxhEs.mjs";import{existsSync as l,lstatSync as u}from"node:fs";import{join as d}from"node:path";import f from"colors";import{buildTranscendGraphQLClient as p}from"@transcend-io/sdk";import{mapSeries as m}from"@transcend-io/utils";async function h({transcendUrl:e,auth:n,pageSize:r,publishToPrivacyCenter:a,contents:o,deleteExtraAttributeValues:s=!1,classifyService:c=!1}){let l=p(e,n);try{return!await i(o,l,{pageSize:r,publishToPrivacyCenter:a,classifyService:c,deleteExtraAttributeValues:s})}catch(e){return t.error(f.red(`An unexpected error occurred syncing the schema: ${e.message}`)),!1}}async function g({file:i=`./transcend.yml`,transcendUrl:p,auth:g,variables:_,pageSize:v,publishToPrivacyCenter:y,classifyService:b,deleteExtraAttributeValues:x}){c(this.process.exit);let S=await o(g),C=a(_),w;if(w=Array.isArray(S)&&u(i).isDirectory()?s(i).map(e=>d(i,e)):i.split(`,`),w.length<1)throw Error(`No file specified!`);let T=w.map(e=>{l(e)?t.info(f.magenta(`Reading file "${e}"...`)):(t.error(f.red(`The file path does not exist on disk: ${e}. You can specify the filepath using --file=./examples/transcend.yml`)),this.process.exit(1));try{let r=n(e,C);return t.info(f.green(`Successfully read in "${e}"`)),{content:r,name:e.split(`/`).pop().replace(`.yml`,``)}}catch(e){t.error(f.red(`The shape of your yaml file is invalid with the following errors: ${e.message}`)),this.process.exit(1)}});if(typeof S==`string`){let[n,...i]=T.map(({content:e})=>e);await h({transcendUrl:p,auth:S,contents:r(n,...i),publishToPrivacyCenter:y,deleteExtraAttributeValues:x,pageSize:v,classifyService:!!b})||(t.info(f.red(`Sync encountered errors. View output above for more information, or check out ${e}`)),this.process.exit(1))}else{if(T.length!==1&&T.length!==S.length)throw Error(`Expected list of yml files to be equal to the list of API keys.Got ${T.length} YML file${T.length===1?``:`s`} and ${S.length} API key${S.length===1?``:`s`}`);let n=[];await m(S,async(e,r)=>{let i=`[${r+1}/${S.length}][${e.organizationName}] `;t.info(f.magenta(`~~~\n\n${i}Attempting to push configuration...\n\n~~~`));let a=T.length===1?T[0].content:T.find(t=>t.name===e.organizationName)?.content;if(!a){t.error(f.red(`${i}Failed to find transcend.yml file for organization: "${e.organizationName}".`)),n.push(e.organizationName);return}await h({transcendUrl:p,auth:e.apiKey,contents:a,pageSize:v,publishToPrivacyCenter:y,deleteExtraAttributeValues:x,classifyService:b})?t.info(f.green(`${i}Successfully pushed configuration!`)):(t.error(f.red(`${i}Failed to sync configuration.`)),n.push(e.organizationName))}),n.length>0&&(t.info(f.red(`Sync encountered errors for "${n.join(`,`)}". View output above for more information, or check out ${e}`)),this.process.exit(1))}t.info(f.green(`Successfully synced yaml file to Transcend! View at ${e}`))}export{g as push};
2
+ //# sourceMappingURL=impl-CUdo0Jyh.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"impl-CUdo0Jyh.mjs","names":[],"sources":["../src/commands/inventory/push/impl.ts"],"sourcesContent":["import { existsSync, lstatSync } from 'node:fs';\nimport { join } from 'node:path';\n\nimport { buildTranscendGraphQLClient } from '@transcend-io/sdk';\nimport { mapSeries } from '@transcend-io/utils';\nimport colors from 'colors';\n\nimport { TranscendInput } from '../../../codecs.js';\nimport { ADMIN_DASH_INTEGRATIONS } from '../../../constants.js';\nimport type { LocalContext } from '../../../context.js';\nimport { validateTranscendAuth, listFiles } from '../../../lib/api-keys/index.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { syncConfigurationToTranscend } from '../../../lib/graphql/index.js';\nimport { parseVariablesFromString } from '../../../lib/helpers/parseVariablesFromString.js';\nimport { mergeTranscendInputs } from '../../../lib/mergeTranscendInputs.js';\nimport { readTranscendYaml } from '../../../lib/readTranscendYaml.js';\nimport { logger } from '../../../logger.js';\n\n/**\n * Sync configuration to Transcend\n *\n * @param options - Options\n * @returns True if synced successfully, false if error occurs\n */\nasync function syncConfiguration({\n transcendUrl,\n auth,\n pageSize,\n publishToPrivacyCenter,\n contents,\n deleteExtraAttributeValues = false,\n classifyService = false,\n}: {\n /** Transcend YAML */\n contents: TranscendInput;\n /** Transcend URL */\n transcendUrl: string;\n /** API key */\n auth: string;\n /** Page size */\n pageSize: number;\n /** Skip privacy center publish step */\n publishToPrivacyCenter: boolean;\n /** classify data flow service if missing */\n classifyService?: boolean;\n /** Delete attributes when syncing */\n deleteExtraAttributeValues?: boolean;\n}): Promise<boolean> {\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Sync to Transcend\n try {\n const encounteredError = await syncConfigurationToTranscend(contents, client, {\n pageSize,\n publishToPrivacyCenter,\n classifyService,\n deleteExtraAttributeValues,\n });\n return !encounteredError;\n } catch (err) {\n logger.error(colors.red(`An unexpected error occurred syncing the schema: ${err.message}`));\n return false;\n }\n}\n\nexport interface PushCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n pageSize: number;\n variables: string;\n publishToPrivacyCenter: boolean;\n classifyService: boolean;\n deleteExtraAttributeValues: boolean;\n}\n\nexport async function push(\n this: LocalContext,\n {\n file = './transcend.yml',\n transcendUrl,\n auth,\n variables,\n pageSize,\n publishToPrivacyCenter,\n classifyService,\n deleteExtraAttributeValues,\n }: PushCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Parse out the variables\n const vars = parseVariablesFromString(variables);\n\n // check if we are being passed a list of API keys and a list of files\n let fileList: string[];\n if (Array.isArray(apiKeyOrList) && lstatSync(file).isDirectory()) {\n fileList = listFiles(file).map((filePath) => join(file, filePath));\n } else {\n fileList = file.split(',');\n }\n\n // Ensure at least one file is parsed\n if (fileList.length < 1) {\n throw new Error('No file specified!');\n }\n\n // eslint-disable-next-line array-callback-return,consistent-return\n const transcendInputs = fileList.map((filePath) => {\n // Ensure yaml 
file exists on disk\n if (!existsSync(filePath)) {\n logger.error(\n colors.red(\n `The file path does not exist on disk: ${filePath}. You can specify the filepath using --file=./examples/transcend.yml`,\n ),\n );\n this.process.exit(1);\n } else {\n logger.info(colors.magenta(`Reading file \"${filePath}\"...`));\n }\n\n try {\n // Read in the yaml file and validate it's shape\n const newContents = readTranscendYaml(filePath, vars);\n logger.info(colors.green(`Successfully read in \"${filePath}\"`));\n return {\n content: newContents,\n name: filePath.split('/').pop()!.replace('.yml', ''),\n };\n } catch (err) {\n logger.error(\n colors.red(\n `The shape of your yaml file is invalid with the following errors: ${err.message}`,\n ),\n );\n this.process.exit(1);\n }\n });\n\n // process a single API key\n if (typeof apiKeyOrList === 'string') {\n // if passed multiple inputs, merge them together\n const [base, ...rest] = transcendInputs.map(({ content }) => content);\n const contents = mergeTranscendInputs(base, ...rest);\n\n // sync the configuration\n const success = await syncConfiguration({\n transcendUrl,\n auth: apiKeyOrList,\n contents,\n publishToPrivacyCenter,\n deleteExtraAttributeValues,\n pageSize,\n classifyService: !!classifyService,\n });\n\n // exist with error code\n if (!success) {\n logger.info(\n colors.red(\n `Sync encountered errors. View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n } else {\n // if passed multiple inputs, expect them to be one per instance\n if (transcendInputs.length !== 1 && transcendInputs.length !== apiKeyOrList.length) {\n throw new Error(\n 'Expected list of yml files to be equal to the list of API keys.' +\n `Got ${transcendInputs.length} YML file${\n transcendInputs.length === 1 ? '' : 's'\n } and ${apiKeyOrList.length} API key${apiKeyOrList.length === 1 ? '' : 's'}`,\n );\n }\n\n const encounteredErrors: string[] = [];\n await mapSeries(apiKeyOrList, async (apiKey, ind) => {\n const prefix = `[${ind + 1}/${apiKeyOrList.length}][${apiKey.organizationName}] `;\n logger.info(colors.magenta(`~~~\\n\\n${prefix}Attempting to push configuration...\\n\\n~~~`));\n\n // use the merged contents if 1 yml passed, else use the contents that map to that organization\n const useContents =\n transcendInputs.length === 1\n ? transcendInputs[0].content\n : transcendInputs.find((input) => input.name === apiKey.organizationName)?.content;\n\n // Throw error if cannot find a yml file matching that organization name\n if (!useContents) {\n logger.error(\n colors.red(\n `${prefix}Failed to find transcend.yml file for organization: \"${apiKey.organizationName}\".`,\n ),\n );\n encounteredErrors.push(apiKey.organizationName);\n return;\n }\n\n const success = await syncConfiguration({\n transcendUrl,\n auth: apiKey.apiKey,\n contents: useContents,\n pageSize,\n publishToPrivacyCenter,\n deleteExtraAttributeValues,\n classifyService,\n });\n\n if (success) {\n logger.info(colors.green(`${prefix}Successfully pushed configuration!`));\n } else {\n logger.error(colors.red(`${prefix}Failed to sync configuration.`));\n encounteredErrors.push(apiKey.organizationName);\n }\n });\n\n if (encounteredErrors.length > 0) {\n logger.info(\n colors.red(\n `Sync encountered errors for \"${encounteredErrors.join(\n ',',\n )}\". 
View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n }\n\n // Indicate success\n logger.info(\n colors.green(`Successfully synced yaml file to Transcend! View at ${ADMIN_DASH_INTEGRATIONS}`),\n );\n}\n"],"mappings":"2kBAwBA,eAAe,EAAkB,CAC/B,eACA,OACA,WACA,yBACA,WACA,6BAA6B,GAC7B,kBAAkB,IAgBC,CACnB,IAAM,EAAS,EAA4B,EAAc,EAAK,CAG9D,GAAI,CAOF,MAAO,CANkB,MAAM,EAA6B,EAAU,EAAQ,CAC5E,WACA,yBACA,kBACA,6BACD,CAAC,OAEK,EAAK,CAEZ,OADA,EAAO,MAAM,EAAO,IAAI,oDAAoD,EAAI,UAAU,CAAC,CACpF,IAeX,eAAsB,EAEpB,CACE,OAAO,kBACP,eACA,OACA,YACA,WACA,yBACA,kBACA,8BAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAAe,MAAM,EAAsB,EAAK,CAGhD,EAAO,EAAyB,EAAU,CAG5C,EAQJ,GAPA,AAGE,EAHE,MAAM,QAAQ,EAAa,EAAI,EAAU,EAAK,CAAC,aAAa,CACnD,EAAU,EAAK,CAAC,IAAK,GAAa,EAAK,EAAM,EAAS,CAAC,CAEvD,EAAK,MAAM,IAAI,CAIxB,EAAS,OAAS,EACpB,MAAU,MAAM,qBAAqB,CAIvC,IAAM,EAAkB,EAAS,IAAK,GAAa,CAE5C,EAAW,EAAS,CAQvB,EAAO,KAAK,EAAO,QAAQ,iBAAiB,EAAS,MAAM,CAAC,EAP5D,EAAO,MACL,EAAO,IACL,yCAAyC,EAAS,sEACnD,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAKtB,GAAI,CAEF,IAAM,EAAc,EAAkB,EAAU,EAAK,CAErD,OADA,EAAO,KAAK,EAAO,MAAM,yBAAyB,EAAS,GAAG,CAAC,CACxD,CACL,QAAS,EACT,KAAM,EAAS,MAAM,IAAI,CAAC,KAAK,CAAE,QAAQ,OAAQ,GAAG,CACrD,OACM,EAAK,CACZ,EAAO,MACL,EAAO,IACL,qEAAqE,EAAI,UAC1E,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,GAEtB,CAGF,GAAI,OAAO,GAAiB,SAAU,CAEpC,GAAM,CAAC,EAAM,GAAG,GAAQ,EAAgB,KAAK,CAAE,aAAc,EAAQ,CAIrD,MAAM,EAAkB,CACtC,eACA,KAAM,EACN,SANe,EAAqB,EAAM,GAAG,EAAK,CAOlD,yBACA,6BACA,WACA,gBAAiB,CAAC,CAAC,EACpB,CAAC,GAIA,EAAO,KACL,EAAO,IACL,iFAAiF,IAClF,CACF,CAED,KAAK,QAAQ,KAAK,EAAE,MAEjB,CAEL,GAAI,EAAgB,SAAW,GAAK,EAAgB,SAAW,EAAa,OAC1E,MAAU,MACR,sEACS,EAAgB,OAAO,WAC5B,EAAgB,SAAW,EAAI,GAAK,IACrC,OAAO,EAAa,OAAO,UAAU,EAAa,SAAW,EAAI,GAAK,MAC1E,CAGH,IAAM,EAA8B,EAAE,CACtC,MAAM,EAAU,EAAc,MAAO,EAAQ,IAAQ,CACnD,IAAM,EAAS,IAAI,EAAM,EAAE,GAAG,EAAa,OAAO,IAAI,EAAO,iBAAiB,IAC9E,EAAO,KAAK,EAAO,QAAQ,UAAU,EAAO,4CAA4C,CAAC,CAGzF,IAAM,EACJ,EAAgB,SAAW,EACvB,EAAgB,GAAG,QACnB,EAAgB,KAAM,GAAU,EAAM,OAAS,EAAO,iBAAiB,EAAE,QAG/E,GAAI,CAAC,EAAa,CAChB,EAAO,MACL,EAAO,IACL,GAAG,EAAO,uDAAuD,EAAO,iBAAiB,IAC1F,CACF,CACD,EAAkB,KAAK,EAAO,iBAAiB,CAC/C,OAGc,MAAM,EAAkB,CACtC,eACA,KAAM,EAAO,OACb,SAAU,EACV,WACA,yBACA,6BACA,kBACD,CAAC,CAGA,EAAO,KAAK,EAAO,MAAM,GAAG,EAAO,oCAAoC,CAAC,EAExE,EAAO,MAAM,EAAO,IAAI,GAAG,EAAO,+BAA+B,CAAC,CAClE,EAAkB,KAAK,EAAO,iBAAiB,GAEjD,CAEE,EAAkB,OAAS,IAC7B,EAAO,KACL,EAAO,IACL,gCAAgC,EAAkB,KAChD,IACD,CAAC,0DAA0D,IAC7D,CACF,CAED,KAAK,QAAQ,KAAK,EAAE,EAKxB,EAAO,KACL,EAAO,MAAM,uDAAuD,IAA0B,CAC/F"}
@@ -1,2 +1,2 @@
1
- import{t as e}from"./uploadDataFlowsFromCsv-DcTbrsv2.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({auth:n,trackerStatus:r,file:i,classifyService:a,transcendUrl:o}){t(this.process.exit),await e({auth:n,trackerStatus:r,file:i,classifyService:a,transcendUrl:o})}export{n as uploadDataFlowsFromCsv};
2
- //# sourceMappingURL=impl-MpkLBntW.mjs.map
1
+ import{t as e}from"./uploadDataFlowsFromCsv-DUSFCae9.mjs";import{t}from"./done-input-validation-BcNBxhEs.mjs";async function n({auth:n,trackerStatus:r,file:i,classifyService:a,transcendUrl:o}){t(this.process.exit),await e({auth:n,trackerStatus:r,file:i,classifyService:a,transcendUrl:o})}export{n as uploadDataFlowsFromCsv};
2
+ //# sourceMappingURL=impl-Cmj1Vi5Q.mjs.map
@@ -1 +1 @@
1
- {"version":3,"file":"impl-MpkLBntW.mjs","names":["uploadDataFlowsFromCsvHelper"],"sources":["../src/commands/consent/upload-data-flows-from-csv/impl.ts"],"sourcesContent":["import { ConsentTrackerStatus } from '@transcend-io/privacy-types';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { uploadDataFlowsFromCsv as uploadDataFlowsFromCsvHelper } from '../../../lib/consent-manager/index.js';\n\nexport interface UploadDataFlowsFromCsvCommandFlags {\n auth: string;\n trackerStatus: ConsentTrackerStatus;\n file: string;\n classifyService: boolean;\n transcendUrl: string;\n}\n\nexport async function uploadDataFlowsFromCsv(\n this: LocalContext,\n { auth, trackerStatus, file, classifyService, transcendUrl }: UploadDataFlowsFromCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await uploadDataFlowsFromCsvHelper({\n auth,\n trackerStatus,\n file,\n classifyService,\n transcendUrl,\n });\n}\n"],"mappings":"8GAcA,eAAsB,EAEpB,CAAE,OAAM,gBAAe,OAAM,kBAAiB,gBAC/B,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAA6B,CACjC,OACA,gBACA,OACA,kBACA,eACD,CAAC"}
1
+ {"version":3,"file":"impl-Cmj1Vi5Q.mjs","names":["uploadDataFlowsFromCsvHelper"],"sources":["../src/commands/consent/upload-data-flows-from-csv/impl.ts"],"sourcesContent":["import { ConsentTrackerStatus } from '@transcend-io/privacy-types';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { uploadDataFlowsFromCsv as uploadDataFlowsFromCsvHelper } from '../../../lib/consent-manager/index.js';\n\nexport interface UploadDataFlowsFromCsvCommandFlags {\n auth: string;\n trackerStatus: ConsentTrackerStatus;\n file: string;\n classifyService: boolean;\n transcendUrl: string;\n}\n\nexport async function uploadDataFlowsFromCsv(\n this: LocalContext,\n { auth, trackerStatus, file, classifyService, transcendUrl }: UploadDataFlowsFromCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await uploadDataFlowsFromCsvHelper({\n auth,\n trackerStatus,\n file,\n classifyService,\n transcendUrl,\n });\n}\n"],"mappings":"8GAcA,eAAsB,EAEpB,CAAE,OAAM,gBAAe,OAAM,kBAAiB,gBAC/B,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAA6B,CACjC,OACA,gBACA,OACA,kBACA,eACD,CAAC"}
@@ -0,0 +1,2 @@
1
+ import{t as e}from"./logger-Bj782ZYD.mjs";import{t}from"./pullUnstructuredSubDataPointRecommendations-D0z-vPgq.mjs";import{s as n}from"./writeCsv-C4pjXGsD.mjs";import{t as r}from"./done-input-validation-BcNBxhEs.mjs";import{uniq as i}from"lodash-es";import a from"colors";import{buildTranscendGraphQLClient as o}from"@transcend-io/sdk";async function s({auth:s,file:c,transcendUrl:l,dataSiloIds:u,subCategories:d,status:f,includeEncryptedSnippets:p}){r(this.process.exit);try{let r=await t(o(l,s),{dataSiloIds:u,subCategories:d,status:f,includeEncryptedSnippets:p});e.info(a.magenta(`Writing unstructured discovery files to file "${c}"...`));let m=[];await n(c,r.map(e=>{let t={"Entry ID":e.id,"Data Silo ID":e.dataSiloId,"Object Path ID":e.scannedObjectPathId,"Object ID":e.scannedObjectId,...p?{Entry:e.name,"Context Snippet":e.contextSnippet}:{},"Data Category":`${e.dataSubCategory.category}:${e.dataSubCategory.name}`,"Classification Status":e.status,"Confidence Score":e.confidence,"Classification Method":e.classificationMethod,"Classifier Version":e.classifierVersion};return m=i([...m,...Object.keys(t)]),t}),m)}catch(t){e.error(a.red(`An error occurred syncing the unstructured discovery files: ${t.message}`)),this.process.exit(1)}e.info(a.green(`Successfully synced unstructured discovery files to disk at ${c}!`))}export{s as pullUnstructuredDiscoveryFiles};
2
+ //# sourceMappingURL=impl-Cw3_0zqC.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"impl-Cw3_0zqC.mjs","names":[],"sources":["../src/commands/inventory/pull-unstructured-discovery-files/impl.ts"],"sourcesContent":["import type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport { buildTranscendGraphQLClient } from '@transcend-io/sdk';\nimport colors from 'colors';\nimport { uniq } from 'lodash-es';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { pullUnstructuredSubDataPointRecommendations } from '../../../lib/data-inventory/index.js';\nimport { writeLargeCsv } from '../../../lib/helpers/index.js';\nimport { logger } from '../../../logger.js';\n\nexport interface PullUnstructuredDiscoveryFilesCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n subCategories?: string[];\n status?: UnstructuredSubDataPointRecommendationStatus[];\n includeEncryptedSnippets: boolean;\n}\n\nexport async function pullUnstructuredDiscoveryFiles(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n dataSiloIds,\n subCategories,\n status,\n includeEncryptedSnippets,\n }: PullUnstructuredDiscoveryFilesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const entries = await pullUnstructuredSubDataPointRecommendations(client, {\n dataSiloIds,\n subCategories, // TODO: https://transcend.height.app/T-40482 - do by name not ID\n status,\n includeEncryptedSnippets,\n });\n\n logger.info(colors.magenta(`Writing unstructured discovery files to file \"${file}\"...`));\n let headers: string[] = [];\n const inputs = entries.map((entry) => {\n const result = {\n 'Entry ID': entry.id,\n 'Data Silo ID': entry.dataSiloId,\n 'Object Path ID': entry.scannedObjectPathId,\n 'Object ID': entry.scannedObjectId,\n ...(includeEncryptedSnippets\n ? 
{ Entry: entry.name, 'Context Snippet': entry.contextSnippet }\n : {}),\n 'Data Category': `${entry.dataSubCategory.category}:${entry.dataSubCategory.name}`,\n 'Classification Status': entry.status,\n 'Confidence Score': entry.confidence,\n 'Classification Method': entry.classificationMethod,\n 'Classifier Version': entry.classifierVersion,\n };\n headers = uniq([...headers, ...Object.keys(result)]);\n return result;\n });\n await writeLargeCsv(file, inputs, headers);\n } catch (err) {\n logger.error(\n colors.red(`An error occurred syncing the unstructured discovery files: ${err.message}`),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(colors.green(`Successfully synced unstructured discovery files to disk at ${file}!`));\n}\n"],"mappings":"gVAqBA,eAAsB,EAEpB,CACE,OACA,OACA,eACA,cACA,gBACA,SACA,4BAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAI,CAIF,IAAM,EAAU,MAAM,EAFP,EAA4B,EAAc,EAAK,CAEY,CACxE,cACA,gBACA,SACA,2BACD,CAAC,CAEF,EAAO,KAAK,EAAO,QAAQ,iDAAiD,EAAK,MAAM,CAAC,CACxF,IAAI,EAAoB,EAAE,CAmB1B,MAAM,EAAc,EAlBL,EAAQ,IAAK,GAAU,CACpC,IAAM,EAAS,CACb,WAAY,EAAM,GAClB,eAAgB,EAAM,WACtB,iBAAkB,EAAM,oBACxB,YAAa,EAAM,gBACnB,GAAI,EACA,CAAE,MAAO,EAAM,KAAM,kBAAmB,EAAM,eAAgB,CAC9D,EAAE,CACN,gBAAiB,GAAG,EAAM,gBAAgB,SAAS,GAAG,EAAM,gBAAgB,OAC5E,wBAAyB,EAAM,OAC/B,mBAAoB,EAAM,WAC1B,wBAAyB,EAAM,qBAC/B,qBAAsB,EAAM,kBAC7B,CAED,MADA,GAAU,EAAK,CAAC,GAAG,EAAS,GAAG,OAAO,KAAK,EAAO,CAAC,CAAC,CAC7C,GACP,CACgC,EAAQ,OACnC,EAAK,CACZ,EAAO,MACL,EAAO,IAAI,+DAA+D,EAAI,UAAU,CACzF,CACD,KAAK,QAAQ,KAAK,EAAE,CAItB,EAAO,KAAK,EAAO,MAAM,+DAA+D,EAAK,GAAG,CAAC"}
@@ -1,2 +1,2 @@
1
- import{t as e}from"./logger-B-LXIf3U.mjs";import{E as t}from"./codecs-BE3Wmoh8.mjs";import{a as n}from"./readTranscendYaml-D-J1ilS0.mjs";import{t as r}from"./done-input-validation-DLR0-MJ7.mjs";import{ConsentTrackerStatus as i,DataFlowScope as a}from"@transcend-io/privacy-types";import{decodeCodec as o}from"@transcend-io/type-utils";import{existsSync as s,readFileSync as c}from"node:fs";import l from"colors";import*as u from"io-ts";function d({file:d,output:f}){r(this.process.exit),s(d)||(e.error(l.red(`File does not exist: --file="${d}"`)),this.process.exit(1));let p=o(u.array(t),c(d,`utf-8`)),m=[],h=[];p.forEach(e=>{e.dataFlows.filter(({type:e})=>e!==a.CSP).forEach(e=>{m.push({value:e.value,type:e.type,status:i.Live,trackingPurposes:e.trackingPurposes})}),e.cookies.forEach(e=>{h.push({name:e.name,status:i.Live,trackingPurposes:e.trackingPurposes})})}),n(f,{"data-flows":m,cookies:h}),e.info(l.green(`Successfully wrote ${m.length} data flows and ${h.length} cookies to file "${f}"`))}export{d as consentManagerServiceJsonToYml};
2
- //# sourceMappingURL=impl-D-cp0CYr.mjs.map
1
+ import{t as e}from"./logger-Bj782ZYD.mjs";import{E as t}from"./codecs-CeDPaLYa.mjs";import{a as n}from"./readTranscendYaml-DVkQL2SC.mjs";import{t as r}from"./done-input-validation-BcNBxhEs.mjs";import{ConsentTrackerStatus as i,DataFlowScope as a}from"@transcend-io/privacy-types";import{decodeCodec as o}from"@transcend-io/type-utils";import{existsSync as s,readFileSync as c}from"node:fs";import l from"colors";import*as u from"io-ts";function d({file:d,output:f}){r(this.process.exit),s(d)||(e.error(l.red(`File does not exist: --file="${d}"`)),this.process.exit(1));let p=o(u.array(t),c(d,`utf-8`)),m=[],h=[];p.forEach(e=>{e.dataFlows.filter(({type:e})=>e!==a.CSP).forEach(e=>{m.push({value:e.value,type:e.type,status:i.Live,trackingPurposes:e.trackingPurposes})}),e.cookies.forEach(e=>{h.push({name:e.name,status:i.Live,trackingPurposes:e.trackingPurposes})})}),n(f,{"data-flows":m,cookies:h}),e.info(l.green(`Successfully wrote ${m.length} data flows and ${h.length} cookies to file "${f}"`))}export{d as consentManagerServiceJsonToYml};
2
+ //# sourceMappingURL=impl-CzvCA0Ev.mjs.map
@@ -1 +1 @@
1
- {"version":3,"file":"impl-D-cp0CYr.mjs","names":[],"sources":["../src/commands/inventory/consent-manager-service-json-to-yml/impl.ts"],"sourcesContent":["import { existsSync, readFileSync } from 'node:fs';\n\nimport { ConsentTrackerStatus, DataFlowScope } from '@transcend-io/privacy-types';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport colors from 'colors';\nimport * as t from 'io-ts';\n\nimport { ConsentManagerServiceMetadata, CookieInput, DataFlowInput } from '../../../codecs.js';\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { writeTranscendYaml } from '../../../lib/readTranscendYaml.js';\nimport { logger } from '../../../logger.js';\n\nexport interface ConsentManagerServiceJsonToYmlCommandFlags {\n file: string;\n output: string;\n}\n\nexport function consentManagerServiceJsonToYml(\n this: LocalContext,\n { file, output }: ConsentManagerServiceJsonToYmlCommandFlags,\n): void {\n doneInputValidation(this.process.exit);\n\n // Ensure files exist\n if (!existsSync(file)) {\n logger.error(colors.red(`File does not exist: --file=\"${file}\"`));\n this.process.exit(1);\n }\n\n // Read in each consent manager configuration\n const services = decodeCodec(t.array(ConsentManagerServiceMetadata), readFileSync(file, 'utf-8'));\n\n // Create data flows and cookie configurations\n const dataFlows: DataFlowInput[] = [];\n const cookies: CookieInput[] = [];\n services.forEach((service) => {\n service.dataFlows\n .filter(({ type }) => type !== DataFlowScope.CSP)\n .forEach((dataFlow) => {\n dataFlows.push({\n value: dataFlow.value,\n type: dataFlow.type,\n status: ConsentTrackerStatus.Live,\n trackingPurposes: dataFlow.trackingPurposes,\n });\n });\n\n service.cookies.forEach((cookie) => {\n cookies.push({\n name: cookie.name,\n status: ConsentTrackerStatus.Live,\n trackingPurposes: cookie.trackingPurposes,\n });\n });\n });\n\n // write to disk\n writeTranscendYaml(output, {\n 'data-flows': dataFlows,\n cookies,\n });\n\n logger.info(\n colors.green(\n `Successfully wrote ${dataFlows.length} data flows and ${cookies.length} cookies to file \"${output}\"`,\n ),\n );\n}\n"],"mappings":"obAkBA,SAAgB,EAEd,CAAE,OAAM,UACF,CACN,EAAoB,KAAK,QAAQ,KAAK,CAGjC,EAAW,EAAK,GACnB,EAAO,MAAM,EAAO,IAAI,gCAAgC,EAAK,GAAG,CAAC,CACjE,KAAK,QAAQ,KAAK,EAAE,EAItB,IAAM,EAAW,EAAY,EAAE,MAAM,EAA8B,CAAE,EAAa,EAAM,QAAQ,CAAC,CAG3F,EAA6B,EAAE,CAC/B,EAAyB,EAAE,CACjC,EAAS,QAAS,GAAY,CAC5B,EAAQ,UACL,QAAQ,CAAE,UAAW,IAAS,EAAc,IAAI,CAChD,QAAS,GAAa,CACrB,EAAU,KAAK,CACb,MAAO,EAAS,MAChB,KAAM,EAAS,KACf,OAAQ,EAAqB,KAC7B,iBAAkB,EAAS,iBAC5B,CAAC,EACF,CAEJ,EAAQ,QAAQ,QAAS,GAAW,CAClC,EAAQ,KAAK,CACX,KAAM,EAAO,KACb,OAAQ,EAAqB,KAC7B,iBAAkB,EAAO,iBAC1B,CAAC,EACF,EACF,CAGF,EAAmB,EAAQ,CACzB,aAAc,EACd,UACD,CAAC,CAEF,EAAO,KACL,EAAO,MACL,sBAAsB,EAAU,OAAO,kBAAkB,EAAQ,OAAO,oBAAoB,EAAO,GACpG,CACF"}
1
+ {"version":3,"file":"impl-CzvCA0Ev.mjs","names":[],"sources":["../src/commands/inventory/consent-manager-service-json-to-yml/impl.ts"],"sourcesContent":["import { existsSync, readFileSync } from 'node:fs';\n\nimport { ConsentTrackerStatus, DataFlowScope } from '@transcend-io/privacy-types';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport colors from 'colors';\nimport * as t from 'io-ts';\n\nimport { ConsentManagerServiceMetadata, CookieInput, DataFlowInput } from '../../../codecs.js';\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { writeTranscendYaml } from '../../../lib/readTranscendYaml.js';\nimport { logger } from '../../../logger.js';\n\nexport interface ConsentManagerServiceJsonToYmlCommandFlags {\n file: string;\n output: string;\n}\n\nexport function consentManagerServiceJsonToYml(\n this: LocalContext,\n { file, output }: ConsentManagerServiceJsonToYmlCommandFlags,\n): void {\n doneInputValidation(this.process.exit);\n\n // Ensure files exist\n if (!existsSync(file)) {\n logger.error(colors.red(`File does not exist: --file=\"${file}\"`));\n this.process.exit(1);\n }\n\n // Read in each consent manager configuration\n const services = decodeCodec(t.array(ConsentManagerServiceMetadata), readFileSync(file, 'utf-8'));\n\n // Create data flows and cookie configurations\n const dataFlows: DataFlowInput[] = [];\n const cookies: CookieInput[] = [];\n services.forEach((service) => {\n service.dataFlows\n .filter(({ type }) => type !== DataFlowScope.CSP)\n .forEach((dataFlow) => {\n dataFlows.push({\n value: dataFlow.value,\n type: dataFlow.type,\n status: ConsentTrackerStatus.Live,\n trackingPurposes: dataFlow.trackingPurposes,\n });\n });\n\n service.cookies.forEach((cookie) => {\n cookies.push({\n name: cookie.name,\n status: ConsentTrackerStatus.Live,\n trackingPurposes: cookie.trackingPurposes,\n });\n });\n });\n\n // write to disk\n writeTranscendYaml(output, {\n 'data-flows': dataFlows,\n cookies,\n });\n\n logger.info(\n colors.green(\n `Successfully wrote ${dataFlows.length} data flows and ${cookies.length} cookies to file \"${output}\"`,\n ),\n );\n}\n"],"mappings":"obAkBA,SAAgB,EAEd,CAAE,OAAM,UACF,CACN,EAAoB,KAAK,QAAQ,KAAK,CAGjC,EAAW,EAAK,GACnB,EAAO,MAAM,EAAO,IAAI,gCAAgC,EAAK,GAAG,CAAC,CACjE,KAAK,QAAQ,KAAK,EAAE,EAItB,IAAM,EAAW,EAAY,EAAE,MAAM,EAA8B,CAAE,EAAa,EAAM,QAAQ,CAAC,CAG3F,EAA6B,EAAE,CAC/B,EAAyB,EAAE,CACjC,EAAS,QAAS,GAAY,CAC5B,EAAQ,UACL,QAAQ,CAAE,UAAW,IAAS,EAAc,IAAI,CAChD,QAAS,GAAa,CACrB,EAAU,KAAK,CACb,MAAO,EAAS,MAChB,KAAM,EAAS,KACf,OAAQ,EAAqB,KAC7B,iBAAkB,EAAS,iBAC5B,CAAC,EACF,CAEJ,EAAQ,QAAQ,QAAS,GAAW,CAClC,EAAQ,KAAK,CACX,KAAM,EAAO,KACb,OAAQ,EAAqB,KAC7B,iBAAkB,EAAO,iBAC1B,CAAC,EACF,EACF,CAGF,EAAmB,EAAQ,CACzB,aAAc,EACd,UACD,CAAC,CAEF,EAAO,KACL,EAAO,MACL,sBAAsB,EAAU,OAAO,kBAAkB,EAAQ,OAAO,oBAAoB,EAAO,GACpG,CACF"}
@@ -1,2 +1,2 @@
1
- import{t as e}from"./pullManualEnrichmentIdentifiersToCsv-B_4REnga.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({auth:n,transcendUrl:r,file:i,concurrency:a,actions:o,sombraAuth:s}){t(this.process.exit),await e({file:i,transcendUrl:r,concurrency:a,requestActions:o,auth:n,sombraAuth:s})}export{n as pullIdentifiers};
2
- //# sourceMappingURL=impl-CSChmq_t.mjs.map
1
+ import{t as e}from"./pullManualEnrichmentIdentifiersToCsv-kpGy9H7T.mjs";import{t}from"./done-input-validation-BcNBxhEs.mjs";async function n({auth:n,transcendUrl:r,file:i,concurrency:a,actions:o,sombraAuth:s}){t(this.process.exit),await e({file:i,transcendUrl:r,concurrency:a,requestActions:o,auth:n,sombraAuth:s})}export{n as pullIdentifiers};
2
+ //# sourceMappingURL=impl-D1DmW5-P.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"impl-D1DmW5-P.mjs","names":[],"sources":["../src/commands/request/preflight/pull-identifiers/impl.ts"],"sourcesContent":["import type { RequestAction } from '@transcend-io/privacy-types';\n\nimport type { LocalContext } from '../../../../context.js';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation.js';\nimport { pullManualEnrichmentIdentifiersToCsv } from '../../../../lib/manual-enrichment/index.js';\n\nexport interface PullIdentifiersCommandFlags {\n auth: string;\n sombraAuth?: string;\n transcendUrl: string;\n file: string;\n actions?: RequestAction[];\n concurrency: number;\n}\n\nexport async function pullIdentifiers(\n this: LocalContext,\n { auth, transcendUrl, file, concurrency, actions, sombraAuth }: PullIdentifiersCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await pullManualEnrichmentIdentifiersToCsv({\n file,\n transcendUrl,\n concurrency,\n requestActions: actions,\n auth,\n sombraAuth,\n });\n}\n"],"mappings":"4HAeA,eAAsB,EAEpB,CAAE,OAAM,eAAc,OAAM,cAAa,UAAS,cACnC,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAM,EAAqC,CACzC,OACA,eACA,cACA,eAAgB,EAChB,OACA,aACD,CAAC"}
@@ -1,2 +1,2 @@
1
- import{t as e}from"./pushManualEnrichmentIdentifiersFromCsv-DOvAzMyt.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({auth:n,transcendUrl:r,file:i,enricherId:a,concurrency:o,markSilent:s,sombraAuth:c}){t(this.process.exit),await e({file:i,transcendUrl:r,enricherId:a,concurrency:o,markSilent:s,auth:n,sombraAuth:c})}export{n as pushIdentifiers};
2
- //# sourceMappingURL=impl-CCUsnhoW.mjs.map
1
+ import{t as e}from"./pushManualEnrichmentIdentifiersFromCsv-DXqf8WWy.mjs";import{t}from"./done-input-validation-BcNBxhEs.mjs";async function n({auth:n,transcendUrl:r,file:i,enricherId:a,concurrency:o,markSilent:s,sombraAuth:c}){t(this.process.exit),await e({file:i,transcendUrl:r,enricherId:a,concurrency:o,markSilent:s,auth:n,sombraAuth:c})}export{n as pushIdentifiers};
2
+ //# sourceMappingURL=impl-D41c_KGj.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"impl-D41c_KGj.mjs","names":[],"sources":["../src/commands/request/preflight/push-identifiers/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../../context.js';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation.js';\nimport { pushManualEnrichmentIdentifiersFromCsv } from '../../../../lib/manual-enrichment/index.js';\n\nexport interface PushIdentifiersCommandFlags {\n auth: string;\n enricherId: string;\n sombraAuth?: string;\n transcendUrl: string;\n file: string;\n markSilent: boolean;\n concurrency: number;\n}\n\nexport async function pushIdentifiers(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n file,\n enricherId,\n concurrency,\n markSilent,\n sombraAuth,\n }: PushIdentifiersCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await pushManualEnrichmentIdentifiersFromCsv({\n file,\n transcendUrl,\n enricherId,\n concurrency,\n markSilent,\n auth,\n sombraAuth,\n });\n}\n"],"mappings":"8HAcA,eAAsB,EAEpB,CACE,OACA,eACA,OACA,aACA,cACA,aACA,cAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAM,EAAuC,CAC3C,OACA,eACA,aACA,cACA,aACA,OACA,aACD,CAAC"}
@@ -0,0 +1,2 @@
1
+ import{t as e}from"./skipPreflightJobs-CYuoMG3z.mjs";import{t}from"./done-input-validation-BcNBxhEs.mjs";async function n({auth:n,transcendUrl:r,enricherIds:i}){t(this.process.exit),await e({transcendUrl:r,auth:n,enricherIds:i})}export{n as skipPreflightJobs};
2
+ //# sourceMappingURL=impl-DEpCg7UP.mjs.map
@@ -1 +1 @@
1
- {"version":3,"file":"impl-tbGnvKFm.mjs","names":["skipPreflightJobsHelper"],"sources":["../src/commands/request/skip-preflight-jobs/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { skipPreflightJobs as skipPreflightJobsHelper } from '../../../lib/requests/index.js';\n\nexport interface SkipPreflightJobsCommandFlags {\n auth: string;\n enricherIds: string[];\n transcendUrl: string;\n}\n\nexport async function skipPreflightJobs(\n this: LocalContext,\n { auth, transcendUrl, enricherIds }: SkipPreflightJobsCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await skipPreflightJobsHelper({\n transcendUrl,\n auth,\n enricherIds,\n });\n}\n"],"mappings":"yGAUA,eAAsB,EAEpB,CAAE,OAAM,eAAc,eACP,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAwB,CAC5B,eACA,OACA,cACD,CAAC"}
+ {"version":3,"file":"impl-DEpCg7UP.mjs","names":["skipPreflightJobsHelper"],"sources":["../src/commands/request/skip-preflight-jobs/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { skipPreflightJobs as skipPreflightJobsHelper } from '../../../lib/requests/index.js';\n\nexport interface SkipPreflightJobsCommandFlags {\n auth: string;\n enricherIds: string[];\n transcendUrl: string;\n}\n\nexport async function skipPreflightJobs(\n this: LocalContext,\n { auth, transcendUrl, enricherIds }: SkipPreflightJobsCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await skipPreflightJobsHelper({\n transcendUrl,\n auth,\n enricherIds,\n });\n}\n"],"mappings":"yGAUA,eAAsB,EAEpB,CAAE,OAAM,eAAc,eACP,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAwB,CAC5B,eACA,OACA,cACD,CAAC"}
@@ -1,2 +1,2 @@
- import{t as e}from"./logger-B-LXIf3U.mjs";import{n as t}from"./buildXdiSyncEndpoint-Cb-pvpak.mjs";import{t as n}from"./validateTranscendAuth-1W1IylqE.mjs";import{t as r}from"./done-input-validation-DLR0-MJ7.mjs";import{writeFileSync as i}from"node:fs";import a from"colors";async function o({auth:o,xdiLocation:s,file:c,removeIpAddresses:l,domainBlockList:u,xdiAllowedCommands:d,transcendUrl:f}){r(this.process.exit);let{syncGroups:p,html:m}=await t(await n(o),{xdiLocation:s,transcendUrl:f,removeIpAddresses:l,domainBlockList:u.length>0?u:void 0,xdiAllowedCommands:d});e.info(a.green(`Successfully constructed sync endpoint for sync groups: ${JSON.stringify(p,null,2)}`)),i(c,m),e.info(a.green(`Wrote configuration to file "${c}"!`))}export{o as buildXdiSyncEndpoint};
- //# sourceMappingURL=impl-DGuwD_qz.mjs.map
+ import{t as e}from"./logger-Bj782ZYD.mjs";import{n as t}from"./buildXdiSyncEndpoint-CBbcir-p.mjs";import{t as n}from"./validateTranscendAuth-Cuh2Qfdl.mjs";import{t as r}from"./done-input-validation-BcNBxhEs.mjs";import{writeFileSync as i}from"node:fs";import a from"colors";async function o({auth:o,xdiLocation:s,file:c,removeIpAddresses:l,domainBlockList:u,xdiAllowedCommands:d,transcendUrl:f}){r(this.process.exit);let{syncGroups:p,html:m}=await t(await n(o),{xdiLocation:s,transcendUrl:f,removeIpAddresses:l,domainBlockList:u.length>0?u:void 0,xdiAllowedCommands:d});e.info(a.green(`Successfully constructed sync endpoint for sync groups: ${JSON.stringify(p,null,2)}`)),i(c,m),e.info(a.green(`Wrote configuration to file "${c}"!`))}export{o as buildXdiSyncEndpoint};
+ //# sourceMappingURL=impl-DHOh4ypd.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-DGuwD_qz.mjs","names":["buildXdiSyncEndpointHelper"],"sources":["../src/commands/consent/build-xdi-sync-endpoint/impl.ts"],"sourcesContent":["import { writeFileSync } from 'node:fs';\n\nimport colors from 'colors';\n\nimport type { LocalContext } from '../../../context.js';\nimport { validateTranscendAuth } from '../../../lib/api-keys/index.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { buildXdiSyncEndpoint as buildXdiSyncEndpointHelper } from '../../../lib/consent-manager/index.js';\nimport { logger } from '../../../logger.js';\n\nexport interface BuildXdiSyncEndpointCommandFlags {\n auth: string;\n xdiLocation: string;\n file: string;\n removeIpAddresses: boolean;\n domainBlockList: string[];\n xdiAllowedCommands: string;\n transcendUrl: string;\n}\n\nexport async function buildXdiSyncEndpoint(\n this: LocalContext,\n {\n auth,\n xdiLocation,\n file,\n removeIpAddresses,\n domainBlockList,\n xdiAllowedCommands,\n transcendUrl,\n }: BuildXdiSyncEndpointCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Build the sync endpoint\n const { syncGroups, html } = await buildXdiSyncEndpointHelper(apiKeyOrList, {\n xdiLocation,\n transcendUrl,\n removeIpAddresses,\n domainBlockList: domainBlockList.length > 0 ? domainBlockList : undefined,\n xdiAllowedCommands,\n });\n\n // Log success\n logger.info(\n colors.green(\n `Successfully constructed sync endpoint for sync groups: ${JSON.stringify(\n syncGroups,\n null,\n 2,\n )}`,\n ),\n );\n\n // Write to disk\n writeFileSync(file, html);\n logger.info(colors.green(`Wrote configuration to file \"${file}\"!`));\n}\n"],"mappings":"kRAoBA,eAAsB,EAEpB,CACE,OACA,cACA,OACA,oBACA,kBACA,qBACA,gBAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAMtC,GAAM,CAAE,aAAY,QAAS,MAAMA,EAHd,MAAM,EAAsB,EAAK,CAGsB,CAC1E,cACA,eACA,oBACA,gBAAiB,EAAgB,OAAS,EAAI,EAAkB,IAAA,GAChE,qBACD,CAAC,CAGF,EAAO,KACL,EAAO,MACL,2DAA2D,KAAK,UAC9D,EACA,KACA,EACD,GACF,CACF,CAGD,EAAc,EAAM,EAAK,CACzB,EAAO,KAAK,EAAO,MAAM,gCAAgC,EAAK,IAAI,CAAC"}
+ {"version":3,"file":"impl-DHOh4ypd.mjs","names":["buildXdiSyncEndpointHelper"],"sources":["../src/commands/consent/build-xdi-sync-endpoint/impl.ts"],"sourcesContent":["import { writeFileSync } from 'node:fs';\n\nimport colors from 'colors';\n\nimport type { LocalContext } from '../../../context.js';\nimport { validateTranscendAuth } from '../../../lib/api-keys/index.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { buildXdiSyncEndpoint as buildXdiSyncEndpointHelper } from '../../../lib/consent-manager/index.js';\nimport { logger } from '../../../logger.js';\n\nexport interface BuildXdiSyncEndpointCommandFlags {\n auth: string;\n xdiLocation: string;\n file: string;\n removeIpAddresses: boolean;\n domainBlockList: string[];\n xdiAllowedCommands: string;\n transcendUrl: string;\n}\n\nexport async function buildXdiSyncEndpoint(\n this: LocalContext,\n {\n auth,\n xdiLocation,\n file,\n removeIpAddresses,\n domainBlockList,\n xdiAllowedCommands,\n transcendUrl,\n }: BuildXdiSyncEndpointCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Build the sync endpoint\n const { syncGroups, html } = await buildXdiSyncEndpointHelper(apiKeyOrList, {\n xdiLocation,\n transcendUrl,\n removeIpAddresses,\n domainBlockList: domainBlockList.length > 0 ? domainBlockList : undefined,\n xdiAllowedCommands,\n });\n\n // Log success\n logger.info(\n colors.green(\n `Successfully constructed sync endpoint for sync groups: ${JSON.stringify(\n syncGroups,\n null,\n 2,\n )}`,\n ),\n );\n\n // Write to disk\n writeFileSync(file, html);\n logger.info(colors.green(`Wrote configuration to file \"${file}\"!`));\n}\n"],"mappings":"kRAoBA,eAAsB,EAEpB,CACE,OACA,cACA,OACA,oBACA,kBACA,qBACA,gBAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAMtC,GAAM,CAAE,aAAY,QAAS,MAAMA,EAHd,MAAM,EAAsB,EAAK,CAGsB,CAC1E,cACA,eACA,oBACA,gBAAiB,EAAgB,OAAS,EAAI,EAAkB,IAAA,GAChE,qBACD,CAAC,CAGF,EAAO,KACL,EAAO,MACL,2DAA2D,KAAK,UAC9D,EACA,KACA,EACD,GACF,CACF,CAGD,EAAc,EAAM,EAAK,CACzB,EAAO,KAAK,EAAO,MAAM,gCAAgC,EAAK,IAAI,CAAC"}
@@ -1,2 +1,2 @@
- import{t as e}from"./bulkRetryEnrichers-B-Szmin-.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({auth:n,enricherId:r,actions:i,requestEnricherStatuses:a,requestIds:o,createdAtBefore:s,createdAtAfter:c,updatedAtBefore:l,updatedAtAfter:u,concurrency:d,transcendUrl:f}){t(this.process.exit),await e({auth:n,enricherId:r,requestActions:i,requestEnricherStatuses:a,requestIds:o,createdAtBefore:s?new Date(s):void 0,createdAtAfter:c?new Date(c):void 0,updatedAtBefore:l?new Date(l):void 0,updatedAtAfter:u?new Date(u):void 0,concurrency:d,transcendUrl:f})}export{n as enricherRestart};
- //# sourceMappingURL=impl-D9NjIwEi.mjs.map
+ import{t as e}from"./bulkRetryEnrichers-DuYXD-64.mjs";import{t}from"./done-input-validation-BcNBxhEs.mjs";async function n({auth:n,enricherId:r,actions:i,requestEnricherStatuses:a,requestIds:o,createdAtBefore:s,createdAtAfter:c,updatedAtBefore:l,updatedAtAfter:u,concurrency:d,transcendUrl:f}){t(this.process.exit),await e({auth:n,enricherId:r,requestActions:i,requestEnricherStatuses:a,requestIds:o,createdAtBefore:s?new Date(s):void 0,createdAtAfter:c?new Date(c):void 0,updatedAtBefore:l?new Date(l):void 0,updatedAtAfter:u?new Date(u):void 0,concurrency:d,transcendUrl:f})}export{n as enricherRestart};
+ //# sourceMappingURL=impl-DJg0Ibxs.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"impl-DJg0Ibxs.mjs","names":[],"sources":["../src/commands/request/enricher-restart/impl.ts"],"sourcesContent":["import type { RequestAction, RequestEnricherStatus } from '@transcend-io/privacy-types';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { bulkRetryEnrichers } from '../../../lib/requests/index.js';\n\nexport interface EnricherRestartCommandFlags {\n auth: string;\n enricherId: string;\n actions?: RequestAction[];\n requestEnricherStatuses?: RequestEnricherStatus[];\n transcendUrl: string;\n concurrency: number;\n requestIds?: string[];\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n updatedAtBefore?: Date;\n updatedAtAfter?: Date;\n}\n\nexport async function enricherRestart(\n this: LocalContext,\n {\n auth,\n enricherId,\n actions,\n requestEnricherStatuses,\n requestIds,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n concurrency,\n transcendUrl,\n }: EnricherRestartCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await bulkRetryEnrichers({\n auth,\n enricherId,\n requestActions: actions,\n requestEnricherStatuses,\n requestIds,\n createdAtBefore: createdAtBefore ? new Date(createdAtBefore) : undefined,\n createdAtAfter: createdAtAfter ? new Date(createdAtAfter) : undefined,\n updatedAtBefore: updatedAtBefore ? new Date(updatedAtBefore) : undefined,\n updatedAtAfter: updatedAtAfter ? new Date(updatedAtAfter) : undefined,\n concurrency,\n transcendUrl,\n });\n}\n"],"mappings":"0GAoBA,eAAsB,EAEpB,CACE,OACA,aACA,UACA,0BACA,aACA,kBACA,iBACA,kBACA,iBACA,cACA,gBAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAM,EAAmB,CACvB,OACA,aACA,eAAgB,EAChB,0BACA,aACA,gBAAiB,EAAkB,IAAI,KAAK,EAAgB,CAAG,IAAA,GAC/D,eAAgB,EAAiB,IAAI,KAAK,EAAe,CAAG,IAAA,GAC5D,gBAAiB,EAAkB,IAAI,KAAK,EAAgB,CAAG,IAAA,GAC/D,eAAgB,EAAiB,IAAI,KAAK,EAAe,CAAG,IAAA,GAC5D,cACA,eACD,CAAC"}
@@ -0,0 +1,2 @@
+ import{n as e}from"./constants-muOBBQA_.mjs";import{t}from"./logger-Bj782ZYD.mjs";import{s as n}from"./writeCsv-C4pjXGsD.mjs";import{t as r}from"./pullAllDatapoints-Bbmky50p.mjs";import{t as i}from"./done-input-validation-BcNBxhEs.mjs";import{groupBy as a,uniq as o}from"lodash-es";import s from"colors";import{buildTranscendGraphQLClient as c}from"@transcend-io/sdk";async function l({auth:l,file:u,transcendUrl:d,dataSiloIds:f,includeAttributes:p,includeGuessedCategories:m,parentCategories:h,subCategories:g=[]}){i(this.process.exit);try{let e=await r(c(d,l),{dataSiloIds:f,includeGuessedCategories:m,parentCategories:h,includeAttributes:p,subCategories:g});t.info(s.magenta(`Writing datapoints to file "${u}"...`));let i=[];await n(u,e.map(e=>{let t={"Property ID":e.id,"Data Silo":e.dataSilo.title,Object:e.dataPoint.name,"Object Path":e.dataPoint.path.join(`.`),Property:e.name,"Property Description":e.description,"Data Categories":e.categories.map(e=>`${e.category}:${e.name}`).join(`, `),"Guessed Category":e.pendingCategoryGuesses?.[0]?`${e.pendingCategoryGuesses[0].category.category}:${e.pendingCategoryGuesses[0].category.name}`:``,"Processing Purposes":e.purposes.map(e=>`${e.purpose}:${e.name}`).join(`, `),...Object.entries(a(e.attributeValues||[],({attributeKey:e})=>e.name)).reduce((e,[t,n])=>(e[t]=n.map(e=>e.name).join(`,`),e),{})};return i=o([...i,...Object.keys(t)]),t}),i)}catch(e){t.error(s.red(`An error occurred syncing the datapoints: ${e.message}`)),this.process.exit(1)}t.info(s.green(`Successfully synced datapoints to disk at ${u}! View at ${e}`))}export{l as pullDatapoints};
+ //# sourceMappingURL=impl-DUdbbIpf.mjs.map
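Note: the pullDatapoints bundle above accumulates CSV headers while mapping rows: each row's keys are unioned into an outer headers array via lodash-es's uniq, since attribute columns appear only on rows that carry those attributes. A self-contained sketch of the trick (uniq is re-implemented here as a stand-in for the lodash-es import):

const uniq = <T>(xs: T[]): T[] => [...new Set(xs)];

let headers: string[] = [];
const rows = [{ a: 1 }, { a: 2, b: 3 }].map((row) => {
  headers = uniq([...headers, ...Object.keys(row)]);
  return row;
});
// headers -> ['a', 'b']; the writer then emits every row against the full set.
void rows;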