@transcend-io/cli 10.0.0 → 10.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (203)
  1. package/dist/{app-By_zDIkK.mjs → app-BfTrk2nc.mjs} +19 -19
  2. package/dist/{app-By_zDIkK.mjs.map → app-BfTrk2nc.mjs.map} +1 -1
  3. package/dist/{approvePrivacyRequests-1cguqGqq.mjs → approvePrivacyRequests-CWGZR2N6.mjs} +2 -2
  4. package/dist/{approvePrivacyRequests-1cguqGqq.mjs.map → approvePrivacyRequests-CWGZR2N6.mjs.map} +1 -1
  5. package/dist/bin/bash-complete.mjs +1 -1
  6. package/dist/bin/cli.mjs +1 -1
  7. package/dist/bin/deprecated-command.mjs +1 -1
  8. package/dist/{buildXdiSyncEndpoint-BMaMHO7Z.mjs → buildXdiSyncEndpoint-Cb-pvpak.mjs} +2 -2
  9. package/dist/{buildXdiSyncEndpoint-BMaMHO7Z.mjs.map → buildXdiSyncEndpoint-Cb-pvpak.mjs.map} +1 -1
  10. package/dist/{bulkRestartRequests-DEPSHov-.mjs → bulkRestartRequests-CKF_xpN0.mjs} +2 -2
  11. package/dist/{bulkRestartRequests-DEPSHov-.mjs.map → bulkRestartRequests-CKF_xpN0.mjs.map} +1 -1
  12. package/dist/{bulkRetryEnrichers-BLkcFKXC.mjs → bulkRetryEnrichers-B-Szmin-.mjs} +2 -2
  13. package/dist/{bulkRetryEnrichers-BLkcFKXC.mjs.map → bulkRetryEnrichers-B-Szmin-.mjs.map} +1 -1
  14. package/dist/{cancelPrivacyRequests-C8MZQvsq.mjs → cancelPrivacyRequests-DNiL13E_.mjs} +2 -2
  15. package/dist/{cancelPrivacyRequests-C8MZQvsq.mjs.map → cancelPrivacyRequests-DNiL13E_.mjs.map} +1 -1
  16. package/dist/{command-BUnCUxva.mjs → command-BXxoAjFo.mjs} +2 -2
  17. package/dist/{command-BUnCUxva.mjs.map → command-BXxoAjFo.mjs.map} +1 -1
  18. package/dist/{createExtraKeyHandler-C_0EVj10.mjs → createExtraKeyHandler-tubeaEjA.mjs} +2 -2
  19. package/dist/createExtraKeyHandler-tubeaEjA.mjs.map +1 -0
  20. package/dist/{createPreferenceAccessTokens-6WLr6z-l.mjs → createPreferenceAccessTokens-DqmFctn3.mjs} +2 -2
  21. package/dist/{createPreferenceAccessTokens-6WLr6z-l.mjs.map → createPreferenceAccessTokens-DqmFctn3.mjs.map} +1 -1
  22. package/dist/{createSombraGotInstance-CahOgD6V.mjs → createSombraGotInstance-D1Il9zUE.mjs} +2 -2
  23. package/dist/{createSombraGotInstance-CahOgD6V.mjs.map → createSombraGotInstance-D1Il9zUE.mjs.map} +1 -1
  24. package/dist/{downloadPrivacyRequestFiles-B2yduagB.mjs → downloadPrivacyRequestFiles-DlpgxqHF.mjs} +2 -2
  25. package/dist/{downloadPrivacyRequestFiles-B2yduagB.mjs.map → downloadPrivacyRequestFiles-DlpgxqHF.mjs.map} +1 -1
  26. package/dist/{fetchAllActions-C0l3wjQV.mjs → fetchAllActions-BJsPdnxy.mjs} +2 -2
  27. package/dist/{fetchAllActions-C0l3wjQV.mjs.map → fetchAllActions-BJsPdnxy.mjs.map} +1 -1
  28. package/dist/{fetchAllDataFlows-AQ9j_NRa.mjs → fetchAllDataFlows-D248lO6_.mjs} +2 -2
  29. package/dist/{fetchAllDataFlows-AQ9j_NRa.mjs.map → fetchAllDataFlows-D248lO6_.mjs.map} +1 -1
  30. package/dist/{fetchAllPreferenceTopics-Bn9PG-rO.mjs → fetchAllPreferenceTopics-ForE9GpZ.mjs} +2 -2
  31. package/dist/{fetchAllPreferenceTopics-Bn9PG-rO.mjs.map → fetchAllPreferenceTopics-ForE9GpZ.mjs.map} +1 -1
  32. package/dist/{fetchAllPurposes-CykSkZRY.mjs → fetchAllPurposes-ZdkO2fMp.mjs} +2 -2
  33. package/dist/{fetchAllPurposes-CykSkZRY.mjs.map → fetchAllPurposes-ZdkO2fMp.mjs.map} +1 -1
  34. package/dist/fetchAllPurposesAndPreferences-DD6OyA5t.mjs +2 -0
  35. package/dist/{fetchAllPurposesAndPreferences-Dog6N9L2.mjs.map → fetchAllPurposesAndPreferences-DD6OyA5t.mjs.map} +1 -1
  36. package/dist/{fetchAllRequestEnrichers-q34mRuE5.mjs → fetchAllRequestEnrichers-CK-kk5eg.mjs} +2 -2
  37. package/dist/{fetchAllRequestEnrichers-q34mRuE5.mjs.map → fetchAllRequestEnrichers-CK-kk5eg.mjs.map} +1 -1
  38. package/dist/{fetchAllRequestIdentifiers-YP-geTV4.mjs → fetchAllRequestIdentifiers-DrFFOt0m.mjs} +2 -2
  39. package/dist/{fetchAllRequestIdentifiers-YP-geTV4.mjs.map → fetchAllRequestIdentifiers-DrFFOt0m.mjs.map} +1 -1
  40. package/dist/{fetchAllRequests-DEPTEUbi.mjs → fetchAllRequests-DNQQsY4s.mjs} +2 -2
  41. package/dist/{fetchAllRequests-DEPTEUbi.mjs.map → fetchAllRequests-DNQQsY4s.mjs.map} +1 -1
  42. package/dist/{fetchApiKeys-DkBco7W0.mjs → fetchApiKeys-DjOr44xA.mjs} +2 -2
  43. package/dist/{fetchApiKeys-DkBco7W0.mjs.map → fetchApiKeys-DjOr44xA.mjs.map} +1 -1
  44. package/dist/{fetchCatalogs-CBk871k6.mjs → fetchCatalogs-BM4FCbcS.mjs} +2 -2
  45. package/dist/{fetchCatalogs-CBk871k6.mjs.map → fetchCatalogs-BM4FCbcS.mjs.map} +1 -1
  46. package/dist/{fetchConsentManagerId-DHDA5Py9.mjs → fetchConsentManagerId-CFkg3-RS.mjs} +2 -2
  47. package/dist/{fetchConsentManagerId-DHDA5Py9.mjs.map → fetchConsentManagerId-CFkg3-RS.mjs.map} +1 -1
  48. package/dist/{fetchIdentifiers-DjqjUnaw.mjs → fetchIdentifiers-pjQV4vUg.mjs} +2 -2
  49. package/dist/{fetchIdentifiers-DjqjUnaw.mjs.map → fetchIdentifiers-pjQV4vUg.mjs.map} +1 -1
  50. package/dist/{fetchRequestDataSilo-CF6XOTQ-.mjs → fetchRequestDataSilo-P4yA7Lyc.mjs} +2 -2
  51. package/dist/{fetchRequestDataSilo-CF6XOTQ-.mjs.map → fetchRequestDataSilo-P4yA7Lyc.mjs.map} +1 -1
  52. package/dist/{fetchRequestFilesForRequest-DrHGOdih.mjs → fetchRequestFilesForRequest-BbxrEKFK.mjs} +2 -2
  53. package/dist/{fetchRequestFilesForRequest-DrHGOdih.mjs.map → fetchRequestFilesForRequest-BbxrEKFK.mjs.map} +1 -1
  54. package/dist/{generateCrossAccountApiKeys-F11uqpc5.mjs → generateCrossAccountApiKeys-Bxc_dzMG.mjs} +2 -2
  55. package/dist/{generateCrossAccountApiKeys-F11uqpc5.mjs.map → generateCrossAccountApiKeys-Bxc_dzMG.mjs.map} +1 -1
  56. package/dist/{impl-0ooudQ_J2.mjs → impl-4ltdSmpl2.mjs} +2 -2
  57. package/dist/{impl-0ooudQ_J2.mjs.map → impl-4ltdSmpl2.mjs.map} +1 -1
  58. package/dist/{impl-BzupMfJi.mjs → impl-B19fH75P.mjs} +2 -2
  59. package/dist/{impl-BzupMfJi.mjs.map → impl-B19fH75P.mjs.map} +1 -1
  60. package/dist/{impl-CdoTu8TH.mjs → impl-BBMjv5YQ.mjs} +2 -2
  61. package/dist/{impl-CdoTu8TH.mjs.map → impl-BBMjv5YQ.mjs.map} +1 -1
  62. package/dist/{impl-Cwj9LeEI.mjs → impl-BKH3QRLi.mjs} +2 -2
  63. package/dist/{impl-Cwj9LeEI.mjs.map → impl-BKH3QRLi.mjs.map} +1 -1
  64. package/dist/{impl-KV3yZaHz2.mjs → impl-BOUm7wly2.mjs} +2 -2
  65. package/dist/{impl-KV3yZaHz2.mjs.map → impl-BOUm7wly2.mjs.map} +1 -1
  66. package/dist/{impl-r8tHyAHB.mjs → impl-BUC4ZelU.mjs} +2 -2
  67. package/dist/{impl-r8tHyAHB.mjs.map → impl-BUC4ZelU.mjs.map} +1 -1
  68. package/dist/{impl-dEQtD5uE.mjs → impl-BhTCp0kg.mjs} +2 -2
  69. package/dist/{impl-dEQtD5uE.mjs.map → impl-BhTCp0kg.mjs.map} +1 -1
  70. package/dist/{impl-f4UPMoS_2.mjs → impl-BlHU1bbJ2.mjs} +2 -2
  71. package/dist/{impl-f4UPMoS_2.mjs.map → impl-BlHU1bbJ2.mjs.map} +1 -1
  72. package/dist/{impl-CXK-D84c.mjs → impl-BwjguKHC.mjs} +2 -2
  73. package/dist/{impl-CXK-D84c.mjs.map → impl-BwjguKHC.mjs.map} +1 -1
  74. package/dist/{impl-VHp2K2bg.mjs → impl-C2o0eDzJ.mjs} +2 -2
  75. package/dist/{impl-VHp2K2bg.mjs.map → impl-C2o0eDzJ.mjs.map} +1 -1
  76. package/dist/{impl-2FbPcOv_2.mjs → impl-C8HKnjw82.mjs} +2 -2
  77. package/dist/{impl-2FbPcOv_2.mjs.map → impl-C8HKnjw82.mjs.map} +1 -1
  78. package/dist/{impl-CMX0qQr_2.mjs → impl-CCUsnhoW2.mjs} +2 -2
  79. package/dist/{impl-CMX0qQr_2.mjs.map → impl-CCUsnhoW2.mjs.map} +1 -1
  80. package/dist/{impl-B8iVBYdg.mjs → impl-CCc-wXqD.mjs} +2 -2
  81. package/dist/{impl-B8iVBYdg.mjs.map → impl-CCc-wXqD.mjs.map} +1 -1
  82. package/dist/{impl-ArGeiHuz.mjs → impl-CMmyv1cl.mjs} +2 -2
  83. package/dist/{impl-ArGeiHuz.mjs.map → impl-CMmyv1cl.mjs.map} +1 -1
  84. package/dist/{impl-1U4QBT_L.mjs → impl-CNez1OAw.mjs} +2 -2
  85. package/dist/impl-CNez1OAw.mjs.map +1 -0
  86. package/dist/{impl-DZnSlfwn2.mjs → impl-CNykdy3e2.mjs} +2 -2
  87. package/dist/{impl-DZnSlfwn2.mjs.map → impl-CNykdy3e2.mjs.map} +1 -1
  88. package/dist/{impl--Lmj1RHh2.mjs → impl-CSChmq_t2.mjs} +2 -2
  89. package/dist/{impl--Lmj1RHh2.mjs.map → impl-CSChmq_t2.mjs.map} +1 -1
  90. package/dist/{impl-dlRlTYAQ.mjs → impl-Ce9K4OCp.mjs} +2 -2
  91. package/dist/{impl-dlRlTYAQ.mjs.map → impl-Ce9K4OCp.mjs.map} +1 -1
  92. package/dist/{impl-CoLIqiH-2.mjs → impl-ChCqHkOc2.mjs} +2 -2
  93. package/dist/{impl-CoLIqiH-2.mjs.map → impl-ChCqHkOc2.mjs.map} +1 -1
  94. package/dist/{impl-DXWN22xV.mjs → impl-CqEwwWeD.mjs} +2 -2
  95. package/dist/{impl-DXWN22xV.mjs.map → impl-CqEwwWeD.mjs.map} +1 -1
  96. package/dist/{impl-CeLfAnyA2.mjs → impl-CqXFyvgV2.mjs} +2 -2
  97. package/dist/{impl-CeLfAnyA2.mjs.map → impl-CqXFyvgV2.mjs.map} +1 -1
  98. package/dist/{impl-ph0q6K3i.mjs → impl-CxLSJk2P.mjs} +2 -2
  99. package/dist/{impl-ph0q6K3i.mjs.map → impl-CxLSJk2P.mjs.map} +1 -1
  100. package/dist/{impl-DhIyASha.mjs → impl-CzU9WTiW.mjs} +2 -2
  101. package/dist/{impl-DhIyASha.mjs.map → impl-CzU9WTiW.mjs.map} +1 -1
  102. package/dist/{impl-BpUksm1b2.mjs → impl-D9NjIwEi2.mjs} +2 -2
  103. package/dist/{impl-BpUksm1b2.mjs.map → impl-D9NjIwEi2.mjs.map} +1 -1
  104. package/dist/{impl-BkyC7nnu.mjs → impl-DEWXA_QC.mjs} +2 -2
  105. package/dist/{impl-BkyC7nnu.mjs.map → impl-DEWXA_QC.mjs.map} +1 -1
  106. package/dist/{impl-DgyjJ8RY2.mjs → impl-DGiPB5Vq2.mjs} +2 -2
  107. package/dist/{impl-DgyjJ8RY2.mjs.map → impl-DGiPB5Vq2.mjs.map} +1 -1
  108. package/dist/{impl-CyJBbyuF.mjs → impl-DGuwD_qz.mjs} +2 -2
  109. package/dist/{impl-CyJBbyuF.mjs.map → impl-DGuwD_qz.mjs.map} +1 -1
  110. package/dist/{impl-D6nwGrO8.mjs → impl-DGzvE8aJ.mjs} +2 -2
  111. package/dist/{impl-D6nwGrO8.mjs.map → impl-DGzvE8aJ.mjs.map} +1 -1
  112. package/dist/{impl-Bc8Es_bT.mjs → impl-DTp9OQIZ.mjs} +2 -2
  113. package/dist/{impl-Bc8Es_bT.mjs.map → impl-DTp9OQIZ.mjs.map} +1 -1
  114. package/dist/{impl-DGRuk3AB.mjs → impl-DhscnXSw.mjs} +2 -2
  115. package/dist/{impl-DGRuk3AB.mjs.map → impl-DhscnXSw.mjs.map} +1 -1
  116. package/dist/{impl-BWjBYTQZ.mjs → impl-Dk7MdX-1.mjs} +2 -2
  117. package/dist/{impl-BWjBYTQZ.mjs.map → impl-Dk7MdX-1.mjs.map} +1 -1
  118. package/dist/{impl-Dny1LX9A.mjs → impl-DsNPvet4.mjs} +2 -2
  119. package/dist/{impl-Dny1LX9A.mjs.map → impl-DsNPvet4.mjs.map} +1 -1
  120. package/dist/{impl-DcC8_dCy.mjs → impl-DxUFb0vv.mjs} +2 -2
  121. package/dist/{impl-DcC8_dCy.mjs.map → impl-DxUFb0vv.mjs.map} +1 -1
  122. package/dist/{impl-y1I9Muyc2.mjs → impl-JThkrXiI2.mjs} +2 -2
  123. package/dist/{impl-y1I9Muyc2.mjs.map → impl-JThkrXiI2.mjs.map} +1 -1
  124. package/dist/{impl-Cq_RqK0_2.mjs → impl-KDuBh4bu2.mjs} +2 -2
  125. package/dist/{impl-Cq_RqK0_2.mjs.map → impl-KDuBh4bu2.mjs.map} +1 -1
  126. package/dist/{impl-C05tQHSq.mjs → impl-MpkLBntW.mjs} +2 -2
  127. package/dist/{impl-C05tQHSq.mjs.map → impl-MpkLBntW.mjs.map} +1 -1
  128. package/dist/{impl-Zr8uLP_n.mjs → impl-P_NDC3cX.mjs} +2 -2
  129. package/dist/{impl-Zr8uLP_n.mjs.map → impl-P_NDC3cX.mjs.map} +1 -1
  130. package/dist/{impl-D-ldjJzl2.mjs → impl-c7rUQYDc2.mjs} +2 -2
  131. package/dist/{impl-D-ldjJzl2.mjs.map → impl-c7rUQYDc2.mjs.map} +1 -1
  132. package/dist/{impl-G1brwI4o.mjs → impl-fqOKTw5J.mjs} +2 -2
  133. package/dist/{impl-G1brwI4o.mjs.map → impl-fqOKTw5J.mjs.map} +1 -1
  134. package/dist/{impl-Dfc_yQML2.mjs → impl-oiBTZqQS2.mjs} +2 -2
  135. package/dist/{impl-Dfc_yQML2.mjs.map → impl-oiBTZqQS2.mjs.map} +1 -1
  136. package/dist/{impl-CWHnw3oX.mjs → impl-tbGnvKFm.mjs} +2 -2
  137. package/dist/{impl-CWHnw3oX.mjs.map → impl-tbGnvKFm.mjs.map} +1 -1
  138. package/dist/index.d.mts +3397 -3397
  139. package/dist/index.mjs +1 -1
  140. package/dist/{makeGraphQLRequest-G078PsEL.mjs → makeGraphQLRequest-Cq26A_Lq.mjs} +2 -2
  141. package/dist/{makeGraphQLRequest-G078PsEL.mjs.map → makeGraphQLRequest-Cq26A_Lq.mjs.map} +1 -1
  142. package/dist/{markRequestDataSiloIdsCompleted-DmAz-R0M.mjs → markRequestDataSiloIdsCompleted-DzqJ5MNY.mjs} +2 -2
  143. package/dist/{markRequestDataSiloIdsCompleted-DmAz-R0M.mjs.map → markRequestDataSiloIdsCompleted-DzqJ5MNY.mjs.map} +1 -1
  144. package/dist/{markSilentPrivacyRequests-s7_aBROE.mjs → markSilentPrivacyRequests-BKQUu6Ep.mjs} +2 -2
  145. package/dist/{markSilentPrivacyRequests-s7_aBROE.mjs.map → markSilentPrivacyRequests-BKQUu6Ep.mjs.map} +1 -1
  146. package/dist/{mergeTranscendInputs-C64BJsse.mjs → mergeTranscendInputs-DGC4xUGu.mjs} +2 -2
  147. package/dist/{mergeTranscendInputs-C64BJsse.mjs.map → mergeTranscendInputs-DGC4xUGu.mjs.map} +1 -1
  148. package/dist/{notifyPrivacyRequestsAdditionalTime-BvXIXZYu.mjs → notifyPrivacyRequestsAdditionalTime-TEHAJe4C.mjs} +2 -2
  149. package/dist/{notifyPrivacyRequestsAdditionalTime-BvXIXZYu.mjs.map → notifyPrivacyRequestsAdditionalTime-TEHAJe4C.mjs.map} +1 -1
  150. package/dist/package-C4J38oR1.mjs +2 -0
  151. package/dist/package-C4J38oR1.mjs.map +1 -0
  152. package/dist/{pullAllDatapoints-DiMWp8a7.mjs → pullAllDatapoints-Cntwuzw7.mjs} +2 -2
  153. package/dist/{pullAllDatapoints-DiMWp8a7.mjs.map → pullAllDatapoints-Cntwuzw7.mjs.map} +1 -1
  154. package/dist/{pullChunkedCustomSiloOutstandingIdentifiers-DgWgggQt.mjs → pullChunkedCustomSiloOutstandingIdentifiers-BT-GZpT1.mjs} +2 -2
  155. package/dist/{pullChunkedCustomSiloOutstandingIdentifiers-DgWgggQt.mjs.map → pullChunkedCustomSiloOutstandingIdentifiers-BT-GZpT1.mjs.map} +1 -1
  156. package/dist/{pullConsentManagerMetrics-pFRPXTHJ.mjs → pullConsentManagerMetrics-FnhPEszu.mjs} +2 -2
  157. package/dist/{pullConsentManagerMetrics-pFRPXTHJ.mjs.map → pullConsentManagerMetrics-FnhPEszu.mjs.map} +1 -1
  158. package/dist/{pullManualEnrichmentIdentifiersToCsv-DA_4rIzW.mjs → pullManualEnrichmentIdentifiersToCsv-B_4REnga.mjs} +2 -2
  159. package/dist/{pullManualEnrichmentIdentifiersToCsv-DA_4rIzW.mjs.map → pullManualEnrichmentIdentifiersToCsv-B_4REnga.mjs.map} +1 -1
  160. package/dist/{pullTranscendConfiguration-D2cYlu6V.mjs → pullTranscendConfiguration-CqsgEf9A.mjs} +2 -2
  161. package/dist/{pullTranscendConfiguration-D2cYlu6V.mjs.map → pullTranscendConfiguration-CqsgEf9A.mjs.map} +1 -1
  162. package/dist/{pullUnstructuredSubDataPointRecommendations-C4aVhH-W.mjs → pullUnstructuredSubDataPointRecommendations-DZd2q6S2.mjs} +2 -2
  163. package/dist/{pullUnstructuredSubDataPointRecommendations-C4aVhH-W.mjs.map → pullUnstructuredSubDataPointRecommendations-DZd2q6S2.mjs.map} +1 -1
  164. package/dist/{pushCronIdentifiersFromCsv-C34TB9tG.mjs → pushCronIdentifiersFromCsv-D2saGR5i.mjs} +2 -2
  165. package/dist/{pushCronIdentifiersFromCsv-C34TB9tG.mjs.map → pushCronIdentifiersFromCsv-D2saGR5i.mjs.map} +1 -1
  166. package/dist/{pushManualEnrichmentIdentifiersFromCsv-CGS9E3Ft.mjs → pushManualEnrichmentIdentifiersFromCsv-DOvAzMyt.mjs} +2 -2
  167. package/dist/{pushManualEnrichmentIdentifiersFromCsv-CGS9E3Ft.mjs.map → pushManualEnrichmentIdentifiersFromCsv-DOvAzMyt.mjs.map} +1 -1
  168. package/dist/{removeUnverifiedRequestIdentifiers-pGGOFbfE.mjs → removeUnverifiedRequestIdentifiers-ChlwRmhd.mjs} +2 -2
  169. package/dist/{removeUnverifiedRequestIdentifiers-pGGOFbfE.mjs.map → removeUnverifiedRequestIdentifiers-ChlwRmhd.mjs.map} +1 -1
  170. package/dist/{retryRequestDataSilos-DXwN5uPw.mjs → retryRequestDataSilos-DnwXA1YZ.mjs} +2 -2
  171. package/dist/{retryRequestDataSilos-DXwN5uPw.mjs.map → retryRequestDataSilos-DnwXA1YZ.mjs.map} +1 -1
  172. package/dist/{skipPreflightJobs-BNQhuPZ8.mjs → skipPreflightJobs-jK5lNlmv.mjs} +2 -2
  173. package/dist/{skipPreflightJobs-BNQhuPZ8.mjs.map → skipPreflightJobs-jK5lNlmv.mjs.map} +1 -1
  174. package/dist/{skipRequestDataSilos-C39ji4lO.mjs → skipRequestDataSilos-DQGroOos.mjs} +2 -2
  175. package/dist/{skipRequestDataSilos-C39ji4lO.mjs.map → skipRequestDataSilos-DQGroOos.mjs.map} +1 -1
  176. package/dist/{streamPrivacyRequestsToCsv-C8lquiyd.mjs → streamPrivacyRequestsToCsv-BK07Bm-T.mjs} +2 -2
  177. package/dist/{streamPrivacyRequestsToCsv-C8lquiyd.mjs.map → streamPrivacyRequestsToCsv-BK07Bm-T.mjs.map} +1 -1
  178. package/dist/{syncCodePackages-BHgjfXCI.mjs → syncCodePackages-F-97FNjo.mjs} +2 -2
  179. package/dist/{syncCodePackages-BHgjfXCI.mjs.map → syncCodePackages-F-97FNjo.mjs.map} +1 -1
  180. package/dist/{syncCookies-CiLtxDFf.mjs → syncCookies-BxY36BeJ.mjs} +2 -2
  181. package/dist/{syncCookies-CiLtxDFf.mjs.map → syncCookies-BxY36BeJ.mjs.map} +1 -1
  182. package/dist/{syncDataFlows-DmBUs3G_.mjs → syncDataFlows-Cx5LZCen.mjs} +2 -2
  183. package/dist/{syncDataFlows-DmBUs3G_.mjs.map → syncDataFlows-Cx5LZCen.mjs.map} +1 -1
  184. package/dist/{syncTemplates-BNu1_dmW.mjs → syncTemplates-BrH7Yr0V.mjs} +2 -2
  185. package/dist/{syncTemplates-BNu1_dmW.mjs.map → syncTemplates-BrH7Yr0V.mjs.map} +1 -1
  186. package/dist/{updateConsentManagerVersionToLatest-BBMN94Hs.mjs → updateConsentManagerVersionToLatest-C221vAAw.mjs} +2 -2
  187. package/dist/{updateConsentManagerVersionToLatest-BBMN94Hs.mjs.map → updateConsentManagerVersionToLatest-C221vAAw.mjs.map} +1 -1
  188. package/dist/{uploadCookiesFromCsv-BKZx_E_2.mjs → uploadCookiesFromCsv-roHWekOP.mjs} +2 -2
  189. package/dist/{uploadCookiesFromCsv-BKZx_E_2.mjs.map → uploadCookiesFromCsv-roHWekOP.mjs.map} +1 -1
  190. package/dist/{uploadDataFlowsFromCsv-CJFVLvCJ.mjs → uploadDataFlowsFromCsv-DcTbrsv2.mjs} +2 -2
  191. package/dist/{uploadDataFlowsFromCsv-CJFVLvCJ.mjs.map → uploadDataFlowsFromCsv-DcTbrsv2.mjs.map} +1 -1
  192. package/dist/{uploadPrivacyRequestsFromCsv-BmP1JluQ.mjs → uploadPrivacyRequestsFromCsv-BUGTS-pY.mjs} +2 -2
  193. package/dist/{uploadPrivacyRequestsFromCsv-BmP1JluQ.mjs.map → uploadPrivacyRequestsFromCsv-BUGTS-pY.mjs.map} +1 -1
  194. package/dist/{uploadSiloDiscoveryResults-XpDp2u35.mjs → uploadSiloDiscoveryResults-D2fK92WR.mjs} +2 -2
  195. package/dist/{uploadSiloDiscoveryResults-XpDp2u35.mjs.map → uploadSiloDiscoveryResults-D2fK92WR.mjs.map} +1 -1
  196. package/dist/{withPreferenceRetry-Cb5S310L.mjs → withPreferenceRetry-xLMZyTq9.mjs} +2 -2
  197. package/dist/{withPreferenceRetry-Cb5S310L.mjs.map → withPreferenceRetry-xLMZyTq9.mjs.map} +1 -1
  198. package/package.json +7 -7
  199. package/dist/createExtraKeyHandler-C_0EVj10.mjs.map +0 -1
  200. package/dist/fetchAllPurposesAndPreferences-Dog6N9L2.mjs +0 -2
  201. package/dist/impl-1U4QBT_L.mjs.map +0 -1
  202. package/dist/package-BjNQxHlz.mjs +0 -2
  203. package/dist/package-BjNQxHlz.mjs.map +0 -1
package/dist/{impl-Dny1LX9A.mjs.map → impl-DsNPvet4.mjs.map}
@@ -1 +1 @@
- {"version":3,"file":"impl-Dny1LX9A.mjs","names":["pmap"],"sources":["../src/lib/preference-management/transformPreferenceRecordToCsv.ts","../src/lib/preference-management/buildConsentChunks.ts","../src/lib/preference-management/getComparisonTimeForRecord.ts","../src/lib/preference-management/iterateConsentPages.ts","../src/lib/preference-management/pickConsentChunkMode.ts","../src/lib/preference-management/discoverConsentWindow.ts","../src/lib/preference-management/fetchConsentPreferencesChunked.ts","../src/lib/preference-management/fetchConsentPreferences.ts","../src/commands/consent/pull-consent-preferences/impl.ts"],"sourcesContent":["import type { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\n\n/**\n * Transforms the output of the consent preferences query into a CSV-friendly format.\n *\n * @param input - The input object containing consent preferences data.\n * @param exportIdentifiersWithDelimiter - Delimiter to use when combining multiple identifier values.\n * @returns A record representing the transformed CSV output.\n */\nexport function transformPreferenceRecordToCsv(\n {\n identifiers = [],\n purposes = [],\n metadata = [],\n consentManagement = {},\n system = {\n decryptionStatus: 'DECRYPTED',\n },\n // keep other top-level fields as-is (e.g., partition, timestamp, metadataTimestamp)\n ...topLevel\n }: PreferenceQueryResponseItem,\n exportIdentifiersWithDelimiter: string,\n): Record<string, unknown> {\n // Start with: all other top-level fields + spread system and consentManagement\n const out: Record<string, unknown> = {\n ...topLevel,\n ...system,\n ...consentManagement,\n };\n\n // ── identifiers: each identifier.name -> CSV of values\n if (Array.isArray(identifiers)) {\n const byName = new Map<string, Set<string>>();\n for (const { name, value } of identifiers) {\n if (!byName.has(name)) byName.set(name, new Set());\n if (value) byName.get(name)!.add(value);\n }\n for (const [name, set] of byName.entries()) {\n out[name] = Array.from(set).join(exportIdentifiersWithDelimiter);\n }\n }\n\n // ── metadata: serialize as JSON\n if (Array.isArray(metadata)) {\n out.metadata = JSON.stringify(\n metadata.reduce(\n (acc, { key, value }) => {\n acc[key] = value;\n return acc;\n },\n {} as Record<string, string>,\n ),\n );\n }\n\n // ── purposes:\n // - purpose.slug column => true/false (enabled)\n // - for each preference: purpose.slug_preference.slug => bool | single | CSV (multi)\n if (Array.isArray(purposes)) {\n for (const { purpose, preferences, enabled } of purposes) {\n out[purpose] = Boolean(enabled);\n\n // nested preferences\n if (Array.isArray(preferences)) {\n for (const { topic, choice } of preferences) {\n const col = `${purpose}_${topic}`;\n\n let val: unknown = null;\n\n if (typeof choice.booleanValue === 'boolean') {\n val = choice.booleanValue;\n } else if (choice.selectValue) {\n val = choice.selectValue;\n } else if (Array.isArray(choice.selectValues)) {\n const vs = choice.selectValues.filter((v) => v.length > 0);\n val = vs.join(',');\n } else {\n // no pref value present -> null\n val = null;\n }\n\n out[col] = val;\n }\n }\n }\n }\n\n return out;\n}\n","import { FIVE_MIN_MS } from '../helpers/index.js';\nimport type { ChunkMode, PreferencesQueryFilter } from './types.js';\n\n/**\n * Build chunk windows by splitting [lower, upperExclusive) into up to `maxChunks`\n * equal-sized ranges, with a minimum chunk span of 5 minutes. 
Boundaries are snapped\n * once at the start to the nearest 5-minute boundary for stability.\n *\n * Each returned window is already \"half-open\" for an *inclusive* backend:\n * we subtract 1ms from the exclusive end so adjacent chunks do not overlap.\n *\n * Example (timestamp mode): [10:00, 12:00) → { after=10:00:00.000Z, before=11:59:59.999Z }\n *\n * @param mode - 'timestamp' or 'updated'\n * @param lower - Lower bound (inclusive)\n * @param upperExclusive - Upper bound (exclusive)\n * @param maxChunks - Maximum number of chunks to create\n * @returns Array of chunked preference query filters\n */\nexport function buildConsentChunks(\n mode: ChunkMode,\n lower: Date,\n upperExclusive: Date,\n maxChunks = 5000,\n): Array<PreferencesQueryFilter> {\n const totalMs = Math.max(0, upperExclusive.getTime() - lower.getTime());\n if (totalMs === 0) return [];\n\n // Snap only the starting boundary to the nearest 5-minute boundary.\n // We avoid re-snapping every step to prevent cumulative drift.\n const seriesStart = new Date(Math.floor(lower.getTime() / FIVE_MIN_MS) * FIVE_MIN_MS);\n\n // Compute base chunk size (ceil to ensure ≤ maxChunks), enforced ≥ 5m.\n const rawChunkMs = Math.ceil(totalMs / Math.max(1, maxChunks));\n const chunkMs = Math.max(FIVE_MIN_MS, rawChunkMs);\n\n // Number of chunks needed to cover [seriesStart, upperExclusive)\n const count = Math.ceil((upperExclusive.getTime() - seriesStart.getTime()) / chunkMs);\n\n const chunks: PreferencesQueryFilter[] = [];\n\n for (let i = 0; i < count; i += 1) {\n const startMs = seriesStart.getTime() + i * chunkMs;\n const endExclusiveMs = Math.min(upperExclusive.getTime(), startMs + chunkMs);\n\n // Convert exclusive end to inclusive end for an inclusive backend: -1ms.\n const endInclusiveMs = endExclusiveMs - 1;\n\n // Guard: in degenerate cases (shouldn’t happen with the math above), clamp.\n const safeEndMs = Math.max(startMs, endInclusiveMs);\n\n const afterIso = new Date(startMs).toISOString();\n const beforeIso = new Date(safeEndMs).toISOString();\n\n if (mode === 'timestamp') {\n chunks.push({\n timestampAfter: afterIso,\n timestampBefore: beforeIso,\n });\n } else {\n chunks.push({\n system: {\n updatedAfter: afterIso,\n updatedBefore: beforeIso,\n },\n });\n }\n }\n\n return chunks;\n}\n","import type { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\n\nimport type { ChunkMode } from './types.js';\n\n/**\n * Get the comparison instant for sorting based on the chosen dimension.\n *\n * @param mode - Chunking mode\n * @param item - Preference item\n * @returns date\n */\nexport function getComparisonTimeForRecord(\n mode: ChunkMode,\n item: PreferenceQueryResponseItem,\n): Date {\n if (mode === 'timestamp') {\n return new Date(item.timestamp);\n }\n // mode === 'updated'\n return item.system?.updatedAt ? 
new Date(item.system.updatedAt) : new Date();\n}\n","import type { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport colors from 'colors';\nimport type { Got } from 'got';\n\nimport { logger } from '../../logger.js';\nimport { ConsentPreferenceResponse, PreferencesQueryFilter } from './types.js';\nimport { withPreferenceRetry } from './withPreferenceRetry.js';\n\n/**\n * Async generator over pages for a given filter\n *\n * @param sombra - Sombra Got instance\n * @param partition - Partition key\n * @param filter - Query filter\n * @param pageSize - Number of items per page\n * @yields Pages of PreferenceQueryResponseItem\n */\nexport async function* iterateConsentPages(\n sombra: Got,\n partition: string,\n filter: PreferencesQueryFilter,\n pageSize: number,\n): AsyncGenerator<PreferenceQueryResponseItem[], void, void> {\n let cursor: string | undefined;\n\n while (true) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const body: any = { limit: pageSize };\n if (filter && Object.keys(filter).length) body.filter = filter;\n if (cursor) body.cursor = cursor;\n\n const resp = await withPreferenceRetry(\n 'Preference Query',\n () =>\n sombra\n .post(`v1/preferences/${partition}/query`, {\n json: body,\n })\n .json(),\n {\n onRetry: (attempt, _error, message) => {\n logger.warn(\n colors.yellow(\n `Retry attempt ${attempt} for iterateConsentPages due to error: ${message}`,\n ),\n );\n },\n },\n );\n\n const { nodes, cursor: nextCursor } = decodeCodec(ConsentPreferenceResponse, resp);\n if (!nodes?.length) break;\n\n yield nodes;\n\n if (!nextCursor) break;\n cursor = nextCursor;\n }\n}\n","import type { ChunkMode, PreferencesQueryFilter } from './types.js';\n\n/**\n * Decide which dimension to chunk on: 'timestamp' if timestamps provided, otherwise 'updated'\n *\n * @param filterBy - Filter to examine\n * @returns Chosen chunk mode\n */\nexport function pickConsentChunkMode(filterBy: PreferencesQueryFilter): ChunkMode {\n const hasTimestamp = !!filterBy.timestampAfter || !!filterBy.timestampBefore;\n return hasTimestamp ? 'timestamp' : 'updated';\n}\n","import type { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\n/* eslint-disable max-lines */\nimport colors from 'colors';\nimport type { Got } from 'got';\n\nimport { logger } from '../../logger.js';\nimport { startOfUtcDay, DAY_MS } from '../helpers/index.js';\nimport { getComparisonTimeForRecord } from './getComparisonTimeForRecord.js';\nimport { iterateConsentPages } from './iterateConsentPages.js';\nimport { pickConsentChunkMode } from './pickConsentChunkMode.js';\nimport { ChunkMode, PreferencesQueryFilter } from './types.js';\n\n/**\n * Get after/before bounds from filter for the given mode\n *\n * @param mode - Chunking mode\n * @param filterBy - Filter to examine\n * @returns after/before dates\n */\nexport function getBoundsFromConsentFilter(\n mode: ChunkMode,\n filterBy: PreferencesQueryFilter,\n): {\n /** After date */\n after?: Date;\n /** Before date */\n before?: Date;\n} {\n if (mode === 'timestamp') {\n return {\n after: filterBy.timestampAfter ? new Date(filterBy.timestampAfter) : undefined,\n before: filterBy.timestampBefore ? new Date(filterBy.timestampBefore) : undefined,\n };\n }\n const u = filterBy.system ?? {};\n return {\n after: u.updatedAfter ? new Date(u.updatedAfter) : undefined,\n before: u.updatedBefore ? 
new Date(u.updatedBefore) : undefined,\n };\n}\n\n/**\n * Merge base filter with a \"before\" bound (without mixing dimensions).\n *\n * @param mode - Chunking mode\n * @param base - Base filter to augment\n * @param beforeISO - ISO timestamp to apply as the exclusive *Before bound for the chosen dimension\n * @returns New filter with the appropriate *Before constraint applied\n */\nfunction withBeforeBound(\n mode: ChunkMode,\n base: PreferencesQueryFilter,\n beforeISO?: string,\n): PreferencesQueryFilter {\n if (mode === 'timestamp') {\n return {\n ...base,\n timestampBefore: beforeISO ?? base.timestampBefore,\n };\n }\n return {\n ...base,\n system: {\n ...(base.system || {}),\n ...(beforeISO ? { updatedBefore: beforeISO } : {}),\n },\n // ensure we don't mix dimensions\n timestampAfter: undefined,\n timestampBefore: undefined,\n };\n}\n\n/**\n * Fetch a single record (or null) with the given filter.\n *\n * @param sombra - Got instance configured for Sombra API\n * @param partition - Preference Store partition id\n * @param filter - Query filter to use (page size internally forced to 1)\n * @returns The first record or null if none\n */\nasync function fetchOne(\n sombra: Got,\n partition: string,\n filter: PreferencesQueryFilter,\n): Promise<PreferenceQueryResponseItem | null> {\n logger.info(colors.magenta(`Single-record probe with filter: ${JSON.stringify(filter)}`));\n const it = iterateConsentPages(sombra, partition, filter, /* pageSize */ 1);\n const res = await it.next();\n if (res.done || !res.value || res.value.length === 0) {\n logger.info(colors.yellow('Probe result: no record'));\n return null;\n }\n const item = res.value[0]!;\n logger.info(\n colors.green(\n `Probe result: found record at ${getComparisonTimeForRecord(\n pickConsentChunkMode(filter),\n item,\n ).toISOString()}`,\n ),\n );\n return item;\n}\n\n/**\n * Robust earliest-day search (UTC):\n * 1) Anchor at the newest record (single-record probe).\n * 2) Exponential “jump back” using seeds (1d, 7d, 30d) then doubling (60d, 120d, 240d, …)\n * to cross into an empty region and establish a lower empty bound.\n * 3) **Exponential forward-from-empty**: gallop forward from the empty bound toward the last-found\n * to land close to the frontier quickly.\n * 4) Tighten with a short binary search on time using single-record probes.\n *\n * (Implementation note: preserves the public signature and docs while improving efficiency.)\n *\n * @param sombra - Sombra\n * @param opts - Options\n * @returns Earliest day with data (UTC start-of-day)\n */\nexport async function findEarliestDayWithData(\n sombra: Got,\n opts: {\n /** Partition */\n partition: string;\n /** Chunking mode */\n mode: ChunkMode;\n /** Base filter */\n baseFilter: PreferencesQueryFilter;\n /** Optional safety cap in days to avoid unbounded lookback (default ~10 years) */\n maxLookbackDays?: number;\n },\n): Promise<Date> {\n const { partition, mode, baseFilter, maxLookbackDays = 3650 } = opts;\n\n // 1) Find newest record (anchors our backtracking).\n const newest = await fetchOne(sombra, partition, withBeforeBound(mode, baseFilter));\n if (!newest) {\n logger.info(colors.yellow('No records found; defaulting earliest day to today.'));\n return startOfUtcDay(new Date());\n }\n const newestInstant = getComparisonTimeForRecord(mode, newest);\n logger.info(colors.green(`Newest instant: ${newestInstant.toISOString()}`));\n\n // 2) Exponential jump back to find an empty region.\n const seedSteps = [1, 7, 30]; // days\n let stepDaysIdx = 0;\n let stepMs = 
seedSteps[0] * DAY_MS;\n\n let lastFoundInstant = newestInstant; // last instant we *could* find a record before\n let emptyBeforeInstant: Date | null = null; // first bound that yielded no results\n\n // eslint-disable-next-line no-constant-condition\n while (true) {\n const probeBound =\n stepDaysIdx < seedSteps.length\n ? new Date(newestInstant.getTime() - seedSteps[stepDaysIdx] * DAY_MS)\n : new Date(newestInstant.getTime() - stepMs);\n\n // stop if we exceeded lookback cap\n const daysSince =\n (startOfUtcDay(new Date()).getTime() - startOfUtcDay(probeBound).getTime()) / DAY_MS;\n if (daysSince > maxLookbackDays) {\n logger.warn(\n colors.yellow(\n `Exponential jump exceeded maxLookbackDays=${maxLookbackDays}. Using current bounds.`,\n ),\n );\n emptyBeforeInstant = probeBound;\n break;\n }\n\n logger.info(\n colors.magenta(\n `Probing before=${probeBound.toISOString()} (jump step ${\n stepDaysIdx < seedSteps.length\n ? `${seedSteps[stepDaysIdx]}d`\n : `${Math.round(stepMs / DAY_MS)}d`\n })…`,\n ),\n );\n\n const hit = await fetchOne(\n sombra,\n partition,\n withBeforeBound(mode, baseFilter, probeBound.toISOString()),\n );\n\n if (hit) {\n lastFoundInstant = getComparisonTimeForRecord(mode, hit);\n logger.info(\n colors.green(\n `Found older record at ${lastFoundInstant.toISOString()} — continue jumping back.`,\n ),\n );\n // advance step\n if (stepDaysIdx < seedSteps.length - 1) {\n stepDaysIdx += 1;\n stepMs = seedSteps[stepDaysIdx] * DAY_MS;\n } else if (stepDaysIdx === seedSteps.length - 1) {\n stepDaysIdx += 1; // switch to doubling mode\n stepMs = seedSteps[seedSteps.length - 1] * 2 * DAY_MS; // start at 60d\n } else {\n stepMs *= 2;\n }\n // eslint-disable-next-line no-continue\n continue;\n }\n\n // crossed into an empty zone — remember this bound\n emptyBeforeInstant = probeBound;\n logger.info(\n colors.green(`No record before ${probeBound.toISOString()} — established empty lower bound.`),\n );\n break;\n }\n\n // Guard: if for some reason empty bound wasn't set, synthesize one “just before” lastFound.\n if (!emptyBeforeInstant) {\n emptyBeforeInstant = new Date(lastFoundInstant.getTime() - DAY_MS);\n }\n\n // 3) Exponential forward-from-empty toward the found frontier.\n // This “gallop” reduces the span dramatically before binary search.\n // We keep moving the empty bound forward with exponentially growing steps\n // until we get a hit; then we shrink onto that hit instant.\n let lo = emptyBeforeInstant; // known EMPTY (no data before this bound)\n let hi = lastFoundInstant; // known FOUND (there is data before this instant)\n let fwdStep = Math.max(DAY_MS, Math.floor((hi.getTime() - lo.getTime()) / 64)); // start small-ish\n logger.info(\n colors.magenta(\n `Exponential forward-from-empty start: empty=${lo.toISOString()} found=${hi.toISOString()} step=${Math.round(\n fwdStep / DAY_MS,\n )}d`,\n ),\n );\n\n // Do a few gallop iterations (bounded so we don't loop forever if distribution is dense)\n for (let i = 0; i < 8; i += 1) {\n const probe = new Date(lo.getTime() + fwdStep);\n if (probe.getTime() >= hi.getTime()) break;\n\n logger.info(colors.magenta(`Forward gallop probe before=${probe.toISOString()}…`));\n const hit = await fetchOne(\n sombra,\n partition,\n withBeforeBound(mode, baseFilter, probe.toISOString()),\n );\n\n if (hit) {\n // We crossed into data — tighten hi to the actual hit instant.\n hi = getComparisonTimeForRecord(mode, hit);\n logger.info(\n colors.green(\n `Gallop hit at ${hi.toISOString()} — tightening found bound. 
Next step halves.`,\n ),\n );\n fwdStep = Math.max(DAY_MS, Math.floor(fwdStep / 2));\n } else {\n // Still empty up to probe — advance lo and double the step.\n lo.setTime(probe.getTime());\n logger.info(\n colors.yellow(\n `Gallop miss — advancing empty bound to ${lo.toISOString()}. Next step doubles.`,\n ),\n );\n fwdStep = Math.min(hi.getTime() - lo.getTime(), fwdStep * 2);\n if (fwdStep < DAY_MS) fwdStep = DAY_MS;\n }\n\n if (hi.getTime() - lo.getTime() <= DAY_MS) break;\n }\n\n // 4) Finish with a short binary search between [lo (empty), hi (found)].\n while (hi.getTime() - lo.getTime() > DAY_MS) {\n const mid = new Date(lo.getTime() + Math.floor((hi.getTime() - lo.getTime()) / 2));\n logger.info(colors.magenta(`Binary probe before=${mid.toISOString()}…`));\n\n const hit = await fetchOne(\n sombra,\n partition,\n withBeforeBound(mode, baseFilter, mid.toISOString()),\n );\n\n if (hit) {\n const when = getComparisonTimeForRecord(mode, hit);\n logger.info(colors.green(`Binary probe found record at ${when.toISOString()}.`));\n hi = when; // there is data before mid -> earliest could be even earlier\n } else {\n logger.info(colors.yellow('Binary probe found no record.'));\n lo = mid; // still empty -> move low up\n }\n }\n\n const earliestDay = startOfUtcDay(hi);\n logger.info(\n colors.green(\n `Earliest day (UTC) resolved to ${earliestDay.toISOString()} (instant ≈ ${hi.toISOString()}).`,\n ),\n );\n return earliestDay;\n}\n\n/**\n * Find latest day with data using exponential growth forward from earliest (UTC day math).\n *\n * (Implementation note: per your request, we now fetch a single newest record to infer the latest day.)\n *\n * @param sombra - Sombra\n * @param opts - Options\n * @returns Latest day with data\n */\nexport async function findLatestDayWithData(\n sombra: Got,\n opts: {\n /** Partition */\n partition: string;\n /** Chunking mode */\n mode: ChunkMode;\n /** Base filter */\n baseFilter: PreferencesQueryFilter;\n /** Earliest date */\n earliest: Date; // inclusive day start\n },\n): Promise<Date> {\n const { partition, mode, baseFilter } = opts;\n\n logger.info(colors.magenta('Latest-day discovery: probing newest record…'));\n const latest = await fetchOne(sombra, partition, withBeforeBound(mode, baseFilter));\n if (!latest) {\n logger.info(colors.yellow('No records found at all; defaulting latest day to today.'));\n return startOfUtcDay(new Date());\n }\n\n const when = getComparisonTimeForRecord(mode, latest);\n logger.info(colors.green(`Newest record instant is ${when.toISOString()}.`));\n\n const latestDay = startOfUtcDay(when);\n logger.info(\n colors.green(\n `Latest day (UTC) resolved to ${latestDay.toISOString()} from instant ${when.toISOString()}.`,\n ),\n );\n\n return latestDay;\n}\n/* eslint-enable max-lines */\n","import type { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport type { Got } from 'got';\n\nimport { logger } from '../../logger.js';\nimport { map as pmap } from '../bluebird.js';\nimport { addDaysUtc, clampPageSize } from '../helpers/index.js';\nimport { buildConsentChunks } from './buildConsentChunks.js';\nimport {\n findEarliestDayWithData,\n findLatestDayWithData,\n getBoundsFromConsentFilter,\n} from './discoverConsentWindow.js';\nimport { iterateConsentPages } from './iterateConsentPages.js';\nimport { pickConsentChunkMode } from './pickConsentChunkMode.js';\nimport { PreferencesQueryFilter, ChunkMode } from './types.js';\n\n/**\n * Merge 
baseFilter with a window filter, taking care not to mix timestamp/updated fields improperly.\n *\n * @param mode - The chunking mode\n * @param base - The base filter\n * @param window - The per-chunk window filter\n * @returns merged filter\n */\nfunction mergeFilter(\n mode: ChunkMode,\n base: PreferencesQueryFilter,\n window: PreferencesQueryFilter,\n): PreferencesQueryFilter {\n if (mode === 'timestamp') {\n return {\n ...base,\n timestampAfter: window.timestampAfter ?? base.timestampAfter,\n timestampBefore: window.timestampBefore ?? base.timestampBefore,\n // ensure we don't pass `system.*` when chunking by timestamp\n system: undefined,\n };\n }\n // mode === 'updated'\n return {\n ...base,\n system: {\n ...(base.system || {}),\n ...(window.system?.updatedAfter ? { updatedAfter: window.system.updatedAfter } : {}),\n ...(window.system?.updatedBefore ? { updatedBefore: window.system.updatedBefore } : {}),\n },\n // Ensure we don't mix dimensions\n timestampAfter: undefined,\n timestampBefore: undefined,\n };\n}\n\n/**\n * High-level chunked fetch with progress bar.\n *\n * If an `onItems` callback is provided, pages are streamed to the callback\n * as they are fetched (no accumulation in memory). If no callback is provided,\n * the function returns all items (legacy behavior).\n *\n * @param sombra - Got instance\n * @param options - Options\n * @returns preference items (only if onItems is not provided)\n */\nexport async function fetchConsentPreferencesChunked(\n sombra: Got,\n {\n partition,\n filterBy = {},\n limit = 50,\n windowConcurrency = 25,\n maxChunks = 5000,\n maxLookbackDays = 3650,\n onItems,\n }: {\n /** Partition */\n partition: string;\n /** Filter by preferences */\n filterBy?: PreferencesQueryFilter;\n /** Limit number of results (page size) */\n limit?: number;\n /** Window concurrency */\n windowConcurrency?: number;\n /** Max chunks */\n maxChunks?: number; // up to N chunks; min 1 hour per chunk\n /** Max lookback days for discovering bounds */\n maxLookbackDays?: number;\n /** Optional streaming sink; if provided, items are not accumulated */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n onItems?: (items: PreferenceQueryResponseItem[]) => Promise<any> | any;\n },\n): Promise<PreferenceQueryResponseItem[]> {\n const mode: ChunkMode = pickConsentChunkMode(filterBy);\n logger.info(\n colors.magenta(\n `Fetching consent preferences in chunks by ${\n mode === 'timestamp' ? 'timestamp' : 'system.updatedAt'\n }...`,\n ),\n );\n\n // Resolve / discover bounds (UTC)\n let { after, before } = getBoundsFromConsentFilter(mode, filterBy);\n logger.info(\n colors.magenta(\n `Initial bounds: after=${after?.toISOString() ?? 'undefined'} before=${\n before?.toISOString() ?? 
'undefined'\n }`,\n ),\n );\n\n if (!after || !before) {\n if (!after) {\n logger.info(\n colors.magenta(`Discovering earliest day with data for partition ${partition}...`),\n );\n after = await findEarliestDayWithData(sombra, {\n partition,\n mode,\n baseFilter: filterBy,\n maxLookbackDays,\n });\n logger.info(colors.green(`Discovered earliest day with data: ${after.toISOString()}`));\n }\n if (!before) {\n logger.info(colors.magenta(`Discovering latest day with data for partition ${partition}...`));\n const latestDay = await findLatestDayWithData(sombra, {\n partition,\n mode,\n baseFilter: filterBy,\n earliest: after,\n });\n // Exclusive upper bound = latest day start + 1 day (UTC)\n before = addDaysUtc(latestDay, 1);\n logger.info(colors.green(`Discovered latest day with data: ${latestDay.toISOString()}`));\n }\n }\n\n logger.info(\n colors.green(`Final bounds (UTC): after=${after.toISOString()} before=${before.toISOString()}`),\n );\n\n // Build up to `maxChunks` chunks, min 1 hour each\n const chunks = buildConsentChunks(mode, after, before, maxChunks);\n\n logger.info(\n colors.magenta(\n `Fetching consent preferences from partition ${partition} in ${chunks.length} chunks...`,\n ),\n );\n\n // Progress bar over chunks (unordered):\n // - value = completed chunks (out-of-order OK)\n // - payload fetched = total records fetched\n const bar = new cliProgress.SingleBar(\n {\n format: 'Downloading [{bar}] {percentage}% | chunks {value}/{total} | fetched {fetched}',\n },\n cliProgress.Presets.shades_classic,\n );\n\n let completed = 0; // finished chunks (out-of-order)\n let fetched = 0; // raw records counter\n\n bar.start(chunks.length, 0, { fetched });\n\n const t0 = Date.now();\n const pageSize = clampPageSize(limit);\n\n // If we are streaming, do not accumulate everything in memory.\n const out: PreferenceQueryResponseItem[] = [];\n\n await pmap(\n chunks.map((windowFilter, idx) => ({ windowFilter, idx })),\n async ({ windowFilter }) => {\n const filter = mergeFilter(mode, filterBy, windowFilter);\n\n // Stream this chunk page-by-page\n for await (const page of iterateConsentPages(sombra, partition, filter, pageSize)) {\n fetched += page.length;\n bar.update(completed, { fetched });\n\n if (onItems) {\n await onItems(page);\n } else {\n out.push(...page);\n }\n }\n\n completed += 1;\n bar.update(completed, { fetched });\n },\n { concurrency: Math.max(1, windowConcurrency) },\n );\n\n bar.update(completed, { fetched });\n bar.stop();\n\n logger.info(\n colors.green(\n `Fetched ${fetched} consent preference records from partition ${partition} in ${\n (Date.now() - t0) / 1000\n }s.`,\n ),\n );\n\n return onItems ? [] : out;\n}\n","import { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport colors from 'colors';\nimport type { Got } from 'got';\n\nimport { logger } from '../../logger.js';\nimport { ConsentPreferenceResponse, PreferencesQueryFilter } from './types.js';\nimport { withPreferenceRetry } from './withPreferenceRetry.js';\n\n/**\n * Fetch consent preferences for the managed consent database (new query endpoint)\n *\n * Uses POST /v1/preferences/{partition}/query with cursor pagination.\n *\n * If `onItems` is provided, this streams pages to the callback and does not\n * accumulate results in memory. 
If omitted, the function returns all items.\n *\n * @param sombra - Sombra instance (must include auth headers)\n * @param options - Query options\n * @returns All nodes (only when onItems is not provided)\n */\nexport async function fetchConsentPreferences(\n sombra: Got,\n {\n partition,\n filterBy = {},\n limit = 50,\n onItems,\n }: {\n /** Partition key to fetch (moved to URL path on new endpoint) */\n partition: string;\n /** Query filter (wrapped under \"filter\" in request body) */\n filterBy?: PreferencesQueryFilter;\n /** Number of users per page (1–50 per API spec) */\n limit?: number;\n /** Optional streaming sink; if provided, pages are not accumulated */\n onItems?: (items: PreferenceQueryResponseItem[]) => Promise<void> | void;\n },\n): Promise<PreferenceQueryResponseItem[]> {\n const collected: PreferenceQueryResponseItem[] = [];\n\n // Cursor-based pagination per new endpoint\n let cursor: string | undefined;\n\n // Build the filter payload, omitting empty filter\n const hasFilter =\n filterBy &&\n (Object.keys(filterBy).length > 0 ||\n (filterBy.system && Object.keys(filterBy.system).length > 0));\n\n // Enforce API max (defensive; backend also validates)\n const pageSize = Math.max(1, Math.min(50, limit ?? 50));\n\n // Keep fetching until no cursor is returned\n // (The API returns an opaque cursor string for the next page)\n // eslint-disable-next-line no-constant-condition\n while (true) {\n const body: {\n /** Filter by user identifiers (new shape) */\n filter?: PreferencesQueryFilter;\n /** Cursor for pagination */\n cursor?: string;\n /** Number of records per page */\n limit: number;\n } = { limit: pageSize };\n\n if (hasFilter) {\n body.filter = filterBy;\n }\n if (cursor) {\n body.cursor = cursor;\n }\n\n const response = await withPreferenceRetry(\n 'Preference Query',\n () =>\n sombra\n .post(`v1/preferences/${partition}/query`, {\n json: body,\n })\n .json(),\n {\n onRetry: (attempt, _error, message) => {\n logger.warn(\n colors.yellow(\n `Retry attempt ${attempt} for fetchConsentPreferences due to error: ${message}`,\n ),\n );\n },\n },\n );\n\n const { nodes, cursor: nextCursor } = decodeCodec(ConsentPreferenceResponse, response);\n\n if (!nodes || nodes.length === 0) {\n break;\n }\n\n if (onItems) {\n await onItems(nodes);\n } else {\n collected.push(...nodes);\n }\n\n if (!nextCursor) {\n break;\n }\n cursor = nextCursor;\n }\n\n return onItems ? 
[] : collected;\n}\n","import type { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\nimport colors from 'colors';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport {\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchAllIdentifiers,\n fetchAllPurposesAndPreferences,\n} from '../../../lib/graphql/index.js';\nimport { initCsvFile, appendCsvRowsOrdered } from '../../../lib/helpers/index.js';\nimport {\n fetchConsentPreferences,\n fetchConsentPreferencesChunked,\n transformPreferenceRecordToCsv,\n type PreferenceIdentifier,\n} from '../../../lib/preference-management/index.js';\nimport { logger } from '../../../logger.js';\n\n// Known “core” columns your transformer usually produces up front.\n// Leave this list conservative; we’ll still union with transformer keys.\nconst CORE_COLS = [\n 'userId',\n 'timestamp',\n 'partition',\n 'decryptionStatus',\n 'updatedAt',\n 'usp',\n 'gpp',\n 'tcf',\n 'airgapVersion',\n 'metadata',\n 'metadataTimestamp',\n];\n\nexport interface PullConsentPreferencesCommandFlags {\n auth: string;\n partition: string;\n sombraAuth?: string;\n file: string;\n transcendUrl: string;\n timestampBefore?: Date;\n exportIdentifiersWithDelimiter: string;\n timestampAfter?: Date;\n updatedBefore?: Date;\n updatedAfter?: Date;\n identifiers?: string[];\n concurrency: number;\n shouldChunk: boolean;\n windowConcurrency: number;\n maxChunks: number;\n maxLookbackDays: number;\n}\n\nexport async function pullConsentPreferences(\n this: LocalContext,\n {\n auth,\n partition,\n sombraAuth,\n file,\n transcendUrl,\n timestampBefore,\n timestampAfter,\n updatedBefore,\n updatedAfter,\n identifiers = [],\n concurrency,\n shouldChunk,\n windowConcurrency,\n maxChunks,\n exportIdentifiersWithDelimiter,\n maxLookbackDays,\n }: PullConsentPreferencesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Identifiers are key:value, parse to PreferenceIdentifier[]\n const parsedIdentifiers = identifiers.map((identifier): PreferenceIdentifier => {\n if (!identifier.includes(':')) {\n return {\n name: 'email',\n value: identifier,\n };\n }\n const [name, value] = identifier.split(':');\n return { name, value };\n });\n\n // Build filter\n const filterBy = {\n ...(timestampBefore ? { timestampBefore: timestampBefore.toISOString() } : {}),\n ...(timestampAfter ? { timestampAfter: timestampAfter.toISOString() } : {}),\n ...(updatedAfter || updatedBefore\n ? {\n system: {\n ...(updatedBefore ? { updatedBefore: updatedBefore.toISOString() } : {}),\n ...(updatedAfter ? { updatedAfter: updatedAfter.toISOString() } : {}),\n },\n }\n : {}),\n ...(parsedIdentifiers.length > 0 ? { identifiers: parsedIdentifiers } : {}),\n };\n\n logger.info(\n `Fetching consent preferences from partition ${partition}, using mode=${\n shouldChunk ? 
'chunked-stream' : 'paged-stream'\n }...`,\n );\n\n logger.info(colors.magenta(`Preparing CSV at: ${file}`));\n\n // Fetch full sets (purposes+topics, identifiers) to ensure header completeness\n const [purposesWithTopics, allIdentifiers] = await Promise.all([\n fetchAllPurposesAndPreferences(client),\n fetchAllIdentifiers(client),\n ]);\n\n // Identifier columns: exactly the identifier names\n const identifierCols = allIdentifiers.map((i) => i.name);\n\n // Preference topic columns: topic names (de-duped)\n const topicCols = Array.from(\n new Set(\n purposesWithTopics.flatMap((p) => p.topics?.map((t) => `${p.trackingType}_${t.slug}`) ?? []),\n ),\n ).sort((a, b) => a.localeCompare(b));\n\n // Some setups also want a per-purpose boolean column (e.g., “Email”, “Sms”).\n // If your transformer includes those, list them here, derived from purposes:\n const purposeCols = Array.from(new Set(purposesWithTopics.map((p) => p.trackingType))).sort(\n (a, b) => a.localeCompare(b),\n );\n\n // Build the complete header skeleton.\n // We’ll still union with the first transformed row’s keys to be safe.\n const completeHeadersList = [...CORE_COLS, ...identifierCols, ...purposeCols, ...topicCols];\n\n // Lazily initialize CSV header order from the first transformed row.\n let headerOrder: string[] | null = null;\n let wroteHeader = false;\n const writeRows = (items: PreferenceQueryResponseItem[]): void => {\n if (!items || items.length === 0) return;\n const rows = items.map((row) =>\n transformPreferenceRecordToCsv(row, exportIdentifiersWithDelimiter),\n );\n if (!wroteHeader) {\n const firstKeys = Object.keys(rows[0] ?? {});\n const seen = new Set<string>();\n headerOrder = [...completeHeadersList, ...firstKeys].filter((k) => {\n if (k === undefined) return false;\n if (seen.has(k)) return false;\n seen.add(k);\n return true;\n });\n initCsvFile(file, headerOrder);\n wroteHeader = true;\n }\n appendCsvRowsOrdered(file, rows, headerOrder!);\n };\n\n if (shouldChunk) {\n // Stream via chunked fetcher with page callback\n await fetchConsentPreferencesChunked(sombra, {\n partition,\n filterBy,\n limit: concurrency,\n windowConcurrency,\n maxChunks,\n maxLookbackDays,\n onItems: (items) => writeRows(items),\n });\n\n logger.info(colors.green(`Finished writing CSV to ${file}`));\n return;\n }\n\n // Non-chunked path: still stream page-by-page via onItems (no in-memory accumulation)\n await fetchConsentPreferences(sombra, {\n partition,\n filterBy,\n limit: concurrency, // page size (API max 50 enforced internally)\n onItems: (items) => writeRows(items),\n });\n\n logger.info(colors.green(`Finished writing CSV to 
${file}`));\n}\n"],"mappings":"irBASA,SAAgB,EACd,CACE,cAAc,EAAE,CAChB,WAAW,EAAE,CACb,WAAW,EAAE,CACb,oBAAoB,EAAE,CACtB,SAAS,CACP,iBAAkB,YACnB,CAED,GAAG,GAEL,EACyB,CAEzB,IAAM,EAA+B,CACnC,GAAG,EACH,GAAG,EACH,GAAG,EACJ,CAGD,GAAI,MAAM,QAAQ,EAAY,CAAE,CAC9B,IAAM,EAAS,IAAI,IACnB,IAAK,GAAM,CAAE,OAAM,WAAW,EACvB,EAAO,IAAI,EAAK,EAAE,EAAO,IAAI,EAAM,IAAI,IAAM,CAC9C,GAAO,EAAO,IAAI,EAAK,CAAE,IAAI,EAAM,CAEzC,IAAK,GAAM,CAAC,EAAM,KAAQ,EAAO,SAAS,CACxC,EAAI,GAAQ,MAAM,KAAK,EAAI,CAAC,KAAK,EAA+B,CAoBpE,GAfI,MAAM,QAAQ,EAAS,GACzB,EAAI,SAAW,KAAK,UAClB,EAAS,QACN,EAAK,CAAE,MAAK,YACX,EAAI,GAAO,EACJ,GAET,EAAE,CACH,CACF,EAMC,MAAM,QAAQ,EAAS,CACzB,KAAK,GAAM,CAAE,UAAS,cAAa,aAAa,EAI9C,GAHA,EAAI,GAAW,EAAQ,EAGnB,MAAM,QAAQ,EAAY,CAC5B,IAAK,GAAM,CAAE,QAAO,YAAY,EAAa,CAC3C,IAAM,EAAM,GAAG,EAAQ,GAAG,IAEtB,EAAe,KAEnB,AASE,EATE,OAAO,EAAO,cAAiB,UAC3B,EAAO,aACJ,EAAO,YACV,EAAO,YACJ,MAAM,QAAQ,EAAO,aAAa,CAChC,EAAO,aAAa,OAAQ,GAAM,EAAE,OAAS,EAAE,CACjD,KAAK,IAAI,CAGZ,KAGR,EAAI,GAAO,GAMnB,OAAO,ECpET,SAAgB,EACd,EACA,EACA,EACA,EAAY,IACmB,CAC/B,IAAM,EAAU,KAAK,IAAI,EAAG,EAAe,SAAS,CAAG,EAAM,SAAS,CAAC,CACvE,GAAI,IAAY,EAAG,MAAO,EAAE,CAI5B,IAAM,EAAc,IAAI,KAAK,KAAK,MAAM,EAAM,SAAS,CAAG,EAAY,CAAG,EAAY,CAG/E,EAAa,KAAK,KAAK,EAAU,KAAK,IAAI,EAAG,EAAU,CAAC,CACxD,EAAU,KAAK,IAAI,EAAa,EAAW,CAG3C,EAAQ,KAAK,MAAM,EAAe,SAAS,CAAG,EAAY,SAAS,EAAI,EAAQ,CAE/E,EAAmC,EAAE,CAE3C,IAAK,IAAI,EAAI,EAAG,EAAI,EAAO,GAAK,EAAG,CACjC,IAAM,EAAU,EAAY,SAAS,CAAG,EAAI,EAItC,EAHiB,KAAK,IAAI,EAAe,SAAS,CAAE,EAAU,EAAQ,CAGpC,EAGlC,EAAY,KAAK,IAAI,EAAS,EAAe,CAE7C,EAAW,IAAI,KAAK,EAAQ,CAAC,aAAa,CAC1C,EAAY,IAAI,KAAK,EAAU,CAAC,aAAa,CAE/C,IAAS,YACX,EAAO,KAAK,CACV,eAAgB,EAChB,gBAAiB,EAClB,CAAC,CAEF,EAAO,KAAK,CACV,OAAQ,CACN,aAAc,EACd,cAAe,EAChB,CACF,CAAC,CAIN,OAAO,EC1DT,SAAgB,EACd,EACA,EACM,CAKN,OAJI,IAAS,YACJ,IAAI,KAAK,EAAK,UAAU,CAG1B,EAAK,QAAQ,UAAY,IAAI,KAAK,EAAK,OAAO,UAAU,CAAG,IAAI,KCDxE,eAAuB,EACrB,EACA,EACA,EACA,EAC2D,CAC3D,IAAI,EAEJ,OAAa,CAEX,IAAM,EAAY,CAAE,MAAO,EAAU,CACjC,GAAU,OAAO,KAAK,EAAO,CAAC,SAAQ,EAAK,OAAS,GACpD,IAAQ,EAAK,OAAS,GAqB1B,GAAM,CAAE,QAAO,OAAQ,GAAe,EAAY,EAnBrC,MAAM,EACjB,uBAEE,EACG,KAAK,kBAAkB,EAAU,QAAS,CACzC,KAAM,EACP,CAAC,CACD,MAAM,CACX,CACE,SAAU,EAAS,EAAQ,IAAY,CACrC,EAAO,KACL,EAAO,OACL,iBAAiB,EAAQ,yCAAyC,IACnE,CACF,EAEJ,CACF,CAEiF,CAKlF,GAJI,CAAC,GAAO,SAEZ,MAAM,EAEF,CAAC,GAAY,MACjB,EAAS,GCjDb,SAAgB,EAAqB,EAA6C,CAEhF,OADuB,EAAS,gBAAoB,EAAS,gBACvC,YAAc,UCStC,SAAgB,EACd,EACA,EAMA,CACA,GAAI,IAAS,YACX,MAAO,CACL,MAAO,EAAS,eAAiB,IAAI,KAAK,EAAS,eAAe,CAAG,IAAA,GACrE,OAAQ,EAAS,gBAAkB,IAAI,KAAK,EAAS,gBAAgB,CAAG,IAAA,GACzE,CAEH,IAAM,EAAI,EAAS,QAAU,EAAE,CAC/B,MAAO,CACL,MAAO,EAAE,aAAe,IAAI,KAAK,EAAE,aAAa,CAAG,IAAA,GACnD,OAAQ,EAAE,cAAgB,IAAI,KAAK,EAAE,cAAc,CAAG,IAAA,GACvD,CAWH,SAAS,EACP,EACA,EACA,EACwB,CAOxB,OANI,IAAS,YACJ,CACL,GAAG,EACH,gBAAiB,GAAa,EAAK,gBACpC,CAEI,CACL,GAAG,EACH,OAAQ,CACN,GAAI,EAAK,QAAU,EAAE,CACrB,GAAI,EAAY,CAAE,cAAe,EAAW,CAAG,EAAE,CAClD,CAED,eAAgB,IAAA,GAChB,gBAAiB,IAAA,GAClB,CAWH,eAAe,EACb,EACA,EACA,EAC6C,CAC7C,EAAO,KAAK,EAAO,QAAQ,oCAAoC,KAAK,UAAU,EAAO,GAAG,CAAC,CAEzF,IAAM,EAAM,MADD,EAAoB,EAAQ,EAAW,EAAuB,EAAE,CACtD,MAAM,CAC3B,GAAI,EAAI,MAAQ,CAAC,EAAI,OAAS,EAAI,MAAM,SAAW,EAEjD,OADA,EAAO,KAAK,EAAO,OAAO,0BAA0B,CAAC,CAC9C,KAET,IAAM,EAAO,EAAI,MAAM,GASvB,OARA,EAAO,KACL,EAAO,MACL,iCAAiC,EAC/B,EAAqB,EAAO,CAC5B,EACD,CAAC,aAAa,GAChB,CACF,CACM,EAkBT,eAAsB,EACpB,EACA,EAUe,CACf,GAAM,CAAE,YAAW,OAAM,aAAY,kBAAkB,MAAS,EAG1D,EAAS,MAAM,EAAS,EAAQ,EAAW,EAAgB,EAAM,EAAW,CAAC,CACnF,GAAI,CAAC,EAEH,OADA,EAAO,KAAK,EAAO,OAAO,sDAAsD,CAAC,CAC1E,EAAc,IAAI,KAAO,CAElC,IAAM,EAAgB,EAA2B,EAAM,EAAO,CAC9D,EAAO,KAAK,EAAO,MAAM,mBAAmB,EAAc,aAAa,GAAG,CAAC,CAG3E,IAAM,EAAY,CAAC,EAAG,EAAG,GAAG,CACxB,EAAc,EACd,EAAS,EAA
U,GAAK,EAExB,EAAmB,EACnB,EAAkC,KAGtC,OAAa,CACX,IAAM,EACJ,EAAc,EAAU,OACpB,IAAI,KAAK,EAAc,SAAS,CAAG,EAAU,GAAe,EAAO,CACnE,IAAI,KAAK,EAAc,SAAS,CAAG,EAAO,CAKhD,IADG,EAAc,IAAI,KAAO,CAAC,SAAS,CAAG,EAAc,EAAW,CAAC,SAAS,EAAA,MAC5D,EAAiB,CAC/B,EAAO,KACL,EAAO,OACL,6CAA6C,EAAgB,yBAC9D,CACF,CACD,EAAqB,EACrB,MAGF,EAAO,KACL,EAAO,QACL,kBAAkB,EAAW,aAAa,CAAC,cACzC,EAAc,EAAU,OACpB,GAAG,EAAU,GAAa,GAC1B,GAAG,KAAK,MAAM,EAAS,EAAO,CAAC,GACpC,IACF,CACF,CAED,IAAM,EAAM,MAAM,EAChB,EACA,EACA,EAAgB,EAAM,EAAY,EAAW,aAAa,CAAC,CAC5D,CAED,GAAI,EAAK,CACP,EAAmB,EAA2B,EAAM,EAAI,CACxD,EAAO,KACL,EAAO,MACL,yBAAyB,EAAiB,aAAa,CAAC,2BACzD,CACF,CAEG,EAAc,EAAU,OAAS,GACnC,GAAe,EACf,EAAS,EAAU,GAAe,GACzB,IAAgB,EAAU,OAAS,GAC5C,GAAe,EACf,EAAS,EAAU,EAAU,OAAS,GAAK,EAAI,GAE/C,GAAU,EAGZ,SAIF,EAAqB,EACrB,EAAO,KACL,EAAO,MAAM,oBAAoB,EAAW,aAAa,CAAC,mCAAmC,CAC9F,CACD,MAIF,AACE,IAAqB,IAAI,KAAK,EAAiB,SAAS,CAAG,EAAO,CAOpE,IAAI,EAAK,EACL,EAAK,EACL,EAAU,KAAK,IAAI,EAAQ,KAAK,OAAO,EAAG,SAAS,CAAG,EAAG,SAAS,EAAI,GAAG,CAAC,CAC9E,EAAO,KACL,EAAO,QACL,+CAA+C,EAAG,aAAa,CAAC,SAAS,EAAG,aAAa,CAAC,QAAQ,KAAK,MACrG,EAAU,EACX,CAAC,GACH,CACF,CAGD,IAAK,IAAI,EAAI,EAAG,EAAI,EAAG,GAAK,EAAG,CAC7B,IAAM,EAAQ,IAAI,KAAK,EAAG,SAAS,CAAG,EAAQ,CAC9C,GAAI,EAAM,SAAS,EAAI,EAAG,SAAS,CAAE,MAErC,EAAO,KAAK,EAAO,QAAQ,+BAA+B,EAAM,aAAa,CAAC,GAAG,CAAC,CAClF,IAAM,EAAM,MAAM,EAChB,EACA,EACA,EAAgB,EAAM,EAAY,EAAM,aAAa,CAAC,CACvD,CAuBD,GArBI,GAEF,EAAK,EAA2B,EAAM,EAAI,CAC1C,EAAO,KACL,EAAO,MACL,iBAAiB,EAAG,aAAa,CAAC,8CACnC,CACF,CACD,EAAU,KAAK,IAAI,EAAQ,KAAK,MAAM,EAAU,EAAE,CAAC,GAGnD,EAAG,QAAQ,EAAM,SAAS,CAAC,CAC3B,EAAO,KACL,EAAO,OACL,0CAA0C,EAAG,aAAa,CAAC,sBAC5D,CACF,CACD,EAAU,KAAK,IAAI,EAAG,SAAS,CAAG,EAAG,SAAS,CAAE,EAAU,EAAE,CACxD,EAAA,QAAkB,EAAU,IAG9B,EAAG,SAAS,CAAG,EAAG,SAAS,EAAA,MAAY,MAI7C,KAAO,EAAG,SAAS,CAAG,EAAG,SAAS,CAAG,GAAQ,CAC3C,IAAM,EAAM,IAAI,KAAK,EAAG,SAAS,CAAG,KAAK,OAAO,EAAG,SAAS,CAAG,EAAG,SAAS,EAAI,EAAE,CAAC,CAClF,EAAO,KAAK,EAAO,QAAQ,uBAAuB,EAAI,aAAa,CAAC,GAAG,CAAC,CAExE,IAAM,EAAM,MAAM,EAChB,EACA,EACA,EAAgB,EAAM,EAAY,EAAI,aAAa,CAAC,CACrD,CAED,GAAI,EAAK,CACP,IAAM,EAAO,EAA2B,EAAM,EAAI,CAClD,EAAO,KAAK,EAAO,MAAM,gCAAgC,EAAK,aAAa,CAAC,GAAG,CAAC,CAChF,EAAK,OAEL,EAAO,KAAK,EAAO,OAAO,gCAAgC,CAAC,CAC3D,EAAK,EAIT,IAAM,EAAc,EAAc,EAAG,CAMrC,OALA,EAAO,KACL,EAAO,MACL,kCAAkC,EAAY,aAAa,CAAC,cAAc,EAAG,aAAa,CAAC,IAC5F,CACF,CACM,EAYT,eAAsB,EACpB,EACA,EAUe,CACf,GAAM,CAAE,YAAW,OAAM,cAAe,EAExC,EAAO,KAAK,EAAO,QAAQ,+CAA+C,CAAC,CAC3E,IAAM,EAAS,MAAM,EAAS,EAAQ,EAAW,EAAgB,EAAM,EAAW,CAAC,CACnF,GAAI,CAAC,EAEH,OADA,EAAO,KAAK,EAAO,OAAO,2DAA2D,CAAC,CAC/E,EAAc,IAAI,KAAO,CAGlC,IAAM,EAAO,EAA2B,EAAM,EAAO,CACrD,EAAO,KAAK,EAAO,MAAM,4BAA4B,EAAK,aAAa,CAAC,GAAG,CAAC,CAE5E,IAAM,EAAY,EAAc,EAAK,CAOrC,OANA,EAAO,KACL,EAAO,MACL,gCAAgC,EAAU,aAAa,CAAC,gBAAgB,EAAK,aAAa,CAAC,GAC5F,CACF,CAEM,EC7TT,SAAS,EACP,EACA,EACA,EACwB,CAWxB,OAVI,IAAS,YACJ,CACL,GAAG,EACH,eAAgB,EAAO,gBAAkB,EAAK,eAC9C,gBAAiB,EAAO,iBAAmB,EAAK,gBAEhD,OAAQ,IAAA,GACT,CAGI,CACL,GAAG,EACH,OAAQ,CACN,GAAI,EAAK,QAAU,EAAE,CACrB,GAAI,EAAO,QAAQ,aAAe,CAAE,aAAc,EAAO,OAAO,aAAc,CAAG,EAAE,CACnF,GAAI,EAAO,QAAQ,cAAgB,CAAE,cAAe,EAAO,OAAO,cAAe,CAAG,EAAE,CACvF,CAED,eAAgB,IAAA,GAChB,gBAAiB,IAAA,GAClB,CAcH,eAAsB,EACpB,EACA,CACE,YACA,WAAW,EAAE,CACb,QAAQ,GACR,oBAAoB,GACpB,YAAY,IACZ,kBAAkB,KAClB,WAkBsC,CACxC,IAAM,EAAkB,EAAqB,EAAS,CACtD,EAAO,KACL,EAAO,QACL,6CACE,IAAS,YAAc,YAAc,mBACtC,KACF,CACF,CAGD,GAAI,CAAE,QAAO,UAAW,EAA2B,EAAM,EAAS,CASlE,GARA,EAAO,KACL,EAAO,QACL,yBAAyB,GAAO,aAAa,EAAI,YAAY,UAC3D,GAAQ,aAAa,EAAI,cAE5B,CACF,EAEG,CAAC,GAAS,CAAC,KACR,IACH,EAAO,KACL,EAAO,QAAQ,oDAAoD,EAAU,KAAK,CACnF,CACD,EAAQ,MAAM,EAAwB,EAAQ,CAC5C,YACA,OACA,WAAY,EACZ,kBACD,CAAC,CACF,EAAO,KAAK,EAAO,MAAM,sCAAsC,EAAM,aAAa,GAAG,C
AAC,EAEpF,CAAC,GAAQ,CACX,EAAO,KAAK,EAAO,QAAQ,kDAAkD,EAAU,KAAK,CAAC,CAC7F,IAAM,EAAY,MAAM,EAAsB,EAAQ,CACpD,YACA,OACA,WAAY,EACZ,SAAU,EACX,CAAC,CAEF,EAAS,EAAW,EAAW,EAAE,CACjC,EAAO,KAAK,EAAO,MAAM,oCAAoC,EAAU,aAAa,GAAG,CAAC,CAI5F,EAAO,KACL,EAAO,MAAM,6BAA6B,EAAM,aAAa,CAAC,UAAU,EAAO,aAAa,GAAG,CAChG,CAGD,IAAM,EAAS,EAAmB,EAAM,EAAO,EAAQ,EAAU,CAEjE,EAAO,KACL,EAAO,QACL,+CAA+C,EAAU,MAAM,EAAO,OAAO,YAC9E,CACF,CAKD,IAAM,EAAM,IAAI,EAAY,UAC1B,CACE,OAAQ,iFACT,CACD,EAAY,QAAQ,eACrB,CAEG,EAAY,EACZ,EAAU,EAEd,EAAI,MAAM,EAAO,OAAQ,EAAG,CAAE,UAAS,CAAC,CAExC,IAAM,EAAK,KAAK,KAAK,CACf,EAAW,EAAc,EAAM,CAG/B,EAAqC,EAAE,CAoC7C,OAlCA,MAAMA,EACJ,EAAO,KAAK,EAAc,KAAS,CAAE,eAAc,MAAK,EAAE,CAC1D,MAAO,CAAE,kBAAmB,CAC1B,IAAM,EAAS,EAAY,EAAM,EAAU,EAAa,CAGxD,UAAW,IAAM,KAAQ,EAAoB,EAAQ,EAAW,EAAQ,EAAS,CAC/E,GAAW,EAAK,OAChB,EAAI,OAAO,EAAW,CAAE,UAAS,CAAC,CAE9B,EACF,MAAM,EAAQ,EAAK,CAEnB,EAAI,KAAK,GAAG,EAAK,CAIrB,GAAa,EACb,EAAI,OAAO,EAAW,CAAE,UAAS,CAAC,EAEpC,CAAE,YAAa,KAAK,IAAI,EAAG,EAAkB,CAAE,CAChD,CAED,EAAI,OAAO,EAAW,CAAE,UAAS,CAAC,CAClC,EAAI,MAAM,CAEV,EAAO,KACL,EAAO,MACL,WAAW,EAAQ,6CAA6C,EAAU,OACvE,KAAK,KAAK,CAAG,GAAM,IACrB,IACF,CACF,CAEM,EAAU,EAAE,CAAG,EC1LxB,eAAsB,EACpB,EACA,CACE,YACA,WAAW,EAAE,CACb,QAAQ,GACR,WAWsC,CACxC,IAAM,EAA2C,EAAE,CAG/C,EAGE,EACJ,IACC,OAAO,KAAK,EAAS,CAAC,OAAS,GAC7B,EAAS,QAAU,OAAO,KAAK,EAAS,OAAO,CAAC,OAAS,GAGxD,EAAW,KAAK,IAAI,EAAG,KAAK,IAAI,GAAI,GAAS,GAAG,CAAC,CAKvD,OAAa,CACX,IAAM,EAOF,CAAE,MAAO,EAAU,CAEnB,IACF,EAAK,OAAS,GAEZ,IACF,EAAK,OAAS,GAsBhB,GAAM,CAAE,QAAO,OAAQ,GAAe,EAAY,EAnBjC,MAAM,EACrB,uBAEE,EACG,KAAK,kBAAkB,EAAU,QAAS,CACzC,KAAM,EACP,CAAC,CACD,MAAM,CACX,CACE,SAAU,EAAS,EAAQ,IAAY,CACrC,EAAO,KACL,EAAO,OACL,iBAAiB,EAAQ,6CAA6C,IACvE,CACF,EAEJ,CACF,CAEqF,CAYtF,GAVI,CAAC,GAAS,EAAM,SAAW,IAI3B,EACF,MAAM,EAAQ,EAAM,CAEpB,EAAU,KAAK,GAAG,EAAM,CAGtB,CAAC,GACH,MAEF,EAAS,EAGX,OAAO,EAAU,EAAE,CAAG,ECxFxB,MAAM,EAAY,CAChB,SACA,YACA,YACA,mBACA,YACA,MACA,MACA,MACA,gBACA,WACA,oBACD,CAqBD,eAAsB,EAEpB,CACE,OACA,YACA,aACA,OACA,eACA,kBACA,iBACA,gBACA,eACA,cAAc,EAAE,CAChB,cACA,cACA,oBACA,YACA,iCACA,mBAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAAS,MAAM,EAAwB,EAAc,EAAM,EAAW,CACtE,EAAS,EAA4B,EAAc,EAAK,CAGxD,EAAoB,EAAY,IAAK,GAAqC,CAC9E,GAAI,CAAC,EAAW,SAAS,IAAI,CAC3B,MAAO,CACL,KAAM,QACN,MAAO,EACR,CAEH,GAAM,CAAC,EAAM,GAAS,EAAW,MAAM,IAAI,CAC3C,MAAO,CAAE,OAAM,QAAO,EACtB,CAGI,EAAW,CACf,GAAI,EAAkB,CAAE,gBAAiB,EAAgB,aAAa,CAAE,CAAG,EAAE,CAC7E,GAAI,EAAiB,CAAE,eAAgB,EAAe,aAAa,CAAE,CAAG,EAAE,CAC1E,GAAI,GAAgB,EAChB,CACE,OAAQ,CACN,GAAI,EAAgB,CAAE,cAAe,EAAc,aAAa,CAAE,CAAG,EAAE,CACvE,GAAI,EAAe,CAAE,aAAc,EAAa,aAAa,CAAE,CAAG,EAAE,CACrE,CACF,CACD,EAAE,CACN,GAAI,EAAkB,OAAS,EAAI,CAAE,YAAa,EAAmB,CAAG,EAAE,CAC3E,CAED,EAAO,KACL,+CAA+C,EAAU,eACvD,EAAc,iBAAmB,eAClC,KACF,CAED,EAAO,KAAK,EAAO,QAAQ,qBAAqB,IAAO,CAAC,CAGxD,GAAM,CAAC,EAAoB,GAAkB,MAAM,QAAQ,IAAI,CAC7D,EAA+B,EAAO,CACtC,EAAoB,EAAO,CAC5B,CAAC,CAGI,EAAiB,EAAe,IAAK,GAAM,EAAE,KAAK,CAGlD,EAAY,MAAM,KACtB,IAAI,IACF,EAAmB,QAAS,GAAM,EAAE,QAAQ,IAAK,GAAM,GAAG,EAAE,aAAa,GAAG,EAAE,OAAO,EAAI,EAAE,CAAC,CAC7F,CACF,CAAC,MAAM,EAAG,IAAM,EAAE,cAAc,EAAE,CAAC,CAI9B,EAAc,MAAM,KAAK,IAAI,IAAI,EAAmB,IAAK,GAAM,EAAE,aAAa,CAAC,CAAC,CAAC,MACpF,EAAG,IAAM,EAAE,cAAc,EAAE,CAC7B,CAIK,EAAsB,CAAC,GAAG,EAAW,GAAG,EAAgB,GAAG,EAAa,GAAG,EAAU,CAGvF,EAA+B,KAC/B,EAAc,GACZ,EAAa,GAA+C,CAChE,GAAI,CAAC,GAAS,EAAM,SAAW,EAAG,OAClC,IAAM,EAAO,EAAM,IAAK,GACtB,EAA+B,EAAK,EAA+B,CACpE,CACD,GAAI,CAAC,EAAa,CAChB,IAAM,EAAY,OAAO,KAAK,EAAK,IAAM,EAAE,CAAC,CACtC,EAAO,IAAI,IACjB,EAAc,CAAC,GAAG,EAAqB,GAAG,EAAU,CAAC,OAAQ,GACvD,IAAM,IAAA,IACN,EAAK,IAAI,EAAE,CAAS,IACxB,EAAK,IAAI,EAAE,CACJ,IACP,CACF,EAAY,EAAM,EAAY,CAC9B,EAAc,GAEhB,EAAqB,EAAM,EAAM,EAAa,EAGhD,GAAI,EAAa,CAEf,MAAM,E
AA+B,EAAQ,CAC3C,YACA,WACA,MAAO,EACP,oBACA,YACA,kBACA,QAAU,GAAU,EAAU,EAAM,CACrC,CAAC,CAEF,EAAO,KAAK,EAAO,MAAM,2BAA2B,IAAO,CAAC,CAC5D,OAIF,MAAM,EAAwB,EAAQ,CACpC,YACA,WACA,MAAO,EACP,QAAU,GAAU,EAAU,EAAM,CACrC,CAAC,CAEF,EAAO,KAAK,EAAO,MAAM,2BAA2B,IAAO,CAAC"}
+ {"version":3,"file":"impl-DsNPvet4.mjs","names":["pmap"],"sources":["../src/lib/preference-management/transformPreferenceRecordToCsv.ts","../src/lib/preference-management/buildConsentChunks.ts","../src/lib/preference-management/getComparisonTimeForRecord.ts","../src/lib/preference-management/iterateConsentPages.ts","../src/lib/preference-management/pickConsentChunkMode.ts","../src/lib/preference-management/discoverConsentWindow.ts","../src/lib/preference-management/fetchConsentPreferencesChunked.ts","../src/lib/preference-management/fetchConsentPreferences.ts","../src/commands/consent/pull-consent-preferences/impl.ts"],"sourcesContent":["import type { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\n\n/**\n * Transforms the output of the consent preferences query into a CSV-friendly format.\n *\n * @param input - The input object containing consent preferences data.\n * @param exportIdentifiersWithDelimiter - Delimiter to use when combining multiple identifier values.\n * @returns A record representing the transformed CSV output.\n */\nexport function transformPreferenceRecordToCsv(\n {\n identifiers = [],\n purposes = [],\n metadata = [],\n consentManagement = {},\n system = {\n decryptionStatus: 'DECRYPTED',\n },\n // keep other top-level fields as-is (e.g., partition, timestamp, metadataTimestamp)\n ...topLevel\n }: PreferenceQueryResponseItem,\n exportIdentifiersWithDelimiter: string,\n): Record<string, unknown> {\n // Start with: all other top-level fields + spread system and consentManagement\n const out: Record<string, unknown> = {\n ...topLevel,\n ...system,\n ...consentManagement,\n };\n\n // ── identifiers: each identifier.name -> CSV of values\n if (Array.isArray(identifiers)) {\n const byName = new Map<string, Set<string>>();\n for (const { name, value } of identifiers) {\n if (!byName.has(name)) byName.set(name, new Set());\n if (value) byName.get(name)!.add(value);\n }\n for (const [name, set] of byName.entries()) {\n out[name] = Array.from(set).join(exportIdentifiersWithDelimiter);\n }\n }\n\n // ── metadata: serialize as JSON\n if (Array.isArray(metadata)) {\n out.metadata = JSON.stringify(\n metadata.reduce(\n (acc, { key, value }) => {\n acc[key] = value;\n return acc;\n },\n {} as Record<string, string>,\n ),\n );\n }\n\n // ── purposes:\n // - purpose.slug column => true/false (enabled)\n // - for each preference: purpose.slug_preference.slug => bool | single | CSV (multi)\n if (Array.isArray(purposes)) {\n for (const { purpose, preferences, enabled } of purposes) {\n out[purpose] = Boolean(enabled);\n\n // nested preferences\n if (Array.isArray(preferences)) {\n for (const { topic, choice } of preferences) {\n const col = `${purpose}_${topic}`;\n\n let val: unknown = null;\n\n if (typeof choice.booleanValue === 'boolean') {\n val = choice.booleanValue;\n } else if (choice.selectValue) {\n val = choice.selectValue;\n } else if (Array.isArray(choice.selectValues)) {\n const vs = choice.selectValues.filter((v) => v.length > 0);\n val = vs.join(',');\n } else {\n // no pref value present -> null\n val = null;\n }\n\n out[col] = val;\n }\n }\n }\n }\n\n return out;\n}\n","import { FIVE_MIN_MS } from '../helpers/index.js';\nimport type { ChunkMode, PreferencesQueryFilter } from './types.js';\n\n/**\n * Build chunk windows by splitting [lower, upperExclusive) into up to `maxChunks`\n * equal-sized ranges, with a minimum chunk span of 5 minutes. 
Boundaries are snapped\n * once at the start to the nearest 5-minute boundary for stability.\n *\n * Each returned window is already \"half-open\" for an *inclusive* backend:\n * we subtract 1ms from the exclusive end so adjacent chunks do not overlap.\n *\n * Example (timestamp mode): [10:00, 12:00) → { after=10:00:00.000Z, before=11:59:59.999Z }\n *\n * @param mode - 'timestamp' or 'updated'\n * @param lower - Lower bound (inclusive)\n * @param upperExclusive - Upper bound (exclusive)\n * @param maxChunks - Maximum number of chunks to create\n * @returns Array of chunked preference query filters\n */\nexport function buildConsentChunks(\n mode: ChunkMode,\n lower: Date,\n upperExclusive: Date,\n maxChunks = 5000,\n): Array<PreferencesQueryFilter> {\n const totalMs = Math.max(0, upperExclusive.getTime() - lower.getTime());\n if (totalMs === 0) return [];\n\n // Snap only the starting boundary to the nearest 5-minute boundary.\n // We avoid re-snapping every step to prevent cumulative drift.\n const seriesStart = new Date(Math.floor(lower.getTime() / FIVE_MIN_MS) * FIVE_MIN_MS);\n\n // Compute base chunk size (ceil to ensure ≤ maxChunks), enforced ≥ 5m.\n const rawChunkMs = Math.ceil(totalMs / Math.max(1, maxChunks));\n const chunkMs = Math.max(FIVE_MIN_MS, rawChunkMs);\n\n // Number of chunks needed to cover [seriesStart, upperExclusive)\n const count = Math.ceil((upperExclusive.getTime() - seriesStart.getTime()) / chunkMs);\n\n const chunks: PreferencesQueryFilter[] = [];\n\n for (let i = 0; i < count; i += 1) {\n const startMs = seriesStart.getTime() + i * chunkMs;\n const endExclusiveMs = Math.min(upperExclusive.getTime(), startMs + chunkMs);\n\n // Convert exclusive end to inclusive end for an inclusive backend: -1ms.\n const endInclusiveMs = endExclusiveMs - 1;\n\n // Guard: in degenerate cases (shouldn’t happen with the math above), clamp.\n const safeEndMs = Math.max(startMs, endInclusiveMs);\n\n const afterIso = new Date(startMs).toISOString();\n const beforeIso = new Date(safeEndMs).toISOString();\n\n if (mode === 'timestamp') {\n chunks.push({\n timestampAfter: afterIso,\n timestampBefore: beforeIso,\n });\n } else {\n chunks.push({\n system: {\n updatedAfter: afterIso,\n updatedBefore: beforeIso,\n },\n });\n }\n }\n\n return chunks;\n}\n","import type { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\n\nimport type { ChunkMode } from './types.js';\n\n/**\n * Get the comparison instant for sorting based on the chosen dimension.\n *\n * @param mode - Chunking mode\n * @param item - Preference item\n * @returns date\n */\nexport function getComparisonTimeForRecord(\n mode: ChunkMode,\n item: PreferenceQueryResponseItem,\n): Date {\n if (mode === 'timestamp') {\n return new Date(item.timestamp);\n }\n // mode === 'updated'\n return item.system?.updatedAt ? 
new Date(item.system.updatedAt) : new Date();\n}\n","import type { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport colors from 'colors';\nimport type { Got } from 'got';\n\nimport { logger } from '../../logger.js';\nimport { ConsentPreferenceResponse, PreferencesQueryFilter } from './types.js';\nimport { withPreferenceRetry } from './withPreferenceRetry.js';\n\n/**\n * Async generator over pages for a given filter\n *\n * @param sombra - Sombra Got instance\n * @param partition - Partition key\n * @param filter - Query filter\n * @param pageSize - Number of items per page\n * @yields Pages of PreferenceQueryResponseItem\n */\nexport async function* iterateConsentPages(\n sombra: Got,\n partition: string,\n filter: PreferencesQueryFilter,\n pageSize: number,\n): AsyncGenerator<PreferenceQueryResponseItem[], void, void> {\n let cursor: string | undefined;\n\n while (true) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const body: any = { limit: pageSize };\n if (filter && Object.keys(filter).length) body.filter = filter;\n if (cursor) body.cursor = cursor;\n\n const resp = await withPreferenceRetry(\n 'Preference Query',\n () =>\n sombra\n .post(`v1/preferences/${partition}/query`, {\n json: body,\n })\n .json(),\n {\n onRetry: (attempt, _error, message) => {\n logger.warn(\n colors.yellow(\n `Retry attempt ${attempt} for iterateConsentPages due to error: ${message}`,\n ),\n );\n },\n },\n );\n\n const { nodes, cursor: nextCursor } = decodeCodec(ConsentPreferenceResponse, resp);\n if (!nodes?.length) break;\n\n yield nodes;\n\n if (!nextCursor) break;\n cursor = nextCursor;\n }\n}\n","import type { ChunkMode, PreferencesQueryFilter } from './types.js';\n\n/**\n * Decide which dimension to chunk on: 'timestamp' if timestamps provided, otherwise 'updated'\n *\n * @param filterBy - Filter to examine\n * @returns Chosen chunk mode\n */\nexport function pickConsentChunkMode(filterBy: PreferencesQueryFilter): ChunkMode {\n const hasTimestamp = !!filterBy.timestampAfter || !!filterBy.timestampBefore;\n return hasTimestamp ? 'timestamp' : 'updated';\n}\n","import type { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\n/* eslint-disable max-lines */\nimport colors from 'colors';\nimport type { Got } from 'got';\n\nimport { logger } from '../../logger.js';\nimport { startOfUtcDay, DAY_MS } from '../helpers/index.js';\nimport { getComparisonTimeForRecord } from './getComparisonTimeForRecord.js';\nimport { iterateConsentPages } from './iterateConsentPages.js';\nimport { pickConsentChunkMode } from './pickConsentChunkMode.js';\nimport { ChunkMode, PreferencesQueryFilter } from './types.js';\n\n/**\n * Get after/before bounds from filter for the given mode\n *\n * @param mode - Chunking mode\n * @param filterBy - Filter to examine\n * @returns after/before dates\n */\nexport function getBoundsFromConsentFilter(\n mode: ChunkMode,\n filterBy: PreferencesQueryFilter,\n): {\n /** After date */\n after?: Date;\n /** Before date */\n before?: Date;\n} {\n if (mode === 'timestamp') {\n return {\n after: filterBy.timestampAfter ? new Date(filterBy.timestampAfter) : undefined,\n before: filterBy.timestampBefore ? new Date(filterBy.timestampBefore) : undefined,\n };\n }\n const u = filterBy.system ?? {};\n return {\n after: u.updatedAfter ? new Date(u.updatedAfter) : undefined,\n before: u.updatedBefore ? 
new Date(u.updatedBefore) : undefined,\n };\n}\n\n/**\n * Merge base filter with a \"before\" bound (without mixing dimensions).\n *\n * @param mode - Chunking mode\n * @param base - Base filter to augment\n * @param beforeISO - ISO timestamp to apply as the exclusive *Before bound for the chosen dimension\n * @returns New filter with the appropriate *Before constraint applied\n */\nfunction withBeforeBound(\n mode: ChunkMode,\n base: PreferencesQueryFilter,\n beforeISO?: string,\n): PreferencesQueryFilter {\n if (mode === 'timestamp') {\n return {\n ...base,\n timestampBefore: beforeISO ?? base.timestampBefore,\n };\n }\n return {\n ...base,\n system: {\n ...(base.system || {}),\n ...(beforeISO ? { updatedBefore: beforeISO } : {}),\n },\n // ensure we don't mix dimensions\n timestampAfter: undefined,\n timestampBefore: undefined,\n };\n}\n\n/**\n * Fetch a single record (or null) with the given filter.\n *\n * @param sombra - Got instance configured for Sombra API\n * @param partition - Preference Store partition id\n * @param filter - Query filter to use (page size internally forced to 1)\n * @returns The first record or null if none\n */\nasync function fetchOne(\n sombra: Got,\n partition: string,\n filter: PreferencesQueryFilter,\n): Promise<PreferenceQueryResponseItem | null> {\n logger.info(colors.magenta(`Single-record probe with filter: ${JSON.stringify(filter)}`));\n const it = iterateConsentPages(sombra, partition, filter, /* pageSize */ 1);\n const res = await it.next();\n if (res.done || !res.value || res.value.length === 0) {\n logger.info(colors.yellow('Probe result: no record'));\n return null;\n }\n const item = res.value[0]!;\n logger.info(\n colors.green(\n `Probe result: found record at ${getComparisonTimeForRecord(\n pickConsentChunkMode(filter),\n item,\n ).toISOString()}`,\n ),\n );\n return item;\n}\n\n/**\n * Robust earliest-day search (UTC):\n * 1) Anchor at the newest record (single-record probe).\n * 2) Exponential “jump back” using seeds (1d, 7d, 30d) then doubling (60d, 120d, 240d, …)\n * to cross into an empty region and establish a lower empty bound.\n * 3) **Exponential forward-from-empty**: gallop forward from the empty bound toward the last-found\n * to land close to the frontier quickly.\n * 4) Tighten with a short binary search on time using single-record probes.\n *\n * (Implementation note: preserves the public signature and docs while improving efficiency.)\n *\n * @param sombra - Sombra\n * @param opts - Options\n * @returns Earliest day with data (UTC start-of-day)\n */\nexport async function findEarliestDayWithData(\n sombra: Got,\n opts: {\n /** Partition */\n partition: string;\n /** Chunking mode */\n mode: ChunkMode;\n /** Base filter */\n baseFilter: PreferencesQueryFilter;\n /** Optional safety cap in days to avoid unbounded lookback (default ~10 years) */\n maxLookbackDays?: number;\n },\n): Promise<Date> {\n const { partition, mode, baseFilter, maxLookbackDays = 3650 } = opts;\n\n // 1) Find newest record (anchors our backtracking).\n const newest = await fetchOne(sombra, partition, withBeforeBound(mode, baseFilter));\n if (!newest) {\n logger.info(colors.yellow('No records found; defaulting earliest day to today.'));\n return startOfUtcDay(new Date());\n }\n const newestInstant = getComparisonTimeForRecord(mode, newest);\n logger.info(colors.green(`Newest instant: ${newestInstant.toISOString()}`));\n\n // 2) Exponential jump back to find an empty region.\n const seedSteps = [1, 7, 30]; // days\n let stepDaysIdx = 0;\n let stepMs = 
seedSteps[0] * DAY_MS;\n\n let lastFoundInstant = newestInstant; // last instant we *could* find a record before\n let emptyBeforeInstant: Date | null = null; // first bound that yielded no results\n\n // eslint-disable-next-line no-constant-condition\n while (true) {\n const probeBound =\n stepDaysIdx < seedSteps.length\n ? new Date(newestInstant.getTime() - seedSteps[stepDaysIdx] * DAY_MS)\n : new Date(newestInstant.getTime() - stepMs);\n\n // stop if we exceeded lookback cap\n const daysSince =\n (startOfUtcDay(new Date()).getTime() - startOfUtcDay(probeBound).getTime()) / DAY_MS;\n if (daysSince > maxLookbackDays) {\n logger.warn(\n colors.yellow(\n `Exponential jump exceeded maxLookbackDays=${maxLookbackDays}. Using current bounds.`,\n ),\n );\n emptyBeforeInstant = probeBound;\n break;\n }\n\n logger.info(\n colors.magenta(\n `Probing before=${probeBound.toISOString()} (jump step ${\n stepDaysIdx < seedSteps.length\n ? `${seedSteps[stepDaysIdx]}d`\n : `${Math.round(stepMs / DAY_MS)}d`\n })…`,\n ),\n );\n\n const hit = await fetchOne(\n sombra,\n partition,\n withBeforeBound(mode, baseFilter, probeBound.toISOString()),\n );\n\n if (hit) {\n lastFoundInstant = getComparisonTimeForRecord(mode, hit);\n logger.info(\n colors.green(\n `Found older record at ${lastFoundInstant.toISOString()} — continue jumping back.`,\n ),\n );\n // advance step\n if (stepDaysIdx < seedSteps.length - 1) {\n stepDaysIdx += 1;\n stepMs = seedSteps[stepDaysIdx] * DAY_MS;\n } else if (stepDaysIdx === seedSteps.length - 1) {\n stepDaysIdx += 1; // switch to doubling mode\n stepMs = seedSteps[seedSteps.length - 1] * 2 * DAY_MS; // start at 60d\n } else {\n stepMs *= 2;\n }\n // eslint-disable-next-line no-continue\n continue;\n }\n\n // crossed into an empty zone — remember this bound\n emptyBeforeInstant = probeBound;\n logger.info(\n colors.green(`No record before ${probeBound.toISOString()} — established empty lower bound.`),\n );\n break;\n }\n\n // Guard: if for some reason empty bound wasn't set, synthesize one “just before” lastFound.\n if (!emptyBeforeInstant) {\n emptyBeforeInstant = new Date(lastFoundInstant.getTime() - DAY_MS);\n }\n\n // 3) Exponential forward-from-empty toward the found frontier.\n // This “gallop” reduces the span dramatically before binary search.\n // We keep moving the empty bound forward with exponentially growing steps\n // until we get a hit; then we shrink onto that hit instant.\n let lo = emptyBeforeInstant; // known EMPTY (no data before this bound)\n let hi = lastFoundInstant; // known FOUND (there is data before this instant)\n let fwdStep = Math.max(DAY_MS, Math.floor((hi.getTime() - lo.getTime()) / 64)); // start small-ish\n logger.info(\n colors.magenta(\n `Exponential forward-from-empty start: empty=${lo.toISOString()} found=${hi.toISOString()} step=${Math.round(\n fwdStep / DAY_MS,\n )}d`,\n ),\n );\n\n // Do a few gallop iterations (bounded so we don't loop forever if distribution is dense)\n for (let i = 0; i < 8; i += 1) {\n const probe = new Date(lo.getTime() + fwdStep);\n if (probe.getTime() >= hi.getTime()) break;\n\n logger.info(colors.magenta(`Forward gallop probe before=${probe.toISOString()}…`));\n const hit = await fetchOne(\n sombra,\n partition,\n withBeforeBound(mode, baseFilter, probe.toISOString()),\n );\n\n if (hit) {\n // We crossed into data — tighten hi to the actual hit instant.\n hi = getComparisonTimeForRecord(mode, hit);\n logger.info(\n colors.green(\n `Gallop hit at ${hi.toISOString()} — tightening found bound. 
Next step halves.`,\n ),\n );\n fwdStep = Math.max(DAY_MS, Math.floor(fwdStep / 2));\n } else {\n // Still empty up to probe — advance lo and double the step.\n lo.setTime(probe.getTime());\n logger.info(\n colors.yellow(\n `Gallop miss — advancing empty bound to ${lo.toISOString()}. Next step doubles.`,\n ),\n );\n fwdStep = Math.min(hi.getTime() - lo.getTime(), fwdStep * 2);\n if (fwdStep < DAY_MS) fwdStep = DAY_MS;\n }\n\n if (hi.getTime() - lo.getTime() <= DAY_MS) break;\n }\n\n // 4) Finish with a short binary search between [lo (empty), hi (found)].\n while (hi.getTime() - lo.getTime() > DAY_MS) {\n const mid = new Date(lo.getTime() + Math.floor((hi.getTime() - lo.getTime()) / 2));\n logger.info(colors.magenta(`Binary probe before=${mid.toISOString()}…`));\n\n const hit = await fetchOne(\n sombra,\n partition,\n withBeforeBound(mode, baseFilter, mid.toISOString()),\n );\n\n if (hit) {\n const when = getComparisonTimeForRecord(mode, hit);\n logger.info(colors.green(`Binary probe found record at ${when.toISOString()}.`));\n hi = when; // there is data before mid -> earliest could be even earlier\n } else {\n logger.info(colors.yellow('Binary probe found no record.'));\n lo = mid; // still empty -> move low up\n }\n }\n\n const earliestDay = startOfUtcDay(hi);\n logger.info(\n colors.green(\n `Earliest day (UTC) resolved to ${earliestDay.toISOString()} (instant ≈ ${hi.toISOString()}).`,\n ),\n );\n return earliestDay;\n}\n\n/**\n * Find latest day with data using exponential growth forward from earliest (UTC day math).\n *\n * (Implementation note: per your request, we now fetch a single newest record to infer the latest day.)\n *\n * @param sombra - Sombra\n * @param opts - Options\n * @returns Latest day with data\n */\nexport async function findLatestDayWithData(\n sombra: Got,\n opts: {\n /** Partition */\n partition: string;\n /** Chunking mode */\n mode: ChunkMode;\n /** Base filter */\n baseFilter: PreferencesQueryFilter;\n /** Earliest date */\n earliest: Date; // inclusive day start\n },\n): Promise<Date> {\n const { partition, mode, baseFilter } = opts;\n\n logger.info(colors.magenta('Latest-day discovery: probing newest record…'));\n const latest = await fetchOne(sombra, partition, withBeforeBound(mode, baseFilter));\n if (!latest) {\n logger.info(colors.yellow('No records found at all; defaulting latest day to today.'));\n return startOfUtcDay(new Date());\n }\n\n const when = getComparisonTimeForRecord(mode, latest);\n logger.info(colors.green(`Newest record instant is ${when.toISOString()}.`));\n\n const latestDay = startOfUtcDay(when);\n logger.info(\n colors.green(\n `Latest day (UTC) resolved to ${latestDay.toISOString()} from instant ${when.toISOString()}.`,\n ),\n );\n\n return latestDay;\n}\n/* eslint-enable max-lines */\n","import type { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport type { Got } from 'got';\n\nimport { logger } from '../../logger.js';\nimport { map as pmap } from '../bluebird.js';\nimport { addDaysUtc, clampPageSize } from '../helpers/index.js';\nimport { buildConsentChunks } from './buildConsentChunks.js';\nimport {\n findEarliestDayWithData,\n findLatestDayWithData,\n getBoundsFromConsentFilter,\n} from './discoverConsentWindow.js';\nimport { iterateConsentPages } from './iterateConsentPages.js';\nimport { pickConsentChunkMode } from './pickConsentChunkMode.js';\nimport { PreferencesQueryFilter, ChunkMode } from './types.js';\n\n/**\n * Merge 
baseFilter with a window filter, taking care not to mix timestamp/updated fields improperly.\n *\n * @param mode - The chunking mode\n * @param base - The base filter\n * @param window - The per-chunk window filter\n * @returns merged filter\n */\nfunction mergeFilter(\n mode: ChunkMode,\n base: PreferencesQueryFilter,\n window: PreferencesQueryFilter,\n): PreferencesQueryFilter {\n if (mode === 'timestamp') {\n return {\n ...base,\n timestampAfter: window.timestampAfter ?? base.timestampAfter,\n timestampBefore: window.timestampBefore ?? base.timestampBefore,\n // ensure we don't pass `system.*` when chunking by timestamp\n system: undefined,\n };\n }\n // mode === 'updated'\n return {\n ...base,\n system: {\n ...(base.system || {}),\n ...(window.system?.updatedAfter ? { updatedAfter: window.system.updatedAfter } : {}),\n ...(window.system?.updatedBefore ? { updatedBefore: window.system.updatedBefore } : {}),\n },\n // Ensure we don't mix dimensions\n timestampAfter: undefined,\n timestampBefore: undefined,\n };\n}\n\n/**\n * High-level chunked fetch with progress bar.\n *\n * If an `onItems` callback is provided, pages are streamed to the callback\n * as they are fetched (no accumulation in memory). If no callback is provided,\n * the function returns all items (legacy behavior).\n *\n * @param sombra - Got instance\n * @param options - Options\n * @returns preference items (only if onItems is not provided)\n */\nexport async function fetchConsentPreferencesChunked(\n sombra: Got,\n {\n partition,\n filterBy = {},\n limit = 50,\n windowConcurrency = 25,\n maxChunks = 5000,\n maxLookbackDays = 3650,\n onItems,\n }: {\n /** Partition */\n partition: string;\n /** Filter by preferences */\n filterBy?: PreferencesQueryFilter;\n /** Limit number of results (page size) */\n limit?: number;\n /** Window concurrency */\n windowConcurrency?: number;\n /** Max chunks */\n maxChunks?: number; // up to N chunks; min 1 hour per chunk\n /** Max lookback days for discovering bounds */\n maxLookbackDays?: number;\n /** Optional streaming sink; if provided, items are not accumulated */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n onItems?: (items: PreferenceQueryResponseItem[]) => Promise<any> | any;\n },\n): Promise<PreferenceQueryResponseItem[]> {\n const mode: ChunkMode = pickConsentChunkMode(filterBy);\n logger.info(\n colors.magenta(\n `Fetching consent preferences in chunks by ${\n mode === 'timestamp' ? 'timestamp' : 'system.updatedAt'\n }...`,\n ),\n );\n\n // Resolve / discover bounds (UTC)\n let { after, before } = getBoundsFromConsentFilter(mode, filterBy);\n logger.info(\n colors.magenta(\n `Initial bounds: after=${after?.toISOString() ?? 'undefined'} before=${\n before?.toISOString() ?? 
'undefined'\n }`,\n ),\n );\n\n if (!after || !before) {\n if (!after) {\n logger.info(\n colors.magenta(`Discovering earliest day with data for partition ${partition}...`),\n );\n after = await findEarliestDayWithData(sombra, {\n partition,\n mode,\n baseFilter: filterBy,\n maxLookbackDays,\n });\n logger.info(colors.green(`Discovered earliest day with data: ${after.toISOString()}`));\n }\n if (!before) {\n logger.info(colors.magenta(`Discovering latest day with data for partition ${partition}...`));\n const latestDay = await findLatestDayWithData(sombra, {\n partition,\n mode,\n baseFilter: filterBy,\n earliest: after,\n });\n // Exclusive upper bound = latest day start + 1 day (UTC)\n before = addDaysUtc(latestDay, 1);\n logger.info(colors.green(`Discovered latest day with data: ${latestDay.toISOString()}`));\n }\n }\n\n logger.info(\n colors.green(`Final bounds (UTC): after=${after.toISOString()} before=${before.toISOString()}`),\n );\n\n // Build up to `maxChunks` chunks, min 1 hour each\n const chunks = buildConsentChunks(mode, after, before, maxChunks);\n\n logger.info(\n colors.magenta(\n `Fetching consent preferences from partition ${partition} in ${chunks.length} chunks...`,\n ),\n );\n\n // Progress bar over chunks (unordered):\n // - value = completed chunks (out-of-order OK)\n // - payload fetched = total records fetched\n const bar = new cliProgress.SingleBar(\n {\n format: 'Downloading [{bar}] {percentage}% | chunks {value}/{total} | fetched {fetched}',\n },\n cliProgress.Presets.shades_classic,\n );\n\n let completed = 0; // finished chunks (out-of-order)\n let fetched = 0; // raw records counter\n\n bar.start(chunks.length, 0, { fetched });\n\n const t0 = Date.now();\n const pageSize = clampPageSize(limit);\n\n // If we are streaming, do not accumulate everything in memory.\n const out: PreferenceQueryResponseItem[] = [];\n\n await pmap(\n chunks.map((windowFilter, idx) => ({ windowFilter, idx })),\n async ({ windowFilter }) => {\n const filter = mergeFilter(mode, filterBy, windowFilter);\n\n // Stream this chunk page-by-page\n for await (const page of iterateConsentPages(sombra, partition, filter, pageSize)) {\n fetched += page.length;\n bar.update(completed, { fetched });\n\n if (onItems) {\n await onItems(page);\n } else {\n out.push(...page);\n }\n }\n\n completed += 1;\n bar.update(completed, { fetched });\n },\n { concurrency: Math.max(1, windowConcurrency) },\n );\n\n bar.update(completed, { fetched });\n bar.stop();\n\n logger.info(\n colors.green(\n `Fetched ${fetched} consent preference records from partition ${partition} in ${\n (Date.now() - t0) / 1000\n }s.`,\n ),\n );\n\n return onItems ? [] : out;\n}\n","import { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport colors from 'colors';\nimport type { Got } from 'got';\n\nimport { logger } from '../../logger.js';\nimport { ConsentPreferenceResponse, PreferencesQueryFilter } from './types.js';\nimport { withPreferenceRetry } from './withPreferenceRetry.js';\n\n/**\n * Fetch consent preferences for the managed consent database (new query endpoint)\n *\n * Uses POST /v1/preferences/{partition}/query with cursor pagination.\n *\n * If `onItems` is provided, this streams pages to the callback and does not\n * accumulate results in memory. 
If omitted, the function returns all items.\n *\n * @param sombra - Sombra instance (must include auth headers)\n * @param options - Query options\n * @returns All nodes (only when onItems is not provided)\n */\nexport async function fetchConsentPreferences(\n sombra: Got,\n {\n partition,\n filterBy = {},\n limit = 50,\n onItems,\n }: {\n /** Partition key to fetch (moved to URL path on new endpoint) */\n partition: string;\n /** Query filter (wrapped under \"filter\" in request body) */\n filterBy?: PreferencesQueryFilter;\n /** Number of users per page (1–50 per API spec) */\n limit?: number;\n /** Optional streaming sink; if provided, pages are not accumulated */\n onItems?: (items: PreferenceQueryResponseItem[]) => Promise<void> | void;\n },\n): Promise<PreferenceQueryResponseItem[]> {\n const collected: PreferenceQueryResponseItem[] = [];\n\n // Cursor-based pagination per new endpoint\n let cursor: string | undefined;\n\n // Build the filter payload, omitting empty filter\n const hasFilter =\n filterBy &&\n (Object.keys(filterBy).length > 0 ||\n (filterBy.system && Object.keys(filterBy.system).length > 0));\n\n // Enforce API max (defensive; backend also validates)\n const pageSize = Math.max(1, Math.min(50, limit ?? 50));\n\n // Keep fetching until no cursor is returned\n // (The API returns an opaque cursor string for the next page)\n // eslint-disable-next-line no-constant-condition\n while (true) {\n const body: {\n /** Filter by user identifiers (new shape) */\n filter?: PreferencesQueryFilter;\n /** Cursor for pagination */\n cursor?: string;\n /** Number of records per page */\n limit: number;\n } = { limit: pageSize };\n\n if (hasFilter) {\n body.filter = filterBy;\n }\n if (cursor) {\n body.cursor = cursor;\n }\n\n const response = await withPreferenceRetry(\n 'Preference Query',\n () =>\n sombra\n .post(`v1/preferences/${partition}/query`, {\n json: body,\n })\n .json(),\n {\n onRetry: (attempt, _error, message) => {\n logger.warn(\n colors.yellow(\n `Retry attempt ${attempt} for fetchConsentPreferences due to error: ${message}`,\n ),\n );\n },\n },\n );\n\n const { nodes, cursor: nextCursor } = decodeCodec(ConsentPreferenceResponse, response);\n\n if (!nodes || nodes.length === 0) {\n break;\n }\n\n if (onItems) {\n await onItems(nodes);\n } else {\n collected.push(...nodes);\n }\n\n if (!nextCursor) {\n break;\n }\n cursor = nextCursor;\n }\n\n return onItems ? 
[] : collected;\n}\n","import type { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\nimport colors from 'colors';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport {\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchAllIdentifiers,\n fetchAllPurposesAndPreferences,\n} from '../../../lib/graphql/index.js';\nimport { initCsvFile, appendCsvRowsOrdered } from '../../../lib/helpers/index.js';\nimport {\n fetchConsentPreferences,\n fetchConsentPreferencesChunked,\n transformPreferenceRecordToCsv,\n type PreferenceIdentifier,\n} from '../../../lib/preference-management/index.js';\nimport { logger } from '../../../logger.js';\n\n// Known “core” columns your transformer usually produces up front.\n// Leave this list conservative; we’ll still union with transformer keys.\nconst CORE_COLS = [\n 'userId',\n 'timestamp',\n 'partition',\n 'decryptionStatus',\n 'updatedAt',\n 'usp',\n 'gpp',\n 'tcf',\n 'airgapVersion',\n 'metadata',\n 'metadataTimestamp',\n];\n\nexport interface PullConsentPreferencesCommandFlags {\n auth: string;\n partition: string;\n sombraAuth?: string;\n file: string;\n transcendUrl: string;\n timestampBefore?: Date;\n exportIdentifiersWithDelimiter: string;\n timestampAfter?: Date;\n updatedBefore?: Date;\n updatedAfter?: Date;\n identifiers?: string[];\n concurrency: number;\n shouldChunk: boolean;\n windowConcurrency: number;\n maxChunks: number;\n maxLookbackDays: number;\n}\n\nexport async function pullConsentPreferences(\n this: LocalContext,\n {\n auth,\n partition,\n sombraAuth,\n file,\n transcendUrl,\n timestampBefore,\n timestampAfter,\n updatedBefore,\n updatedAfter,\n identifiers = [],\n concurrency,\n shouldChunk,\n windowConcurrency,\n maxChunks,\n exportIdentifiersWithDelimiter,\n maxLookbackDays,\n }: PullConsentPreferencesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Identifiers are key:value, parse to PreferenceIdentifier[]\n const parsedIdentifiers = identifiers.map((identifier): PreferenceIdentifier => {\n if (!identifier.includes(':')) {\n return {\n name: 'email',\n value: identifier,\n };\n }\n const [name, value] = identifier.split(':');\n return { name, value };\n });\n\n // Build filter\n const filterBy = {\n ...(timestampBefore ? { timestampBefore: timestampBefore.toISOString() } : {}),\n ...(timestampAfter ? { timestampAfter: timestampAfter.toISOString() } : {}),\n ...(updatedAfter || updatedBefore\n ? {\n system: {\n ...(updatedBefore ? { updatedBefore: updatedBefore.toISOString() } : {}),\n ...(updatedAfter ? { updatedAfter: updatedAfter.toISOString() } : {}),\n },\n }\n : {}),\n ...(parsedIdentifiers.length > 0 ? { identifiers: parsedIdentifiers } : {}),\n };\n\n logger.info(\n `Fetching consent preferences from partition ${partition}, using mode=${\n shouldChunk ? 
'chunked-stream' : 'paged-stream'\n }...`,\n );\n\n logger.info(colors.magenta(`Preparing CSV at: ${file}`));\n\n // Fetch full sets (purposes+topics, identifiers) to ensure header completeness\n const [purposesWithTopics, allIdentifiers] = await Promise.all([\n fetchAllPurposesAndPreferences(client),\n fetchAllIdentifiers(client),\n ]);\n\n // Identifier columns: exactly the identifier names\n const identifierCols = allIdentifiers.map((i) => i.name);\n\n // Preference topic columns: topic names (de-duped)\n const topicCols = Array.from(\n new Set(\n purposesWithTopics.flatMap((p) => p.topics?.map((t) => `${p.trackingType}_${t.slug}`) ?? []),\n ),\n ).sort((a, b) => a.localeCompare(b));\n\n // Some setups also want a per-purpose boolean column (e.g., “Email”, “Sms”).\n // If your transformer includes those, list them here, derived from purposes:\n const purposeCols = Array.from(new Set(purposesWithTopics.map((p) => p.trackingType))).sort(\n (a, b) => a.localeCompare(b),\n );\n\n // Build the complete header skeleton.\n // We’ll still union with the first transformed row’s keys to be safe.\n const completeHeadersList = [...CORE_COLS, ...identifierCols, ...purposeCols, ...topicCols];\n\n // Lazily initialize CSV header order from the first transformed row.\n let headerOrder: string[] | null = null;\n let wroteHeader = false;\n const writeRows = (items: PreferenceQueryResponseItem[]): void => {\n if (!items || items.length === 0) return;\n const rows = items.map((row) =>\n transformPreferenceRecordToCsv(row, exportIdentifiersWithDelimiter),\n );\n if (!wroteHeader) {\n const firstKeys = Object.keys(rows[0] ?? {});\n const seen = new Set<string>();\n headerOrder = [...completeHeadersList, ...firstKeys].filter((k) => {\n if (k === undefined) return false;\n if (seen.has(k)) return false;\n seen.add(k);\n return true;\n });\n initCsvFile(file, headerOrder);\n wroteHeader = true;\n }\n appendCsvRowsOrdered(file, rows, headerOrder!);\n };\n\n if (shouldChunk) {\n // Stream via chunked fetcher with page callback\n await fetchConsentPreferencesChunked(sombra, {\n partition,\n filterBy,\n limit: concurrency,\n windowConcurrency,\n maxChunks,\n maxLookbackDays,\n onItems: (items) => writeRows(items),\n });\n\n logger.info(colors.green(`Finished writing CSV to ${file}`));\n return;\n }\n\n // Non-chunked path: still stream page-by-page via onItems (no in-memory accumulation)\n await fetchConsentPreferences(sombra, {\n partition,\n filterBy,\n limit: concurrency, // page size (API max 50 enforced internally)\n onItems: (items) => writeRows(items),\n });\n\n logger.info(colors.green(`Finished writing CSV to 
${file}`));\n}\n"],"mappings":"irBASA,SAAgB,EACd,CACE,cAAc,EAAE,CAChB,WAAW,EAAE,CACb,WAAW,EAAE,CACb,oBAAoB,EAAE,CACtB,SAAS,CACP,iBAAkB,YACnB,CAED,GAAG,GAEL,EACyB,CAEzB,IAAM,EAA+B,CACnC,GAAG,EACH,GAAG,EACH,GAAG,EACJ,CAGD,GAAI,MAAM,QAAQ,EAAY,CAAE,CAC9B,IAAM,EAAS,IAAI,IACnB,IAAK,GAAM,CAAE,OAAM,WAAW,EACvB,EAAO,IAAI,EAAK,EAAE,EAAO,IAAI,EAAM,IAAI,IAAM,CAC9C,GAAO,EAAO,IAAI,EAAK,CAAE,IAAI,EAAM,CAEzC,IAAK,GAAM,CAAC,EAAM,KAAQ,EAAO,SAAS,CACxC,EAAI,GAAQ,MAAM,KAAK,EAAI,CAAC,KAAK,EAA+B,CAoBpE,GAfI,MAAM,QAAQ,EAAS,GACzB,EAAI,SAAW,KAAK,UAClB,EAAS,QACN,EAAK,CAAE,MAAK,YACX,EAAI,GAAO,EACJ,GAET,EAAE,CACH,CACF,EAMC,MAAM,QAAQ,EAAS,CACzB,KAAK,GAAM,CAAE,UAAS,cAAa,aAAa,EAI9C,GAHA,EAAI,GAAW,EAAQ,EAGnB,MAAM,QAAQ,EAAY,CAC5B,IAAK,GAAM,CAAE,QAAO,YAAY,EAAa,CAC3C,IAAM,EAAM,GAAG,EAAQ,GAAG,IAEtB,EAAe,KAEnB,AASE,EATE,OAAO,EAAO,cAAiB,UAC3B,EAAO,aACJ,EAAO,YACV,EAAO,YACJ,MAAM,QAAQ,EAAO,aAAa,CAChC,EAAO,aAAa,OAAQ,GAAM,EAAE,OAAS,EAAE,CACjD,KAAK,IAAI,CAGZ,KAGR,EAAI,GAAO,GAMnB,OAAO,ECpET,SAAgB,EACd,EACA,EACA,EACA,EAAY,IACmB,CAC/B,IAAM,EAAU,KAAK,IAAI,EAAG,EAAe,SAAS,CAAG,EAAM,SAAS,CAAC,CACvE,GAAI,IAAY,EAAG,MAAO,EAAE,CAI5B,IAAM,EAAc,IAAI,KAAK,KAAK,MAAM,EAAM,SAAS,CAAG,EAAY,CAAG,EAAY,CAG/E,EAAa,KAAK,KAAK,EAAU,KAAK,IAAI,EAAG,EAAU,CAAC,CACxD,EAAU,KAAK,IAAI,EAAa,EAAW,CAG3C,EAAQ,KAAK,MAAM,EAAe,SAAS,CAAG,EAAY,SAAS,EAAI,EAAQ,CAE/E,EAAmC,EAAE,CAE3C,IAAK,IAAI,EAAI,EAAG,EAAI,EAAO,GAAK,EAAG,CACjC,IAAM,EAAU,EAAY,SAAS,CAAG,EAAI,EAItC,EAHiB,KAAK,IAAI,EAAe,SAAS,CAAE,EAAU,EAAQ,CAGpC,EAGlC,EAAY,KAAK,IAAI,EAAS,EAAe,CAE7C,EAAW,IAAI,KAAK,EAAQ,CAAC,aAAa,CAC1C,EAAY,IAAI,KAAK,EAAU,CAAC,aAAa,CAE/C,IAAS,YACX,EAAO,KAAK,CACV,eAAgB,EAChB,gBAAiB,EAClB,CAAC,CAEF,EAAO,KAAK,CACV,OAAQ,CACN,aAAc,EACd,cAAe,EAChB,CACF,CAAC,CAIN,OAAO,EC1DT,SAAgB,EACd,EACA,EACM,CAKN,OAJI,IAAS,YACJ,IAAI,KAAK,EAAK,UAAU,CAG1B,EAAK,QAAQ,UAAY,IAAI,KAAK,EAAK,OAAO,UAAU,CAAG,IAAI,KCDxE,eAAuB,EACrB,EACA,EACA,EACA,EAC2D,CAC3D,IAAI,EAEJ,OAAa,CAEX,IAAM,EAAY,CAAE,MAAO,EAAU,CACjC,GAAU,OAAO,KAAK,EAAO,CAAC,SAAQ,EAAK,OAAS,GACpD,IAAQ,EAAK,OAAS,GAqB1B,GAAM,CAAE,QAAO,OAAQ,GAAe,EAAY,EAnBrC,MAAM,EACjB,uBAEE,EACG,KAAK,kBAAkB,EAAU,QAAS,CACzC,KAAM,EACP,CAAC,CACD,MAAM,CACX,CACE,SAAU,EAAS,EAAQ,IAAY,CACrC,EAAO,KACL,EAAO,OACL,iBAAiB,EAAQ,yCAAyC,IACnE,CACF,EAEJ,CACF,CAEiF,CAKlF,GAJI,CAAC,GAAO,SAEZ,MAAM,EAEF,CAAC,GAAY,MACjB,EAAS,GCjDb,SAAgB,EAAqB,EAA6C,CAEhF,OADuB,EAAS,gBAAoB,EAAS,gBACvC,YAAc,UCStC,SAAgB,EACd,EACA,EAMA,CACA,GAAI,IAAS,YACX,MAAO,CACL,MAAO,EAAS,eAAiB,IAAI,KAAK,EAAS,eAAe,CAAG,IAAA,GACrE,OAAQ,EAAS,gBAAkB,IAAI,KAAK,EAAS,gBAAgB,CAAG,IAAA,GACzE,CAEH,IAAM,EAAI,EAAS,QAAU,EAAE,CAC/B,MAAO,CACL,MAAO,EAAE,aAAe,IAAI,KAAK,EAAE,aAAa,CAAG,IAAA,GACnD,OAAQ,EAAE,cAAgB,IAAI,KAAK,EAAE,cAAc,CAAG,IAAA,GACvD,CAWH,SAAS,EACP,EACA,EACA,EACwB,CAOxB,OANI,IAAS,YACJ,CACL,GAAG,EACH,gBAAiB,GAAa,EAAK,gBACpC,CAEI,CACL,GAAG,EACH,OAAQ,CACN,GAAI,EAAK,QAAU,EAAE,CACrB,GAAI,EAAY,CAAE,cAAe,EAAW,CAAG,EAAE,CAClD,CAED,eAAgB,IAAA,GAChB,gBAAiB,IAAA,GAClB,CAWH,eAAe,EACb,EACA,EACA,EAC6C,CAC7C,EAAO,KAAK,EAAO,QAAQ,oCAAoC,KAAK,UAAU,EAAO,GAAG,CAAC,CAEzF,IAAM,EAAM,MADD,EAAoB,EAAQ,EAAW,EAAuB,EAAE,CACtD,MAAM,CAC3B,GAAI,EAAI,MAAQ,CAAC,EAAI,OAAS,EAAI,MAAM,SAAW,EAEjD,OADA,EAAO,KAAK,EAAO,OAAO,0BAA0B,CAAC,CAC9C,KAET,IAAM,EAAO,EAAI,MAAM,GASvB,OARA,EAAO,KACL,EAAO,MACL,iCAAiC,EAC/B,EAAqB,EAAO,CAC5B,EACD,CAAC,aAAa,GAChB,CACF,CACM,EAkBT,eAAsB,EACpB,EACA,EAUe,CACf,GAAM,CAAE,YAAW,OAAM,aAAY,kBAAkB,MAAS,EAG1D,EAAS,MAAM,EAAS,EAAQ,EAAW,EAAgB,EAAM,EAAW,CAAC,CACnF,GAAI,CAAC,EAEH,OADA,EAAO,KAAK,EAAO,OAAO,sDAAsD,CAAC,CAC1E,EAAc,IAAI,KAAO,CAElC,IAAM,EAAgB,EAA2B,EAAM,EAAO,CAC9D,EAAO,KAAK,EAAO,MAAM,mBAAmB,EAAc,aAAa,GAAG,CAAC,CAG3E,IAAM,EAAY,CAAC,EAAG,EAAG,GAAG,CACxB,EAAc,EACd,EAAS,EAA
U,GAAK,EAExB,EAAmB,EACnB,EAAkC,KAGtC,OAAa,CACX,IAAM,EACJ,EAAc,EAAU,OACpB,IAAI,KAAK,EAAc,SAAS,CAAG,EAAU,GAAe,EAAO,CACnE,IAAI,KAAK,EAAc,SAAS,CAAG,EAAO,CAKhD,IADG,EAAc,IAAI,KAAO,CAAC,SAAS,CAAG,EAAc,EAAW,CAAC,SAAS,EAAA,MAC5D,EAAiB,CAC/B,EAAO,KACL,EAAO,OACL,6CAA6C,EAAgB,yBAC9D,CACF,CACD,EAAqB,EACrB,MAGF,EAAO,KACL,EAAO,QACL,kBAAkB,EAAW,aAAa,CAAC,cACzC,EAAc,EAAU,OACpB,GAAG,EAAU,GAAa,GAC1B,GAAG,KAAK,MAAM,EAAS,EAAO,CAAC,GACpC,IACF,CACF,CAED,IAAM,EAAM,MAAM,EAChB,EACA,EACA,EAAgB,EAAM,EAAY,EAAW,aAAa,CAAC,CAC5D,CAED,GAAI,EAAK,CACP,EAAmB,EAA2B,EAAM,EAAI,CACxD,EAAO,KACL,EAAO,MACL,yBAAyB,EAAiB,aAAa,CAAC,2BACzD,CACF,CAEG,EAAc,EAAU,OAAS,GACnC,GAAe,EACf,EAAS,EAAU,GAAe,GACzB,IAAgB,EAAU,OAAS,GAC5C,GAAe,EACf,EAAS,EAAU,EAAU,OAAS,GAAK,EAAI,GAE/C,GAAU,EAGZ,SAIF,EAAqB,EACrB,EAAO,KACL,EAAO,MAAM,oBAAoB,EAAW,aAAa,CAAC,mCAAmC,CAC9F,CACD,MAIF,AACE,IAAqB,IAAI,KAAK,EAAiB,SAAS,CAAG,EAAO,CAOpE,IAAI,EAAK,EACL,EAAK,EACL,EAAU,KAAK,IAAI,EAAQ,KAAK,OAAO,EAAG,SAAS,CAAG,EAAG,SAAS,EAAI,GAAG,CAAC,CAC9E,EAAO,KACL,EAAO,QACL,+CAA+C,EAAG,aAAa,CAAC,SAAS,EAAG,aAAa,CAAC,QAAQ,KAAK,MACrG,EAAU,EACX,CAAC,GACH,CACF,CAGD,IAAK,IAAI,EAAI,EAAG,EAAI,EAAG,GAAK,EAAG,CAC7B,IAAM,EAAQ,IAAI,KAAK,EAAG,SAAS,CAAG,EAAQ,CAC9C,GAAI,EAAM,SAAS,EAAI,EAAG,SAAS,CAAE,MAErC,EAAO,KAAK,EAAO,QAAQ,+BAA+B,EAAM,aAAa,CAAC,GAAG,CAAC,CAClF,IAAM,EAAM,MAAM,EAChB,EACA,EACA,EAAgB,EAAM,EAAY,EAAM,aAAa,CAAC,CACvD,CAuBD,GArBI,GAEF,EAAK,EAA2B,EAAM,EAAI,CAC1C,EAAO,KACL,EAAO,MACL,iBAAiB,EAAG,aAAa,CAAC,8CACnC,CACF,CACD,EAAU,KAAK,IAAI,EAAQ,KAAK,MAAM,EAAU,EAAE,CAAC,GAGnD,EAAG,QAAQ,EAAM,SAAS,CAAC,CAC3B,EAAO,KACL,EAAO,OACL,0CAA0C,EAAG,aAAa,CAAC,sBAC5D,CACF,CACD,EAAU,KAAK,IAAI,EAAG,SAAS,CAAG,EAAG,SAAS,CAAE,EAAU,EAAE,CACxD,EAAA,QAAkB,EAAU,IAG9B,EAAG,SAAS,CAAG,EAAG,SAAS,EAAA,MAAY,MAI7C,KAAO,EAAG,SAAS,CAAG,EAAG,SAAS,CAAG,GAAQ,CAC3C,IAAM,EAAM,IAAI,KAAK,EAAG,SAAS,CAAG,KAAK,OAAO,EAAG,SAAS,CAAG,EAAG,SAAS,EAAI,EAAE,CAAC,CAClF,EAAO,KAAK,EAAO,QAAQ,uBAAuB,EAAI,aAAa,CAAC,GAAG,CAAC,CAExE,IAAM,EAAM,MAAM,EAChB,EACA,EACA,EAAgB,EAAM,EAAY,EAAI,aAAa,CAAC,CACrD,CAED,GAAI,EAAK,CACP,IAAM,EAAO,EAA2B,EAAM,EAAI,CAClD,EAAO,KAAK,EAAO,MAAM,gCAAgC,EAAK,aAAa,CAAC,GAAG,CAAC,CAChF,EAAK,OAEL,EAAO,KAAK,EAAO,OAAO,gCAAgC,CAAC,CAC3D,EAAK,EAIT,IAAM,EAAc,EAAc,EAAG,CAMrC,OALA,EAAO,KACL,EAAO,MACL,kCAAkC,EAAY,aAAa,CAAC,cAAc,EAAG,aAAa,CAAC,IAC5F,CACF,CACM,EAYT,eAAsB,EACpB,EACA,EAUe,CACf,GAAM,CAAE,YAAW,OAAM,cAAe,EAExC,EAAO,KAAK,EAAO,QAAQ,+CAA+C,CAAC,CAC3E,IAAM,EAAS,MAAM,EAAS,EAAQ,EAAW,EAAgB,EAAM,EAAW,CAAC,CACnF,GAAI,CAAC,EAEH,OADA,EAAO,KAAK,EAAO,OAAO,2DAA2D,CAAC,CAC/E,EAAc,IAAI,KAAO,CAGlC,IAAM,EAAO,EAA2B,EAAM,EAAO,CACrD,EAAO,KAAK,EAAO,MAAM,4BAA4B,EAAK,aAAa,CAAC,GAAG,CAAC,CAE5E,IAAM,EAAY,EAAc,EAAK,CAOrC,OANA,EAAO,KACL,EAAO,MACL,gCAAgC,EAAU,aAAa,CAAC,gBAAgB,EAAK,aAAa,CAAC,GAC5F,CACF,CAEM,EC7TT,SAAS,EACP,EACA,EACA,EACwB,CAWxB,OAVI,IAAS,YACJ,CACL,GAAG,EACH,eAAgB,EAAO,gBAAkB,EAAK,eAC9C,gBAAiB,EAAO,iBAAmB,EAAK,gBAEhD,OAAQ,IAAA,GACT,CAGI,CACL,GAAG,EACH,OAAQ,CACN,GAAI,EAAK,QAAU,EAAE,CACrB,GAAI,EAAO,QAAQ,aAAe,CAAE,aAAc,EAAO,OAAO,aAAc,CAAG,EAAE,CACnF,GAAI,EAAO,QAAQ,cAAgB,CAAE,cAAe,EAAO,OAAO,cAAe,CAAG,EAAE,CACvF,CAED,eAAgB,IAAA,GAChB,gBAAiB,IAAA,GAClB,CAcH,eAAsB,EACpB,EACA,CACE,YACA,WAAW,EAAE,CACb,QAAQ,GACR,oBAAoB,GACpB,YAAY,IACZ,kBAAkB,KAClB,WAkBsC,CACxC,IAAM,EAAkB,EAAqB,EAAS,CACtD,EAAO,KACL,EAAO,QACL,6CACE,IAAS,YAAc,YAAc,mBACtC,KACF,CACF,CAGD,GAAI,CAAE,QAAO,UAAW,EAA2B,EAAM,EAAS,CASlE,GARA,EAAO,KACL,EAAO,QACL,yBAAyB,GAAO,aAAa,EAAI,YAAY,UAC3D,GAAQ,aAAa,EAAI,cAE5B,CACF,EAEG,CAAC,GAAS,CAAC,KACR,IACH,EAAO,KACL,EAAO,QAAQ,oDAAoD,EAAU,KAAK,CACnF,CACD,EAAQ,MAAM,EAAwB,EAAQ,CAC5C,YACA,OACA,WAAY,EACZ,kBACD,CAAC,CACF,EAAO,KAAK,EAAO,MAAM,sCAAsC,EAAM,aAAa,GAAG,C
AAC,EAEpF,CAAC,GAAQ,CACX,EAAO,KAAK,EAAO,QAAQ,kDAAkD,EAAU,KAAK,CAAC,CAC7F,IAAM,EAAY,MAAM,EAAsB,EAAQ,CACpD,YACA,OACA,WAAY,EACZ,SAAU,EACX,CAAC,CAEF,EAAS,EAAW,EAAW,EAAE,CACjC,EAAO,KAAK,EAAO,MAAM,oCAAoC,EAAU,aAAa,GAAG,CAAC,CAI5F,EAAO,KACL,EAAO,MAAM,6BAA6B,EAAM,aAAa,CAAC,UAAU,EAAO,aAAa,GAAG,CAChG,CAGD,IAAM,EAAS,EAAmB,EAAM,EAAO,EAAQ,EAAU,CAEjE,EAAO,KACL,EAAO,QACL,+CAA+C,EAAU,MAAM,EAAO,OAAO,YAC9E,CACF,CAKD,IAAM,EAAM,IAAI,EAAY,UAC1B,CACE,OAAQ,iFACT,CACD,EAAY,QAAQ,eACrB,CAEG,EAAY,EACZ,EAAU,EAEd,EAAI,MAAM,EAAO,OAAQ,EAAG,CAAE,UAAS,CAAC,CAExC,IAAM,EAAK,KAAK,KAAK,CACf,EAAW,EAAc,EAAM,CAG/B,EAAqC,EAAE,CAoC7C,OAlCA,MAAMA,EACJ,EAAO,KAAK,EAAc,KAAS,CAAE,eAAc,MAAK,EAAE,CAC1D,MAAO,CAAE,kBAAmB,CAC1B,IAAM,EAAS,EAAY,EAAM,EAAU,EAAa,CAGxD,UAAW,IAAM,KAAQ,EAAoB,EAAQ,EAAW,EAAQ,EAAS,CAC/E,GAAW,EAAK,OAChB,EAAI,OAAO,EAAW,CAAE,UAAS,CAAC,CAE9B,EACF,MAAM,EAAQ,EAAK,CAEnB,EAAI,KAAK,GAAG,EAAK,CAIrB,GAAa,EACb,EAAI,OAAO,EAAW,CAAE,UAAS,CAAC,EAEpC,CAAE,YAAa,KAAK,IAAI,EAAG,EAAkB,CAAE,CAChD,CAED,EAAI,OAAO,EAAW,CAAE,UAAS,CAAC,CAClC,EAAI,MAAM,CAEV,EAAO,KACL,EAAO,MACL,WAAW,EAAQ,6CAA6C,EAAU,OACvE,KAAK,KAAK,CAAG,GAAM,IACrB,IACF,CACF,CAEM,EAAU,EAAE,CAAG,EC1LxB,eAAsB,EACpB,EACA,CACE,YACA,WAAW,EAAE,CACb,QAAQ,GACR,WAWsC,CACxC,IAAM,EAA2C,EAAE,CAG/C,EAGE,EACJ,IACC,OAAO,KAAK,EAAS,CAAC,OAAS,GAC7B,EAAS,QAAU,OAAO,KAAK,EAAS,OAAO,CAAC,OAAS,GAGxD,EAAW,KAAK,IAAI,EAAG,KAAK,IAAI,GAAI,GAAS,GAAG,CAAC,CAKvD,OAAa,CACX,IAAM,EAOF,CAAE,MAAO,EAAU,CAEnB,IACF,EAAK,OAAS,GAEZ,IACF,EAAK,OAAS,GAsBhB,GAAM,CAAE,QAAO,OAAQ,GAAe,EAAY,EAnBjC,MAAM,EACrB,uBAEE,EACG,KAAK,kBAAkB,EAAU,QAAS,CACzC,KAAM,EACP,CAAC,CACD,MAAM,CACX,CACE,SAAU,EAAS,EAAQ,IAAY,CACrC,EAAO,KACL,EAAO,OACL,iBAAiB,EAAQ,6CAA6C,IACvE,CACF,EAEJ,CACF,CAEqF,CAYtF,GAVI,CAAC,GAAS,EAAM,SAAW,IAI3B,EACF,MAAM,EAAQ,EAAM,CAEpB,EAAU,KAAK,GAAG,EAAM,CAGtB,CAAC,GACH,MAEF,EAAS,EAGX,OAAO,EAAU,EAAE,CAAG,ECxFxB,MAAM,EAAY,CAChB,SACA,YACA,YACA,mBACA,YACA,MACA,MACA,MACA,gBACA,WACA,oBACD,CAqBD,eAAsB,EAEpB,CACE,OACA,YACA,aACA,OACA,eACA,kBACA,iBACA,gBACA,eACA,cAAc,EAAE,CAChB,cACA,cACA,oBACA,YACA,iCACA,mBAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAAS,MAAM,EAAwB,EAAc,EAAM,EAAW,CACtE,EAAS,EAA4B,EAAc,EAAK,CAGxD,EAAoB,EAAY,IAAK,GAAqC,CAC9E,GAAI,CAAC,EAAW,SAAS,IAAI,CAC3B,MAAO,CACL,KAAM,QACN,MAAO,EACR,CAEH,GAAM,CAAC,EAAM,GAAS,EAAW,MAAM,IAAI,CAC3C,MAAO,CAAE,OAAM,QAAO,EACtB,CAGI,EAAW,CACf,GAAI,EAAkB,CAAE,gBAAiB,EAAgB,aAAa,CAAE,CAAG,EAAE,CAC7E,GAAI,EAAiB,CAAE,eAAgB,EAAe,aAAa,CAAE,CAAG,EAAE,CAC1E,GAAI,GAAgB,EAChB,CACE,OAAQ,CACN,GAAI,EAAgB,CAAE,cAAe,EAAc,aAAa,CAAE,CAAG,EAAE,CACvE,GAAI,EAAe,CAAE,aAAc,EAAa,aAAa,CAAE,CAAG,EAAE,CACrE,CACF,CACD,EAAE,CACN,GAAI,EAAkB,OAAS,EAAI,CAAE,YAAa,EAAmB,CAAG,EAAE,CAC3E,CAED,EAAO,KACL,+CAA+C,EAAU,eACvD,EAAc,iBAAmB,eAClC,KACF,CAED,EAAO,KAAK,EAAO,QAAQ,qBAAqB,IAAO,CAAC,CAGxD,GAAM,CAAC,EAAoB,GAAkB,MAAM,QAAQ,IAAI,CAC7D,EAA+B,EAAO,CACtC,EAAoB,EAAO,CAC5B,CAAC,CAGI,EAAiB,EAAe,IAAK,GAAM,EAAE,KAAK,CAGlD,EAAY,MAAM,KACtB,IAAI,IACF,EAAmB,QAAS,GAAM,EAAE,QAAQ,IAAK,GAAM,GAAG,EAAE,aAAa,GAAG,EAAE,OAAO,EAAI,EAAE,CAAC,CAC7F,CACF,CAAC,MAAM,EAAG,IAAM,EAAE,cAAc,EAAE,CAAC,CAI9B,EAAc,MAAM,KAAK,IAAI,IAAI,EAAmB,IAAK,GAAM,EAAE,aAAa,CAAC,CAAC,CAAC,MACpF,EAAG,IAAM,EAAE,cAAc,EAAE,CAC7B,CAIK,EAAsB,CAAC,GAAG,EAAW,GAAG,EAAgB,GAAG,EAAa,GAAG,EAAU,CAGvF,EAA+B,KAC/B,EAAc,GACZ,EAAa,GAA+C,CAChE,GAAI,CAAC,GAAS,EAAM,SAAW,EAAG,OAClC,IAAM,EAAO,EAAM,IAAK,GACtB,EAA+B,EAAK,EAA+B,CACpE,CACD,GAAI,CAAC,EAAa,CAChB,IAAM,EAAY,OAAO,KAAK,EAAK,IAAM,EAAE,CAAC,CACtC,EAAO,IAAI,IACjB,EAAc,CAAC,GAAG,EAAqB,GAAG,EAAU,CAAC,OAAQ,GACvD,IAAM,IAAA,IACN,EAAK,IAAI,EAAE,CAAS,IACxB,EAAK,IAAI,EAAE,CACJ,IACP,CACF,EAAY,EAAM,EAAY,CAC9B,EAAc,GAEhB,EAAqB,EAAM,EAAM,EAAa,EAGhD,GAAI,EAAa,CAEf,MAAM,E
AA+B,EAAQ,CAC3C,YACA,WACA,MAAO,EACP,oBACA,YACA,kBACA,QAAU,GAAU,EAAU,EAAM,CACrC,CAAC,CAEF,EAAO,KAAK,EAAO,MAAM,2BAA2B,IAAO,CAAC,CAC5D,OAIF,MAAM,EAAwB,EAAQ,CACpC,YACA,WACA,MAAO,EACP,QAAU,GAAU,EAAU,EAAM,CACrC,CAAC,CAEF,EAAO,KAAK,EAAO,MAAM,2BAA2B,IAAO,CAAC"}
@@ -1,2 +1,2 @@
- import{t as e}from"./logger-B-LXIf3U.mjs";import{t}from"./pullUnstructuredSubDataPointRecommendations-C4aVhH-W.mjs";import{r as n}from"./makeGraphQLRequest-G078PsEL.mjs";import{s as r}from"./writeCsv-B51ulrVl.mjs";import{t as i}from"./done-input-validation-DLR0-MJ7.mjs";import{uniq as a}from"lodash-es";import o from"colors";async function s({auth:s,file:c,transcendUrl:l,dataSiloIds:u,subCategories:d,status:f,includeEncryptedSnippets:p}){i(this.process.exit);try{let i=await t(n(l,s),{dataSiloIds:u,subCategories:d,status:f,includeEncryptedSnippets:p});e.info(o.magenta(`Writing unstructured discovery files to file "${c}"...`));let m=[];await r(c,i.map(e=>{let t={"Entry ID":e.id,"Data Silo ID":e.dataSiloId,"Object Path ID":e.scannedObjectPathId,"Object ID":e.scannedObjectId,...p?{Entry:e.name,"Context Snippet":e.contextSnippet}:{},"Data Category":`${e.dataSubCategory.category}:${e.dataSubCategory.name}`,"Classification Status":e.status,"Confidence Score":e.confidence,"Classification Method":e.classificationMethod,"Classifier Version":e.classifierVersion};return m=a([...m,...Object.keys(t)]),t}),m)}catch(t){e.error(o.red(`An error occurred syncing the unstructured discovery files: ${t.message}`)),this.process.exit(1)}e.info(o.green(`Successfully synced unstructured discovery files to disk at ${c}!`))}export{s as pullUnstructuredDiscoveryFiles};
- //# sourceMappingURL=impl-DcC8_dCy.mjs.map
+ import{t as e}from"./logger-B-LXIf3U.mjs";import{t}from"./pullUnstructuredSubDataPointRecommendations-DZd2q6S2.mjs";import{r as n}from"./makeGraphQLRequest-Cq26A_Lq.mjs";import{s as r}from"./writeCsv-B51ulrVl.mjs";import{t as i}from"./done-input-validation-DLR0-MJ7.mjs";import{uniq as a}from"lodash-es";import o from"colors";async function s({auth:s,file:c,transcendUrl:l,dataSiloIds:u,subCategories:d,status:f,includeEncryptedSnippets:p}){i(this.process.exit);try{let i=await t(n(l,s),{dataSiloIds:u,subCategories:d,status:f,includeEncryptedSnippets:p});e.info(o.magenta(`Writing unstructured discovery files to file "${c}"...`));let m=[];await r(c,i.map(e=>{let t={"Entry ID":e.id,"Data Silo ID":e.dataSiloId,"Object Path ID":e.scannedObjectPathId,"Object ID":e.scannedObjectId,...p?{Entry:e.name,"Context Snippet":e.contextSnippet}:{},"Data Category":`${e.dataSubCategory.category}:${e.dataSubCategory.name}`,"Classification Status":e.status,"Confidence Score":e.confidence,"Classification Method":e.classificationMethod,"Classifier Version":e.classifierVersion};return m=a([...m,...Object.keys(t)]),t}),m)}catch(t){e.error(o.red(`An error occurred syncing the unstructured discovery files: ${t.message}`)),this.process.exit(1)}e.info(o.green(`Successfully synced unstructured discovery files to disk at ${c}!`))}export{s as pullUnstructuredDiscoveryFiles};
+ //# sourceMappingURL=impl-DxUFb0vv.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-DcC8_dCy.mjs","names":[],"sources":["../src/commands/inventory/pull-unstructured-discovery-files/impl.ts"],"sourcesContent":["import type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { uniq } from 'lodash-es';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { pullUnstructuredSubDataPointRecommendations } from '../../../lib/data-inventory/index.js';\nimport { buildTranscendGraphQLClient } from '../../../lib/graphql/index.js';\nimport { writeLargeCsv } from '../../../lib/helpers/index.js';\nimport { logger } from '../../../logger.js';\n\nexport interface PullUnstructuredDiscoveryFilesCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n subCategories?: string[];\n status?: UnstructuredSubDataPointRecommendationStatus[];\n includeEncryptedSnippets: boolean;\n}\n\nexport async function pullUnstructuredDiscoveryFiles(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n dataSiloIds,\n subCategories,\n status,\n includeEncryptedSnippets,\n }: PullUnstructuredDiscoveryFilesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const entries = await pullUnstructuredSubDataPointRecommendations(client, {\n dataSiloIds,\n subCategories, // TODO: https://transcend.height.app/T-40482 - do by name not ID\n status,\n includeEncryptedSnippets,\n });\n\n logger.info(colors.magenta(`Writing unstructured discovery files to file \"${file}\"...`));\n let headers: string[] = [];\n const inputs = entries.map((entry) => {\n const result = {\n 'Entry ID': entry.id,\n 'Data Silo ID': entry.dataSiloId,\n 'Object Path ID': entry.scannedObjectPathId,\n 'Object ID': entry.scannedObjectId,\n ...(includeEncryptedSnippets\n ? 
{ Entry: entry.name, 'Context Snippet': entry.contextSnippet }\n : {}),\n 'Data Category': `${entry.dataSubCategory.category}:${entry.dataSubCategory.name}`,\n 'Classification Status': entry.status,\n 'Confidence Score': entry.confidence,\n 'Classification Method': entry.classificationMethod,\n 'Classifier Version': entry.classifierVersion,\n };\n headers = uniq([...headers, ...Object.keys(result)]);\n return result;\n });\n await writeLargeCsv(file, inputs, headers);\n } catch (err) {\n logger.error(\n colors.red(`An error occurred syncing the unstructured discovery files: ${err.message}`),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(colors.green(`Successfully synced unstructured discovery files to disk at ${file}!`));\n}\n"],"mappings":"sUAqBA,eAAsB,EAEpB,CACE,OACA,OACA,eACA,cACA,gBACA,SACA,4BAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAI,CAIF,IAAM,EAAU,MAAM,EAFP,EAA4B,EAAc,EAAK,CAEY,CACxE,cACA,gBACA,SACA,2BACD,CAAC,CAEF,EAAO,KAAK,EAAO,QAAQ,iDAAiD,EAAK,MAAM,CAAC,CACxF,IAAI,EAAoB,EAAE,CAmB1B,MAAM,EAAc,EAlBL,EAAQ,IAAK,GAAU,CACpC,IAAM,EAAS,CACb,WAAY,EAAM,GAClB,eAAgB,EAAM,WACtB,iBAAkB,EAAM,oBACxB,YAAa,EAAM,gBACnB,GAAI,EACA,CAAE,MAAO,EAAM,KAAM,kBAAmB,EAAM,eAAgB,CAC9D,EAAE,CACN,gBAAiB,GAAG,EAAM,gBAAgB,SAAS,GAAG,EAAM,gBAAgB,OAC5E,wBAAyB,EAAM,OAC/B,mBAAoB,EAAM,WAC1B,wBAAyB,EAAM,qBAC/B,qBAAsB,EAAM,kBAC7B,CAED,MADA,GAAU,EAAK,CAAC,GAAG,EAAS,GAAG,OAAO,KAAK,EAAO,CAAC,CAAC,CAC7C,GACP,CACgC,EAAQ,OACnC,EAAK,CACZ,EAAO,MACL,EAAO,IAAI,+DAA+D,EAAI,UAAU,CACzF,CACD,KAAK,QAAQ,KAAK,EAAE,CAItB,EAAO,KAAK,EAAO,MAAM,+DAA+D,EAAK,GAAG,CAAC"}
+ {"version":3,"file":"impl-DxUFb0vv.mjs","names":[],"sources":["../src/commands/inventory/pull-unstructured-discovery-files/impl.ts"],"sourcesContent":["import type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { uniq } from 'lodash-es';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { pullUnstructuredSubDataPointRecommendations } from '../../../lib/data-inventory/index.js';\nimport { buildTranscendGraphQLClient } from '../../../lib/graphql/index.js';\nimport { writeLargeCsv } from '../../../lib/helpers/index.js';\nimport { logger } from '../../../logger.js';\n\nexport interface PullUnstructuredDiscoveryFilesCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n subCategories?: string[];\n status?: UnstructuredSubDataPointRecommendationStatus[];\n includeEncryptedSnippets: boolean;\n}\n\nexport async function pullUnstructuredDiscoveryFiles(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n dataSiloIds,\n subCategories,\n status,\n includeEncryptedSnippets,\n }: PullUnstructuredDiscoveryFilesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const entries = await pullUnstructuredSubDataPointRecommendations(client, {\n dataSiloIds,\n subCategories, // TODO: https://transcend.height.app/T-40482 - do by name not ID\n status,\n includeEncryptedSnippets,\n });\n\n logger.info(colors.magenta(`Writing unstructured discovery files to file \"${file}\"...`));\n let headers: string[] = [];\n const inputs = entries.map((entry) => {\n const result = {\n 'Entry ID': entry.id,\n 'Data Silo ID': entry.dataSiloId,\n 'Object Path ID': entry.scannedObjectPathId,\n 'Object ID': entry.scannedObjectId,\n ...(includeEncryptedSnippets\n ? 
{ Entry: entry.name, 'Context Snippet': entry.contextSnippet }\n : {}),\n 'Data Category': `${entry.dataSubCategory.category}:${entry.dataSubCategory.name}`,\n 'Classification Status': entry.status,\n 'Confidence Score': entry.confidence,\n 'Classification Method': entry.classificationMethod,\n 'Classifier Version': entry.classifierVersion,\n };\n headers = uniq([...headers, ...Object.keys(result)]);\n return result;\n });\n await writeLargeCsv(file, inputs, headers);\n } catch (err) {\n logger.error(\n colors.red(`An error occurred syncing the unstructured discovery files: ${err.message}`),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(colors.green(`Successfully synced unstructured discovery files to disk at ${file}!`));\n}\n"],"mappings":"sUAqBA,eAAsB,EAEpB,CACE,OACA,OACA,eACA,cACA,gBACA,SACA,4BAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAI,CAIF,IAAM,EAAU,MAAM,EAFP,EAA4B,EAAc,EAAK,CAEY,CACxE,cACA,gBACA,SACA,2BACD,CAAC,CAEF,EAAO,KAAK,EAAO,QAAQ,iDAAiD,EAAK,MAAM,CAAC,CACxF,IAAI,EAAoB,EAAE,CAmB1B,MAAM,EAAc,EAlBL,EAAQ,IAAK,GAAU,CACpC,IAAM,EAAS,CACb,WAAY,EAAM,GAClB,eAAgB,EAAM,WACtB,iBAAkB,EAAM,oBACxB,YAAa,EAAM,gBACnB,GAAI,EACA,CAAE,MAAO,EAAM,KAAM,kBAAmB,EAAM,eAAgB,CAC9D,EAAE,CACN,gBAAiB,GAAG,EAAM,gBAAgB,SAAS,GAAG,EAAM,gBAAgB,OAC5E,wBAAyB,EAAM,OAC/B,mBAAoB,EAAM,WAC1B,wBAAyB,EAAM,qBAC/B,qBAAsB,EAAM,kBAC7B,CAED,MADA,GAAU,EAAK,CAAC,GAAG,EAAS,GAAG,OAAO,KAAK,EAAO,CAAC,CAAC,CAC7C,GACP,CACgC,EAAQ,OACnC,EAAK,CACZ,EAAO,MACL,EAAO,IAAI,+DAA+D,EAAI,UAAU,CACzF,CACD,KAAK,QAAQ,KAAK,EAAE,CAItB,EAAO,KAAK,EAAO,MAAM,+DAA+D,EAAK,GAAG,CAAC"}
@@ -1,2 +1,2 @@
- import{t as e}from"./bulkRestartRequests-DEPSHov-.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({auth:n,requestReceiptFolder:r,sombraAuth:i,actions:a,statuses:o,requestIds:s,createdAt:c,emailIsVerified:l,silentModeBefore:u,sendEmailReceipt:d,copyIdentifiers:f,skipWaitingPeriod:p,createdAtBefore:m,createdAtAfter:h,updatedAtBefore:g,updatedAtAfter:_,concurrency:v,transcendUrl:y}){t(this.process.exit),await e({requestReceiptFolder:r,auth:n,sombraAuth:i,requestActions:a,requestStatuses:o,requestIds:s,createdAt:c,emailIsVerified:l,silentModeBefore:u,sendEmailReceipt:d,copyIdentifiers:f,skipWaitingPeriod:p,createdAtBefore:m,createdAtAfter:h,updatedAtBefore:g,updatedAtAfter:_,concurrency:v,transcendUrl:y})}export{n as restart};
- //# sourceMappingURL=impl-y1I9Muyc2.mjs.map
+ import{t as e}from"./bulkRestartRequests-CKF_xpN0.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({auth:n,requestReceiptFolder:r,sombraAuth:i,actions:a,statuses:o,requestIds:s,createdAt:c,emailIsVerified:l,silentModeBefore:u,sendEmailReceipt:d,copyIdentifiers:f,skipWaitingPeriod:p,createdAtBefore:m,createdAtAfter:h,updatedAtBefore:g,updatedAtAfter:_,concurrency:v,transcendUrl:y}){t(this.process.exit),await e({requestReceiptFolder:r,auth:n,sombraAuth:i,requestActions:a,requestStatuses:o,requestIds:s,createdAt:c,emailIsVerified:l,silentModeBefore:u,sendEmailReceipt:d,copyIdentifiers:f,skipWaitingPeriod:p,createdAtBefore:m,createdAtAfter:h,updatedAtBefore:g,updatedAtAfter:_,concurrency:v,transcendUrl:y})}export{n as restart};
+ //# sourceMappingURL=impl-JThkrXiI2.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-y1I9Muyc2.mjs","names":[],"sources":["../src/commands/request/restart/impl.ts"],"sourcesContent":["import type { RequestAction, RequestStatus } from '@transcend-io/privacy-types';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { bulkRestartRequests } from '../../../lib/requests/index.js';\n\nexport interface RestartCommandFlags {\n auth: string;\n actions: RequestAction[];\n statuses: RequestStatus[];\n transcendUrl: string;\n requestReceiptFolder: string;\n sombraAuth?: string;\n concurrency: number;\n requestIds?: string[];\n emailIsVerified: boolean;\n createdAt?: Date;\n silentModeBefore?: Date;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n updatedAtBefore?: Date;\n updatedAtAfter?: Date;\n sendEmailReceipt: boolean;\n copyIdentifiers: boolean;\n skipWaitingPeriod: boolean;\n}\n\nexport async function restart(\n this: LocalContext,\n {\n auth,\n requestReceiptFolder,\n sombraAuth,\n actions,\n statuses,\n requestIds,\n createdAt,\n emailIsVerified,\n silentModeBefore,\n sendEmailReceipt,\n copyIdentifiers,\n skipWaitingPeriod,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n concurrency,\n transcendUrl,\n }: RestartCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await bulkRestartRequests({\n requestReceiptFolder,\n auth,\n sombraAuth,\n requestActions: actions,\n requestStatuses: statuses,\n requestIds,\n createdAt,\n emailIsVerified,\n silentModeBefore,\n sendEmailReceipt,\n copyIdentifiers,\n skipWaitingPeriod,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n concurrency,\n transcendUrl,\n });\n}\n"],"mappings":"2GA2BA,eAAsB,EAEpB,CACE,OACA,uBACA,aACA,UACA,WACA,aACA,YACA,kBACA,mBACA,mBACA,kBACA,oBACA,kBACA,iBACA,kBACA,iBACA,cACA,gBAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAM,EAAoB,CACxB,uBACA,OACA,aACA,eAAgB,EAChB,gBAAiB,EACjB,aACA,YACA,kBACA,mBACA,mBACA,kBACA,oBACA,kBACA,iBACA,kBACA,iBACA,cACA,eACD,CAAC"}
+ {"version":3,"file":"impl-JThkrXiI2.mjs","names":[],"sources":["../src/commands/request/restart/impl.ts"],"sourcesContent":["import type { RequestAction, RequestStatus } from '@transcend-io/privacy-types';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { bulkRestartRequests } from '../../../lib/requests/index.js';\n\nexport interface RestartCommandFlags {\n auth: string;\n actions: RequestAction[];\n statuses: RequestStatus[];\n transcendUrl: string;\n requestReceiptFolder: string;\n sombraAuth?: string;\n concurrency: number;\n requestIds?: string[];\n emailIsVerified: boolean;\n createdAt?: Date;\n silentModeBefore?: Date;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n updatedAtBefore?: Date;\n updatedAtAfter?: Date;\n sendEmailReceipt: boolean;\n copyIdentifiers: boolean;\n skipWaitingPeriod: boolean;\n}\n\nexport async function restart(\n this: LocalContext,\n {\n auth,\n requestReceiptFolder,\n sombraAuth,\n actions,\n statuses,\n requestIds,\n createdAt,\n emailIsVerified,\n silentModeBefore,\n sendEmailReceipt,\n copyIdentifiers,\n skipWaitingPeriod,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n concurrency,\n transcendUrl,\n }: RestartCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await bulkRestartRequests({\n requestReceiptFolder,\n auth,\n sombraAuth,\n requestActions: actions,\n requestStatuses: statuses,\n requestIds,\n createdAt,\n emailIsVerified,\n silentModeBefore,\n sendEmailReceipt,\n copyIdentifiers,\n skipWaitingPeriod,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n concurrency,\n transcendUrl,\n });\n}\n"],"mappings":"2GA2BA,eAAsB,EAEpB,CACE,OACA,uBACA,aACA,UACA,WACA,aACA,YACA,kBACA,mBACA,mBACA,kBACA,oBACA,kBACA,iBACA,kBACA,iBACA,cACA,gBAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAM,EAAoB,CACxB,uBACA,OACA,aACA,eAAgB,EAChB,gBAAiB,EACjB,aACA,YACA,kBACA,mBACA,mBACA,kBACA,oBACA,kBACA,iBACA,kBACA,iBACA,cACA,eACD,CAAC"}
@@ -1,2 +1,2 @@
- import{t as e}from"./approvePrivacyRequests-1cguqGqq.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({auth:n,actions:r,origins:i,silentModeBefore:a,createdAtBefore:o,createdAtAfter:s,updatedAtBefore:c,updatedAtAfter:l,transcendUrl:u,concurrency:d}){t(this.process.exit),await e({transcendUrl:u,requestActions:r,auth:n,requestOrigins:i,concurrency:d,silentModeBefore:a,createdAtBefore:o,createdAtAfter:s,updatedAtBefore:c,updatedAtAfter:l})}export{n as approve};
- //# sourceMappingURL=impl-Cq_RqK0_2.mjs.map
+ import{t as e}from"./approvePrivacyRequests-CWGZR2N6.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({auth:n,actions:r,origins:i,silentModeBefore:a,createdAtBefore:o,createdAtAfter:s,updatedAtBefore:c,updatedAtAfter:l,transcendUrl:u,concurrency:d}){t(this.process.exit),await e({transcendUrl:u,requestActions:r,auth:n,requestOrigins:i,concurrency:d,silentModeBefore:a,createdAtBefore:o,createdAtAfter:s,updatedAtBefore:c,updatedAtAfter:l})}export{n as approve};
+ //# sourceMappingURL=impl-KDuBh4bu2.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-Cq_RqK0_2.mjs","names":[],"sources":["../src/commands/request/approve/impl.ts"],"sourcesContent":["import { RequestAction, RequestOrigin } from '@transcend-io/privacy-types';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { approvePrivacyRequests } from '../../../lib/requests/index.js';\n\nexport interface ApproveCommandFlags {\n auth: string;\n actions: RequestAction[];\n origins?: RequestOrigin[];\n silentModeBefore?: Date;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n updatedAtBefore?: Date;\n updatedAtAfter?: Date;\n transcendUrl: string;\n concurrency: number;\n}\n\nexport async function approve(\n this: LocalContext,\n {\n auth,\n actions,\n origins,\n silentModeBefore,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n transcendUrl,\n concurrency,\n }: ApproveCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await approvePrivacyRequests({\n transcendUrl,\n requestActions: actions,\n auth,\n requestOrigins: origins,\n concurrency,\n silentModeBefore,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n });\n}\n"],"mappings":"8GAmBA,eAAsB,EAEpB,CACE,OACA,UACA,UACA,mBACA,kBACA,iBACA,kBACA,iBACA,eACA,eAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAM,EAAuB,CAC3B,eACA,eAAgB,EAChB,OACA,eAAgB,EAChB,cACA,mBACA,kBACA,iBACA,kBACA,iBACD,CAAC"}
+ {"version":3,"file":"impl-KDuBh4bu2.mjs","names":[],"sources":["../src/commands/request/approve/impl.ts"],"sourcesContent":["import { RequestAction, RequestOrigin } from '@transcend-io/privacy-types';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { approvePrivacyRequests } from '../../../lib/requests/index.js';\n\nexport interface ApproveCommandFlags {\n auth: string;\n actions: RequestAction[];\n origins?: RequestOrigin[];\n silentModeBefore?: Date;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n updatedAtBefore?: Date;\n updatedAtAfter?: Date;\n transcendUrl: string;\n concurrency: number;\n}\n\nexport async function approve(\n this: LocalContext,\n {\n auth,\n actions,\n origins,\n silentModeBefore,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n transcendUrl,\n concurrency,\n }: ApproveCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await approvePrivacyRequests({\n transcendUrl,\n requestActions: actions,\n auth,\n requestOrigins: origins,\n concurrency,\n silentModeBefore,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n });\n}\n"],"mappings":"8GAmBA,eAAsB,EAEpB,CACE,OACA,UACA,UACA,mBACA,kBACA,iBACA,kBACA,iBACA,eACA,eAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAM,EAAuB,CAC3B,eACA,eAAgB,EAChB,OACA,eAAgB,EAChB,cACA,mBACA,kBACA,iBACA,kBACA,iBACD,CAAC"}
@@ -1,2 +1,2 @@
- import{t as e}from"./uploadDataFlowsFromCsv-CJFVLvCJ.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({auth:n,trackerStatus:r,file:i,classifyService:a,transcendUrl:o}){t(this.process.exit),await e({auth:n,trackerStatus:r,file:i,classifyService:a,transcendUrl:o})}export{n as uploadDataFlowsFromCsv};
- //# sourceMappingURL=impl-C05tQHSq.mjs.map
+ import{t as e}from"./uploadDataFlowsFromCsv-DcTbrsv2.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({auth:n,trackerStatus:r,file:i,classifyService:a,transcendUrl:o}){t(this.process.exit),await e({auth:n,trackerStatus:r,file:i,classifyService:a,transcendUrl:o})}export{n as uploadDataFlowsFromCsv};
+ //# sourceMappingURL=impl-MpkLBntW.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-C05tQHSq.mjs","names":["uploadDataFlowsFromCsvHelper"],"sources":["../src/commands/consent/upload-data-flows-from-csv/impl.ts"],"sourcesContent":["import { ConsentTrackerStatus } from '@transcend-io/privacy-types';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { uploadDataFlowsFromCsv as uploadDataFlowsFromCsvHelper } from '../../../lib/consent-manager/index.js';\n\nexport interface UploadDataFlowsFromCsvCommandFlags {\n auth: string;\n trackerStatus: ConsentTrackerStatus;\n file: string;\n classifyService: boolean;\n transcendUrl: string;\n}\n\nexport async function uploadDataFlowsFromCsv(\n this: LocalContext,\n { auth, trackerStatus, file, classifyService, transcendUrl }: UploadDataFlowsFromCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await uploadDataFlowsFromCsvHelper({\n auth,\n trackerStatus,\n file,\n classifyService,\n transcendUrl,\n });\n}\n"],"mappings":"8GAcA,eAAsB,EAEpB,CAAE,OAAM,gBAAe,OAAM,kBAAiB,gBAC/B,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAA6B,CACjC,OACA,gBACA,OACA,kBACA,eACD,CAAC"}
+ {"version":3,"file":"impl-MpkLBntW.mjs","names":["uploadDataFlowsFromCsvHelper"],"sources":["../src/commands/consent/upload-data-flows-from-csv/impl.ts"],"sourcesContent":["import { ConsentTrackerStatus } from '@transcend-io/privacy-types';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { uploadDataFlowsFromCsv as uploadDataFlowsFromCsvHelper } from '../../../lib/consent-manager/index.js';\n\nexport interface UploadDataFlowsFromCsvCommandFlags {\n auth: string;\n trackerStatus: ConsentTrackerStatus;\n file: string;\n classifyService: boolean;\n transcendUrl: string;\n}\n\nexport async function uploadDataFlowsFromCsv(\n this: LocalContext,\n { auth, trackerStatus, file, classifyService, transcendUrl }: UploadDataFlowsFromCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await uploadDataFlowsFromCsvHelper({\n auth,\n trackerStatus,\n file,\n classifyService,\n transcendUrl,\n });\n}\n"],"mappings":"8GAcA,eAAsB,EAEpB,CAAE,OAAM,gBAAe,OAAM,kBAAiB,gBAC/B,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAA6B,CACjC,OACA,gBACA,OACA,kBACA,eACD,CAAC"}
@@ -1,2 +1,2 @@
- import{t as e}from"./logger-B-LXIf3U.mjs";import{t}from"./pullChunkedCustomSiloOutstandingIdentifiers-DgWgggQt.mjs";import{i as n,s as r}from"./writeCsv-B51ulrVl.mjs";import{t as i}from"./done-input-validation-DLR0-MJ7.mjs";import{uniq as a}from"lodash-es";import o from"colors";async function s({file:s,transcendUrl:c,auth:l,sombraAuth:u,dataSiloId:d,actions:f,pageLimit:p,skipRequestCount:m,chunkSize:h}){m&&e.info(o.yellow(`Skipping request count as requested. This may help speed up the call.`)),(Number.isNaN(h)||h<=0||h%p!==0)&&(e.error(o.red(`Invalid chunk size: "${h}". Must be a positive integer that is a multiple of ${p}.`)),this.process.exit(1)),i(this.process.exit);let{baseName:g,extension:_}=n(s),v=0;await t({transcendUrl:c,apiPageSize:p,savePageSize:h,onSave:async t=>{let n=`${g}-${v}${_}`;return e.info(o.blue(`Saving ${t.length} identifiers to file "${n}"`)),await r(n,t,a(t.map(e=>Object.keys(e)).flat())),e.info(o.green(`Successfully wrote ${t.length} identifiers to file "${n}"`)),v+=1,Promise.resolve()},actions:f,auth:l,sombraAuth:u,dataSiloId:d,skipRequestCount:m})}export{s as pullIdentifiers};
- //# sourceMappingURL=impl-Zr8uLP_n.mjs.map
+ import{t as e}from"./logger-B-LXIf3U.mjs";import{t}from"./pullChunkedCustomSiloOutstandingIdentifiers-BT-GZpT1.mjs";import{i as n,s as r}from"./writeCsv-B51ulrVl.mjs";import{t as i}from"./done-input-validation-DLR0-MJ7.mjs";import{uniq as a}from"lodash-es";import o from"colors";async function s({file:s,transcendUrl:c,auth:l,sombraAuth:u,dataSiloId:d,actions:f,pageLimit:p,skipRequestCount:m,chunkSize:h}){m&&e.info(o.yellow(`Skipping request count as requested. This may help speed up the call.`)),(Number.isNaN(h)||h<=0||h%p!==0)&&(e.error(o.red(`Invalid chunk size: "${h}". Must be a positive integer that is a multiple of ${p}.`)),this.process.exit(1)),i(this.process.exit);let{baseName:g,extension:_}=n(s),v=0;await t({transcendUrl:c,apiPageSize:p,savePageSize:h,onSave:async t=>{let n=`${g}-${v}${_}`;return e.info(o.blue(`Saving ${t.length} identifiers to file "${n}"`)),await r(n,t,a(t.map(e=>Object.keys(e)).flat())),e.info(o.green(`Successfully wrote ${t.length} identifiers to file "${n}"`)),v+=1,Promise.resolve()},actions:f,auth:l,sombraAuth:u,dataSiloId:d,skipRequestCount:m})}export{s as pullIdentifiers};
+ //# sourceMappingURL=impl-P_NDC3cX.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-Zr8uLP_n.mjs","names":[],"sources":["../src/commands/request/cron/pull-identifiers/impl.ts"],"sourcesContent":["import { RequestAction } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { uniq } from 'lodash-es';\n\nimport type { LocalContext } from '../../../../context.js';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation.js';\nimport {\n CsvFormattedIdentifier,\n pullChunkedCustomSiloOutstandingIdentifiers,\n} from '../../../../lib/cron/index.js';\nimport { parseFilePath, writeLargeCsv } from '../../../../lib/helpers/index.js';\nimport { logger } from '../../../../logger.js';\n\nexport interface PullIdentifiersCommandFlags {\n file: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n dataSiloId: string;\n actions: RequestAction[];\n pageLimit: number;\n skipRequestCount: boolean;\n chunkSize: number;\n}\n\nexport async function pullIdentifiers(\n this: LocalContext,\n {\n file,\n transcendUrl,\n auth,\n sombraAuth,\n dataSiloId,\n actions,\n pageLimit,\n skipRequestCount,\n chunkSize,\n }: PullIdentifiersCommandFlags,\n): Promise<void> {\n if (skipRequestCount) {\n logger.info(\n colors.yellow('Skipping request count as requested. This may help speed up the call.'),\n );\n }\n\n if (Number.isNaN(chunkSize) || chunkSize <= 0 || chunkSize % pageLimit !== 0) {\n logger.error(\n colors.red(\n `Invalid chunk size: \"${chunkSize}\". Must be a positive integer that is a multiple of ${pageLimit}.`,\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n const { baseName, extension } = parseFilePath(file);\n let fileCount = 0;\n\n const onSave = async (chunk: CsvFormattedIdentifier[]): Promise<void> => {\n const numberedFileName = `${baseName}-${fileCount}${extension}`;\n logger.info(colors.blue(`Saving ${chunk.length} identifiers to file \"${numberedFileName}\"`));\n\n const headers = uniq(chunk.map((d) => Object.keys(d)).flat());\n await writeLargeCsv(numberedFileName, chunk, headers);\n logger.info(\n colors.green(`Successfully wrote ${chunk.length} identifiers to file \"${numberedFileName}\"`),\n );\n fileCount += 1;\n return Promise.resolve();\n };\n\n // Pull down outstanding identifiers\n await pullChunkedCustomSiloOutstandingIdentifiers({\n transcendUrl,\n apiPageSize: pageLimit,\n savePageSize: chunkSize,\n onSave,\n actions,\n auth,\n sombraAuth,\n dataSiloId,\n skipRequestCount,\n });\n}\n"],"mappings":"uRAyBA,eAAsB,EAEpB,CACE,OACA,eACA,OACA,aACA,aACA,UACA,YACA,mBACA,aAEa,CACX,GACF,EAAO,KACL,EAAO,OAAO,wEAAwE,CACvF,EAGC,OAAO,MAAM,EAAU,EAAI,GAAa,GAAK,EAAY,IAAc,KACzE,EAAO,MACL,EAAO,IACL,wBAAwB,EAAU,sDAAsD,EAAU,GACnG,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CAAE,WAAU,aAAc,EAAc,EAAK,CAC/C,EAAY,EAgBhB,MAAM,EAA4C,CAChD,eACA,YAAa,EACb,aAAc,EACd,OAlBa,KAAO,IAAmD,CACvE,IAAM,EAAmB,GAAG,EAAS,GAAG,IAAY,IASpD,OARA,EAAO,KAAK,EAAO,KAAK,UAAU,EAAM,OAAO,wBAAwB,EAAiB,GAAG,CAAC,CAG5F,MAAM,EAAc,EAAkB,EADtB,EAAK,EAAM,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CACR,CACrD,EAAO,KACL,EAAO,MAAM,sBAAsB,EAAM,OAAO,wBAAwB,EAAiB,GAAG,CAC7F,CACD,GAAa,EACN,QAAQ,SAAS,EASxB,UACA,OACA,aACA,aACA,mBACD,CAAC"}
+ {"version":3,"file":"impl-P_NDC3cX.mjs","names":[],"sources":["../src/commands/request/cron/pull-identifiers/impl.ts"],"sourcesContent":["import { RequestAction } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { uniq } from 'lodash-es';\n\nimport type { LocalContext } from '../../../../context.js';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation.js';\nimport {\n CsvFormattedIdentifier,\n pullChunkedCustomSiloOutstandingIdentifiers,\n} from '../../../../lib/cron/index.js';\nimport { parseFilePath, writeLargeCsv } from '../../../../lib/helpers/index.js';\nimport { logger } from '../../../../logger.js';\n\nexport interface PullIdentifiersCommandFlags {\n file: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n dataSiloId: string;\n actions: RequestAction[];\n pageLimit: number;\n skipRequestCount: boolean;\n chunkSize: number;\n}\n\nexport async function pullIdentifiers(\n this: LocalContext,\n {\n file,\n transcendUrl,\n auth,\n sombraAuth,\n dataSiloId,\n actions,\n pageLimit,\n skipRequestCount,\n chunkSize,\n }: PullIdentifiersCommandFlags,\n): Promise<void> {\n if (skipRequestCount) {\n logger.info(\n colors.yellow('Skipping request count as requested. This may help speed up the call.'),\n );\n }\n\n if (Number.isNaN(chunkSize) || chunkSize <= 0 || chunkSize % pageLimit !== 0) {\n logger.error(\n colors.red(\n `Invalid chunk size: \"${chunkSize}\". Must be a positive integer that is a multiple of ${pageLimit}.`,\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n const { baseName, extension } = parseFilePath(file);\n let fileCount = 0;\n\n const onSave = async (chunk: CsvFormattedIdentifier[]): Promise<void> => {\n const numberedFileName = `${baseName}-${fileCount}${extension}`;\n logger.info(colors.blue(`Saving ${chunk.length} identifiers to file \"${numberedFileName}\"`));\n\n const headers = uniq(chunk.map((d) => Object.keys(d)).flat());\n await writeLargeCsv(numberedFileName, chunk, headers);\n logger.info(\n colors.green(`Successfully wrote ${chunk.length} identifiers to file \"${numberedFileName}\"`),\n );\n fileCount += 1;\n return Promise.resolve();\n };\n\n // Pull down outstanding identifiers\n await pullChunkedCustomSiloOutstandingIdentifiers({\n transcendUrl,\n apiPageSize: pageLimit,\n savePageSize: chunkSize,\n onSave,\n actions,\n auth,\n sombraAuth,\n dataSiloId,\n skipRequestCount,\n });\n}\n"],"mappings":"uRAyBA,eAAsB,EAEpB,CACE,OACA,eACA,OACA,aACA,aACA,UACA,YACA,mBACA,aAEa,CACX,GACF,EAAO,KACL,EAAO,OAAO,wEAAwE,CACvF,EAGC,OAAO,MAAM,EAAU,EAAI,GAAa,GAAK,EAAY,IAAc,KACzE,EAAO,MACL,EAAO,IACL,wBAAwB,EAAU,sDAAsD,EAAU,GACnG,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CAAE,WAAU,aAAc,EAAc,EAAK,CAC/C,EAAY,EAgBhB,MAAM,EAA4C,CAChD,eACA,YAAa,EACb,aAAc,EACd,OAlBa,KAAO,IAAmD,CACvE,IAAM,EAAmB,GAAG,EAAS,GAAG,IAAY,IASpD,OARA,EAAO,KAAK,EAAO,KAAK,UAAU,EAAM,OAAO,wBAAwB,EAAiB,GAAG,CAAC,CAG5F,MAAM,EAAc,EAAkB,EADtB,EAAK,EAAM,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CACR,CACrD,EAAO,KACL,EAAO,MAAM,sBAAsB,EAAM,OAAO,wBAAwB,EAAiB,GAAG,CAC7F,CACD,GAAa,EACN,QAAQ,SAAS,EASxB,UACA,OACA,aACA,aACA,mBACD,CAAC"}
@@ -1,2 +1,2 @@
- import{t as e}from"./markSilentPrivacyRequests-s7_aBROE.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({auth:n,transcendUrl:r,actions:i,statuses:a,requestIds:o,createdAtBefore:s,createdAtAfter:c,updatedAtBefore:l,updatedAtAfter:u,concurrency:d}){t(this.process.exit),await e({transcendUrl:r,requestActions:i,auth:n,requestIds:o,statuses:a,concurrency:d,createdAtBefore:s,createdAtAfter:c,updatedAtBefore:l,updatedAtAfter:u})}export{n as markSilent};
- //# sourceMappingURL=impl-D-ldjJzl2.mjs.map
+ import{t as e}from"./markSilentPrivacyRequests-BKQUu6Ep.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({auth:n,transcendUrl:r,actions:i,statuses:a,requestIds:o,createdAtBefore:s,createdAtAfter:c,updatedAtBefore:l,updatedAtAfter:u,concurrency:d}){t(this.process.exit),await e({transcendUrl:r,requestActions:i,auth:n,requestIds:o,statuses:a,concurrency:d,createdAtBefore:s,createdAtAfter:c,updatedAtBefore:l,updatedAtAfter:u})}export{n as markSilent};
+ //# sourceMappingURL=impl-c7rUQYDc2.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-D-ldjJzl2.mjs","names":[],"sources":["../src/commands/request/mark-silent/impl.ts"],"sourcesContent":["import type { RequestAction, RequestStatus } from '@transcend-io/privacy-types';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { markSilentPrivacyRequests } from '../../../lib/requests/index.js';\n\nexport interface MarkSilentCommandFlags {\n auth: string;\n actions: RequestAction[];\n statuses?: RequestStatus[];\n requestIds?: string[];\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n updatedAtBefore?: Date;\n updatedAtAfter?: Date;\n transcendUrl: string;\n concurrency: number;\n}\n\nexport async function markSilent(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n actions,\n statuses,\n requestIds,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n concurrency,\n }: MarkSilentCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await markSilentPrivacyRequests({\n transcendUrl,\n requestActions: actions,\n auth,\n requestIds,\n statuses,\n concurrency,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n });\n}\n"],"mappings":"iHAmBA,eAAsB,EAEpB,CACE,OACA,eACA,UACA,WACA,aACA,kBACA,iBACA,kBACA,iBACA,eAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAM,EAA0B,CAC9B,eACA,eAAgB,EAChB,OACA,aACA,WACA,cACA,kBACA,iBACA,kBACA,iBACD,CAAC"}
+ {"version":3,"file":"impl-c7rUQYDc2.mjs","names":[],"sources":["../src/commands/request/mark-silent/impl.ts"],"sourcesContent":["import type { RequestAction, RequestStatus } from '@transcend-io/privacy-types';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { markSilentPrivacyRequests } from '../../../lib/requests/index.js';\n\nexport interface MarkSilentCommandFlags {\n auth: string;\n actions: RequestAction[];\n statuses?: RequestStatus[];\n requestIds?: string[];\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n updatedAtBefore?: Date;\n updatedAtAfter?: Date;\n transcendUrl: string;\n concurrency: number;\n}\n\nexport async function markSilent(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n actions,\n statuses,\n requestIds,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n concurrency,\n }: MarkSilentCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await markSilentPrivacyRequests({\n transcendUrl,\n requestActions: actions,\n auth,\n requestIds,\n statuses,\n concurrency,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n });\n}\n"],"mappings":"iHAmBA,eAAsB,EAEpB,CACE,OACA,eACA,UACA,WACA,aACA,kBACA,iBACA,kBACA,iBACA,eAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAM,EAA0B,CAC9B,eACA,eAAgB,EAChB,OACA,aACA,WACA,cACA,kBACA,iBACA,kBACA,iBACD,CAAC"}
@@ -1,2 +1,2 @@
- import{t as e}from"./pushCronIdentifiersFromCsv-C34TB9tG.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({file:n,transcendUrl:r,auth:i,sombraAuth:a,dataSiloId:o}){t(this.process.exit),await e({file:n,transcendUrl:r,auth:i,sombraAuth:a,dataSiloId:o})}export{n as markIdentifiersCompleted};
- //# sourceMappingURL=impl-G1brwI4o.mjs.map
+ import{t as e}from"./pushCronIdentifiersFromCsv-D2saGR5i.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({file:n,transcendUrl:r,auth:i,sombraAuth:a,dataSiloId:o}){t(this.process.exit),await e({file:n,transcendUrl:r,auth:i,sombraAuth:a,dataSiloId:o})}export{n as markIdentifiersCompleted};
+ //# sourceMappingURL=impl-fqOKTw5J.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-G1brwI4o.mjs","names":[],"sources":["../src/commands/request/cron/mark-identifiers-completed/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../../context.js';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation.js';\nimport { pushCronIdentifiersFromCsv } from '../../../../lib/cron/index.js';\n\nexport interface MarkIdentifiersCompletedCommandFlags {\n file: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n dataSiloId: string;\n}\n\nexport async function markIdentifiersCompleted(\n this: LocalContext,\n { file, transcendUrl, auth, sombraAuth, dataSiloId }: MarkIdentifiersCompletedCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await pushCronIdentifiersFromCsv({\n file,\n transcendUrl,\n auth,\n sombraAuth,\n dataSiloId,\n });\n}\n"],"mappings":"kHAYA,eAAsB,EAEpB,CAAE,OAAM,eAAc,OAAM,aAAY,cACzB,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAM,EAA2B,CAC/B,OACA,eACA,OACA,aACA,aACD,CAAC"}
+ {"version":3,"file":"impl-fqOKTw5J.mjs","names":[],"sources":["../src/commands/request/cron/mark-identifiers-completed/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../../context.js';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation.js';\nimport { pushCronIdentifiersFromCsv } from '../../../../lib/cron/index.js';\n\nexport interface MarkIdentifiersCompletedCommandFlags {\n file: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n dataSiloId: string;\n}\n\nexport async function markIdentifiersCompleted(\n this: LocalContext,\n { file, transcendUrl, auth, sombraAuth, dataSiloId }: MarkIdentifiersCompletedCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await pushCronIdentifiersFromCsv({\n file,\n transcendUrl,\n auth,\n sombraAuth,\n dataSiloId,\n });\n}\n"],"mappings":"kHAYA,eAAsB,EAEpB,CAAE,OAAM,eAAc,OAAM,aAAY,cACzB,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAM,EAA2B,CAC/B,OACA,eACA,OACA,aACA,aACD,CAAC"}
@@ -1,2 +1,2 @@
- import{t as e}from"./constants-CeMiHaHx.mjs";import{t}from"./logger-B-LXIf3U.mjs";import{n}from"./syncCodePackages-BHgjfXCI.mjs";import{r}from"./makeGraphQLRequest-G078PsEL.mjs";import{t as i}from"./done-input-validation-DLR0-MJ7.mjs";import{t as a}from"./constants-AFtS5Nad.mjs";import{getEntries as o}from"@transcend-io/type-utils";import s from"colors";import{execSync as c}from"child_process";import l from"fast-glob";async function u({scanPath:e,ignoreDirs:n=[],repositoryName:r}){return(await Promise.all(o(a).map(async([i,a])=>{let{ignoreDirs:o,supportedFiles:c,scanFunction:u}=a,d=[...n,...o].filter(e=>e.length>0);try{let n=await l(`${e}/**/${c.join(`|`)}`,{ignore:d.map(t=>`${e}/**/${t}`),unique:!0,onlyFiles:!0});t.info(s.magenta(`Scanning: ${n.length} files of type ${i}`));let a=n.map(t=>u(t).map(n=>({...n,relativePath:t.replace(`${e}/`,``)}))).flat();return t.info(s.green(`Found: ${a.length} packages and ${a.map(({softwareDevelopmentKits:e=[]})=>e).flat().length} sdks`)),a.map(e=>({...e,type:i,repositoryName:r}))}catch(e){throw Error(`Error scanning globs ${c} with error: ${e}`)}}))).flat()}const d=`A repository name must be provided. You can specify using --repositoryName=$REPO_NAME or by ensuring the command "git config --get remote.origin.url" returns the name of the repository`;async function f({auth:a,scanPath:o,ignoreDirs:l,repositoryName:f,transcendUrl:p}){i(this.process.exit);let m=f;if(!m)try{let e=c(`cd ${o} && git config --get remote.origin.url`).toString(`utf-8`).trim();[m]=e.includes(`https:`)?e.split(`/`).slice(3).join(`/`).split(`.`):(e.split(`:`).pop()||``).split(`.`),m||(t.error(s.red(d)),this.process.exit(1))}catch(e){t.error(s.red(`${d} - Got error: ${e.message}`)),this.process.exit(1)}let h=r(p,a),g=await u({scanPath:o,ignoreDirs:l,repositoryName:m});await n(h,g);let _=new URL(e);_.pathname=`/code-scanning/code-packages`,t.info(s.green(`Scan found ${g.length} packages at ${o}! View results at '${_.href}'`))}export{f as scanPackages};
- //# sourceMappingURL=impl-Dfc_yQML2.mjs.map
+ import{t as e}from"./constants-CeMiHaHx.mjs";import{t}from"./logger-B-LXIf3U.mjs";import{n}from"./syncCodePackages-F-97FNjo.mjs";import{r}from"./makeGraphQLRequest-Cq26A_Lq.mjs";import{t as i}from"./done-input-validation-DLR0-MJ7.mjs";import{t as a}from"./constants-AFtS5Nad.mjs";import{getEntries as o}from"@transcend-io/type-utils";import s from"colors";import{execSync as c}from"child_process";import l from"fast-glob";async function u({scanPath:e,ignoreDirs:n=[],repositoryName:r}){return(await Promise.all(o(a).map(async([i,a])=>{let{ignoreDirs:o,supportedFiles:c,scanFunction:u}=a,d=[...n,...o].filter(e=>e.length>0);try{let n=await l(`${e}/**/${c.join(`|`)}`,{ignore:d.map(t=>`${e}/**/${t}`),unique:!0,onlyFiles:!0});t.info(s.magenta(`Scanning: ${n.length} files of type ${i}`));let a=n.map(t=>u(t).map(n=>({...n,relativePath:t.replace(`${e}/`,``)}))).flat();return t.info(s.green(`Found: ${a.length} packages and ${a.map(({softwareDevelopmentKits:e=[]})=>e).flat().length} sdks`)),a.map(e=>({...e,type:i,repositoryName:r}))}catch(e){throw Error(`Error scanning globs ${c} with error: ${e}`)}}))).flat()}const d=`A repository name must be provided. You can specify using --repositoryName=$REPO_NAME or by ensuring the command "git config --get remote.origin.url" returns the name of the repository`;async function f({auth:a,scanPath:o,ignoreDirs:l,repositoryName:f,transcendUrl:p}){i(this.process.exit);let m=f;if(!m)try{let e=c(`cd ${o} && git config --get remote.origin.url`).toString(`utf-8`).trim();[m]=e.includes(`https:`)?e.split(`/`).slice(3).join(`/`).split(`.`):(e.split(`:`).pop()||``).split(`.`),m||(t.error(s.red(d)),this.process.exit(1))}catch(e){t.error(s.red(`${d} - Got error: ${e.message}`)),this.process.exit(1)}let h=r(p,a),g=await u({scanPath:o,ignoreDirs:l,repositoryName:m});await n(h,g);let _=new URL(e);_.pathname=`/code-scanning/code-packages`,t.info(s.green(`Scan found ${g.length} packages at ${o}! View results at '${_.href}'`))}export{f as scanPackages};
+ //# sourceMappingURL=impl-oiBTZqQS2.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-Dfc_yQML2.mjs","names":[],"sources":["../src/lib/code-scanning/findCodePackagesInFolder.ts","../src/commands/inventory/scan-packages/impl.ts"],"sourcesContent":["import { getEntries } from '@transcend-io/type-utils';\nimport colors from 'colors';\nimport fastGlob from 'fast-glob';\n\nimport { CodePackageInput } from '../../codecs.js';\nimport { logger } from '../../logger.js';\nimport { CODE_SCANNING_CONFIGS } from './constants.js';\n\n/**\n * Helper to scan and discovery all of the code packages within a folder\n *\n * @param options - Options\n * @returns the list of integrations\n */\nexport async function findCodePackagesInFolder({\n scanPath,\n ignoreDirs = [],\n repositoryName,\n}: {\n /** The name of the github repository reporting packages for */\n repositoryName: string;\n /** Where to look for package.json files */\n scanPath: string;\n /** The directories to ignore (excludes node_modules and serverless-build) */\n ignoreDirs?: string[];\n}): Promise<CodePackageInput[]> {\n const allCodePackages = await Promise.all(\n getEntries(CODE_SCANNING_CONFIGS).map(async ([codePackageType, config]) => {\n const { ignoreDirs: configIgnoreDirs, supportedFiles, scanFunction } = config;\n const dirsToIgnore = [...ignoreDirs, ...configIgnoreDirs].filter((dir) => dir.length > 0);\n try {\n const filesToScan: string[] = await fastGlob(`${scanPath}/**/${supportedFiles.join('|')}`, {\n ignore: dirsToIgnore.map((dir: string) => `${scanPath}/**/${dir}`),\n unique: true,\n onlyFiles: true,\n });\n logger.info(\n colors.magenta(`Scanning: ${filesToScan.length} files of type ${codePackageType}`),\n );\n const allPackages = filesToScan\n .map((filePath) =>\n scanFunction(filePath).map((result) => ({\n ...result,\n relativePath: filePath.replace(`${scanPath}/`, ''),\n })),\n )\n .flat();\n logger.info(\n colors.green(\n `Found: ${allPackages.length} packages and ${\n allPackages.map(({ softwareDevelopmentKits = [] }) => softwareDevelopmentKits).flat()\n .length\n } sdks`,\n ),\n );\n\n return allPackages.map(\n (pkg): CodePackageInput => ({\n ...pkg,\n type: codePackageType,\n repositoryName,\n }),\n );\n } catch (error) {\n throw new Error(`Error scanning globs ${supportedFiles} with error: ${error}`);\n }\n }),\n );\n\n return allCodePackages.flat();\n}\n","import { execSync } from 'child_process';\n\nimport colors from 'colors';\n\nimport { ADMIN_DASH } from '../../../constants.js';\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { findCodePackagesInFolder } from '../../../lib/code-scanning/index.js';\nimport { buildTranscendGraphQLClient, syncCodePackages } from '../../../lib/graphql/index.js';\nimport { logger } from '../../../logger.js';\n\nconst REPO_ERROR =\n 'A repository name must be provided. 
' +\n 'You can specify using --repositoryName=$REPO_NAME or by ensuring the ' +\n 'command \"git config --get remote.origin.url\" returns the name of the repository';\n\nexport interface ScanPackagesCommandFlags {\n auth: string;\n scanPath: string;\n ignoreDirs?: string[];\n repositoryName?: string;\n transcendUrl: string;\n}\n\nexport async function scanPackages(\n this: LocalContext,\n { auth, scanPath, ignoreDirs, repositoryName, transcendUrl }: ScanPackagesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Ensure repository name is specified\n let gitRepositoryName = repositoryName;\n if (!gitRepositoryName) {\n try {\n const name = execSync(`cd ${scanPath} && git config --get remote.origin.url`);\n // Trim and parse the URL\n const url = name.toString('utf-8').trim();\n [gitRepositoryName] = !url.includes('https:')\n ? (url.split(':').pop() || '').split('.')\n : url.split('/').slice(3).join('/').split('.');\n if (!gitRepositoryName) {\n logger.error(colors.red(REPO_ERROR));\n this.process.exit(1);\n }\n } catch (err) {\n logger.error(colors.red(`${REPO_ERROR} - Got error: ${err.message}`));\n this.process.exit(1);\n }\n }\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Scan the codebase to discovery packages\n const results = await findCodePackagesInFolder({\n scanPath,\n ignoreDirs,\n repositoryName: gitRepositoryName,\n });\n\n // Report scan to Transcend\n await syncCodePackages(client, results);\n\n const newUrl = new URL(ADMIN_DASH);\n newUrl.pathname = '/code-scanning/code-packages';\n\n // Indicate success\n logger.info(\n colors.green(\n `Scan found ${results.length} packages at ${scanPath}! ` + `View results at '${newUrl.href}'`,\n ),\n );\n}\n"],"mappings":"saAcA,eAAsB,EAAyB,CAC7C,WACA,aAAa,EAAE,CACf,kBAQ8B,CA4C9B,OA3CwB,MAAM,QAAQ,IACpC,EAAW,EAAsB,CAAC,IAAI,MAAO,CAAC,EAAiB,KAAY,CACzE,GAAM,CAAE,WAAY,EAAkB,iBAAgB,gBAAiB,EACjE,EAAe,CAAC,GAAG,EAAY,GAAG,EAAiB,CAAC,OAAQ,GAAQ,EAAI,OAAS,EAAE,CACzF,GAAI,CACF,IAAM,EAAwB,MAAM,EAAS,GAAG,EAAS,MAAM,EAAe,KAAK,IAAI,GAAI,CACzF,OAAQ,EAAa,IAAK,GAAgB,GAAG,EAAS,MAAM,IAAM,CAClE,OAAQ,GACR,UAAW,GACZ,CAAC,CACF,EAAO,KACL,EAAO,QAAQ,aAAa,EAAY,OAAO,iBAAiB,IAAkB,CACnF,CACD,IAAM,EAAc,EACjB,IAAK,GACJ,EAAa,EAAS,CAAC,IAAK,IAAY,CACtC,GAAG,EACH,aAAc,EAAS,QAAQ,GAAG,EAAS,GAAI,GAAG,CACnD,EAAE,CACJ,CACA,MAAM,CAUT,OATA,EAAO,KACL,EAAO,MACL,UAAU,EAAY,OAAO,gBAC3B,EAAY,KAAK,CAAE,0BAA0B,EAAE,IAAO,EAAwB,CAAC,MAAM,CAClF,OACJ,OACF,CACF,CAEM,EAAY,IAChB,IAA2B,CAC1B,GAAG,EACH,KAAM,EACN,iBACD,EACF,OACM,EAAO,CACd,MAAU,MAAM,wBAAwB,EAAe,eAAe,IAAQ,GAEhF,CACH,EAEsB,MAAM,CC1D/B,MAAM,EACJ,2LAYF,eAAsB,EAEpB,CAAE,OAAM,WAAU,aAAY,iBAAgB,gBAC/B,CACf,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAI,EAAoB,EACxB,GAAI,CAAC,EACH,GAAI,CAGF,IAAM,EAFO,EAAS,MAAM,EAAS,wCAAwC,CAE5D,SAAS,QAAQ,CAAC,MAAM,CACzC,CAAC,GAAsB,EAAI,SAAS,SAAS,CAEzC,EAAI,MAAM,IAAI,CAAC,MAAM,EAAE,CAAC,KAAK,IAAI,CAAC,MAAM,IAAI,EAD3C,EAAI,MAAM,IAAI,CAAC,KAAK,EAAI,IAAI,MAAM,IAAI,CAEtC,IACH,EAAO,MAAM,EAAO,IAAI,EAAW,CAAC,CACpC,KAAK,QAAQ,KAAK,EAAE,QAEf,EAAK,CACZ,EAAO,MAAM,EAAO,IAAI,GAAG,EAAW,gBAAgB,EAAI,UAAU,CAAC,CACrE,KAAK,QAAQ,KAAK,EAAE,CAKxB,IAAM,EAAS,EAA4B,EAAc,EAAK,CAGxD,EAAU,MAAM,EAAyB,CAC7C,WACA,aACA,eAAgB,EACjB,CAAC,CAGF,MAAM,EAAiB,EAAQ,EAAQ,CAEvC,IAAM,EAAS,IAAI,IAAI,EAAW,CAClC,EAAO,SAAW,+BAGlB,EAAO,KACL,EAAO,MACL,cAAc,EAAQ,OAAO,eAAe,EAAS,qBAA0B,EAAO,KAAK,GAC5F,CACF"}
+ {"version":3,"file":"impl-oiBTZqQS2.mjs","names":[],"sources":["../src/lib/code-scanning/findCodePackagesInFolder.ts","../src/commands/inventory/scan-packages/impl.ts"],"sourcesContent":["import { getEntries } from '@transcend-io/type-utils';\nimport colors from 'colors';\nimport fastGlob from 'fast-glob';\n\nimport { CodePackageInput } from '../../codecs.js';\nimport { logger } from '../../logger.js';\nimport { CODE_SCANNING_CONFIGS } from './constants.js';\n\n/**\n * Helper to scan and discovery all of the code packages within a folder\n *\n * @param options - Options\n * @returns the list of integrations\n */\nexport async function findCodePackagesInFolder({\n scanPath,\n ignoreDirs = [],\n repositoryName,\n}: {\n /** The name of the github repository reporting packages for */\n repositoryName: string;\n /** Where to look for package.json files */\n scanPath: string;\n /** The directories to ignore (excludes node_modules and serverless-build) */\n ignoreDirs?: string[];\n}): Promise<CodePackageInput[]> {\n const allCodePackages = await Promise.all(\n getEntries(CODE_SCANNING_CONFIGS).map(async ([codePackageType, config]) => {\n const { ignoreDirs: configIgnoreDirs, supportedFiles, scanFunction } = config;\n const dirsToIgnore = [...ignoreDirs, ...configIgnoreDirs].filter((dir) => dir.length > 0);\n try {\n const filesToScan: string[] = await fastGlob(`${scanPath}/**/${supportedFiles.join('|')}`, {\n ignore: dirsToIgnore.map((dir: string) => `${scanPath}/**/${dir}`),\n unique: true,\n onlyFiles: true,\n });\n logger.info(\n colors.magenta(`Scanning: ${filesToScan.length} files of type ${codePackageType}`),\n );\n const allPackages = filesToScan\n .map((filePath) =>\n scanFunction(filePath).map((result) => ({\n ...result,\n relativePath: filePath.replace(`${scanPath}/`, ''),\n })),\n )\n .flat();\n logger.info(\n colors.green(\n `Found: ${allPackages.length} packages and ${\n allPackages.map(({ softwareDevelopmentKits = [] }) => softwareDevelopmentKits).flat()\n .length\n } sdks`,\n ),\n );\n\n return allPackages.map(\n (pkg): CodePackageInput => ({\n ...pkg,\n type: codePackageType,\n repositoryName,\n }),\n );\n } catch (error) {\n throw new Error(`Error scanning globs ${supportedFiles} with error: ${error}`);\n }\n }),\n );\n\n return allCodePackages.flat();\n}\n","import { execSync } from 'child_process';\n\nimport colors from 'colors';\n\nimport { ADMIN_DASH } from '../../../constants.js';\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { findCodePackagesInFolder } from '../../../lib/code-scanning/index.js';\nimport { buildTranscendGraphQLClient, syncCodePackages } from '../../../lib/graphql/index.js';\nimport { logger } from '../../../logger.js';\n\nconst REPO_ERROR =\n 'A repository name must be provided. 
' +\n 'You can specify using --repositoryName=$REPO_NAME or by ensuring the ' +\n 'command \"git config --get remote.origin.url\" returns the name of the repository';\n\nexport interface ScanPackagesCommandFlags {\n auth: string;\n scanPath: string;\n ignoreDirs?: string[];\n repositoryName?: string;\n transcendUrl: string;\n}\n\nexport async function scanPackages(\n this: LocalContext,\n { auth, scanPath, ignoreDirs, repositoryName, transcendUrl }: ScanPackagesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Ensure repository name is specified\n let gitRepositoryName = repositoryName;\n if (!gitRepositoryName) {\n try {\n const name = execSync(`cd ${scanPath} && git config --get remote.origin.url`);\n // Trim and parse the URL\n const url = name.toString('utf-8').trim();\n [gitRepositoryName] = !url.includes('https:')\n ? (url.split(':').pop() || '').split('.')\n : url.split('/').slice(3).join('/').split('.');\n if (!gitRepositoryName) {\n logger.error(colors.red(REPO_ERROR));\n this.process.exit(1);\n }\n } catch (err) {\n logger.error(colors.red(`${REPO_ERROR} - Got error: ${err.message}`));\n this.process.exit(1);\n }\n }\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Scan the codebase to discovery packages\n const results = await findCodePackagesInFolder({\n scanPath,\n ignoreDirs,\n repositoryName: gitRepositoryName,\n });\n\n // Report scan to Transcend\n await syncCodePackages(client, results);\n\n const newUrl = new URL(ADMIN_DASH);\n newUrl.pathname = '/code-scanning/code-packages';\n\n // Indicate success\n logger.info(\n colors.green(\n `Scan found ${results.length} packages at ${scanPath}! ` + `View results at '${newUrl.href}'`,\n ),\n );\n}\n"],"mappings":"saAcA,eAAsB,EAAyB,CAC7C,WACA,aAAa,EAAE,CACf,kBAQ8B,CA4C9B,OA3CwB,MAAM,QAAQ,IACpC,EAAW,EAAsB,CAAC,IAAI,MAAO,CAAC,EAAiB,KAAY,CACzE,GAAM,CAAE,WAAY,EAAkB,iBAAgB,gBAAiB,EACjE,EAAe,CAAC,GAAG,EAAY,GAAG,EAAiB,CAAC,OAAQ,GAAQ,EAAI,OAAS,EAAE,CACzF,GAAI,CACF,IAAM,EAAwB,MAAM,EAAS,GAAG,EAAS,MAAM,EAAe,KAAK,IAAI,GAAI,CACzF,OAAQ,EAAa,IAAK,GAAgB,GAAG,EAAS,MAAM,IAAM,CAClE,OAAQ,GACR,UAAW,GACZ,CAAC,CACF,EAAO,KACL,EAAO,QAAQ,aAAa,EAAY,OAAO,iBAAiB,IAAkB,CACnF,CACD,IAAM,EAAc,EACjB,IAAK,GACJ,EAAa,EAAS,CAAC,IAAK,IAAY,CACtC,GAAG,EACH,aAAc,EAAS,QAAQ,GAAG,EAAS,GAAI,GAAG,CACnD,EAAE,CACJ,CACA,MAAM,CAUT,OATA,EAAO,KACL,EAAO,MACL,UAAU,EAAY,OAAO,gBAC3B,EAAY,KAAK,CAAE,0BAA0B,EAAE,IAAO,EAAwB,CAAC,MAAM,CAClF,OACJ,OACF,CACF,CAEM,EAAY,IAChB,IAA2B,CAC1B,GAAG,EACH,KAAM,EACN,iBACD,EACF,OACM,EAAO,CACd,MAAU,MAAM,wBAAwB,EAAe,eAAe,IAAQ,GAEhF,CACH,EAEsB,MAAM,CC1D/B,MAAM,EACJ,2LAYF,eAAsB,EAEpB,CAAE,OAAM,WAAU,aAAY,iBAAgB,gBAC/B,CACf,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAI,EAAoB,EACxB,GAAI,CAAC,EACH,GAAI,CAGF,IAAM,EAFO,EAAS,MAAM,EAAS,wCAAwC,CAE5D,SAAS,QAAQ,CAAC,MAAM,CACzC,CAAC,GAAsB,EAAI,SAAS,SAAS,CAEzC,EAAI,MAAM,IAAI,CAAC,MAAM,EAAE,CAAC,KAAK,IAAI,CAAC,MAAM,IAAI,EAD3C,EAAI,MAAM,IAAI,CAAC,KAAK,EAAI,IAAI,MAAM,IAAI,CAEtC,IACH,EAAO,MAAM,EAAO,IAAI,EAAW,CAAC,CACpC,KAAK,QAAQ,KAAK,EAAE,QAEf,EAAK,CACZ,EAAO,MAAM,EAAO,IAAI,GAAG,EAAW,gBAAgB,EAAI,UAAU,CAAC,CACrE,KAAK,QAAQ,KAAK,EAAE,CAKxB,IAAM,EAAS,EAA4B,EAAc,EAAK,CAGxD,EAAU,MAAM,EAAyB,CAC7C,WACA,aACA,eAAgB,EACjB,CAAC,CAGF,MAAM,EAAiB,EAAQ,EAAQ,CAEvC,IAAM,EAAS,IAAI,IAAI,EAAW,CAClC,EAAO,SAAW,+BAGlB,EAAO,KACL,EAAO,MACL,cAAc,EAAQ,OAAO,eAAe,EAAS,qBAA0B,EAAO,KAAK,GAC5F,CACF"}
@@ -1,2 +1,2 @@
- import{t as e}from"./skipPreflightJobs-BNQhuPZ8.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({auth:n,transcendUrl:r,enricherIds:i}){t(this.process.exit),await e({transcendUrl:r,auth:n,enricherIds:i})}export{n as skipPreflightJobs};
- //# sourceMappingURL=impl-CWHnw3oX.mjs.map
+ import{t as e}from"./skipPreflightJobs-jK5lNlmv.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({auth:n,transcendUrl:r,enricherIds:i}){t(this.process.exit),await e({transcendUrl:r,auth:n,enricherIds:i})}export{n as skipPreflightJobs};
+ //# sourceMappingURL=impl-tbGnvKFm.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-CWHnw3oX.mjs","names":["skipPreflightJobsHelper"],"sources":["../src/commands/request/skip-preflight-jobs/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { skipPreflightJobs as skipPreflightJobsHelper } from '../../../lib/requests/index.js';\n\nexport interface SkipPreflightJobsCommandFlags {\n auth: string;\n enricherIds: string[];\n transcendUrl: string;\n}\n\nexport async function skipPreflightJobs(\n this: LocalContext,\n { auth, transcendUrl, enricherIds }: SkipPreflightJobsCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await skipPreflightJobsHelper({\n transcendUrl,\n auth,\n enricherIds,\n });\n}\n"],"mappings":"yGAUA,eAAsB,EAEpB,CAAE,OAAM,eAAc,eACP,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAwB,CAC5B,eACA,OACA,cACD,CAAC"}
+ {"version":3,"file":"impl-tbGnvKFm.mjs","names":["skipPreflightJobsHelper"],"sources":["../src/commands/request/skip-preflight-jobs/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { skipPreflightJobs as skipPreflightJobsHelper } from '../../../lib/requests/index.js';\n\nexport interface SkipPreflightJobsCommandFlags {\n auth: string;\n enricherIds: string[];\n transcendUrl: string;\n}\n\nexport async function skipPreflightJobs(\n this: LocalContext,\n { auth, transcendUrl, enricherIds }: SkipPreflightJobsCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await skipPreflightJobsHelper({\n transcendUrl,\n auth,\n enricherIds,\n });\n}\n"],"mappings":"yGAUA,eAAsB,EAEpB,CAAE,OAAM,eAAc,eACP,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAwB,CAC5B,eACA,OACA,cACD,CAAC"}