@transcend-io/cli 9.0.1 → 10.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (393)
  1. package/LICENSE +201 -0
  2. package/README.md +26 -34
  3. package/dist/RateCounter-DFL_mnk2.mjs +2 -0
  4. package/dist/RateCounter-DFL_mnk2.mjs.map +1 -0
  5. package/dist/RequestDataSilo-_Iv44M9u.mjs +51 -0
  6. package/dist/RequestDataSilo-_Iv44M9u.mjs.map +1 -0
  7. package/dist/app-BfTrk2nc.mjs +131 -0
  8. package/dist/app-BfTrk2nc.mjs.map +1 -0
  9. package/dist/approvePrivacyRequests-CWGZR2N6.mjs +2 -0
  10. package/dist/approvePrivacyRequests-CWGZR2N6.mjs.map +1 -0
  11. package/dist/assessment-BDywVaGR.mjs +284 -0
  12. package/dist/assessment-BDywVaGR.mjs.map +1 -0
  13. package/dist/bin/bash-complete.mjs +1 -1
  14. package/dist/bin/bash-complete.mjs.map +1 -1
  15. package/dist/bin/cli.mjs +1 -1
  16. package/dist/bin/cli.mjs.map +1 -1
  17. package/dist/bin/deprecated-command.mjs +1 -1
  18. package/dist/bin/deprecated-command.mjs.map +1 -1
  19. package/dist/bluebird-CUitXgsY.mjs +2 -0
  20. package/dist/bluebird-CUitXgsY.mjs.map +1 -0
  21. package/dist/buildXdiSyncEndpoint-Cb-pvpak.mjs +9 -0
  22. package/dist/buildXdiSyncEndpoint-Cb-pvpak.mjs.map +1 -0
  23. package/dist/bulkRestartRequests-CKF_xpN0.mjs +2 -0
  24. package/dist/bulkRestartRequests-CKF_xpN0.mjs.map +1 -0
  25. package/dist/bulkRetryEnrichers-B-Szmin-.mjs +2 -0
  26. package/dist/bulkRetryEnrichers-B-Szmin-.mjs.map +1 -0
  27. package/dist/cancelPrivacyRequests-DNiL13E_.mjs +2 -0
  28. package/dist/cancelPrivacyRequests-DNiL13E_.mjs.map +1 -0
  29. package/dist/codecs-BE3Wmoh8.mjs +2 -0
  30. package/dist/codecs-BE3Wmoh8.mjs.map +1 -0
  31. package/dist/codecs-Dx_vGxsl.mjs +2 -0
  32. package/dist/codecs-Dx_vGxsl.mjs.map +1 -0
  33. package/dist/{command-Bzyj3M2G.mjs → command-BXxoAjFo.mjs} +2 -2
  34. package/dist/command-BXxoAjFo.mjs.map +1 -0
  35. package/dist/consentManagersToBusinessEntities-BDgOFga7.mjs +5 -0
  36. package/dist/consentManagersToBusinessEntities-BDgOFga7.mjs.map +1 -0
  37. package/dist/constants-AFtS5Nad.mjs +4 -0
  38. package/dist/constants-AFtS5Nad.mjs.map +1 -0
  39. package/dist/constants-CeMiHaHx.mjs +2 -0
  40. package/dist/constants-CeMiHaHx.mjs.map +1 -0
  41. package/dist/constants-lIvXgkdp.mjs +2 -0
  42. package/dist/constants-lIvXgkdp.mjs.map +1 -0
  43. package/dist/{context-bkKpii_t.mjs → context-CdSyuBlf.mjs} +1 -1
  44. package/dist/context-CdSyuBlf.mjs.map +1 -0
  45. package/dist/{pooling-CazydwlD.mjs → createExtraKeyHandler-tubeaEjA.mjs} +5 -5
  46. package/dist/createExtraKeyHandler-tubeaEjA.mjs.map +1 -0
  47. package/dist/createPreferenceAccessTokens-DqmFctn3.mjs +10 -0
  48. package/dist/createPreferenceAccessTokens-DqmFctn3.mjs.map +1 -0
  49. package/dist/createSombraGotInstance-D1Il9zUE.mjs +10 -0
  50. package/dist/createSombraGotInstance-D1Il9zUE.mjs.map +1 -0
  51. package/dist/{dataFlowsToDataSilos-RAhfPV0l.mjs → dataFlowsToDataSilos-NhvBw1iy.mjs} +1 -1
  52. package/dist/dataFlowsToDataSilos-NhvBw1iy.mjs.map +1 -0
  53. package/dist/dataSilo-DrFetFXw.mjs +302 -0
  54. package/dist/dataSilo-DrFetFXw.mjs.map +1 -0
  55. package/dist/dataSubject-y_aXI0pa.mjs +92 -0
  56. package/dist/dataSubject-y_aXI0pa.mjs.map +1 -0
  57. package/dist/{done-input-validation-CcZtaz03.mjs → done-input-validation-DLR0-MJ7.mjs} +1 -1
  58. package/dist/{done-input-validation-CcZtaz03.mjs.map → done-input-validation-DLR0-MJ7.mjs.map} +1 -1
  59. package/dist/downloadPrivacyRequestFiles-DlpgxqHF.mjs +2 -0
  60. package/dist/downloadPrivacyRequestFiles-DlpgxqHF.mjs.map +1 -0
  61. package/dist/enums-CyFTrzXY.mjs.map +1 -1
  62. package/dist/extractClientError-DPjv09EH.mjs +2 -0
  63. package/dist/extractClientError-DPjv09EH.mjs.map +1 -0
  64. package/dist/extractErrorMessage-CPnTsT1S.mjs +2 -0
  65. package/dist/extractErrorMessage-CPnTsT1S.mjs.map +1 -0
  66. package/dist/fetchAllActions-BJsPdnxy.mjs +832 -0
  67. package/dist/fetchAllActions-BJsPdnxy.mjs.map +1 -0
  68. package/dist/fetchAllDataFlows-D248lO6_.mjs +2 -0
  69. package/dist/fetchAllDataFlows-D248lO6_.mjs.map +1 -0
  70. package/dist/fetchAllPreferenceTopics-ForE9GpZ.mjs +36 -0
  71. package/dist/fetchAllPreferenceTopics-ForE9GpZ.mjs.map +1 -0
  72. package/dist/fetchAllPurposes-ZdkO2fMp.mjs +29 -0
  73. package/dist/fetchAllPurposes-ZdkO2fMp.mjs.map +1 -0
  74. package/dist/fetchAllPurposesAndPreferences-DD6OyA5t.mjs +2 -0
  75. package/dist/fetchAllPurposesAndPreferences-DD6OyA5t.mjs.map +1 -0
  76. package/dist/fetchAllRequestEnrichers-CK-kk5eg.mjs +42 -0
  77. package/dist/fetchAllRequestEnrichers-CK-kk5eg.mjs.map +1 -0
  78. package/dist/fetchAllRequestIdentifiers-DrFFOt0m.mjs +10 -0
  79. package/dist/fetchAllRequestIdentifiers-DrFFOt0m.mjs.map +1 -0
  80. package/dist/fetchAllRequests-DNQQsY4s.mjs +2 -0
  81. package/dist/fetchAllRequests-DNQQsY4s.mjs.map +1 -0
  82. package/dist/fetchApiKeys-DjOr44xA.mjs +33 -0
  83. package/dist/fetchApiKeys-DjOr44xA.mjs.map +1 -0
  84. package/dist/fetchCatalogs-BM4FCbcS.mjs +12 -0
  85. package/dist/fetchCatalogs-BM4FCbcS.mjs.map +1 -0
  86. package/dist/fetchConsentManagerId-CFkg3-RS.mjs +321 -0
  87. package/dist/fetchConsentManagerId-CFkg3-RS.mjs.map +1 -0
  88. package/dist/fetchIdentifiers-pjQV4vUg.mjs +54 -0
  89. package/dist/fetchIdentifiers-pjQV4vUg.mjs.map +1 -0
  90. package/dist/fetchRequestDataSilo-P4yA7Lyc.mjs +2 -0
  91. package/dist/fetchRequestDataSilo-P4yA7Lyc.mjs.map +1 -0
  92. package/dist/fetchRequestFilesForRequest-BbxrEKFK.mjs +33 -0
  93. package/dist/fetchRequestFilesForRequest-BbxrEKFK.mjs.map +1 -0
  94. package/dist/generateCrossAccountApiKeys-Bxc_dzMG.mjs +33 -0
  95. package/dist/generateCrossAccountApiKeys-Bxc_dzMG.mjs.map +1 -0
  96. package/dist/impl-4ltdSmpl2.mjs +4 -0
  97. package/dist/impl-4ltdSmpl2.mjs.map +1 -0
  98. package/dist/impl-B19fH75P.mjs +12 -0
  99. package/dist/impl-B19fH75P.mjs.map +1 -0
  100. package/dist/impl-BBMjv5YQ.mjs +2 -0
  101. package/dist/impl-BBMjv5YQ.mjs.map +1 -0
  102. package/dist/{impl-CZP2l3Ds.mjs → impl-BKH3QRLi.mjs} +3 -3
  103. package/dist/impl-BKH3QRLi.mjs.map +1 -0
  104. package/dist/impl-BOUm7wly2.mjs +2 -0
  105. package/dist/impl-BOUm7wly2.mjs.map +1 -0
  106. package/dist/impl-BUC4ZelU.mjs +2 -0
  107. package/dist/impl-BUC4ZelU.mjs.map +1 -0
  108. package/dist/impl-BhTCp0kg.mjs +2 -0
  109. package/dist/impl-BhTCp0kg.mjs.map +1 -0
  110. package/dist/impl-BlHU1bbJ2.mjs +2 -0
  111. package/dist/impl-BlHU1bbJ2.mjs.map +1 -0
  112. package/dist/impl-BwjguKHC.mjs +4 -0
  113. package/dist/impl-BwjguKHC.mjs.map +1 -0
  114. package/dist/impl-C2o0eDzJ.mjs +2 -0
  115. package/dist/impl-C2o0eDzJ.mjs.map +1 -0
  116. package/dist/impl-C8HKnjw82.mjs +2 -0
  117. package/dist/impl-C8HKnjw82.mjs.map +1 -0
  118. package/dist/impl-CCUsnhoW2.mjs +2 -0
  119. package/dist/impl-CCUsnhoW2.mjs.map +1 -0
  120. package/dist/impl-CCc-wXqD.mjs +2 -0
  121. package/dist/impl-CCc-wXqD.mjs.map +1 -0
  122. package/dist/impl-CMmyv1cl.mjs +2 -0
  123. package/dist/impl-CMmyv1cl.mjs.map +1 -0
  124. package/dist/{impl-BYBNi68b.mjs → impl-CNez1OAw.mjs} +2 -2
  125. package/dist/impl-CNez1OAw.mjs.map +1 -0
  126. package/dist/impl-CNykdy3e2.mjs +2 -0
  127. package/dist/impl-CNykdy3e2.mjs.map +1 -0
  128. package/dist/impl-CSChmq_t2.mjs +2 -0
  129. package/dist/impl-CSChmq_t2.mjs.map +1 -0
  130. package/dist/impl-Ce9K4OCp.mjs +2 -0
  131. package/dist/impl-Ce9K4OCp.mjs.map +1 -0
  132. package/dist/impl-Cgg_bv7j.mjs +2 -0
  133. package/dist/impl-Cgg_bv7j.mjs.map +1 -0
  134. package/dist/impl-ChCqHkOc2.mjs +2 -0
  135. package/dist/impl-ChCqHkOc2.mjs.map +1 -0
  136. package/dist/impl-CqEwwWeD.mjs +2 -0
  137. package/dist/impl-CqEwwWeD.mjs.map +1 -0
  138. package/dist/impl-CqXFyvgV2.mjs +2 -0
  139. package/dist/impl-CqXFyvgV2.mjs.map +1 -0
  140. package/dist/impl-CxLSJk2P.mjs +2 -0
  141. package/dist/impl-CxLSJk2P.mjs.map +1 -0
  142. package/dist/impl-CzU9WTiW.mjs +2 -0
  143. package/dist/impl-CzU9WTiW.mjs.map +1 -0
  144. package/dist/impl-D-cp0CYr.mjs +2 -0
  145. package/dist/impl-D-cp0CYr.mjs.map +1 -0
  146. package/dist/impl-D9NjIwEi2.mjs +2 -0
  147. package/dist/impl-D9NjIwEi2.mjs.map +1 -0
  148. package/dist/impl-DEWXA_QC.mjs +2 -0
  149. package/dist/impl-DEWXA_QC.mjs.map +1 -0
  150. package/dist/impl-DGiPB5Vq2.mjs +2 -0
  151. package/dist/impl-DGiPB5Vq2.mjs.map +1 -0
  152. package/dist/impl-DGuwD_qz.mjs +2 -0
  153. package/dist/impl-DGuwD_qz.mjs.map +1 -0
  154. package/dist/impl-DGzvE8aJ.mjs +2 -0
  155. package/dist/impl-DGzvE8aJ.mjs.map +1 -0
  156. package/dist/impl-DTp9OQIZ.mjs +7 -0
  157. package/dist/impl-DTp9OQIZ.mjs.map +1 -0
  158. package/dist/impl-DhscnXSw.mjs +2 -0
  159. package/dist/impl-DhscnXSw.mjs.map +1 -0
  160. package/dist/impl-Dk7MdX-1.mjs +2 -0
  161. package/dist/impl-Dk7MdX-1.mjs.map +1 -0
  162. package/dist/impl-DsNPvet4.mjs +2 -0
  163. package/dist/impl-DsNPvet4.mjs.map +1 -0
  164. package/dist/impl-DxUFb0vv.mjs +2 -0
  165. package/dist/impl-DxUFb0vv.mjs.map +1 -0
  166. package/dist/impl-JThkrXiI2.mjs +2 -0
  167. package/dist/impl-JThkrXiI2.mjs.map +1 -0
  168. package/dist/impl-KDuBh4bu2.mjs +2 -0
  169. package/dist/impl-KDuBh4bu2.mjs.map +1 -0
  170. package/dist/impl-MpkLBntW.mjs +2 -0
  171. package/dist/impl-MpkLBntW.mjs.map +1 -0
  172. package/dist/impl-P_NDC3cX.mjs +2 -0
  173. package/dist/impl-P_NDC3cX.mjs.map +1 -0
  174. package/dist/impl-Rt3C_fDF.mjs +2 -0
  175. package/dist/impl-Rt3C_fDF.mjs.map +1 -0
  176. package/dist/impl-c7rUQYDc2.mjs +2 -0
  177. package/dist/impl-c7rUQYDc2.mjs.map +1 -0
  178. package/dist/impl-fqOKTw5J.mjs +2 -0
  179. package/dist/impl-fqOKTw5J.mjs.map +1 -0
  180. package/dist/impl-oiBTZqQS2.mjs +2 -0
  181. package/dist/impl-oiBTZqQS2.mjs.map +1 -0
  182. package/dist/impl-tbGnvKFm.mjs +2 -0
  183. package/dist/impl-tbGnvKFm.mjs.map +1 -0
  184. package/dist/index.d.mts +3441 -3429
  185. package/dist/index.d.mts.map +1 -0
  186. package/dist/index.mjs +78 -4
  187. package/dist/index.mjs.map +1 -1
  188. package/dist/inquirer-BgNcicZ4.mjs +2 -0
  189. package/dist/inquirer-BgNcicZ4.mjs.map +1 -0
  190. package/dist/listFiles-qzyQMaYH.mjs +2 -0
  191. package/dist/listFiles-qzyQMaYH.mjs.map +1 -0
  192. package/dist/{logger-Bj782ZYD.mjs → logger-B-LXIf3U.mjs} +1 -1
  193. package/dist/{logger-Bj782ZYD.mjs.map → logger-B-LXIf3U.mjs.map} +1 -1
  194. package/dist/makeGraphQLRequest-Cq26A_Lq.mjs +2 -0
  195. package/dist/makeGraphQLRequest-Cq26A_Lq.mjs.map +1 -0
  196. package/dist/markRequestDataSiloIdsCompleted-DzqJ5MNY.mjs +2 -0
  197. package/dist/markRequestDataSiloIdsCompleted-DzqJ5MNY.mjs.map +1 -0
  198. package/dist/markSilentPrivacyRequests-BKQUu6Ep.mjs +2 -0
  199. package/dist/markSilentPrivacyRequests-BKQUu6Ep.mjs.map +1 -0
  200. package/dist/mergeTranscendInputs-DGC4xUGu.mjs +2 -0
  201. package/dist/mergeTranscendInputs-DGC4xUGu.mjs.map +1 -0
  202. package/dist/notifyPrivacyRequestsAdditionalTime-TEHAJe4C.mjs +2 -0
  203. package/dist/notifyPrivacyRequestsAdditionalTime-TEHAJe4C.mjs.map +1 -0
  204. package/dist/package-C4J38oR1.mjs +2 -0
  205. package/dist/package-C4J38oR1.mjs.map +1 -0
  206. package/dist/parquetToCsvOneFile-DZVKXrjn.mjs +6 -0
  207. package/dist/parquetToCsvOneFile-DZVKXrjn.mjs.map +1 -0
  208. package/dist/parseAttributesFromString-CZStzJc0.mjs +2 -0
  209. package/dist/parseAttributesFromString-CZStzJc0.mjs.map +1 -0
  210. package/dist/pullAllDatapoints-Cntwuzw7.mjs +45 -0
  211. package/dist/pullAllDatapoints-Cntwuzw7.mjs.map +1 -0
  212. package/dist/pullChunkedCustomSiloOutstandingIdentifiers-BT-GZpT1.mjs +2 -0
  213. package/dist/pullChunkedCustomSiloOutstandingIdentifiers-BT-GZpT1.mjs.map +1 -0
  214. package/dist/pullConsentManagerMetrics-FnhPEszu.mjs +2 -0
  215. package/dist/pullConsentManagerMetrics-FnhPEszu.mjs.map +1 -0
  216. package/dist/pullManualEnrichmentIdentifiersToCsv-B_4REnga.mjs +2 -0
  217. package/dist/pullManualEnrichmentIdentifiersToCsv-B_4REnga.mjs.map +1 -0
  218. package/dist/pullTranscendConfiguration-CqsgEf9A.mjs +80 -0
  219. package/dist/pullTranscendConfiguration-CqsgEf9A.mjs.map +1 -0
  220. package/dist/pullUnstructuredSubDataPointRecommendations-DZd2q6S2.mjs +38 -0
  221. package/dist/pullUnstructuredSubDataPointRecommendations-DZd2q6S2.mjs.map +1 -0
  222. package/dist/pushCronIdentifiersFromCsv-D2saGR5i.mjs +2 -0
  223. package/dist/pushCronIdentifiersFromCsv-D2saGR5i.mjs.map +1 -0
  224. package/dist/pushManualEnrichmentIdentifiersFromCsv-DOvAzMyt.mjs +2 -0
  225. package/dist/pushManualEnrichmentIdentifiersFromCsv-DOvAzMyt.mjs.map +1 -0
  226. package/dist/readCsv-CyOL7eCc.mjs +2 -0
  227. package/dist/readCsv-CyOL7eCc.mjs.map +1 -0
  228. package/dist/{readTranscendYaml-DhKG1ViI.mjs → readTranscendYaml-D-J1ilS0.mjs} +2 -2
  229. package/dist/readTranscendYaml-D-J1ilS0.mjs.map +1 -0
  230. package/dist/removeUnverifiedRequestIdentifiers-ChlwRmhd.mjs +35 -0
  231. package/dist/removeUnverifiedRequestIdentifiers-ChlwRmhd.mjs.map +1 -0
  232. package/dist/request-CAsR6CMY.mjs +117 -0
  233. package/dist/request-CAsR6CMY.mjs.map +1 -0
  234. package/dist/retryRequestDataSilos-DnwXA1YZ.mjs +2 -0
  235. package/dist/retryRequestDataSilos-DnwXA1YZ.mjs.map +1 -0
  236. package/dist/skipPreflightJobs-jK5lNlmv.mjs +2 -0
  237. package/dist/skipPreflightJobs-jK5lNlmv.mjs.map +1 -0
  238. package/dist/skipRequestDataSilos-DQGroOos.mjs +2 -0
  239. package/dist/skipRequestDataSilos-DQGroOos.mjs.map +1 -0
  240. package/dist/splitCsvToList-BRq_CIfd.mjs +2 -0
  241. package/dist/splitCsvToList-BRq_CIfd.mjs.map +1 -0
  242. package/dist/streamPrivacyRequestsToCsv-BK07Bm-T.mjs +2 -0
  243. package/dist/streamPrivacyRequestsToCsv-BK07Bm-T.mjs.map +1 -0
  244. package/dist/syncCodePackages-F-97FNjo.mjs +232 -0
  245. package/dist/syncCodePackages-F-97FNjo.mjs.map +1 -0
  246. package/dist/syncCookies-BxY36BeJ.mjs +2 -0
  247. package/dist/syncCookies-BxY36BeJ.mjs.map +1 -0
  248. package/dist/syncDataFlows-Cx5LZCen.mjs +2 -0
  249. package/dist/syncDataFlows-Cx5LZCen.mjs.map +1 -0
  250. package/dist/syncTemplates-BrH7Yr0V.mjs +23 -0
  251. package/dist/syncTemplates-BrH7Yr0V.mjs.map +1 -0
  252. package/dist/time-Bl_c3W8U.mjs +2 -0
  253. package/dist/time-Bl_c3W8U.mjs.map +1 -0
  254. package/dist/types-B4CVJCpj.mjs +2 -0
  255. package/dist/types-B4CVJCpj.mjs.map +1 -0
  256. package/dist/updateConsentManagerVersionToLatest-C221vAAw.mjs +2 -0
  257. package/dist/updateConsentManagerVersionToLatest-C221vAAw.mjs.map +1 -0
  258. package/dist/uploadConsents-BbR7_sSt.mjs +2 -0
  259. package/dist/uploadConsents-BbR7_sSt.mjs.map +1 -0
  260. package/dist/uploadCookiesFromCsv-roHWekOP.mjs +2 -0
  261. package/dist/uploadCookiesFromCsv-roHWekOP.mjs.map +1 -0
  262. package/dist/uploadDataFlowsFromCsv-DcTbrsv2.mjs +2 -0
  263. package/dist/uploadDataFlowsFromCsv-DcTbrsv2.mjs.map +1 -0
  264. package/dist/uploadPrivacyRequestsFromCsv-BUGTS-pY.mjs +17 -0
  265. package/dist/uploadPrivacyRequestsFromCsv-BUGTS-pY.mjs.map +1 -0
  266. package/dist/uploadSiloDiscoveryResults-D2fK92WR.mjs +20 -0
  267. package/dist/uploadSiloDiscoveryResults-D2fK92WR.mjs.map +1 -0
  268. package/dist/validateTranscendAuth-1W1IylqE.mjs +2 -0
  269. package/dist/validateTranscendAuth-1W1IylqE.mjs.map +1 -0
  270. package/dist/withPreferenceRetry-xLMZyTq9.mjs +2 -0
  271. package/dist/withPreferenceRetry-xLMZyTq9.mjs.map +1 -0
  272. package/dist/writeCsv-B51ulrVl.mjs +6 -0
  273. package/dist/writeCsv-B51ulrVl.mjs.map +1 -0
  274. package/package.json +37 -56
  275. package/dist/api-keys-CxvKdj2v.mjs +0 -2
  276. package/dist/api-keys-CxvKdj2v.mjs.map +0 -1
  277. package/dist/app-BKMxG7RO.mjs +0 -131
  278. package/dist/app-BKMxG7RO.mjs.map +0 -1
  279. package/dist/buildAIIntegrationType-Bk0EbFKV.mjs +0 -2
  280. package/dist/buildAIIntegrationType-Bk0EbFKV.mjs.map +0 -1
  281. package/dist/code-scanning-Cx1kpssH.mjs +0 -4
  282. package/dist/code-scanning-Cx1kpssH.mjs.map +0 -1
  283. package/dist/codecs-TR6p48v3.mjs +0 -2
  284. package/dist/codecs-TR6p48v3.mjs.map +0 -1
  285. package/dist/command-Bzyj3M2G.mjs.map +0 -1
  286. package/dist/consent-manager-c4bgQF1N.mjs +0 -12
  287. package/dist/consent-manager-c4bgQF1N.mjs.map +0 -1
  288. package/dist/constants-CnLQtIBn.mjs +0 -2
  289. package/dist/constants-CnLQtIBn.mjs.map +0 -1
  290. package/dist/context-bkKpii_t.mjs.map +0 -1
  291. package/dist/cron-BvxWyvDu.mjs +0 -2
  292. package/dist/cron-BvxWyvDu.mjs.map +0 -1
  293. package/dist/data-inventory-CkS_kmus.mjs +0 -75
  294. package/dist/data-inventory-CkS_kmus.mjs.map +0 -1
  295. package/dist/dataFlowsToDataSilos-RAhfPV0l.mjs.map +0 -1
  296. package/dist/impl-8dOatHnF.mjs +0 -2
  297. package/dist/impl-8dOatHnF.mjs.map +0 -1
  298. package/dist/impl-Ah-1lwzr.mjs +0 -2
  299. package/dist/impl-Ah-1lwzr.mjs.map +0 -1
  300. package/dist/impl-B5lTeRbn.mjs +0 -2
  301. package/dist/impl-B5lTeRbn.mjs.map +0 -1
  302. package/dist/impl-B6UhzQcY2.mjs +0 -2
  303. package/dist/impl-B6UhzQcY2.mjs.map +0 -1
  304. package/dist/impl-BFf_CotE2.mjs +0 -2
  305. package/dist/impl-BFf_CotE2.mjs.map +0 -1
  306. package/dist/impl-BGQ0EGS0.mjs +0 -2
  307. package/dist/impl-BGQ0EGS0.mjs.map +0 -1
  308. package/dist/impl-BYBNi68b.mjs.map +0 -1
  309. package/dist/impl-B__p3_wC.mjs +0 -2
  310. package/dist/impl-B__p3_wC.mjs.map +0 -1
  311. package/dist/impl-BcayRe6a.mjs +0 -2
  312. package/dist/impl-BcayRe6a.mjs.map +0 -1
  313. package/dist/impl-BkYKsEVG2.mjs +0 -2
  314. package/dist/impl-BkYKsEVG2.mjs.map +0 -1
  315. package/dist/impl-Bl2yVgh0.mjs +0 -4
  316. package/dist/impl-Bl2yVgh0.mjs.map +0 -1
  317. package/dist/impl-BmAMgEEM.mjs +0 -12
  318. package/dist/impl-BmAMgEEM.mjs.map +0 -1
  319. package/dist/impl-BsttzxTN2.mjs +0 -2
  320. package/dist/impl-BsttzxTN2.mjs.map +0 -1
  321. package/dist/impl-BtnySmbi.mjs +0 -2
  322. package/dist/impl-BtnySmbi.mjs.map +0 -1
  323. package/dist/impl-BwX-evfW2.mjs +0 -4
  324. package/dist/impl-BwX-evfW2.mjs.map +0 -1
  325. package/dist/impl-C-wzeAib2.mjs +0 -2
  326. package/dist/impl-C-wzeAib2.mjs.map +0 -1
  327. package/dist/impl-C61PYfk12.mjs +0 -2
  328. package/dist/impl-C61PYfk12.mjs.map +0 -1
  329. package/dist/impl-CAuNpuF2.mjs +0 -2
  330. package/dist/impl-CAuNpuF2.mjs.map +0 -1
  331. package/dist/impl-CSKrBIuV.mjs +0 -2
  332. package/dist/impl-CSKrBIuV.mjs.map +0 -1
  333. package/dist/impl-CZP2l3Ds.mjs.map +0 -1
  334. package/dist/impl-CiJ8hE5W2.mjs +0 -2
  335. package/dist/impl-CiJ8hE5W2.mjs.map +0 -1
  336. package/dist/impl-Cj3H-m2Z.mjs +0 -2
  337. package/dist/impl-Cj3H-m2Z.mjs.map +0 -1
  338. package/dist/impl-CkY0wfCz.mjs +0 -2
  339. package/dist/impl-CkY0wfCz.mjs.map +0 -1
  340. package/dist/impl-Cm8pUfBU2.mjs +0 -2
  341. package/dist/impl-Cm8pUfBU2.mjs.map +0 -1
  342. package/dist/impl-CpzS9LVu2.mjs +0 -2
  343. package/dist/impl-CpzS9LVu2.mjs.map +0 -1
  344. package/dist/impl-CwfamZ1c.mjs +0 -2
  345. package/dist/impl-CwfamZ1c.mjs.map +0 -1
  346. package/dist/impl-D81et1Yb2.mjs +0 -2
  347. package/dist/impl-D81et1Yb2.mjs.map +0 -1
  348. package/dist/impl-D92PTNk3.mjs +0 -2
  349. package/dist/impl-D92PTNk3.mjs.map +0 -1
  350. package/dist/impl-DTXDVeo6.mjs +0 -2
  351. package/dist/impl-DTXDVeo6.mjs.map +0 -1
  352. package/dist/impl-DWoysXup.mjs +0 -2
  353. package/dist/impl-DWoysXup.mjs.map +0 -1
  354. package/dist/impl-DX3JHZ4v2.mjs +0 -2
  355. package/dist/impl-DX3JHZ4v2.mjs.map +0 -1
  356. package/dist/impl-DhuUrzxQ.mjs +0 -2
  357. package/dist/impl-DhuUrzxQ.mjs.map +0 -1
  358. package/dist/impl-DqMYLKjU.mjs +0 -2
  359. package/dist/impl-DqMYLKjU.mjs.map +0 -1
  360. package/dist/impl-DqQ6CIj0.mjs +0 -2
  361. package/dist/impl-DqQ6CIj0.mjs.map +0 -1
  362. package/dist/impl-Duaq6iWI2.mjs +0 -2
  363. package/dist/impl-Duaq6iWI2.mjs.map +0 -1
  364. package/dist/impl-O5gz8qcm.mjs +0 -2
  365. package/dist/impl-O5gz8qcm.mjs.map +0 -1
  366. package/dist/impl-PH0AoC7i.mjs +0 -2
  367. package/dist/impl-PH0AoC7i.mjs.map +0 -1
  368. package/dist/impl-S8p6toVb2.mjs +0 -2
  369. package/dist/impl-S8p6toVb2.mjs.map +0 -1
  370. package/dist/impl-X2MSb8Ij.mjs +0 -2
  371. package/dist/impl-X2MSb8Ij.mjs.map +0 -1
  372. package/dist/impl-bo95wZIU2.mjs +0 -2
  373. package/dist/impl-bo95wZIU2.mjs.map +0 -1
  374. package/dist/impl-cfdCesro.mjs +0 -2
  375. package/dist/impl-cfdCesro.mjs.map +0 -1
  376. package/dist/impl-iZoXu4nV.mjs +0 -2
  377. package/dist/impl-iZoXu4nV.mjs.map +0 -1
  378. package/dist/impl-lebl6Zek2.mjs +0 -2
  379. package/dist/impl-lebl6Zek2.mjs.map +0 -1
  380. package/dist/impl-p0YN9e2e.mjs +0 -2
  381. package/dist/impl-p0YN9e2e.mjs.map +0 -1
  382. package/dist/manual-enrichment-B6lW5kAX.mjs +0 -2
  383. package/dist/manual-enrichment-B6lW5kAX.mjs.map +0 -1
  384. package/dist/mergeTranscendInputs-Coj_e2N3.mjs +0 -2
  385. package/dist/mergeTranscendInputs-Coj_e2N3.mjs.map +0 -1
  386. package/dist/pooling-CazydwlD.mjs.map +0 -1
  387. package/dist/preference-management-8gj7aSJB.mjs +0 -7
  388. package/dist/preference-management-8gj7aSJB.mjs.map +0 -1
  389. package/dist/readTranscendYaml-DhKG1ViI.mjs.map +0 -1
  390. package/dist/syncConfigurationToTranscend-VJd0PnaZ.mjs +0 -3010
  391. package/dist/syncConfigurationToTranscend-VJd0PnaZ.mjs.map +0 -1
  392. package/dist/uploadConsents-C1S-BNzw.mjs +0 -2
  393. package/dist/uploadConsents-C1S-BNzw.mjs.map +0 -1
package/dist/impl-CqEwwWeD.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"impl-CqEwwWeD.mjs","names":[],"sources":["../src/lib/helpers/collectCsvFilesOrExit.ts","../src/commands/admin/chunk-csv/ui/plugin.ts","../src/lib/helpers/chunkOneCsvFile.ts","../src/commands/admin/chunk-csv/worker.ts","../src/commands/admin/chunk-csv/impl.ts"],"sourcesContent":["import { readdirSync, statSync } from 'node:fs';\nimport { join } from 'node:path';\n\nimport colors from 'colors';\n\nimport type { LocalContext } from '../../context.js';\nimport { logger } from '../../logger.js';\n\n/**\n * Validate flags and collect CSV file paths from a directory.\n * On validation error, the provided `exit` function is called.\n *\n * @param directory - the directory containing CSV files\n * @param localContext - the context of the command, used for logging and exit\n * @returns an array of valid CSV file paths\n */\nexport function collectCsvFilesOrExit(\n directory: string | undefined,\n localContext: LocalContext,\n): string[] {\n if (!directory) {\n logger.error(colors.red('A --directory must be provided.'));\n localContext.process.exit(1);\n }\n\n let files: string[] = [];\n try {\n const entries = readdirSync(directory);\n files = entries\n .filter((f) => f.endsWith('.csv'))\n .map((f) => join(directory, f))\n .filter((p) => {\n try {\n return statSync(p).isFile();\n } catch {\n return false;\n }\n });\n } catch (err) {\n logger.error(colors.red(`Failed to read directory: ${directory}`));\n logger.error(colors.red((err as Error).message));\n localContext.process.exit(1);\n }\n\n if (files.length === 0) {\n logger.error(colors.red(`No CSV files found in directory: ${directory}`));\n localContext.process.exit(1);\n }\n logger.info(colors.green(`Found ${files.length} CSV files in ${directory}`));\n return files;\n}\n","import {\n makeHeader,\n makeWorkerRows,\n type ChunkSlotProgress,\n type CommonCtx,\n type DashboardPlugin,\n} from '../../../../lib/pooling/index.js';\n\n/**\n * Header for chunk-csv (no extra totals block).\n *\n * @param ctx - Dashboard context.\n * @returns Header lines.\n */\nfunction renderHeader<TTotals>(ctx: CommonCtx<TTotals, ChunkSlotProgress>): string[] {\n // no extra lines — reuse the shared header as-is\n return makeHeader(ctx);\n}\n\n/**\n * Worker rows for chunk-csv — share the generic row renderer.\n *\n * @param ctx - Dashboard context.\n * @returns Array of strings, each representing one worker row.\n */\nfunction renderWorkers<TTotals>(ctx: CommonCtx<TTotals, ChunkSlotProgress>): string[] {\n return makeWorkerRows(ctx);\n}\n\nexport const chunkCsvPlugin: DashboardPlugin<unknown, ChunkSlotProgress> = {\n renderHeader,\n renderWorkers,\n // no extras\n};\n","import { once } from 'node:events';\nimport { createReadStream, createWriteStream } from 'node:fs';\nimport { mkdir, readdir, unlink, stat } from 'node:fs/promises';\nimport { basename, dirname, join } from 'node:path';\nimport { Transform } from 'node:stream';\nimport { pipeline } from 'node:stream/promises';\n\nimport colors from 'colors';\nimport { Parser } from 'csv-parse';\nimport * as fastcsv from 'fast-csv';\n\nimport { logger } from '../../logger.js';\n\n/**\n * Options for chunking a single CSV file\n */\nexport type ChunkOpts = {\n /** Path to the CSV file to chunk */\n filePath: string;\n /** Output directory for chunk files; defaults to the same directory as the input file */\n outputDir?: string;\n /** Clear output directory before starting */\n clearOutputDir: boolean;\n /** Chunk size in MB */\n chunkSizeMB: number;\n /** Optional report interval in milliseconds 
for progress updates */\n reportEveryMs?: number;\n /** Callback for progress updates */\n onProgress: (processed: number, total?: number) => void;\n};\n\n/**\n * Create a CSV writer (fast-csv formatter piped to a write stream) that writes\n * a header line first, and then accepts object rows. Returns a tiny API to\n * write rows with backpressure handling and to close the file cleanly.\n *\n * @param filePath - The path to the output CSV file\n * @param headers - The headers for the CSV file\n * @returns An object with `write` and `end` methods\n */\nfunction createCsvChunkWriter(\n filePath: string,\n headers: string[],\n): {\n /** Write a row object to the CSV file */\n write: (row: Record<string, unknown>) => Promise<void>;\n /** Close the CSV file, ensuring all data is flushed */\n end: () => Promise<void>;\n} {\n const ws = createWriteStream(filePath);\n const csv = fastcsv.format({ headers, writeHeaders: true, objectMode: true });\n // Pipe csv → file stream\n csv.pipe(ws);\n\n return {\n /**\n * Write a row object to the CSV file.\n *\n * @param row - The row data as an object\n */\n async write(row) {\n // Respect backpressure from fast-csv formatter\n const ok = csv.write(row);\n if (!ok) {\n await once(csv, 'drain');\n }\n },\n /**\n * Close the CSV file, ensuring all data is flushed.\n */\n async end() {\n // End formatter; wait for underlying file stream to finish flush/close\n const finished = Promise.all([once(ws, 'finish')]);\n csv.end();\n await finished;\n },\n };\n}\n\n/**\n * Zero-pad chunk numbers to four digits (e.g., 1 → \"0001\").\n *\n * @param n - The chunk number to pad\n * @returns The padded chunk number as a string\n */\nfunction pad4(n: number): string {\n return String(n).padStart(4, '0');\n}\n\n/**\n * Approximate row size in bytes using comma-joined field values.\n *\n * @param obj - The row object to estimate size for\n * @returns Approximate byte size of the row when serialized as CSV\n */\nfunction approxRowBytes(obj: Record<string, unknown>): number {\n // naive but fast; adequate for chunk rollover thresholding\n return Buffer.byteLength(\n Object.values(obj)\n .map((v) => (v == null ? 
'' : String(v)))\n .join(','),\n 'utf8',\n );\n}\n\n/**\n * Stream a single CSV file and write chunk files of roughly chunkSizeMB.\n * - Writes header to each chunk.\n * - Logs periodic progress via onProgress.\n *\n * @param opts - Options for chunking the file\n * @returns Promise that resolves when done\n */\nexport async function chunkOneCsvFile(opts: ChunkOpts): Promise<void> {\n const {\n filePath,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n onProgress,\n reportEveryMs = 500,\n } = opts;\n const { size: fileBytes } = await stat(filePath); // total bytes on disk\n let lastTick = 0;\n\n logger.info(colors.magenta(`Chunking ${filePath} into ~${chunkSizeMB}MB files...`));\n\n const chunkSizeBytes = Math.floor(chunkSizeMB * 1024 * 1024);\n const baseName = basename(filePath, '.csv');\n const outDir = outputDir || dirname(filePath);\n logger.info(colors.magenta(`Output directory: ${outDir}`));\n await mkdir(outDir, { recursive: true });\n\n // Clear previous chunk files for this base\n if (clearOutputDir) {\n logger.warn(colors.yellow(`Clearing output directory: ${outDir}`));\n const files = await readdir(outDir);\n await Promise.all(\n files\n .filter((f) => f.startsWith(`${baseName}_chunk_`) && f.endsWith('.csv'))\n .map((f) => unlink(join(outDir, f))),\n );\n }\n\n let headerRow: string[] | null = null;\n let expectedCols: number | null = null;\n let totalLines = 0;\n let currentChunk = 1;\n let currentSize = 0;\n\n const parser = new Parser({\n columns: false,\n skip_empty_lines: true,\n });\n\n // running sample to estimate avg row bytes\n let sampleBytes = 0;\n let sampleRows = 0;\n\n const emit = (): void => {\n const avg = sampleRows > 0 ? sampleBytes / sampleRows : 0;\n const estTotal = avg > 0 ? Math.max(totalLines, Math.ceil(fileBytes / avg)) : undefined;\n onProgress(totalLines, estTotal); // <-- now has total\n lastTick = Date.now();\n };\n\n // seed an initial 0/N as soon as we start\n emit();\n\n // Current active chunk writer; created after we know headers\n let writer: {\n /** Write a row object to the current chunk file */\n write: (row: Record<string, unknown>) => Promise<void>;\n /** Close the current chunk file */\n end: () => Promise<void>;\n } | null = null;\n\n // Returns current chunk file path — chunk number is always 4-digit padded\n const currentChunkPath = (): string =>\n join(outDir, `${baseName}_chunk_${pad4(currentChunk)}.csv`);\n\n const t = new Transform({\n objectMode: true,\n /**\n * Transform each row of the CSV file into a chunk.\n *\n * @param row - The current row being processed\n * @param _enc - Encoding (not used)\n * @param cb - Callback to signal completion or error\n */\n async transform(row: string[], _enc, cb) {\n try {\n // First row is the header\n if (!headerRow) {\n headerRow = row.slice(0);\n expectedCols = headerRow.length;\n\n // Open first chunk with header asynchronously\n writer = createCsvChunkWriter(currentChunkPath(), headerRow);\n cb();\n return;\n }\n\n // sanity check rows (non-fatal)\n if (expectedCols !== null && row.length !== expectedCols) {\n // optionally log a warning or collect metrics\n logger.warn(colors.yellow(`Row has ${row.length} cols; expected ${expectedCols}`));\n }\n\n totalLines += 1;\n if (totalLines % 250_000 === 0) {\n onProgress(totalLines);\n }\n\n // Build row object using the original header\n const obj = Object.fromEntries(headerRow!.map((h, i) => [h, row[i]]));\n\n // Determine the row size up-front\n const rowBytes = approxRowBytes(obj);\n sampleBytes += rowBytes;\n sampleRows += 1;\n\n // 
time-based throttle for UI updates\n if (Date.now() - lastTick >= reportEveryMs) emit();\n\n // If adding this row would exceed the threshold, roll first,\n // so this row becomes the first row in the next chunk.\n if (writer && currentSize > 0 && currentSize + rowBytes > chunkSizeBytes) {\n await writer.end();\n currentChunk += 1;\n currentSize = 0;\n logger.info(\n colors.green(\n `Rolling to chunk ${currentChunk} after ${totalLines.toLocaleString()} rows.`,\n ),\n );\n writer = createCsvChunkWriter(currentChunkPath(), headerRow!);\n }\n\n // Ensure writer exists (should after header)\n if (!writer) {\n writer = createCsvChunkWriter(currentChunkPath(), headerRow!);\n }\n\n // Write row and update approximate size\n await writer.write(obj);\n currentSize += rowBytes;\n\n cb();\n } catch (e) {\n cb(e as Error);\n }\n },\n\n // Ensure final file is closed\n /**\n * Flush is called when the readable has ended; we close any open writer.\n *\n * @param cb - Callback to signal completion or error\n */\n async flush(cb) {\n try {\n if (writer) {\n await writer.end();\n writer = null;\n }\n emit(); // Final progress tick\n cb();\n } catch (e) {\n cb(e as Error);\n }\n },\n });\n\n const rs = createReadStream(filePath);\n await pipeline(rs, parser, t);\n\n // Final progress tick\n onProgress(totalLines);\n logger.info(\n colors.green(\n `Chunked ${filePath} into ${currentChunk} file(s); processed ${totalLines.toLocaleString()} rows.`,\n ),\n );\n}\n","import { chunkOneCsvFile } from '../../../lib/helpers/chunkOneCsvFile.js';\nimport { extractErrorMessage } from '../../../lib/helpers/index.js';\nimport type { ToWorker } from '../../../lib/pooling/index.js';\nimport { logger } from '../../../logger.js';\n\n/**\n * A unit of work: instructs a worker to chunk a single CSV file.\n */\nexport type ChunkTask = {\n /** Absolute path of the CSV file to chunk. */\n filePath: string;\n /** Options controlling output and chunk size. */\n options: {\n /** Optional directory where chunked output files should be written. */\n outputDir?: string;\n /** Whether to clear any pre-existing output chunks before writing new ones. */\n clearOutputDir: boolean;\n /** Approximate target chunk size in MB (well under Node’s string size limits). */\n chunkSizeMB: number;\n };\n};\n\n/**\n * Per-worker progress snapshot for the chunk-csv command.\n */\nexport type ChunkProgress = {\n /** File being processed by the worker. */\n filePath: string;\n /** Number of rows processed so far. */\n processed: number;\n /** Optional total rows in the file (not always known). */\n total?: number;\n};\n\n/**\n * Worker result message once a file has finished processing.\n */\nexport type ChunkResult = {\n /** Whether the file completed successfully. */\n ok: boolean;\n /** File path for which this result applies. */\n filePath: string;\n /** Optional error message if the file failed to chunk. 
*/\n error?: string;\n};\n\n/**\n * Worker entrypoint.\n *\n * Lifecycle:\n * 1) Announce readiness to the parent via `{ type: 'ready' }`.\n * 2) Wait for `{ type: 'task' }` messages; for each, call `chunkOneCsvFile(...)`.\n * - While chunking, forward progress to the parent via `{ type: 'progress' }`.\n * - On completion, send `{ type: 'result', ok: true }`.\n * - On error, send `{ type: 'result', ok: false, error }` and exit(1).\n * 3) On `{ type: 'shutdown' }`, exit(0) gracefully.\n *\n * Notes:\n * - This process is typically spawned by a pool manager that assigns file paths to workers.\n * - The long-lived promise at the end keeps the worker alive between tasks until the parent\n * sends an explicit shutdown.\n */\nexport async function runChild(): Promise<void> {\n const workerId = Number(process.env.WORKER_ID || '0');\n logger.info(`[w${workerId}] ready pid=${process.pid}`);\n\n // Notify the parent that the worker is ready to receive tasks.\n process.send?.({ type: 'ready' });\n\n // Main message loop: receive tasks and shutdown requests from the parent.\n process.on('message', async (msg: ToWorker<ChunkTask>) => {\n if (!msg || typeof msg !== 'object') return;\n\n // Graceful shutdown: let the parent control lifecycle.\n if (msg.type === 'shutdown') {\n process.exit(0);\n }\n\n // Only handle task messages here.\n if (msg.type !== 'task') return;\n\n const { filePath, options } = msg.payload;\n const { outputDir, clearOutputDir, chunkSizeMB } = options;\n\n try {\n // Stream the input CSV and write chunk files asynchronously.\n await chunkOneCsvFile({\n filePath,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n // Propagate incremental progress to the parent.\n onProgress: (processed, total) =>\n process.send?.({\n type: 'progress',\n payload: { filePath, processed, total },\n }),\n });\n\n // Report success to the parent.\n process.send?.({\n type: 'result',\n payload: { ok: true, filePath },\n });\n } catch (err) {\n // Log locally and report failure upstream; exit the worker with error code.\n const message = extractErrorMessage(err);\n logger.error(`[w${workerId}] ERROR ${filePath}: ${message}`);\n process.send?.({\n type: 'result',\n payload: { ok: false, filePath, error: message },\n });\n }\n });\n\n // keep alive\n await new Promise<never>(() => {\n // This promise never resolves, keeping the worker alive indefinitely\n // until the parent process instructs shutdown.\n });\n}\n","import colors from 'colors';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { collectCsvFilesOrExit } from '../../../lib/helpers/collectCsvFilesOrExit.js';\nimport {\n computePoolSize,\n createExtraKeyHandler,\n CHILD_FLAG,\n type PoolHooks,\n runPool,\n dashboardPlugin,\n} from '../../../lib/pooling/index.js';\nimport { logger } from '../../../logger.js';\nimport { chunkCsvPlugin } from './ui/index.js';\nimport { runChild, type ChunkProgress, type ChunkResult, type ChunkTask } from './worker.js';\n\n/**\n * Returns the current module's path so the worker pool knows what file to re-exec.\n * In Node ESM, __filename is undefined, so we fall back to argv[1].\n *\n * @returns The current module's path as a string\n */\nfunction getCurrentModulePath(): string {\n if (typeof __filename !== 'undefined') {\n return __filename as unknown as string;\n }\n return process.argv[1];\n}\n\n/**\n * Totals aggregate for this command.\n * We don’t need custom counters since the runner already tracks\n * 
completed/failed counts in its header — so we just use an empty record.\n */\ntype Totals = Record<string, never>;\n\n/**\n * CLI flags accepted by the `chunk-csv` command.\n *\n * These are passed down from the CLI parser into the parent process.\n */\nexport type ChunkCsvCommandFlags = {\n directory: string;\n outputDir?: string;\n clearOutputDir: boolean;\n chunkSizeMB: number;\n concurrency?: number;\n viewerMode: boolean;\n};\n\n/**\n * Parent entrypoint for chunking many CSVs in parallel using the worker pool runner.\n *\n * Lifecycle:\n * 1) Discover CSV inputs (exit if none).\n * 2) Compute pool size (CPU-count heuristic or --concurrency).\n * 3) Build a FIFO queue of `ChunkTask`s.\n * 4) Define pool hooks to drive task assignment, progress, and result handling.\n * 5) Launch the pool with `runPool`, rendering via the `chunkCsvPlugin`.\n *\n * @param this - Bound CLI context (provides process exit + logging).\n * @param flags - CLI options for the run.\n */\nexport async function chunkCsv(this: LocalContext, flags: ChunkCsvCommandFlags): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const { directory, outputDir, clearOutputDir, chunkSizeMB, concurrency, viewerMode } = flags;\n\n /* 1) Discover CSV inputs */\n const files = collectCsvFilesOrExit(directory, this);\n\n /* 2) Size the pool */\n const { poolSize, cpuCount } = computePoolSize(concurrency, files.length);\n\n logger.info(\n colors.green(\n `Chunking ${files.length} CSV file(s) with pool size ${poolSize} (CPU=${cpuCount})`,\n ),\n );\n\n /* 3) Prepare a simple FIFO queue of tasks (one per file). */\n const queue = files.map<ChunkTask>((filePath) => ({\n filePath,\n options: { outputDir, clearOutputDir, chunkSizeMB },\n }));\n\n /* 4) Define pool hooks to adapt runner to this command. */\n const hooks: PoolHooks<ChunkTask, ChunkProgress, ChunkResult, Totals> = {\n nextTask: () => queue.shift(),\n taskLabel: (t) => t.filePath,\n initTotals: () => ({}) as Totals,\n initSlotProgress: () => undefined,\n onProgress: (totals) => totals,\n onResult: (totals, res) => ({ totals, ok: !!res.ok }),\n // postProcess receives log context when viewerMode=true — we don’t need it here.\n postProcess: async () => {\n // nothing extra for chunk-csv\n },\n };\n\n /* 5) Launch the pool runner with our hooks and custom dashboard plugin. 
*/\n await runPool({\n title: `Chunk CSV - ${directory}`,\n baseDir: directory || outputDir || process.cwd(),\n childFlag: CHILD_FLAG,\n childModulePath: getCurrentModulePath(),\n poolSize,\n cpuCount,\n filesTotal: files.length,\n hooks,\n viewerMode,\n render: (input) => dashboardPlugin(input, chunkCsvPlugin, viewerMode),\n extraKeyHandler: ({ logsBySlot, repaint, setPaused }) =>\n createExtraKeyHandler({\n logsBySlot,\n repaint,\n setPaused,\n }),\n });\n}\n\n/* -------------------------------------------------------------------------------------------------\n * If invoked directly as a child process, enter worker loop\n * ------------------------------------------------------------------------------------------------- */\nif (process.argv.includes(CHILD_FLAG)) {\n runChild().catch((err) => {\n logger.error(err);\n process.exit(1);\n });\n}\n"],"mappings":"sqBAgBA,SAAgB,EACd,EACA,EACU,CACL,IACH,EAAO,MAAM,EAAO,IAAI,kCAAkC,CAAC,CAC3D,EAAa,QAAQ,KAAK,EAAE,EAG9B,IAAI,EAAkB,EAAE,CACxB,GAAI,CAEF,EADgB,EAAY,EAAU,CAEnC,OAAQ,GAAM,EAAE,SAAS,OAAO,CAAC,CACjC,IAAK,GAAM,EAAK,EAAW,EAAE,CAAC,CAC9B,OAAQ,GAAM,CACb,GAAI,CACF,OAAO,EAAS,EAAE,CAAC,QAAQ,MACrB,CACN,MAAO,KAET,OACG,EAAK,CACZ,EAAO,MAAM,EAAO,IAAI,6BAA6B,IAAY,CAAC,CAClE,EAAO,MAAM,EAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,EAAa,QAAQ,KAAK,EAAE,CAQ9B,OALI,EAAM,SAAW,IACnB,EAAO,MAAM,EAAO,IAAI,oCAAoC,IAAY,CAAC,CACzE,EAAa,QAAQ,KAAK,EAAE,EAE9B,EAAO,KAAK,EAAO,MAAM,SAAS,EAAM,OAAO,gBAAgB,IAAY,CAAC,CACrE,ECnCT,SAAS,EAAsB,EAAsD,CAEnF,OAAO,EAAW,EAAI,CASxB,SAAS,EAAuB,EAAsD,CACpF,OAAO,EAAe,EAAI,CAG5B,MAAa,EAA8D,CACzE,eACA,gBAED,CCOD,SAAS,EACP,EACA,EAMA,CACA,IAAM,EAAK,EAAkB,EAAS,CAChC,EAAM,EAAQ,OAAO,CAAE,UAAS,aAAc,GAAM,WAAY,GAAM,CAAC,CAI7E,OAFA,EAAI,KAAK,EAAG,CAEL,CAML,MAAM,MAAM,EAAK,CAEJ,EAAI,MAAM,EAAI,EAEvB,MAAM,EAAK,EAAK,QAAQ,EAM5B,MAAM,KAAM,CAEV,IAAM,EAAW,QAAQ,IAAI,CAAC,EAAK,EAAI,SAAS,CAAC,CAAC,CAClD,EAAI,KAAK,CACT,MAAM,GAET,CASH,SAAS,EAAK,EAAmB,CAC/B,OAAO,OAAO,EAAE,CAAC,SAAS,EAAG,IAAI,CASnC,SAAS,EAAe,EAAsC,CAE5D,OAAO,OAAO,WACZ,OAAO,OAAO,EAAI,CACf,IAAK,GAAO,GAAK,KAAO,GAAK,OAAO,EAAE,CAAE,CACxC,KAAK,IAAI,CACZ,OACD,CAWH,eAAsB,EAAgB,EAAgC,CACpE,GAAM,CACJ,WACA,YACA,iBACA,cACA,aACA,gBAAgB,KACd,EACE,CAAE,KAAM,GAAc,MAAM,EAAK,EAAS,CAC5C,EAAW,EAEf,EAAO,KAAK,EAAO,QAAQ,YAAY,EAAS,SAAS,EAAY,aAAa,CAAC,CAEnF,IAAM,EAAiB,KAAK,MAAM,EAAc,KAAO,KAAK,CACtD,EAAW,EAAS,EAAU,OAAO,CACrC,EAAS,GAAa,EAAQ,EAAS,CAK7C,GAJA,EAAO,KAAK,EAAO,QAAQ,qBAAqB,IAAS,CAAC,CAC1D,MAAM,EAAM,EAAQ,CAAE,UAAW,GAAM,CAAC,CAGpC,EAAgB,CAClB,EAAO,KAAK,EAAO,OAAO,8BAA8B,IAAS,CAAC,CAClE,IAAM,EAAQ,MAAM,EAAQ,EAAO,CACnC,MAAM,QAAQ,IACZ,EACG,OAAQ,GAAM,EAAE,WAAW,GAAG,EAAS,SAAS,EAAI,EAAE,SAAS,OAAO,CAAC,CACvE,IAAK,GAAM,EAAO,EAAK,EAAQ,EAAE,CAAC,CAAC,CACvC,CAGH,IAAI,EAA6B,KAC7B,EAA8B,KAC9B,EAAa,EACb,EAAe,EACf,EAAc,EAEZ,EAAS,IAAI,EAAO,CACxB,QAAS,GACT,iBAAkB,GACnB,CAAC,CAGE,EAAc,EACd,EAAa,EAEX,MAAmB,CACvB,IAAM,EAAM,EAAa,EAAI,EAAc,EAAa,EAClD,EAAW,EAAM,EAAI,KAAK,IAAI,EAAY,KAAK,KAAK,EAAY,EAAI,CAAC,CAAG,IAAA,GAC9E,EAAW,EAAY,EAAS,CAChC,EAAW,KAAK,KAAK,EAIvB,GAAM,CAGN,IAAI,EAKO,KAGL,MACJ,EAAK,EAAQ,GAAG,EAAS,SAAS,EAAK,EAAa,CAAC,MAAM,CAEvD,EAAI,IAAI,EAAU,CACtB,WAAY,GAQZ,MAAM,UAAU,EAAe,EAAM,EAAI,CACvC,GAAI,CAEF,GAAI,CAAC,EAAW,CACd,EAAY,EAAI,MAAM,EAAE,CACxB,EAAe,EAAU,OAGzB,EAAS,EAAqB,GAAkB,CAAE,EAAU,CAC5D,GAAI,CACJ,OAIE,IAAiB,MAAQ,EAAI,SAAW,GAE1C,EAAO,KAAK,EAAO,OAAO,WAAW,EAAI,OAAO,kBAAkB,IAAe,CAAC,CAGpF,GAAc,EACV,EAAa,MAAY,GAC3B,EAAW,EAAW,CAIxB,IAAM,EAAM,OAAO,YAAY,EAAW,KAAK,EAAG,IAAM,CAAC,EAAG,EAAI,GAAG,CAAC,CAAC,CAG/D,EAAW,EAAe,EAAI,CACpC,GAAe,EACf,GAAc,EAGV,KAAK,KAAK,CAAG,GAAY,GAAe,GAAM,CAI9C,GAAU,EAAc,GAAK,EAAc,EAAW,IACxD,MAAM,EAAO,KAAK,CAClB,GAAgB,EAChB,
EAAc,EACd,EAAO,KACL,EAAO,MACL,oBAAoB,EAAa,SAAS,EAAW,gBAAgB,CAAC,QACvE,CACF,CACD,EAAS,EAAqB,GAAkB,CAAE,EAAW,EAI/D,AACE,IAAS,EAAqB,GAAkB,CAAE,EAAW,CAI/D,MAAM,EAAO,MAAM,EAAI,CACvB,GAAe,EAEf,GAAI,OACG,EAAG,CACV,EAAG,EAAW,GAUlB,MAAM,MAAM,EAAI,CACd,GAAI,CACF,AAEE,KADA,MAAM,EAAO,KAAK,CACT,MAEX,GAAM,CACN,GAAI,OACG,EAAG,CACV,EAAG,EAAW,GAGnB,CAAC,CAGF,MAAM,EADK,EAAiB,EAAS,CAClB,EAAQ,EAAE,CAG7B,EAAW,EAAW,CACtB,EAAO,KACL,EAAO,MACL,WAAW,EAAS,QAAQ,EAAa,sBAAsB,EAAW,gBAAgB,CAAC,QAC5F,CACF,CC7NH,eAAsB,GAA0B,CAC9C,IAAM,EAAW,OAAO,QAAQ,IAAI,WAAa,IAAI,CACrD,EAAO,KAAK,KAAK,EAAS,cAAc,QAAQ,MAAM,CAGtD,QAAQ,OAAO,CAAE,KAAM,QAAS,CAAC,CAGjC,QAAQ,GAAG,UAAW,KAAO,IAA6B,CASxD,GARI,CAAC,GAAO,OAAO,GAAQ,WAGvB,EAAI,OAAS,YACf,QAAQ,KAAK,EAAE,CAIb,EAAI,OAAS,QAAQ,OAEzB,GAAM,CAAE,WAAU,WAAY,EAAI,QAC5B,CAAE,YAAW,iBAAgB,eAAgB,EAEnD,GAAI,CAEF,MAAM,EAAgB,CACpB,WACA,YACA,iBACA,cAEA,YAAa,EAAW,IACtB,QAAQ,OAAO,CACb,KAAM,WACN,QAAS,CAAE,WAAU,YAAW,QAAO,CACxC,CAAC,CACL,CAAC,CAGF,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAM,WAAU,CAChC,CAAC,OACK,EAAK,CAEZ,IAAM,EAAU,EAAoB,EAAI,CACxC,EAAO,MAAM,KAAK,EAAS,UAAU,EAAS,IAAI,IAAU,CAC5D,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAO,WAAU,MAAO,EAAS,CACjD,CAAC,GAEJ,CAGF,MAAM,IAAI,YAAqB,GAG7B,CChGJ,SAAS,GAA+B,CAItC,OAHI,OAAO,WAAe,IACjB,WAEF,QAAQ,KAAK,GAqCtB,eAAsB,EAA6B,EAA4C,CAC7F,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CAAE,YAAW,YAAW,iBAAgB,cAAa,cAAa,cAAe,EAGjF,EAAQ,EAAsB,EAAW,KAAK,CAG9C,CAAE,WAAU,YAAa,EAAgB,EAAa,EAAM,OAAO,CAEzE,EAAO,KACL,EAAO,MACL,YAAY,EAAM,OAAO,8BAA8B,EAAS,QAAQ,EAAS,GAClF,CACF,CAGD,IAAM,EAAQ,EAAM,IAAgB,IAAc,CAChD,WACA,QAAS,CAAE,YAAW,iBAAgB,cAAa,CACpD,EAAE,CAiBH,MAAM,EAAQ,CACZ,MAAO,eAAe,IACtB,QAAS,GAAa,GAAa,QAAQ,KAAK,CAChD,UAAW,EACX,gBAAiB,GAAsB,CACvC,WACA,WACA,WAAY,EAAM,OAClB,MAtBsE,CACtE,aAAgB,EAAM,OAAO,CAC7B,UAAY,GAAM,EAAE,SACpB,gBAAmB,EAAE,EACrB,qBAAwB,IAAA,GACxB,WAAa,GAAW,EACxB,UAAW,EAAQ,KAAS,CAAE,SAAQ,GAAI,CAAC,CAAC,EAAI,GAAI,EAEpD,YAAa,SAAY,GAG1B,CAYC,aACA,OAAS,GAAU,EAAgB,EAAO,EAAgB,EAAW,CACrE,iBAAkB,CAAE,aAAY,UAAS,eACvC,EAAsB,CACpB,aACA,UACA,YACD,CAAC,CACL,CAAC,CAMA,QAAQ,KAAK,SAAA,aAAoB,EACnC,GAAU,CAAC,MAAO,GAAQ,CACxB,EAAO,MAAM,EAAI,CACjB,QAAQ,KAAK,EAAE,EACf"}
package/dist/impl-CqXFyvgV2.mjs
@@ -0,0 +1,2 @@
+ import{t as e}from"./removeUnverifiedRequestIdentifiers-ChlwRmhd.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({auth:n,transcendUrl:r,identifierNames:i,actions:a=[]}){t(this.process.exit),await e({requestActions:a,transcendUrl:r,auth:n,identifierNames:i})}export{n as rejectUnverifiedIdentifiers};
+ //# sourceMappingURL=impl-CqXFyvgV2.mjs.map
package/dist/impl-CqXFyvgV2.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"impl-CqXFyvgV2.mjs","names":[],"sources":["../src/commands/request/reject-unverified-identifiers/impl.ts"],"sourcesContent":["import type { RequestAction } from '@transcend-io/privacy-types';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { removeUnverifiedRequestIdentifiers } from '../../../lib/requests/index.js';\n\nexport interface RejectUnverifiedIdentifiersCommandFlags {\n auth: string;\n identifierNames: string[];\n actions?: RequestAction[];\n transcendUrl: string;\n}\n\nexport async function rejectUnverifiedIdentifiers(\n this: LocalContext,\n { auth, transcendUrl, identifierNames, actions = [] }: RejectUnverifiedIdentifiersCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await removeUnverifiedRequestIdentifiers({\n requestActions: actions,\n transcendUrl,\n auth,\n identifierNames,\n });\n}\n"],"mappings":"0HAaA,eAAsB,EAEpB,CAAE,OAAM,eAAc,kBAAiB,UAAU,EAAE,EACpC,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAM,EAAmC,CACvC,eAAgB,EAChB,eACA,OACA,kBACD,CAAC"}
package/dist/impl-CxLSJk2P.mjs
@@ -0,0 +1,2 @@
+ import{t as e}from"./logger-B-LXIf3U.mjs";import{a as t,r as n}from"./readTranscendYaml-D-J1ilS0.mjs";import{n as r}from"./fetchCatalogs-BM4FCbcS.mjs";import{r as i}from"./makeGraphQLRequest-Cq26A_Lq.mjs";import{t as a}from"./listFiles-qzyQMaYH.mjs";import{t as o}from"./done-input-validation-DLR0-MJ7.mjs";import{t as s}from"./dataFlowsToDataSilos-NhvBw1iy.mjs";import{existsSync as c,lstatSync as l}from"node:fs";import{join as u}from"node:path";import d from"colors";async function f({auth:f,dataFlowsYmlFolder:p,dataSilosYmlFolder:m,ignoreYmls:h=[],transcendUrl:g}){o(this.process.exit),(!c(p)||!l(p).isDirectory())&&(e.error(d.red(`Folder does not exist: "${p}"`)),this.process.exit(1)),(!c(m)||!l(m).isDirectory())&&(e.error(d.red(`Folder does not exist: "${m}"`)),this.process.exit(1));let{serviceToTitle:_,serviceToSupportedIntegration:v}=await r(i(g,f));a(p).forEach(r=>{let{"data-flows":i=[]}=n(u(p,r)),{adTechDataSilos:a,siteTechDataSilos:o}=s(i,{serviceToSupportedIntegration:v,serviceToTitle:_}),c=[...a,...o];e.log(`Total Services: ${c.length}`),e.log(`Ad Tech Services: ${a.length}`),e.log(`Site Tech Services: ${o.length}`),t(u(m,r),{"data-silos":h.includes(r)?[]:c})})}export{f as deriveDataSilosFromDataFlows};
+ //# sourceMappingURL=impl-CxLSJk2P.mjs.map
package/dist/impl-CxLSJk2P.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"impl-CxLSJk2P.mjs","names":[],"sources":["../src/commands/inventory/derive-data-silos-from-data-flows/impl.ts"],"sourcesContent":["import { existsSync, lstatSync } from 'node:fs';\nimport { join } from 'node:path';\n\nimport colors from 'colors';\n\nimport { DataFlowInput } from '../../../codecs.js';\nimport type { LocalContext } from '../../../context.js';\nimport { listFiles } from '../../../lib/api-keys/index.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { dataFlowsToDataSilos } from '../../../lib/consent-manager/dataFlowsToDataSilos.js';\nimport { fetchAndIndexCatalogs, buildTranscendGraphQLClient } from '../../../lib/graphql/index.js';\nimport { readTranscendYaml, writeTranscendYaml } from '../../../lib/readTranscendYaml.js';\nimport { logger } from '../../../logger.js';\n\nexport interface DeriveDataSilosFromDataFlowsCommandFlags {\n auth: string;\n dataFlowsYmlFolder: string;\n dataSilosYmlFolder: string;\n ignoreYmls?: string[];\n transcendUrl: string;\n}\n\nexport async function deriveDataSilosFromDataFlows(\n this: LocalContext,\n {\n auth,\n dataFlowsYmlFolder,\n dataSilosYmlFolder,\n ignoreYmls = [],\n transcendUrl,\n }: DeriveDataSilosFromDataFlowsCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Ensure folder is passed\n if (!existsSync(dataFlowsYmlFolder) || !lstatSync(dataFlowsYmlFolder).isDirectory()) {\n logger.error(colors.red(`Folder does not exist: \"${dataFlowsYmlFolder}\"`));\n this.process.exit(1);\n }\n\n // Ensure folder is passed\n if (!existsSync(dataSilosYmlFolder) || !lstatSync(dataSilosYmlFolder).isDirectory()) {\n logger.error(colors.red(`Folder does not exist: \"${dataSilosYmlFolder}\"`));\n this.process.exit(1);\n }\n\n // Fetch all integrations in the catalog\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const { serviceToTitle, serviceToSupportedIntegration } = await fetchAndIndexCatalogs(client);\n\n // List of each data flow yml file\n listFiles(dataFlowsYmlFolder).forEach((directory) => {\n // read in the data flows for a specific instance\n const { 'data-flows': dataFlows = [] } = readTranscendYaml(join(dataFlowsYmlFolder, directory));\n\n // map the data flows to data silos\n const { adTechDataSilos, siteTechDataSilos } = dataFlowsToDataSilos(\n dataFlows as DataFlowInput[],\n {\n serviceToSupportedIntegration,\n serviceToTitle,\n },\n );\n\n // combine and write to yml file\n const dataSilos = [...adTechDataSilos, ...siteTechDataSilos];\n logger.log(`Total Services: ${dataSilos.length}`);\n logger.log(`Ad Tech Services: ${adTechDataSilos.length}`);\n logger.log(`Site Tech Services: ${siteTechDataSilos.length}`);\n writeTranscendYaml(join(dataSilosYmlFolder, directory), {\n 'data-silos': ignoreYmls.includes(directory) ? 
[] : dataSilos,\n });\n });\n}\n"],"mappings":"sdAsBA,eAAsB,EAEpB,CACE,OACA,qBACA,qBACA,aAAa,EAAE,CACf,gBAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,EAGlC,CAAC,EAAW,EAAmB,EAAI,CAAC,EAAU,EAAmB,CAAC,aAAa,IACjF,EAAO,MAAM,EAAO,IAAI,2BAA2B,EAAmB,GAAG,CAAC,CAC1E,KAAK,QAAQ,KAAK,EAAE,GAIlB,CAAC,EAAW,EAAmB,EAAI,CAAC,EAAU,EAAmB,CAAC,aAAa,IACjF,EAAO,MAAM,EAAO,IAAI,2BAA2B,EAAmB,GAAG,CAAC,CAC1E,KAAK,QAAQ,KAAK,EAAE,EAKtB,GAAM,CAAE,iBAAgB,iCAAkC,MAAM,EADjD,EAA4B,EAAc,EAAK,CAC+B,CAG7F,EAAU,EAAmB,CAAC,QAAS,GAAc,CAEnD,GAAM,CAAE,aAAc,EAAY,EAAE,EAAK,EAAkB,EAAK,EAAoB,EAAU,CAAC,CAGzF,CAAE,kBAAiB,qBAAsB,EAC7C,EACA,CACE,gCACA,iBACD,CACF,CAGK,EAAY,CAAC,GAAG,EAAiB,GAAG,EAAkB,CAC5D,EAAO,IAAI,mBAAmB,EAAU,SAAS,CACjD,EAAO,IAAI,qBAAqB,EAAgB,SAAS,CACzD,EAAO,IAAI,uBAAuB,EAAkB,SAAS,CAC7D,EAAmB,EAAK,EAAoB,EAAU,CAAE,CACtD,aAAc,EAAW,SAAS,EAAU,CAAG,EAAE,CAAG,EACrD,CAAC,EACF"}
package/dist/impl-CzU9WTiW.mjs
@@ -0,0 +1,2 @@
+ import{t as e}from"./logger-B-LXIf3U.mjs";import{t}from"./createPreferenceAccessTokens-DqmFctn3.mjs";import{t as n}from"./readCsv-CyOL7eCc.mjs";import{r}from"./makeGraphQLRequest-Cq26A_Lq.mjs";import{a as i}from"./writeCsv-B51ulrVl.mjs";import{t as a}from"./done-input-validation-DLR0-MJ7.mjs";import{SombraStandardScope as o}from"@transcend-io/privacy-types";import{existsSync as s}from"node:fs";import c from"colors";import*as l from"io-ts";import u from"cli-progress";async function d({auth:d,file:f,transcendUrl:p,duration:m,subjectType:h,emailColumnName:g,coreIdentifierColumnName:_}){a(this.process.exit),s(f)||(e.error(c.red(`File does not exist: "${f}". Please provide a valid path to a CSV file.`)),this.process.exit(1));try{let a=r(p,d),s=n(f,l.type({[g]:l.string,..._?{[_]:l.string}:{}}));if(!s.length)throw Error(`Input CSV is empty.`);let v=s.map((e,t)=>[e,t]).filter(([e])=>!e[g]?.trim());if(v.length){let e=v.map(([,e])=>e+2).join(`, `);throw Error(`The following rows are missing the required "${g}" column: ${e}`)}if(_){let e=s.map((e,t)=>[e,t]).filter(([e])=>!e[_]?.trim());if(e.length){let t=e.map(([,e])=>e+2).join(`, `);throw Error(`The following rows are missing the required "${_}" column: ${t}`)}}let y=Math.max(1,Math.floor(m/1e3)),b=s.map((e,t)=>{let n=e[g].trim(),r=_?e[_]?.trim():void 0;return{subjectType:h,scopes:[o.PreferenceManagement],expiresIn:y,email:n,...r?{coreIdentifier:r}:{},index:t}}),x=new u.SingleBar({},u.Presets.shades_classic);x.start(b.length,0);let S=Date.now(),C=await t(a,b,e=>{x.update(e)});x.update(b.length),x.stop();let w=C.map(({accessToken:e,input:t})=>{if(typeof t.index!=`number`)throw Error(`Internal error: missing input index.`);return{...s[t.index],token:e}});e.info(c.magenta(`Writing access tokens to file "${f}"...`)),await i(f,w,!0);let T=Math.round((Date.now()-S)/1e3);e.info(c.green(`Successfully generated ${C.length} access tokens to "${f}" in ${T}s!`))}catch(t){e.error(c.red(`An error occurred while generating access tokens: ${t?.message||String(t)}`)),this.process.exit(1)}}export{d as generateAccessTokens};
+ //# sourceMappingURL=impl-CzU9WTiW.mjs.map
package/dist/impl-CzU9WTiW.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"impl-CzU9WTiW.mjs","names":[],"sources":["../src/commands/consent/generate-access-tokens/impl.ts"],"sourcesContent":["import { existsSync } from 'node:fs';\n\nimport { SombraStandardScope } from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport * as t from 'io-ts';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport {\n buildTranscendGraphQLClient,\n createPreferenceAccessTokens,\n type PreferenceAccessTokenInputWithIndex,\n} from '../../../lib/graphql/index.js';\nimport { writeCsv } from '../../../lib/helpers/index.js';\nimport { readCsv } from '../../../lib/requests/index.js';\nimport { logger } from '../../../logger.js';\n\n/**\n * CLI flags accepted by the `generate-access-tokens` command.\n *\n * These are passed down from the CLI parser into the parent process.\n */\nexport type GenerateAccessTokenCommandFlags = {\n auth: string;\n file: string;\n duration: number;\n transcendUrl: string;\n subjectType: string;\n emailColumnName: string;\n coreIdentifierColumnName?: string;\n};\n\n/**\n * Take in a CSV of user identifiers and generate access tokens for each user.\n *\n * Expected CSV columns:\n * - [emailColumnName] (required)\n * - [coreIdentifierColumnName] (optional)\n *\n * @param this - Bound CLI context (provides process exit + logging).\n * @param flags - CLI options for the run.\n */\nexport async function generateAccessTokens(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n duration,\n subjectType,\n emailColumnName,\n coreIdentifierColumnName,\n }: GenerateAccessTokenCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n if (!existsSync(file)) {\n logger.error(\n colors.red(`File does not exist: \"${file}\". Please provide a valid path to a CSV file.`),\n );\n this.process.exit(1);\n }\n\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Read + parse CSV\n const codec = t.type({\n [emailColumnName]: t.string,\n ...(coreIdentifierColumnName ? 
{ [coreIdentifierColumnName]: t.string } : {}),\n });\n const rows: Array<Record<string, string>> = readCsv(file, codec);\n if (!rows.length) {\n throw new Error('Input CSV is empty.');\n }\n\n // Ensure emails and core identifiers exist\n const missingEmail = rows\n .map((r, i) => [r, i] as const)\n .filter(([r]) => !r[emailColumnName]?.trim());\n if (missingEmail.length) {\n const rowNumbers = missingEmail\n .map(([, i]) => i + 2) // +2 to account for header row and 0-indexing\n .join(', ');\n throw new Error(\n `The following rows are missing the required \"${emailColumnName}\" column: ${rowNumbers}`,\n );\n }\n if (coreIdentifierColumnName) {\n const missingCoreId = rows\n .map((r, i) => [r, i] as const)\n .filter(([r]) => !r[coreIdentifierColumnName]?.trim());\n if (missingCoreId.length) {\n const rowNumbers = missingCoreId\n .map(([, i]) => i + 2) // +2 to account for header row and 0-indexing\n .join(', ');\n throw new Error(\n `The following rows are missing the required \"${coreIdentifierColumnName}\" column: ${rowNumbers}`,\n );\n }\n }\n\n // Duration provided by CLI is in ms; GraphQL expects seconds\n const expiresInSeconds = Math.max(1, Math.floor(duration / 1000));\n\n // Build inputs for GraphQL\n const inputs = rows.map((r, index): PreferenceAccessTokenInputWithIndex => {\n const email = r[emailColumnName].trim();\n const coreIdentifier = coreIdentifierColumnName\n ? r[coreIdentifierColumnName]?.trim()\n : undefined;\n const scopes = [SombraStandardScope.PreferenceManagement];\n return {\n subjectType,\n scopes,\n expiresIn: expiresInSeconds,\n email,\n ...(coreIdentifier ? { coreIdentifier } : {}),\n index,\n };\n });\n\n // Progress bar\n const progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);\n progressBar.start(inputs.length, 0);\n\n // Kick off token creation (batched internally)\n const t0 = Date.now();\n const results = await createPreferenceAccessTokens(client, inputs, (progress) => {\n progressBar.update(progress);\n });\n progressBar.update(inputs.length);\n progressBar.stop();\n\n // Prepare output CSV rows\n const outputRows = results.map(({ accessToken, input }) => {\n if (typeof input.index !== 'number') {\n throw new Error('Internal error: missing input index.');\n }\n return {\n ...rows[input.index],\n token: accessToken,\n };\n });\n\n logger.info(colors.magenta(`Writing access tokens to file \"${file}\"...`));\n await writeCsv(file, outputRows, true);\n\n const totalTimeSec = Math.round((Date.now() - t0) / 1000);\n logger.info(\n colors.green(\n `Successfully generated ${results.length} access tokens to \"${file}\" in ${totalTimeSec}s!`,\n ),\n );\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n } catch (err: any) {\n logger.error(\n colors.red(\n `An error occurred while generating access tokens: ${err?.message || String(err)}`,\n ),\n );\n this.process.exit(1);\n 
}\n}\n"],"mappings":"udA2CA,eAAsB,EAEpB,CACE,OACA,OACA,eACA,WACA,cACA,kBACA,4BAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CACjC,EAAW,EAAK,GACnB,EAAO,MACL,EAAO,IAAI,yBAAyB,EAAK,+CAA+C,CACzF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,GAAI,CAEF,IAAM,EAAS,EAA4B,EAAc,EAAK,CAOxD,EAAsC,EAAQ,EAJtC,EAAE,KAAK,EAClB,GAAkB,EAAE,OACrB,GAAI,EAA2B,EAAG,GAA2B,EAAE,OAAQ,CAAG,EAAE,CAC7E,CAAC,CAC8D,CAChE,GAAI,CAAC,EAAK,OACR,MAAU,MAAM,sBAAsB,CAIxC,IAAM,EAAe,EAClB,KAAK,EAAG,IAAM,CAAC,EAAG,EAAE,CAAU,CAC9B,QAAQ,CAAC,KAAO,CAAC,EAAE,IAAkB,MAAM,CAAC,CAC/C,GAAI,EAAa,OAAQ,CACvB,IAAM,EAAa,EAChB,KAAK,EAAG,KAAO,EAAI,EAAE,CACrB,KAAK,KAAK,CACb,MAAU,MACR,gDAAgD,EAAgB,YAAY,IAC7E,CAEH,GAAI,EAA0B,CAC5B,IAAM,EAAgB,EACnB,KAAK,EAAG,IAAM,CAAC,EAAG,EAAE,CAAU,CAC9B,QAAQ,CAAC,KAAO,CAAC,EAAE,IAA2B,MAAM,CAAC,CACxD,GAAI,EAAc,OAAQ,CACxB,IAAM,EAAa,EAChB,KAAK,EAAG,KAAO,EAAI,EAAE,CACrB,KAAK,KAAK,CACb,MAAU,MACR,gDAAgD,EAAyB,YAAY,IACtF,EAKL,IAAM,EAAmB,KAAK,IAAI,EAAG,KAAK,MAAM,EAAW,IAAK,CAAC,CAG3D,EAAS,EAAK,KAAK,EAAG,IAA+C,CACzE,IAAM,EAAQ,EAAE,GAAiB,MAAM,CACjC,EAAiB,EACnB,EAAE,IAA2B,MAAM,CACnC,IAAA,GAEJ,MAAO,CACL,cACA,OAHa,CAAC,EAAoB,qBAAqB,CAIvD,UAAW,EACX,QACA,GAAI,EAAiB,CAAE,iBAAgB,CAAG,EAAE,CAC5C,QACD,EACD,CAGI,EAAc,IAAI,EAAY,UAAU,EAAE,CAAE,EAAY,QAAQ,eAAe,CACrF,EAAY,MAAM,EAAO,OAAQ,EAAE,CAGnC,IAAM,EAAK,KAAK,KAAK,CACf,EAAU,MAAM,EAA6B,EAAQ,EAAS,GAAa,CAC/E,EAAY,OAAO,EAAS,EAC5B,CACF,EAAY,OAAO,EAAO,OAAO,CACjC,EAAY,MAAM,CAGlB,IAAM,EAAa,EAAQ,KAAK,CAAE,cAAa,WAAY,CACzD,GAAI,OAAO,EAAM,OAAU,SACzB,MAAU,MAAM,uCAAuC,CAEzD,MAAO,CACL,GAAG,EAAK,EAAM,OACd,MAAO,EACR,EACD,CAEF,EAAO,KAAK,EAAO,QAAQ,kCAAkC,EAAK,MAAM,CAAC,CACzE,MAAM,EAAS,EAAM,EAAY,GAAK,CAEtC,IAAM,EAAe,KAAK,OAAO,KAAK,KAAK,CAAG,GAAM,IAAK,CACzD,EAAO,KACL,EAAO,MACL,0BAA0B,EAAQ,OAAO,qBAAqB,EAAK,OAAO,EAAa,IACxF,CACF,OAEM,EAAU,CACjB,EAAO,MACL,EAAO,IACL,qDAAqD,GAAK,SAAW,OAAO,EAAI,GACjF,CACF,CACD,KAAK,QAAQ,KAAK,EAAE"}
@@ -0,0 +1,2 @@
+ import{t as e}from"./logger-B-LXIf3U.mjs";import{E as t}from"./codecs-BE3Wmoh8.mjs";import{a as n}from"./readTranscendYaml-D-J1ilS0.mjs";import{t as r}from"./done-input-validation-DLR0-MJ7.mjs";import{ConsentTrackerStatus as i,DataFlowScope as a}from"@transcend-io/privacy-types";import{decodeCodec as o}from"@transcend-io/type-utils";import{existsSync as s,readFileSync as c}from"node:fs";import l from"colors";import*as u from"io-ts";function d({file:d,output:f}){r(this.process.exit),s(d)||(e.error(l.red(`File does not exist: --file="${d}"`)),this.process.exit(1));let p=o(u.array(t),c(d,`utf-8`)),m=[],h=[];p.forEach(e=>{e.dataFlows.filter(({type:e})=>e!==a.CSP).forEach(e=>{m.push({value:e.value,type:e.type,status:i.Live,trackingPurposes:e.trackingPurposes})}),e.cookies.forEach(e=>{h.push({name:e.name,status:i.Live,trackingPurposes:e.trackingPurposes})})}),n(f,{"data-flows":m,cookies:h}),e.info(l.green(`Successfully wrote ${m.length} data flows and ${h.length} cookies to file "${f}"`))}export{d as consentManagerServiceJsonToYml};
+ //# sourceMappingURL=impl-D-cp0CYr.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"impl-D-cp0CYr.mjs","names":[],"sources":["../src/commands/inventory/consent-manager-service-json-to-yml/impl.ts"],"sourcesContent":["import { existsSync, readFileSync } from 'node:fs';\n\nimport { ConsentTrackerStatus, DataFlowScope } from '@transcend-io/privacy-types';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport colors from 'colors';\nimport * as t from 'io-ts';\n\nimport { ConsentManagerServiceMetadata, CookieInput, DataFlowInput } from '../../../codecs.js';\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { writeTranscendYaml } from '../../../lib/readTranscendYaml.js';\nimport { logger } from '../../../logger.js';\n\nexport interface ConsentManagerServiceJsonToYmlCommandFlags {\n file: string;\n output: string;\n}\n\nexport function consentManagerServiceJsonToYml(\n this: LocalContext,\n { file, output }: ConsentManagerServiceJsonToYmlCommandFlags,\n): void {\n doneInputValidation(this.process.exit);\n\n // Ensure files exist\n if (!existsSync(file)) {\n logger.error(colors.red(`File does not exist: --file=\"${file}\"`));\n this.process.exit(1);\n }\n\n // Read in each consent manager configuration\n const services = decodeCodec(t.array(ConsentManagerServiceMetadata), readFileSync(file, 'utf-8'));\n\n // Create data flows and cookie configurations\n const dataFlows: DataFlowInput[] = [];\n const cookies: CookieInput[] = [];\n services.forEach((service) => {\n service.dataFlows\n .filter(({ type }) => type !== DataFlowScope.CSP)\n .forEach((dataFlow) => {\n dataFlows.push({\n value: dataFlow.value,\n type: dataFlow.type,\n status: ConsentTrackerStatus.Live,\n trackingPurposes: dataFlow.trackingPurposes,\n });\n });\n\n service.cookies.forEach((cookie) => {\n cookies.push({\n name: cookie.name,\n status: ConsentTrackerStatus.Live,\n trackingPurposes: cookie.trackingPurposes,\n });\n });\n });\n\n // write to disk\n writeTranscendYaml(output, {\n 'data-flows': dataFlows,\n cookies,\n });\n\n logger.info(\n colors.green(\n `Successfully wrote ${dataFlows.length} data flows and ${cookies.length} cookies to file \"${output}\"`,\n ),\n );\n}\n"],"mappings":"obAkBA,SAAgB,EAEd,CAAE,OAAM,UACF,CACN,EAAoB,KAAK,QAAQ,KAAK,CAGjC,EAAW,EAAK,GACnB,EAAO,MAAM,EAAO,IAAI,gCAAgC,EAAK,GAAG,CAAC,CACjE,KAAK,QAAQ,KAAK,EAAE,EAItB,IAAM,EAAW,EAAY,EAAE,MAAM,EAA8B,CAAE,EAAa,EAAM,QAAQ,CAAC,CAG3F,EAA6B,EAAE,CAC/B,EAAyB,EAAE,CACjC,EAAS,QAAS,GAAY,CAC5B,EAAQ,UACL,QAAQ,CAAE,UAAW,IAAS,EAAc,IAAI,CAChD,QAAS,GAAa,CACrB,EAAU,KAAK,CACb,MAAO,EAAS,MAChB,KAAM,EAAS,KACf,OAAQ,EAAqB,KAC7B,iBAAkB,EAAS,iBAC5B,CAAC,EACF,CAEJ,EAAQ,QAAQ,QAAS,GAAW,CAClC,EAAQ,KAAK,CACX,KAAM,EAAO,KACb,OAAQ,EAAqB,KAC7B,iBAAkB,EAAO,iBAC1B,CAAC,EACF,EACF,CAGF,EAAmB,EAAQ,CACzB,aAAc,EACd,UACD,CAAC,CAEF,EAAO,KACL,EAAO,MACL,sBAAsB,EAAU,OAAO,kBAAkB,EAAQ,OAAO,oBAAoB,EAAO,GACpG,CACF"}
@@ -0,0 +1,2 @@
+ import{t as e}from"./bulkRetryEnrichers-B-Szmin-.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({auth:n,enricherId:r,actions:i,requestEnricherStatuses:a,requestIds:o,createdAtBefore:s,createdAtAfter:c,updatedAtBefore:l,updatedAtAfter:u,concurrency:d,transcendUrl:f}){t(this.process.exit),await e({auth:n,enricherId:r,requestActions:i,requestEnricherStatuses:a,requestIds:o,createdAtBefore:s?new Date(s):void 0,createdAtAfter:c?new Date(c):void 0,updatedAtBefore:l?new Date(l):void 0,updatedAtAfter:u?new Date(u):void 0,concurrency:d,transcendUrl:f})}export{n as enricherRestart};
+ //# sourceMappingURL=impl-D9NjIwEi2.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"impl-D9NjIwEi2.mjs","names":[],"sources":["../src/commands/request/enricher-restart/impl.ts"],"sourcesContent":["import type { RequestAction, RequestEnricherStatus } from '@transcend-io/privacy-types';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { bulkRetryEnrichers } from '../../../lib/requests/index.js';\n\nexport interface EnricherRestartCommandFlags {\n auth: string;\n enricherId: string;\n actions?: RequestAction[];\n requestEnricherStatuses?: RequestEnricherStatus[];\n transcendUrl: string;\n concurrency: number;\n requestIds?: string[];\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n updatedAtBefore?: Date;\n updatedAtAfter?: Date;\n}\n\nexport async function enricherRestart(\n this: LocalContext,\n {\n auth,\n enricherId,\n actions,\n requestEnricherStatuses,\n requestIds,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n concurrency,\n transcendUrl,\n }: EnricherRestartCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await bulkRetryEnrichers({\n auth,\n enricherId,\n requestActions: actions,\n requestEnricherStatuses,\n requestIds,\n createdAtBefore: createdAtBefore ? new Date(createdAtBefore) : undefined,\n createdAtAfter: createdAtAfter ? new Date(createdAtAfter) : undefined,\n updatedAtBefore: updatedAtBefore ? new Date(updatedAtBefore) : undefined,\n updatedAtAfter: updatedAtAfter ? new Date(updatedAtAfter) : undefined,\n concurrency,\n transcendUrl,\n });\n}\n"],"mappings":"0GAoBA,eAAsB,EAEpB,CACE,OACA,aACA,UACA,0BACA,aACA,kBACA,iBACA,kBACA,iBACA,cACA,gBAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAM,EAAmB,CACvB,OACA,aACA,eAAgB,EAChB,0BACA,aACA,gBAAiB,EAAkB,IAAI,KAAK,EAAgB,CAAG,IAAA,GAC/D,eAAgB,EAAiB,IAAI,KAAK,EAAe,CAAG,IAAA,GAC5D,gBAAiB,EAAkB,IAAI,KAAK,EAAgB,CAAG,IAAA,GAC/D,eAAgB,EAAiB,IAAI,KAAK,EAAe,CAAG,IAAA,GAC5D,cACA,eACD,CAAC"}
@@ -0,0 +1,2 @@
+ import{t as e}from"./logger-B-LXIf3U.mjs";import{t}from"./readCsv-CyOL7eCc.mjs";import{t as n}from"./markRequestDataSiloIdsCompleted-DzqJ5MNY.mjs";import{t as r}from"./done-input-validation-DLR0-MJ7.mjs";import i from"colors";import*as a from"io-ts";const o=a.type({"Request Id":a.string});async function s({auth:a,dataSiloId:s,file:c,transcendUrl:l}){r(this.process.exit),e.info(i.magenta(`Reading "${c}" from disk`)),await n({requestIds:t(c,o).map(e=>e[`Request Id`]),transcendUrl:l,auth:a,dataSiloId:s})}export{s as markRequestDataSilosCompleted};
+ //# sourceMappingURL=impl-DEWXA_QC.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"impl-DEWXA_QC.mjs","names":[],"sources":["../src/commands/request/system/mark-request-data-silos-completed/impl.ts"],"sourcesContent":["import colors from 'colors';\nimport * as t from 'io-ts';\n\nimport type { LocalContext } from '../../../../context.js';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation.js';\nimport { markRequestDataSiloIdsCompleted } from '../../../../lib/cron/index.js';\nimport { readCsv } from '../../../../lib/requests/index.js';\nimport { logger } from '../../../../logger.js';\n\nconst RequestIdRow = t.type({\n 'Request Id': t.string,\n});\n\nexport interface MarkRequestDataSilosCompletedCommandFlags {\n auth: string;\n dataSiloId: string;\n file: string;\n transcendUrl: string;\n}\n\nexport async function markRequestDataSilosCompleted(\n this: LocalContext,\n { auth, dataSiloId, file, transcendUrl }: MarkRequestDataSilosCompletedCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n logger.info(colors.magenta(`Reading \"${file}\" from disk`));\n const activeResults = readCsv(file, RequestIdRow);\n\n await markRequestDataSiloIdsCompleted({\n requestIds: activeResults.map((request) => request['Request Id']),\n transcendUrl,\n auth,\n dataSiloId,\n });\n}\n"],"mappings":"0PASA,MAAM,EAAe,EAAE,KAAK,CAC1B,aAAc,EAAE,OACjB,CAAC,CASF,eAAsB,EAEpB,CAAE,OAAM,aAAY,OAAM,gBACX,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,EAAO,KAAK,EAAO,QAAQ,YAAY,EAAK,aAAa,CAAC,CAG1D,MAAM,EAAgC,CACpC,WAHoB,EAAQ,EAAM,EAAa,CAGrB,IAAK,GAAY,EAAQ,cAAc,CACjE,eACA,OACA,aACD,CAAC"}
@@ -0,0 +1,2 @@
+ import{t as e}from"./notifyPrivacyRequestsAdditionalTime-TEHAJe4C.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({auth:n,transcendUrl:r,createdAtBefore:i,createdAtAfter:a,updatedAtBefore:o,updatedAtAfter:s,actions:c,daysLeft:l,days:u,requestIds:d,emailTemplate:f,concurrency:p}){t(this.process.exit),await e({transcendUrl:r,requestActions:c,auth:n,emailTemplate:f,days:u,daysLeft:l,requestIds:d,concurrency:p,createdAtBefore:i,createdAtAfter:a,updatedAtBefore:o,updatedAtAfter:s})}export{n as notifyAdditionalTime};
+ //# sourceMappingURL=impl-DGiPB5Vq2.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"impl-DGiPB5Vq2.mjs","names":[],"sources":["../src/commands/request/notify-additional-time/impl.ts"],"sourcesContent":["import type { RequestAction } from '@transcend-io/privacy-types';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { notifyPrivacyRequestsAdditionalTime } from '../../../lib/requests/index.js';\n\nexport interface NotifyAdditionalTimeCommandFlags {\n auth: string;\n createdAtBefore: Date;\n createdAtAfter?: Date;\n updatedAtBefore?: Date;\n updatedAtAfter?: Date;\n actions?: RequestAction[];\n daysLeft: number;\n days: number;\n requestIds?: string[];\n emailTemplate: string;\n transcendUrl: string;\n concurrency: number;\n}\n\nexport async function notifyAdditionalTime(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n actions,\n daysLeft,\n days,\n requestIds,\n emailTemplate,\n concurrency,\n }: NotifyAdditionalTimeCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await notifyPrivacyRequestsAdditionalTime({\n transcendUrl,\n requestActions: actions,\n auth,\n emailTemplate,\n days,\n daysLeft,\n requestIds,\n concurrency,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n });\n}\n"],"mappings":"2HAqBA,eAAsB,EAEpB,CACE,OACA,eACA,kBACA,iBACA,kBACA,iBACA,UACA,WACA,OACA,aACA,gBACA,eAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAM,EAAoC,CACxC,eACA,eAAgB,EAChB,OACA,gBACA,OACA,WACA,aACA,cACA,kBACA,iBACA,kBACA,iBACD,CAAC"}
@@ -0,0 +1,2 @@
+ import{t as e}from"./logger-B-LXIf3U.mjs";import{n as t}from"./buildXdiSyncEndpoint-Cb-pvpak.mjs";import{t as n}from"./validateTranscendAuth-1W1IylqE.mjs";import{t as r}from"./done-input-validation-DLR0-MJ7.mjs";import{writeFileSync as i}from"node:fs";import a from"colors";async function o({auth:o,xdiLocation:s,file:c,removeIpAddresses:l,domainBlockList:u,xdiAllowedCommands:d,transcendUrl:f}){r(this.process.exit);let{syncGroups:p,html:m}=await t(await n(o),{xdiLocation:s,transcendUrl:f,removeIpAddresses:l,domainBlockList:u.length>0?u:void 0,xdiAllowedCommands:d});e.info(a.green(`Successfully constructed sync endpoint for sync groups: ${JSON.stringify(p,null,2)}`)),i(c,m),e.info(a.green(`Wrote configuration to file "${c}"!`))}export{o as buildXdiSyncEndpoint};
+ //# sourceMappingURL=impl-DGuwD_qz.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"impl-DGuwD_qz.mjs","names":["buildXdiSyncEndpointHelper"],"sources":["../src/commands/consent/build-xdi-sync-endpoint/impl.ts"],"sourcesContent":["import { writeFileSync } from 'node:fs';\n\nimport colors from 'colors';\n\nimport type { LocalContext } from '../../../context.js';\nimport { validateTranscendAuth } from '../../../lib/api-keys/index.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { buildXdiSyncEndpoint as buildXdiSyncEndpointHelper } from '../../../lib/consent-manager/index.js';\nimport { logger } from '../../../logger.js';\n\nexport interface BuildXdiSyncEndpointCommandFlags {\n auth: string;\n xdiLocation: string;\n file: string;\n removeIpAddresses: boolean;\n domainBlockList: string[];\n xdiAllowedCommands: string;\n transcendUrl: string;\n}\n\nexport async function buildXdiSyncEndpoint(\n this: LocalContext,\n {\n auth,\n xdiLocation,\n file,\n removeIpAddresses,\n domainBlockList,\n xdiAllowedCommands,\n transcendUrl,\n }: BuildXdiSyncEndpointCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Build the sync endpoint\n const { syncGroups, html } = await buildXdiSyncEndpointHelper(apiKeyOrList, {\n xdiLocation,\n transcendUrl,\n removeIpAddresses,\n domainBlockList: domainBlockList.length > 0 ? domainBlockList : undefined,\n xdiAllowedCommands,\n });\n\n // Log success\n logger.info(\n colors.green(\n `Successfully constructed sync endpoint for sync groups: ${JSON.stringify(\n syncGroups,\n null,\n 2,\n )}`,\n ),\n );\n\n // Write to disk\n writeFileSync(file, html);\n logger.info(colors.green(`Wrote configuration to file \"${file}\"!`));\n}\n"],"mappings":"kRAoBA,eAAsB,EAEpB,CACE,OACA,cACA,OACA,oBACA,kBACA,qBACA,gBAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAMtC,GAAM,CAAE,aAAY,QAAS,MAAMA,EAHd,MAAM,EAAsB,EAAK,CAGsB,CAC1E,cACA,eACA,oBACA,gBAAiB,EAAgB,OAAS,EAAI,EAAkB,IAAA,GAChE,qBACD,CAAC,CAGF,EAAO,KACL,EAAO,MACL,2DAA2D,KAAK,UAC9D,EACA,KACA,EACD,GACF,CACF,CAGD,EAAc,EAAM,EAAK,CACzB,EAAO,KAAK,EAAO,MAAM,gCAAgC,EAAK,IAAI,CAAC"}
@@ -0,0 +1,2 @@
+ import{t as e}from"./cancelPrivacyRequests-DNiL13E_.mjs";import{t}from"./done-input-validation-DLR0-MJ7.mjs";async function n({auth:n,actions:r,statuses:i=[],requestIds:a,silentModeBefore:o,createdAtBefore:s,createdAtAfter:c,updatedAtBefore:l,updatedAtAfter:u,cancellationTitle:d,transcendUrl:f,concurrency:p}){t(this.process.exit),await e({transcendUrl:f,requestActions:r,auth:n,cancellationTitle:d,requestIds:a,statuses:i,concurrency:p,silentModeBefore:o?new Date(o):void 0,createdAtBefore:s?new Date(s):void 0,createdAtAfter:c?new Date(c):void 0,updatedAtBefore:l?new Date(l):void 0,updatedAtAfter:u?new Date(u):void 0})}export{n as cancel};
+ //# sourceMappingURL=impl-DGzvE8aJ.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"impl-DGzvE8aJ.mjs","names":[],"sources":["../src/commands/request/cancel/impl.ts"],"sourcesContent":["import { RequestAction, RequestStatus } from '@transcend-io/privacy-types';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { cancelPrivacyRequests } from '../../../lib/requests/index.js';\n\nexport interface CancelCommandFlags {\n auth: string;\n actions: RequestAction[];\n statuses?: RequestStatus[];\n requestIds?: string[];\n silentModeBefore?: Date;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n updatedAtBefore?: Date;\n updatedAtAfter?: Date;\n cancellationTitle: string;\n transcendUrl: string;\n concurrency: number;\n}\n\nexport async function cancel(\n this: LocalContext,\n {\n auth,\n actions,\n statuses = [],\n requestIds,\n silentModeBefore,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n cancellationTitle,\n transcendUrl,\n concurrency,\n }: CancelCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await cancelPrivacyRequests({\n transcendUrl,\n requestActions: actions,\n auth,\n cancellationTitle,\n requestIds,\n statuses,\n concurrency,\n silentModeBefore: silentModeBefore ? new Date(silentModeBefore) : undefined,\n createdAtBefore: createdAtBefore ? new Date(createdAtBefore) : undefined,\n createdAtAfter: createdAtAfter ? new Date(createdAtAfter) : undefined,\n updatedAtBefore: updatedAtBefore ? new Date(updatedAtBefore) : undefined,\n updatedAtAfter: updatedAtAfter ? new Date(updatedAtAfter) : undefined,\n });\n}\n"],"mappings":"6GAqBA,eAAsB,EAEpB,CACE,OACA,UACA,WAAW,EAAE,CACb,aACA,mBACA,kBACA,iBACA,kBACA,iBACA,oBACA,eACA,eAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAM,EAAsB,CAC1B,eACA,eAAgB,EAChB,OACA,oBACA,aACA,WACA,cACA,iBAAkB,EAAmB,IAAI,KAAK,EAAiB,CAAG,IAAA,GAClE,gBAAiB,EAAkB,IAAI,KAAK,EAAgB,CAAG,IAAA,GAC/D,eAAgB,EAAiB,IAAI,KAAK,EAAe,CAAG,IAAA,GAC5D,gBAAiB,EAAkB,IAAI,KAAK,EAAgB,CAAG,IAAA,GAC/D,eAAgB,EAAiB,IAAI,KAAK,EAAe,CAAG,IAAA,GAC7D,CAAC"}
@@ -0,0 +1,7 @@
+ import{t as e}from"./logger-B-LXIf3U.mjs";import{n as t,t as n}from"./bluebird-CUitXgsY.mjs";import{t as r}from"./createSombraGotInstance-D1Il9zUE.mjs";import{t as i}from"./fetchAllPurposes-ZdkO2fMp.mjs";import{t as a}from"./fetchAllPreferenceTopics-ForE9GpZ.mjs";import{n as o}from"./parseAttributesFromString-CZStzJc0.mjs";import{t as s}from"./splitCsvToList-BRq_CIfd.mjs";import{t as c}from"./readCsv-CyOL7eCc.mjs";import{r as l}from"./makeGraphQLRequest-Cq26A_Lq.mjs";import{n as u}from"./inquirer-BgNcicZ4.mjs";import{t as d}from"./done-input-validation-DLR0-MJ7.mjs";import{r as f}from"./codecs-Dx_vGxsl.mjs";import{t as p}from"./types-B4CVJCpj.mjs";import{t as m}from"./withPreferenceRetry-xLMZyTq9.mjs";import{PreferenceTopicType as h}from"@transcend-io/privacy-types";import{chunk as g,difference as _,groupBy as v,keyBy as y,uniq as b}from"lodash-es";import{apply as x,decodeCodec as S}from"@transcend-io/type-utils";import{readdirSync as C}from"node:fs";import{basename as w,join as T}from"node:path";import E from"colors";import*as D from"io-ts";import O from"inquirer";import k from"cli-progress";import{PersistedState as A}from"@transcend-io/persisted-state";function j({row:e,columnToPurposeName:t,purposeSlugs:n,preferenceTopics:r}){let i={};return Object.entries(t).forEach(([t,{purpose:a,preference:o,valueMapping:c}])=>{if(!n.includes(a))throw Error(`Invalid purpose slug: ${a}, expected: ${n.join(`, `)}`);let l=e[t];if(o){let e=r.find(e=>e.slug===o&&e.purpose.trackingType===a);if(!e){let e=r.filter(e=>e.purpose.trackingType===a).map(e=>e.slug);throw Error(`Invalid preference slug: ${o} for purpose: ${a}. Allowed preference slugs for purpose are: ${e.join(`,`)}`)}switch(i[a]||(i[a]={preferences:[]}),i[a].preferences||(i[a].preferences=[]),e.type){case h.Boolean:{let e=c[l];if(e===void 0&&l!==``)throw Error(`No preference mapping found for value "${l}" in column "${t}" (purpose=${a}, preference=${o})`);if(e==null)return;if(typeof e!=`boolean`)throw Error(`Invalid value for boolean preference: ${o}, expected boolean, got: ${l}`);i[a].preferences.push({topic:o,choice:{booleanValue:e}});break}case h.Select:{let n=c[l];if(n===void 0&&l!==``)throw Error(`No preference mapping found for value "${l}" in column "${t}" (purpose=${a}, preference=${o})`);if(n==null)return;if(typeof n!=`string`)throw Error(`Invalid value for select preference: ${o}, expected string, got: ${l}`);let r=n.trim()||null;if(r&&!e.preferenceOptionValues.map(({slug:e})=>e).includes(r))throw Error(`Invalid value for select preference: ${o}, expected one of: ${e.preferenceOptionValues.map(({slug:e})=>e).join(`, `)}, got: ${l}`);i[a].preferences.push({topic:o,choice:{selectValue:r}});break}case h.MultiSelect:{if(typeof l!=`string`)throw Error(`Invalid value for multi select preference: ${o}, expected string, got: ${l}`);let n=s(l).map(n=>{let r=c[n];if(r===void 0&&l!==``)throw Error(`No preference mapping found for multi select token "${l}" in column "${t}" (purpose=${a}, preference=${o})`);if(r==null)return null;if(typeof r!=`string`)throw Error(`Invalid value for multi select preference: ${o}, expected one of: ${e.preferenceOptionValues.map(({slug:e})=>e).join(`, `)}, got: ${n}`);return r}).filter(e=>e!==null).sort((e,t)=>e.localeCompare(t));n.length>0&&i[a].preferences.push({topic:o,choice:{selectValues:n}});break}default:throw Error(`Unknown preference type: ${e.type}`)}}else{let n=c[l];if(n===void 0&&l!==``)throw Error(`No preference mapping found for value "${l}" in column "${t}" (purpose=${a}, preference=∅) 
${JSON.stringify(e)}`);if(n===null)return;i[a]?i[a].enabled=n===!0:i[a]={enabled:n===!0}}}),x(i,(e,t)=>{if(typeof e.enabled!=`boolean`)throw Error(`No mapping provided for purpose.enabled=true/false value: ${t}`);return{...e,enabled:e.enabled}})}function M({currentConsentRecord:e,pendingUpdates:t,preferenceTopics:n}){return Object.entries(t).every(([t,{preferences:r=[],enabled:i}])=>{let a=e.purposes.find(e=>e.purpose===t);return a&&a.enabled===i?r.every(({topic:e,choice:r})=>a.preferences&&a.preferences.find(i=>{if(i.topic!==e)return!1;let a=n.find(n=>n.slug===e&&n.purpose.trackingType===t);if(!a)throw Error(`Could not find preference topic for ${e}`);switch(a.type){case h.Boolean:return i.choice.booleanValue===r.booleanValue;case h.Select:return i.choice.selectValue===r.selectValue;case h.MultiSelect:let e=(i.choice.selectValues||[]).sort(),t=(r.selectValues||[]).sort();return e.length===t.length&&e.every((e,n)=>e===t[n]);default:throw Error(`Unknown preference topic type: ${a.type}`)}})):!1})}function N({currentConsentRecord:t,pendingUpdates:n,preferenceTopics:r,log:i}){return!!Object.entries(n).find(([n,{preferences:a=[],enabled:o}])=>{let s=t.purposes.find(e=>e.purpose===n);return s?s.enabled===o?!!a.find(({topic:a,choice:o})=>{let c=(s.preferences||[]).find(e=>e.topic===a);if(!c)return i&&e.warn(`No existing preference found for topic ${a} in purpose ${n} for user ${t.userId}.`),!1;let l=r.find(e=>e.slug===a&&e.purpose.trackingType===n);if(!l)throw Error(`Could not find preference topic for ${a}`);let u,d;switch(l.type){case h.Boolean:return u=c.choice.booleanValue!==o.booleanValue,i&&e.warn(`Preference topic ${a} boolean value conflict for user ${t.userId}. Expected: ${o.booleanValue}, Found: ${c.choice.booleanValue}`),u;case h.Select:return d=c.choice.selectValue!==o.selectValue,i&&e.warn(`Preference topic ${a} select value conflict for user ${t.userId}. Expected: ${o.selectValue}, Found: ${c.choice.selectValue}`),d;case h.MultiSelect:let n=(c.choice.selectValues||[]).sort(),r=(o.selectValues||[]).sort();return d=n.length!==r.length||!n.every((e,t)=>e===r[t]),i&&e.warn(`Preference topic ${a} multi-select value conflict for user ${t.userId}. Expected: ${r.join(`, `)}, Found: ${n.join(`, `)}`),d;default:throw Error(`Unknown preference topic type: ${l.type}`)}}):(i&&e.warn(`Purpose ${n} enabled value conflict for user ${t.userId}. 
Pending Value: ${o}, Current Value: ${s.enabled}`),!0):(i&&e.warn(`No existing purpose found for ${n} in consent record for ${t.userId}.`),!1)})}async function P(t,{identifiers:r,partitionKey:i,skipLogging:a=!1,concurrency:o=40}){let s=[],c=g(r,100),l=new Date().getTime(),u=new k.SingleBar({},k.Presets.shades_classic);a||u.start(r.length,0);let d=0;await n(c,async n=>{let r=S(p,await m(`Preference Query`,()=>t.post(`v1/preferences/${i}/query`,{json:{filter:{identifiers:n},limit:n.length}}).json(),{onRetry:(t,r,a)=>{e.warn(E.yellow(`[RETRY] group size=${n.length} partition=${i} attempt=${t}: ${a}`))}}));s.push(...r.nodes),d+=n.length,u.update(d)},{concurrency:o}),u.stop();let f=new Date().getTime()-l;return a||e.info(E.green(`Completed download in "${f/1e3}" seconds.`)),s}async function F(n,r,{purposeSlugs:i,preferenceTopics:a,forceTriggerWorkflows:o}){let c=_(b(n.map(e=>Object.keys(e)).flat()),[...r.identifierColumn?[r.identifierColumn]:[],...r.timestampColum?[r.timestampColum]:[]]);if(c.length===0){if(o)return r;throw Error(`No other columns to process`)}let l=[...i,...a.map(e=>`${e.purpose.trackingType}->${e.slug}`)];return await t(c,async o=>{let c=b(n.map(e=>e[o])),u=r.columnToPurposeName[o];if(u)e.info(E.magenta(`Column "${o}" is associated with purpose "${u.purpose}"`));else{let{purposeName:e}=await O.prompt([{name:`purposeName`,message:`Choose the purpose that column ${o} is associated with`,type:`list`,default:l.find(e=>e.startsWith(i[0])),choices:l}]),[t,n]=e.split(`->`);u={purpose:t,preference:n||null,valueMapping:{}}}await t(c,async n=>{if(u.valueMapping[n]!==void 0){e.info(E.magenta(`Value "${n}" is associated with purpose value "${u.valueMapping[n]}"`));return}if(u.preference===null){let{purposeValue:e}=await O.prompt([{name:`purposeValue`,message:`Choose the purpose value for value "${n}" associated with purpose "${u.purpose}"`,type:`confirm`,default:n!==`false`}]);u.valueMapping[n]=e}if(u.preference!==null){let r=a.find(e=>e.slug===u.preference);if(!r){e.error(E.red(`Preference topic "${u.preference}" not found`));return}let i=r.preferenceOptionValues.map(({slug:e})=>e);if(r.type===h.Boolean){let{preferenceValue:e}=await O.prompt([{name:`preferenceValue`,message:`Choose the preference value for "${r.slug}" value "${n}" associated with purpose "${u.purpose}"`,type:`confirm`,default:n!==`false`}]);u.valueMapping[n]=e;return}if(r.type===h.Select){let{preferenceValue:e}=await O.prompt([{name:`preferenceValue`,message:`Choose the preference value for "${r.slug}" value "${n}" associated with purpose "${u.purpose}"`,type:`list`,choices:i,default:i.find(e=>e===n)}]);u.valueMapping[n]=e;return}if(r.type===h.MultiSelect){await t(s(n),async e=>{if(u.valueMapping[e]!==void 0)return;let{preferenceValue:t}=await O.prompt([{name:`preferenceValue`,message:`Choose the preference value for "${r.slug}" value "${e}" associated with purpose "${u.purpose}"`,type:`list`,choices:i,default:i.find(t=>t===e)}]);u.valueMapping[e]=t});return}throw Error(`Unknown preference topic type: ${r.type}`)}}),r.columnToPurposeName[o]=u}),r}async function I(t,n){let r=_(b(t.map(e=>Object.keys(e)).flat()),[...n.identifierColumn?[n.identifierColumn]:[],...Object.keys(n.columnToPurposeName)]);if(!n.identifierColumn){let{identifierName:e}=await O.prompt([{name:`identifierName`,message:`Choose the column that will be used as the identifier to upload consent preferences by`,type:`list`,default:r.find(e=>e.toLowerCase().includes(`email`))||r[0],choices:r}]);n.identifierColumn=e}e.info(E.magenta(`Using identifier column 
"${n.identifierColumn}"`));let i=t.map((e,t)=>e[n.identifierColumn]?null:[t]).filter(e=>!!e).flat();if(i.length>0){let r=`The identifier column "${n.identifierColumn}" is missing a value for the following rows: ${i.join(`, `)}`;if(e.warn(E.yellow(r)),!await u({message:`Would you like to skip rows missing an identifier?`}))throw Error(r);let a=t.length;t=t.filter(e=>e[n.identifierColumn]),e.info(E.yellow(`Skipped ${a-t.length} rows missing an identifier`))}e.info(E.magenta(`The identifier column "${n.identifierColumn}" is present for all rows`));let a=v(t,n.identifierColumn),o=Object.entries(a).filter(([,e])=>e.length>1);if(o.length>0){let r=`The identifier column "${n.identifierColumn}" has duplicate values for the following rows: ${o.slice(0,10).map(([e,t])=>`${e} (${t.length})`).join(`
+ `)}`;if(e.warn(E.yellow(r)),!await u({message:`Would you like to automatically take the latest update?`}))throw Error(r);t=Object.entries(a).map(([,e])=>e.sort((e,t)=>new Date(t[n.timestampColum]).getTime()-new Date(e[n.timestampColum]).getTime())[0]).filter(e=>e)}return{currentState:n,preferences:t}}async function L(t,n){let r=_(b(t.map(e=>Object.keys(e)).flat()),[...n.identifierColumn?[n.identifierColumn]:[],...Object.keys(n.columnToPurposeName)]);if(!n.timestampColum){let{timestampName:e}=await O.prompt([{name:`timestampName`,message:`Choose the column that will be used as the timestamp of last preference update`,type:`list`,default:r.find(e=>e.toLowerCase().includes(`date`))||r.find(e=>e.toLowerCase().includes(`time`))||r[0],choices:[...r,`[NONE]`]}]);n.timestampColum=e}if(e.info(E.magenta(`Using timestamp column "${n.timestampColum}"`)),n.timestampColum!==`[NONE]`){let r=t.map((e,t)=>e[n.timestampColum]?null:[t]).filter(e=>!!e).flat();if(r.length>0)throw Error(`The timestamp column "${n.timestampColum}" is missing a value for the following rows: ${r.join(`
+ `)}`);e.info(E.magenta(`The timestamp column "${n.timestampColum}" is present for all row`))}return n}async function R({file:t,sombra:n,purposeSlugs:r,preferenceTopics:i,partitionKey:a,skipExistingRecordCheck:o,forceTriggerWorkflows:s},l){let u=new Date().getTime(),d=l.getValue(`fileMetadata`);e.info(E.magenta(`Reading in file: "${t}"`));let f=c(t,D.record(D.string,D.string)),p={columnToPurposeName:{},pendingSafeUpdates:{},pendingConflictUpdates:{},skippedUpdates:{},...d[t]||{},lastFetchedAt:new Date().toISOString()};p=await L(f,p),d[t]=p,await l.setValue(d,`fileMetadata`);let m=await I(f,p);p=m.currentState,f=m.preferences,d[t]=p,await l.setValue(d,`fileMetadata`),p=await F(f,p,{preferenceTopics:i,purposeSlugs:r,forceTriggerWorkflows:s}),d[t]=p,await l.setValue(d,`fileMetadata`);let h=f.map(e=>e[p.identifierColumn]),g=y(o?[]:await P(n,{identifiers:h.map(e=>({value:e})),partitionKey:a}),`userId`);p.pendingConflictUpdates={},p.pendingSafeUpdates={},p.skippedUpdates={},f.forEach(e=>{let t=e[p.identifierColumn],n=j({row:e,columnToPurposeName:p.columnToPurposeName,preferenceTopics:i,purposeSlugs:r}),a=g[t];if(s&&!a)throw Error(`No existing consent record found for user with id: ${t}.
+ When 'forceTriggerWorkflows' is set all the user identifiers should contain a consent record`);if(a&&M({currentConsentRecord:a,pendingUpdates:n,preferenceTopics:i})&&!s){p.skippedUpdates[t]=e;return}if(a&&N({currentConsentRecord:a,pendingUpdates:n,preferenceTopics:i})){p.pendingConflictUpdates[t]={row:e,record:a};return}p.pendingSafeUpdates[t]=e}),d[t]=p,await l.setValue(d,`fileMetadata`);let _=new Date().getTime();e.info(E.green(`Successfully pre-processed file: "${t}" in ${(_-u)/1e3}s`))}async function z({auth:t,sombraAuth:s,receiptFilepath:c,file:u,partition:d,isSilent:p=!0,dryRun:m=!1,skipWorkflowTriggers:h=!1,skipConflictUpdates:_=!1,skipExistingRecordCheck:v=!1,attributes:y=[],transcendUrl:b,forceTriggerWorkflows:S=!1}){let C=o(y),w=new A(c,f,{fileMetadata:{},failingUpdates:{},pendingUpdates:{}}),T=w.getValue(`failingUpdates`),D=w.getValue(`pendingUpdates`),O=w.getValue(`fileMetadata`);e.info(E.magenta(`Restored cache, there are:
+ ${Object.values(T).length} failing requests to be retried\n${Object.values(D).length} pending requests to be processed\nThe following files are stored in cache and will be used:\n${Object.keys(O).map(e=>e).join(`
+ `)}\nThe following file will be processed: ${u}\n`));let M=l(b,t),[N,P,F]=await Promise.all([r(b,t,s),i(M),a(M)]);await R({file:u,purposeSlugs:P.map(e=>e.trackingType),preferenceTopics:F,sombra:N,partitionKey:d,skipExistingRecordCheck:v,forceTriggerWorkflows:S},w);let I={};O=w.getValue(`fileMetadata`);let L=O[u];if(e.info(E.magenta(`Found ${Object.entries(L.pendingSafeUpdates).length} safe updates in ${u}`)),e.info(E.magenta(`Found ${Object.entries(L.pendingConflictUpdates).length} conflict updates in ${u}`)),e.info(E.magenta(`Found ${Object.entries(L.skippedUpdates).length} skipped updates in ${u}`)),Object.entries({...L.pendingSafeUpdates,..._?{}:x(L.pendingConflictUpdates,({row:e})=>e)}).forEach(([e,t])=>{let n=L.timestampColum===`[NONE]`?new Date:new Date(t[L.timestampColum]),r=j({row:t,columnToPurposeName:L.columnToPurposeName,preferenceTopics:F,purposeSlugs:P.map(e=>e.trackingType)});I[e]={userId:e,partition:d,timestamp:n.toISOString(),purposes:Object.entries(r).map(([e,t])=>({...t,purpose:e,workflowSettings:{attributes:C,isSilent:p,skipWorkflowTrigger:h,...S?{forceTriggerWorkflow:S}:{}}}))}}),await w.setValue(I,`pendingUpdates`),await w.setValue({},`failingUpdates`),m){e.info(E.green(`Dry run complete, exiting. ${Object.values(I).length} pending updates. Check file: ${c}`));return}e.info(E.magenta(`Uploading ${Object.values(I).length} preferences to partition: ${d}`));let z=new Date().getTime(),B=new k.SingleBar({},k.Presets.shades_classic),V=0,H=Object.entries(I),U=g(H,h?100:10);B.start(H.length,0),await n(U,async t=>{try{await N.put(`v1/preferences`,{json:{records:t.map(([,e])=>e),skipWorkflowTriggers:h}}).json()}catch(n){try{let t=JSON.parse(n?.response?.body||`{}`);t.error&&e.error(E.red(`Error: ${t.error}`))}catch{}e.error(E.red(`Failed to upload ${t.length} user preferences to partition ${d}: ${n?.response?.body||n?.message}`));let r=w.getValue(`failingUpdates`);t.forEach(([e,t])=>{r[e]={uploadedAt:new Date().toISOString(),update:t,error:n?.response?.body||n?.message||`Unknown error`}}),await w.setValue(r,`failingUpdates`)}V+=t.length,B.update(V)},{concurrency:40}),B.stop();let W=new Date().getTime()-z;e.info(E.green(`Successfully uploaded ${H.length} user preferences to partition ${d} in "${W/1e3}" seconds!`))}async function B({auth:t,partition:r,sombraAuth:i,transcendUrl:a,file:o=``,directory:c,dryRun:l,skipExistingRecordCheck:u,receiptFileDir:f,skipWorkflowTriggers:p,forceTriggerWorkflows:m,skipConflictUpdates:h,isSilent:g,attributes:_,concurrency:v}){c&&o&&(e.error(E.red(`Cannot provide both a directory and a file. Please provide only one.`)),this.process.exit(1)),!o&&!c&&(e.error(E.red(`A file or directory must be provided. 
Please provide one using --file=./preferences.csv or --directory=./preferences`)),this.process.exit(1)),d(this.process.exit);let y=[];if(c)try{let t=C(c).filter(e=>e.endsWith(`.csv`));t.length===0&&(e.error(E.red(`No CSV files found in directory: ${c}`)),this.process.exit(1)),y.push(...t.map(e=>T(c,e)))}catch(t){e.error(E.red(`Failed to read directory: ${c}`)),e.error(E.red(t.message)),this.process.exit(1)}else try{o.endsWith(`.csv`)||(e.error(E.red(`File must be a CSV file`)),this.process.exit(1)),y.push(o)}catch(t){e.error(E.red(`Failed to access file: ${o}`)),e.error(E.red(t.message)),this.process.exit(1)}e.info(E.green(`Processing ${y.length} consent preferences files for partition: ${r}`)),e.debug(`Files to process: ${y.join(`, `)}`),u&&e.info(E.bgYellow(`Skipping existing record check: ${u}`)),await n(y,async e=>{await z({receiptFilepath:T(f,`${w(e).replace(`.csv`,``)}-receipts.json`),auth:t,sombraAuth:i,file:e,partition:r,transcendUrl:a,skipConflictUpdates:h,skipWorkflowTriggers:p,skipExistingRecordCheck:u,isSilent:g,dryRun:l,attributes:s(_),forceTriggerWorkflows:m})},{concurrency:v})}export{B as uploadPreferences};
+ //# sourceMappingURL=impl-DTp9OQIZ.mjs.map
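This bundle inlines the whole preference-upload pipeline, and the sourcemap below carries its original sources, including the documented "omission rule" of getPreferenceUpdatesFromRow: a cell whose value has no entry in the column's valueMapping is an error (unless the cell is empty), a null mapping means "omit this column entirely", and mapped values are still type-checked. A reduced sketch of the boolean-preference branch; the Mapped type and mapBooleanCell name are illustrative, not part of the CLI:

// Reduced sketch of the omission rule's boolean branch (illustrative names)
type Mapped = boolean | string | null;

function mapBooleanCell(
  valueMapping: Record<string, Mapped | undefined>,
  rawValue: string,
): boolean | undefined {
  const mapped = valueMapping[rawValue];
  if (mapped === undefined && rawValue !== '') {
    // Unmapped, non-empty cell: hard error
    throw new Error(`No preference mapping found for value "${rawValue}"`);
  }
  if (mapped === null || mapped === undefined) {
    return undefined; // null mapping (or empty cell): omit the column entirely
  }
  if (typeof mapped !== 'boolean') {
    throw new Error(`Expected a boolean mapping, got: ${String(mapped)}`);
  }
  return mapped;
}

// mapBooleanCell({ yes: true, no: false, 'N/A': null }, 'N/A') -> undefined
// mapBooleanCell({ yes: true }, 'maybe') -> throws

The same sources show the rest of the pipeline's shape: preference-store lookups are batched in chunks of 100 identifiers with a default request concurrency of 40, and each row is classified as a safe update, a conflict (an existing record differs), or a skip (a no-op) before anything is uploaded.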
@@ -0,0 +1 @@
+ {"version":3,"file":"impl-DTp9OQIZ.mjs","names":[],"sources":["../src/lib/preference-management/getPreferenceUpdatesFromRow.ts","../src/lib/preference-management/checkIfPendingPreferenceUpdatesAreNoOp.ts","../src/lib/preference-management/checkIfPendingPreferenceUpdatesCauseConflict.ts","../src/lib/preference-management/getPreferencesForIdentifiers.ts","../src/lib/preference-management/parsePreferenceAndPurposeValuesFromCsv.ts","../src/lib/preference-management/parsePreferenceIdentifiersFromCsv.ts","../src/lib/preference-management/parsePreferenceTimestampsFromCsv.ts","../src/lib/preference-management/parsePreferenceManagementCsv.ts","../src/lib/preference-management/uploadPreferenceManagementPreferencesInteractive.ts","../src/commands/consent/upload-preferences/impl.ts"],"sourcesContent":["import { PreferenceStorePurposeResponse, PreferenceTopicType } from '@transcend-io/privacy-types';\nimport { apply } from '@transcend-io/type-utils';\n\nimport { PreferenceTopic } from '../graphql/index.js';\nimport { splitCsvToList } from '../requests/index.js';\nimport { PurposeRowMapping } from './codecs.js';\n\n/**\n * Parse an arbitrary object to the Transcend PUT /v1/preference update shape\n * by using a mapping of column names to purpose/preference slugs.\n *\n * `columnToPurposeName` looks like:\n * {\n * 'my_purpose': { purpose: 'Marketing', preference: null, valueMapping: { 'true': true, 'false': false } },\n * 'has_topic_1': { purpose: 'Marketing', preference: 'BooleanPreference1', valueMapping: { 'true': true, 'false': false } },\n * 'has_topic_2': { purpose: 'Marketing', preference: 'SingleSelectPreference', valueMapping: { 'Option 1': 'Value1', 'Option 2': 'Value2' } }\n * }\n *\n * `row` looks like:\n * {\n * 'my_purpose': 'true',\n * 'has_topic_1': 'true',\n * 'has_topic_2': 'Option 1'\n * }\n *\n * OMISSION RULE:\n * - If `valueMapping[row[columnName]]`\n * returns `undefined` or `null`, we **omit** that column entirely (do not set purpose enabled, do not push a preference).\n * - For MultiSelect, **each token** is treated independently: tokens that map to `undefined|null` are skipped;\n * if all tokens are skipped, nothing is pushed.\n * - We still validate **types** for mapped values (e.g., boolean must map to boolean, select must map to string, etc.).\n *\n * NOTE:\n * - Final shape must have `enabled` for every purpose touched (enforced by `apply` below). If you omit all top-level purpose mappings,\n * but emit preferences, this will throw at the end. 
This preserves the existing “enabled required” contract.\n *\n * @param options - Options\n * @returns The parsed row\n */\nexport function getPreferenceUpdatesFromRow({\n row,\n columnToPurposeName,\n purposeSlugs,\n preferenceTopics,\n}: {\n /** Row to parse */\n row: Record<string, string>;\n /** Mapping from column name to parser config */\n columnToPurposeName: Record<string, PurposeRowMapping>;\n /** The set of allowed purpose slugs */\n purposeSlugs: string[];\n /** The preference topics */\n preferenceTopics: PreferenceTopic[];\n}): {\n [k in string]: Omit<PreferenceStorePurposeResponse, 'purpose'>;\n} {\n // Create a result object to store the parsed preferences\n const result: {\n [k in string]: Partial<PreferenceStorePurposeResponse>;\n } = {};\n\n // Iterate over each column and map to the purpose or preference\n Object.entries(columnToPurposeName).forEach(\n ([columnName, { purpose, preference, valueMapping }]) => {\n // Ensure the purpose is valid\n if (!purposeSlugs.includes(purpose)) {\n throw new Error(`Invalid purpose slug: ${purpose}, expected: ${purposeSlugs.join(', ')}`);\n }\n\n // The raw value from the CSV row for this column\n const rawValue = row[columnName];\n\n // Check if parsing a preference or just the top level purpose\n if (preference) {\n const preferenceTopic = preferenceTopics.find(\n (x) => x.slug === preference && x.purpose.trackingType === purpose,\n );\n if (!preferenceTopic) {\n const allowedTopics = preferenceTopics\n .filter((x) => x.purpose.trackingType === purpose)\n .map((x) => x.slug);\n throw new Error(\n `Invalid preference slug: ${preference} for purpose: ${purpose}. ` +\n `Allowed preference slugs for purpose are: ${allowedTopics.join(',')}`,\n );\n }\n\n // Ensure destination array\n if (!result[purpose]) {\n result[purpose] = {\n preferences: [],\n };\n }\n if (!result[purpose].preferences) {\n result[purpose].preferences = [];\n }\n\n // handle each type of preference\n switch (preferenceTopic.type) {\n case PreferenceTopicType.Boolean: {\n const mappedValue = valueMapping[rawValue];\n // Throw error on missing mapping\n if (mappedValue === undefined && rawValue !== '') {\n throw new Error(\n `No preference mapping found for value \"${rawValue}\" in column ` +\n `\"${columnName}\" (purpose=${purpose}, preference=${preference})`,\n );\n }\n\n // Purposefully missing mapping\n if (mappedValue === null || mappedValue === undefined) {\n return;\n }\n\n // Ensure boolean\n if (typeof mappedValue !== 'boolean') {\n throw new Error(\n `Invalid value for boolean preference: ${preference}, expected boolean, got: ${rawValue}`,\n );\n }\n result[purpose].preferences!.push({\n topic: preference,\n choice: { booleanValue: mappedValue },\n });\n break;\n }\n\n case PreferenceTopicType.Select: {\n const mappedValue = valueMapping[rawValue];\n // Throw error on missing mapping\n if (mappedValue === undefined && rawValue !== '') {\n throw new Error(\n `No preference mapping found for value \"${rawValue}\" in column ` +\n `\"${columnName}\" (purpose=${purpose}, preference=${preference})`,\n );\n }\n\n // Omit if null\n if (mappedValue === null || mappedValue === undefined) {\n return;\n }\n\n // Ensure string\n if (typeof mappedValue !== 'string') {\n throw new Error(\n `Invalid value for select preference: ${preference}, expected string, got: ${rawValue}`,\n );\n }\n const trimmed = mappedValue.trim() || null;\n\n if (\n trimmed &&\n !preferenceTopic.preferenceOptionValues.map(({ slug }) => slug).includes(trimmed)\n ) {\n throw new Error(\n `Invalid 
value for select preference: ${preference}, expected one of: ` +\n `${preferenceTopic.preferenceOptionValues\n .map(({ slug }) => slug)\n .join(', ')}, got: ${rawValue}`,\n );\n }\n\n result[purpose].preferences!.push({\n topic: preference,\n choice: { selectValue: trimmed },\n });\n break;\n }\n\n case PreferenceTopicType.MultiSelect: {\n if (typeof rawValue !== 'string') {\n throw new Error(\n `Invalid value for multi select preference: ${preference}, expected string, got: ${rawValue}`,\n );\n }\n\n // IMPORTANT: Do NOT rely on valueMapping[rawValue] for CSV.\n // Split and map per token with the new rule.\n const selectValues = splitCsvToList(rawValue)\n .map((token) => {\n const tokenMapped = valueMapping[token];\n // Throw error on missing mapping\n if (tokenMapped === undefined && rawValue !== '') {\n throw new Error(\n `No preference mapping found for multi select token \"${rawValue}\" in column ` +\n `\"${columnName}\" (purpose=${purpose}, preference=${preference})`,\n );\n }\n\n // Omit if null\n if (tokenMapped === null || tokenMapped === undefined) {\n return null;\n }\n\n // Ensure string\n if (typeof tokenMapped !== 'string') {\n throw new Error(\n `Invalid value for multi select preference: ${preference}, ` +\n `expected one of: ${preferenceTopic.preferenceOptionValues\n .map(({ slug }) => slug)\n .join(', ')}, got: ${token}`,\n );\n }\n return tokenMapped;\n })\n .filter((x): x is string => x !== null)\n .sort((a, b) => a.localeCompare(b));\n\n // Only push if at least one mapped token survived\n if (selectValues.length > 0) {\n result[purpose].preferences!.push({\n topic: preference,\n choice: { selectValues },\n });\n }\n break;\n }\n\n default:\n throw new Error(`Unknown preference type: ${preferenceTopic.type}`);\n }\n } else {\n // Top-level purpose (no preference)\n const mappedValue = valueMapping[rawValue];\n if (mappedValue === undefined && rawValue !== '') {\n throw new Error(\n `No preference mapping found for value \"${rawValue}\" in column ` +\n `\"${columnName}\" (purpose=${purpose}, preference=∅) ${JSON.stringify(row)}`,\n );\n }\n if (mappedValue === null) {\n return; // Omit if null\n }\n\n if (!result[purpose]) {\n // Top-level purpose: set enabled strictly from mapped boolean\n result[purpose] = { enabled: mappedValue === true };\n } else {\n // Preserve preferences; update enabled\n result[purpose].enabled = mappedValue === true;\n }\n }\n },\n );\n\n // Ensure that enabled is provided for any purpose that appears.\n // (This preserves the prior contract and existing tests.)\n return apply(result, (x, purposeName) => {\n if (typeof x.enabled !== 'boolean') {\n throw new Error(`No mapping provided for purpose.enabled=true/false value: ${purposeName}`);\n }\n return {\n ...x,\n enabled: x.enabled!,\n };\n });\n}\n","import {\n PreferenceQueryResponseItem,\n PreferenceStorePurposeResponse,\n PreferenceTopicType,\n} from '@transcend-io/privacy-types';\n\nimport { PreferenceTopic } from '../graphql/index.js';\n\n/**\n * Check if the pending set of updates are exactly the same as the current consent record.\n *\n * @param options - Options\n * @returns Whether the pending updates already exist in the preference store\n */\nexport function checkIfPendingPreferenceUpdatesAreNoOp({\n currentConsentRecord,\n pendingUpdates,\n preferenceTopics,\n}: {\n /** The current consent record */\n currentConsentRecord: PreferenceQueryResponseItem;\n /** The pending updates */\n pendingUpdates: {\n [purposeName in string]: Omit<PreferenceStorePurposeResponse, 'purpose'>;\n 
};\n /** The preference topic configurations */\n preferenceTopics: PreferenceTopic[];\n}): boolean {\n // Check each update\n return Object.entries(pendingUpdates).every(([purposeName, { preferences = [], enabled }]) => {\n // Ensure the purpose exists\n const currentPurpose = currentConsentRecord.purposes.find(\n (existingPurpose) => existingPurpose.purpose === purposeName,\n );\n\n // Ensure purpose.enabled is in sync\n // Also false if the purpose does not exist\n const enabledIsInSync = !!currentPurpose && currentPurpose.enabled === enabled;\n if (!enabledIsInSync) {\n return false;\n }\n\n // Compare the preferences are in sync\n return preferences.every(\n ({ topic, choice }) =>\n // ensure preferences exist on record\n currentPurpose.preferences &&\n currentPurpose.preferences.find((existingPreference) => {\n // find matching topic\n if (existingPreference.topic !== topic) {\n return false;\n }\n\n // Determine type of preference topic\n const preferenceTopic = preferenceTopics.find(\n (x) => x.slug === topic && x.purpose.trackingType === purposeName,\n );\n if (!preferenceTopic) {\n throw new Error(`Could not find preference topic for ${topic}`);\n }\n\n // Handle comparison based on type\n switch (preferenceTopic.type) {\n case PreferenceTopicType.Boolean:\n return existingPreference.choice.booleanValue === choice.booleanValue;\n case PreferenceTopicType.Select:\n return existingPreference.choice.selectValue === choice.selectValue;\n case PreferenceTopicType.MultiSelect:\n // eslint-disable-next-line no-case-declarations\n const sortedCurrentValues = (existingPreference.choice.selectValues || []).sort();\n // eslint-disable-next-line no-case-declarations\n const sortedNewValues = (choice.selectValues || []).sort();\n return (\n sortedCurrentValues.length === sortedNewValues.length &&\n sortedCurrentValues.every((x, i) => x === sortedNewValues[i])\n );\n default:\n throw new Error(`Unknown preference topic type: ${preferenceTopic.type}`);\n }\n }),\n );\n });\n}\n","import {\n PreferenceQueryResponseItem,\n PreferenceStorePurposeResponse,\n PreferenceTopicType,\n} from '@transcend-io/privacy-types';\n\nimport { logger } from '../../logger.js';\nimport { PreferenceTopic } from '../graphql/index.js';\n\n/**\n * Check if the pending set of updates will result in a change of\n * value to an existing purpose or preference in the preference store.\n *\n * @param options - Options\n * @returns True if conflict, false if no conflict and just adding new data for first time\n */\nexport function checkIfPendingPreferenceUpdatesCauseConflict({\n currentConsentRecord,\n pendingUpdates,\n preferenceTopics,\n log,\n}: {\n /** The current consent record */\n currentConsentRecord: PreferenceQueryResponseItem;\n /** The pending updates */\n pendingUpdates: {\n [purposeName in string]: Omit<PreferenceStorePurposeResponse, 'purpose'>;\n };\n /** The preference topic configurations */\n preferenceTopics: PreferenceTopic[];\n /** Whether to log the conflict */\n log?: boolean;\n}): boolean {\n // Check if any update has conflict\n return !!Object.entries(pendingUpdates).find(([purposeName, { preferences = [], enabled }]) => {\n // Ensure the purpose exists\n const currentPurpose = currentConsentRecord.purposes.find(\n (existingPurpose) => existingPurpose.purpose === purposeName,\n );\n\n // If no purpose exists, then it is not a conflict\n if (!currentPurpose) {\n if (log) {\n logger.warn(\n `No existing purpose found for ${purposeName} in consent record for ${currentConsentRecord.userId}.`,\n );\n }\n 
return false;\n }\n\n // If purpose.enabled value is off, this is a conflict\n if (currentPurpose.enabled !== enabled) {\n if (log) {\n logger.warn(\n `Purpose ${purposeName} enabled value conflict for user ${currentConsentRecord.userId}. ` +\n `Pending Value: ${enabled}, Current Value: ${currentPurpose.enabled}`,\n );\n }\n return true;\n }\n\n // Check if any preferences are out of sync\n return !!preferences.find(({ topic, choice }) => {\n // find matching topic\n const currentPreference = (currentPurpose.preferences || []).find(\n (existingPreference) => existingPreference.topic === topic,\n );\n\n // if no topic exists, no conflict\n if (!currentPreference) {\n if (log) {\n logger.warn(\n `No existing preference found for topic ${topic} in purpose ` +\n `${purposeName} for user ${currentConsentRecord.userId}.`,\n );\n }\n return false;\n }\n\n // Determine type of preference topic\n const preferenceTopic = preferenceTopics.find(\n (x) => x.slug === topic && x.purpose.trackingType === purposeName,\n );\n if (!preferenceTopic) {\n throw new Error(`Could not find preference topic for ${topic}`);\n }\n\n // Handle comparison based on type\n let boolMatch: boolean;\n let selectMatch: boolean;\n switch (preferenceTopic.type) {\n case PreferenceTopicType.Boolean:\n boolMatch = currentPreference.choice.booleanValue !== choice.booleanValue;\n if (log) {\n logger.warn(\n `Preference topic ${topic} boolean value conflict for user ` +\n `${currentConsentRecord.userId}. Expected: ${choice.booleanValue}, ` +\n `Found: ${currentPreference.choice.booleanValue}`,\n );\n }\n return boolMatch;\n case PreferenceTopicType.Select:\n selectMatch = currentPreference.choice.selectValue !== choice.selectValue;\n if (log) {\n logger.warn(\n `Preference topic ${topic} select value conflict for user ` +\n `${currentConsentRecord.userId}. Expected: ${choice.selectValue}, ` +\n `Found: ${currentPreference.choice.selectValue}`,\n );\n }\n return selectMatch;\n case PreferenceTopicType.MultiSelect:\n // eslint-disable-next-line no-case-declarations\n const sortedCurrentValues = (currentPreference.choice.selectValues || []).sort();\n // eslint-disable-next-line no-case-declarations\n const sortedNewValues = (choice.selectValues || []).sort();\n selectMatch =\n sortedCurrentValues.length !== sortedNewValues.length ||\n !sortedCurrentValues.every((x, i) => x === sortedNewValues[i]);\n if (log) {\n logger.warn(\n `Preference topic ${topic} multi-select value conflict for user ` +\n `${currentConsentRecord.userId}. 
Expected: ${sortedNewValues.join(\n ', ',\n )}, Found: ${sortedCurrentValues.join(', ')}`,\n );\n }\n return selectMatch;\n default:\n throw new Error(`Unknown preference topic type: ${preferenceTopic.type}`);\n }\n });\n });\n}\n","import { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport type { Got } from 'got';\nimport { chunk } from 'lodash-es';\n\nimport { logger } from '../../logger.js';\nimport { map } from '../bluebird.js';\nimport { ConsentPreferenceResponse } from './types.js';\nimport { withPreferenceRetry } from './withPreferenceRetry.js';\n\n/**\n * Grab the current consent preference values for a list of identifiers\n *\n * @param sombra - Backend to make API call to\n * @param options - Options\n * @returns Plaintext context information\n */\nexport async function getPreferencesForIdentifiers(\n sombra: Got,\n {\n identifiers,\n partitionKey,\n skipLogging = false,\n concurrency = 40,\n }: {\n /** The list of identifiers to look up */\n identifiers: {\n /** The value of the identifier */\n value: string;\n }[];\n /** The partition key to look up */\n partitionKey: string;\n /** Whether to skip logging */\n skipLogging?: boolean;\n /** Concurrency for requests (default 40) */\n concurrency?: number;\n },\n): Promise<PreferenceQueryResponseItem[]> {\n const results: PreferenceQueryResponseItem[] = [];\n const groupedIdentifiers = chunk(identifiers, 100);\n\n // create a new progress bar instance and use shades_classic theme\n const t0 = new Date().getTime();\n const progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);\n if (!skipLogging) {\n progressBar.start(identifiers.length, 0);\n }\n\n let total = 0;\n await map(\n groupedIdentifiers,\n async (group) => {\n const rawResult = await withPreferenceRetry(\n 'Preference Query',\n () =>\n sombra\n .post(`v1/preferences/${partitionKey}/query`, {\n json: {\n filter: { identifiers: group },\n limit: group.length,\n },\n })\n .json(),\n {\n onRetry: (attempt, _err, msg) => {\n logger.warn(\n colors.yellow(\n `[RETRY] group size=${group.length} partition=${partitionKey} attempt=${attempt}: ${msg}`,\n ),\n );\n },\n },\n );\n\n const result = decodeCodec(ConsentPreferenceResponse, rawResult);\n results.push(...result.nodes);\n total += group.length;\n progressBar.update(total);\n },\n {\n concurrency,\n },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n if (!skipLogging) {\n // Log completion time\n logger.info(colors.green(`Completed download in \"${totalTime / 1000}\" seconds.`));\n }\n\n return results;\n}\n","import { PreferenceTopicType } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport inquirer from 'inquirer';\nimport { uniq, difference } from 'lodash-es';\n\nimport { logger } from '../../logger.js';\nimport { mapSeries } from '../bluebird.js';\nimport { PreferenceTopic } from '../graphql/index.js';\nimport { splitCsvToList } from '../requests/index.js';\nimport { FileMetadataState } from './codecs.js';\n\n/* eslint-disable no-param-reassign */\n\n/**\n * Parse out the purpose.enabled and preference values from a CSV file\n *\n * @param preferences - List of preferences\n * @param currentState - The current file metadata state for parsing this list\n * @param options - Options\n * @returns The updated file metadata state\n */\nexport async function 
parsePreferenceAndPurposeValuesFromCsv(\n preferences: Record<string, string>[],\n currentState: FileMetadataState,\n {\n purposeSlugs,\n preferenceTopics,\n forceTriggerWorkflows,\n }: {\n /** The purpose slugs that are allowed to be updated */\n purposeSlugs: string[];\n /** The preference topics */\n preferenceTopics: PreferenceTopic[];\n /** Force workflow triggers */\n forceTriggerWorkflows: boolean;\n },\n): Promise<FileMetadataState> {\n // Determine columns to map\n const columnNames = uniq(preferences.map((x) => Object.keys(x)).flat());\n\n // Determine the columns that could potentially be used for identifier\n const otherColumns = difference(columnNames, [\n ...(currentState.identifierColumn ? [currentState.identifierColumn] : []),\n ...(currentState.timestampColum ? [currentState.timestampColum] : []),\n ]);\n if (otherColumns.length === 0) {\n if (forceTriggerWorkflows) {\n return currentState;\n }\n throw new Error('No other columns to process');\n }\n\n // The purpose and preferences to map to\n const purposeNames = [\n ...purposeSlugs,\n ...preferenceTopics.map((x) => `${x.purpose.trackingType}->${x.slug}`),\n ];\n\n // Ensure all columns are accounted for\n await mapSeries(otherColumns, async (col) => {\n // Determine the unique values to map in this column\n const uniqueValues = uniq(preferences.map((x) => x[col]));\n\n // Map the column to a purpose\n let purposeMapping = currentState.columnToPurposeName[col];\n if (purposeMapping) {\n logger.info(\n colors.magenta(`Column \"${col}\" is associated with purpose \"${purposeMapping.purpose}\"`),\n );\n } else {\n const { purposeName } = await inquirer.prompt<{\n /** purpose name */\n purposeName: string;\n }>([\n {\n name: 'purposeName',\n message: `Choose the purpose that column ${col} is associated with`,\n type: 'list',\n default: purposeNames.find((x) => x.startsWith(purposeSlugs[0])),\n choices: purposeNames,\n },\n ]);\n const [purposeSlug, preferenceSlug] = purposeName.split('->');\n purposeMapping = {\n purpose: purposeSlug,\n preference: preferenceSlug || null,\n valueMapping: {},\n };\n }\n\n // map each value to the purpose value\n await mapSeries(uniqueValues, async (value) => {\n if (purposeMapping.valueMapping[value] !== undefined) {\n logger.info(\n colors.magenta(\n `Value \"${value}\" is associated with purpose value \"${purposeMapping.valueMapping[value]}\"`,\n ),\n );\n return;\n }\n // if preference is null, this column is just for the purpose\n if (purposeMapping.preference === null) {\n const { purposeValue } = await inquirer.prompt<{\n /** purpose value */\n purposeValue: boolean;\n }>([\n {\n name: 'purposeValue',\n message: `Choose the purpose value for value \"${value}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'confirm',\n default: value !== 'false',\n },\n ]);\n purposeMapping.valueMapping[value] = purposeValue;\n }\n\n // if preference is not null, this column is for a specific preference\n if (purposeMapping.preference !== null) {\n const preferenceTopic = preferenceTopics.find((x) => x.slug === purposeMapping.preference);\n if (!preferenceTopic) {\n logger.error(colors.red(`Preference topic \"${purposeMapping.preference}\" not found`));\n return;\n }\n const preferenceOptions = preferenceTopic.preferenceOptionValues.map(({ slug }) => slug);\n\n if (preferenceTopic.type === PreferenceTopicType.Boolean) {\n const { preferenceValue } = await inquirer.prompt<{\n /** purpose value */\n preferenceValue: boolean;\n }>([\n {\n name: 'preferenceValue',\n message:\n // 
eslint-disable-next-line max-len\n `Choose the preference value for \"${preferenceTopic.slug}\" value \"${value}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'confirm',\n default: value !== 'false',\n },\n ]);\n purposeMapping.valueMapping[value] = preferenceValue;\n return;\n }\n\n if (preferenceTopic.type === PreferenceTopicType.Select) {\n const { preferenceValue } = await inquirer.prompt<{\n /** purpose value */\n preferenceValue: boolean;\n }>([\n {\n name: 'preferenceValue',\n // eslint-disable-next-line max-len\n message: `Choose the preference value for \"${preferenceTopic.slug}\" value \"${value}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'list',\n choices: preferenceOptions,\n default: preferenceOptions.find((x) => x === value),\n },\n ]);\n purposeMapping.valueMapping[value] = preferenceValue;\n return;\n }\n\n if (preferenceTopic.type === PreferenceTopicType.MultiSelect) {\n const parsedValues = splitCsvToList(value);\n // need to do this serially\n await mapSeries(parsedValues, async (parsedValue) => {\n // if we already have a value, skip re-processing it again\n if (purposeMapping.valueMapping[parsedValue] !== undefined) {\n return;\n }\n const { preferenceValue } = await inquirer.prompt<{\n /** purpose value */\n preferenceValue: boolean;\n }>([\n {\n name: 'preferenceValue',\n // eslint-disable-next-line max-len\n message: `Choose the preference value for \"${preferenceTopic.slug}\" value \"${parsedValue}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'list',\n choices: preferenceOptions,\n default: preferenceOptions.find((x) => x === parsedValue),\n },\n ]);\n purposeMapping.valueMapping[parsedValue] = preferenceValue;\n });\n return;\n }\n\n throw new Error(`Unknown preference topic type: ${preferenceTopic.type}`);\n }\n });\n\n currentState.columnToPurposeName[col] = purposeMapping;\n });\n\n return currentState;\n}\n/* eslint-enable no-param-reassign */\n","import colors from 'colors';\nimport inquirer from 'inquirer';\nimport { uniq, groupBy, difference } from 'lodash-es';\n\nimport { logger } from '../../logger.js';\nimport { inquirerConfirmBoolean } from '../helpers/index.js';\nimport { FileMetadataState } from './codecs.js';\n\n/* eslint-disable no-param-reassign */\n\n/**\n * Parse identifiers from a CSV list of preferences\n *\n * Ensures that all rows have a valid identifier\n * and that all identifiers are unique.\n *\n * @param preferences - List of preferences\n * @param currentState - The current file metadata state for parsing this list\n * @returns The updated file metadata state\n */\nexport async function parsePreferenceIdentifiersFromCsv(\n preferences: Record<string, string>[],\n currentState: FileMetadataState,\n): Promise<{\n /** The updated state */\n currentState: FileMetadataState;\n /** The updated preferences */\n preferences: Record<string, string>[];\n}> {\n // Determine columns to map\n const columnNames = uniq(preferences.map((x) => Object.keys(x)).flat());\n\n // Determine the columns that could potentially be used for identifier\n const remainingColumnsForIdentifier = difference(columnNames, [\n ...(currentState.identifierColumn ? 
[currentState.identifierColumn] : []),\n ...Object.keys(currentState.columnToPurposeName),\n ]);\n\n // Determine the identifier column to work off of\n if (!currentState.identifierColumn) {\n const { identifierName } = await inquirer.prompt<{\n /** Identifier name */\n identifierName: string;\n }>([\n {\n name: 'identifierName',\n message:\n 'Choose the column that will be used as the identifier to upload consent preferences by',\n type: 'list',\n default:\n remainingColumnsForIdentifier.find((col) => col.toLowerCase().includes('email')) ||\n remainingColumnsForIdentifier[0],\n choices: remainingColumnsForIdentifier,\n },\n ]);\n currentState.identifierColumn = identifierName;\n }\n logger.info(colors.magenta(`Using identifier column \"${currentState.identifierColumn}\"`));\n\n // Validate that the identifier column is present for all rows and unique\n const identifierColumnsMissing = preferences\n .map((pref, ind) => (pref[currentState.identifierColumn!] ? null : [ind]))\n .filter((x): x is number[] => !!x)\n .flat();\n if (identifierColumnsMissing.length > 0) {\n const msg = `The identifier column \"${\n currentState.identifierColumn\n }\" is missing a value for the following rows: ${identifierColumnsMissing.join(', ')}`;\n logger.warn(colors.yellow(msg));\n\n // Ask user if they would like to skip rows missing an identifier\n const skip = await inquirerConfirmBoolean({\n message: 'Would you like to skip rows missing an identifier?',\n });\n if (!skip) {\n throw new Error(msg);\n }\n\n // Filter out rows missing an identifier\n const previous = preferences.length;\n preferences = preferences.filter((pref) => pref[currentState.identifierColumn!]);\n logger.info(\n colors.yellow(`Skipped ${previous - preferences.length} rows missing an identifier`),\n );\n }\n logger.info(\n colors.magenta(\n `The identifier column \"${currentState.identifierColumn}\" is present for all rows`,\n ),\n );\n\n // Validate that all identifiers are unique\n const rowsByUserId = groupBy(preferences, currentState.identifierColumn);\n const duplicateIdentifiers = Object.entries(rowsByUserId).filter(([, rows]) => rows.length > 1);\n if (duplicateIdentifiers.length > 0) {\n const msg = `The identifier column \"${\n currentState.identifierColumn\n }\" has duplicate values for the following rows: ${duplicateIdentifiers\n .slice(0, 10)\n .map(([userId, rows]) => `${userId} (${rows.length})`)\n .join('\\n')}`;\n logger.warn(colors.yellow(msg));\n\n // Ask user if they would like to take the most recent update\n // for each duplicate identifier\n const skip = await inquirerConfirmBoolean({\n message: 'Would you like to automatically take the latest update?',\n });\n if (!skip) {\n throw new Error(msg);\n }\n preferences = Object.entries(rowsByUserId)\n .map(([, rows]) => {\n const sorted = rows.sort(\n (a, b) =>\n new Date(b[currentState.timestampColum!]).getTime() -\n new Date(a[currentState.timestampColum!]).getTime(),\n );\n return sorted[0];\n })\n .filter((x) => x);\n }\n\n return { currentState, preferences };\n}\n/* eslint-enable no-param-reassign */\n","import colors from 'colors';\nimport inquirer from 'inquirer';\nimport { uniq, difference } from 'lodash-es';\n\nimport { logger } from '../../logger.js';\nimport { FileMetadataState } from './codecs.js';\n\nexport const NONE_PREFERENCE_MAP = '[NONE]';\n\n/* eslint-disable no-param-reassign */\n\n/**\n * Parse timestamps from a CSV list of preferences\n *\n * When timestamp is requested, this script\n * ensures that all rows have a valid timestamp.\n *\n * Error 
is throw if timestamp is missing\n *\n * @param preferences - List of preferences\n * @param currentState - The current file metadata state for parsing this list\n * @returns The updated file metadata state\n */\nexport async function parsePreferenceTimestampsFromCsv(\n preferences: Record<string, string>[],\n currentState: FileMetadataState,\n): Promise<FileMetadataState> {\n // Determine columns to map\n const columnNames = uniq(preferences.map((x) => Object.keys(x)).flat());\n\n // Determine the columns that could potentially be used for timestamp\n const remainingColumnsForTimestamp = difference(columnNames, [\n ...(currentState.identifierColumn ? [currentState.identifierColumn] : []),\n ...Object.keys(currentState.columnToPurposeName),\n ]);\n\n // Determine the timestamp column to work off of\n if (!currentState.timestampColum) {\n const { timestampName } = await inquirer.prompt<{\n /** timestamp name */\n timestampName: string;\n }>([\n {\n name: 'timestampName',\n message: 'Choose the column that will be used as the timestamp of last preference update',\n type: 'list',\n default:\n remainingColumnsForTimestamp.find((col) => col.toLowerCase().includes('date')) ||\n remainingColumnsForTimestamp.find((col) => col.toLowerCase().includes('time')) ||\n remainingColumnsForTimestamp[0],\n choices: [...remainingColumnsForTimestamp, NONE_PREFERENCE_MAP],\n },\n ]);\n currentState.timestampColum = timestampName;\n }\n logger.info(colors.magenta(`Using timestamp column \"${currentState.timestampColum}\"`));\n\n // Validate that all rows have valid timestamp\n if (currentState.timestampColum !== NONE_PREFERENCE_MAP) {\n const timestampColumnsMissing = preferences\n .map((pref, ind) => (pref[currentState.timestampColum!] ? null : [ind]))\n .filter((x): x is number[] => !!x)\n .flat();\n if (timestampColumnsMissing.length > 0) {\n throw new Error(\n `The timestamp column \"${\n currentState.timestampColum\n }\" is missing a value for the following rows: ${timestampColumnsMissing.join('\\n')}`,\n );\n }\n logger.info(\n colors.magenta(\n `The timestamp column \"${currentState.timestampColum}\" is present for all row`,\n ),\n );\n }\n return currentState;\n}\n/* eslint-enable no-param-reassign */\n","import { PersistedState } from '@transcend-io/persisted-state';\nimport colors from 'colors';\nimport type { Got } from 'got';\nimport * as t from 'io-ts';\nimport { keyBy } from 'lodash-es';\n\nimport { logger } from '../../logger.js';\nimport { PreferenceTopic } from '../graphql/index.js';\nimport { readCsv } from '../requests/index.js';\nimport { checkIfPendingPreferenceUpdatesAreNoOp } from './checkIfPendingPreferenceUpdatesAreNoOp.js';\nimport { checkIfPendingPreferenceUpdatesCauseConflict } from './checkIfPendingPreferenceUpdatesCauseConflict.js';\nimport { FileMetadataState, PreferenceState } from './codecs.js';\nimport { getPreferencesForIdentifiers } from './getPreferencesForIdentifiers.js';\nimport { getPreferenceUpdatesFromRow } from './getPreferenceUpdatesFromRow.js';\nimport { parsePreferenceAndPurposeValuesFromCsv } from './parsePreferenceAndPurposeValuesFromCsv.js';\nimport { parsePreferenceIdentifiersFromCsv } from './parsePreferenceIdentifiersFromCsv.js';\nimport { parsePreferenceTimestampsFromCsv } from './parsePreferenceTimestampsFromCsv.js';\n\n/**\n * Parse a file into the cache\n *\n *\n * @param options - Options\n * @param cache - The cache to store the parsed file in\n * @returns The cache with the parsed file\n */\nexport async function parsePreferenceManagementCsvWithCache(\n 
{\n file,\n sombra,\n purposeSlugs,\n preferenceTopics,\n partitionKey,\n skipExistingRecordCheck,\n forceTriggerWorkflows,\n }: {\n /** File to parse */\n file: string;\n /** The purpose slugs that are allowed to be updated */\n purposeSlugs: string[];\n /** The preference topics */\n preferenceTopics: PreferenceTopic[];\n /** Sombra got instance */\n sombra: Got;\n /** Partition key */\n partitionKey: string;\n /** Whether to skip the check for existing records. SHOULD ONLY BE USED FOR INITIAL UPLOAD */\n skipExistingRecordCheck: boolean;\n /** Whether to force workflow triggers */\n forceTriggerWorkflows: boolean;\n },\n cache: PersistedState<typeof PreferenceState>,\n): Promise<void> {\n // Start the timer\n const t0 = new Date().getTime();\n\n // Get the current metadata\n const fileMetadata = cache.getValue('fileMetadata');\n\n // Read in the file\n logger.info(colors.magenta(`Reading in file: \"${file}\"`));\n let preferences = readCsv(file, t.record(t.string, t.string));\n\n // start building the cache, can use previous cache as well\n let currentState: FileMetadataState = {\n columnToPurposeName: {},\n pendingSafeUpdates: {},\n pendingConflictUpdates: {},\n skippedUpdates: {},\n // Load in the last fetched time\n ...((fileMetadata[file] || {}) as Partial<FileMetadataState>),\n lastFetchedAt: new Date().toISOString(),\n };\n\n // Validate that all timestamps are present in the file\n currentState = await parsePreferenceTimestampsFromCsv(preferences, currentState);\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n\n // Validate that all identifiers are present and unique\n const result = await parsePreferenceIdentifiersFromCsv(preferences, currentState);\n currentState = result.currentState;\n preferences = result.preferences;\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n\n // Ensure all other columns are mapped to purpose and preference\n // slug values\n currentState = await parsePreferenceAndPurposeValuesFromCsv(preferences, currentState, {\n preferenceTopics,\n purposeSlugs,\n forceTriggerWorkflows,\n });\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n\n // Grab existing preference store records\n const identifiers = preferences.map((pref) => pref[currentState.identifierColumn!]);\n const existingConsentRecords = skipExistingRecordCheck\n ? 
[]\n : await getPreferencesForIdentifiers(sombra, {\n identifiers: identifiers.map((x) => ({ value: x })),\n partitionKey,\n });\n const consentRecordByIdentifier = keyBy(existingConsentRecords, 'userId');\n\n // Clear out previous updates\n currentState.pendingConflictUpdates = {};\n currentState.pendingSafeUpdates = {};\n currentState.skippedUpdates = {};\n\n // Process each row\n preferences.forEach((pref) => {\n // Grab unique Id for the user\n const userId = pref[currentState.identifierColumn!];\n\n // determine updates for user\n const pendingUpdates = getPreferenceUpdatesFromRow({\n row: pref,\n columnToPurposeName: currentState.columnToPurposeName,\n preferenceTopics,\n purposeSlugs,\n });\n\n // Grab current state of the update\n const currentConsentRecord = consentRecordByIdentifier[userId];\n if (forceTriggerWorkflows && !currentConsentRecord) {\n throw new Error(\n `No existing consent record found for user with id: ${userId}.\n When 'forceTriggerWorkflows' is set all the user identifiers should contain a consent record`,\n );\n }\n // Check if the update can be skipped\n // this is the case if a record exists, and the purpose\n // and preference values are all in sync\n if (\n currentConsentRecord &&\n checkIfPendingPreferenceUpdatesAreNoOp({\n currentConsentRecord,\n pendingUpdates,\n preferenceTopics,\n }) &&\n !forceTriggerWorkflows\n ) {\n currentState.skippedUpdates[userId] = pref;\n return;\n }\n\n // Determine if there are any conflicts\n if (\n currentConsentRecord &&\n checkIfPendingPreferenceUpdatesCauseConflict({\n currentConsentRecord,\n pendingUpdates,\n preferenceTopics,\n })\n ) {\n currentState.pendingConflictUpdates[userId] = {\n row: pref,\n record: currentConsentRecord,\n };\n return;\n }\n\n // Add to pending updates\n currentState.pendingSafeUpdates[userId] = pref;\n });\n\n // Read in the file\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n const t1 = new Date().getTime();\n logger.info(colors.green(`Successfully pre-processed file: \"${file}\" in ${(t1 - t0) / 1000}s`));\n}\n","import { PersistedState } from '@transcend-io/persisted-state';\nimport { PreferenceUpdateItem } from '@transcend-io/privacy-types';\nimport { apply } from '@transcend-io/type-utils';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport { chunk } from 'lodash-es';\n\nimport { logger } from '../../logger.js';\nimport { map } from '../bluebird.js';\nimport {\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchAllPurposes,\n fetchAllPreferenceTopics,\n} from '../graphql/index.js';\nimport { parseAttributesFromString } from '../requests/index.js';\nimport { PreferenceState } from './codecs.js';\nimport { getPreferenceUpdatesFromRow } from './getPreferenceUpdatesFromRow.js';\nimport { parsePreferenceManagementCsvWithCache } from './parsePreferenceManagementCsv.js';\nimport { NONE_PREFERENCE_MAP } from './parsePreferenceTimestampsFromCsv.js';\n\n/**\n * Upload a set of consent preferences\n *\n * @param options - Options\n */\nexport async function uploadPreferenceManagementPreferencesInteractive({\n auth,\n sombraAuth,\n receiptFilepath,\n file,\n partition,\n isSilent = true,\n dryRun = false,\n skipWorkflowTriggers = false,\n skipConflictUpdates = false,\n skipExistingRecordCheck = false,\n attributes = [],\n transcendUrl,\n forceTriggerWorkflows = false,\n}: {\n /** The Transcend API key */\n auth: string;\n /** Sombra API key authentication */\n sombraAuth?: string;\n /** Partition key */\n 
partition: string;\n /** File where to store receipt and continue from where left off */\n receiptFilepath: string;\n /** The file to process */\n file: string;\n /** API URL for Transcend backend */\n transcendUrl: string;\n /** Whether to do a dry run */\n dryRun?: boolean;\n /** Whether to upload as isSilent */\n isSilent?: boolean;\n /** Attributes string pre-parse. In format Key:Value */\n attributes?: string[];\n /** Skip workflow triggers */\n skipWorkflowTriggers?: boolean;\n /**\n * When true, only update preferences that do not conflict with existing\n * preferences. When false, update all preferences in CSV based on timestamp.\n */\n skipConflictUpdates?: boolean;\n /** Whether to skip the check for existing records. SHOULD ONLY BE USED FOR INITIAL UPLOAD */\n skipExistingRecordCheck?: boolean;\n /** Whether to force trigger workflows */\n forceTriggerWorkflows?: boolean;\n}): Promise<void> {\n // Parse out the extra attributes to apply to all requests uploaded\n const parsedAttributes = parseAttributesFromString(attributes);\n\n // Create a new state file to store the requests from this run\n const preferenceState = new PersistedState(receiptFilepath, PreferenceState, {\n fileMetadata: {},\n failingUpdates: {},\n pendingUpdates: {},\n });\n const failingRequests = preferenceState.getValue('failingUpdates');\n const pendingRequests = preferenceState.getValue('pendingUpdates');\n let fileMetadata = preferenceState.getValue('fileMetadata');\n\n logger.info(\n colors.magenta(\n 'Restored cache, there are: \\n' +\n `${Object.values(failingRequests).length} failing requests to be retried\\n` +\n `${Object.values(pendingRequests).length} pending requests to be processed\\n` +\n `The following files are stored in cache and will be used:\\n${Object.keys(fileMetadata)\n .map((x) => x)\n .join('\\n')}\\n` +\n `The following file will be processed: ${file}\\n`,\n ),\n );\n\n // Create GraphQL client to connect to Transcend backend\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const [sombra, purposes, preferenceTopics] = await Promise.all([\n // Create sombra instance to communicate with\n createSombraGotInstance(transcendUrl, auth, sombraAuth),\n // get all purposes and topics\n fetchAllPurposes(client),\n fetchAllPreferenceTopics(client),\n ]);\n\n // Process the file\n await parsePreferenceManagementCsvWithCache(\n {\n file,\n purposeSlugs: purposes.map((x) => x.trackingType),\n preferenceTopics,\n sombra,\n partitionKey: partition,\n skipExistingRecordCheck,\n forceTriggerWorkflows,\n },\n preferenceState,\n );\n\n // Construct the pending updates\n const pendingUpdates: Record<string, PreferenceUpdateItem> = {};\n fileMetadata = preferenceState.getValue('fileMetadata');\n const metadata = fileMetadata[file];\n\n logger.info(\n colors.magenta(\n `Found ${Object.entries(metadata.pendingSafeUpdates).length} safe updates in ${file}`,\n ),\n );\n logger.info(\n colors.magenta(\n `Found ${Object.entries(metadata.pendingConflictUpdates).length} conflict updates in ${file}`,\n ),\n );\n logger.info(\n colors.magenta(\n `Found ${Object.entries(metadata.skippedUpdates).length} skipped updates in ${file}`,\n ),\n );\n\n // Update either safe updates only or safe + conflict\n Object.entries({\n ...metadata.pendingSafeUpdates,\n ...(skipConflictUpdates ? {} : apply(metadata.pendingConflictUpdates, ({ row }) => row)),\n }).forEach(([userId, update]) => {\n // Determine timestamp\n const timestamp =\n metadata.timestampColum === NONE_PREFERENCE_MAP\n ? 
new Date()\n : new Date(update[metadata.timestampColum!]);\n\n // Determine updates\n const updates = getPreferenceUpdatesFromRow({\n row: update,\n columnToPurposeName: metadata.columnToPurposeName,\n preferenceTopics,\n purposeSlugs: purposes.map((x) => x.trackingType),\n });\n pendingUpdates[userId] = {\n userId,\n partition,\n timestamp: timestamp.toISOString(),\n purposes: Object.entries(updates).map(([purpose, value]) => ({\n ...value,\n purpose,\n workflowSettings: {\n attributes: parsedAttributes,\n isSilent,\n skipWorkflowTrigger: skipWorkflowTriggers,\n ...(forceTriggerWorkflows ? { forceTriggerWorkflow: forceTriggerWorkflows } : {}),\n },\n })),\n };\n });\n await preferenceState.setValue(pendingUpdates, 'pendingUpdates');\n await preferenceState.setValue({}, 'failingUpdates');\n\n // Exist early if dry run\n if (dryRun) {\n logger.info(\n colors.green(\n `Dry run complete, exiting. ${\n Object.values(pendingUpdates).length\n } pending updates. Check file: ${receiptFilepath}`,\n ),\n );\n return;\n }\n\n logger.info(\n colors.magenta(\n `Uploading ${Object.values(pendingUpdates).length} preferences to partition: ${partition}`,\n ),\n );\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);\n\n // Build a GraphQL client\n let total = 0;\n const updatesToRun = Object.entries(pendingUpdates);\n const chunkedUpdates = chunk(updatesToRun, skipWorkflowTriggers ? 100 : 10);\n progressBar.start(updatesToRun.length, 0);\n await map(\n chunkedUpdates,\n async (currentChunk) => {\n // Make the request\n try {\n await sombra\n .put('v1/preferences', {\n json: {\n records: currentChunk.map(([, update]) => update),\n skipWorkflowTriggers,\n },\n })\n .json();\n } catch (err) {\n try {\n const parsed = JSON.parse(err?.response?.body || '{}');\n if (parsed.error) {\n logger.error(colors.red(`Error: ${parsed.error}`));\n }\n } catch (e) {\n // continue\n }\n logger.error(\n colors.red(\n `Failed to upload ${currentChunk.length} user preferences to partition ${partition}: ${\n err?.response?.body || err?.message\n }`,\n ),\n );\n const failingUpdates = preferenceState.getValue('failingUpdates');\n currentChunk.forEach(([userId, update]) => {\n failingUpdates[userId] = {\n uploadedAt: new Date().toISOString(),\n update,\n error: err?.response?.body || err?.message || 'Unknown error',\n };\n });\n await preferenceState.setValue(failingUpdates, 'failingUpdates');\n }\n\n total += currentChunk.length;\n progressBar.update(total);\n },\n {\n concurrency: 40,\n },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n logger.info(\n colors.green(\n `Successfully uploaded ${\n updatesToRun.length\n } user preferences to partition ${partition} in \"${totalTime / 1000}\" seconds!`,\n ),\n );\n}\n","import { readdirSync } from 'node:fs';\nimport { basename, join } from 'node:path';\n\nimport colors from 'colors';\n\nimport type { LocalContext } from '../../../context.js';\nimport { map } from '../../../lib/bluebird.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { uploadPreferenceManagementPreferencesInteractive } from '../../../lib/preference-management/index.js';\nimport { splitCsvToList } from '../../../lib/requests/index.js';\nimport { logger } from '../../../logger.js';\n\nexport interface UploadPreferencesCommandFlags {\n auth: string;\n partition: 
string;\n sombraAuth?: string;\n transcendUrl: string;\n file?: string;\n directory?: string;\n dryRun: boolean;\n skipExistingRecordCheck: boolean;\n receiptFileDir: string;\n skipWorkflowTriggers: boolean;\n forceTriggerWorkflows: boolean;\n skipConflictUpdates: boolean;\n isSilent: boolean;\n attributes: string;\n receiptFilepath: string;\n concurrency: number;\n}\n\nexport async function uploadPreferences(\n this: LocalContext,\n {\n auth,\n partition,\n sombraAuth,\n transcendUrl,\n file = '',\n directory,\n dryRun,\n skipExistingRecordCheck,\n receiptFileDir,\n skipWorkflowTriggers,\n forceTriggerWorkflows,\n skipConflictUpdates,\n isSilent,\n attributes,\n concurrency,\n }: UploadPreferencesCommandFlags,\n): Promise<void> {\n if (!!directory && !!file) {\n logger.error(\n colors.red('Cannot provide both a directory and a file. Please provide only one.'),\n );\n this.process.exit(1);\n }\n\n if (!file && !directory) {\n logger.error(\n colors.red(\n 'A file or directory must be provided. Please provide one using --file=./preferences.csv or --directory=./preferences',\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n const files: string[] = [];\n\n if (directory) {\n try {\n const filesInDirectory = readdirSync(directory);\n const csvFiles = filesInDirectory.filter((file) => file.endsWith('.csv'));\n\n if (csvFiles.length === 0) {\n logger.error(colors.red(`No CSV files found in directory: ${directory}`));\n this.process.exit(1);\n }\n\n // Add full paths for each CSV file\n files.push(...csvFiles.map((file) => join(directory, file)));\n } catch (err) {\n logger.error(colors.red(`Failed to read directory: ${directory}`));\n logger.error(colors.red((err as Error).message));\n this.process.exit(1);\n }\n } else {\n try {\n // Verify file exists and is a CSV\n if (!file.endsWith('.csv')) {\n logger.error(colors.red('File must be a CSV file'));\n this.process.exit(1);\n }\n files.push(file);\n } catch (err) {\n logger.error(colors.red(`Failed to access file: ${file}`));\n logger.error(colors.red((err as Error).message));\n this.process.exit(1);\n }\n }\n\n logger.info(\n colors.green(\n `Processing ${files.length} consent preferences files for partition: ${partition}`,\n ),\n );\n logger.debug(`Files to process: ${files.join(', ')}`);\n\n if (skipExistingRecordCheck) {\n logger.info(colors.bgYellow(`Skipping existing record check: ${skipExistingRecordCheck}`));\n }\n\n await map(\n files,\n async (filePath) => {\n const fileName = basename(filePath).replace('.csv', '');\n await uploadPreferenceManagementPreferencesInteractive({\n receiptFilepath: join(receiptFileDir, `${fileName}-receipts.json`),\n auth,\n sombraAuth,\n file: filePath,\n partition,\n transcendUrl,\n skipConflictUpdates,\n skipWorkflowTriggers,\n skipExistingRecordCheck,\n isSilent,\n dryRun,\n attributes: splitCsvToList(attributes),\n forceTriggerWorkflows,\n });\n },\n { concurrency },\n 
);\n}\n"],"mappings":"qpCAuCA,SAAgB,EAA4B,CAC1C,MACA,sBACA,eACA,oBAYA,CAEA,IAAM,EAEF,EAAE,CA4LN,OAzLA,OAAO,QAAQ,EAAoB,CAAC,SACjC,CAAC,EAAY,CAAE,UAAS,aAAY,mBAAoB,CAEvD,GAAI,CAAC,EAAa,SAAS,EAAQ,CACjC,MAAU,MAAM,yBAAyB,EAAQ,cAAc,EAAa,KAAK,KAAK,GAAG,CAI3F,IAAM,EAAW,EAAI,GAGrB,GAAI,EAAY,CACd,IAAM,EAAkB,EAAiB,KACtC,GAAM,EAAE,OAAS,GAAc,EAAE,QAAQ,eAAiB,EAC5D,CACD,GAAI,CAAC,EAAiB,CACpB,IAAM,EAAgB,EACnB,OAAQ,GAAM,EAAE,QAAQ,eAAiB,EAAQ,CACjD,IAAK,GAAM,EAAE,KAAK,CACrB,MAAU,MACR,4BAA4B,EAAW,gBAAgB,EAAQ,8CAChB,EAAc,KAAK,IAAI,GACvE,CAcH,OAVK,EAAO,KACV,EAAO,GAAW,CAChB,YAAa,EAAE,CAChB,EAEE,EAAO,GAAS,cACnB,EAAO,GAAS,YAAc,EAAE,EAI1B,EAAgB,KAAxB,CACE,KAAK,EAAoB,QAAS,CAChC,IAAM,EAAc,EAAa,GAEjC,GAAI,IAAgB,IAAA,IAAa,IAAa,GAC5C,MAAU,MACR,0CAA0C,EAAS,eAC7C,EAAW,aAAa,EAAQ,eAAe,EAAW,GACjE,CAIH,GAAI,GAAgB,KAClB,OAIF,GAAI,OAAO,GAAgB,UACzB,MAAU,MACR,yCAAyC,EAAW,2BAA2B,IAChF,CAEH,EAAO,GAAS,YAAa,KAAK,CAChC,MAAO,EACP,OAAQ,CAAE,aAAc,EAAa,CACtC,CAAC,CACF,MAGF,KAAK,EAAoB,OAAQ,CAC/B,IAAM,EAAc,EAAa,GAEjC,GAAI,IAAgB,IAAA,IAAa,IAAa,GAC5C,MAAU,MACR,0CAA0C,EAAS,eAC7C,EAAW,aAAa,EAAQ,eAAe,EAAW,GACjE,CAIH,GAAI,GAAgB,KAClB,OAIF,GAAI,OAAO,GAAgB,SACzB,MAAU,MACR,wCAAwC,EAAW,0BAA0B,IAC9E,CAEH,IAAM,EAAU,EAAY,MAAM,EAAI,KAEtC,GACE,GACA,CAAC,EAAgB,uBAAuB,KAAK,CAAE,UAAW,EAAK,CAAC,SAAS,EAAQ,CAEjF,MAAU,MACR,wCAAwC,EAAW,qBAC9C,EAAgB,uBAChB,KAAK,CAAE,UAAW,EAAK,CACvB,KAAK,KAAK,CAAC,SAAS,IAC1B,CAGH,EAAO,GAAS,YAAa,KAAK,CAChC,MAAO,EACP,OAAQ,CAAE,YAAa,EAAS,CACjC,CAAC,CACF,MAGF,KAAK,EAAoB,YAAa,CACpC,GAAI,OAAO,GAAa,SACtB,MAAU,MACR,8CAA8C,EAAW,0BAA0B,IACpF,CAKH,IAAM,EAAe,EAAe,EAAS,CAC1C,IAAK,GAAU,CACd,IAAM,EAAc,EAAa,GAEjC,GAAI,IAAgB,IAAA,IAAa,IAAa,GAC5C,MAAU,MACR,uDAAuD,EAAS,eAC1D,EAAW,aAAa,EAAQ,eAAe,EAAW,GACjE,CAIH,GAAI,GAAgB,KAClB,OAAO,KAIT,GAAI,OAAO,GAAgB,SACzB,MAAU,MACR,8CAA8C,EAAW,qBACnC,EAAgB,uBACjC,KAAK,CAAE,UAAW,EAAK,CACvB,KAAK,KAAK,CAAC,SAAS,IAC1B,CAEH,OAAO,GACP,CACD,OAAQ,GAAmB,IAAM,KAAK,CACtC,MAAM,EAAG,IAAM,EAAE,cAAc,EAAE,CAAC,CAGjC,EAAa,OAAS,GACxB,EAAO,GAAS,YAAa,KAAK,CAChC,MAAO,EACP,OAAQ,CAAE,eAAc,CACzB,CAAC,CAEJ,MAGF,QACE,MAAU,MAAM,4BAA4B,EAAgB,OAAO,MAElE,CAEL,IAAM,EAAc,EAAa,GACjC,GAAI,IAAgB,IAAA,IAAa,IAAa,GAC5C,MAAU,MACR,0CAA0C,EAAS,eAC7C,EAAW,aAAa,EAAQ,kBAAkB,KAAK,UAAU,EAAI,GAC5E,CAEH,GAAI,IAAgB,KAClB,OAGG,EAAO,GAKV,EAAO,GAAS,QAAU,IAAgB,GAH1C,EAAO,GAAW,CAAE,QAAS,IAAgB,GAAM,GAO1D,CAIM,EAAM,GAAS,EAAG,IAAgB,CACvC,GAAI,OAAO,EAAE,SAAY,UACvB,MAAU,MAAM,6DAA6D,IAAc,CAE7F,MAAO,CACL,GAAG,EACH,QAAS,EAAE,QACZ,EACD,CCjPJ,SAAgB,EAAuC,CACrD,uBACA,iBACA,oBAUU,CAEV,OAAO,OAAO,QAAQ,EAAe,CAAC,OAAO,CAAC,EAAa,CAAE,cAAc,EAAE,CAAE,cAAe,CAE5F,IAAM,EAAiB,EAAqB,SAAS,KAClD,GAAoB,EAAgB,UAAY,EAClD,CAUD,OAN0B,GAAkB,EAAe,UAAY,EAMhE,EAAY,OAChB,CAAE,QAAO,YAER,EAAe,aACf,EAAe,YAAY,KAAM,GAAuB,CAEtD,GAAI,EAAmB,QAAU,EAC/B,MAAO,GAIT,IAAM,EAAkB,EAAiB,KACtC,GAAM,EAAE,OAAS,GAAS,EAAE,QAAQ,eAAiB,EACvD,CACD,GAAI,CAAC,EACH,MAAU,MAAM,uCAAuC,IAAQ,CAIjE,OAAQ,EAAgB,KAAxB,CACE,KAAK,EAAoB,QACvB,OAAO,EAAmB,OAAO,eAAiB,EAAO,aAC3D,KAAK,EAAoB,OACvB,OAAO,EAAmB,OAAO,cAAgB,EAAO,YAC1D,KAAK,EAAoB,YAEvB,IAAM,GAAuB,EAAmB,OAAO,cAAgB,EAAE,EAAE,MAAM,CAE3E,GAAmB,EAAO,cAAgB,EAAE,EAAE,MAAM,CAC1D,OACE,EAAoB,SAAW,EAAgB,QAC/C,EAAoB,OAAO,EAAG,IAAM,IAAM,EAAgB,GAAG,CAEjE,QACE,MAAU,MAAM,kCAAkC,EAAgB,OAAO,GAE7E,CACL,CAzCQ,IA0CT,CCjEJ,SAAgB,EAA6C,CAC3D,uBACA,iBACA,mBACA,OAYU,CAEV,MAAO,CAAC,CAAC,OAAO,QAAQ,EAAe,CAAC,MAAM,CAAC,EAAa,CAAE,cAAc,EAAE,CAAE,cAAe,CAE7F,IAAM,EAAiB,EAAqB,SAAS,KAClD,GAAoB,EAAgB,UAAY,EAClD,CAwBD,OArBK,EAUD,EAAe,UAAY,EAWxB,CAAC,CAAC,EAAY,MAAM,CAAE,QAAO,YAAa,CAE/C,IAAM,GAAqB,EAAe,aAAe,EAAE,EAAE,KAC1D,GAAuB,EAAmB,QAAU,EACtD,CAGD,GAAI,CAAC,EAOH,OANI,GACF,EAAO,KACL,0CAA0C,EAAM,cAC3C,EAAY,YAAY,EAAqB,OAAO,GAC1
D,CAEI,GAIT,IAAM,EAAkB,EAAiB,KACtC,GAAM,EAAE,OAAS,GAAS,EAAE,QAAQ,eAAiB,EACvD,CACD,GAAI,CAAC,EACH,MAAU,MAAM,uCAAuC,IAAQ,CAIjE,IAAI,EACA,EACJ,OAAQ,EAAgB,KAAxB,CACE,KAAK,EAAoB,QASvB,MARA,GAAY,EAAkB,OAAO,eAAiB,EAAO,aACzD,GACF,EAAO,KACL,oBAAoB,EAAM,mCACrB,EAAqB,OAAO,cAAc,EAAO,aAAa,WACvD,EAAkB,OAAO,eACtC,CAEI,EACT,KAAK,EAAoB,OASvB,MARA,GAAc,EAAkB,OAAO,cAAgB,EAAO,YAC1D,GACF,EAAO,KACL,oBAAoB,EAAM,kCACrB,EAAqB,OAAO,cAAc,EAAO,YAAY,WACtD,EAAkB,OAAO,cACtC,CAEI,EACT,KAAK,EAAoB,YAEvB,IAAM,GAAuB,EAAkB,OAAO,cAAgB,EAAE,EAAE,MAAM,CAE1E,GAAmB,EAAO,cAAgB,EAAE,EAAE,MAAM,CAY1D,MAXA,GACE,EAAoB,SAAW,EAAgB,QAC/C,CAAC,EAAoB,OAAO,EAAG,IAAM,IAAM,EAAgB,GAAG,CAC5D,GACF,EAAO,KACL,oBAAoB,EAAM,wCACrB,EAAqB,OAAO,cAAc,EAAgB,KAC3D,KACD,CAAC,WAAW,EAAoB,KAAK,KAAK,GAC9C,CAEI,EACT,QACE,MAAU,MAAM,kCAAkC,EAAgB,OAAO,GAE7E,EA/EI,GACF,EAAO,KACL,WAAW,EAAY,mCAAmC,EAAqB,OAAO,mBAClE,EAAQ,mBAAmB,EAAe,UAC/D,CAEI,KAhBH,GACF,EAAO,KACL,iCAAiC,EAAY,yBAAyB,EAAqB,OAAO,GACnG,CAEI,KAqFT,CCjHJ,eAAsB,EACpB,EACA,CACE,cACA,eACA,cAAc,GACd,cAAc,IAcwB,CACxC,IAAM,EAAyC,EAAE,CAC3C,EAAqB,EAAM,EAAa,IAAI,CAG5C,EAAK,IAAI,MAAM,CAAC,SAAS,CACzB,EAAc,IAAI,EAAY,UAAU,EAAE,CAAE,EAAY,QAAQ,eAAe,CAChF,GACH,EAAY,MAAM,EAAY,OAAQ,EAAE,CAG1C,IAAI,EAAQ,EACZ,MAAM,EACJ,EACA,KAAO,IAAU,CAuBf,IAAM,EAAS,EAAY,EAtBT,MAAM,EACtB,uBAEE,EACG,KAAK,kBAAkB,EAAa,QAAS,CAC5C,KAAM,CACJ,OAAQ,CAAE,YAAa,EAAO,CAC9B,MAAO,EAAM,OACd,CACF,CAAC,CACD,MAAM,CACX,CACE,SAAU,EAAS,EAAM,IAAQ,CAC/B,EAAO,KACL,EAAO,OACL,sBAAsB,EAAM,OAAO,aAAa,EAAa,WAAW,EAAQ,IAAI,IACrF,CACF,EAEJ,CACF,CAE+D,CAChE,EAAQ,KAAK,GAAG,EAAO,MAAM,CAC7B,GAAS,EAAM,OACf,EAAY,OAAO,EAAM,EAE3B,CACE,cACD,CACF,CAED,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EAOvB,OALK,GAEH,EAAO,KAAK,EAAO,MAAM,0BAA0B,EAAY,IAAK,YAAY,CAAC,CAG5E,EC1ET,eAAsB,EACpB,EACA,EACA,CACE,eACA,mBACA,yBAS0B,CAK5B,IAAM,EAAe,EAHD,EAAK,EAAY,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CAG1B,CAC3C,GAAI,EAAa,iBAAmB,CAAC,EAAa,iBAAiB,CAAG,EAAE,CACxE,GAAI,EAAa,eAAiB,CAAC,EAAa,eAAe,CAAG,EAAE,CACrE,CAAC,CACF,GAAI,EAAa,SAAW,EAAG,CAC7B,GAAI,EACF,OAAO,EAET,MAAU,MAAM,8BAA8B,CAIhD,IAAM,EAAe,CACnB,GAAG,EACH,GAAG,EAAiB,IAAK,GAAM,GAAG,EAAE,QAAQ,aAAa,IAAI,EAAE,OAAO,CACvE,CA0ID,OAvIA,MAAM,EAAU,EAAc,KAAO,IAAQ,CAE3C,IAAM,EAAe,EAAK,EAAY,IAAK,GAAM,EAAE,GAAK,CAAC,CAGrD,EAAiB,EAAa,oBAAoB,GACtD,GAAI,EACF,EAAO,KACL,EAAO,QAAQ,WAAW,EAAI,gCAAgC,EAAe,QAAQ,GAAG,CACzF,KACI,CACL,GAAM,CAAE,eAAgB,MAAM,EAAS,OAGpC,CACD,CACE,KAAM,cACN,QAAS,kCAAkC,EAAI,qBAC/C,KAAM,OACN,QAAS,EAAa,KAAM,GAAM,EAAE,WAAW,EAAa,GAAG,CAAC,CAChE,QAAS,EACV,CACF,CAAC,CACI,CAAC,EAAa,GAAkB,EAAY,MAAM,KAAK,CAC7D,EAAiB,CACf,QAAS,EACT,WAAY,GAAkB,KAC9B,aAAc,EAAE,CACjB,CAIH,MAAM,EAAU,EAAc,KAAO,IAAU,CAC7C,GAAI,EAAe,aAAa,KAAW,IAAA,GAAW,CACpD,EAAO,KACL,EAAO,QACL,UAAU,EAAM,sCAAsC,EAAe,aAAa,GAAO,GAC1F,CACF,CACD,OAGF,GAAI,EAAe,aAAe,KAAM,CACtC,GAAM,CAAE,gBAAiB,MAAM,EAAS,OAGrC,CACD,CACE,KAAM,eACN,QAAS,uCAAuC,EAAM,6BAA6B,EAAe,QAAQ,GAC1G,KAAM,UACN,QAAS,IAAU,QACpB,CACF,CAAC,CACF,EAAe,aAAa,GAAS,EAIvC,GAAI,EAAe,aAAe,KAAM,CACtC,IAAM,EAAkB,EAAiB,KAAM,GAAM,EAAE,OAAS,EAAe,WAAW,CAC1F,GAAI,CAAC,EAAiB,CACpB,EAAO,MAAM,EAAO,IAAI,qBAAqB,EAAe,WAAW,aAAa,CAAC,CACrF,OAEF,IAAM,EAAoB,EAAgB,uBAAuB,KAAK,CAAE,UAAW,EAAK,CAExF,GAAI,EAAgB,OAAS,EAAoB,QAAS,CACxD,GAAM,CAAE,mBAAoB,MAAM,EAAS,OAGxC,CACD,CACE,KAAM,kBACN,QAEE,oCAAoC,EAAgB,KAAK,WAAW,EAAM,6BAA6B,EAAe,QAAQ,GAChI,KAAM,UACN,QAAS,IAAU,QACpB,CACF,CAAC,CACF,EAAe,aAAa,GAAS,EACrC,OAGF,GAAI,EAAgB,OAAS,EAAoB,OAAQ,CACvD,GAAM,CAAE,mBAAoB,MAAM,EAAS,OAGxC,CACD,CACE,KAAM,kBAEN,QAAS,oCAAoC,EAAgB,KAAK,WAAW,EAAM,6BAA6B,EAAe,QAAQ,GACvI,KAAM,OACN,QAAS,EACT,QAAS,EAAkB,KAAM,GAAM,IAAM,EAAM,CACpD,CACF,CAAC,CACF,EAAe,aAAa,GAAS,EACrC,OAGF,GAAI,EAAgB,OAAS,
EAAoB,YAAa,CAG5D,MAAM,EAFe,EAAe,EAAM,CAEZ,KAAO,IAAgB,CAEnD,GAAI,EAAe,aAAa,KAAiB,IAAA,GAC/C,OAEF,GAAM,CAAE,mBAAoB,MAAM,EAAS,OAGxC,CACD,CACE,KAAM,kBAEN,QAAS,oCAAoC,EAAgB,KAAK,WAAW,EAAY,6BAA6B,EAAe,QAAQ,GAC7I,KAAM,OACN,QAAS,EACT,QAAS,EAAkB,KAAM,GAAM,IAAM,EAAY,CAC1D,CACF,CAAC,CACF,EAAe,aAAa,GAAe,GAC3C,CACF,OAGF,MAAU,MAAM,kCAAkC,EAAgB,OAAO,GAE3E,CAEF,EAAa,oBAAoB,GAAO,GACxC,CAEK,EC9KT,eAAsB,EACpB,EACA,EAMC,CAKD,IAAM,EAAgC,EAHlB,EAAK,EAAY,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CAGT,CAC5D,GAAI,EAAa,iBAAmB,CAAC,EAAa,iBAAiB,CAAG,EAAE,CACxE,GAAG,OAAO,KAAK,EAAa,oBAAoB,CACjD,CAAC,CAGF,GAAI,CAAC,EAAa,iBAAkB,CAClC,GAAM,CAAE,kBAAmB,MAAM,EAAS,OAGvC,CACD,CACE,KAAM,iBACN,QACE,yFACF,KAAM,OACN,QACE,EAA8B,KAAM,GAAQ,EAAI,aAAa,CAAC,SAAS,QAAQ,CAAC,EAChF,EAA8B,GAChC,QAAS,EACV,CACF,CAAC,CACF,EAAa,iBAAmB,EAElC,EAAO,KAAK,EAAO,QAAQ,4BAA4B,EAAa,iBAAiB,GAAG,CAAC,CAGzF,IAAM,EAA2B,EAC9B,KAAK,EAAM,IAAS,EAAK,EAAa,kBAAqB,KAAO,CAAC,EAAI,CAAE,CACzE,OAAQ,GAAqB,CAAC,CAAC,EAAE,CACjC,MAAM,CACT,GAAI,EAAyB,OAAS,EAAG,CACvC,IAAM,EAAM,0BACV,EAAa,iBACd,+CAA+C,EAAyB,KAAK,KAAK,GAOnF,GANA,EAAO,KAAK,EAAO,OAAO,EAAI,CAAC,CAM3B,CAHS,MAAM,EAAuB,CACxC,QAAS,qDACV,CAAC,CAEA,MAAU,MAAM,EAAI,CAItB,IAAM,EAAW,EAAY,OAC7B,EAAc,EAAY,OAAQ,GAAS,EAAK,EAAa,kBAAmB,CAChF,EAAO,KACL,EAAO,OAAO,WAAW,EAAW,EAAY,OAAO,6BAA6B,CACrF,CAEH,EAAO,KACL,EAAO,QACL,0BAA0B,EAAa,iBAAiB,2BACzD,CACF,CAGD,IAAM,EAAe,EAAQ,EAAa,EAAa,iBAAiB,CAClE,EAAuB,OAAO,QAAQ,EAAa,CAAC,QAAQ,EAAG,KAAU,EAAK,OAAS,EAAE,CAC/F,GAAI,EAAqB,OAAS,EAAG,CACnC,IAAM,EAAM,0BACV,EAAa,iBACd,iDAAiD,EAC/C,MAAM,EAAG,GAAG,CACZ,KAAK,CAAC,EAAQ,KAAU,GAAG,EAAO,IAAI,EAAK,OAAO,GAAG,CACrD,KAAK;EAAK,GAQb,GAPA,EAAO,KAAK,EAAO,OAAO,EAAI,CAAC,CAO3B,CAHS,MAAM,EAAuB,CACxC,QAAS,0DACV,CAAC,CAEA,MAAU,MAAM,EAAI,CAEtB,EAAc,OAAO,QAAQ,EAAa,CACvC,KAAK,EAAG,KACQ,EAAK,MACjB,EAAG,IACF,IAAI,KAAK,EAAE,EAAa,gBAAiB,CAAC,SAAS,CACnD,IAAI,KAAK,EAAE,EAAa,gBAAiB,CAAC,SAAS,CACtD,CACa,GACd,CACD,OAAQ,GAAM,EAAE,CAGrB,MAAO,CAAE,eAAc,cAAa,CCpGtC,eAAsB,EACpB,EACA,EAC4B,CAK5B,IAAM,EAA+B,EAHjB,EAAK,EAAY,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CAGV,CAC3D,GAAI,EAAa,iBAAmB,CAAC,EAAa,iBAAiB,CAAG,EAAE,CACxE,GAAG,OAAO,KAAK,EAAa,oBAAoB,CACjD,CAAC,CAGF,GAAI,CAAC,EAAa,eAAgB,CAChC,GAAM,CAAE,iBAAkB,MAAM,EAAS,OAGtC,CACD,CACE,KAAM,gBACN,QAAS,iFACT,KAAM,OACN,QACE,EAA6B,KAAM,GAAQ,EAAI,aAAa,CAAC,SAAS,OAAO,CAAC,EAC9E,EAA6B,KAAM,GAAQ,EAAI,aAAa,CAAC,SAAS,OAAO,CAAC,EAC9E,EAA6B,GAC/B,QAAS,CAAC,GAAG,EAA8B,SAAoB,CAChE,CACF,CAAC,CACF,EAAa,eAAiB,EAKhC,GAHA,EAAO,KAAK,EAAO,QAAQ,2BAA2B,EAAa,eAAe,GAAG,CAAC,CAGlF,EAAa,iBAAA,SAAwC,CACvD,IAAM,EAA0B,EAC7B,KAAK,EAAM,IAAS,EAAK,EAAa,gBAAmB,KAAO,CAAC,EAAI,CAAE,CACvE,OAAQ,GAAqB,CAAC,CAAC,EAAE,CACjC,MAAM,CACT,GAAI,EAAwB,OAAS,EACnC,MAAU,MACR,yBACE,EAAa,eACd,+CAA+C,EAAwB,KAAK;EAAK,GACnF,CAEH,EAAO,KACL,EAAO,QACL,yBAAyB,EAAa,eAAe,0BACtD,CACF,CAEH,OAAO,EClDT,eAAsB,EACpB,CACE,OACA,SACA,eACA,mBACA,eACA,0BACA,yBAiBF,EACe,CAEf,IAAM,EAAK,IAAI,MAAM,CAAC,SAAS,CAGzB,EAAe,EAAM,SAAS,eAAe,CAGnD,EAAO,KAAK,EAAO,QAAQ,qBAAqB,EAAK,GAAG,CAAC,CACzD,IAAI,EAAc,EAAQ,EAAM,EAAE,OAAO,EAAE,OAAQ,EAAE,OAAO,CAAC,CAGzD,EAAkC,CACpC,oBAAqB,EAAE,CACvB,mBAAoB,EAAE,CACtB,uBAAwB,EAAE,CAC1B,eAAgB,EAAE,CAElB,GAAK,EAAa,IAAS,EAAE,CAC7B,cAAe,IAAI,MAAM,CAAC,aAAa,CACxC,CAGD,EAAe,MAAM,EAAiC,EAAa,EAAa,CAChF,EAAa,GAAQ,EACrB,MAAM,EAAM,SAAS,EAAc,eAAe,CAGlD,IAAM,EAAS,MAAM,EAAkC,EAAa,EAAa,CACjF,EAAe,EAAO,aACtB,EAAc,EAAO,YACrB,EAAa,GAAQ,EACrB,MAAM,EAAM,SAAS,EAAc,eAAe,CAIlD,EAAe,MAAM,EAAuC,EAAa,EAAc,CACrF,mBACA,eACA,wBACD,CAAC,CACF,EAAa,GAAQ,EACrB,MAAM,EAAM,SAAS,EAAc,eAAe,CAGlD,IAAM,EAAc,EAAY,IAAK,GAAS,EAAK,EAAa,kBAAmB,CAO7E,EAA4B,EANH,EAC3B,EAAE,CACF,MAAM,EAA6B,EAAQ,CACzC,YAAa,EAAY,IAAK,IAAO,CAAE,
MAAO,EAAG,EAAE,CACnD,eACD,CAAC,CAC0D,SAAS,CAGzE,EAAa,uBAAyB,EAAE,CACxC,EAAa,mBAAqB,EAAE,CACpC,EAAa,eAAiB,EAAE,CAGhC,EAAY,QAAS,GAAS,CAE5B,IAAM,EAAS,EAAK,EAAa,kBAG3B,EAAiB,EAA4B,CACjD,IAAK,EACL,oBAAqB,EAAa,oBAClC,mBACA,eACD,CAAC,CAGI,EAAuB,EAA0B,GACvD,GAAI,GAAyB,CAAC,EAC5B,MAAU,MACR,sDAAsD,EAAO;sGAE9D,CAKH,GACE,GACA,EAAuC,CACrC,uBACA,iBACA,mBACD,CAAC,EACF,CAAC,EACD,CACA,EAAa,eAAe,GAAU,EACtC,OAIF,GACE,GACA,EAA6C,CAC3C,uBACA,iBACA,mBACD,CAAC,CACF,CACA,EAAa,uBAAuB,GAAU,CAC5C,IAAK,EACL,OAAQ,EACT,CACD,OAIF,EAAa,mBAAmB,GAAU,GAC1C,CAGF,EAAa,GAAQ,EACrB,MAAM,EAAM,SAAS,EAAc,eAAe,CAClD,IAAM,EAAK,IAAI,MAAM,CAAC,SAAS,CAC/B,EAAO,KAAK,EAAO,MAAM,qCAAqC,EAAK,QAAQ,EAAK,GAAM,IAAK,GAAG,CAAC,CClJjG,eAAsB,EAAiD,CACrE,OACA,aACA,kBACA,OACA,YACA,WAAW,GACX,SAAS,GACT,uBAAuB,GACvB,sBAAsB,GACtB,0BAA0B,GAC1B,aAAa,EAAE,CACf,eACA,wBAAwB,IA+BR,CAEhB,IAAM,EAAmB,EAA0B,EAAW,CAGxD,EAAkB,IAAI,EAAe,EAAiB,EAAiB,CAC3E,aAAc,EAAE,CAChB,eAAgB,EAAE,CAClB,eAAgB,EAAE,CACnB,CAAC,CACI,EAAkB,EAAgB,SAAS,iBAAiB,CAC5D,EAAkB,EAAgB,SAAS,iBAAiB,CAC9D,EAAe,EAAgB,SAAS,eAAe,CAE3D,EAAO,KACL,EAAO,QACL;EACK,OAAO,OAAO,EAAgB,CAAC,OAAO,mCACtC,OAAO,OAAO,EAAgB,CAAC,OAAO,gGACqB,OAAO,KAAK,EAAa,CACpF,IAAK,GAAM,EAAE,CACb,KAAK;EAAK,CAAC,0CAC2B,EAAK,IACjD,CACF,CAGD,IAAM,EAAS,EAA4B,EAAc,EAAK,CAExD,CAAC,EAAQ,EAAU,GAAoB,MAAM,QAAQ,IAAI,CAE7D,EAAwB,EAAc,EAAM,EAAW,CAEvD,EAAiB,EAAO,CACxB,EAAyB,EAAO,CACjC,CAAC,CAGF,MAAM,EACJ,CACE,OACA,aAAc,EAAS,IAAK,GAAM,EAAE,aAAa,CACjD,mBACA,SACA,aAAc,EACd,0BACA,wBACD,CACD,EACD,CAGD,IAAM,EAAuD,EAAE,CAC/D,EAAe,EAAgB,SAAS,eAAe,CACvD,IAAM,EAAW,EAAa,GAwD9B,GAtDA,EAAO,KACL,EAAO,QACL,SAAS,OAAO,QAAQ,EAAS,mBAAmB,CAAC,OAAO,mBAAmB,IAChF,CACF,CACD,EAAO,KACL,EAAO,QACL,SAAS,OAAO,QAAQ,EAAS,uBAAuB,CAAC,OAAO,uBAAuB,IACxF,CACF,CACD,EAAO,KACL,EAAO,QACL,SAAS,OAAO,QAAQ,EAAS,eAAe,CAAC,OAAO,sBAAsB,IAC/E,CACF,CAGD,OAAO,QAAQ,CACb,GAAG,EAAS,mBACZ,GAAI,EAAsB,EAAE,CAAG,EAAM,EAAS,wBAAyB,CAAE,SAAU,EAAI,CACxF,CAAC,CAAC,SAAS,CAAC,EAAQ,KAAY,CAE/B,IAAM,EACJ,EAAS,iBAAA,SACL,IAAI,KACJ,IAAI,KAAK,EAAO,EAAS,gBAAiB,CAG1C,EAAU,EAA4B,CAC1C,IAAK,EACL,oBAAqB,EAAS,oBAC9B,mBACA,aAAc,EAAS,IAAK,GAAM,EAAE,aAAa,CAClD,CAAC,CACF,EAAe,GAAU,CACvB,SACA,YACA,UAAW,EAAU,aAAa,CAClC,SAAU,OAAO,QAAQ,EAAQ,CAAC,KAAK,CAAC,EAAS,MAAY,CAC3D,GAAG,EACH,UACA,iBAAkB,CAChB,WAAY,EACZ,WACA,oBAAqB,EACrB,GAAI,EAAwB,CAAE,qBAAsB,EAAuB,CAAG,EAAE,CACjF,CACF,EAAE,CACJ,EACD,CACF,MAAM,EAAgB,SAAS,EAAgB,iBAAiB,CAChE,MAAM,EAAgB,SAAS,EAAE,CAAE,iBAAiB,CAGhD,EAAQ,CACV,EAAO,KACL,EAAO,MACL,8BACE,OAAO,OAAO,EAAe,CAAC,OAC/B,gCAAgC,IAClC,CACF,CACD,OAGF,EAAO,KACL,EAAO,QACL,aAAa,OAAO,OAAO,EAAe,CAAC,OAAO,6BAA6B,IAChF,CACF,CAGD,IAAM,EAAK,IAAI,MAAM,CAAC,SAAS,CAGzB,EAAc,IAAI,EAAY,UAAU,EAAE,CAAE,EAAY,QAAQ,eAAe,CAGjF,EAAQ,EACN,EAAe,OAAO,QAAQ,EAAe,CAC7C,EAAiB,EAAM,EAAc,EAAuB,IAAM,GAAG,CAC3E,EAAY,MAAM,EAAa,OAAQ,EAAE,CACzC,MAAM,EACJ,EACA,KAAO,IAAiB,CAEtB,GAAI,CACF,MAAM,EACH,IAAI,iBAAkB,CACrB,KAAM,CACJ,QAAS,EAAa,KAAK,EAAG,KAAY,EAAO,CACjD,uBACD,CACF,CAAC,CACD,MAAM,OACF,EAAK,CACZ,GAAI,CACF,IAAM,EAAS,KAAK,MAAM,GAAK,UAAU,MAAQ,KAAK,CAClD,EAAO,OACT,EAAO,MAAM,EAAO,IAAI,UAAU,EAAO,QAAQ,CAAC,MAE1C,EAGZ,EAAO,MACL,EAAO,IACL,oBAAoB,EAAa,OAAO,iCAAiC,EAAU,IACjF,GAAK,UAAU,MAAQ,GAAK,UAE/B,CACF,CACD,IAAM,EAAiB,EAAgB,SAAS,iBAAiB,CACjE,EAAa,SAAS,CAAC,EAAQ,KAAY,CACzC,EAAe,GAAU,CACvB,WAAY,IAAI,MAAM,CAAC,aAAa,CACpC,SACA,MAAO,GAAK,UAAU,MAAQ,GAAK,SAAW,gBAC/C,EACD,CACF,MAAM,EAAgB,SAAS,EAAgB,iBAAiB,CAGlE,GAAS,EAAa,OACtB,EAAY,OAAO,EAAM,EAE3B,CACE,YAAa,GACd,CACF,CAED,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EACvB,EAAO,KACL,EAAO,MACL,yBACE,EAAa,OACd,iCAAiC,EAAU,OAAO,EAAY,IAAK,YACrE,CACF,CC1OH,eAAsB,EAEpB,CACE,OACA,YACA,aACA,eACA,OAAO,GACP,YACA,SACA,0BACA,iBACA,u
BACA,wBACA,sBACA,WACA,aACA,eAEa,CACT,GAAe,IACnB,EAAO,MACL,EAAO,IAAI,uEAAuE,CACnF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGlB,CAAC,GAAQ,CAAC,IACZ,EAAO,MACL,EAAO,IACL,uHACD,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,EAAoB,KAAK,QAAQ,KAAK,CAEtC,IAAM,EAAkB,EAAE,CAE1B,GAAI,EACF,GAAI,CAEF,IAAM,EADmB,EAAY,EAAU,CACb,OAAQ,GAAS,EAAK,SAAS,OAAO,CAAC,CAErE,EAAS,SAAW,IACtB,EAAO,MAAM,EAAO,IAAI,oCAAoC,IAAY,CAAC,CACzE,KAAK,QAAQ,KAAK,EAAE,EAItB,EAAM,KAAK,GAAG,EAAS,IAAK,GAAS,EAAK,EAAW,EAAK,CAAC,CAAC,OACrD,EAAK,CACZ,EAAO,MAAM,EAAO,IAAI,6BAA6B,IAAY,CAAC,CAClE,EAAO,MAAM,EAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,KAAK,QAAQ,KAAK,EAAE,MAGtB,GAAI,CAEG,EAAK,SAAS,OAAO,GACxB,EAAO,MAAM,EAAO,IAAI,0BAA0B,CAAC,CACnD,KAAK,QAAQ,KAAK,EAAE,EAEtB,EAAM,KAAK,EAAK,OACT,EAAK,CACZ,EAAO,MAAM,EAAO,IAAI,0BAA0B,IAAO,CAAC,CAC1D,EAAO,MAAM,EAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,KAAK,QAAQ,KAAK,EAAE,CAIxB,EAAO,KACL,EAAO,MACL,cAAc,EAAM,OAAO,4CAA4C,IACxE,CACF,CACD,EAAO,MAAM,qBAAqB,EAAM,KAAK,KAAK,GAAG,CAEjD,GACF,EAAO,KAAK,EAAO,SAAS,mCAAmC,IAA0B,CAAC,CAG5F,MAAM,EACJ,EACA,KAAO,IAAa,CAElB,MAAM,EAAiD,CACrD,gBAAiB,EAAK,EAAgB,GAFvB,EAAS,EAAS,CAAC,QAAQ,OAAQ,GAAG,CAEH,gBAAgB,CAClE,OACA,aACA,KAAM,EACN,YACA,eACA,sBACA,uBACA,0BACA,WACA,SACA,WAAY,EAAe,EAAW,CACtC,wBACD,CAAC,EAEJ,CAAE,cAAa,CAChB"}
package/dist/impl-DhscnXSw.mjs
@@ -0,0 +1,2 @@
+ import{t as e}from"./logger-B-LXIf3U.mjs";import{n as t}from"./bluebird-CUitXgsY.mjs";import{t as n}from"./updateConsentManagerVersionToLatest-C221vAAw.mjs";import{t as r}from"./validateTranscendAuth-1W1IylqE.mjs";import{t as i}from"./done-input-validation-DLR0-MJ7.mjs";import{ConsentBundleType as a}from"@transcend-io/privacy-types";import o from"colors";async function s({auth:s,bundleTypes:c=[a.Production,a.Test],deploy:l,transcendUrl:u}){i(this.process.exit);let d=await r(s);typeof d==`string`?(await n({deploy:l,transcendUrl:u,auth:d,bundleTypes:c}),e.info(o.green(`Successfully updated Consent Manager!`))):(await t(d,async t=>{e.info(o.magenta(`Updating Consent Manager for organization "${t.organizationName}"...`)),await n({deploy:l,transcendUrl:u,auth:t.apiKey,bundleTypes:c}),e.info(o.green(`Successfully updated Consent Manager for organization "${t.organizationName}"!`))}),e.info(o.green(`Successfully updated Consent Managers!`)))}export{s as updateConsentManager};
+ //# sourceMappingURL=impl-DhscnXSw.mjs.map
package/dist/impl-DhscnXSw.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"impl-DhscnXSw.mjs","names":[],"sources":["../src/commands/consent/update-consent-manager/impl.ts"],"sourcesContent":["import { ConsentBundleType } from '@transcend-io/privacy-types';\nimport colors from 'colors';\n\nimport type { LocalContext } from '../../../context.js';\nimport { validateTranscendAuth } from '../../../lib/api-keys/index.js';\nimport { mapSeries } from '../../../lib/bluebird.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { updateConsentManagerVersionToLatest } from '../../../lib/consent-manager/index.js';\nimport { logger } from '../../../logger.js';\n\nexport interface UpdateConsentManagerCommandFlags {\n auth: string;\n bundleTypes: ConsentBundleType[];\n deploy: boolean;\n transcendUrl: string;\n}\n\nexport async function updateConsentManager(\n this: LocalContext,\n {\n auth,\n bundleTypes = [ConsentBundleType.Production, ConsentBundleType.Test],\n deploy,\n transcendUrl,\n }: UpdateConsentManagerCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Handle single update\n if (typeof apiKeyOrList === 'string') {\n // Update consent manager\n await updateConsentManagerVersionToLatest({\n deploy,\n transcendUrl,\n auth: apiKeyOrList,\n bundleTypes,\n });\n logger.info(colors.green('Successfully updated Consent Manager!'));\n } else {\n await mapSeries(apiKeyOrList, async (apiKey) => {\n logger.info(\n colors.magenta(`Updating Consent Manager for organization \"${apiKey.organizationName}\"...`),\n );\n\n await updateConsentManagerVersionToLatest({\n deploy,\n transcendUrl,\n auth: apiKey.apiKey,\n bundleTypes,\n });\n\n logger.info(\n colors.green(\n `Successfully updated Consent Manager for organization \"${apiKey.organizationName}\"!`,\n ),\n );\n });\n logger.info(colors.green('Successfully updated Consent Managers!'));\n }\n}\n"],"mappings":"qWAiBA,eAAsB,EAEpB,CACE,OACA,cAAc,CAAC,EAAkB,WAAY,EAAkB,KAAK,CACpE,SACA,gBAEa,CACf,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAAe,MAAM,EAAsB,EAAK,CAGlD,OAAO,GAAiB,UAE1B,MAAM,EAAoC,CACxC,SACA,eACA,KAAM,EACN,cACD,CAAC,CACF,EAAO,KAAK,EAAO,MAAM,wCAAwC,CAAC,GAElE,MAAM,EAAU,EAAc,KAAO,IAAW,CAC9C,EAAO,KACL,EAAO,QAAQ,8CAA8C,EAAO,iBAAiB,MAAM,CAC5F,CAED,MAAM,EAAoC,CACxC,SACA,eACA,KAAM,EAAO,OACb,cACD,CAAC,CAEF,EAAO,KACL,EAAO,MACL,0DAA0D,EAAO,iBAAiB,IACnF,CACF,EACD,CACF,EAAO,KAAK,EAAO,MAAM,yCAAyC,CAAC"}
package/dist/impl-Dk7MdX-1.mjs
@@ -0,0 +1,2 @@
+ import{t as e}from"./logger-B-LXIf3U.mjs";import{t}from"./extractErrorMessage-CPnTsT1S.mjs";import{n,t as r}from"./parquetToCsvOneFile-DZVKXrjn.mjs";import{t as i}from"./done-input-validation-DLR0-MJ7.mjs";import{a,i as o,n as s,o as c,r as l,s as u,t as d}from"./createExtraKeyHandler-tubeaEjA.mjs";import f from"colors";function p(e){return l(e)}function m(e){return o(e)}const h={renderHeader:p,renderWorkers:m};async function g(){let n=Number(process.env.WORKER_ID||`0`);e.info(`[w${n}] ready pid=${process.pid}`),process.send?.({type:`ready`}),process.on(`message`,async i=>{if(!i||typeof i!=`object`||(i.type===`shutdown`&&process.exit(0),i.type!==`task`))return;let{filePath:a,options:o}=i.payload,{outputDir:s,clearOutputDir:c}=o;try{e.info(`[w${n}] processing ${a}`);let{DuckDBInstance:t}=await import(`@duckdb/node-api`);await r({filePath:a,outputDir:s,clearOutputDir:c,onProgress:(e,t)=>process.send?.({type:`progress`,payload:{filePath:a,processed:e,total:t}})},t),process.send?.({type:`result`,payload:{ok:!0,filePath:a}})}catch(r){let i=t(r);e.error(`[w${n}] ERROR ${a}: ${r.stack||i}`),process.send?.({type:`result`,payload:{ok:!1,filePath:a,error:i}})}}),await new Promise(()=>{})}function _(){return typeof __filename<`u`?__filename:process.argv[1]}async function v(t){i(this.process.exit);let{directory:r,outputDir:o,clearOutputDir:l,concurrency:p,viewerMode:m}=t,g=n(r,this),{poolSize:v,cpuCount:y}=u(p,g.length);e.info(f.green(`Converting ${g.length} Parquet file(s) → CSV with pool size ${v} (CPU=${y})`));let b=g.map(e=>({filePath:e,options:{outputDir:o,clearOutputDir:l}}));await s({title:`Parquet → CSV - ${r}`,baseDir:r||o||process.cwd(),childFlag:c,childModulePath:_(),poolSize:v,cpuCount:y,filesTotal:g.length,hooks:{nextTask:()=>b.shift(),taskLabel:e=>e.filePath,initTotals:()=>({}),initSlotProgress:()=>void 0,onProgress:e=>e,onResult:(e,t)=>({totals:e,ok:!!t.ok}),postProcess:async()=>{}},viewerMode:m,render:e=>a(e,h,m),extraKeyHandler:({logsBySlot:e,repaint:t,setPaused:n})=>d({logsBySlot:e,repaint:t,setPaused:n})})}process.argv.includes(`--as-child`)&&g().catch(t=>{e.error(t),process.exit(1)});export{v as parquetToCsv};
+ //# sourceMappingURL=impl-Dk7MdX-1.mjs.map
package/dist/impl-Dk7MdX-1.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"impl-Dk7MdX-1.mjs","names":[],"sources":["../src/commands/admin/parquet-to-csv/ui/plugin.ts","../src/commands/admin/parquet-to-csv/worker.ts","../src/commands/admin/parquet-to-csv/impl.ts"],"sourcesContent":["import {\n makeHeader,\n makeWorkerRows,\n type ChunkSlotProgress,\n type CommonCtx,\n type DashboardPlugin,\n} from '../../../../lib/pooling/index.js';\n\n/**\n * Header for parquet-to-csv (no extra totals block).\n *\n * @param ctx - Dashboard context.\n * @returns Header lines.\n */\nfunction renderHeader<TTotals>(ctx: CommonCtx<TTotals, ChunkSlotProgress>): string[] {\n // no extra lines — reuse the shared header as-is\n return makeHeader(ctx);\n}\n\n/**\n * Worker rows for parquet-to-csv — share the generic row renderer.\n *\n * @param ctx - Dashboard context.\n * @returns Array of strings, each representing one worker row.\n */\nfunction renderWorkers<TTotals>(ctx: CommonCtx<TTotals, ChunkSlotProgress>): string[] {\n return makeWorkerRows(ctx);\n}\n\nexport const parquetToCsvPlugin: DashboardPlugin<unknown, ChunkSlotProgress> = {\n renderHeader,\n renderWorkers,\n // no extras\n};\n","import { parquetToCsvOneFile, extractErrorMessage } from '../../../lib/helpers/index.js';\nimport type { ToWorker } from '../../../lib/pooling/index.js';\nimport { logger } from '../../../logger.js';\n\nexport type ParquetTask = {\n /** Absolute path of the Parquet file to convert. */\n filePath: string;\n options: {\n /** Optional directory where CSV output files should be written. */\n outputDir?: string;\n /** Whether to clear any pre-existing output before writing new ones. */\n clearOutputDir: boolean;\n };\n};\n\nexport type ParquetProgress = {\n /** File being processed by the worker. */\n filePath: string;\n /** Rows processed so far. */\n processed: number;\n /** Optional known total rows (not always available). 
*/\n total?: number;\n};\n\nexport type ParquetResult = {\n ok: boolean;\n filePath: string;\n error?: string;\n};\n\n/**\n * Worker loop: convert a single Parquet file to one or more CSV files.\n */\nexport async function runChild(): Promise<void> {\n const workerId = Number(process.env.WORKER_ID || '0');\n logger.info(`[w${workerId}] ready pid=${process.pid}`);\n process.send?.({ type: 'ready' });\n\n process.on('message', async (msg: ToWorker<ParquetTask>) => {\n if (!msg || typeof msg !== 'object') return;\n\n if (msg.type === 'shutdown') {\n process.exit(0);\n }\n if (msg.type !== 'task') return;\n\n const { filePath, options } = msg.payload;\n const { outputDir, clearOutputDir } = options;\n\n try {\n logger.info(`[w${workerId}] processing ${filePath}`);\n const { DuckDBInstance } = await import('@duckdb/node-api');\n await parquetToCsvOneFile(\n {\n filePath,\n outputDir,\n clearOutputDir,\n onProgress: (processed, total) =>\n process.send?.({\n type: 'progress',\n payload: { filePath, processed, total },\n }),\n },\n DuckDBInstance,\n );\n\n process.send?.({\n type: 'result',\n payload: { ok: true, filePath },\n });\n } catch (err) {\n const message = extractErrorMessage(err);\n logger.error(`[w${workerId}] ERROR ${filePath}: ${err.stack || message}`);\n process.send?.({\n type: 'result',\n payload: { ok: false, filePath, error: message },\n });\n }\n });\n\n // keep alive until shutdown\n await new Promise<never>(() => {\n // Do nothing\n });\n}\n","import colors from 'colors';\n\nimport type { LocalContext } from '../../../context.js';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation.js';\nimport { collectParquetFilesOrExit } from '../../../lib/helpers/index.js';\nimport {\n computePoolSize,\n createExtraKeyHandler,\n CHILD_FLAG,\n type PoolHooks,\n runPool,\n dashboardPlugin,\n} from '../../../lib/pooling/index.js';\nimport { logger } from '../../../logger.js';\nimport { parquetToCsvPlugin } from './ui/index.js';\nimport { runChild, type ParquetProgress, type ParquetResult, type ParquetTask } from './worker.js';\n\n/**\n * Returns the current module's path so the worker pool knows what file to re-exec.\n * In Node ESM, __filename is undefined, so we fall back to argv[1].\n *\n * @returns The current module's path.\n */\nfunction getCurrentModulePath(): string {\n if (typeof __filename !== 'undefined') {\n return __filename as unknown as string;\n }\n return process.argv[1];\n}\n\n/** No custom totals for the header; the runner’s built-ins suffice. 
*/\ntype Totals = Record<string, never>;\n\nexport type ParquetToCsvCommandFlags = {\n directory: string;\n outputDir?: string;\n clearOutputDir: boolean;\n concurrency?: number;\n viewerMode: boolean;\n};\n\n/**\n * Convert all Parquet files in a directory to CSV, in parallel.\n *\n * @param flags - The command flags.\n */\nexport async function parquetToCsv(\n this: LocalContext,\n flags: ParquetToCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const { directory, outputDir, clearOutputDir, concurrency, viewerMode } = flags;\n\n /* 1) Discover .parquet inputs */\n const files = collectParquetFilesOrExit(directory, this);\n\n /* 2) Size the pool */\n const { poolSize, cpuCount } = computePoolSize(concurrency, files.length);\n\n logger.info(\n colors.green(\n `Converting ${files.length} Parquet file(s) → CSV with pool size ${poolSize} (CPU=${cpuCount})`,\n ),\n );\n\n /* 3) Build FIFO queue of tasks (one per file) */\n const queue = files.map<ParquetTask>((filePath) => ({\n filePath,\n options: { outputDir, clearOutputDir },\n }));\n\n /* 4) Pool hooks */\n const hooks: PoolHooks<ParquetTask, ParquetProgress, ParquetResult, Totals> = {\n nextTask: () => queue.shift(),\n taskLabel: (t) => t.filePath,\n initTotals: () => ({}) as Totals,\n initSlotProgress: () => undefined,\n onProgress: (totals) => totals,\n onResult: (totals, res) => ({ totals, ok: !!res.ok }),\n postProcess: async () => {\n // nothing special post-run\n },\n };\n\n /* 5) Launch the pool runner with custom dashboard plugin */\n await runPool({\n title: `Parquet → CSV - ${directory}`,\n baseDir: directory || outputDir || process.cwd(),\n childFlag: CHILD_FLAG,\n childModulePath: getCurrentModulePath(),\n poolSize,\n cpuCount,\n filesTotal: files.length,\n hooks,\n viewerMode,\n render: (input) => dashboardPlugin(input, parquetToCsvPlugin, viewerMode),\n extraKeyHandler: ({ logsBySlot, repaint, setPaused }) =>\n createExtraKeyHandler({ logsBySlot, repaint, setPaused }),\n });\n}\n\n/* -------------------------------------------------------------------------------------------------\n * If invoked directly as a child process, enter worker loop\n * ------------------------------------------------------------------------------------------------- */\nif (process.argv.includes(CHILD_FLAG)) {\n runChild().catch((err) => {\n logger.error(err);\n process.exit(1);\n 
});\n}\n"],"mappings":"kUAcA,SAAS,EAAsB,EAAsD,CAEnF,OAAO,EAAW,EAAI,CASxB,SAAS,EAAuB,EAAsD,CACpF,OAAO,EAAe,EAAI,CAG5B,MAAa,EAAkE,CAC7E,eACA,gBAED,CCAD,eAAsB,GAA0B,CAC9C,IAAM,EAAW,OAAO,QAAQ,IAAI,WAAa,IAAI,CACrD,EAAO,KAAK,KAAK,EAAS,cAAc,QAAQ,MAAM,CACtD,QAAQ,OAAO,CAAE,KAAM,QAAS,CAAC,CAEjC,QAAQ,GAAG,UAAW,KAAO,IAA+B,CAM1D,GALI,CAAC,GAAO,OAAO,GAAQ,WAEvB,EAAI,OAAS,YACf,QAAQ,KAAK,EAAE,CAEb,EAAI,OAAS,QAAQ,OAEzB,GAAM,CAAE,WAAU,WAAY,EAAI,QAC5B,CAAE,YAAW,kBAAmB,EAEtC,GAAI,CACF,EAAO,KAAK,KAAK,EAAS,eAAe,IAAW,CACpD,GAAM,CAAE,kBAAmB,MAAM,OAAO,oBACxC,MAAM,EACJ,CACE,WACA,YACA,iBACA,YAAa,EAAW,IACtB,QAAQ,OAAO,CACb,KAAM,WACN,QAAS,CAAE,WAAU,YAAW,QAAO,CACxC,CAAC,CACL,CACD,EACD,CAED,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAM,WAAU,CAChC,CAAC,OACK,EAAK,CACZ,IAAM,EAAU,EAAoB,EAAI,CACxC,EAAO,MAAM,KAAK,EAAS,UAAU,EAAS,IAAI,EAAI,OAAS,IAAU,CACzE,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAO,WAAU,MAAO,EAAS,CACjD,CAAC,GAEJ,CAGF,MAAM,IAAI,YAAqB,GAE7B,CC5DJ,SAAS,GAA+B,CAItC,OAHI,OAAO,WAAe,IACjB,WAEF,QAAQ,KAAK,GAmBtB,eAAsB,EAEpB,EACe,CACf,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CAAE,YAAW,YAAW,iBAAgB,cAAa,cAAe,EAGpE,EAAQ,EAA0B,EAAW,KAAK,CAGlD,CAAE,WAAU,YAAa,EAAgB,EAAa,EAAM,OAAO,CAEzE,EAAO,KACL,EAAO,MACL,cAAc,EAAM,OAAO,wCAAwC,EAAS,QAAQ,EAAS,GAC9F,CACF,CAGD,IAAM,EAAQ,EAAM,IAAkB,IAAc,CAClD,WACA,QAAS,CAAE,YAAW,iBAAgB,CACvC,EAAE,CAgBH,MAAM,EAAQ,CACZ,MAAO,mBAAmB,IAC1B,QAAS,GAAa,GAAa,QAAQ,KAAK,CAChD,UAAW,EACX,gBAAiB,GAAsB,CACvC,WACA,WACA,WAAY,EAAM,OAClB,MArB4E,CAC5E,aAAgB,EAAM,OAAO,CAC7B,UAAY,GAAM,EAAE,SACpB,gBAAmB,EAAE,EACrB,qBAAwB,IAAA,GACxB,WAAa,GAAW,EACxB,UAAW,EAAQ,KAAS,CAAE,SAAQ,GAAI,CAAC,CAAC,EAAI,GAAI,EACpD,YAAa,SAAY,GAG1B,CAYC,aACA,OAAS,GAAU,EAAgB,EAAO,EAAoB,EAAW,CACzE,iBAAkB,CAAE,aAAY,UAAS,eACvC,EAAsB,CAAE,aAAY,UAAS,YAAW,CAAC,CAC5D,CAAC,CAMA,QAAQ,KAAK,SAAA,aAAoB,EACnC,GAAU,CAAC,MAAO,GAAQ,CACxB,EAAO,MAAM,EAAI,CACjB,QAAQ,KAAK,EAAE,EACf"}
@@ -0,0 +1,2 @@
+ import{t as e}from"./logger-B-LXIf3U.mjs";import{t}from"./bluebird-CUitXgsY.mjs";import{t as n}from"./fetchIdentifiers-pjQV4vUg.mjs";import{t as r}from"./createSombraGotInstance-D1Il9zUE.mjs";import{r as i}from"./makeGraphQLRequest-Cq26A_Lq.mjs";import{t as a}from"./fetchAllPurposesAndPreferences-DD6OyA5t.mjs";import{c as o,i as s,n as c,o as l,t as u}from"./time-Bl_c3W8U.mjs";import{r as d,t as f}from"./writeCsv-B51ulrVl.mjs";import{t as p}from"./done-input-validation-DLR0-MJ7.mjs";import{t as m}from"./types-B4CVJCpj.mjs";import{t as h}from"./withPreferenceRetry-xLMZyTq9.mjs";import{decodeCodec as g}from"@transcend-io/type-utils";import _ from"colors";import v from"cli-progress";function y({identifiers:e=[],purposes:t=[],metadata:n=[],consentManagement:r={},system:i={decryptionStatus:`DECRYPTED`},...a},o){let s={...a,...i,...r};if(Array.isArray(e)){let t=new Map;for(let{name:n,value:r}of e)t.has(n)||t.set(n,new Set),r&&t.get(n).add(r);for(let[e,n]of t.entries())s[e]=Array.from(n).join(o)}if(Array.isArray(n)&&(s.metadata=JSON.stringify(n.reduce((e,{key:t,value:n})=>(e[t]=n,e),{}))),Array.isArray(t)){for(let{purpose:e,preferences:n,enabled:r}of t)if(s[e]=!!r,Array.isArray(n))for(let{topic:t,choice:r}of n){let n=`${e}_${t}`,i=null;i=typeof r.booleanValue==`boolean`?r.booleanValue:r.selectValue?r.selectValue:Array.isArray(r.selectValues)?r.selectValues.filter(e=>e.length>0).join(`,`):null,s[n]=i}}return s}function b(e,t,n,r=5e3){let i=Math.max(0,n.getTime()-t.getTime());if(i===0)return[];let a=new Date(Math.floor(t.getTime()/c)*c),o=Math.ceil(i/Math.max(1,r)),s=Math.max(c,o),l=Math.ceil((n.getTime()-a.getTime())/s),u=[];for(let t=0;t<l;t+=1){let r=a.getTime()+t*s,i=Math.min(n.getTime(),r+s)-1,o=Math.max(r,i),c=new Date(r).toISOString(),l=new Date(o).toISOString();e===`timestamp`?u.push({timestampAfter:c,timestampBefore:l}):u.push({system:{updatedAfter:c,updatedBefore:l}})}return u}function x(e,t){return e===`timestamp`?new Date(t.timestamp):t.system?.updatedAt?new Date(t.system.updatedAt):new Date}async function*S(t,n,r,i){let a;for(;;){let o={limit:i};r&&Object.keys(r).length&&(o.filter=r),a&&(o.cursor=a);let{nodes:s,cursor:c}=g(m,await h(`Preference Query`,()=>t.post(`v1/preferences/${n}/query`,{json:o}).json(),{onRetry:(t,n,r)=>{e.warn(_.yellow(`Retry attempt ${t} for iterateConsentPages due to error: ${r}`))}}));if(!s?.length||(yield s,!c))break;a=c}}function C(e){return e.timestampAfter||e.timestampBefore?`timestamp`:`updated`}function w(e,t){if(e===`timestamp`)return{after:t.timestampAfter?new Date(t.timestampAfter):void 0,before:t.timestampBefore?new Date(t.timestampBefore):void 0};let n=t.system??{};return{after:n.updatedAfter?new Date(n.updatedAfter):void 0,before:n.updatedBefore?new Date(n.updatedBefore):void 0}}function T(e,t,n){return e===`timestamp`?{...t,timestampBefore:n??t.timestampBefore}:{...t,system:{...t.system||{},...n?{updatedBefore:n}:{}},timestampAfter:void 0,timestampBefore:void 0}}async function E(t,n,r){e.info(_.magenta(`Single-record probe with filter: ${JSON.stringify(r)}`));let i=await S(t,n,r,1).next();if(i.done||!i.value||i.value.length===0)return e.info(_.yellow(`Probe result: no record`)),null;let a=i.value[0];return e.info(_.green(`Probe result: found record at ${x(C(r),a).toISOString()}`)),a}async function D(t,n){let{partition:r,mode:i,baseFilter:a,maxLookbackDays:s=3650}=n,c=await E(t,r,T(i,a));if(!c)return e.info(_.yellow(`No records found; defaulting earliest day to today.`)),o(new Date);let l=x(i,c);e.info(_.green(`Newest instant: 
${l.toISOString()}`));let d=[1,7,30],f=0,p=d[0]*u,m=l,h=null;for(;;){let n=f<d.length?new Date(l.getTime()-d[f]*u):new Date(l.getTime()-p);if((o(new Date).getTime()-o(n).getTime())/864e5>s){e.warn(_.yellow(`Exponential jump exceeded maxLookbackDays=${s}. Using current bounds.`)),h=n;break}e.info(_.magenta(`Probing before=${n.toISOString()} (jump step ${f<d.length?`${d[f]}d`:`${Math.round(p/u)}d`})…`));let c=await E(t,r,T(i,a,n.toISOString()));if(c){m=x(i,c),e.info(_.green(`Found older record at ${m.toISOString()} — continue jumping back.`)),f<d.length-1?(f+=1,p=d[f]*u):f===d.length-1?(f+=1,p=d[d.length-1]*2*u):p*=2;continue}h=n,e.info(_.green(`No record before ${n.toISOString()} — established empty lower bound.`));break}h||=new Date(m.getTime()-u);let g=h,v=m,y=Math.max(u,Math.floor((v.getTime()-g.getTime())/64));e.info(_.magenta(`Exponential forward-from-empty start: empty=${g.toISOString()} found=${v.toISOString()} step=${Math.round(y/u)}d`));for(let n=0;n<8;n+=1){let n=new Date(g.getTime()+y);if(n.getTime()>=v.getTime())break;e.info(_.magenta(`Forward gallop probe before=${n.toISOString()}…`));let o=await E(t,r,T(i,a,n.toISOString()));if(o?(v=x(i,o),e.info(_.green(`Gallop hit at ${v.toISOString()} — tightening found bound. Next step halves.`)),y=Math.max(u,Math.floor(y/2))):(g.setTime(n.getTime()),e.info(_.yellow(`Gallop miss — advancing empty bound to ${g.toISOString()}. Next step doubles.`)),y=Math.min(v.getTime()-g.getTime(),y*2),y<864e5&&(y=u)),v.getTime()-g.getTime()<=864e5)break}for(;v.getTime()-g.getTime()>u;){let n=new Date(g.getTime()+Math.floor((v.getTime()-g.getTime())/2));e.info(_.magenta(`Binary probe before=${n.toISOString()}…`));let o=await E(t,r,T(i,a,n.toISOString()));if(o){let t=x(i,o);e.info(_.green(`Binary probe found record at ${t.toISOString()}.`)),v=t}else e.info(_.yellow(`Binary probe found no record.`)),g=n}let b=o(v);return e.info(_.green(`Earliest day (UTC) resolved to ${b.toISOString()} (instant ≈ ${v.toISOString()}).`)),b}async function O(t,n){let{partition:r,mode:i,baseFilter:a}=n;e.info(_.magenta(`Latest-day discovery: probing newest record…`));let s=await E(t,r,T(i,a));if(!s)return e.info(_.yellow(`No records found at all; defaulting latest day to today.`)),o(new Date);let c=x(i,s);e.info(_.green(`Newest record instant is ${c.toISOString()}.`));let l=o(c);return e.info(_.green(`Latest day (UTC) resolved to ${l.toISOString()} from instant ${c.toISOString()}.`)),l}function k(e,t,n){return e===`timestamp`?{...t,timestampAfter:n.timestampAfter??t.timestampAfter,timestampBefore:n.timestampBefore??t.timestampBefore,system:void 0}:{...t,system:{...t.system||{},...n.system?.updatedAfter?{updatedAfter:n.system.updatedAfter}:{},...n.system?.updatedBefore?{updatedBefore:n.system.updatedBefore}:{}},timestampAfter:void 0,timestampBefore:void 0}}async function A(n,{partition:r,filterBy:i={},limit:a=50,windowConcurrency:o=25,maxChunks:c=5e3,maxLookbackDays:u=3650,onItems:d}){let f=C(i);e.info(_.magenta(`Fetching consent preferences in chunks by ${f===`timestamp`?`timestamp`:`system.updatedAt`}...`));let{after:p,before:m}=w(f,i);if(e.info(_.magenta(`Initial bounds: after=${p?.toISOString()??`undefined`} before=${m?.toISOString()??`undefined`}`)),(!p||!m)&&(p||(e.info(_.magenta(`Discovering earliest day with data for partition ${r}...`)),p=await D(n,{partition:r,mode:f,baseFilter:i,maxLookbackDays:u}),e.info(_.green(`Discovered earliest day with data: ${p.toISOString()}`))),!m)){e.info(_.magenta(`Discovering latest day with data for partition ${r}...`));let t=await 
O(n,{partition:r,mode:f,baseFilter:i,earliest:p});m=s(t,1),e.info(_.green(`Discovered latest day with data: ${t.toISOString()}`))}e.info(_.green(`Final bounds (UTC): after=${p.toISOString()} before=${m.toISOString()}`));let h=b(f,p,m,c);e.info(_.magenta(`Fetching consent preferences from partition ${r} in ${h.length} chunks...`));let g=new v.SingleBar({format:`Downloading [{bar}] {percentage}% | chunks {value}/{total} | fetched {fetched}`},v.Presets.shades_classic),y=0,x=0;g.start(h.length,0,{fetched:x});let T=Date.now(),E=l(a),A=[];return await t(h.map((e,t)=>({windowFilter:e,idx:t})),async({windowFilter:e})=>{let t=k(f,i,e);for await(let e of S(n,r,t,E))x+=e.length,g.update(y,{fetched:x}),d?await d(e):A.push(...e);y+=1,g.update(y,{fetched:x})},{concurrency:Math.max(1,o)}),g.update(y,{fetched:x}),g.stop(),e.info(_.green(`Fetched ${x} consent preference records from partition ${r} in ${(Date.now()-T)/1e3}s.`)),d?[]:A}async function j(t,{partition:n,filterBy:r={},limit:i=50,onItems:a}){let o=[],s,c=r&&(Object.keys(r).length>0||r.system&&Object.keys(r.system).length>0),l=Math.max(1,Math.min(50,i??50));for(;;){let i={limit:l};c&&(i.filter=r),s&&(i.cursor=s);let{nodes:u,cursor:d}=g(m,await h(`Preference Query`,()=>t.post(`v1/preferences/${n}/query`,{json:i}).json(),{onRetry:(t,n,r)=>{e.warn(_.yellow(`Retry attempt ${t} for fetchConsentPreferences due to error: ${r}`))}}));if(!u||u.length===0||(a?await a(u):o.push(...u),!d))break;s=d}return a?[]:o}const M=[`userId`,`timestamp`,`partition`,`decryptionStatus`,`updatedAt`,`usp`,`gpp`,`tcf`,`airgapVersion`,`metadata`,`metadataTimestamp`];async function N({auth:t,partition:o,sombraAuth:s,file:c,transcendUrl:l,timestampBefore:u,timestampAfter:m,updatedBefore:h,updatedAfter:g,identifiers:v=[],concurrency:b,shouldChunk:x,windowConcurrency:S,maxChunks:C,exportIdentifiersWithDelimiter:w,maxLookbackDays:T}){p(this.process.exit);let E=await r(l,t,s),D=i(l,t),O=v.map(e=>{if(!e.includes(`:`))return{name:`email`,value:e};let[t,n]=e.split(`:`);return{name:t,value:n}}),k={...u?{timestampBefore:u.toISOString()}:{},...m?{timestampAfter:m.toISOString()}:{},...g||h?{system:{...h?{updatedBefore:h.toISOString()}:{},...g?{updatedAfter:g.toISOString()}:{}}}:{},...O.length>0?{identifiers:O}:{}};e.info(`Fetching consent preferences from partition ${o}, using mode=${x?`chunked-stream`:`paged-stream`}...`),e.info(_.magenta(`Preparing CSV at: ${c}`));let[N,P]=await Promise.all([a(D),n(D)]),F=P.map(e=>e.name),I=Array.from(new Set(N.flatMap(e=>e.topics?.map(t=>`${e.trackingType}_${t.slug}`)??[]))).sort((e,t)=>e.localeCompare(t)),L=Array.from(new Set(N.map(e=>e.trackingType))).sort((e,t)=>e.localeCompare(t)),R=[...M,...F,...L,...I],z=null,B=!1,V=e=>{if(!e||e.length===0)return;let t=e.map(e=>y(e,w));if(!B){let e=Object.keys(t[0]??{}),n=new Set;z=[...R,...e].filter(e=>e===void 0||n.has(e)?!1:(n.add(e),!0)),d(c,z),B=!0}f(c,t,z)};if(x){await A(E,{partition:o,filterBy:k,limit:b,windowConcurrency:S,maxChunks:C,maxLookbackDays:T,onItems:e=>V(e)}),e.info(_.green(`Finished writing CSV to ${c}`));return}await j(E,{partition:o,filterBy:k,limit:b,onItems:e=>V(e)}),e.info(_.green(`Finished writing CSV to ${c}`))}export{N as pullConsentPreferences};
+ //# sourceMappingURL=impl-DsNPvet4.mjs.map
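
The new impl-DsNPvet4.mjs bundle above carries the pullConsentPreferences implementation. It is minified, but three pieces of its logic are worth unpacking. First, the window chunker: it splits an [after, before) time range into at most maxChunks non-overlapping windows, each at least one unit wide and aligned to a unit boundary, emitting either timestamp filters or system.updatedAt filters depending on mode. A de-minified TypeScript sketch follows; names like buildWindows are illustrative, and DAY_MS stands in for a constant the bundle imports from time-Bl_c3W8U.mjs, assumed here to be one day in milliseconds.

// De-minified sketch of the window chunker (function b in the bundle).
// DAY_MS is an assumption standing in for the imported time constant.
const DAY_MS = 24 * 60 * 60 * 1000;

type Window =
  | { timestampAfter: string; timestampBefore: string }
  | { system: { updatedAfter: string; updatedBefore: string } };

function buildWindows(
  mode: 'timestamp' | 'updated',
  after: Date,
  before: Date,
  maxChunks = 5000,
): Window[] {
  const span = Math.max(0, before.getTime() - after.getTime());
  if (span === 0) return [];
  // Align the start to a unit boundary, then size each window so the total
  // count never exceeds maxChunks (and never drops below one unit).
  const alignedStart = Math.floor(after.getTime() / DAY_MS) * DAY_MS;
  const size = Math.max(DAY_MS, Math.ceil(span / Math.max(1, maxChunks)));
  const count = Math.ceil((before.getTime() - alignedStart) / size);
  const windows: Window[] = [];
  for (let i = 0; i < count; i += 1) {
    const start = alignedStart + i * size;
    // Close each window at (next start - 1 ms) so adjacent windows never overlap.
    const end = Math.max(start, Math.min(before.getTime(), start + size) - 1);
    const lo = new Date(start).toISOString();
    const hi = new Date(end).toISOString();
    windows.push(
      mode === 'timestamp'
        ? { timestampAfter: lo, timestampBefore: hi }
        : { system: { updatedAfter: lo, updatedBefore: hi } },
    );
  }
  return windows;
}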
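
Second, the earliest-day discovery used when no lower bound is supplied: the bundle probes for a single record, jumps back 1, 7, then 30 days from the newest instant and keeps doubling until a bounded probe comes back empty or maxLookbackDays (default 3650) is exceeded, then closes the remaining gap with a binary search down to one day. A compressed sketch under the same assumptions; findEarliestDay and the Probe signature are illustrative, and the bundle's intermediate forward-gallop phase is omitted for brevity.

// Compressed sketch of the earliest-day search (function D in the bundle).
// probe(before) is assumed to return the newest record instant strictly
// before `before` (or overall when `before` is omitted), or null if none.
type Probe = (before?: Date) => Promise<Date | null>;

const DAY_MS = 24 * 60 * 60 * 1000;

function startOfUtcDay(d: Date): Date {
  return new Date(Math.floor(d.getTime() / DAY_MS) * DAY_MS);
}

async function findEarliestDay(
  probe: Probe,
  maxLookbackDays = 3650,
): Promise<Date> {
  const newest = await probe();
  if (!newest) return startOfUtcDay(new Date()); // no data at all: default to today
  // Phase 1: jump back 1d, 7d, 30d from the newest instant, then keep
  // doubling, until a bounded probe finds nothing or the cap is reached.
  const jumps = [1, 7, 30];
  let step = 0;
  let stride = jumps[0] * DAY_MS;
  let found = newest;            // newest instant known to contain data
  let empty: Date | null = null; // bound with no data before it
  for (;;) {
    const before =
      step < jumps.length
        ? new Date(newest.getTime() - jumps[step] * DAY_MS)
        : new Date(newest.getTime() - stride);
    if ((Date.now() - before.getTime()) / DAY_MS > maxLookbackDays) {
      empty = before; // lookback cap exceeded: use current bounds
      break;
    }
    const hit = await probe(before);
    if (!hit) {
      empty = before; // established an empty lower bound
      break;
    }
    found = hit;
    if (step < jumps.length - 1) {
      step += 1;
      stride = jumps[step] * DAY_MS;
    } else if (step === jumps.length - 1) {
      step += 1;
      stride = jumps[jumps.length - 1] * 2 * DAY_MS;
    } else {
      stride *= 2;
    }
  }
  // Phase 2: binary-search the (empty, found] gap down to one day.
  let lo = empty ?? new Date(found.getTime() - DAY_MS);
  let hi = found;
  while (hi.getTime() - lo.getTime() > DAY_MS) {
    const mid = new Date(lo.getTime() + Math.floor((hi.getTime() - lo.getTime()) / 2));
    const hit = await probe(mid);
    if (hit) hi = hit;
    else lo = mid;
  }
  return startOfUtcDay(hi);
}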
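
Third, the CSV row flattener: each preference record is flattened by joining de-duplicated identifier values per identifier name with a caller-supplied delimiter, collapsing metadata key/value pairs into one JSON column, and exploding purposes into a boolean column per purpose plus a "<purpose>_<topic>" column per topic choice. A sketch with illustrative names (flattenRow, PreferenceRecord); the real record shape comes from types-B4CVJCpj.mjs.

// De-minified sketch of the CSV row flattener (function y in the bundle).
// PreferenceRecord is an illustrative approximation of the record shape.
interface PreferenceRecord {
  identifiers?: { name: string; value?: string }[];
  purposes?: {
    purpose: string;
    enabled?: boolean;
    preferences?: {
      topic: string;
      choice: { booleanValue?: boolean; selectValue?: string; selectValues?: string[] };
    }[];
  }[];
  metadata?: { key: string; value: string }[];
  consentManagement?: Record<string, unknown>;
  system?: Record<string, unknown>;
  [key: string]: unknown;
}

function flattenRow(
  {
    identifiers = [],
    purposes = [],
    metadata = [],
    consentManagement = {},
    system = { decryptionStatus: 'DECRYPTED' },
    ...rest
  }: PreferenceRecord,
  delimiter: string,
): Record<string, unknown> {
  const row: Record<string, unknown> = { ...rest, ...system, ...consentManagement };
  // One column per identifier name, values de-duplicated and joined.
  const byName = new Map<string, Set<string>>();
  for (const { name, value } of identifiers) {
    if (!byName.has(name)) byName.set(name, new Set());
    if (value) byName.get(name)!.add(value);
  }
  for (const [name, values] of byName) row[name] = [...values].join(delimiter);
  // Metadata key/value pairs collapse into a single JSON column.
  row.metadata = JSON.stringify(
    metadata.reduce<Record<string, string>>((acc, { key, value }) => {
      acc[key] = value;
      return acc;
    }, {}),
  );
  // Purposes become boolean columns; each topic choice becomes "<purpose>_<topic>".
  for (const { purpose, enabled, preferences = [] } of purposes) {
    row[purpose] = !!enabled;
    for (const { topic, choice } of preferences) {
      row[`${purpose}_${topic}`] =
        typeof choice.booleanValue === 'boolean'
          ? choice.booleanValue
          : choice.selectValue
            ? choice.selectValue
            : Array.isArray(choice.selectValues)
              ? choice.selectValues.filter((v) => v.length > 0).join(',')
              : null;
    }
  }
  return row;
}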