@transcend-io/cli 9.0.1 → 10.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (393)
  1. package/LICENSE +201 -0
  2. package/README.md +26 -34
  3. package/dist/RateCounter-DFL_mnk2.mjs +2 -0
  4. package/dist/RateCounter-DFL_mnk2.mjs.map +1 -0
  5. package/dist/RequestDataSilo-_Iv44M9u.mjs +51 -0
  6. package/dist/RequestDataSilo-_Iv44M9u.mjs.map +1 -0
  7. package/dist/app-BfTrk2nc.mjs +131 -0
  8. package/dist/app-BfTrk2nc.mjs.map +1 -0
  9. package/dist/approvePrivacyRequests-CWGZR2N6.mjs +2 -0
  10. package/dist/approvePrivacyRequests-CWGZR2N6.mjs.map +1 -0
  11. package/dist/assessment-BDywVaGR.mjs +284 -0
  12. package/dist/assessment-BDywVaGR.mjs.map +1 -0
  13. package/dist/bin/bash-complete.mjs +1 -1
  14. package/dist/bin/bash-complete.mjs.map +1 -1
  15. package/dist/bin/cli.mjs +1 -1
  16. package/dist/bin/cli.mjs.map +1 -1
  17. package/dist/bin/deprecated-command.mjs +1 -1
  18. package/dist/bin/deprecated-command.mjs.map +1 -1
  19. package/dist/bluebird-CUitXgsY.mjs +2 -0
  20. package/dist/bluebird-CUitXgsY.mjs.map +1 -0
  21. package/dist/buildXdiSyncEndpoint-Cb-pvpak.mjs +9 -0
  22. package/dist/buildXdiSyncEndpoint-Cb-pvpak.mjs.map +1 -0
  23. package/dist/bulkRestartRequests-CKF_xpN0.mjs +2 -0
  24. package/dist/bulkRestartRequests-CKF_xpN0.mjs.map +1 -0
  25. package/dist/bulkRetryEnrichers-B-Szmin-.mjs +2 -0
  26. package/dist/bulkRetryEnrichers-B-Szmin-.mjs.map +1 -0
  27. package/dist/cancelPrivacyRequests-DNiL13E_.mjs +2 -0
  28. package/dist/cancelPrivacyRequests-DNiL13E_.mjs.map +1 -0
  29. package/dist/codecs-BE3Wmoh8.mjs +2 -0
  30. package/dist/codecs-BE3Wmoh8.mjs.map +1 -0
  31. package/dist/codecs-Dx_vGxsl.mjs +2 -0
  32. package/dist/codecs-Dx_vGxsl.mjs.map +1 -0
  33. package/dist/{command-Bzyj3M2G.mjs → command-BXxoAjFo.mjs} +2 -2
  34. package/dist/command-BXxoAjFo.mjs.map +1 -0
  35. package/dist/consentManagersToBusinessEntities-BDgOFga7.mjs +5 -0
  36. package/dist/consentManagersToBusinessEntities-BDgOFga7.mjs.map +1 -0
  37. package/dist/constants-AFtS5Nad.mjs +4 -0
  38. package/dist/constants-AFtS5Nad.mjs.map +1 -0
  39. package/dist/constants-CeMiHaHx.mjs +2 -0
  40. package/dist/constants-CeMiHaHx.mjs.map +1 -0
  41. package/dist/constants-lIvXgkdp.mjs +2 -0
  42. package/dist/constants-lIvXgkdp.mjs.map +1 -0
  43. package/dist/{context-bkKpii_t.mjs → context-CdSyuBlf.mjs} +1 -1
  44. package/dist/context-CdSyuBlf.mjs.map +1 -0
  45. package/dist/{pooling-CazydwlD.mjs → createExtraKeyHandler-tubeaEjA.mjs} +5 -5
  46. package/dist/createExtraKeyHandler-tubeaEjA.mjs.map +1 -0
  47. package/dist/createPreferenceAccessTokens-DqmFctn3.mjs +10 -0
  48. package/dist/createPreferenceAccessTokens-DqmFctn3.mjs.map +1 -0
  49. package/dist/createSombraGotInstance-D1Il9zUE.mjs +10 -0
  50. package/dist/createSombraGotInstance-D1Il9zUE.mjs.map +1 -0
  51. package/dist/{dataFlowsToDataSilos-RAhfPV0l.mjs → dataFlowsToDataSilos-NhvBw1iy.mjs} +1 -1
  52. package/dist/dataFlowsToDataSilos-NhvBw1iy.mjs.map +1 -0
  53. package/dist/dataSilo-DrFetFXw.mjs +302 -0
  54. package/dist/dataSilo-DrFetFXw.mjs.map +1 -0
  55. package/dist/dataSubject-y_aXI0pa.mjs +92 -0
  56. package/dist/dataSubject-y_aXI0pa.mjs.map +1 -0
  57. package/dist/{done-input-validation-CcZtaz03.mjs → done-input-validation-DLR0-MJ7.mjs} +1 -1
  58. package/dist/{done-input-validation-CcZtaz03.mjs.map → done-input-validation-DLR0-MJ7.mjs.map} +1 -1
  59. package/dist/downloadPrivacyRequestFiles-DlpgxqHF.mjs +2 -0
  60. package/dist/downloadPrivacyRequestFiles-DlpgxqHF.mjs.map +1 -0
  61. package/dist/enums-CyFTrzXY.mjs.map +1 -1
  62. package/dist/extractClientError-DPjv09EH.mjs +2 -0
  63. package/dist/extractClientError-DPjv09EH.mjs.map +1 -0
  64. package/dist/extractErrorMessage-CPnTsT1S.mjs +2 -0
  65. package/dist/extractErrorMessage-CPnTsT1S.mjs.map +1 -0
  66. package/dist/fetchAllActions-BJsPdnxy.mjs +832 -0
  67. package/dist/fetchAllActions-BJsPdnxy.mjs.map +1 -0
  68. package/dist/fetchAllDataFlows-D248lO6_.mjs +2 -0
  69. package/dist/fetchAllDataFlows-D248lO6_.mjs.map +1 -0
  70. package/dist/fetchAllPreferenceTopics-ForE9GpZ.mjs +36 -0
  71. package/dist/fetchAllPreferenceTopics-ForE9GpZ.mjs.map +1 -0
  72. package/dist/fetchAllPurposes-ZdkO2fMp.mjs +29 -0
  73. package/dist/fetchAllPurposes-ZdkO2fMp.mjs.map +1 -0
  74. package/dist/fetchAllPurposesAndPreferences-DD6OyA5t.mjs +2 -0
  75. package/dist/fetchAllPurposesAndPreferences-DD6OyA5t.mjs.map +1 -0
  76. package/dist/fetchAllRequestEnrichers-CK-kk5eg.mjs +42 -0
  77. package/dist/fetchAllRequestEnrichers-CK-kk5eg.mjs.map +1 -0
  78. package/dist/fetchAllRequestIdentifiers-DrFFOt0m.mjs +10 -0
  79. package/dist/fetchAllRequestIdentifiers-DrFFOt0m.mjs.map +1 -0
  80. package/dist/fetchAllRequests-DNQQsY4s.mjs +2 -0
  81. package/dist/fetchAllRequests-DNQQsY4s.mjs.map +1 -0
  82. package/dist/fetchApiKeys-DjOr44xA.mjs +33 -0
  83. package/dist/fetchApiKeys-DjOr44xA.mjs.map +1 -0
  84. package/dist/fetchCatalogs-BM4FCbcS.mjs +12 -0
  85. package/dist/fetchCatalogs-BM4FCbcS.mjs.map +1 -0
  86. package/dist/fetchConsentManagerId-CFkg3-RS.mjs +321 -0
  87. package/dist/fetchConsentManagerId-CFkg3-RS.mjs.map +1 -0
  88. package/dist/fetchIdentifiers-pjQV4vUg.mjs +54 -0
  89. package/dist/fetchIdentifiers-pjQV4vUg.mjs.map +1 -0
  90. package/dist/fetchRequestDataSilo-P4yA7Lyc.mjs +2 -0
  91. package/dist/fetchRequestDataSilo-P4yA7Lyc.mjs.map +1 -0
  92. package/dist/fetchRequestFilesForRequest-BbxrEKFK.mjs +33 -0
  93. package/dist/fetchRequestFilesForRequest-BbxrEKFK.mjs.map +1 -0
  94. package/dist/generateCrossAccountApiKeys-Bxc_dzMG.mjs +33 -0
  95. package/dist/generateCrossAccountApiKeys-Bxc_dzMG.mjs.map +1 -0
  96. package/dist/impl-4ltdSmpl2.mjs +4 -0
  97. package/dist/impl-4ltdSmpl2.mjs.map +1 -0
  98. package/dist/impl-B19fH75P.mjs +12 -0
  99. package/dist/impl-B19fH75P.mjs.map +1 -0
  100. package/dist/impl-BBMjv5YQ.mjs +2 -0
  101. package/dist/impl-BBMjv5YQ.mjs.map +1 -0
  102. package/dist/{impl-CZP2l3Ds.mjs → impl-BKH3QRLi.mjs} +3 -3
  103. package/dist/impl-BKH3QRLi.mjs.map +1 -0
  104. package/dist/impl-BOUm7wly2.mjs +2 -0
  105. package/dist/impl-BOUm7wly2.mjs.map +1 -0
  106. package/dist/impl-BUC4ZelU.mjs +2 -0
  107. package/dist/impl-BUC4ZelU.mjs.map +1 -0
  108. package/dist/impl-BhTCp0kg.mjs +2 -0
  109. package/dist/impl-BhTCp0kg.mjs.map +1 -0
  110. package/dist/impl-BlHU1bbJ2.mjs +2 -0
  111. package/dist/impl-BlHU1bbJ2.mjs.map +1 -0
  112. package/dist/impl-BwjguKHC.mjs +4 -0
  113. package/dist/impl-BwjguKHC.mjs.map +1 -0
  114. package/dist/impl-C2o0eDzJ.mjs +2 -0
  115. package/dist/impl-C2o0eDzJ.mjs.map +1 -0
  116. package/dist/impl-C8HKnjw82.mjs +2 -0
  117. package/dist/impl-C8HKnjw82.mjs.map +1 -0
  118. package/dist/impl-CCUsnhoW2.mjs +2 -0
  119. package/dist/impl-CCUsnhoW2.mjs.map +1 -0
  120. package/dist/impl-CCc-wXqD.mjs +2 -0
  121. package/dist/impl-CCc-wXqD.mjs.map +1 -0
  122. package/dist/impl-CMmyv1cl.mjs +2 -0
  123. package/dist/impl-CMmyv1cl.mjs.map +1 -0
  124. package/dist/{impl-BYBNi68b.mjs → impl-CNez1OAw.mjs} +2 -2
  125. package/dist/impl-CNez1OAw.mjs.map +1 -0
  126. package/dist/impl-CNykdy3e2.mjs +2 -0
  127. package/dist/impl-CNykdy3e2.mjs.map +1 -0
  128. package/dist/impl-CSChmq_t2.mjs +2 -0
  129. package/dist/impl-CSChmq_t2.mjs.map +1 -0
  130. package/dist/impl-Ce9K4OCp.mjs +2 -0
  131. package/dist/impl-Ce9K4OCp.mjs.map +1 -0
  132. package/dist/impl-Cgg_bv7j.mjs +2 -0
  133. package/dist/impl-Cgg_bv7j.mjs.map +1 -0
  134. package/dist/impl-ChCqHkOc2.mjs +2 -0
  135. package/dist/impl-ChCqHkOc2.mjs.map +1 -0
  136. package/dist/impl-CqEwwWeD.mjs +2 -0
  137. package/dist/impl-CqEwwWeD.mjs.map +1 -0
  138. package/dist/impl-CqXFyvgV2.mjs +2 -0
  139. package/dist/impl-CqXFyvgV2.mjs.map +1 -0
  140. package/dist/impl-CxLSJk2P.mjs +2 -0
  141. package/dist/impl-CxLSJk2P.mjs.map +1 -0
  142. package/dist/impl-CzU9WTiW.mjs +2 -0
  143. package/dist/impl-CzU9WTiW.mjs.map +1 -0
  144. package/dist/impl-D-cp0CYr.mjs +2 -0
  145. package/dist/impl-D-cp0CYr.mjs.map +1 -0
  146. package/dist/impl-D9NjIwEi2.mjs +2 -0
  147. package/dist/impl-D9NjIwEi2.mjs.map +1 -0
  148. package/dist/impl-DEWXA_QC.mjs +2 -0
  149. package/dist/impl-DEWXA_QC.mjs.map +1 -0
  150. package/dist/impl-DGiPB5Vq2.mjs +2 -0
  151. package/dist/impl-DGiPB5Vq2.mjs.map +1 -0
  152. package/dist/impl-DGuwD_qz.mjs +2 -0
  153. package/dist/impl-DGuwD_qz.mjs.map +1 -0
  154. package/dist/impl-DGzvE8aJ.mjs +2 -0
  155. package/dist/impl-DGzvE8aJ.mjs.map +1 -0
  156. package/dist/impl-DTp9OQIZ.mjs +7 -0
  157. package/dist/impl-DTp9OQIZ.mjs.map +1 -0
  158. package/dist/impl-DhscnXSw.mjs +2 -0
  159. package/dist/impl-DhscnXSw.mjs.map +1 -0
  160. package/dist/impl-Dk7MdX-1.mjs +2 -0
  161. package/dist/impl-Dk7MdX-1.mjs.map +1 -0
  162. package/dist/impl-DsNPvet4.mjs +2 -0
  163. package/dist/impl-DsNPvet4.mjs.map +1 -0
  164. package/dist/impl-DxUFb0vv.mjs +2 -0
  165. package/dist/impl-DxUFb0vv.mjs.map +1 -0
  166. package/dist/impl-JThkrXiI2.mjs +2 -0
  167. package/dist/impl-JThkrXiI2.mjs.map +1 -0
  168. package/dist/impl-KDuBh4bu2.mjs +2 -0
  169. package/dist/impl-KDuBh4bu2.mjs.map +1 -0
  170. package/dist/impl-MpkLBntW.mjs +2 -0
  171. package/dist/impl-MpkLBntW.mjs.map +1 -0
  172. package/dist/impl-P_NDC3cX.mjs +2 -0
  173. package/dist/impl-P_NDC3cX.mjs.map +1 -0
  174. package/dist/impl-Rt3C_fDF.mjs +2 -0
  175. package/dist/impl-Rt3C_fDF.mjs.map +1 -0
  176. package/dist/impl-c7rUQYDc2.mjs +2 -0
  177. package/dist/impl-c7rUQYDc2.mjs.map +1 -0
  178. package/dist/impl-fqOKTw5J.mjs +2 -0
  179. package/dist/impl-fqOKTw5J.mjs.map +1 -0
  180. package/dist/impl-oiBTZqQS2.mjs +2 -0
  181. package/dist/impl-oiBTZqQS2.mjs.map +1 -0
  182. package/dist/impl-tbGnvKFm.mjs +2 -0
  183. package/dist/impl-tbGnvKFm.mjs.map +1 -0
  184. package/dist/index.d.mts +3441 -3429
  185. package/dist/index.d.mts.map +1 -0
  186. package/dist/index.mjs +78 -4
  187. package/dist/index.mjs.map +1 -1
  188. package/dist/inquirer-BgNcicZ4.mjs +2 -0
  189. package/dist/inquirer-BgNcicZ4.mjs.map +1 -0
  190. package/dist/listFiles-qzyQMaYH.mjs +2 -0
  191. package/dist/listFiles-qzyQMaYH.mjs.map +1 -0
  192. package/dist/{logger-Bj782ZYD.mjs → logger-B-LXIf3U.mjs} +1 -1
  193. package/dist/{logger-Bj782ZYD.mjs.map → logger-B-LXIf3U.mjs.map} +1 -1
  194. package/dist/makeGraphQLRequest-Cq26A_Lq.mjs +2 -0
  195. package/dist/makeGraphQLRequest-Cq26A_Lq.mjs.map +1 -0
  196. package/dist/markRequestDataSiloIdsCompleted-DzqJ5MNY.mjs +2 -0
  197. package/dist/markRequestDataSiloIdsCompleted-DzqJ5MNY.mjs.map +1 -0
  198. package/dist/markSilentPrivacyRequests-BKQUu6Ep.mjs +2 -0
  199. package/dist/markSilentPrivacyRequests-BKQUu6Ep.mjs.map +1 -0
  200. package/dist/mergeTranscendInputs-DGC4xUGu.mjs +2 -0
  201. package/dist/mergeTranscendInputs-DGC4xUGu.mjs.map +1 -0
  202. package/dist/notifyPrivacyRequestsAdditionalTime-TEHAJe4C.mjs +2 -0
  203. package/dist/notifyPrivacyRequestsAdditionalTime-TEHAJe4C.mjs.map +1 -0
  204. package/dist/package-C4J38oR1.mjs +2 -0
  205. package/dist/package-C4J38oR1.mjs.map +1 -0
  206. package/dist/parquetToCsvOneFile-DZVKXrjn.mjs +6 -0
  207. package/dist/parquetToCsvOneFile-DZVKXrjn.mjs.map +1 -0
  208. package/dist/parseAttributesFromString-CZStzJc0.mjs +2 -0
  209. package/dist/parseAttributesFromString-CZStzJc0.mjs.map +1 -0
  210. package/dist/pullAllDatapoints-Cntwuzw7.mjs +45 -0
  211. package/dist/pullAllDatapoints-Cntwuzw7.mjs.map +1 -0
  212. package/dist/pullChunkedCustomSiloOutstandingIdentifiers-BT-GZpT1.mjs +2 -0
  213. package/dist/pullChunkedCustomSiloOutstandingIdentifiers-BT-GZpT1.mjs.map +1 -0
  214. package/dist/pullConsentManagerMetrics-FnhPEszu.mjs +2 -0
  215. package/dist/pullConsentManagerMetrics-FnhPEszu.mjs.map +1 -0
  216. package/dist/pullManualEnrichmentIdentifiersToCsv-B_4REnga.mjs +2 -0
  217. package/dist/pullManualEnrichmentIdentifiersToCsv-B_4REnga.mjs.map +1 -0
  218. package/dist/pullTranscendConfiguration-CqsgEf9A.mjs +80 -0
  219. package/dist/pullTranscendConfiguration-CqsgEf9A.mjs.map +1 -0
  220. package/dist/pullUnstructuredSubDataPointRecommendations-DZd2q6S2.mjs +38 -0
  221. package/dist/pullUnstructuredSubDataPointRecommendations-DZd2q6S2.mjs.map +1 -0
  222. package/dist/pushCronIdentifiersFromCsv-D2saGR5i.mjs +2 -0
  223. package/dist/pushCronIdentifiersFromCsv-D2saGR5i.mjs.map +1 -0
  224. package/dist/pushManualEnrichmentIdentifiersFromCsv-DOvAzMyt.mjs +2 -0
  225. package/dist/pushManualEnrichmentIdentifiersFromCsv-DOvAzMyt.mjs.map +1 -0
  226. package/dist/readCsv-CyOL7eCc.mjs +2 -0
  227. package/dist/readCsv-CyOL7eCc.mjs.map +1 -0
  228. package/dist/{readTranscendYaml-DhKG1ViI.mjs → readTranscendYaml-D-J1ilS0.mjs} +2 -2
  229. package/dist/readTranscendYaml-D-J1ilS0.mjs.map +1 -0
  230. package/dist/removeUnverifiedRequestIdentifiers-ChlwRmhd.mjs +35 -0
  231. package/dist/removeUnverifiedRequestIdentifiers-ChlwRmhd.mjs.map +1 -0
  232. package/dist/request-CAsR6CMY.mjs +117 -0
  233. package/dist/request-CAsR6CMY.mjs.map +1 -0
  234. package/dist/retryRequestDataSilos-DnwXA1YZ.mjs +2 -0
  235. package/dist/retryRequestDataSilos-DnwXA1YZ.mjs.map +1 -0
  236. package/dist/skipPreflightJobs-jK5lNlmv.mjs +2 -0
  237. package/dist/skipPreflightJobs-jK5lNlmv.mjs.map +1 -0
  238. package/dist/skipRequestDataSilos-DQGroOos.mjs +2 -0
  239. package/dist/skipRequestDataSilos-DQGroOos.mjs.map +1 -0
  240. package/dist/splitCsvToList-BRq_CIfd.mjs +2 -0
  241. package/dist/splitCsvToList-BRq_CIfd.mjs.map +1 -0
  242. package/dist/streamPrivacyRequestsToCsv-BK07Bm-T.mjs +2 -0
  243. package/dist/streamPrivacyRequestsToCsv-BK07Bm-T.mjs.map +1 -0
  244. package/dist/syncCodePackages-F-97FNjo.mjs +232 -0
  245. package/dist/syncCodePackages-F-97FNjo.mjs.map +1 -0
  246. package/dist/syncCookies-BxY36BeJ.mjs +2 -0
  247. package/dist/syncCookies-BxY36BeJ.mjs.map +1 -0
  248. package/dist/syncDataFlows-Cx5LZCen.mjs +2 -0
  249. package/dist/syncDataFlows-Cx5LZCen.mjs.map +1 -0
  250. package/dist/syncTemplates-BrH7Yr0V.mjs +23 -0
  251. package/dist/syncTemplates-BrH7Yr0V.mjs.map +1 -0
  252. package/dist/time-Bl_c3W8U.mjs +2 -0
  253. package/dist/time-Bl_c3W8U.mjs.map +1 -0
  254. package/dist/types-B4CVJCpj.mjs +2 -0
  255. package/dist/types-B4CVJCpj.mjs.map +1 -0
  256. package/dist/updateConsentManagerVersionToLatest-C221vAAw.mjs +2 -0
  257. package/dist/updateConsentManagerVersionToLatest-C221vAAw.mjs.map +1 -0
  258. package/dist/uploadConsents-BbR7_sSt.mjs +2 -0
  259. package/dist/uploadConsents-BbR7_sSt.mjs.map +1 -0
  260. package/dist/uploadCookiesFromCsv-roHWekOP.mjs +2 -0
  261. package/dist/uploadCookiesFromCsv-roHWekOP.mjs.map +1 -0
  262. package/dist/uploadDataFlowsFromCsv-DcTbrsv2.mjs +2 -0
  263. package/dist/uploadDataFlowsFromCsv-DcTbrsv2.mjs.map +1 -0
  264. package/dist/uploadPrivacyRequestsFromCsv-BUGTS-pY.mjs +17 -0
  265. package/dist/uploadPrivacyRequestsFromCsv-BUGTS-pY.mjs.map +1 -0
  266. package/dist/uploadSiloDiscoveryResults-D2fK92WR.mjs +20 -0
  267. package/dist/uploadSiloDiscoveryResults-D2fK92WR.mjs.map +1 -0
  268. package/dist/validateTranscendAuth-1W1IylqE.mjs +2 -0
  269. package/dist/validateTranscendAuth-1W1IylqE.mjs.map +1 -0
  270. package/dist/withPreferenceRetry-xLMZyTq9.mjs +2 -0
  271. package/dist/withPreferenceRetry-xLMZyTq9.mjs.map +1 -0
  272. package/dist/writeCsv-B51ulrVl.mjs +6 -0
  273. package/dist/writeCsv-B51ulrVl.mjs.map +1 -0
  274. package/package.json +37 -56
  275. package/dist/api-keys-CxvKdj2v.mjs +0 -2
  276. package/dist/api-keys-CxvKdj2v.mjs.map +0 -1
  277. package/dist/app-BKMxG7RO.mjs +0 -131
  278. package/dist/app-BKMxG7RO.mjs.map +0 -1
  279. package/dist/buildAIIntegrationType-Bk0EbFKV.mjs +0 -2
  280. package/dist/buildAIIntegrationType-Bk0EbFKV.mjs.map +0 -1
  281. package/dist/code-scanning-Cx1kpssH.mjs +0 -4
  282. package/dist/code-scanning-Cx1kpssH.mjs.map +0 -1
  283. package/dist/codecs-TR6p48v3.mjs +0 -2
  284. package/dist/codecs-TR6p48v3.mjs.map +0 -1
  285. package/dist/command-Bzyj3M2G.mjs.map +0 -1
  286. package/dist/consent-manager-c4bgQF1N.mjs +0 -12
  287. package/dist/consent-manager-c4bgQF1N.mjs.map +0 -1
  288. package/dist/constants-CnLQtIBn.mjs +0 -2
  289. package/dist/constants-CnLQtIBn.mjs.map +0 -1
  290. package/dist/context-bkKpii_t.mjs.map +0 -1
  291. package/dist/cron-BvxWyvDu.mjs +0 -2
  292. package/dist/cron-BvxWyvDu.mjs.map +0 -1
  293. package/dist/data-inventory-CkS_kmus.mjs +0 -75
  294. package/dist/data-inventory-CkS_kmus.mjs.map +0 -1
  295. package/dist/dataFlowsToDataSilos-RAhfPV0l.mjs.map +0 -1
  296. package/dist/impl-8dOatHnF.mjs +0 -2
  297. package/dist/impl-8dOatHnF.mjs.map +0 -1
  298. package/dist/impl-Ah-1lwzr.mjs +0 -2
  299. package/dist/impl-Ah-1lwzr.mjs.map +0 -1
  300. package/dist/impl-B5lTeRbn.mjs +0 -2
  301. package/dist/impl-B5lTeRbn.mjs.map +0 -1
  302. package/dist/impl-B6UhzQcY2.mjs +0 -2
  303. package/dist/impl-B6UhzQcY2.mjs.map +0 -1
  304. package/dist/impl-BFf_CotE2.mjs +0 -2
  305. package/dist/impl-BFf_CotE2.mjs.map +0 -1
  306. package/dist/impl-BGQ0EGS0.mjs +0 -2
  307. package/dist/impl-BGQ0EGS0.mjs.map +0 -1
  308. package/dist/impl-BYBNi68b.mjs.map +0 -1
  309. package/dist/impl-B__p3_wC.mjs +0 -2
  310. package/dist/impl-B__p3_wC.mjs.map +0 -1
  311. package/dist/impl-BcayRe6a.mjs +0 -2
  312. package/dist/impl-BcayRe6a.mjs.map +0 -1
  313. package/dist/impl-BkYKsEVG2.mjs +0 -2
  314. package/dist/impl-BkYKsEVG2.mjs.map +0 -1
  315. package/dist/impl-Bl2yVgh0.mjs +0 -4
  316. package/dist/impl-Bl2yVgh0.mjs.map +0 -1
  317. package/dist/impl-BmAMgEEM.mjs +0 -12
  318. package/dist/impl-BmAMgEEM.mjs.map +0 -1
  319. package/dist/impl-BsttzxTN2.mjs +0 -2
  320. package/dist/impl-BsttzxTN2.mjs.map +0 -1
  321. package/dist/impl-BtnySmbi.mjs +0 -2
  322. package/dist/impl-BtnySmbi.mjs.map +0 -1
  323. package/dist/impl-BwX-evfW2.mjs +0 -4
  324. package/dist/impl-BwX-evfW2.mjs.map +0 -1
  325. package/dist/impl-C-wzeAib2.mjs +0 -2
  326. package/dist/impl-C-wzeAib2.mjs.map +0 -1
  327. package/dist/impl-C61PYfk12.mjs +0 -2
  328. package/dist/impl-C61PYfk12.mjs.map +0 -1
  329. package/dist/impl-CAuNpuF2.mjs +0 -2
  330. package/dist/impl-CAuNpuF2.mjs.map +0 -1
  331. package/dist/impl-CSKrBIuV.mjs +0 -2
  332. package/dist/impl-CSKrBIuV.mjs.map +0 -1
  333. package/dist/impl-CZP2l3Ds.mjs.map +0 -1
  334. package/dist/impl-CiJ8hE5W2.mjs +0 -2
  335. package/dist/impl-CiJ8hE5W2.mjs.map +0 -1
  336. package/dist/impl-Cj3H-m2Z.mjs +0 -2
  337. package/dist/impl-Cj3H-m2Z.mjs.map +0 -1
  338. package/dist/impl-CkY0wfCz.mjs +0 -2
  339. package/dist/impl-CkY0wfCz.mjs.map +0 -1
  340. package/dist/impl-Cm8pUfBU2.mjs +0 -2
  341. package/dist/impl-Cm8pUfBU2.mjs.map +0 -1
  342. package/dist/impl-CpzS9LVu2.mjs +0 -2
  343. package/dist/impl-CpzS9LVu2.mjs.map +0 -1
  344. package/dist/impl-CwfamZ1c.mjs +0 -2
  345. package/dist/impl-CwfamZ1c.mjs.map +0 -1
  346. package/dist/impl-D81et1Yb2.mjs +0 -2
  347. package/dist/impl-D81et1Yb2.mjs.map +0 -1
  348. package/dist/impl-D92PTNk3.mjs +0 -2
  349. package/dist/impl-D92PTNk3.mjs.map +0 -1
  350. package/dist/impl-DTXDVeo6.mjs +0 -2
  351. package/dist/impl-DTXDVeo6.mjs.map +0 -1
  352. package/dist/impl-DWoysXup.mjs +0 -2
  353. package/dist/impl-DWoysXup.mjs.map +0 -1
  354. package/dist/impl-DX3JHZ4v2.mjs +0 -2
  355. package/dist/impl-DX3JHZ4v2.mjs.map +0 -1
  356. package/dist/impl-DhuUrzxQ.mjs +0 -2
  357. package/dist/impl-DhuUrzxQ.mjs.map +0 -1
  358. package/dist/impl-DqMYLKjU.mjs +0 -2
  359. package/dist/impl-DqMYLKjU.mjs.map +0 -1
  360. package/dist/impl-DqQ6CIj0.mjs +0 -2
  361. package/dist/impl-DqQ6CIj0.mjs.map +0 -1
  362. package/dist/impl-Duaq6iWI2.mjs +0 -2
  363. package/dist/impl-Duaq6iWI2.mjs.map +0 -1
  364. package/dist/impl-O5gz8qcm.mjs +0 -2
  365. package/dist/impl-O5gz8qcm.mjs.map +0 -1
  366. package/dist/impl-PH0AoC7i.mjs +0 -2
  367. package/dist/impl-PH0AoC7i.mjs.map +0 -1
  368. package/dist/impl-S8p6toVb2.mjs +0 -2
  369. package/dist/impl-S8p6toVb2.mjs.map +0 -1
  370. package/dist/impl-X2MSb8Ij.mjs +0 -2
  371. package/dist/impl-X2MSb8Ij.mjs.map +0 -1
  372. package/dist/impl-bo95wZIU2.mjs +0 -2
  373. package/dist/impl-bo95wZIU2.mjs.map +0 -1
  374. package/dist/impl-cfdCesro.mjs +0 -2
  375. package/dist/impl-cfdCesro.mjs.map +0 -1
  376. package/dist/impl-iZoXu4nV.mjs +0 -2
  377. package/dist/impl-iZoXu4nV.mjs.map +0 -1
  378. package/dist/impl-lebl6Zek2.mjs +0 -2
  379. package/dist/impl-lebl6Zek2.mjs.map +0 -1
  380. package/dist/impl-p0YN9e2e.mjs +0 -2
  381. package/dist/impl-p0YN9e2e.mjs.map +0 -1
  382. package/dist/manual-enrichment-B6lW5kAX.mjs +0 -2
  383. package/dist/manual-enrichment-B6lW5kAX.mjs.map +0 -1
  384. package/dist/mergeTranscendInputs-Coj_e2N3.mjs +0 -2
  385. package/dist/mergeTranscendInputs-Coj_e2N3.mjs.map +0 -1
  386. package/dist/pooling-CazydwlD.mjs.map +0 -1
  387. package/dist/preference-management-8gj7aSJB.mjs +0 -7
  388. package/dist/preference-management-8gj7aSJB.mjs.map +0 -1
  389. package/dist/readTranscendYaml-DhKG1ViI.mjs.map +0 -1
  390. package/dist/syncConfigurationToTranscend-VJd0PnaZ.mjs +0 -3010
  391. package/dist/syncConfigurationToTranscend-VJd0PnaZ.mjs.map +0 -1
  392. package/dist/uploadConsents-C1S-BNzw.mjs +0 -2
  393. package/dist/uploadConsents-C1S-BNzw.mjs.map +0 -1
@@ -0,0 +1,2 @@
+ import{a as e}from"./constants-CeMiHaHx.mjs";import{t}from"./logger-B-LXIf3U.mjs";import{n,t as r}from"./bluebird-CUitXgsY.mjs";import{t as i}from"./createSombraGotInstance-D1Il9zUE.mjs";import{t as a}from"./readCsv-CyOL7eCc.mjs";import{chunk as o}from"lodash-es";import s from"colors";import*as c from"io-ts";import l from"cli-progress";const u=c.type({nonce:c.string,identifier:c.string});async function d(e,{nonce:t,identifier:n}){try{return await e.put(`v1/data-silo`,{headers:{"x-transcend-nonce":t},json:{profiles:[{profileId:n}]}}),!0}catch(e){if(e.response?.statusCode===409)return!1;throw Error(`Received an error from server: ${e?.response?.body||e?.message}`)}}async function f({file:c,dataSiloId:f,auth:p,sombraAuth:m,concurrency:h=100,transcendUrl:g=e,sleepSeconds:_=10}){let v=await i(g,p,m);t.info(s.magenta(`Reading "${c}" from disk`));let y=a(c,u);t.info(s.magenta(`Notifying Transcend for data silo "${f}" marking "${y.length}" identifiers as completed.`));let b=new Date().getTime(),x=new l.SingleBar({},l.Presets.shades_classic),S=0,C=0,w=0;x.start(y.length,0);let T=o(y,h),E=T.length;await n(T,async(e,n)=>{t.info(s.blue(`Processing chunk ${n+1}/${E} (${o.length} items)`)),await r(e,async e=>{try{await d(v,e)?S+=1:C+=1}catch(n){t.error(s.red(`Error notifying Transcend for identifier "${e.identifier}" - ${n?.message}`)),w+=1}x.update(S+C)}),_>0&&n<E-1&&(t.info(s.yellow(`Sleeping for ${_}s before next chunk...`)),await new Promise(e=>{setTimeout(e,_*1e3)}))}),x.stop();let D=new Date().getTime()-b;if(t.info(s.green(`Successfully notified Transcend for ${S} identifiers in "${D/1e3}" seconds!`)),C&&t.info(s.magenta(`There were ${C} identifiers that were not in a state to be updated.They likely have already been resolved.`)),w)throw t.error(s.red(`There were ${w} identifiers that failed to be updated. Please review the logs for more information.`)),Error(`Failed to update all identifiers`);return y.length}export{u as n,d as r,f as t};
+ //# sourceMappingURL=pushCronIdentifiersFromCsv-D2saGR5i.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"pushCronIdentifiersFromCsv-D2saGR5i.mjs","names":[],"sources":["../src/lib/cron/markCronIdentifierCompleted.ts","../src/lib/cron/pushCronIdentifiersFromCsv.ts"],"sourcesContent":["import type { Got } from 'got';\nimport * as t from 'io-ts';\n\n/**\n * Minimal set required to mark as completed\n */\nexport const CronIdentifierPush = t.type({\n nonce: t.string,\n identifier: t.string,\n});\n\n/** Type override */\nexport type CronIdentifierPush = t.TypeOf<typeof CronIdentifierPush>;\n\n/**\n * Mark an identifier output by the cron job as completed.\n *\n * @see https://docs.transcend.io/docs/api-reference/PUT/v1/data-silo\n * @param sombra - Sombra instance configured to make requests\n * @param options - Additional options\n * @returns Successfully submitted request, false if not in a state to update\n */\nexport async function markCronIdentifierCompleted(\n sombra: Got,\n { nonce, identifier }: CronIdentifierPush,\n): Promise<boolean> {\n try {\n // Make the GraphQL request\n await sombra.put('v1/data-silo', {\n headers: {\n 'x-transcend-nonce': nonce,\n },\n json: {\n profiles: [\n {\n profileId: identifier,\n },\n ],\n },\n });\n return true;\n } catch (err) {\n // handle gracefully\n if (err.response?.statusCode === 409) {\n return false;\n }\n throw new Error(`Received an error from server: ${err?.response?.body || err?.message}`);\n }\n}\n","import cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport { chunk } from 'lodash-es';\n\nimport { DEFAULT_TRANSCEND_API } from '../../constants.js';\nimport { logger } from '../../logger.js';\nimport { map, mapSeries } from '../bluebird.js';\nimport { createSombraGotInstance } from '../graphql/index.js';\nimport { readCsv } from '../requests/index.js';\nimport { markCronIdentifierCompleted, CronIdentifierPush } from './markCronIdentifierCompleted.js';\n\n/**\n * Given a CSV of cron job outputs, mark all requests as completed in Transcend\n *\n * @param options - Options\n * @returns Number of items marked as completed\n */\nexport async function pushCronIdentifiersFromCsv({\n file,\n dataSiloId,\n auth,\n sombraAuth,\n concurrency = 100,\n transcendUrl = DEFAULT_TRANSCEND_API,\n sleepSeconds = 10,\n}: {\n /** CSV file path */\n file: string;\n /** Transcend API key authentication */\n auth: string;\n /** Data Silo ID to pull down jobs for */\n dataSiloId: string;\n /** Upload concurrency */\n concurrency?: number;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Sombra API key authentication */\n sombraAuth?: string;\n /** Sleep time in seconds between chunks of concurrent calls */\n sleepSeconds?: number;\n}): Promise<number> {\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n\n // Read from CSV\n logger.info(colors.magenta(`Reading \"${file}\" from disk`));\n const activeResults = readCsv(file, CronIdentifierPush);\n\n // Notify Transcend\n logger.info(\n colors.magenta(\n `Notifying Transcend for data silo \"${dataSiloId}\" marking \"${activeResults.length}\" identifiers as completed.`,\n ),\n );\n\n // Time duration\n const t0 = new Date().getTime();\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);\n\n let successCount = 0;\n let failureCount = 0;\n let errorCount = 0;\n progressBar.start(activeResults.length, 0);\n\n // Process in chunks with sleep intervals\n const chunks = chunk(activeResults, concurrency);\n const totalChunks = chunks.length;\n const processChunk = async (items: CronIdentifierPush[], chunkIndex: number): Promise<void> => {\n logger.info(\n colors.blue(`Processing chunk ${chunkIndex + 1}/${totalChunks} (${chunk.length} items)`),\n );\n\n // Process the items of the chunk concurrently\n await map(items, async (identifier) => {\n try {\n const success = await markCronIdentifierCompleted(sombra, identifier);\n if (success) {\n successCount += 1;\n } else {\n failureCount += 1;\n }\n } catch (e) {\n logger.error(\n colors.red(\n `Error notifying Transcend for identifier \"${identifier.identifier}\" - ${e?.message}`,\n ),\n );\n errorCount += 1;\n }\n progressBar.update(successCount + failureCount);\n });\n\n // Sleep between chunks (except for the last chunk)\n if (sleepSeconds > 0 && chunkIndex < totalChunks - 1) {\n logger.info(colors.yellow(`Sleeping for ${sleepSeconds}s before next chunk...`));\n\n await new Promise((resolve) => {\n setTimeout(resolve, sleepSeconds * 1000);\n });\n }\n };\n\n // Process all chunks sequentially\n await mapSeries(chunks, processChunk);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully notified Transcend for ${successCount} identifiers in \"${\n totalTime / 1000\n }\" seconds!`,\n ),\n );\n if (failureCount) {\n logger.info(\n colors.magenta(\n `There were ${failureCount} identifiers that were not in a state to be updated.` +\n 'They likely have already been resolved.',\n ),\n );\n }\n if (errorCount) {\n logger.error(\n colors.red(\n `There were ${errorCount} identifiers that failed to be updated. Please review the logs for more information.`,\n ),\n );\n throw new Error('Failed to update all identifiers');\n }\n return activeResults.length;\n}\n"],"mappings":"kVAMA,MAAa,EAAqB,EAAE,KAAK,CACvC,MAAO,EAAE,OACT,WAAY,EAAE,OACf,CAAC,CAaF,eAAsB,EACpB,EACA,CAAE,QAAO,cACS,CAClB,GAAI,CAcF,OAZA,MAAM,EAAO,IAAI,eAAgB,CAC/B,QAAS,CACP,oBAAqB,EACtB,CACD,KAAM,CACJ,SAAU,CACR,CACE,UAAW,EACZ,CACF,CACF,CACF,CAAC,CACK,SACA,EAAK,CAEZ,GAAI,EAAI,UAAU,aAAe,IAC/B,MAAO,GAET,MAAU,MAAM,kCAAkC,GAAK,UAAU,MAAQ,GAAK,UAAU,EC7B5F,eAAsB,EAA2B,CAC/C,OACA,aACA,OACA,aACA,cAAc,IACd,eAAe,EACf,eAAe,IAgBG,CAElB,IAAM,EAAS,MAAM,EAAwB,EAAc,EAAM,EAAW,CAG5E,EAAO,KAAK,EAAO,QAAQ,YAAY,EAAK,aAAa,CAAC,CAC1D,IAAM,EAAgB,EAAQ,EAAM,EAAmB,CAGvD,EAAO,KACL,EAAO,QACL,sCAAsC,EAAW,aAAa,EAAc,OAAO,6BACpF,CACF,CAGD,IAAM,EAAK,IAAI,MAAM,CAAC,SAAS,CAEzB,EAAc,IAAI,EAAY,UAAU,EAAE,CAAE,EAAY,QAAQ,eAAe,CAEjF,EAAe,EACf,EAAe,EACf,EAAa,EACjB,EAAY,MAAM,EAAc,OAAQ,EAAE,CAG1C,IAAM,EAAS,EAAM,EAAe,EAAY,CAC1C,EAAc,EAAO,OAqC3B,MAAM,EAAU,EApCK,MAAO,EAA6B,IAAsC,CAC7F,EAAO,KACL,EAAO,KAAK,oBAAoB,EAAa,EAAE,GAAG,EAAY,IAAI,EAAM,OAAO,SAAS,CACzF,CAGD,MAAM,EAAI,EAAO,KAAO,IAAe,CACrC,GAAI,CACc,MAAM,EAA4B,EAAQ,EAAW,CAEnE,GAAgB,EAEhB,GAAgB,QAEX,EAAG,CACV,EAAO,MACL,EAAO,IACL,6CAA6C,EAAW,WAAW,MAAM,GAAG,UAC7E,CACF,CACD,GAAc,EAEhB,EAAY,OAAO,EAAe,EAAa,EAC/C,CAGE,EAAe,GAAK,EAAa,EAAc,IACjD,EAAO,KAAK,EAAO,OAAO,gBAAgB,EAAa,wBAAwB,CAAC,CAEhF,MAAM,IAAI,QAAS,GAAY,CAC7B,WAAW,EAAS,EAAe,IAAK,EACxC,GAK+B,CAErC,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EAiBvB,GAfA,EAAO,KACL,EAAO,MACL,uCAAuC,EAAa,mBAClD,EAAY,IACb,YACF,CACF,CACG,GACF,EAAO,KACL,EAAO,QACL,cAAc,EAAa,6FAE5B,CACF,CAEC,EAMF,MALA,EAAO,MACL,EAAO,IACL,cAAc,EAAW,sFAC1B,CACF,CACS,MAAM,mCAAmC,CAErD,OAAO,EAAc"}
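Per the `sourcesContent` embedded in the source map above, the new `pushCronIdentifiersFromCsv` validates CSV rows against the `CronIdentifierPush` codec (`nonce` and `identifier` columns), splits them into chunks of `concurrency` rows, uploads each chunk in parallel, and sleeps `sleepSeconds` between chunks. A minimal invocation sketch, assuming the function remains re-exported from the package root; the file path, data silo ID, and environment variable are placeholders:

```ts
// Illustrative sketch, not a documented example from this release.
// Run as an ES module (the package ships ESM), so top-level await is available.
import { pushCronIdentifiersFromCsv } from '@transcend-io/cli';

// CSV must contain `nonce` and `identifier` columns (the CronIdentifierPush codec)
const uploaded = await pushCronIdentifiersFromCsv({
  file: './cron-identifiers.csv', // placeholder path
  dataSiloId: '<data-silo-id>', // placeholder ID
  auth: process.env.TRANSCEND_API_KEY as string,
  concurrency: 100, // rows per chunk (default 100)
  sleepSeconds: 10, // pause between chunks (default 10)
});
console.log(`Marked ${uploaded} identifiers as completed`);
```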
@@ -0,0 +1,2 @@
+ import{a as e}from"./constants-CeMiHaHx.mjs";import{t}from"./logger-B-LXIf3U.mjs";import{t as n}from"./bluebird-CUitXgsY.mjs";import{t as r}from"./createSombraGotInstance-D1Il9zUE.mjs";import{o as i}from"./request-CAsR6CMY.mjs";import{t as a}from"./splitCsvToList-BRq_CIfd.mjs";import{t as o}from"./readCsv-CyOL7eCc.mjs";import{r as s,t as c}from"./makeGraphQLRequest-Cq26A_Lq.mjs";import{uniq as l}from"lodash-es";import u from"colors";import*as d from"io-ts";const f=`https://app.transcend.io/privacy-requests/incoming-requests/`,p=d.record(d.string,d.string);async function m(e,{id:n,...r},i,o){if(!n){let e=`Request ID must be provided to enricher request.${o?` Found error in row: ${o}`:``}`;throw t.error(u.red(e)),Error(e)}let s=n.toLowerCase(),c=Object.entries(r).reduce((e,[t,n])=>l(a(n)).length===0?e:Object.assign(e,{[t]:l(a(n)).map(e=>({value:t===`email`?e.toLowerCase():e}))}),{});try{return await e.post(`v1/enrich-identifiers`,{headers:{"x-transcend-request-id":s,"x-transcend-enricher-id":i},json:{enrichedIdentifiers:c}}).json(),t.error(u.green(`Successfully enriched request: ${f}${s}`)),!0}catch(e){if(typeof e.response.body==`string`&&e.response.body.includes(`Cannot update a resolved RequestEnricher`))return t.warn(u.magenta(`Skipped enrichment for request: ${f}${s}, request is no longer in the enriching phase.`)),!1;throw t.error(u.red(`Failed to enricher identifiers for request with id: ${f}${s} - ${e.message} - ${e.response.body}`)),e}}async function h({file:a,auth:l,sombraAuth:d,enricherId:f,markSilent:h,concurrency:g=100,transcendUrl:_=e}){let v=await r(_,l,d),y=s(_,l);t.info(u.magenta(`Reading "${a}" from disk`));let b=o(a,p);t.info(u.magenta(`Enriching "${b.length}" privacy requests.`));let x=0,S=0,C=0;if(await n(b,async(e,n)=>{try{h&&(await c(y,i,{input:{id:e.id,isSilent:!0}}),t.info(u.magenta(`Mark request as silent mode - ${e.id}`))),await m(v,e,f,n)?x+=1:S+=1}catch{C+=1}},{concurrency:g}),t.info(u.green(`Successfully notified Transcend! \n Success count: ${x}.`)),S>0&&t.info(u.magenta(`Skipped count: ${S}.`)),C>0)throw t.info(u.red(`Error Count: ${C}.`)),Error(`Failed to enrich: ${C} requests.`);return b.length}export{p as n,m as r,h as t};
+ //# sourceMappingURL=pushManualEnrichmentIdentifiersFromCsv-DOvAzMyt.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"pushManualEnrichmentIdentifiersFromCsv-DOvAzMyt.mjs","names":[],"sources":["../src/lib/manual-enrichment/enrichPrivacyRequest.ts","../src/lib/manual-enrichment/pushManualEnrichmentIdentifiersFromCsv.ts"],"sourcesContent":["import colors from 'colors';\nimport type { Got } from 'got';\nimport * as t from 'io-ts';\nimport { uniq } from 'lodash-es';\n\nimport { logger } from '../../logger.js';\nimport { splitCsvToList } from '../requests/splitCsvToList.js';\n\nconst ADMIN_URL = 'https://app.transcend.io/privacy-requests/incoming-requests/';\n/**\n * Minimal set required to mark as completed\n */\nexport const EnrichPrivacyRequest = t.record(t.string, t.string);\n\n/** Type override */\nexport type EnrichPrivacyRequest = t.TypeOf<typeof EnrichPrivacyRequest>;\n\n/**\n * Upload identifiers to a privacy request or mark request as\n *\n * @param sombra - Sombra instance configured to make requests\n * @param request - Request to enricher\n * @param enricherId - The ID of the enricher being uploaded to\n * @param index - Index of request ID\n * @returns True if enriched successfully, false if skipped, throws error if failed\n */\nexport async function enrichPrivacyRequest(\n sombra: Got,\n { id: rawId, ...rest }: EnrichPrivacyRequest,\n enricherId: string,\n index?: number,\n): Promise<boolean> {\n if (!rawId) {\n // error\n const msg = `Request ID must be provided to enricher request.${\n index ? ` Found error in row: ${index}` : ''\n }`;\n logger.error(colors.red(msg));\n throw new Error(msg);\n }\n\n const id = rawId.toLowerCase();\n\n // Pull out the identifiers\n const enrichedIdentifiers = Object.entries(rest).reduce(\n (acc, [key, value]) => {\n const values = uniq(splitCsvToList(value));\n return values.length === 0\n ? acc\n : Object.assign(acc, {\n [key]: uniq(splitCsvToList(value)).map((val) => ({\n value: key === 'email' ? val.toLowerCase() : val,\n })),\n });\n },\n {} as Record<string, string[]>,\n );\n\n // Make the GraphQL request\n try {\n await sombra\n .post('v1/enrich-identifiers', {\n headers: {\n 'x-transcend-request-id': id,\n 'x-transcend-enricher-id': enricherId,\n },\n json: {\n enrichedIdentifiers,\n },\n })\n .json();\n\n logger.error(colors.green(`Successfully enriched request: ${ADMIN_URL}${id}`));\n return true;\n } catch (err) {\n // skip if already enriched\n if (\n typeof err.response.body === 'string' &&\n err.response.body.includes('Cannot update a resolved RequestEnricher')\n ) {\n logger.warn(\n colors.magenta(\n `Skipped enrichment for request: ${ADMIN_URL}${id}, request is no longer in the enriching phase.`,\n ),\n );\n return false;\n }\n\n // error\n logger.error(\n colors.red(\n `Failed to enricher identifiers for request with id: ${ADMIN_URL}${id} - ${err.message} - ${err.response.body}`,\n ),\n );\n throw err;\n }\n}\n","import colors from 'colors';\n\nimport { DEFAULT_TRANSCEND_API } from '../../constants.js';\nimport { logger } from '../../logger.js';\nimport { map } from '../bluebird.js';\nimport {\n UPDATE_PRIVACY_REQUEST,\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n makeGraphQLRequest,\n} from '../graphql/index.js';\nimport { readCsv } from '../requests/index.js';\nimport { enrichPrivacyRequest, EnrichPrivacyRequest } from './enrichPrivacyRequest.js';\n\n/**\n * Push a CSV of enriched requests back into Transcend\n *\n * @param options - Options\n * @returns Number of items processed\n */\nexport async function pushManualEnrichmentIdentifiersFromCsv({\n file,\n auth,\n sombraAuth,\n enricherId,\n markSilent,\n concurrency = 100,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** CSV file path */\n file: string;\n /** Transcend API key authentication */\n auth: string;\n /** ID of enricher being uploaded to */\n enricherId: string;\n /** Sombra API key authentication */\n sombraAuth?: string;\n /** Concurrency */\n concurrency?: number;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Mark requests in silent mode before enriching */\n markSilent?: boolean;\n}): Promise<number> {\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Read from CSV\n logger.info(colors.magenta(`Reading \"${file}\" from disk`));\n const activeResults = readCsv(file, EnrichPrivacyRequest);\n\n // Notify Transcend\n logger.info(colors.magenta(`Enriching \"${activeResults.length}\" privacy requests.`));\n\n let successCount = 0;\n let skippedCount = 0;\n let errorCount = 0;\n\n await map(\n activeResults,\n async (request, index) => {\n try {\n // Mark requests in silent mode before a certain date\n if (markSilent) {\n await makeGraphQLRequest(client, UPDATE_PRIVACY_REQUEST, {\n input: {\n id: request.id,\n isSilent: true,\n },\n });\n\n logger.info(colors.magenta(`Mark request as silent mode - ${request.id}`));\n }\n\n const result = await enrichPrivacyRequest(sombra, request, enricherId, index);\n if (result) {\n successCount += 1;\n } else {\n skippedCount += 1;\n }\n } catch (err) {\n errorCount += 1;\n }\n },\n { concurrency },\n );\n\n logger.info(colors.green(`Successfully notified Transcend! \\n Success count: ${successCount}.`));\n\n if (skippedCount > 0) {\n logger.info(colors.magenta(`Skipped count: ${skippedCount}.`));\n }\n\n if (errorCount > 0) {\n logger.info(colors.red(`Error Count: ${errorCount}.`));\n throw new Error(`Failed to enrich: ${errorCount} requests.`);\n }\n\n return activeResults.length;\n}\n"],"mappings":"6cAQA,MAAM,EAAY,+DAIL,EAAuB,EAAE,OAAO,EAAE,OAAQ,EAAE,OAAO,CAchE,eAAsB,EACpB,EACA,CAAE,GAAI,EAAO,GAAG,GAChB,EACA,EACkB,CAClB,GAAI,CAAC,EAAO,CAEV,IAAM,EAAM,mDACV,EAAQ,wBAAwB,IAAU,KAG5C,MADA,EAAO,MAAM,EAAO,IAAI,EAAI,CAAC,CACnB,MAAM,EAAI,CAGtB,IAAM,EAAK,EAAM,aAAa,CAGxB,EAAsB,OAAO,QAAQ,EAAK,CAAC,QAC9C,EAAK,CAAC,EAAK,KACK,EAAK,EAAe,EAAM,CAAC,CAC5B,SAAW,EACrB,EACA,OAAO,OAAO,EAAK,EAChB,GAAM,EAAK,EAAe,EAAM,CAAC,CAAC,IAAK,IAAS,CAC/C,MAAO,IAAQ,QAAU,EAAI,aAAa,CAAG,EAC9C,EAAE,CACJ,CAAC,CAER,EAAE,CACH,CAGD,GAAI,CAcF,OAbA,MAAM,EACH,KAAK,wBAAyB,CAC7B,QAAS,CACP,yBAA0B,EAC1B,0BAA2B,EAC5B,CACD,KAAM,CACJ,sBACD,CACF,CAAC,CACD,MAAM,CAET,EAAO,MAAM,EAAO,MAAM,kCAAkC,IAAY,IAAK,CAAC,CACvE,SACA,EAAK,CAEZ,GACE,OAAO,EAAI,SAAS,MAAS,UAC7B,EAAI,SAAS,KAAK,SAAS,2CAA2C,CAOtE,OALA,EAAO,KACL,EAAO,QACL,mCAAmC,IAAY,EAAG,gDACnD,CACF,CACM,GAST,MALA,EAAO,MACL,EAAO,IACL,uDAAuD,IAAY,EAAG,KAAK,EAAI,QAAQ,KAAK,EAAI,SAAS,OAC1G,CACF,CACK,GC1EV,eAAsB,EAAuC,CAC3D,OACA,OACA,aACA,aACA,aACA,cAAc,IACd,eAAe,GAgBG,CAElB,IAAM,EAAS,MAAM,EAAwB,EAAc,EAAM,EAAW,CACtE,EAAS,EAA4B,EAAc,EAAK,CAG9D,EAAO,KAAK,EAAO,QAAQ,YAAY,EAAK,aAAa,CAAC,CAC1D,IAAM,EAAgB,EAAQ,EAAM,EAAqB,CAGzD,EAAO,KAAK,EAAO,QAAQ,cAAc,EAAc,OAAO,qBAAqB,CAAC,CAEpF,IAAI,EAAe,EACf,EAAe,EACf,EAAa,EAqCjB,GAnCA,MAAM,EACJ,EACA,MAAO,EAAS,IAAU,CACxB,GAAI,CAEE,IACF,MAAM,EAAmB,EAAQ,EAAwB,CACvD,MAAO,CACL,GAAI,EAAQ,GACZ,SAAU,GACX,CACF,CAAC,CAEF,EAAO,KAAK,EAAO,QAAQ,iCAAiC,EAAQ,KAAK,CAAC,EAG7D,MAAM,EAAqB,EAAQ,EAAS,EAAY,EAAM,CAE3E,GAAgB,EAEhB,GAAgB,OAEN,CACZ,GAAc,IAGlB,CAAE,cAAa,CAChB,CAED,EAAO,KAAK,EAAO,MAAM,sDAAsD,EAAa,GAAG,CAAC,CAE5F,EAAe,GACjB,EAAO,KAAK,EAAO,QAAQ,kBAAkB,EAAa,GAAG,CAAC,CAG5D,EAAa,EAEf,MADA,EAAO,KAAK,EAAO,IAAI,gBAAgB,EAAW,GAAG,CAAC,CAC5C,MAAM,qBAAqB,EAAW,YAAY,CAG9D,OAAO,EAAc"}
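The embedded source above shows each CSV row as a string-to-string record: the `id` column names the privacy request, every other column is split with `splitCsvToList`, de-duplicated, and posted to Sombra's `v1/enrich-identifiers` endpoint under the given enricher ID; with `markSilent`, the request is first flipped to silent mode via the `UPDATE_PRIVACY_REQUEST` mutation. A sketch under the same assumptions (root re-export, placeholder IDs and paths):

```ts
// Illustrative sketch; identifiers and paths are placeholders.
import { pushManualEnrichmentIdentifiersFromCsv } from '@transcend-io/cli';

// Each CSV row: an `id` column (request ID) plus one column per identifier
// name; multi-value cells are split with splitCsvToList and de-duplicated.
await pushManualEnrichmentIdentifiersFromCsv({
  file: './enriched-requests.csv', // placeholder path
  enricherId: '<enricher-id>', // placeholder ID
  auth: process.env.TRANSCEND_API_KEY as string,
  markSilent: true, // mark each request silent before enriching it
  concurrency: 100, // default 100
});
```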
@@ -0,0 +1,2 @@
+ import{decodeCodec as e}from"@transcend-io/type-utils";import{readFileSync as t}from"node:fs";import*as n from"io-ts";import{parse as r}from"csv-parse/sync";function i(i,a,o={columns:!0}){let s=r(t(i,`utf-8`),o);return e(n.array(a),s).map(e=>Object.entries(e).reduce((e,[t,n])=>Object.assign(e,{[t.replace(/[^a-z_.+\-A-Z -~]/g,``)]:n}),{}))}export{i as t};
+ //# sourceMappingURL=readCsv-CyOL7eCc.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"readCsv-CyOL7eCc.mjs","names":[],"sources":["../src/lib/requests/readCsv.ts"],"sourcesContent":["import { readFileSync } from 'node:fs';\n\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport type { Options } from 'csv-parse';\nimport { parse } from 'csv-parse/sync';\nimport * as t from 'io-ts';\n\n/**\n * Read in a CSV and validate its shape\n *\n * @param pathToFile - Path to file\n * @param codec - The codec to validate against. This is the codec for individual, non-header, rows\n * @param options - CSV parse options\n * @returns The JSON data\n */\nexport function readCsv<T extends t.Any>(\n pathToFile: string,\n codec: T,\n options: Options = { columns: true },\n): t.TypeOf<T>[] {\n // read file contents and parse\n const fileContent = parse(readFileSync(pathToFile, 'utf-8'), options);\n\n // validate codec\n const data = decodeCodec(t.array(codec), fileContent);\n\n // remove any special characters from object keys\n const parsed = data.map((datum) =>\n Object.entries(datum).reduce(\n (acc, [key, value]) =>\n Object.assign(acc, {\n [key.replace(/[^a-z_.+\\-A-Z -~]/g, '')]: value,\n }),\n {} as T,\n ),\n );\n return parsed;\n}\n"],"mappings":"6JAeA,SAAgB,EACd,EACA,EACA,EAAmB,CAAE,QAAS,GAAM,CACrB,CAEf,IAAM,EAAc,EAAM,EAAa,EAAY,QAAQ,CAAE,EAAQ,CAerE,OAZa,EAAY,EAAE,MAAM,EAAM,CAAE,EAAY,CAGjC,IAAK,GACvB,OAAO,QAAQ,EAAM,CAAC,QACnB,EAAK,CAAC,EAAK,KACV,OAAO,OAAO,EAAK,EAChB,EAAI,QAAQ,qBAAsB,GAAG,EAAG,EAC1C,CAAC,CACJ,EAAE,CACH,CACF"}
@@ -1,4 +1,4 @@
- import{_t as e}from"./codecs-TR6p48v3.mjs";import{decodeCodec as t}from"@transcend-io/type-utils";import{readFileSync as n,writeFileSync as r}from"node:fs";import i from"js-yaml";const a=/<<parameters\.(.+?)>>/,o=`parameters`;function s(e,t,n=``){let r=e;if(Object.entries(t).forEach(([e,t])=>{r=r.split(`<<${o}.${e}>>`).join(t)}),a.test(r)){let[,e]=a.exec(r)||[];throw Error(`Found variable that was not set: ${e}.
+ import{_t as e}from"./codecs-BE3Wmoh8.mjs";import{decodeCodec as t}from"@transcend-io/type-utils";import{readFileSync as n,writeFileSync as r}from"node:fs";import i from"js-yaml";const a=/<<parameters\.(.+?)>>/,o=`parameters`;function s(e,t,n=``){let r=e;if(Object.entries(t).forEach(([e,t])=>{r=r.split(`<<${o}.${e}>>`).join(t)}),a.test(r)){let[,e]=a.exec(r)||[];throw Error(`Found variable that was not set: ${e}.
  Make sure you are passing all parameters through the --${o}=${e}:value-for-param flag.
  ${n}`)}return r}function c(r,a={}){let o=s(n(r,`utf-8`),a,`Also check that there are no extra variables defined in your yaml: ${r}`);return t(e,i.load(o))}function l(n,a){r(n,i.dump(t(e,a)))}export{l as a,s as i,a as n,c as r,o as t};
- //# sourceMappingURL=readTranscendYaml-DhKG1ViI.mjs.map
+ //# sourceMappingURL=readTranscendYaml-D-J1ilS0.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"readTranscendYaml-D-J1ilS0.mjs","names":[],"sources":["../src/lib/readTranscendYaml.ts"],"sourcesContent":["import { readFileSync, writeFileSync } from 'node:fs';\n\nimport { decodeCodec, ObjByString } from '@transcend-io/type-utils';\nimport yaml from 'js-yaml';\n\nimport { TranscendInput } from '../codecs.js';\n\nexport const VARIABLE_PARAMETERS_REGEXP = /<<parameters\\.(.+?)>>/;\nexport const VARIABLE_PARAMETERS_NAME = 'parameters';\n\n/**\n * Function that replaces variables in a text file.\n * Throws error if there are variables that have not been replaced\n *\n * @param input - Input text\n * @param variables - Variables to replace\n * @param extraErrorMessage - Additional error message text\n * @returns Output text\n */\nexport function replaceVariablesInYaml(\n input: string,\n variables: ObjByString,\n extraErrorMessage = '',\n): string {\n let contents = input;\n // Replace variables\n Object.entries(variables).forEach(([name, value]) => {\n contents = contents.split(`<<${VARIABLE_PARAMETERS_NAME}.${name}>>`).join(value);\n });\n\n // Throw error if unfilled variables\n if (VARIABLE_PARAMETERS_REGEXP.test(contents)) {\n const [, name] = VARIABLE_PARAMETERS_REGEXP.exec(contents) || [];\n throw new Error(\n `Found variable that was not set: ${name}.\nMake sure you are passing all parameters through the --${VARIABLE_PARAMETERS_NAME}=${name}:value-for-param flag.\n${extraErrorMessage}`,\n );\n }\n\n return contents;\n}\n\n/**\n * Read in the contents of a yaml file and validate that the shape\n * of the yaml file matches the codec API\n *\n * @param filePath - Path to yaml file\n * @param variables - Variables to fill in\n * @returns The contents of the yaml file, type-checked\n */\nexport function readTranscendYaml(filePath: string, variables: ObjByString = {}): TranscendInput {\n // Read in contents\n const fileContents = readFileSync(filePath, 'utf-8');\n\n // Replace variables\n const replacedVariables = replaceVariablesInYaml(\n fileContents,\n variables,\n `Also check that there are no extra variables defined in your yaml: ${filePath}`,\n );\n\n // Validate shape\n return decodeCodec(TranscendInput, yaml.load(replacedVariables));\n}\n\n/**\n * Write a Transcend configuration to disk\n *\n * @param filePath - Path to yaml file\n * @param input - The input to write out\n */\nexport function writeTranscendYaml(filePath: string, input: TranscendInput): void {\n writeFileSync(filePath, yaml.dump(decodeCodec(TranscendInput, input)));\n}\n"],"mappings":"mLAOA,MAAa,EAA6B,wBAC7B,EAA2B,aAWxC,SAAgB,EACd,EACA,EACA,EAAoB,GACZ,CACR,IAAI,EAAW,EAOf,GALA,OAAO,QAAQ,EAAU,CAAC,SAAS,CAAC,EAAM,KAAW,CACnD,EAAW,EAAS,MAAM,KAAK,EAAyB,GAAG,EAAK,IAAI,CAAC,KAAK,EAAM,EAChF,CAGE,EAA2B,KAAK,EAAS,CAAE,CAC7C,GAAM,EAAG,GAAQ,EAA2B,KAAK,EAAS,EAAI,EAAE,CAChE,MAAU,MACR,oCAAoC,EAAK;yDACU,EAAyB,GAAG,EAAK;EACxF,IACG,CAGH,OAAO,EAWT,SAAgB,EAAkB,EAAkB,EAAyB,EAAE,CAAkB,CAK/F,IAAM,EAAoB,EAHL,EAAa,EAAU,QAAQ,CAKlD,EACA,sEAAsE,IACvE,CAGD,OAAO,EAAY,EAAgB,EAAK,KAAK,EAAkB,CAAC,CASlE,SAAgB,EAAmB,EAAkB,EAA6B,CAChF,EAAc,EAAU,EAAK,KAAK,EAAY,EAAgB,EAAM,CAAC,CAAC"}
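This chunk keeps the `<<parameters.*>>` substitution logic intact (only the `codecs` chunk hash changed): `replaceVariablesInYaml` swaps in every supplied variable and throws if a placeholder is left unfilled, and `readTranscendYaml` then validates the YAML against the `TranscendInput` codec. A usage sketch, assuming `readTranscendYaml` remains exported as in the embedded source:

```ts
// Illustrative sketch of the parameter substitution shown above.
import { readTranscendYaml } from '@transcend-io/cli';

// transcend.yml may reference <<parameters.domain>>; every placeholder must be
// supplied here, or readTranscendYaml throws and points at the
// --parameters=domain:value-for-param CLI flag.
const input = readTranscendYaml('./transcend.yml', { domain: 'example.com' });
```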
@@ -0,0 +1,35 @@
+ import{a as e}from"./constants-CeMiHaHx.mjs";import{t}from"./logger-B-LXIf3U.mjs";import{t as n}from"./bluebird-CUitXgsY.mjs";import{r,t as i}from"./makeGraphQLRequest-Cq26A_Lq.mjs";import{r as a}from"./fetchAllRequests-DNQQsY4s.mjs";import{RequestStatus as o}from"@transcend-io/privacy-types";import s from"colors";import{gql as c}from"graphql-request";import l from"cli-progress";const u=c`
+ mutation TranscendCliRemoveRequestIdentifiers($input: RemoveRequestIdentifiersInput!) {
+ removeRequestIdentifiers(input: $input) {
+ count
+ }
+ }
+ `,d=c`
+ query TranscendCliRequestIdentifiers(
+ $first: Int!
+ $offset: Int!
+ $requestIds: [ID!]
+ $updatedAtBefore: Date
+ $updatedAtAfter: Date
+ ) {
+ requestIdentifiers(
+ input: {
+ requestIds: $requestIds
+ updatedAtBefore: $updatedAtBefore
+ updatedAtAfter: $updatedAtAfter
+ }
+ first: $first
+ offset: $offset
+ useMaster: false
+ orderBy: [{ field: createdAt, direction: ASC }, { field: name, direction: ASC }]
+ ) {
+ nodes {
+ id
+ name
+ isVerifiedAtLeastOnce
+ }
+ totalCount
+ }
+ }
+ `;async function f(e,{requestId:t,requestIds:n,updatedAtBefore:r,updatedAtAfter:a}){let o=n??(t?[t]:void 0),s=[],c=0,l=!1;do{let{requestIdentifiers:{nodes:t}}=await i(e,d,{first:50,offset:c,requestIds:o,updatedAtBefore:r?r.toISOString():void 0,updatedAtAfter:a?a.toISOString():void 0});s.push(...t),c+=50,l=t.length===50}while(l);return s}async function p({requestActions:c,identifierNames:d,auth:p,concurrency:m=20,transcendUrl:h=e}){let g=r(h,p),_=new Date().getTime(),v=new l.SingleBar({},l.Presets.shades_classic),y=await a(g,{actions:c,statuses:[o.Enriching]});t.info(s.magenta(`Fetched requests in preflight/enriching state.`));let b=0,x=0;v.start(y.length,0),await n(y,async e=>{let t=(await f(g,{requestId:e.id})).filter(({isVerifiedAtLeastOnce:e,name:t})=>e===!1&&d.includes(t)).map(({id:e})=>e);t.length>0&&(await i(g,u,{input:{requestId:e.id,requestIdentifierIds:t}}),x+=t.length),b+=1,v.update(b)},{concurrency:m}),v.stop();let S=new Date().getTime()-_;return t.info(s.green(`Successfully cleared out unverified identifiers "${S/1e3}" seconds for ${b} requests, ${x} identifiers were cleared out!`)),y.length}export{d as i,f as n,u as r,p as t};
+ //# sourceMappingURL=removeUnverifiedRequestIdentifiers-ChlwRmhd.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"removeUnverifiedRequestIdentifiers-ChlwRmhd.mjs","names":[],"sources":["../src/lib/graphql/gqls/RequestIdentifier.ts","../src/lib/graphql/fetchAllRequestIdentifierMetadata.ts","../src/lib/requests/removeUnverifiedRequestIdentifiers.ts"],"sourcesContent":["import { gql } from 'graphql-request';\n\nexport const REMOVE_REQUEST_IDENTIFIERS = gql`\n mutation TranscendCliRemoveRequestIdentifiers($input: RemoveRequestIdentifiersInput!) {\n removeRequestIdentifiers(input: $input) {\n count\n }\n }\n`;\n\n// TODO: https://transcend.height.app/T-27909 - enable optimizations\n// isExportCsv: true\nexport const REQUEST_IDENTIFIERS = gql`\n query TranscendCliRequestIdentifiers(\n $first: Int!\n $offset: Int!\n $requestIds: [ID!]\n $updatedAtBefore: Date\n $updatedAtAfter: Date\n ) {\n requestIdentifiers(\n input: {\n requestIds: $requestIds\n updatedAtBefore: $updatedAtBefore\n updatedAtAfter: $updatedAtAfter\n }\n first: $first\n offset: $offset\n useMaster: false\n orderBy: [{ field: createdAt, direction: ASC }, { field: name, direction: ASC }]\n ) {\n nodes {\n id\n name\n isVerifiedAtLeastOnce\n }\n totalCount\n }\n }\n`;\n","import { GraphQLClient } from 'graphql-request';\n\nimport { REQUEST_IDENTIFIERS } from './gqls/index.js';\nimport { makeGraphQLRequest } from './makeGraphQLRequest.js';\n\nexport interface RequestIdentifierMetadata {\n /** ID of request identifier */\n id: string;\n /** Name of identifier */\n name: string;\n /** Status of identifier */\n isVerifiedAtLeastOnce: boolean;\n}\n\nconst PAGE_SIZE = 50;\n\n/**\n * Fetch all request identifier metadata for a particular request\n *\n * @param client - GraphQL client\n * @param options - Filter options\n * @returns List of request identifiers\n */\nexport async function fetchAllRequestIdentifierMetadata(\n client: GraphQLClient,\n {\n requestId,\n requestIds,\n updatedAtBefore,\n updatedAtAfter,\n }: {\n /** ID of request to filter on */\n requestId?: string;\n /** IDs of requests to filter on */\n requestIds?: string[];\n /** Filter for request identifiers updated before this date */\n updatedAtBefore?: Date;\n /** Filter for request identifiers updated after this date */\n updatedAtAfter?: Date;\n },\n): Promise<RequestIdentifierMetadata[]> {\n const resolvedRequestIds = requestIds ?? (requestId ? [requestId] : undefined);\n const requestIdentifiers: RequestIdentifierMetadata[] = [];\n let offset = 0;\n\n // Paginate\n let shouldContinue = false;\n do {\n const {\n requestIdentifiers: { nodes },\n } = await makeGraphQLRequest<{\n /** Request Identifiers */\n requestIdentifiers: {\n /** List */\n nodes: RequestIdentifierMetadata[];\n };\n }>(client, REQUEST_IDENTIFIERS, {\n first: PAGE_SIZE,\n offset,\n requestIds: resolvedRequestIds,\n updatedAtBefore: updatedAtBefore ? updatedAtBefore.toISOString() : undefined,\n updatedAtAfter: updatedAtAfter ? updatedAtAfter.toISOString() : undefined,\n });\n requestIdentifiers.push(...nodes);\n offset += PAGE_SIZE;\n shouldContinue = nodes.length === PAGE_SIZE;\n } while (shouldContinue);\n\n return requestIdentifiers;\n}\n","import { RequestAction, RequestStatus } from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\n\nimport { DEFAULT_TRANSCEND_API } from '../../constants.js';\nimport { logger } from '../../logger.js';\nimport { map } from '../bluebird.js';\nimport {\n REMOVE_REQUEST_IDENTIFIERS,\n fetchAllRequests,\n fetchAllRequestIdentifierMetadata,\n makeGraphQLRequest,\n buildTranscendGraphQLClient,\n} from '../graphql/index.js';\n\n/**\n * Remove a set of unverified request identifier\n *\n * @param options - Options\n * @returns Number of items marked as completed\n */\nexport async function removeUnverifiedRequestIdentifiers({\n requestActions,\n identifierNames,\n auth,\n concurrency = 20,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** The request actions that should be restarted */\n requestActions: RequestAction[];\n /** Transcend API key authentication */\n auth: string;\n /** The set of identifier names to remove */\n identifierNames: string[];\n /** Concurrency to upload requests in parallel */\n concurrency?: number;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n}): Promise<number> {\n // Find all requests made before createdAt that are in a removing data state\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Time duration\n const t0 = new Date().getTime();\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);\n\n // Pull in the requests\n const allRequests = await fetchAllRequests(client, {\n actions: requestActions,\n statuses: [RequestStatus.Enriching],\n });\n\n // Notify Transcend\n logger.info(colors.magenta('Fetched requests in preflight/enriching state.'));\n\n let total = 0;\n let processed = 0;\n progressBar.start(allRequests.length, 0);\n await map(\n allRequests,\n async (requestToRestart) => {\n const requestIdentifiers = await fetchAllRequestIdentifierMetadata(client, {\n requestId: requestToRestart.id,\n });\n const clearOut = requestIdentifiers\n .filter(\n ({ isVerifiedAtLeastOnce, name }) =>\n isVerifiedAtLeastOnce === false && identifierNames.includes(name),\n )\n .map(({ id }) => id);\n\n if (clearOut.length > 0) {\n await makeGraphQLRequest<{\n /** Whether we successfully uploaded the results */\n success: boolean;\n }>(client, REMOVE_REQUEST_IDENTIFIERS, {\n input: {\n requestId: requestToRestart.id,\n requestIdentifierIds: clearOut,\n },\n });\n processed += clearOut.length;\n }\n\n total += 1;\n progressBar.update(total);\n },\n { concurrency },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully cleared out unverified identifiers \"${\n totalTime / 1000\n }\" seconds for ${total} requests, ${processed} identifiers were cleared out!`,\n ),\n );\n return allRequests.length;\n}\n"],"mappings":"8XAEA,MAAa,EAA6B,CAAG;;;;;;EAUhC,EAAsB,CAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;ECWtC,eAAsB,EACpB,EACA,CACE,YACA,aACA,kBACA,kBAWoC,CACtC,IAAM,EAAqB,IAAe,EAAY,CAAC,EAAU,CAAG,IAAA,IAC9D,EAAkD,EAAE,CACtD,EAAS,EAGT,EAAiB,GACrB,EAAG,CACD,GAAM,CACJ,mBAAoB,CAAE,UACpB,MAAM,EAMP,EAAQ,EAAqB,CAC9B,MAAO,GACP,SACA,WAAY,EACZ,gBAAiB,EAAkB,EAAgB,aAAa,CAAG,IAAA,GACnE,eAAgB,EAAiB,EAAe,aAAa,CAAG,IAAA,GACjE,CAAC,CACF,EAAmB,KAAK,GAAG,EAAM,CACjC,GAAU,GACV,EAAiB,EAAM,SAAW,SAC3B,GAET,OAAO,EC/CT,eAAsB,EAAmC,CACvD,iBACA,kBACA,OACA,cAAc,GACd,eAAe,GAYG,CAElB,IAAM,EAAS,EAA4B,EAAc,EAAK,CAGxD,EAAK,IAAI,MAAM,CAAC,SAAS,CAEzB,EAAc,IAAI,EAAY,UAAU,EAAE,CAAE,EAAY,QAAQ,eAAe,CAG/E,EAAc,MAAM,EAAiB,EAAQ,CACjD,QAAS,EACT,SAAU,CAAC,EAAc,UAAU,CACpC,CAAC,CAGF,EAAO,KAAK,EAAO,QAAQ,iDAAiD,CAAC,CAE7E,IAAI,EAAQ,EACR,EAAY,EAChB,EAAY,MAAM,EAAY,OAAQ,EAAE,CACxC,MAAM,EACJ,EACA,KAAO,IAAqB,CAI1B,IAAM,GAHqB,MAAM,EAAkC,EAAQ,CACzE,UAAW,EAAiB,GAC7B,CAAC,EAEC,QACE,CAAE,wBAAuB,UACxB,IAA0B,IAAS,EAAgB,SAAS,EAAK,CACpE,CACA,KAAK,CAAE,QAAS,EAAG,CAElB,EAAS,OAAS,IACpB,MAAM,EAGH,EAAQ,EAA4B,CACrC,MAAO,CACL,UAAW,EAAiB,GAC5B,qBAAsB,EACvB,CACF,CAAC,CACF,GAAa,EAAS,QAGxB,GAAS,EACT,EAAY,OAAO,EAAM,EAE3B,CAAE,cAAa,CAChB,CAED,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EASvB,OAPA,EAAO,KACL,EAAO,MACL,oDACE,EAAY,IACb,gBAAgB,EAAM,aAAa,EAAU,gCAC/C,CACF,CACM,EAAY"}
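Per the embedded source, `removeUnverifiedRequestIdentifiers` fetches all requests in the `Enriching` state for the given actions, pages through each request's identifiers 50 at a time, and removes those whose names match `identifierNames` and were never verified. An invocation sketch with placeholder values, assuming a root re-export:

```ts
// Illustrative sketch; the action and identifier names are placeholders.
import { RequestAction } from '@transcend-io/privacy-types';
import { removeUnverifiedRequestIdentifiers } from '@transcend-io/cli';

await removeUnverifiedRequestIdentifiers({
  auth: process.env.TRANSCEND_API_KEY as string,
  requestActions: [RequestAction.Erasure], // scans requests in the Enriching state
  identifierNames: ['phone'], // unverified identifiers with these names are removed
  concurrency: 20, // default 20
});
```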
@@ -0,0 +1,117 @@
+ import{gql as e}from"graphql-request";const t=e`
+ query TranscendCliRequestsCount($filterBy: RequestFiltersInput!) {
+ requests(filterBy: $filterBy, first: 0, useMaster: false) {
+ totalCount
+ }
+ }
+ `,n=e`
+ query TranscendCliRequests($first: Int!, $after: String, $filterBy: RequestFiltersInput!) {
+ requests(
+ filterBy: $filterBy
+ first: $first
+ after: $after
+ orderBy: [{ field: createdAt, direction: ASC }, { field: id, direction: ASC }]
+ useMaster: false
+ ) {
+ nodes {
+ id
+ createdAt
+ email
+ link
+ status
+ details
+ isTest
+ locale
+ origin
+ isSilent
+ coreIdentifier
+ daysRemaining
+ successfullyCompletedAt
+ type
+ subjectType
+ country
+ countrySubDivision
+ purpose {
+ title
+ name
+ consent
+ enrichedPreferences {
+ topic
+ selectValues {
+ id
+ name
+ preferenceOption {
+ id
+ slug
+ title {
+ defaultMessage
+ }
+ }
+ }
+ selectValue {
+ id
+ name
+ }
+ selectValue {
+ id
+ name
+ }
+ preferenceTopic {
+ title {
+ defaultMessage
+ }
+ id
+ slug
+ }
+ name
+ id
+ booleanValue
+ }
+ }
+ attributeValues {
+ id
+ name
+ attributeKey {
+ id
+ name
+ }
+ }
+ }
+ pageInfo {
+ endCursor
+ hasNextPage
+ }
+ }
+ }
+ `,r=e`
+ mutation TranscendCliApprovePrivacyRequest($input: CommunicationInput!) {
+ approveRequest(input: $input) {
+ request {
+ id
+ }
+ }
+ }
+ `,i=e`
+ mutation TranscendCliCancelPrivacyRequest($input: CommunicationInput!) {
+ cancelRequest(input: $input) {
+ request {
+ id
+ }
+ }
+ }
+ `,a=e`
+ mutation TranscendCliUpdatePrivacyRequest($input: UpdateRequestInput!) {
+ updateRequest(input: $input) {
+ request {
+ id
+ }
+ }
+ }
+ `,o=e`
+ mutation TranscendCliNotifyAdditionalTime($input: AdditionalTimeInput!) {
+ notifyAdditionalTime(input: $input) {
+ clientMutationId
+ }
+ }
+ `;export{t as a,n as i,i as n,a as o,o as r,r as t};
+ //# sourceMappingURL=request-CAsR6CMY.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"request-CAsR6CMY.mjs","names":[],"sources":["../src/lib/graphql/gqls/request.ts"],"sourcesContent":["import { gql } from 'graphql-request';\n\n// TODO: https://transcend.height.app/T-27909 - enable optimizations\n// isExportCsv: true\nexport const REQUESTS_COUNT = gql`\n query TranscendCliRequestsCount($filterBy: RequestFiltersInput!) {\n requests(filterBy: $filterBy, first: 0, useMaster: false) {\n totalCount\n }\n }\n`;\n\nexport const REQUESTS = gql`\n query TranscendCliRequests($first: Int!, $after: String, $filterBy: RequestFiltersInput!) {\n requests(\n filterBy: $filterBy\n first: $first\n after: $after\n orderBy: [{ field: createdAt, direction: ASC }, { field: id, direction: ASC }]\n useMaster: false\n ) {\n nodes {\n id\n createdAt\n email\n link\n status\n details\n isTest\n locale\n origin\n isSilent\n coreIdentifier\n daysRemaining\n successfullyCompletedAt\n type\n subjectType\n country\n countrySubDivision\n purpose {\n title\n name\n consent\n enrichedPreferences {\n topic\n selectValues {\n id\n name\n preferenceOption {\n id\n slug\n title {\n defaultMessage\n }\n }\n }\n selectValue {\n id\n name\n }\n selectValue {\n id\n name\n }\n preferenceTopic {\n title {\n defaultMessage\n }\n id\n slug\n }\n name\n id\n booleanValue\n }\n }\n attributeValues {\n id\n name\n attributeKey {\n id\n name\n }\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n }\n`;\n\nexport const APPROVE_PRIVACY_REQUEST = gql`\n mutation TranscendCliApprovePrivacyRequest($input: CommunicationInput!) {\n approveRequest(input: $input) {\n request {\n id\n }\n }\n }\n`;\nexport const CANCEL_PRIVACY_REQUEST = gql`\n mutation TranscendCliCancelPrivacyRequest($input: CommunicationInput!) {\n cancelRequest(input: $input) {\n request {\n id\n }\n }\n }\n`;\n\nexport const UPDATE_PRIVACY_REQUEST = gql`\n mutation TranscendCliUpdatePrivacyRequest($input: UpdateRequestInput!) {\n updateRequest(input: $input) {\n request {\n id\n }\n }\n }\n`;\n\nexport const NOTIFY_ADDITIONAL_TIME = gql`\n mutation TranscendCliNotifyAdditionalTime($input: AdditionalTimeInput!) {\n notifyAdditionalTime(input: $input) {\n clientMutationId\n }\n }\n`;\n"],"mappings":"sCAIA,MAAa,EAAiB,CAAG;;;;;;EAQpB,EAAW,CAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAiFd,EAA0B,CAAG;;;;;;;;EAS7B,EAAyB,CAAG;;;;;;;;EAU5B,EAAyB,CAAG;;;;;;;;EAU5B,EAAyB,CAAG"}
@@ -0,0 +1,2 @@
+ import{a as e}from"./constants-CeMiHaHx.mjs";import{t}from"./logger-B-LXIf3U.mjs";import{t as n}from"./bluebird-CUitXgsY.mjs";import{i as r}from"./RequestDataSilo-_Iv44M9u.mjs";import{r as i,t as a}from"./makeGraphQLRequest-Cq26A_Lq.mjs";import{r as o}from"./fetchAllRequests-DNQQsY4s.mjs";import{t as s}from"./fetchRequestDataSilo-P4yA7Lyc.mjs";import{RequestStatus as c}from"@transcend-io/privacy-types";import l from"colors";import u from"cli-progress";async function d({requestActions:d,dataSiloId:f,auth:p,concurrency:m=20,transcendUrl:h=e}){let g=i(h,p),_=new Date().getTime(),v=new u.SingleBar({},u.Presets.shades_classic),y=await o(g,{actions:d,statuses:[c.Compiling,c.Approving]});t.info(l.magenta(`Retrying requests for Data Silo: "${f}", restarting "${y.length}" requests.`));let b=0,x=0;v.start(y.length,0),await n(y,async e=>{try{await a(g,r,{requestDataSiloId:(await s(g,{requestId:e.id,dataSiloId:f})).id})}catch(e){if(!e.message.includes(`Failed to find RequestDataSilo`))throw e;x+=1}b+=1,v.update(b)},{concurrency:m}),v.stop();let S=new Date().getTime()-_;return t.info(l.green(`Successfully notified Transcend in "${S/1e3}" seconds for ${b} requests, ${x} requests were skipped because data silo was not attached to the request!`)),y.length}export{d as t};
+ //# sourceMappingURL=retryRequestDataSilos-DnwXA1YZ.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"retryRequestDataSilos-DnwXA1YZ.mjs","names":[],"sources":["../src/lib/requests/retryRequestDataSilos.ts"],"sourcesContent":["import { RequestAction, RequestStatus } from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\n\nimport { DEFAULT_TRANSCEND_API } from '../../constants.js';\nimport { logger } from '../../logger.js';\nimport { map } from '../bluebird.js';\nimport {\n RETRY_REQUEST_DATA_SILO,\n fetchRequestDataSilo,\n fetchAllRequests,\n makeGraphQLRequest,\n buildTranscendGraphQLClient,\n} from '../graphql/index.js';\n\n/**\n * Retry a set of RequestDataSilos\n *\n * @param options - Options\n * @returns Number of items marked as completed\n */\nexport async function retryRequestDataSilos({\n requestActions,\n dataSiloId,\n auth,\n concurrency = 20,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** The request actions that should be restarted */\n requestActions: RequestAction[];\n /** Transcend API key authentication */\n auth: string;\n /** Data Silo ID to pull down jobs for */\n dataSiloId: string;\n /** Concurrency to upload requests in parallel */\n concurrency?: number;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n}): Promise<number> {\n // Find all requests made before createdAt that are in a removing data state\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Time duration\n const t0 = new Date().getTime();\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);\n\n // Pull in the requests\n const allRequests = await fetchAllRequests(client, {\n actions: requestActions,\n statuses: [RequestStatus.Compiling, RequestStatus.Approving],\n });\n\n // Notify Transcend\n logger.info(\n colors.magenta(\n `Retrying requests for Data Silo: \"${dataSiloId}\", restarting \"${allRequests.length}\" requests.`,\n ),\n );\n\n let total = 0;\n let skipped = 0;\n progressBar.start(allRequests.length, 0);\n await map(\n allRequests,\n async (requestToRestart) => {\n try {\n const requestDataSilo = await fetchRequestDataSilo(client, {\n requestId: requestToRestart.id,\n dataSiloId,\n });\n\n await makeGraphQLRequest<{\n /** Whether we successfully uploaded the results */\n success: boolean;\n }>(client, RETRY_REQUEST_DATA_SILO, {\n requestDataSiloId: requestDataSilo.id,\n });\n } catch (err) {\n // some requests may not have this data silo connected\n if (!err.message.includes('Failed to find RequestDataSilo')) {\n throw err;\n }\n skipped += 1;\n }\n\n total += 1;\n progressBar.update(total);\n },\n { concurrency },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully notified Transcend in \"${\n totalTime / 1000\n }\" seconds for ${total} requests, ${skipped} requests were skipped because data silo was not attached to the request!`,\n ),\n );\n return 
allRequests.length;\n}\n"],"mappings":"wcAqBA,eAAsB,EAAsB,CAC1C,iBACA,aACA,OACA,cAAc,GACd,eAAe,GAYG,CAElB,IAAM,EAAS,EAA4B,EAAc,EAAK,CAGxD,EAAK,IAAI,MAAM,CAAC,SAAS,CAEzB,EAAc,IAAI,EAAY,UAAU,EAAE,CAAE,EAAY,QAAQ,eAAe,CAG/E,EAAc,MAAM,EAAiB,EAAQ,CACjD,QAAS,EACT,SAAU,CAAC,EAAc,UAAW,EAAc,UAAU,CAC7D,CAAC,CAGF,EAAO,KACL,EAAO,QACL,qCAAqC,EAAW,iBAAiB,EAAY,OAAO,aACrF,CACF,CAED,IAAI,EAAQ,EACR,EAAU,EACd,EAAY,MAAM,EAAY,OAAQ,EAAE,CACxC,MAAM,EACJ,EACA,KAAO,IAAqB,CAC1B,GAAI,CAMF,MAAM,EAGH,EAAQ,EAAyB,CAClC,mBATsB,MAAM,EAAqB,EAAQ,CACzD,UAAW,EAAiB,GAC5B,aACD,CAAC,EAMmC,GACpC,CAAC,OACK,EAAK,CAEZ,GAAI,CAAC,EAAI,QAAQ,SAAS,iCAAiC,CACzD,MAAM,EAER,GAAW,EAGb,GAAS,EACT,EAAY,OAAO,EAAM,EAE3B,CAAE,cAAa,CAChB,CAED,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EASvB,OAPA,EAAO,KACL,EAAO,MACL,uCACE,EAAY,IACb,gBAAgB,EAAM,aAAa,EAAQ,2EAC7C,CACF,CACM,EAAY"}
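The `retryRequestDataSilos` source embedded in the map above deliberately swallows exactly one failure mode ('Failed to find RequestDataSilo'), counting it as skipped, and rethrows everything else. That pattern in isolation, as a hedged sketch (the function name and signature are illustrative):

```ts
/**
 * Run an action, tolerating exactly one known failure message.
 * Returns true when the action succeeded, false when the known
 * error was seen; any other error is rethrown to the caller.
 */
async function runOrSkip(
  action: () => Promise<void>,
  knownErrorFragment: string,
): Promise<boolean> {
  try {
    await action();
    return true;
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    if (!message.includes(knownErrorFragment)) {
      throw err; // unexpected failure: surface it
    }
    return false; // known failure: caller counts this as skipped
  }
}
```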
@@ -0,0 +1,2 @@
+ import{a as e}from"./constants-CeMiHaHx.mjs";import{t}from"./logger-B-LXIf3U.mjs";import{n,t as r}from"./bluebird-CUitXgsY.mjs";import{i,t as a}from"./fetchAllRequestEnrichers-CK-kk5eg.mjs";import{r as o,t as s}from"./makeGraphQLRequest-Cq26A_Lq.mjs";import{r as c}from"./fetchAllRequests-DNQQsY4s.mjs";import{RequestEnricherStatus as l,RequestStatus as u}from"@transcend-io/privacy-types";import d from"colors";import f from"cli-progress";async function p({enricherIds:p,auth:m,concurrency:h=100,transcendUrl:g=e}){let _=o(g,m),v=new Date().getTime(),y=await c(_,{statuses:[u.Enriching]});t.info(d.magenta(`Processing enricher: "${p.join(`,`)}" fetched "${y.length}" in enriching status.`));let b=new f.SingleBar({},f.Presets.shades_classic),x=0;b.start(y.length,0);let S=0;await r(y,async e=>{let t=(await a(_,{requestId:e.id})).filter(e=>p.includes(e.enricher.id)&&![l.Resolved,l.Skipped].includes(e.status));t.length>0&&await n(t,async e=>{try{await s(_,i,{requestEnricherId:e.id}),S+=1}catch(e){if(!e.message.includes(`Client error: Cannot skip Request enricher because it has already completed`))throw e}}),x+=1,b.update(x)},{concurrency:h}),b.stop();let C=new Date().getTime()-v;return t.info(d.green(`Successfully skipped "${S}" for "${y.length}" requests in "${C/1e3}" seconds!`)),y.length}export{p as t};
+ //# sourceMappingURL=skipPreflightJobs-jK5lNlmv.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"skipPreflightJobs-jK5lNlmv.mjs","names":[],"sources":["../src/lib/requests/skipPreflightJobs.ts"],"sourcesContent":["import { RequestEnricherStatus, RequestStatus } from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\n\nimport { DEFAULT_TRANSCEND_API } from '../../constants.js';\nimport { logger } from '../../logger.js';\nimport { mapSeries, map } from '../bluebird.js';\nimport {\n makeGraphQLRequest,\n buildTranscendGraphQLClient,\n fetchAllRequestEnrichers,\n fetchAllRequests,\n SKIP_REQUEST_ENRICHER,\n} from '../graphql/index.js';\n\n/**\n * Given an enricher ID, mark all open request enrichers as skipped\n *\n * @param options - Options\n * @returns Number of items skipped\n */\nexport async function skipPreflightJobs({\n enricherIds,\n auth,\n concurrency = 100,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** Transcend API key authentication */\n auth: string;\n /** Enricher IDs to pull down jobs for */\n enricherIds: string[];\n /** Upload concurrency */\n concurrency?: number;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Request statuses to mark as completed */\n requestStatuses?: RequestStatus[];\n}): Promise<number> {\n // Find all requests made before createdAt that are in a removing data state\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Time duration\n const t0 = new Date().getTime();\n\n // fetch all RequestDataSilos that are open\n const requests = await fetchAllRequests(client, {\n statuses: [RequestStatus.Enriching],\n });\n\n // Notify Transcend\n logger.info(\n colors.magenta(\n `Processing enricher: \"${enricherIds.join(',')}\" fetched \"${\n requests.length\n }\" in enriching status.`,\n ),\n );\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);\n\n let total = 0;\n progressBar.start(requests.length, 0);\n let totalSkipped = 0;\n await map(\n requests,\n async (request) => {\n // TODO dont pull all in\n const requestEnrichers = await fetchAllRequestEnrichers(client, {\n requestId: request.id,\n });\n const requestEnrichersFiltered = requestEnrichers.filter(\n (enricher) =>\n enricherIds.includes(enricher.enricher.id) &&\n ![\n RequestEnricherStatus.Resolved,\n RequestEnricherStatus.Skipped,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ].includes(enricher.status as any),\n );\n\n // TODO\n if (requestEnrichersFiltered.length > 0) {\n await mapSeries(requestEnrichersFiltered, async (requestEnricher) => {\n try {\n await makeGraphQLRequest<{\n /** Whether we successfully uploaded the results */\n success: boolean;\n }>(client, SKIP_REQUEST_ENRICHER, {\n requestEnricherId: requestEnricher.id,\n });\n totalSkipped += 1;\n } catch (err) {\n if (\n !err.message.includes(\n 'Client error: Cannot skip Request enricher because it has already completed',\n )\n ) {\n throw err;\n }\n }\n });\n }\n total += 1;\n progressBar.update(total);\n },\n { concurrency },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully skipped \"${totalSkipped}\" for \"${\n requests.length\n }\" requests in \"${totalTime / 1000}\" seconds!`,\n ),\n );\n return 
requests.length;\n}\n"],"mappings":"wbAqBA,eAAsB,EAAkB,CACtC,cACA,OACA,cAAc,IACd,eAAe,GAYG,CAElB,IAAM,EAAS,EAA4B,EAAc,EAAK,CAGxD,EAAK,IAAI,MAAM,CAAC,SAAS,CAGzB,EAAW,MAAM,EAAiB,EAAQ,CAC9C,SAAU,CAAC,EAAc,UAAU,CACpC,CAAC,CAGF,EAAO,KACL,EAAO,QACL,yBAAyB,EAAY,KAAK,IAAI,CAAC,aAC7C,EAAS,OACV,wBACF,CACF,CAGD,IAAM,EAAc,IAAI,EAAY,UAAU,EAAE,CAAE,EAAY,QAAQ,eAAe,CAEjF,EAAQ,EACZ,EAAY,MAAM,EAAS,OAAQ,EAAE,CACrC,IAAI,EAAe,EACnB,MAAM,EACJ,EACA,KAAO,IAAY,CAKjB,IAAM,GAHmB,MAAM,EAAyB,EAAQ,CAC9D,UAAW,EAAQ,GACpB,CAAC,EACgD,OAC/C,GACC,EAAY,SAAS,EAAS,SAAS,GAAG,EAC1C,CAAC,CACC,EAAsB,SACtB,EAAsB,QAEvB,CAAC,SAAS,EAAS,OAAc,CACrC,CAGG,EAAyB,OAAS,GACpC,MAAM,EAAU,EAA0B,KAAO,IAAoB,CACnE,GAAI,CACF,MAAM,EAGH,EAAQ,EAAuB,CAChC,kBAAmB,EAAgB,GACpC,CAAC,CACF,GAAgB,QACT,EAAK,CACZ,GACE,CAAC,EAAI,QAAQ,SACX,8EACD,CAED,MAAM,IAGV,CAEJ,GAAS,EACT,EAAY,OAAO,EAAM,EAE3B,CAAE,cAAa,CAChB,CAED,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EASvB,OAPA,EAAO,KACL,EAAO,MACL,yBAAyB,EAAa,UACpC,EAAS,OACV,iBAAiB,EAAY,IAAK,YACpC,CACF,CACM,EAAS"}
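`skipPreflightJobs` and its siblings here lean on a concurrency-limited `map` imported from the bundled `./bluebird.js` shim. A stand-in implementation of that primitive, to show the control flow being relied on (this sketch is an assumption, not the shipped shim):

```ts
// Stand-in for the concurrency-limited map imported from ./bluebird.js above.
async function mapWithConcurrency<T, R>(
  items: readonly T[],
  mapper: (item: T, index: number) => Promise<R>,
  { concurrency }: { concurrency: number },
): Promise<R[]> {
  const results: R[] = new Array(items.length);
  let next = 0;
  // Each worker claims the next index synchronously before awaiting, so the
  // single-threaded event loop guarantees no index is processed twice.
  const workers = Array.from(
    { length: Math.min(concurrency, items.length) },
    async () => {
      while (next < items.length) {
        const i = next;
        next += 1;
        results[i] = await mapper(items[i], i);
      }
    },
  );
  await Promise.all(workers);
  return results;
}
```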
@@ -0,0 +1,2 @@
+ import{a as e}from"./constants-CeMiHaHx.mjs";import{t}from"./logger-B-LXIf3U.mjs";import{t as n}from"./bluebird-CUitXgsY.mjs";import{t as r}from"./RequestDataSilo-_Iv44M9u.mjs";import{r as i,t as a}from"./makeGraphQLRequest-Cq26A_Lq.mjs";import{n as o,r as s}from"./fetchRequestDataSilo-P4yA7Lyc.mjs";import{RequestStatus as c}from"@transcend-io/privacy-types";import l from"colors";import u from"cli-progress";async function d({dataSiloId:d,auth:f,concurrency:p=50,maxUploadPerChunk:m=5e4,status:h=`SKIPPED`,transcendUrl:g=e,requestStatuses:_=[c.Compiling,c.Secondary]}){let v=i(g,f),y=new Date().getTime(),b=await s(v,{dataSiloId:d,requestStatuses:_});t.info(l.magenta(`Marking ${b} request data silos as completed`));let x=new u.SingleBar({},u.Presets.shades_classic),S=0;for(x.start(b,0);S<b;)await n(await o(v,{dataSiloId:d,requestStatuses:_,limit:m,onProgress:e=>{S+=e/2,x.update(S)}}),async e=>{try{await a(v,r,{requestDataSiloId:e.id,status:h})}catch(e){if(!e.message.includes(`Client error: Request must be active:`))throw e}S+=.5,x.update(S)},{concurrency:p});x.stop();let C=new Date().getTime()-y;return t.info(l.green(`Successfully skipped "${b}" requests in "${C/1e3}" seconds!`)),b}export{d as t};
+ //# sourceMappingURL=skipRequestDataSilos-DQGroOos.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"skipRequestDataSilos-DQGroOos.mjs","names":[],"sources":["../src/lib/requests/skipRequestDataSilos.ts"],"sourcesContent":["import { RequestStatus } from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\n\nimport { DEFAULT_TRANSCEND_API } from '../../constants.js';\nimport { logger } from '../../logger.js';\nimport { map } from '../bluebird.js';\nimport {\n CHANGE_REQUEST_DATA_SILO_STATUS,\n makeGraphQLRequest,\n buildTranscendGraphQLClient,\n fetchRequestDataSilos,\n fetchRequestDataSilosCount,\n} from '../graphql/index.js';\n\n/**\n * Given a data silo ID, mark all open request data silos as skipped\n *\n * @param options - Options\n * @returns Number of items skipped\n */\nexport async function skipRequestDataSilos({\n dataSiloId,\n auth,\n concurrency = 50,\n maxUploadPerChunk = 50000,\n status = 'SKIPPED',\n transcendUrl = DEFAULT_TRANSCEND_API,\n requestStatuses = [RequestStatus.Compiling, RequestStatus.Secondary],\n}: {\n /** Transcend API key authentication */\n auth: string;\n /** Data Silo ID to pull down jobs for */\n dataSiloId: string;\n /** Status to set */\n status?: 'SKIPPED' | 'RESOLVED';\n /** Upload concurrency */\n concurrency?: number;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Request statuses to mark as completed */\n requestStatuses?: RequestStatus[];\n /** Maximum number of items to mark skipped per go */\n maxUploadPerChunk?: number;\n}): Promise<number> {\n // Find all requests made before createdAt that are in a removing data state\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Time duration\n const t0 = new Date().getTime();\n\n // Determine total number of request data silos\n const requestDataSiloCount = await fetchRequestDataSilosCount(client, {\n dataSiloId,\n requestStatuses,\n });\n\n logger.info(colors.magenta(`Marking ${requestDataSiloCount} request data silos as completed`));\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);\n\n let total = 0;\n progressBar.start(requestDataSiloCount, 0);\n\n // fetch all RequestDataSilos that are open\n while (total < requestDataSiloCount) {\n const requestDataSilos = await fetchRequestDataSilos(client, {\n dataSiloId,\n requestStatuses,\n limit: maxUploadPerChunk,\n // eslint-disable-next-line no-loop-func\n onProgress: (numUpdated) => {\n total += numUpdated / 2;\n progressBar.update(total);\n },\n });\n\n await map(\n requestDataSilos,\n // eslint-disable-next-line no-loop-func\n async (requestDataSilo) => {\n try {\n await makeGraphQLRequest<{\n /** Whether we successfully uploaded the results */\n success: boolean;\n }>(client, CHANGE_REQUEST_DATA_SILO_STATUS, {\n requestDataSiloId: requestDataSilo.id,\n status,\n });\n } catch (err) {\n if (!err.message.includes('Client error: Request must be active:')) {\n throw err;\n }\n }\n\n total += 0.5;\n progressBar.update(total);\n },\n { concurrency },\n );\n }\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully skipped \"${requestDataSiloCount}\" requests in \"${totalTime / 1000}\" seconds!`,\n ),\n );\n return 
requestDataSiloCount;\n}\n"],"mappings":"2ZAqBA,eAAsB,EAAqB,CACzC,aACA,OACA,cAAc,GACd,oBAAoB,IACpB,SAAS,UACT,eAAe,EACf,kBAAkB,CAAC,EAAc,UAAW,EAAc,UAAU,EAgBlD,CAElB,IAAM,EAAS,EAA4B,EAAc,EAAK,CAGxD,EAAK,IAAI,MAAM,CAAC,SAAS,CAGzB,EAAuB,MAAM,EAA2B,EAAQ,CACpE,aACA,kBACD,CAAC,CAEF,EAAO,KAAK,EAAO,QAAQ,WAAW,EAAqB,kCAAkC,CAAC,CAG9F,IAAM,EAAc,IAAI,EAAY,UAAU,EAAE,CAAE,EAAY,QAAQ,eAAe,CAEjF,EAAQ,EAIZ,IAHA,EAAY,MAAM,EAAsB,EAAE,CAGnC,EAAQ,GAYb,MAAM,EAXmB,MAAM,EAAsB,EAAQ,CAC3D,aACA,kBACA,MAAO,EAEP,WAAa,GAAe,CAC1B,GAAS,EAAa,EACtB,EAAY,OAAO,EAAM,EAE5B,CAAC,CAKA,KAAO,IAAoB,CACzB,GAAI,CACF,MAAM,EAGH,EAAQ,EAAiC,CAC1C,kBAAmB,EAAgB,GACnC,SACD,CAAC,OACK,EAAK,CACZ,GAAI,CAAC,EAAI,QAAQ,SAAS,wCAAwC,CAChE,MAAM,EAIV,GAAS,GACT,EAAY,OAAO,EAAM,EAE3B,CAAE,cAAa,CAChB,CAGH,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EAOvB,OALA,EAAO,KACL,EAAO,MACL,0BAA0B,EAAqB,iBAAiB,EAAY,IAAK,YAClF,CACF,CACM"}
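`skipRequestDataSilos` above bounds memory by fetching at most `maxUploadPerChunk` open RequestDataSilos per iteration and looping until the pre-counted total is reached (progress is credited in halves: one half as items stream in, one half as each mutation lands). The chunking loop in isolation, as an illustrative sketch with assumed helper signatures:

```ts
// Illustrative sketch of the bounded chunking loop; fetchChunk and processItem
// are assumed stand-ins for fetchRequestDataSilos and the status mutation.
async function processInChunks(
  totalCount: number,
  maxPerChunk: number,
  fetchChunk: (limit: number) => Promise<string[]>,
  processItem: (id: string) => Promise<void>,
): Promise<void> {
  let processed = 0;
  while (processed < totalCount) {
    const ids = await fetchChunk(maxPerChunk);
    if (ids.length === 0) {
      break; // defensive: stop if the open backlog drains early
    }
    for (const id of ids) {
      await processItem(id);
      processed += 1;
    }
  }
}
```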
@@ -0,0 +1,2 @@
+ function e(e){return e.split(`,`).map(e=>e.trim()).filter(e=>e)}export{e as t};
+ //# sourceMappingURL=splitCsvToList-BRq_CIfd.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"splitCsvToList-BRq_CIfd.mjs","names":[],"sources":["../src/lib/requests/splitCsvToList.ts"],"sourcesContent":["/**\n * Split string to CSV\n *\n * Filter out double commas and spaces like:\n * Dog, Cat -> ['Dog', 'Cat']\n * Dog,,Cat -> ['Dog', 'Cat']\n *\n * @param value - Value\n * @returns List of values\n */\nexport function splitCsvToList(value: string): string[] {\n return value\n .split(',')\n .map((x) => x.trim())\n .filter((x) => x);\n}\n"],"mappings":"AAUA,SAAgB,EAAe,EAAyB,CACtD,OAAO,EACJ,MAAM,IAAI,CACV,IAAK,GAAM,EAAE,MAAM,CAAC,CACpB,OAAQ,GAAM,EAAE"}
@@ -0,0 +1,2 @@
+ import{a as e}from"./constants-CeMiHaHx.mjs";import{t}from"./logger-B-LXIf3U.mjs";import{t as n}from"./bluebird-CUitXgsY.mjs";import{t as r}from"./createSombraGotInstance-D1Il9zUE.mjs";import{n as i,r as a}from"./fetchAllRequestIdentifiers-DrFFOt0m.mjs";import{r as o}from"./makeGraphQLRequest-Cq26A_Lq.mjs";import{i as s,r as c}from"./fetchAllRequests-DNQQsY4s.mjs";import{i as l,r as u,t as d}from"./writeCsv-B51ulrVl.mjs";import{groupBy as f,uniq as p}from"lodash-es";import m from"colors";import h from"cli-progress";function g({attributeValues:e,requestIdentifiers:t,id:n,email:r,type:i,status:a,subjectType:o,details:s,createdAt:c,successfullyCompletedAt:l,country:u,locale:d,origin:p,countrySubDivision:m,isSilent:h,isTest:g,coreIdentifier:_,purpose:v,...y}){return{"Request ID":n,"Created At":c,"Successfully Completed At":l||``,Email:r,"Core Identifier":_,"Request Type":i,"Data Subject Type":o,Status:a,Country:u,"Country Sub Division":m,Details:s,Origin:p,"Silent Mode":h,"Is Test Request":g,Language:d,"Purpose Trigger Name":v?.title||v?.name||``,"Purpose Trigger Value":v?.consent?.toString()||``,...(v?.enrichedPreferences||[]).reduce((e,t)=>{let n=t.preferenceTopic?.title.defaultMessage||t.name;return n?{...e,[n]:t.selectValues?t.selectValues.map(e=>e.name).join(`;`):t.selectValue?.name||t.booleanValue}:e},{}),...y,...Object.entries(f(e,`attributeKey.name`)).reduce((e,[t,n])=>Object.assign(e,{[t]:n.map(({name:e})=>e).join(`,`)}),{}),...Object.entries(f(t,`name`)).reduce((e,[t,n])=>Object.assign(e,{[t]:n.map(({value:e})=>e).join(`,`)}),{})}}function _(e,t,n){let r=e.getTime(),i=t.getTime(),a=(i-r)/n;return Array.from({length:n},(e,t)=>({createdAtAfter:new Date(r+a*t),createdAtBefore:new Date(t===n-1?i:r+a*(t+1))}))}async function v({auth:f,sombraAuth:v,actions:y=[],statuses:b=[],identifierSearch:x,concurrency:S=1,pageLimit:C=100,transcendUrl:w=e,createdAtBefore:T,createdAtAfter:E,updatedAtBefore:D,updatedAtAfter:O,isTest:k,skipRequestIdentifiers:A=!1,file:j}){let M=o(w,f),N=A?void 0:await r(w,f,v),P=``;T&&(P+=` before ${T.toISOString()}`),E&&(P+=`${P?`, and`:``} after ${E.toISOString()}`),t.info(m.magenta(`${y.length>0?`Pulling requests of type "${y.join(`" , "`)}"`:`Pulling all requests`}${P}`));let F=S>1&&E&&T,I=F?_(E,T,S):[{createdAtAfter:E,createdAtBefore:T}];F&&t.info(m.magenta(`Splitting date range into ${S} parallel chunks`));let L={type:y.length>0?y:void 0,status:b.length>0?b:void 0,isTest:k,createdAtBefore:T?T.toISOString():void 0,createdAtAfter:E?E.toISOString():void 0,updatedAtBefore:D?D.toISOString():void 0,updatedAtAfter:O?O.toISOString():void 0},R=Date.now();A||await a(M);let z=await s(M,L);t.info(m.magenta(`Fetching ${z} requests`));let B=new h.SingleBar({},h.Presets.shades_classic);B.start(z,0);let V=0,{baseName:H,extension:U}=l(j),W=I.map((e,t)=>I.length===1?j:`${H}-${t}${U}`),G=[],K=await n(I,async(e,r)=>{let a=W[r],o,s=0;try{await c(M,{actions:y,text:x,statuses:b,createdAtBefore:e.createdAtBefore,createdAtAfter:e.createdAtAfter,updatedAtBefore:D,updatedAtAfter:O,isTest:k,onPage:async e=>{if(e.length===0)return;let t=(A?e.map(e=>({...e,requestIdentifiers:[]})):await n(e,async e=>({...e,requestIdentifiers:await i(M,N,{requestId:e.id,skipSombraCheck:!0})}),{concurrency:C})).map(g);o||(o=p(t.map(e=>Object.keys(e)).flat()),u(a,o)),d(a,t,o),s+=t.length,V+=t.length,B.update(V)}})}catch(n){let i=n instanceof Error?n.message:String(n);t.error(m.red(`Chunk ${r} failed (${e.createdAtAfter?.toISOString()??`start`} → ${e.createdAtBefore?.toISOString()??`end`}): 
${i}`)),G.push({index:r,createdAtAfter:e.createdAtAfter,createdAtBefore:e.createdAtBefore,error:i})}return o||u(a,[]),s},{concurrency:F?S:1});B.stop();let q=K.reduce((e,t)=>e+t,0),J=(Date.now()-R)/1e3;if(G.length>0){t.error(m.red(`\n${G.length} chunk(s) failed. Re-run with these date ranges to fill the gaps:`));for(let e of G)t.error(m.red(` Chunk ${e.index}: --createdAtAfter=${e.createdAtAfter?.toISOString()??``} --createdAtBefore=${e.createdAtBefore?.toISOString()??``}`))}return t.info(m.green(`Streamed ${q} requests to ${W.length} file(s) in ${J}s`)),{filePaths:W,totalCount:q}}export{g as n,v as t};
+ //# sourceMappingURL=streamPrivacyRequestsToCsv-BK07Bm-T.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"streamPrivacyRequestsToCsv-BK07Bm-T.mjs","names":[],"sources":["../src/lib/requests/formatRequestForCsv.ts","../src/lib/requests/streamPrivacyRequestsToCsv.ts"],"sourcesContent":["import { groupBy } from 'lodash-es';\n\nimport type { PrivacyRequest, RequestIdentifier } from '../graphql/index.js';\n\nexport interface ExportedPrivacyRequest extends PrivacyRequest {\n /** Request identifiers */\n requestIdentifiers: RequestIdentifier[];\n}\n\n/** A single CSV row */\nexport type CsvRow = { [k in string]: string | null | number | boolean };\n\n/**\n * Format a single privacy request (with optional identifiers) into a flat CSV row.\n *\n * @param request - The request with identifiers attached\n * @returns Flat object suitable for CSV output\n */\nexport function formatRequestForCsv({\n attributeValues,\n requestIdentifiers,\n id,\n email,\n type,\n status,\n subjectType,\n details,\n createdAt,\n successfullyCompletedAt,\n country,\n locale,\n origin,\n countrySubDivision,\n isSilent,\n isTest,\n coreIdentifier,\n purpose,\n ...request\n}: ExportedPrivacyRequest): CsvRow {\n return {\n 'Request ID': id,\n 'Created At': createdAt,\n 'Successfully Completed At': successfullyCompletedAt || '',\n Email: email,\n 'Core Identifier': coreIdentifier,\n 'Request Type': type,\n 'Data Subject Type': subjectType,\n Status: status,\n Country: country,\n 'Country Sub Division': countrySubDivision,\n Details: details,\n Origin: origin,\n 'Silent Mode': isSilent,\n 'Is Test Request': isTest,\n Language: locale,\n 'Purpose Trigger Name': purpose?.title || purpose?.name || '',\n 'Purpose Trigger Value': purpose?.consent?.toString() || '',\n ...(purpose?.enrichedPreferences || []).reduce((acc: Record<string, string | boolean>, p) => {\n const title = p.preferenceTopic?.title.defaultMessage || p.name;\n return title\n ? {\n ...acc,\n [title]: p.selectValues\n ? 
p.selectValues.map((x) => x.name).join(';')\n : p.selectValue?.name || p.booleanValue,\n }\n : acc;\n }, {}),\n ...request,\n ...Object.entries(groupBy(attributeValues, 'attributeKey.name')).reduce(\n (acc, [name, values]) =>\n Object.assign(acc, {\n [name]: values.map(({ name: n }) => n).join(','),\n }),\n {},\n ),\n ...Object.entries(groupBy(requestIdentifiers, 'name')).reduce(\n (acc, [name, values]) =>\n Object.assign(acc, {\n [name]: values.map(({ value }) => value).join(','),\n }),\n {},\n ),\n };\n}\n","import { RequestAction, RequestStatus } from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport { uniq } from 'lodash-es';\n\nimport { DEFAULT_TRANSCEND_API } from '../../constants.js';\nimport { logger } from '../../logger.js';\nimport { map } from '../bluebird.js';\nimport {\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchAllRequestIdentifiers,\n fetchAllRequests,\n fetchRequestsTotalCount,\n validateSombraVersion,\n} from '../graphql/index.js';\nimport { initCsvFile, appendCsvRowsOrdered, parseFilePath } from '../helpers/index.js';\nimport { formatRequestForCsv, ExportedPrivacyRequest } from './formatRequestForCsv.js';\n\ninterface ChunkedDateRange {\n /** Chunk start */\n createdAtAfter: Date;\n /** Chunk end */\n createdAtBefore: Date;\n}\n\n/**\n * Split a date range into N evenly-spaced chunks.\n *\n * @param after - Start of the date range\n * @param before - End of the date range\n * @param chunks - Number of chunks to split into\n * @returns Array of date range bounds\n */\nfunction splitDateRange(after: Date, before: Date, chunks: number): ChunkedDateRange[] {\n const startMs = after.getTime();\n const endMs = before.getTime();\n const chunkSize = (endMs - startMs) / chunks;\n return Array.from({ length: chunks }, (_, i) => ({\n createdAtAfter: new Date(startMs + chunkSize * i),\n createdAtBefore: new Date(i === chunks - 1 ? 
endMs : startMs + chunkSize * (i + 1)),\n }));\n}\n\n/**\n * Stream privacy requests directly to CSV files, one file per date-range chunk.\n * Memory stays bounded to a single page of results at a time.\n * Supports both with and without request identifier enrichment.\n *\n * @param options - Options\n * @returns The list of written file paths and total row count\n */\nexport async function streamPrivacyRequestsToCsv({\n auth,\n sombraAuth,\n actions = [],\n statuses = [],\n identifierSearch,\n concurrency = 1,\n pageLimit = 100,\n transcendUrl = DEFAULT_TRANSCEND_API,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n isTest,\n skipRequestIdentifiers = false,\n file,\n}: {\n /** Transcend API key authentication */\n auth: string;\n /** Search for a specific identifier */\n identifierSearch?: string;\n /** Sombra API key authentication */\n sombraAuth?: string;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Statuses to filter on */\n statuses?: RequestStatus[];\n /** The request action to fetch */\n actions?: RequestAction[];\n /** Number of parallel date-range chunks */\n concurrency?: number;\n /** Concurrency for fetching identifiers per page */\n pageLimit?: number;\n /** Filter for requests created before this date */\n createdAtBefore?: Date;\n /** Filter for requests created after this date */\n createdAtAfter?: Date;\n /** Filter for requests updated before this date */\n updatedAtBefore?: Date;\n /** Filter for requests updated after this date */\n updatedAtAfter?: Date;\n /** Return test requests */\n isTest?: boolean;\n /** Skip fetching request identifiers */\n skipRequestIdentifiers?: boolean;\n /** Output CSV file path */\n file: string;\n}): Promise<{\n /** Paths to written CSV files */\n filePaths: string[];\n /** Total rows written */\n totalCount: number;\n}> {\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const sombra = skipRequestIdentifiers\n ? undefined\n : await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n\n // Log date range\n let dateRange = '';\n if (createdAtBefore) {\n dateRange += ` before ${createdAtBefore.toISOString()}`;\n }\n if (createdAtAfter) {\n dateRange += `${dateRange ? ', and' : ''} after ${createdAtAfter.toISOString()}`;\n }\n logger.info(\n colors.magenta(\n `${\n actions.length > 0\n ? `Pulling requests of type \"${actions.join('\" , \"')}\"`\n : 'Pulling all requests'\n }${dateRange}`,\n ),\n );\n\n // Split into parallel date-range chunks when possible\n const useChunks = concurrency > 1 && createdAtAfter && createdAtBefore;\n const chunks = useChunks\n ? splitDateRange(createdAtAfter, createdAtBefore, concurrency)\n : [{ createdAtAfter, createdAtBefore }];\n\n if (useChunks) {\n logger.info(colors.magenta(`Splitting date range into ${concurrency} parallel chunks`));\n }\n\n // Fetch total count once for the shared progress bar\n const filterBy = {\n type: actions.length > 0 ? actions : undefined,\n status: statuses.length > 0 ? statuses : undefined,\n isTest,\n createdAtBefore: createdAtBefore ? createdAtBefore.toISOString() : undefined,\n createdAtAfter: createdAtAfter ? createdAtAfter.toISOString() : undefined,\n updatedAtBefore: updatedAtBefore ? updatedAtBefore.toISOString() : undefined,\n updatedAtAfter: updatedAtAfter ? 
updatedAtAfter.toISOString() : undefined,\n };\n\n const t0 = Date.now();\n\n // Validate Sombra version once before bulk-fetching identifiers\n if (!skipRequestIdentifiers) {\n await validateSombraVersion(client);\n }\n\n const totalExpected = await fetchRequestsTotalCount(client, filterBy);\n logger.info(colors.magenta(`Fetching ${totalExpected} requests`));\n\n const progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);\n progressBar.start(totalExpected, 0);\n\n let globalFetched = 0;\n\n const { baseName, extension } = parseFilePath(file);\n\n const filePaths = chunks.map((_, i) =>\n chunks.length === 1 ? file : `${baseName}-${i}${extension}`,\n );\n\n interface FailedChunk {\n /** Chunk index */\n index: number;\n /** Start of failed date range */\n createdAtAfter?: Date;\n /** End of failed date range */\n createdAtBefore?: Date;\n /** Error message */\n error: string;\n }\n\n const failedChunks: FailedChunk[] = [];\n\n const chunkCounts = await map(\n chunks,\n async (chunk, i) => {\n const chunkFile = filePaths[i];\n let headers: string[] | undefined;\n let rowCount = 0;\n\n try {\n await fetchAllRequests(client, {\n actions,\n text: identifierSearch,\n statuses,\n createdAtBefore: chunk.createdAtBefore,\n createdAtAfter: chunk.createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n isTest,\n onPage: async (nodes) => {\n if (nodes.length === 0) return;\n\n // Optionally enrich each request with its identifiers\n const enriched: ExportedPrivacyRequest[] = skipRequestIdentifiers\n ? nodes.map((n) => ({ ...n, requestIdentifiers: [] }))\n : await map(\n nodes,\n async (n) => ({\n ...n,\n requestIdentifiers: await fetchAllRequestIdentifiers(client, sombra!, {\n requestId: n.id,\n skipSombraCheck: true,\n }),\n }),\n { concurrency: pageLimit },\n );\n\n const rows: Record<string, string | null | number | boolean>[] =\n enriched.map(formatRequestForCsv);\n\n if (!headers) {\n headers = uniq(rows.map((r: Record<string, unknown>) => Object.keys(r)).flat());\n initCsvFile(chunkFile, headers);\n }\n\n appendCsvRowsOrdered(chunkFile, rows, headers);\n rowCount += rows.length;\n globalFetched += rows.length;\n progressBar.update(globalFetched);\n },\n });\n } catch (err) {\n const message = err instanceof Error ? err.message : String(err);\n logger.error(\n colors.red(\n `Chunk ${i} failed (${\n chunk.createdAtAfter?.toISOString() ?? 'start'\n } → ${chunk.createdAtBefore?.toISOString() ?? 'end'}): ${message}`,\n ),\n );\n failedChunks.push({\n index: i,\n createdAtAfter: chunk.createdAtAfter,\n createdAtBefore: chunk.createdAtBefore,\n error: message,\n });\n }\n\n if (!headers) {\n initCsvFile(chunkFile, []);\n }\n\n return rowCount;\n },\n { concurrency: useChunks ? concurrency : 1 },\n );\n\n progressBar.stop();\n const totalCount = chunkCounts.reduce((a, b) => a + b, 0);\n const elapsed = (Date.now() - t0) / 1000;\n\n if (failedChunks.length > 0) {\n logger.error(\n colors.red(\n `\\n${failedChunks.length} chunk(s) failed. ` +\n 'Re-run with these date ranges to fill the gaps:',\n ),\n );\n for (const fc of failedChunks) {\n logger.error(\n colors.red(\n ` Chunk ${fc.index}: --createdAtAfter=${\n fc.createdAtAfter?.toISOString() ?? ''\n } --createdAtBefore=${fc.createdAtBefore?.toISOString() ?? 
''}`,\n ),\n );\n }\n }\n\n logger.info(\n colors.green(`Streamed ${totalCount} requests to ${filePaths.length} file(s) in ${elapsed}s`),\n );\n\n return { filePaths, totalCount };\n}\n"],"mappings":"ygBAkBA,SAAgB,EAAoB,CAClC,kBACA,qBACA,KACA,QACA,OACA,SACA,cACA,UACA,YACA,0BACA,UACA,SACA,SACA,qBACA,WACA,SACA,iBACA,UACA,GAAG,GAC8B,CACjC,MAAO,CACL,aAAc,EACd,aAAc,EACd,4BAA6B,GAA2B,GACxD,MAAO,EACP,kBAAmB,EACnB,eAAgB,EAChB,oBAAqB,EACrB,OAAQ,EACR,QAAS,EACT,uBAAwB,EACxB,QAAS,EACT,OAAQ,EACR,cAAe,EACf,kBAAmB,EACnB,SAAU,EACV,uBAAwB,GAAS,OAAS,GAAS,MAAQ,GAC3D,wBAAyB,GAAS,SAAS,UAAU,EAAI,GACzD,IAAI,GAAS,qBAAuB,EAAE,EAAE,QAAQ,EAAuC,IAAM,CAC3F,IAAM,EAAQ,EAAE,iBAAiB,MAAM,gBAAkB,EAAE,KAC3D,OAAO,EACH,CACE,GAAG,GACF,GAAQ,EAAE,aACP,EAAE,aAAa,IAAK,GAAM,EAAE,KAAK,CAAC,KAAK,IAAI,CAC3C,EAAE,aAAa,MAAQ,EAAE,aAC9B,CACD,GACH,EAAE,CAAC,CACN,GAAG,EACH,GAAG,OAAO,QAAQ,EAAQ,EAAiB,oBAAoB,CAAC,CAAC,QAC9D,EAAK,CAAC,EAAM,KACX,OAAO,OAAO,EAAK,EAChB,GAAO,EAAO,KAAK,CAAE,KAAM,KAAQ,EAAE,CAAC,KAAK,IAAI,CACjD,CAAC,CACJ,EAAE,CACH,CACD,GAAG,OAAO,QAAQ,EAAQ,EAAoB,OAAO,CAAC,CAAC,QACpD,EAAK,CAAC,EAAM,KACX,OAAO,OAAO,EAAK,EAChB,GAAO,EAAO,KAAK,CAAE,WAAY,EAAM,CAAC,KAAK,IAAI,CACnD,CAAC,CACJ,EAAE,CACH,CACF,CCjDH,SAAS,EAAe,EAAa,EAAc,EAAoC,CACrF,IAAM,EAAU,EAAM,SAAS,CACzB,EAAQ,EAAO,SAAS,CACxB,GAAa,EAAQ,GAAW,EACtC,OAAO,MAAM,KAAK,CAAE,OAAQ,EAAQ,EAAG,EAAG,KAAO,CAC/C,eAAgB,IAAI,KAAK,EAAU,EAAY,EAAE,CACjD,gBAAiB,IAAI,KAAK,IAAM,EAAS,EAAI,EAAQ,EAAU,GAAa,EAAI,GAAG,CACpF,EAAE,CAWL,eAAsB,EAA2B,CAC/C,OACA,aACA,UAAU,EAAE,CACZ,WAAW,EAAE,CACb,mBACA,cAAc,EACd,YAAY,IACZ,eAAe,EACf,kBACA,iBACA,kBACA,iBACA,SACA,yBAAyB,GACzB,QAqCC,CACD,IAAM,EAAS,EAA4B,EAAc,EAAK,CACxD,EAAS,EACX,IAAA,GACA,MAAM,EAAwB,EAAc,EAAM,EAAW,CAG7D,EAAY,GACZ,IACF,GAAa,WAAW,EAAgB,aAAa,IAEnD,IACF,GAAa,GAAG,EAAY,QAAU,GAAG,SAAS,EAAe,aAAa,IAEhF,EAAO,KACL,EAAO,QACL,GACE,EAAQ,OAAS,EACb,6BAA6B,EAAQ,KAAK,QAAQ,CAAC,GACnD,yBACH,IACJ,CACF,CAGD,IAAM,EAAY,EAAc,GAAK,GAAkB,EACjD,EAAS,EACX,EAAe,EAAgB,EAAiB,EAAY,CAC5D,CAAC,CAAE,iBAAgB,kBAAiB,CAAC,CAErC,GACF,EAAO,KAAK,EAAO,QAAQ,6BAA6B,EAAY,kBAAkB,CAAC,CAIzF,IAAM,EAAW,CACf,KAAM,EAAQ,OAAS,EAAI,EAAU,IAAA,GACrC,OAAQ,EAAS,OAAS,EAAI,EAAW,IAAA,GACzC,SACA,gBAAiB,EAAkB,EAAgB,aAAa,CAAG,IAAA,GACnE,eAAgB,EAAiB,EAAe,aAAa,CAAG,IAAA,GAChE,gBAAiB,EAAkB,EAAgB,aAAa,CAAG,IAAA,GACnE,eAAgB,EAAiB,EAAe,aAAa,CAAG,IAAA,GACjE,CAEK,EAAK,KAAK,KAAK,CAGhB,GACH,MAAM,EAAsB,EAAO,CAGrC,IAAM,EAAgB,MAAM,EAAwB,EAAQ,EAAS,CACrE,EAAO,KAAK,EAAO,QAAQ,YAAY,EAAc,WAAW,CAAC,CAEjE,IAAM,EAAc,IAAI,EAAY,UAAU,EAAE,CAAE,EAAY,QAAQ,eAAe,CACrF,EAAY,MAAM,EAAe,EAAE,CAEnC,IAAI,EAAgB,EAEd,CAAE,WAAU,aAAc,EAAc,EAAK,CAE7C,EAAY,EAAO,KAAK,EAAG,IAC/B,EAAO,SAAW,EAAI,EAAO,GAAG,EAAS,GAAG,IAAI,IACjD,CAaK,EAA8B,EAAE,CAEhC,EAAc,MAAM,EACxB,EACA,MAAO,EAAO,IAAM,CAClB,IAAM,EAAY,EAAU,GACxB,EACA,EAAW,EAEf,GAAI,CACF,MAAM,EAAiB,EAAQ,CAC7B,UACA,KAAM,EACN,WACA,gBAAiB,EAAM,gBACvB,eAAgB,EAAM,eACtB,kBACA,iBACA,SACA,OAAQ,KAAO,IAAU,CACvB,GAAI,EAAM,SAAW,EAAG,OAiBxB,IAAM,GAdqC,EACvC,EAAM,IAAK,IAAO,CAAE,GAAG,EAAG,mBAAoB,EAAE,CAAE,EAAE,CACpD,MAAM,EACJ,EACA,KAAO,KAAO,CACZ,GAAG,EACH,mBAAoB,MAAM,EAA2B,EAAQ,EAAS,CACpE,UAAW,EAAE,GACb,gBAAiB,GAClB,CAAC,CACH,EACD,CAAE,YAAa,EAAW,CAC3B,EAGM,IAAI,EAAoB,CAE9B,IACH,EAAU,EAAK,EAAK,IAAK,GAA+B,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CAC/E,EAAY,EAAW,EAAQ,EAGjC,EAAqB,EAAW,EAAM,EAAQ,CAC9C,GAAY,EAAK,OACjB,GAAiB,EAAK,OACtB,EAAY,OAAO,EAAc,EAEpC,CAAC,OACK,EAAK,CACZ,IAAM,EAAU,aAAe,MAAQ,EAAI,QAAU,OAAO,EAAI,CAChE,EAAO,MACL,EAAO,IACL,SAAS,EAAE,WACT,EAAM,gBAAgB,aAAa,EAAI,QACxC,KAAK,EAAM,iBAAiB,aAAa,EAAI,MAAM,KAAK,IAC1D,CACF,CACD,EAAa,KAAK,CAChB,MAAO,EACP,eAAgB,EAAM,eACtB,gBAAiB,EAAM,gBACvB,MAAO,EACR,CAAC,CAOJ,OAJK,GACH,EAAY,EAAW,EAAE,CAAC,
CAGrB,GAET,CAAE,YAAa,EAAY,EAAc,EAAG,CAC7C,CAED,EAAY,MAAM,CAClB,IAAM,EAAa,EAAY,QAAQ,EAAG,IAAM,EAAI,EAAG,EAAE,CACnD,GAAW,KAAK,KAAK,CAAG,GAAM,IAEpC,GAAI,EAAa,OAAS,EAAG,CAC3B,EAAO,MACL,EAAO,IACL,KAAK,EAAa,OAAO,mEAE1B,CACF,CACD,IAAK,IAAM,KAAM,EACf,EAAO,MACL,EAAO,IACL,WAAW,EAAG,MAAM,qBAClB,EAAG,gBAAgB,aAAa,EAAI,GACrC,qBAAqB,EAAG,iBAAiB,aAAa,EAAI,KAC5D,CACF,CAQL,OAJA,EAAO,KACL,EAAO,MAAM,YAAY,EAAW,eAAe,EAAU,OAAO,cAAc,EAAQ,GAAG,CAC9F,CAEM,CAAE,YAAW,aAAY"}
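`splitDateRange` above divides a created-at window into evenly sized chunks, pinning the last chunk's end to the exact range end so floating-point drift cannot drop trailing requests. A self-contained copy with a worked example (the example dates are illustrative):

```ts
interface ChunkedDateRange {
  /** Chunk start */
  createdAtAfter: Date;
  /** Chunk end */
  createdAtBefore: Date;
}

// Copied from the source embedded above.
function splitDateRange(after: Date, before: Date, chunks: number): ChunkedDateRange[] {
  const startMs = after.getTime();
  const endMs = before.getTime();
  const chunkSize = (endMs - startMs) / chunks;
  return Array.from({ length: chunks }, (_, i) => ({
    createdAtAfter: new Date(startMs + chunkSize * i),
    // The final chunk is pinned to the exact end of the range
    createdAtBefore: new Date(i === chunks - 1 ? endMs : startMs + chunkSize * (i + 1)),
  }));
}

// January 2024 (31 days) in 4 chunks of 7 days 18 hours each; boundaries fall
// at Jan 8 18:00Z, Jan 16 12:00Z, Jan 24 06:00Z, and Feb 1 00:00Z.
splitDateRange(new Date('2024-01-01T00:00:00Z'), new Date('2024-02-01T00:00:00Z'), 4);
```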
@@ -0,0 +1,232 @@
+ import{t as e}from"./logger-B-LXIf3U.mjs";import{n as t,t as n}from"./bluebird-CUitXgsY.mjs";import{t as r}from"./makeGraphQLRequest-Cq26A_Lq.mjs";import{chunk as i,keyBy as a,uniq as o,uniqBy as s}from"lodash-es";import c from"colors";import{gql as l}from"graphql-request";const u=l`
+ query TranscendCliRepositories($first: Int!, $offset: Int!, $input: RepositoryFiltersInput) {
+ repositories(
+ first: $first
+ offset: $offset
+ filterBy: $input
+ orderBy: [{ field: createdAt, direction: ASC }, { field: name, direction: ASC }]
+ ) {
+ nodes {
+ id
+ name
+ description
+ url
+ teams {
+ id
+ name
+ }
+ owners {
+ id
+ email
+ }
+ }
+ }
+ }
+ `,d=l`
+ mutation TranscendCliUpdateRepositories($input: UpdateRepositoriesInput!) {
+ updateRepositories(input: $input) {
+ clientMutationId
+ repositories {
+ id
+ name
+ url
+ teams {
+ id
+ name
+ }
+ owners {
+ id
+ email
+ }
+ }
+ }
+ }
+ `,f=l`
+ mutation TranscendCliCreateRepository($input: CreateRepositoryInput!) {
+ createRepository(input: $input) {
+ clientMutationId
+ repository {
+ id
+ name
+ url
+ teams {
+ id
+ name
+ }
+ owners {
+ id
+ email
+ }
+ }
+ }
+ }
+ `,p=l`
+ query TranscendCliSoftwareDevelopmentKits(
+ $first: Int!
+ $offset: Int!
+ $input: SoftwareDevelopmentKitFiltersInput
+ ) {
+ softwareDevelopmentKits(
+ first: $first
+ offset: $offset
+ filterBy: $input
+ orderBy: [{ field: createdAt, direction: ASC }, { field: name, direction: ASC }]
+ ) {
+ nodes {
+ id
+ name
+ description
+ codePackageType
+ documentationLinks
+ repositoryUrl
+ teams {
+ id
+ name
+ }
+ owners {
+ id
+ email
+ }
+ }
+ }
+ }
+ `,m=l`
+ mutation TranscendCliUpdateSoftwareDevelopmentKits($input: UpdateSoftwareDevelopmentKitsInput!) {
+ updateSoftwareDevelopmentKits(input: $input) {
+ clientMutationId
+ softwareDevelopmentKits {
+ id
+ name
+ description
+ codePackageType
+ documentationLinks
+ repositoryUrl
+ teams {
+ id
+ name
+ }
+ owners {
+ id
+ email
+ }
+ }
+ }
+ }
+ `,h=l`
+ mutation TranscendCliCreateSoftwareDevelopmentKit($input: CreateSoftwareDevelopmentKitInput!) {
+ createSoftwareDevelopmentKit(input: $input) {
+ clientMutationId
+ softwareDevelopmentKit {
+ id
+ name
+ description
+ codePackageType
+ documentationLinks
+ repositoryUrl
+ teams {
+ id
+ name
+ }
+ owners {
+ id
+ email
+ }
+ }
+ }
+ }
+ `,g=l`
+ query TranscendCliCodePackages($first: Int!, $offset: Int!, $input: CodePackageFiltersInput) {
+ codePackages(
+ first: $first
+ offset: $offset
+ filterBy: $input
+ orderBy: [{ field: createdAt, direction: ASC }, { field: name, direction: ASC }]
+ ) {
+ nodes {
+ id
+ name
+ description
+ type
+ relativePath
+ teams {
+ id
+ name
+ }
+ owners {
+ id
+ email
+ }
+ repository {
+ id
+ name
+ }
+ dataSilo {
+ id
+ title
+ type
+ }
+ }
+ }
+ }
+ `,_=l`
+ mutation TranscendCliUpdateCodePackages($input: UpdateCodePackagesInput!) {
+ updateCodePackages(input: $input) {
+ clientMutationId
+ codePackages {
+ id
+ name
+ description
+ type
+ relativePath
+ teams {
+ id
+ name
+ }
+ owners {
+ id
+ email
+ }
+ repository {
+ id
+ name
+ }
+ dataSilo {
+ id
+ title
+ type
+ }
+ }
+ }
+ }
+ `,v=l`
+ mutation TranscendCliCreateCodePackage($input: CreateCodePackageInput!) {
+ createCodePackage(input: $input) {
+ clientMutationId
+ codePackage {
+ id
+ name
+ description
+ type
+ relativePath
+ teams {
+ id
+ name
+ }
+ owners {
+ id
+ email
+ }
+ repository {
+ id
+ name
+ }
+ dataSilo {
+ id
+ title
+ type
+ }
+ }
+ }
+ }
+ `;async function y(e){let t=[],n=0,i=!1;do{let{codePackages:{nodes:a}}=await r(e,g,{first:20,offset:n});t.push(...a),n+=20,i=a.length===20}while(i);return t.sort((e,t)=>e.name.localeCompare(t.name))}async function b(e){let t=[],n=0,i=!1;do{let{repositories:{nodes:a}}=await r(e,u,{first:20,offset:n});t.push(...a),n+=20,i=a.length===20}while(i);return t.sort((e,t)=>e.name.localeCompare(t.name))}async function x(t,n){let{createRepository:{repository:i}}=await r(t,f,{input:n});return e.info(c.green(`Successfully created repository "${n.name}"!`)),i}async function S(t,n){let{updateRepositories:{repositories:i}}=await r(t,d,{input:{repositories:n}});return e.info(c.green(`Successfully updated ${n.length} repositories!`)),i}async function C(r,o,s=20){let l=!1,u=[],d=a(await b(r),`name`),f=o.map(e=>[e,d[e.name]?.id]),p=f.filter(([,e])=>!e).map(([e])=>e);try{e.info(c.magenta(`Creating "${p.length}" new repositories...`)),await n(p,async e=>{let t=await x(r,e);u.push(t)},{concurrency:s}),e.info(c.green(`Successfully synced ${p.length} repositories!`))}catch(t){l=!0,e.info(c.red(`Failed to create repositories! - ${t.message}`))}let m=f.filter(e=>!!e[1]),h=i(m,100);return e.info(c.magenta(`Updating "${m.length}" repositories...`)),await t(h,async t=>{try{let n=await S(r,t.map(([e,t])=>({...e,id:t})));u.push(...n),e.info(c.green(`Successfully updated "${m.length}" repositories!`))}catch(t){l=!0,e.info(c.red(`Failed to update repositories! - ${t.message}`))}e.info(c.green(`Synced "${o.length}" repositories!`))}),{repositories:u,success:!l}}async function w(e){let t=[],n=0,i=!1;do{let{softwareDevelopmentKits:{nodes:a}}=await r(e,p,{first:20,offset:n});t.push(...a),n+=20,i=a.length===20}while(i);return t.sort((e,t)=>e.name.localeCompare(t.name))}async function T(t,n){let{createSoftwareDevelopmentKit:{softwareDevelopmentKit:i}}=await r(t,h,{input:n});return e.info(c.green(`Successfully created software development kit "${n.name}"!`)),i}async function E(t,n){let{updateSoftwareDevelopmentKits:{softwareDevelopmentKits:i}}=await r(t,m,{input:{softwareDevelopmentKits:n}});return e.info(c.green(`Successfully updated ${n.length} software development kits!`)),i}async function D(r,o,s=20){let l=!1,u=[];e.info(c.magenta(`Syncing software development kits...`));let d=a(await w(r),({name:e,codePackageType:t})=>JSON.stringify({name:e,codePackageType:t})),f=o.map(e=>[e,d[JSON.stringify({name:e.name,codePackageType:e.codePackageType})]?.id]),p=f.filter(([,e])=>!e).map(([e])=>e);try{e.info(c.magenta(`Creating "${p.length}" new software development kits...`)),await n(p,async e=>{let t=await T(r,e);u.push(t)},{concurrency:s}),e.info(c.green(`Successfully synced ${p.length} software development kits!`))}catch(t){l=!0,e.info(c.red(`Failed to create software development kits! - ${t.message}`))}let m=f.filter(e=>!!e[1]),h=i(m,100);return e.info(c.magenta(`Updating "${m.length}" software development kits...`)),await t(h,async t=>{try{let n=await E(r,t.map(([{codePackageType:e,...t},n])=>({...t,id:n})));u.push(...n),e.info(c.green(`Successfully updated "${m.length}" software development kits!`))}catch(t){l=!0,e.info(c.red(`Failed to update software development kits! 
- ${t.message}`))}e.info(c.green(`Synced "${o.length}" software development kits!`))}),{softwareDevelopmentKits:u,success:!l}}const O=`%%%%`;async function k(t,n){let{createCodePackage:{codePackage:i}}=await r(t,v,{input:n});return e.info(c.green(`Successfully created code package "${n.name}"!`)),i}async function A(t,n){let{updateCodePackages:{codePackages:i}}=await r(t,_,{input:{codePackages:n}});return e.info(c.green(`Successfully updated ${n.length} code packages!`)),i}async function j(r,l,u=20){let d=!1,[f,{softwareDevelopmentKits:p}]=await Promise.all([y(r),D(r,s(l.map(({type:e,softwareDevelopmentKits:t=[]})=>t.map(({name:t})=>({name:t,codePackageType:e}))).flat(),({name:e,codePackageType:t})=>`${e}${O}${t}`),u),C(r,s(l,`repositoryName`).map(({repositoryName:e})=>({name:e,url:`https://github.com/${e}`})))]),m=a(p,({name:e,codePackageType:t})=>`${e}${O}${t}`),h=a(f,({name:e,type:t})=>`${e}${O}${t}`),g=l.map(e=>[e,h[`${e.name}${O}${e.type}`]?.id]),_=g.filter(([,e])=>!e).map(([e])=>e);try{e.info(c.magenta(`Creating "${_.length}" new code packages...`)),await n(_,async({softwareDevelopmentKits:e,...t})=>{await k(r,{...t,...e?{softwareDevelopmentKitIds:o(e.map(({name:e})=>{let n=m[`${e}${O}${t.type}`];if(!n)throw Error(`Failed to find SDK with name: "${e}"`);return n.id}))}:{}})},{concurrency:u}),e.info(c.green(`Successfully synced ${_.length} code packages!`))}catch(t){d=!0,e.info(c.red(`Failed to create code packages! - ${t.message}`))}let v=g.filter(e=>!!e[1]);return e.info(c.magenta(`Updating "${v.length}" code packages...`)),await t(i(v,100),async t=>{try{await A(r,t.map(([{softwareDevelopmentKits:e,repositoryName:t,...n},r])=>({...n,...e?{softwareDevelopmentKitIds:o(e.map(({name:e})=>{let t=m[`${e}${O}${n.type}`];if(!t)throw Error(`Failed to find SDK with name: "${e}"`);return t.id}))}:{},id:r}))),e.info(c.green(`Successfully updated "${t.length}" code packages!`))}catch(t){d=!0,e.info(c.red(`Failed to update code packages! - ${t.message}`))}}),e.info(c.green(`Synced "${l.length}" code packages!`)),!d}export{u as _,D as a,C as c,v as d,_ as f,f as g,m as h,T as i,S as l,p as m,j as n,E as o,h as p,A as r,x as s,k as t,g as u,d as v};
+ //# sourceMappingURL=syncCodePackages-F-97FNjo.mjs.map
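The sync functions in `syncCodePackages` above diff desired state against existing records by indexing both sides with lodash `keyBy` on a compound key (the bundle joins `name` and `type` with the `%%%%` separator constant): inputs missing from the index become creates, the rest become updates carrying the matched ID. That partition step in isolation (types and names here are illustrative assumptions):

```ts
import { keyBy } from 'lodash-es';

// Sketch of the create-vs-update partition used above; the separator matches
// the bundle's constant, the record shapes are assumptions.
const SEPARATOR = '%%%%';

interface Existing {
  id: string;
  name: string;
  type: string;
}
interface Input {
  name: string;
  type: string;
}

function partitionByCompoundKey(
  existing: Existing[],
  inputs: Input[],
): { toCreate: Input[]; toUpdate: [Input, string][] } {
  // Index existing records by "name%%%%type"
  const byKey = keyBy(existing, ({ name, type }) => `${name}${SEPARATOR}${type}`);
  // Pair each input with the matching existing ID, if any
  const paired: [Input, string | undefined][] = inputs.map((input) => [
    input,
    byKey[`${input.name}${SEPARATOR}${input.type}`]?.id,
  ]);
  return {
    toCreate: paired.filter(([, id]) => !id).map(([input]) => input),
    toUpdate: paired.filter((pair): pair is [Input, string] => !!pair[1]),
  };
}
```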