@transcend-io/cli 10.1.0 → 10.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (262)
  1. package/dist/{app-Cx8-4u8K.mjs → app-C1m4rExX.mjs} +20 -20
  2. package/dist/{app-Cx8-4u8K.mjs.map → app-C1m4rExX.mjs.map} +1 -1
  3. package/dist/{approvePrivacyRequests-Bjq5cPSI.mjs → approvePrivacyRequests-BIHcACAj.mjs} +2 -2
  4. package/dist/{approvePrivacyRequests-Bjq5cPSI.mjs.map → approvePrivacyRequests-BIHcACAj.mjs.map} +1 -1
  5. package/dist/bin/bash-complete.mjs +1 -1
  6. package/dist/bin/cli.mjs +1 -1
  7. package/dist/bin/deprecated-command.mjs +1 -1
  8. package/dist/{buildXdiSyncEndpoint-DWs9ImOw.mjs → buildXdiSyncEndpoint-CBbcir-p.mjs} +2 -2
  9. package/dist/{buildXdiSyncEndpoint-DWs9ImOw.mjs.map → buildXdiSyncEndpoint-CBbcir-p.mjs.map} +1 -1
  10. package/dist/bulkRestartRequests-ByH7TjH2.mjs +2 -0
  11. package/dist/bulkRestartRequests-ByH7TjH2.mjs.map +1 -0
  12. package/dist/bulkRetryEnrichers-DuYXD-64.mjs +2 -0
  13. package/dist/bulkRetryEnrichers-DuYXD-64.mjs.map +1 -0
  14. package/dist/cancelPrivacyRequests-DMgQOffA.mjs +2 -0
  15. package/dist/cancelPrivacyRequests-DMgQOffA.mjs.map +1 -0
  16. package/dist/{collectCsvFilesOrExit-D-csvd13.mjs → collectCsvFilesOrExit-CbtyKAzu.mjs} +1 -1
  17. package/dist/{collectCsvFilesOrExit-D-csvd13.mjs.map → collectCsvFilesOrExit-CbtyKAzu.mjs.map} +1 -1
  18. package/dist/{collectParquetFilesOrExit-C8qT5_57.mjs → collectParquetFilesOrExit-BJiAyaQ5.mjs} +1 -1
  19. package/dist/{collectParquetFilesOrExit-C8qT5_57.mjs.map → collectParquetFilesOrExit-BJiAyaQ5.mjs.map} +1 -1
  20. package/dist/{command-rzZKmlky.mjs → command-DnoHX-eW.mjs} +2 -2
  21. package/dist/{command-rzZKmlky.mjs.map → command-DnoHX-eW.mjs.map} +1 -1
  22. package/dist/commands/admin/parquet-to-csv/worker.mjs +1 -1
  23. package/dist/{consentManagersToBusinessEntities-D1bdBgnA.mjs → consentManagersToBusinessEntities-BdKDganK.mjs} +1 -1
  24. package/dist/{consentManagersToBusinessEntities-D1bdBgnA.mjs.map → consentManagersToBusinessEntities-BdKDganK.mjs.map} +1 -1
  25. package/dist/{constants-mjLYTIJm.mjs → constants-BmwXDQu9.mjs} +2 -2
  26. package/dist/{constants-mjLYTIJm.mjs.map → constants-BmwXDQu9.mjs.map} +1 -1
  27. package/dist/{constants-DYbzl8QH.mjs → constants-ClkQQhJs.mjs} +1 -1
  28. package/dist/{constants-DYbzl8QH.mjs.map → constants-ClkQQhJs.mjs.map} +1 -1
  29. package/dist/{constants-XOsAW1__.mjs → constants-muOBBQA_.mjs} +2 -2
  30. package/dist/{constants-XOsAW1__.mjs.map → constants-muOBBQA_.mjs.map} +1 -1
  31. package/dist/{createExtraKeyHandler-Jp5XpTJi.mjs → createExtraKeyHandler-srtG2U7q.mjs} +2 -2
  32. package/dist/{createExtraKeyHandler-Jp5XpTJi.mjs.map → createExtraKeyHandler-srtG2U7q.mjs.map} +1 -1
  33. package/dist/{dataFlowsToDataSilos-DUj1NhOt.mjs → dataFlowsToDataSilos-Ca2DtTsd.mjs} +1 -1
  34. package/dist/{dataFlowsToDataSilos-DUj1NhOt.mjs.map → dataFlowsToDataSilos-Ca2DtTsd.mjs.map} +1 -1
  35. package/dist/{done-input-validation-C5rgR0Wr.mjs → done-input-validation-BcNBxhEs.mjs} +1 -1
  36. package/dist/{done-input-validation-C5rgR0Wr.mjs.map → done-input-validation-BcNBxhEs.mjs.map} +1 -1
  37. package/dist/{downloadPrivacyRequestFiles-GUbd_PRc.mjs → downloadPrivacyRequestFiles-kKhGnFmx.mjs} +2 -2
  38. package/dist/{downloadPrivacyRequestFiles-GUbd_PRc.mjs.map → downloadPrivacyRequestFiles-kKhGnFmx.mjs.map} +1 -1
  39. package/dist/{extractClientError-X9wJVqGq.mjs → extractClientError-i-Tw_az7.mjs} +1 -1
  40. package/dist/{extractClientError-X9wJVqGq.mjs.map → extractClientError-i-Tw_az7.mjs.map} +1 -1
  41. package/dist/{fetchAllRequests-xGgt_STo.mjs → fetchAllRequests-CHHdyb4Q.mjs} +2 -2
  42. package/dist/{fetchAllRequests-xGgt_STo.mjs.map → fetchAllRequests-CHHdyb4Q.mjs.map} +1 -1
  43. package/dist/generateCrossAccountApiKeys-C7yH3Rbi.mjs +2 -0
  44. package/dist/generateCrossAccountApiKeys-C7yH3Rbi.mjs.map +1 -0
  45. package/dist/{impl-B-PzeHxN.mjs → impl-3VLH9aat.mjs} +2 -2
  46. package/dist/{impl-B-PzeHxN.mjs.map → impl-3VLH9aat.mjs.map} +1 -1
  47. package/dist/{impl-iGMjSniP.mjs → impl-3sDUDXru.mjs} +2 -2
  48. package/dist/{impl-iGMjSniP.mjs.map → impl-3sDUDXru.mjs.map} +1 -1
  49. package/dist/{impl-DfVep2mE.mjs → impl-6mCOBlSD.mjs} +2 -2
  50. package/dist/{impl-DfVep2mE.mjs.map → impl-6mCOBlSD.mjs.map} +1 -1
  51. package/dist/impl-84ylH4aO.mjs +2 -0
  52. package/dist/{impl-BVHfSIVG.mjs.map → impl-84ylH4aO.mjs.map} +1 -1
  53. package/dist/impl-B62XN4tV.mjs +2 -0
  54. package/dist/impl-B62XN4tV.mjs.map +1 -0
  55. package/dist/{impl-BMnXA_Vd.mjs → impl-B9BsXBxS.mjs} +2 -2
  56. package/dist/{impl-BMnXA_Vd.mjs.map → impl-B9BsXBxS.mjs.map} +1 -1
  57. package/dist/{impl-C3DXXn8M.mjs → impl-BNqmxytJ.mjs} +2 -2
  58. package/dist/{impl-C3DXXn8M.mjs.map → impl-BNqmxytJ.mjs.map} +1 -1
  59. package/dist/{impl-Dw9uW5zy2.mjs → impl-BYf4MpWP.mjs} +2 -2
  60. package/dist/impl-BYf4MpWP.mjs.map +1 -0
  61. package/dist/{impl-BBKJIP0Q.mjs → impl-BaC9iEO_.mjs} +2 -2
  62. package/dist/{impl-BBKJIP0Q.mjs.map → impl-BaC9iEO_.mjs.map} +1 -1
  63. package/dist/{impl-CpJljZV2.mjs → impl-BhDS0QIt.mjs} +2 -2
  64. package/dist/{impl-CpJljZV2.mjs.map → impl-BhDS0QIt.mjs.map} +1 -1
  65. package/dist/{impl-CqH3YYuv.mjs → impl-BjCQSRLu.mjs} +2 -2
  66. package/dist/{impl-CqH3YYuv.mjs.map → impl-BjCQSRLu.mjs.map} +1 -1
  67. package/dist/{impl-Cpndlxar.mjs → impl-BjIylEKQ.mjs} +2 -2
  68. package/dist/{impl-Cpndlxar.mjs.map → impl-BjIylEKQ.mjs.map} +1 -1
  69. package/dist/{impl-BBnnC5xq.mjs → impl-BwrEi3s7.mjs} +2 -2
  70. package/dist/{impl-BBnnC5xq.mjs.map → impl-BwrEi3s7.mjs.map} +1 -1
  71. package/dist/{impl-DKAV-8XC.mjs → impl-C4AI1Fsj.mjs} +2 -2
  72. package/dist/{impl-DKAV-8XC.mjs.map → impl-C4AI1Fsj.mjs.map} +1 -1
  73. package/dist/{impl-BKrNGF2F.mjs → impl-CCAeEeMR.mjs} +2 -2
  74. package/dist/{impl-BKrNGF2F.mjs.map → impl-CCAeEeMR.mjs.map} +1 -1
  75. package/dist/{impl-BRiRfzgu.mjs → impl-CFI5y5U-.mjs} +2 -2
  76. package/dist/{impl-BRiRfzgu.mjs.map → impl-CFI5y5U-.mjs.map} +1 -1
  77. package/dist/{impl-CC0rkA9s.mjs → impl-CIfRN0ux.mjs} +2 -2
  78. package/dist/{impl-CC0rkA9s.mjs.map → impl-CIfRN0ux.mjs.map} +1 -1
  79. package/dist/{impl-DhXQb3bm.mjs → impl-CLznNZ5F.mjs} +2 -2
  80. package/dist/{impl-DhXQb3bm.mjs.map → impl-CLznNZ5F.mjs.map} +1 -1
  81. package/dist/{impl-DpwyYsfg.mjs → impl-CUdo0Jyh.mjs} +2 -2
  82. package/dist/{impl-DpwyYsfg.mjs.map → impl-CUdo0Jyh.mjs.map} +1 -1
  83. package/dist/{impl-CvJtt8H2.mjs → impl-Cmj1Vi5Q.mjs} +2 -2
  84. package/dist/{impl-CvJtt8H2.mjs.map → impl-Cmj1Vi5Q.mjs.map} +1 -1
  85. package/dist/{impl-BVnfUDUm.mjs → impl-Cw3_0zqC.mjs} +2 -2
  86. package/dist/{impl-BVnfUDUm.mjs.map → impl-Cw3_0zqC.mjs.map} +1 -1
  87. package/dist/{impl-DaK9UOwL.mjs → impl-CzvCA0Ev.mjs} +2 -2
  88. package/dist/{impl-DaK9UOwL.mjs.map → impl-CzvCA0Ev.mjs.map} +1 -1
  89. package/dist/{impl-BffzTHKU.mjs → impl-D1DmW5-P.mjs} +2 -2
  90. package/dist/{impl-BffzTHKU.mjs.map → impl-D1DmW5-P.mjs.map} +1 -1
  91. package/dist/{impl-Cy8-6_Oo2.mjs → impl-D41c_KGj.mjs} +2 -2
  92. package/dist/impl-D41c_KGj.mjs.map +1 -0
  93. package/dist/impl-DEpCg7UP.mjs +2 -0
  94. package/dist/impl-DEpCg7UP.mjs.map +1 -0
  95. package/dist/{impl-BSKl6rC6.mjs → impl-DHOh4ypd.mjs} +2 -2
  96. package/dist/{impl-BSKl6rC6.mjs.map → impl-DHOh4ypd.mjs.map} +1 -1
  97. package/dist/{impl-StdJMCiM.mjs → impl-DJg0Ibxs.mjs} +2 -2
  98. package/dist/{impl-StdJMCiM.mjs.map → impl-DJg0Ibxs.mjs.map} +1 -1
  99. package/dist/{impl-ogUHfunr.mjs → impl-DUdbbIpf.mjs} +2 -2
  100. package/dist/{impl-ogUHfunr.mjs.map → impl-DUdbbIpf.mjs.map} +1 -1
  101. package/dist/{impl-CODwodEc.mjs → impl-DXHqqWJb.mjs} +2 -2
  102. package/dist/{impl-CODwodEc.mjs.map → impl-DXHqqWJb.mjs.map} +1 -1
  103. package/dist/{impl-CnHiD4zU.mjs → impl-DXaA3sMt.mjs} +2 -2
  104. package/dist/{impl-CnHiD4zU.mjs.map → impl-DXaA3sMt.mjs.map} +1 -1
  105. package/dist/{impl-CPIMsZg-.mjs → impl-Dl4RcPKp.mjs} +2 -2
  106. package/dist/{impl-CPIMsZg-.mjs.map → impl-Dl4RcPKp.mjs.map} +1 -1
  107. package/dist/{impl-CZsYoSZQ.mjs → impl-DvAwxl6Z.mjs} +2 -2
  108. package/dist/{impl-CZsYoSZQ.mjs.map → impl-DvAwxl6Z.mjs.map} +1 -1
  109. package/dist/{impl-D_AxguFh2.mjs → impl-GRdcDZQ4.mjs} +2 -2
  110. package/dist/impl-GRdcDZQ4.mjs.map +1 -0
  111. package/dist/{impl-DJ4VCAcc.mjs → impl-UMb9wjra.mjs} +2 -2
  112. package/dist/{impl-DJ4VCAcc.mjs.map → impl-UMb9wjra.mjs.map} +1 -1
  113. package/dist/{impl-BxOydpyJ.mjs → impl-Yq33AV90.mjs} +2 -2
  114. package/dist/{impl-BxOydpyJ.mjs.map → impl-Yq33AV90.mjs.map} +1 -1
  115. package/dist/{impl-BGGm947r2.mjs → impl-aGDJJgGc.mjs} +2 -2
  116. package/dist/impl-aGDJJgGc.mjs.map +1 -0
  117. package/dist/{impl-DvrSuAJv.mjs → impl-fZQxhZRu.mjs} +2 -2
  118. package/dist/{impl-DvrSuAJv.mjs.map → impl-fZQxhZRu.mjs.map} +1 -1
  119. package/dist/{impl-C-u5h8We.mjs → impl-gitQPEo3.mjs} +2 -2
  120. package/dist/{impl-C-u5h8We.mjs.map → impl-gitQPEo3.mjs.map} +1 -1
  121. package/dist/impl-i-vquwbD.mjs +2 -0
  122. package/dist/impl-i-vquwbD.mjs.map +1 -0
  123. package/dist/{impl-B6TXE2oE.mjs → impl-iteb85IZ.mjs} +2 -2
  124. package/dist/{impl-B6TXE2oE.mjs.map → impl-iteb85IZ.mjs.map} +1 -1
  125. package/dist/{impl-uwkj-RbF.mjs → impl-tYtVXUz2.mjs} +2 -2
  126. package/dist/{impl-uwkj-RbF.mjs.map → impl-tYtVXUz2.mjs.map} +1 -1
  127. package/dist/{impl-yvc0y1uO.mjs → impl-wcRtA0L3.mjs} +2 -2
  128. package/dist/{impl-yvc0y1uO.mjs.map → impl-wcRtA0L3.mjs.map} +1 -1
  129. package/dist/{impl-DpGVNllB.mjs → impl-xtlx25UP.mjs} +2 -2
  130. package/dist/{impl-DpGVNllB.mjs.map → impl-xtlx25UP.mjs.map} +1 -1
  131. package/dist/{impl-Cw10WeUv.mjs → impl-yMumZUUX.mjs} +2 -2
  132. package/dist/{impl-Cw10WeUv.mjs.map → impl-yMumZUUX.mjs.map} +1 -1
  133. package/dist/index.d.mts +842 -1645
  134. package/dist/index.d.mts.map +1 -1
  135. package/dist/index.mjs +4 -4
  136. package/dist/index.mjs.map +1 -1
  137. package/dist/{inquirer-DyRwhvoh.mjs → inquirer-BqZXFEt1.mjs} +2 -2
  138. package/dist/{inquirer-DyRwhvoh.mjs.map → inquirer-BqZXFEt1.mjs.map} +1 -1
  139. package/dist/{listFiles-Odj7j2E1.mjs → listFiles-D2wMHnEr.mjs} +1 -1
  140. package/dist/{listFiles-Odj7j2E1.mjs.map → listFiles-D2wMHnEr.mjs.map} +1 -1
  141. package/dist/markRequestDataSiloIdsCompleted-BaVxVfDe.mjs +2 -0
  142. package/dist/markRequestDataSiloIdsCompleted-BaVxVfDe.mjs.map +1 -0
  143. package/dist/{markSilentPrivacyRequests-ytCzpUkY.mjs → markSilentPrivacyRequests-miaumnaC.mjs} +2 -2
  144. package/dist/{markSilentPrivacyRequests-ytCzpUkY.mjs.map → markSilentPrivacyRequests-miaumnaC.mjs.map} +1 -1
  145. package/dist/notifyPrivacyRequestsAdditionalTime-BUdhSCNL.mjs +2 -0
  146. package/dist/notifyPrivacyRequestsAdditionalTime-BUdhSCNL.mjs.map +1 -0
  147. package/dist/{parquetToCsvOneFile-bgEgRoAi.mjs → parquetToCsvOneFile-B84XXInh.mjs} +1 -1
  148. package/dist/{parquetToCsvOneFile-bgEgRoAi.mjs.map → parquetToCsvOneFile-B84XXInh.mjs.map} +1 -1
  149. package/dist/{parseAttributesFromString-B8h4DudO.mjs → parseAttributesFromString-D1Yl0xwT.mjs} +2 -2
  150. package/dist/{parseAttributesFromString-B8h4DudO.mjs.map → parseAttributesFromString-D1Yl0xwT.mjs.map} +1 -1
  151. package/dist/parseVariablesFromString-BeKOGw5n.mjs +3 -0
  152. package/dist/parseVariablesFromString-BeKOGw5n.mjs.map +1 -0
  153. package/dist/pullAllDatapoints-Bbmky50p.mjs +45 -0
  154. package/dist/pullAllDatapoints-Bbmky50p.mjs.map +1 -0
  155. package/dist/pullChunkedCustomSiloOutstandingIdentifiers-BW5Vws25.mjs +2 -0
  156. package/dist/pullChunkedCustomSiloOutstandingIdentifiers-BW5Vws25.mjs.map +1 -0
  157. package/dist/{pullConsentManagerMetrics-BO0hYPDG.mjs → pullConsentManagerMetrics-zKgjc3Ap.mjs} +1 -1
  158. package/dist/{pullConsentManagerMetrics-BO0hYPDG.mjs.map → pullConsentManagerMetrics-zKgjc3Ap.mjs.map} +1 -1
  159. package/dist/pullManualEnrichmentIdentifiersToCsv-kpGy9H7T.mjs +2 -0
  160. package/dist/pullManualEnrichmentIdentifiersToCsv-kpGy9H7T.mjs.map +1 -0
  161. package/dist/pullTranscendConfiguration-DjOELnPo.mjs +58 -0
  162. package/dist/pullTranscendConfiguration-DjOELnPo.mjs.map +1 -0
  163. package/dist/{pullUnstructuredSubDataPointRecommendations-jE-tdoVK.mjs → pullUnstructuredSubDataPointRecommendations-D0z-vPgq.mjs} +3 -3
  164. package/dist/{pullUnstructuredSubDataPointRecommendations-jE-tdoVK.mjs.map → pullUnstructuredSubDataPointRecommendations-D0z-vPgq.mjs.map} +1 -1
  165. package/dist/{pushCronIdentifiersFromCsv-D9Hzna0W.mjs → pushCronIdentifiersFromCsv-BZRA1n_8.mjs} +2 -2
  166. package/dist/{pushCronIdentifiersFromCsv-D9Hzna0W.mjs.map → pushCronIdentifiersFromCsv-BZRA1n_8.mjs.map} +1 -1
  167. package/dist/{pushManualEnrichmentIdentifiersFromCsv-BiR7PS_d.mjs → pushManualEnrichmentIdentifiersFromCsv-DXqf8WWy.mjs} +2 -2
  168. package/dist/{pushManualEnrichmentIdentifiersFromCsv-BiR7PS_d.mjs.map → pushManualEnrichmentIdentifiersFromCsv-DXqf8WWy.mjs.map} +1 -1
  169. package/dist/{readCsv-0PIlJQCN.mjs → readCsv-C4TyEs-r.mjs} +1 -1
  170. package/dist/{readCsv-0PIlJQCN.mjs.map → readCsv-C4TyEs-r.mjs.map} +1 -1
  171. package/dist/removeUnverifiedRequestIdentifiers-BxWSsJit.mjs +2 -0
  172. package/dist/removeUnverifiedRequestIdentifiers-BxWSsJit.mjs.map +1 -0
  173. package/dist/{request-SLqRySNU.mjs → request-DfkRPQFr.mjs} +1 -1
  174. package/dist/{request-SLqRySNU.mjs.map → request-DfkRPQFr.mjs.map} +1 -1
  175. package/dist/retryRequestDataSilos-BVrJz_GC.mjs +2 -0
  176. package/dist/retryRequestDataSilos-BVrJz_GC.mjs.map +1 -0
  177. package/dist/skipPreflightJobs-CYuoMG3z.mjs +2 -0
  178. package/dist/skipPreflightJobs-CYuoMG3z.mjs.map +1 -0
  179. package/dist/skipRequestDataSilos-BNspAsjR.mjs +2 -0
  180. package/dist/skipRequestDataSilos-BNspAsjR.mjs.map +1 -0
  181. package/dist/streamPrivacyRequestsToCsv-PoyTmQd6.mjs +2 -0
  182. package/dist/streamPrivacyRequestsToCsv-PoyTmQd6.mjs.map +1 -0
  183. package/dist/{syncCodePackages-BOS5foh6.mjs → syncCodePackages-CAk_Hjyl.mjs} +1 -1
  184. package/dist/{syncCodePackages-BOS5foh6.mjs.map → syncCodePackages-CAk_Hjyl.mjs.map} +1 -1
  185. package/dist/updateConsentManagerVersionToLatest-lAw3E1wm.mjs +2 -0
  186. package/dist/updateConsentManagerVersionToLatest-lAw3E1wm.mjs.map +1 -0
  187. package/dist/{uploadConsents-BP5XILuw.mjs → uploadConsents-BzmWrNc1.mjs} +2 -2
  188. package/dist/{uploadConsents-BP5XILuw.mjs.map → uploadConsents-BzmWrNc1.mjs.map} +1 -1
  189. package/dist/{uploadCookiesFromCsv-B42cZgYW.mjs → uploadCookiesFromCsv-TH10UBgw.mjs} +2 -2
  190. package/dist/{uploadCookiesFromCsv-B42cZgYW.mjs.map → uploadCookiesFromCsv-TH10UBgw.mjs.map} +1 -1
  191. package/dist/{uploadDataFlowsFromCsv-D2V567pP.mjs → uploadDataFlowsFromCsv-DUSFCae9.mjs} +2 -2
  192. package/dist/{uploadDataFlowsFromCsv-D2V567pP.mjs.map → uploadDataFlowsFromCsv-DUSFCae9.mjs.map} +1 -1
  193. package/dist/uploadPrivacyRequestsFromCsv-sKSFfE6q.mjs +2 -0
  194. package/dist/uploadPrivacyRequestsFromCsv-sKSFfE6q.mjs.map +1 -0
  195. package/dist/{validateTranscendAuth-DCwAtgvh.mjs → validateTranscendAuth-Cuh2Qfdl.mjs} +1 -1
  196. package/dist/{validateTranscendAuth-DCwAtgvh.mjs.map → validateTranscendAuth-Cuh2Qfdl.mjs.map} +1 -1
  197. package/dist/{writeCsv-Da8NUe1V.mjs → writeCsv-C4pjXGsD.mjs} +1 -1
  198. package/dist/{writeCsv-Da8NUe1V.mjs.map → writeCsv-C4pjXGsD.mjs.map} +1 -1
  199. package/package.json +6 -6
  200. package/dist/RequestDataSilo-Rrc2dL9g.mjs +0 -54
  201. package/dist/RequestDataSilo-Rrc2dL9g.mjs.map +0 -1
  202. package/dist/bulkRestartRequests-sie3tM3W.mjs +0 -2
  203. package/dist/bulkRestartRequests-sie3tM3W.mjs.map +0 -1
  204. package/dist/bulkRetryEnrichers-C1RrxiTR.mjs +0 -2
  205. package/dist/bulkRetryEnrichers-C1RrxiTR.mjs.map +0 -1
  206. package/dist/cancelPrivacyRequests-DmvFijq_.mjs +0 -2
  207. package/dist/cancelPrivacyRequests-DmvFijq_.mjs.map +0 -1
  208. package/dist/dataSilo-Dvi8-PkH.mjs +0 -302
  209. package/dist/dataSilo-Dvi8-PkH.mjs.map +0 -1
  210. package/dist/dataSubject-CF784Ug0.mjs +0 -92
  211. package/dist/dataSubject-CF784Ug0.mjs.map +0 -1
  212. package/dist/fetchAllRequestEnrichers-Bt97Bb7F.mjs +0 -42
  213. package/dist/fetchAllRequestEnrichers-Bt97Bb7F.mjs.map +0 -1
  214. package/dist/fetchAllRequestIdentifiers-BXx3rSee.mjs +0 -10
  215. package/dist/fetchAllRequestIdentifiers-BXx3rSee.mjs.map +0 -1
  216. package/dist/fetchRequestDataSilo-0UvyeL60.mjs +0 -2
  217. package/dist/fetchRequestDataSilo-0UvyeL60.mjs.map +0 -1
  218. package/dist/fetchRequestFilesForRequest-CJH2iB-P.mjs +0 -33
  219. package/dist/fetchRequestFilesForRequest-CJH2iB-P.mjs.map +0 -1
  220. package/dist/generateCrossAccountApiKeys-DztJoLQS.mjs +0 -2
  221. package/dist/generateCrossAccountApiKeys-DztJoLQS.mjs.map +0 -1
  222. package/dist/impl-BGGm947r2.mjs.map +0 -1
  223. package/dist/impl-BVHfSIVG.mjs +0 -2
  224. package/dist/impl-BfeWet_F2.mjs +0 -2
  225. package/dist/impl-BfeWet_F2.mjs.map +0 -1
  226. package/dist/impl-Cy8-6_Oo2.mjs.map +0 -1
  227. package/dist/impl-D_AxguFh2.mjs.map +0 -1
  228. package/dist/impl-Dw9uW5zy2.mjs.map +0 -1
  229. package/dist/impl-PdIU1pLr2.mjs +0 -2
  230. package/dist/impl-PdIU1pLr2.mjs.map +0 -1
  231. package/dist/impl-daUiLV3c.mjs +0 -2
  232. package/dist/impl-daUiLV3c.mjs.map +0 -1
  233. package/dist/markRequestDataSiloIdsCompleted-DJSICILv.mjs +0 -2
  234. package/dist/markRequestDataSiloIdsCompleted-DJSICILv.mjs.map +0 -1
  235. package/dist/notifyPrivacyRequestsAdditionalTime-D8v68eAg.mjs +0 -2
  236. package/dist/notifyPrivacyRequestsAdditionalTime-D8v68eAg.mjs.map +0 -1
  237. package/dist/parseVariablesFromString-CvoeZZ75.mjs +0 -23
  238. package/dist/parseVariablesFromString-CvoeZZ75.mjs.map +0 -1
  239. package/dist/pullAllDatapoints-CqgqXRbp.mjs +0 -45
  240. package/dist/pullAllDatapoints-CqgqXRbp.mjs.map +0 -1
  241. package/dist/pullChunkedCustomSiloOutstandingIdentifiers-DaYEDZ66.mjs +0 -2
  242. package/dist/pullChunkedCustomSiloOutstandingIdentifiers-DaYEDZ66.mjs.map +0 -1
  243. package/dist/pullManualEnrichmentIdentifiersToCsv-BNuhsG20.mjs +0 -2
  244. package/dist/pullManualEnrichmentIdentifiersToCsv-BNuhsG20.mjs.map +0 -1
  245. package/dist/pullTranscendConfiguration-DSyMRyPe.mjs +0 -58
  246. package/dist/pullTranscendConfiguration-DSyMRyPe.mjs.map +0 -1
  247. package/dist/removeUnverifiedRequestIdentifiers-B0Gx09XN.mjs +0 -35
  248. package/dist/removeUnverifiedRequestIdentifiers-B0Gx09XN.mjs.map +0 -1
  249. package/dist/retryRequestDataSilos-DFjFhhC0.mjs +0 -2
  250. package/dist/retryRequestDataSilos-DFjFhhC0.mjs.map +0 -1
  251. package/dist/skipPreflightJobs-Bm8lZZk-.mjs +0 -2
  252. package/dist/skipPreflightJobs-Bm8lZZk-.mjs.map +0 -1
  253. package/dist/skipRequestDataSilos-B5FByYTj.mjs +0 -2
  254. package/dist/skipRequestDataSilos-B5FByYTj.mjs.map +0 -1
  255. package/dist/streamPrivacyRequestsToCsv-CBzh80oQ.mjs +0 -2
  256. package/dist/streamPrivacyRequestsToCsv-CBzh80oQ.mjs.map +0 -1
  257. package/dist/syncEnrichers-C9HcWCrs.mjs +0 -3
  258. package/dist/syncEnrichers-C9HcWCrs.mjs.map +0 -1
  259. package/dist/updateConsentManagerVersionToLatest-X1HAM_IX.mjs +0 -2
  260. package/dist/updateConsentManagerVersionToLatest-X1HAM_IX.mjs.map +0 -1
  261. package/dist/uploadPrivacyRequestsFromCsv-Czc3vGfJ.mjs +0 -2
  262. package/dist/uploadPrivacyRequestsFromCsv-Czc3vGfJ.mjs.map +0 -1
package/dist/{syncCodePackages-BOS5foh6.mjs.map → syncCodePackages-CAk_Hjyl.mjs.map}
@@ -1 +1 @@
- {"version":3,"file":"syncCodePackages-BOS5foh6.mjs","names":[],"sources":["../src/lib/graphql/syncCodePackages.ts"],"sourcesContent":["import { CodePackageType } from '@transcend-io/privacy-types';\nimport {\n CREATE_CODE_PACKAGE,\n UPDATE_CODE_PACKAGES,\n fetchAllCodePackages,\n type CodePackage,\n makeGraphQLRequest,\n syncRepositories,\n syncSoftwareDevelopmentKits,\n} from '@transcend-io/sdk';\nimport { map, mapSeries } from '@transcend-io/utils';\nimport colors from 'colors';\nimport { GraphQLClient } from 'graphql-request';\nimport { chunk, uniq, keyBy, uniqBy } from 'lodash-es';\n\nimport { CodePackageInput, RepositoryInput } from '../../codecs.js';\nimport { logger } from '../../logger.js';\n\nconst CHUNK_SIZE = 100;\n\nconst LOOKUP_SPLIT_KEY = '%%%%';\n\n/**\n * Create a new code package\n *\n * @param client - GraphQL client\n * @param input - Code package input\n * @returns Code package ID\n */\nexport async function createCodePackage(\n client: GraphQLClient,\n input: {\n /** Name of package */\n name: string;\n /** Description of package */\n description?: string;\n /** Type of package */\n type: CodePackageType;\n /** Relative path to package */\n relativePath: string;\n /** Repository ID */\n repositoryId?: string;\n /** Name of repository */\n repositoryName?: string;\n /** IDs of SDKs */\n softwareDevelopmentKitIds?: string[];\n /** IDs of owners */\n ownerIds?: string[];\n /** Emails of owners */\n ownerEmails?: string[];\n /** IDs of teams */\n teamIds?: string[];\n /** Names of teams */\n teamNames?: string[];\n },\n): Promise<CodePackage> {\n const {\n createCodePackage: { codePackage },\n } = await makeGraphQLRequest<{\n /** createCodePackage mutation */\n createCodePackage: {\n /** Code package */\n codePackage: CodePackage;\n };\n }>(client, CREATE_CODE_PACKAGE, {\n variables: { input },\n logger,\n });\n logger.info(colors.green(`Successfully created code package \"${input.name}\"!`));\n return codePackage;\n}\n\n/**\n * Update an existing code package\n *\n * @param client - GraphQL client\n * @param inputs - Code package input\n * @returns Code packages that were updated\n */\nexport async function updateCodePackages(\n client: GraphQLClient,\n inputs: {\n /** ID of code package */\n id: string;\n /** Name of package */\n name: string;\n /** Description of package */\n description?: string;\n /** Type of package */\n type: CodePackageType;\n /** Relative path to package */\n relativePath: string;\n /** Repository ID */\n repositoryId?: string;\n /** Name of repository */\n repositoryName?: string;\n /** IDs of SDKs */\n softwareDevelopmentKitIds?: string[];\n /** IDs of owners */\n ownerIds?: string[];\n /** Emails of owners */\n ownerEmails?: string[];\n /** IDs of teams */\n teamIds?: string[];\n /** Names of teams */\n teamNames?: string[];\n }[],\n): Promise<CodePackage[]> {\n const {\n updateCodePackages: { codePackages },\n } = await makeGraphQLRequest<{\n /** updateCodePackages mutation */\n updateCodePackages: {\n /** Code packages */\n codePackages: CodePackage[];\n };\n }>(client, UPDATE_CODE_PACKAGES, {\n variables: {\n input: {\n codePackages: inputs,\n },\n },\n logger,\n });\n logger.info(colors.green(`Successfully updated ${inputs.length} code packages!`));\n return codePackages;\n}\n\n/**\n * Uploads silo discovery results for Transcend to classify\n *\n * @param client - GraphQL Client\n * @param codePackages - Packages to upload\n * @param concurrency - How many concurrent requests to make\n * @returns True if successful, false if any updates 
failed, or an error occurs\n */\nexport async function syncCodePackages(\n client: GraphQLClient,\n codePackages: CodePackageInput[],\n concurrency = 20,\n): Promise<boolean> {\n let encounteredError = false;\n const [existingCodePackages, { softwareDevelopmentKits: existingSoftwareDevelopmentKits }] =\n await Promise.all([\n // fetch all code packages\n fetchAllCodePackages(client, { logger }),\n // make sure all SDKs exist\n syncSoftwareDevelopmentKits(\n client,\n uniqBy(\n codePackages\n .map(({ type, softwareDevelopmentKits = [] }) =>\n softwareDevelopmentKits.map(({ name }) => ({\n name,\n codePackageType: type,\n })),\n )\n .flat(),\n ({ name, codePackageType }) => `${name}${LOOKUP_SPLIT_KEY}${codePackageType}`,\n ),\n { logger, concurrency },\n ),\n // make sure all Repositories exist\n syncRepositories(\n client,\n uniqBy(codePackages, 'repositoryName').map(\n ({ repositoryName }) =>\n ({\n name: repositoryName,\n url: `https://github.com/${repositoryName}`,\n }) as RepositoryInput,\n ),\n { logger },\n ),\n ]);\n\n const softwareDevelopmentKitLookup = keyBy(\n existingSoftwareDevelopmentKits,\n ({ name, codePackageType }) => `${name}${LOOKUP_SPLIT_KEY}${codePackageType}`,\n );\n const codePackagesLookup = keyBy(\n existingCodePackages,\n ({ name, type }) => `${name}${LOOKUP_SPLIT_KEY}${type}`,\n );\n\n // Determine which codePackages are new vs existing\n const mapCodePackagesToExisting = codePackages.map((codePackageInput) => [\n codePackageInput,\n codePackagesLookup[`${codePackageInput.name}${LOOKUP_SPLIT_KEY}${codePackageInput.type}`]?.id,\n ]);\n\n // Create the new codePackages\n const newCodePackages = mapCodePackagesToExisting\n .filter(([, existing]) => !existing)\n .map(([codePackageInput]) => codePackageInput as CodePackageInput);\n try {\n logger.info(colors.magenta(`Creating \"${newCodePackages.length}\" new code packages...`));\n await map(\n newCodePackages,\n async ({ softwareDevelopmentKits, ...codePackage }) => {\n await createCodePackage(client, {\n ...codePackage,\n ...(softwareDevelopmentKits\n ? {\n softwareDevelopmentKitIds: uniq(\n softwareDevelopmentKits.map(({ name }) => {\n const sdk =\n softwareDevelopmentKitLookup[`${name}${LOOKUP_SPLIT_KEY}${codePackage.type}`];\n if (!sdk) {\n throw new Error(`Failed to find SDK with name: \"${name}\"`);\n }\n return sdk.id;\n }),\n ),\n }\n : {}),\n });\n },\n {\n concurrency,\n },\n );\n logger.info(colors.green(`Successfully synced ${newCodePackages.length} code packages!`));\n } catch (err) {\n encounteredError = true;\n logger.error(colors.red(`Failed to create code packages! - ${err.message}`));\n }\n\n // Update existing codePackages\n const existingCodePackageInputs = mapCodePackagesToExisting.filter(\n (x): x is [CodePackageInput, string] => !!x[1],\n );\n logger.info(colors.magenta(`Updating \"${existingCodePackageInputs.length}\" code packages...`));\n const chunks = chunk(existingCodePackageInputs, CHUNK_SIZE);\n\n await mapSeries(chunks, async (chunk) => {\n try {\n await updateCodePackages(\n client,\n chunk.map(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n ([{ softwareDevelopmentKits, repositoryName, ...input }, id]) => ({\n ...input,\n ...(softwareDevelopmentKits\n ? 
{\n softwareDevelopmentKitIds: uniq(\n softwareDevelopmentKits.map(({ name }) => {\n const sdk =\n softwareDevelopmentKitLookup[`${name}${LOOKUP_SPLIT_KEY}${input.type}`];\n if (!sdk) {\n throw new Error(`Failed to find SDK with name: \"${name}\"`);\n }\n return sdk.id;\n }),\n ),\n }\n : {}),\n id,\n }),\n ),\n );\n logger.info(colors.green(`Successfully updated \"${chunk.length}\" code packages!`));\n } catch (err) {\n encounteredError = true;\n logger.error(colors.red(`Failed to update code packages! - ${err.message}`));\n }\n });\n\n logger.info(colors.green(`Synced \"${codePackages.length}\" code packages!`));\n return !encounteredError;\n}\n"],"mappings":"uXAkBA,MAEM,EAAmB,OASzB,eAAsB,EACpB,EACA,EAwBsB,CACtB,GAAM,CACJ,kBAAmB,CAAE,gBACnB,MAAM,EAMP,EAAQ,EAAqB,CAC9B,UAAW,CAAE,QAAO,CACpB,SACD,CAAC,CAEF,OADA,EAAO,KAAK,EAAO,MAAM,sCAAsC,EAAM,KAAK,IAAI,CAAC,CACxE,EAUT,eAAsB,EACpB,EACA,EA0BwB,CACxB,GAAM,CACJ,mBAAoB,CAAE,iBACpB,MAAM,EAMP,EAAQ,EAAsB,CAC/B,UAAW,CACT,MAAO,CACL,aAAc,EACf,CACF,CACD,SACD,CAAC,CAEF,OADA,EAAO,KAAK,EAAO,MAAM,wBAAwB,EAAO,OAAO,iBAAiB,CAAC,CAC1E,EAWT,eAAsB,EACpB,EACA,EACA,EAAc,GACI,CAClB,IAAI,EAAmB,GACjB,CAAC,EAAsB,CAAE,wBAAyB,IACtD,MAAM,QAAQ,IAAI,CAEhB,EAAqB,EAAQ,CAAE,SAAQ,CAAC,CAExC,EACE,EACA,EACE,EACG,KAAK,CAAE,OAAM,0BAA0B,EAAE,IACxC,EAAwB,KAAK,CAAE,WAAY,CACzC,OACA,gBAAiB,EAClB,EAAE,CACJ,CACA,MAAM,EACR,CAAE,OAAM,qBAAsB,GAAG,IAAO,IAAmB,IAC7D,CACD,CAAE,SAAQ,cAAa,CACxB,CAED,EACE,EACA,EAAO,EAAc,iBAAiB,CAAC,KACpC,CAAE,qBACA,CACC,KAAM,EACN,IAAK,sBAAsB,IAC5B,EACJ,CACD,CAAE,SAAQ,CACX,CACF,CAAC,CAEE,EAA+B,EACnC,GACC,CAAE,OAAM,qBAAsB,GAAG,IAAO,IAAmB,IAC7D,CACK,EAAqB,EACzB,GACC,CAAE,OAAM,UAAW,GAAG,IAAO,IAAmB,IAClD,CAGK,EAA4B,EAAa,IAAK,GAAqB,CACvE,EACA,EAAmB,GAAG,EAAiB,OAAO,IAAmB,EAAiB,SAAS,GAC5F,CAAC,CAGI,EAAkB,EACrB,QAAQ,EAAG,KAAc,CAAC,EAAS,CACnC,KAAK,CAAC,KAAsB,EAAqC,CACpE,GAAI,CACF,EAAO,KAAK,EAAO,QAAQ,aAAa,EAAgB,OAAO,wBAAwB,CAAC,CACxF,MAAM,EACJ,EACA,MAAO,CAAE,0BAAyB,GAAG,KAAkB,CACrD,MAAM,EAAkB,EAAQ,CAC9B,GAAG,EACH,GAAI,EACA,CACE,0BAA2B,EACzB,EAAwB,KAAK,CAAE,UAAW,CACxC,IAAM,EACJ,EAA6B,GAAG,IAAO,IAAmB,EAAY,QACxE,GAAI,CAAC,EACH,MAAU,MAAM,kCAAkC,EAAK,GAAG,CAE5D,OAAO,EAAI,IACX,CACH,CACF,CACD,EAAE,CACP,CAAC,EAEJ,CACE,cACD,CACF,CACD,EAAO,KAAK,EAAO,MAAM,uBAAuB,EAAgB,OAAO,iBAAiB,CAAC,OAClF,EAAK,CACZ,EAAmB,GACnB,EAAO,MAAM,EAAO,IAAI,qCAAqC,EAAI,UAAU,CAAC,CAI9E,IAAM,EAA4B,EAA0B,OACzD,GAAuC,CAAC,CAAC,EAAE,GAC7C,CAsCD,OArCA,EAAO,KAAK,EAAO,QAAQ,aAAa,EAA0B,OAAO,oBAAoB,CAAC,CAG9F,MAAM,EAFS,EAAM,EAA2B,IAAW,CAEnC,KAAO,IAAU,CACvC,GAAI,CACF,MAAM,EACJ,EACA,EAAM,KAEH,CAAC,CAAE,0BAAyB,iBAAgB,GAAG,GAAS,MAAS,CAChE,GAAG,EACH,GAAI,EACA,CACE,0BAA2B,EACzB,EAAwB,KAAK,CAAE,UAAW,CACxC,IAAM,EACJ,EAA6B,GAAG,IAAO,IAAmB,EAAM,QAClE,GAAI,CAAC,EACH,MAAU,MAAM,kCAAkC,EAAK,GAAG,CAE5D,OAAO,EAAI,IACX,CACH,CACF,CACD,EAAE,CACN,KACD,EACF,CACF,CACD,EAAO,KAAK,EAAO,MAAM,yBAAyB,EAAM,OAAO,kBAAkB,CAAC,OAC3E,EAAK,CACZ,EAAmB,GACnB,EAAO,MAAM,EAAO,IAAI,qCAAqC,EAAI,UAAU,CAAC,GAE9E,CAEF,EAAO,KAAK,EAAO,MAAM,WAAW,EAAa,OAAO,kBAAkB,CAAC,CACpE,CAAC"}
+ {"version":3,"file":"syncCodePackages-CAk_Hjyl.mjs","names":[],"sources":["../src/lib/graphql/syncCodePackages.ts"],"sourcesContent":["import { CodePackageType } from '@transcend-io/privacy-types';\nimport {\n CREATE_CODE_PACKAGE,\n UPDATE_CODE_PACKAGES,\n fetchAllCodePackages,\n type CodePackage,\n makeGraphQLRequest,\n syncRepositories,\n syncSoftwareDevelopmentKits,\n} from '@transcend-io/sdk';\nimport { map, mapSeries } from '@transcend-io/utils';\nimport colors from 'colors';\nimport { GraphQLClient } from 'graphql-request';\nimport { chunk, uniq, keyBy, uniqBy } from 'lodash-es';\n\nimport { CodePackageInput, RepositoryInput } from '../../codecs.js';\nimport { logger } from '../../logger.js';\n\nconst CHUNK_SIZE = 100;\n\nconst LOOKUP_SPLIT_KEY = '%%%%';\n\n/**\n * Create a new code package\n *\n * @param client - GraphQL client\n * @param input - Code package input\n * @returns Code package ID\n */\nexport async function createCodePackage(\n client: GraphQLClient,\n input: {\n /** Name of package */\n name: string;\n /** Description of package */\n description?: string;\n /** Type of package */\n type: CodePackageType;\n /** Relative path to package */\n relativePath: string;\n /** Repository ID */\n repositoryId?: string;\n /** Name of repository */\n repositoryName?: string;\n /** IDs of SDKs */\n softwareDevelopmentKitIds?: string[];\n /** IDs of owners */\n ownerIds?: string[];\n /** Emails of owners */\n ownerEmails?: string[];\n /** IDs of teams */\n teamIds?: string[];\n /** Names of teams */\n teamNames?: string[];\n },\n): Promise<CodePackage> {\n const {\n createCodePackage: { codePackage },\n } = await makeGraphQLRequest<{\n /** createCodePackage mutation */\n createCodePackage: {\n /** Code package */\n codePackage: CodePackage;\n };\n }>(client, CREATE_CODE_PACKAGE, {\n variables: { input },\n logger,\n });\n logger.info(colors.green(`Successfully created code package \"${input.name}\"!`));\n return codePackage;\n}\n\n/**\n * Update an existing code package\n *\n * @param client - GraphQL client\n * @param inputs - Code package input\n * @returns Code packages that were updated\n */\nexport async function updateCodePackages(\n client: GraphQLClient,\n inputs: {\n /** ID of code package */\n id: string;\n /** Name of package */\n name: string;\n /** Description of package */\n description?: string;\n /** Type of package */\n type: CodePackageType;\n /** Relative path to package */\n relativePath: string;\n /** Repository ID */\n repositoryId?: string;\n /** Name of repository */\n repositoryName?: string;\n /** IDs of SDKs */\n softwareDevelopmentKitIds?: string[];\n /** IDs of owners */\n ownerIds?: string[];\n /** Emails of owners */\n ownerEmails?: string[];\n /** IDs of teams */\n teamIds?: string[];\n /** Names of teams */\n teamNames?: string[];\n }[],\n): Promise<CodePackage[]> {\n const {\n updateCodePackages: { codePackages },\n } = await makeGraphQLRequest<{\n /** updateCodePackages mutation */\n updateCodePackages: {\n /** Code packages */\n codePackages: CodePackage[];\n };\n }>(client, UPDATE_CODE_PACKAGES, {\n variables: {\n input: {\n codePackages: inputs,\n },\n },\n logger,\n });\n logger.info(colors.green(`Successfully updated ${inputs.length} code packages!`));\n return codePackages;\n}\n\n/**\n * Uploads silo discovery results for Transcend to classify\n *\n * @param client - GraphQL Client\n * @param codePackages - Packages to upload\n * @param concurrency - How many concurrent requests to make\n * @returns True if successful, false if any updates 
failed, or an error occurs\n */\nexport async function syncCodePackages(\n client: GraphQLClient,\n codePackages: CodePackageInput[],\n concurrency = 20,\n): Promise<boolean> {\n let encounteredError = false;\n const [existingCodePackages, { softwareDevelopmentKits: existingSoftwareDevelopmentKits }] =\n await Promise.all([\n // fetch all code packages\n fetchAllCodePackages(client, { logger }),\n // make sure all SDKs exist\n syncSoftwareDevelopmentKits(\n client,\n uniqBy(\n codePackages\n .map(({ type, softwareDevelopmentKits = [] }) =>\n softwareDevelopmentKits.map(({ name }) => ({\n name,\n codePackageType: type,\n })),\n )\n .flat(),\n ({ name, codePackageType }) => `${name}${LOOKUP_SPLIT_KEY}${codePackageType}`,\n ),\n { logger, concurrency },\n ),\n // make sure all Repositories exist\n syncRepositories(\n client,\n uniqBy(codePackages, 'repositoryName').map(\n ({ repositoryName }) =>\n ({\n name: repositoryName,\n url: `https://github.com/${repositoryName}`,\n }) as RepositoryInput,\n ),\n { logger },\n ),\n ]);\n\n const softwareDevelopmentKitLookup = keyBy(\n existingSoftwareDevelopmentKits,\n ({ name, codePackageType }) => `${name}${LOOKUP_SPLIT_KEY}${codePackageType}`,\n );\n const codePackagesLookup = keyBy(\n existingCodePackages,\n ({ name, type }) => `${name}${LOOKUP_SPLIT_KEY}${type}`,\n );\n\n // Determine which codePackages are new vs existing\n const mapCodePackagesToExisting = codePackages.map((codePackageInput) => [\n codePackageInput,\n codePackagesLookup[`${codePackageInput.name}${LOOKUP_SPLIT_KEY}${codePackageInput.type}`]?.id,\n ]);\n\n // Create the new codePackages\n const newCodePackages = mapCodePackagesToExisting\n .filter(([, existing]) => !existing)\n .map(([codePackageInput]) => codePackageInput as CodePackageInput);\n try {\n logger.info(colors.magenta(`Creating \"${newCodePackages.length}\" new code packages...`));\n await map(\n newCodePackages,\n async ({ softwareDevelopmentKits, ...codePackage }) => {\n await createCodePackage(client, {\n ...codePackage,\n ...(softwareDevelopmentKits\n ? {\n softwareDevelopmentKitIds: uniq(\n softwareDevelopmentKits.map(({ name }) => {\n const sdk =\n softwareDevelopmentKitLookup[`${name}${LOOKUP_SPLIT_KEY}${codePackage.type}`];\n if (!sdk) {\n throw new Error(`Failed to find SDK with name: \"${name}\"`);\n }\n return sdk.id;\n }),\n ),\n }\n : {}),\n });\n },\n {\n concurrency,\n },\n );\n logger.info(colors.green(`Successfully synced ${newCodePackages.length} code packages!`));\n } catch (err) {\n encounteredError = true;\n logger.error(colors.red(`Failed to create code packages! - ${err.message}`));\n }\n\n // Update existing codePackages\n const existingCodePackageInputs = mapCodePackagesToExisting.filter(\n (x): x is [CodePackageInput, string] => !!x[1],\n );\n logger.info(colors.magenta(`Updating \"${existingCodePackageInputs.length}\" code packages...`));\n const chunks = chunk(existingCodePackageInputs, CHUNK_SIZE);\n\n await mapSeries(chunks, async (chunk) => {\n try {\n await updateCodePackages(\n client,\n chunk.map(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n ([{ softwareDevelopmentKits, repositoryName, ...input }, id]) => ({\n ...input,\n ...(softwareDevelopmentKits\n ? 
{\n softwareDevelopmentKitIds: uniq(\n softwareDevelopmentKits.map(({ name }) => {\n const sdk =\n softwareDevelopmentKitLookup[`${name}${LOOKUP_SPLIT_KEY}${input.type}`];\n if (!sdk) {\n throw new Error(`Failed to find SDK with name: \"${name}\"`);\n }\n return sdk.id;\n }),\n ),\n }\n : {}),\n id,\n }),\n ),\n );\n logger.info(colors.green(`Successfully updated \"${chunk.length}\" code packages!`));\n } catch (err) {\n encounteredError = true;\n logger.error(colors.red(`Failed to update code packages! - ${err.message}`));\n }\n });\n\n logger.info(colors.green(`Synced \"${codePackages.length}\" code packages!`));\n return !encounteredError;\n}\n"],"mappings":"uXAkBA,MAEM,EAAmB,OASzB,eAAsB,EACpB,EACA,EAwBsB,CACtB,GAAM,CACJ,kBAAmB,CAAE,gBACnB,MAAM,EAMP,EAAQ,EAAqB,CAC9B,UAAW,CAAE,QAAO,CACpB,SACD,CAAC,CAEF,OADA,EAAO,KAAK,EAAO,MAAM,sCAAsC,EAAM,KAAK,IAAI,CAAC,CACxE,EAUT,eAAsB,EACpB,EACA,EA0BwB,CACxB,GAAM,CACJ,mBAAoB,CAAE,iBACpB,MAAM,EAMP,EAAQ,EAAsB,CAC/B,UAAW,CACT,MAAO,CACL,aAAc,EACf,CACF,CACD,SACD,CAAC,CAEF,OADA,EAAO,KAAK,EAAO,MAAM,wBAAwB,EAAO,OAAO,iBAAiB,CAAC,CAC1E,EAWT,eAAsB,EACpB,EACA,EACA,EAAc,GACI,CAClB,IAAI,EAAmB,GACjB,CAAC,EAAsB,CAAE,wBAAyB,IACtD,MAAM,QAAQ,IAAI,CAEhB,EAAqB,EAAQ,CAAE,SAAQ,CAAC,CAExC,EACE,EACA,EACE,EACG,KAAK,CAAE,OAAM,0BAA0B,EAAE,IACxC,EAAwB,KAAK,CAAE,WAAY,CACzC,OACA,gBAAiB,EAClB,EAAE,CACJ,CACA,MAAM,EACR,CAAE,OAAM,qBAAsB,GAAG,IAAO,IAAmB,IAC7D,CACD,CAAE,SAAQ,cAAa,CACxB,CAED,EACE,EACA,EAAO,EAAc,iBAAiB,CAAC,KACpC,CAAE,qBACA,CACC,KAAM,EACN,IAAK,sBAAsB,IAC5B,EACJ,CACD,CAAE,SAAQ,CACX,CACF,CAAC,CAEE,EAA+B,EACnC,GACC,CAAE,OAAM,qBAAsB,GAAG,IAAO,IAAmB,IAC7D,CACK,EAAqB,EACzB,GACC,CAAE,OAAM,UAAW,GAAG,IAAO,IAAmB,IAClD,CAGK,EAA4B,EAAa,IAAK,GAAqB,CACvE,EACA,EAAmB,GAAG,EAAiB,OAAO,IAAmB,EAAiB,SAAS,GAC5F,CAAC,CAGI,EAAkB,EACrB,QAAQ,EAAG,KAAc,CAAC,EAAS,CACnC,KAAK,CAAC,KAAsB,EAAqC,CACpE,GAAI,CACF,EAAO,KAAK,EAAO,QAAQ,aAAa,EAAgB,OAAO,wBAAwB,CAAC,CACxF,MAAM,EACJ,EACA,MAAO,CAAE,0BAAyB,GAAG,KAAkB,CACrD,MAAM,EAAkB,EAAQ,CAC9B,GAAG,EACH,GAAI,EACA,CACE,0BAA2B,EACzB,EAAwB,KAAK,CAAE,UAAW,CACxC,IAAM,EACJ,EAA6B,GAAG,IAAO,IAAmB,EAAY,QACxE,GAAI,CAAC,EACH,MAAU,MAAM,kCAAkC,EAAK,GAAG,CAE5D,OAAO,EAAI,IACX,CACH,CACF,CACD,EAAE,CACP,CAAC,EAEJ,CACE,cACD,CACF,CACD,EAAO,KAAK,EAAO,MAAM,uBAAuB,EAAgB,OAAO,iBAAiB,CAAC,OAClF,EAAK,CACZ,EAAmB,GACnB,EAAO,MAAM,EAAO,IAAI,qCAAqC,EAAI,UAAU,CAAC,CAI9E,IAAM,EAA4B,EAA0B,OACzD,GAAuC,CAAC,CAAC,EAAE,GAC7C,CAsCD,OArCA,EAAO,KAAK,EAAO,QAAQ,aAAa,EAA0B,OAAO,oBAAoB,CAAC,CAG9F,MAAM,EAFS,EAAM,EAA2B,IAAW,CAEnC,KAAO,IAAU,CACvC,GAAI,CACF,MAAM,EACJ,EACA,EAAM,KAEH,CAAC,CAAE,0BAAyB,iBAAgB,GAAG,GAAS,MAAS,CAChE,GAAG,EACH,GAAI,EACA,CACE,0BAA2B,EACzB,EAAwB,KAAK,CAAE,UAAW,CACxC,IAAM,EACJ,EAA6B,GAAG,IAAO,IAAmB,EAAM,QAClE,GAAI,CAAC,EACH,MAAU,MAAM,kCAAkC,EAAK,GAAG,CAE5D,OAAO,EAAI,IACX,CACH,CACF,CACD,EAAE,CACN,KACD,EACF,CACF,CACD,EAAO,KAAK,EAAO,MAAM,yBAAyB,EAAM,OAAO,kBAAkB,CAAC,OAC3E,EAAK,CACZ,EAAmB,GACnB,EAAO,MAAM,EAAO,IAAI,qCAAqC,EAAI,UAAU,CAAC,GAE9E,CAEF,EAAO,KAAK,EAAO,MAAM,WAAW,EAAa,OAAO,kBAAkB,CAAC,CACpE,CAAC"}
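The TypeScript source for syncCodePackages is only visible above as an escaped string inside the source map, so its matching logic is easy to miss. Below is a minimal, self-contained sketch of how that source decides between createCodePackage and updateCodePackages: it keys existing packages by name plus type, joined with the '%%%%' separator. The key format and the chunk size of 100 come from the embedded source; the sample data and the 'PACKAGE_JSON' type value are hypothetical, and a plain Map stands in for lodash's keyBy.

```ts
// Sketch of the lookup-key matching in syncCodePackages (key format taken from the
// embedded source above; the sample data below is hypothetical).
const LOOKUP_SPLIT_KEY = '%%%%';

interface ExistingCodePackage {
  id: string;
  name: string;
  type: string;
}

// Plain Map in place of lodash keyBy, keyed by `${name}%%%%${type}`.
const existing: ExistingCodePackage[] = [
  { id: 'pkg_1', name: 'web-app', type: 'PACKAGE_JSON' },
];
const byKey = new Map(existing.map((p) => [`${p.name}${LOOKUP_SPLIT_KEY}${p.type}`, p.id]));

// Inputs whose key matches an existing package are routed to updateCodePackages
// (in chunks of 100); the rest go to createCodePackage.
const input = { name: 'web-app', type: 'PACKAGE_JSON' };
const existingId = byKey.get(`${input.name}${LOOKUP_SPLIT_KEY}${input.type}`);
console.log(existingId ? `update ${existingId}` : 'create new code package');
```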
package/dist/updateConsentManagerVersionToLatest-lAw3E1wm.mjs
@@ -0,0 +1,2 @@
+ import{a as e}from"./constants-muOBBQA_.mjs";import{t}from"./logger-Bj782ZYD.mjs";import{ConsentBundleType as n}from"@transcend-io/privacy-types";import r from"colors";import{buildTranscendGraphQLClient as i,deployConsentManager as a,fetchConsentManagerId as o,updateConsentManagerToLatest as s}from"@transcend-io/sdk";import{mapSeries as c}from"@transcend-io/utils";async function l({auth:l,deploy:u=!1,transcendUrl:d=e,bundleTypes:f=Object.values(n)}){let p=i(d,l),m=await o(p,{logger:t});await c(f,async e=>{t.info(r.magenta(`Update Consent Manager bundle with ID "${m}" and type "${e}" to latest version...`)),await s(p,{input:{id:m,bundleType:e},logger:t}),t.info(r.green(`Updated Consent Manager bundle with ID "${m}" and type "${e}" to latest version!`))}),u&&await c(f,async e=>{t.info(r.magenta(`Deploying Consent Manager bundle with ID "${m}" and type "${e}"...`)),await a(p,{id:m,bundleType:e},{logger:t}),t.info(r.green(`Deployed Consent Manager bundle with ID "${m}" and type "${e}"!`))})}export{l as t};
+ //# sourceMappingURL=updateConsentManagerVersionToLatest-lAw3E1wm.mjs.map
package/dist/updateConsentManagerVersionToLatest-lAw3E1wm.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"updateConsentManagerVersionToLatest-lAw3E1wm.mjs","names":[],"sources":["../src/lib/consent-manager/updateConsentManagerVersionToLatest.ts"],"sourcesContent":["import { ConsentBundleType } from '@transcend-io/privacy-types';\nimport {\n buildTranscendGraphQLClient,\n deployConsentManager,\n fetchConsentManagerId,\n updateConsentManagerToLatest,\n} from '@transcend-io/sdk';\nimport { mapSeries } from '@transcend-io/utils';\nimport colors from 'colors';\n\nimport { DEFAULT_TRANSCEND_API } from '../../constants.js';\nimport { logger } from '../../logger.js';\n\n/**\n * Update the consent manager to latest version\n *\n * @param options - Options\n */\nexport async function updateConsentManagerVersionToLatest({\n auth,\n deploy = false,\n transcendUrl = DEFAULT_TRANSCEND_API,\n bundleTypes = Object.values(ConsentBundleType),\n}: {\n /** Transcend API key authentication */\n auth: string;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Deploy consent manager with this update */\n deploy?: boolean;\n /** The bundle types to update and deploy */\n bundleTypes?: ConsentBundleType[];\n}): Promise<void> {\n // Find all requests made before createdAt that are in a removing data state\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Grab Consent Manager ID\n const consentManagerId = await fetchConsentManagerId(client, { logger });\n\n // Update each bundle type to latest version\n await mapSeries(bundleTypes, async (bundleType) => {\n logger.info(\n colors.magenta(\n `Update Consent Manager bundle with ID \"${consentManagerId}\" and type \"${bundleType}\" to latest version...`,\n ),\n );\n await updateConsentManagerToLatest(client, {\n input: {\n id: consentManagerId,\n bundleType,\n },\n logger,\n });\n logger.info(\n colors.green(\n `Updated Consent Manager bundle with ID \"${consentManagerId}\" and type \"${bundleType}\" to latest version!`,\n ),\n );\n });\n\n // deploy Consent Managers\n if (deploy) {\n // Update each bundle type to latest version\n await mapSeries(bundleTypes, async (bundleType) => {\n logger.info(\n colors.magenta(\n `Deploying Consent Manager bundle with ID \"${consentManagerId}\" and type \"${bundleType}\"...`,\n ),\n );\n await deployConsentManager(\n client,\n {\n id: consentManagerId,\n bundleType,\n },\n { logger },\n );\n logger.info(\n colors.green(\n `Deployed Consent Manager bundle with ID \"${consentManagerId}\" and type \"${bundleType}\"!`,\n ),\n );\n });\n }\n}\n"],"mappings":"+WAkBA,eAAsB,EAAoC,CACxD,OACA,SAAS,GACT,eAAe,EACf,cAAc,OAAO,OAAO,EAAkB,EAU9B,CAEhB,IAAM,EAAS,EAA4B,EAAc,EAAK,CAGxD,EAAmB,MAAM,EAAsB,EAAQ,CAAE,SAAQ,CAAC,CAGxE,MAAM,EAAU,EAAa,KAAO,IAAe,CACjD,EAAO,KACL,EAAO,QACL,0CAA0C,EAAiB,cAAc,EAAW,wBACrF,CACF,CACD,MAAM,EAA6B,EAAQ,CACzC,MAAO,CACL,GAAI,EACJ,aACD,CACD,SACD,CAAC,CACF,EAAO,KACL,EAAO,MACL,2CAA2C,EAAiB,cAAc,EAAW,sBACtF,CACF,EACD,CAGE,GAEF,MAAM,EAAU,EAAa,KAAO,IAAe,CACjD,EAAO,KACL,EAAO,QACL,6CAA6C,EAAiB,cAAc,EAAW,MACxF,CACF,CACD,MAAM,EACJ,EACA,CACE,GAAI,EACJ,aACD,CACD,CAAE,SAAQ,CACX,CACD,EAAO,KACL,EAAO,MACL,4CAA4C,EAAiB,cAAc,EAAW,IACvF,CACF,EACD"}
package/dist/{uploadConsents-BP5XILuw.mjs → uploadConsents-BzmWrNc1.mjs}
@@ -1,2 +1,2 @@
- import{o as e}from"./constants-XOsAW1__.mjs";import{t}from"./logger-Bj782ZYD.mjs";import{decodeCodec as n}from"@transcend-io/type-utils";import r from"colors";import*as i from"io-ts";import{createTranscendConsentGotInstance as a}from"@transcend-io/sdk";import{map as o}from"@transcend-io/utils";import s from"cli-progress";import*as c from"crypto";import*as l from"jsonwebtoken";function u(e,t,n){let r=Buffer.from(n,`base64`),i=Buffer.from(t,`base64`),a=Buffer.from(`A65959A6`,`hex`),o=c.createCipheriv(`id-aes256-wrap-pad`,i,a),s={encryptedIdentifier:Buffer.concat([o.update(e),o.final()]).toString(`base64`)};return l.sign(s,r,{algorithm:`HS384`})}const d=/^[0-9][Y|N]([Y|N])[Y|N]$/,f=i.record(i.string,i.union([i.boolean,i.literal(`Auto`)]));async function p({base64EncryptionKey:i,base64SigningKey:c,preferences:l,partition:p,concurrency:m=100,transcendUrl:h=e}){let g=a(h),_=l.filter(e=>e.usp&&!d.test(e.usp));if(_.length>0)throw Error(`Received invalid usp strings: ${JSON.stringify(_,null,2)}`);let v=l.map((e,t)=>[e,t]).filter(([e])=>{if(!e.purposes)return!1;try{return n(f,e.purposes),!1}catch{return!0}});if(v.length>0)throw Error(`Received invalid purpose maps: ${JSON.stringify(v,null,2)}`);let y=l.filter(e=>!e.usp&&!e.purposes);if(y.length>0)throw Error(`Received invalid inputs, expected either purposes or usp to be defined: ${JSON.stringify(y,null,2)}`);t.info(r.magenta(`Uploading ${l.length} user preferences to partition ${p}`));let b=new Date().getTime(),x=new s.SingleBar({},s.Presets.shades_classic),S=0;x.start(l.length,0),await o(l,async({userId:e,confirmed:a=`true`,updated:o,prompted:s,purposes:l,...m})=>{let h=u(e,i,c),[,_]=m.usp&&d.exec(m.usp)||[],v={token:h,partition:p,consent:{confirmed:a===`true`,purposes:l?n(f,l):m.usp?{SaleOfInfo:_===`Y`}:{},...o?{updated:o===`true`}:{},...s?{prompted:s===`true`}:{},...m}};try{await g.post(`sync`,{json:v}).json()}catch(e){try{let n=JSON.parse(e?.response?.body||`{}`);n.error&&t.error(r.red(`Error: ${n.error}`))}catch{}throw Error(`Received an error from server: ${e?.response?.body||e?.message}`)}S+=1,x.update(S)},{concurrency:m}),x.stop();let C=new Date().getTime()-b;t.info(r.green(`Successfully uploaded ${l.length} user preferences to partition ${p} in "${C/1e3}" seconds!`))}export{u as i,d as n,p as r,f as t};
- //# sourceMappingURL=uploadConsents-BP5XILuw.mjs.map
+ import{o as e}from"./constants-muOBBQA_.mjs";import{t}from"./logger-Bj782ZYD.mjs";import{decodeCodec as n}from"@transcend-io/type-utils";import r from"colors";import*as i from"io-ts";import{createTranscendConsentGotInstance as a}from"@transcend-io/sdk";import{map as o}from"@transcend-io/utils";import s from"cli-progress";import*as c from"crypto";import*as l from"jsonwebtoken";function u(e,t,n){let r=Buffer.from(n,`base64`),i=Buffer.from(t,`base64`),a=Buffer.from(`A65959A6`,`hex`),o=c.createCipheriv(`id-aes256-wrap-pad`,i,a),s={encryptedIdentifier:Buffer.concat([o.update(e),o.final()]).toString(`base64`)};return l.sign(s,r,{algorithm:`HS384`})}const d=/^[0-9][Y|N]([Y|N])[Y|N]$/,f=i.record(i.string,i.union([i.boolean,i.literal(`Auto`)]));async function p({base64EncryptionKey:i,base64SigningKey:c,preferences:l,partition:p,concurrency:m=100,transcendUrl:h=e}){let g=a(h),_=l.filter(e=>e.usp&&!d.test(e.usp));if(_.length>0)throw Error(`Received invalid usp strings: ${JSON.stringify(_,null,2)}`);let v=l.map((e,t)=>[e,t]).filter(([e])=>{if(!e.purposes)return!1;try{return n(f,e.purposes),!1}catch{return!0}});if(v.length>0)throw Error(`Received invalid purpose maps: ${JSON.stringify(v,null,2)}`);let y=l.filter(e=>!e.usp&&!e.purposes);if(y.length>0)throw Error(`Received invalid inputs, expected either purposes or usp to be defined: ${JSON.stringify(y,null,2)}`);t.info(r.magenta(`Uploading ${l.length} user preferences to partition ${p}`));let b=new Date().getTime(),x=new s.SingleBar({},s.Presets.shades_classic),S=0;x.start(l.length,0),await o(l,async({userId:e,confirmed:a=`true`,updated:o,prompted:s,purposes:l,...m})=>{let h=u(e,i,c),[,_]=m.usp&&d.exec(m.usp)||[],v={token:h,partition:p,consent:{confirmed:a===`true`,purposes:l?n(f,l):m.usp?{SaleOfInfo:_===`Y`}:{},...o?{updated:o===`true`}:{},...s?{prompted:s===`true`}:{},...m}};try{await g.post(`sync`,{json:v}).json()}catch(e){try{let n=JSON.parse(e?.response?.body||`{}`);n.error&&t.error(r.red(`Error: ${n.error}`))}catch{}throw Error(`Received an error from server: ${e?.response?.body||e?.message}`)}S+=1,x.update(S)},{concurrency:m}),x.stop();let C=new Date().getTime()-b;t.info(r.green(`Successfully uploaded ${l.length} user preferences to partition ${p} in "${C/1e3}" seconds!`))}export{u as i,d as n,p as r,f as t};
+ //# sourceMappingURL=uploadConsents-BzmWrNc1.mjs.map
package/dist/{uploadConsents-BP5XILuw.mjs.map → uploadConsents-BzmWrNc1.mjs.map}
@@ -1 +1 @@
- {"version":3,"file":"uploadConsents-BP5XILuw.mjs","names":[],"sources":["../src/lib/consent-manager/createConsentToken.ts","../src/lib/consent-manager/uploadConsents.ts"],"sourcesContent":["import * as crypto from 'crypto';\n\nimport * as jwt from 'jsonwebtoken';\n\n/**\n * Function to create a consent manager token\n *\n * @see https://docs.transcend.io/docs/consent/reference/managed-consent-database\n * @param userId - User ID\n * @param base64EncryptionKey - Encryption key\n * @param base64SigningKey - Signing key\n * @returns Token\n */\nexport function createConsentToken(\n userId: string,\n base64EncryptionKey: string,\n base64SigningKey: string,\n): string {\n // Read on for where to find these keys\n const signingKey = Buffer.from(base64SigningKey, 'base64');\n const encryptionKey = Buffer.from(base64EncryptionKey, 'base64');\n\n // NIST's AES-KWP implementation { aes 48 } - see https://tools.ietf.org/html/rfc5649\n const encryptionAlgorithm = 'id-aes256-wrap-pad';\n // Initial Value for AES-KWP integrity check - see https://tools.ietf.org/html/rfc5649#section-3\n const iv = Buffer.from('A65959A6', 'hex');\n // Set up encryption algorithm\n const cipher = crypto.createCipheriv(encryptionAlgorithm, encryptionKey, iv);\n\n // Encrypt the userId and base64-encode the result\n const encryptedIdentifier = Buffer.concat([cipher.update(userId), cipher.final()]).toString(\n 'base64',\n );\n\n // Create the JWT content - jwt.sign will add a 'iat' (issued at) field to the payload\n // If you wanted to add something manually, consider\n // const issued: Date = new Date();\n // const isoDate = issued.toISOString();\n const jwtPayload = {\n encryptedIdentifier,\n };\n\n // Create a JSON web token and HMAC it with SHA-384\n const consentToken = jwt.sign(jwtPayload, signingKey, {\n algorithm: 'HS384',\n });\n\n return consentToken;\n}\n","import { ConsentPreferencesBody } from '@transcend-io/airgap.js-types';\nimport { createTranscendConsentGotInstance } from '@transcend-io/sdk';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { map } from '@transcend-io/utils';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport * as t from 'io-ts';\n\nimport { DEFAULT_TRANSCEND_CONSENT_API } from '../../constants.js';\nimport { logger } from '../../logger.js';\nimport { createConsentToken } from './createConsentToken.js';\nimport type { ConsentPreferenceUpload } from './types.js';\n\nexport const USP_STRING_REGEX = /^[0-9][Y|N]([Y|N])[Y|N]$/;\n\nexport const PurposeMap = t.record(t.string, t.union([t.boolean, t.literal('Auto')]));\n\n/**\n * Upload a set of consent preferences\n *\n * @param options - Options\n */\nexport async function uploadConsents({\n base64EncryptionKey,\n base64SigningKey,\n preferences,\n partition,\n concurrency = 100,\n transcendUrl = DEFAULT_TRANSCEND_CONSENT_API,\n}: {\n /** base64 encryption key */\n base64EncryptionKey: string;\n /** base64 signing key */\n base64SigningKey: string;\n /** Partition key */\n partition: string;\n /** Sombra API key authentication */\n preferences: ConsentPreferenceUpload[];\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Concurrency limit for approving */\n concurrency?: number;\n}): Promise<void> {\n // Create connection to API\n const transcendConsentApi = createTranscendConsentGotInstance(transcendUrl);\n\n // Ensure usp strings are valid\n const invalidUspStrings = preferences.filter(\n (pref) => pref.usp && !USP_STRING_REGEX.test(pref.usp),\n );\n if (invalidUspStrings.length > 
0) {\n throw new Error(`Received invalid usp strings: ${JSON.stringify(invalidUspStrings, null, 2)}`);\n }\n\n // Ensure purpose maps are valid\n const invalidPurposeMaps = preferences\n .map((pref, ind) => [pref, ind] as [ConsentPreferenceUpload, number])\n .filter(([pref]) => {\n if (!pref.purposes) {\n return false;\n }\n try {\n decodeCodec(PurposeMap, pref.purposes);\n return false;\n } catch {\n return true;\n }\n });\n if (invalidPurposeMaps.length > 0) {\n throw new Error(\n `Received invalid purpose maps: ${JSON.stringify(invalidPurposeMaps, null, 2)}`,\n );\n }\n\n // Ensure usp or preferences are provided\n const invalidInputs = preferences.filter((pref) => !pref.usp && !pref.purposes);\n if (invalidInputs.length > 0) {\n throw new Error(\n `Received invalid inputs, expected either purposes or usp to be defined: ${JSON.stringify(\n invalidInputs,\n null,\n 2,\n )}`,\n );\n }\n\n logger.info(\n colors.magenta(`Uploading ${preferences.length} user preferences to partition ${partition}`),\n );\n\n // Time duration\n const t0 = new Date().getTime();\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);\n\n // Build a GraphQL client\n let total = 0;\n progressBar.start(preferences.length, 0);\n await map(\n preferences,\n async ({ userId, confirmed = 'true', updated, prompted, purposes, ...consent }) => {\n const token = createConsentToken(userId, base64EncryptionKey, base64SigningKey);\n\n // parse usp string\n const [, saleStatus] = consent.usp ? USP_STRING_REGEX.exec(consent.usp) || [] : [];\n\n const input = {\n token,\n partition,\n consent: {\n confirmed: confirmed === 'true',\n purposes: purposes\n ? decodeCodec(PurposeMap, purposes)\n : consent.usp\n ? { SaleOfInfo: saleStatus === 'Y' }\n : {},\n ...(updated ? { updated: updated === 'true' } : {}),\n ...(prompted ? 
{ prompted: prompted === 'true' } : {}),\n ...consent,\n },\n } as ConsentPreferencesBody;\n\n // Make the request\n try {\n await transcendConsentApi\n .post('sync', {\n json: input,\n })\n .json();\n } catch (err) {\n try {\n const parsed = JSON.parse(err?.response?.body || '{}');\n if (parsed.error) {\n logger.error(colors.red(`Error: ${parsed.error}`));\n }\n } catch {\n // continue\n }\n throw new Error(`Received an error from server: ${err?.response?.body || err?.message}`);\n }\n\n total += 1;\n progressBar.update(total);\n },\n { concurrency },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully uploaded ${preferences.length} user preferences to partition ${partition} in \"${\n totalTime / 1000\n }\" seconds!`,\n ),\n );\n}\n"],"mappings":"2XAaA,SAAgB,EACd,EACA,EACA,EACQ,CAER,IAAM,EAAa,OAAO,KAAK,EAAkB,SAAS,CACpD,EAAgB,OAAO,KAAK,EAAqB,SAAS,CAK1D,EAAK,OAAO,KAAK,WAAY,MAAM,CAEnC,EAAS,EAAO,eAAe,qBAAqB,EAAe,EAAG,CAWtE,EAAa,CACjB,oBAT0B,OAAO,OAAO,CAAC,EAAO,OAAO,EAAO,CAAE,EAAO,OAAO,CAAC,CAAC,CAAC,SACjF,SACD,CAQA,CAOD,OAJqB,EAAI,KAAK,EAAY,EAAY,CACpD,UAAW,QACZ,CAAC,CChCJ,MAAa,EAAmB,2BAEnB,EAAa,EAAE,OAAO,EAAE,OAAQ,EAAE,MAAM,CAAC,EAAE,QAAS,EAAE,QAAQ,OAAO,CAAC,CAAC,CAAC,CAOrF,eAAsB,EAAe,CACnC,sBACA,mBACA,cACA,YACA,cAAc,IACd,eAAe,GAcC,CAEhB,IAAM,EAAsB,EAAkC,EAAa,CAGrE,EAAoB,EAAY,OACnC,GAAS,EAAK,KAAO,CAAC,EAAiB,KAAK,EAAK,IAAI,CACvD,CACD,GAAI,EAAkB,OAAS,EAC7B,MAAU,MAAM,iCAAiC,KAAK,UAAU,EAAmB,KAAM,EAAE,GAAG,CAIhG,IAAM,EAAqB,EACxB,KAAK,EAAM,IAAQ,CAAC,EAAM,EAAI,CAAsC,CACpE,QAAQ,CAAC,KAAU,CAClB,GAAI,CAAC,EAAK,SACR,MAAO,GAET,GAAI,CAEF,OADA,EAAY,EAAY,EAAK,SAAS,CAC/B,QACD,CACN,MAAO,KAET,CACJ,GAAI,EAAmB,OAAS,EAC9B,MAAU,MACR,kCAAkC,KAAK,UAAU,EAAoB,KAAM,EAAE,GAC9E,CAIH,IAAM,EAAgB,EAAY,OAAQ,GAAS,CAAC,EAAK,KAAO,CAAC,EAAK,SAAS,CAC/E,GAAI,EAAc,OAAS,EACzB,MAAU,MACR,2EAA2E,KAAK,UAC9E,EACA,KACA,EACD,GACF,CAGH,EAAO,KACL,EAAO,QAAQ,aAAa,EAAY,OAAO,iCAAiC,IAAY,CAC7F,CAGD,IAAM,EAAK,IAAI,MAAM,CAAC,SAAS,CAEzB,EAAc,IAAI,EAAY,UAAU,EAAE,CAAE,EAAY,QAAQ,eAAe,CAGjF,EAAQ,EACZ,EAAY,MAAM,EAAY,OAAQ,EAAE,CACxC,MAAM,EACJ,EACA,MAAO,CAAE,SAAQ,YAAY,OAAQ,UAAS,WAAU,WAAU,GAAG,KAAc,CACjF,IAAM,EAAQ,EAAmB,EAAQ,EAAqB,EAAiB,CAGzE,EAAG,GAAc,EAAQ,KAAM,EAAiB,KAAK,EAAQ,IAAI,EAAS,EAAE,CAE5E,EAAQ,CACZ,QACA,YACA,QAAS,CACP,UAAW,IAAc,OACzB,SAAU,EACN,EAAY,EAAY,EAAS,CACjC,EAAQ,IACN,CAAE,WAAY,IAAe,IAAK,CAClC,EAAE,CACR,GAAI,EAAU,CAAE,QAAS,IAAY,OAAQ,CAAG,EAAE,CAClD,GAAI,EAAW,CAAE,SAAU,IAAa,OAAQ,CAAG,EAAE,CACrD,GAAG,EACJ,CACF,CAGD,GAAI,CACF,MAAM,EACH,KAAK,OAAQ,CACZ,KAAM,EACP,CAAC,CACD,MAAM,OACF,EAAK,CACZ,GAAI,CACF,IAAM,EAAS,KAAK,MAAM,GAAK,UAAU,MAAQ,KAAK,CAClD,EAAO,OACT,EAAO,MAAM,EAAO,IAAI,UAAU,EAAO,QAAQ,CAAC,MAE9C,EAGR,MAAU,MAAM,kCAAkC,GAAK,UAAU,MAAQ,GAAK,UAAU,CAG1F,GAAS,EACT,EAAY,OAAO,EAAM,EAE3B,CAAE,cAAa,CAChB,CAED,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EAEvB,EAAO,KACL,EAAO,MACL,yBAAyB,EAAY,OAAO,iCAAiC,EAAU,OACrF,EAAY,IACb,YACF,CACF"}
+ {"version":3,"file":"uploadConsents-BzmWrNc1.mjs","names":[],"sources":["../src/lib/consent-manager/createConsentToken.ts","../src/lib/consent-manager/uploadConsents.ts"],"sourcesContent":["import * as crypto from 'crypto';\n\nimport * as jwt from 'jsonwebtoken';\n\n/**\n * Function to create a consent manager token\n *\n * @see https://docs.transcend.io/docs/consent/reference/managed-consent-database\n * @param userId - User ID\n * @param base64EncryptionKey - Encryption key\n * @param base64SigningKey - Signing key\n * @returns Token\n */\nexport function createConsentToken(\n userId: string,\n base64EncryptionKey: string,\n base64SigningKey: string,\n): string {\n // Read on for where to find these keys\n const signingKey = Buffer.from(base64SigningKey, 'base64');\n const encryptionKey = Buffer.from(base64EncryptionKey, 'base64');\n\n // NIST's AES-KWP implementation { aes 48 } - see https://tools.ietf.org/html/rfc5649\n const encryptionAlgorithm = 'id-aes256-wrap-pad';\n // Initial Value for AES-KWP integrity check - see https://tools.ietf.org/html/rfc5649#section-3\n const iv = Buffer.from('A65959A6', 'hex');\n // Set up encryption algorithm\n const cipher = crypto.createCipheriv(encryptionAlgorithm, encryptionKey, iv);\n\n // Encrypt the userId and base64-encode the result\n const encryptedIdentifier = Buffer.concat([cipher.update(userId), cipher.final()]).toString(\n 'base64',\n );\n\n // Create the JWT content - jwt.sign will add a 'iat' (issued at) field to the payload\n // If you wanted to add something manually, consider\n // const issued: Date = new Date();\n // const isoDate = issued.toISOString();\n const jwtPayload = {\n encryptedIdentifier,\n };\n\n // Create a JSON web token and HMAC it with SHA-384\n const consentToken = jwt.sign(jwtPayload, signingKey, {\n algorithm: 'HS384',\n });\n\n return consentToken;\n}\n","import { ConsentPreferencesBody } from '@transcend-io/airgap.js-types';\nimport { createTranscendConsentGotInstance } from '@transcend-io/sdk';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { map } from '@transcend-io/utils';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport * as t from 'io-ts';\n\nimport { DEFAULT_TRANSCEND_CONSENT_API } from '../../constants.js';\nimport { logger } from '../../logger.js';\nimport { createConsentToken } from './createConsentToken.js';\nimport type { ConsentPreferenceUpload } from './types.js';\n\nexport const USP_STRING_REGEX = /^[0-9][Y|N]([Y|N])[Y|N]$/;\n\nexport const PurposeMap = t.record(t.string, t.union([t.boolean, t.literal('Auto')]));\n\n/**\n * Upload a set of consent preferences\n *\n * @param options - Options\n */\nexport async function uploadConsents({\n base64EncryptionKey,\n base64SigningKey,\n preferences,\n partition,\n concurrency = 100,\n transcendUrl = DEFAULT_TRANSCEND_CONSENT_API,\n}: {\n /** base64 encryption key */\n base64EncryptionKey: string;\n /** base64 signing key */\n base64SigningKey: string;\n /** Partition key */\n partition: string;\n /** Sombra API key authentication */\n preferences: ConsentPreferenceUpload[];\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Concurrency limit for approving */\n concurrency?: number;\n}): Promise<void> {\n // Create connection to API\n const transcendConsentApi = createTranscendConsentGotInstance(transcendUrl);\n\n // Ensure usp strings are valid\n const invalidUspStrings = preferences.filter(\n (pref) => pref.usp && !USP_STRING_REGEX.test(pref.usp),\n );\n if (invalidUspStrings.length > 
0) {\n throw new Error(`Received invalid usp strings: ${JSON.stringify(invalidUspStrings, null, 2)}`);\n }\n\n // Ensure purpose maps are valid\n const invalidPurposeMaps = preferences\n .map((pref, ind) => [pref, ind] as [ConsentPreferenceUpload, number])\n .filter(([pref]) => {\n if (!pref.purposes) {\n return false;\n }\n try {\n decodeCodec(PurposeMap, pref.purposes);\n return false;\n } catch {\n return true;\n }\n });\n if (invalidPurposeMaps.length > 0) {\n throw new Error(\n `Received invalid purpose maps: ${JSON.stringify(invalidPurposeMaps, null, 2)}`,\n );\n }\n\n // Ensure usp or preferences are provided\n const invalidInputs = preferences.filter((pref) => !pref.usp && !pref.purposes);\n if (invalidInputs.length > 0) {\n throw new Error(\n `Received invalid inputs, expected either purposes or usp to be defined: ${JSON.stringify(\n invalidInputs,\n null,\n 2,\n )}`,\n );\n }\n\n logger.info(\n colors.magenta(`Uploading ${preferences.length} user preferences to partition ${partition}`),\n );\n\n // Time duration\n const t0 = new Date().getTime();\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);\n\n // Build a GraphQL client\n let total = 0;\n progressBar.start(preferences.length, 0);\n await map(\n preferences,\n async ({ userId, confirmed = 'true', updated, prompted, purposes, ...consent }) => {\n const token = createConsentToken(userId, base64EncryptionKey, base64SigningKey);\n\n // parse usp string\n const [, saleStatus] = consent.usp ? USP_STRING_REGEX.exec(consent.usp) || [] : [];\n\n const input = {\n token,\n partition,\n consent: {\n confirmed: confirmed === 'true',\n purposes: purposes\n ? decodeCodec(PurposeMap, purposes)\n : consent.usp\n ? { SaleOfInfo: saleStatus === 'Y' }\n : {},\n ...(updated ? { updated: updated === 'true' } : {}),\n ...(prompted ? 
{ prompted: prompted === 'true' } : {}),\n ...consent,\n },\n } as ConsentPreferencesBody;\n\n // Make the request\n try {\n await transcendConsentApi\n .post('sync', {\n json: input,\n })\n .json();\n } catch (err) {\n try {\n const parsed = JSON.parse(err?.response?.body || '{}');\n if (parsed.error) {\n logger.error(colors.red(`Error: ${parsed.error}`));\n }\n } catch {\n // continue\n }\n throw new Error(`Received an error from server: ${err?.response?.body || err?.message}`);\n }\n\n total += 1;\n progressBar.update(total);\n },\n { concurrency },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully uploaded ${preferences.length} user preferences to partition ${partition} in \"${\n totalTime / 1000\n }\" seconds!`,\n ),\n );\n}\n"],"mappings":"2XAaA,SAAgB,EACd,EACA,EACA,EACQ,CAER,IAAM,EAAa,OAAO,KAAK,EAAkB,SAAS,CACpD,EAAgB,OAAO,KAAK,EAAqB,SAAS,CAK1D,EAAK,OAAO,KAAK,WAAY,MAAM,CAEnC,EAAS,EAAO,eAAe,qBAAqB,EAAe,EAAG,CAWtE,EAAa,CACjB,oBAT0B,OAAO,OAAO,CAAC,EAAO,OAAO,EAAO,CAAE,EAAO,OAAO,CAAC,CAAC,CAAC,SACjF,SACD,CAQA,CAOD,OAJqB,EAAI,KAAK,EAAY,EAAY,CACpD,UAAW,QACZ,CAAC,CChCJ,MAAa,EAAmB,2BAEnB,EAAa,EAAE,OAAO,EAAE,OAAQ,EAAE,MAAM,CAAC,EAAE,QAAS,EAAE,QAAQ,OAAO,CAAC,CAAC,CAAC,CAOrF,eAAsB,EAAe,CACnC,sBACA,mBACA,cACA,YACA,cAAc,IACd,eAAe,GAcC,CAEhB,IAAM,EAAsB,EAAkC,EAAa,CAGrE,EAAoB,EAAY,OACnC,GAAS,EAAK,KAAO,CAAC,EAAiB,KAAK,EAAK,IAAI,CACvD,CACD,GAAI,EAAkB,OAAS,EAC7B,MAAU,MAAM,iCAAiC,KAAK,UAAU,EAAmB,KAAM,EAAE,GAAG,CAIhG,IAAM,EAAqB,EACxB,KAAK,EAAM,IAAQ,CAAC,EAAM,EAAI,CAAsC,CACpE,QAAQ,CAAC,KAAU,CAClB,GAAI,CAAC,EAAK,SACR,MAAO,GAET,GAAI,CAEF,OADA,EAAY,EAAY,EAAK,SAAS,CAC/B,QACD,CACN,MAAO,KAET,CACJ,GAAI,EAAmB,OAAS,EAC9B,MAAU,MACR,kCAAkC,KAAK,UAAU,EAAoB,KAAM,EAAE,GAC9E,CAIH,IAAM,EAAgB,EAAY,OAAQ,GAAS,CAAC,EAAK,KAAO,CAAC,EAAK,SAAS,CAC/E,GAAI,EAAc,OAAS,EACzB,MAAU,MACR,2EAA2E,KAAK,UAC9E,EACA,KACA,EACD,GACF,CAGH,EAAO,KACL,EAAO,QAAQ,aAAa,EAAY,OAAO,iCAAiC,IAAY,CAC7F,CAGD,IAAM,EAAK,IAAI,MAAM,CAAC,SAAS,CAEzB,EAAc,IAAI,EAAY,UAAU,EAAE,CAAE,EAAY,QAAQ,eAAe,CAGjF,EAAQ,EACZ,EAAY,MAAM,EAAY,OAAQ,EAAE,CACxC,MAAM,EACJ,EACA,MAAO,CAAE,SAAQ,YAAY,OAAQ,UAAS,WAAU,WAAU,GAAG,KAAc,CACjF,IAAM,EAAQ,EAAmB,EAAQ,EAAqB,EAAiB,CAGzE,EAAG,GAAc,EAAQ,KAAM,EAAiB,KAAK,EAAQ,IAAI,EAAS,EAAE,CAE5E,EAAQ,CACZ,QACA,YACA,QAAS,CACP,UAAW,IAAc,OACzB,SAAU,EACN,EAAY,EAAY,EAAS,CACjC,EAAQ,IACN,CAAE,WAAY,IAAe,IAAK,CAClC,EAAE,CACR,GAAI,EAAU,CAAE,QAAS,IAAY,OAAQ,CAAG,EAAE,CAClD,GAAI,EAAW,CAAE,SAAU,IAAa,OAAQ,CAAG,EAAE,CACrD,GAAG,EACJ,CACF,CAGD,GAAI,CACF,MAAM,EACH,KAAK,OAAQ,CACZ,KAAM,EACP,CAAC,CACD,MAAM,OACF,EAAK,CACZ,GAAI,CACF,IAAM,EAAS,KAAK,MAAM,GAAK,UAAU,MAAQ,KAAK,CAClD,EAAO,OACT,EAAO,MAAM,EAAO,IAAI,UAAU,EAAO,QAAQ,CAAC,MAE9C,EAGR,MAAU,MAAM,kCAAkC,GAAK,UAAU,MAAQ,GAAK,UAAU,CAG1F,GAAS,EACT,EAAY,OAAO,EAAM,EAE3B,CAAE,cAAa,CAChB,CAED,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EAEvB,EAAO,KACL,EAAO,MACL,yBAAyB,EAAY,OAAO,iCAAiC,EAAU,OACrF,EAAY,IACb,YACF,CACF"}
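For orientation, the embedded source above defines the call shape of the new uploadConsents helper (row validation against USP_STRING_REGEX and PurposeMap, then a concurrent POST to the consent "sync" endpoint). A minimal sketch of invoking it follows; the import path, environment variable names, partition, and example rows are placeholders assumed for illustration, not values taken from this diff.

// Sketch only — module path, env vars, partition, and rows are assumptions.
import { uploadConsents } from './lib/consent-manager/uploadConsents.js';

await uploadConsents({
  base64EncryptionKey: process.env.CONSENT_ENCRYPTION_KEY ?? '',
  base64SigningKey: process.env.CONSENT_SIGNING_KEY ?? '',
  partition: 'example-partition',
  // concurrency defaults to 100 and transcendUrl to DEFAULT_TRANSCEND_CONSENT_API (see source above)
  preferences: [
    // each row must provide either a purposes map or a usp string (validated above)
    { userId: 'user-123', confirmed: 'true', purposes: { SaleOfInfo: false } },
    { userId: 'user-456', usp: '1YNN' },
  ],
});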
@@ -1,2 +1,2 @@
- import{a as e}from"./constants-XOsAW1__.mjs";import{t}from"./logger-Bj782ZYD.mjs";import{A as n}from"./codecs-CeDPaLYa.mjs";import{t as r}from"./readCsv-0PIlJQCN.mjs";import i from"colors";import{buildTranscendGraphQLClient as a,syncCookies as o}from"@transcend-io/sdk";import{splitCsvToList as s}from"@transcend-io/utils";const c=[`ID`,`Activity`,`Encounters`,`Last Seen At`,`Has Native Do Not Sell/Share Support`,`IAB USP API Support`,`Service Description`,`Website URL`,`Categories of Recipients`];async function l({auth:l,trackerStatus:u,file:d,transcendUrl:f=e}){let p=a(f,l);t.info(i.magenta(`Reading "${d}" from disk`)),await o(p,r(d,n).map(({"Is Regex?":e,Notes:t,Service:n,Purpose:r,Status:i,Owners:a,Teams:o,Name:l,...d})=>({...typeof e==`string`?{isRegex:e.toLowerCase()===`true`}:{},name:l,description:t,trackingPurposes:s(r),status:i||u,owners:a?s(a):void 0,teams:o?s(o):void 0,attributes:Object.entries(d).filter(([e])=>!c.includes(e)).map(([e,t])=>({key:e,values:s(t)}))})),{logger:t})||(t.error(i.red(`Encountered error(s) syncing cookies from CSV, see logs above for more info. `)),process.exit(1))}export{l as t};
- //# sourceMappingURL=uploadCookiesFromCsv-B42cZgYW.mjs.map
+ import{a as e}from"./constants-muOBBQA_.mjs";import{t}from"./logger-Bj782ZYD.mjs";import{A as n}from"./codecs-CeDPaLYa.mjs";import{t as r}from"./readCsv-C4TyEs-r.mjs";import i from"colors";import{buildTranscendGraphQLClient as a,syncCookies as o}from"@transcend-io/sdk";import{splitCsvToList as s}from"@transcend-io/utils";const c=[`ID`,`Activity`,`Encounters`,`Last Seen At`,`Has Native Do Not Sell/Share Support`,`IAB USP API Support`,`Service Description`,`Website URL`,`Categories of Recipients`];async function l({auth:l,trackerStatus:u,file:d,transcendUrl:f=e}){let p=a(f,l);t.info(i.magenta(`Reading "${d}" from disk`)),await o(p,r(d,n).map(({"Is Regex?":e,Notes:t,Service:n,Purpose:r,Status:i,Owners:a,Teams:o,Name:l,...d})=>({...typeof e==`string`?{isRegex:e.toLowerCase()===`true`}:{},name:l,description:t,trackingPurposes:s(r),status:i||u,owners:a?s(a):void 0,teams:o?s(o):void 0,attributes:Object.entries(d).filter(([e])=>!c.includes(e)).map(([e,t])=>({key:e,values:s(t)}))})),{logger:t})||(t.error(i.red(`Encountered error(s) syncing cookies from CSV, see logs above for more info. `)),process.exit(1))}export{l as t};
+ //# sourceMappingURL=uploadCookiesFromCsv-TH10UBgw.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"uploadCookiesFromCsv-B42cZgYW.mjs","names":[],"sources":["../src/lib/consent-manager/uploadCookiesFromCsv.ts"],"sourcesContent":["import { ConsentTrackerStatus } from '@transcend-io/privacy-types';\nimport { buildTranscendGraphQLClient, syncCookies } from '@transcend-io/sdk';\nimport { splitCsvToList } from '@transcend-io/utils';\nimport colors from 'colors';\n\nimport { CookieInput, CookieCsvInput } from '../../codecs.js';\nimport { DEFAULT_TRANSCEND_API } from '../../constants.js';\nimport { logger } from '../../logger.js';\nimport { readCsv } from '../requests/readCsv.js';\n\nconst OMIT_COLUMNS = [\n 'ID',\n 'Activity',\n 'Encounters',\n 'Last Seen At',\n 'Has Native Do Not Sell/Share Support',\n 'IAB USP API Support',\n 'Service Description',\n 'Website URL',\n 'Categories of Recipients',\n];\n\n/**\n * Upload a set of cookies from CSV\n *\n * @param options - Options\n */\nexport async function uploadCookiesFromCsv({\n auth,\n trackerStatus,\n file,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** CSV file path */\n file: string;\n /** Transcend API key authentication */\n auth: string;\n /** Sombra API key authentication */\n trackerStatus: ConsentTrackerStatus;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n}): Promise<void> {\n // Build a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Read from CSV the set of cookie inputs\n logger.info(colors.magenta(`Reading \"${file}\" from disk`));\n const cookieInputs = readCsv(file, CookieCsvInput);\n\n // Convert these inputs into a format that the other function can use\n const validatedCookieInputs = cookieInputs.map(\n ({\n 'Is Regex?': isRegex,\n Notes,\n // TODO: https://transcend.height.app/T-26391 - export in CSV\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n Service,\n Purpose,\n Status,\n Owners,\n Teams,\n Name,\n ...rest\n }): CookieInput => ({\n ...(typeof isRegex === 'string' ? { isRegex: isRegex.toLowerCase() === 'true' } : {}),\n name: Name,\n description: Notes,\n trackingPurposes: splitCsvToList(Purpose),\n // TODO: https://transcend.height.app/T-26391\n // service: Service,\n // Apply the trackerStatus to all values in the CSV -> allows for customer to define tracker status\n // on a row by row basis if needed\n status: Status || trackerStatus,\n owners: Owners ? splitCsvToList(Owners) : undefined,\n teams: Teams ? splitCsvToList(Teams) : undefined,\n // all remaining options are attribute\n attributes: Object.entries(rest)\n // filter out native columns that are exported from the admin dashboard\n // but not custom attributes\n .filter(([key]) => !OMIT_COLUMNS.includes(key))\n .map(([key, value]) => ({\n key,\n values: splitCsvToList(value),\n })),\n }),\n );\n\n // Upload the cookies into Transcend dashboard\n const syncedCookies = await syncCookies(client, validatedCookieInputs, { logger });\n\n // Log errors\n if (!syncedCookies) {\n logger.error(\n colors.red('Encountered error(s) syncing cookies from CSV, see logs above for more info. 
'),\n );\n process.exit(1);\n }\n}\n"],"mappings":"mUAUA,MAAM,EAAe,CACnB,KACA,WACA,aACA,eACA,uCACA,sBACA,sBACA,cACA,2BACD,CAOD,eAAsB,EAAqB,CACzC,OACA,gBACA,OACA,eAAe,GAUC,CAEhB,IAAM,EAAS,EAA4B,EAAc,EAAK,CAG9D,EAAO,KAAK,EAAO,QAAQ,YAAY,EAAK,aAAa,CAAC,CA0CpC,MAAM,EAAY,EAzCnB,EAAQ,EAAM,EAAe,CAGP,KACxC,CACC,YAAa,EACb,QAGA,UACA,UACA,SACA,SACA,QACA,OACA,GAAG,MACe,CAClB,GAAI,OAAO,GAAY,SAAW,CAAE,QAAS,EAAQ,aAAa,GAAK,OAAQ,CAAG,EAAE,CACpF,KAAM,EACN,YAAa,EACb,iBAAkB,EAAe,EAAQ,CAKzC,OAAQ,GAAU,EAClB,OAAQ,EAAS,EAAe,EAAO,CAAG,IAAA,GAC1C,MAAO,EAAQ,EAAe,EAAM,CAAG,IAAA,GAEvC,WAAY,OAAO,QAAQ,EAAK,CAG7B,QAAQ,CAAC,KAAS,CAAC,EAAa,SAAS,EAAI,CAAC,CAC9C,KAAK,CAAC,EAAK,MAAY,CACtB,MACA,OAAQ,EAAe,EAAM,CAC9B,EAAE,CACN,EACF,CAGsE,CAAE,SAAQ,CAAC,GAIhF,EAAO,MACL,EAAO,IAAI,gFAAgF,CAC5F,CACD,QAAQ,KAAK,EAAE"}
+ {"version":3,"file":"uploadCookiesFromCsv-TH10UBgw.mjs","names":[],"sources":["../src/lib/consent-manager/uploadCookiesFromCsv.ts"],"sourcesContent":["import { ConsentTrackerStatus } from '@transcend-io/privacy-types';\nimport { buildTranscendGraphQLClient, syncCookies } from '@transcend-io/sdk';\nimport { splitCsvToList } from '@transcend-io/utils';\nimport colors from 'colors';\n\nimport { CookieInput, CookieCsvInput } from '../../codecs.js';\nimport { DEFAULT_TRANSCEND_API } from '../../constants.js';\nimport { logger } from '../../logger.js';\nimport { readCsv } from '../requests/readCsv.js';\n\nconst OMIT_COLUMNS = [\n 'ID',\n 'Activity',\n 'Encounters',\n 'Last Seen At',\n 'Has Native Do Not Sell/Share Support',\n 'IAB USP API Support',\n 'Service Description',\n 'Website URL',\n 'Categories of Recipients',\n];\n\n/**\n * Upload a set of cookies from CSV\n *\n * @param options - Options\n */\nexport async function uploadCookiesFromCsv({\n auth,\n trackerStatus,\n file,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** CSV file path */\n file: string;\n /** Transcend API key authentication */\n auth: string;\n /** Sombra API key authentication */\n trackerStatus: ConsentTrackerStatus;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n}): Promise<void> {\n // Build a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Read from CSV the set of cookie inputs\n logger.info(colors.magenta(`Reading \"${file}\" from disk`));\n const cookieInputs = readCsv(file, CookieCsvInput);\n\n // Convert these inputs into a format that the other function can use\n const validatedCookieInputs = cookieInputs.map(\n ({\n 'Is Regex?': isRegex,\n Notes,\n // TODO: https://transcend.height.app/T-26391 - export in CSV\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n Service,\n Purpose,\n Status,\n Owners,\n Teams,\n Name,\n ...rest\n }): CookieInput => ({\n ...(typeof isRegex === 'string' ? { isRegex: isRegex.toLowerCase() === 'true' } : {}),\n name: Name,\n description: Notes,\n trackingPurposes: splitCsvToList(Purpose),\n // TODO: https://transcend.height.app/T-26391\n // service: Service,\n // Apply the trackerStatus to all values in the CSV -> allows for customer to define tracker status\n // on a row by row basis if needed\n status: Status || trackerStatus,\n owners: Owners ? splitCsvToList(Owners) : undefined,\n teams: Teams ? splitCsvToList(Teams) : undefined,\n // all remaining options are attribute\n attributes: Object.entries(rest)\n // filter out native columns that are exported from the admin dashboard\n // but not custom attributes\n .filter(([key]) => !OMIT_COLUMNS.includes(key))\n .map(([key, value]) => ({\n key,\n values: splitCsvToList(value),\n })),\n }),\n );\n\n // Upload the cookies into Transcend dashboard\n const syncedCookies = await syncCookies(client, validatedCookieInputs, { logger });\n\n // Log errors\n if (!syncedCookies) {\n logger.error(\n colors.red('Encountered error(s) syncing cookies from CSV, see logs above for more info. 
'),\n );\n process.exit(1);\n }\n}\n"],"mappings":"mUAUA,MAAM,EAAe,CACnB,KACA,WACA,aACA,eACA,uCACA,sBACA,sBACA,cACA,2BACD,CAOD,eAAsB,EAAqB,CACzC,OACA,gBACA,OACA,eAAe,GAUC,CAEhB,IAAM,EAAS,EAA4B,EAAc,EAAK,CAG9D,EAAO,KAAK,EAAO,QAAQ,YAAY,EAAK,aAAa,CAAC,CA0CpC,MAAM,EAAY,EAzCnB,EAAQ,EAAM,EAAe,CAGP,KACxC,CACC,YAAa,EACb,QAGA,UACA,UACA,SACA,SACA,QACA,OACA,GAAG,MACe,CAClB,GAAI,OAAO,GAAY,SAAW,CAAE,QAAS,EAAQ,aAAa,GAAK,OAAQ,CAAG,EAAE,CACpF,KAAM,EACN,YAAa,EACb,iBAAkB,EAAe,EAAQ,CAKzC,OAAQ,GAAU,EAClB,OAAQ,EAAS,EAAe,EAAO,CAAG,IAAA,GAC1C,MAAO,EAAQ,EAAe,EAAM,CAAG,IAAA,GAEvC,WAAY,OAAO,QAAQ,EAAK,CAG7B,QAAQ,CAAC,KAAS,CAAC,EAAa,SAAS,EAAI,CAAC,CAC9C,KAAK,CAAC,EAAK,MAAY,CACtB,MACA,OAAQ,EAAe,EAAM,CAC9B,EAAE,CACN,EACF,CAGsE,CAAE,SAAQ,CAAC,GAIhF,EAAO,MACL,EAAO,IAAI,gFAAgF,CAC5F,CACD,QAAQ,KAAK,EAAE"}
@@ -1,2 +1,2 @@
- import{a as e}from"./constants-XOsAW1__.mjs";import{t}from"./logger-Bj782ZYD.mjs";import{I as n}from"./codecs-CeDPaLYa.mjs";import{t as r}from"./readCsv-0PIlJQCN.mjs";import i from"colors";import{buildTranscendGraphQLClient as a,syncDataFlows as o}from"@transcend-io/sdk";import{splitCsvToList as s}from"@transcend-io/utils";const c=[`ID`,`Activity`,`Encounters`,`Last Seen At`,`Has Native Do Not Sell/Share Support`,`IAB USP API Support`,`Service Description`,`Website URL`,`Categories of Recipients`];async function l({auth:l,trackerStatus:u,file:d,classifyService:f=!1,transcendUrl:p=e}){let m=a(p,l);t.info(i.magenta(`Reading "${d}" from disk`)),await o(m,r(d,n).map(({Type:e,Notes:t,Service:n,Purpose:r,Status:i,Owners:a,Teams:o,"Connections Made To":l,...d})=>({value:l,type:e,description:t,trackingPurposes:s(r),status:i||u,owners:a?s(a):void 0,teams:o?s(o):void 0,attributes:Object.entries(d).filter(([e])=>!c.includes(e)).map(([e,t])=>({key:e,values:s(t)}))})),f,{logger:t})||(t.error(i.red(`Encountered error(s) syncing data flows from CSV, see logs above for more info. `)),process.exit(1))}export{l as t};
- //# sourceMappingURL=uploadDataFlowsFromCsv-D2V567pP.mjs.map
+ import{a as e}from"./constants-muOBBQA_.mjs";import{t}from"./logger-Bj782ZYD.mjs";import{I as n}from"./codecs-CeDPaLYa.mjs";import{t as r}from"./readCsv-C4TyEs-r.mjs";import i from"colors";import{buildTranscendGraphQLClient as a,syncDataFlows as o}from"@transcend-io/sdk";import{splitCsvToList as s}from"@transcend-io/utils";const c=[`ID`,`Activity`,`Encounters`,`Last Seen At`,`Has Native Do Not Sell/Share Support`,`IAB USP API Support`,`Service Description`,`Website URL`,`Categories of Recipients`];async function l({auth:l,trackerStatus:u,file:d,classifyService:f=!1,transcendUrl:p=e}){let m=a(p,l);t.info(i.magenta(`Reading "${d}" from disk`)),await o(m,r(d,n).map(({Type:e,Notes:t,Service:n,Purpose:r,Status:i,Owners:a,Teams:o,"Connections Made To":l,...d})=>({value:l,type:e,description:t,trackingPurposes:s(r),status:i||u,owners:a?s(a):void 0,teams:o?s(o):void 0,attributes:Object.entries(d).filter(([e])=>!c.includes(e)).map(([e,t])=>({key:e,values:s(t)}))})),{classifyService:f,logger:t})||(t.error(i.red(`Encountered error(s) syncing data flows from CSV, see logs above for more info. `)),process.exit(1))}export{l as t};
+ //# sourceMappingURL=uploadDataFlowsFromCsv-DUSFCae9.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"uploadDataFlowsFromCsv-D2V567pP.mjs","names":[],"sources":["../src/lib/consent-manager/uploadDataFlowsFromCsv.ts"],"sourcesContent":["import { ConsentTrackerStatus } from '@transcend-io/privacy-types';\nimport { buildTranscendGraphQLClient, syncDataFlows } from '@transcend-io/sdk';\nimport { splitCsvToList } from '@transcend-io/utils';\nimport colors from 'colors';\n\nimport { DataFlowInput, DataFlowCsvInput } from '../../codecs.js';\nimport { DEFAULT_TRANSCEND_API } from '../../constants.js';\nimport { logger } from '../../logger.js';\nimport { readCsv } from '../requests/readCsv.js';\n\nconst OMIT_COLUMNS = [\n 'ID',\n 'Activity',\n 'Encounters',\n 'Last Seen At',\n 'Has Native Do Not Sell/Share Support',\n 'IAB USP API Support',\n 'Service Description',\n 'Website URL',\n 'Categories of Recipients',\n];\n\n/**\n * Upload a set of data flows from CSV\n *\n * @param options - Options\n */\nexport async function uploadDataFlowsFromCsv({\n auth,\n trackerStatus,\n file,\n classifyService = false,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** CSV file path */\n file: string;\n /** Transcend API key authentication */\n auth: string;\n /** Sombra API key authentication */\n trackerStatus: ConsentTrackerStatus;\n /** classify data flow service if missing */\n classifyService?: boolean;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n}): Promise<void> {\n // Build a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Read from CSV the set of data flow inputs\n logger.info(colors.magenta(`Reading \"${file}\" from disk`));\n const dataFlowInputs = readCsv(file, DataFlowCsvInput);\n\n // Convert these data flow inputs into a format that the other function can use\n const validatedDataFlowInputs = dataFlowInputs.map(\n ({\n Type,\n Notes,\n // TODO: https://transcend.height.app/T-26391 - export in CSV\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n Service,\n Purpose,\n Status,\n Owners,\n Teams,\n 'Connections Made To': value,\n ...rest\n }): DataFlowInput => ({\n value,\n type: Type,\n description: Notes,\n trackingPurposes: splitCsvToList(Purpose),\n // TODO: https://transcend.height.app/T-26391\n // service: Service,\n // Apply the trackerStatus to all values in the CSV -> allows for customer to define tracker status\n // on a row by row basis if needed\n status: Status || trackerStatus,\n owners: Owners ? splitCsvToList(Owners) : undefined,\n teams: Teams ? splitCsvToList(Teams) : undefined,\n // all remaining options are attribute\n attributes: Object.entries(rest)\n // filter out native columns that are exported from the admin dashboard\n // but not custom attributes\n .filter(([key]) => !OMIT_COLUMNS.includes(key))\n .map(([key, value]) => ({\n key,\n values: splitCsvToList(value),\n })),\n }),\n );\n\n // Upload the data flows into Transcend dashboard\n const syncedDataFlows = await syncDataFlows(client, validatedDataFlowInputs, classifyService, {\n logger,\n });\n\n // Log errors\n if (!syncedDataFlows) {\n logger.error(\n colors.red(\n 'Encountered error(s) syncing data flows from CSV, see logs above for more info. 
',\n ),\n );\n process.exit(1);\n }\n}\n"],"mappings":"qUAUA,MAAM,EAAe,CACnB,KACA,WACA,aACA,eACA,uCACA,sBACA,sBACA,cACA,2BACD,CAOD,eAAsB,EAAuB,CAC3C,OACA,gBACA,OACA,kBAAkB,GAClB,eAAe,GAYC,CAEhB,IAAM,EAAS,EAA4B,EAAc,EAAK,CAG9D,EAAO,KAAK,EAAO,QAAQ,YAAY,EAAK,aAAa,CAAC,CA0ClC,MAAM,EAAc,EAzCrB,EAAQ,EAAM,EAAiB,CAGP,KAC5C,CACC,OACA,QAGA,UACA,UACA,SACA,SACA,QACA,sBAAuB,EACvB,GAAG,MACiB,CACpB,QACA,KAAM,EACN,YAAa,EACb,iBAAkB,EAAe,EAAQ,CAKzC,OAAQ,GAAU,EAClB,OAAQ,EAAS,EAAe,EAAO,CAAG,IAAA,GAC1C,MAAO,EAAQ,EAAe,EAAM,CAAG,IAAA,GAEvC,WAAY,OAAO,QAAQ,EAAK,CAG7B,QAAQ,CAAC,KAAS,CAAC,EAAa,SAAS,EAAI,CAAC,CAC9C,KAAK,CAAC,EAAK,MAAY,CACtB,MACA,OAAQ,EAAe,EAAM,CAC9B,EAAE,CACN,EACF,CAG4E,EAAiB,CAC5F,SACD,CAAC,GAIA,EAAO,MACL,EAAO,IACL,mFACD,CACF,CACD,QAAQ,KAAK,EAAE"}
+ {"version":3,"file":"uploadDataFlowsFromCsv-DUSFCae9.mjs","names":[],"sources":["../src/lib/consent-manager/uploadDataFlowsFromCsv.ts"],"sourcesContent":["import { ConsentTrackerStatus } from '@transcend-io/privacy-types';\nimport { buildTranscendGraphQLClient, syncDataFlows } from '@transcend-io/sdk';\nimport { splitCsvToList } from '@transcend-io/utils';\nimport colors from 'colors';\n\nimport { DataFlowInput, DataFlowCsvInput } from '../../codecs.js';\nimport { DEFAULT_TRANSCEND_API } from '../../constants.js';\nimport { logger } from '../../logger.js';\nimport { readCsv } from '../requests/readCsv.js';\n\nconst OMIT_COLUMNS = [\n 'ID',\n 'Activity',\n 'Encounters',\n 'Last Seen At',\n 'Has Native Do Not Sell/Share Support',\n 'IAB USP API Support',\n 'Service Description',\n 'Website URL',\n 'Categories of Recipients',\n];\n\n/**\n * Upload a set of data flows from CSV\n *\n * @param options - Options\n */\nexport async function uploadDataFlowsFromCsv({\n auth,\n trackerStatus,\n file,\n classifyService = false,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** CSV file path */\n file: string;\n /** Transcend API key authentication */\n auth: string;\n /** Sombra API key authentication */\n trackerStatus: ConsentTrackerStatus;\n /** classify data flow service if missing */\n classifyService?: boolean;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n}): Promise<void> {\n // Build a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Read from CSV the set of data flow inputs\n logger.info(colors.magenta(`Reading \"${file}\" from disk`));\n const dataFlowInputs = readCsv(file, DataFlowCsvInput);\n\n // Convert these data flow inputs into a format that the other function can use\n const validatedDataFlowInputs = dataFlowInputs.map(\n ({\n Type,\n Notes,\n // TODO: https://transcend.height.app/T-26391 - export in CSV\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n Service,\n Purpose,\n Status,\n Owners,\n Teams,\n 'Connections Made To': value,\n ...rest\n }): DataFlowInput => ({\n value,\n type: Type,\n description: Notes,\n trackingPurposes: splitCsvToList(Purpose),\n // TODO: https://transcend.height.app/T-26391\n // service: Service,\n // Apply the trackerStatus to all values in the CSV -> allows for customer to define tracker status\n // on a row by row basis if needed\n status: Status || trackerStatus,\n owners: Owners ? splitCsvToList(Owners) : undefined,\n teams: Teams ? splitCsvToList(Teams) : undefined,\n // all remaining options are attribute\n attributes: Object.entries(rest)\n // filter out native columns that are exported from the admin dashboard\n // but not custom attributes\n .filter(([key]) => !OMIT_COLUMNS.includes(key))\n .map(([key, value]) => ({\n key,\n values: splitCsvToList(value),\n })),\n }),\n );\n\n // Upload the data flows into Transcend dashboard\n const syncedDataFlows = await syncDataFlows(client, validatedDataFlowInputs, {\n classifyService,\n logger,\n });\n\n // Log errors\n if (!syncedDataFlows) {\n logger.error(\n colors.red(\n 'Encountered error(s) syncing data flows from CSV, see logs above for more info. 
',\n ),\n );\n process.exit(1);\n }\n}\n"],"mappings":"qUAUA,MAAM,EAAe,CACnB,KACA,WACA,aACA,eACA,uCACA,sBACA,sBACA,cACA,2BACD,CAOD,eAAsB,EAAuB,CAC3C,OACA,gBACA,OACA,kBAAkB,GAClB,eAAe,GAYC,CAEhB,IAAM,EAAS,EAA4B,EAAc,EAAK,CAG9D,EAAO,KAAK,EAAO,QAAQ,YAAY,EAAK,aAAa,CAAC,CA0ClC,MAAM,EAAc,EAzCrB,EAAQ,EAAM,EAAiB,CAGP,KAC5C,CACC,OACA,QAGA,UACA,UACA,SACA,SACA,QACA,sBAAuB,EACvB,GAAG,MACiB,CACpB,QACA,KAAM,EACN,YAAa,EACb,iBAAkB,EAAe,EAAQ,CAKzC,OAAQ,GAAU,EAClB,OAAQ,EAAS,EAAe,EAAO,CAAG,IAAA,GAC1C,MAAO,EAAQ,EAAe,EAAM,CAAG,IAAA,GAEvC,WAAY,OAAO,QAAQ,EAAK,CAG7B,QAAQ,CAAC,KAAS,CAAC,EAAa,SAAS,EAAI,CAAC,CAC9C,KAAK,CAAC,EAAK,MAAY,CACtB,MACA,OAAQ,EAAe,EAAM,CAC9B,EAAE,CACN,EACF,CAG4E,CAC3E,kBACA,SACD,CAAC,GAIA,EAAO,MACL,EAAO,IACL,mFACD,CACF,CACD,QAAQ,KAAK,EAAE"}
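Beyond the chunk-hash renames, the substantive change visible in the two uploadDataFlowsFromCsv hunks above is the syncDataFlows call: classifyService moves from a positional argument into the options object. A minimal sketch of the new call shape follows; the client URL, API key variable, logger path, and the empty inputs array are placeholders assumed for illustration.

// Sketch only — URL, credentials, and inputs are placeholders.
import { buildTranscendGraphQLClient, syncDataFlows } from '@transcend-io/sdk';
import { logger } from '../../logger.js'; // path as used in the embedded source; adjust for your project

const client = buildTranscendGraphQLClient(
  'https://api.transcend.io',          // placeholder standing in for DEFAULT_TRANSCEND_API
  process.env.TRANSCEND_API_KEY ?? '',
);

// 10.1.0 (removed source): syncDataFlows(client, validatedDataFlowInputs, classifyService, { logger })
// 10.2.0 (added source):   syncDataFlows(client, validatedDataFlowInputs, { classifyService, logger })
await syncDataFlows(client, [], { classifyService: false, logger });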
@@ -0,0 +1,2 @@
+ import{a as e}from"./constants-muOBBQA_.mjs";import{t}from"./logger-Bj782ZYD.mjs";import{a as n,c as r,i,l as a,o,r as s,s as c,t as l}from"./constants-ClkQQhJs.mjs";import{i as u,n as d,r as f,t as p}from"./parseAttributesFromString-D1Yl0xwT.mjs";import{t as m}from"./readCsv-C4TyEs-r.mjs";import{r as h,t as g}from"./extractClientError-i-Tw_az7.mjs";import{CompletedRequestStatus as _,IdentifierType as v,IsoCountryCode as y,IsoCountrySubdivisionCode as b,NORMALIZE_PHONE_NUMBER as x,RequestAction as S}from"@transcend-io/privacy-types";import{startCase as C,uniq as w}from"lodash-es";import{apply as T,getEntries as E,getValues as D,valuesOf as O}from"@transcend-io/type-utils";import{join as ee}from"node:path";import k from"colors";import{LOCALE_KEY as A}from"@transcend-io/internationalization";import*as j from"io-ts";import{DATA_SUBJECTS as M,INITIALIZER as N,buildTranscendGraphQLClient as te,createSombraGotInstance as ne,fetchAllRequestAttributeKeys as re,makeGraphQLRequest as P}from"@transcend-io/sdk";import{map as F,splitCsvToList as I}from"@transcend-io/utils";import L from"cli-progress";import R from"inquirer";import z from"inquirer-autocomplete-prompt";import{DateFromISOString as B}from"io-ts-types";import{PersistedState as V}from"@transcend-io/persisted-state";async function H(e,t,n){R.registerPrompt(`autocomplete`,z);let r=e.map(e=>e||`<blank>`).filter(e=>!n[e]);if(r.length===0)return n;let i=await R.prompt(r.map(e=>({name:e,message:`Map value of: ${e}`,type:`autocomplete`,default:t.find(t=>u(e,t)),source:(e,n)=>n?t.filter(e=>typeof e==`string`&&u(n,e)):t})));return{...n,...T(i,e=>typeof e==`string`?e:Object.values(e)[0])}}function U(e,t){return w(e.map(e=>e[t]||``).flat())}async function W(e){let n=w(e.map(e=>Object.keys(e)).flat()),r=e,i=!0;for(;i;){let{filterColumnName:e}=await R.prompt([{name:`filterColumnName`,message:`If you need to filter the list of requests to import, choose the column to filter on. 
Currently ${r.length} rows.`,type:`list`,default:n,choices:[a,...n]}]);if(i=a!==e,i){let t=U(r,e),{valuesToKeep:i}=await R.prompt([{name:`valuesToKeep`,message:`Keep rows matching this value`,type:`checkbox`,default:n,choices:t}]);r=r.filter(t=>i.includes(t[e]))}}return t.info(k.magenta(`Importing ${r.length} requests`)),r}async function G(e,t){let n=D(o).filter(e=>!t.getValue(`columnNames`,e)),i=n.length===0?{}:await R.prompt(n.map(t=>{let n=C(t.replace(`ColumnName`,``)),i=f(e,n,r[t],!!s[t]);return{name:t,message:`Choose the column that will be used to map in the field: ${n}`,type:`list`,default:i[0],choices:i}}));return await Promise.all(E(i).map(([e,n])=>t.setValue(n,`columnNames`,e))),i}async function K(e,n,{state:r,columnNameMap:i}){let s=e=>r.getValue(`columnNames`,e)||i[e],{internalSubjects:c}=await P(e,M,{logger:t});t.info(k.magenta(`Determining mapping of columns for request action`));let l=await H(U(n,s(o.RequestType)),Object.values(S),r.getValue(`requestTypeToRequestAction`));await r.setValue(l,`requestTypeToRequestAction`),t.info(k.magenta(`Determining mapping of columns for subject`));let u=await H(U(n,s(o.SubjectType)),c.map(({type:e})=>e),r.getValue(`subjectTypeToSubjectName`));await r.setValue(u,`subjectTypeToSubjectName`),t.info(k.magenta(`Determining mapping of columns for locale`));let d=await H(U(n,s(o.Locale)),Object.values(A),r.getValue(`languageToLocale`));await r.setValue(d,`languageToLocale`),t.info(k.magenta(`Determining mapping of columns for request status`)),t.info(k.magenta(`Determining mapping of columns for request status`));let f=s(o.RequestStatus),p=f===`[NONE]`?{}:await H(U(n,f),[...Object.values(_),a],r.getValue(`statusToRequestStatus`));await r.setValue(p,`statusToRequestStatus`),t.info(k.magenta(`Determining mapping of columns for country`));let m=s(o.Country),h=m===`[NONE]`?{}:await H(U(n,m),[...Object.values(y),a],r.getValue(`regionToCountry`));await r.setValue(h,`regionToCountry`),t.info(k.magenta(`Determining mapping of columns for country sub division`));let g=s(o.CountrySubDivision),v=g===`[NONE]`?{}:await H(U(n,g),[...Object.values(b),a],r.getValue(`regionToCountrySubDivision`));await r.setValue(v,`regionToCountrySubDivision`)}const q=j.record(j.string,j.array(j.intersection([j.type({value:j.string}),j.partial({name:j.string})]))),J=j.intersection([j.type({email:j.string,attestedExtraIdentifiers:q,coreIdentifier:j.string,requestType:O(S),subjectType:j.string}),j.partial({country:O(y),countrySubDivision:O(b),attributes:j.array(p),status:O(_),createdAt:B,dataSiloIds:j.array(j.string),locale:O(A)})]);function Y(e,t,n){if(t===v.Email)return e.toLowerCase();if(t===v.Phone){let t=e.replace(x,``).replace(/[()]/g,``).replace(/[–]/g,``).replace(/[:]/g,``).replace(/[‭‬]/g,``).replace(/[A-Za-z]/g,``);return t?t.startsWith(`+`)?t:`+${n}${t}`:``}return e}function X(e,t,{columnNameMap:n,identifierNameMap:r,attributeNameMap:i,requestAttributeKeys:s,defaultPhoneCountryCode:c=`1`}){let u=e=>t.getValue(`columnNames`,e)||n[e];return e.map(e=>{let n={};Object.entries(r).filter(([,e])=>e!==a).forEach(([t,r])=>{let i=Object.values(v).includes(t)?t:v.Custom,a=e[r];if(a){let e=Y(a,i,c);e&&(n[i]||(n[i]=[]),n[i].push({value:e,name:t}))}});let d=[];Object.entries(i).filter(([,e])=>e!==a).forEach(([t,n])=>{let r=e[n];if(r){let e=s.find(e=>e.name===t)?.type===`MULTI_SELECT`;d.push({values:e?I(r):r,key:t})}});let 
f=u(o.RequestType),p=u(o.SubjectType);return[e,{email:e[u(o.Email)],attestedExtraIdentifiers:n,attributes:d,coreIdentifier:e[u(o.CoreIdentifier)],requestType:f===`[APPLY VALUE TO ALL ROWS]`?t.getValue(`requestTypeToRequestAction`,l):t.getValue(`requestTypeToRequestAction`,e[f]),subjectType:p===`[APPLY VALUE TO ALL ROWS]`?t.getValue(`subjectTypeToSubjectName`,l):t.getValue(`subjectTypeToSubjectName`,e[p]),...u(o.Locale)!==`[NONE]`&&e[u(o.Locale)]?{locale:t.getValue(`languageToLocale`,e[u(o.Locale)])}:{},...u(o.Country)!==`[NONE]`&&e[u(o.Country)]?{country:t.getValue(`regionToCountry`,e[u(o.Country)])}:{},...u(o.CountrySubDivision)!==`[NONE]`&&e[u(o.CountrySubDivision)]?{countrySubDivision:t.getValue(`regionToCountrySubDivision`,e[u(o.CountrySubDivision)])}:{},...u(o.RequestStatus)!==`[NONE]`&&t.getValue(`statusToRequestStatus`,e[u(o.RequestStatus)])!==`[NONE]`&&e[u(o.RequestStatus)]?{status:t.getValue(`statusToRequestStatus`,e[u(o.RequestStatus)])}:{},...u(o.CreatedAt)!==`[NONE]`&&e[u(o.CreatedAt)]?{createdAt:new Date(e[u(o.CreatedAt)])}:{},...u(o.DataSiloIds)!==`[NONE]`&&e[u(o.DataSiloIds)]?{dataSiloIds:I(e[u(o.DataSiloIds)])}:{}}]})}async function Z(e,n,r){let{initializer:i}=await P(e,N,{logger:t}),a=i.identifiers.filter(({name:e})=>!r.getValue(`identifierNames`,e)&&!c.includes(e)),o=a.length===0?{}:await R.prompt(a.map(({name:e})=>{let t=f(n,e,!1);return{name:e,message:`Choose the column that will be used to map in the identifier: ${e}`,type:`list`,default:t[0],choices:t}}));return await Promise.all(Object.entries(o).map(([e,t])=>r.setValue(t,`identifierNames`,e))),{...r.getValue(`identifierNames`),...o}}async function Q(e,t,n,r){let i=r.filter(({name:e})=>!n.getValue(`attributeNames`,e)),a=i.length===0?{}:await R.prompt(i.map(({name:e})=>{let n=f(t,e,!1);return{name:e,message:`Choose the column that will be used to map in the attribute: ${e}`,type:`list`,default:n[0],choices:n}}));return await Promise.all(Object.entries(a).map(([e,t])=>n.setValue(t,`attributeNames`,e))),{...n.getValue(`attributeNames`),...a}}async function $({cacheFilepath:r,requestReceiptFolder:a,file:o,auth:s,sombraAuth:c,concurrency:l=100,defaultPhoneCountryCode:u=`1`,transcendUrl:f=e,attributes:p=[],emailIsVerified:_=!0,skipFilterStep:v=!1,skipSendingReceipt:y=!0,isTest:b=!1,isSilent:x=!0,debug:S=!1,dryRun:C=!1}){let T=new Date().getTime(),E=new L.SingleBar({},L.Presets.shades_classic),D=d(p),O=new V(r,i,{columnNames:{},requestTypeToRequestAction:{},subjectTypeToSubjectName:{},languageToLocale:{},statusToRequestStatus:{},identifierNames:{},attributeNames:{},regionToCountrySubDivision:{},regionToCountry:{}}),A=ee(a,`tr-request-upload-${new Date().toISOString()}-${o.split(`/`).pop()}`.replace(`.csv`,`.json`)),M=new V(A,n,{successfulRequests:[],duplicateRequests:[],failingRequests:[]}),N=await ne(f,s,{logger:t,sombraApiKey:c,sombraUrl:process.env.SOMBRA_URL}),P=m(o,j.record(j.string,j.string)),I=w(P.map(e=>Object.keys(e)).flat());if(P.length===0)throw Error(`No Requests found in list! 
Ensure the first row of the CSV is a header and the rest are requests.`);if(S){let e=P[0];t.info(k.magenta(`First request: ${JSON.stringify(e,null,2)}`))}let R=v?P:await W(P),z=te(f,s),B=await re(z,{logger:t}),H=await G(I,O),U=await Z(z,I,O),q=await Q(z,I,O,B);await K(z,R,{state:O,columnNameMap:H});let J=X(R,O,{defaultPhoneCountryCode:u,columnNameMap:H,identifierNameMap:U,attributeNameMap:q,requestAttributeKeys:B});S||E.start(J.length,0);let Y=0;await F(J,async([e,n],r)=>{let i=S?`email:${n.email} | coreIdentifier:${n.coreIdentifier}`:`row:${r.toString()}`;if(S&&t.info(k.magenta(`[${r+1}/${J.length}] Importing: ${JSON.stringify(n,null,2)}`)),C){t.info(k.magenta(`Bailing out on dry run because dryRun is set`));return}try{let a=await h(N,n,{details:`Uploaded by Transcend Cli: "tr-request-upload" : ${JSON.stringify(e,null,2)}`,isTest:b,emailIsVerified:_,skipSendingReceipt:y,isSilent:x,additionalAttributes:D});S&&(t.info(k.green(`[${r+1}/${J.length}] Successfully submitted the test data subject request: "${i}"`)),t.info(k.green(`[${r+1}/${J.length}] View it at: "${a.link}"`)));let o=M.getValue(`successfulRequests`);o.push({id:a.id,link:a.link,rowIndex:r,coreIdentifier:a.coreIdentifier,attemptedAt:new Date().toISOString()}),await M.setValue(o,`successfulRequests`)}catch(e){let a=`${e.message} - ${JSON.stringify(e.response?.body,null,2)}`,o=g(a);if(o===`Client error: You have already made this request.`){S&&t.info(k.yellow(`[${r+1}/${J.length}] Skipping request as it is a duplicate`));let e=M.getValue(`duplicateRequests`);e.push({coreIdentifier:n.coreIdentifier,rowIndex:r,attemptedAt:new Date().toISOString()}),await M.setValue(e,`duplicateRequests`)}else{let e=M.getValue(`failingRequests`);e.push({...n,rowIndex:r,error:o||a,attemptedAt:new Date().toISOString()}),await M.setValue(e,`failingRequests`),S&&(t.error(k.red(o||a)),t.error(k.red(`[${r+1}/${J.length}] Failed to submit request for: "${i}"`)))}}Y+=1,S||E.update(Y)},{concurrency:l}),E.stop();let $=new Date().getTime()-T;t.info(k.green(`Completed upload in "${$/1e3}" seconds.`)),M.getValue(`duplicateRequests`).length>0&&t.info(k.yellow(`Encountered "${M.getValue(`duplicateRequests`).length}" duplicate requests. See "${A}" to review the core identifiers for these requests.`)),M.getValue(`failingRequests`).length>0&&(t.error(k.red(`Encountered "${M.getValue(`failingRequests`).length}" errors. See "${A}" to review the error messages and inputs.`)),process.exit(1))}export{J as a,K as c,U as d,H as f,q as i,G as l,Q as n,X as o,Z as r,Y as s,$ as t,W as u};
+ //# sourceMappingURL=uploadPrivacyRequestsFromCsv-sKSFfE6q.mjs.map
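The bundle above is a newly added chunk for uploading privacy requests from CSV; its typed signature and defaults appear in the source map that follows. A minimal sketch of calling it is shown below, where the import path, file paths, and credentials are placeholders assumed for illustration, not values taken from this diff.

// Sketch only — import path and all values below are placeholders.
import { uploadPrivacyRequestsFromCsv } from './lib/requests/uploadPrivacyRequestsFromCsv.js';

await uploadPrivacyRequestsFromCsv({
  file: './requests.csv',                          // CSV with a header row, one request per row
  cacheFilepath: './transcend-upload-cache.json',  // persists column/enum mappings between runs
  requestReceiptFolder: './receipts',              // per-run receipt JSON is written here
  auth: process.env.TRANSCEND_API_KEY ?? '',
  // Defaults per the bundle above: concurrency 100, defaultPhoneCountryCode '1',
  // emailIsVerified true, skipSendingReceipt true, isSilent true, isTest/debug/dryRun false.
  dryRun: true,                                    // bail out before submitting, per the dryRun branch above
});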
@@ -0,0 +1 @@
+ {"version":3,"file":"uploadPrivacyRequestsFromCsv-sKSFfE6q.mjs","names":[],"sources":["../src/lib/requests/mapEnumValues.ts","../src/lib/requests/getUniqueValuesForColumn.ts","../src/lib/requests/filterRows.ts","../src/lib/requests/mapCsvColumnsToApi.ts","../src/lib/requests/mapRequestEnumValues.ts","../src/lib/requests/mapCsvRowsToRequestInputs.ts","../src/lib/requests/mapColumnsToIdentifiers.ts","../src/lib/requests/mapColumnsToAttributes.ts","../src/lib/requests/uploadPrivacyRequestsFromCsv.ts"],"sourcesContent":["import { apply, ObjByString } from '@transcend-io/type-utils';\nimport inquirer from 'inquirer';\nimport autoCompletePrompt from 'inquirer-autocomplete-prompt';\n\nimport { fuzzySearch } from './fuzzyMatchColumns.js';\n\n/**\n * Map a set of inputs to a set of outputs\n *\n * @param csvInputs - Input list\n * @param expectedOutputs - Output list\n * @param cache - Cache\n * @returns Mapping from row to enum value\n */\nexport async function mapEnumValues<TValue extends string>(\n csvInputs: string[],\n expectedOutputs: TValue[],\n cache: { [k in string]: TValue },\n): Promise<{ [k in string]: TValue }> {\n inquirer.registerPrompt('autocomplete', autoCompletePrompt);\n\n const inputs = csvInputs.map((item) => item || '<blank>').filter((value) => !cache[value]);\n if (inputs.length === 0) {\n return cache;\n }\n const result = await inquirer.prompt<{ [k in string]: TValue }>(\n inputs.map((value) => ({\n name: value,\n message: `Map value of: ${value}`,\n type: 'autocomplete',\n default: expectedOutputs.find((x) => fuzzySearch(value, x)),\n source: (answersSoFar: ObjByString, input: string) =>\n !input\n ? expectedOutputs\n : expectedOutputs.filter((x) => typeof x === 'string' && fuzzySearch(input, x)),\n })),\n );\n return {\n ...cache,\n ...apply(result, (r) =>\n typeof r === 'string' ? (r as TValue) : (Object.values(r)[0] as TValue),\n ),\n };\n}\n","import { ObjByString } from '@transcend-io/type-utils';\nimport { uniq } from 'lodash-es';\n\n/**\n * Return the unique set of values for a column in a CSV\n *\n * @param rows - Rows to look up\n * @param columnName - Name of column to grab values for\n * @returns Unique set of values in that column\n */\nexport function getUniqueValuesForColumn(rows: ObjByString[], columnName: string): string[] {\n return uniq(rows.map((row) => row[columnName] || '').flat());\n}\n","import { ObjByString } from '@transcend-io/type-utils';\nimport colors from 'colors';\nimport inquirer from 'inquirer';\nimport { uniq } from 'lodash-es';\n\nimport { logger } from '../../logger.js';\nimport { NONE } from './constants.js';\nimport { getUniqueValuesForColumn } from './getUniqueValuesForColumn.js';\n\n/**\n * Filter a list of CSV rows by column values\n * Choose columns that contain metadata to filter the requests\n *\n * @param rows - Rows to filter\n * @returns Filtered rows\n */\nexport async function filterRows(rows: ObjByString[]): Promise<ObjByString[]> {\n // Determine set of column names\n const columnNames = uniq(rows.map((x) => Object.keys(x)).flat());\n\n // update these variables recursively\n let filteredRows = rows;\n let keepFiltering = true;\n\n // loop over\n while (keepFiltering) {\n // Prompt user for column to filter on\n\n const { filterColumnName } = await inquirer.prompt<{\n /** Name of column to filter on */\n filterColumnName: string;\n }>([\n {\n name: 'filterColumnName',\n // eslint-disable-next-line max-len\n message: `If you need to filter the list of requests to import, choose the column to filter on. 
Currently ${filteredRows.length} rows.`,\n type: 'list',\n default: columnNames,\n choices: [NONE, ...columnNames],\n },\n ]);\n\n // Determine if filtering should continue, or loop should be exited\n keepFiltering = NONE !== filterColumnName;\n if (keepFiltering) {\n const options = getUniqueValuesForColumn(filteredRows, filterColumnName);\n\n const { valuesToKeep } = await inquirer.prompt<{\n /** Values to keep */\n valuesToKeep: string[];\n }>([\n {\n name: 'valuesToKeep',\n message: 'Keep rows matching this value',\n type: 'checkbox',\n default: columnNames,\n choices: options,\n },\n ]);\n\n filteredRows = filteredRows.filter((request) =>\n valuesToKeep.includes(request[filterColumnName]),\n );\n }\n }\n\n logger.info(colors.magenta(`Importing ${filteredRows.length} requests`));\n return filteredRows;\n}\n","import type { PersistedState } from '@transcend-io/persisted-state';\nimport { getValues, getEntries } from '@transcend-io/type-utils';\nimport inquirer from 'inquirer';\nimport { startCase } from 'lodash-es';\n\nimport { ColumnName, CachedFileState, IS_REQUIRED, CAN_APPLY_IN_BULK } from './constants.js';\nimport { fuzzyMatchColumns } from './fuzzyMatchColumns.js';\n\n/**\n * Mapping from column name to request input parameter\n */\nexport type ColumnNameMap = {\n [k in ColumnName]?: string;\n};\n\n/**\n * Determine the mapping between columns in CSV\n *\n * @param columnNames - The set of column names\n * @param state - The cached file state used to map DSR inputs\n * @returns The column name mapping\n */\nexport async function mapCsvColumnsToApi(\n columnNames: string[],\n state: PersistedState<typeof CachedFileState>,\n): Promise<ColumnNameMap> {\n // Determine the columns that should be mapped\n const columnQuestions = getValues(ColumnName).filter(\n (name) => !state.getValue('columnNames', name),\n );\n\n // Skip mapping when everything is mapped\n const columnNameMap =\n columnQuestions.length === 0\n ? 
{}\n : // prompt questions to map columns\n await inquirer.prompt<{\n [k in ColumnName]?: string;\n }>(\n columnQuestions.map((name) => {\n const field = startCase(name.replace('ColumnName', ''));\n const matches = fuzzyMatchColumns(\n columnNames,\n field,\n IS_REQUIRED[name],\n !!CAN_APPLY_IN_BULK[name],\n );\n return {\n name,\n message: `Choose the column that will be used to map in the field: ${field}`,\n type: 'list',\n default: matches[0],\n choices: matches,\n };\n }),\n );\n\n await Promise.all(getEntries(columnNameMap).map(([k, v]) => state.setValue(v, 'columnNames', k)));\n return columnNameMap;\n}\n","import { LOCALE_KEY, type LocaleValue } from '@transcend-io/internationalization';\nimport type { PersistedState } from '@transcend-io/persisted-state';\nimport {\n CompletedRequestStatus,\n RequestAction,\n IsoCountryCode,\n IsoCountrySubdivisionCode,\n} from '@transcend-io/privacy-types';\nimport { makeGraphQLRequest, DATA_SUBJECTS, type DataSubject } from '@transcend-io/sdk';\nimport { ObjByString } from '@transcend-io/type-utils';\nimport colors from 'colors';\nimport { GraphQLClient } from 'graphql-request';\n\nimport { logger } from '../../logger.js';\nimport { CachedFileState, NONE, ColumnName } from './constants.js';\nimport { getUniqueValuesForColumn } from './getUniqueValuesForColumn.js';\nimport { ColumnNameMap } from './mapCsvColumnsToApi.js';\nimport { mapEnumValues } from './mapEnumValues.js';\n\n/**\n * Map the values in a CSV to the enum values in Transcend\n *\n * @param client - GraphQL client\n * @param requests - Set of privacy requests\n * @param options - Options\n */\nexport async function mapRequestEnumValues(\n client: GraphQLClient,\n requests: ObjByString[],\n {\n state,\n columnNameMap,\n }: {\n /** State value to write cache to */\n state: PersistedState<typeof CachedFileState>;\n /** Mapping of column names */\n columnNameMap: ColumnNameMap;\n },\n): Promise<void> {\n // Get mapped value\n const getMappedName = (attribute: ColumnName): string =>\n state.getValue('columnNames', attribute) || columnNameMap[attribute]!;\n\n // Fetch all data subjects in the organization\n const { internalSubjects } = await makeGraphQLRequest<{\n /** Query response */\n internalSubjects: DataSubject[];\n }>(client, DATA_SUBJECTS, { logger });\n\n // Map RequestAction\n logger.info(colors.magenta('Determining mapping of columns for request action'));\n const requestTypeToRequestAction: { [k in string]: RequestAction } = await mapEnumValues(\n getUniqueValuesForColumn(requests, getMappedName(ColumnName.RequestType)),\n Object.values(RequestAction),\n state.getValue('requestTypeToRequestAction'),\n );\n await state.setValue(requestTypeToRequestAction, 'requestTypeToRequestAction');\n\n // Map data subject type\n logger.info(colors.magenta('Determining mapping of columns for subject'));\n const subjectTypeToSubjectName: { [k in string]: string } = await mapEnumValues(\n getUniqueValuesForColumn(requests, getMappedName(ColumnName.SubjectType)),\n internalSubjects.map(({ type }) => type),\n state.getValue('subjectTypeToSubjectName'),\n );\n await state.setValue(subjectTypeToSubjectName, 'subjectTypeToSubjectName');\n\n // Map locale\n logger.info(colors.magenta('Determining mapping of columns for locale'));\n const languageToLocale: { [k in string]: LocaleValue } = await mapEnumValues(\n getUniqueValuesForColumn(requests, getMappedName(ColumnName.Locale)),\n Object.values(LOCALE_KEY),\n state.getValue('languageToLocale'),\n );\n await state.setValue(languageToLocale, 
'languageToLocale');\n logger.info(colors.magenta('Determining mapping of columns for request status'));\n\n // Map request status\n logger.info(colors.magenta('Determining mapping of columns for request status'));\n const requestStatusColumn = getMappedName(ColumnName.RequestStatus);\n const statusToRequestStatus: {\n [k in string]: CompletedRequestStatus | typeof NONE;\n } =\n requestStatusColumn === NONE\n ? {}\n : await mapEnumValues(\n getUniqueValuesForColumn(requests, requestStatusColumn),\n [...Object.values(CompletedRequestStatus), NONE],\n state.getValue('statusToRequestStatus'),\n );\n await state.setValue(statusToRequestStatus, 'statusToRequestStatus');\n\n // Map country\n logger.info(colors.magenta('Determining mapping of columns for country'));\n const countryColumn = getMappedName(ColumnName.Country);\n const regionToCountry: {\n [k in string]: IsoCountryCode | typeof NONE;\n } =\n countryColumn === NONE\n ? {}\n : await mapEnumValues(\n getUniqueValuesForColumn(requests, countryColumn),\n [...Object.values(IsoCountryCode), NONE],\n state.getValue('regionToCountry'),\n );\n await state.setValue(regionToCountry, 'regionToCountry');\n\n // Map country sub division\n logger.info(colors.magenta('Determining mapping of columns for country sub division'));\n const countrySubDivisionColumn = getMappedName(ColumnName.CountrySubDivision);\n const regionToCountrySubDivision: {\n [k in string]: IsoCountrySubdivisionCode | typeof NONE;\n } =\n countrySubDivisionColumn === NONE\n ? {}\n : await mapEnumValues(\n getUniqueValuesForColumn(requests, countrySubDivisionColumn),\n [...Object.values(IsoCountrySubdivisionCode), NONE],\n state.getValue('regionToCountrySubDivision'),\n );\n await state.setValue(regionToCountrySubDivision, 'regionToCountrySubDivision');\n}\n","import { LOCALE_KEY } from '@transcend-io/internationalization';\nimport type { PersistedState } from '@transcend-io/persisted-state';\nimport {\n NORMALIZE_PHONE_NUMBER,\n CompletedRequestStatus,\n RequestAction,\n IdentifierType,\n IsoCountryCode,\n IsoCountrySubdivisionCode,\n} from '@transcend-io/privacy-types';\nimport type { AttributeKey } from '@transcend-io/sdk';\nimport { ObjByString, valuesOf } from '@transcend-io/type-utils';\nimport { splitCsvToList } from '@transcend-io/utils';\nimport * as t from 'io-ts';\nimport { DateFromISOString } from 'io-ts-types';\n\nimport { CachedFileState, BLANK, BULK_APPLY, ColumnName, NONE } from './constants.js';\nimport { AttributeNameMap } from './mapColumnsToAttributes.js';\nimport { IdentifierNameMap } from './mapColumnsToIdentifiers.js';\nimport { ColumnNameMap } from './mapCsvColumnsToApi.js';\nimport { ParsedAttributeInput } from './parseAttributesFromString.js';\n\n/**\n * Shape of additional identifiers\n *\n * key of object is IdentifierType\n */\nexport const AttestedExtraIdentifiers = t.record(\n t.string,\n t.array(\n t.intersection([\n t.type({\n /** Value of identifier */\n value: t.string,\n }),\n t.partial({\n /** Name of identifier - option for non-custom identifier types */\n name: t.string,\n }),\n ]),\n ),\n);\n\n/** Type override */\nexport type AttestedExtraIdentifiers = t.TypeOf<typeof AttestedExtraIdentifiers>;\n\nexport const PrivacyRequestInput = t.intersection([\n t.type({\n /** Email of user */\n email: t.string,\n /** Extra identifiers */\n attestedExtraIdentifiers: AttestedExtraIdentifiers,\n /** Core identifier for user */\n coreIdentifier: t.string,\n /** Action type being submitted */\n requestType: valuesOf(RequestAction),\n /** Type of data 
subject */\n subjectType: t.string,\n }),\n t.partial({\n /** Country */\n country: valuesOf(IsoCountryCode),\n /** Country sub division */\n countrySubDivision: valuesOf(IsoCountrySubdivisionCode),\n /** Attribute inputs */\n attributes: t.array(ParsedAttributeInput),\n /** The status that the request should be created as */\n status: valuesOf(CompletedRequestStatus),\n /** The time that the request was created */\n createdAt: DateFromISOString,\n /** Data silo IDs to submit for */\n dataSiloIds: t.array(t.string),\n /** Language key to map to */\n locale: valuesOf(LOCALE_KEY),\n }),\n]);\n\n/** Type override */\nexport type PrivacyRequestInput = t.TypeOf<typeof PrivacyRequestInput>;\n\n/**\n * Transform the identifier value based on type\n *\n * @param identifierValue - Value of identifier\n * @param identifierType - Type of identifier\n * @param defaultPhoneCountryCode - Default country code for phone numbers\n * @returns Post-processed identifier\n */\nexport function normalizeIdentifierValue(\n identifierValue: string,\n identifierType: IdentifierType,\n defaultPhoneCountryCode: string,\n): string {\n // Lowercase email\n if (identifierType === IdentifierType.Email) {\n return identifierValue.toLowerCase();\n }\n\n // Normalize phone number\n if (identifierType === IdentifierType.Phone) {\n const normalized = identifierValue\n .replace(NORMALIZE_PHONE_NUMBER, '')\n .replace(/[()]/g, '')\n .replace(/[–]/g, '')\n .replace(/[:]/g, '')\n .replace(/[‭‬]/g, '')\n .replace(/[A-Za-z]/g, '');\n return !normalized\n ? ''\n : normalized.startsWith('+')\n ? normalized\n : `+${defaultPhoneCountryCode}${normalized}`;\n }\n return identifierValue;\n}\n\n/**\n * Take the raw rows in a CSV upload, and map those rows to the request\n * input shape that can be passed to the Transcend API to submit a privacy\n * request.\n *\n * @param requestInputs - CSV of requests to be uploaded\n * @param state - The cached set of mapping values\n * @param options - Options\n * @returns [raw input, request input] list\n */\nexport function mapCsvRowsToRequestInputs(\n requestInputs: ObjByString[],\n state: PersistedState<typeof CachedFileState>,\n {\n columnNameMap,\n identifierNameMap,\n attributeNameMap,\n requestAttributeKeys,\n defaultPhoneCountryCode = '1', // US\n }: {\n /** Default country code */\n defaultPhoneCountryCode?: string;\n /** Mapping of column names */\n columnNameMap: ColumnNameMap;\n /** Mapping of identifier names */\n identifierNameMap: IdentifierNameMap;\n /** Mapping of attribute names */\n attributeNameMap: AttributeNameMap;\n /** Request attribute keys */\n requestAttributeKeys: AttributeKey[];\n },\n): [Record<string, string>, PrivacyRequestInput][] {\n // map the CSV to request input\n const getMappedName = (attribute: ColumnName): string =>\n state.getValue('columnNames', attribute) || columnNameMap[attribute]!;\n return requestInputs.map((input): [Record<string, string>, PrivacyRequestInput] => {\n // The extra identifiers to upload for this request\n const attestedExtraIdentifiers: AttestedExtraIdentifiers = {};\n Object.entries(identifierNameMap)\n // filter out skipped identifiers\n .filter(([, columnName]) => columnName !== NONE)\n .forEach(([identifierName, columnName]) => {\n // Determine the identifier type being specified\n const identifierType = Object.values(IdentifierType).includes(\n identifierName as any, // eslint-disable-line @typescript-eslint/no-explicit-any\n )\n ? 
(identifierName as IdentifierType)\n : IdentifierType.Custom;\n\n // Only add the identifier if the value exists\n const identifierValue = input[columnName];\n if (identifierValue) {\n const normalized = normalizeIdentifierValue(\n identifierValue,\n identifierType,\n defaultPhoneCountryCode,\n );\n if (normalized) {\n // Initialize\n if (!attestedExtraIdentifiers[identifierType]) {\n attestedExtraIdentifiers[identifierType] = [];\n }\n\n // Add the identifier\n attestedExtraIdentifiers[identifierType]!.push({\n value: normalized,\n name: identifierName,\n });\n }\n }\n });\n\n // The extra attributes to upload for this request\n const attributes: ParsedAttributeInput[] = [];\n Object.entries(attributeNameMap)\n // filter out skipped attributes\n .filter(([, columnName]) => columnName !== NONE)\n .forEach(([attributeName, columnName]) => {\n // Only add the identifier if the value exists\n const attributeValueString = input[columnName];\n if (attributeValueString) {\n // Add the attribute\n const isMulti =\n requestAttributeKeys.find((attr) => attr.name === attributeName)?.type ===\n 'MULTI_SELECT';\n attributes.push({\n values: isMulti ? splitCsvToList(attributeValueString) : attributeValueString,\n key: attributeName,\n });\n }\n });\n\n const requestTypeColumn = getMappedName(ColumnName.RequestType);\n const dataSubjectTypeColumn = getMappedName(ColumnName.SubjectType);\n return [\n input,\n {\n email: input[getMappedName(ColumnName.Email)],\n attestedExtraIdentifiers,\n attributes,\n coreIdentifier: input[getMappedName(ColumnName.CoreIdentifier)],\n requestType:\n requestTypeColumn === BULK_APPLY\n ? state.getValue('requestTypeToRequestAction', BLANK)\n : state.getValue('requestTypeToRequestAction', input[requestTypeColumn]),\n subjectType:\n dataSubjectTypeColumn === BULK_APPLY\n ? state.getValue('subjectTypeToSubjectName', BLANK)\n : state.getValue('subjectTypeToSubjectName', input[dataSubjectTypeColumn]),\n ...(getMappedName(ColumnName.Locale) !== NONE && input[getMappedName(ColumnName.Locale)]\n ? {\n locale: state.getValue('languageToLocale', input[getMappedName(ColumnName.Locale)]),\n }\n : {}),\n ...(getMappedName(ColumnName.Country) !== NONE && input[getMappedName(ColumnName.Country)]\n ? {\n country: state.getValue(\n 'regionToCountry',\n input[getMappedName(ColumnName.Country)],\n ) as IsoCountryCode,\n }\n : {}),\n ...(getMappedName(ColumnName.CountrySubDivision) !== NONE &&\n input[getMappedName(ColumnName.CountrySubDivision)]\n ? {\n countrySubDivision: state.getValue(\n 'regionToCountrySubDivision',\n input[getMappedName(ColumnName.CountrySubDivision)],\n ) as IsoCountrySubdivisionCode,\n }\n : {}),\n ...(getMappedName(ColumnName.RequestStatus) !== NONE &&\n state.getValue('statusToRequestStatus', input[getMappedName(ColumnName.RequestStatus)]) !==\n NONE &&\n input[getMappedName(ColumnName.RequestStatus)]\n ? {\n status: state.getValue(\n 'statusToRequestStatus',\n input[getMappedName(ColumnName.RequestStatus)],\n ) as CompletedRequestStatus,\n }\n : {}),\n ...(getMappedName(ColumnName.CreatedAt) !== NONE &&\n input[getMappedName(ColumnName.CreatedAt)]\n ? {\n createdAt: new Date(input[getMappedName(ColumnName.CreatedAt)]),\n }\n : {}),\n ...(getMappedName(ColumnName.DataSiloIds) !== NONE &&\n input[getMappedName(ColumnName.DataSiloIds)]\n ? 
{\n dataSiloIds: splitCsvToList(input[getMappedName(ColumnName.DataSiloIds)]),\n }\n : {}),\n },\n ];\n });\n}\n","import type { PersistedState } from '@transcend-io/persisted-state';\nimport { INITIALIZER, type Initializer, makeGraphQLRequest } from '@transcend-io/sdk';\nimport type { GraphQLClient } from 'graphql-request';\nimport inquirer from 'inquirer';\n\nimport { logger } from '../../logger.js';\nimport { CachedFileState, IDENTIFIER_BLOCK_LIST } from './constants.js';\nimport { fuzzyMatchColumns } from './fuzzyMatchColumns.js';\n\n/**\n * Mapping from identifier name to request input parameter\n */\nexport type IdentifierNameMap = {\n [k in string]: string;\n};\n\n/**\n * Create a mapping from the identifier names that can be included\n * at request submission, to the names of the columns that map to those\n * identifiers.\n *\n * @param client - GraphQL client\n * @param columnNames - The set of all column names\n * @param state - Cached state of this mapping\n * @returns Mapping from identifier name to column name\n */\nexport async function mapColumnsToIdentifiers(\n client: GraphQLClient,\n columnNames: string[],\n state: PersistedState<typeof CachedFileState>,\n): Promise<IdentifierNameMap> {\n // Grab the initializer\n const { initializer } = await makeGraphQLRequest<{\n /** Query response */\n initializer: Initializer;\n }>(client, INITIALIZER, { logger });\n\n // Determine the columns that should be mapped\n const columnQuestions = initializer.identifiers.filter(\n ({ name }) => !state.getValue('identifierNames', name) && !IDENTIFIER_BLOCK_LIST.includes(name),\n );\n\n // Skip mapping when everything is mapped\n const identifierNameMap =\n columnQuestions.length === 0\n ? {}\n : // prompt questions to map columns\n await inquirer.prompt<{\n [k in string]: string;\n }>(\n columnQuestions.map(({ name }) => {\n const matches = fuzzyMatchColumns(columnNames, name, false);\n return {\n name,\n message: `Choose the column that will be used to map in the identifier: ${name}`,\n type: 'list',\n default: matches[0],\n choices: matches,\n };\n }),\n );\n await Promise.all(\n Object.entries(identifierNameMap).map(([k, v]) => state.setValue(v, 'identifierNames', k)),\n );\n\n return {\n ...state.getValue('identifierNames'),\n ...identifierNameMap,\n };\n}\n","import type { PersistedState } from '@transcend-io/persisted-state';\nimport type { AttributeKey } from '@transcend-io/sdk';\nimport type { GraphQLClient } from 'graphql-request';\nimport inquirer from 'inquirer';\n\nimport { CachedFileState } from './constants.js';\nimport { fuzzyMatchColumns } from './fuzzyMatchColumns.js';\n\n/**\n * Mapping from attribute name to request input parameter\n */\nexport type AttributeNameMap = {\n [k in string]: string;\n};\n\n/**\n * Create a mapping from the attributes names that can be included\n * at request submission, to the names of the columns that map to those\n * attributes.\n *\n * @param client - GraphQL client\n * @param columnNames - The set of all column names\n * @param state - Cached state of this mapping\n * @param requestAttributeKeys - Attribute keys to map\n * @returns Mapping from attributes name to column name\n */\nexport async function mapColumnsToAttributes(\n client: GraphQLClient,\n columnNames: string[],\n state: PersistedState<typeof CachedFileState>,\n requestAttributeKeys: AttributeKey[],\n): Promise<AttributeNameMap> {\n // Determine the columns that should be mapped\n const columnQuestions = requestAttributeKeys.filter(\n ({ name }) => 
!state.getValue('attributeNames', name),\n );\n\n // Skip mapping when everything is mapped\n const attributeNameMap =\n columnQuestions.length === 0\n ? {}\n : // prompt questions to map columns\n await inquirer.prompt<{\n [k in string]: string;\n }>(\n columnQuestions.map(({ name }) => {\n const matches = fuzzyMatchColumns(columnNames, name, false);\n return {\n name,\n message: `Choose the column that will be used to map in the attribute: ${name}`,\n type: 'list',\n default: matches[0],\n choices: matches,\n };\n }),\n );\n await Promise.all(\n Object.entries(attributeNameMap).map(([k, v]) => state.setValue(v, 'attributeNames', k)),\n );\n\n return {\n ...state.getValue('attributeNames'),\n ...attributeNameMap,\n };\n}\n","import { join } from 'node:path';\n\nimport { PersistedState } from '@transcend-io/persisted-state';\nimport {\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchAllRequestAttributeKeys,\n} from '@transcend-io/sdk';\nimport { map } from '@transcend-io/utils';\nimport cliProgress from 'cli-progress';\n/* eslint-disable max-lines */\nimport colors from 'colors';\nimport * as t from 'io-ts';\nimport { uniq } from 'lodash-es';\n\nimport { DEFAULT_TRANSCEND_API } from '../../constants.js';\nimport { logger } from '../../logger.js';\nimport { CachedRequestState, CachedFileState } from './constants.js';\nimport { extractClientError } from './extractClientError.js';\nimport { filterRows } from './filterRows.js';\nimport { mapColumnsToAttributes } from './mapColumnsToAttributes.js';\nimport { mapColumnsToIdentifiers } from './mapColumnsToIdentifiers.js';\nimport { mapCsvColumnsToApi } from './mapCsvColumnsToApi.js';\nimport { mapCsvRowsToRequestInputs } from './mapCsvRowsToRequestInputs.js';\nimport { mapRequestEnumValues } from './mapRequestEnumValues.js';\nimport { parseAttributesFromString } from './parseAttributesFromString.js';\nimport { readCsv } from './readCsv.js';\nimport { submitPrivacyRequest } from './submitPrivacyRequest.js';\n\n/**\n * Upload a set of privacy requests from CSV\n *\n * @param options - Options\n */\nexport async function uploadPrivacyRequestsFromCsv({\n cacheFilepath,\n requestReceiptFolder,\n file,\n auth,\n sombraAuth,\n concurrency = 100,\n defaultPhoneCountryCode = '1', // USA\n transcendUrl = DEFAULT_TRANSCEND_API,\n attributes = [],\n emailIsVerified = true,\n skipFilterStep = false,\n skipSendingReceipt = true,\n isTest = false,\n isSilent = true,\n debug = false,\n dryRun = false,\n}: {\n /** File to cache metadata about mapping of CSV shape to script */\n cacheFilepath: string;\n /** File where request receipts are stored */\n requestReceiptFolder: string;\n /** CSV file path */\n file: string;\n /** Transcend API key authentication */\n auth: string;\n /** Default country code for phone numbers */\n defaultPhoneCountryCode?: string;\n /** Concurrency to upload in */\n concurrency?: number;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Sombra API key authentication */\n sombraAuth?: string;\n /** Include debug logs */\n debug?: boolean;\n /** Skip the step where requests are filtered */\n skipFilterStep?: boolean;\n /** Whether test requests are being uploaded */\n isTest?: boolean;\n /** Whether requests are uploaded in silent mode */\n isSilent?: boolean;\n /** Whether to send the email receipt */\n skipSendingReceipt?: boolean;\n /** Whether the email was verified up front */\n emailIsVerified?: boolean;\n /** Attributes string pre-parse */\n attributes?: string[];\n /** Whether a dry run is 
happening */\n dryRun?: boolean;\n}): Promise<void> {\n // Time duration\n const t0 = new Date().getTime();\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);\n\n // Parse out the extra attributes to apply to all requests uploaded\n const parsedAttributes = parseAttributesFromString(attributes);\n\n // Create a new state to persist the metadata that\n // maps the request inputs to the Transcend API shape\n const state = new PersistedState(cacheFilepath, CachedFileState, {\n columnNames: {},\n requestTypeToRequestAction: {},\n subjectTypeToSubjectName: {},\n languageToLocale: {},\n statusToRequestStatus: {},\n identifierNames: {},\n attributeNames: {},\n regionToCountrySubDivision: {},\n regionToCountry: {},\n });\n\n // Create a new state file to store the requests from this run\n const requestCacheFile = join(\n requestReceiptFolder,\n `tr-request-upload-${new Date().toISOString()}-${file.split('/').pop()}`.replace(\n '.csv',\n '.json',\n ),\n );\n const requestState = new PersistedState(requestCacheFile, CachedRequestState, {\n successfulRequests: [],\n duplicateRequests: [],\n failingRequests: [],\n });\n\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, {\n logger,\n sombraApiKey: sombraAuth,\n sombraUrl: process.env.SOMBRA_URL,\n });\n\n // Read in the list of integration requests\n const requestsList = readCsv(file, t.record(t.string, t.string));\n const columnNames = uniq(requestsList.map((x) => Object.keys(x)).flat());\n\n // Log out an example request\n if (requestsList.length === 0) {\n throw new Error(\n 'No Requests found in list! Ensure the first row of the CSV is a header and the rest are requests.',\n );\n }\n if (debug) {\n const firstRequest = requestsList[0];\n logger.info(colors.magenta(`First request: ${JSON.stringify(firstRequest, null, 2)}`));\n }\n // Determine what rows in the CSV should be imported\n // Choose columns that contain metadata to filter the requests\n const filteredRequestList = skipFilterStep ? requestsList : await filterRows(requestsList);\n\n // Build a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n // Grab the request attributes\n const requestAttributeKeys = await fetchAllRequestAttributeKeys(client, { logger });\n // Determine the columns that should be mapped\n const columnNameMap = await mapCsvColumnsToApi(columnNames, state);\n const identifierNameMap = await mapColumnsToIdentifiers(client, columnNames, state);\n const attributeNameMap = await mapColumnsToAttributes(\n client,\n columnNames,\n state,\n requestAttributeKeys,\n );\n await mapRequestEnumValues(client, filteredRequestList, {\n state,\n columnNameMap,\n });\n\n // map the CSV to request input\n const requestInputs = mapCsvRowsToRequestInputs(filteredRequestList, state, {\n defaultPhoneCountryCode,\n columnNameMap,\n identifierNameMap,\n attributeNameMap,\n requestAttributeKeys,\n });\n\n // start the progress bar with a total value of 200 and start value of 0\n if (!debug) {\n progressBar.start(requestInputs.length, 0);\n }\n let total = 0;\n // Submit each request\n await map(\n requestInputs,\n async ([rawRow, requestInput], ind) => {\n // The identifier to log, only include personal data if debug mode is on\n const requestLogId = debug\n ? 
`email:${requestInput.email} | coreIdentifier:${requestInput.coreIdentifier}`\n : `row:${ind.toString()}`;\n\n if (debug) {\n logger.info(\n colors.magenta(\n `[${ind + 1}/${requestInputs.length}] Importing: ${JSON.stringify(\n requestInput,\n null,\n 2,\n )}`,\n ),\n );\n }\n\n // Skip on dry run\n if (dryRun) {\n logger.info(colors.magenta('Bailing out on dry run because dryRun is set'));\n return;\n }\n\n try {\n // Make the GraphQL request to submit the privacy request\n const requestResponse = await submitPrivacyRequest(sombra, requestInput, {\n details: `Uploaded by Transcend Cli: \"tr-request-upload\" : ${JSON.stringify(\n rawRow,\n null,\n 2,\n )}`,\n isTest,\n emailIsVerified,\n skipSendingReceipt,\n isSilent,\n additionalAttributes: parsedAttributes,\n });\n\n // Log success\n if (debug) {\n logger.info(\n colors.green(\n `[${ind + 1}/${\n requestInputs.length\n }] Successfully submitted the test data subject request: \"${requestLogId}\"`,\n ),\n );\n logger.info(\n colors.green(\n `[${ind + 1}/${requestInputs.length}] View it at: \"${requestResponse.link}\"`,\n ),\n );\n }\n\n // Cache successful upload\n const successfulRequests = requestState.getValue('successfulRequests');\n successfulRequests.push({\n id: requestResponse.id,\n link: requestResponse.link,\n rowIndex: ind,\n coreIdentifier: requestResponse.coreIdentifier,\n attemptedAt: new Date().toISOString(),\n });\n await requestState.setValue(successfulRequests, 'successfulRequests');\n } catch (err) {\n const msg = `${err.message} - ${JSON.stringify(err.response?.body, null, 2)}`;\n const clientError = extractClientError(msg);\n\n if (clientError === 'Client error: You have already made this request.') {\n if (debug) {\n logger.info(\n colors.yellow(\n `[${ind + 1}/${requestInputs.length}] Skipping request as it is a duplicate`,\n ),\n );\n }\n const duplicateRequests = requestState.getValue('duplicateRequests');\n duplicateRequests.push({\n coreIdentifier: requestInput.coreIdentifier,\n rowIndex: ind,\n attemptedAt: new Date().toISOString(),\n });\n await requestState.setValue(duplicateRequests, 'duplicateRequests');\n } else {\n const failingRequests = requestState.getValue('failingRequests');\n failingRequests.push({\n ...requestInput,\n rowIndex: ind,\n error: clientError || msg,\n attemptedAt: new Date().toISOString(),\n });\n await requestState.setValue(failingRequests, 'failingRequests');\n if (debug) {\n logger.error(colors.red(clientError || msg));\n logger.error(\n colors.red(\n `[${ind + 1}/${\n requestInputs.length\n }] Failed to submit request for: \"${requestLogId}\"`,\n ),\n );\n }\n }\n }\n\n total += 1;\n if (!debug) {\n progressBar.update(total);\n }\n },\n {\n concurrency,\n },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n // Log completion time\n logger.info(colors.green(`Completed upload in \"${totalTime / 1000}\" seconds.`));\n\n // Log duplicates\n if (requestState.getValue('duplicateRequests').length > 0) {\n logger.info(\n colors.yellow(\n `Encountered \"${requestState.getValue('duplicateRequests').length}\" duplicate requests. ` +\n `See \"${requestCacheFile}\" to review the core identifiers for these requests.`,\n ),\n );\n }\n\n // Log errors\n if (requestState.getValue('failingRequests').length > 0) {\n logger.error(\n colors.red(\n `Encountered \"${requestState.getValue('failingRequests').length}\" errors. 
` +\n `See \"${requestCacheFile}\" to review the error messages and inputs.`,\n ),\n );\n process.exit(1);\n }\n}\n/* eslint-enable max-lines */\n"],"mappings":"owCAcA,eAAsB,EACpB,EACA,EACA,EACoC,CACpC,EAAS,eAAe,eAAgB,EAAmB,CAE3D,IAAM,EAAS,EAAU,IAAK,GAAS,GAAQ,UAAU,CAAC,OAAQ,GAAU,CAAC,EAAM,GAAO,CAC1F,GAAI,EAAO,SAAW,EACpB,OAAO,EAET,IAAM,EAAS,MAAM,EAAS,OAC5B,EAAO,IAAK,IAAW,CACrB,KAAM,EACN,QAAS,iBAAiB,IAC1B,KAAM,eACN,QAAS,EAAgB,KAAM,GAAM,EAAY,EAAO,EAAE,CAAC,CAC3D,QAAS,EAA2B,IACjC,EAEG,EAAgB,OAAQ,GAAM,OAAO,GAAM,UAAY,EAAY,EAAO,EAAE,CAAC,CAD7E,EAEP,EAAE,CACJ,CACD,MAAO,CACL,GAAG,EACH,GAAG,EAAM,EAAS,GAChB,OAAO,GAAM,SAAY,EAAgB,OAAO,OAAO,EAAE,CAAC,GAC3D,CACF,CChCH,SAAgB,EAAyB,EAAqB,EAA8B,CAC1F,OAAO,EAAK,EAAK,IAAK,GAAQ,EAAI,IAAe,GAAG,CAAC,MAAM,CAAC,CCK9D,eAAsB,EAAW,EAA6C,CAE5E,IAAM,EAAc,EAAK,EAAK,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CAG5D,EAAe,EACf,EAAgB,GAGpB,KAAO,GAAe,CAGpB,GAAM,CAAE,oBAAqB,MAAM,EAAS,OAGzC,CACD,CACE,KAAM,mBAEN,QAAS,mGAAmG,EAAa,OAAO,QAChI,KAAM,OACN,QAAS,EACT,QAAS,CAAC,EAAM,GAAG,EAAY,CAChC,CACF,CAAC,CAIF,GADA,EAAgB,IAAS,EACrB,EAAe,CACjB,IAAM,EAAU,EAAyB,EAAc,EAAiB,CAElE,CAAE,gBAAiB,MAAM,EAAS,OAGrC,CACD,CACE,KAAM,eACN,QAAS,gCACT,KAAM,WACN,QAAS,EACT,QAAS,EACV,CACF,CAAC,CAEF,EAAe,EAAa,OAAQ,GAClC,EAAa,SAAS,EAAQ,GAAkB,CACjD,EAKL,OADA,EAAO,KAAK,EAAO,QAAQ,aAAa,EAAa,OAAO,WAAW,CAAC,CACjE,EC7CT,eAAsB,EACpB,EACA,EACwB,CAExB,IAAM,EAAkB,EAAU,EAAW,CAAC,OAC3C,GAAS,CAAC,EAAM,SAAS,cAAe,EAAK,CAC/C,CAGK,EACJ,EAAgB,SAAW,EACvB,EAAE,CAEF,MAAM,EAAS,OAGb,EAAgB,IAAK,GAAS,CAC5B,IAAM,EAAQ,EAAU,EAAK,QAAQ,aAAc,GAAG,CAAC,CACjD,EAAU,EACd,EACA,EACA,EAAY,GACZ,CAAC,CAAC,EAAkB,GACrB,CACD,MAAO,CACL,OACA,QAAS,4DAA4D,IACrE,KAAM,OACN,QAAS,EAAQ,GACjB,QAAS,EACV,EACD,CACH,CAGP,OADA,MAAM,QAAQ,IAAI,EAAW,EAAc,CAAC,KAAK,CAAC,EAAG,KAAO,EAAM,SAAS,EAAG,cAAe,EAAE,CAAC,CAAC,CAC1F,EChCT,eAAsB,EACpB,EACA,EACA,CACE,QACA,iBAOa,CAEf,IAAM,EAAiB,GACrB,EAAM,SAAS,cAAe,EAAU,EAAI,EAAc,GAGtD,CAAE,oBAAqB,MAAM,EAGhC,EAAQ,EAAe,CAAE,SAAQ,CAAC,CAGrC,EAAO,KAAK,EAAO,QAAQ,oDAAoD,CAAC,CAChF,IAAM,EAA+D,MAAM,EACzE,EAAyB,EAAU,EAAc,EAAW,YAAY,CAAC,CACzE,OAAO,OAAO,EAAc,CAC5B,EAAM,SAAS,6BAA6B,CAC7C,CACD,MAAM,EAAM,SAAS,EAA4B,6BAA6B,CAG9E,EAAO,KAAK,EAAO,QAAQ,6CAA6C,CAAC,CACzE,IAAM,EAAsD,MAAM,EAChE,EAAyB,EAAU,EAAc,EAAW,YAAY,CAAC,CACzE,EAAiB,KAAK,CAAE,UAAW,EAAK,CACxC,EAAM,SAAS,2BAA2B,CAC3C,CACD,MAAM,EAAM,SAAS,EAA0B,2BAA2B,CAG1E,EAAO,KAAK,EAAO,QAAQ,4CAA4C,CAAC,CACxE,IAAM,EAAmD,MAAM,EAC7D,EAAyB,EAAU,EAAc,EAAW,OAAO,CAAC,CACpE,OAAO,OAAO,EAAW,CACzB,EAAM,SAAS,mBAAmB,CACnC,CACD,MAAM,EAAM,SAAS,EAAkB,mBAAmB,CAC1D,EAAO,KAAK,EAAO,QAAQ,oDAAoD,CAAC,CAGhF,EAAO,KAAK,EAAO,QAAQ,oDAAoD,CAAC,CAChF,IAAM,EAAsB,EAAc,EAAW,cAAc,CAC7D,EAGJ,IAAA,SACI,EAAE,CACF,MAAM,EACJ,EAAyB,EAAU,EAAoB,CACvD,CAAC,GAAG,OAAO,OAAO,EAAuB,CAAE,EAAK,CAChD,EAAM,SAAS,wBAAwB,CACxC,CACP,MAAM,EAAM,SAAS,EAAuB,wBAAwB,CAGpE,EAAO,KAAK,EAAO,QAAQ,6CAA6C,CAAC,CACzE,IAAM,EAAgB,EAAc,EAAW,QAAQ,CACjD,EAGJ,IAAA,SACI,EAAE,CACF,MAAM,EACJ,EAAyB,EAAU,EAAc,CACjD,CAAC,GAAG,OAAO,OAAO,EAAe,CAAE,EAAK,CACxC,EAAM,SAAS,kBAAkB,CAClC,CACP,MAAM,EAAM,SAAS,EAAiB,kBAAkB,CAGxD,EAAO,KAAK,EAAO,QAAQ,0DAA0D,CAAC,CACtF,IAAM,EAA2B,EAAc,EAAW,mBAAmB,CACvE,EAGJ,IAAA,SACI,EAAE,CACF,MAAM,EACJ,EAAyB,EAAU,EAAyB,CAC5D,CAAC,GAAG,OAAO,OAAO,EAA0B,CAAE,EAAK,CACnD,EAAM,SAAS,6BAA6B,CAC7C,CACP,MAAM,EAAM,SAAS,EAA4B,6BAA6B,CC7FhF,MAAa,EAA2B,EAAE,OACxC,EAAE,OACF,EAAE,MACA,EAAE,aAAa,CACb,EAAE,KAAK,CAEL,MAAO,EAAE,OACV,CAAC,CACF,EAAE,QAAQ,CAER,KAAM,EAAE,OACT,CAAC,CACH,CAAC,CACH,CACF,CAKY,EAAsB,EAAE,aAAa,CAChD,EAAE,KAAK,CAEL,MAAO,EAAE,OAET,yBAA0B,EAE1B,eAAgB,EAAE,OAElB,YAAa,EAAS,EAAc,CAEpC,YAAa,EAAE,OAChB,CAAC,CACF,EAAE,QAAQ,CAER,QAAS,EAAS,EAAe,CAEjC,mBAAoB,EAAS,EAA0B,C
AEvD,WAAY,EAAE,MAAM,EAAqB,CAEzC,OAAQ,EAAS,EAAuB,CAExC,UAAW,EAEX,YAAa,EAAE,MAAM,EAAE,OAAO,CAE9B,OAAQ,EAAS,EAAW,CAC7B,CAAC,CACH,CAAC,CAaF,SAAgB,EACd,EACA,EACA,EACQ,CAER,GAAI,IAAmB,EAAe,MACpC,OAAO,EAAgB,aAAa,CAItC,GAAI,IAAmB,EAAe,MAAO,CAC3C,IAAM,EAAa,EAChB,QAAQ,EAAwB,GAAG,CACnC,QAAQ,QAAS,GAAG,CACpB,QAAQ,OAAQ,GAAG,CACnB,QAAQ,OAAQ,GAAG,CACnB,QAAQ,QAAS,GAAG,CACpB,QAAQ,YAAa,GAAG,CAC3B,OAAQ,EAEJ,EAAW,WAAW,IAAI,CACxB,EACA,IAAI,IAA0B,IAHhC,GAKN,OAAO,EAaT,SAAgB,EACd,EACA,EACA,CACE,gBACA,oBACA,mBACA,uBACA,0BAA0B,KAaqB,CAEjD,IAAM,EAAiB,GACrB,EAAM,SAAS,cAAe,EAAU,EAAI,EAAc,GAC5D,OAAO,EAAc,IAAK,GAAyD,CAEjF,IAAM,EAAqD,EAAE,CAC7D,OAAO,QAAQ,EAAkB,CAE9B,QAAQ,EAAG,KAAgB,IAAe,EAAK,CAC/C,SAAS,CAAC,EAAgB,KAAgB,CAEzC,IAAM,EAAiB,OAAO,OAAO,EAAe,CAAC,SACnD,EACD,CACI,EACD,EAAe,OAGb,EAAkB,EAAM,GAC9B,GAAI,EAAiB,CACnB,IAAM,EAAa,EACjB,EACA,EACA,EACD,CACG,IAEG,EAAyB,KAC5B,EAAyB,GAAkB,EAAE,EAI/C,EAAyB,GAAiB,KAAK,CAC7C,MAAO,EACP,KAAM,EACP,CAAC,IAGN,CAGJ,IAAM,EAAqC,EAAE,CAC7C,OAAO,QAAQ,EAAiB,CAE7B,QAAQ,EAAG,KAAgB,IAAe,EAAK,CAC/C,SAAS,CAAC,EAAe,KAAgB,CAExC,IAAM,EAAuB,EAAM,GACnC,GAAI,EAAsB,CAExB,IAAM,EACJ,EAAqB,KAAM,GAAS,EAAK,OAAS,EAAc,EAAE,OAClE,eACF,EAAW,KAAK,CACd,OAAQ,EAAU,EAAe,EAAqB,CAAG,EACzD,IAAK,EACN,CAAC,GAEJ,CAEJ,IAAM,EAAoB,EAAc,EAAW,YAAY,CACzD,EAAwB,EAAc,EAAW,YAAY,CACnE,MAAO,CACL,EACA,CACE,MAAO,EAAM,EAAc,EAAW,MAAM,EAC5C,2BACA,aACA,eAAgB,EAAM,EAAc,EAAW,eAAe,EAC9D,YACE,IAAA,4BACI,EAAM,SAAS,6BAA8B,EAAM,CACnD,EAAM,SAAS,6BAA8B,EAAM,GAAmB,CAC5E,YACE,IAAA,4BACI,EAAM,SAAS,2BAA4B,EAAM,CACjD,EAAM,SAAS,2BAA4B,EAAM,GAAuB,CAC9E,GAAI,EAAc,EAAW,OAAO,GAAA,UAAa,EAAM,EAAc,EAAW,OAAO,EACnF,CACE,OAAQ,EAAM,SAAS,mBAAoB,EAAM,EAAc,EAAW,OAAO,EAAE,CACpF,CACD,EAAE,CACN,GAAI,EAAc,EAAW,QAAQ,GAAA,UAAa,EAAM,EAAc,EAAW,QAAQ,EACrF,CACE,QAAS,EAAM,SACb,kBACA,EAAM,EAAc,EAAW,QAAQ,EACxC,CACF,CACD,EAAE,CACN,GAAI,EAAc,EAAW,mBAAmB,GAAA,UAChD,EAAM,EAAc,EAAW,mBAAmB,EAC9C,CACE,mBAAoB,EAAM,SACxB,6BACA,EAAM,EAAc,EAAW,mBAAmB,EACnD,CACF,CACD,EAAE,CACN,GAAI,EAAc,EAAW,cAAc,GAAA,UAC3C,EAAM,SAAS,wBAAyB,EAAM,EAAc,EAAW,cAAc,EAAE,GAAA,UAEvF,EAAM,EAAc,EAAW,cAAc,EACzC,CACE,OAAQ,EAAM,SACZ,wBACA,EAAM,EAAc,EAAW,cAAc,EAC9C,CACF,CACD,EAAE,CACN,GAAI,EAAc,EAAW,UAAU,GAAA,UACvC,EAAM,EAAc,EAAW,UAAU,EACrC,CACE,UAAW,IAAI,KAAK,EAAM,EAAc,EAAW,UAAU,EAAE,CAChE,CACD,EAAE,CACN,GAAI,EAAc,EAAW,YAAY,GAAA,UACzC,EAAM,EAAc,EAAW,YAAY,EACvC,CACE,YAAa,EAAe,EAAM,EAAc,EAAW,YAAY,EAAE,CAC1E,CACD,EAAE,CACP,CACF,EACD,CCtPJ,eAAsB,EACpB,EACA,EACA,EAC4B,CAE5B,GAAM,CAAE,eAAgB,MAAM,EAG3B,EAAQ,EAAa,CAAE,SAAQ,CAAC,CAG7B,EAAkB,EAAY,YAAY,QAC7C,CAAE,UAAW,CAAC,EAAM,SAAS,kBAAmB,EAAK,EAAI,CAAC,EAAsB,SAAS,EAAK,CAChG,CAGK,EACJ,EAAgB,SAAW,EACvB,EAAE,CAEF,MAAM,EAAS,OAGb,EAAgB,KAAK,CAAE,UAAW,CAChC,IAAM,EAAU,EAAkB,EAAa,EAAM,GAAM,CAC3D,MAAO,CACL,OACA,QAAS,iEAAiE,IAC1E,KAAM,OACN,QAAS,EAAQ,GACjB,QAAS,EACV,EACD,CACH,CAKP,OAJA,MAAM,QAAQ,IACZ,OAAO,QAAQ,EAAkB,CAAC,KAAK,CAAC,EAAG,KAAO,EAAM,SAAS,EAAG,kBAAmB,EAAE,CAAC,CAC3F,CAEM,CACL,GAAG,EAAM,SAAS,kBAAkB,CACpC,GAAG,EACJ,CC1CH,eAAsB,EACpB,EACA,EACA,EACA,EAC2B,CAE3B,IAAM,EAAkB,EAAqB,QAC1C,CAAE,UAAW,CAAC,EAAM,SAAS,iBAAkB,EAAK,CACtD,CAGK,EACJ,EAAgB,SAAW,EACvB,EAAE,CAEF,MAAM,EAAS,OAGb,EAAgB,KAAK,CAAE,UAAW,CAChC,IAAM,EAAU,EAAkB,EAAa,EAAM,GAAM,CAC3D,MAAO,CACL,OACA,QAAS,gEAAgE,IACzE,KAAM,OACN,QAAS,EAAQ,GACjB,QAAS,EACV,EACD,CACH,CAKP,OAJA,MAAM,QAAQ,IACZ,OAAO,QAAQ,EAAiB,CAAC,KAAK,CAAC,EAAG,KAAO,EAAM,SAAS,EAAG,iBAAkB,EAAE,CAAC,CACzF,CAEM,CACL,GAAG,EAAM,SAAS,iBAAiB,CACnC,GAAG,EACJ,CC7BH,eAAsB,EAA6B,CACjD,gBACA,uBACA,OACA,OACA,aACA,cAAc,IACd,0BAA0B,IAC1B,eAAe,EACf,aAAa,EAAE,CACf,kBAAkB,GAClB,iBAAiB,GACjB,qBAAqB,GACrB,SAAS,GACT,WAAW,GACX,QAAQ,GACR,SAAS,IAkCO,CAEhB,IAAM,EAAK,IAAI,MAAM,CAAC,SAAS,CAEzB,
EAAc,IAAI,EAAY,UAAU,EAAE,CAAE,EAAY,QAAQ,eAAe,CAG/E,EAAmB,EAA0B,EAAW,CAIxD,EAAQ,IAAI,EAAe,EAAe,EAAiB,CAC/D,YAAa,EAAE,CACf,2BAA4B,EAAE,CAC9B,yBAA0B,EAAE,CAC5B,iBAAkB,EAAE,CACpB,sBAAuB,EAAE,CACzB,gBAAiB,EAAE,CACnB,eAAgB,EAAE,CAClB,2BAA4B,EAAE,CAC9B,gBAAiB,EAAE,CACpB,CAAC,CAGI,EAAmB,GACvB,EACA,qBAAqB,IAAI,MAAM,CAAC,aAAa,CAAC,GAAG,EAAK,MAAM,IAAI,CAAC,KAAK,GAAG,QACvE,OACA,QACD,CACF,CACK,EAAe,IAAI,EAAe,EAAkB,EAAoB,CAC5E,mBAAoB,EAAE,CACtB,kBAAmB,EAAE,CACrB,gBAAiB,EAAE,CACpB,CAAC,CAGI,EAAS,MAAM,GAAwB,EAAc,EAAM,CAC/D,SACA,aAAc,EACd,UAAW,QAAQ,IAAI,WACxB,CAAC,CAGI,EAAe,EAAQ,EAAM,EAAE,OAAO,EAAE,OAAQ,EAAE,OAAO,CAAC,CAC1D,EAAc,EAAK,EAAa,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CAGxE,GAAI,EAAa,SAAW,EAC1B,MAAU,MACR,oGACD,CAEH,GAAI,EAAO,CACT,IAAM,EAAe,EAAa,GAClC,EAAO,KAAK,EAAO,QAAQ,kBAAkB,KAAK,UAAU,EAAc,KAAM,EAAE,GAAG,CAAC,CAIxF,IAAM,EAAsB,EAAiB,EAAe,MAAM,EAAW,EAAa,CAGpF,EAAS,GAA4B,EAAc,EAAK,CAExD,EAAuB,MAAM,GAA6B,EAAQ,CAAE,SAAQ,CAAC,CAE7E,EAAgB,MAAM,EAAmB,EAAa,EAAM,CAC5D,EAAoB,MAAM,EAAwB,EAAQ,EAAa,EAAM,CAC7E,EAAmB,MAAM,EAC7B,EACA,EACA,EACA,EACD,CACD,MAAM,EAAqB,EAAQ,EAAqB,CACtD,QACA,gBACD,CAAC,CAGF,IAAM,EAAgB,EAA0B,EAAqB,EAAO,CAC1E,0BACA,gBACA,oBACA,mBACA,uBACD,CAAC,CAGG,GACH,EAAY,MAAM,EAAc,OAAQ,EAAE,CAE5C,IAAI,EAAQ,EAEZ,MAAM,EACJ,EACA,MAAO,CAAC,EAAQ,GAAe,IAAQ,CAErC,IAAM,EAAe,EACjB,SAAS,EAAa,MAAM,oBAAoB,EAAa,iBAC7D,OAAO,EAAI,UAAU,GAezB,GAbI,GACF,EAAO,KACL,EAAO,QACL,IAAI,EAAM,EAAE,GAAG,EAAc,OAAO,eAAe,KAAK,UACtD,EACA,KACA,EACD,GACF,CACF,CAIC,EAAQ,CACV,EAAO,KAAK,EAAO,QAAQ,+CAA+C,CAAC,CAC3E,OAGF,GAAI,CAEF,IAAM,EAAkB,MAAM,EAAqB,EAAQ,EAAc,CACvE,QAAS,oDAAoD,KAAK,UAChE,EACA,KACA,EACD,GACD,SACA,kBACA,qBACA,WACA,qBAAsB,EACvB,CAAC,CAGE,IACF,EAAO,KACL,EAAO,MACL,IAAI,EAAM,EAAE,GACV,EAAc,OACf,2DAA2D,EAAa,GAC1E,CACF,CACD,EAAO,KACL,EAAO,MACL,IAAI,EAAM,EAAE,GAAG,EAAc,OAAO,iBAAiB,EAAgB,KAAK,GAC3E,CACF,EAIH,IAAM,EAAqB,EAAa,SAAS,qBAAqB,CACtE,EAAmB,KAAK,CACtB,GAAI,EAAgB,GACpB,KAAM,EAAgB,KACtB,SAAU,EACV,eAAgB,EAAgB,eAChC,YAAa,IAAI,MAAM,CAAC,aAAa,CACtC,CAAC,CACF,MAAM,EAAa,SAAS,EAAoB,qBAAqB,OAC9D,EAAK,CACZ,IAAM,EAAM,GAAG,EAAI,QAAQ,KAAK,KAAK,UAAU,EAAI,UAAU,KAAM,KAAM,EAAE,GACrE,EAAc,EAAmB,EAAI,CAE3C,GAAI,IAAgB,oDAAqD,CACnE,GACF,EAAO,KACL,EAAO,OACL,IAAI,EAAM,EAAE,GAAG,EAAc,OAAO,yCACrC,CACF,CAEH,IAAM,EAAoB,EAAa,SAAS,oBAAoB,CACpE,EAAkB,KAAK,CACrB,eAAgB,EAAa,eAC7B,SAAU,EACV,YAAa,IAAI,MAAM,CAAC,aAAa,CACtC,CAAC,CACF,MAAM,EAAa,SAAS,EAAmB,oBAAoB,KAC9D,CACL,IAAM,EAAkB,EAAa,SAAS,kBAAkB,CAChE,EAAgB,KAAK,CACnB,GAAG,EACH,SAAU,EACV,MAAO,GAAe,EACtB,YAAa,IAAI,MAAM,CAAC,aAAa,CACtC,CAAC,CACF,MAAM,EAAa,SAAS,EAAiB,kBAAkB,CAC3D,IACF,EAAO,MAAM,EAAO,IAAI,GAAe,EAAI,CAAC,CAC5C,EAAO,MACL,EAAO,IACL,IAAI,EAAM,EAAE,GACV,EAAc,OACf,mCAAmC,EAAa,GAClD,CACF,GAKP,GAAS,EACJ,GACH,EAAY,OAAO,EAAM,EAG7B,CACE,cACD,CACF,CAED,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EAGvB,EAAO,KAAK,EAAO,MAAM,wBAAwB,EAAY,IAAK,YAAY,CAAC,CAG3E,EAAa,SAAS,oBAAoB,CAAC,OAAS,GACtD,EAAO,KACL,EAAO,OACL,gBAAgB,EAAa,SAAS,oBAAoB,CAAC,OAAO,6BACxD,EAAiB,sDAC5B,CACF,CAIC,EAAa,SAAS,kBAAkB,CAAC,OAAS,IACpD,EAAO,MACL,EAAO,IACL,gBAAgB,EAAa,SAAS,kBAAkB,CAAC,OAAO,iBACtD,EAAiB,4CAC5B,CACF,CACD,QAAQ,KAAK,EAAE"}
@@ -1,2 +1,2 @@
  import{t as e}from"./logger-Bj782ZYD.mjs";import{mt as t}from"./codecs-CeDPaLYa.mjs";import{decodeCodec as n}from"@transcend-io/type-utils";import{existsSync as r,readFileSync as i}from"node:fs";import a from"colors";import*as o from"io-ts";function s(s){return s||(e.error(a.red(`A Transcend API key must be provided. You can specify using --auth=$TRANSCEND_API_KEY`)),process.exit(1)),r(s)?n(o.array(t),i(s,`utf-8`)):s}export{s as t};
- //# sourceMappingURL=validateTranscendAuth-DCwAtgvh.mjs.map
+ //# sourceMappingURL=validateTranscendAuth-Cuh2Qfdl.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"validateTranscendAuth-DCwAtgvh.mjs","names":[],"sources":["../src/lib/api-keys/validateTranscendAuth.ts"],"sourcesContent":["import { existsSync, readFileSync } from 'node:fs';\n\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport colors from 'colors';\nimport * as t from 'io-ts';\n\nimport { StoredApiKey } from '../../codecs.js';\nimport { logger } from '../../logger.js';\n\n/**\n * Determine if the `--auth` parameter is an API key or a path to a JSON\n * file containing a list of API keys.\n *\n * @param auth - Raw auth parameter\n * @returns The API key or the list API keys\n */\nexport function validateTranscendAuth(auth: string): string | StoredApiKey[] {\n // Ensure auth is passed\n if (!auth) {\n logger.error(\n colors.red(\n 'A Transcend API key must be provided. You can specify using --auth=$TRANSCEND_API_KEY',\n ),\n );\n process.exit(1);\n }\n\n // Read from disk\n if (existsSync(auth)) {\n // validate that file is a list of API keys\n return decodeCodec(t.array(StoredApiKey), readFileSync(auth, 'utf-8'));\n }\n\n // Return as single API key\n return auth;\n}\n"],"mappings":"iPAgBA,SAAgB,EAAsB,EAAuC,CAkB3E,OAhBK,IACH,EAAO,MACL,EAAO,IACL,wFACD,CACF,CACD,QAAQ,KAAK,EAAE,EAIb,EAAW,EAAK,CAEX,EAAY,EAAE,MAAM,EAAa,CAAE,EAAa,EAAM,QAAQ,CAAC,CAIjE"}
+ {"version":3,"file":"validateTranscendAuth-Cuh2Qfdl.mjs","names":[],"sources":["../src/lib/api-keys/validateTranscendAuth.ts"],"sourcesContent":["import { existsSync, readFileSync } from 'node:fs';\n\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport colors from 'colors';\nimport * as t from 'io-ts';\n\nimport { StoredApiKey } from '../../codecs.js';\nimport { logger } from '../../logger.js';\n\n/**\n * Determine if the `--auth` parameter is an API key or a path to a JSON\n * file containing a list of API keys.\n *\n * @param auth - Raw auth parameter\n * @returns The API key or the list API keys\n */\nexport function validateTranscendAuth(auth: string): string | StoredApiKey[] {\n // Ensure auth is passed\n if (!auth) {\n logger.error(\n colors.red(\n 'A Transcend API key must be provided. You can specify using --auth=$TRANSCEND_API_KEY',\n ),\n );\n process.exit(1);\n }\n\n // Read from disk\n if (existsSync(auth)) {\n // validate that file is a list of API keys\n return decodeCodec(t.array(StoredApiKey), readFileSync(auth, 'utf-8'));\n }\n\n // Return as single API key\n return auth;\n}\n"],"mappings":"iPAgBA,SAAgB,EAAsB,EAAuC,CAkB3E,OAhBK,IACH,EAAO,MACL,EAAO,IACL,wFACD,CACF,CACD,QAAQ,KAAK,EAAE,EAIb,EAAW,EAAK,CAEX,EAAY,EAAE,MAAM,EAAa,CAAE,EAAa,EAAM,QAAQ,CAAC,CAIjE"}
@@ -3,4 +3,4 @@ import{appendFileSync as e,createWriteStream as t,writeFileSync as n}from"node:f
  `))}function o(e,t){if(!t||t.length===0){n(e,``);return}n(e,`${t.map(i).join(`,`)}\n`)}function s(t,n,r){n.length&&e(t,`${n.map(e=>r.map(t=>{let n=e[t];return n==null?``:String(n)}).map(i).join(`,`)).join(`
  `)}\n`)}function c(t,n){e(t,`\n${n.map(e=>Object.values(e)).map(e=>e.map(i).join(`,`)).join(`
  `)}`)}async function l(e,n,i=!0){let a=t(e);await new Promise((e,t)=>{try{let o=r.write(n,{headers:i,objectMode:!0}).on(`error`,t);a.on(`error`,t),a.on(`finish`,()=>e()),o.pipe(a)}catch(e){t(e)}})}function u(e){let t=e.lastIndexOf(`.`);return{baseName:t===-1?e:e.substring(0,t),extension:t===-1?`.csv`:e.substring(t)}}function d(e,t){let n={};for(let r of t)n[r]=e[r];return n}function f(e){return new Promise(t=>{e.once(`drain`,t)})}async function p(e,n,i=!0){let a;a=Array.isArray(i)?i:i===!0?n.length>0?Object.keys(n[0]):[]:!1;let o=t(e),s=r.format({headers:a||void 0,objectMode:!0}),c=s.pipe(o),l=new Promise((e,t)=>{c.on(`finish`,()=>e()),c.on(`error`,t),s.on(`error`,t),o.on(`error`,t)});for(let e of n){let t=a?d(e,a):e;s.write(t)||await f(s)}return s.end(),await l,[e]}export{l as a,u as i,c as n,a as o,o as r,p as s,s as t};
- //# sourceMappingURL=writeCsv-Da8NUe1V.mjs.map
+ //# sourceMappingURL=writeCsv-C4pjXGsD.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"writeCsv-Da8NUe1V.mjs","names":[],"sources":["../src/lib/helpers/writeCsv.ts"],"sourcesContent":["import { createWriteStream, writeFileSync, appendFileSync } from 'node:fs';\n\nimport { ObjByString } from '@transcend-io/type-utils';\nimport * as fastcsv from 'fast-csv';\n\n/**\n * Escape a CSV value\n *\n * @param value - Value to escape\n * @returns Escaped value\n */\nfunction escapeCsvValue(value: string): string {\n if (value.includes('\"') || value.includes(',') || value.includes('\\n')) {\n return `\"${value.replace(/\"/g, '\"\"')}\"`;\n }\n return value;\n}\n\n/**\n * Write a csv to file synchronously, overwriting any existing content\n *\n * @param filePath - File to write out to\n * @param data - Data to write\n * @param headers - Headers. If true, use object keys as headers. If array, use provided headers.\n */\nexport function writeCsvSync(filePath: string, data: ObjByString[], headers: string[]): void {\n const rows: string[][] = [];\n\n rows.push(headers);\n rows.push(...data.map((row) => Object.values(row)));\n\n // Build CSV content with proper escaping\n const csvContent = rows.map((row) => row.map(escapeCsvValue).join(',')).join('\\n');\n\n // Write to file, overwriting existing content\n writeFileSync(filePath, csvContent);\n}\n\n/**\n * Initialize a CSV file by writing only the header row (or an empty file if no headers).\n *\n * @param filePath - CSV path\n * @param headers - Ordered list of column names; if empty, creates/empties the file\n */\nexport function initCsvFile(filePath: string, headers: string[]): void {\n if (!headers || headers.length === 0) {\n writeFileSync(filePath, '');\n return;\n }\n const headerLine = headers.map(escapeCsvValue).join(',');\n writeFileSync(filePath, `${headerLine}\\n`);\n}\n\n/**\n * Append rows to CSV using an explicit header order (no header line).\n * Values are written in the order of `headerOrder`.\n *\n * @param filePath - CSV path\n * @param data - Row objects\n * @param headerOrder - Column order to apply\n */\nexport function appendCsvRowsOrdered(\n filePath: string,\n data: ObjByString[],\n headerOrder: string[],\n): void {\n if (!data.length) return;\n\n const lines = data.map((row) => {\n const vals = headerOrder.map((key) => {\n const v = row[key];\n return v == null ? 
'' : String(v);\n });\n return vals.map(escapeCsvValue).join(',');\n });\n\n appendFileSync(filePath, `${lines.join('\\n')}\\n`);\n}\n\n/**\n * Append data to an existing csv file synchronously (legacy, uses Object.values order).\n * Prefer appendCsvRowsOrdered for deterministic column order.\n *\n * @param filePath - File to append to\n * @param data - Data to append\n */\nexport function appendCsvSync(filePath: string, data: ObjByString[]): void {\n // Convert data to CSV rows\n const rows = data.map((row) => Object.values(row));\n\n // Build CSV content with proper escaping\n const csvContent = rows.map((row) => row.map(escapeCsvValue).join(',')).join('\\n');\n\n // Append to file with leading newline\n appendFileSync(filePath, `\\n${csvContent}`);\n}\n\n/**\n * Write a csv to file asynchronously\n *\n * @param filePath - File to write out to\n * @param data - Data to write\n * @param headers - Headers\n */\nexport async function writeCsv(\n filePath: string,\n data: ObjByString[],\n headers: boolean | string[] = true,\n): Promise<void> {\n const ws = createWriteStream(filePath);\n await new Promise<void>((resolve, reject) => {\n try {\n const stream = fastcsv.write(data, { headers, objectMode: true }).on('error', reject);\n\n ws.on('error', reject);\n ws.on('finish', () => resolve());\n\n stream.pipe(ws);\n } catch (err) {\n reject(err);\n }\n });\n}\n\n/**\n * Parse a file path into a base name and extension\n *\n * @param filePath - File path to parse\n * @returns Base name and extension\n */\nexport function parseFilePath(filePath: string): {\n /** Base name of the file */\n baseName: string;\n /** Extension of the file */\n extension: string;\n} {\n const lastDotIndex = filePath.lastIndexOf('.');\n return {\n baseName: lastDotIndex !== -1 ? filePath.substring(0, lastDotIndex) : filePath,\n extension: lastDotIndex !== -1 ? 
filePath.substring(lastDotIndex) : '.csv',\n };\n}\n\n/**\n * Convert an object row into values aligned to header order\n *\n * @param row - Row object\n * @param headerOrder - Header order\n * @returns Aligned row object\n */\nfunction rowToValues(row: ObjByString, headerOrder: string[]): Record<string, unknown> {\n // fast-csv with objectMode expects objects; we ensure consistent key ordering\n // by building a new object with keys in headerOrder.\n const ordered: Record<string, unknown> = {};\n for (const key of headerOrder) {\n // Preserve undefined -> becomes empty cell in CSV\n ordered[key] = row[key];\n }\n return ordered;\n}\n\n/**\n * Await the 'drain' event when backpressure indicates buffering\n *\n * @param stream - Writable stream\n * @returns Promise that resolves on 'drain'\n */\nfunction waitForDrain(stream: NodeJS.WritableStream): Promise<void> {\n return new Promise((resolve) => {\n stream.once('drain', resolve);\n });\n}\n\n/**\n * Stream a large CSV dataset to a single file with proper backpressure handling.\n * (Kept for completeness; not used by the incremental write path.)\n *\n * @param filePath - File to write out to\n * @param data - Data to write (iterated without buffering the entire file content)\n * @param headers - If true, infer from first row; if string[], use provided; if false, omit header row\n * @returns Array with a single written file path\n */\nexport async function writeLargeCsv(\n filePath: string,\n data: ObjByString[],\n headers: boolean | string[] = true,\n): Promise<string[]> {\n // Determine header order\n let headerOrder: string[] | false;\n if (Array.isArray(headers)) {\n headerOrder = headers;\n } else if (headers === true) {\n headerOrder = data.length > 0 ? Object.keys(data[0]) : [];\n } else {\n headerOrder = false;\n }\n\n const ws = createWriteStream(filePath);\n const csvStream = fastcsv.format<ObjByString, ObjByString>({\n headers: headerOrder || undefined,\n objectMode: true,\n });\n\n // Pipe CSV stream into file write stream\n const piping = csvStream.pipe(ws);\n\n const completion = new Promise<void>((resolve, reject) => {\n piping.on('finish', () => resolve());\n piping.on('error', reject);\n csvStream.on('error', reject);\n ws.on('error', reject);\n });\n\n // Stream rows with backpressure handling\n for (const row of data) {\n const toWrite = headerOrder ? 
rowToValues(row, headerOrder) : row;\n const ok = csvStream.write(toWrite);\n if (!ok) {\n // Respect backpressure: wait until the internal buffer drains\n await waitForDrain(csvStream);\n }\n }\n\n // Signal end of input and wait for finish\n csvStream.end();\n await completion;\n\n return [filePath];\n}\n"],"mappings":"8GAWA,SAAS,EAAe,EAAuB,CAI7C,OAHI,EAAM,SAAS,IAAI,EAAI,EAAM,SAAS,IAAI,EAAI,EAAM,SAAS;EAAK,CAC7D,IAAI,EAAM,QAAQ,KAAM,KAAK,CAAC,GAEhC,EAUT,SAAgB,EAAa,EAAkB,EAAqB,EAAyB,CAC3F,IAAM,EAAmB,EAAE,CAE3B,EAAK,KAAK,EAAQ,CAClB,EAAK,KAAK,GAAG,EAAK,IAAK,GAAQ,OAAO,OAAO,EAAI,CAAC,CAAC,CAMnD,EAAc,EAHK,EAAK,IAAK,GAAQ,EAAI,IAAI,EAAe,CAAC,KAAK,IAAI,CAAC,CAAC,KAAK;EAAK,CAG/C,CASrC,SAAgB,EAAY,EAAkB,EAAyB,CACrE,GAAI,CAAC,GAAW,EAAQ,SAAW,EAAG,CACpC,EAAc,EAAU,GAAG,CAC3B,OAGF,EAAc,EAAU,GADL,EAAQ,IAAI,EAAe,CAAC,KAAK,IAAI,CAClB,IAAI,CAW5C,SAAgB,EACd,EACA,EACA,EACM,CACD,EAAK,QAUV,EAAe,EAAU,GARX,EAAK,IAAK,GACT,EAAY,IAAK,GAAQ,CACpC,IAAM,EAAI,EAAI,GACd,OAAO,GAAK,KAAO,GAAK,OAAO,EAAE,EACjC,CACU,IAAI,EAAe,CAAC,KAAK,IAAI,CACzC,CAEgC,KAAK;EAAK,CAAC,IAAI,CAUnD,SAAgB,EAAc,EAAkB,EAA2B,CAQzE,EAAe,EAAU,KANZ,EAAK,IAAK,GAAQ,OAAO,OAAO,EAAI,CAAC,CAG1B,IAAK,GAAQ,EAAI,IAAI,EAAe,CAAC,KAAK,IAAI,CAAC,CAAC,KAAK;EAAK,GAGvC,CAU7C,eAAsB,EACpB,EACA,EACA,EAA8B,GACf,CACf,IAAM,EAAK,EAAkB,EAAS,CACtC,MAAM,IAAI,SAAe,EAAS,IAAW,CAC3C,GAAI,CACF,IAAM,EAAS,EAAQ,MAAM,EAAM,CAAE,UAAS,WAAY,GAAM,CAAC,CAAC,GAAG,QAAS,EAAO,CAErF,EAAG,GAAG,QAAS,EAAO,CACtB,EAAG,GAAG,aAAgB,GAAS,CAAC,CAEhC,EAAO,KAAK,EAAG,OACR,EAAK,CACZ,EAAO,EAAI,GAEb,CASJ,SAAgB,EAAc,EAK5B,CACA,IAAM,EAAe,EAAS,YAAY,IAAI,CAC9C,MAAO,CACL,SAAU,IAAiB,GAA2C,EAAtC,EAAS,UAAU,EAAG,EAAa,CACnE,UAAW,IAAiB,GAAwC,OAAnC,EAAS,UAAU,EAAa,CAClE,CAUH,SAAS,EAAY,EAAkB,EAAgD,CAGrF,IAAM,EAAmC,EAAE,CAC3C,IAAK,IAAM,KAAO,EAEhB,EAAQ,GAAO,EAAI,GAErB,OAAO,EAST,SAAS,EAAa,EAA8C,CAClE,OAAO,IAAI,QAAS,GAAY,CAC9B,EAAO,KAAK,QAAS,EAAQ,EAC7B,CAYJ,eAAsB,EACpB,EACA,EACA,EAA8B,GACX,CAEnB,IAAI,EACJ,AAKE,EALE,MAAM,QAAQ,EAAQ,CACV,EACL,IAAY,GACP,EAAK,OAAS,EAAI,OAAO,KAAK,EAAK,GAAG,CAAG,EAAE,CAE3C,GAGhB,IAAM,EAAK,EAAkB,EAAS,CAChC,EAAY,EAAQ,OAAiC,CACzD,QAAS,GAAe,IAAA,GACxB,WAAY,GACb,CAAC,CAGI,EAAS,EAAU,KAAK,EAAG,CAE3B,EAAa,IAAI,SAAe,EAAS,IAAW,CACxD,EAAO,GAAG,aAAgB,GAAS,CAAC,CACpC,EAAO,GAAG,QAAS,EAAO,CAC1B,EAAU,GAAG,QAAS,EAAO,CAC7B,EAAG,GAAG,QAAS,EAAO,EACtB,CAGF,IAAK,IAAM,KAAO,EAAM,CACtB,IAAM,EAAU,EAAc,EAAY,EAAK,EAAY,CAAG,EACnD,EAAU,MAAM,EAAQ,EAGjC,MAAM,EAAa,EAAU,CAQjC,OAHA,EAAU,KAAK,CACf,MAAM,EAEC,CAAC,EAAS"}
+ {"version":3,"file":"writeCsv-C4pjXGsD.mjs","names":[],"sources":["../src/lib/helpers/writeCsv.ts"],"sourcesContent":["import { createWriteStream, writeFileSync, appendFileSync } from 'node:fs';\n\nimport { ObjByString } from '@transcend-io/type-utils';\nimport * as fastcsv from 'fast-csv';\n\n/**\n * Escape a CSV value\n *\n * @param value - Value to escape\n * @returns Escaped value\n */\nfunction escapeCsvValue(value: string): string {\n if (value.includes('\"') || value.includes(',') || value.includes('\\n')) {\n return `\"${value.replace(/\"/g, '\"\"')}\"`;\n }\n return value;\n}\n\n/**\n * Write a csv to file synchronously, overwriting any existing content\n *\n * @param filePath - File to write out to\n * @param data - Data to write\n * @param headers - Headers. If true, use object keys as headers. If array, use provided headers.\n */\nexport function writeCsvSync(filePath: string, data: ObjByString[], headers: string[]): void {\n const rows: string[][] = [];\n\n rows.push(headers);\n rows.push(...data.map((row) => Object.values(row)));\n\n // Build CSV content with proper escaping\n const csvContent = rows.map((row) => row.map(escapeCsvValue).join(',')).join('\\n');\n\n // Write to file, overwriting existing content\n writeFileSync(filePath, csvContent);\n}\n\n/**\n * Initialize a CSV file by writing only the header row (or an empty file if no headers).\n *\n * @param filePath - CSV path\n * @param headers - Ordered list of column names; if empty, creates/empties the file\n */\nexport function initCsvFile(filePath: string, headers: string[]): void {\n if (!headers || headers.length === 0) {\n writeFileSync(filePath, '');\n return;\n }\n const headerLine = headers.map(escapeCsvValue).join(',');\n writeFileSync(filePath, `${headerLine}\\n`);\n}\n\n/**\n * Append rows to CSV using an explicit header order (no header line).\n * Values are written in the order of `headerOrder`.\n *\n * @param filePath - CSV path\n * @param data - Row objects\n * @param headerOrder - Column order to apply\n */\nexport function appendCsvRowsOrdered(\n filePath: string,\n data: ObjByString[],\n headerOrder: string[],\n): void {\n if (!data.length) return;\n\n const lines = data.map((row) => {\n const vals = headerOrder.map((key) => {\n const v = row[key];\n return v == null ? 
'' : String(v);\n });\n return vals.map(escapeCsvValue).join(',');\n });\n\n appendFileSync(filePath, `${lines.join('\\n')}\\n`);\n}\n\n/**\n * Append data to an existing csv file synchronously (legacy, uses Object.values order).\n * Prefer appendCsvRowsOrdered for deterministic column order.\n *\n * @param filePath - File to append to\n * @param data - Data to append\n */\nexport function appendCsvSync(filePath: string, data: ObjByString[]): void {\n // Convert data to CSV rows\n const rows = data.map((row) => Object.values(row));\n\n // Build CSV content with proper escaping\n const csvContent = rows.map((row) => row.map(escapeCsvValue).join(',')).join('\\n');\n\n // Append to file with leading newline\n appendFileSync(filePath, `\\n${csvContent}`);\n}\n\n/**\n * Write a csv to file asynchronously\n *\n * @param filePath - File to write out to\n * @param data - Data to write\n * @param headers - Headers\n */\nexport async function writeCsv(\n filePath: string,\n data: ObjByString[],\n headers: boolean | string[] = true,\n): Promise<void> {\n const ws = createWriteStream(filePath);\n await new Promise<void>((resolve, reject) => {\n try {\n const stream = fastcsv.write(data, { headers, objectMode: true }).on('error', reject);\n\n ws.on('error', reject);\n ws.on('finish', () => resolve());\n\n stream.pipe(ws);\n } catch (err) {\n reject(err);\n }\n });\n}\n\n/**\n * Parse a file path into a base name and extension\n *\n * @param filePath - File path to parse\n * @returns Base name and extension\n */\nexport function parseFilePath(filePath: string): {\n /** Base name of the file */\n baseName: string;\n /** Extension of the file */\n extension: string;\n} {\n const lastDotIndex = filePath.lastIndexOf('.');\n return {\n baseName: lastDotIndex !== -1 ? filePath.substring(0, lastDotIndex) : filePath,\n extension: lastDotIndex !== -1 ? 
filePath.substring(lastDotIndex) : '.csv',\n };\n}\n\n/**\n * Convert an object row into values aligned to header order\n *\n * @param row - Row object\n * @param headerOrder - Header order\n * @returns Aligned row object\n */\nfunction rowToValues(row: ObjByString, headerOrder: string[]): Record<string, unknown> {\n // fast-csv with objectMode expects objects; we ensure consistent key ordering\n // by building a new object with keys in headerOrder.\n const ordered: Record<string, unknown> = {};\n for (const key of headerOrder) {\n // Preserve undefined -> becomes empty cell in CSV\n ordered[key] = row[key];\n }\n return ordered;\n}\n\n/**\n * Await the 'drain' event when backpressure indicates buffering\n *\n * @param stream - Writable stream\n * @returns Promise that resolves on 'drain'\n */\nfunction waitForDrain(stream: NodeJS.WritableStream): Promise<void> {\n return new Promise((resolve) => {\n stream.once('drain', resolve);\n });\n}\n\n/**\n * Stream a large CSV dataset to a single file with proper backpressure handling.\n * (Kept for completeness; not used by the incremental write path.)\n *\n * @param filePath - File to write out to\n * @param data - Data to write (iterated without buffering the entire file content)\n * @param headers - If true, infer from first row; if string[], use provided; if false, omit header row\n * @returns Array with a single written file path\n */\nexport async function writeLargeCsv(\n filePath: string,\n data: ObjByString[],\n headers: boolean | string[] = true,\n): Promise<string[]> {\n // Determine header order\n let headerOrder: string[] | false;\n if (Array.isArray(headers)) {\n headerOrder = headers;\n } else if (headers === true) {\n headerOrder = data.length > 0 ? Object.keys(data[0]) : [];\n } else {\n headerOrder = false;\n }\n\n const ws = createWriteStream(filePath);\n const csvStream = fastcsv.format<ObjByString, ObjByString>({\n headers: headerOrder || undefined,\n objectMode: true,\n });\n\n // Pipe CSV stream into file write stream\n const piping = csvStream.pipe(ws);\n\n const completion = new Promise<void>((resolve, reject) => {\n piping.on('finish', () => resolve());\n piping.on('error', reject);\n csvStream.on('error', reject);\n ws.on('error', reject);\n });\n\n // Stream rows with backpressure handling\n for (const row of data) {\n const toWrite = headerOrder ? 
rowToValues(row, headerOrder) : row;\n const ok = csvStream.write(toWrite);\n if (!ok) {\n // Respect backpressure: wait until the internal buffer drains\n await waitForDrain(csvStream);\n }\n }\n\n // Signal end of input and wait for finish\n csvStream.end();\n await completion;\n\n return [filePath];\n}\n"],"mappings":"8GAWA,SAAS,EAAe,EAAuB,CAI7C,OAHI,EAAM,SAAS,IAAI,EAAI,EAAM,SAAS,IAAI,EAAI,EAAM,SAAS;EAAK,CAC7D,IAAI,EAAM,QAAQ,KAAM,KAAK,CAAC,GAEhC,EAUT,SAAgB,EAAa,EAAkB,EAAqB,EAAyB,CAC3F,IAAM,EAAmB,EAAE,CAE3B,EAAK,KAAK,EAAQ,CAClB,EAAK,KAAK,GAAG,EAAK,IAAK,GAAQ,OAAO,OAAO,EAAI,CAAC,CAAC,CAMnD,EAAc,EAHK,EAAK,IAAK,GAAQ,EAAI,IAAI,EAAe,CAAC,KAAK,IAAI,CAAC,CAAC,KAAK;EAAK,CAG/C,CASrC,SAAgB,EAAY,EAAkB,EAAyB,CACrE,GAAI,CAAC,GAAW,EAAQ,SAAW,EAAG,CACpC,EAAc,EAAU,GAAG,CAC3B,OAGF,EAAc,EAAU,GADL,EAAQ,IAAI,EAAe,CAAC,KAAK,IAAI,CAClB,IAAI,CAW5C,SAAgB,EACd,EACA,EACA,EACM,CACD,EAAK,QAUV,EAAe,EAAU,GARX,EAAK,IAAK,GACT,EAAY,IAAK,GAAQ,CACpC,IAAM,EAAI,EAAI,GACd,OAAO,GAAK,KAAO,GAAK,OAAO,EAAE,EACjC,CACU,IAAI,EAAe,CAAC,KAAK,IAAI,CACzC,CAEgC,KAAK;EAAK,CAAC,IAAI,CAUnD,SAAgB,EAAc,EAAkB,EAA2B,CAQzE,EAAe,EAAU,KANZ,EAAK,IAAK,GAAQ,OAAO,OAAO,EAAI,CAAC,CAG1B,IAAK,GAAQ,EAAI,IAAI,EAAe,CAAC,KAAK,IAAI,CAAC,CAAC,KAAK;EAAK,GAGvC,CAU7C,eAAsB,EACpB,EACA,EACA,EAA8B,GACf,CACf,IAAM,EAAK,EAAkB,EAAS,CACtC,MAAM,IAAI,SAAe,EAAS,IAAW,CAC3C,GAAI,CACF,IAAM,EAAS,EAAQ,MAAM,EAAM,CAAE,UAAS,WAAY,GAAM,CAAC,CAAC,GAAG,QAAS,EAAO,CAErF,EAAG,GAAG,QAAS,EAAO,CACtB,EAAG,GAAG,aAAgB,GAAS,CAAC,CAEhC,EAAO,KAAK,EAAG,OACR,EAAK,CACZ,EAAO,EAAI,GAEb,CASJ,SAAgB,EAAc,EAK5B,CACA,IAAM,EAAe,EAAS,YAAY,IAAI,CAC9C,MAAO,CACL,SAAU,IAAiB,GAA2C,EAAtC,EAAS,UAAU,EAAG,EAAa,CACnE,UAAW,IAAiB,GAAwC,OAAnC,EAAS,UAAU,EAAa,CAClE,CAUH,SAAS,EAAY,EAAkB,EAAgD,CAGrF,IAAM,EAAmC,EAAE,CAC3C,IAAK,IAAM,KAAO,EAEhB,EAAQ,GAAO,EAAI,GAErB,OAAO,EAST,SAAS,EAAa,EAA8C,CAClE,OAAO,IAAI,QAAS,GAAY,CAC9B,EAAO,KAAK,QAAS,EAAQ,EAC7B,CAYJ,eAAsB,EACpB,EACA,EACA,EAA8B,GACX,CAEnB,IAAI,EACJ,AAKE,EALE,MAAM,QAAQ,EAAQ,CACV,EACL,IAAY,GACP,EAAK,OAAS,EAAI,OAAO,KAAK,EAAK,GAAG,CAAG,EAAE,CAE3C,GAGhB,IAAM,EAAK,EAAkB,EAAS,CAChC,EAAY,EAAQ,OAAiC,CACzD,QAAS,GAAe,IAAA,GACxB,WAAY,GACb,CAAC,CAGI,EAAS,EAAU,KAAK,EAAG,CAE3B,EAAa,IAAI,SAAe,EAAS,IAAW,CACxD,EAAO,GAAG,aAAgB,GAAS,CAAC,CACpC,EAAO,GAAG,QAAS,EAAO,CAC1B,EAAU,GAAG,QAAS,EAAO,CAC7B,EAAG,GAAG,QAAS,EAAO,EACtB,CAGF,IAAK,IAAM,KAAO,EAAM,CACtB,IAAM,EAAU,EAAc,EAAY,EAAK,EAAY,CAAG,EACnD,EAAU,MAAM,EAAQ,EAGjC,MAAM,EAAa,EAAU,CAQjC,OAHA,EAAU,KAAK,CACf,MAAM,EAEC,CAAC,EAAS"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@transcend-io/cli",
- "version": "10.1.0",
+ "version": "10.2.0",
  "description": "A command line interface for programmatic operations across Transcend.",
  "homepage": "https://github.com/transcend-io/tools/tree/main/packages/cli",
  "license": "Apache-2.0",
@@ -75,7 +75,7 @@
  "@stricli/core": "^1.2.0",
  "@transcend-io/airgap.js-types": "^12.16.0",
  "@transcend-io/handlebars-utils": "^1.3.2",
- "@transcend-io/internationalization": "^2.3.2",
+ "@transcend-io/internationalization": "^3.0.0",
  "@transcend-io/persisted-state": "^1.0.6",
  "@transcend-io/secret-value": "^1.2.3",
  "@transcend-io/type-utils": "^1.8.9",
@@ -103,11 +103,11 @@
  "ms": "3.0.0-canary.202508261828",
  "newtype-ts": "^0.3.5",
  "query-string": "=7.0.0",
- "semver": "^7.6.0",
+ "semver": "^7.7.4",
  "undici": "^5.22.1",
  "yargs-parser": "^21.1.1",
- "@transcend-io/privacy-types": "5.0.1",
- "@transcend-io/sdk": "0.1.0",
+ "@transcend-io/privacy-types": "5.1.0",
+ "@transcend-io/sdk": "1.0.0",
  "@transcend-io/utils": "0.1.0"
  },
  "devDependencies": {
@@ -125,7 +125,7 @@
  "@types/jsonwebtoken": "^9",
  "@types/lodash-es": "^4.17.12",
  "@types/node": "^22.19.15",
- "@types/semver": "^7",
+ "@types/semver": "^7.7.1",
  "@types/yargs-parser": "^21.0.0",
  "date-fns": "^4.1.0",
  "doctoc": "^2.2.1",
@@ -1,54 +0,0 @@
- import{gql as e}from"graphql-request";const t=e`
- query TranscendCliRequestDataSilos(
- $first: Int!
- $offset: Int!
- $filterBy: RequestDataSiloFiltersInput!
- ) {
- requestDataSilos(
- filterBy: $filterBy
- first: $first
- offset: $offset
- useMaster: false
- orderBy: [
- { field: createdAt, direction: DESC }
- { field: title, direction: ASC, model: dataSilo }
- ]
- ) {
- nodes {
- id
- request {
- type
- }
- }
- totalCount
- }
- }
- `,n=e`
- mutation TranscendCliMarkRequestDataSiloCompleted(
- $requestDataSiloId: ID!
- $status: UpdateRequestDataSiloStatus!
- ) {
- changeRequestDataSiloStatus(input: { id: $requestDataSiloId, status: $status }) {
- requestDataSilo {
- id
- }
- }
- }
- `,r=e`
- mutation TranscendCliRetryRequestDataSilo($requestDataSiloId: ID!) {
- retryRequestDataSilo(id: $requestDataSiloId) {
- requestDataSilo {
- id
- }
- }
- }
- `,i=e`
- query TranscendCliListReducedRequestsForDataSiloCount(
- $input: BulkCompletionReducedRequestInput!
- ) {
- listReducedRequestsForDataSilo(input: $input) {
- totalCount
- }
- }
- `;export{r as i,i as n,t as r,n as t};
- //# sourceMappingURL=RequestDataSilo-Rrc2dL9g.mjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"RequestDataSilo-Rrc2dL9g.mjs","names":[],"sources":["../src/lib/graphql/gqls/RequestDataSilo.ts"],"sourcesContent":["import { gql } from 'graphql-request';\n\n// TODO: https://transcend.height.app/T-27909 - enable optimizations\n// isExportCsv: true\nexport const REQUEST_DATA_SILOS = gql`\n query TranscendCliRequestDataSilos(\n $first: Int!\n $offset: Int!\n $filterBy: RequestDataSiloFiltersInput!\n ) {\n requestDataSilos(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n orderBy: [\n { field: createdAt, direction: DESC }\n { field: title, direction: ASC, model: dataSilo }\n ]\n ) {\n nodes {\n id\n request {\n type\n }\n }\n totalCount\n }\n }\n`;\n\nexport const CHANGE_REQUEST_DATA_SILO_STATUS = gql`\n mutation TranscendCliMarkRequestDataSiloCompleted(\n $requestDataSiloId: ID!\n $status: UpdateRequestDataSiloStatus!\n ) {\n changeRequestDataSiloStatus(input: { id: $requestDataSiloId, status: $status }) {\n requestDataSilo {\n id\n }\n }\n }\n`;\n\nexport const RETRY_REQUEST_DATA_SILO = gql`\n mutation TranscendCliRetryRequestDataSilo($requestDataSiloId: ID!) {\n retryRequestDataSilo(id: $requestDataSiloId) {\n requestDataSilo {\n id\n }\n }\n }\n`;\n\n// TODO: https://transcend.height.app/T-27909 - enable optimizations\n// isExportCsv: true\n// useMaster: false\n// orderBy: [\n// { field: createdAt, direction: DESC }\n// { field: title, direction: ASC, model: dataSilo }\n// ]\nexport const REDUCED_REQUESTS_FOR_DATA_SILO_COUNT = gql`\n query TranscendCliListReducedRequestsForDataSiloCount(\n $input: BulkCompletionReducedRequestInput!\n ) {\n listReducedRequestsForDataSilo(input: $input) {\n totalCount\n }\n }\n`;\n"],"mappings":"sCAIA,MAAa,EAAqB,CAAG;;;;;;;;;;;;;;;;;;;;;;;;;EA2BxB,EAAkC,CAAG;;;;;;;;;;;EAarC,EAA0B,CAAG;;;;;;;;EAiB7B,EAAuC,CAAG"}
@@ -1,2 +0,0 @@
- import{a as e}from"./constants-XOsAW1__.mjs";import{t}from"./logger-Bj782ZYD.mjs";import{n,r}from"./fetchAllRequestIdentifiers-BXx3rSee.mjs";import{s as i,u as a}from"./constants-DYbzl8QH.mjs";import{r as o}from"./fetchAllRequests-xGgt_STo.mjs";import{n as s,t as c}from"./extractClientError-X9wJVqGq.mjs";import{IdentifierType as l}from"@transcend-io/privacy-types";import{difference as u,groupBy as d}from"lodash-es";import{apply as f,decodeCodec as p}from"@transcend-io/type-utils";import{join as m,resolve as h}from"node:path";import g from"colors";import*as _ from"io-ts";import{buildTranscendGraphQLClient as v,createSombraGotInstance as y}from"@transcend-io/sdk";import{map as b}from"@transcend-io/utils";import x from"cli-progress";import{PersistedState as S}from"@transcend-io/persisted-state";async function C(e,t,{sendEmailReceipt:n=!1,skipWaitingPeriod:r=!1,emailIsVerified:a=!0,requestIdentifiers:o=[]}={}){let c=await e.post(`v1/data-subject-request`,{json:{type:t.type,subject:{coreIdentifier:t.coreIdentifier,email:t.email,emailIsVerified:a,...o.length>0?{attestedExtraIdentifiers:f(d(o.filter(e=>!(e.name===`email`&&e.value===t.email)&&!i.includes(e.name)).map(e=>({...e,type:Object.values(l).includes(e.name)?e.name:l.Custom})),`type`),(e,t)=>e.map(({name:e,value:n})=>({...t===l.Custom?{name:e}:{},value:n})))}:{}},requestId:t.id,subjectType:t.subjectType,isSilent:t.isSilent,isTest:t.isTest,locale:t.locale,skipWaitingPeriod:r,createdAt:t.createdAt,details:`Restarted by Transcend cli: "tr-request-restart" - ${t.details}`,skipSendingReceipt:!n}}).json(),{request:u}=p(_.type({request:s}),c);return u}const w=_.intersection([a,_.type({error:_.string})]),T=_.type({restartedRequests:_.array(a),failingRequests:_.array(w)});async function E({requestReceiptFolder:i,auth:a,sombraAuth:s,requestActions:l,requestStatuses:d,createdAtBefore:f,createdAtAfter:p,updatedAtBefore:_,updatedAtAfter:w,transcendUrl:E=e,requestIds:D=[],createdAt:O=new Date,silentModeBefore:k,sendEmailReceipt:A=!1,emailIsVerified:j=!0,copyIdentifiers:M=!1,skipWaitingPeriod:N=!1,concurrency:P=20}){let F=new Date().getTime(),I=new x.SingleBar({},x.Presets.shades_classic),L=m(i,`tr-request-restart-${new Date().toISOString()}.json`),R=new S(L,T,{restartedRequests:[],failingRequests:[]}),z=await y(E,a,{logger:t,sombraApiKey:s,sombraUrl:process.env.SOMBRA_URL}),B=v(E,a),V=(await o(B,{requestIds:D,actions:l,statuses:d,createdAtBefore:f,createdAtAfter:p,updatedAtBefore:_,updatedAtAfter:w})).filter(e=>new Date(e.createdAt)<O);if(t.info(`Found ${V.length} requests to restart`),M&&t.info(`copyIdentifiers detected - All Identifiers will be copied.`),A&&t.info(`sendEmailReceipt detected - Email receipts will be sent.`),N&&t.info(`skipWaitingPeriod detected - Waiting period will be skipped.`),D.length>0&&D.length!==V.length){let e=u(D,V.map(({id:e})=>e));e.length>0&&(t.error(g.red(`Failed to find the following requests by ID: ${e.join(`,`)}.`)),process.exit(1))}M&&await r(B);let H=0;I.start(V.length,0),await b(V,async(e,t)=>{try{let r=M?await n(B,z,{requestId:e.id,skipSombraCheck:!0}):[],i=await C(z,{...e,isSilent:k&&new Date(e.createdAt)<k?!0:e.isSilent},{requestIdentifiers:r,skipWaitingPeriod:N,sendEmailReceipt:A,emailIsVerified:j}),a=R.getValue(`restartedRequests`);a.push({id:i.id,link:i.link,rowIndex:t,coreIdentifier:i.coreIdentifier,attemptedAt:new Date().toISOString()}),await R.setValue(a,`restartedRequests`)}catch(n){let r=`${n.message} - 
${JSON.stringify(n.response?.body,null,2)}`,i=c(r),a=R.getValue(`failingRequests`);a.push({id:e.id,link:e.link,rowIndex:t,coreIdentifier:e.coreIdentifier,attemptedAt:new Date().toISOString(),error:i||r}),await R.setValue(a,`failingRequests`)}H+=1,I.update(H)},{concurrency:P}),I.stop();let U=new Date().getTime()-F;t.info(g.green(`Completed restarting of requests in "${U/1e3}" seconds.`)),R.getValue(`failingRequests`).length>0&&(t.error(g.red(`Encountered "${R.getValue(`failingRequests`).length}" errors. See "${h(L)}" to review the error messages and inputs.`)),process.exit(1))}export{C as n,E as t};
- //# sourceMappingURL=bulkRestartRequests-sie3tM3W.mjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"bulkRestartRequests-sie3tM3W.mjs","names":[],"sources":["../src/lib/requests/restartPrivacyRequest.ts","../src/lib/requests/bulkRestartRequests.ts"],"sourcesContent":["import { IdentifierType } from '@transcend-io/privacy-types';\nimport { apply, decodeCodec } from '@transcend-io/type-utils';\nimport type { Got } from 'got';\nimport * as t from 'io-ts';\nimport { groupBy } from 'lodash-es';\n\nimport { PrivacyRequest, RequestIdentifier } from '../graphql/index.js';\nimport { IDENTIFIER_BLOCK_LIST } from './constants.js';\nimport { PrivacyRequestResponse } from './submitPrivacyRequest.js';\n\n/**\n * Restart a privacy request to the Transcend API\n *\n * @param sombra - Sombra instance configured to make requests\n * @param request - Request to restart\n * @param input - Request input\n * @returns Successfully submitted request\n */\nexport async function restartPrivacyRequest(\n sombra: Got,\n request: PrivacyRequest,\n {\n sendEmailReceipt = false,\n skipWaitingPeriod = false,\n emailIsVerified = true,\n requestIdentifiers = [],\n }: {\n /** List of request identifiers to include */\n requestIdentifiers?: RequestIdentifier[];\n /** When true, send an email receipt to data subject */\n sendEmailReceipt?: boolean;\n /** Whether the email is verified */\n emailIsVerified?: boolean;\n /** Whether to skip waiting period */\n skipWaitingPeriod?: boolean;\n } = {},\n): Promise<PrivacyRequestResponse> {\n // Make the GraphQL request\n const response = await sombra\n .post('v1/data-subject-request', {\n json: {\n type: request.type,\n subject: {\n coreIdentifier: request.coreIdentifier,\n email: request.email,\n emailIsVerified,\n ...(requestIdentifiers.length > 0\n ? {\n attestedExtraIdentifiers: apply(\n groupBy(\n requestIdentifiers\n .filter(\n (ri) =>\n // these are already submitted above\n !(ri.name === 'email' && ri.value === request.email) &&\n !IDENTIFIER_BLOCK_LIST.includes(ri.name),\n )\n .map((ri) => ({\n ...ri,\n type: Object.values(IdentifierType).includes(\n ri.name as any, // eslint-disable-line @typescript-eslint/no-explicit-any\n )\n ? ri.name\n : IdentifierType.Custom,\n })),\n 'type',\n ),\n (values, type) =>\n values.map(({ name, value }) => ({\n ...(type === IdentifierType.Custom ? 
{ name } : {}),\n value,\n })),\n ),\n }\n : {}),\n },\n requestId: request.id,\n subjectType: request.subjectType,\n isSilent: request.isSilent,\n isTest: request.isTest,\n locale: request.locale,\n skipWaitingPeriod,\n createdAt: request.createdAt,\n details: `Restarted by Transcend cli: \"tr-request-restart\" - ${request.details}`,\n skipSendingReceipt: !sendEmailReceipt,\n },\n })\n .json();\n\n const { request: requestResponse } = decodeCodec(\n t.type({\n request: PrivacyRequestResponse,\n }),\n response,\n );\n return requestResponse;\n}\n","import { join, resolve } from 'node:path';\n\nimport { PersistedState } from '@transcend-io/persisted-state';\nimport { RequestAction, RequestStatus } from '@transcend-io/privacy-types';\nimport { buildTranscendGraphQLClient, createSombraGotInstance } from '@transcend-io/sdk';\nimport { map } from '@transcend-io/utils';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport * as t from 'io-ts';\nimport { difference } from 'lodash-es';\n\nimport { DEFAULT_TRANSCEND_API } from '../../constants.js';\nimport { logger } from '../../logger.js';\nimport {\n fetchAllRequestIdentifiers,\n fetchAllRequests,\n validateSombraVersion,\n} from '../graphql/index.js';\nimport { SuccessfulRequest } from './constants.js';\nimport { extractClientError } from './extractClientError.js';\nimport { restartPrivacyRequest } from './restartPrivacyRequest.js';\n\n/** Minimal state we need to keep a list of requests */\nconst ErrorRequest = t.intersection([\n SuccessfulRequest,\n t.type({\n error: t.string,\n }),\n]);\n\n/** Type override */\ntype ErrorRequest = t.TypeOf<typeof ErrorRequest>;\n\n/** Persist this data between runs of the script */\nconst CachedRequestState = t.type({\n restartedRequests: t.array(SuccessfulRequest),\n failingRequests: t.array(ErrorRequest),\n});\n\n/**\n * Upload a set of privacy requests from CSV\n *\n * @param options - Options\n */\nexport async function bulkRestartRequests({\n requestReceiptFolder,\n auth,\n sombraAuth,\n requestActions,\n requestStatuses,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n transcendUrl = DEFAULT_TRANSCEND_API,\n requestIds = [],\n createdAt = new Date(),\n silentModeBefore,\n sendEmailReceipt = false,\n emailIsVerified = true,\n copyIdentifiers = false,\n skipWaitingPeriod = false,\n concurrency = 20,\n}: {\n /** Actions to filter for */\n requestActions: RequestAction[];\n /** Statues to filter for */\n requestStatuses: RequestStatus[];\n /** File where request receipts are stored */\n requestReceiptFolder: string;\n /** Transcend API key authentication */\n auth: string;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Sombra API key authentication */\n sombraAuth?: string;\n /** Request IDs to filter for */\n requestIds?: string[];\n /** Whether to re-verify the email when restarting the request */\n emailIsVerified?: boolean;\n /** Filter for requests that were submitted before this date */\n createdAt?: Date;\n /** Requests that have been open for this length of time should be marked as silent mode */\n silentModeBefore?: Date;\n /** Send an email receipt to the restarted requests */\n sendEmailReceipt?: boolean;\n /** Copy over all identifiers rather than restarting the request only with the core identifier */\n copyIdentifiers?: boolean;\n /** Skip the waiting period when restarting requests */\n skipWaitingPeriod?: boolean;\n /** Filter for requests created before this date */\n createdAtBefore?: Date;\n /** Filter for requests 
created after this date */\n createdAtAfter?: Date;\n /** Filter for requests updated before this date */\n updatedAtBefore?: Date;\n /** Filter for requests updated after this date */\n updatedAtAfter?: Date;\n /** Concurrency to upload requests at */\n concurrency?: number;\n}): Promise<void> {\n // Time duration\n const t0 = new Date().getTime();\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);\n\n // Create a new state file to store the requests from this run\n const cacheFile = join(\n requestReceiptFolder,\n `tr-request-restart-${new Date().toISOString()}.json`,\n );\n const state = new PersistedState(cacheFile, CachedRequestState, {\n restartedRequests: [],\n failingRequests: [],\n });\n\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, {\n logger,\n sombraApiKey: sombraAuth,\n sombraUrl: process.env.SOMBRA_URL,\n });\n\n // Find all requests made before createdAt that are in a removing data state\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const allRequests = await fetchAllRequests(client, {\n requestIds,\n actions: requestActions,\n statuses: requestStatuses,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n });\n const requests = allRequests.filter((request) => new Date(request.createdAt) < createdAt);\n logger.info(`Found ${requests.length} requests to restart`);\n\n if (copyIdentifiers) {\n logger.info('copyIdentifiers detected - All Identifiers will be copied.');\n }\n if (sendEmailReceipt) {\n logger.info('sendEmailReceipt detected - Email receipts will be sent.');\n }\n if (skipWaitingPeriod) {\n logger.info('skipWaitingPeriod detected - Waiting period will be skipped.');\n }\n\n // Validate request IDs\n if (requestIds.length > 0 && requestIds.length !== requests.length) {\n const missingRequests = difference(\n requestIds,\n requests.map(({ id }) => id),\n );\n if (missingRequests.length > 0) {\n logger.error(\n colors.red(`Failed to find the following requests by ID: ${missingRequests.join(',')}.`),\n );\n process.exit(1);\n }\n }\n\n if (copyIdentifiers) {\n await validateSombraVersion(client);\n }\n\n // Map over the requests\n let total = 0;\n progressBar.start(requests.length, 0);\n await map(\n requests,\n async (request, ind) => {\n try {\n // Pull the request identifiers\n const requestIdentifiers = copyIdentifiers\n ? await fetchAllRequestIdentifiers(client, sombra, {\n requestId: request.id,\n skipSombraCheck: true,\n })\n : [];\n\n // Make the GraphQL request to restart the request\n const requestResponse = await restartPrivacyRequest(\n sombra,\n {\n ...request,\n // override silent mode\n isSilent:\n !!silentModeBefore && new Date(request.createdAt) < silentModeBefore\n ? 
true\n : request.isSilent,\n },\n {\n requestIdentifiers,\n skipWaitingPeriod,\n sendEmailReceipt,\n emailIsVerified,\n },\n );\n\n // Cache successful upload\n const restartedRequests = state.getValue('restartedRequests');\n restartedRequests.push({\n id: requestResponse.id,\n link: requestResponse.link,\n rowIndex: ind,\n coreIdentifier: requestResponse.coreIdentifier,\n attemptedAt: new Date().toISOString(),\n });\n await state.setValue(restartedRequests, 'restartedRequests');\n } catch (err) {\n const msg = `${err.message} - ${JSON.stringify(err.response?.body, null, 2)}`;\n const clientError = extractClientError(msg);\n\n const failingRequests = state.getValue('failingRequests');\n failingRequests.push({\n id: request.id,\n link: request.link,\n rowIndex: ind,\n coreIdentifier: request.coreIdentifier,\n attemptedAt: new Date().toISOString(),\n error: clientError || msg,\n });\n await state.setValue(failingRequests, 'failingRequests');\n }\n total += 1;\n progressBar.update(total);\n },\n { concurrency },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n // Log completion time\n logger.info(colors.green(`Completed restarting of requests in \"${totalTime / 1000}\" seconds.`));\n\n // Log errors\n if (state.getValue('failingRequests').length > 0) {\n logger.error(\n colors.red(\n `Encountered \"${state.getValue('failingRequests').length}\" errors. ` +\n `See \"${resolve(cacheFile)}\" to review the error messages and inputs.`,\n ),\n );\n process.exit(1);\n }\n}\n"],"mappings":"myBAkBA,eAAsB,EACpB,EACA,EACA,CACE,mBAAmB,GACnB,oBAAoB,GACpB,kBAAkB,GAClB,qBAAqB,EAAE,EAUrB,EAAE,CAC2B,CAEjC,IAAM,EAAW,MAAM,EACpB,KAAK,0BAA2B,CAC/B,KAAM,CACJ,KAAM,EAAQ,KACd,QAAS,CACP,eAAgB,EAAQ,eACxB,MAAO,EAAQ,MACf,kBACA,GAAI,EAAmB,OAAS,EAC5B,CACE,yBAA0B,EACxB,EACE,EACG,OACE,GAEC,EAAE,EAAG,OAAS,SAAW,EAAG,QAAU,EAAQ,QAC9C,CAAC,EAAsB,SAAS,EAAG,KAAK,CAC3C,CACA,IAAK,IAAQ,CACZ,GAAG,EACH,KAAM,OAAO,OAAO,EAAe,CAAC,SAClC,EAAG,KACJ,CACG,EAAG,KACH,EAAe,OACpB,EAAE,CACL,OACD,EACA,EAAQ,IACP,EAAO,KAAK,CAAE,OAAM,YAAa,CAC/B,GAAI,IAAS,EAAe,OAAS,CAAE,OAAM,CAAG,EAAE,CAClD,QACD,EAAE,CACN,CACF,CACD,EAAE,CACP,CACD,UAAW,EAAQ,GACnB,YAAa,EAAQ,YACrB,SAAU,EAAQ,SAClB,OAAQ,EAAQ,OAChB,OAAQ,EAAQ,OAChB,oBACA,UAAW,EAAQ,UACnB,QAAS,sDAAsD,EAAQ,UACvE,mBAAoB,CAAC,EACtB,CACF,CAAC,CACD,MAAM,CAEH,CAAE,QAAS,GAAoB,EACnC,EAAE,KAAK,CACL,QAAS,EACV,CAAC,CACF,EACD,CACD,OAAO,ECxET,MAAM,EAAe,EAAE,aAAa,CAClC,EACA,EAAE,KAAK,CACL,MAAO,EAAE,OACV,CAAC,CACH,CAAC,CAMI,EAAqB,EAAE,KAAK,CAChC,kBAAmB,EAAE,MAAM,EAAkB,CAC7C,gBAAiB,EAAE,MAAM,EAAa,CACvC,CAAC,CAOF,eAAsB,EAAoB,CACxC,uBACA,OACA,aACA,iBACA,kBACA,kBACA,iBACA,kBACA,iBACA,eAAe,EACf,aAAa,EAAE,CACf,YAAY,IAAI,KAChB,mBACA,mBAAmB,GACnB,kBAAkB,GAClB,kBAAkB,GAClB,oBAAoB,GACpB,cAAc,IAsCE,CAEhB,IAAM,EAAK,IAAI,MAAM,CAAC,SAAS,CAEzB,EAAc,IAAI,EAAY,UAAU,EAAE,CAAE,EAAY,QAAQ,eAAe,CAG/E,EAAY,EAChB,EACA,sBAAsB,IAAI,MAAM,CAAC,aAAa,CAAC,OAChD,CACK,EAAQ,IAAI,EAAe,EAAW,EAAoB,CAC9D,kBAAmB,EAAE,CACrB,gBAAiB,EAAE,CACpB,CAAC,CAGI,EAAS,MAAM,EAAwB,EAAc,EAAM,CAC/D,SACA,aAAc,EACd,UAAW,QAAQ,IAAI,WACxB,CAAC,CAGI,EAAS,EAA4B,EAAc,EAAK,CAUxD,GATc,MAAM,EAAiB,EAAQ,CACjD,aACA,QAAS,EACT,SAAU,EACV,kBACA,iBACA,kBACA,iBACD,CAAC,EAC2B,OAAQ,GAAY,IAAI,KAAK,EAAQ,UAAU,CAAG,EAAU,CAczF,GAbA,EAAO,KAAK,SAAS,EAAS,OAAO,sBAAsB,CAEvD,GACF,EAAO,KAAK,6DAA6D,CAEvE,GACF,EAAO,KAAK,2DAA2D,CAErE,GACF,EAAO,KAAK,+DAA+D,CAIzE,EAAW,OAAS,GAAK,EAAW,SAAW,EAAS,OAAQ,CAClE,IAAM,EAAkB,EACtB,EACA,EAAS,KAAK,CAAE,QAAS,EAAG,CAC7B,CACG,EAAgB,OAAS,IAC3B,EAAO,MACL,EAAO,IAAI,gDAAgD,EAAgB,KAAK,IAAI,CAAC,GAAG,CACzF,CACD,QAAQ,KAAK,EAAE,EAIf,GACF,MAAM,EAAsB
,EAAO,CAIrC,IAAI,EAAQ,EACZ,EAAY,MAAM,EAAS,OAAQ,EAAE,CACrC,MAAM,EACJ,EACA,MAAO,EAAS,IAAQ,CACtB,GAAI,CAEF,IAAM,EAAqB,EACvB,MAAM,EAA2B,EAAQ,EAAQ,CAC/C,UAAW,EAAQ,GACnB,gBAAiB,GAClB,CAAC,CACF,EAAE,CAGA,EAAkB,MAAM,EAC5B,EACA,CACE,GAAG,EAEH,SACI,GAAoB,IAAI,KAAK,EAAQ,UAAU,CAAG,EAChD,GACA,EAAQ,SACf,CACD,CACE,qBACA,oBACA,mBACA,kBACD,CACF,CAGK,EAAoB,EAAM,SAAS,oBAAoB,CAC7D,EAAkB,KAAK,CACrB,GAAI,EAAgB,GACpB,KAAM,EAAgB,KACtB,SAAU,EACV,eAAgB,EAAgB,eAChC,YAAa,IAAI,MAAM,CAAC,aAAa,CACtC,CAAC,CACF,MAAM,EAAM,SAAS,EAAmB,oBAAoB,OACrD,EAAK,CACZ,IAAM,EAAM,GAAG,EAAI,QAAQ,KAAK,KAAK,UAAU,EAAI,UAAU,KAAM,KAAM,EAAE,GACrE,EAAc,EAAmB,EAAI,CAErC,EAAkB,EAAM,SAAS,kBAAkB,CACzD,EAAgB,KAAK,CACnB,GAAI,EAAQ,GACZ,KAAM,EAAQ,KACd,SAAU,EACV,eAAgB,EAAQ,eACxB,YAAa,IAAI,MAAM,CAAC,aAAa,CACrC,MAAO,GAAe,EACvB,CAAC,CACF,MAAM,EAAM,SAAS,EAAiB,kBAAkB,CAE1D,GAAS,EACT,EAAY,OAAO,EAAM,EAE3B,CAAE,cAAa,CAChB,CAED,EAAY,MAAM,CAElB,IAAM,EADK,IAAI,MAAM,CAAC,SAAS,CACR,EAGvB,EAAO,KAAK,EAAO,MAAM,wCAAwC,EAAY,IAAK,YAAY,CAAC,CAG3F,EAAM,SAAS,kBAAkB,CAAC,OAAS,IAC7C,EAAO,MACL,EAAO,IACL,gBAAgB,EAAM,SAAS,kBAAkB,CAAC,OAAO,iBAC/C,EAAQ,EAAU,CAAC,4CAC9B,CACF,CACD,QAAQ,KAAK,EAAE"}
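Illustrative note (not part of the published files above): the source embedded in this map defines a `bulkRestartRequests` helper that fetches matching privacy requests, restarts each one through Sombra, and persists successes/failures to a timestamped JSON state file. A minimal sketch of calling it programmatically is shown below, assuming the module were importable from the package internals (the import path and the specific `RequestAction`/`RequestStatus` enum members are assumptions for illustration; the CLI itself drives this logic via the `tr-request-restart` command).

```ts
// Hypothetical sketch only: programmatic use of the bulkRestartRequests helper
// whose source is embedded in the map above. Import path is assumed, not a
// documented public entry point of @transcend-io/cli.
import { RequestAction, RequestStatus } from '@transcend-io/privacy-types';

import { bulkRestartRequests } from './bulkRestartRequests.js'; // assumed path

async function main(): Promise<void> {
  await bulkRestartRequests({
    // Transcend API key (read from the environment here for illustration)
    auth: process.env.TRANSCEND_API_KEY ?? '',
    // Folder where the run's receipt/state JSON file is written
    requestReceiptFolder: './receipts',
    // Enum members below are assumed; substitute the actions/statuses you target
    requestActions: [RequestAction.Erasure],
    requestStatuses: [RequestStatus.Compiling],
    // Copy all identifiers instead of only the core identifier when restarting
    copyIdentifiers: true,
    // Restart up to 10 requests in parallel
    concurrency: 10,
  });
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
```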