@transcend-io/cli 8.38.2 → 9.0.1

This diff shows the changes between two publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (278)
  1. package/dist/api-keys-CxvKdj2v.mjs +2 -0
  2. package/dist/api-keys-CxvKdj2v.mjs.map +1 -0
  3. package/dist/app-BKMxG7RO.mjs +131 -0
  4. package/dist/app-BKMxG7RO.mjs.map +1 -0
  5. package/dist/bin/bash-complete.mjs +3 -0
  6. package/dist/bin/bash-complete.mjs.map +1 -0
  7. package/dist/bin/cli.mjs +3 -0
  8. package/dist/bin/cli.mjs.map +1 -0
  9. package/dist/bin/deprecated-command.mjs +5 -0
  10. package/dist/bin/deprecated-command.mjs.map +1 -0
  11. package/dist/buildAIIntegrationType-Bk0EbFKV.mjs +2 -0
  12. package/dist/buildAIIntegrationType-Bk0EbFKV.mjs.map +1 -0
  13. package/dist/code-scanning-Cx1kpssH.mjs +4 -0
  14. package/dist/code-scanning-Cx1kpssH.mjs.map +1 -0
  15. package/dist/codecs-TR6p48v3.mjs +2 -0
  16. package/dist/codecs-TR6p48v3.mjs.map +1 -0
  17. package/dist/command-Bzyj3M2G.mjs +9 -0
  18. package/dist/command-Bzyj3M2G.mjs.map +1 -0
  19. package/dist/consent-manager-c4bgQF1N.mjs +12 -0
  20. package/dist/consent-manager-c4bgQF1N.mjs.map +1 -0
  21. package/dist/constants-CnLQtIBn.mjs +2 -0
  22. package/dist/constants-CnLQtIBn.mjs.map +1 -0
  23. package/dist/context-bkKpii_t.mjs +2 -0
  24. package/dist/{context-_8xfl0dt.cjs.map → context-bkKpii_t.mjs.map} +1 -1
  25. package/dist/cron-BvxWyvDu.mjs +2 -0
  26. package/dist/cron-BvxWyvDu.mjs.map +1 -0
  27. package/dist/data-inventory-CkS_kmus.mjs +75 -0
  28. package/dist/data-inventory-CkS_kmus.mjs.map +1 -0
  29. package/dist/dataFlowsToDataSilos-RAhfPV0l.mjs +2 -0
  30. package/dist/dataFlowsToDataSilos-RAhfPV0l.mjs.map +1 -0
  31. package/dist/done-input-validation-CcZtaz03.mjs +2 -0
  32. package/dist/{done-input-validation-DGckEJ5a.cjs.map → done-input-validation-CcZtaz03.mjs.map} +1 -1
  33. package/dist/enums-CyFTrzXY.mjs +2 -0
  34. package/dist/{enums-BZulhPFa.cjs.map → enums-CyFTrzXY.mjs.map} +1 -1
  35. package/dist/impl-8dOatHnF.mjs +2 -0
  36. package/dist/{impl-BN8N7BHo.cjs.map → impl-8dOatHnF.mjs.map} +1 -1
  37. package/dist/impl-Ah-1lwzr.mjs +2 -0
  38. package/dist/impl-Ah-1lwzr.mjs.map +1 -0
  39. package/dist/impl-B5lTeRbn.mjs +2 -0
  40. package/dist/impl-B5lTeRbn.mjs.map +1 -0
  41. package/dist/impl-B6UhzQcY2.mjs +2 -0
  42. package/dist/impl-B6UhzQcY2.mjs.map +1 -0
  43. package/dist/impl-BFf_CotE2.mjs +2 -0
  44. package/dist/impl-BFf_CotE2.mjs.map +1 -0
  45. package/dist/impl-BGQ0EGS0.mjs +2 -0
  46. package/dist/impl-BGQ0EGS0.mjs.map +1 -0
  47. package/dist/impl-BYBNi68b.mjs +5 -0
  48. package/dist/impl-BYBNi68b.mjs.map +1 -0
  49. package/dist/impl-B__p3_wC.mjs +2 -0
  50. package/dist/impl-B__p3_wC.mjs.map +1 -0
  51. package/dist/impl-BcayRe6a.mjs +2 -0
  52. package/dist/{impl-CA7X_TDD.cjs.map → impl-BcayRe6a.mjs.map} +1 -1
  53. package/dist/impl-BkYKsEVG2.mjs +2 -0
  54. package/dist/impl-BkYKsEVG2.mjs.map +1 -0
  55. package/dist/impl-Bl2yVgh0.mjs +4 -0
  56. package/dist/impl-Bl2yVgh0.mjs.map +1 -0
  57. package/dist/impl-BmAMgEEM.mjs +12 -0
  58. package/dist/impl-BmAMgEEM.mjs.map +1 -0
  59. package/dist/impl-BsttzxTN2.mjs +2 -0
  60. package/dist/impl-BsttzxTN2.mjs.map +1 -0
  61. package/dist/impl-BtnySmbi.mjs +2 -0
  62. package/dist/impl-BtnySmbi.mjs.map +1 -0
  63. package/dist/impl-BwX-evfW2.mjs +4 -0
  64. package/dist/impl-BwX-evfW2.mjs.map +1 -0
  65. package/dist/impl-C-wzeAib2.mjs +2 -0
  66. package/dist/impl-C-wzeAib2.mjs.map +1 -0
  67. package/dist/impl-C61PYfk12.mjs +2 -0
  68. package/dist/impl-C61PYfk12.mjs.map +1 -0
  69. package/dist/impl-CAuNpuF2.mjs +2 -0
  70. package/dist/impl-CAuNpuF2.mjs.map +1 -0
  71. package/dist/impl-CSKrBIuV.mjs +2 -0
  72. package/dist/impl-CSKrBIuV.mjs.map +1 -0
  73. package/dist/impl-CZP2l3Ds.mjs +3 -0
  74. package/dist/impl-CZP2l3Ds.mjs.map +1 -0
  75. package/dist/impl-CiJ8hE5W2.mjs +2 -0
  76. package/dist/impl-CiJ8hE5W2.mjs.map +1 -0
  77. package/dist/impl-Cj3H-m2Z.mjs +2 -0
  78. package/dist/impl-Cj3H-m2Z.mjs.map +1 -0
  79. package/dist/impl-CkY0wfCz.mjs +2 -0
  80. package/dist/impl-CkY0wfCz.mjs.map +1 -0
  81. package/dist/impl-Cm8pUfBU2.mjs +2 -0
  82. package/dist/impl-Cm8pUfBU2.mjs.map +1 -0
  83. package/dist/impl-CpzS9LVu2.mjs +2 -0
  84. package/dist/impl-CpzS9LVu2.mjs.map +1 -0
  85. package/dist/impl-CwfamZ1c.mjs +2 -0
  86. package/dist/{impl-BO1fP5DL.cjs.map → impl-CwfamZ1c.mjs.map} +1 -1
  87. package/dist/impl-D81et1Yb2.mjs +2 -0
  88. package/dist/impl-D81et1Yb2.mjs.map +1 -0
  89. package/dist/impl-D92PTNk3.mjs +2 -0
  90. package/dist/impl-D92PTNk3.mjs.map +1 -0
  91. package/dist/impl-DTXDVeo6.mjs +2 -0
  92. package/dist/impl-DTXDVeo6.mjs.map +1 -0
  93. package/dist/impl-DWoysXup.mjs +2 -0
  94. package/dist/impl-DWoysXup.mjs.map +1 -0
  95. package/dist/impl-DX3JHZ4v2.mjs +2 -0
  96. package/dist/impl-DX3JHZ4v2.mjs.map +1 -0
  97. package/dist/impl-DhuUrzxQ.mjs +2 -0
  98. package/dist/impl-DhuUrzxQ.mjs.map +1 -0
  99. package/dist/impl-DqMYLKjU.mjs +2 -0
  100. package/dist/impl-DqMYLKjU.mjs.map +1 -0
  101. package/dist/impl-DqQ6CIj0.mjs +2 -0
  102. package/dist/impl-DqQ6CIj0.mjs.map +1 -0
  103. package/dist/impl-Duaq6iWI2.mjs +2 -0
  104. package/dist/impl-Duaq6iWI2.mjs.map +1 -0
  105. package/dist/impl-O5gz8qcm.mjs +2 -0
  106. package/dist/impl-O5gz8qcm.mjs.map +1 -0
  107. package/dist/impl-PH0AoC7i.mjs +2 -0
  108. package/dist/impl-PH0AoC7i.mjs.map +1 -0
  109. package/dist/impl-S8p6toVb2.mjs +2 -0
  110. package/dist/impl-S8p6toVb2.mjs.map +1 -0
  111. package/dist/impl-X2MSb8Ij.mjs +2 -0
  112. package/dist/impl-X2MSb8Ij.mjs.map +1 -0
  113. package/dist/impl-bo95wZIU2.mjs +2 -0
  114. package/dist/impl-bo95wZIU2.mjs.map +1 -0
  115. package/dist/impl-cfdCesro.mjs +2 -0
  116. package/dist/impl-cfdCesro.mjs.map +1 -0
  117. package/dist/impl-iZoXu4nV.mjs +2 -0
  118. package/dist/{impl-DZMWJNLE.cjs.map → impl-iZoXu4nV.mjs.map} +1 -1
  119. package/dist/impl-lebl6Zek2.mjs +2 -0
  120. package/dist/impl-lebl6Zek2.mjs.map +1 -0
  121. package/dist/impl-p0YN9e2e.mjs +2 -0
  122. package/dist/impl-p0YN9e2e.mjs.map +1 -0
  123. package/dist/{index.d.cts → index.d.mts} +1704 -3463
  124. package/dist/index.mjs +5 -0
  125. package/dist/index.mjs.map +1 -0
  126. package/dist/logger-Bj782ZYD.mjs +2 -0
  127. package/dist/logger-Bj782ZYD.mjs.map +1 -0
  128. package/dist/manual-enrichment-B6lW5kAX.mjs +2 -0
  129. package/dist/manual-enrichment-B6lW5kAX.mjs.map +1 -0
  130. package/dist/mergeTranscendInputs-Coj_e2N3.mjs +2 -0
  131. package/dist/{mergeTranscendInputs-BIBCYbug.cjs.map → mergeTranscendInputs-Coj_e2N3.mjs.map} +1 -1
  132. package/dist/pooling-CazydwlD.mjs +23 -0
  133. package/dist/pooling-CazydwlD.mjs.map +1 -0
  134. package/dist/preference-management-8gj7aSJB.mjs +7 -0
  135. package/dist/preference-management-8gj7aSJB.mjs.map +1 -0
  136. package/dist/readTranscendYaml-DhKG1ViI.mjs +4 -0
  137. package/dist/readTranscendYaml-DhKG1ViI.mjs.map +1 -0
  138. package/dist/syncConfigurationToTranscend-VJd0PnaZ.mjs +3010 -0
  139. package/dist/syncConfigurationToTranscend-VJd0PnaZ.mjs.map +1 -0
  140. package/dist/uploadConsents-C1S-BNzw.mjs +2 -0
  141. package/dist/uploadConsents-C1S-BNzw.mjs.map +1 -0
  142. package/package.json +46 -49
  143. package/dist/api-keys-DMPYZTne.cjs +0 -2
  144. package/dist/api-keys-DMPYZTne.cjs.map +0 -1
  145. package/dist/app-bPlpZQj_.cjs +0 -131
  146. package/dist/app-bPlpZQj_.cjs.map +0 -1
  147. package/dist/bin/bash-complete.cjs +0 -3
  148. package/dist/bin/bash-complete.cjs.map +0 -1
  149. package/dist/bin/cli.cjs +0 -3
  150. package/dist/bin/cli.cjs.map +0 -1
  151. package/dist/bin/deprecated-command.cjs +0 -5
  152. package/dist/bin/deprecated-command.cjs.map +0 -1
  153. package/dist/buildAIIntegrationType-BwuCYR-o.cjs +0 -2
  154. package/dist/buildAIIntegrationType-BwuCYR-o.cjs.map +0 -1
  155. package/dist/chunk-Bmb41Sf3.cjs +0 -1
  156. package/dist/code-scanning-D6YstOWo.cjs +0 -4
  157. package/dist/code-scanning-D6YstOWo.cjs.map +0 -1
  158. package/dist/codecs-Bvmb8o9R.cjs +0 -2
  159. package/dist/codecs-Bvmb8o9R.cjs.map +0 -1
  160. package/dist/command-C39HGpGR.cjs +0 -9
  161. package/dist/command-C39HGpGR.cjs.map +0 -1
  162. package/dist/consent-manager-C3UVAKd_.cjs +0 -12
  163. package/dist/consent-manager-C3UVAKd_.cjs.map +0 -1
  164. package/dist/constants-gJm1eQH0.cjs +0 -2
  165. package/dist/constants-gJm1eQH0.cjs.map +0 -1
  166. package/dist/context-_8xfl0dt.cjs +0 -2
  167. package/dist/cron-NLlyCiml.cjs +0 -2
  168. package/dist/cron-NLlyCiml.cjs.map +0 -1
  169. package/dist/data-inventory-a9Nz9lUO.cjs +0 -75
  170. package/dist/data-inventory-a9Nz9lUO.cjs.map +0 -1
  171. package/dist/dataFlowsToDataSilos-BJh0hzJI.cjs +0 -2
  172. package/dist/dataFlowsToDataSilos-BJh0hzJI.cjs.map +0 -1
  173. package/dist/done-input-validation-DGckEJ5a.cjs +0 -2
  174. package/dist/enums-BZulhPFa.cjs +0 -2
  175. package/dist/impl--RUTvJko.cjs +0 -2
  176. package/dist/impl--RUTvJko.cjs.map +0 -1
  177. package/dist/impl-6vrF2_M3.cjs +0 -2
  178. package/dist/impl-6vrF2_M3.cjs.map +0 -1
  179. package/dist/impl-73q3K0b_.cjs +0 -2
  180. package/dist/impl-73q3K0b_.cjs.map +0 -1
  181. package/dist/impl-BAZ34gSp.cjs +0 -2
  182. package/dist/impl-BAZ34gSp.cjs.map +0 -1
  183. package/dist/impl-BAs-7neS.cjs +0 -12
  184. package/dist/impl-BAs-7neS.cjs.map +0 -1
  185. package/dist/impl-BF44h4oH.cjs +0 -2
  186. package/dist/impl-BF44h4oH.cjs.map +0 -1
  187. package/dist/impl-BKXkRTVx.cjs +0 -2
  188. package/dist/impl-BKXkRTVx.cjs.map +0 -1
  189. package/dist/impl-BN8N7BHo.cjs +0 -2
  190. package/dist/impl-BO1fP5DL.cjs +0 -2
  191. package/dist/impl-BRAiiMt3.cjs +0 -2
  192. package/dist/impl-BRAiiMt3.cjs.map +0 -1
  193. package/dist/impl-BRNha4nQ.cjs +0 -2
  194. package/dist/impl-BRNha4nQ.cjs.map +0 -1
  195. package/dist/impl-Ba1d91O1.cjs +0 -2
  196. package/dist/impl-Ba1d91O1.cjs.map +0 -1
  197. package/dist/impl-Bgp_TuxN.cjs +0 -2
  198. package/dist/impl-Bgp_TuxN.cjs.map +0 -1
  199. package/dist/impl-BreaZGaV.cjs +0 -2
  200. package/dist/impl-BreaZGaV.cjs.map +0 -1
  201. package/dist/impl-BzAwMfNS.cjs +0 -2
  202. package/dist/impl-BzAwMfNS.cjs.map +0 -1
  203. package/dist/impl-C0eKnbO8.cjs +0 -4
  204. package/dist/impl-C0eKnbO8.cjs.map +0 -1
  205. package/dist/impl-C2kTLgRr.cjs +0 -2
  206. package/dist/impl-C2kTLgRr.cjs.map +0 -1
  207. package/dist/impl-CA7X_TDD.cjs +0 -2
  208. package/dist/impl-CC0T0jNd.cjs +0 -2
  209. package/dist/impl-CC0T0jNd.cjs.map +0 -1
  210. package/dist/impl-CJT35FpG.cjs +0 -2
  211. package/dist/impl-CJT35FpG.cjs.map +0 -1
  212. package/dist/impl-CTXPMoXc.cjs +0 -3
  213. package/dist/impl-CTXPMoXc.cjs.map +0 -1
  214. package/dist/impl-CUScCLAS.cjs +0 -2
  215. package/dist/impl-CUScCLAS.cjs.map +0 -1
  216. package/dist/impl-CYmPZogW.cjs +0 -2
  217. package/dist/impl-CYmPZogW.cjs.map +0 -1
  218. package/dist/impl-Ccd0FaOU.cjs +0 -2
  219. package/dist/impl-Ccd0FaOU.cjs.map +0 -1
  220. package/dist/impl-CmKWcYiV.cjs +0 -2
  221. package/dist/impl-CmKWcYiV.cjs.map +0 -1
  222. package/dist/impl-CqshC7my.cjs +0 -2
  223. package/dist/impl-CqshC7my.cjs.map +0 -1
  224. package/dist/impl-Cru4riTc.cjs +0 -5
  225. package/dist/impl-Cru4riTc.cjs.map +0 -1
  226. package/dist/impl-Cypfgm4t.cjs +0 -2
  227. package/dist/impl-Cypfgm4t.cjs.map +0 -1
  228. package/dist/impl-D2SWeIGe.cjs +0 -2
  229. package/dist/impl-D2SWeIGe.cjs.map +0 -1
  230. package/dist/impl-DLwIQVA6.cjs +0 -2
  231. package/dist/impl-DLwIQVA6.cjs.map +0 -1
  232. package/dist/impl-DMm4lt-A.cjs +0 -2
  233. package/dist/impl-DMm4lt-A.cjs.map +0 -1
  234. package/dist/impl-DRsuGuRx.cjs +0 -2
  235. package/dist/impl-DRsuGuRx.cjs.map +0 -1
  236. package/dist/impl-DSrvfkVp.cjs +0 -2
  237. package/dist/impl-DSrvfkVp.cjs.map +0 -1
  238. package/dist/impl-DZMWJNLE.cjs +0 -2
  239. package/dist/impl-DbqAvW7X.cjs +0 -2
  240. package/dist/impl-DbqAvW7X.cjs.map +0 -1
  241. package/dist/impl-Dl78xJu6.cjs +0 -2
  242. package/dist/impl-Dl78xJu6.cjs.map +0 -1
  243. package/dist/impl-Dvpc-Qa5.cjs +0 -2
  244. package/dist/impl-Dvpc-Qa5.cjs.map +0 -1
  245. package/dist/impl-DySeNL1m.cjs +0 -2
  246. package/dist/impl-DySeNL1m.cjs.map +0 -1
  247. package/dist/impl-Fj-Esff-.cjs +0 -2
  248. package/dist/impl-Fj-Esff-.cjs.map +0 -1
  249. package/dist/impl-W6tVmm8N.cjs +0 -2
  250. package/dist/impl-W6tVmm8N.cjs.map +0 -1
  251. package/dist/impl-WphnR0cX.cjs +0 -2
  252. package/dist/impl-WphnR0cX.cjs.map +0 -1
  253. package/dist/impl-YNAicr-z.cjs +0 -2
  254. package/dist/impl-YNAicr-z.cjs.map +0 -1
  255. package/dist/impl-jEpWgC2N.cjs +0 -2
  256. package/dist/impl-jEpWgC2N.cjs.map +0 -1
  257. package/dist/impl-x2P-_Pk2.cjs +0 -4
  258. package/dist/impl-x2P-_Pk2.cjs.map +0 -1
  259. package/dist/index.cjs +0 -5
  260. package/dist/index.cjs.map +0 -1
  261. package/dist/logger-DQwEYtSS.cjs +0 -2
  262. package/dist/logger-DQwEYtSS.cjs.map +0 -1
  263. package/dist/manual-enrichment-DTVJo7hP.cjs +0 -2
  264. package/dist/manual-enrichment-DTVJo7hP.cjs.map +0 -1
  265. package/dist/mergeTranscendInputs-BIBCYbug.cjs +0 -2
  266. package/dist/pooling-C-TYBnHI.cjs +0 -23
  267. package/dist/pooling-C-TYBnHI.cjs.map +0 -1
  268. package/dist/preference-management-Ch77Yxod.cjs +0 -7
  269. package/dist/preference-management-Ch77Yxod.cjs.map +0 -1
  270. package/dist/readTranscendYaml-Cycz6RxW.cjs +0 -4
  271. package/dist/readTranscendYaml-Cycz6RxW.cjs.map +0 -1
  272. package/dist/syncConfigurationToTranscend-s-cjtUI3.cjs +0 -3010
  273. package/dist/syncConfigurationToTranscend-s-cjtUI3.cjs.map +0 -1
  274. package/dist/uploadConsents-CDkk_sWY.cjs +0 -2
  275. package/dist/uploadConsents-CDkk_sWY.cjs.map +0 -1
  276. /package/dist/bin/{bash-complete.d.cts → bash-complete.d.mts} +0 -0
  277. /package/dist/bin/{cli.d.cts → cli.d.mts} +0 -0
  278. /package/dist/bin/{deprecated-command.d.cts → deprecated-command.d.mts} +0 -0
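Taken together, the list shows every CommonJS artifact being removed (`*.cjs`, `*.cjs.map`, `dist/index.cjs`, the `bin/*.cjs` entry points) while only ES module (`.mjs`) outputs remain, with type declarations renamed from `.d.cts` to `.d.mts`; the `package.json` change (+46 −49) is consistent with an exports map that now points only at `.mjs` files. If 9.x is indeed ESM-only, a CommonJS consumer could no longer `require('@transcend-io/cli')` and would switch to `import`, or to a dynamic `import()` from CJS code. A minimal sketch of that consumer-side migration, under that assumption (not confirmed by this diff alone):

```ts
// Hypothetical consumer migration, assuming @transcend-io/cli 9.x ships only
// ES modules (as the removed *.cjs files above suggest).

// ESM consumer: a static import keeps working.
// import * as cli from '@transcend-io/cli';

// CJS consumer: require() would typically fail (ERR_REQUIRE_ESM on older
// Node versions); a dynamic import() works from CommonJS files.
async function loadCli(): Promise<void> {
  const cli = await import('@transcend-io/cli');
  // Inspect whatever the package actually exports before relying on names.
  console.log(Object.keys(cli));
}

loadCli().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
```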
package/dist/impl-CmKWcYiV.cjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"impl-CmKWcYiV.cjs","names":["bulkRestartRequests"],"sources":["../src/commands/request/restart/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { bulkRestartRequests } from '../../../lib/requests';\nimport type { RequestAction, RequestStatus } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface RestartCommandFlags {\n auth: string;\n actions: RequestAction[];\n statuses: RequestStatus[];\n transcendUrl: string;\n requestReceiptFolder: string;\n sombraAuth?: string;\n concurrency: number;\n requestIds?: string[];\n emailIsVerified: boolean;\n createdAt?: Date;\n silentModeBefore?: Date;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n updatedAtBefore?: Date;\n updatedAtAfter?: Date;\n sendEmailReceipt: boolean;\n copyIdentifiers: boolean;\n skipWaitingPeriod: boolean;\n}\n\nexport async function restart(\n this: LocalContext,\n {\n auth,\n requestReceiptFolder,\n sombraAuth,\n actions,\n statuses,\n requestIds,\n createdAt,\n emailIsVerified,\n silentModeBefore,\n sendEmailReceipt,\n copyIdentifiers,\n skipWaitingPeriod,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n concurrency,\n transcendUrl,\n }: RestartCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await bulkRestartRequests({\n requestReceiptFolder,\n auth,\n sombraAuth,\n requestActions: actions,\n requestStatuses: statuses,\n requestIds,\n createdAt,\n emailIsVerified,\n silentModeBefore,\n sendEmailReceipt,\n copyIdentifiers,\n skipWaitingPeriod,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n concurrency,\n transcendUrl,\n });\n}\n"],"mappings":"6QA0BA,eAAsB,EAEpB,CACE,OACA,uBACA,aACA,UACA,WACA,aACA,YACA,kBACA,mBACA,mBACA,kBACA,oBACA,kBACA,iBACA,kBACA,iBACA,cACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAoB,CACxB,uBACA,OACA,aACA,eAAgB,EAChB,gBAAiB,EACjB,aACA,YACA,kBACA,mBACA,mBACA,kBACA,oBACA,kBACA,iBACA,kBACA,iBACA,cACA,eACD,CAAC"}
package/dist/impl-CqshC7my.cjs
@@ -1,2 +0,0 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-gJm1eQH0.cjs`);const t=require(`./syncConfigurationToTranscend-s-cjtUI3.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./cron-NLlyCiml.cjs`),i=require(`./done-input-validation-DGckEJ5a.cjs`);let a=require(`colors`);a=e.t(a);async function o({file:e,transcendUrl:o,auth:s,sombraAuth:c,dataSiloId:l,actions:u,pageLimit:d,skipRequestCount:f,chunkSize:p}){f&&n.t.info(a.default.yellow(`Skipping request count as requested. This may help speed up the call.`)),(Number.isNaN(p)||p<=0||p%d!==0)&&(n.t.error(a.default.red(`Invalid chunk size: "${p}". Must be a positive integer that is a multiple of ${d}.`)),this.process.exit(1)),i.t(this.process.exit);let{baseName:m,extension:h}=t.c(e),g=0;await r.t({transcendUrl:o,apiPageSize:d,savePageSize:p,onSave:async e=>{let r=`${m}-${g}${h}`;return n.t.info(a.default.blue(`Saving ${e.length} identifiers to file "${r}"`)),await t.d(r,e,t.Ms(e.map(e=>Object.keys(e)).flat())),n.t.info(a.default.green(`Successfully wrote ${e.length} identifiers to file "${r}"`)),g+=1,Promise.resolve()},actions:u,auth:s,sombraAuth:c,dataSiloId:l,skipRequestCount:f})}exports.pullIdentifiers=o;
- //# sourceMappingURL=impl-CqshC7my.cjs.map
package/dist/impl-CqshC7my.cjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"impl-CqshC7my.cjs","names":["parseFilePath","writeLargeCsv","uniq","pullChunkedCustomSiloOutstandingIdentifiers"],"sources":["../src/commands/request/cron/pull-identifiers/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../../context';\nimport colors from 'colors';\n\nimport { logger } from '../../../../logger';\nimport { uniq } from 'lodash-es';\nimport {\n CsvFormattedIdentifier,\n pullChunkedCustomSiloOutstandingIdentifiers,\n} from '../../../../lib/cron';\nimport { RequestAction } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\nimport { parseFilePath, writeLargeCsv } from '../../../../lib/helpers';\n\nexport interface PullIdentifiersCommandFlags {\n file: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n dataSiloId: string;\n actions: RequestAction[];\n pageLimit: number;\n skipRequestCount: boolean;\n chunkSize: number;\n}\n\nexport async function pullIdentifiers(\n this: LocalContext,\n {\n file,\n transcendUrl,\n auth,\n sombraAuth,\n dataSiloId,\n actions,\n pageLimit,\n skipRequestCount,\n chunkSize,\n }: PullIdentifiersCommandFlags,\n): Promise<void> {\n if (skipRequestCount) {\n logger.info(\n colors.yellow(\n 'Skipping request count as requested. This may help speed up the call.',\n ),\n );\n }\n\n if (\n Number.isNaN(chunkSize) ||\n chunkSize <= 0 ||\n chunkSize % pageLimit !== 0\n ) {\n logger.error(\n colors.red(\n `Invalid chunk size: \"${chunkSize}\". Must be a positive integer that is a multiple of ${pageLimit}.`,\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n const { baseName, extension } = parseFilePath(file);\n let fileCount = 0;\n\n const onSave = async (chunk: CsvFormattedIdentifier[]): Promise<void> => {\n const numberedFileName = `${baseName}-${fileCount}${extension}`;\n logger.info(\n colors.blue(\n `Saving ${chunk.length} identifiers to file \"${numberedFileName}\"`,\n ),\n );\n\n const headers = uniq(chunk.map((d) => Object.keys(d)).flat());\n await writeLargeCsv(numberedFileName, chunk, headers);\n logger.info(\n colors.green(\n `Successfully wrote ${chunk.length} identifiers to file \"${numberedFileName}\"`,\n ),\n );\n fileCount += 1;\n return Promise.resolve();\n };\n\n // Pull down outstanding identifiers\n await pullChunkedCustomSiloOutstandingIdentifiers({\n transcendUrl,\n apiPageSize: pageLimit,\n savePageSize: chunkSize,\n onSave,\n actions,\n auth,\n sombraAuth,\n dataSiloId,\n skipRequestCount,\n });\n}\n"],"mappings":"+XAyBA,eAAsB,EAEpB,CACE,OACA,eACA,OACA,aACA,aACA,UACA,YACA,mBACA,aAEa,CACX,GACF,EAAA,EAAO,KACL,EAAA,QAAO,OACL,wEACD,CACF,EAID,OAAO,MAAM,EAAU,EACvB,GAAa,GACb,EAAY,IAAc,KAE1B,EAAA,EAAO,MACL,EAAA,QAAO,IACL,wBAAwB,EAAU,sDAAsD,EAAU,GACnG,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CAAE,WAAU,aAAcA,EAAAA,EAAc,EAAK,CAC/C,EAAY,EAsBhB,MAAMG,EAAAA,EAA4C,CAChD,eACA,YAAa,EACb,aAAc,EACd,OAxBa,KAAO,IAAmD,CACvE,IAAM,EAAmB,GAAG,EAAS,GAAG,IAAY,IAepD,OAdA,EAAA,EAAO,KACL,EAAA,QAAO,KACL,UAAU,EAAM,OAAO,wBAAwB,EAAiB,GACjE,CACF,CAGD,MAAMF,EAAAA,EAAc,EAAkB,EADtBC,EAAAA,GAAK,EAAM,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CACR,CACrD,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAAM,OAAO,wBAAwB,EAAiB,GAC7E,CACF,CACD,GAAa,EACN,QAAQ,SAAS,EASxB,UACA,OACA,aACA,aACA,mBACD,CAAC"}
package/dist/impl-Cru4riTc.cjs
@@ -1,5 +0,0 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./logger-DQwEYtSS.cjs`),n=require(`./done-input-validation-DGckEJ5a.cjs`);let r=require(`node:fs`);r=e.t(r);let i=require(`node:path`);i=e.t(i);let a=require(`colors`);a=e.t(a);let o=require(`fast-glob`);o=e.t(o);let s=require(`node:child_process`);function c(e,t,n){return new Promise((i,a)=>{let o=r.default.createReadStream(e),s=Buffer.alloc(0),c=t.length,l=0;o.on(`data`,e=>{let r=e;if(n){let e=n-l;if(e<=0){o.destroy(),i(!1);return}r.length>e&&(r=r.subarray(0,e)),l+=r.length}let a=s.length?Buffer.concat([s,r]):r;if(a.toString(`utf8`).toLowerCase().includes(t.toString(`utf8`))){o.destroy(),i(!0);return}s=c>1?Buffer.from(a.subarray(Math.max(0,a.length-(c-1)))):Buffer.alloc(0)}),o.on(`error`,a),o.on(`close`,()=>i(!1)),o.on(`end`,()=>i(!1))})}async function l(e,t,n){let r=0,i=Array.from({length:Math.min(t,e.length)},async()=>{for(;;){let t=r;if(r+=1,t>=e.length)return;await n(e[t])}});await Promise.all(i)}function u(e,t){return new Promise((n,r)=>{let i=(0,s.spawn)(e,[`-noheader`,`-batch`,`-cmd`,t],{stdio:[`ignore`,`pipe`,`pipe`]}),a=``,o=``;i.stdout.on(`data`,e=>{a+=String(e)}),i.stderr.on(`data`,e=>{o+=String(e)}),i.on(`error`,r),i.on(`close`,e=>{e===0?n(a):r(Error(`duckdb exited ${e}: ${o}`))})})}async function d(e,t){return(await u(e,[`SELECT column_name`,`FROM parquet_schema('${t.replace(/'/g,`''`)}')`,`WHERE lower(column_type) LIKE '%varchar%'`,` OR lower(column_type) LIKE '%string%';`].join(`
- `))).split(`
- `).map(e=>e.trim()).filter(Boolean)}async function f(e,t,n){let r=await d(e,t);if(r.length===0)return!1;let i=t.replace(/'/g,`''`),a=r.map(e=>`"${e.replace(/"/g,`""`)}" = '${n.replace(/'/g,`''`)}'`).join(` OR `);return(await u(e,[`SELECT 1 AS hit FROM read_parquet('${i}')`,`WHERE ${a}`,`LIMIT 1;`].join(`
- `))).trim().length>0}async function p(e){n.t(this.process.exit);let{needle:r,root:s,exts:u,noParquet:d,concurrency:p,maxBytes:m}=e,h=i.default.resolve(s),g=new Set(u.split(`,`).map(e=>e.trim().replace(/^\./,``).toLowerCase()).filter(Boolean)),_=Array.from(g).map(e=>`**/*.${e}`);t.t.info(a.default.green(`Searching for "${r}" in ${h} (exts: ${[...g].join(`, `)})`));let v=await(0,o.default)(_,{cwd:h,absolute:!0,onlyFiles:!0,followSymbolicLinks:!1,suppressErrors:!0}),y=Buffer.from(r.toLowerCase(),`utf8`),b=[];if(await l(v,p,async e=>{try{await c(e,y,m)&&(b.push(e),this.process.stdout.write(`${e}\n`))}catch{}}),!d){let e=await(0,o.default)([`**/*.parquet`],{cwd:h,absolute:!0,onlyFiles:!0,followSymbolicLinks:!1,suppressErrors:!0});e.length>0&&(t.t.info(a.default.green(`Scanning ${e.length} parquet file(s) via DuckDB...`)),await l(e,Math.max(2,Math.floor(p/4)),async e=>{try{await f(`duckdb`,e,r)&&(b.push(e),this.process.stdout.write(`${e}\n`))}catch{}}))}t.t.info(a.default.green(`Done. Found ${b.length} matching file(s).`))}exports.findTextInFolder=p;
- //# sourceMappingURL=impl-Cru4riTc.cjs.map
package/dist/impl-Cru4riTc.cjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"impl-Cru4riTc.cjs","names":["fs","path"],"sources":["../src/commands/admin/find-text-in-folder/impl.ts"],"sourcesContent":["import fs from 'node:fs';\nimport path from 'node:path';\nimport { spawn } from 'node:child_process';\nimport fg from 'fast-glob';\nimport colors from 'colors';\nimport type { LocalContext } from '../../../context';\nimport { logger } from '../../../logger';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n/** CLI flags accepted by the `find-text-in-folder` command. */\nexport type FindTextInFolderCommandFlags = {\n /** The text string to search for */\n needle: string;\n /** Root directory to search */\n root: string;\n /** Comma-separated file extensions */\n exts: string;\n /** Skip parquet file scanning */\n noParquet: boolean;\n /** Max concurrent file scans */\n concurrency: number;\n /** Stop scanning each file after N bytes */\n maxBytes?: number;\n};\n\n/**\n * Streams through a file checking if it contains the needle (case-insensitive).\n *\n * @param filePath - Absolute path to the file to scan\n * @param needle - Lowercased needle as a Buffer\n * @param maxBytes - Optional byte limit per file\n * @returns Whether the file contains the needle\n */\nexport function fileContainsExactBytes(\n filePath: string,\n needle: Buffer,\n maxBytes?: number,\n): Promise<boolean> {\n return new Promise<boolean>((resolve, reject) => {\n const stream = fs.createReadStream(filePath);\n let carry = Buffer.alloc(0);\n const n = needle.length;\n let seen = 0;\n\n stream.on('data', (raw: Buffer) => {\n let chunk = raw;\n\n if (maxBytes) {\n const remaining = maxBytes - seen;\n if (remaining <= 0) {\n stream.destroy();\n resolve(false);\n return;\n }\n if (chunk.length > remaining) {\n chunk = chunk.subarray(0, remaining);\n }\n seen += chunk.length;\n }\n\n const buf = carry.length ? 
Buffer.concat([carry, chunk]) : chunk;\n const haystack = buf.toString('utf8').toLowerCase();\n if (haystack.includes(needle.toString('utf8'))) {\n stream.destroy();\n resolve(true);\n return;\n }\n\n // Keep last n-1 bytes to catch boundary matches\n if (n > 1) {\n carry = Buffer.from(buf.subarray(Math.max(0, buf.length - (n - 1))));\n } else {\n carry = Buffer.alloc(0);\n }\n });\n\n stream.on('error', reject);\n stream.on('close', () => resolve(false));\n stream.on('end', () => resolve(false));\n });\n}\n\n/**\n * Run async workers over items with bounded concurrency.\n *\n * @param items - Array of items to process\n * @param limit - Maximum concurrent workers\n * @param worker - Async function to run per item\n * @returns Resolves when all items are processed\n */\nasync function runWithConcurrency<T>(\n items: T[],\n limit: number,\n worker: (item: T) => Promise<void>,\n): Promise<void> {\n let idx = 0;\n const runners = Array.from(\n { length: Math.min(limit, items.length) },\n async () => {\n // eslint-disable-next-line no-constant-condition\n while (true) {\n const current = idx;\n idx += 1;\n if (current >= items.length) return;\n await worker(items[current]);\n }\n },\n );\n await Promise.all(runners);\n}\n\n/**\n * Execute a DuckDB query and return stdout.\n *\n * @param duckdbPath - Path to the duckdb binary\n * @param sql - SQL query to execute\n * @returns The stdout output from duckdb\n */\nfunction duckdbQuery(duckdbPath: string, sql: string): Promise<string> {\n return new Promise<string>((resolve, reject) => {\n const child = spawn(duckdbPath, ['-noheader', '-batch', '-cmd', sql], {\n stdio: ['ignore', 'pipe', 'pipe'],\n });\n\n let stdout = '';\n let stderr = '';\n child.stdout.on('data', (d) => {\n stdout += String(d);\n });\n child.stderr.on('data', (d) => {\n stderr += String(d);\n });\n\n child.on('error', reject);\n child.on('close', (code) => {\n if (code === 0) resolve(stdout);\n else reject(new Error(`duckdb exited ${code}: ${stderr}`));\n });\n });\n}\n\n/**\n * Get all VARCHAR/STRING column names from a parquet file.\n *\n * @param duckdbPath - Path to the duckdb binary\n * @param filePath - Absolute path to the parquet file\n * @returns Array of string column names\n */\nasync function duckdbGetParquetStringColumns(\n duckdbPath: string,\n filePath: string,\n): Promise<string[]> {\n const escaped = filePath.replace(/'/g, \"''\");\n const sql = [\n 'SELECT column_name',\n `FROM parquet_schema('${escaped}')`,\n \"WHERE lower(column_type) LIKE '%varchar%'\",\n \" OR lower(column_type) LIKE '%string%';\",\n ].join('\\n');\n\n const out = await duckdbQuery(duckdbPath, sql);\n return out\n .split('\\n')\n .map((l) => l.trim())\n .filter(Boolean);\n}\n\n/**\n * Check if any string column in a parquet file contains the needle value.\n *\n * @param duckdbPath - Path to the duckdb binary\n * @param filePath - Absolute path to the parquet file\n * @param needle - The string to search for (exact equality per column)\n * @returns Whether any row/column matches\n */\nasync function parquetFileHasExactString(\n duckdbPath: string,\n filePath: string,\n needle: string,\n): Promise<boolean> {\n const cols = await duckdbGetParquetStringColumns(duckdbPath, filePath);\n if (cols.length === 0) return false;\n\n const escaped = filePath.replace(/'/g, \"''\");\n const orChain = cols\n .map((c) => `\"${c.replace(/\"/g, '\"\"')}\" = '${needle.replace(/'/g, \"''\")}'`)\n .join(' OR ');\n\n const sql = [\n `SELECT 1 AS hit FROM read_parquet('${escaped}')`,\n `WHERE ${orChain}`,\n 
'LIMIT 1;',\n ].join('\\n');\n\n const out = await duckdbQuery(duckdbPath, sql);\n return out.trim().length > 0;\n}\n\n/**\n * Entrypoint for the `admin find-text-in-folder` command.\n *\n * Searches a folder of files for a given text string. Useful for finding\n * a needle in a haystack across many large files (multi-GB CSVs, JSON\n * dumps, log archives). Files are streamed so memory stays flat.\n *\n * @param this - Bound CLI context\n * @param flags - CLI flags for the run\n */\nexport async function findTextInFolder(\n this: LocalContext,\n flags: FindTextInFolderCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const { needle, root, exts, noParquet, concurrency, maxBytes } = flags;\n const rootAbs = path.resolve(root);\n\n const extSet = new Set(\n exts\n .split(',')\n .map((x) => x.trim().replace(/^\\./, '').toLowerCase())\n .filter(Boolean),\n );\n const patterns = Array.from(extSet).map((e) => `**/*.${e}`);\n\n logger.info(\n colors.green(\n `Searching for \"${needle}\" in ${rootAbs} (exts: ${[...extSet].join(\n ', ',\n )})`,\n ),\n );\n\n const normalFiles = await fg(patterns, {\n cwd: rootAbs,\n absolute: true,\n onlyFiles: true,\n followSymbolicLinks: false,\n suppressErrors: true,\n });\n\n const needleBuf = Buffer.from(needle.toLowerCase(), 'utf8');\n const hits: string[] = [];\n\n await runWithConcurrency(normalFiles, concurrency, async (file) => {\n try {\n const ok = await fileContainsExactBytes(file, needleBuf, maxBytes);\n if (ok) {\n hits.push(file);\n this.process.stdout.write(`${file}\\n`);\n }\n } catch {\n // ignore unreadable files\n }\n });\n\n if (!noParquet) {\n const parquetFiles = await fg(['**/*.parquet'], {\n cwd: rootAbs,\n absolute: true,\n onlyFiles: true,\n followSymbolicLinks: false,\n suppressErrors: true,\n });\n\n if (parquetFiles.length > 0) {\n logger.info(\n colors.green(\n `Scanning ${parquetFiles.length} parquet file(s) via DuckDB...`,\n ),\n );\n\n await runWithConcurrency(\n parquetFiles,\n Math.max(2, Math.floor(concurrency / 4)),\n async (file) => {\n try {\n const ok = await parquetFileHasExactString('duckdb', file, needle);\n if (ok) {\n hits.push(file);\n this.process.stdout.write(`${file}\\n`);\n }\n } catch {\n // ignore parquet read issues\n }\n },\n );\n }\n }\n\n logger.info(colors.green(`Done. 
Found ${hits.length} matching file(s).`));\n}\n"],"mappings":"4SAiCA,SAAgB,EACd,EACA,EACA,EACkB,CAClB,OAAO,IAAI,SAAkB,EAAS,IAAW,CAC/C,IAAM,EAASA,EAAAA,QAAG,iBAAiB,EAAS,CACxC,EAAQ,OAAO,MAAM,EAAE,CACrB,EAAI,EAAO,OACb,EAAO,EAEX,EAAO,GAAG,OAAS,GAAgB,CACjC,IAAI,EAAQ,EAEZ,GAAI,EAAU,CACZ,IAAM,EAAY,EAAW,EAC7B,GAAI,GAAa,EAAG,CAClB,EAAO,SAAS,CAChB,EAAQ,GAAM,CACd,OAEE,EAAM,OAAS,IACjB,EAAQ,EAAM,SAAS,EAAG,EAAU,EAEtC,GAAQ,EAAM,OAGhB,IAAM,EAAM,EAAM,OAAS,OAAO,OAAO,CAAC,EAAO,EAAM,CAAC,CAAG,EAE3D,GADiB,EAAI,SAAS,OAAO,CAAC,aAAa,CACtC,SAAS,EAAO,SAAS,OAAO,CAAC,CAAE,CAC9C,EAAO,SAAS,CAChB,EAAQ,GAAK,CACb,OAIF,AAGE,EAHE,EAAI,EACE,OAAO,KAAK,EAAI,SAAS,KAAK,IAAI,EAAG,EAAI,QAAU,EAAI,GAAG,CAAC,CAAC,CAE5D,OAAO,MAAM,EAAE,EAEzB,CAEF,EAAO,GAAG,QAAS,EAAO,CAC1B,EAAO,GAAG,YAAe,EAAQ,GAAM,CAAC,CACxC,EAAO,GAAG,UAAa,EAAQ,GAAM,CAAC,EACtC,CAWJ,eAAe,EACb,EACA,EACA,EACe,CACf,IAAI,EAAM,EACJ,EAAU,MAAM,KACpB,CAAE,OAAQ,KAAK,IAAI,EAAO,EAAM,OAAO,CAAE,CACzC,SAAY,CAEV,OAAa,CACX,IAAM,EAAU,EAEhB,GADA,GAAO,EACH,GAAW,EAAM,OAAQ,OAC7B,MAAM,EAAO,EAAM,GAAS,GAGjC,CACD,MAAM,QAAQ,IAAI,EAAQ,CAU5B,SAAS,EAAY,EAAoB,EAA8B,CACrE,OAAO,IAAI,SAAiB,EAAS,IAAW,CAC9C,IAAM,GAAA,EAAA,EAAA,OAAc,EAAY,CAAC,YAAa,SAAU,OAAQ,EAAI,CAAE,CACpE,MAAO,CAAC,SAAU,OAAQ,OAAO,CAClC,CAAC,CAEE,EAAS,GACT,EAAS,GACb,EAAM,OAAO,GAAG,OAAS,GAAM,CAC7B,GAAU,OAAO,EAAE,EACnB,CACF,EAAM,OAAO,GAAG,OAAS,GAAM,CAC7B,GAAU,OAAO,EAAE,EACnB,CAEF,EAAM,GAAG,QAAS,EAAO,CACzB,EAAM,GAAG,QAAU,GAAS,CACtB,IAAS,EAAG,EAAQ,EAAO,CAC1B,EAAW,MAAM,iBAAiB,EAAK,IAAI,IAAS,CAAC,EAC1D,EACF,CAUJ,eAAe,EACb,EACA,EACmB,CAUnB,OADY,MAAM,EAAY,EAPlB,CACV,qBACA,wBAHc,EAAS,QAAQ,KAAM,KAAK,CAGV,IAChC,4CACA,4CACD,CAAC,KAAK;EAAK,CAEkC,EAE3C,MAAM;EAAK,CACX,IAAK,GAAM,EAAE,MAAM,CAAC,CACpB,OAAO,QAAQ,CAWpB,eAAe,EACb,EACA,EACA,EACkB,CAClB,IAAM,EAAO,MAAM,EAA8B,EAAY,EAAS,CACtE,GAAI,EAAK,SAAW,EAAG,MAAO,GAE9B,IAAM,EAAU,EAAS,QAAQ,KAAM,KAAK,CACtC,EAAU,EACb,IAAK,GAAM,IAAI,EAAE,QAAQ,KAAM,KAAK,CAAC,OAAO,EAAO,QAAQ,KAAM,KAAK,CAAC,GAAG,CAC1E,KAAK,OAAO,CASf,OADY,MAAM,EAAY,EANlB,CACV,sCAAsC,EAAQ,IAC9C,SAAS,IACT,WACD,CAAC,KAAK;EAAK,CAEkC,EACnC,MAAM,CAAC,OAAS,EAa7B,eAAsB,EAEpB,EACe,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CAAE,SAAQ,OAAM,OAAM,YAAW,cAAa,YAAa,EAC3D,EAAUC,EAAAA,QAAK,QAAQ,EAAK,CAE5B,EAAS,IAAI,IACjB,EACG,MAAM,IAAI,CACV,IAAK,GAAM,EAAE,MAAM,CAAC,QAAQ,MAAO,GAAG,CAAC,aAAa,CAAC,CACrD,OAAO,QAAQ,CACnB,CACK,EAAW,MAAM,KAAK,EAAO,CAAC,IAAK,GAAM,QAAQ,IAAI,CAE3D,EAAA,EAAO,KACL,EAAA,QAAO,MACL,kBAAkB,EAAO,OAAO,EAAQ,UAAU,CAAC,GAAG,EAAO,CAAC,KAC5D,KACD,CAAC,GACH,CACF,CAED,IAAM,EAAc,MAAA,EAAA,EAAA,SAAS,EAAU,CACrC,IAAK,EACL,SAAU,GACV,UAAW,GACX,oBAAqB,GACrB,eAAgB,GACjB,CAAC,CAEI,EAAY,OAAO,KAAK,EAAO,aAAa,CAAE,OAAO,CACrD,EAAiB,EAAE,CAczB,GAZA,MAAM,EAAmB,EAAa,EAAa,KAAO,IAAS,CACjE,GAAI,CACS,MAAM,EAAuB,EAAM,EAAW,EAAS,GAEhE,EAAK,KAAK,EAAK,CACf,KAAK,QAAQ,OAAO,MAAM,GAAG,EAAK,IAAI,OAElC,IAGR,CAEE,CAAC,EAAW,CACd,IAAM,EAAe,MAAA,EAAA,EAAA,SAAS,CAAC,eAAe,CAAE,CAC9C,IAAK,EACL,SAAU,GACV,UAAW,GACX,oBAAqB,GACrB,eAAgB,GACjB,CAAC,CAEE,EAAa,OAAS,IACxB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,YAAY,EAAa,OAAO,gCACjC,CACF,CAED,MAAM,EACJ,EACA,KAAK,IAAI,EAAG,KAAK,MAAM,EAAc,EAAE,CAAC,CACxC,KAAO,IAAS,CACd,GAAI,CACS,MAAM,EAA0B,SAAU,EAAM,EAAO,GAEhE,EAAK,KAAK,EAAK,CACf,KAAK,QAAQ,OAAO,MAAM,GAAG,EAAK,IAAI,OAElC,IAIX,EAIL,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,eAAe,EAAK,OAAO,oBAAoB,CAAC"}
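The `find-text-in-folder` source embedded in this map streams each file in chunks and carries the last `needle.length - 1` bytes of each chunk forward, so a match that straddles a chunk boundary is still detected. A condensed, standalone version of that carry-buffer scan (omitting the byte limit and the DuckDB parquet path of the original):

```ts
import fs from 'node:fs';

// Condensed from the removed find-text-in-folder source: stream a file and
// keep the tail of each chunk so needles split across chunk boundaries are
// still found. Case-insensitive, as in the original.
function fileContains(filePath: string, needle: string): Promise<boolean> {
  const target = needle.toLowerCase();
  return new Promise((resolve, reject) => {
    const stream = fs.createReadStream(filePath);
    let carry = Buffer.alloc(0);
    stream.on('data', (raw) => {
      const chunk = typeof raw === 'string' ? Buffer.from(raw) : raw;
      const buf = carry.length ? Buffer.concat([carry, chunk]) : chunk;
      if (buf.toString('utf8').toLowerCase().includes(target)) {
        stream.destroy();
        resolve(true);
        return;
      }
      // Retain the last (needle.length - 1) bytes for the next iteration.
      const keep = Math.max(0, buf.length - (target.length - 1));
      carry = Buffer.from(buf.subarray(keep));
    });
    stream.on('error', reject);
    stream.on('close', () => resolve(false));
  });
}
```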
package/dist/impl-Cypfgm4t.cjs
@@ -1,2 +0,0 @@
- require(`./constants-gJm1eQH0.cjs`),require(`./syncConfigurationToTranscend-s-cjtUI3.cjs`),require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const e=require(`./cron-NLlyCiml.cjs`),t=require(`./done-input-validation-DGckEJ5a.cjs`);async function n({file:n,transcendUrl:r,auth:i,sombraAuth:a,dataSiloId:o}){t.t(this.process.exit),await e.r({file:n,transcendUrl:r,auth:i,sombraAuth:a,dataSiloId:o})}exports.markIdentifiersCompleted=n;
- //# sourceMappingURL=impl-Cypfgm4t.cjs.map
package/dist/impl-Cypfgm4t.cjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"impl-Cypfgm4t.cjs","names":["pushCronIdentifiersFromCsv"],"sources":["../src/commands/request/cron/mark-identifiers-completed/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../../context';\nimport { pushCronIdentifiersFromCsv } from '../../../../lib/cron';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\n\nexport interface MarkIdentifiersCompletedCommandFlags {\n file: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n dataSiloId: string;\n}\n\nexport async function markIdentifiersCompleted(\n this: LocalContext,\n {\n file,\n transcendUrl,\n auth,\n sombraAuth,\n dataSiloId,\n }: MarkIdentifiersCompletedCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await pushCronIdentifiersFromCsv({\n file,\n transcendUrl,\n auth,\n sombraAuth,\n dataSiloId,\n });\n}\n"],"mappings":"sSAYA,eAAsB,EAEpB,CACE,OACA,eACA,OACA,aACA,cAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAA2B,CAC/B,OACA,eACA,OACA,aACA,aACD,CAAC"}
package/dist/impl-D2SWeIGe.cjs
@@ -1,2 +0,0 @@
- require(`./constants-gJm1eQH0.cjs`);const e=require(`./syncConfigurationToTranscend-s-cjtUI3.cjs`);require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const t=require(`./done-input-validation-DGckEJ5a.cjs`);async function n({auth:n,actions:r,origins:i,silentModeBefore:a,createdAtBefore:o,createdAtAfter:s,updatedAtBefore:c,updatedAtAfter:l,transcendUrl:u,concurrency:d}){t.t(this.process.exit),await e.X({transcendUrl:u,requestActions:r,auth:n,requestOrigins:i,concurrency:d,silentModeBefore:a,createdAtBefore:o,createdAtAfter:s,updatedAtBefore:c,updatedAtAfter:l})}exports.approve=n;
- //# sourceMappingURL=impl-D2SWeIGe.cjs.map
package/dist/impl-D2SWeIGe.cjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"impl-D2SWeIGe.cjs","names":["approvePrivacyRequests"],"sources":["../src/commands/request/approve/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\n\nimport { RequestAction, RequestOrigin } from '@transcend-io/privacy-types';\nimport { approvePrivacyRequests } from '../../../lib/requests';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface ApproveCommandFlags {\n auth: string;\n actions: RequestAction[];\n origins?: RequestOrigin[];\n silentModeBefore?: Date;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n updatedAtBefore?: Date;\n updatedAtAfter?: Date;\n transcendUrl: string;\n concurrency: number;\n}\n\nexport async function approve(\n this: LocalContext,\n {\n auth,\n actions,\n origins,\n silentModeBefore,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n transcendUrl,\n concurrency,\n }: ApproveCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await approvePrivacyRequests({\n transcendUrl,\n requestActions: actions,\n auth,\n requestOrigins: origins,\n concurrency,\n silentModeBefore,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n });\n}\n"],"mappings":"6QAmBA,eAAsB,EAEpB,CACE,OACA,UACA,UACA,mBACA,kBACA,iBACA,kBACA,iBACA,eACA,eAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAuB,CAC3B,eACA,eAAgB,EAChB,OACA,eAAgB,EAChB,cACA,mBACA,kBACA,iBACA,kBACA,iBACD,CAAC"}
package/dist/impl-DLwIQVA6.cjs
@@ -1,2 +0,0 @@
- require(`./constants-gJm1eQH0.cjs`);const e=require(`./syncConfigurationToTranscend-s-cjtUI3.cjs`);require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const t=require(`./done-input-validation-DGckEJ5a.cjs`);async function n({auth:n,transcendUrl:r,createdAtBefore:i,createdAtAfter:a,updatedAtBefore:o,updatedAtAfter:s,actions:c,daysLeft:l,days:u,requestIds:d,emailTemplate:f,concurrency:p}){t.t(this.process.exit),await e.Y({transcendUrl:r,requestActions:c,auth:n,emailTemplate:f,days:u,daysLeft:l,requestIds:d,concurrency:p,createdAtBefore:i,createdAtAfter:a,updatedAtBefore:o,updatedAtAfter:s})}exports.notifyAdditionalTime=n;
- //# sourceMappingURL=impl-DLwIQVA6.cjs.map
package/dist/impl-DLwIQVA6.cjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"impl-DLwIQVA6.cjs","names":["notifyPrivacyRequestsAdditionalTime"],"sources":["../src/commands/request/notify-additional-time/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { notifyPrivacyRequestsAdditionalTime } from '../../../lib/requests';\nimport type { RequestAction } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface NotifyAdditionalTimeCommandFlags {\n auth: string;\n createdAtBefore: Date;\n createdAtAfter?: Date;\n updatedAtBefore?: Date;\n updatedAtAfter?: Date;\n actions?: RequestAction[];\n daysLeft: number;\n days: number;\n requestIds?: string[];\n emailTemplate: string;\n transcendUrl: string;\n concurrency: number;\n}\n\nexport async function notifyAdditionalTime(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n actions,\n daysLeft,\n days,\n requestIds,\n emailTemplate,\n concurrency,\n }: NotifyAdditionalTimeCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await notifyPrivacyRequestsAdditionalTime({\n transcendUrl,\n requestActions: actions,\n auth,\n emailTemplate,\n days,\n daysLeft,\n requestIds,\n concurrency,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n });\n}\n"],"mappings":"6QAoBA,eAAsB,EAEpB,CACE,OACA,eACA,kBACA,iBACA,kBACA,iBACA,UACA,WACA,OACA,aACA,gBACA,eAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAoC,CACxC,eACA,eAAgB,EAChB,OACA,gBACA,OACA,WACA,aACA,cACA,kBACA,iBACA,kBACA,iBACD,CAAC"}
package/dist/impl-DMm4lt-A.cjs
@@ -1,2 +0,0 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-gJm1eQH0.cjs`);const t=require(`./syncConfigurationToTranscend-s-cjtUI3.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./cron-NLlyCiml.cjs`),i=require(`./done-input-validation-DGckEJ5a.cjs`);let a=require(`colors`);a=e.t(a);async function o({file:e,fileTarget:o,transcendUrl:s,auth:c,sombraAuth:l,cronDataSiloId:u,targetDataSiloId:d,actions:f,skipRequestCount:p,pageLimit:m,chunkSize:h}){p&&n.t.info(a.default.yellow(`Skipping request count as requested. This may help speed up the call.`)),(Number.isNaN(h)||h<=0||h%m!==0)&&(n.t.error(a.default.red(`Invalid chunk size: "${h}". Must be a positive integer that is a multiple of ${m}.`)),this.process.exit(1)),i.t(this.process.exit);let g=t.ai(s,c),{baseName:_,extension:v}=t.c(e),{baseName:y,extension:b}=t.c(o),x=0,S=0,C=0;await r.t({dataSiloId:u,auth:c,sombraAuth:l,actions:f,apiPageSize:m,savePageSize:h,onSave:async r=>{x+=r.length;let i=await t.As(t.Rs(t.Ms(r.map(e=>e.requestId)),m),async e=>(n.t.info(a.default.magenta(`Fetching target identifiers for ${e.length} requests`)),(await t.Gn(g,m*2,{requestIds:e,dataSiloIds:[d]})).map(({fileName:e,remoteId:t})=>{if(!t)throw Error(`Failed to find remoteId for ${e}`);return{RecordId:t,Object:e.replace(`.json`,``).split(`/`).pop()?.replace(` Information`,``),Comment:`Customer data deletion request submitted via transcend.io`}})),{concurrency:1});S+=i.flat().length;let s=t.Ms(r.map(e=>Object.keys(e)).flat()),c=`${_}-${C}${v}`,l=`${y}-${C}${b}`;await t.d(c,r,s),n.t.info(a.default.green(`Successfully wrote ${r.length} identifiers to file "${e}"`));let u=i.flat();await t.d(l,u,t.Ms(u.map(e=>Object.keys(e)).flat())),n.t.info(a.default.green(`Successfully wrote ${u.length} identifiers to file "${o}"`)),n.t.info(a.default.blue(`Processed chunk of ${t.Rs.length} identifiers, found ${u.length} target identifiers`)),C+=1},transcendUrl:s,skipRequestCount:p}),n.t.info(a.default.green(`Successfully wrote ${x} identifiers to file "${e}"`)),n.t.info(a.default.green(`Successfully wrote ${S} identifiers to file "${o}"`))}exports.pullProfiles=o;
- //# sourceMappingURL=impl-DMm4lt-A.cjs.map
package/dist/impl-DMm4lt-A.cjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"impl-DMm4lt-A.cjs","names":["buildTranscendGraphQLClient","parseFilePath","map","chunk","uniq","fetchRequestFilesForRequest","writeLargeCsv","pullChunkedCustomSiloOutstandingIdentifiers"],"sources":["../src/commands/request/cron/pull-profiles/impl.ts"],"sourcesContent":["import type { RequestAction } from '@transcend-io/privacy-types';\nimport { logger } from '../../../../logger';\nimport colors from 'colors';\nimport { uniq, chunk } from 'lodash-es';\nimport { map } from '../../../../lib/bluebird';\nimport {\n buildTranscendGraphQLClient,\n fetchRequestFilesForRequest,\n} from '../../../../lib/graphql';\nimport type { LocalContext } from '../../../../context';\nimport {\n pullChunkedCustomSiloOutstandingIdentifiers,\n type CsvFormattedIdentifier,\n} from '../../../../lib/cron';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\nimport { parseFilePath, writeLargeCsv } from '../../../../lib/helpers';\n\nexport interface PullProfilesCommandFlags {\n file: string;\n fileTarget: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n cronDataSiloId: string;\n targetDataSiloId: string;\n actions: RequestAction[];\n skipRequestCount: boolean;\n pageLimit: number;\n chunkSize: number;\n}\n\nexport async function pullProfiles(\n this: LocalContext,\n {\n file,\n fileTarget,\n transcendUrl,\n auth,\n sombraAuth,\n cronDataSiloId,\n targetDataSiloId,\n actions,\n skipRequestCount,\n pageLimit,\n chunkSize,\n }: PullProfilesCommandFlags,\n): Promise<void> {\n if (skipRequestCount) {\n logger.info(\n colors.yellow(\n 'Skipping request count as requested. This may help speed up the call.',\n ),\n );\n }\n\n if (\n Number.isNaN(chunkSize) ||\n chunkSize <= 0 ||\n chunkSize % pageLimit !== 0\n ) {\n logger.error(\n colors.red(\n `Invalid chunk size: \"${chunkSize}\". 
Must be a positive integer that is a multiple of ${pageLimit}.`,\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n // Create GraphQL client to connect to Transcend backend\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const { baseName, extension } = parseFilePath(file);\n const { baseName: baseNameTarget, extension: extensionTarget } =\n parseFilePath(fileTarget);\n\n let allIdentifiersCount = 0;\n let allTargetIdentifiersCount = 0;\n let fileCount = 0;\n // Create onSave callback to handle chunked processing\n const onSave = async (\n chunkToSave: CsvFormattedIdentifier[],\n ): Promise<void> => {\n // Add to all identifiers\n allIdentifiersCount += chunkToSave.length;\n\n // Get unique request IDs from this chunk\n const requestIds = chunkToSave.map((d) => d.requestId as string);\n const uniqueRequestIds = uniq(requestIds);\n\n // Pull down target identifiers for this chunk\n const chunkedRequestIds = chunk(uniqueRequestIds, pageLimit);\n const results = await map(\n chunkedRequestIds,\n async (requestIds) => {\n logger.info(\n colors.magenta(\n `Fetching target identifiers for ${requestIds.length} requests`,\n ),\n );\n const results = await fetchRequestFilesForRequest(\n client,\n pageLimit * 2,\n {\n requestIds,\n dataSiloIds: [targetDataSiloId],\n },\n );\n return results.map(({ fileName, remoteId }) => {\n if (!remoteId) {\n throw new Error(`Failed to find remoteId for ${fileName}`);\n }\n return {\n RecordId: remoteId,\n Object: fileName\n .replace('.json', '')\n .split('/')\n .pop()\n ?.replace(' Information', ''),\n Comment:\n 'Customer data deletion request submitted via transcend.io',\n };\n });\n },\n // We are grabbing all the request files for the 'pageLimit' # of requests at a time\n {\n concurrency: 1,\n },\n );\n\n allTargetIdentifiersCount += results.flat().length;\n\n // Write the identifiers and target identifiers to CSV\n const headers = uniq(chunkToSave.map((d) => Object.keys(d)).flat());\n const numberedFileName = `${baseName}-${fileCount}${extension}`;\n const numberedFileNameTarget = `${baseNameTarget}-${fileCount}${extensionTarget}`;\n await writeLargeCsv(numberedFileName, chunkToSave, headers);\n logger.info(\n colors.green(\n `Successfully wrote ${chunkToSave.length} identifiers to file \"${file}\"`,\n ),\n );\n\n const targetIdentifiers = results.flat();\n const headers2 = uniq(targetIdentifiers.map((d) => Object.keys(d)).flat());\n await writeLargeCsv(numberedFileNameTarget, targetIdentifiers, headers2);\n logger.info(\n colors.green(\n `Successfully wrote ${targetIdentifiers.length} identifiers to file \"${fileTarget}\"`,\n ),\n );\n\n logger.info(\n colors.blue(\n `Processed chunk of ${chunk.length} identifiers, found ${targetIdentifiers.length} target identifiers`,\n ),\n );\n fileCount += 1;\n };\n\n // Pull down outstanding identifiers using the new chunked function\n await pullChunkedCustomSiloOutstandingIdentifiers({\n dataSiloId: cronDataSiloId,\n auth,\n sombraAuth,\n actions,\n apiPageSize: pageLimit,\n savePageSize: chunkSize,\n onSave,\n transcendUrl,\n skipRequestCount,\n });\n\n logger.info(\n colors.green(\n `Successfully wrote ${allIdentifiersCount} identifiers to file \"${file}\"`,\n ),\n );\n logger.info(\n colors.green(\n `Successfully wrote ${allTargetIdentifiersCount} identifiers to file \"${fileTarget}\"`,\n ),\n 
);\n}\n"],"mappings":"+XA+BA,eAAsB,EAEpB,CACE,OACA,aACA,eACA,OACA,aACA,iBACA,mBACA,UACA,mBACA,YACA,aAEa,CACX,GACF,EAAA,EAAO,KACL,EAAA,QAAO,OACL,wEACD,CACF,EAID,OAAO,MAAM,EAAU,EACvB,GAAa,GACb,EAAY,IAAc,KAE1B,EAAA,EAAO,MACL,EAAA,QAAO,IACL,wBAAwB,EAAU,sDAAsD,EAAU,GACnG,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAASA,EAAAA,GAA4B,EAAc,EAAK,CACxD,CAAE,WAAU,aAAcC,EAAAA,EAAc,EAAK,CAC7C,CAAE,SAAU,EAAgB,UAAW,GAC3CA,EAAAA,EAAc,EAAW,CAEvB,EAAsB,EACtB,EAA4B,EAC5B,EAAY,EAmFhB,MAAMM,EAAAA,EAA4C,CAChD,WAAY,EACZ,OACA,aACA,UACA,YAAa,EACb,aAAc,EACd,OAxFa,KACb,IACkB,CAElB,GAAuB,EAAY,OAQnC,IAAM,EAAU,MAAML,EAAAA,GADIC,EAAAA,GAHDC,EAAAA,GADN,EAAY,IAAK,GAAM,EAAE,UAAoB,CACvB,CAGS,EAAU,CAG1D,KAAO,KACL,EAAA,EAAO,KACL,EAAA,QAAO,QACL,mCAAmC,EAAW,OAAO,WACtD,CACF,EACe,MAAMC,EAAAA,GACpB,EACA,EAAY,EACZ,CACE,aACA,YAAa,CAAC,EAAiB,CAChC,CACF,EACc,KAAK,CAAE,WAAU,cAAe,CAC7C,GAAI,CAAC,EACH,MAAU,MAAM,+BAA+B,IAAW,CAE5D,MAAO,CACL,SAAU,EACV,OAAQ,EACL,QAAQ,QAAS,GAAG,CACpB,MAAM,IAAI,CACV,KAAK,EACJ,QAAQ,eAAgB,GAAG,CAC/B,QACE,4DACH,EACD,EAGJ,CACE,YAAa,EACd,CACF,CAED,GAA6B,EAAQ,MAAM,CAAC,OAG5C,IAAM,EAAUD,EAAAA,GAAK,EAAY,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CAC7D,EAAmB,GAAG,EAAS,GAAG,IAAY,IAC9C,EAAyB,GAAG,EAAe,GAAG,IAAY,IAChE,MAAME,EAAAA,EAAc,EAAkB,EAAa,EAAQ,CAC3D,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAAY,OAAO,wBAAwB,EAAK,GACvE,CACF,CAED,IAAM,EAAoB,EAAQ,MAAM,CAExC,MAAMA,EAAAA,EAAc,EAAwB,EAD3BF,EAAAA,GAAK,EAAkB,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CACF,CACxE,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAAkB,OAAO,wBAAwB,EAAW,GACnF,CACF,CAED,EAAA,EAAO,KACL,EAAA,QAAO,KACL,sBAAsBD,EAAAA,GAAM,OAAO,sBAAsB,EAAkB,OAAO,qBACnF,CACF,CACD,GAAa,GAYb,eACA,mBACD,CAAC,CAEF,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAAoB,wBAAwB,EAAK,GACxE,CACF,CACD,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAA0B,wBAAwB,EAAW,GACpF,CACF"}
package/dist/impl-DRsuGuRx.cjs
@@ -1,2 +0,0 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./constants-gJm1eQH0.cjs`),n=require(`./syncConfigurationToTranscend-s-cjtUI3.cjs`);require(`./enums-BZulhPFa.cjs`);const r=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`),require(`./codecs-Bvmb8o9R.cjs`);const i=require(`./readTranscendYaml-Cycz6RxW.cjs`),a=require(`./mergeTranscendInputs-BIBCYbug.cjs`),o=require(`./api-keys-DMPYZTne.cjs`),s=require(`./done-input-validation-DGckEJ5a.cjs`);let c=require(`node:fs`),l=require(`node:path`),u=require(`colors`);u=e.t(u);async function d({transcendUrl:e,auth:t,pageSize:i,publishToPrivacyCenter:a,contents:o,deleteExtraAttributeValues:s=!1,classifyService:c=!1}){let l=n.ai(e,t);try{return!await n.t(o,l,{pageSize:i,publishToPrivacyCenter:a,classifyService:c,deleteExtraAttributeValues:s})}catch(e){return r.t.error(u.default.red(`An unexpected error occurred syncing the schema: ${e.message}`)),!1}}async function f({file:e=`./transcend.yml`,transcendUrl:f,auth:p,variables:m,pageSize:h,publishToPrivacyCenter:g,classifyService:_,deleteExtraAttributeValues:v}){s.t(this.process.exit);let y=await o.r(p),b=n.A(m),x;if(x=Array.isArray(y)&&(0,c.lstatSync)(e).isDirectory()?o.n(e).map(t=>(0,l.join)(e,t)):e.split(`,`),x.length<1)throw Error(`No file specified!`);let S=x.map(e=>{(0,c.existsSync)(e)?r.t.info(u.default.magenta(`Reading file "${e}"...`)):(r.t.error(u.default.red(`The file path does not exist on disk: ${e}. You can specify the filepath using --file=./examples/transcend.yml`)),this.process.exit(1));try{let t=i.r(e,b);return r.t.info(u.default.green(`Successfully read in "${e}"`)),{content:t,name:e.split(`/`).pop().replace(`.yml`,``)}}catch(e){r.t.error(u.default.red(`The shape of your yaml file is invalid with the following errors: ${e.message}`)),this.process.exit(1)}});if(typeof y==`string`){let[e,...n]=S.map(({content:e})=>e);await d({transcendUrl:f,auth:y,contents:a.t(e,...n),publishToPrivacyCenter:g,deleteExtraAttributeValues:v,pageSize:h,classifyService:!!_})||(r.t.info(u.default.red(`Sync encountered errors. View output above for more information, or check out ${t.r}`)),this.process.exit(1))}else{if(S.length!==1&&S.length!==y.length)throw Error(`Expected list of yml files to be equal to the list of API keys.Got ${S.length} YML file${S.length===1?``:`s`} and ${y.length} API key${y.length===1?``:`s`}`);let e=[];await n.js(y,async(t,n)=>{let i=`[${n+1}/${y.length}][${t.organizationName}] `;r.t.info(u.default.magenta(`~~~\n\n${i}Attempting to push configuration...\n\n~~~`));let a=S.length===1?S[0].content:S.find(e=>e.name===t.organizationName)?.content;if(!a){r.t.error(u.default.red(`${i}Failed to find transcend.yml file for organization: "${t.organizationName}".`)),e.push(t.organizationName);return}await d({transcendUrl:f,auth:t.apiKey,contents:a,pageSize:h,publishToPrivacyCenter:g,deleteExtraAttributeValues:v,classifyService:_})?r.t.info(u.default.green(`${i}Successfully pushed configuration!`)):(r.t.error(u.default.red(`${i}Failed to sync configuration.`)),e.push(t.organizationName))}),e.length>0&&(r.t.info(u.default.red(`Sync encountered errors for "${e.join(`,`)}". View output above for more information, or check out ${t.r}`)),this.process.exit(1))}r.t.info(u.default.green(`Successfully synced yaml file to Transcend! View at ${t.r}`))}exports.push=f;
- //# sourceMappingURL=impl-DRsuGuRx.cjs.map
package/dist/impl-DRsuGuRx.cjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"impl-DRsuGuRx.cjs","names":["buildTranscendGraphQLClient","syncConfigurationToTranscend","validateTranscendAuth","parseVariablesFromString","listFiles","readTranscendYaml","mergeTranscendInputs","ADMIN_DASH_INTEGRATIONS","mapSeries"],"sources":["../src/commands/inventory/push/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\n\nimport { logger } from '../../../logger';\nimport { mapSeries } from '../../../lib/bluebird';\nimport { existsSync, lstatSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { readTranscendYaml } from '../../../lib/readTranscendYaml';\nimport colors from 'colors';\nimport {\n buildTranscendGraphQLClient,\n syncConfigurationToTranscend,\n} from '../../../lib/graphql';\n\nimport { ADMIN_DASH_INTEGRATIONS } from '../../../constants';\nimport { TranscendInput } from '../../../codecs';\nimport { validateTranscendAuth, listFiles } from '../../../lib/api-keys';\nimport { mergeTranscendInputs } from '../../../lib/mergeTranscendInputs';\nimport { parseVariablesFromString } from '../../../lib/helpers/parseVariablesFromString';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n/**\n * Sync configuration to Transcend\n *\n * @param options - Options\n * @returns True if synced successfully, false if error occurs\n */\nasync function syncConfiguration({\n transcendUrl,\n auth,\n pageSize,\n publishToPrivacyCenter,\n contents,\n deleteExtraAttributeValues = false,\n classifyService = false,\n}: {\n /** Transcend YAML */\n contents: TranscendInput;\n /** Transcend URL */\n transcendUrl: string;\n /** API key */\n auth: string;\n /** Page size */\n pageSize: number;\n /** Skip privacy center publish step */\n publishToPrivacyCenter: boolean;\n /** classify data flow service if missing */\n classifyService?: boolean;\n /** Delete attributes when syncing */\n deleteExtraAttributeValues?: boolean;\n}): Promise<boolean> {\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Sync to Transcend\n try {\n const encounteredError = await syncConfigurationToTranscend(\n contents,\n client,\n {\n pageSize,\n publishToPrivacyCenter,\n classifyService,\n deleteExtraAttributeValues,\n },\n );\n return !encounteredError;\n } catch (err) {\n logger.error(\n colors.red(\n `An unexpected error occurred syncing the schema: ${err.message}`,\n ),\n );\n return false;\n }\n}\n\nexport interface PushCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n pageSize: number;\n variables: string;\n publishToPrivacyCenter: boolean;\n classifyService: boolean;\n deleteExtraAttributeValues: boolean;\n}\n\nexport async function push(\n this: LocalContext,\n {\n file = './transcend.yml',\n transcendUrl,\n auth,\n variables,\n pageSize,\n publishToPrivacyCenter,\n classifyService,\n deleteExtraAttributeValues,\n }: PushCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Parse out the variables\n const vars = parseVariablesFromString(variables);\n\n // check if we are being passed a list of API keys and a list of files\n let fileList: string[];\n if (Array.isArray(apiKeyOrList) && lstatSync(file).isDirectory()) {\n fileList = listFiles(file).map((filePath) => join(file, filePath));\n } else {\n fileList = file.split(',');\n }\n\n // Ensure at least one file is parsed\n if (fileList.length < 1) {\n throw new Error('No file 
specified!');\n }\n\n // eslint-disable-next-line array-callback-return,consistent-return\n const transcendInputs = fileList.map((filePath) => {\n // Ensure yaml file exists on disk\n if (!existsSync(filePath)) {\n logger.error(\n colors.red(\n `The file path does not exist on disk: ${filePath}. You can specify the filepath using --file=./examples/transcend.yml`,\n ),\n );\n this.process.exit(1);\n } else {\n logger.info(colors.magenta(`Reading file \"${filePath}\"...`));\n }\n\n try {\n // Read in the yaml file and validate it's shape\n const newContents = readTranscendYaml(filePath, vars);\n logger.info(colors.green(`Successfully read in \"${filePath}\"`));\n return {\n content: newContents,\n name: filePath.split('/').pop()!.replace('.yml', ''),\n };\n } catch (err) {\n logger.error(\n colors.red(\n `The shape of your yaml file is invalid with the following errors: ${err.message}`,\n ),\n );\n this.process.exit(1);\n }\n });\n\n // process a single API key\n if (typeof apiKeyOrList === 'string') {\n // if passed multiple inputs, merge them together\n const [base, ...rest] = transcendInputs.map(({ content }) => content);\n const contents = mergeTranscendInputs(base, ...rest);\n\n // sync the configuration\n const success = await syncConfiguration({\n transcendUrl,\n auth: apiKeyOrList,\n contents,\n publishToPrivacyCenter,\n deleteExtraAttributeValues,\n pageSize,\n classifyService: !!classifyService,\n });\n\n // exist with error code\n if (!success) {\n logger.info(\n colors.red(\n `Sync encountered errors. View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n } else {\n // if passed multiple inputs, expect them to be one per instance\n if (\n transcendInputs.length !== 1 &&\n transcendInputs.length !== apiKeyOrList.length\n ) {\n throw new Error(\n 'Expected list of yml files to be equal to the list of API keys.' +\n `Got ${transcendInputs.length} YML file${\n transcendInputs.length === 1 ? '' : 's'\n } and ${apiKeyOrList.length} API key${\n apiKeyOrList.length === 1 ? '' : 's'\n }`,\n );\n }\n\n const encounteredErrors: string[] = [];\n await mapSeries(apiKeyOrList, async (apiKey, ind) => {\n const prefix = `[${ind + 1}/${apiKeyOrList.length}][${\n apiKey.organizationName\n }] `;\n logger.info(\n colors.magenta(\n `~~~\\n\\n${prefix}Attempting to push configuration...\\n\\n~~~`,\n ),\n );\n\n // use the merged contents if 1 yml passed, else use the contents that map to that organization\n const useContents =\n transcendInputs.length === 1\n ? 
transcendInputs[0].content\n : transcendInputs.find(\n (input) => input.name === apiKey.organizationName,\n )?.content;\n\n // Throw error if cannot find a yml file matching that organization name\n if (!useContents) {\n logger.error(\n colors.red(\n `${prefix}Failed to find transcend.yml file for organization: \"${apiKey.organizationName}\".`,\n ),\n );\n encounteredErrors.push(apiKey.organizationName);\n return;\n }\n\n const success = await syncConfiguration({\n transcendUrl,\n auth: apiKey.apiKey,\n contents: useContents,\n pageSize,\n publishToPrivacyCenter,\n deleteExtraAttributeValues,\n classifyService,\n });\n\n if (success) {\n logger.info(\n colors.green(`${prefix}Successfully pushed configuration!`),\n );\n } else {\n logger.error(colors.red(`${prefix}Failed to sync configuration.`));\n encounteredErrors.push(apiKey.organizationName);\n }\n });\n\n if (encounteredErrors.length > 0) {\n logger.info(\n colors.red(\n `Sync encountered errors for \"${encounteredErrors.join(\n ',',\n )}\". View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced yaml file to Transcend! View at ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n}\n"],"mappings":"2iBA0BA,eAAe,EAAkB,CAC/B,eACA,OACA,WACA,yBACA,WACA,6BAA6B,GAC7B,kBAAkB,IAgBC,CACnB,IAAM,EAASA,EAAAA,GAA4B,EAAc,EAAK,CAG9D,GAAI,CAWF,MAAO,CAVkB,MAAMC,EAAAA,EAC7B,EACA,EACA,CACE,WACA,yBACA,kBACA,6BACD,CACF,OAEM,EAAK,CAMZ,OALA,EAAA,EAAO,MACL,EAAA,QAAO,IACL,oDAAoD,EAAI,UACzD,CACF,CACM,IAeX,eAAsB,EAEpB,CACE,OAAO,kBACP,eACA,OACA,YACA,WACA,yBACA,kBACA,8BAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAAe,MAAMC,EAAAA,EAAsB,EAAK,CAGhD,EAAOC,EAAAA,EAAyB,EAAU,CAG5C,EAQJ,GAPA,AAGE,EAHE,MAAM,QAAQ,EAAa,GAAA,EAAA,EAAA,WAAc,EAAK,CAAC,aAAa,CACnDC,EAAAA,EAAU,EAAK,CAAC,IAAK,IAAA,EAAA,EAAA,MAAkB,EAAM,EAAS,CAAC,CAEvD,EAAK,MAAM,IAAI,CAIxB,EAAS,OAAS,EACpB,MAAU,MAAM,qBAAqB,CAIvC,IAAM,EAAkB,EAAS,IAAK,GAAa,EAE7C,EAAA,EAAA,YAAY,EAAS,CAQvB,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,iBAAiB,EAAS,MAAM,CAAC,EAP5D,EAAA,EAAO,MACL,EAAA,QAAO,IACL,yCAAyC,EAAS,sEACnD,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAKtB,GAAI,CAEF,IAAM,EAAcC,EAAAA,EAAkB,EAAU,EAAK,CAErD,OADA,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,yBAAyB,EAAS,GAAG,CAAC,CACxD,CACL,QAAS,EACT,KAAM,EAAS,MAAM,IAAI,CAAC,KAAK,CAAE,QAAQ,OAAQ,GAAG,CACrD,OACM,EAAK,CACZ,EAAA,EAAO,MACL,EAAA,QAAO,IACL,qEAAqE,EAAI,UAC1E,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,GAEtB,CAGF,GAAI,OAAO,GAAiB,SAAU,CAEpC,GAAM,CAAC,EAAM,GAAG,GAAQ,EAAgB,KAAK,CAAE,aAAc,EAAQ,CAIrD,MAAM,EAAkB,CACtC,eACA,KAAM,EACN,SANeC,EAAAA,EAAqB,EAAM,GAAG,EAAK,CAOlD,yBACA,6BACA,WACA,gBAAiB,CAAC,CAAC,EACpB,CAAC,GAIA,EAAA,EAAO,KACL,EAAA,QAAO,IACL,iFAAiFC,EAAAA,IAClF,CACF,CAED,KAAK,QAAQ,KAAK,EAAE,MAEjB,CAEL,GACE,EAAgB,SAAW,GAC3B,EAAgB,SAAW,EAAa,OAExC,MAAU,MACR,sEACS,EAAgB,OAAO,WAC5B,EAAgB,SAAW,EAAI,GAAK,IACrC,OAAO,EAAa,OAAO,UAC1B,EAAa,SAAW,EAAI,GAAK,MAEtC,CAGH,IAAM,EAA8B,EAAE,CACtC,MAAMC,EAAAA,GAAU,EAAc,MAAO,EAAQ,IAAQ,CACnD,IAAM,EAAS,IAAI,EAAM,EAAE,GAAG,EAAa,OAAO,IAChD,EAAO,iBACR,IACD,EAAA,EAAO,KACL,EAAA,QAAO,QACL,UAAU,EAAO,4CAClB,CACF,CAGD,IAAM,EACJ,EAAgB,SAAW,EACvB,EAAgB,GAAG,QACnB,EAAgB,KACb,GAAU,EAAM,OAAS,EAAO,iBAClC,EAAE,QAGT,GAAI,CAAC,EAAa,CAChB,EAAA,EAAO,MACL,EAAA,QAAO,IACL,GAAG,EAAO,uDAAuD,EAAO,iBAAiB,IAC1F,CACF,CACD,EAAkB,KAAK,EAAO,iBAAiB,CAC/C,OAGc,MAAM,EAAkB,CACtC,eACA,KAAM,EAAO,OACb,SAAU,EACV,WACA,yBACA,6BACA,kBACD,CAAC,CAGA,EAAA,EAAO,KACL,EAAA,QAAO,MAAM,GAAG,EAAO,oCAAoC,CAC5D,EAED,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,GAAG,EAAO,+BAA+B,CAAC,CAClE,EAAkB,KAAK,EAAO,iBAAiB,GAEjD,CAEE,EAAkB,OAAS
,IAC7B,EAAA,EAAO,KACL,EAAA,QAAO,IACL,gCAAgC,EAAkB,KAChD,IACD,CAAC,0DAA0DD,EAAAA,IAC7D,CACF,CAED,KAAK,QAAQ,KAAK,EAAE,EAKxB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,uDAAuDA,EAAAA,IACxD,CACF"}
@@ -1,2 +0,0 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-gJm1eQH0.cjs`);const t=require(`./syncConfigurationToTranscend-s-cjtUI3.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./pooling-C-TYBnHI.cjs`),i=require(`./done-input-validation-DGckEJ5a.cjs`);let a=require(`node:fs`),o=require(`node:path`),s=require(`colors`);s=e.t(s);let c=require(`fast-csv`);c=e.t(c);let l=require(`node:events`),u=require(`node:fs/promises`),d=require(`node:stream/promises`),f=require(`node:stream`),p=require(`csv-parse`);function m(e,t){e||(n.t.error(s.default.red(`A --directory must be provided.`)),t.process.exit(1));let r=[];try{r=(0,a.readdirSync)(e).filter(e=>e.endsWith(`.csv`)).map(t=>(0,o.join)(e,t)).filter(e=>{try{return(0,a.statSync)(e).isFile()}catch{return!1}})}catch(r){n.t.error(s.default.red(`Failed to read directory: ${e}`)),n.t.error(s.default.red(r.message)),t.process.exit(1)}return r.length===0&&(n.t.error(s.default.red(`No CSV files found in directory: ${e}`)),t.process.exit(1)),n.t.info(s.default.green(`Found: ${r.join(`, `)} CSV files`)),r}function h(e,t){let n=(0,a.createWriteStream)(e),r=c.format({headers:t,writeHeaders:!0,objectMode:!0});return r.pipe(n),{async write(e){r.write(e)||await(0,l.once)(r,`drain`)},async end(){let e=Promise.all([(0,l.once)(n,`finish`)]);r.end(),await e}}}function g(e){return String(e).padStart(4,`0`)}function _(e){return Buffer.byteLength(Object.values(e).map(e=>e==null?``:String(e)).join(`,`),`utf8`)}async function v(e){let{filePath:t,outputDir:r,clearOutputDir:i,chunkSizeMB:c,onProgress:l,reportEveryMs:m=500}=e,{size:v}=await(0,u.stat)(t),y=0;n.t.info(s.default.magenta(`Chunking ${t} into ~${c}MB files...`));let b=Math.floor(c*1024*1024),x=(0,o.basename)(t,`.csv`),S=r||(0,o.dirname)(t);if(n.t.info(s.default.magenta(`Output directory: ${S}`)),await(0,u.mkdir)(S,{recursive:!0}),i){n.t.warn(s.default.yellow(`Clearing output directory: ${S}`));let e=await(0,u.readdir)(S);await Promise.all(e.filter(e=>e.startsWith(`${x}_chunk_`)&&e.endsWith(`.csv`)).map(e=>(0,u.unlink)((0,o.join)(S,e))))}let C=null,w=null,T=0,E=1,D=0,O=new p.Parser({columns:!1,skip_empty_lines:!0}),k=0,A=0,j=()=>{let e=A>0?k/A:0,t=e>0?Math.max(T,Math.ceil(v/e)):void 0;l(T,t),y=Date.now()};j();let M=null,N=()=>(0,o.join)(S,`${x}_chunk_${g(E)}.csv`),P=new f.Transform({objectMode:!0,async transform(e,t,r){try{if(!C){C=e.slice(0),w=C.length,M=h(N(),C),r();return}w!==null&&e.length!==w&&n.t.warn(s.default.yellow(`Row has ${e.length} cols; expected ${w}`)),T+=1,T%25e4==0&&l(T);let t=Object.fromEntries(C.map((t,n)=>[t,e[n]])),i=_(t);k+=i,A+=1,Date.now()-y>=m&&j(),M&&D>0&&D+i>b&&(await M.end(),E+=1,D=0,n.t.info(s.default.green(`Rolling to chunk ${E} after ${T.toLocaleString()} rows.`)),M=h(N(),C)),M||=h(N(),C),await M.write(t),D+=i,r()}catch(e){r(e)}},async flush(e){try{M&&=(await M.end(),null),j(),e()}catch(t){e(t)}}});await(0,d.pipeline)((0,a.createReadStream)(t),O,P),l(T),n.t.info(s.default.green(`Chunked ${t} into ${E} file(s); processed ${T.toLocaleString()} rows.`))}async function y(){let e=Number(process.env.WORKER_ID||`0`);n.t.info(`[w${e}] ready pid=${process.pid}`),process.send?.({type:`ready`}),process.on(`message`,async r=>{if(!r||typeof r!=`object`||(r.type===`shutdown`&&process.exit(0),r.type!==`task`))return;let{filePath:i,options:a}=r.payload,{outputDir:o,clearOutputDir:s,chunkSizeMB:c}=a;try{await 
v({filePath:i,outputDir:o,clearOutputDir:s,chunkSizeMB:c,onProgress:(e,t)=>process.send?.({type:`progress`,payload:{filePath:i,processed:e,total:t}})}),process.send?.({type:`result`,payload:{ok:!0,filePath:i}})}catch(r){let a=t.O(r);n.t.error(`[w${e}] ERROR ${i}: ${a}`),process.send?.({type:`result`,payload:{ok:!1,filePath:i,error:a}})}}),await new Promise(()=>{})}function b(e){return r.r(e)}function x(e){return r.i(e)}const S={renderHeader:b,renderWorkers:x};function C(){return typeof __filename<`u`?__filename:process.argv[1]}async function w(e){i.t(this.process.exit);let{directory:t,outputDir:a,clearOutputDir:o,chunkSizeMB:c,concurrency:l,viewerMode:u}=e,d=m(t,this),{poolSize:f,cpuCount:p}=r.s(l,d.length);n.t.info(s.default.green(`Chunking ${d.length} CSV file(s) with pool size ${f} (CPU=${p})`));let h=d.map(e=>({filePath:e,options:{outputDir:a,clearOutputDir:o,chunkSizeMB:c}}));await r.n({title:`Chunk CSV - ${t}`,baseDir:t||a||process.cwd(),childFlag:r.o,childModulePath:C(),poolSize:f,cpuCount:p,filesTotal:d.length,hooks:{nextTask:()=>h.shift(),taskLabel:e=>e.filePath,initTotals:()=>({}),initSlotProgress:()=>void 0,onProgress:e=>e,onResult:(e,t)=>({totals:e,ok:!!t.ok}),postProcess:async()=>{}},viewerMode:u,render:e=>r.a(e,S,u),extraKeyHandler:({logsBySlot:e,repaint:t,setPaused:n})=>r.t({logsBySlot:e,repaint:t,setPaused:n})})}process.argv.includes(r.o)&&y().catch(e=>{n.t.error(e),process.exit(1)}),exports.chunkCsv=w;
- //# sourceMappingURL=impl-DSrvfkVp.cjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"impl-DSrvfkVp.cjs","names":["fastcsv","Parser","Transform","extractErrorMessage","makeHeader","makeWorkerRows","computePoolSize","runPool","CHILD_FLAG","dashboardPlugin","createExtraKeyHandler"],"sources":["../src/lib/helpers/collectCsvFilesOrExit.ts","../src/lib/helpers/chunkOneCsvFile.ts","../src/commands/admin/chunk-csv/worker.ts","../src/commands/admin/chunk-csv/ui/plugin.ts","../src/commands/admin/chunk-csv/impl.ts"],"sourcesContent":["import { join } from 'node:path';\nimport { readdirSync, statSync } from 'node:fs';\nimport colors from 'colors';\nimport { logger } from '../../logger';\nimport type { LocalContext } from '../../context';\n\n/**\n * Validate flags and collect CSV file paths from a directory.\n * On validation error, the provided `exit` function is called.\n *\n * @param directory - the directory containing CSV files\n * @param localContext - the context of the command, used for logging and exit\n * @returns an array of valid CSV file paths\n */\nexport function collectCsvFilesOrExit(\n directory: string | undefined,\n localContext: LocalContext,\n): string[] {\n if (!directory) {\n logger.error(colors.red('A --directory must be provided.'));\n localContext.process.exit(1);\n }\n\n let files: string[] = [];\n try {\n const entries = readdirSync(directory);\n files = entries\n .filter((f) => f.endsWith('.csv'))\n .map((f) => join(directory, f))\n .filter((p) => {\n try {\n return statSync(p).isFile();\n } catch {\n return false;\n }\n });\n } catch (err) {\n logger.error(colors.red(`Failed to read directory: ${directory}`));\n logger.error(colors.red((err as Error).message));\n localContext.process.exit(1);\n }\n\n if (files.length === 0) {\n logger.error(colors.red(`No CSV files found in directory: ${directory}`));\n localContext.process.exit(1);\n }\n logger.info(colors.green(`Found: ${files.join(', ')} CSV files`));\n return files;\n}\n","import { createReadStream, createWriteStream } from 'node:fs';\nimport { mkdir, readdir, unlink, stat } from 'node:fs/promises';\nimport { pipeline } from 'node:stream/promises';\nimport { Transform } from 'node:stream';\nimport { once } from 'node:events';\nimport { Parser } from 'csv-parse';\nimport { basename, dirname, join } from 'node:path';\nimport colors from 'colors';\nimport * as fastcsv from 'fast-csv';\nimport { logger } from '../../logger';\n\n/**\n * Options for chunking a single CSV file\n */\nexport type ChunkOpts = {\n /** Path to the CSV file to chunk */\n filePath: string;\n /** Output directory for chunk files; defaults to the same directory as the input file */\n outputDir?: string;\n /** Clear output directory before starting */\n clearOutputDir: boolean;\n /** Chunk size in MB */\n chunkSizeMB: number;\n /** Optional report interval in milliseconds for progress updates */\n reportEveryMs?: number;\n /** Callback for progress updates */\n onProgress: (processed: number, total?: number) => void;\n};\n\n/**\n * Create a CSV writer (fast-csv formatter piped to a write stream) that writes\n * a header line first, and then accepts object rows. 
Returns a tiny API to\n * write rows with backpressure handling and to close the file cleanly.\n *\n * @param filePath - The path to the output CSV file\n * @param headers - The headers for the CSV file\n * @returns An object with `write` and `end` methods\n */\nfunction createCsvChunkWriter(\n filePath: string,\n headers: string[],\n): {\n /** Write a row object to the CSV file */\n write: (row: Record<string, unknown>) => Promise<void>;\n /** Close the CSV file, ensuring all data is flushed */\n end: () => Promise<void>;\n} {\n const ws = createWriteStream(filePath);\n const csv = fastcsv.format({ headers, writeHeaders: true, objectMode: true });\n // Pipe csv → file stream\n csv.pipe(ws);\n\n return {\n /**\n * Write a row object to the CSV file.\n *\n * @param row - The row data as an object\n */\n async write(row) {\n // Respect backpressure from fast-csv formatter\n const ok = csv.write(row);\n if (!ok) {\n await once(csv, 'drain');\n }\n },\n /**\n * Close the CSV file, ensuring all data is flushed.\n */\n async end() {\n // End formatter; wait for underlying file stream to finish flush/close\n const finished = Promise.all([once(ws, 'finish')]);\n csv.end();\n await finished;\n },\n };\n}\n\n/**\n * Zero-pad chunk numbers to four digits (e.g., 1 → \"0001\").\n *\n * @param n - The chunk number to pad\n * @returns The padded chunk number as a string\n */\nfunction pad4(n: number): string {\n return String(n).padStart(4, '0');\n}\n\n/**\n * Approximate row size in bytes using comma-joined field values.\n *\n * @param obj - The row object to estimate size for\n * @returns Approximate byte size of the row when serialized as CSV\n */\nfunction approxRowBytes(obj: Record<string, unknown>): number {\n // naive but fast; adequate for chunk rollover thresholding\n return Buffer.byteLength(\n Object.values(obj)\n .map((v) => (v == null ? '' : String(v)))\n .join(','),\n 'utf8',\n );\n}\n\n/**\n * Stream a single CSV file and write chunk files of roughly chunkSizeMB.\n * - Writes header to each chunk.\n * - Logs periodic progress via onProgress.\n *\n * @param opts - Options for chunking the file\n * @returns Promise that resolves when done\n */\nexport async function chunkOneCsvFile(opts: ChunkOpts): Promise<void> {\n const {\n filePath,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n onProgress,\n reportEveryMs = 500,\n } = opts;\n const { size: fileBytes } = await stat(filePath); // total bytes on disk\n let lastTick = 0;\n\n logger.info(\n colors.magenta(`Chunking ${filePath} into ~${chunkSizeMB}MB files...`),\n );\n\n const chunkSizeBytes = Math.floor(chunkSizeMB * 1024 * 1024);\n const baseName = basename(filePath, '.csv');\n const outDir = outputDir || dirname(filePath);\n logger.info(colors.magenta(`Output directory: ${outDir}`));\n await mkdir(outDir, { recursive: true });\n\n // Clear previous chunk files for this base\n if (clearOutputDir) {\n logger.warn(colors.yellow(`Clearing output directory: ${outDir}`));\n const files = await readdir(outDir);\n await Promise.all(\n files\n .filter((f) => f.startsWith(`${baseName}_chunk_`) && f.endsWith('.csv'))\n .map((f) => unlink(join(outDir, f))),\n );\n }\n\n let headerRow: string[] | null = null;\n let expectedCols: number | null = null;\n let totalLines = 0;\n let currentChunk = 1;\n let currentSize = 0;\n\n const parser = new Parser({\n columns: false,\n skip_empty_lines: true,\n });\n\n // running sample to estimate avg row bytes\n let sampleBytes = 0;\n let sampleRows = 0;\n\n const emit = (): void => {\n const avg = sampleRows > 0 ? 
sampleBytes / sampleRows : 0;\n const estTotal =\n avg > 0 ? Math.max(totalLines, Math.ceil(fileBytes / avg)) : undefined;\n onProgress(totalLines, estTotal); // <-- now has total\n lastTick = Date.now();\n };\n\n // seed an initial 0/N as soon as we start\n emit();\n\n // Current active chunk writer; created after we know headers\n let writer: {\n /** Write a row object to the current chunk file */\n write: (row: Record<string, unknown>) => Promise<void>;\n /** Close the current chunk file */\n end: () => Promise<void>;\n } | null = null;\n\n // Returns current chunk file path — chunk number is always 4-digit padded\n const currentChunkPath = (): string =>\n join(outDir, `${baseName}_chunk_${pad4(currentChunk)}.csv`);\n\n const t = new Transform({\n objectMode: true,\n /**\n * Transform each row of the CSV file into a chunk.\n *\n * @param row - The current row being processed\n * @param _enc - Encoding (not used)\n * @param cb - Callback to signal completion or error\n */\n async transform(row: string[], _enc, cb) {\n try {\n // First row is the header\n if (!headerRow) {\n headerRow = row.slice(0);\n expectedCols = headerRow.length;\n\n // Open first chunk with header asynchronously\n writer = createCsvChunkWriter(currentChunkPath(), headerRow);\n cb();\n return;\n }\n\n // sanity check rows (non-fatal)\n if (expectedCols !== null && row.length !== expectedCols) {\n // optionally log a warning or collect metrics\n logger.warn(\n colors.yellow(\n `Row has ${row.length} cols; expected ${expectedCols}`,\n ),\n );\n }\n\n totalLines += 1;\n if (totalLines % 250_000 === 0) {\n onProgress(totalLines);\n }\n\n // Build row object using the original header\n const obj = Object.fromEntries(headerRow!.map((h, i) => [h, row[i]]));\n\n // Determine the row size up-front\n const rowBytes = approxRowBytes(obj);\n sampleBytes += rowBytes;\n sampleRows += 1;\n\n // time-based throttle for UI updates\n if (Date.now() - lastTick >= reportEveryMs) emit();\n\n // If adding this row would exceed the threshold, roll first,\n // so this row becomes the first row in the next chunk.\n if (\n writer &&\n currentSize > 0 &&\n currentSize + rowBytes > chunkSizeBytes\n ) {\n await writer.end();\n currentChunk += 1;\n currentSize = 0;\n logger.info(\n colors.green(\n `Rolling to chunk ${currentChunk} after ${totalLines.toLocaleString()} rows.`,\n ),\n );\n writer = createCsvChunkWriter(currentChunkPath(), headerRow!);\n }\n\n // Ensure writer exists (should after header)\n if (!writer) {\n writer = createCsvChunkWriter(currentChunkPath(), headerRow!);\n }\n\n // Write row and update approximate size\n await writer.write(obj);\n currentSize += rowBytes;\n\n cb();\n } catch (e) {\n cb(e as Error);\n }\n },\n\n // Ensure final file is closed\n /**\n * Flush is called when the readable has ended; we close any open writer.\n *\n * @param cb - Callback to signal completion or error\n */\n async flush(cb) {\n try {\n if (writer) {\n await writer.end();\n writer = null;\n }\n emit(); // Final progress tick\n cb();\n } catch (e) {\n cb(e as Error);\n }\n },\n });\n\n const rs = createReadStream(filePath);\n await pipeline(rs, parser, t);\n\n // Final progress tick\n onProgress(totalLines);\n logger.info(\n colors.green(\n `Chunked ${filePath} into ${currentChunk} file(s); processed ${totalLines.toLocaleString()} rows.`,\n ),\n );\n}\n","import { extractErrorMessage } from '../../../lib/helpers';\nimport { chunkOneCsvFile } from '../../../lib/helpers/chunkOneCsvFile';\nimport type { ToWorker } from 
'../../../lib/pooling';\nimport { logger } from '../../../logger';\n\n/**\n * A unit of work: instructs a worker to chunk a single CSV file.\n */\nexport type ChunkTask = {\n /** Absolute path of the CSV file to chunk. */\n filePath: string;\n /** Options controlling output and chunk size. */\n options: {\n /** Optional directory where chunked output files should be written. */\n outputDir?: string;\n /** Whether to clear any pre-existing output chunks before writing new ones. */\n clearOutputDir: boolean;\n /** Approximate target chunk size in MB (well under Node’s string size limits). */\n chunkSizeMB: number;\n };\n};\n\n/**\n * Per-worker progress snapshot for the chunk-csv command.\n */\nexport type ChunkProgress = {\n /** File being processed by the worker. */\n filePath: string;\n /** Number of rows processed so far. */\n processed: number;\n /** Optional total rows in the file (not always known). */\n total?: number;\n};\n\n/**\n * Worker result message once a file has finished processing.\n */\nexport type ChunkResult = {\n /** Whether the file completed successfully. */\n ok: boolean;\n /** File path for which this result applies. */\n filePath: string;\n /** Optional error message if the file failed to chunk. */\n error?: string;\n};\n\n/**\n * Worker entrypoint.\n *\n * Lifecycle:\n * 1) Announce readiness to the parent via `{ type: 'ready' }`.\n * 2) Wait for `{ type: 'task' }` messages; for each, call `chunkOneCsvFile(...)`.\n * - While chunking, forward progress to the parent via `{ type: 'progress' }`.\n * - On completion, send `{ type: 'result', ok: true }`.\n * - On error, send `{ type: 'result', ok: false, error }` and exit(1).\n * 3) On `{ type: 'shutdown' }`, exit(0) gracefully.\n *\n * Notes:\n * - This process is typically spawned by a pool manager that assigns file paths to workers.\n * - The long-lived promise at the end keeps the worker alive between tasks until the parent\n * sends an explicit shutdown.\n */\nexport async function runChild(): Promise<void> {\n const workerId = Number(process.env.WORKER_ID || '0');\n logger.info(`[w${workerId}] ready pid=${process.pid}`);\n\n // Notify the parent that the worker is ready to receive tasks.\n process.send?.({ type: 'ready' });\n\n // Main message loop: receive tasks and shutdown requests from the parent.\n process.on('message', async (msg: ToWorker<ChunkTask>) => {\n if (!msg || typeof msg !== 'object') return;\n\n // Graceful shutdown: let the parent control lifecycle.\n if (msg.type === 'shutdown') {\n process.exit(0);\n }\n\n // Only handle task messages here.\n if (msg.type !== 'task') return;\n\n const { filePath, options } = msg.payload;\n const { outputDir, clearOutputDir, chunkSizeMB } = options;\n\n try {\n // Stream the input CSV and write chunk files asynchronously.\n await chunkOneCsvFile({\n filePath,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n // Propagate incremental progress to the parent.\n onProgress: (processed, total) =>\n process.send?.({\n type: 'progress',\n payload: { filePath, processed, total },\n }),\n });\n\n // Report success to the parent.\n process.send?.({\n type: 'result',\n payload: { ok: true, filePath },\n });\n } catch (err) {\n // Log locally and report failure upstream; exit the worker with error code.\n const message = extractErrorMessage(err);\n logger.error(`[w${workerId}] ERROR ${filePath}: ${message}`);\n process.send?.({\n type: 'result',\n payload: { ok: false, filePath, error: message },\n });\n }\n });\n\n // keep alive\n await new Promise<never>(() => {\n // This 
promise never resolves, keeping the worker alive indefinitely\n // until the parent process instructs shutdown.\n });\n}\n","import {\n makeHeader,\n makeWorkerRows,\n type ChunkSlotProgress,\n type CommonCtx,\n type DashboardPlugin,\n} from '../../../../lib/pooling';\n\n/**\n * Header for chunk-csv (no extra totals block).\n *\n * @param ctx - Dashboard context.\n * @returns Header lines.\n */\nfunction renderHeader<TTotals>(\n ctx: CommonCtx<TTotals, ChunkSlotProgress>,\n): string[] {\n // no extra lines — reuse the shared header as-is\n return makeHeader(ctx);\n}\n\n/**\n * Worker rows for chunk-csv — share the generic row renderer.\n *\n * @param ctx - Dashboard context.\n * @returns Array of strings, each representing one worker row.\n */\nfunction renderWorkers<TTotals>(\n ctx: CommonCtx<TTotals, ChunkSlotProgress>,\n): string[] {\n return makeWorkerRows(ctx);\n}\n\nexport const chunkCsvPlugin: DashboardPlugin<unknown, ChunkSlotProgress> = {\n renderHeader,\n renderWorkers,\n // no extras\n};\n","import type { LocalContext } from '../../../context';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { collectCsvFilesOrExit } from '../../../lib/helpers/collectCsvFilesOrExit';\nimport {\n computePoolSize,\n createExtraKeyHandler,\n CHILD_FLAG,\n type PoolHooks,\n runPool,\n dashboardPlugin,\n} from '../../../lib/pooling';\nimport {\n runChild,\n type ChunkProgress,\n type ChunkResult,\n type ChunkTask,\n} from './worker';\nimport { chunkCsvPlugin } from './ui';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n/**\n * Returns the current module's path so the worker pool knows what file to re-exec.\n * In Node ESM, __filename is undefined, so we fall back to argv[1].\n *\n * @returns The current module's path as a string\n */\nfunction getCurrentModulePath(): string {\n if (typeof __filename !== 'undefined') {\n return __filename as unknown as string;\n }\n return process.argv[1];\n}\n\n/**\n * Totals aggregate for this command.\n * We don’t need custom counters since the runner already tracks\n * completed/failed counts in its header — so we just use an empty record.\n */\ntype Totals = Record<string, never>;\n\n/**\n * CLI flags accepted by the `chunk-csv` command.\n *\n * These are passed down from the CLI parser into the parent process.\n */\nexport type ChunkCsvCommandFlags = {\n directory: string;\n outputDir?: string;\n clearOutputDir: boolean;\n chunkSizeMB: number;\n concurrency?: number;\n viewerMode: boolean;\n};\n\n/**\n * Parent entrypoint for chunking many CSVs in parallel using the worker pool runner.\n *\n * Lifecycle:\n * 1) Discover CSV inputs (exit if none).\n * 2) Compute pool size (CPU-count heuristic or --concurrency).\n * 3) Build a FIFO queue of `ChunkTask`s.\n * 4) Define pool hooks to drive task assignment, progress, and result handling.\n * 5) Launch the pool with `runPool`, rendering via the `chunkCsvPlugin`.\n *\n * @param this - Bound CLI context (provides process exit + logging).\n * @param flags - CLI options for the run.\n */\nexport async function chunkCsv(\n this: LocalContext,\n flags: ChunkCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const {\n directory,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n concurrency,\n viewerMode,\n } = flags;\n\n /* 1) Discover CSV inputs */\n const files = collectCsvFilesOrExit(directory, this);\n\n /* 2) Size the pool */\n const { poolSize, cpuCount } = computePoolSize(concurrency, files.length);\n\n logger.info(\n 
colors.green(\n `Chunking ${files.length} CSV file(s) with pool size ${poolSize} (CPU=${cpuCount})`,\n ),\n );\n\n /* 3) Prepare a simple FIFO queue of tasks (one per file). */\n const queue = files.map<ChunkTask>((filePath) => ({\n filePath,\n options: { outputDir, clearOutputDir, chunkSizeMB },\n }));\n\n /* 4) Define pool hooks to adapt runner to this command. */\n const hooks: PoolHooks<ChunkTask, ChunkProgress, ChunkResult, Totals> = {\n nextTask: () => queue.shift(),\n taskLabel: (t) => t.filePath,\n initTotals: () => ({} as Totals),\n initSlotProgress: () => undefined,\n onProgress: (totals) => totals,\n onResult: (totals, res) => ({ totals, ok: !!res.ok }),\n // postProcess receives log context when viewerMode=true — we don’t need it here.\n postProcess: async () => {\n // nothing extra for chunk-csv\n },\n };\n\n /* 5) Launch the pool runner with our hooks and custom dashboard plugin. */\n await runPool({\n title: `Chunk CSV - ${directory}`,\n baseDir: directory || outputDir || process.cwd(),\n childFlag: CHILD_FLAG,\n childModulePath: getCurrentModulePath(),\n poolSize,\n cpuCount,\n filesTotal: files.length,\n hooks,\n viewerMode,\n render: (input) => dashboardPlugin(input, chunkCsvPlugin, viewerMode),\n extraKeyHandler: ({ logsBySlot, repaint, setPaused }) =>\n createExtraKeyHandler({\n logsBySlot,\n repaint,\n setPaused,\n }),\n });\n}\n\n/* -------------------------------------------------------------------------------------------------\n * If invoked directly as a child process, enter worker loop\n * ------------------------------------------------------------------------------------------------- */\nif (process.argv.includes(CHILD_FLAG)) {\n runChild().catch((err) => {\n logger.error(err);\n process.exit(1);\n });\n}\n"],"mappings":"8lBAcA,SAAgB,EACd,EACA,EACU,CACL,IACH,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,kCAAkC,CAAC,CAC3D,EAAa,QAAQ,KAAK,EAAE,EAG9B,IAAI,EAAkB,EAAE,CACxB,GAAI,CAEF,GAAA,EAAA,EAAA,aAD4B,EAAU,CAEnC,OAAQ,GAAM,EAAE,SAAS,OAAO,CAAC,CACjC,IAAK,IAAA,EAAA,EAAA,MAAW,EAAW,EAAE,CAAC,CAC9B,OAAQ,GAAM,CACb,GAAI,CACF,OAAA,EAAA,EAAA,UAAgB,EAAE,CAAC,QAAQ,MACrB,CACN,MAAO,KAET,OACG,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,6BAA6B,IAAY,CAAC,CAClE,EAAA,EAAO,MAAM,EAAA,QAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,EAAa,QAAQ,KAAK,EAAE,CAQ9B,OALI,EAAM,SAAW,IACnB,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,oCAAoC,IAAY,CAAC,CACzE,EAAa,QAAQ,KAAK,EAAE,EAE9B,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,UAAU,EAAM,KAAK,KAAK,CAAC,YAAY,CAAC,CAC1D,ECTT,SAAS,EACP,EACA,EAMA,CACA,IAAM,GAAA,EAAA,EAAA,mBAAuB,EAAS,CAChC,EAAMA,EAAQ,OAAO,CAAE,UAAS,aAAc,GAAM,WAAY,GAAM,CAAC,CAI7E,OAFA,EAAI,KAAK,EAAG,CAEL,CAML,MAAM,MAAM,EAAK,CAEJ,EAAI,MAAM,EAAI,EAEvB,MAAA,EAAA,EAAA,MAAW,EAAK,QAAQ,EAM5B,MAAM,KAAM,CAEV,IAAM,EAAW,QAAQ,IAAI,EAAA,EAAA,EAAA,MAAM,EAAI,SAAS,CAAC,CAAC,CAClD,EAAI,KAAK,CACT,MAAM,GAET,CASH,SAAS,EAAK,EAAmB,CAC/B,OAAO,OAAO,EAAE,CAAC,SAAS,EAAG,IAAI,CASnC,SAAS,EAAe,EAAsC,CAE5D,OAAO,OAAO,WACZ,OAAO,OAAO,EAAI,CACf,IAAK,GAAO,GAAK,KAAO,GAAK,OAAO,EAAE,CAAE,CACxC,KAAK,IAAI,CACZ,OACD,CAWH,eAAsB,EAAgB,EAAgC,CACpE,GAAM,CACJ,WACA,YACA,iBACA,cACA,aACA,gBAAgB,KACd,EACE,CAAE,KAAM,GAAc,MAAA,EAAA,EAAA,MAAW,EAAS,CAC5C,EAAW,EAEf,EAAA,EAAO,KACL,EAAA,QAAO,QAAQ,YAAY,EAAS,SAAS,EAAY,aAAa,CACvE,CAED,IAAM,EAAiB,KAAK,MAAM,EAAc,KAAO,KAAK,CACtD,GAAA,EAAA,EAAA,UAAoB,EAAU,OAAO,CACrC,EAAS,IAAA,EAAA,EAAA,SAAqB,EAAS,CAK7C,GAJA,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,qBAAqB,IAAS,CAAC,CAC1D,MAAA,EAAA,EAAA,OAAY,EAAQ,CAAE,UAAW,GAAM,CAAC,CAGpC,EAAgB,CAClB,EAAA,EAAO,KAAK,EAAA,QAAO,OAAO,8BAA8B,IAAS,CAAC,CAClE,IAAM,EAAQ,MAAA,EAAA,EAAA,SAAc,EAAO,CACnC,MAAM,QAAQ,IACZ,EACG,OAAQ,GAAM,EAAE,WAAW,GAAG,EAAS,SAAS,E
AAI,EAAE,SAAS,OAAO,CAAC,CACvE,IAAK,IAAA,EAAA,EAAA,SAAA,EAAA,EAAA,MAAkB,EAAQ,EAAE,CAAC,CAAC,CACvC,CAGH,IAAI,EAA6B,KAC7B,EAA8B,KAC9B,EAAa,EACb,EAAe,EACf,EAAc,EAEZ,EAAS,IAAIC,EAAAA,OAAO,CACxB,QAAS,GACT,iBAAkB,GACnB,CAAC,CAGE,EAAc,EACd,EAAa,EAEX,MAAmB,CACvB,IAAM,EAAM,EAAa,EAAI,EAAc,EAAa,EAClD,EACJ,EAAM,EAAI,KAAK,IAAI,EAAY,KAAK,KAAK,EAAY,EAAI,CAAC,CAAG,IAAA,GAC/D,EAAW,EAAY,EAAS,CAChC,EAAW,KAAK,KAAK,EAIvB,GAAM,CAGN,IAAI,EAKO,KAGL,OAAA,EAAA,EAAA,MACC,EAAQ,GAAG,EAAS,SAAS,EAAK,EAAa,CAAC,MAAM,CAEvD,EAAI,IAAIC,EAAAA,UAAU,CACtB,WAAY,GAQZ,MAAM,UAAU,EAAe,EAAM,EAAI,CACvC,GAAI,CAEF,GAAI,CAAC,EAAW,CACd,EAAY,EAAI,MAAM,EAAE,CACxB,EAAe,EAAU,OAGzB,EAAS,EAAqB,GAAkB,CAAE,EAAU,CAC5D,GAAI,CACJ,OAIE,IAAiB,MAAQ,EAAI,SAAW,GAE1C,EAAA,EAAO,KACL,EAAA,QAAO,OACL,WAAW,EAAI,OAAO,kBAAkB,IACzC,CACF,CAGH,GAAc,EACV,EAAa,MAAY,GAC3B,EAAW,EAAW,CAIxB,IAAM,EAAM,OAAO,YAAY,EAAW,KAAK,EAAG,IAAM,CAAC,EAAG,EAAI,GAAG,CAAC,CAAC,CAG/D,EAAW,EAAe,EAAI,CACpC,GAAe,EACf,GAAc,EAGV,KAAK,KAAK,CAAG,GAAY,GAAe,GAAM,CAKhD,GACA,EAAc,GACd,EAAc,EAAW,IAEzB,MAAM,EAAO,KAAK,CAClB,GAAgB,EAChB,EAAc,EACd,EAAA,EAAO,KACL,EAAA,QAAO,MACL,oBAAoB,EAAa,SAAS,EAAW,gBAAgB,CAAC,QACvE,CACF,CACD,EAAS,EAAqB,GAAkB,CAAE,EAAW,EAI/D,AACE,IAAS,EAAqB,GAAkB,CAAE,EAAW,CAI/D,MAAM,EAAO,MAAM,EAAI,CACvB,GAAe,EAEf,GAAI,OACG,EAAG,CACV,EAAG,EAAW,GAUlB,MAAM,MAAM,EAAI,CACd,GAAI,CACF,AAEE,KADA,MAAM,EAAO,KAAK,CACT,MAEX,GAAM,CACN,GAAI,OACG,EAAG,CACV,EAAG,EAAW,GAGnB,CAAC,CAGF,MAAA,EAAA,EAAA,WAAA,EAAA,EAAA,kBAD4B,EAAS,CAClB,EAAQ,EAAE,CAG7B,EAAW,EAAW,CACtB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,WAAW,EAAS,QAAQ,EAAa,sBAAsB,EAAW,gBAAgB,CAAC,QAC5F,CACF,CCtOH,eAAsB,GAA0B,CAC9C,IAAM,EAAW,OAAO,QAAQ,IAAI,WAAa,IAAI,CACrD,EAAA,EAAO,KAAK,KAAK,EAAS,cAAc,QAAQ,MAAM,CAGtD,QAAQ,OAAO,CAAE,KAAM,QAAS,CAAC,CAGjC,QAAQ,GAAG,UAAW,KAAO,IAA6B,CASxD,GARI,CAAC,GAAO,OAAO,GAAQ,WAGvB,EAAI,OAAS,YACf,QAAQ,KAAK,EAAE,CAIb,EAAI,OAAS,QAAQ,OAEzB,GAAM,CAAE,WAAU,WAAY,EAAI,QAC5B,CAAE,YAAW,iBAAgB,eAAgB,EAEnD,GAAI,CAEF,MAAM,EAAgB,CACpB,WACA,YACA,iBACA,cAEA,YAAa,EAAW,IACtB,QAAQ,OAAO,CACb,KAAM,WACN,QAAS,CAAE,WAAU,YAAW,QAAO,CACxC,CAAC,CACL,CAAC,CAGF,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAM,WAAU,CAChC,CAAC,OACK,EAAK,CAEZ,IAAM,EAAUC,EAAAA,EAAoB,EAAI,CACxC,EAAA,EAAO,MAAM,KAAK,EAAS,UAAU,EAAS,IAAI,IAAU,CAC5D,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAO,WAAU,MAAO,EAAS,CACjD,CAAC,GAEJ,CAGF,MAAM,IAAI,YAAqB,GAG7B,CCzGJ,SAAS,EACP,EACU,CAEV,OAAOC,EAAAA,EAAW,EAAI,CASxB,SAAS,EACP,EACU,CACV,OAAOC,EAAAA,EAAe,EAAI,CAG5B,MAAa,EAA8D,CACzE,eACA,gBAED,CCVD,SAAS,GAA+B,CAItC,OAHI,OAAO,WAAe,IACjB,WAEF,QAAQ,KAAK,GAqCtB,eAAsB,EAEpB,EACe,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CACJ,YACA,YACA,iBACA,cACA,cACA,cACE,EAGE,EAAQ,EAAsB,EAAW,KAAK,CAG9C,CAAE,WAAU,YAAaC,EAAAA,EAAgB,EAAa,EAAM,OAAO,CAEzE,EAAA,EAAO,KACL,EAAA,QAAO,MACL,YAAY,EAAM,OAAO,8BAA8B,EAAS,QAAQ,EAAS,GAClF,CACF,CAGD,IAAM,EAAQ,EAAM,IAAgB,IAAc,CAChD,WACA,QAAS,CAAE,YAAW,iBAAgB,cAAa,CACpD,EAAE,CAiBH,MAAMC,EAAAA,EAAQ,CACZ,MAAO,eAAe,IACtB,QAAS,GAAa,GAAa,QAAQ,KAAK,CAChD,UAAWC,EAAAA,EACX,gBAAiB,GAAsB,CACvC,WACA,WACA,WAAY,EAAM,OAClB,MAtBsE,CACtE,aAAgB,EAAM,OAAO,CAC7B,UAAY,GAAM,EAAE,SACpB,gBAAmB,EAAE,EACrB,qBAAwB,IAAA,GACxB,WAAa,GAAW,EACxB,UAAW,EAAQ,KAAS,CAAE,SAAQ,GAAI,CAAC,CAAC,EAAI,GAAI,EAEpD,YAAa,SAAY,GAG1B,CAYC,aACA,OAAS,GAAUC,EAAAA,EAAgB,EAAO,EAAgB,EAAW,CACrE,iBAAkB,CAAE,aAAY,UAAS,eACvCC,EAAAA,EAAsB,CACpB,aACA,UACA,YACD,CAAC,CACL,CAAC,CAMA,QAAQ,KAAK,SAASF,EAAAA,EAAW,EACnC,GAAU,CAAC,MAAO,GAAQ,CACxB,EAAA,EAAO,MAAM,EAAI,CACjB,QAAQ,KAAK,EAAE,EACf"}
@@ -1,2 +0,0 @@
- require(`./constants-gJm1eQH0.cjs`);const e=require(`./syncConfigurationToTranscend-s-cjtUI3.cjs`);require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const t=require(`./done-input-validation-DGckEJ5a.cjs`);async function n({auth:n,dataSiloId:r,actions:i,transcendUrl:a}){t.t(this.process.exit),await e.z({requestActions:i,transcendUrl:a,auth:n,dataSiloId:r})}exports.retryRequestDataSilos=n;
- //# sourceMappingURL=impl-DZMWJNLE.cjs.map
@@ -1,2 +0,0 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-gJm1eQH0.cjs`);const t=require(`./syncConfigurationToTranscend-s-cjtUI3.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./done-input-validation-DGckEJ5a.cjs`),i=require(`./preference-management-Ch77Yxod.cjs`);let a=require(`colors`);a=e.t(a);const o=[`userId`,`timestamp`,`partition`,`decryptionStatus`,`updatedAt`,`usp`,`gpp`,`tcf`,`airgapVersion`,`metadata`,`metadataTimestamp`];async function s({auth:e,partition:s,sombraAuth:c,file:l,transcendUrl:u,timestampBefore:d,timestampAfter:f,updatedBefore:p,updatedAfter:m,identifiers:h=[],concurrency:g,shouldChunk:_,windowConcurrency:v,maxChunks:y,exportIdentifiersWithDelimiter:b,maxLookbackDays:x}){r.t(this.process.exit);let S=await t.ii(u,e,c),C=t.ai(u,e),w=h.map(e=>{if(!e.includes(`:`))return{name:`email`,value:e};let[t,n]=e.split(`:`);return{name:t,value:n}}),T={...d?{timestampBefore:d.toISOString()}:{},...f?{timestampAfter:f.toISOString()}:{},...m||p?{system:{...p?{updatedBefore:p.toISOString()}:{},...m?{updatedAfter:m.toISOString()}:{}}}:{},...w.length>0?{identifiers:w}:{}};n.t.info(`Fetching consent preferences from partition ${s}, using mode=${_?`chunked-stream`:`paged-stream`}...`),n.t.info(a.default.magenta(`Preparing CSV at: ${l}`));let[E,D]=await Promise.all([t.xr(C),t.n(C)]),O=D.map(e=>e.name),k=Array.from(new Set(E.flatMap(e=>e.topics?.map(t=>`${e.trackingType}_${t.slug}`)??[]))).sort((e,t)=>e.localeCompare(t)),A=Array.from(new Set(E.map(e=>e.trackingType))).sort((e,t)=>e.localeCompare(t)),j=[...o,...O,...A,...k],M=null,N=!1,P=e=>{if(!e||e.length===0)return;let n=e.map(e=>i.i(e,b));if(!N){let e=Object.keys(n[0]??{}),r=new Set;M=[...j,...e].filter(e=>e===void 0||r.has(e)?!1:(r.add(e),!0)),t.s(l,M),N=!0}t.a(l,n,M)};if(_){await i.r(S,{partition:s,filterBy:T,limit:g,windowConcurrency:v,maxChunks:y,maxLookbackDays:x,onItems:e=>P(e)}),n.t.info(a.default.green(`Finished writing CSV to ${l}`));return}await i.n(S,{partition:s,filterBy:T,limit:g,onItems:e=>P(e)}),n.t.info(a.default.green(`Finished writing CSV to ${l}`))}exports.pullConsentPreferences=s;
- //# sourceMappingURL=impl-DbqAvW7X.cjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"impl-DbqAvW7X.cjs","names":["createSombraGotInstance","buildTranscendGraphQLClient","fetchAllPurposesAndPreferences","fetchAllIdentifiers","transformPreferenceRecordToCsv","fetchConsentPreferencesChunked","fetchConsentPreferences"],"sources":["../src/commands/consent/pull-consent-preferences/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport colors from 'colors';\n\nimport {\n fetchConsentPreferences,\n fetchConsentPreferencesChunked,\n transformPreferenceRecordToCsv,\n type PreferenceIdentifier,\n} from '../../../lib/preference-management';\nimport {\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchAllIdentifiers,\n fetchAllPurposesAndPreferences,\n} from '../../../lib/graphql';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { logger } from '../../../logger';\nimport { initCsvFile, appendCsvRowsOrdered } from '../../../lib/helpers';\nimport type { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\n\n// Known “core” columns your transformer usually produces up front.\n// Leave this list conservative; we’ll still union with transformer keys.\nconst CORE_COLS = [\n 'userId',\n 'timestamp',\n 'partition',\n 'decryptionStatus',\n 'updatedAt',\n 'usp',\n 'gpp',\n 'tcf',\n 'airgapVersion',\n 'metadata',\n 'metadataTimestamp',\n];\n\nexport interface PullConsentPreferencesCommandFlags {\n auth: string;\n partition: string;\n sombraAuth?: string;\n file: string;\n transcendUrl: string;\n timestampBefore?: Date;\n exportIdentifiersWithDelimiter: string;\n timestampAfter?: Date;\n updatedBefore?: Date;\n updatedAfter?: Date;\n identifiers?: string[];\n concurrency: number;\n shouldChunk: boolean;\n windowConcurrency: number;\n maxChunks: number;\n maxLookbackDays: number;\n}\n\nexport async function pullConsentPreferences(\n this: LocalContext,\n {\n auth,\n partition,\n sombraAuth,\n file,\n transcendUrl,\n timestampBefore,\n timestampAfter,\n updatedBefore,\n updatedAfter,\n identifiers = [],\n concurrency,\n shouldChunk,\n windowConcurrency,\n maxChunks,\n exportIdentifiersWithDelimiter,\n maxLookbackDays,\n }: PullConsentPreferencesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Identifiers are key:value, parse to PreferenceIdentifier[]\n const parsedIdentifiers = identifiers.map(\n (identifier): PreferenceIdentifier => {\n if (!identifier.includes(':')) {\n return {\n name: 'email',\n value: identifier,\n };\n }\n const [name, value] = identifier.split(':');\n return { name, value };\n },\n );\n\n // Build filter\n const filterBy = {\n ...(timestampBefore\n ? { timestampBefore: timestampBefore.toISOString() }\n : {}),\n ...(timestampAfter ? { timestampAfter: timestampAfter.toISOString() } : {}),\n ...(updatedAfter || updatedBefore\n ? {\n system: {\n ...(updatedBefore\n ? { updatedBefore: updatedBefore.toISOString() }\n : {}),\n ...(updatedAfter\n ? { updatedAfter: updatedAfter.toISOString() }\n : {}),\n },\n }\n : {}),\n ...(parsedIdentifiers.length > 0 ? { identifiers: parsedIdentifiers } : {}),\n };\n\n logger.info(\n `Fetching consent preferences from partition ${partition}, using mode=${\n shouldChunk ? 
'chunked-stream' : 'paged-stream'\n }...`,\n );\n\n logger.info(colors.magenta(`Preparing CSV at: ${file}`));\n\n // Fetch full sets (purposes+topics, identifiers) to ensure header completeness\n const [purposesWithTopics, allIdentifiers] = await Promise.all([\n fetchAllPurposesAndPreferences(client),\n fetchAllIdentifiers(client),\n ]);\n\n // Identifier columns: exactly the identifier names\n const identifierCols = allIdentifiers.map((i) => i.name);\n\n // Preference topic columns: topic names (de-duped)\n const topicCols = Array.from(\n new Set(\n purposesWithTopics.flatMap(\n (p) => p.topics?.map((t) => `${p.trackingType}_${t.slug}`) ?? [],\n ),\n ),\n ).sort((a, b) => a.localeCompare(b));\n\n // Some setups also want a per-purpose boolean column (e.g., “Email”, “Sms”).\n // If your transformer includes those, list them here, derived from purposes:\n const purposeCols = Array.from(\n new Set(purposesWithTopics.map((p) => p.trackingType)),\n ).sort((a, b) => a.localeCompare(b));\n\n // Build the complete header skeleton.\n // We’ll still union with the first transformed row’s keys to be safe.\n const completeHeadersList = [\n ...CORE_COLS,\n ...identifierCols,\n ...purposeCols,\n ...topicCols,\n ];\n\n // Lazily initialize CSV header order from the first transformed row.\n let headerOrder: string[] | null = null;\n let wroteHeader = false;\n const writeRows = (items: PreferenceQueryResponseItem[]): void => {\n if (!items || items.length === 0) return;\n const rows = items.map((row) =>\n transformPreferenceRecordToCsv(row, exportIdentifiersWithDelimiter),\n );\n if (!wroteHeader) {\n const firstKeys = Object.keys(rows[0] ?? {});\n const seen = new Set<string>();\n headerOrder = [...completeHeadersList, ...firstKeys].filter((k) => {\n if (k === undefined) return false;\n if (seen.has(k)) return false;\n seen.add(k);\n return true;\n });\n initCsvFile(file, headerOrder);\n wroteHeader = true;\n }\n appendCsvRowsOrdered(file, rows, headerOrder!);\n };\n\n if (shouldChunk) {\n // Stream via chunked fetcher with page callback\n await fetchConsentPreferencesChunked(sombra, {\n partition,\n filterBy,\n limit: concurrency,\n windowConcurrency,\n maxChunks,\n maxLookbackDays,\n onItems: (items) => writeRows(items),\n });\n\n logger.info(colors.green(`Finished writing CSV to ${file}`));\n return;\n }\n\n // Non-chunked path: still stream page-by-page via onItems (no in-memory accumulation)\n await fetchConsentPreferences(sombra, {\n partition,\n filterBy,\n limit: concurrency, // page size (API max 50 enforced internally)\n onItems: (items) => writeRows(items),\n });\n\n logger.info(colors.green(`Finished writing CSV to 
${file}`));\n}\n"],"mappings":"gZAsBA,MAAM,EAAY,CAChB,SACA,YACA,YACA,mBACA,YACA,MACA,MACA,MACA,gBACA,WACA,oBACD,CAqBD,eAAsB,EAEpB,CACE,OACA,YACA,aACA,OACA,eACA,kBACA,iBACA,gBACA,eACA,cAAc,EAAE,CAChB,cACA,cACA,oBACA,YACA,iCACA,mBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAAS,MAAMA,EAAAA,GAAwB,EAAc,EAAM,EAAW,CACtE,EAASC,EAAAA,GAA4B,EAAc,EAAK,CAGxD,EAAoB,EAAY,IACnC,GAAqC,CACpC,GAAI,CAAC,EAAW,SAAS,IAAI,CAC3B,MAAO,CACL,KAAM,QACN,MAAO,EACR,CAEH,GAAM,CAAC,EAAM,GAAS,EAAW,MAAM,IAAI,CAC3C,MAAO,CAAE,OAAM,QAAO,EAEzB,CAGK,EAAW,CACf,GAAI,EACA,CAAE,gBAAiB,EAAgB,aAAa,CAAE,CAClD,EAAE,CACN,GAAI,EAAiB,CAAE,eAAgB,EAAe,aAAa,CAAE,CAAG,EAAE,CAC1E,GAAI,GAAgB,EAChB,CACE,OAAQ,CACN,GAAI,EACA,CAAE,cAAe,EAAc,aAAa,CAAE,CAC9C,EAAE,CACN,GAAI,EACA,CAAE,aAAc,EAAa,aAAa,CAAE,CAC5C,EAAE,CACP,CACF,CACD,EAAE,CACN,GAAI,EAAkB,OAAS,EAAI,CAAE,YAAa,EAAmB,CAAG,EAAE,CAC3E,CAED,EAAA,EAAO,KACL,+CAA+C,EAAU,eACvD,EAAc,iBAAmB,eAClC,KACF,CAED,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,qBAAqB,IAAO,CAAC,CAGxD,GAAM,CAAC,EAAoB,GAAkB,MAAM,QAAQ,IAAI,CAC7DC,EAAAA,GAA+B,EAAO,CACtCC,EAAAA,EAAoB,EAAO,CAC5B,CAAC,CAGI,EAAiB,EAAe,IAAK,GAAM,EAAE,KAAK,CAGlD,EAAY,MAAM,KACtB,IAAI,IACF,EAAmB,QAChB,GAAM,EAAE,QAAQ,IAAK,GAAM,GAAG,EAAE,aAAa,GAAG,EAAE,OAAO,EAAI,EAAE,CACjE,CACF,CACF,CAAC,MAAM,EAAG,IAAM,EAAE,cAAc,EAAE,CAAC,CAI9B,EAAc,MAAM,KACxB,IAAI,IAAI,EAAmB,IAAK,GAAM,EAAE,aAAa,CAAC,CACvD,CAAC,MAAM,EAAG,IAAM,EAAE,cAAc,EAAE,CAAC,CAI9B,EAAsB,CAC1B,GAAG,EACH,GAAG,EACH,GAAG,EACH,GAAG,EACJ,CAGG,EAA+B,KAC/B,EAAc,GACZ,EAAa,GAA+C,CAChE,GAAI,CAAC,GAAS,EAAM,SAAW,EAAG,OAClC,IAAM,EAAO,EAAM,IAAK,GACtBC,EAAAA,EAA+B,EAAK,EAA+B,CACpE,CACD,GAAI,CAAC,EAAa,CAChB,IAAM,EAAY,OAAO,KAAK,EAAK,IAAM,EAAE,CAAC,CACtC,EAAO,IAAI,IACjB,EAAc,CAAC,GAAG,EAAqB,GAAG,EAAU,CAAC,OAAQ,GACvD,IAAM,IAAA,IACN,EAAK,IAAI,EAAE,CAAS,IACxB,EAAK,IAAI,EAAE,CACJ,IACP,CACF,EAAA,EAAY,EAAM,EAAY,CAC9B,EAAc,GAEhB,EAAA,EAAqB,EAAM,EAAM,EAAa,EAGhD,GAAI,EAAa,CAEf,MAAMC,EAAAA,EAA+B,EAAQ,CAC3C,YACA,WACA,MAAO,EACP,oBACA,YACA,kBACA,QAAU,GAAU,EAAU,EAAM,CACrC,CAAC,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,2BAA2B,IAAO,CAAC,CAC5D,OAIF,MAAMC,EAAAA,EAAwB,EAAQ,CACpC,YACA,WACA,MAAO,EACP,QAAU,GAAU,EAAU,EAAM,CACrC,CAAC,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,2BAA2B,IAAO,CAAC"}
@@ -1,2 +0,0 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-gJm1eQH0.cjs`);const t=require(`./syncConfigurationToTranscend-s-cjtUI3.cjs`);require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const n=require(`./done-input-validation-DGckEJ5a.cjs`);let r=require(`@transcend-io/privacy-types`);async function i({auth:e,transcendUrl:i,folderPath:a,requestIds:o,statuses:s=[r.RequestStatus.Approving,r.RequestStatus.Downloadable],concurrency:c,createdAtBefore:l,createdAtAfter:u,updatedAtBefore:d,updatedAtAfter:f,approveAfterDownload:p}){n.t(this.process.exit),await t.$({transcendUrl:i,auth:e,folderPath:a,requestIds:o,statuses:s,concurrency:c,createdAtBefore:l,createdAtAfter:u,updatedAtBefore:d,updatedAtAfter:f,approveAfterDownload:p})}exports.downloadFiles=i;
- //# sourceMappingURL=impl-Dl78xJu6.cjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"impl-Dl78xJu6.cjs","names":["RequestStatus","downloadPrivacyRequestFiles"],"sources":["../src/commands/request/download-files/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { downloadPrivacyRequestFiles } from '../../../lib/requests';\nimport { RequestStatus } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface DownloadFilesCommandFlags {\n auth: string;\n sombraAuth?: string;\n concurrency: number;\n requestIds?: string[];\n statuses?: RequestStatus[];\n folderPath: string;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n updatedAtBefore?: Date;\n updatedAtAfter?: Date;\n approveAfterDownload: boolean;\n transcendUrl: string;\n}\n\nexport async function downloadFiles(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n folderPath,\n requestIds,\n statuses = [RequestStatus.Approving, RequestStatus.Downloadable],\n concurrency,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n approveAfterDownload,\n }: DownloadFilesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await downloadPrivacyRequestFiles({\n transcendUrl,\n auth,\n folderPath,\n requestIds,\n statuses,\n concurrency,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n approveAfterDownload,\n });\n}\n"],"mappings":"kWAoBA,eAAsB,EAEpB,CACE,OACA,eACA,aACA,aACA,WAAW,CAACA,EAAAA,cAAc,UAAWA,EAAAA,cAAc,aAAa,CAChE,cACA,kBACA,iBACA,kBACA,iBACA,wBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMC,EAAAA,EAA4B,CAChC,eACA,OACA,aACA,aACA,WACA,cACA,kBACA,iBACA,kBACA,iBACA,uBACD,CAAC"}
@@ -1,2 +0,0 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-gJm1eQH0.cjs`);const t=require(`./syncConfigurationToTranscend-s-cjtUI3.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./cron-NLlyCiml.cjs`),i=require(`./done-input-validation-DGckEJ5a.cjs`);let a=require(`colors`);a=e.t(a);let o=require(`io-ts`);o=e.t(o);const s=o.type({"Request Id":o.string});async function c({auth:e,dataSiloId:o,file:c,transcendUrl:l}){i.t(this.process.exit),n.t.info(a.default.magenta(`Reading "${c}" from disk`)),await r.n({requestIds:t.ui(c,s).map(e=>e[`Request Id`]),transcendUrl:l,auth:e,dataSiloId:o})}exports.markRequestDataSilosCompleted=c;
- //# sourceMappingURL=impl-Dvpc-Qa5.cjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"impl-Dvpc-Qa5.cjs","names":["t","markRequestDataSiloIdsCompleted","readCsv"],"sources":["../src/commands/request/system/mark-request-data-silos-completed/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../../context';\nimport colors from 'colors';\nimport * as t from 'io-ts';\n\nimport { logger } from '../../../../logger';\nimport { markRequestDataSiloIdsCompleted } from '../../../../lib/cron';\nimport { readCsv } from '../../../../lib/requests';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\n\nconst RequestIdRow = t.type({\n 'Request Id': t.string,\n});\n\nexport interface MarkRequestDataSilosCompletedCommandFlags {\n auth: string;\n dataSiloId: string;\n file: string;\n transcendUrl: string;\n}\n\nexport async function markRequestDataSilosCompleted(\n this: LocalContext,\n {\n auth,\n dataSiloId,\n file,\n transcendUrl,\n }: MarkRequestDataSilosCompletedCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n logger.info(colors.magenta(`Reading \"${file}\" from disk`));\n const activeResults = readCsv(file, RequestIdRow);\n\n await markRequestDataSiloIdsCompleted({\n requestIds: activeResults.map((request) => request['Request Id']),\n transcendUrl,\n auth,\n dataSiloId,\n });\n}\n"],"mappings":"+ZASA,MAAM,EAAeA,EAAE,KAAK,CAC1B,aAAcA,EAAE,OACjB,CAAC,CASF,eAAsB,EAEpB,CACE,OACA,aACA,OACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,YAAY,EAAK,aAAa,CAAC,CAG1D,MAAMC,EAAAA,EAAgC,CACpC,WAHoBC,EAAAA,GAAQ,EAAM,EAAa,CAGrB,IAAK,GAAY,EAAQ,cAAc,CACjE,eACA,OACA,aACD,CAAC"}
@@ -1,2 +0,0 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-gJm1eQH0.cjs`);const t=require(`./syncConfigurationToTranscend-s-cjtUI3.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./done-input-validation-DGckEJ5a.cjs`);let i=require(`@transcend-io/privacy-types`),a=require(`node:fs`),o=require(`colors`);o=e.t(o);let s=require(`io-ts`);s=e.t(s);let c=require(`cli-progress`);c=e.t(c);async function l({auth:e,file:l,transcendUrl:u,duration:d,subjectType:f,emailColumnName:p,coreIdentifierColumnName:m}){r.t(this.process.exit),(0,a.existsSync)(l)||(n.t.error(o.default.red(`File does not exist: "${l}". Please provide a valid path to a CSV file.`)),this.process.exit(1));try{let r=t.ai(u,e),a=t.ui(l,s.type({[p]:s.string,...m?{[m]:s.string}:{}}));if(!a.length)throw Error(`Input CSV is empty.`);let h=a.map((e,t)=>[e,t]).filter(([e])=>!e[p]?.trim());if(h.length){let e=h.map(([,e])=>e+2).join(`, `);throw Error(`The following rows are missing the required "${p}" column: ${e}`)}if(m){let e=a.map((e,t)=>[e,t]).filter(([e])=>!e[m]?.trim());if(e.length){let t=e.map(([,e])=>e+2).join(`, `);throw Error(`The following rows are missing the required "${m}" column: ${t}`)}}let g=Math.max(1,Math.floor(d/1e3)),_=a.map((e,t)=>{let n=e[p].trim(),r=m?e[m]?.trim():void 0;return{subjectType:f,scopes:[i.SombraStandardScope.PreferenceManagement],expiresIn:g,email:n,...r?{coreIdentifier:r}:{},index:t}}),v=new c.default.SingleBar({},c.default.Presets.shades_classic);v.start(_.length,0);let y=Date.now(),b=await t.Kr(r,_,e=>{v.update(e)});v.update(_.length),v.stop();let x=b.map(({accessToken:e,input:t})=>{if(typeof t.index!=`number`)throw Error(`Internal error: missing input index.`);return{...a[t.index],token:e}});n.t.info(o.default.magenta(`Writing access tokens to file "${l}"...`)),await t.l(l,x,!0);let S=Math.round((Date.now()-y)/1e3);n.t.info(o.default.green(`Successfully generated ${b.length} access tokens to "${l}" in ${S}s!`))}catch(e){n.t.error(o.default.red(`An error occurred while generating access tokens: ${e?.message||String(e)}`)),this.process.exit(1)}}exports.generateAccessTokens=l;
- //# sourceMappingURL=impl-DySeNL1m.cjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"impl-DySeNL1m.cjs","names":["buildTranscendGraphQLClient","readCsv","t","SombraStandardScope","cliProgress","createPreferenceAccessTokens","writeCsv"],"sources":["../src/commands/consent/generate-access-tokens/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport * as t from 'io-ts';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { existsSync } from 'node:fs';\nimport cliProgress from 'cli-progress';\nimport {\n buildTranscendGraphQLClient,\n createPreferenceAccessTokens,\n type PreferenceAccessTokenInputWithIndex,\n} from '../../../lib/graphql';\nimport { readCsv } from '../../../lib/requests';\nimport { SombraStandardScope } from '@transcend-io/privacy-types';\nimport { writeCsv } from '../../../lib/helpers';\n\n/**\n * CLI flags accepted by the `generate-access-tokens` command.\n *\n * These are passed down from the CLI parser into the parent process.\n */\nexport type GenerateAccessTokenCommandFlags = {\n auth: string;\n file: string;\n duration: number;\n transcendUrl: string;\n subjectType: string;\n emailColumnName: string;\n coreIdentifierColumnName?: string;\n};\n\n/**\n * Take in a CSV of user identifiers and generate access tokens for each user.\n *\n * Expected CSV columns:\n * - [emailColumnName] (required)\n * - [coreIdentifierColumnName] (optional)\n *\n * @param this - Bound CLI context (provides process exit + logging).\n * @param flags - CLI options for the run.\n */\nexport async function generateAccessTokens(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n duration,\n subjectType,\n emailColumnName,\n coreIdentifierColumnName,\n }: GenerateAccessTokenCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n if (!existsSync(file)) {\n logger.error(\n colors.red(\n `File does not exist: \"${file}\". Please provide a valid path to a CSV file.`,\n ),\n );\n this.process.exit(1);\n }\n\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Read + parse CSV\n const codec = t.type({\n [emailColumnName]: t.string,\n ...(coreIdentifierColumnName\n ? { [coreIdentifierColumnName]: t.string }\n : {}),\n });\n const rows: Array<Record<string, string>> = readCsv(file, codec);\n if (!rows.length) {\n throw new Error('Input CSV is empty.');\n }\n\n // Ensure emails and core identifiers exist\n const missingEmail = rows\n .map((r, i) => [r, i] as const)\n .filter(([r]) => !r[emailColumnName]?.trim());\n if (missingEmail.length) {\n const rowNumbers = missingEmail\n .map(([, i]) => i + 2) // +2 to account for header row and 0-indexing\n .join(', ');\n throw new Error(\n `The following rows are missing the required \"${emailColumnName}\" column: ${rowNumbers}`,\n );\n }\n if (coreIdentifierColumnName) {\n const missingCoreId = rows\n .map((r, i) => [r, i] as const)\n .filter(([r]) => !r[coreIdentifierColumnName]?.trim());\n if (missingCoreId.length) {\n const rowNumbers = missingCoreId\n .map(([, i]) => i + 2) // +2 to account for header row and 0-indexing\n .join(', ');\n throw new Error(\n `The following rows are missing the required \"${coreIdentifierColumnName}\" column: ${rowNumbers}`,\n );\n }\n }\n\n // Duration provided by CLI is in ms; GraphQL expects seconds\n const expiresInSeconds = Math.max(1, Math.floor(duration / 1000));\n\n // Build inputs for GraphQL\n const inputs = rows.map((r, index): PreferenceAccessTokenInputWithIndex => {\n const email = r[emailColumnName].trim();\n const coreIdentifier = coreIdentifierColumnName\n ? r[coreIdentifierColumnName]?.trim()\n : undefined;\n const scopes = [SombraStandardScope.PreferenceManagement];\n return {\n subjectType,\n scopes,\n expiresIn: expiresInSeconds,\n email,\n ...(coreIdentifier ? { coreIdentifier } : {}),\n index,\n };\n });\n\n // Progress bar\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n progressBar.start(inputs.length, 0);\n\n // Kick off token creation (batched internally)\n const t0 = Date.now();\n const results = await createPreferenceAccessTokens(\n client,\n inputs,\n (progress) => {\n progressBar.update(progress);\n },\n );\n progressBar.update(inputs.length);\n progressBar.stop();\n\n // Prepare output CSV rows\n const outputRows = results.map(({ accessToken, input }) => {\n if (typeof input.index !== 'number') {\n throw new Error('Internal error: missing input index.');\n }\n return {\n ...rows[input.index],\n token: accessToken,\n };\n });\n\n logger.info(colors.magenta(`Writing access tokens to file \"${file}\"...`));\n await writeCsv(file, outputRows, true);\n\n const totalTimeSec = Math.round((Date.now() - t0) / 1000);\n logger.info(\n colors.green(\n `Successfully generated ${results.length} access tokens to \"${file}\" in ${totalTimeSec}s!`,\n ),\n );\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n } catch (err: any) {\n logger.error(\n colors.red(\n `An error occurred while generating access tokens: ${\n err?.message || String(err)\n }`,\n ),\n );\n this.process.exit(1);\n }\n}\n"],"mappings":"meAyCA,eAAsB,EAEpB,CACE,OACA,OACA,eACA,WACA,cACA,kBACA,4BAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,EAClC,EAAA,EAAA,YAAY,EAAK,GACnB,EAAA,EAAO,MACL,EAAA,QAAO,IACL,yBAAyB,EAAK,+CAC/B,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,GAAI,CAEF,IAAM,EAASA,EAAAA,GAA4B,EAAc,EAAK,CASxD,EAAsCC,EAAAA,GAAQ,EANtCC,EAAE,KAAK,EAClB,GAAkBA,EAAE,OACrB,GAAI,EACA,EAAG,GAA2BA,EAAE,OAAQ,CACxC,EAAE,CACP,CAAC,CAC8D,CAChE,GAAI,CAAC,EAAK,OACR,MAAU,MAAM,sBAAsB,CAIxC,IAAM,EAAe,EAClB,KAAK,EAAG,IAAM,CAAC,EAAG,EAAE,CAAU,CAC9B,QAAQ,CAAC,KAAO,CAAC,EAAE,IAAkB,MAAM,CAAC,CAC/C,GAAI,EAAa,OAAQ,CACvB,IAAM,EAAa,EAChB,KAAK,EAAG,KAAO,EAAI,EAAE,CACrB,KAAK,KAAK,CACb,MAAU,MACR,gDAAgD,EAAgB,YAAY,IAC7E,CAEH,GAAI,EAA0B,CAC5B,IAAM,EAAgB,EACnB,KAAK,EAAG,IAAM,CAAC,EAAG,EAAE,CAAU,CAC9B,QAAQ,CAAC,KAAO,CAAC,EAAE,IAA2B,MAAM,CAAC,CACxD,GAAI,EAAc,OAAQ,CACxB,IAAM,EAAa,EAChB,KAAK,EAAG,KAAO,EAAI,EAAE,CACrB,KAAK,KAAK,CACb,MAAU,MACR,gDAAgD,EAAyB,YAAY,IACtF,EAKL,IAAM,EAAmB,KAAK,IAAI,EAAG,KAAK,MAAM,EAAW,IAAK,CAAC,CAG3D,EAAS,EAAK,KAAK,EAAG,IAA+C,CACzE,IAAM,EAAQ,EAAE,GAAiB,MAAM,CACjC,EAAiB,EACnB,EAAE,IAA2B,MAAM,CACnC,IAAA,GAEJ,MAAO,CACL,cACA,OAHa,CAACC,EAAAA,oBAAoB,qBAAqB,CAIvD,UAAW,EACX,QACA,GAAI,EAAiB,CAAE,iBAAgB,CAAG,EAAE,CAC5C,QACD,EACD,CAGI,EAAc,IAAIC,EAAAA,QAAY,UAClC,EAAE,CACFA,EAAAA,QAAY,QAAQ,eACrB,CACD,EAAY,MAAM,EAAO,OAAQ,EAAE,CAGnC,IAAM,EAAK,KAAK,KAAK,CACf,EAAU,MAAMC,EAAAA,GACpB,EACA,EACC,GAAa,CACZ,EAAY,OAAO,EAAS,EAE/B,CACD,EAAY,OAAO,EAAO,OAAO,CACjC,EAAY,MAAM,CAGlB,IAAM,EAAa,EAAQ,KAAK,CAAE,cAAa,WAAY,CACzD,GAAI,OAAO,EAAM,OAAU,SACzB,MAAU,MAAM,uCAAuC,CAEzD,MAAO,CACL,GAAG,EAAK,EAAM,OACd,MAAO,EACR,EACD,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,kCAAkC,EAAK,MAAM,CAAC,CACzE,MAAMC,EAAAA,EAAS,EAAM,EAAY,GAAK,CAEtC,IAAM,EAAe,KAAK,OAAO,KAAK,KAAK,CAAG,GAAM,IAAK,CACzD,EAAA,EAAO,KACL,EAAA,QAAO,MACL,0BAA0B,EAAQ,OAAO,qBAAqB,EAAK,OAAO,EAAa,IACxF,CACF,OAEM,EAAU,CACjB,EAAA,EAAO,MACL,EAAA,QAAO,IACL,qDACE,GAAK,SAAW,OAAO,EAAI,GAE9B,CACF,CACD,KAAK,QAAQ,KAAK,EAAE"}
@@ -1,2 +0,0 @@
- require(`./constants-gJm1eQH0.cjs`);const e=require(`./syncConfigurationToTranscend-s-cjtUI3.cjs`);require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const t=require(`./done-input-validation-DGckEJ5a.cjs`);async function n({auth:n,enricherId:r,actions:i,requestEnricherStatuses:a,requestIds:o,createdAtBefore:s,createdAtAfter:c,updatedAtBefore:l,updatedAtAfter:u,concurrency:d,transcendUrl:f}){t.t(this.process.exit),await e.B({auth:n,enricherId:r,requestActions:i,requestEnricherStatuses:a,requestIds:o,createdAtBefore:s?new Date(s):void 0,createdAtAfter:c?new Date(c):void 0,updatedAtBefore:l?new Date(l):void 0,updatedAtAfter:u?new Date(u):void 0,concurrency:d,transcendUrl:f})}exports.enricherRestart=n;
- //# sourceMappingURL=impl-Fj-Esff-.cjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"impl-Fj-Esff-.cjs","names":["bulkRetryEnrichers"],"sources":["../src/commands/request/enricher-restart/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { bulkRetryEnrichers } from '../../../lib/requests';\nimport type {\n RequestAction,\n RequestEnricherStatus,\n} from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface EnricherRestartCommandFlags {\n auth: string;\n enricherId: string;\n actions?: RequestAction[];\n requestEnricherStatuses?: RequestEnricherStatus[];\n transcendUrl: string;\n concurrency: number;\n requestIds?: string[];\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n updatedAtBefore?: Date;\n updatedAtAfter?: Date;\n}\n\nexport async function enricherRestart(\n this: LocalContext,\n {\n auth,\n enricherId,\n actions,\n requestEnricherStatuses,\n requestIds,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n concurrency,\n transcendUrl,\n }: EnricherRestartCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await bulkRetryEnrichers({\n auth,\n enricherId,\n requestActions: actions,\n requestEnricherStatuses,\n requestIds,\n createdAtBefore: createdAtBefore ? new Date(createdAtBefore) : undefined,\n createdAtAfter: createdAtAfter ? new Date(createdAtAfter) : undefined,\n updatedAtBefore: updatedAtBefore ? new Date(updatedAtBefore) : undefined,\n updatedAtAfter: updatedAtAfter ? new Date(updatedAtAfter) : undefined,\n concurrency,\n transcendUrl,\n });\n}\n"],"mappings":"6QAsBA,eAAsB,EAEpB,CACE,OACA,aACA,UACA,0BACA,aACA,kBACA,iBACA,kBACA,iBACA,cACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAmB,CACvB,OACA,aACA,eAAgB,EAChB,0BACA,aACA,gBAAiB,EAAkB,IAAI,KAAK,EAAgB,CAAG,IAAA,GAC/D,eAAgB,EAAiB,IAAI,KAAK,EAAe,CAAG,IAAA,GAC5D,gBAAiB,EAAkB,IAAI,KAAK,EAAgB,CAAG,IAAA,GAC/D,eAAgB,EAAiB,IAAI,KAAK,EAAe,CAAG,IAAA,GAC5D,cACA,eACD,CAAC"}
@@ -1,2 +0,0 @@
- require(`./constants-gJm1eQH0.cjs`);const e=require(`./syncConfigurationToTranscend-s-cjtUI3.cjs`);require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const t=require(`./done-input-validation-DGckEJ5a.cjs`);async function n({auth:n,transcendUrl:r,enricherIds:i}){t.t(this.process.exit),await e.V({transcendUrl:r,auth:n,enricherIds:i})}exports.skipPreflightJobs=n;
- //# sourceMappingURL=impl-W6tVmm8N.cjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"impl-W6tVmm8N.cjs","names":["skipPreflightJobsHelper"],"sources":["../src/commands/request/skip-preflight-jobs/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { skipPreflightJobs as skipPreflightJobsHelper } from '../../../lib/requests';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface SkipPreflightJobsCommandFlags {\n auth: string;\n enricherIds: string[];\n transcendUrl: string;\n}\n\nexport async function skipPreflightJobs(\n this: LocalContext,\n { auth, transcendUrl, enricherIds }: SkipPreflightJobsCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await skipPreflightJobsHelper({\n transcendUrl,\n auth,\n enricherIds,\n });\n}\n"],"mappings":"6QAUA,eAAsB,EAEpB,CAAE,OAAM,eAAc,eACP,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAwB,CAC5B,eACA,OACA,cACD,CAAC"}
@@ -1,2 +0,0 @@
- require(`./constants-gJm1eQH0.cjs`),require(`./syncConfigurationToTranscend-s-cjtUI3.cjs`),require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const e=require(`./manual-enrichment-DTVJo7hP.cjs`),t=require(`./done-input-validation-DGckEJ5a.cjs`);async function n({auth:n,transcendUrl:r,file:i,concurrency:a,actions:o,sombraAuth:s}){t.t(this.process.exit),await e.i({file:i,transcendUrl:r,concurrency:a,requestActions:o,auth:n,sombraAuth:s})}exports.pullIdentifiers=n;
- //# sourceMappingURL=impl-WphnR0cX.cjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"impl-WphnR0cX.cjs","names":["pullManualEnrichmentIdentifiersToCsv"],"sources":["../src/commands/request/preflight/pull-identifiers/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../../context';\nimport { pullManualEnrichmentIdentifiersToCsv } from '../../../../lib/manual-enrichment';\nimport type { RequestAction } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\n\nexport interface PullIdentifiersCommandFlags {\n auth: string;\n sombraAuth?: string;\n transcendUrl: string;\n file: string;\n actions?: RequestAction[];\n concurrency: number;\n}\n\nexport async function pullIdentifiers(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n file,\n concurrency,\n actions,\n sombraAuth,\n }: PullIdentifiersCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await pullManualEnrichmentIdentifiersToCsv({\n file,\n transcendUrl,\n concurrency,\n requestActions: actions,\n auth,\n sombraAuth,\n });\n}\n"],"mappings":"mTAcA,eAAsB,EAEpB,CACE,OACA,eACA,OACA,cACA,UACA,cAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAqC,CACzC,OACA,eACA,cACA,eAAgB,EAChB,OACA,aACD,CAAC"}
@@ -1,2 +0,0 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-gJm1eQH0.cjs`);const t=require(`./syncConfigurationToTranscend-s-cjtUI3.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./done-input-validation-DGckEJ5a.cjs`),i=require(`./preference-management-Ch77Yxod.cjs`);let a=require(`node:fs`),o=require(`node:path`),s=require(`colors`);s=e.t(s);async function c({auth:e,partition:c,sombraAuth:l,transcendUrl:u,file:d=``,directory:f,dryRun:p,skipExistingRecordCheck:m,receiptFileDir:h,skipWorkflowTriggers:g,forceTriggerWorkflows:_,skipConflictUpdates:v,isSilent:y,attributes:b,concurrency:x}){f&&d&&(n.t.error(s.default.red(`Cannot provide both a directory and a file. Please provide only one.`)),this.process.exit(1)),!d&&!f&&(n.t.error(s.default.red(`A file or directory must be provided. Please provide one using --file=./preferences.csv or --directory=./preferences`)),this.process.exit(1)),r.t(this.process.exit);let S=[];if(f)try{let e=(0,a.readdirSync)(f).filter(e=>e.endsWith(`.csv`));e.length===0&&(n.t.error(s.default.red(`No CSV files found in directory: ${f}`)),this.process.exit(1)),S.push(...e.map(e=>(0,o.join)(f,e)))}catch(e){n.t.error(s.default.red(`Failed to read directory: ${f}`)),n.t.error(s.default.red(e.message)),this.process.exit(1)}else try{d.endsWith(`.csv`)||(n.t.error(s.default.red(`File must be a CSV file`)),this.process.exit(1)),S.push(d)}catch(e){n.t.error(s.default.red(`Failed to access file: ${d}`)),n.t.error(s.default.red(e.message)),this.process.exit(1)}n.t.info(s.default.green(`Processing ${S.length} consent preferences files for partition: ${c}`)),n.t.debug(`Files to process: ${S.join(`, `)}`),m&&n.t.info(s.default.bgYellow(`Skipping existing record check: ${m}`)),await t.As(S,async n=>{await i.a({receiptFilepath:(0,o.join)(h,`${(0,o.basename)(n).replace(`.csv`,``)}-receipts.json`),auth:e,sombraAuth:l,file:n,partition:c,transcendUrl:u,skipConflictUpdates:v,skipWorkflowTriggers:g,skipExistingRecordCheck:m,isSilent:y,dryRun:p,attributes:t.pi(b),forceTriggerWorkflows:_})},{concurrency:x})}exports.uploadPreferences=c;
- //# sourceMappingURL=impl-YNAicr-z.cjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"impl-YNAicr-z.cjs","names":["file","map","uploadPreferenceManagementPreferencesInteractive","splitCsvToList"],"sources":["../src/commands/consent/upload-preferences/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport colors from 'colors';\n\nimport { logger } from '../../../logger';\nimport { uploadPreferenceManagementPreferencesInteractive } from '../../../lib/preference-management';\nimport { splitCsvToList } from '../../../lib/requests';\nimport { readdirSync } from 'node:fs';\nimport { map } from '../../../lib/bluebird';\nimport { basename, join } from 'node:path';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface UploadPreferencesCommandFlags {\n auth: string;\n partition: string;\n sombraAuth?: string;\n transcendUrl: string;\n file?: string;\n directory?: string;\n dryRun: boolean;\n skipExistingRecordCheck: boolean;\n receiptFileDir: string;\n skipWorkflowTriggers: boolean;\n forceTriggerWorkflows: boolean;\n skipConflictUpdates: boolean;\n isSilent: boolean;\n attributes: string;\n receiptFilepath: string;\n concurrency: number;\n}\n\nexport async function uploadPreferences(\n this: LocalContext,\n {\n auth,\n partition,\n sombraAuth,\n transcendUrl,\n file = '',\n directory,\n dryRun,\n skipExistingRecordCheck,\n receiptFileDir,\n skipWorkflowTriggers,\n forceTriggerWorkflows,\n skipConflictUpdates,\n isSilent,\n attributes,\n concurrency,\n }: UploadPreferencesCommandFlags,\n): Promise<void> {\n if (!!directory && !!file) {\n logger.error(\n colors.red(\n 'Cannot provide both a directory and a file. Please provide only one.',\n ),\n );\n this.process.exit(1);\n }\n\n if (!file && !directory) {\n logger.error(\n colors.red(\n 'A file or directory must be provided. Please provide one using --file=./preferences.csv or --directory=./preferences',\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n const files: string[] = [];\n\n if (directory) {\n try {\n const filesInDirectory = readdirSync(directory);\n const csvFiles = filesInDirectory.filter((file) => file.endsWith('.csv'));\n\n if (csvFiles.length === 0) {\n logger.error(\n colors.red(`No CSV files found in directory: ${directory}`),\n );\n this.process.exit(1);\n }\n\n // Add full paths for each CSV file\n files.push(...csvFiles.map((file) => join(directory, file)));\n } catch (err) {\n logger.error(colors.red(`Failed to read directory: ${directory}`));\n logger.error(colors.red((err as Error).message));\n this.process.exit(1);\n }\n } else {\n try {\n // Verify file exists and is a CSV\n if (!file.endsWith('.csv')) {\n logger.error(colors.red('File must be a CSV file'));\n this.process.exit(1);\n }\n files.push(file);\n } catch (err) {\n logger.error(colors.red(`Failed to access file: ${file}`));\n logger.error(colors.red((err as Error).message));\n this.process.exit(1);\n }\n }\n\n logger.info(\n colors.green(\n `Processing ${files.length} consent preferences files for partition: ${partition}`,\n ),\n );\n logger.debug(`Files to process: ${files.join(', ')}`);\n\n if (skipExistingRecordCheck) {\n logger.info(\n colors.bgYellow(\n `Skipping existing record check: ${skipExistingRecordCheck}`,\n ),\n );\n }\n\n await map(\n files,\n async (filePath) => {\n const fileName = basename(filePath).replace('.csv', '');\n await uploadPreferenceManagementPreferencesInteractive({\n receiptFilepath: join(receiptFileDir, `${fileName}-receipts.json`),\n auth,\n sombraAuth,\n file: filePath,\n partition,\n transcendUrl,\n skipConflictUpdates,\n skipWorkflowTriggers,\n skipExistingRecordCheck,\n isSilent,\n dryRun,\n attributes: splitCsvToList(attributes),\n forceTriggerWorkflows,\n });\n },\n { concurrency },\n );\n}\n"],"mappings":"4bA8BA,eAAsB,EAEpB,CACE,OACA,YACA,aACA,eACA,OAAO,GACP,YACA,SACA,0BACA,iBACA,uBACA,wBACA,sBACA,WACA,aACA,eAEa,CACT,GAAe,IACnB,EAAA,EAAO,MACL,EAAA,QAAO,IACL,uEACD,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGlB,CAAC,GAAQ,CAAC,IACZ,EAAA,EAAO,MACL,EAAA,QAAO,IACL,uHACD,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,IAAM,EAAkB,EAAE,CAE1B,GAAI,EACF,GAAI,CAEF,IAAM,GAAA,EAAA,EAAA,aAD+B,EAAU,CACb,OAAQ,GAASA,EAAK,SAAS,OAAO,CAAC,CAErE,EAAS,SAAW,IACtB,EAAA,EAAO,MACL,EAAA,QAAO,IAAI,oCAAoC,IAAY,CAC5D,CACD,KAAK,QAAQ,KAAK,EAAE,EAItB,EAAM,KAAK,GAAG,EAAS,IAAK,IAAA,EAAA,EAAA,MAAc,EAAWA,EAAK,CAAC,CAAC,OACrD,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,6BAA6B,IAAY,CAAC,CAClE,EAAA,EAAO,MAAM,EAAA,QAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,KAAK,QAAQ,KAAK,EAAE,MAGtB,GAAI,CAEG,EAAK,SAAS,OAAO,GACxB,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,0BAA0B,CAAC,CACnD,KAAK,QAAQ,KAAK,EAAE,EAEtB,EAAM,KAAK,EAAK,OACT,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,0BAA0B,IAAO,CAAC,CAC1D,EAAA,EAAO,MAAM,EAAA,QAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,KAAK,QAAQ,KAAK,EAAE,CAIxB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,cAAc,EAAM,OAAO,4CAA4C,IACxE,CACF,CACD,EAAA,EAAO,MAAM,qBAAqB,EAAM,KAAK,KAAK,GAAG,CAEjD,GACF,EAAA,EAAO,KACL,EAAA,QAAO,SACL,mCAAmC,IACpC,CACF,CAGH,MAAMC,EAAAA,GACJ,EACA,KAAO,IAAa,CAElB,MAAMC,EAAAA,EAAiD,CACrD,iBAAA,EAAA,EAAA,MAAsB,EAAgB,IAAA,EAAA,EAAA,UAFd,EAAS,CAAC,QAAQ,OAAQ,GAAG,CAEH,gBAAgB,CAClE,OACA,aACA,KAAM,EACN,YACA,eACA,sBACA,uBACA,0BACA,WACA,SACA,WAAYC,EAAAA,GAAe,EAAW,CACtC,wBACD,CAAC,EAEJ,CAAE,cAAa,CAChB"}
@@ -1,2 +0,0 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-gJm1eQH0.cjs`);const t=require(`./syncConfigurationToTranscend-s-cjtUI3.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./data-inventory-a9Nz9lUO.cjs`),i=require(`./done-input-validation-DGckEJ5a.cjs`);let a=require(`colors`);a=e.t(a);async function o({auth:e,file:o,transcendUrl:s,dataSiloIds:c,subCategories:l,status:u,includeEncryptedSnippets:d}){i.t(this.process.exit);try{let i=await r.t(t.ai(s,e),{dataSiloIds:c,subCategories:l,status:u,includeEncryptedSnippets:d});n.t.info(a.default.magenta(`Writing unstructured discovery files to file "${o}"...`));let f=[];await t.d(o,i.map(e=>{let n={"Entry ID":e.id,"Data Silo ID":e.dataSiloId,"Object Path ID":e.scannedObjectPathId,"Object ID":e.scannedObjectId,...d?{Entry:e.name,"Context Snippet":e.contextSnippet}:{},"Data Category":`${e.dataSubCategory.category}:${e.dataSubCategory.name}`,"Classification Status":e.status,"Confidence Score":e.confidence,"Classification Method":e.classificationMethod,"Classifier Version":e.classifierVersion};return f=t.Ms([...f,...Object.keys(n)]),n}),f)}catch(e){n.t.error(a.default.red(`An error occurred syncing the unstructured discovery files: ${e.message}`)),this.process.exit(1)}n.t.info(a.default.green(`Successfully synced unstructured discovery files to disk at ${o}!`))}exports.pullUnstructuredDiscoveryFiles=o;
- //# sourceMappingURL=impl-jEpWgC2N.cjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"impl-jEpWgC2N.cjs","names":["pullUnstructuredSubDataPointRecommendations","buildTranscendGraphQLClient","writeLargeCsv","uniq"],"sources":["../src/commands/inventory/pull-unstructured-discovery-files/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { uniq } from 'lodash-es';\nimport { pullUnstructuredSubDataPointRecommendations } from '../../../lib/data-inventory';\nimport { buildTranscendGraphQLClient } from '../../../lib/graphql';\nimport { logger } from '../../../logger';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { writeLargeCsv } from '../../../lib/helpers';\n\nexport interface PullUnstructuredDiscoveryFilesCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n subCategories?: string[];\n status?: UnstructuredSubDataPointRecommendationStatus[];\n includeEncryptedSnippets: boolean;\n}\n\nexport async function pullUnstructuredDiscoveryFiles(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n dataSiloIds,\n subCategories,\n status,\n includeEncryptedSnippets,\n }: PullUnstructuredDiscoveryFilesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const entries = await pullUnstructuredSubDataPointRecommendations(client, {\n dataSiloIds,\n subCategories, // TODO: https://transcend.height.app/T-40482 - do by name not ID\n status,\n includeEncryptedSnippets,\n });\n\n logger.info(\n colors.magenta(\n `Writing unstructured discovery files to file \"${file}\"...`,\n ),\n );\n let headers: string[] = [];\n const inputs = entries.map((entry) => {\n const result = {\n 'Entry ID': entry.id,\n 'Data Silo ID': entry.dataSiloId,\n 'Object Path ID': entry.scannedObjectPathId,\n 'Object ID': entry.scannedObjectId,\n ...(includeEncryptedSnippets\n ? { Entry: entry.name, 'Context Snippet': entry.contextSnippet }\n : {}),\n 'Data Category': `${entry.dataSubCategory.category}:${entry.dataSubCategory.name}`,\n 'Classification Status': entry.status,\n 'Confidence Score': entry.confidence,\n 'Classification Method': entry.classificationMethod,\n 'Classifier Version': entry.classifierVersion,\n };\n headers = uniq([...headers, ...Object.keys(result)]);\n return result;\n });\n await writeLargeCsv(file, inputs, headers);\n } catch (err) {\n logger.error(\n colors.red(\n `An error occurred syncing the unstructured discovery files: ${err.message}`,\n ),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced unstructured discovery files to disk at ${file}!`,\n ),\n );\n}\n"],"mappings":"yYAoBA,eAAsB,EAEpB,CACE,OACA,OACA,eACA,cACA,gBACA,SACA,4BAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAI,CAIF,IAAM,EAAU,MAAMA,EAAAA,EAFPC,EAAAA,GAA4B,EAAc,EAAK,CAEY,CACxE,cACA,gBACA,SACA,2BACD,CAAC,CAEF,EAAA,EAAO,KACL,EAAA,QAAO,QACL,iDAAiD,EAAK,MACvD,CACF,CACD,IAAI,EAAoB,EAAE,CAmB1B,MAAMC,EAAAA,EAAc,EAlBL,EAAQ,IAAK,GAAU,CACpC,IAAM,EAAS,CACb,WAAY,EAAM,GAClB,eAAgB,EAAM,WACtB,iBAAkB,EAAM,oBACxB,YAAa,EAAM,gBACnB,GAAI,EACA,CAAE,MAAO,EAAM,KAAM,kBAAmB,EAAM,eAAgB,CAC9D,EAAE,CACN,gBAAiB,GAAG,EAAM,gBAAgB,SAAS,GAAG,EAAM,gBAAgB,OAC5E,wBAAyB,EAAM,OAC/B,mBAAoB,EAAM,WAC1B,wBAAyB,EAAM,qBAC/B,qBAAsB,EAAM,kBAC7B,CAED,MADA,GAAUC,EAAAA,GAAK,CAAC,GAAG,EAAS,GAAG,OAAO,KAAK,EAAO,CAAC,CAAC,CAC7C,GACP,CACgC,EAAQ,OACnC,EAAK,CACZ,EAAA,EAAO,MACL,EAAA,QAAO,IACL,+DAA+D,EAAI,UACpE,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,CAItB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,+DAA+D,EAAK,GACrE,CACF"}
@@ -1,4 +0,0 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./constants-gJm1eQH0.cjs`),n=require(`./syncConfigurationToTranscend-s-cjtUI3.cjs`),r=require(`./enums-BZulhPFa.cjs`),i=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const a=require(`./done-input-validation-DGckEJ5a.cjs`);let o=require(`@transcend-io/privacy-types`),s=require(`@transcend-io/type-utils`),c=require(`node:fs`);c=e.t(c);let l=require(`colors`);l=e.t(l);let u=require(`yargs-parser`);u=e.t(u);let d=require(`got`);d=e.t(d);let f=require(`JSONStream`);f=e.t(f);const p=({hostname:e,auth:t})=>d.default.extend({prefixUrl:`https://${e}`,headers:{accept:`application/json`,"content-type":`application/json`,authorization:`Bearer ${t}`}}),m=Object.values(r.n),h=({assessment:e,index:t,total:n,wrap:r=!0})=>{let i=``;(t===0||r)&&(i=`[
- `);let a=JSON.stringify(e),o=n&&t<n-1&&!r?`,`:``;return i=`${i+a+o}\n`,(n&&t===n-1||r)&&(i+=`
- ]`),i},g=({file:e,assessment:t,index:n,total:r})=>{i.t.info(l.default.magenta(`Writing enriched assessment ${n+1} of ${r} to file "${e}"...`)),n===0?c.default.writeFileSync(e,h({assessment:t,index:n,total:r,wrap:!1})):c.default.appendFileSync(e,h({assessment:t,index:n,total:r,wrap:!1}))},_=async({oneTrust:e})=>{let t=0,n=1,r=0,a=[];for(;t<n;){let{body:c}=await e.get(`api/assessment/v2/assessments?page=${t}&size=2000`),{page:l,content:u}=(0,s.decodeCodec)(o.OneTrustGetListOfAssessmentsResponse,c);a.push(...u??[]),t===0&&(n=l?.totalPages??0,r=l?.totalElements??0),t+=1,i.t.info(`Fetched ${a.length} of ${r} assessments.`)}return a},v=async({oneTrust:e,assessmentId:t})=>{let{body:n}=await e.get(`api/assessment/v2/assessments/${t}/export?ExcludeSkippedQuestions=false`);return(0,s.decodeCodec)(o.OneTrustGetAssessmentResponse,n)},y=async({oneTrust:e,riskId:t})=>{let{body:n}=await e.get(`api/risk/v2/risks/${t}`);return(0,s.decodeCodec)(o.OneTrustGetRiskResponse,n)},b=async({oneTrust:e,userId:t})=>{let{body:n}=await e.get(`api/scim/v2/Users/${t}`);return(0,s.decodeCodec)(o.OneTrustGetUserResponse,n)},x=({assessment:e,assessmentDetails:n,riskDetails:r,creatorDetails:i,approversDetails:a,respondentsDetails:o})=>{let s=t.g(r,`id`),{sections:c,createdBy:l,...u}=n,d=c.map(e=>{let{questions:t,...n}=e,r=t.map(e=>{let{risks:t,...n}=e,r=(t??[]).map(e=>{let t=s[e.riskId];return{...e,...t,level:e.level,impactLevel:e.impactLevel??0}});return{...n,risks:r}});return{...n,questions:r}}),f={...l,active:i?.active??!1,userType:i?.userType??`Internal`,emails:i?.emails??[],title:i?.title??null,givenName:i?.name.givenName??null,familyName:i?.name.familyName??null},p=t.g(a,`id`),m=n.approvers.flatMap(e=>p[e.id]?[{...e,approver:{...e.approver,active:p[e.id].active,userType:p[e.id].userType,emails:p[e.id].emails,title:p[e.id].title,givenName:p[e.id].name.givenName??null,familyName:p[e.id].name.familyName??null}}]:[]),h=t.g(o,`id`),g=n.respondents.filter(e=>!e.name.includes(`@`)).flatMap(e=>h[e.id]?[{...e,active:h[e.id].active,userType:h[e.id].userType,emails:h[e.id].emails,title:h[e.id].title,givenName:h[e.id].name.givenName??null,familyName:h[e.id].name.familyName??null}]:[]);return{...e,...u,approvers:m,respondents:g,createdBy:f,sections:d}},S=async({transcend:e,assessment:t,total:r,index:a})=>{i.t.info(l.default.magenta(`Writing enriched assessment ${a+1} ${r?`of ${r} `:` `}to Transcend...`));let o={json:h({assessment:t,index:a,total:r})};try{await n.i(e,n.wo,{input:o})}catch{i.t.error(l.default.red(`Failed to sync assessment ${a+1} ${r?`of ${r} `:` `}to Transcend.\n\tAssessment Title: ${t.name}. Template Title: ${t.template.name}\n`))}},C=async({oneTrust:e,file:t,dryRun:r,transcend:a})=>{i.t.info(`Getting list of all assessments from OneTrust...`);let o=await _({oneTrust:e}),s={};await n.js(Array.from({length:Math.ceil(o.length/5)},(e,t)=>o.slice(t*5,(t+1)*5)),async(c,u)=>{let d=[];await n.As(c,async(t,r)=>{let a=5*u+r+1;i.t.info(`[assessment ${a} of ${o.length}]: fetching details...`);let{templateName:c,assessmentId:f}=t,p=await v({oneTrust:e,assessmentId:f}),m=p.createdBy.id,h=s[m];if(!h){i.t.info(`[assessment ${a} of ${o.length}]: fetching creator...`);try{h=await b({oneTrust:e,userId:m}),s[m]=h}catch{i.t.warn(l.default.yellow(`[assessment ${a} of ${o.length}]: failed to fetch form creator.\tcreatorId: ${m}. Assessment Title: ${t.name}. Template Title: ${c}`))}}let{approvers:g}=p,_=[];g.length>0&&(i.t.info(`[assessment ${a} of ${o.length}]: fetching approvers...`),_=await n.As(g.map(({id:e})=>e),async n=>{try{let t=s[n];return t||(t=await b({oneTrust:e,userId:n}),s[n]=t),[t]}catch{return i.t.warn(l.default.yellow(`[assessment ${a} of ${o.length}]: failed to fetch a form approver.\tapproverId: ${n}. Assessment Title: ${t.name}. Template Title: ${c}`)),[]}},{concurrency:5}));let{respondents:S}=p,C=S.filter(e=>!e.name.includes(`@`)),w=[];C.length>0&&(i.t.info(`[assessment ${a} of ${o.length}]: fetching respondents...`),w=await n.As(C.map(({id:e})=>e),async n=>{try{let t=s[n];return t||(t=await b({oneTrust:e,userId:n}),s[n]=t),[t]}catch{return i.t.warn(l.default.yellow(`[assessment ${a} of ${o.length}]: failed to fetch a respondent.\trespondentId: ${n}. Assessment Title: ${t.name}. Template Title: ${c}`)),[]}},{concurrency:5}));let T=[],E=n.Ms(p.sections.flatMap(e=>e.questions.flatMap(e=>(e.risks??[]).flatMap(e=>e.riskId))));E.length>0&&(i.t.info(`[assessment ${a} of ${o.length}]: fetching risks...`),T=await n.As(E,t=>y({oneTrust:e,riskId:t}),{concurrency:5}));let D=x({assessment:t,assessmentDetails:p,riskDetails:T,creatorDetails:h,approversDetails:_.flat(),respondentsDetails:w.flat()});d.push(D)},{concurrency:5}),await n.js(d,async(e,n)=>{let i=u*5+n;r&&t?g({assessment:e,index:i,total:o.length,file:t}):a&&await S({assessment:e,transcend:a,total:o.length,index:i})})})},w=({transcend:e,file:t})=>(i.t.info(`Getting list of all assessments from file ${t}...`),new Promise((n,r)=>{let a=(0,c.createReadStream)(t,{encoding:`utf-8`,highWaterMark:64*1024}),u=f.default.parse(`*`),d=0;a.pipe(u),u.on(`data`,async n=>{try{u.pause(),await S({assessment:(0,s.decodeCodec)(o.OneTrustEnrichedAssessment,n),transcend:e,index:d}),d+=1,u.resume()}catch(e){i.t.error(l.default.red(`Failed to parse the assessment ${d} from file '${t}': ${e.message}.`))}}),u.on(`end`,()=>{i.t.info(`Finished processing ${d} assessments from file ${t}`),n()}),u.on(`error`,e=>{i.t.error(l.default.red(`Error parsing file '${t}': ${e.message}`)),r(e)}),a.on(`error`,e=>{i.t.error(l.default.red(`Error reading file '${t}': ${e.message}`)),r(e)})}));async function T({hostname:e,oneTrustAuth:t,source:o,transcendAuth:s,transcendUrl:c,resource:u,file:d,dryRun:f,debug:m}){if(!f&&!s)throw Error('Must specify a "transcendAuth" parameter to sync resources to Transcend. e.g. --transcendAuth=${TRANSCEND_API_KEY}');if(f&&!d)throw Error(`Must set a "file" parameter when "dryRun" is "true". e.g. --file=./oneTrustAssessments.json`);if(d){let e=d.split(`.`);if(e.length<2)throw Error(`The "file" parameter has an invalid format. Expected a path with extensions. e.g. --file=./pathToFile.json.`);if(e.at(-1)!==r.t.Json)throw Error(`Expected the format of the "file" parameters '${d}' to be '${r.t.Json}', but got '${e.at(-1)}'.`)}if(o===r.r.OneTrust){if(!e)throw Error(`Missing required parameter "hostname". e.g. --hostname=customer.my.onetrust.com`);if(!t)throw Error(`Missing required parameter "oneTrustAuth". e.g. --oneTrustAuth=$ONE_TRUST_AUTH_TOKEN`)}else{if(!d)throw Error(`Must specify a "file" parameter to read the OneTrust assessments from. e.g. --source=./oneTrustAssessments.json`);if(f)throw Error(`Cannot read and write to a file simultaneously. Emit the "source" parameter or set it to ${r.r.OneTrust} if "dryRun" is enabled.`)}a.t(this.process.exit);let h=e&&t?p({hostname:e,auth:t}):void 0,g=c&&s?n.ai(c,s):void 0;try{u===r.n.Assessments&&(o===r.r.OneTrust&&h?await C({oneTrust:h,file:d,dryRun:f,...g&&{transcend:g}}):o===r.r.File&&d&&g&&await w({file:d,transcend:g}))}catch(e){throw Error(`An error occurred syncing the resource ${u} from OneTrust: ${m?e.stack:e.message}`)}i.t.info(l.default.green(`Successfully synced OneTrust ${u} to ${f?`disk at "${d}"`:`Transcend`}!`))}exports.syncOt=T;
- //# sourceMappingURL=impl-x2P-_Pk2.cjs.map