@transcend-io/cli 7.0.0-alpha.13 → 7.0.0-alpha.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (272)
  1. package/dist/bin/bash-complete.cjs +4 -0
  2. package/dist/bin/bash-complete.cjs.map +1 -0
  3. package/dist/bin/cli.cjs +3 -0
  4. package/dist/bin/cli.cjs.map +1 -0
  5. package/dist/bin/deprecated-command.cjs +7 -0
  6. package/dist/bin/deprecated-command.cjs.map +1 -0
  7. package/dist/chunk-5TWQU6YF.cjs +2844 -0
  8. package/dist/chunk-5TWQU6YF.cjs.map +1 -0
  9. package/dist/chunk-6HKJGDAM.cjs +2 -0
  10. package/dist/chunk-6HKJGDAM.cjs.map +1 -0
  11. package/dist/chunk-ABQQ23HK.cjs +2 -0
  12. package/dist/chunk-ABQQ23HK.cjs.map +1 -0
  13. package/dist/chunk-BY7W4UQF.cjs +2 -0
  14. package/dist/chunk-BY7W4UQF.cjs.map +1 -0
  15. package/dist/chunk-EG4L6YAJ.cjs +2 -0
  16. package/dist/chunk-EG4L6YAJ.cjs.map +1 -0
  17. package/dist/chunk-IBTP5OXE.cjs +2 -0
  18. package/dist/chunk-IBTP5OXE.cjs.map +1 -0
  19. package/dist/chunk-KEH7CZKK.cjs +75 -0
  20. package/dist/chunk-KEH7CZKK.cjs.map +1 -0
  21. package/dist/chunk-KOV2SQO2.cjs +4 -0
  22. package/dist/chunk-KOV2SQO2.cjs.map +1 -0
  23. package/dist/chunk-KXRTLA5U.cjs +94 -0
  24. package/dist/chunk-KXRTLA5U.cjs.map +1 -0
  25. package/dist/chunk-LZZZALF3.cjs +4 -0
  26. package/dist/chunk-LZZZALF3.cjs.map +1 -0
  27. package/dist/chunk-PBT4YWG7.cjs +9 -0
  28. package/dist/chunk-PBT4YWG7.cjs.map +1 -0
  29. package/dist/chunk-PUWWHSAM.cjs +2 -0
  30. package/dist/chunk-PUWWHSAM.cjs.map +1 -0
  31. package/dist/chunk-RPT6OXNL.cjs +3 -0
  32. package/dist/chunk-RPT6OXNL.cjs.map +1 -0
  33. package/dist/chunk-SAEKBZGF.cjs +2 -0
  34. package/dist/chunk-SAEKBZGF.cjs.map +1 -0
  35. package/dist/chunk-SEJ2UCU4.cjs +2 -0
  36. package/dist/chunk-SEJ2UCU4.cjs.map +1 -0
  37. package/dist/chunk-SR7OAEWS.cjs +2 -0
  38. package/dist/chunk-SR7OAEWS.cjs.map +1 -0
  39. package/dist/chunk-T462ONFX.cjs +2 -0
  40. package/dist/chunk-T462ONFX.cjs.map +1 -0
  41. package/dist/chunk-TD7ADMVO.cjs +2 -0
  42. package/dist/chunk-TD7ADMVO.cjs.map +1 -0
  43. package/dist/chunk-UEGX6GZ2.cjs +2 -0
  44. package/dist/chunk-UEGX6GZ2.cjs.map +1 -0
  45. package/dist/chunk-XWRWKB4H.cjs +12 -0
  46. package/dist/chunk-XWRWKB4H.cjs.map +1 -0
  47. package/dist/chunk-YGYXLCPI.cjs +2 -0
  48. package/dist/chunk-YGYXLCPI.cjs.map +1 -0
  49. package/dist/chunk-ZFZPE5BJ.cjs +2 -0
  50. package/dist/chunk-ZFZPE5BJ.cjs.map +1 -0
  51. package/dist/chunk-ZUNVPK23.cjs +2 -0
  52. package/dist/chunk-ZUNVPK23.cjs.map +1 -0
  53. package/dist/chunk-ZVK4HIDF.cjs +6 -0
  54. package/dist/chunk-ZVK4HIDF.cjs.map +1 -0
  55. package/dist/impl-2E3PAZHM.cjs +2 -0
  56. package/dist/impl-2E3PAZHM.cjs.map +1 -0
  57. package/dist/impl-2I7MIYNB.cjs +2 -0
  58. package/dist/impl-2I7MIYNB.cjs.map +1 -0
  59. package/dist/impl-7H4CBYYB.cjs +12 -0
  60. package/dist/impl-7H4CBYYB.cjs.map +1 -0
  61. package/dist/impl-7KOHW25M.cjs +6 -0
  62. package/dist/impl-7KOHW25M.cjs.map +1 -0
  63. package/dist/impl-7M4OQEYH.cjs +2 -0
  64. package/dist/impl-7M4OQEYH.cjs.map +1 -0
  65. package/dist/impl-AHGBQO5E.cjs +2 -0
  66. package/dist/impl-AHGBQO5E.cjs.map +1 -0
  67. package/dist/impl-AJB3VAJO.cjs +2 -0
  68. package/dist/impl-AJB3VAJO.cjs.map +1 -0
  69. package/dist/impl-B3EPOCAJ.cjs +2 -0
  70. package/dist/impl-B3EPOCAJ.cjs.map +1 -0
  71. package/dist/impl-BDRTVVF2.cjs +2 -0
  72. package/dist/impl-BDRTVVF2.cjs.map +1 -0
  73. package/dist/impl-CBBAWKT7.cjs +2 -0
  74. package/dist/impl-CBBAWKT7.cjs.map +1 -0
  75. package/dist/impl-DI7FIYZI.cjs +2 -0
  76. package/dist/impl-DI7FIYZI.cjs.map +1 -0
  77. package/dist/impl-DPLGIVNZ.cjs +2 -0
  78. package/dist/impl-DPLGIVNZ.cjs.map +1 -0
  79. package/dist/impl-DRJ7E2FN.cjs +2 -0
  80. package/dist/impl-DRJ7E2FN.cjs.map +1 -0
  81. package/dist/impl-F3CYEECC.cjs +2 -0
  82. package/dist/impl-F3CYEECC.cjs.map +1 -0
  83. package/dist/impl-FQF3AWHR.cjs +2 -0
  84. package/dist/impl-FQF3AWHR.cjs.map +1 -0
  85. package/dist/impl-FQYT2XK3.cjs +2 -0
  86. package/dist/impl-FQYT2XK3.cjs.map +1 -0
  87. package/dist/impl-HDVI2F6D.cjs +2 -0
  88. package/dist/impl-HDVI2F6D.cjs.map +1 -0
  89. package/dist/impl-HEGJOPZK.cjs +2 -0
  90. package/dist/impl-HEGJOPZK.cjs.map +1 -0
  91. package/dist/impl-ITBKGH3N.cjs +2 -0
  92. package/dist/impl-ITBKGH3N.cjs.map +1 -0
  93. package/dist/impl-K7CHXRVJ.cjs +2 -0
  94. package/dist/impl-K7CHXRVJ.cjs.map +1 -0
  95. package/dist/impl-NFRFFLXM.cjs +2 -0
  96. package/dist/impl-NFRFFLXM.cjs.map +1 -0
  97. package/dist/impl-NIMVACZO.cjs +2 -0
  98. package/dist/impl-NIMVACZO.cjs.map +1 -0
  99. package/dist/impl-OBZFKR4D.cjs +2 -0
  100. package/dist/impl-OBZFKR4D.cjs.map +1 -0
  101. package/dist/impl-P7P4PHKK.cjs +2 -0
  102. package/dist/impl-P7P4PHKK.cjs.map +1 -0
  103. package/dist/impl-PKVWUYYX.cjs +2 -0
  104. package/dist/impl-PKVWUYYX.cjs.map +1 -0
  105. package/dist/impl-PUU55WCF.cjs +2 -0
  106. package/dist/impl-PUU55WCF.cjs.map +1 -0
  107. package/dist/impl-QTYOX6E5.cjs +2 -0
  108. package/dist/impl-QTYOX6E5.cjs.map +1 -0
  109. package/dist/impl-RNWZPIFE.cjs +2 -0
  110. package/dist/impl-RNWZPIFE.cjs.map +1 -0
  111. package/dist/impl-S3OTWWYU.cjs +6 -0
  112. package/dist/impl-S3OTWWYU.cjs.map +1 -0
  113. package/dist/impl-TFECR52S.cjs +2 -0
  114. package/dist/impl-TFECR52S.cjs.map +1 -0
  115. package/dist/impl-VBCRH4YN.cjs +2 -0
  116. package/dist/impl-VBCRH4YN.cjs.map +1 -0
  117. package/dist/impl-W5XYDQXZ.cjs +6 -0
  118. package/dist/{impl-CCUCFOCW.js.map → impl-W5XYDQXZ.cjs.map} +1 -1
  119. package/dist/impl-WBDBDZHE.cjs +2 -0
  120. package/dist/impl-WBDBDZHE.cjs.map +1 -0
  121. package/dist/impl-WDYARUUL.cjs +7 -0
  122. package/dist/impl-WDYARUUL.cjs.map +1 -0
  123. package/dist/impl-WJ7VUNYC.cjs +2 -0
  124. package/dist/impl-WJ7VUNYC.cjs.map +1 -0
  125. package/dist/impl-WMHBCVAN.cjs +2 -0
  126. package/dist/impl-WMHBCVAN.cjs.map +1 -0
  127. package/dist/impl-WYJHZILF.cjs +9 -0
  128. package/dist/impl-WYJHZILF.cjs.map +1 -0
  129. package/dist/impl-ZBKQ6GRT.cjs +2 -0
  130. package/dist/impl-ZBKQ6GRT.cjs.map +1 -0
  131. package/dist/impl-ZSFEQ5UA.cjs +4 -0
  132. package/dist/impl-ZSFEQ5UA.cjs.map +1 -0
  133. package/dist/index.cjs +5 -0
  134. package/dist/index.cjs.map +1 -0
  135. package/dist/{index.d.ts → index.d.cts} +1 -1
  136. package/package.json +49 -45
  137. package/dist/bin/bash-complete.js +0 -4
  138. package/dist/bin/bash-complete.js.map +0 -1
  139. package/dist/bin/cli.js +0 -3
  140. package/dist/bin/cli.js.map +0 -1
  141. package/dist/bin/deprecated-command.js +0 -7
  142. package/dist/bin/deprecated-command.js.map +0 -1
  143. package/dist/chunk-24SSWBXM.js +0 -4
  144. package/dist/chunk-24SSWBXM.js.map +0 -1
  145. package/dist/chunk-347UQP43.js +0 -2
  146. package/dist/chunk-347UQP43.js.map +0 -1
  147. package/dist/chunk-43JWXG77.js +0 -2
  148. package/dist/chunk-43JWXG77.js.map +0 -1
  149. package/dist/chunk-4GLITB3Y.js +0 -2
  150. package/dist/chunk-4GLITB3Y.js.map +0 -1
  151. package/dist/chunk-6P4FW6XR.js +0 -3
  152. package/dist/chunk-6P4FW6XR.js.map +0 -1
  153. package/dist/chunk-72U6ETHG.js +0 -2
  154. package/dist/chunk-72U6ETHG.js.map +0 -1
  155. package/dist/chunk-7QHA6ZIV.js +0 -2
  156. package/dist/chunk-7QHA6ZIV.js.map +0 -1
  157. package/dist/chunk-ARVEJERC.js +0 -2
  158. package/dist/chunk-ARVEJERC.js.map +0 -1
  159. package/dist/chunk-CBAHSBSW.js +0 -2
  160. package/dist/chunk-CBAHSBSW.js.map +0 -1
  161. package/dist/chunk-HH2PQ3PQ.js +0 -2
  162. package/dist/chunk-HH2PQ3PQ.js.map +0 -1
  163. package/dist/chunk-INLBXSQE.js +0 -9
  164. package/dist/chunk-INLBXSQE.js.map +0 -1
  165. package/dist/chunk-KRN6Q433.js +0 -75
  166. package/dist/chunk-KRN6Q433.js.map +0 -1
  167. package/dist/chunk-L5ULN3IT.js +0 -2
  168. package/dist/chunk-L5ULN3IT.js.map +0 -1
  169. package/dist/chunk-L7ZIX4SU.js +0 -2
  170. package/dist/chunk-L7ZIX4SU.js.map +0 -1
  171. package/dist/chunk-LAYHULHH.js +0 -2
  172. package/dist/chunk-LAYHULHH.js.map +0 -1
  173. package/dist/chunk-MA4JWWRO.js +0 -6
  174. package/dist/chunk-MA4JWWRO.js.map +0 -1
  175. package/dist/chunk-MVDOKJ6J.js +0 -2
  176. package/dist/chunk-MVDOKJ6J.js.map +0 -1
  177. package/dist/chunk-OEB7WG3G.js +0 -4
  178. package/dist/chunk-OEB7WG3G.js.map +0 -1
  179. package/dist/chunk-SF46ZLPT.js +0 -2
  180. package/dist/chunk-SF46ZLPT.js.map +0 -1
  181. package/dist/chunk-TDBKATQK.js +0 -2831
  182. package/dist/chunk-TDBKATQK.js.map +0 -1
  183. package/dist/chunk-WSDWILYI.js +0 -2
  184. package/dist/chunk-WSDWILYI.js.map +0 -1
  185. package/dist/chunk-XNR74SBS.js +0 -12
  186. package/dist/chunk-XNR74SBS.js.map +0 -1
  187. package/dist/chunk-ZLRUIEVQ.js +0 -94
  188. package/dist/chunk-ZLRUIEVQ.js.map +0 -1
  189. package/dist/chunk-ZTD7APNF.js +0 -2
  190. package/dist/chunk-ZTD7APNF.js.map +0 -1
  191. package/dist/impl-25VWUB6L.js +0 -2
  192. package/dist/impl-25VWUB6L.js.map +0 -1
  193. package/dist/impl-3M5R6G5M.js +0 -6
  194. package/dist/impl-3M5R6G5M.js.map +0 -1
  195. package/dist/impl-5OEPVWPL.js +0 -2
  196. package/dist/impl-5OEPVWPL.js.map +0 -1
  197. package/dist/impl-5YV7K446.js +0 -2
  198. package/dist/impl-5YV7K446.js.map +0 -1
  199. package/dist/impl-AFRHPZGF.js +0 -2
  200. package/dist/impl-AFRHPZGF.js.map +0 -1
  201. package/dist/impl-CCUCFOCW.js +0 -6
  202. package/dist/impl-E36SWF4Z.js +0 -2
  203. package/dist/impl-E36SWF4Z.js.map +0 -1
  204. package/dist/impl-E5WXNV47.js +0 -2
  205. package/dist/impl-E5WXNV47.js.map +0 -1
  206. package/dist/impl-EVICJMI3.js +0 -2
  207. package/dist/impl-EVICJMI3.js.map +0 -1
  208. package/dist/impl-G5TGSB4H.js +0 -2
  209. package/dist/impl-G5TGSB4H.js.map +0 -1
  210. package/dist/impl-GNG2DOKG.js +0 -2
  211. package/dist/impl-GNG2DOKG.js.map +0 -1
  212. package/dist/impl-GNSHZ3OL.js +0 -2
  213. package/dist/impl-GNSHZ3OL.js.map +0 -1
  214. package/dist/impl-GPCURY4M.js +0 -7
  215. package/dist/impl-GPCURY4M.js.map +0 -1
  216. package/dist/impl-GZRQOFY6.js +0 -2
  217. package/dist/impl-GZRQOFY6.js.map +0 -1
  218. package/dist/impl-HEC3SVYP.js +0 -2
  219. package/dist/impl-HEC3SVYP.js.map +0 -1
  220. package/dist/impl-HH24GIMG.js +0 -2
  221. package/dist/impl-HH24GIMG.js.map +0 -1
  222. package/dist/impl-I24OLEN5.js +0 -2
  223. package/dist/impl-I24OLEN5.js.map +0 -1
  224. package/dist/impl-IAXNYDJT.js +0 -2
  225. package/dist/impl-IAXNYDJT.js.map +0 -1
  226. package/dist/impl-J33PI3PK.js +0 -2
  227. package/dist/impl-J33PI3PK.js.map +0 -1
  228. package/dist/impl-JZDUGI7W.js +0 -2
  229. package/dist/impl-JZDUGI7W.js.map +0 -1
  230. package/dist/impl-LZ3HI26W.js +0 -4
  231. package/dist/impl-LZ3HI26W.js.map +0 -1
  232. package/dist/impl-MEDPDKAE.js +0 -2
  233. package/dist/impl-MEDPDKAE.js.map +0 -1
  234. package/dist/impl-MLS6TI7N.js +0 -2
  235. package/dist/impl-MLS6TI7N.js.map +0 -1
  236. package/dist/impl-NI7KSBSS.js +0 -2
  237. package/dist/impl-NI7KSBSS.js.map +0 -1
  238. package/dist/impl-OM6EKANE.js +0 -9
  239. package/dist/impl-OM6EKANE.js.map +0 -1
  240. package/dist/impl-T4WDJSWZ.js +0 -2
  241. package/dist/impl-T4WDJSWZ.js.map +0 -1
  242. package/dist/impl-U37YTCPW.js +0 -2
  243. package/dist/impl-U37YTCPW.js.map +0 -1
  244. package/dist/impl-U5555HGJ.js +0 -12
  245. package/dist/impl-U5555HGJ.js.map +0 -1
  246. package/dist/impl-UHFSVVIS.js +0 -6
  247. package/dist/impl-UHFSVVIS.js.map +0 -1
  248. package/dist/impl-UIVTSO57.js +0 -2
  249. package/dist/impl-UIVTSO57.js.map +0 -1
  250. package/dist/impl-UQYL5PXR.js +0 -2
  251. package/dist/impl-UQYL5PXR.js.map +0 -1
  252. package/dist/impl-V5QTKTU4.js +0 -2
  253. package/dist/impl-V5QTKTU4.js.map +0 -1
  254. package/dist/impl-WDPWOOFV.js +0 -2
  255. package/dist/impl-WDPWOOFV.js.map +0 -1
  256. package/dist/impl-WZAF2LD3.js +0 -2
  257. package/dist/impl-WZAF2LD3.js.map +0 -1
  258. package/dist/impl-XF26H3HG.js +0 -2
  259. package/dist/impl-XF26H3HG.js.map +0 -1
  260. package/dist/impl-XQY2Q5R6.js +0 -2
  261. package/dist/impl-XQY2Q5R6.js.map +0 -1
  262. package/dist/impl-YB2LON7S.js +0 -2
  263. package/dist/impl-YB2LON7S.js.map +0 -1
  264. package/dist/impl-YNGQIWW7.js +0 -2
  265. package/dist/impl-YNGQIWW7.js.map +0 -1
  266. package/dist/impl-ZA3PKNQN.js +0 -2
  267. package/dist/impl-ZA3PKNQN.js.map +0 -1
  268. package/dist/index.js +0 -5
  269. package/dist/index.js.map +0 -1
  270. /package/dist/bin/{bash-complete.d.ts → bash-complete.d.cts} +0 -0
  271. /package/dist/bin/{cli.d.ts → cli.d.cts} +0 -0
  272. /package/dist/bin/{deprecated-command.d.ts → deprecated-command.d.cts} +0 -0
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/commands/request/cron/pull-identifiers/impl.ts"],"sourcesContent":["import type { LocalContext } from '@/context';\nimport colors from 'colors';\n\nimport { logger } from '@/logger';\nimport { uniq } from 'lodash-es';\nimport {\n CsvFormattedIdentifier,\n parseFilePath,\n pullChunkedCustomSiloOutstandingIdentifiers,\n writeCsv,\n} from '@/lib/cron';\nimport { RequestAction } from '@transcend-io/privacy-types';\n\ninterface PullIdentifiersCommandFlags {\n file: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n dataSiloId: string;\n actions: RequestAction[];\n pageLimit: number;\n skipRequestCount: boolean;\n chunkSize: number;\n}\n\nexport async function pullIdentifiers(\n this: LocalContext,\n {\n file,\n transcendUrl,\n auth,\n sombraAuth,\n dataSiloId,\n actions,\n pageLimit,\n skipRequestCount,\n chunkSize,\n }: PullIdentifiersCommandFlags,\n): Promise<void> {\n if (skipRequestCount) {\n logger.info(\n colors.yellow(\n 'Skipping request count as requested. This may help speed up the call.',\n ),\n );\n }\n\n if (\n Number.isNaN(chunkSize) ||\n chunkSize <= 0 ||\n chunkSize % pageLimit !== 0\n ) {\n logger.error(\n colors.red(\n `Invalid chunk size: \"${chunkSize}\". Must be a positive integer that is a multiple of ${pageLimit}.`,\n ),\n );\n process.exit(1);\n }\n\n const { baseName, extension } = parseFilePath(file);\n let fileCount = 0;\n\n const onSave = (chunk: CsvFormattedIdentifier[]): Promise<void> => {\n const numberedFileName = `${baseName}-${fileCount}${extension}`;\n logger.info(\n colors.blue(\n `Saving ${chunk.length} identifiers to file \"${numberedFileName}\"`,\n ),\n );\n\n const headers = uniq(chunk.map((d) => Object.keys(d)).flat());\n writeCsv(numberedFileName, chunk, headers);\n logger.info(\n colors.green(\n `Successfully wrote ${chunk.length} identifiers to file \"${numberedFileName}\"`,\n ),\n );\n fileCount += 1;\n return Promise.resolve();\n };\n\n // Pull down outstanding identifiers\n await pullChunkedCustomSiloOutstandingIdentifiers({\n transcendUrl,\n apiPageSize: pageLimit,\n savePageSize: chunkSize,\n onSave,\n actions,\n auth,\n sombraAuth,\n dataSiloId,\n skipRequestCount,\n });\n}\n"],"mappings":"2QACA,OAAOA,MAAY,SAGnB,OAAS,QAAAC,MAAY,YAqBrB,eAAsBC,EAEpB,CACE,KAAAC,EACA,aAAAC,EACA,KAAAC,EACA,WAAAC,EACA,WAAAC,EACA,QAAAC,EACA,UAAAC,EACA,iBAAAC,EACA,UAAAC,CACF,EACe,CACXD,GACFE,EAAO,KACLC,EAAO,OACL,uEACF,CACF,GAIA,OAAO,MAAMF,CAAS,GACtBA,GAAa,GACbA,EAAYF,IAAc,KAE1BG,EAAO,MACLC,EAAO,IACL,wBAAwBF,CAAS,uDAAuDF,CAAS,GACnG,CACF,EACA,QAAQ,KAAK,CAAC,GAGhB,GAAM,CAAE,SAAAK,EAAU,UAAAC,CAAU,EAAIC,EAAcb,CAAI,EAC9Cc,EAAY,EAsBhB,MAAMC,EAA4C,CAChD,aAAAd,EACA,YAAaK,EACb,aAAcE,EACd,OAxBcQ,GAAmD,CACjE,IAAMC,EAAmB,GAAGN,CAAQ,IAAIG,CAAS,GAAGF,CAAS,GAC7DH,EAAO,KACLC,EAAO,KACL,UAAUM,EAAM,MAAM,yBAAyBC,CAAgB,GACjE,CACF,EAEA,IAAMC,EAAUC,EAAKH,EAAM,IAAKI,GAAM,OAAO,KAAKA,CAAC,CAAC,EAAE,KAAK,CAAC,EAC5D,OAAAC,EAASJ,EAAkBD,EAAOE,CAAO,EACzCT,EAAO,KACLC,EAAO,MACL,sBAAsBM,EAAM,MAAM,yBAAyBC,CAAgB,GAC7E,CACF,EACAH,GAAa,EACN,QAAQ,QAAQ,CACzB,EAQE,QAAAT,EACA,KAAAH,EACA,WAAAC,EACA,WAAAC,EACA,iBAAAG,CACF,CAAC,CACH","names":["colors","uniq","pullIdentifiers","file","transcendUrl","auth","sombraAuth","dataSiloId","actions","pageLimit","skipRequestCount","chunkSize","logger","colors","baseName","extension","parseFilePath","fileCount","pullChunkedCustomSiloOutstandingIdentifiers","chunk","numberedFileName","headers","uniq","d","writeCsv"]}
@@ -1,2 +0,0 @@
- import{b as r}from"./chunk-MVDOKJ6J.js";import{g as t}from"./chunk-XNR74SBS.js";import"./chunk-WSDWILYI.js";import"./chunk-ZTD7APNF.js";import"./chunk-LAYHULHH.js";import"./chunk-347UQP43.js";import"./chunk-TDBKATQK.js";import{a as n}from"./chunk-43JWXG77.js";import"./chunk-L5ULN3IT.js";import"./chunk-SF46ZLPT.js";import"./chunk-ARVEJERC.js";import e from"colors";import{writeFileSync as g}from"fs";async function C({auth:d,xdiLocation:s,file:i,removeIpAddresses:c,domainBlockList:o,xdiAllowedCommands:l,transcendUrl:a}){let m=await r(d),{syncGroups:p,html:f}=await t(m,{xdiLocation:s,transcendUrl:a,removeIpAddresses:c,domainBlockList:o.length>0?o:void 0,xdiAllowedCommands:l});n.info(e.green(`Successfully constructed sync endpoint for sync groups: ${JSON.stringify(p,null,2)}`)),g(i,f),n.info(e.green(`Wrote configuration to file "${i}"!`))}export{C as buildXdiSyncEndpoint};
- //# sourceMappingURL=impl-G5TGSB4H.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/commands/consent/build-xdi-sync-endpoint/impl.ts"],"sourcesContent":["import type { LocalContext } from '@/context';\nimport { logger } from '@/logger';\nimport colors from 'colors';\nimport { writeFileSync } from 'fs';\nimport { validateTranscendAuth } from '@/lib/api-keys';\nimport { buildXdiSyncEndpoint as buildXdiSyncEndpointHelper } from '@/lib/consent-manager';\n\ninterface BuildXdiSyncEndpointCommandFlags {\n auth: string;\n xdiLocation: string;\n file: string;\n removeIpAddresses: boolean;\n domainBlockList: string[];\n xdiAllowedCommands: string;\n transcendUrl: string;\n}\n\nexport async function buildXdiSyncEndpoint(\n this: LocalContext,\n {\n auth,\n xdiLocation,\n file,\n removeIpAddresses,\n domainBlockList,\n xdiAllowedCommands,\n transcendUrl,\n }: BuildXdiSyncEndpointCommandFlags,\n): Promise<void> {\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Build the sync endpoint\n const { syncGroups, html } = await buildXdiSyncEndpointHelper(apiKeyOrList, {\n xdiLocation,\n transcendUrl,\n removeIpAddresses,\n domainBlockList: domainBlockList.length > 0 ? domainBlockList : undefined,\n xdiAllowedCommands,\n });\n\n // Log success\n logger.info(\n colors.green(\n `Successfully constructed sync endpoint for sync groups: ${JSON.stringify(\n syncGroups,\n null,\n 2,\n )}`,\n ),\n );\n\n // Write to disk\n writeFileSync(file, html);\n logger.info(colors.green(`Wrote configuration to file \"${file}\"!`));\n}\n"],"mappings":"wVAEA,OAAOA,MAAY,SACnB,OAAS,iBAAAC,MAAqB,KAc9B,eAAsBC,EAEpB,CACE,KAAAC,EACA,YAAAC,EACA,KAAAC,EACA,kBAAAC,EACA,gBAAAC,EACA,mBAAAC,EACA,aAAAC,CACF,EACe,CAEf,IAAMC,EAAe,MAAMC,EAAsBR,CAAI,EAG/C,CAAE,WAAAS,EAAY,KAAAC,CAAK,EAAI,MAAMX,EAA2BQ,EAAc,CAC1E,YAAAN,EACA,aAAAK,EACA,kBAAAH,EACA,gBAAiBC,EAAgB,OAAS,EAAIA,EAAkB,OAChE,mBAAAC,CACF,CAAC,EAGDM,EAAO,KACLC,EAAO,MACL,2DAA2D,KAAK,UAC9DH,EACA,KACA,CACF,CAAC,EACH,CACF,EAGAI,EAAcX,EAAMQ,CAAI,EACxBC,EAAO,KAAKC,EAAO,MAAM,gCAAgCV,CAAI,IAAI,CAAC,CACpE","names":["colors","writeFileSync","buildXdiSyncEndpoint","auth","xdiLocation","file","removeIpAddresses","domainBlockList","xdiAllowedCommands","transcendUrl","apiKeyOrList","validateTranscendAuth","syncGroups","html","logger","colors","writeFileSync"]}
@@ -1,2 +0,0 @@
- import{b as d}from"./chunk-KRN6Q433.js";import"./chunk-7QHA6ZIV.js";import{c as u}from"./chunk-MA4JWWRO.js";import"./chunk-ZTD7APNF.js";import{pe as c}from"./chunk-TDBKATQK.js";import{a as e}from"./chunk-43JWXG77.js";import"./chunk-L5ULN3IT.js";import"./chunk-SF46ZLPT.js";import"./chunk-ARVEJERC.js";import i from"colors";import{uniq as b}from"lodash-es";async function x({auth:l,file:s,transcendUrl:m,dataSiloIds:f,subCategories:p,status:g,includeEncryptedSnippets:a}){try{let o=c(m,l),C=await d(o,{dataSiloIds:f,subCategories:p,status:g,includeEncryptedSnippets:a});e.info(i.magenta(`Writing unstructured discovery files to file "${s}"...`));let r=[],S=C.map(t=>{let n={"Entry ID":t.id,"Data Silo ID":t.dataSiloId,"Object Path ID":t.scannedObjectPathId,"Object ID":t.scannedObjectId,...a?{Entry:t.name,"Context Snippet":t.contextSnippet}:{},"Data Category":`${t.dataSubCategory.category}:${t.dataSubCategory.name}`,"Classification Status":t.status,"Confidence Score":t.confidence,"Classification Method":t.classificationMethod,"Classifier Version":t.classifierVersion};return r=b([...r,...Object.keys(n)]),n});u(s,S,r)}catch(o){e.error(i.red(`An error occurred syncing the unstructured discovery files: ${o.message}`)),process.exit(1)}e.info(i.green(`Successfully synced unstructured discovery files to disk at ${s}!`))}export{x as pullUnstructuredDiscoveryFiles};
- //# sourceMappingURL=impl-GNG2DOKG.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/commands/inventory/pull-unstructured-discovery-files/impl.ts"],"sourcesContent":["import type { LocalContext } from '@/context';\nimport type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { uniq } from 'lodash-es';\nimport { writeCsv } from '@/lib/cron';\nimport { pullUnstructuredSubDataPointRecommendations } from '@/lib/data-inventory';\nimport { buildTranscendGraphQLClient } from '@/lib/graphql';\nimport { logger } from '@/logger';\n\ninterface PullUnstructuredDiscoveryFilesCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n subCategories?: string[];\n status?: UnstructuredSubDataPointRecommendationStatus[];\n includeEncryptedSnippets: boolean;\n}\n\nexport async function pullUnstructuredDiscoveryFiles(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n dataSiloIds,\n subCategories,\n status,\n includeEncryptedSnippets,\n }: PullUnstructuredDiscoveryFilesCommandFlags,\n): Promise<void> {\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const entries = await pullUnstructuredSubDataPointRecommendations(client, {\n dataSiloIds,\n subCategories, // TODO: https://transcend.height.app/T-40482 - do by name not ID\n status,\n includeEncryptedSnippets,\n });\n\n logger.info(\n colors.magenta(\n `Writing unstructured discovery files to file \"${file}\"...`,\n ),\n );\n let headers: string[] = [];\n const inputs = entries.map((entry) => {\n const result = {\n 'Entry ID': entry.id,\n 'Data Silo ID': entry.dataSiloId,\n 'Object Path ID': entry.scannedObjectPathId,\n 'Object ID': entry.scannedObjectId,\n ...(includeEncryptedSnippets\n ? { Entry: entry.name, 'Context Snippet': entry.contextSnippet }\n : {}),\n 'Data Category': `${entry.dataSubCategory.category}:${entry.dataSubCategory.name}`,\n 'Classification Status': entry.status,\n 'Confidence Score': entry.confidence,\n 'Classification Method': entry.classificationMethod,\n 'Classifier Version': entry.classifierVersion,\n };\n headers = uniq([...headers, ...Object.keys(result)]);\n return result;\n });\n writeCsv(file, inputs, headers);\n } catch (err) {\n logger.error(\n colors.red(\n `An error occurred syncing the unstructured discovery files: ${err.message}`,\n ),\n );\n process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced unstructured discovery files to disk at ${file}!`,\n ),\n 
);\n}\n"],"mappings":"6SAEA,OAAOA,MAAY,SACnB,OAAS,QAAAC,MAAY,YAgBrB,eAAsBC,EAEpB,CACE,KAAAC,EACA,KAAAC,EACA,aAAAC,EACA,YAAAC,EACA,cAAAC,EACA,OAAAC,EACA,yBAAAC,CACF,EACe,CACf,GAAI,CAEF,IAAMC,EAASC,EAA4BN,EAAcF,CAAI,EAEvDS,EAAU,MAAMC,EAA4CH,EAAQ,CACxE,YAAAJ,EACA,cAAAC,EACA,OAAAC,EACA,yBAAAC,CACF,CAAC,EAEDK,EAAO,KACLC,EAAO,QACL,iDAAiDX,CAAI,MACvD,CACF,EACA,IAAIY,EAAoB,CAAC,EACnBC,EAASL,EAAQ,IAAKM,GAAU,CACpC,IAAMC,EAAS,CACb,WAAYD,EAAM,GAClB,eAAgBA,EAAM,WACtB,iBAAkBA,EAAM,oBACxB,YAAaA,EAAM,gBACnB,GAAIT,EACA,CAAE,MAAOS,EAAM,KAAM,kBAAmBA,EAAM,cAAe,EAC7D,CAAC,EACL,gBAAiB,GAAGA,EAAM,gBAAgB,QAAQ,IAAIA,EAAM,gBAAgB,IAAI,GAChF,wBAAyBA,EAAM,OAC/B,mBAAoBA,EAAM,WAC1B,wBAAyBA,EAAM,qBAC/B,qBAAsBA,EAAM,iBAC9B,EACA,OAAAF,EAAUI,EAAK,CAAC,GAAGJ,EAAS,GAAG,OAAO,KAAKG,CAAM,CAAC,CAAC,EAC5CA,CACT,CAAC,EACDE,EAASjB,EAAMa,EAAQD,CAAO,CAChC,OAASM,EAAK,CACZR,EAAO,MACLC,EAAO,IACL,+DAA+DO,EAAI,OAAO,EAC5E,CACF,EACA,QAAQ,KAAK,CAAC,CAChB,CAGAR,EAAO,KACLC,EAAO,MACL,+DAA+DX,CAAI,GACrE,CACF,CACF","names":["colors","uniq","pullUnstructuredDiscoveryFiles","auth","file","transcendUrl","dataSiloIds","subCategories","status","includeEncryptedSnippets","client","buildTranscendGraphQLClient","entries","pullUnstructuredSubDataPointRecommendations","logger","colors","headers","inputs","entry","result","uniq","writeCsv","err"]}
@@ -1,2 +0,0 @@
- import{a as w}from"./chunk-CBAHSBSW.js";import{d as D,e as h}from"./chunk-OEB7WG3G.js";import{c as y}from"./chunk-MVDOKJ6J.js";import"./chunk-LAYHULHH.js";import"./chunk-347UQP43.js";import{ne as S,pe as T}from"./chunk-TDBKATQK.js";import{a as c}from"./chunk-43JWXG77.js";import"./chunk-L5ULN3IT.js";import"./chunk-SF46ZLPT.js";import"./chunk-ARVEJERC.js";import{join as O}from"path";import{difference as v}from"lodash-es";import I from"colors";import{existsSync as $,lstatSync as b}from"fs";async function Q({auth:F,dataFlowsYmlFolder:a,output:x,ignoreYmls:C=[],transcendUrl:k}){a||(c.error(I.red("Missing required arg: --dataFlowsYmlFolder=./working/data-flows/")),process.exit(1)),(!$(a)||!b(a).isDirectory())&&(c.error(I.red(`Folder does not exist: "${a}"`)),process.exit(1));let N=C.map(t=>t.split(".")[0]),l=y(a).map(t=>{let{"data-flows":o=[]}=D(O(a,t)),{adTechDataSilos:m,siteTechDataSilos:r}=w(o,{serviceToSupportedIntegration:u,serviceToTitle:f});return{adTechDataSilos:m,siteTechDataSilos:r,organizationName:t.split(".")[0]}}),s={};l.forEach(({adTechDataSilos:t,siteTechDataSilos:o,organizationName:m})=>{[...t,...o].forEach(e=>{let n=e["outer-type"]||e.integrationName;s[n]||(s[n]=[]),s[n].push(m),s[n]=[...new Set(s[n])]})});let p=[...new Set(l.map(({adTechDataSilos:t})=>t.map(o=>o["outer-type"]||o.integrationName)).flat())],g=v([...new Set(l.map(({siteTechDataSilos:t})=>t.map(o=>o["outer-type"]||o.integrationName)).flat())],p),i={};l.forEach(({adTechDataSilos:t,siteTechDataSilos:o})=>{[...t,...o].forEach(r=>{let e=r["outer-type"]||r.integrationName,n=r.attributes?.find(E=>E.key==="Found On Domain");i[e]||(i[e]=[]),i[e].push(...n?.values||[]),i[e]=[...new Set(i[e])]})});let A=T(k,F),{serviceToTitle:f,serviceToSupportedIntegration:u}=await S(A),d=[...p,...g].map(t=>({title:f[t],...u[t]?{integrationName:t}:{integrationName:"promptAPerson","outer-type":t},attributes:[{key:"Tech Type",values:["Ad Tech"]},{key:"Business Units",values:v(s[t]||[],N)},{key:"Found On Domain",values:i[t]||[]}]}));c.log(`Total Services: ${d.length}`),c.log(`Ad Tech Services: ${p.length}`),c.log(`Site Tech Services: ${g.length}`),h(x,{"data-silos":d})}export{Q as deriveDataSilosFromDataFlowsCrossInstance};
- //# sourceMappingURL=impl-GNSHZ3OL.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/commands/inventory/derive-data-silos-from-data-flows-cross-instance/impl.ts"],"sourcesContent":["import type { LocalContext } from '@/context';\nimport {\n fetchAndIndexCatalogs,\n buildTranscendGraphQLClient,\n} from '@/lib/graphql';\nimport { join } from 'path';\nimport { difference } from 'lodash-es';\nimport colors from 'colors';\nimport { logger } from '@/logger';\nimport { dataFlowsToDataSilos } from '@/lib/consent-manager/dataFlowsToDataSilos';\nimport { DataFlowInput } from '@/codecs';\nimport { existsSync, lstatSync } from 'fs';\nimport { listFiles } from '@/lib/api-keys';\nimport { readTranscendYaml, writeTranscendYaml } from '@/lib/readTranscendYaml';\n\ninterface DeriveDataSilosFromDataFlowsCrossInstanceCommandFlags {\n auth: string;\n dataFlowsYmlFolder: string;\n output: string;\n ignoreYmls?: string[];\n transcendUrl: string;\n}\n\nexport async function deriveDataSilosFromDataFlowsCrossInstance(\n this: LocalContext,\n {\n auth,\n dataFlowsYmlFolder,\n output,\n ignoreYmls = [],\n transcendUrl,\n }: DeriveDataSilosFromDataFlowsCrossInstanceCommandFlags,\n): Promise<void> {\n // Ensure folder is passed to dataFlowsYmlFolder\n if (!dataFlowsYmlFolder) {\n logger.error(\n colors.red(\n 'Missing required arg: --dataFlowsYmlFolder=./working/data-flows/',\n ),\n );\n process.exit(1);\n }\n\n // Ensure folder is passed\n if (\n !existsSync(dataFlowsYmlFolder) ||\n !lstatSync(dataFlowsYmlFolder).isDirectory()\n ) {\n logger.error(colors.red(`Folder does not exist: \"${dataFlowsYmlFolder}\"`));\n process.exit(1);\n }\n\n // Ignore the data flows in these yml files\n const instancesToIgnore = ignoreYmls.map((x) => x.split('.')[0]);\n\n // Map over each data flow yml file and convert to data silo configurations\n const dataSiloInputs = listFiles(dataFlowsYmlFolder).map((directory) => {\n // read in the data flows for a specific instance\n const { 'data-flows': dataFlows = [] } = readTranscendYaml(\n join(dataFlowsYmlFolder, directory),\n );\n\n // map the data flows to data silos\n const { adTechDataSilos, siteTechDataSilos } = dataFlowsToDataSilos(\n dataFlows as DataFlowInput[],\n {\n serviceToSupportedIntegration,\n serviceToTitle,\n },\n );\n\n return {\n adTechDataSilos,\n siteTechDataSilos,\n organizationName: directory.split('.')[0],\n };\n });\n\n // Mapping from service name to instances that have that service\n const serviceToInstance: { [k in string]: string[] } = {};\n dataSiloInputs.forEach(\n ({ adTechDataSilos, siteTechDataSilos, organizationName }) => {\n const allDataSilos = [...adTechDataSilos, ...siteTechDataSilos];\n allDataSilos.forEach((dataSilo) => {\n const service = dataSilo['outer-type'] || dataSilo.integrationName;\n // create mapping to instance\n if (!serviceToInstance[service]) {\n serviceToInstance[service] = [];\n }\n serviceToInstance[service]!.push(organizationName);\n serviceToInstance[service] = [...new Set(serviceToInstance[service])];\n });\n },\n );\n\n // List of ad tech integrations\n const adTechIntegrations = [\n ...new Set(\n dataSiloInputs\n .map(({ adTechDataSilos }) =>\n adTechDataSilos.map(\n (silo) => silo['outer-type'] || silo.integrationName,\n ),\n )\n .flat(),\n ),\n ];\n\n // List of site tech integrations\n const siteTechIntegrations = difference(\n [\n ...new Set(\n dataSiloInputs\n .map(({ siteTechDataSilos }) =>\n siteTechDataSilos.map(\n (silo) => silo['outer-type'] || silo.integrationName,\n ),\n )\n .flat(),\n ),\n ],\n adTechIntegrations,\n );\n\n // Mapping from service name to list of\n 
const serviceToFoundOnDomain: { [k in string]: string[] } = {};\n dataSiloInputs.forEach(({ adTechDataSilos, siteTechDataSilos }) => {\n const allDataSilos = [...adTechDataSilos, ...siteTechDataSilos];\n allDataSilos.forEach((dataSilo) => {\n const service = dataSilo['outer-type'] || dataSilo.integrationName;\n const foundOnDomain = dataSilo.attributes?.find(\n (attr) => attr.key === 'Found On Domain',\n );\n // create mapping to instance\n if (!serviceToFoundOnDomain[service]) {\n serviceToFoundOnDomain[service] = [];\n }\n serviceToFoundOnDomain[service]!.push(...(foundOnDomain?.values || []));\n serviceToFoundOnDomain[service] = [\n ...new Set(serviceToFoundOnDomain[service]),\n ];\n });\n });\n\n // Fetch all integrations in the catalog\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const { serviceToTitle, serviceToSupportedIntegration } =\n await fetchAndIndexCatalogs(client);\n\n // construct the aggregated data silo inputs\n const dataSilos = [...adTechIntegrations, ...siteTechIntegrations].map(\n (service) => ({\n title: serviceToTitle[service],\n ...(serviceToSupportedIntegration[service]\n ? { integrationName: service }\n : { integrationName: 'promptAPerson', 'outer-type': service }),\n attributes: [\n {\n key: 'Tech Type',\n values: ['Ad Tech'],\n },\n {\n key: 'Business Units',\n values: difference(\n serviceToInstance[service] || [],\n instancesToIgnore,\n ),\n },\n {\n key: 'Found On Domain',\n values: serviceToFoundOnDomain[service] || [],\n },\n ],\n }),\n );\n\n // Log output\n logger.log(`Total Services: ${dataSilos.length}`);\n logger.log(`Ad Tech Services: ${adTechIntegrations.length}`);\n logger.log(`Site Tech Services: ${siteTechIntegrations.length}`);\n\n // Write to yaml\n writeTranscendYaml(output, {\n 'data-silos': dataSilos,\n 
});\n}\n"],"mappings":"oWAKA,OAAS,QAAAA,MAAY,OACrB,OAAS,cAAAC,MAAkB,YAC3B,OAAOC,MAAY,SAInB,OAAS,cAAAC,EAAY,aAAAC,MAAiB,KAYtC,eAAsBC,EAEpB,CACE,KAAAC,EACA,mBAAAC,EACA,OAAAC,EACA,WAAAC,EAAa,CAAC,EACd,aAAAC,CACF,EACe,CAEVH,IACHI,EAAO,MACLC,EAAO,IACL,kEACF,CACF,EACA,QAAQ,KAAK,CAAC,IAKd,CAACC,EAAWN,CAAkB,GAC9B,CAACO,EAAUP,CAAkB,EAAE,YAAY,KAE3CI,EAAO,MAAMC,EAAO,IAAI,2BAA2BL,CAAkB,GAAG,CAAC,EACzE,QAAQ,KAAK,CAAC,GAIhB,IAAMQ,EAAoBN,EAAW,IAAKO,GAAMA,EAAE,MAAM,GAAG,EAAE,CAAC,CAAC,EAGzDC,EAAiBC,EAAUX,CAAkB,EAAE,IAAKY,GAAc,CAEtE,GAAM,CAAE,aAAcC,EAAY,CAAC,CAAE,EAAIC,EACvCC,EAAKf,EAAoBY,CAAS,CACpC,EAGM,CAAE,gBAAAI,EAAiB,kBAAAC,CAAkB,EAAIC,EAC7CL,EACA,CACE,8BAAAM,EACA,eAAAC,CACF,CACF,EAEA,MAAO,CACL,gBAAAJ,EACA,kBAAAC,EACA,iBAAkBL,EAAU,MAAM,GAAG,EAAE,CAAC,CAC1C,CACF,CAAC,EAGKS,EAAiD,CAAC,EACxDX,EAAe,QACb,CAAC,CAAE,gBAAAM,EAAiB,kBAAAC,EAAmB,iBAAAK,CAAiB,IAAM,CACvC,CAAC,GAAGN,EAAiB,GAAGC,CAAiB,EACjD,QAASM,GAAa,CACjC,IAAMC,EAAUD,EAAS,YAAY,GAAKA,EAAS,gBAE9CF,EAAkBG,CAAO,IAC5BH,EAAkBG,CAAO,EAAI,CAAC,GAEhCH,EAAkBG,CAAO,EAAG,KAAKF,CAAgB,EACjDD,EAAkBG,CAAO,EAAI,CAAC,GAAG,IAAI,IAAIH,EAAkBG,CAAO,CAAC,CAAC,CACtE,CAAC,CACH,CACF,EAGA,IAAMC,EAAqB,CACzB,GAAG,IAAI,IACLf,EACG,IAAI,CAAC,CAAE,gBAAAM,CAAgB,IACtBA,EAAgB,IACbU,GAASA,EAAK,YAAY,GAAKA,EAAK,eACvC,CACF,EACC,KAAK,CACV,CACF,EAGMC,EAAuBC,EAC3B,CACE,GAAG,IAAI,IACLlB,EACG,IAAI,CAAC,CAAE,kBAAAO,CAAkB,IACxBA,EAAkB,IACfS,GAASA,EAAK,YAAY,GAAKA,EAAK,eACvC,CACF,EACC,KAAK,CACV,CACF,EACAD,CACF,EAGMI,EAAsD,CAAC,EAC7DnB,EAAe,QAAQ,CAAC,CAAE,gBAAAM,EAAiB,kBAAAC,CAAkB,IAAM,CAC5C,CAAC,GAAGD,EAAiB,GAAGC,CAAiB,EACjD,QAASM,GAAa,CACjC,IAAMC,EAAUD,EAAS,YAAY,GAAKA,EAAS,gBAC7CO,EAAgBP,EAAS,YAAY,KACxCQ,GAASA,EAAK,MAAQ,iBACzB,EAEKF,EAAuBL,CAAO,IACjCK,EAAuBL,CAAO,EAAI,CAAC,GAErCK,EAAuBL,CAAO,EAAG,KAAK,GAAIM,GAAe,QAAU,CAAC,CAAE,EACtED,EAAuBL,CAAO,EAAI,CAChC,GAAG,IAAI,IAAIK,EAAuBL,CAAO,CAAC,CAC5C,CACF,CAAC,CACH,CAAC,EAGD,IAAMQ,EAASC,EAA4B9B,EAAcJ,CAAI,EACvD,CAAE,eAAAqB,EAAgB,8BAAAD,CAA8B,EACpD,MAAMe,EAAsBF,CAAM,EAG9BG,EAAY,CAAC,GAAGV,EAAoB,GAAGE,CAAoB,EAAE,IAChEH,IAAa,CACZ,MAAOJ,EAAeI,CAAO,EAC7B,GAAIL,EAA8BK,CAAO,EACrC,CAAE,gBAAiBA,CAAQ,EAC3B,CAAE,gBAAiB,gBAAiB,aAAcA,CAAQ,EAC9D,WAAY,CACV,CACE,IAAK,YACL,OAAQ,CAAC,SAAS,CACpB,EACA,CACE,IAAK,iBACL,OAAQI,EACNP,EAAkBG,CAAO,GAAK,CAAC,EAC/BhB,CACF,CACF,EACA,CACE,IAAK,kBACL,OAAQqB,EAAuBL,CAAO,GAAK,CAAC,CAC9C,CACF,CACF,EACF,EAGApB,EAAO,IAAI,mBAAmB+B,EAAU,MAAM,EAAE,EAChD/B,EAAO,IAAI,qBAAqBqB,EAAmB,MAAM,EAAE,EAC3DrB,EAAO,IAAI,uBAAuBuB,EAAqB,MAAM,EAAE,EAG/DS,EAAmBnC,EAAQ,CACzB,aAAckC,CAChB,CAAC,CACH","names":["join","difference","colors","existsSync","lstatSync","deriveDataSilosFromDataFlowsCrossInstance","auth","dataFlowsYmlFolder","output","ignoreYmls","transcendUrl","logger","colors","existsSync","lstatSync","instancesToIgnore","x","dataSiloInputs","listFiles","directory","dataFlows","readTranscendYaml","join","adTechDataSilos","siteTechDataSilos","dataFlowsToDataSilos","serviceToSupportedIntegration","serviceToTitle","serviceToInstance","organizationName","dataSilo","service","adTechIntegrations","silo","siteTechIntegrations","difference","serviceToFoundOnDomain","foundOnDomain","attr","client","buildTranscendGraphQLClient","fetchAndIndexCatalogs","dataSilos","writeTranscendYaml"]}
@@ -1,7 +0,0 @@
- import{_b as L,a as N,b as R,ha as _,pe as D}from"./chunk-TDBKATQK.js";import{a}from"./chunk-43JWXG77.js";import"./chunk-SF46ZLPT.js";import{b as U}from"./chunk-ARVEJERC.js";import ge from"colors";import X from"got";var Q=({hostname:t,auth:e})=>X.extend({prefixUrl:`https://${t}`,headers:{accept:"application/json","content-type":"application/json",authorization:`Bearer ${e}`}});import Re from"colors";import xe from"yargs-parser";var Ge=Object.values(U);import te from"colors";import j from"fs";var w=({assessment:t,index:e,total:s,wrap:r=!0})=>{let n="";(e===0||r)&&(n=`[
- `);let m=JSON.stringify(t),o=s&&e<s-1&&!r?",":"";return n=`${n+m+o}
- `,(s&&e===s-1||r)&&(n+=`
- ]`),n};var B=({file:t,assessment:e,index:s,total:r})=>{a.info(te.magenta(`Writing enriched assessment ${s+1} of ${r} to file "${t}"...`)),s===0?j.writeFileSync(t,w({assessment:e,index:s,total:r,wrap:!1})):j.appendFileSync(t,w({assessment:e,index:s,total:r,wrap:!1}))};import P from"colors";import{decodeCodec as re}from"@transcend-io/type-utils";import{OneTrustGetListOfAssessmentsResponse as ne}from"@transcend-io/privacy-types";var q=async({oneTrust:t})=>{let e=0,s=1,r=0,n=[];for(;e<s;){let{body:m}=await t.get(`api/assessment/v2/assessments?page=${e}&size=2000`),{page:o,content:u}=re(ne,m);n.push(...u??[]),e===0&&(s=o?.totalPages??0,r=o?.totalElements??0),e+=1,a.info(`Fetched ${n.length} of ${r} assessments.`)}return n};import{decodeCodec as oe}from"@transcend-io/type-utils";import{OneTrustGetAssessmentResponse as ie}from"@transcend-io/privacy-types";var J=async({oneTrust:t,assessmentId:e})=>{let{body:s}=await t.get(`api/assessment/v2/assessments/${e}/export?ExcludeSkippedQuestions=false`);return oe(ie,s)};import{decodeCodec as ae}from"@transcend-io/type-utils";import{OneTrustGetRiskResponse as me}from"@transcend-io/privacy-types";var K=async({oneTrust:t,riskId:e})=>{let{body:s}=await t.get(`api/risk/v2/risks/${e}`);return ae(me,s)};import{decodeCodec as ce}from"@transcend-io/type-utils";import{OneTrustGetUserResponse as ue}from"@transcend-io/privacy-types";var b=async({oneTrust:t,userId:e})=>{let{body:s}=await t.get(`api/scim/v2/Users/${e}`);return ce(ue,s)};import{uniq as pe}from"lodash-es";import{keyBy as I}from"lodash-es";var W=({assessment:t,assessmentDetails:e,riskDetails:s,creatorDetails:r,approversDetails:n,respondentsDetails:m})=>{let o=I(s,"id"),{sections:u,createdBy:g,...h}=e,O=u.map(i=>{let{questions:A,...$}=i,x=A.map(E=>{let{risks:C,...G}=E,v=(C??[]).map(y=>{let S=o[y.riskId];return{...y,...S,level:y.level,impactLevel:y.impactLevel??0}});return{...G,risks:v}});return{...$,questions:x}}),p={...g,active:r?.active??!1,userType:r?.userType??"Internal",emails:r?.emails??[],title:r?.title??null,givenName:r?.name.givenName??null,familyName:r?.name.familyName??null},d=I(n,"id"),l=e.approvers.flatMap(i=>d[i.id]?[{...i,approver:{...i.approver,active:d[i.id].active,userType:d[i.id].userType,emails:d[i.id].emails,title:d[i.id].title,givenName:d[i.id].name.givenName??null,familyName:d[i.id].name.familyName??null}}]:[]),T=I(m,"id"),F=e.respondents.filter(i=>!i.name.includes("@")).flatMap(i=>T[i.id]?[{...i,active:T[i.id].active,userType:T[i.id].userType,emails:T[i.id].emails,title:T[i.id].title,givenName:T[i.id].name.givenName??null,familyName:T[i.id].name.familyName??null}]:[]);return{...t,...h,approvers:l,respondents:F,createdBy:p,sections:O}};import H from"colors";var k=async({transcend:t,assessment:e,total:s,index:r})=>{a.info(H.magenta(`Writing enriched assessment ${r+1} ${s?`of ${s} `:" "}to Transcend...`));let m={json:w({assessment:e,index:r,total:s})};try{await L(t,_,{input:m})}catch{a.error(H.red(`Failed to sync assessment ${r+1} ${s?`of ${s} `:" "}to Transcend.
- Assessment Title: ${e.name}. Template Title: ${e.template.name}
- `))}};var z=async({oneTrust:t,file:e,dryRun:s,transcend:r})=>{a.info("Getting list of all assessments from OneTrust...");let n=await q({oneTrust:t}),m={},o=5,u=Array.from({length:Math.ceil(n.length/o)},(g,h)=>n.slice(h*o,(h+1)*o));await N(u,async(g,h)=>{let O=[];await R(g,async(p,d)=>{let l=o*h+d+1;a.info(`[assessment ${l} of ${n.length}]: fetching details...`);let{templateName:T,assessmentId:F}=p,i=await J({oneTrust:t,assessmentId:F}),A=i.createdBy.id,$=m[A];if(!$){a.info(`[assessment ${l} of ${n.length}]: fetching creator...`);try{$=await b({oneTrust:t,userId:A}),m[A]=$}catch{a.warn(P.yellow(`[assessment ${l} of ${n.length}]: failed to fetch form creator. creatorId: ${A}. Assessment Title: ${p.name}. Template Title: ${T}`))}}let{approvers:x}=i,E=[];x.length>0&&(a.info(`[assessment ${l} of ${n.length}]: fetching approvers...`),E=await R(x.map(({id:c})=>c),async c=>{try{let f=m[c];return f||(f=await b({oneTrust:t,userId:c}),m[c]=f),[f]}catch{return a.warn(P.yellow(`[assessment ${l} of ${n.length}]: failed to fetch a form approver. approverId: ${c}. Assessment Title: ${p.name}. Template Title: ${T}`)),[]}},{concurrency:5}));let{respondents:C}=i,G=C.filter(c=>!c.name.includes("@")),v=[];G.length>0&&(a.info(`[assessment ${l} of ${n.length}]: fetching respondents...`),v=await R(G.map(({id:c})=>c),async c=>{try{let f=m[c];return f||(f=await b({oneTrust:t,userId:c}),m[c]=f),[f]}catch{return a.warn(P.yellow(`[assessment ${l} of ${n.length}]: failed to fetch a respondent. respondentId: ${c}. Assessment Title: ${p.name}. Template Title: ${T}`)),[]}},{concurrency:5}));let y=[],S=pe(i.sections.flatMap(c=>c.questions.flatMap(f=>(f.risks??[]).flatMap(Z=>Z.riskId))));S.length>0&&(a.info(`[assessment ${l} of ${n.length}]: fetching risks...`),y=await R(S,c=>K({oneTrust:t,riskId:c}),{concurrency:5}));let V=W({assessment:p,assessmentDetails:i,riskDetails:y,creatorDetails:$,approversDetails:E.flat(),respondentsDetails:v.flat()});O.push(V)},{concurrency:o}),await N(O,async(p,d)=>{let l=h*o+d;s&&e?B({assessment:p,index:l,total:n.length,file:e}):r&&await k({assessment:p,transcend:r,total:n.length,index:l})})})};import{decodeCodec as le}from"@transcend-io/type-utils";import M from"colors";import fe from"JSONStream";import{createReadStream as de}from"fs";import{OneTrustEnrichedAssessment as Te}from"@transcend-io/privacy-types";var Y=({transcend:t,file:e})=>(a.info(`Getting list of all assessments from file ${e}...`),new Promise((s,r)=>{let n=de(e,{encoding:"utf-8",highWaterMark:65536}),m=fe.parse("*"),o=0;n.pipe(m),m.on("data",async u=>{try{m.pause();let g=le(Te,u);await k({assessment:g,transcend:t,index:o}),o+=1,m.resume()}catch(g){a.error(M.red(`Failed to parse the assessment ${o} from file '${e}': ${g.message}.`))}}),m.on("end",()=>{a.info(`Finished processing ${o} assessments from file ${e}`),s()}),m.on("error",u=>{a.error(M.red(`Error parsing file '${e}': ${u.message}`)),r(u)}),n.on("error",u=>{a.error(M.red(`Error reading file '${e}': ${u.message}`)),r(u)})}));async function _s({hostname:t,oneTrustAuth:e,source:s,transcendAuth:r,transcendUrl:n,resource:m,file:o,dryRun:u,debug:g}){if(!u&&!r)throw new Error('Must specify a "transcendAuth" parameter to sync resources to Transcend. e.g. --transcendAuth=${TRANSCEND_API_KEY}');if(u&&!o)throw new Error('Must set a "file" parameter when "dryRun" is "true". e.g. --file=./oneTrustAssessments.json');if(o){let p=o.split(".");if(p.length<2)throw new Error('The "file" parameter has an invalid format. Expected a path with extensions. e.g. 
--file=./pathToFile.json.');if(p.at(-1)!=="json")throw new Error(`Expected the format of the "file" parameters '${o}' to be 'json', but got '${p.at(-1)}'.`)}if(s==="oneTrust"){if(!t)throw new Error('Missing required parameter "hostname". e.g. --hostname=customer.my.onetrust.com');if(!e)throw new Error('Missing required parameter "oneTrustAuth". e.g. --oneTrustAuth=$ONE_TRUST_AUTH_TOKEN')}else{if(!o)throw new Error('Must specify a "file" parameter to read the OneTrust assessments from. e.g. --source=./oneTrustAssessments.json');if(u)throw new Error('Cannot read and write to a file simultaneously. Emit the "source" parameter or set it to oneTrust if "dryRun" is enabled.')}let h=t&&e?Q({hostname:t,auth:e}):void 0,O=n&&r?D(n,r):void 0;try{m==="assessments"&&(s==="oneTrust"&&h?await z({oneTrust:h,file:o,dryRun:u,...O&&{transcend:O}}):s==="file"&&o&&O&&await Y({file:o,transcend:O}))}catch(p){throw new Error(`An error occurred syncing the resource ${m} from OneTrust: ${g?p.stack:p.message}`)}a.info(ge.green(`Successfully synced OneTrust ${m} to ${u?`disk at "${o}"`:"Transcend"}!`))}export{_s as syncOt};
- //# sourceMappingURL=impl-GPCURY4M.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/commands/migration/sync-ot/impl.ts","../src/lib/oneTrust/createOneTrustGotInstance.ts","../src/lib/oneTrust/helpers/parseCliSyncOtArguments.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentToDisk.ts","../src/lib/oneTrust/helpers/oneTrustAssessmentToJson.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentsFromOneTrust.ts","../src/lib/oneTrust/endpoints/getListOfOneTrustAssessments.ts","../src/lib/oneTrust/endpoints/getOneTrustAssessment.ts","../src/lib/oneTrust/endpoints/getOneTrustRisk.ts","../src/lib/oneTrust/endpoints/getOneTrustUser.ts","../src/lib/oneTrust/helpers/enrichOneTrustAssessment.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentToTranscend.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentsFromFile.ts"],"sourcesContent":["import type { LocalContext } from '@/context';\nimport { logger } from '@/logger';\nimport colors from 'colors';\nimport { createOneTrustGotInstance } from '@/lib/oneTrust';\nimport {\n OneTrustFileFormat,\n OneTrustPullResource,\n OneTrustPullSource,\n} from '@/enums';\nimport { buildTranscendGraphQLClient } from '@/lib/graphql';\nimport {\n syncOneTrustAssessmentsFromFile,\n syncOneTrustAssessmentsFromOneTrust,\n} from '@/lib/oneTrust/helpers';\n\n// Command flag interface\ninterface SyncOtCommandFlags {\n hostname?: string;\n oneTrustAuth?: string;\n source: OneTrustPullSource;\n transcendAuth?: string;\n transcendUrl: string;\n file?: string;\n resource: OneTrustPullResource;\n dryRun: boolean;\n debug: boolean;\n}\n\n// Command implementation\nexport async function syncOt(\n this: LocalContext,\n {\n hostname,\n oneTrustAuth,\n source,\n transcendAuth,\n transcendUrl,\n resource,\n file,\n dryRun,\n debug,\n }: SyncOtCommandFlags,\n): Promise<void> {\n // Must be able to authenticate to transcend to sync resources to it\n if (!dryRun && !transcendAuth) {\n throw new Error(\n // eslint-disable-next-line no-template-curly-in-string\n 'Must specify a \"transcendAuth\" parameter to sync resources to Transcend. e.g. --transcendAuth=${TRANSCEND_API_KEY}',\n );\n }\n\n // If trying to sync to disk, must specify a file path\n if (dryRun && !file) {\n throw new Error(\n 'Must set a \"file\" parameter when \"dryRun\" is \"true\". e.g. --file=./oneTrustAssessments.json',\n );\n }\n\n if (file) {\n const splitFile = file.split('.');\n if (splitFile.length < 2) {\n throw new Error(\n 'The \"file\" parameter has an invalid format. Expected a path with extensions. e.g. --file=./pathToFile.json.',\n );\n }\n if (splitFile.at(-1) !== OneTrustFileFormat.Json) {\n throw new Error(\n `Expected the format of the \"file\" parameters '${file}' to be '${\n OneTrustFileFormat.Json\n }', but got '${splitFile.at(-1)}'.`,\n );\n }\n }\n\n // if reading assessments from a OneTrust\n if (source === OneTrustPullSource.OneTrust) {\n // must specify the OneTrust hostname\n if (!hostname) {\n throw new Error(\n 'Missing required parameter \"hostname\". e.g. --hostname=customer.my.onetrust.com',\n );\n }\n // must specify the OneTrust auth\n if (!oneTrustAuth) {\n throw new Error(\n 'Missing required parameter \"oneTrustAuth\". e.g. --oneTrustAuth=$ONE_TRUST_AUTH_TOKEN',\n );\n }\n } else {\n // if reading the assessments from a file, must specify a file to read from\n if (!file) {\n throw new Error(\n 'Must specify a \"file\" parameter to read the OneTrust assessments from. e.g. 
--source=./oneTrustAssessments.json',\n );\n }\n\n // Cannot try reading from file and save assessments to a file simultaneously\n if (dryRun) {\n throw new Error(\n 'Cannot read and write to a file simultaneously.' +\n ` Emit the \"source\" parameter or set it to ${OneTrustPullSource.OneTrust} if \"dryRun\" is enabled.`,\n );\n }\n }\n\n // instantiate a client to talk to OneTrust\n const oneTrust =\n hostname && oneTrustAuth\n ? createOneTrustGotInstance({\n hostname,\n auth: oneTrustAuth,\n })\n : undefined;\n\n // instantiate a client to talk to Transcend\n const transcend =\n transcendUrl && transcendAuth\n ? buildTranscendGraphQLClient(transcendUrl, transcendAuth)\n : undefined;\n\n try {\n if (resource === OneTrustPullResource.Assessments) {\n if (source === OneTrustPullSource.OneTrust && oneTrust) {\n await syncOneTrustAssessmentsFromOneTrust({\n oneTrust,\n file,\n dryRun,\n ...(transcend && { transcend }),\n });\n } else if (source === OneTrustPullSource.File && file && transcend) {\n await syncOneTrustAssessmentsFromFile({ file, transcend });\n }\n }\n } catch (err) {\n throw new Error(\n `An error occurred syncing the resource ${resource} from OneTrust: ${\n debug ? err.stack : err.message\n }`,\n );\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced OneTrust ${resource} to ${\n dryRun ? `disk at \"${file}\"` : 'Transcend'\n }!`,\n ),\n );\n}\n","import got, { Got } from 'got';\n\n/**\n * Instantiate an instance of got that is capable of making requests to OneTrust\n *\n * @param param - information about the OneTrust URL\n * @returns The instance of got that is capable of making requests to the customer ingress\n */\nexport const createOneTrustGotInstance = ({\n hostname,\n auth,\n}: {\n /** Hostname of the OneTrust API */\n hostname: string;\n /** The OAuth access token */\n auth: string;\n}): Got =>\n got.extend({\n prefixUrl: `https://${hostname}`,\n headers: {\n accept: 'application/json',\n 'content-type': 'application/json',\n authorization: `Bearer ${auth}`,\n },\n });\n","import { logger } from '../../../logger';\nimport colors from 'colors';\nimport yargs from 'yargs-parser';\nimport {\n OneTrustFileFormat,\n OneTrustPullResource,\n OneTrustPullSource,\n} from '../../../enums';\n\nconst VALID_RESOURCES = Object.values(OneTrustPullResource);\n\ninterface OneTrustCliArguments {\n /** The name of the file to write the resources to */\n file: string;\n /** The OneTrust hostname to send the requests to */\n hostname?: string;\n /** The OAuth Bearer token used to authenticate the requests to OneTrust */\n oneTrustAuth?: string;\n /** The Transcend API key to authenticate the requests to Transcend */\n transcendAuth: string;\n /** The Transcend URL where to forward requests */\n transcendUrl: string;\n /** The resource to pull from OneTrust */\n resource: OneTrustPullResource;\n /** Whether to enable debugging while reporting errors */\n debug: boolean;\n /** Whether to export the resource into a file rather than push to transcend */\n dryRun: boolean;\n /** Where to read the OneTrust resource from */\n source: OneTrustPullSource;\n}\n\n/**\n * Parse the command line arguments\n *\n * @returns the parsed arguments\n */\nexport const parseCliSyncOtArguments = (): OneTrustCliArguments => {\n const {\n file,\n hostname,\n oneTrustAuth,\n resource,\n debug,\n dryRun,\n transcendAuth,\n transcendUrl,\n source,\n } = yargs(process.argv.slice(2), {\n string: [\n 'file',\n 'hostname',\n 'oneTrustAuth',\n 'resource',\n 'dryRun',\n 'transcendAuth',\n 
'transcendUrl',\n 'source',\n ],\n boolean: ['debug', 'dryRun'],\n default: {\n resource: OneTrustPullResource.Assessments,\n debug: false,\n dryRun: false,\n transcendUrl: 'https://api.transcend.io',\n source: OneTrustPullSource.OneTrust,\n },\n });\n\n // Must be able to authenticate to transcend to sync resources to it\n if (!dryRun && !transcendAuth) {\n logger.error(\n colors.red(\n // eslint-disable-next-line no-template-curly-in-string\n 'Must specify a \"transcendAuth\" parameter to sync resources to Transcend. e.g. --transcendAuth=${TRANSCEND_API_KEY}',\n ),\n );\n return process.exit(1);\n }\n if (!dryRun && !transcendUrl) {\n logger.error(\n colors.red(\n // eslint-disable-next-line max-len\n 'Must specify a \"transcendUrl\" parameter to sync resources to Transcend. e.g. --transcendUrl=https://api.transcend.io',\n ),\n );\n return process.exit(1);\n }\n\n // If trying to sync to disk, must specify a file path\n if (dryRun && !file) {\n logger.error(\n colors.red(\n 'Must set a \"file\" parameter when \"dryRun\" is \"true\". e.g. --file=./oneTrustAssessments.json',\n ),\n );\n return process.exit(1);\n }\n\n if (file) {\n const splitFile = file.split('.');\n if (splitFile.length < 2) {\n logger.error(\n colors.red(\n 'The \"file\" parameter has an invalid format. Expected a path with extensions. e.g. --file=./pathToFile.json.',\n ),\n );\n return process.exit(1);\n }\n if (splitFile.at(-1) !== OneTrustFileFormat.Json) {\n logger.error(\n colors.red(\n `Expected the format of the \"file\" parameters '${file}' to be '${\n OneTrustFileFormat.Json\n }', but got '${splitFile.at(-1)}'.`,\n ),\n );\n return process.exit(1);\n }\n }\n\n // if reading assessments from a OneTrust\n if (source === OneTrustPullSource.OneTrust) {\n // must specify the OneTrust hostname\n if (!hostname) {\n logger.error(\n colors.red(\n 'Missing required parameter \"hostname\". e.g. --hostname=customer.my.onetrust.com',\n ),\n );\n return process.exit(1);\n }\n // must specify the OneTrust auth\n if (!oneTrustAuth) {\n logger.error(\n colors.red(\n 'Missing required parameter \"oneTrustAuth\". e.g. --oneTrustAuth=$ONE_TRUST_AUTH_TOKEN',\n ),\n );\n return process.exit(1);\n }\n } else {\n // if reading the assessments from a file, must specify a file to read from\n if (!file) {\n logger.error(\n colors.red(\n 'Must specify a \"file\" parameter to read the OneTrust assessments from. e.g. --source=./oneTrustAssessments.json',\n ),\n );\n return process.exit(1);\n }\n\n // Cannot try reading from file and save assessments to a file simultaneously\n if (dryRun) {\n logger.error(\n colors.red(\n 'Cannot read and write to a file simultaneously.' +\n ` Emit the \"source\" parameter or set it to ${OneTrustPullSource.OneTrust} if \"dryRun\" is enabled.`,\n ),\n );\n return process.exit(1);\n }\n }\n\n if (!VALID_RESOURCES.includes(resource)) {\n logger.error(\n colors.red(\n `Received invalid resource value: \"${resource}\". 
Allowed: ${VALID_RESOURCES.join(\n ',',\n )}`,\n ),\n );\n return process.exit(1);\n }\n\n return {\n file,\n ...(hostname && { hostname }),\n ...(oneTrustAuth && { oneTrustAuth }),\n resource,\n debug,\n dryRun,\n transcendAuth,\n transcendUrl,\n source,\n };\n};\n","import { logger } from '../../../logger';\nimport colors from 'colors';\nimport fs from 'fs';\nimport { oneTrustAssessmentToJson } from './oneTrustAssessmentToJson';\nimport { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\n\n/**\n * Write the assessment to disk at the specified file path.\n *\n *\n * @param param - information about the assessment to write\n */\nexport const syncOneTrustAssessmentToDisk = ({\n file,\n assessment,\n index,\n total,\n}: {\n /** The file path to write the assessment to */\n file: string;\n /** The basic assessment */\n assessment: OneTrustEnrichedAssessment;\n /** The index of the assessment being written to the file */\n index: number;\n /** The total amount of assessments that we will write */\n total: number;\n}): void => {\n logger.info(\n colors.magenta(\n `Writing enriched assessment ${\n index + 1\n } of ${total} to file \"${file}\"...`,\n ),\n );\n\n if (index === 0) {\n fs.writeFileSync(\n file,\n oneTrustAssessmentToJson({\n assessment,\n index,\n total,\n wrap: false,\n }),\n );\n } else {\n fs.appendFileSync(\n file,\n oneTrustAssessmentToJson({\n assessment,\n index,\n total,\n wrap: false,\n }),\n );\n }\n};\n","import { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\n\n/**\n * Converts the assessment into a json entry.\n *\n * @param param - information about the assessment and amount of entries\n * @returns a stringified json entry ready to be appended to a file\n */\nexport const oneTrustAssessmentToJson = ({\n assessment,\n index,\n total,\n wrap = true,\n}: {\n /** The assessment to convert */\n assessment: OneTrustEnrichedAssessment;\n /** The position of the assessment in the final Json object */\n index: number;\n /** The total amount of the assessments in the final Json object */\n total?: number;\n /** Whether to wrap every entry in brackets */\n wrap?: boolean;\n}): string => {\n let jsonEntry = '';\n // start with an opening bracket\n if (index === 0 || wrap) {\n jsonEntry = '[\\n';\n }\n\n const stringifiedAssessment = JSON.stringify(assessment);\n\n // Add comma for all items except the last one\n const comma = total && index < total - 1 && !wrap ? 
',' : '';\n\n // write to file\n jsonEntry = `${jsonEntry + stringifiedAssessment + comma}\\n`;\n\n // end with closing bracket\n if ((total && index === total - 1) || wrap) {\n jsonEntry += '\\n]';\n }\n\n return jsonEntry;\n};\n","import type { Got } from 'got';\nimport colors from 'colors';\nimport {\n getListOfOneTrustAssessments,\n getOneTrustAssessment,\n getOneTrustRisk,\n getOneTrustUser,\n} from '../endpoints';\nimport { mapSeries, map } from '@/lib/bluebird-replace';\nimport { logger } from '../../../logger';\nimport {\n OneTrustAssessmentQuestion,\n OneTrustAssessmentSection,\n OneTrustEnrichedAssessment,\n OneTrustGetRiskResponse,\n OneTrustGetUserResponse,\n} from '@transcend-io/privacy-types';\nimport { uniq } from 'lodash-es';\nimport { enrichOneTrustAssessment } from './enrichOneTrustAssessment';\nimport { syncOneTrustAssessmentToDisk } from './syncOneTrustAssessmentToDisk';\nimport { GraphQLClient } from 'graphql-request';\nimport { syncOneTrustAssessmentToTranscend } from './syncOneTrustAssessmentToTranscend';\n\nexport interface AssessmentForm {\n /** ID of Assessment Form */\n id: string;\n /** Title of Assessment Form */\n name: string;\n}\n\n/**\n * Reads all the assessments from a OneTrust instance and syncs them to Transcend or to Disk.\n *\n * @param param - the information about the assessment, its OneTrust source, and destination (disk or Transcend)\n */\nexport const syncOneTrustAssessmentsFromOneTrust = async ({\n oneTrust,\n file,\n dryRun,\n transcend,\n}: {\n /** the OneTrust client instance */\n oneTrust: Got;\n /** the Transcend client instance */\n transcend?: GraphQLClient;\n /** Whether to write to file instead of syncing to Transcend */\n dryRun: boolean;\n /** the path to the file in case dryRun is true */\n file?: string;\n}): Promise<void> => {\n // fetch the list of all assessments in the OneTrust organization\n logger.info('Getting list of all assessments from OneTrust...');\n const assessments = await getListOfOneTrustAssessments({ oneTrust });\n\n // a cache of OneTrust users so we avoid requesting already fetched users\n const oneTrustCachedUsers: Record<string, OneTrustGetUserResponse> = {};\n\n // split all assessments in batches, so we can process some of steps in parallel\n const BATCH_SIZE = 5;\n const assessmentBatches = Array.from(\n {\n length: Math.ceil(assessments.length / BATCH_SIZE),\n },\n (_, i) => assessments.slice(i * BATCH_SIZE, (i + 1) * BATCH_SIZE),\n );\n\n // process each batch and sync the batch right away so it's garbage collected and we don't run out of memory\n await mapSeries(assessmentBatches, async (assessmentBatch, batch) => {\n const batchEnrichedAssessments: OneTrustEnrichedAssessment[] = [];\n\n // fetch assessment details from OneTrust in parallel\n await map(\n assessmentBatch,\n async (assessment, index) => {\n const assessmentNumber = BATCH_SIZE * batch + index + 1;\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching details...`,\n );\n const { templateName, assessmentId } = assessment;\n const assessmentDetails = await getOneTrustAssessment({\n oneTrust,\n assessmentId,\n });\n // fetch assessment's creator information\n const creatorId = assessmentDetails.createdBy.id;\n let creator = oneTrustCachedUsers[creatorId];\n if (!creator) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching creator...`,\n );\n try {\n creator = await getOneTrustUser({\n oneTrust,\n userId: creatorId,\n });\n oneTrustCachedUsers[creatorId] = creator;\n } catch (e) 
{\n logger.warn(\n colors.yellow(\n `[assessment ${assessmentNumber} of ${assessments.length}]: failed to fetch form creator.` +\n `\\tcreatorId: ${creatorId}. Assessment Title: ${assessment.name}. Template Title: ${templateName}`,\n ),\n );\n }\n }\n\n // fetch assessment approvers information\n const { approvers } = assessmentDetails;\n let approversDetails: OneTrustGetUserResponse[][] = [];\n if (approvers.length > 0) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching approvers...`,\n );\n approversDetails = await map(\n approvers.map(({ id }) => id),\n async (userId) => {\n try {\n let approver = oneTrustCachedUsers[userId];\n if (!approver) {\n approver = await getOneTrustUser({ oneTrust, userId });\n oneTrustCachedUsers[userId] = approver;\n }\n return [approver];\n } catch (e) {\n logger.warn(\n colors.yellow(\n `[assessment ${assessmentNumber} of ${assessments.length}]: failed to fetch a form approver.` +\n `\\tapproverId: ${userId}. Assessment Title: ${assessment.name}. Template Title: ${templateName}`,\n ),\n );\n return [];\n }\n },\n { concurrency: 5 },\n );\n }\n\n // fetch assessment internal respondents information\n const { respondents } = assessmentDetails;\n // if a user is an internal respondents, their 'name' field can't be an email.\n const internalRespondents = respondents.filter(\n (r) => !r.name.includes('@'),\n );\n let respondentsDetails: OneTrustGetUserResponse[][] = [];\n if (internalRespondents.length > 0) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching respondents...`,\n );\n respondentsDetails = await map(\n internalRespondents.map(({ id }) => id),\n async (userId) => {\n try {\n let respondent = oneTrustCachedUsers[userId];\n if (!respondent) {\n respondent = await getOneTrustUser({ oneTrust, userId });\n oneTrustCachedUsers[userId] = respondent;\n }\n return [respondent];\n } catch (e) {\n logger.warn(\n colors.yellow(\n `[assessment ${assessmentNumber} of ${assessments.length}]: failed to fetch a respondent.` +\n `\\trespondentId: ${userId}. Assessment Title: ${assessment.name}. Template Title: ${templateName}`,\n ),\n );\n return [];\n }\n },\n { concurrency: 5 },\n );\n }\n\n // fetch assessment risk information\n let riskDetails: OneTrustGetRiskResponse[] = [];\n const riskIds = uniq(\n assessmentDetails.sections.flatMap((s: OneTrustAssessmentSection) =>\n s.questions.flatMap((q: OneTrustAssessmentQuestion) =>\n (q.risks ?? 
[]).flatMap((r) => r.riskId),\n ),\n ),\n );\n if (riskIds.length > 0) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching risks...`,\n );\n riskDetails = await map(\n riskIds,\n (riskId) => getOneTrustRisk({ oneTrust, riskId: riskId as string }),\n {\n concurrency: 5,\n },\n );\n }\n\n // enrich the assessments with user and risk details\n const enrichedAssessment = enrichOneTrustAssessment({\n assessment,\n assessmentDetails,\n riskDetails,\n creatorDetails: creator,\n approversDetails: approversDetails.flat(),\n respondentsDetails: respondentsDetails.flat(),\n });\n\n batchEnrichedAssessments.push(enrichedAssessment);\n },\n { concurrency: BATCH_SIZE },\n );\n\n // sync assessments in series to avoid concurrency bugs\n await mapSeries(\n batchEnrichedAssessments,\n async (enrichedAssessment, index) => {\n // the assessment's global index takes its batch into consideration\n const globalIndex = batch * BATCH_SIZE + index;\n\n if (dryRun && file) {\n // sync to file\n syncOneTrustAssessmentToDisk({\n assessment: enrichedAssessment,\n index: globalIndex,\n total: assessments.length,\n file,\n });\n } else if (transcend) {\n // sync to transcend\n await syncOneTrustAssessmentToTranscend({\n assessment: enrichedAssessment,\n transcend,\n total: assessments.length,\n index: globalIndex,\n });\n }\n },\n );\n });\n};\n","import { Got } from 'got';\nimport { logger } from '../../../logger';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport {\n OneTrustAssessment,\n OneTrustGetListOfAssessmentsResponse,\n} from '@transcend-io/privacy-types';\n\n/**\n * Fetch a list of all assessments from the OneTrust client.\n * ref: https://developer.onetrust.com/onetrust/reference/getallassessmentbasicdetailsusingget\n *\n * @param param - the information about the OneTrust client\n * @returns a list of OneTrustAssessment\n */\nexport const getListOfOneTrustAssessments = async ({\n oneTrust,\n}: {\n /** The OneTrust client instance */\n oneTrust: Got;\n}): Promise<OneTrustAssessment[]> => {\n let currentPage = 0;\n let totalPages = 1;\n let totalElements = 0;\n\n const allAssessments: OneTrustAssessment[] = [];\n\n while (currentPage < totalPages) {\n const { body } = await oneTrust.get(\n `api/assessment/v2/assessments?page=${currentPage}&size=2000`,\n );\n\n const { page, content } = decodeCodec(\n OneTrustGetListOfAssessmentsResponse,\n body,\n );\n allAssessments.push(...(content ?? []));\n if (currentPage === 0) {\n totalPages = page?.totalPages ?? 0;\n totalElements = page?.totalElements ?? 
0;\n }\n currentPage += 1;\n\n // log progress\n logger.info(\n `Fetched ${allAssessments.length} of ${totalElements} assessments.`,\n );\n }\n\n return allAssessments;\n};\n","import { Got } from 'got';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { OneTrustGetAssessmentResponse } from '@transcend-io/privacy-types';\n\n/**\n * Retrieve details about a particular assessment.\n * ref: https://developer.onetrust.com/onetrust/reference/exportassessmentusingget\n *\n * @param param - the information about the OneTrust client and assessment to retrieve\n * @returns details about the assessment\n */\nexport const getOneTrustAssessment = async ({\n oneTrust,\n assessmentId,\n}: {\n /** The OneTrust client instance */\n oneTrust: Got;\n /** The ID of the assessment to retrieve */\n assessmentId: string;\n}): Promise<OneTrustGetAssessmentResponse> => {\n const { body } = await oneTrust.get(\n `api/assessment/v2/assessments/${assessmentId}/export?ExcludeSkippedQuestions=false`,\n );\n\n return decodeCodec(OneTrustGetAssessmentResponse, body);\n};\n","import { Got } from 'got';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { OneTrustGetRiskResponse } from '@transcend-io/privacy-types';\n\n/**\n * Retrieve details about a particular risk.\n * ref: https://developer.onetrust.com/onetrust/reference/getriskusingget\n *\n * @param param - the information about the OneTrust client and risk to retrieve\n * @returns the OneTrust risk\n */\nexport const getOneTrustRisk = async ({\n oneTrust,\n riskId,\n}: {\n /** The OneTrust client instance */\n oneTrust: Got;\n /** The ID of the OneTrust risk to retrieve */\n riskId: string;\n}): Promise<OneTrustGetRiskResponse> => {\n const { body } = await oneTrust.get(`api/risk/v2/risks/${riskId}`);\n\n return decodeCodec(OneTrustGetRiskResponse, body);\n};\n","import { Got } from 'got';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { OneTrustGetUserResponse } from '@transcend-io/privacy-types';\n\n/**\n * Retrieve details about a particular user.\n * ref: https://developer.onetrust.com/onetrust/reference/getriskusingget\n *\n * @param param - the information about the OneTrust client and risk to retrieve\n * @returns the OneTrust risk\n */\nexport const getOneTrustUser = async ({\n oneTrust,\n userId,\n}: {\n /** The OneTrust client instance */\n oneTrust: Got;\n /** The ID of the OneTrust user to retrieve */\n userId: string;\n}): Promise<OneTrustGetUserResponse> => {\n const { body } = await oneTrust.get(`api/scim/v2/Users/${userId}`);\n\n return decodeCodec(OneTrustGetUserResponse, body);\n};\n","import {\n OneTrustAssessment,\n OneTrustEnrichedAssessment,\n OneTrustGetAssessmentResponse,\n OneTrustGetRiskResponse,\n OneTrustGetUserResponse,\n} from '@transcend-io/privacy-types';\nimport { keyBy } from 'lodash-es';\n\n/**\n * Merge the assessment, assessmentDetails, and riskDetails into one object.\n *\n * @param param - the assessment and risk information\n * @returns the assessment enriched with details and risk information\n */\nexport const enrichOneTrustAssessment = ({\n assessment,\n assessmentDetails,\n riskDetails,\n creatorDetails,\n approversDetails,\n respondentsDetails,\n}: {\n /** The OneTrust risk details */\n riskDetails: OneTrustGetRiskResponse[];\n /** The OneTrust assessment as returned from Get List of Assessments endpoint */\n assessment: OneTrustAssessment;\n /** The OneTrust assessment details */\n assessmentDetails: OneTrustGetAssessmentResponse;\n /** The OneTrust assessment creator 
details */\n creatorDetails: OneTrustGetUserResponse;\n /** The OneTrust assessment approvers details */\n approversDetails: OneTrustGetUserResponse[];\n /** The OneTrust assessment internal respondents details */\n respondentsDetails: OneTrustGetUserResponse[];\n}): OneTrustEnrichedAssessment => {\n const riskDetailsById = keyBy(riskDetails, 'id');\n const { sections, createdBy, ...restAssessmentDetails } = assessmentDetails;\n const sectionsWithEnrichedRisk = sections.map((section) => {\n const { questions, ...restSection } = section;\n const enrichedQuestions = questions.map((question) => {\n const { risks, ...restQuestion } = question;\n const enrichedRisks = (risks ?? []).map((risk) => {\n const details = riskDetailsById[risk.riskId];\n return {\n ...risk,\n ...details,\n level: risk.level,\n impactLevel: risk.impactLevel ?? 0,\n };\n });\n return {\n ...restQuestion,\n risks: enrichedRisks,\n };\n });\n return {\n ...restSection,\n questions: enrichedQuestions,\n };\n });\n\n // grab creator details\n const enrichedCreatedBy = {\n ...createdBy,\n active: creatorDetails?.active ?? false,\n userType: creatorDetails?.userType ?? 'Internal',\n emails: creatorDetails?.emails ?? [],\n title: creatorDetails?.title ?? null,\n givenName: creatorDetails?.name.givenName ?? null,\n familyName: creatorDetails?.name.familyName ?? null,\n };\n\n // grab approvers details\n const approverDetailsById = keyBy(approversDetails, 'id');\n const enrichedApprovers = assessmentDetails.approvers.flatMap(\n (originalApprover) =>\n approverDetailsById[originalApprover.id]\n ? [\n {\n ...originalApprover,\n approver: {\n ...originalApprover.approver,\n active: approverDetailsById[originalApprover.id].active,\n userType: approverDetailsById[originalApprover.id].userType,\n emails: approverDetailsById[originalApprover.id].emails,\n title: approverDetailsById[originalApprover.id].title,\n givenName:\n approverDetailsById[originalApprover.id].name.givenName ??\n null,\n familyName:\n approverDetailsById[originalApprover.id].name.familyName ??\n null,\n },\n },\n ]\n : [],\n );\n\n // grab respondents details\n const respondentsDetailsById = keyBy(respondentsDetails, 'id');\n const enrichedRespondents = assessmentDetails.respondents\n .filter((r) => !r.name.includes('@')) // search only internal respondents\n .flatMap((respondent) =>\n respondentsDetailsById[respondent.id]\n ? [\n {\n ...respondent,\n active: respondentsDetailsById[respondent.id].active,\n userType: respondentsDetailsById[respondent.id].userType,\n emails: respondentsDetailsById[respondent.id].emails,\n title: respondentsDetailsById[respondent.id].title,\n givenName:\n respondentsDetailsById[respondent.id].name.givenName ?? null,\n familyName:\n respondentsDetailsById[respondent.id].name.familyName ?? 
null,\n },\n ]\n : [],\n );\n\n // combine everything into a single enriched assessment\n return {\n ...assessment,\n ...restAssessmentDetails,\n approvers: enrichedApprovers,\n respondents: enrichedRespondents,\n createdBy: enrichedCreatedBy,\n sections: sectionsWithEnrichedRisk,\n };\n};\n","import { logger } from '../../../logger';\nimport colors from 'colors';\nimport { GraphQLClient } from 'graphql-request';\nimport {\n IMPORT_ONE_TRUST_ASSESSMENT_FORMS,\n makeGraphQLRequest,\n} from '../../graphql';\nimport { ImportOnetrustAssessmentsInput } from '../../../codecs';\nimport { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\nimport { oneTrustAssessmentToJson } from './oneTrustAssessmentToJson';\n\nexport interface AssessmentForm {\n /** ID of Assessment Form */\n id: string;\n /** Title of Assessment Form */\n name: string;\n}\n\n/**\n * Write the assessment to a Transcend instance.\n *\n *\n * @param param - information about the assessment and Transcend instance to write to\n */\nexport const syncOneTrustAssessmentToTranscend = async ({\n transcend,\n assessment,\n total,\n index,\n}: {\n /** the Transcend client instance */\n transcend: GraphQLClient;\n /** the assessment to sync to Transcend */\n assessment: OneTrustEnrichedAssessment;\n /** The index of the assessment being written to the file */\n index: number;\n /** The total amount of assessments that we will write */\n total?: number;\n}): Promise<void> => {\n logger.info(\n colors.magenta(\n `Writing enriched assessment ${index + 1} ${\n total ? `of ${total} ` : ' '\n }to Transcend...`,\n ),\n );\n\n // convert the OneTrust assessment object into a json record\n const json = oneTrustAssessmentToJson({\n assessment,\n index,\n total,\n });\n\n // transform the json record into a valid input to the mutation\n const input: ImportOnetrustAssessmentsInput = {\n json,\n };\n\n try {\n await makeGraphQLRequest<{\n /** the importOneTrustAssessmentForms mutation */\n importOneTrustAssessmentForms: {\n /** Created Assessment Forms */\n assessmentForms: AssessmentForm[];\n };\n }>(transcend, IMPORT_ONE_TRUST_ASSESSMENT_FORMS, {\n input,\n });\n } catch (e) {\n logger.error(\n colors.red(\n `Failed to sync assessment ${index + 1} ${\n total ? `of ${total} ` : ' '\n }to Transcend.\\n` +\n `\\tAssessment Title: ${assessment.name}. 
Template Title: ${assessment.template.name}\\n`,\n ),\n );\n }\n};\n","import { decodeCodec } from '@transcend-io/type-utils';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport JSONStream from 'JSONStream';\n\nimport { createReadStream } from 'fs';\nimport { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\nimport { syncOneTrustAssessmentToTranscend } from './syncOneTrustAssessmentToTranscend';\nimport { GraphQLClient } from 'graphql-request';\n\n/**\n * Reads assessments from a file and syncs them to Transcend.\n *\n * @param param - the information about the source file and Transcend instance to write them to.\n */\nexport const syncOneTrustAssessmentsFromFile = ({\n transcend,\n file,\n}: {\n /** the Transcend client instance */\n transcend: GraphQLClient;\n /** The name of the file from which to read the OneTrust assessments */\n file: string;\n}): Promise<void> => {\n logger.info(`Getting list of all assessments from file ${file}...`);\n\n return new Promise((resolve, reject) => {\n // Create a readable stream from the file\n const fileStream = createReadStream(file, {\n encoding: 'utf-8',\n highWaterMark: 64 * 1024, // 64KB chunks\n });\n\n // Create a JSONStream parser to parse the array of OneTrust assessments from the file\n const parser = JSONStream.parse('*'); // '*' matches each element in the root array\n\n let index = 0;\n\n // Pipe the file stream into the JSON parser\n fileStream.pipe(parser);\n\n // Handle each parsed assessment object\n parser.on('data', async (assessment) => {\n try {\n // Pause the stream while processing to avoid overwhelming memory\n parser.pause();\n\n // Decode and validate the assessment\n const parsedAssessment = decodeCodec(\n OneTrustEnrichedAssessment,\n assessment,\n );\n\n // Sync the assessment to transcend\n await syncOneTrustAssessmentToTranscend({\n assessment: parsedAssessment,\n transcend,\n index,\n });\n\n index += 1;\n\n // Resume the stream after processing\n parser.resume();\n } catch (e) {\n // if failed to parse a line, report error and continue\n logger.error(\n colors.red(\n `Failed to parse the assessment ${index} from file '${file}': ${e.message}.`,\n ),\n );\n }\n });\n\n // Handle completion\n parser.on('end', () => {\n logger.info(`Finished processing ${index} assessments from file ${file}`);\n resolve();\n });\n\n // Handle stream or parsing errors\n parser.on('error', (error) => {\n logger.error(\n colors.red(`Error parsing file '${file}': ${error.message}`),\n );\n reject(error);\n });\n\n fileStream.on('error', (error) => {\n logger.error(\n colors.red(`Error reading file '${file}': ${error.message}`),\n );\n reject(error);\n });\n 
});\n};\n"],"mappings":"8KAEA,OAAOA,OAAY,SCFnB,OAAOC,MAAkB,MAQlB,IAAMC,EAA4B,CAAC,CACxC,SAAAC,EACA,KAAAC,CACF,IAMEH,EAAI,OAAO,CACT,UAAW,WAAWE,CAAQ,GAC9B,QAAS,CACP,OAAQ,mBACR,eAAgB,mBAChB,cAAe,UAAUC,CAAI,EAC/B,CACF,CAAC,ECvBH,OAAOC,OAAY,SACnB,OAAOC,OAAW,eAOlB,IAAMC,GAAkB,OAAO,OAAOC,CAAoB,ECR1D,OAAOC,OAAY,SACnB,OAAOC,MAAQ,KCMR,IAAMC,EAA2B,CAAC,CACvC,WAAAC,EACA,MAAAC,EACA,MAAAC,EACA,KAAAC,EAAO,EACT,IASc,CACZ,IAAIC,EAAY,IAEZH,IAAU,GAAKE,KACjBC,EAAY;AAAA,GAGd,IAAMC,EAAwB,KAAK,UAAUL,CAAU,EAGjDM,EAAQJ,GAASD,EAAQC,EAAQ,GAAK,CAACC,EAAO,IAAM,GAG1D,OAAAC,EAAY,GAAGA,EAAYC,EAAwBC,CAAK;AAAA,GAGnDJ,GAASD,IAAUC,EAAQ,GAAMC,KACpCC,GAAa;AAAA,IAGRA,CACT,ED/BO,IAAMG,EAA+B,CAAC,CAC3C,KAAAC,EACA,WAAAC,EACA,MAAAC,EACA,MAAAC,CACF,IASY,CACVC,EAAO,KACLC,GAAO,QACL,+BACEH,EAAQ,CACV,OAAOC,CAAK,aAAaH,CAAI,MAC/B,CACF,EAEIE,IAAU,EACZI,EAAG,cACDN,EACAO,EAAyB,CACvB,WAAAN,EACA,MAAAC,EACA,MAAAC,EACA,KAAM,EACR,CAAC,CACH,EAEAG,EAAG,eACDN,EACAO,EAAyB,CACvB,WAAAN,EACA,MAAAC,EACA,MAAAC,EACA,KAAM,EACR,CAAC,CACH,CAEJ,EEvDA,OAAOK,MAAY,SCCnB,OAAS,eAAAC,OAAmB,2BAC5B,OAEE,wCAAAC,OACK,8BASA,IAAMC,EAA+B,MAAO,CACjD,SAAAC,CACF,IAGqC,CACnC,IAAIC,EAAc,EACdC,EAAa,EACbC,EAAgB,EAEdC,EAAuC,CAAC,EAE9C,KAAOH,EAAcC,GAAY,CAC/B,GAAM,CAAE,KAAAG,CAAK,EAAI,MAAML,EAAS,IAC9B,sCAAsCC,CAAW,YACnD,EAEM,CAAE,KAAAK,EAAM,QAAAC,CAAQ,EAAIV,GACxBC,GACAO,CACF,EACAD,EAAe,KAAK,GAAIG,GAAW,CAAC,CAAE,EAClCN,IAAgB,IAClBC,EAAaI,GAAM,YAAc,EACjCH,EAAgBG,GAAM,eAAiB,GAEzCL,GAAe,EAGfO,EAAO,KACL,WAAWJ,EAAe,MAAM,OAAOD,CAAa,eACtD,CACF,CAEA,OAAOC,CACT,ECjDA,OAAS,eAAAK,OAAmB,2BAC5B,OAAS,iCAAAC,OAAqC,8BASvC,IAAMC,EAAwB,MAAO,CAC1C,SAAAC,EACA,aAAAC,CACF,IAK8C,CAC5C,GAAM,CAAE,KAAAC,CAAK,EAAI,MAAMF,EAAS,IAC9B,iCAAiCC,CAAY,uCAC/C,EAEA,OAAOJ,GAAYC,GAA+BI,CAAI,CACxD,ECxBA,OAAS,eAAAC,OAAmB,2BAC5B,OAAS,2BAAAC,OAA+B,8BASjC,IAAMC,EAAkB,MAAO,CACpC,SAAAC,EACA,OAAAC,CACF,IAKwC,CACtC,GAAM,CAAE,KAAAC,CAAK,EAAI,MAAMF,EAAS,IAAI,qBAAqBC,CAAM,EAAE,EAEjE,OAAOJ,GAAYC,GAAyBI,CAAI,CAClD,ECtBA,OAAS,eAAAC,OAAmB,2BAC5B,OAAS,2BAAAC,OAA+B,8BASjC,IAAMC,EAAkB,MAAO,CACpC,SAAAC,EACA,OAAAC,CACF,IAKwC,CACtC,GAAM,CAAE,KAAAC,CAAK,EAAI,MAAMF,EAAS,IAAI,qBAAqBC,CAAM,EAAE,EAEjE,OAAOJ,GAAYC,GAAyBI,CAAI,CAClD,EJNA,OAAS,QAAAC,OAAY,YKVrB,OAAS,SAAAC,MAAa,YAQf,IAAMC,EAA2B,CAAC,CACvC,WAAAC,EACA,kBAAAC,EACA,YAAAC,EACA,eAAAC,EACA,iBAAAC,EACA,mBAAAC,CACF,IAakC,CAChC,IAAMC,EAAkBR,EAAMI,EAAa,IAAI,EACzC,CAAE,SAAAK,EAAU,UAAAC,EAAW,GAAGC,CAAsB,EAAIR,EACpDS,EAA2BH,EAAS,IAAKI,GAAY,CACzD,GAAM,CAAE,UAAAC,EAAW,GAAGC,CAAY,EAAIF,EAChCG,EAAoBF,EAAU,IAAKG,GAAa,CACpD,GAAM,CAAE,MAAAC,EAAO,GAAGC,CAAa,EAAIF,EAC7BG,GAAiBF,GAAS,CAAC,GAAG,IAAKG,GAAS,CAChD,IAAMC,EAAUd,EAAgBa,EAAK,MAAM,EAC3C,MAAO,CACL,GAAGA,EACH,GAAGC,EACH,MAAOD,EAAK,MACZ,YAAaA,EAAK,aAAe,CACnC,CACF,CAAC,EACD,MAAO,CACL,GAAGF,EACH,MAAOC,CACT,CACF,CAAC,EACD,MAAO,CACL,GAAGL,EACH,UAAWC,CACb,CACF,CAAC,EAGKO,EAAoB,CACxB,GAAGb,EACH,OAAQL,GAAgB,QAAU,GAClC,SAAUA,GAAgB,UAAY,WACtC,OAAQA,GAAgB,QAAU,CAAC,EACnC,MAAOA,GAAgB,OAAS,KAChC,UAAWA,GAAgB,KAAK,WAAa,KAC7C,WAAYA,GAAgB,KAAK,YAAc,IACjD,EAGMmB,EAAsBxB,EAAMM,EAAkB,IAAI,EAClDmB,EAAoBtB,EAAkB,UAAU,QACnDuB,GACCF,EAAoBE,EAAiB,EAAE,EACnC,CACE,CACE,GAAGA,EACH,SAAU,CACR,GAAGA,EAAiB,SACpB,OAAQF,EAAoBE,EAAiB,EAAE,EAAE,OACjD,SAAUF,EAAoBE,EAAiB,EAAE,EAAE,SACnD,OAAQF,EAAoBE,EAAiB,EAAE,EAAE,OACjD,MAAOF,EAAoBE,EAAiB,EAAE,EAAE,MAChD,UACEF,EAAoBE,EAAiB,EAAE,EAAE,KAAK,WAC9C,KACF,WACEF,EAAoBE,EAAiB,EAAE,EAAE,KAAK,YAC9C,IACJ,CACF,CACF,EACA,CAAC,CACT,EAGMC,EAAyB3B,EAAMO,EAAoB,IAAI,EACvDqB,EAAsBzB,EAAkB,YAC3C,OAAQ0B,GAAM,CAACA,EAAE,KAAK,SAAS,GAAG,CAAC,EACnC,QAASC,GACRH,EAAuBG,EAAW,EAAE,EAChC,CACE,CACE,GAAGA,EACH,OAAQH,EAAuBG,EAAW,EAAE,EAAE,OAC9C,SAAUH,EAAuBG,EAAW,EAAE,EAAE,SA
ChD,OAAQH,EAAuBG,EAAW,EAAE,EAAE,OAC9C,MAAOH,EAAuBG,EAAW,EAAE,EAAE,MAC7C,UACEH,EAAuBG,EAAW,EAAE,EAAE,KAAK,WAAa,KAC1D,WACEH,EAAuBG,EAAW,EAAE,EAAE,KAAK,YAAc,IAC7D,CACF,EACA,CAAC,CACP,EAGF,MAAO,CACL,GAAG5B,EACH,GAAGS,EACH,UAAWc,EACX,YAAaG,EACb,UAAWL,EACX,SAAUX,CACZ,CACF,ECjIA,OAAOmB,MAAY,SAuBZ,IAAMC,EAAoC,MAAO,CACtD,UAAAC,EACA,WAAAC,EACA,MAAAC,EACA,MAAAC,CACF,IASqB,CACnBC,EAAO,KACLC,EAAO,QACL,+BAA+BF,EAAQ,CAAC,IACtCD,EAAQ,MAAMA,CAAK,IAAM,GAC3B,iBACF,CACF,EAUA,IAAMI,EAAwC,CAC5C,KARWC,EAAyB,CACpC,WAAAN,EACA,MAAAE,EACA,MAAAD,CACF,CAAC,CAKD,EAEA,GAAI,CACF,MAAMM,EAMHR,EAAWS,EAAmC,CAC/C,MAAAH,CACF,CAAC,CACH,MAAY,CACVF,EAAO,MACLC,EAAO,IACL,6BAA6BF,EAAQ,CAAC,IACpCD,EAAQ,MAAMA,CAAK,IAAM,GAC3B;AAAA,qBACyBD,EAAW,IAAI,qBAAqBA,EAAW,SAAS,IAAI;AAAA,CACvF,CACF,CACF,CACF,EN5CO,IAAMS,EAAsC,MAAO,CACxD,SAAAC,EACA,KAAAC,EACA,OAAAC,EACA,UAAAC,CACF,IASqB,CAEnBC,EAAO,KAAK,kDAAkD,EAC9D,IAAMC,EAAc,MAAMC,EAA6B,CAAE,SAAAN,CAAS,CAAC,EAG7DO,EAA+D,CAAC,EAGhEC,EAAa,EACbC,EAAoB,MAAM,KAC9B,CACE,OAAQ,KAAK,KAAKJ,EAAY,OAASG,CAAU,CACnD,EACA,CAACE,EAAGC,IAAMN,EAAY,MAAMM,EAAIH,GAAaG,EAAI,GAAKH,CAAU,CAClE,EAGA,MAAMI,EAAUH,EAAmB,MAAOI,EAAiBC,IAAU,CACnE,IAAMC,EAAyD,CAAC,EAGhE,MAAMC,EACJH,EACA,MAAOI,EAAYC,IAAU,CAC3B,IAAMC,EAAmBX,EAAaM,EAAQI,EAAQ,EACtDd,EAAO,KACL,eAAee,CAAgB,OAAOd,EAAY,MAAM,wBAC1D,EACA,GAAM,CAAE,aAAAe,EAAc,aAAAC,CAAa,EAAIJ,EACjCK,EAAoB,MAAMC,EAAsB,CACpD,SAAAvB,EACA,aAAAqB,CACF,CAAC,EAEKG,EAAYF,EAAkB,UAAU,GAC1CG,EAAUlB,EAAoBiB,CAAS,EAC3C,GAAI,CAACC,EAAS,CACZrB,EAAO,KACL,eAAee,CAAgB,OAAOd,EAAY,MAAM,wBAC1D,EACA,GAAI,CACFoB,EAAU,MAAMC,EAAgB,CAC9B,SAAA1B,EACA,OAAQwB,CACV,CAAC,EACDjB,EAAoBiB,CAAS,EAAIC,CACnC,MAAY,CACVrB,EAAO,KACLuB,EAAO,OACL,eAAeR,CAAgB,OAAOd,EAAY,MAAM,+CACtCmB,CAAS,uBAAuBP,EAAW,IAAI,qBAAqBG,CAAY,EACpG,CACF,CACF,CACF,CAGA,GAAM,CAAE,UAAAQ,CAAU,EAAIN,EAClBO,EAAgD,CAAC,EACjDD,EAAU,OAAS,IACrBxB,EAAO,KACL,eAAee,CAAgB,OAAOd,EAAY,MAAM,0BAC1D,EACAwB,EAAmB,MAAMb,EACvBY,EAAU,IAAI,CAAC,CAAE,GAAAE,CAAG,IAAMA,CAAE,EAC5B,MAAOC,GAAW,CAChB,GAAI,CACF,IAAIC,EAAWzB,EAAoBwB,CAAM,EACzC,OAAKC,IACHA,EAAW,MAAMN,EAAgB,CAAE,SAAA1B,EAAU,OAAA+B,CAAO,CAAC,EACrDxB,EAAoBwB,CAAM,EAAIC,GAEzB,CAACA,CAAQ,CAClB,MAAY,CACV,OAAA5B,EAAO,KACLuB,EAAO,OACL,eAAeR,CAAgB,OAAOd,EAAY,MAAM,mDACrC0B,CAAM,uBAAuBd,EAAW,IAAI,qBAAqBG,CAAY,EAClG,CACF,EACO,CAAC,CACV,CACF,EACA,CAAE,YAAa,CAAE,CACnB,GAIF,GAAM,CAAE,YAAAa,CAAY,EAAIX,EAElBY,EAAsBD,EAAY,OACrCE,GAAM,CAACA,EAAE,KAAK,SAAS,GAAG,CAC7B,EACIC,EAAkD,CAAC,EACnDF,EAAoB,OAAS,IAC/B9B,EAAO,KACL,eAAee,CAAgB,OAAOd,EAAY,MAAM,4BAC1D,EACA+B,EAAqB,MAAMpB,EACzBkB,EAAoB,IAAI,CAAC,CAAE,GAAAJ,CAAG,IAAMA,CAAE,EACtC,MAAOC,GAAW,CAChB,GAAI,CACF,IAAIM,EAAa9B,EAAoBwB,CAAM,EAC3C,OAAKM,IACHA,EAAa,MAAMX,EAAgB,CAAE,SAAA1B,EAAU,OAAA+B,CAAO,CAAC,EACvDxB,EAAoBwB,CAAM,EAAIM,GAEzB,CAACA,CAAU,CACpB,MAAY,CACV,OAAAjC,EAAO,KACLuB,EAAO,OACL,eAAeR,CAAgB,OAAOd,EAAY,MAAM,kDACnC0B,CAAM,uBAAuBd,EAAW,IAAI,qBAAqBG,CAAY,EACpG,CACF,EACO,CAAC,CACV,CACF,EACA,CAAE,YAAa,CAAE,CACnB,GAIF,IAAIkB,EAAyC,CAAC,EACxCC,EAAUC,GACdlB,EAAkB,SAAS,QAASmB,GAClCA,EAAE,UAAU,QAASC,IAClBA,EAAE,OAAS,CAAC,GAAG,QAASP,GAAMA,EAAE,MAAM,CACzC,CACF,CACF,EACII,EAAQ,OAAS,IACnBnC,EAAO,KACL,eAAee,CAAgB,OAAOd,EAAY,MAAM,sBAC1D,EACAiC,EAAc,MAAMtB,EAClBuB,EACCI,GAAWC,EAAgB,CAAE,SAAA5C,EAAU,OAAQ2C,CAAiB,CAAC,EAClE,CACE,YAAa,CACf,CACF,GAIF,IAAME,EAAqBC,EAAyB,CAClD,WAAA7B,EACA,kBAAAK,EACA,YAAAgB,EACA,eAAgBb,EAChB,iBAAkBI,EAAiB,KAAK,EACxC,mBAAoBO,EAAmB,KAAK,CAC9C,CAAC,EAEDrB,EAAyB,KAAK8B,CAAkB,CAClD,EACA,CAAE,YAAarC,CAAW,CAC5B,EAGA,MAAMI,EACJG,EACA,MAAO8B,EAAoB3B,IAAU,CAEnC,IAAM6B,EAAcjC,EAAQN,EAAaU,EAErChB,GAAUD,EAEZ+C,EAA6B,CAC3B,WAAYH,EACZ,MAAOE,EACP,MAAO1C,EAAY,OACnB,KAAAJ,CACF,CAAC,EACQE,GAET,M
AAM8C,EAAkC,CACtC,WAAYJ,EACZ,UAAA1C,EACA,MAAOE,EAAY,OACnB,MAAO0C,CACT,CAAC,CAEL,CACF,CACF,CAAC,CACH,EO5OA,OAAS,eAAAG,OAAmB,2BAC5B,OAAOC,MAAY,SAEnB,OAAOC,OAAgB,aAEvB,OAAS,oBAAAC,OAAwB,KACjC,OAAS,8BAAAC,OAAkC,8BASpC,IAAMC,EAAkC,CAAC,CAC9C,UAAAC,EACA,KAAAC,CACF,KAMEC,EAAO,KAAK,6CAA6CD,CAAI,KAAK,EAE3D,IAAI,QAAQ,CAACE,EAASC,IAAW,CAEtC,IAAMC,EAAaC,GAAiBL,EAAM,CACxC,SAAU,QACV,cAAe,KACjB,CAAC,EAGKM,EAASC,GAAW,MAAM,GAAG,EAE/BC,EAAQ,EAGZJ,EAAW,KAAKE,CAAM,EAGtBA,EAAO,GAAG,OAAQ,MAAOG,GAAe,CACtC,GAAI,CAEFH,EAAO,MAAM,EAGb,IAAMI,EAAmBC,GACvBC,GACAH,CACF,EAGA,MAAMI,EAAkC,CACtC,WAAYH,EACZ,UAAAX,EACA,MAAAS,CACF,CAAC,EAEDA,GAAS,EAGTF,EAAO,OAAO,CAChB,OAASQ,EAAG,CAEVb,EAAO,MACLc,EAAO,IACL,kCAAkCP,CAAK,eAAeR,CAAI,MAAMc,EAAE,OAAO,GAC3E,CACF,CACF,CACF,CAAC,EAGDR,EAAO,GAAG,MAAO,IAAM,CACrBL,EAAO,KAAK,uBAAuBO,CAAK,0BAA0BR,CAAI,EAAE,EACxEE,EAAQ,CACV,CAAC,EAGDI,EAAO,GAAG,QAAUU,GAAU,CAC5Bf,EAAO,MACLc,EAAO,IAAI,uBAAuBf,CAAI,MAAMgB,EAAM,OAAO,EAAE,CAC7D,EACAb,EAAOa,CAAK,CACd,CAAC,EAEDZ,EAAW,GAAG,QAAUY,GAAU,CAChCf,EAAO,MACLc,EAAO,IAAI,uBAAuBf,CAAI,MAAMgB,EAAM,OAAO,EAAE,CAC7D,EACAb,EAAOa,CAAK,CACd,CAAC,CACH,CAAC,GZjEH,eAAsBC,GAEpB,CACE,SAAAC,EACA,aAAAC,EACA,OAAAC,EACA,cAAAC,EACA,aAAAC,EACA,SAAAC,EACA,KAAAC,EACA,OAAAC,EACA,MAAAC,CACF,EACe,CAEf,GAAI,CAACD,GAAU,CAACJ,EACd,MAAM,IAAI,MAER,oHACF,EAIF,GAAII,GAAU,CAACD,EACb,MAAM,IAAI,MACR,6FACF,EAGF,GAAIA,EAAM,CACR,IAAMG,EAAYH,EAAK,MAAM,GAAG,EAChC,GAAIG,EAAU,OAAS,EACrB,MAAM,IAAI,MACR,6GACF,EAEF,GAAIA,EAAU,GAAG,EAAE,IAAM,OACvB,MAAM,IAAI,MACR,iDAAiDH,CAAI,4BAEtCG,EAAU,GAAG,EAAE,CAAC,IACjC,CAEJ,CAGA,GAAIP,IAAW,WAA6B,CAE1C,GAAI,CAACF,EACH,MAAM,IAAI,MACR,iFACF,EAGF,GAAI,CAACC,EACH,MAAM,IAAI,MACR,sFACF,CAEJ,KAAO,CAEL,GAAI,CAACK,EACH,MAAM,IAAI,MACR,iHACF,EAIF,GAAIC,EACF,MAAM,IAAI,iIAGV,CAEJ,CAGA,IAAMG,EACJV,GAAYC,EACRU,EAA0B,CACxB,SAAAX,EACA,KAAMC,CACR,CAAC,EACD,OAGAW,EACJR,GAAgBD,EACZU,EAA4BT,EAAcD,CAAa,EACvD,OAEN,GAAI,CACEE,IAAa,gBACXH,IAAW,YAA+BQ,EAC5C,MAAMI,EAAoC,CACxC,SAAAJ,EACA,KAAAJ,EACA,OAAAC,EACA,GAAIK,GAAa,CAAE,UAAAA,CAAU,CAC/B,CAAC,EACQV,IAAW,QAA2BI,GAAQM,GACvD,MAAMG,EAAgC,CAAE,KAAAT,EAAM,UAAAM,CAAU,CAAC,EAG/D,OAASI,EAAK,CACZ,MAAM,IAAI,MACR,0CAA0CX,CAAQ,mBAChDG,EAAQQ,EAAI,MAAQA,EAAI,OAC1B,EACF,CACF,CAGAC,EAAO,KACLC,GAAO,MACL,gCAAgCb,CAAQ,OACtCE,EAAS,YAAYD,CAAI,IAAM,WACjC,GACF,CACF,CACF","names":["colors","got","createOneTrustGotInstance","hostname","auth","colors","yargs","VALID_RESOURCES","OneTrustPullResource","colors","fs","oneTrustAssessmentToJson","assessment","index","total","wrap","jsonEntry","stringifiedAssessment","comma","syncOneTrustAssessmentToDisk","file","assessment","index","total","logger","colors","fs","oneTrustAssessmentToJson","colors","decodeCodec","OneTrustGetListOfAssessmentsResponse","getListOfOneTrustAssessments","oneTrust","currentPage","totalPages","totalElements","allAssessments","body","page","content","logger","decodeCodec","OneTrustGetAssessmentResponse","getOneTrustAssessment","oneTrust","assessmentId","body","decodeCodec","OneTrustGetRiskResponse","getOneTrustRisk","oneTrust","riskId","body","decodeCodec","OneTrustGetUserResponse","getOneTrustUser","oneTrust","userId","body","uniq","keyBy","enrichOneTrustAssessment","assessment","assessmentDetails","riskDetails","creatorDetails","approversDetails","respondentsDetails","riskDetailsById","sections","createdBy","restAssessmentDetails","sectionsWithEnrichedRisk","section","questions","restSection","enrichedQuestions","question","risks","restQuestion","enrichedRisks","risk","details","enrichedCreatedBy","approverDetailsById","enrichedApprovers","originalApprover","respondentsDetailsById","enrichedRespo
ndents","r","respondent","colors","syncOneTrustAssessmentToTranscend","transcend","assessment","total","index","logger","colors","input","oneTrustAssessmentToJson","makeGraphQLRequest","IMPORT_ONE_TRUST_ASSESSMENT_FORMS","syncOneTrustAssessmentsFromOneTrust","oneTrust","file","dryRun","transcend","logger","assessments","getListOfOneTrustAssessments","oneTrustCachedUsers","BATCH_SIZE","assessmentBatches","_","i","mapSeries","assessmentBatch","batch","batchEnrichedAssessments","map","assessment","index","assessmentNumber","templateName","assessmentId","assessmentDetails","getOneTrustAssessment","creatorId","creator","getOneTrustUser","colors","approvers","approversDetails","id","userId","approver","respondents","internalRespondents","r","respondentsDetails","respondent","riskDetails","riskIds","uniq","s","q","riskId","getOneTrustRisk","enrichedAssessment","enrichOneTrustAssessment","globalIndex","syncOneTrustAssessmentToDisk","syncOneTrustAssessmentToTranscend","decodeCodec","colors","JSONStream","createReadStream","OneTrustEnrichedAssessment","syncOneTrustAssessmentsFromFile","transcend","file","logger","resolve","reject","fileStream","createReadStream","parser","JSONStream","index","assessment","parsedAssessment","decodeCodec","OneTrustEnrichedAssessment","syncOneTrustAssessmentToTranscend","e","colors","error","syncOt","hostname","oneTrustAuth","source","transcendAuth","transcendUrl","resource","file","dryRun","debug","splitFile","oneTrust","createOneTrustGotInstance","transcend","buildTranscendGraphQLClient","syncOneTrustAssessmentsFromOneTrust","syncOneTrustAssessmentsFromFile","err","logger","colors"]}
@@ -1,2 +0,0 @@
- import{c as o}from"./chunk-XNR74SBS.js";import"./chunk-WSDWILYI.js";import"./chunk-ZTD7APNF.js";import"./chunk-LAYHULHH.js";import"./chunk-347UQP43.js";import"./chunk-TDBKATQK.js";import"./chunk-43JWXG77.js";import"./chunk-L5ULN3IT.js";import"./chunk-SF46ZLPT.js";import"./chunk-ARVEJERC.js";async function i({auth:t,trackerStatus:r,file:s,transcendUrl:a}){await o({auth:t,trackerStatus:r,file:s,transcendUrl:a})}export{i as uploadCookiesFromCsv};
- //# sourceMappingURL=impl-GZRQOFY6.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/commands/consent/upload-cookies-from-csv/impl.ts"],"sourcesContent":["import type { LocalContext } from '@/context';\nimport { uploadCookiesFromCsv as uploadCookiesFromCsvHelper } from '@/lib/consent-manager';\nimport { ConsentTrackerStatus } from '@transcend-io/privacy-types';\n\ninterface UploadCookiesFromCsvCommandFlags {\n auth: string;\n trackerStatus: ConsentTrackerStatus;\n file: string;\n transcendUrl: string;\n}\n\nexport async function uploadCookiesFromCsv(\n this: LocalContext,\n { auth, trackerStatus, file, transcendUrl }: UploadCookiesFromCsvCommandFlags,\n): Promise<void> {\n // Upload cookies\n await uploadCookiesFromCsvHelper({\n auth,\n trackerStatus,\n file,\n transcendUrl,\n });\n}\n"],"mappings":"oSAWA,eAAsBA,EAEpB,CAAE,KAAAC,EAAM,cAAAC,EAAe,KAAAC,EAAM,aAAAC,CAAa,EAC3B,CAEf,MAAMJ,EAA2B,CAC/B,KAAAC,EACA,cAAAC,EACA,KAAAC,EACA,aAAAC,CACF,CAAC,CACH","names":["uploadCookiesFromCsv","auth","trackerStatus","file","transcendUrl"]}
@@ -1,2 +0,0 @@
- import{a as l}from"./chunk-KRN6Q433.js";import"./chunk-7QHA6ZIV.js";import{c as m}from"./chunk-MA4JWWRO.js";import"./chunk-ZTD7APNF.js";import{pe as c}from"./chunk-TDBKATQK.js";import{a as r}from"./chunk-43JWXG77.js";import{d as g}from"./chunk-L5ULN3IT.js";import"./chunk-SF46ZLPT.js";import"./chunk-ARVEJERC.js";import{uniq as A,groupBy as G}from"lodash-es";import n from"colors";async function k({auth:p,file:a,transcendUrl:d,dataSiloIds:u,includeAttributes:y,includeGuessedCategories:C,parentCategories:f,subCategories:D=[]}){try{let o=c(d,p),P=await l(o,{dataSiloIds:u,includeGuessedCategories:C,parentCategories:f,includeAttributes:y,subCategories:D});r.info(n.magenta(`Writing datapoints to file "${a}"...`));let s=[],b=P.map(e=>{let i={"Property ID":e.id,"Data Silo":e.dataSilo.title,Object:e.dataPoint.name,"Object Path":e.dataPoint.path.join("."),Property:e.name,"Property Description":e.description,"Data Categories":e.categories.map(t=>`${t.category}:${t.name}`).join(", "),"Guessed Category":e.pendingCategoryGuesses?.[0]?`${e.pendingCategoryGuesses[0].category.category}:${e.pendingCategoryGuesses[0].category.name}`:"","Processing Purposes":e.purposes.map(t=>`${t.purpose}:${t.name}`).join(", "),...Object.entries(G(e.attributeValues||[],({attributeKey:t})=>t.name)).reduce((t,[$,h])=>(t[$]=h.map(j=>j.name).join(","),t),{})};return s=A([...s,...Object.keys(i)]),i});m(a,b,s)}catch(o){r.error(n.red(`An error occurred syncing the datapoints: ${o.message}`)),process.exit(1)}r.info(n.green(`Successfully synced datapoints to disk at ${a}! View at ${g}`))}export{k as pullDatapoints};
- //# sourceMappingURL=impl-HEC3SVYP.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/commands/inventory/pull-datapoints/impl.ts"],"sourcesContent":["import type { LocalContext } from '@/context';\nimport { uniq, groupBy } from 'lodash-es';\n\nimport { logger } from '@/logger';\nimport colors from 'colors';\nimport { buildTranscendGraphQLClient } from '@/lib/graphql';\nimport { ADMIN_DASH_DATAPOINTS } from '@/constants';\nimport { pullAllDatapoints } from '@/lib/data-inventory';\nimport { writeCsv } from '@/lib/cron';\nimport { DataCategoryType } from '@transcend-io/privacy-types';\n\ninterface PullDatapointsCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n includeAttributes: boolean;\n includeGuessedCategories: boolean;\n parentCategories?: DataCategoryType[];\n subCategories?: string[];\n}\n\nexport async function pullDatapoints(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n dataSiloIds,\n includeAttributes,\n includeGuessedCategories,\n parentCategories,\n subCategories = [],\n }: PullDatapointsCommandFlags,\n): Promise<void> {\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const dataPoints = await pullAllDatapoints(client, {\n dataSiloIds,\n includeGuessedCategories,\n parentCategories,\n includeAttributes,\n subCategories, // TODO: https://transcend.height.app/T-40482 - do by name not ID\n });\n\n logger.info(colors.magenta(`Writing datapoints to file \"${file}\"...`));\n let headers: string[] = [];\n const inputs = dataPoints.map((point) => {\n const result = {\n 'Property ID': point.id,\n 'Data Silo': point.dataSilo.title,\n Object: point.dataPoint.name,\n 'Object Path': point.dataPoint.path.join('.'),\n Property: point.name,\n 'Property Description': point.description,\n 'Data Categories': point.categories\n .map((category) => `${category.category}:${category.name}`)\n .join(', '),\n 'Guessed Category': point.pendingCategoryGuesses?.[0]\n ? `${point.pendingCategoryGuesses![0]!.category.category}:${\n point.pendingCategoryGuesses![0]!.category.name\n }`\n : '',\n 'Processing Purposes': point.purposes\n .map((purpose) => `${purpose.purpose}:${purpose.name}`)\n .join(', '),\n ...Object.entries(\n groupBy(\n point.attributeValues || [],\n ({ attributeKey }) => attributeKey.name,\n ),\n ).reduce((acc, [key, values]) => {\n acc[key] = values.map((value) => value.name).join(',');\n return acc;\n }, {} as Record<string, string>),\n };\n headers = uniq([...headers, ...Object.keys(result)]);\n return result;\n });\n writeCsv(file, inputs, headers);\n } catch (err) {\n logger.error(\n colors.red(`An error occurred syncing the datapoints: ${err.message}`),\n );\n process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced datapoints to disk at ${file}! 
View at ${ADMIN_DASH_DATAPOINTS}`,\n ),\n );\n}\n"],"mappings":"yTACA,OAAS,QAAAA,EAAM,WAAAC,MAAe,YAG9B,OAAOC,MAAY,SAkBnB,eAAsBC,EAEpB,CACE,KAAAC,EACA,KAAAC,EACA,aAAAC,EACA,YAAAC,EACA,kBAAAC,EACA,yBAAAC,EACA,iBAAAC,EACA,cAAAC,EAAgB,CAAC,CACnB,EACe,CACf,GAAI,CAEF,IAAMC,EAASC,EAA4BP,EAAcF,CAAI,EAEvDU,EAAa,MAAMC,EAAkBH,EAAQ,CACjD,YAAAL,EACA,yBAAAE,EACA,iBAAAC,EACA,kBAAAF,EACA,cAAAG,CACF,CAAC,EAEDK,EAAO,KAAKC,EAAO,QAAQ,+BAA+BZ,CAAI,MAAM,CAAC,EACrE,IAAIa,EAAoB,CAAC,EACnBC,EAASL,EAAW,IAAKM,GAAU,CACvC,IAAMC,EAAS,CACb,cAAeD,EAAM,GACrB,YAAaA,EAAM,SAAS,MAC5B,OAAQA,EAAM,UAAU,KACxB,cAAeA,EAAM,UAAU,KAAK,KAAK,GAAG,EAC5C,SAAUA,EAAM,KAChB,uBAAwBA,EAAM,YAC9B,kBAAmBA,EAAM,WACtB,IAAKE,GAAa,GAAGA,EAAS,QAAQ,IAAIA,EAAS,IAAI,EAAE,EACzD,KAAK,IAAI,EACZ,mBAAoBF,EAAM,yBAAyB,CAAC,EAChD,GAAGA,EAAM,uBAAwB,CAAC,EAAG,SAAS,QAAQ,IACpDA,EAAM,uBAAwB,CAAC,EAAG,SAAS,IAC7C,GACA,GACJ,sBAAuBA,EAAM,SAC1B,IAAKG,GAAY,GAAGA,EAAQ,OAAO,IAAIA,EAAQ,IAAI,EAAE,EACrD,KAAK,IAAI,EACZ,GAAG,OAAO,QACRC,EACEJ,EAAM,iBAAmB,CAAC,EAC1B,CAAC,CAAE,aAAAK,CAAa,IAAMA,EAAa,IACrC,CACF,EAAE,OAAO,CAACC,EAAK,CAACC,EAAKC,CAAM,KACzBF,EAAIC,CAAG,EAAIC,EAAO,IAAKC,GAAUA,EAAM,IAAI,EAAE,KAAK,GAAG,EAC9CH,GACN,CAAC,CAA2B,CACjC,EACA,OAAAR,EAAUY,EAAK,CAAC,GAAGZ,EAAS,GAAG,OAAO,KAAKG,CAAM,CAAC,CAAC,EAC5CA,CACT,CAAC,EACDU,EAAS1B,EAAMc,EAAQD,CAAO,CAChC,OAASc,EAAK,CACZhB,EAAO,MACLC,EAAO,IAAI,6CAA6Ce,EAAI,OAAO,EAAE,CACvE,EACA,QAAQ,KAAK,CAAC,CAChB,CAGAhB,EAAO,KACLC,EAAO,MACL,6CAA6CZ,CAAI,aAAa4B,CAAqB,EACrF,CACF,CACF","names":["uniq","groupBy","colors","pullDatapoints","auth","file","transcendUrl","dataSiloIds","includeAttributes","includeGuessedCategories","parentCategories","subCategories","client","buildTranscendGraphQLClient","dataPoints","pullAllDatapoints","logger","colors","headers","inputs","point","result","category","purpose","groupBy","attributeKey","acc","key","values","value","uniq","writeCsv","err","ADMIN_DASH_DATAPOINTS"]}
@@ -1,2 +0,0 @@
- import{W as e}from"./chunk-ZTD7APNF.js";import"./chunk-TDBKATQK.js";import"./chunk-43JWXG77.js";import"./chunk-L5ULN3IT.js";import"./chunk-SF46ZLPT.js";import"./chunk-ARVEJERC.js";async function s({auth:t,transcendUrl:i,identifierNames:n,actions:r=[]}){await e({requestActions:r,transcendUrl:i,auth:t,identifierNames:n})}export{s as rejectUnverifiedIdentifiers};
- //# sourceMappingURL=impl-HH24GIMG.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/commands/request/reject-unverified-identifiers/impl.ts"],"sourcesContent":["import type { LocalContext } from '@/context';\nimport { removeUnverifiedRequestIdentifiers } from '@/lib/requests';\nimport type { RequestAction } from '@transcend-io/privacy-types';\n\ninterface RejectUnverifiedIdentifiersCommandFlags {\n auth: string;\n identifierNames: string[];\n actions?: RequestAction[];\n transcendUrl: string;\n}\n\nexport async function rejectUnverifiedIdentifiers(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n identifierNames,\n actions = [],\n }: RejectUnverifiedIdentifiersCommandFlags,\n): Promise<void> {\n await removeUnverifiedRequestIdentifiers({\n requestActions: actions,\n transcendUrl,\n auth,\n identifierNames,\n });\n}\n"],"mappings":"oLAWA,eAAsBA,EAEpB,CACE,KAAAC,EACA,aAAAC,EACA,gBAAAC,EACA,QAAAC,EAAU,CAAC,CACb,EACe,CACf,MAAMC,EAAmC,CACvC,eAAgBD,EAChB,aAAAF,EACA,KAAAD,EACA,gBAAAE,CACF,CAAC,CACH","names":["rejectUnverifiedIdentifiers","auth","transcendUrl","identifierNames","actions","removeUnverifiedRequestIdentifiers"]}
@@ -1,2 +0,0 @@
- import{R as t}from"./chunk-ZTD7APNF.js";import"./chunk-TDBKATQK.js";import"./chunk-43JWXG77.js";import"./chunk-L5ULN3IT.js";import"./chunk-SF46ZLPT.js";import"./chunk-ARVEJERC.js";async function e({auth:i,transcendUrl:o,enricherIds:r}){await t({transcendUrl:o,auth:i,enricherIds:r})}export{e as skipPreflightJobs};
- //# sourceMappingURL=impl-I24OLEN5.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/commands/request/skip-preflight-jobs/impl.ts"],"sourcesContent":["import type { LocalContext } from '@/context';\nimport { skipPreflightJobs as skipPreflightJobsHelper } from '@/lib/requests';\n\ninterface SkipPreflightJobsCommandFlags {\n auth: string;\n enricherIds: string[];\n transcendUrl: string;\n}\n\nexport async function skipPreflightJobs(\n this: LocalContext,\n { auth, transcendUrl, enricherIds }: SkipPreflightJobsCommandFlags,\n): Promise<void> {\n await skipPreflightJobsHelper({\n transcendUrl,\n auth,\n enricherIds,\n });\n}\n"],"mappings":"oLASA,eAAsBA,EAEpB,CAAE,KAAAC,EAAM,aAAAC,EAAc,YAAAC,CAAY,EACnB,CACf,MAAMH,EAAwB,CAC5B,aAAAE,EACA,KAAAD,EACA,YAAAE,CACF,CAAC,CACH","names":["skipPreflightJobs","auth","transcendUrl","enricherIds"]}
@@ -1,2 +0,0 @@
- import{V as t}from"./chunk-ZTD7APNF.js";import"./chunk-TDBKATQK.js";import"./chunk-43JWXG77.js";import"./chunk-L5ULN3IT.js";import"./chunk-SF46ZLPT.js";import"./chunk-ARVEJERC.js";async function n({auth:s,dataSiloId:a,status:e,statuses:i,transcendUrl:o}){await t({transcendUrl:o,auth:s,status:e,dataSiloId:a,requestStatuses:i})}export{n as skipRequestDataSilos};
- //# sourceMappingURL=impl-IAXNYDJT.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/commands/request/system/skip-request-data-silos/impl.ts"],"sourcesContent":["import type { LocalContext } from '@/context';\nimport type { RequestStatus } from '@transcend-io/privacy-types';\nimport { skipRequestDataSilos as skipRequestDataSilosHelper } from '@/lib/requests';\n\ninterface SkipRequestDataSilosCommandFlags {\n auth: string;\n dataSiloId: string;\n transcendUrl: string;\n statuses: RequestStatus[];\n status: 'SKIPPED' | 'RESOLVED';\n}\n\nexport async function skipRequestDataSilos(\n this: LocalContext,\n {\n auth,\n dataSiloId,\n status,\n statuses,\n transcendUrl,\n }: SkipRequestDataSilosCommandFlags,\n): Promise<void> {\n await skipRequestDataSilosHelper({\n transcendUrl,\n auth,\n status,\n dataSiloId,\n requestStatuses: statuses,\n });\n}\n"],"mappings":"oLAYA,eAAsBA,EAEpB,CACE,KAAAC,EACA,WAAAC,EACA,OAAAC,EACA,SAAAC,EACA,aAAAC,CACF,EACe,CACf,MAAML,EAA2B,CAC/B,aAAAK,EACA,KAAAJ,EACA,OAAAE,EACA,WAAAD,EACA,gBAAiBE,CACnB,CAAC,CACH","names":["skipRequestDataSilos","auth","dataSiloId","status","statuses","transcendUrl"]}
@@ -1,2 +0,0 @@
- import{S as r}from"./chunk-ZTD7APNF.js";import"./chunk-TDBKATQK.js";import"./chunk-43JWXG77.js";import"./chunk-L5ULN3IT.js";import"./chunk-SF46ZLPT.js";import"./chunk-ARVEJERC.js";async function m({auth:n,enricherId:s,actions:i,requestEnricherStatuses:a,requestIds:c,createdAtBefore:e,createdAtAfter:t,concurrency:o,transcendUrl:u}){await r({auth:n,enricherId:s,requestActions:i,requestEnricherStatuses:a,requestIds:c,createdAtBefore:e?new Date(e):void 0,createdAtAfter:t?new Date(t):void 0,concurrency:o,transcendUrl:u})}export{m as enricherRestart};
- //# sourceMappingURL=impl-J33PI3PK.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/commands/request/enricher-restart/impl.ts"],"sourcesContent":["import type { LocalContext } from '@/context';\nimport { bulkRetryEnrichers } from '@/lib/requests';\nimport type {\n RequestAction,\n RequestEnricherStatus,\n} from '@transcend-io/privacy-types';\n\ninterface EnricherRestartCommandFlags {\n auth: string;\n enricherId: string;\n actions?: RequestAction[];\n requestEnricherStatuses?: RequestEnricherStatus[];\n transcendUrl: string;\n concurrency: number;\n requestIds?: string[];\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n}\n\nexport async function enricherRestart(\n this: LocalContext,\n {\n auth,\n enricherId,\n actions,\n requestEnricherStatuses,\n requestIds,\n createdAtBefore,\n createdAtAfter,\n concurrency,\n transcendUrl,\n }: EnricherRestartCommandFlags,\n): Promise<void> {\n await bulkRetryEnrichers({\n auth,\n enricherId,\n requestActions: actions,\n requestEnricherStatuses,\n requestIds,\n createdAtBefore: createdAtBefore ? new Date(createdAtBefore) : undefined,\n createdAtAfter: createdAtAfter ? new Date(createdAtAfter) : undefined,\n concurrency,\n transcendUrl,\n });\n}\n"],"mappings":"oLAmBA,eAAsBA,EAEpB,CACE,KAAAC,EACA,WAAAC,EACA,QAAAC,EACA,wBAAAC,EACA,WAAAC,EACA,gBAAAC,EACA,eAAAC,EACA,YAAAC,EACA,aAAAC,CACF,EACe,CACf,MAAMC,EAAmB,CACvB,KAAAT,EACA,WAAAC,EACA,eAAgBC,EAChB,wBAAAC,EACA,WAAAC,EACA,gBAAiBC,EAAkB,IAAI,KAAKA,CAAe,EAAI,OAC/D,eAAgBC,EAAiB,IAAI,KAAKA,CAAc,EAAI,OAC5D,YAAAC,EACA,aAAAC,CACF,CAAC,CACH","names":["enricherRestart","auth","enricherId","actions","requestEnricherStatuses","requestIds","createdAtBefore","createdAtAfter","concurrency","transcendUrl","bulkRetryEnrichers"]}
@@ -1,2 +0,0 @@
- import{Q as e}from"./chunk-ZTD7APNF.js";import"./chunk-TDBKATQK.js";import"./chunk-43JWXG77.js";import"./chunk-L5ULN3IT.js";import"./chunk-SF46ZLPT.js";import"./chunk-ARVEJERC.js";async function A({auth:t,requestReceiptFolder:s,sombraAuth:r,actions:a,statuses:o,requestIds:i,createdAt:n,emailIsVerified:c,silentModeBefore:u,sendEmailReceipt:d,copyIdentifiers:m,skipWaitingPeriod:l,createdAtBefore:p,createdAtAfter:f,concurrency:R,transcendUrl:q}){await e({requestReceiptFolder:s,auth:t,sombraAuth:r,requestActions:a,requestStatuses:o,requestIds:i,createdAt:n,emailIsVerified:c,silentModeBefore:u,sendEmailReceipt:d,copyIdentifiers:m,skipWaitingPeriod:l,createdAtBefore:p,createdAtAfter:f,concurrency:R,transcendUrl:q})}export{A as restart};
- //# sourceMappingURL=impl-JZDUGI7W.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/commands/request/restart/impl.ts"],"sourcesContent":["import type { LocalContext } from '@/context';\nimport { bulkRestartRequests } from '@/lib/requests';\nimport type { RequestAction, RequestStatus } from '@transcend-io/privacy-types';\n\ninterface RestartCommandFlags {\n auth: string;\n actions: RequestAction[];\n statuses: RequestStatus[];\n transcendUrl: string;\n requestReceiptFolder: string;\n sombraAuth?: string;\n concurrency: number;\n requestIds?: string[];\n emailIsVerified: boolean;\n createdAt?: Date;\n silentModeBefore?: Date;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n sendEmailReceipt: boolean;\n copyIdentifiers: boolean;\n skipWaitingPeriod: boolean;\n}\n\nexport async function restart(\n this: LocalContext,\n {\n auth,\n requestReceiptFolder,\n sombraAuth,\n actions,\n statuses,\n requestIds,\n createdAt,\n emailIsVerified,\n silentModeBefore,\n sendEmailReceipt,\n copyIdentifiers,\n skipWaitingPeriod,\n createdAtBefore,\n createdAtAfter,\n concurrency,\n transcendUrl,\n }: RestartCommandFlags,\n): Promise<void> {\n await bulkRestartRequests({\n requestReceiptFolder,\n auth,\n sombraAuth,\n requestActions: actions,\n requestStatuses: statuses,\n requestIds,\n createdAt,\n emailIsVerified,\n silentModeBefore,\n sendEmailReceipt,\n copyIdentifiers,\n skipWaitingPeriod,\n createdAtBefore,\n createdAtAfter,\n concurrency,\n transcendUrl,\n });\n}\n"],"mappings":"oLAuBA,eAAsBA,EAEpB,CACE,KAAAC,EACA,qBAAAC,EACA,WAAAC,EACA,QAAAC,EACA,SAAAC,EACA,WAAAC,EACA,UAAAC,EACA,gBAAAC,EACA,iBAAAC,EACA,iBAAAC,EACA,gBAAAC,EACA,kBAAAC,EACA,gBAAAC,EACA,eAAAC,EACA,YAAAC,EACA,aAAAC,CACF,EACe,CACf,MAAMC,EAAoB,CACxB,qBAAAf,EACA,KAAAD,EACA,WAAAE,EACA,eAAgBC,EAChB,gBAAiBC,EACjB,WAAAC,EACA,UAAAC,EACA,gBAAAC,EACA,iBAAAC,EACA,iBAAAC,EACA,gBAAAC,EACA,kBAAAC,EACA,gBAAAC,EACA,eAAAC,EACA,YAAAC,EACA,aAAAC,CACF,CAAC,CACH","names":["restart","auth","requestReceiptFolder","sombraAuth","actions","statuses","requestIds","createdAt","emailIsVerified","silentModeBefore","sendEmailReceipt","copyIdentifiers","skipWaitingPeriod","createdAtBefore","createdAtAfter","concurrency","transcendUrl","bulkRestartRequests"]}
@@ -1,4 +0,0 @@
- import{a as D}from"./chunk-24SSWBXM.js";import"./chunk-MVDOKJ6J.js";import"./chunk-LAYHULHH.js";import"./chunk-347UQP43.js";import{Od as u,le as S,pe as y}from"./chunk-TDBKATQK.js";import{a as r}from"./chunk-43JWXG77.js";import{b as h}from"./chunk-L5ULN3IT.js";import"./chunk-SF46ZLPT.js";import"./chunk-ARVEJERC.js";import{stringify as C}from"query-string";import w from"colors";import v from"fast-glob";async function m({scanPath:t,fileGlobs:e,ignoreDirs:g,config:p}){let{ignoreDirs:f,supportedFiles:s,scanFunction:a}=p,n=e===""?s:s.concat(e.split(",")),l=[...g.split(","),...f].filter(o=>o.length>0);try{let o=await v(`${t}/**/${n.join("|")}`,{ignore:l.map(i=>`${t}/**/${i}`),unique:!0,onlyFiles:!0});r.info(`Scanning: ${o.length} files`);let $=o.map(i=>a(i)).flat().map(i=>i.softwareDevelopmentKits||[]).flat(),d=[...new Set($.map(i=>i.name))];return r.info(`Found: ${d.length} unique dependencies`),d.map(i=>({name:i,resourceId:`${t}/**/${i}`,useStrictClassifier:!0}))}catch(o){throw new Error(`Error scanning globs ${m} with error: ${o}`)}}async function k({scanPath:t,dataSiloId:e,auth:g,fileGlobs:p,ignoreDirs:f,transcendUrl:s}){let a=y(s,g),n=await u(a,e),l=D[n.dataSilo.type];l||(r.error(w.red(`This plugin "${n.dataSilo.type}" is not supported for offline silo discovery.`)),process.exit(1));let o=await m({scanPath:t,fileGlobs:p,ignoreDirs:f,config:l});await S(a,n.id,o);let c=new URL(h);c.pathname="/data-map/data-inventory/silo-discovery/triage",c.search=C({filters:JSON.stringify({pluginIds:[n.id]})}),r.info(w.green(`Scan found ${o.length} potential data silos at ${t}! View at '${c.href}'
-
- NOTE: it may take 2-3 minutes for scan results to appear in the UI.`))}export{k as discoverSilos};
- //# sourceMappingURL=impl-LZ3HI26W.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/commands/inventory/discover-silos/impl.ts","../src/lib/code-scanning/findFilesToScan.ts"],"sourcesContent":["import type { LocalContext } from '@/context';\nimport { stringify } from 'query-string';\nimport { logger } from '@/logger';\nimport colors from 'colors';\nimport { ADMIN_DASH } from '@/constants';\nimport {\n fetchActiveSiloDiscoPlugin,\n buildTranscendGraphQLClient,\n uploadSiloDiscoveryResults,\n} from '@/lib/graphql';\nimport { findFilesToScan } from '@/lib/code-scanning/findFilesToScan';\nimport { SILO_DISCOVERY_CONFIGS } from '@/lib/code-scanning';\n\ninterface DiscoverSilosCommandFlags {\n scanPath: string;\n dataSiloId: string;\n auth: string;\n fileGlobs: string;\n ignoreDirs: string;\n transcendUrl: string;\n}\n\nexport async function discoverSilos(\n this: LocalContext,\n {\n scanPath,\n dataSiloId,\n auth,\n fileGlobs,\n ignoreDirs,\n transcendUrl,\n }: DiscoverSilosCommandFlags,\n): Promise<void> {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const plugin = await fetchActiveSiloDiscoPlugin(client, dataSiloId);\n\n const config = SILO_DISCOVERY_CONFIGS[plugin.dataSilo.type];\n if (!config) {\n logger.error(\n colors.red(\n `This plugin \"${plugin.dataSilo.type}\" is not supported for offline silo discovery.`,\n ),\n );\n process.exit(1);\n }\n\n const results = await findFilesToScan({\n scanPath,\n fileGlobs,\n ignoreDirs,\n config,\n });\n\n await uploadSiloDiscoveryResults(client, plugin.id, results);\n\n const newUrl = new URL(ADMIN_DASH);\n newUrl.pathname = '/data-map/data-inventory/silo-discovery/triage';\n newUrl.search = stringify({\n filters: JSON.stringify({ pluginIds: [plugin.id] }),\n });\n\n // Indicate success\n logger.info(\n colors.green(\n `Scan found ${results.length} potential data silos at ${scanPath}! ` +\n `View at '${newUrl.href}' ` +\n '\\n\\n NOTE: it may take 2-3 minutes for scan results to appear in the UI.',\n ),\n );\n}\n","import fastGlob from 'fast-glob';\nimport { logger } from '../../logger';\nimport { CodeScanningConfig } from './types';\n\nexport interface SiloDiscoveryRawResults {\n /** The name of the potential data silo entry */\n name: string;\n /** A unique UUID (represents the same resource across different silo discovery runs) */\n resourceId: string;\n /** Any hosts associated with the entry */\n host?: string;\n /** Type of data silo */\n type?: string | undefined;\n}\n\n/**\n * Helper to scan for data silos in all package.json files that it can find in a directory\n *\n * @deprecated TODO: https://transcend.height.app/T-32325 - use code scanning instead\n * @param options - Options\n * @returns the list of integrations\n */\nexport async function findFilesToScan({\n scanPath,\n fileGlobs,\n ignoreDirs,\n config,\n}: {\n /** Where to look for package.json files */\n scanPath: string;\n /** Globs to look for */\n fileGlobs: string;\n /** The directories to ignore (excludes node_modules and serverless-build) */\n ignoreDirs: string;\n /** Silo Discovery configuration */\n config: CodeScanningConfig;\n}): Promise<SiloDiscoveryRawResults[]> {\n const { ignoreDirs: IGNORE_DIRS, supportedFiles, scanFunction } = config;\n const globsToSupport =\n fileGlobs === ''\n ? 
supportedFiles\n : supportedFiles.concat(fileGlobs.split(','));\n const dirsToIgnore = [...ignoreDirs.split(','), ...IGNORE_DIRS].filter(\n (dir) => dir.length > 0,\n );\n try {\n const filesToScan: string[] = await fastGlob(\n `${scanPath}/**/${globsToSupport.join('|')}`,\n {\n ignore: dirsToIgnore.map((dir: string) => `${scanPath}/**/${dir}`),\n unique: true,\n onlyFiles: true,\n },\n );\n logger.info(`Scanning: ${filesToScan.length} files`);\n const allPackages = filesToScan\n .map((filePath: string) => scanFunction(filePath))\n .flat();\n const allSdks = allPackages\n .map((appPackage) => appPackage.softwareDevelopmentKits || [])\n .flat();\n const uniqueDeps = new Set(allSdks.map((sdk) => sdk.name));\n const deps = [...uniqueDeps];\n logger.info(`Found: ${deps.length} unique dependencies`);\n return deps.map((dep) => ({\n name: dep,\n resourceId: `${scanPath}/**/${dep}`,\n useStrictClassifier: true,\n }));\n } catch (error) {\n throw new Error(\n `Error scanning globs ${findFilesToScan} with error: ${error}`,\n );\n }\n}\n"],"mappings":"6TACA,OAAS,aAAAA,MAAiB,eAE1B,OAAOC,MAAY,SCHnB,OAAOC,MAAc,YAsBrB,eAAsBC,EAAgB,CACpC,SAAAC,EACA,UAAAC,EACA,WAAAC,EACA,OAAAC,CACF,EASuC,CACrC,GAAM,CAAE,WAAYC,EAAa,eAAAC,EAAgB,aAAAC,CAAa,EAAIH,EAC5DI,EACJN,IAAc,GACVI,EACAA,EAAe,OAAOJ,EAAU,MAAM,GAAG,CAAC,EAC1CO,EAAe,CAAC,GAAGN,EAAW,MAAM,GAAG,EAAG,GAAGE,CAAW,EAAE,OAC7DK,GAAQA,EAAI,OAAS,CACxB,EACA,GAAI,CACF,IAAMC,EAAwB,MAAMC,EAClC,GAAGX,CAAQ,OAAOO,EAAe,KAAK,GAAG,CAAC,GAC1C,CACE,OAAQC,EAAa,IAAKC,GAAgB,GAAGT,CAAQ,OAAOS,CAAG,EAAE,EACjE,OAAQ,GACR,UAAW,EACb,CACF,EACAG,EAAO,KAAK,aAAaF,EAAY,MAAM,QAAQ,EAInD,IAAMG,EAHcH,EACjB,IAAKI,GAAqBR,EAAaQ,CAAQ,CAAC,EAChD,KAAK,EAEL,IAAKC,GAAeA,EAAW,yBAA2B,CAAC,CAAC,EAC5D,KAAK,EAEFC,EAAO,CAAC,GADK,IAAI,IAAIH,EAAQ,IAAKI,GAAQA,EAAI,IAAI,CAAC,CAC9B,EAC3B,OAAAL,EAAO,KAAK,UAAUI,EAAK,MAAM,sBAAsB,EAChDA,EAAK,IAAKE,IAAS,CACxB,KAAMA,EACN,WAAY,GAAGlB,CAAQ,OAAOkB,CAAG,GACjC,oBAAqB,EACvB,EAAE,CACJ,OAASC,EAAO,CACd,MAAM,IAAI,MACR,wBAAwBpB,CAAe,gBAAgBoB,CAAK,EAC9D,CACF,CACF,CDpDA,eAAsBC,EAEpB,CACE,SAAAC,EACA,WAAAC,EACA,KAAAC,EACA,UAAAC,EACA,WAAAC,EACA,aAAAC,CACF,EACe,CAEf,IAAMC,EAASC,EAA4BF,EAAcH,CAAI,EAEvDM,EAAS,MAAMC,EAA2BH,EAAQL,CAAU,EAE5DS,EAASC,EAAuBH,EAAO,SAAS,IAAI,EACrDE,IACHE,EAAO,MACLC,EAAO,IACL,gBAAgBL,EAAO,SAAS,IAAI,gDACtC,CACF,EACA,QAAQ,KAAK,CAAC,GAGhB,IAAMM,EAAU,MAAMC,EAAgB,CACpC,SAAAf,EACA,UAAAG,EACA,WAAAC,EACA,OAAAM,CACF,CAAC,EAED,MAAMM,EAA2BV,EAAQE,EAAO,GAAIM,CAAO,EAE3D,IAAMG,EAAS,IAAI,IAAIC,CAAU,EACjCD,EAAO,SAAW,iDAClBA,EAAO,OAASE,EAAU,CACxB,QAAS,KAAK,UAAU,CAAE,UAAW,CAACX,EAAO,EAAE,CAAE,CAAC,CACpD,CAAC,EAGDI,EAAO,KACLC,EAAO,MACL,cAAcC,EAAQ,MAAM,4BAA4Bd,CAAQ,cAClDiB,EAAO,IAAI;AAAA;AAAA,qEAE3B,CACF,CACF","names":["stringify","colors","fastGlob","findFilesToScan","scanPath","fileGlobs","ignoreDirs","config","IGNORE_DIRS","supportedFiles","scanFunction","globsToSupport","dirsToIgnore","dir","filesToScan","fastGlob","logger","allSdks","filePath","appPackage","deps","sdk","dep","error","discoverSilos","scanPath","dataSiloId","auth","fileGlobs","ignoreDirs","transcendUrl","client","buildTranscendGraphQLClient","plugin","fetchActiveSiloDiscoPlugin","config","SILO_DISCOVERY_CONFIGS","logger","colors","results","findFilesToScan","uploadSiloDiscoveryResults","newUrl","ADMIN_DASH","stringify"]}
@@ -1,2 +0,0 @@
- import"./chunk-7QHA6ZIV.js";import{c as s}from"./chunk-MA4JWWRO.js";import{U as r}from"./chunk-ZTD7APNF.js";import"./chunk-TDBKATQK.js";import{a as o}from"./chunk-43JWXG77.js";import"./chunk-L5ULN3IT.js";import"./chunk-SF46ZLPT.js";import"./chunk-ARVEJERC.js";import y from"colors";import{uniq as q}from"lodash-es";async function v({auth:n,transcendUrl:a,file:e,pageLimit:i,actions:m,sombraAuth:c,statuses:u,createdAtBefore:p,createdAtAfter:f,showTests:l}){let{requestsFormattedForCsv:t}=await r({transcendUrl:a,pageLimit:i,actions:m,statuses:u,auth:n,sombraAuth:c,createdAtBefore:p,createdAtAfter:f,isTest:l}),g=q(t.map(d=>Object.keys(d)).flat());s(e,t,g),o.info(y.green(`Successfully wrote ${t.length} requests to file "${e}"`))}export{v as _export};
- //# sourceMappingURL=impl-MEDPDKAE.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/commands/request/export/impl.ts"],"sourcesContent":["import type { LocalContext } from '@/context';\nimport colors from 'colors';\n\nimport { logger } from '@/logger';\nimport { uniq } from 'lodash-es';\nimport { pullPrivacyRequests } from '@/lib/requests';\nimport { writeCsv } from '@/lib/cron';\nimport type { RequestAction, RequestStatus } from '@transcend-io/privacy-types';\n\ninterface ExportCommandFlags {\n auth: string;\n sombraAuth?: string;\n actions?: RequestAction[];\n statuses?: RequestStatus[];\n transcendUrl: string;\n file: string;\n concurrency: number;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n showTests?: boolean;\n pageLimit: number;\n}\n\n// `export` is a reserved keyword, so we need to prefix it with an underscore\n// eslint-disable-next-line no-underscore-dangle\nexport async function _export(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n file,\n pageLimit,\n actions,\n sombraAuth,\n statuses,\n createdAtBefore,\n createdAtAfter,\n showTests,\n }: ExportCommandFlags,\n): Promise<void> {\n const { requestsFormattedForCsv } = await pullPrivacyRequests({\n transcendUrl,\n pageLimit,\n actions,\n statuses,\n auth,\n sombraAuth,\n createdAtBefore,\n createdAtAfter,\n isTest: showTests,\n });\n\n // Write to CSV\n const headers = uniq(\n requestsFormattedForCsv.map((d) => Object.keys(d)).flat(),\n );\n writeCsv(file, requestsFormattedForCsv, headers);\n logger.info(\n colors.green(\n `Successfully wrote ${requestsFormattedForCsv.length} requests to file \"${file}\"`,\n ),\n );\n}\n"],"mappings":"oQACA,OAAOA,MAAY,SAGnB,OAAS,QAAAC,MAAY,YAqBrB,eAAsBC,EAEpB,CACE,KAAAC,EACA,aAAAC,EACA,KAAAC,EACA,UAAAC,EACA,QAAAC,EACA,WAAAC,EACA,SAAAC,EACA,gBAAAC,EACA,eAAAC,EACA,UAAAC,CACF,EACe,CACf,GAAM,CAAE,wBAAAC,CAAwB,EAAI,MAAMC,EAAoB,CAC5D,aAAAV,EACA,UAAAE,EACA,QAAAC,EACA,SAAAE,EACA,KAAAN,EACA,WAAAK,EACA,gBAAAE,EACA,eAAAC,EACA,OAAQC,CACV,CAAC,EAGKG,EAAUC,EACdH,EAAwB,IAAK,GAAM,OAAO,KAAK,CAAC,CAAC,EAAE,KAAK,CAC1D,EACAI,EAASZ,EAAMQ,EAAyBE,CAAO,EAC/CG,EAAO,KACLC,EAAO,MACL,sBAAsBN,EAAwB,MAAM,sBAAsBR,CAAI,GAChF,CACF,CACF","names":["colors","uniq","_export","auth","transcendUrl","file","pageLimit","actions","sombraAuth","statuses","createdAtBefore","createdAtAfter","showTests","requestsFormattedForCsv","pullPrivacyRequests","headers","uniq","writeCsv","logger","colors"]}
@@ -1,2 +0,0 @@
- import{L as a}from"./chunk-ZTD7APNF.js";import"./chunk-TDBKATQK.js";import"./chunk-43JWXG77.js";import"./chunk-L5ULN3IT.js";import"./chunk-SF46ZLPT.js";import"./chunk-ARVEJERC.js";async function m({auth:s,actions:o,statuses:r=[],requestIds:c,silentModeBefore:e,createdAtBefore:t,createdAtAfter:n,cancellationTitle:i,transcendUrl:u,concurrency:d}){await a({transcendUrl:u,requestActions:o,auth:s,cancellationTitle:i,requestIds:c,statuses:r,concurrency:d,silentModeBefore:e?new Date(e):void 0,createdAtBefore:t?new Date(t):void 0,createdAtAfter:n?new Date(n):void 0})}export{m as cancel};
- //# sourceMappingURL=impl-MLS6TI7N.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/commands/request/cancel/impl.ts"],"sourcesContent":["import type { LocalContext } from '@/context';\nimport { RequestAction, RequestStatus } from '@transcend-io/privacy-types';\nimport { cancelPrivacyRequests } from '@/lib/requests';\n\ninterface CancelCommandFlags {\n auth: string;\n actions: RequestAction[];\n statuses?: RequestStatus[];\n requestIds?: string[];\n silentModeBefore?: Date;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n cancellationTitle: string;\n transcendUrl: string;\n concurrency: number;\n}\n\nexport async function cancel(\n this: LocalContext,\n {\n auth,\n actions,\n statuses = [],\n requestIds,\n silentModeBefore,\n createdAtBefore,\n createdAtAfter,\n cancellationTitle,\n transcendUrl,\n concurrency,\n }: CancelCommandFlags,\n): Promise<void> {\n await cancelPrivacyRequests({\n transcendUrl,\n requestActions: actions,\n auth,\n cancellationTitle,\n requestIds,\n statuses,\n concurrency,\n silentModeBefore: silentModeBefore ? new Date(silentModeBefore) : undefined,\n createdAtBefore: createdAtBefore ? new Date(createdAtBefore) : undefined,\n createdAtAfter: createdAtAfter ? new Date(createdAtAfter) : undefined,\n });\n}\n"],"mappings":"oLAiBA,eAAsBA,EAEpB,CACE,KAAAC,EACA,QAAAC,EACA,SAAAC,EAAW,CAAC,EACZ,WAAAC,EACA,iBAAAC,EACA,gBAAAC,EACA,eAAAC,EACA,kBAAAC,EACA,aAAAC,EACA,YAAAC,CACF,EACe,CACf,MAAMC,EAAsB,CAC1B,aAAAF,EACA,eAAgBP,EAChB,KAAAD,EACA,kBAAAO,EACA,WAAAJ,EACA,SAAAD,EACA,YAAAO,EACA,iBAAkBL,EAAmB,IAAI,KAAKA,CAAgB,EAAI,OAClE,gBAAiBC,EAAkB,IAAI,KAAKA,CAAe,EAAI,OAC/D,eAAgBC,EAAiB,IAAI,KAAKA,CAAc,EAAI,MAC9D,CAAC,CACH","names":["cancel","auth","actions","statuses","requestIds","silentModeBefore","createdAtBefore","createdAtAfter","cancellationTitle","transcendUrl","concurrency","cancelPrivacyRequests"]}
@@ -1,2 +0,0 @@
- import{b as p}from"./chunk-24SSWBXM.js";import"./chunk-MVDOKJ6J.js";import"./chunk-LAYHULHH.js";import"./chunk-347UQP43.js";import{ke as c,pe as m}from"./chunk-TDBKATQK.js";import{a as r}from"./chunk-43JWXG77.js";import{b as g}from"./chunk-L5ULN3IT.js";import"./chunk-SF46ZLPT.js";import"./chunk-ARVEJERC.js";import n from"colors";import{execSync as k}from"child_process";var l='A repository name must be provided. You can specify using --repositoryName=$REPO_NAME or by ensuring the command "git config --get remote.origin.url" returns the name of the repository';async function S({auth:d,scanPath:o,ignoreDirs:f,repositoryName:u,transcendUrl:y}){let e=u;if(!e)try{let t=k(`cd ${o} && git config --get remote.origin.url`).toString("utf-8").trim();[e]=t.includes("https:")?t.split("/").slice(3).join("/").split("."):(t.split(":").pop()||"").split("."),e||(r.error(n.red(l)),process.exit(1))}catch(a){r.error(n.red(`${l} - Got error: ${a.message}`)),process.exit(1)}let h=m(y,d),i=await p({scanPath:o,ignoreDirs:f,repositoryName:e});await c(h,i);let s=new URL(g);s.pathname="/code-scanning/code-packages",r.info(n.green(`Scan found ${i.length} packages at ${o}! View results at '${s.href}'`))}export{S as scanPackages};
- //# sourceMappingURL=impl-NI7KSBSS.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/commands/inventory/scan-packages/impl.ts"],"sourcesContent":["import type { LocalContext } from '@/context';\nimport { logger } from '@/logger';\nimport colors from 'colors';\nimport { ADMIN_DASH } from '@/constants';\nimport { findCodePackagesInFolder } from '@/lib/code-scanning';\nimport { buildTranscendGraphQLClient, syncCodePackages } from '@/lib/graphql';\nimport { execSync } from 'child_process';\n\nconst REPO_ERROR =\n 'A repository name must be provided. ' +\n 'You can specify using --repositoryName=$REPO_NAME or by ensuring the ' +\n 'command \"git config --get remote.origin.url\" returns the name of the repository';\n\ninterface ScanPackagesCommandFlags {\n auth: string;\n scanPath: string;\n ignoreDirs?: string[];\n repositoryName?: string;\n transcendUrl: string;\n}\n\nexport async function scanPackages(\n this: LocalContext,\n {\n auth,\n scanPath,\n ignoreDirs,\n repositoryName,\n transcendUrl,\n }: ScanPackagesCommandFlags,\n): Promise<void> {\n // Ensure repository name is specified\n let gitRepositoryName = repositoryName;\n if (!gitRepositoryName) {\n try {\n const name = execSync(\n `cd ${scanPath} && git config --get remote.origin.url`,\n );\n // Trim and parse the URL\n const url = name.toString('utf-8').trim();\n [gitRepositoryName] = !url.includes('https:')\n ? (url.split(':').pop() || '').split('.')\n : url.split('/').slice(3).join('/').split('.');\n if (!gitRepositoryName) {\n logger.error(colors.red(REPO_ERROR));\n process.exit(1);\n }\n } catch (err) {\n logger.error(colors.red(`${REPO_ERROR} - Got error: ${err.message}`));\n process.exit(1);\n }\n }\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Scan the codebase to discovery packages\n const results = await findCodePackagesInFolder({\n scanPath,\n ignoreDirs,\n repositoryName: gitRepositoryName,\n });\n\n // Report scan to Transcend\n await syncCodePackages(client, results);\n\n const newUrl = new URL(ADMIN_DASH);\n newUrl.pathname = '/code-scanning/code-packages';\n\n // Indicate success\n logger.info(\n colors.green(\n `Scan found ${results.length} packages at ${scanPath}! ` +\n `View results at '${newUrl.href}'`,\n ),\n );\n}\n"],"mappings":"qTAEA,OAAOA,MAAY,SAInB,OAAS,YAAAC,MAAgB,gBAEzB,IAAMC,EACJ,2LAYF,eAAsBC,EAEpB,CACE,KAAAC,EACA,SAAAC,EACA,WAAAC,EACA,eAAAC,EACA,aAAAC,CACF,EACe,CAEf,IAAIC,EAAoBF,EACxB,GAAI,CAACE,EACH,GAAI,CAKF,IAAMC,EAJOT,EACX,MAAMI,CAAQ,wCAChB,EAEiB,SAAS,OAAO,EAAE,KAAK,EACxC,CAACI,CAAiB,EAAKC,EAAI,SAAS,QAAQ,EAExCA,EAAI,MAAM,GAAG,EAAE,MAAM,CAAC,EAAE,KAAK,GAAG,EAAE,MAAM,GAAG,GAD1CA,EAAI,MAAM,GAAG,EAAE,IAAI,GAAK,IAAI,MAAM,GAAG,EAErCD,IACHE,EAAO,MAAMC,EAAO,IAAIV,CAAU,CAAC,EACnC,QAAQ,KAAK,CAAC,EAElB,OAASW,EAAK,CACZF,EAAO,MAAMC,EAAO,IAAI,GAAGV,CAAU,iBAAiBW,EAAI,OAAO,EAAE,CAAC,EACpE,QAAQ,KAAK,CAAC,CAChB,CAIF,IAAMC,EAASC,EAA4BP,EAAcJ,CAAI,EAGvDY,EAAU,MAAMC,EAAyB,CAC7C,SAAAZ,EACA,WAAAC,EACA,eAAgBG,CAClB,CAAC,EAGD,MAAMS,EAAiBJ,EAAQE,CAAO,EAEtC,IAAMG,EAAS,IAAI,IAAIC,CAAU,EACjCD,EAAO,SAAW,+BAGlBR,EAAO,KACLC,EAAO,MACL,cAAcI,EAAQ,MAAM,gBAAgBX,CAAQ,sBAC9Bc,EAAO,IAAI,GACnC,CACF,CACF","names":["colors","execSync","REPO_ERROR","scanPackages","auth","scanPath","ignoreDirs","repositoryName","transcendUrl","gitRepositoryName","url","logger","colors","err","client","buildTranscendGraphQLClient","results","findCodePackagesInFolder","syncCodePackages","newUrl","ADMIN_DASH"]}
@@ -1,9 +0,0 @@
- import{b as O}from"./chunk-MVDOKJ6J.js";import"./chunk-7QHA6ZIV.js";import{c as v}from"./chunk-MA4JWWRO.js";import{d as b}from"./chunk-XNR74SBS.js";import"./chunk-WSDWILYI.js";import"./chunk-ZTD7APNF.js";import"./chunk-LAYHULHH.js";import"./chunk-347UQP43.js";import{a as M,pe as x,sc as S}from"./chunk-TDBKATQK.js";import{a as t}from"./chunk-43JWXG77.js";import{c as y}from"./chunk-L5ULN3IT.js";import"./chunk-SF46ZLPT.js";import"./chunk-ARVEJERC.js";import e from"colors";import{join as w}from"path";import E,{existsSync as j,mkdirSync as T}from"fs";async function q({auth:A,start:C,end:f,folder:r,bin:l,transcendUrl:N}){let m=await O(A);E.existsSync(r)&&!E.lstatSync(r).isDirectory()&&(t.error(e.red('The provided argument "folder" was passed a file. expected: folder="./consent-metrics/"')),process.exit(1));let g=l;Object.values(S).includes(g)||(t.error(e.red(`Failed to parse argument "bin" with value "${l}"
- Expected one of:
- ${Object.values(S).join(`
- `)}`)),process.exit(1));let i=new Date(C),o=f?new Date(f):new Date;if(Number.isNaN(i.getTime())&&(t.error(e.red(`Start date provided is invalid date. Got --start="${C}" expected --start="01/01/2023"`)),process.exit(1)),Number.isNaN(o.getTime())&&(t.error(e.red(`End date provided is invalid date. Got --end="${f}" expected --end="01/01/2023"`)),process.exit(1)),i>o&&(t.error(e.red(`Got a start date "${i.toISOString()}" that was larger than the end date "${o.toISOString()}". Start date must be before end date.`)),process.exit(1)),j(r)||T(r),t.info(e.magenta(`Pulling consent metrics from start=${i.toString()} to end=${o.toISOString()} with bin size "${l}"`)),typeof m=="string"){try{let n=x(N,m),s=await b(n,{bin:g,start:i,end:o});Object.entries(s).forEach(([p,c])=>{c.forEach(({points:u,name:d})=>{let a=w(r,`${p}_${d}.csv`);t.info(e.magenta(`Writing configuration to file "${a}"...`)),v(a,u.map(({key:$,value:h})=>({timestamp:$,value:h})))})})}catch(n){t.error(e.red(`An error occurred syncing the schema: ${n.message}`)),process.exit(1)}t.info(e.green(`Successfully synced consent metrics to disk in folder "${r}"! View at ${y}`))}else{let n=[];await M(m,async(s,p)=>{let c=`[${p+1}/${m.length}][${s.organizationName}] `;t.info(e.magenta(`~~~
-
- ${c}Attempting to pull consent metrics...
-
- ~~~`));let u=x(N,s.apiKey);try{let d=await b(u,{bin:g,start:i,end:o}),a=w(r,s.organizationName);j(a)||T(a),Object.entries(d).forEach(([$,h])=>{h.forEach(({points:I,name:F})=>{let D=w(a,`${$}_${F}.csv`);t.info(e.magenta(`Writing configuration to file "${D}"...`)),v(D,I.map(({key:G,value:z})=>({timestamp:G,value:z})))})}),t.info(e.green(`${c}Successfully pulled configuration!`))}catch{t.error(e.red(`${c}Failed to sync configuration.`)),n.push(s.organizationName)}}),n.length>0&&(t.info(e.red(`Sync encountered errors for "${n.join(",")}". View output above for more information, or check out ${y}`)),process.exit(1))}}export{q as pullConsentMetrics};
- //# sourceMappingURL=impl-OM6EKANE.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/commands/consent/pull-consent-metrics/impl.ts"],"sourcesContent":["import type { LocalContext } from '@/context';\nimport { logger } from '@/logger';\nimport colors from 'colors';\nimport { mapSeries } from '@/lib/bluebird-replace';\nimport { join } from 'path';\nimport fs, { existsSync, mkdirSync } from 'fs';\nimport {\n buildTranscendGraphQLClient,\n ConsentManagerMetricBin,\n} from '@/lib/graphql';\nimport { validateTranscendAuth } from '@/lib/api-keys';\nimport { ADMIN_DASH_INTEGRATIONS } from '@/constants';\nimport { pullConsentManagerMetrics } from '@/lib/consent-manager';\nimport { writeCsv } from '@/lib/cron';\n\ninterface PullConsentMetricsCommandFlags {\n auth: string;\n start: Date;\n end?: Date;\n folder: string;\n bin: string;\n transcendUrl: string;\n}\n\nexport async function pullConsentMetrics(\n this: LocalContext,\n {\n auth,\n start,\n end,\n folder,\n bin,\n transcendUrl,\n }: PullConsentMetricsCommandFlags,\n): Promise<void> {\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Ensure folder either does not exist or is not a file\n if (fs.existsSync(folder) && !fs.lstatSync(folder).isDirectory()) {\n logger.error(\n colors.red(\n 'The provided argument \"folder\" was passed a file. expected: folder=\"./consent-metrics/\"',\n ),\n );\n process.exit(1);\n }\n\n // Validate bin\n const parsedBin = bin as ConsentManagerMetricBin;\n if (!Object.values(ConsentManagerMetricBin).includes(parsedBin)) {\n logger.error(\n colors.red(\n `Failed to parse argument \"bin\" with value \"${bin}\"\\n` +\n `Expected one of: \\n${Object.values(ConsentManagerMetricBin).join(\n '\\n',\n )}`,\n ),\n );\n process.exit(1);\n }\n\n // Parse the dates\n const startDate = new Date(start);\n const endDate = end ? new Date(end) : new Date();\n if (Number.isNaN(startDate.getTime())) {\n logger.error(\n colors.red(\n `Start date provided is invalid date. Got --start=\"${start}\" expected --start=\"01/01/2023\"`,\n ),\n );\n process.exit(1);\n }\n if (Number.isNaN(endDate.getTime())) {\n logger.error(\n colors.red(\n `End date provided is invalid date. Got --end=\"${end}\" expected --end=\"01/01/2023\"`,\n ),\n );\n process.exit(1);\n }\n if (startDate > endDate) {\n logger.error(\n colors.red(\n `Got a start date \"${startDate.toISOString()}\" that was larger than the end date \"${endDate.toISOString()}\". 
` +\n 'Start date must be before end date.',\n ),\n );\n process.exit(1);\n }\n\n // Create the folder if it does not exist\n if (!existsSync(folder)) {\n mkdirSync(folder);\n }\n\n logger.info(\n colors.magenta(\n `Pulling consent metrics from start=${startDate.toString()} to end=${endDate.toISOString()} with bin size \"${bin}\"`,\n ),\n );\n\n // Sync to Disk\n if (typeof apiKeyOrList === 'string') {\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKeyOrList);\n\n // Pull the metrics\n const configuration = await pullConsentManagerMetrics(client, {\n bin: parsedBin,\n start: startDate,\n end: endDate,\n });\n\n // Write to file\n Object.entries(configuration).forEach(([metricName, metrics]) => {\n metrics.forEach(({ points, name }) => {\n const file = join(folder, `${metricName}_${name}.csv`);\n logger.info(\n colors.magenta(`Writing configuration to file \"${file}\"...`),\n );\n writeCsv(\n file,\n points.map(({ key, value }) => ({\n timestamp: key,\n value,\n })),\n );\n });\n });\n } catch (err) {\n logger.error(\n colors.red(`An error occurred syncing the schema: ${err.message}`),\n );\n process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced consent metrics to disk in folder \"${folder}\"! View at ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n } else {\n const encounteredErrors: string[] = [];\n await mapSeries(apiKeyOrList, async (apiKey, ind) => {\n const prefix = `[${ind + 1}/${apiKeyOrList.length}][${\n apiKey.organizationName\n }] `;\n logger.info(\n colors.magenta(\n `~~~\\n\\n${prefix}Attempting to pull consent metrics...\\n\\n~~~`,\n ),\n );\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKey.apiKey);\n\n try {\n const configuration = await pullConsentManagerMetrics(client, {\n bin: parsedBin,\n start: startDate,\n end: endDate,\n });\n\n // ensure folder exists for that organization\n const subFolder = join(folder, apiKey.organizationName);\n if (!existsSync(subFolder)) {\n mkdirSync(subFolder);\n }\n\n // Write to file\n Object.entries(configuration).forEach(([metricName, metrics]) => {\n metrics.forEach(({ points, name }) => {\n const file = join(subFolder, `${metricName}_${name}.csv`);\n logger.info(\n colors.magenta(`Writing configuration to file \"${file}\"...`),\n );\n writeCsv(\n file,\n points.map(({ key, value }) => ({\n timestamp: key,\n value,\n })),\n );\n });\n });\n\n logger.info(\n colors.green(`${prefix}Successfully pulled configuration!`),\n );\n } catch (err) {\n logger.error(colors.red(`${prefix}Failed to sync configuration.`));\n encounteredErrors.push(apiKey.organizationName);\n }\n });\n\n if (encounteredErrors.length > 0) {\n logger.info(\n colors.red(\n `Sync encountered errors for \"${encounteredErrors.join(\n ',',\n )}\". 
View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n process.exit(1);\n }\n }\n}\n"],"mappings":"ocAEA,OAAOA,MAAY,SAEnB,OAAS,QAAAC,MAAY,OACrB,OAAOC,GAAM,cAAAC,EAAY,aAAAC,MAAiB,KAmB1C,eAAsBC,EAEpB,CACE,KAAAC,EACA,MAAAC,EACA,IAAAC,EACA,OAAAC,EACA,IAAAC,EACA,aAAAC,CACF,EACe,CAEf,IAAMC,EAAe,MAAMC,EAAsBP,CAAI,EAGjDQ,EAAG,WAAWL,CAAM,GAAK,CAACK,EAAG,UAAUL,CAAM,EAAE,YAAY,IAC7DM,EAAO,MACLC,EAAO,IACL,yFACF,CACF,EACA,QAAQ,KAAK,CAAC,GAIhB,IAAMC,EAAYP,EACb,OAAO,OAAOQ,CAAuB,EAAE,SAASD,CAAS,IAC5DF,EAAO,MACLC,EAAO,IACL,8CAA8CN,CAAG;AAAA;AAAA,EACzB,OAAO,OAAOQ,CAAuB,EAAE,KAC3D;AAAA,CACF,CAAC,EACL,CACF,EACA,QAAQ,KAAK,CAAC,GAIhB,IAAMC,EAAY,IAAI,KAAKZ,CAAK,EAC1Ba,EAAUZ,EAAM,IAAI,KAAKA,CAAG,EAAI,IAAI,KAuC1C,GAtCI,OAAO,MAAMW,EAAU,QAAQ,CAAC,IAClCJ,EAAO,MACLC,EAAO,IACL,qDAAqDT,CAAK,iCAC5D,CACF,EACA,QAAQ,KAAK,CAAC,GAEZ,OAAO,MAAMa,EAAQ,QAAQ,CAAC,IAChCL,EAAO,MACLC,EAAO,IACL,iDAAiDR,CAAG,+BACtD,CACF,EACA,QAAQ,KAAK,CAAC,GAEZW,EAAYC,IACdL,EAAO,MACLC,EAAO,IACL,qBAAqBG,EAAU,YAAY,CAAC,wCAAwCC,EAAQ,YAAY,CAAC,wCAE3G,CACF,EACA,QAAQ,KAAK,CAAC,GAIXC,EAAWZ,CAAM,GACpBa,EAAUb,CAAM,EAGlBM,EAAO,KACLC,EAAO,QACL,sCAAsCG,EAAU,SAAS,CAAC,WAAWC,EAAQ,YAAY,CAAC,mBAAmBV,CAAG,GAClH,CACF,EAGI,OAAOE,GAAiB,SAAU,CACpC,GAAI,CAEF,IAAMW,EAASC,EAA4Bb,EAAcC,CAAY,EAG/Da,EAAgB,MAAMC,EAA0BH,EAAQ,CAC5D,IAAKN,EACL,MAAOE,EACP,IAAKC,CACP,CAAC,EAGD,OAAO,QAAQK,CAAa,EAAE,QAAQ,CAAC,CAACE,EAAYC,CAAO,IAAM,CAC/DA,EAAQ,QAAQ,CAAC,CAAE,OAAAC,EAAQ,KAAAC,CAAK,IAAM,CACpC,IAAMC,EAAOC,EAAKvB,EAAQ,GAAGkB,CAAU,IAAIG,CAAI,MAAM,EACrDf,EAAO,KACLC,EAAO,QAAQ,kCAAkCe,CAAI,MAAM,CAC7D,EACAE,EACEF,EACAF,EAAO,IAAI,CAAC,CAAE,IAAAK,EAAK,MAAAC,CAAM,KAAO,CAC9B,UAAWD,EACX,MAAAC,CACF,EAAE,CACJ,CACF,CAAC,CACH,CAAC,CACH,OAASC,EAAK,CACZrB,EAAO,MACLC,EAAO,IAAI,yCAAyCoB,EAAI,OAAO,EAAE,CACnE,EACA,QAAQ,KAAK,CAAC,CAChB,CAGArB,EAAO,KACLC,EAAO,MACL,0DAA0DP,CAAM,cAAc4B,CAAuB,EACvG,CACF,CACF,KAAO,CACL,IAAMC,EAA8B,CAAC,EACrC,MAAMC,EAAU3B,EAAc,MAAO4B,EAAQC,IAAQ,CACnD,IAAMC,EAAS,IAAID,EAAM,CAAC,IAAI7B,EAAa,MAAM,KAC/C4B,EAAO,gBACT,KACAzB,EAAO,KACLC,EAAO,QACL;AAAA;AAAA,EAAU0B,CAAM;AAAA;AAAA,IAClB,CACF,EAGA,IAAMnB,EAASC,EAA4Bb,EAAc6B,EAAO,MAAM,EAEtE,GAAI,CACF,IAAMf,EAAgB,MAAMC,EAA0BH,EAAQ,CAC5D,IAAKN,EACL,MAAOE,EACP,IAAKC,CACP,CAAC,EAGKuB,EAAYX,EAAKvB,EAAQ+B,EAAO,gBAAgB,EACjDnB,EAAWsB,CAAS,GACvBrB,EAAUqB,CAAS,EAIrB,OAAO,QAAQlB,CAAa,EAAE,QAAQ,CAAC,CAACE,EAAYC,CAAO,IAAM,CAC/DA,EAAQ,QAAQ,CAAC,CAAE,OAAAC,EAAQ,KAAAC,CAAK,IAAM,CACpC,IAAMC,EAAOC,EAAKW,EAAW,GAAGhB,CAAU,IAAIG,CAAI,MAAM,EACxDf,EAAO,KACLC,EAAO,QAAQ,kCAAkCe,CAAI,MAAM,CAC7D,EACAE,EACEF,EACAF,EAAO,IAAI,CAAC,CAAE,IAAAK,EAAK,MAAAC,CAAM,KAAO,CAC9B,UAAWD,EACX,MAAAC,CACF,EAAE,CACJ,CACF,CAAC,CACH,CAAC,EAEDpB,EAAO,KACLC,EAAO,MAAM,GAAG0B,CAAM,oCAAoC,CAC5D,CACF,MAAc,CACZ3B,EAAO,MAAMC,EAAO,IAAI,GAAG0B,CAAM,+BAA+B,CAAC,EACjEJ,EAAkB,KAAKE,EAAO,gBAAgB,CAChD,CACF,CAAC,EAEGF,EAAkB,OAAS,IAC7BvB,EAAO,KACLC,EAAO,IACL,gCAAgCsB,EAAkB,KAChD,GACF,CAAC,2DAA2DD,CAAuB,EACrF,CACF,EAEA,QAAQ,KAAK,CAAC,EAElB,CACF","names":["colors","join","fs","existsSync","mkdirSync","pullConsentMetrics","auth","start","end","folder","bin","transcendUrl","apiKeyOrList","validateTranscendAuth","fs","logger","colors","parsedBin","ConsentManagerMetricBin","startDate","endDate","existsSync","mkdirSync","client","buildTranscendGraphQLClient","configuration","pullConsentManagerMetrics","metricName","metrics","points","name","file","join","writeCsv","key","value","err","ADMIN_DASH_INTEGRATIONS","encounteredErrors","mapSeries","apiKey","ind","prefix","subFolder"]}
@@ -1,2 +0,0 @@
- import{b as n,f as r}from"./chunk-WSDWILYI.js";import{q as e}from"./chunk-ZTD7APNF.js";import"./chunk-TDBKATQK.js";import"./chunk-43JWXG77.js";import"./chunk-L5ULN3IT.js";import"./chunk-SF46ZLPT.js";import"./chunk-ARVEJERC.js";async function d({base64EncryptionKey:o,base64SigningKey:t,partition:s,file:i,consentUrl:a,concurrency:c}){let m=e(i,n);await r({base64EncryptionKey:o,base64SigningKey:t,preferences:m,partition:s,concurrency:c,transcendUrl:a})}export{d as uploadConsentPreferences};
- //# sourceMappingURL=impl-T4WDJSWZ.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/commands/consent/upload-consent-preferences/impl.ts"],"sourcesContent":["import type { LocalContext } from '@/context';\n\nimport { uploadConsents } from '@/lib/consent-manager/uploadConsents';\nimport { ConsentPreferenceUpload } from '@/lib/consent-manager/types';\nimport { readCsv } from '@/lib/requests';\n\ninterface UploadConsentPreferencesCommandFlags {\n base64EncryptionKey: string;\n base64SigningKey: string;\n partition: string;\n file: string;\n consentUrl: string;\n concurrency: number;\n}\n\nexport async function uploadConsentPreferences(\n this: LocalContext,\n {\n base64EncryptionKey,\n base64SigningKey,\n partition,\n file,\n consentUrl,\n concurrency,\n }: UploadConsentPreferencesCommandFlags,\n): Promise<void> {\n // Load in preferences from csv\n const preferences = readCsv(file, ConsentPreferenceUpload);\n\n // Upload cookies\n await uploadConsents({\n base64EncryptionKey,\n base64SigningKey,\n preferences,\n partition,\n concurrency,\n transcendUrl: consentUrl,\n });\n}\n"],"mappings":"mOAeA,eAAsBA,EAEpB,CACE,oBAAAC,EACA,iBAAAC,EACA,UAAAC,EACA,KAAAC,EACA,WAAAC,EACA,YAAAC,CACF,EACe,CAEf,IAAMC,EAAcC,EAAQJ,EAAMK,CAAuB,EAGzD,MAAMC,EAAe,CACnB,oBAAAT,EACA,iBAAAC,EACA,YAAAK,EACA,UAAAJ,EACA,YAAAG,EACA,aAAcD,CAChB,CAAC,CACH","names":["uploadConsentPreferences","base64EncryptionKey","base64SigningKey","partition","file","consentUrl","concurrency","preferences","readCsv","ConsentPreferenceUpload","uploadConsents"]}
@@ -1,2 +0,0 @@
- import{d as n}from"./chunk-6P4FW6XR.js";import"./chunk-MA4JWWRO.js";import"./chunk-ZTD7APNF.js";import"./chunk-TDBKATQK.js";import"./chunk-43JWXG77.js";import"./chunk-L5ULN3IT.js";import"./chunk-SF46ZLPT.js";import"./chunk-ARVEJERC.js";async function c({auth:t,transcendUrl:r,file:e,enricherId:i,concurrency:s,markSilent:o,sombraAuth:a}){await n({file:e,transcendUrl:r,enricherId:i,concurrency:s,markSilent:o,auth:t,sombraAuth:a})}export{c as pushIdentifiers};
- //# sourceMappingURL=impl-U37YTCPW.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/commands/request/preflight/push-identifiers/impl.ts"],"sourcesContent":["import type { LocalContext } from '@/context';\nimport { pushManualEnrichmentIdentifiersFromCsv } from '@/lib/manual-enrichment';\n\ninterface PushIdentifiersCommandFlags {\n auth: string;\n enricherId: string;\n sombraAuth?: string;\n transcendUrl: string;\n file: string;\n markSilent: boolean;\n concurrency: number;\n}\n\nexport async function pushIdentifiers(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n file,\n enricherId,\n concurrency,\n markSilent,\n sombraAuth,\n }: PushIdentifiersCommandFlags,\n): Promise<void> {\n await pushManualEnrichmentIdentifiersFromCsv({\n file,\n transcendUrl,\n enricherId,\n concurrency,\n markSilent,\n auth,\n sombraAuth,\n });\n}\n"],"mappings":"4OAaA,eAAsBA,EAEpB,CACE,KAAAC,EACA,aAAAC,EACA,KAAAC,EACA,WAAAC,EACA,YAAAC,EACA,WAAAC,EACA,WAAAC,CACF,EACe,CACf,MAAMC,EAAuC,CAC3C,KAAAL,EACA,aAAAD,EACA,WAAAE,EACA,YAAAC,EACA,WAAAC,EACA,KAAAL,EACA,WAAAM,CACF,CAAC,CACH","names":["pushIdentifiers","auth","transcendUrl","file","enricherId","concurrency","markSilent","sombraAuth","pushManualEnrichmentIdentifiersFromCsv"]}
@@ -1,12 +0,0 @@
- import{a as Q}from"./chunk-HH2PQ3PQ.js";import"./chunk-4GLITB3Y.js";import{n as F,p as X,q as Z}from"./chunk-ZTD7APNF.js";import"./chunk-347UQP43.js";import{Qc as H,_d as ee,a as A,b as V,pe as te,qe as re}from"./chunk-TDBKATQK.js";import{a as l}from"./chunk-43JWXG77.js";import{f as oe}from"./chunk-L5ULN3IT.js";import"./chunk-SF46ZLPT.js";import"./chunk-ARVEJERC.js";import v from"colors";import k from"colors";import{chunk as Qe}from"lodash-es";import ye from"cli-progress";import{PersistedState as Ge}from"@transcend-io/persisted-state";import{keyBy as De}from"lodash-es";import*as x from"io-ts";import de from"colors";import{PreferenceQueryResponseItem as Ue}from"@transcend-io/privacy-types";import ne from"colors";import se from"cli-progress";import{chunk as ke}from"lodash-es";import{decodeCodec as Re}from"@transcend-io/type-utils";import*as T from"io-ts";var Me=T.intersection([T.type({nodes:T.array(Ue)}),T.partial({cursor:T.string})]),Ve=["ENOTFOUND","ETIMEDOUT","504 Gateway Time-out","Task timed out after"];async function ie(m,{identifiers:n,partitionKey:h,skipLogging:d=!1}){let s=[],u=ke(n,100),i=new Date().getTime(),t=new se.SingleBar({},se.Presets.shades_classic);d||t.start(n.length,0);let c=0;await V(u,async e=>{let a=0,g=3;for(;a<g;)try{let p=await m.post(`v1/preferences/${h}/query`,{json:{filter:{identifiers:e},limit:e.length}}).json(),w=Re(Me,p);s.push(...w.nodes),c+=e.length,t.update(c);break}catch(p){a+=1;let w=p?.response?.body||p?.message||"";if(a>=g||!Ve.some(P=>w.includes(P)))throw new Error(`Received an error from server after ${a} attempts: ${w}`);l.warn(ne.yellow(`[RETRYING FAILED REQUEST - Attempt ${a}] Failed to fetch ${e.length} user preferences from partition ${h}: ${w}`))}},{concurrency:40}),t.stop();let r=new Date().getTime()-i;return d||l.info(ne.green(`Completed download in "${r/1e3}" seconds.`)),s}import{PreferenceTopicType as G}from"@transcend-io/privacy-types";import{apply as Fe}from"@transcend-io/type-utils";function D({row:m,columnToPurposeName:n,purposeSlugs:h,preferenceTopics:d}){let s={};return Object.entries(n).forEach(([u,{purpose:i,preference:t,valueMapping:c}])=>{if(!h.includes(i))throw new Error(`Invalid purpose slug: ${i}, expected: ${h.join(", ")}`);if(t){let f=d.find(g=>g.slug===t&&g.purpose.trackingType===i);if(!f){let g=d.filter(p=>p.purpose.trackingType===i).map(p=>p.slug);throw new Error(`Invalid preference slug: ${t} for purpose: ${i}. 
Allowed preference slugs for purpose are: ${g.join(",")}`)}s[i]||(s[i]={preferences:[]}),s[i].preferences||(s[i].preferences=[]);let r=m[u],e=c[r],a=typeof e=="string"&&e.trim()||null;switch(f.type){case G.Boolean:if(typeof e!="boolean")throw new Error(`Invalid value for boolean preference: ${t}, expected boolean, got: ${r}`);s[i].preferences.push({topic:t,choice:{booleanValue:e}});break;case G.Select:if(typeof e!="string"&&e!==null)throw new Error(`Invalid value for select preference: ${t}, expected string or null, got: ${r}`);if(a&&!f.preferenceOptionValues.map(({slug:g})=>g).includes(a))throw new Error(`Invalid value for select preference: ${t}, expected one of: ${f.preferenceOptionValues.map(({slug:g})=>g).join(", ")}, got: ${r}`);s[i].preferences.push({topic:t,choice:{selectValue:a}});break;case G.MultiSelect:if(typeof r!="string")throw new Error(`Invalid value for multi select preference: ${t}, expected string, got: ${r}`);s[i].preferences.push({topic:t,choice:{selectValues:F(r).map(g=>{let p=c[g];if(typeof p!="string")throw new Error(`Invalid value for multi select preference: ${t}, expected one of: ${f.preferenceOptionValues.map(({slug:w})=>w).join(", ")}, got: ${g}`);return p}).sort((g,p)=>g.localeCompare(p))}});break;default:throw new Error(`Unknown preference type: ${f.type}`)}}else s[i]?s[i].enabled=c[m[u]]===!0:s[i]={enabled:c[m[u]]===!0}}),Fe(s,(u,i)=>{if(typeof u.enabled!="boolean")throw new Error(`No mapping provided for purpose.enabled=true/false value: ${i}`);return{...u,enabled:u.enabled}})}import{uniq as Se,difference as Ee}from"lodash-es";import ae from"colors";import Ie from"inquirer";var q="[NONE]";async function pe(m,n){let h=Se(m.map(s=>Object.keys(s)).flat()),d=Ee(h,[...n.identifierColumn?[n.identifierColumn]:[],...Object.keys(n.columnToPurposeName)]);if(!n.timestampColum){let{timestampName:s}=await Ie.prompt([{name:"timestampName",message:"Choose the column that will be used as the timestamp of last preference update",type:"list",default:d.find(u=>u.toLowerCase().includes("date"))||d.find(u=>u.toLowerCase().includes("time"))||d[0],choices:[...d,q]}]);n.timestampColum=s}if(l.info(ae.magenta(`Using timestamp column "${n.timestampColum}"`)),n.timestampColum!==q){let s=m.map((u,i)=>u[n.timestampColum]?null:[i]).filter(u=>!!u).flat();if(s.length>0)throw new Error(`The timestamp column "${n.timestampColum}" is missing a value for the following rows: ${s.join(`
- `)}`);l.info(ae.magenta(`The timestamp column "${n.timestampColum}" is present for all row`))}return n}import{uniq as Ne,groupBy as Oe,difference as xe}from"lodash-es";import N from"colors";import je from"inquirer";async function le(m,n){let h=Ne(m.map(t=>Object.keys(t)).flat()),d=xe(h,[...n.identifierColumn?[n.identifierColumn]:[],...Object.keys(n.columnToPurposeName)]);if(!n.identifierColumn){let{identifierName:t}=await je.prompt([{name:"identifierName",message:"Choose the column that will be used as the identifier to upload consent preferences by",type:"list",default:d.find(c=>c.toLowerCase().includes("email"))||d[0],choices:d}]);n.identifierColumn=t}l.info(N.magenta(`Using identifier column "${n.identifierColumn}"`));let s=m.map((t,c)=>t[n.identifierColumn]?null:[c]).filter(t=>!!t).flat();if(s.length>0){let t=`The identifier column "${n.identifierColumn}" is missing a value for the following rows: ${s.join(", ")}`;if(l.warn(N.yellow(t)),!await Q({message:"Would you like to skip rows missing an identifier?"}))throw new Error(t);let f=m.length;m=m.filter(r=>r[n.identifierColumn]),l.info(N.yellow(`Skipped ${f-m.length} rows missing an identifier`))}l.info(N.magenta(`The identifier column "${n.identifierColumn}" is present for all rows`));let u=Oe(m,n.identifierColumn),i=Object.entries(u).filter(([,t])=>t.length>1);if(i.length>0){let t=`The identifier column "${n.identifierColumn}" has duplicate values for the following rows: ${i.slice(0,10).map(([f,r])=>`${f} (${r.length})`).join(`
- `)}`;if(l.warn(N.yellow(t)),!await Q({message:"Would you like to automatically take the latest update?"}))throw new Error(t);m=Object.entries(u).map(([,f])=>f.sort((e,a)=>new Date(a[n.timestampColum]).getTime()-new Date(e[n.timestampColum]).getTime())[0]).filter(f=>f)}return{currentState:n,preferences:m}}import{uniq as ce,difference as Ae}from"lodash-es";import W from"colors";import O from"inquirer";import{PreferenceTopicType as _}from"@transcend-io/privacy-types";async function fe(m,n,{purposeSlugs:h,preferenceTopics:d,forceTriggerWorkflows:s}){let u=ce(m.map(c=>Object.keys(c)).flat()),i=Ae(u,[...n.identifierColumn?[n.identifierColumn]:[],...n.timestampColum?[n.timestampColum]:[]]);if(i.length===0){if(s)return n;throw new Error("No other columns to process")}let t=[...h,...d.map(c=>`${c.purpose.trackingType}->${c.slug}`)];return await A(i,async c=>{let f=ce(m.map(e=>e[c])),r=n.columnToPurposeName[c];if(r)l.info(W.magenta(`Column "${c}" is associated with purpose "${r.purpose}"`));else{let{purposeName:e}=await O.prompt([{name:"purposeName",message:`Choose the purpose that column ${c} is associated with`,type:"list",default:t.find(p=>p.startsWith(h[0])),choices:t}]),[a,g]=e.split("->");r={purpose:a,preference:g||null,valueMapping:{}}}await A(f,async e=>{if(r.valueMapping[e]!==void 0){l.info(W.magenta(`Value "${e}" is associated with purpose value "${r.valueMapping[e]}"`));return}if(r.preference===null){let{purposeValue:a}=await O.prompt([{name:"purposeValue",message:`Choose the purpose value for value "${e}" associated with purpose "${r.purpose}"`,type:"confirm",default:e!=="false"}]);r.valueMapping[e]=a}if(r.preference!==null){let a=d.find(p=>p.slug===r.preference);if(!a){l.error(W.red(`Preference topic "${r.preference}" not found`));return}let g=a.preferenceOptionValues.map(({slug:p})=>p);if(a.type===_.Boolean){let{preferenceValue:p}=await O.prompt([{name:"preferenceValue",message:`Choose the preference value for "${a.slug}" value "${e}" associated with purpose "${r.purpose}"`,type:"confirm",default:e!=="false"}]);r.valueMapping[e]=p;return}if(a.type===_.Select){let{preferenceValue:p}=await O.prompt([{name:"preferenceValue",message:`Choose the preference value for "${a.slug}" value "${e}" associated with purpose "${r.purpose}"`,type:"list",choices:g,default:g.find(w=>w===e)}]);r.valueMapping[e]=p;return}if(a.type===_.MultiSelect){let p=F(e);await A(p,async w=>{if(r.valueMapping[w]!==void 0)return;let{preferenceValue:P}=await O.prompt([{name:"preferenceValue",message:`Choose the preference value for "${a.slug}" value "${w}" associated with purpose "${r.purpose}"`,type:"list",choices:g,default:g.find(y=>y===w)}]);r.valueMapping[w]=P});return}throw new Error(`Unknown preference topic type: ${a.type}`)}}),n.columnToPurposeName[c]=r}),n}import{PreferenceTopicType as K}from"@transcend-io/privacy-types";function me({currentConsentRecord:m,pendingUpdates:n,preferenceTopics:h}){return Object.entries(n).every(([d,{preferences:s=[],enabled:u}])=>{let i=m.purposes.find(c=>c.purpose===d);return!!i&&i.enabled===u?s.every(({topic:c,choice:f})=>i.preferences&&i.preferences.find(r=>{if(r.topic!==c)return!1;let e=h.find(a=>a.slug===c&&a.purpose.trackingType===d);if(!e)throw new Error(`Could not find preference topic for ${c}`);switch(e.type){case K.Boolean:return r.choice.booleanValue===f.booleanValue;case K.Select:return r.choice.selectValue===f.selectValue;case K.MultiSelect:let a=(r.choice.selectValues||[]).sort(),g=(f.selectValues||[]).sort();return 
a.length===g.length&&a.every((p,w)=>p===g[w]);default:throw new Error(`Unknown preference topic type: ${e.type}`)}})):!1})}import{PreferenceTopicType as Y}from"@transcend-io/privacy-types";function ue({currentConsentRecord:m,pendingUpdates:n,preferenceTopics:h}){return!!Object.entries(n).find(([d,{preferences:s=[],enabled:u}])=>{let i=m.purposes.find(t=>t.purpose===d);return i?i.enabled!==u?!0:!!s.find(({topic:t,choice:c})=>{let f=(i.preferences||[]).find(e=>e.topic===t);if(!f)return!1;let r=h.find(e=>e.slug===t&&e.purpose.trackingType===d);if(!r)throw new Error(`Could not find preference topic for ${t}`);switch(r.type){case Y.Boolean:return f.choice.booleanValue!==c.booleanValue;case Y.Select:return f.choice.selectValue!==c.selectValue;case Y.MultiSelect:let e=(f.choice.selectValues||[]).sort(),a=(c.selectValues||[]).sort();return e.length!==a.length||!e.every((g,p)=>g===a[p]);default:throw new Error(`Unknown preference topic type: ${r.type}`)}}):!1})}async function ge({file:m,sombra:n,purposeSlugs:h,preferenceTopics:d,partitionKey:s,skipExistingRecordCheck:u,forceTriggerWorkflows:i},t){let c=new Date().getTime(),f=t.getValue("fileMetadata");l.info(de.magenta(`Reading in file: "${m}"`));let r=Z(m,x.record(x.string,x.string)),e={columnToPurposeName:{},pendingSafeUpdates:{},pendingConflictUpdates:{},skippedUpdates:{},...f[m]||{},lastFetchedAt:new Date().toISOString()};e=await pe(r,e),f[m]=e,await t.setValue(f,"fileMetadata");let a=await le(r,e);e=a.currentState,r=a.preferences,f[m]=e,await t.setValue(f,"fileMetadata"),e=await fe(r,e,{preferenceTopics:d,purposeSlugs:h,forceTriggerWorkflows:i}),f[m]=e,await t.setValue(f,"fileMetadata");let g=r.map(y=>y[e.identifierColumn]),p=u?[]:await ie(n,{identifiers:g.map(y=>({value:y})),partitionKey:s}),w=De(p,"userId");e.pendingConflictUpdates={},e.pendingSafeUpdates={},e.skippedUpdates={},r.forEach(y=>{let C=y[e.identifierColumn],S=D({row:y,columnToPurposeName:e.columnToPurposeName,preferenceTopics:d,purposeSlugs:h}),U=w[C];if(i&&!U)throw new Error(`No existing consent record found for user with id: ${C}.
- When 'forceTriggerWorkflows' is set all the user identifiers should contain a consent record`);if(U&&me({currentConsentRecord:U,pendingUpdates:S,preferenceTopics:d})&&!i){e.skippedUpdates[C]=y;return}if(U&&ue({currentConsentRecord:U,pendingUpdates:S,preferenceTopics:d})){e.pendingConflictUpdates[C]={row:y,record:U};return}e.pendingSafeUpdates[C]=y}),f[m]=e,await t.setValue(f,"fileMetadata");let P=new Date().getTime();l.info(de.green(`Successfully pre-processed file: "${m}" in ${(P-c)/1e3}s`))}import{PreferenceQueryResponseItem as qe,PreferenceUpdateItem as we}from"@transcend-io/privacy-types";import*as o from"io-ts";var Be=o.type({purpose:o.string,preference:o.union([o.string,o.null]),valueMapping:o.record(o.string,o.union([o.string,o.boolean,o.null]))}),Le=o.intersection([o.type({columnToPurposeName:o.record(o.string,Be),lastFetchedAt:o.string,pendingSafeUpdates:o.record(o.string,o.record(o.string,o.string)),pendingConflictUpdates:o.record(o.string,o.type({record:qe,row:o.record(o.string,o.string)})),skippedUpdates:o.record(o.string,o.record(o.string,o.string))}),o.partial({identifierColumn:o.string,timestampColum:o.string})]),he=o.type({fileMetadata:o.record(o.string,Le),failingUpdates:o.record(o.string,o.type({uploadedAt:o.string,error:o.string,update:we})),pendingUpdates:o.record(o.string,we)});import{apply as We}from"@transcend-io/type-utils";async function Pe({auth:m,sombraAuth:n,receiptFilepath:h,file:d,partition:s,isSilent:u=!0,dryRun:i=!1,skipWorkflowTriggers:t=!1,skipConflictUpdates:c=!1,skipExistingRecordCheck:f=!1,attributes:r=[],transcendUrl:e=oe,forceTriggerWorkflows:a=!1}){let g=X(r),p=new Ge(h,he,{fileMetadata:{},failingUpdates:{},pendingUpdates:{}}),w=p.getValue("failingUpdates"),P=p.getValue("pendingUpdates"),y=p.getValue("fileMetadata");l.info(k.magenta(`Restored cache, there are:
- ${Object.values(w).length} failing requests to be retried
- ${Object.values(P).length} pending requests to be processed
- The following files are stored in cache and will be used:
- ${Object.keys(y).map(b=>b).join(`
- `)}
- The following file will be processed: ${d}
- `));let C=te(e,m),[S,U,J]=await Promise.all([re(e,m,n),a?Promise.resolve([]):H(C),a?Promise.resolve([]):ee(C)]);await ge({file:d,purposeSlugs:U.map(b=>b.trackingType),preferenceTopics:J,sombra:S,partitionKey:s,skipExistingRecordCheck:f,forceTriggerWorkflows:a},p);let E={};y=p.getValue("fileMetadata");let R=y[d];if(l.info(k.magenta(`Found ${Object.entries(R.pendingSafeUpdates).length} safe updates in ${d}`)),l.info(k.magenta(`Found ${Object.entries(R.pendingConflictUpdates).length} conflict updates in ${d}`)),l.info(k.magenta(`Found ${Object.entries(R.skippedUpdates).length} skipped updates in ${d}`)),Object.entries({...R.pendingSafeUpdates,...c?{}:We(R.pendingConflictUpdates,({row:b})=>b)}).forEach(([b,$])=>{let j=R.timestampColum===q?new Date:new Date($[R.timestampColum]),M=D({row:$,columnToPurposeName:R.columnToPurposeName,preferenceTopics:J,purposeSlugs:U.map(I=>I.trackingType)});E[b]={userId:b,partition:s,timestamp:j.toISOString(),purposes:Object.entries(M).map(([I,ve])=>({...ve,purpose:I,workflowSettings:{attributes:g,isSilent:u,skipWorkflowTrigger:t}}))}}),await p.setValue(E,"pendingUpdates"),await p.setValue({},"failingUpdates"),i){l.info(k.green(`Dry run complete, exiting. ${Object.values(E).length} pending updates. Check file: ${h}`));return}l.info(k.magenta(`Uploading ${Object.values(E).length} preferences to partition: ${s}`));let Ce=new Date().getTime(),B=new ye.SingleBar({},ye.Presets.shades_classic),z=0,L=Object.entries(E),$e=Qe(L,t?100:10);B.start(L.length,0),await V($e,async b=>{try{await S.put("v1/preferences",{json:{records:b.map(([,$])=>$),skipWorkflowTriggers:t,forceTriggerWorkflows:a}}).json()}catch($){try{let M=JSON.parse($?.response?.body||"{}");M.error&&l.error(k.red(`Error: ${M.error}`))}catch{}l.error(k.red(`Failed to upload ${b.length} user preferences to partition ${s}: ${$?.response?.body||$?.message}`));let j=p.getValue("failingUpdates");b.forEach(([M,I])=>{j[M]={uploadedAt:new Date().toISOString(),update:I,error:$?.response?.body||$?.message||"Unknown error"}}),await p.setValue(j,"failingUpdates")}z+=b.length,B.update(z)},{concurrency:40}),B.stop();let Te=new Date().getTime()-Ce;l.info(k.green(`Successfully uploaded ${L.length} user preferences to partition ${s} in "${Te/1e3}" seconds!`))}import{readdirSync as _e}from"fs";import{basename as Ke,join as be}from"path";async function Sr({auth:m,partition:n,sombraAuth:h,consentUrl:d,file:s="",directory:u,dryRun:i,skipExistingRecordCheck:t,receiptFileDir:c,skipWorkflowTriggers:f,forceTriggerWorkflows:r,skipConflictUpdates:e,isSilent:a,attributes:g,concurrency:p}){u&&s&&(l.error(v.red("Cannot provide both a directory and a file. Please provide only one.")),process.exit(1)),!s&&!u&&(l.error(v.red("A file or directory must be provided. 
Please provide one using --file=./preferences.csv or --directory=./preferences")),process.exit(1));let w=[];if(u)try{let y=_e(u).filter(C=>C.endsWith(".csv"));y.length===0&&(l.error(v.red(`No CSV files found in directory: ${u}`)),process.exit(1)),w.push(...y.map(C=>be(u,C)))}catch(P){l.error(v.red(`Failed to read directory: ${u}`)),l.error(v.red(P.message)),process.exit(1)}else try{s.endsWith(".csv")||(l.error(v.red("File must be a CSV file")),process.exit(1)),w.push(s)}catch(P){l.error(v.red(`Failed to access file: ${s}`)),l.error(v.red(P.message)),process.exit(1)}l.info(v.green(`Processing ${w.length} consent preferences files for partition: ${n}`)),l.debug(`Files to process: ${w.join(", ")}`),t&&l.info(v.bgYellow(`Skipping existing record check: ${t}`)),await V(w,async P=>{let y=Ke(P).replace(".csv","");await Pe({receiptFilepath:be(c,`${y}-receipts.json`),auth:m,sombraAuth:h,file:P,partition:n,transcendUrl:d,skipConflictUpdates:e,skipWorkflowTriggers:f,skipExistingRecordCheck:t,isSilent:a,dryRun:i,attributes:F(g),forceTriggerWorkflows:r})},{concurrency:p})}export{Sr as uploadPreferences};
- //# sourceMappingURL=impl-U5555HGJ.js.map