@transcend-io/cli 8.25.0 → 8.25.1

This diff shows the changes between publicly available package versions as published to a supported registry. It is provided for informational purposes only.
Files changed (147)
  1. package/dist/bin/bash-complete.cjs +1 -1
  2. package/dist/bin/cli.cjs +1 -1
  3. package/dist/bin/deprecated-command.cjs +2 -2
  4. package/dist/{chunk-2Q7ADV5Y.cjs → chunk-4AIAQD56.cjs} +2 -2
  5. package/dist/{chunk-2Q7ADV5Y.cjs.map → chunk-4AIAQD56.cjs.map} +1 -1
  6. package/dist/{chunk-MOWFESXN.cjs → chunk-4MXXWDM2.cjs} +2 -2
  7. package/dist/{chunk-MOWFESXN.cjs.map → chunk-4MXXWDM2.cjs.map} +1 -1
  8. package/dist/{chunk-NI434OII.cjs → chunk-6WIX75ZF.cjs} +2 -2
  9. package/dist/{chunk-NI434OII.cjs.map → chunk-6WIX75ZF.cjs.map} +1 -1
  10. package/dist/{chunk-COKHIMLO.cjs → chunk-7UB5BB7K.cjs} +2 -2
  11. package/dist/{chunk-COKHIMLO.cjs.map → chunk-7UB5BB7K.cjs.map} +1 -1
  12. package/dist/chunk-CD32SMHC.cjs +3 -0
  13. package/dist/chunk-CD32SMHC.cjs.map +1 -0
  14. package/dist/{chunk-7ZGJ4MJF.cjs → chunk-FWTJQA4M.cjs} +2 -2
  15. package/dist/{chunk-7ZGJ4MJF.cjs.map → chunk-FWTJQA4M.cjs.map} +1 -1
  16. package/dist/{chunk-5PTFTN6J.cjs → chunk-GC3HFDP4.cjs} +163 -159
  17. package/dist/chunk-GC3HFDP4.cjs.map +1 -0
  18. package/dist/{chunk-BGZ6TA6C.cjs → chunk-JQY2DY4S.cjs} +2 -2
  19. package/dist/{chunk-BGZ6TA6C.cjs.map → chunk-JQY2DY4S.cjs.map} +1 -1
  20. package/dist/{chunk-WWJHUHSQ.cjs → chunk-LCL7BED2.cjs} +6 -6
  21. package/dist/{chunk-WWJHUHSQ.cjs.map → chunk-LCL7BED2.cjs.map} +1 -1
  22. package/dist/{chunk-F3BRFYKD.cjs → chunk-M2ARSCLF.cjs} +2 -2
  23. package/dist/{chunk-F3BRFYKD.cjs.map → chunk-M2ARSCLF.cjs.map} +1 -1
  24. package/dist/{chunk-KYVDLURL.cjs → chunk-OM2P5WDQ.cjs} +4 -4
  25. package/dist/{chunk-KYVDLURL.cjs.map → chunk-OM2P5WDQ.cjs.map} +1 -1
  26. package/dist/{chunk-MPMKY5NF.cjs → chunk-SY2KZA6N.cjs} +21 -21
  27. package/dist/{chunk-MPMKY5NF.cjs.map → chunk-SY2KZA6N.cjs.map} +1 -1
  28. package/dist/{chunk-HRNCQV6G.cjs → chunk-UO63E354.cjs} +4 -4
  29. package/dist/{chunk-HRNCQV6G.cjs.map → chunk-UO63E354.cjs.map} +1 -1
  30. package/dist/{chunk-RGBVP433.cjs → chunk-XMRABG76.cjs} +2 -2
  31. package/dist/{chunk-RGBVP433.cjs.map → chunk-XMRABG76.cjs.map} +1 -1
  32. package/dist/{impl-6S3K72GP.cjs → impl-27RLTNE6.cjs} +2 -2
  33. package/dist/{impl-6S3K72GP.cjs.map → impl-27RLTNE6.cjs.map} +1 -1
  34. package/dist/impl-3QCQ57NL.cjs +2 -0
  35. package/dist/impl-3QCQ57NL.cjs.map +1 -0
  36. package/dist/{impl-YIYW7STX.cjs → impl-4SDP76RG.cjs} +2 -2
  37. package/dist/{impl-YIYW7STX.cjs.map → impl-4SDP76RG.cjs.map} +1 -1
  38. package/dist/{impl-KPZSEHQG.cjs → impl-5JGDZKKD.cjs} +2 -2
  39. package/dist/{impl-KPZSEHQG.cjs.map → impl-5JGDZKKD.cjs.map} +1 -1
  40. package/dist/{impl-GIPZ5EW5.cjs → impl-6WYAJQUE.cjs} +2 -2
  41. package/dist/{impl-GIPZ5EW5.cjs.map → impl-6WYAJQUE.cjs.map} +1 -1
  42. package/dist/{impl-EQ4EH4QJ.cjs → impl-77XWT55M.cjs} +2 -2
  43. package/dist/{impl-EQ4EH4QJ.cjs.map → impl-77XWT55M.cjs.map} +1 -1
  44. package/dist/impl-7WMPLPUZ.cjs +9 -0
  45. package/dist/impl-7WMPLPUZ.cjs.map +1 -0
  46. package/dist/impl-AXXSN2FC.cjs +2 -0
  47. package/dist/impl-AXXSN2FC.cjs.map +1 -0
  48. package/dist/{impl-HCY5MWEP.cjs → impl-C43DSDWW.cjs} +2 -2
  49. package/dist/{impl-HCY5MWEP.cjs.map → impl-C43DSDWW.cjs.map} +1 -1
  50. package/dist/{impl-AI276JNQ.cjs → impl-CWKT2AM2.cjs} +2 -2
  51. package/dist/{impl-AI276JNQ.cjs.map → impl-CWKT2AM2.cjs.map} +1 -1
  52. package/dist/impl-ELSZK5ML.cjs +2 -0
  53. package/dist/impl-ELSZK5ML.cjs.map +1 -0
  54. package/dist/{impl-FD5CDY7Y.cjs → impl-EVOTRAAK.cjs} +2 -2
  55. package/dist/{impl-FD5CDY7Y.cjs.map → impl-EVOTRAAK.cjs.map} +1 -1
  56. package/dist/{impl-VAQF5Y6W.cjs → impl-F2KNXXMR.cjs} +2 -2
  57. package/dist/{impl-VAQF5Y6W.cjs.map → impl-F2KNXXMR.cjs.map} +1 -1
  58. package/dist/{impl-LGUMGRRA.cjs → impl-FGPCTV3U.cjs} +2 -2
  59. package/dist/{impl-LGUMGRRA.cjs.map → impl-FGPCTV3U.cjs.map} +1 -1
  60. package/dist/{impl-MH7OR57S.cjs → impl-GA66PSNM.cjs} +2 -2
  61. package/dist/{impl-MH7OR57S.cjs.map → impl-GA66PSNM.cjs.map} +1 -1
  62. package/dist/impl-GHJXDRQX.cjs +2 -0
  63. package/dist/impl-GHJXDRQX.cjs.map +1 -0
  64. package/dist/impl-GNNLPQPB.cjs +2 -0
  65. package/dist/impl-GNNLPQPB.cjs.map +1 -0
  66. package/dist/{impl-7WCN5RAN.cjs → impl-HAWCNCIE.cjs} +2 -2
  67. package/dist/{impl-7WCN5RAN.cjs.map → impl-HAWCNCIE.cjs.map} +1 -1
  68. package/dist/{impl-K5LDQF7V.cjs → impl-KLLNTGD6.cjs} +2 -2
  69. package/dist/{impl-K5LDQF7V.cjs.map → impl-KLLNTGD6.cjs.map} +1 -1
  70. package/dist/{impl-7ZGTTYG5.cjs → impl-LLYATZKE.cjs} +2 -2
  71. package/dist/{impl-7ZGTTYG5.cjs.map → impl-LLYATZKE.cjs.map} +1 -1
  72. package/dist/{impl-AQY6F4SC.cjs → impl-MB2TL32H.cjs} +2 -2
  73. package/dist/{impl-AQY6F4SC.cjs.map → impl-MB2TL32H.cjs.map} +1 -1
  74. package/dist/{impl-G6WAGNDN.cjs → impl-N5UML4V3.cjs} +2 -2
  75. package/dist/{impl-G6WAGNDN.cjs.map → impl-N5UML4V3.cjs.map} +1 -1
  76. package/dist/{impl-SYXFCDZC.cjs → impl-OA7FQTRX.cjs} +4 -4
  77. package/dist/{impl-SYXFCDZC.cjs.map → impl-OA7FQTRX.cjs.map} +1 -1
  78. package/dist/impl-P4S3OGOU.cjs +2 -0
  79. package/dist/impl-P4S3OGOU.cjs.map +1 -0
  80. package/dist/impl-Q3D7NUNV.cjs +2 -0
  81. package/dist/{impl-PZ3JDEQN.cjs.map → impl-Q3D7NUNV.cjs.map} +1 -1
  82. package/dist/impl-QFXCNJYB.cjs +2 -0
  83. package/dist/impl-QFXCNJYB.cjs.map +1 -0
  84. package/dist/{impl-JREQ3SEN.cjs → impl-QSLPHFXK.cjs} +2 -2
  85. package/dist/{impl-JREQ3SEN.cjs.map → impl-QSLPHFXK.cjs.map} +1 -1
  86. package/dist/impl-R3CFYSPV.cjs +2 -0
  87. package/dist/impl-R3CFYSPV.cjs.map +1 -0
  88. package/dist/{impl-TFAARXUG.cjs → impl-RID2MAD7.cjs} +2 -2
  89. package/dist/{impl-TFAARXUG.cjs.map → impl-RID2MAD7.cjs.map} +1 -1
  90. package/dist/{impl-VGOV6N6A.cjs → impl-SHH4WC7J.cjs} +2 -2
  91. package/dist/{impl-VGOV6N6A.cjs.map → impl-SHH4WC7J.cjs.map} +1 -1
  92. package/dist/{impl-3VTN6SPE.cjs → impl-SL744OKN.cjs} +3 -3
  93. package/dist/{impl-3VTN6SPE.cjs.map → impl-SL744OKN.cjs.map} +1 -1
  94. package/dist/{impl-QSTK5NYY.cjs → impl-SLYDJIQF.cjs} +3 -3
  95. package/dist/{impl-QSTK5NYY.cjs.map → impl-SLYDJIQF.cjs.map} +1 -1
  96. package/dist/impl-UCJNP22O.cjs +2 -0
  97. package/dist/impl-UCJNP22O.cjs.map +1 -0
  98. package/dist/{impl-OIXVXSLU.cjs → impl-URWHAZIO.cjs} +2 -2
  99. package/dist/{impl-OIXVXSLU.cjs.map → impl-URWHAZIO.cjs.map} +1 -1
  100. package/dist/impl-UVBO46MQ.cjs +2 -0
  101. package/dist/impl-UVBO46MQ.cjs.map +1 -0
  102. package/dist/{impl-FJ4XWHLE.cjs → impl-VEU4X4HE.cjs} +2 -2
  103. package/dist/{impl-FJ4XWHLE.cjs.map → impl-VEU4X4HE.cjs.map} +1 -1
  104. package/dist/impl-WAR6J2SL.cjs +2 -0
  105. package/dist/impl-WAR6J2SL.cjs.map +1 -0
  106. package/dist/{impl-OXOU2A27.cjs → impl-WS774CEP.cjs} +2 -2
  107. package/dist/{impl-OXOU2A27.cjs.map → impl-WS774CEP.cjs.map} +1 -1
  108. package/dist/{impl-CP2GLGVZ.cjs → impl-XAXZ5R2V.cjs} +2 -2
  109. package/dist/{impl-CP2GLGVZ.cjs.map → impl-XAXZ5R2V.cjs.map} +1 -1
  110. package/dist/{impl-CLA7WK6U.cjs → impl-XFY3O27V.cjs} +2 -2
  111. package/dist/{impl-CLA7WK6U.cjs.map → impl-XFY3O27V.cjs.map} +1 -1
  112. package/dist/{impl-HEPMD6RG.cjs → impl-ZCXWTUMJ.cjs} +2 -2
  113. package/dist/{impl-HEPMD6RG.cjs.map → impl-ZCXWTUMJ.cjs.map} +1 -1
  114. package/dist/index.cjs +3 -3
  115. package/dist/index.cjs.map +1 -1
  116. package/dist/index.d.cts +47 -48
  117. package/package.json +1 -1
  118. package/dist/chunk-5PTFTN6J.cjs.map +0 -1
  119. package/dist/chunk-GG7P2RO6.cjs +0 -3
  120. package/dist/chunk-GG7P2RO6.cjs.map +0 -1
  121. package/dist/chunk-LR3CPNDM.cjs +0 -6
  122. package/dist/chunk-LR3CPNDM.cjs.map +0 -1
  123. package/dist/impl-32ZRDOCN.cjs +0 -9
  124. package/dist/impl-32ZRDOCN.cjs.map +0 -1
  125. package/dist/impl-4YI4ERPI.cjs +0 -2
  126. package/dist/impl-4YI4ERPI.cjs.map +0 -1
  127. package/dist/impl-5H724WN5.cjs +0 -2
  128. package/dist/impl-5H724WN5.cjs.map +0 -1
  129. package/dist/impl-7G2TDJGL.cjs +0 -2
  130. package/dist/impl-7G2TDJGL.cjs.map +0 -1
  131. package/dist/impl-CRQ4DYII.cjs +0 -2
  132. package/dist/impl-CRQ4DYII.cjs.map +0 -1
  133. package/dist/impl-LI3SHHAQ.cjs +0 -2
  134. package/dist/impl-LI3SHHAQ.cjs.map +0 -1
  135. package/dist/impl-PZ3JDEQN.cjs +0 -2
  136. package/dist/impl-QZHQCQ3B.cjs +0 -2
  137. package/dist/impl-QZHQCQ3B.cjs.map +0 -1
  138. package/dist/impl-TCN6BRVK.cjs +0 -2
  139. package/dist/impl-TCN6BRVK.cjs.map +0 -1
  140. package/dist/impl-VGJPIZZY.cjs +0 -2
  141. package/dist/impl-VGJPIZZY.cjs.map +0 -1
  142. package/dist/impl-X6Z7MKVJ.cjs +0 -2
  143. package/dist/impl-X6Z7MKVJ.cjs.map +0 -1
  144. package/dist/impl-ZGACCLWF.cjs +0 -2
  145. package/dist/impl-ZGACCLWF.cjs.map +0 -1
  146. package/dist/impl-ZV4VREIU.cjs +0 -2
  147. package/dist/impl-ZV4VREIU.cjs.map +0 -1
package/dist/{chunk-WWJHUHSQ.cjs.map → chunk-LCL7BED2.cjs.map}
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-WWJHUHSQ.cjs","../src/lib/preference-management/uploadPreferenceManagementPreferencesInteractive.ts","../src/lib/preference-management/parsePreferenceManagementCsv.ts","../src/lib/preference-management/getPreferencesForIdentifiers.ts","../src/lib/preference-management/withPreferenceQueryRetry.ts","../src/lib/preference-management/parsePreferenceIdentifiersFromCsv.ts","../src/lib/preference-management/parsePreferenceAndPurposeValuesFromCsv.ts","../src/lib/preference-management/codecs.ts"],"names":["RETRY_PREFERENCE_MSGS","withPreferenceQueryRetry","fn","maxAttempts","baseDelayMs","isRetryable","_err","msg","m","onRetry","attempt","err","preferences","value"],"mappings":"AAAA,2lCAAgH,wDAAyC,wDAAuE,gFCQ7M,oCACC,qGAGI,+DAEO,qJCXZ,qDCES,ICGfA,EAAAA,CAAkC,CAC7C,WAAA,CACA,YAAA,CACA,WAAA,CACA,sBAAA,CACA,sBACF,CAAA,CAwBA,MAAA,SAAsBC,CAAAA,CACpBC,CAAAA,CACA,CACE,WAAA,CAAAC,CAAAA,CAAc,CAAA,CACd,WAAA,CAAAC,CAAAA,CAAc,GAAA,CACd,WAAA,CAAAC,CAAAA,CAAc,CAACC,CAAAA,CAAMC,CAAAA,CAAAA,EACnBP,EAAAA,CAAsB,IAAA,CAAMQ,CAAAA,EAAMD,CAAAA,CAAI,QAAA,CAASC,CAAC,CAAC,CAAA,CACnD,OAAA,CAAAC,CACF,CAAA,CAAkB,CAAC,CAAA,CACP,CACZ,IAAIC,CAAAA,CAAU,CAAA,CAEd,GAAA,CAAA,CAAA,CAAA,CAAa,CACXA,CAAAA,EAAW,CAAA,CACX,GAAI,CACF,OAAO,MAAMR,CAAAA,CAAG,CAElB,CAAA,KAAA,CAASS,CAAAA,CAAU,CACjB,IAAMJ,CAAAA,kBAAAA,CACHI,CAAAA,EAAAA,iBAAQA,CAAAA,qBAAI,QAAA,6BAAU,MAAA,EAAQA,CAAAA,CAAI,OAAA,CAAA,CAAA,SACnC,MAAA,kBAAOA,CAAAA,SAAO,iBAAe,GAAA,CAE/B,EAAA,CAAI,CAAA,CADcD,CAAAA,CAAUP,CAAAA,EAAeE,CAAAA,CAAYM,CAAAA,CAAKJ,CAAG,CAAA,CAAA,CAE7D,MAAM,IAAI,KAAA,CACR,CAAA,8BAAA,EAAiCG,CAAO,CAAA,aAAA,EAAgBH,CAAG,CAAA,CAAA;ACwBjEK;ACfU;ACuBV,oGAAA;ANJI;AAGA;AAAA;AAKQ;AAAK;AACgC,sCAAA;AAqF1CC","file":"/home/runner/work/cli/cli/dist/chunk-WWJHUHSQ.cjs","sourcesContent":[null,"import {\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchAllPurposes,\n fetchAllPreferenceTopics,\n PreferenceTopic,\n Purpose,\n} from '../graphql';\nimport colors from 'colors';\nimport { map } from 'bluebird';\nimport { chunk } from 'lodash-es';\nimport { logger } from '../../logger';\nimport cliProgress from 'cli-progress';\nimport { parseAttributesFromString } from '../requests';\nimport { PersistedState } from '@transcend-io/persisted-state';\nimport { parsePreferenceManagementCsvWithCache } from './parsePreferenceManagementCsv';\nimport { PreferenceState } from './codecs';\nimport { PreferenceUpdateItem } from '@transcend-io/privacy-types';\nimport { apply } from '@transcend-io/type-utils';\nimport { NONE_PREFERENCE_MAP } from './parsePreferenceTimestampsFromCsv';\nimport { getPreferenceUpdatesFromRow } from './getPreferenceUpdatesFromRow';\n\n/**\n * Upload a set of consent preferences\n *\n * @param options - Options\n */\nexport async function uploadPreferenceManagementPreferencesInteractive({\n auth,\n sombraAuth,\n receiptFilepath,\n file,\n partition,\n isSilent = true,\n dryRun = false,\n skipWorkflowTriggers = false,\n skipConflictUpdates = false,\n skipExistingRecordCheck = false,\n attributes = [],\n transcendUrl,\n forceTriggerWorkflows = false,\n}: {\n /** The Transcend API key */\n auth: string;\n /** Sombra API key authentication */\n sombraAuth?: string;\n /** Partition key */\n partition: string;\n /** File where to store receipt and continue from where left off */\n receiptFilepath: string;\n /** The file to process */\n file: string;\n /** API URL for Transcend backend */\n transcendUrl: string;\n /** Whether to do a dry run */\n dryRun?: boolean;\n /** Whether to upload as isSilent */\n 
isSilent?: boolean;\n /** Attributes string pre-parse. In format Key:Value */\n attributes?: string[];\n /** Skip workflow triggers */\n skipWorkflowTriggers?: boolean;\n /**\n * When true, only update preferences that do not conflict with existing\n * preferences. When false, update all preferences in CSV based on timestamp.\n */\n skipConflictUpdates?: boolean;\n /** Whether to skip the check for existing records. SHOULD ONLY BE USED FOR INITIAL UPLOAD */\n skipExistingRecordCheck?: boolean;\n /** Whether to force trigger workflows */\n forceTriggerWorkflows?: boolean;\n}): Promise<void> {\n // Parse out the extra attributes to apply to all requests uploaded\n const parsedAttributes = parseAttributesFromString(attributes);\n\n // Create a new state file to store the requests from this run\n const preferenceState = new PersistedState(receiptFilepath, PreferenceState, {\n fileMetadata: {},\n failingUpdates: {},\n pendingUpdates: {},\n });\n const failingRequests = preferenceState.getValue('failingUpdates');\n const pendingRequests = preferenceState.getValue('pendingUpdates');\n let fileMetadata = preferenceState.getValue('fileMetadata');\n\n logger.info(\n colors.magenta(\n 'Restored cache, there are: \\n' +\n `${\n Object.values(failingRequests).length\n } failing requests to be retried\\n` +\n `${\n Object.values(pendingRequests).length\n } pending requests to be processed\\n` +\n `The following files are stored in cache and will be used:\\n${Object.keys(\n fileMetadata,\n )\n .map((x) => x)\n .join('\\n')}\\n` +\n `The following file will be processed: ${file}\\n`,\n ),\n );\n\n // Create GraphQL client to connect to Transcend backend\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const [sombra, purposes, preferenceTopics] = await Promise.all([\n // Create sombra instance to communicate with\n createSombraGotInstance(transcendUrl, auth, sombraAuth),\n // get all purposes and topics\n forceTriggerWorkflows\n ? Promise.resolve([] as Purpose[])\n : fetchAllPurposes(client),\n forceTriggerWorkflows\n ? Promise.resolve([] as PreferenceTopic[])\n : fetchAllPreferenceTopics(client),\n ]);\n\n // Process the file\n await parsePreferenceManagementCsvWithCache(\n {\n file,\n purposeSlugs: purposes.map((x) => x.trackingType),\n preferenceTopics,\n sombra,\n partitionKey: partition,\n skipExistingRecordCheck,\n forceTriggerWorkflows,\n },\n preferenceState,\n );\n\n // Construct the pending updates\n const pendingUpdates: Record<string, PreferenceUpdateItem> = {};\n fileMetadata = preferenceState.getValue('fileMetadata');\n const metadata = fileMetadata[file];\n\n logger.info(\n colors.magenta(\n `Found ${\n Object.entries(metadata.pendingSafeUpdates).length\n } safe updates in ${file}`,\n ),\n );\n logger.info(\n colors.magenta(\n `Found ${\n Object.entries(metadata.pendingConflictUpdates).length\n } conflict updates in ${file}`,\n ),\n );\n logger.info(\n colors.magenta(\n `Found ${\n Object.entries(metadata.skippedUpdates).length\n } skipped updates in ${file}`,\n ),\n );\n\n // Update either safe updates only or safe + conflict\n Object.entries({\n ...metadata.pendingSafeUpdates,\n ...(skipConflictUpdates\n ? {}\n : apply(metadata.pendingConflictUpdates, ({ row }) => row)),\n }).forEach(([userId, update]) => {\n // Determine timestamp\n const timestamp =\n metadata.timestampColum === NONE_PREFERENCE_MAP\n ? 
new Date()\n : new Date(update[metadata.timestampColum!]);\n\n // Determine updates\n const updates = getPreferenceUpdatesFromRow({\n row: update,\n columnToPurposeName: metadata.columnToPurposeName,\n preferenceTopics,\n purposeSlugs: purposes.map((x) => x.trackingType),\n });\n pendingUpdates[userId] = {\n userId,\n partition,\n timestamp: timestamp.toISOString(),\n purposes: Object.entries(updates).map(([purpose, value]) => ({\n ...value,\n purpose,\n workflowSettings: {\n attributes: parsedAttributes,\n isSilent,\n skipWorkflowTrigger: skipWorkflowTriggers,\n },\n })),\n };\n });\n await preferenceState.setValue(pendingUpdates, 'pendingUpdates');\n await preferenceState.setValue({}, 'failingUpdates');\n\n // Exist early if dry run\n if (dryRun) {\n logger.info(\n colors.green(\n `Dry run complete, exiting. ${\n Object.values(pendingUpdates).length\n } pending updates. Check file: ${receiptFilepath}`,\n ),\n );\n return;\n }\n\n logger.info(\n colors.magenta(\n `Uploading ${\n Object.values(pendingUpdates).length\n } preferences to partition: ${partition}`,\n ),\n );\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Build a GraphQL client\n let total = 0;\n const updatesToRun = Object.entries(pendingUpdates);\n const chunkedUpdates = chunk(updatesToRun, skipWorkflowTriggers ? 100 : 10);\n progressBar.start(updatesToRun.length, 0);\n await map(\n chunkedUpdates,\n async (currentChunk) => {\n // Make the request\n try {\n await sombra\n .put('v1/preferences', {\n json: {\n records: currentChunk.map(([, update]) => update),\n skipWorkflowTriggers,\n forceTriggerWorkflows,\n },\n })\n .json();\n } catch (err) {\n try {\n const parsed = JSON.parse(err?.response?.body || '{}');\n if (parsed.error) {\n logger.error(colors.red(`Error: ${parsed.error}`));\n }\n } catch (e) {\n // continue\n }\n logger.error(\n colors.red(\n `Failed to upload ${\n currentChunk.length\n } user preferences to partition ${partition}: ${\n err?.response?.body || err?.message\n }`,\n ),\n );\n const failingUpdates = preferenceState.getValue('failingUpdates');\n currentChunk.forEach(([userId, update]) => {\n failingUpdates[userId] = {\n uploadedAt: new Date().toISOString(),\n update,\n error: err?.response?.body || err?.message || 'Unknown error',\n };\n });\n await preferenceState.setValue(failingUpdates, 'failingUpdates');\n }\n\n total += currentChunk.length;\n progressBar.update(total);\n },\n {\n concurrency: 40,\n },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n logger.info(\n colors.green(\n `Successfully uploaded ${\n updatesToRun.length\n } user preferences to partition ${partition} in \"${\n totalTime / 1000\n }\" seconds!`,\n ),\n );\n}\n","import { PersistedState } from '@transcend-io/persisted-state';\nimport type { Got } from 'got';\nimport { keyBy } from 'lodash-es';\nimport * as t from 'io-ts';\nimport colors from 'colors';\nimport { FileMetadataState, PreferenceState } from './codecs';\nimport { logger } from '../../logger';\nimport { readCsv } from '../requests';\nimport { getPreferencesForIdentifiers } from './getPreferencesForIdentifiers';\nimport { PreferenceTopic } from '../graphql';\nimport { getPreferenceUpdatesFromRow } from './getPreferenceUpdatesFromRow';\nimport { parsePreferenceTimestampsFromCsv } from './parsePreferenceTimestampsFromCsv';\nimport { 
parsePreferenceIdentifiersFromCsv } from './parsePreferenceIdentifiersFromCsv';\nimport { parsePreferenceAndPurposeValuesFromCsv } from './parsePreferenceAndPurposeValuesFromCsv';\nimport { checkIfPendingPreferenceUpdatesAreNoOp } from './checkIfPendingPreferenceUpdatesAreNoOp';\nimport { checkIfPendingPreferenceUpdatesCauseConflict } from './checkIfPendingPreferenceUpdatesCauseConflict';\n\n/**\n * Parse a file into the cache\n *\n *\n * @param options - Options\n * @param cache - The cache to store the parsed file in\n * @returns The cache with the parsed file\n */\nexport async function parsePreferenceManagementCsvWithCache(\n {\n file,\n sombra,\n purposeSlugs,\n preferenceTopics,\n partitionKey,\n skipExistingRecordCheck,\n forceTriggerWorkflows,\n }: {\n /** File to parse */\n file: string;\n /** The purpose slugs that are allowed to be updated */\n purposeSlugs: string[];\n /** The preference topics */\n preferenceTopics: PreferenceTopic[];\n /** Sombra got instance */\n sombra: Got;\n /** Partition key */\n partitionKey: string;\n /** Whether to skip the check for existing records. SHOULD ONLY BE USED FOR INITIAL UPLOAD */\n skipExistingRecordCheck: boolean;\n /** Wheather to force workflow triggers */\n forceTriggerWorkflows: boolean;\n },\n cache: PersistedState<typeof PreferenceState>,\n): Promise<void> {\n // Start the timer\n const t0 = new Date().getTime();\n\n // Get the current metadata\n const fileMetadata = cache.getValue('fileMetadata');\n\n // Read in the file\n logger.info(colors.magenta(`Reading in file: \"${file}\"`));\n let preferences = readCsv(file, t.record(t.string, t.string));\n\n // start building the cache, can use previous cache as well\n let currentState: FileMetadataState = {\n columnToPurposeName: {},\n pendingSafeUpdates: {},\n pendingConflictUpdates: {},\n skippedUpdates: {},\n // Load in the last fetched time\n ...((fileMetadata[file] || {}) as Partial<FileMetadataState>),\n lastFetchedAt: new Date().toISOString(),\n };\n\n // Validate that all timestamps are present in the file\n currentState = await parsePreferenceTimestampsFromCsv(\n preferences,\n currentState,\n );\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n\n // Validate that all identifiers are present and unique\n const result = await parsePreferenceIdentifiersFromCsv(\n preferences,\n currentState,\n );\n currentState = result.currentState;\n preferences = result.preferences;\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n\n // Ensure all other columns are mapped to purpose and preference\n // slug values\n currentState = await parsePreferenceAndPurposeValuesFromCsv(\n preferences,\n currentState,\n {\n preferenceTopics,\n purposeSlugs,\n forceTriggerWorkflows,\n },\n );\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n\n // Grab existing preference store records\n const identifiers = preferences.map(\n (pref) => pref[currentState.identifierColumn!],\n );\n const existingConsentRecords = skipExistingRecordCheck\n ? 
[]\n : await getPreferencesForIdentifiers(sombra, {\n identifiers: identifiers.map((x) => ({ value: x })),\n partitionKey,\n });\n const consentRecordByIdentifier = keyBy(existingConsentRecords, 'userId');\n\n // Clear out previous updates\n currentState.pendingConflictUpdates = {};\n currentState.pendingSafeUpdates = {};\n currentState.skippedUpdates = {};\n\n // Process each row\n preferences.forEach((pref) => {\n // Grab unique Id for the user\n const userId = pref[currentState.identifierColumn!];\n\n // determine updates for user\n const pendingUpdates = getPreferenceUpdatesFromRow({\n row: pref,\n columnToPurposeName: currentState.columnToPurposeName,\n preferenceTopics,\n purposeSlugs,\n });\n\n // Grab current state of the update\n const currentConsentRecord = consentRecordByIdentifier[userId];\n if (forceTriggerWorkflows && !currentConsentRecord) {\n throw new Error(\n `No existing consent record found for user with id: ${userId}. \n When 'forceTriggerWorkflows' is set all the user identifiers should contain a consent record`,\n );\n }\n // Check if the update can be skipped\n // this is the case if a record exists, and the purpose\n // and preference values are all in sync\n if (\n currentConsentRecord &&\n checkIfPendingPreferenceUpdatesAreNoOp({\n currentConsentRecord,\n pendingUpdates,\n preferenceTopics,\n }) &&\n !forceTriggerWorkflows\n ) {\n currentState.skippedUpdates[userId] = pref;\n return;\n }\n\n // Determine if there are any conflicts\n if (\n currentConsentRecord &&\n checkIfPendingPreferenceUpdatesCauseConflict({\n currentConsentRecord,\n pendingUpdates,\n preferenceTopics,\n })\n ) {\n currentState.pendingConflictUpdates[userId] = {\n row: pref,\n record: currentConsentRecord,\n };\n return;\n }\n\n // Add to pending updates\n currentState.pendingSafeUpdates[userId] = pref;\n });\n\n // Read in the file\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n const t1 = new Date().getTime();\n logger.info(\n colors.green(\n `Successfully pre-processed file: \"${file}\" in ${(t1 - t0) / 1000}s`,\n ),\n );\n}\n","import { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\nimport type { Got } from 'got';\nimport colors from 'colors';\nimport cliProgress from 'cli-progress';\nimport { chunk } from 'lodash-es';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { map } from 'bluebird';\nimport { logger } from '../../logger';\nimport { withPreferenceQueryRetry } from './withPreferenceQueryRetry';\nimport { ConsentPreferenceResponse } from './types';\n\n/**\n * Grab the current consent preference values for a list of identifiers\n *\n * @param sombra - Backend to make API call to\n * @param options - Options\n * @returns Plaintext context information\n */\nexport async function getPreferencesForIdentifiers(\n sombra: Got,\n {\n identifiers,\n partitionKey,\n skipLogging = false,\n concurrency = 40,\n }: {\n /** The list of identifiers to look up */\n identifiers: {\n /** The value of the identifier */\n value: string;\n }[];\n /** The partition key to look up */\n partitionKey: string;\n /** Whether to skip logging */\n skipLogging?: boolean;\n /** Concurrency for requests (default 40) */\n concurrency?: number;\n },\n): Promise<PreferenceQueryResponseItem[]> {\n const results: PreferenceQueryResponseItem[] = [];\n const groupedIdentifiers = chunk(identifiers, 100);\n\n // create a new progress bar instance and use shades_classic theme\n const t0 = new Date().getTime();\n const progressBar = new 
cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n if (!skipLogging) {\n progressBar.start(identifiers.length, 0);\n }\n\n let total = 0;\n await map(\n groupedIdentifiers,\n async (group) => {\n const rawResult = await withPreferenceQueryRetry(\n () =>\n sombra\n .post(`v1/preferences/${partitionKey}/query`, {\n json: {\n filter: { identifiers: group },\n limit: group.length,\n },\n })\n .json(),\n {\n onRetry: (attempt, _err, msg) => {\n logger.warn(\n colors.yellow(\n `[RETRY] group size=${group.length} partition=${partitionKey} attempt=${attempt}: ${msg}`,\n ),\n );\n },\n },\n );\n\n const result = decodeCodec(ConsentPreferenceResponse, rawResult);\n results.push(...result.nodes);\n total += group.length;\n progressBar.update(total);\n },\n {\n concurrency,\n },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n if (!skipLogging) {\n // Log completion time\n logger.info(\n colors.green(`Completed download in \"${totalTime / 1000}\" seconds.`),\n );\n }\n\n return results;\n}\n","import colors from 'colors';\nimport { logger } from '../../logger';\nimport { sleepPromise } from '../helpers';\n\n/**\n * Transient network / platform errors that merit a retry.\n * Keep this list short and specific to avoid masking real failures.\n */\nexport const RETRY_PREFERENCE_MSGS: string[] = [\n 'ENOTFOUND',\n 'ECONNRESET',\n 'ETIMEDOUT',\n '504 Gateway Time-out',\n 'Task timed out after',\n];\n\n/**\n * Options for retrying preference queries.\n */\nexport type RetryOptions = {\n /** Max attempts including the first try (default 3) */\n maxAttempts?: number;\n /** Initial backoff in ms (default 250) */\n baseDelayMs?: number;\n /** Optional custom predicate to decide if an error is retryable */\n isRetryable?: (err: unknown, message: string) => boolean;\n /** Optional hook to log on each retry */\n onRetry?: (attempt: number, err: unknown, message: string) => void;\n};\n\n/**\n * Run an async function with standardized retry behavior for preference queries.\n * Exponential backoff with jitter; only retries on known-transient messages.\n *\n * @param fn - Function to run\n * @param options - Retry options\n * @returns Result of the function\n */\nexport async function withPreferenceQueryRetry<T>(\n fn: () => Promise<T>,\n {\n maxAttempts = 3,\n baseDelayMs = 250,\n isRetryable = (_err, msg) =>\n RETRY_PREFERENCE_MSGS.some((m) => msg.includes(m)),\n onRetry,\n }: RetryOptions = {},\n): Promise<T> {\n let attempt = 0;\n // eslint-disable-next-line no-constant-condition\n while (true) {\n attempt += 1;\n try {\n return await fn();\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n } catch (err: any) {\n const msg: string =\n (err && (err.response?.body || err.message)) ??\n String(err ?? 
'Unknown error');\n const willRetry = attempt < maxAttempts && isRetryable(err, msg);\n if (!willRetry) {\n throw new Error(\n `Preference query failed after ${attempt} attempt(s): ${msg}`,\n );\n }\n onRetry?.(attempt, err, msg);\n\n const backoff = baseDelayMs * 2 ** (attempt - 1);\n const jitter = Math.floor(Math.random() * baseDelayMs);\n const delay = backoff + jitter;\n logger.warn(\n colors.yellow(\n `[retry] attempt ${attempt}/${\n maxAttempts - 1\n }; backing off ${delay}ms: ${msg}`,\n ),\n );\n await sleepPromise(delay);\n }\n }\n}\n","import { uniq, groupBy, difference } from 'lodash-es';\nimport colors from 'colors';\nimport inquirer from 'inquirer';\nimport { FileMetadataState } from './codecs';\nimport { logger } from '../../logger';\nimport { inquirerConfirmBoolean } from '../helpers';\n\n/* eslint-disable no-param-reassign */\n\n/**\n * Parse identifiers from a CSV list of preferences\n *\n * Ensures that all rows have a valid identifier\n * and that all identifiers are unique.\n *\n * @param preferences - List of preferences\n * @param currentState - The current file metadata state for parsing this list\n * @returns The updated file metadata state\n */\nexport async function parsePreferenceIdentifiersFromCsv(\n preferences: Record<string, string>[],\n currentState: FileMetadataState,\n): Promise<{\n /** The updated state */\n currentState: FileMetadataState;\n /** The updated preferences */\n preferences: Record<string, string>[];\n}> {\n // Determine columns to map\n const columnNames = uniq(preferences.map((x) => Object.keys(x)).flat());\n\n // Determine the columns that could potentially be used for identifier\n const remainingColumnsForIdentifier = difference(columnNames, [\n ...(currentState.identifierColumn ? [currentState.identifierColumn] : []),\n ...Object.keys(currentState.columnToPurposeName),\n ]);\n\n // Determine the identifier column to work off of\n if (!currentState.identifierColumn) {\n const { identifierName } = await inquirer.prompt<{\n /** Identifier name */\n identifierName: string;\n }>([\n {\n name: 'identifierName',\n message:\n 'Choose the column that will be used as the identifier to upload consent preferences by',\n type: 'list',\n default:\n remainingColumnsForIdentifier.find((col) =>\n col.toLowerCase().includes('email'),\n ) || remainingColumnsForIdentifier[0],\n choices: remainingColumnsForIdentifier,\n },\n ]);\n currentState.identifierColumn = identifierName;\n }\n logger.info(\n colors.magenta(\n `Using identifier column \"${currentState.identifierColumn}\"`,\n ),\n );\n\n // Validate that the identifier column is present for all rows and unique\n const identifierColumnsMissing = preferences\n .map((pref, ind) => (pref[currentState.identifierColumn!] ? 
null : [ind]))\n .filter((x): x is number[] => !!x)\n .flat();\n if (identifierColumnsMissing.length > 0) {\n const msg = `The identifier column \"${\n currentState.identifierColumn\n }\" is missing a value for the following rows: ${identifierColumnsMissing.join(\n ', ',\n )}`;\n logger.warn(colors.yellow(msg));\n\n // Ask user if they would like to skip rows missing an identifier\n const skip = await inquirerConfirmBoolean({\n message: 'Would you like to skip rows missing an identifier?',\n });\n if (!skip) {\n throw new Error(msg);\n }\n\n // Filter out rows missing an identifier\n const previous = preferences.length;\n preferences = preferences.filter(\n (pref) => pref[currentState.identifierColumn!],\n );\n logger.info(\n colors.yellow(\n `Skipped ${previous - preferences.length} rows missing an identifier`,\n ),\n );\n }\n logger.info(\n colors.magenta(\n `The identifier column \"${currentState.identifierColumn}\" is present for all rows`,\n ),\n );\n\n // Validate that all identifiers are unique\n const rowsByUserId = groupBy(preferences, currentState.identifierColumn);\n const duplicateIdentifiers = Object.entries(rowsByUserId).filter(\n ([, rows]) => rows.length > 1,\n );\n if (duplicateIdentifiers.length > 0) {\n const msg = `The identifier column \"${\n currentState.identifierColumn\n }\" has duplicate values for the following rows: ${duplicateIdentifiers\n .slice(0, 10)\n .map(([userId, rows]) => `${userId} (${rows.length})`)\n .join('\\n')}`;\n logger.warn(colors.yellow(msg));\n\n // Ask user if they would like to take the most recent update\n // for each duplicate identifier\n const skip = await inquirerConfirmBoolean({\n message: 'Would you like to automatically take the latest update?',\n });\n if (!skip) {\n throw new Error(msg);\n }\n preferences = Object.entries(rowsByUserId)\n .map(([, rows]) => {\n const sorted = rows.sort(\n (a, b) =>\n new Date(b[currentState.timestampColum!]).getTime() -\n new Date(a[currentState.timestampColum!]).getTime(),\n );\n return sorted[0];\n })\n .filter((x) => x);\n }\n\n return { currentState, preferences };\n}\n/* eslint-enable no-param-reassign */\n","import { uniq, difference } from 'lodash-es';\nimport colors from 'colors';\nimport inquirer from 'inquirer';\nimport { FileMetadataState } from './codecs';\nimport { logger } from '../../logger';\nimport { mapSeries } from 'bluebird';\nimport { PreferenceTopic } from '../graphql';\nimport { PreferenceTopicType } from '@transcend-io/privacy-types';\nimport { splitCsvToList } from '../requests';\n\n/* eslint-disable no-param-reassign */\n\n/**\n * Parse out the purpose.enabled and preference values from a CSV file\n *\n * @param preferences - List of preferences\n * @param currentState - The current file metadata state for parsing this list\n * @param options - Options\n * @returns The updated file metadata state\n */\nexport async function parsePreferenceAndPurposeValuesFromCsv(\n preferences: Record<string, string>[],\n currentState: FileMetadataState,\n {\n purposeSlugs,\n preferenceTopics,\n forceTriggerWorkflows,\n }: {\n /** The purpose slugs that are allowed to be updated */\n purposeSlugs: string[];\n /** The preference topics */\n preferenceTopics: PreferenceTopic[];\n /** Force workflow triggers */\n forceTriggerWorkflows: boolean;\n },\n): Promise<FileMetadataState> {\n // Determine columns to map\n const columnNames = uniq(preferences.map((x) => Object.keys(x)).flat());\n\n // Determine the columns that could potentially be used for identifier\n const otherColumns = 
difference(columnNames, [\n ...(currentState.identifierColumn ? [currentState.identifierColumn] : []),\n ...(currentState.timestampColum ? [currentState.timestampColum] : []),\n ]);\n if (otherColumns.length === 0) {\n if (forceTriggerWorkflows) {\n return currentState;\n }\n throw new Error('No other columns to process');\n }\n\n // The purpose and preferences to map to\n const purposeNames = [\n ...purposeSlugs,\n ...preferenceTopics.map((x) => `${x.purpose.trackingType}->${x.slug}`),\n ];\n\n // Ensure all columns are accounted for\n await mapSeries(otherColumns, async (col) => {\n // Determine the unique values to map in this column\n const uniqueValues = uniq(preferences.map((x) => x[col]));\n\n // Map the column to a purpose\n let purposeMapping = currentState.columnToPurposeName[col];\n if (purposeMapping) {\n logger.info(\n colors.magenta(\n `Column \"${col}\" is associated with purpose \"${purposeMapping.purpose}\"`,\n ),\n );\n } else {\n const { purposeName } = await inquirer.prompt<{\n /** purpose name */\n purposeName: string;\n }>([\n {\n name: 'purposeName',\n message: `Choose the purpose that column ${col} is associated with`,\n type: 'list',\n default: purposeNames.find((x) => x.startsWith(purposeSlugs[0])),\n choices: purposeNames,\n },\n ]);\n const [purposeSlug, preferenceSlug] = purposeName.split('->');\n purposeMapping = {\n purpose: purposeSlug,\n preference: preferenceSlug || null,\n valueMapping: {},\n };\n }\n\n // map each value to the purpose value\n await mapSeries(uniqueValues, async (value) => {\n if (purposeMapping.valueMapping[value] !== undefined) {\n logger.info(\n colors.magenta(\n `Value \"${value}\" is associated with purpose value \"${purposeMapping.valueMapping[value]}\"`,\n ),\n );\n return;\n }\n // if preference is null, this column is just for the purpose\n if (purposeMapping.preference === null) {\n const { purposeValue } = await inquirer.prompt<{\n /** purpose value */\n purposeValue: boolean;\n }>([\n {\n name: 'purposeValue',\n message: `Choose the purpose value for value \"${value}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'confirm',\n default: value !== 'false',\n },\n ]);\n purposeMapping.valueMapping[value] = purposeValue;\n }\n\n // if preference is not null, this column is for a specific preference\n if (purposeMapping.preference !== null) {\n const preferenceTopic = preferenceTopics.find(\n (x) => x.slug === purposeMapping.preference,\n );\n if (!preferenceTopic) {\n logger.error(\n colors.red(\n `Preference topic \"${purposeMapping.preference}\" not found`,\n ),\n );\n return;\n }\n const preferenceOptions = preferenceTopic.preferenceOptionValues.map(\n ({ slug }) => slug,\n );\n\n if (preferenceTopic.type === PreferenceTopicType.Boolean) {\n const { preferenceValue } = await inquirer.prompt<{\n /** purpose value */\n preferenceValue: boolean;\n }>([\n {\n name: 'preferenceValue',\n message:\n // eslint-disable-next-line max-len\n `Choose the preference value for \"${preferenceTopic.slug}\" value \"${value}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'confirm',\n default: value !== 'false',\n },\n ]);\n purposeMapping.valueMapping[value] = preferenceValue;\n return;\n }\n\n if (preferenceTopic.type === PreferenceTopicType.Select) {\n const { preferenceValue } = await inquirer.prompt<{\n /** purpose value */\n preferenceValue: boolean;\n }>([\n {\n name: 'preferenceValue',\n // eslint-disable-next-line max-len\n message: `Choose the preference value for \"${preferenceTopic.slug}\" value 
\"${value}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'list',\n choices: preferenceOptions,\n default: preferenceOptions.find((x) => x === value),\n },\n ]);\n purposeMapping.valueMapping[value] = preferenceValue;\n return;\n }\n\n if (preferenceTopic.type === PreferenceTopicType.MultiSelect) {\n const parsedValues = splitCsvToList(value);\n // need to do this serially\n await mapSeries(parsedValues, async (parsedValue) => {\n // if we already have a value, skip re-processing it again\n if (purposeMapping.valueMapping[parsedValue] !== undefined) {\n return;\n }\n const { preferenceValue } = await inquirer.prompt<{\n /** purpose value */\n preferenceValue: boolean;\n }>([\n {\n name: 'preferenceValue',\n // eslint-disable-next-line max-len\n message: `Choose the preference value for \"${preferenceTopic.slug}\" value \"${parsedValue}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'list',\n choices: preferenceOptions,\n default: preferenceOptions.find((x) => x === parsedValue),\n },\n ]);\n purposeMapping.valueMapping[parsedValue] = preferenceValue;\n });\n return;\n }\n\n throw new Error(\n `Unknown preference topic type: ${preferenceTopic.type}`,\n );\n }\n });\n\n currentState.columnToPurposeName[col] = purposeMapping;\n });\n\n return currentState;\n}\n/* eslint-enable no-param-reassign */\n","import {\n PreferenceQueryResponseItem,\n PreferenceUpdateItem,\n} from '@transcend-io/privacy-types';\nimport * as t from 'io-ts';\n\nexport const PurposeRowMapping = t.type({\n /**\n * The slug or trackingType of the purpose to map to\n *\n * e.g. `Marketing`\n */\n purpose: t.string,\n /**\n * If the column maps to a preference instead of a purpose\n * this is the slug of the purpose.\n *\n * null value indicates that this column maps to the true/false\n * value of the purpose\n */\n preference: t.union([t.string, t.null]),\n /**\n * The mapping between each row value and purpose/preference value.\n *\n * e.g. 
for a boolean preference or purpose\n * {\n * 'true': true,\n * 'false': false,\n * '': true,\n * }\n *\n * or for a single or multi select preference\n * {\n * '': true,\n * 'value1': 'Value1',\n * 'value2': 'Value2',\n * }\n */\n valueMapping: t.record(\n t.string,\n t.union([t.string, t.boolean, t.null, t.undefined]),\n ),\n});\n\n/** Override type */\nexport type PurposeRowMapping = t.TypeOf<typeof PurposeRowMapping>;\n\n/**\n * Mapping of column name to purpose row mapping.\n * This is used to map each column in the CSV to the relevant purpose and preference definitions in\n * transcend.\n */\nexport const ColumnPurposeMap = t.record(t.string, PurposeRowMapping);\n\n/** Override type */\nexport type ColumnPurposeMap = t.TypeOf<typeof ColumnPurposeMap>;\n\nexport const IdentifierMetadataForPreference = t.type({\n /** The identifier name */\n name: t.string,\n /** Is unique on preference store */\n isUniqueOnPreferenceStore: t.boolean,\n});\n\n/** Override type */\nexport type IdentifierMetadataForPreference = t.TypeOf<\n typeof IdentifierMetadataForPreference\n>;\n\n/**\n * Mapping of identifier name to the column name in the CSV file.\n * This is used to map each identifier name to the column in the CSV file.\n */\nexport const ColumnIdentifierMap = t.record(\n t.string,\n IdentifierMetadataForPreference,\n);\n\n/** Override type */\nexport type ColumnIdentifierMap = t.TypeOf<typeof ColumnIdentifierMap>;\n\nexport const FileMetadataState = t.intersection([\n t.type({\n /**\n * Definition of how to map each column in the CSV to\n * the relevant purpose and preference definitions in transcend\n */\n columnToPurposeName: t.record(t.string, PurposeRowMapping),\n /** Last time the file was last parsed at */\n lastFetchedAt: t.string,\n /**\n * Mapping of userId to the rows in the file that need to be uploaded\n * These uploads are overwriting non-existent preferences and are safe\n */\n pendingSafeUpdates: t.record(t.string, t.record(t.string, t.string)),\n /**\n * Mapping of userId to the rows in the file that need to be uploaded\n * these records have conflicts with existing consent preferences\n */\n pendingConflictUpdates: t.record(\n t.string,\n t.type({\n record: PreferenceQueryResponseItem,\n row: t.record(t.string, t.string),\n }),\n ),\n /**\n * Mapping of userId to the rows in the file that can be skipped because\n * their preferences are already in the store\n */\n skippedUpdates: t.record(t.string, t.record(t.string, t.string)),\n }),\n t.partial({\n /** Determine which column name in file maps to consent record identifier to upload on */\n identifierColumn: t.string,\n /** Determine which column name in file maps to the timestamp */\n timestampColum: t.string,\n }),\n]);\n\n/** Override type */\nexport type FileMetadataState = t.TypeOf<typeof FileMetadataState>;\n\n/**\n * This is the type of the receipts that are stored in the file\n * that is used to track the state of the upload process.\n * It is used to resume the upload process from where it left off.\n * It is used to persist the state of the upload process across multiple runs.\n */\nexport const PreferenceUpdateMap = t.record(\n t.string,\n // This can either be true to indicate the record is pending\n // or it can be an object showing the object\n // We only return a fixed number of results to avoid\n // making the JSON file too large\n t.union([t.boolean, PreferenceUpdateItem]),\n);\n\n/** Override type */\nexport type PreferenceUpdateMap = t.TypeOf<typeof PreferenceUpdateMap>;\n\n/**\n * This is the type of the 
pending updates that are safe to run without\n * conflicts with existing consent preferences.\n *\n * Key is primaryKey of the record in the file.\n * The value is the row in the file that is safe to upload.\n */\nexport const PendingSafePreferenceUpdates = t.record(\n t.string,\n // This can either be true to indicate the record is safe\n // or it can be an object showing the object\n // We only return a fixed number of results to avoid\n // making the JSON file too large\n t.union([t.boolean, t.record(t.string, t.string)]),\n);\n\n/** Override type */\nexport type PendingSafePreferenceUpdates = t.TypeOf<\n typeof PendingSafePreferenceUpdates\n>;\n\n/**\n * These are the updates that failed to be uploaded to the API.\n */\nexport const FailingPreferenceUpdates = t.record(\n t.string,\n t.type({\n /** Time upload ran at */\n uploadedAt: t.string,\n /** Attempts to upload that resulted in an error */\n error: t.string,\n /** The update body */\n update: PreferenceUpdateItem,\n }),\n);\n\n/** Override type */\nexport type FailingPreferenceUpdates = t.TypeOf<\n typeof FailingPreferenceUpdates\n>;\n\n/**\n * This is the type of the pending updates that are in conflict with existing consent preferences.\n *\n * Key is primaryKey of the record in the file.\n * The value is the row in the file that is pending upload.\n */\nexport const PendingWithConflictPreferenceUpdates = t.record(\n t.string,\n // We always return the conflicts for investigation\n t.type({\n /** Record to be inserted to transcend v1/preferences API */\n record: PreferenceQueryResponseItem,\n /** The row in the file that is pending upload */\n row: t.record(t.string, t.string),\n }),\n);\n\n/** Override type */\nexport type PendingWithConflictPreferenceUpdates = t.TypeOf<\n typeof PendingWithConflictPreferenceUpdates\n>;\n\n/**\n * The set of preference updates that are skipped\n * Key is primaryKey and value is the row in the CSV\n * that is skipped.\n *\n * This is usually because the preferences are already in the store\n * or there are duplicate rows in the CSV file that are identical.\n */\nexport const SkippedPreferenceUpdates = t.record(\n t.string,\n t.record(t.string, t.string),\n);\n\n/** Override type */\nexport type SkippedPreferenceUpdates = t.TypeOf<\n typeof SkippedPreferenceUpdates\n>;\n\n/** Persist this data between runs of the script */\nexport const PreferenceState = t.type({\n /**\n * Store a cache of previous files read in\n */\n fileMetadata: t.record(t.string, FileMetadataState),\n /**\n * The set of successful uploads to Transcend\n * Mapping from userId to the upload metadata\n */\n failingUpdates: t.record(\n t.string,\n t.type({\n /** Time upload ran at */\n uploadedAt: t.string,\n /** Attempts to upload that resulted in an error */\n error: t.string,\n /** The update body */\n update: PreferenceUpdateItem,\n }),\n ),\n /**\n * The set of pending uploads to Transcend\n * Mapping from userId to the upload metadata\n */\n pendingUpdates: t.record(t.string, PreferenceUpdateItem),\n});\n\n/** Override type */\nexport type PreferenceState = t.TypeOf<typeof PreferenceState>;\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-LCL7BED2.cjs","../src/lib/preference-management/uploadPreferenceManagementPreferencesInteractive.ts","../src/lib/preference-management/parsePreferenceManagementCsv.ts","../src/lib/preference-management/getPreferencesForIdentifiers.ts","../src/lib/preference-management/withPreferenceQueryRetry.ts","../src/lib/preference-management/parsePreferenceIdentifiersFromCsv.ts","../src/lib/preference-management/parsePreferenceAndPurposeValuesFromCsv.ts","../src/lib/preference-management/codecs.ts"],"names":["RETRY_PREFERENCE_MSGS","withPreferenceQueryRetry","fn","maxAttempts","baseDelayMs","isRetryable","_err","msg","m","onRetry","attempt","err","preferences","value"],"mappings":"AAAA,2lCAAgH,wDAAyC,wDAAuE,gFCQ7M,oCACC,qGAGI,+DAEO,qJCXZ,qDCES,ICGfA,EAAAA,CAAkC,CAC7C,WAAA,CACA,YAAA,CACA,WAAA,CACA,sBAAA,CACA,sBACF,CAAA,CAwBA,MAAA,SAAsBC,CAAAA,CACpBC,CAAAA,CACA,CACE,WAAA,CAAAC,CAAAA,CAAc,CAAA,CACd,WAAA,CAAAC,CAAAA,CAAc,GAAA,CACd,WAAA,CAAAC,CAAAA,CAAc,CAACC,CAAAA,CAAMC,CAAAA,CAAAA,EACnBP,EAAAA,CAAsB,IAAA,CAAMQ,CAAAA,EAAMD,CAAAA,CAAI,QAAA,CAASC,CAAC,CAAC,CAAA,CACnD,OAAA,CAAAC,CACF,CAAA,CAAkB,CAAC,CAAA,CACP,CACZ,IAAIC,CAAAA,CAAU,CAAA,CAEd,GAAA,CAAA,CAAA,CAAA,CAAa,CACXA,CAAAA,EAAW,CAAA,CACX,GAAI,CACF,OAAO,MAAMR,CAAAA,CAAG,CAElB,CAAA,KAAA,CAASS,CAAAA,CAAU,CACjB,IAAMJ,CAAAA,kBAAAA,CACHI,CAAAA,EAAAA,iBAAQA,CAAAA,qBAAI,QAAA,6BAAU,MAAA,EAAQA,CAAAA,CAAI,OAAA,CAAA,CAAA,SACnC,MAAA,kBAAOA,CAAAA,SAAO,iBAAe,GAAA,CAE/B,EAAA,CAAI,CAAA,CADcD,CAAAA,CAAUP,CAAAA,EAAeE,CAAAA,CAAYM,CAAAA,CAAKJ,CAAG,CAAA,CAAA,CAE7D,MAAM,IAAI,KAAA,CACR,CAAA,8BAAA,EAAiCG,CAAO,CAAA,aAAA,EAAgBH,CAAG,CAAA,CAAA;ACwBjEK;ACfU;ACuBV,oGAAA;ANJI;AAGA;AAAA;AAKQ;AAAK;AACgC,sCAAA;AAqF1CC","file":"/home/runner/work/cli/cli/dist/chunk-LCL7BED2.cjs","sourcesContent":[null,"import {\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchAllPurposes,\n fetchAllPreferenceTopics,\n PreferenceTopic,\n Purpose,\n} from '../graphql';\nimport colors from 'colors';\nimport { map } from 'bluebird';\nimport { chunk } from 'lodash-es';\nimport { logger } from '../../logger';\nimport cliProgress from 'cli-progress';\nimport { parseAttributesFromString } from '../requests';\nimport { PersistedState } from '@transcend-io/persisted-state';\nimport { parsePreferenceManagementCsvWithCache } from './parsePreferenceManagementCsv';\nimport { PreferenceState } from './codecs';\nimport { PreferenceUpdateItem } from '@transcend-io/privacy-types';\nimport { apply } from '@transcend-io/type-utils';\nimport { NONE_PREFERENCE_MAP } from './parsePreferenceTimestampsFromCsv';\nimport { getPreferenceUpdatesFromRow } from './getPreferenceUpdatesFromRow';\n\n/**\n * Upload a set of consent preferences\n *\n * @param options - Options\n */\nexport async function uploadPreferenceManagementPreferencesInteractive({\n auth,\n sombraAuth,\n receiptFilepath,\n file,\n partition,\n isSilent = true,\n dryRun = false,\n skipWorkflowTriggers = false,\n skipConflictUpdates = false,\n skipExistingRecordCheck = false,\n attributes = [],\n transcendUrl,\n forceTriggerWorkflows = false,\n}: {\n /** The Transcend API key */\n auth: string;\n /** Sombra API key authentication */\n sombraAuth?: string;\n /** Partition key */\n partition: string;\n /** File where to store receipt and continue from where left off */\n receiptFilepath: string;\n /** The file to process */\n file: string;\n /** API URL for Transcend backend */\n transcendUrl: string;\n /** Whether to do a dry run */\n dryRun?: boolean;\n /** Whether to upload as isSilent */\n 
isSilent?: boolean;\n /** Attributes string pre-parse. In format Key:Value */\n attributes?: string[];\n /** Skip workflow triggers */\n skipWorkflowTriggers?: boolean;\n /**\n * When true, only update preferences that do not conflict with existing\n * preferences. When false, update all preferences in CSV based on timestamp.\n */\n skipConflictUpdates?: boolean;\n /** Whether to skip the check for existing records. SHOULD ONLY BE USED FOR INITIAL UPLOAD */\n skipExistingRecordCheck?: boolean;\n /** Whether to force trigger workflows */\n forceTriggerWorkflows?: boolean;\n}): Promise<void> {\n // Parse out the extra attributes to apply to all requests uploaded\n const parsedAttributes = parseAttributesFromString(attributes);\n\n // Create a new state file to store the requests from this run\n const preferenceState = new PersistedState(receiptFilepath, PreferenceState, {\n fileMetadata: {},\n failingUpdates: {},\n pendingUpdates: {},\n });\n const failingRequests = preferenceState.getValue('failingUpdates');\n const pendingRequests = preferenceState.getValue('pendingUpdates');\n let fileMetadata = preferenceState.getValue('fileMetadata');\n\n logger.info(\n colors.magenta(\n 'Restored cache, there are: \\n' +\n `${\n Object.values(failingRequests).length\n } failing requests to be retried\\n` +\n `${\n Object.values(pendingRequests).length\n } pending requests to be processed\\n` +\n `The following files are stored in cache and will be used:\\n${Object.keys(\n fileMetadata,\n )\n .map((x) => x)\n .join('\\n')}\\n` +\n `The following file will be processed: ${file}\\n`,\n ),\n );\n\n // Create GraphQL client to connect to Transcend backend\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const [sombra, purposes, preferenceTopics] = await Promise.all([\n // Create sombra instance to communicate with\n createSombraGotInstance(transcendUrl, auth, sombraAuth),\n // get all purposes and topics\n forceTriggerWorkflows\n ? Promise.resolve([] as Purpose[])\n : fetchAllPurposes(client),\n forceTriggerWorkflows\n ? Promise.resolve([] as PreferenceTopic[])\n : fetchAllPreferenceTopics(client),\n ]);\n\n // Process the file\n await parsePreferenceManagementCsvWithCache(\n {\n file,\n purposeSlugs: purposes.map((x) => x.trackingType),\n preferenceTopics,\n sombra,\n partitionKey: partition,\n skipExistingRecordCheck,\n forceTriggerWorkflows,\n },\n preferenceState,\n );\n\n // Construct the pending updates\n const pendingUpdates: Record<string, PreferenceUpdateItem> = {};\n fileMetadata = preferenceState.getValue('fileMetadata');\n const metadata = fileMetadata[file];\n\n logger.info(\n colors.magenta(\n `Found ${\n Object.entries(metadata.pendingSafeUpdates).length\n } safe updates in ${file}`,\n ),\n );\n logger.info(\n colors.magenta(\n `Found ${\n Object.entries(metadata.pendingConflictUpdates).length\n } conflict updates in ${file}`,\n ),\n );\n logger.info(\n colors.magenta(\n `Found ${\n Object.entries(metadata.skippedUpdates).length\n } skipped updates in ${file}`,\n ),\n );\n\n // Update either safe updates only or safe + conflict\n Object.entries({\n ...metadata.pendingSafeUpdates,\n ...(skipConflictUpdates\n ? {}\n : apply(metadata.pendingConflictUpdates, ({ row }) => row)),\n }).forEach(([userId, update]) => {\n // Determine timestamp\n const timestamp =\n metadata.timestampColum === NONE_PREFERENCE_MAP\n ? 
new Date()\n : new Date(update[metadata.timestampColum!]);\n\n // Determine updates\n const updates = getPreferenceUpdatesFromRow({\n row: update,\n columnToPurposeName: metadata.columnToPurposeName,\n preferenceTopics,\n purposeSlugs: purposes.map((x) => x.trackingType),\n });\n pendingUpdates[userId] = {\n userId,\n partition,\n timestamp: timestamp.toISOString(),\n purposes: Object.entries(updates).map(([purpose, value]) => ({\n ...value,\n purpose,\n workflowSettings: {\n attributes: parsedAttributes,\n isSilent,\n skipWorkflowTrigger: skipWorkflowTriggers,\n },\n })),\n };\n });\n await preferenceState.setValue(pendingUpdates, 'pendingUpdates');\n await preferenceState.setValue({}, 'failingUpdates');\n\n // Exist early if dry run\n if (dryRun) {\n logger.info(\n colors.green(\n `Dry run complete, exiting. ${\n Object.values(pendingUpdates).length\n } pending updates. Check file: ${receiptFilepath}`,\n ),\n );\n return;\n }\n\n logger.info(\n colors.magenta(\n `Uploading ${\n Object.values(pendingUpdates).length\n } preferences to partition: ${partition}`,\n ),\n );\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Build a GraphQL client\n let total = 0;\n const updatesToRun = Object.entries(pendingUpdates);\n const chunkedUpdates = chunk(updatesToRun, skipWorkflowTriggers ? 100 : 10);\n progressBar.start(updatesToRun.length, 0);\n await map(\n chunkedUpdates,\n async (currentChunk) => {\n // Make the request\n try {\n await sombra\n .put('v1/preferences', {\n json: {\n records: currentChunk.map(([, update]) => update),\n skipWorkflowTriggers,\n forceTriggerWorkflows,\n },\n })\n .json();\n } catch (err) {\n try {\n const parsed = JSON.parse(err?.response?.body || '{}');\n if (parsed.error) {\n logger.error(colors.red(`Error: ${parsed.error}`));\n }\n } catch (e) {\n // continue\n }\n logger.error(\n colors.red(\n `Failed to upload ${\n currentChunk.length\n } user preferences to partition ${partition}: ${\n err?.response?.body || err?.message\n }`,\n ),\n );\n const failingUpdates = preferenceState.getValue('failingUpdates');\n currentChunk.forEach(([userId, update]) => {\n failingUpdates[userId] = {\n uploadedAt: new Date().toISOString(),\n update,\n error: err?.response?.body || err?.message || 'Unknown error',\n };\n });\n await preferenceState.setValue(failingUpdates, 'failingUpdates');\n }\n\n total += currentChunk.length;\n progressBar.update(total);\n },\n {\n concurrency: 40,\n },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n logger.info(\n colors.green(\n `Successfully uploaded ${\n updatesToRun.length\n } user preferences to partition ${partition} in \"${\n totalTime / 1000\n }\" seconds!`,\n ),\n );\n}\n","import { PersistedState } from '@transcend-io/persisted-state';\nimport type { Got } from 'got';\nimport { keyBy } from 'lodash-es';\nimport * as t from 'io-ts';\nimport colors from 'colors';\nimport { FileMetadataState, PreferenceState } from './codecs';\nimport { logger } from '../../logger';\nimport { readCsv } from '../requests';\nimport { getPreferencesForIdentifiers } from './getPreferencesForIdentifiers';\nimport { PreferenceTopic } from '../graphql';\nimport { getPreferenceUpdatesFromRow } from './getPreferenceUpdatesFromRow';\nimport { parsePreferenceTimestampsFromCsv } from './parsePreferenceTimestampsFromCsv';\nimport { 
parsePreferenceIdentifiersFromCsv } from './parsePreferenceIdentifiersFromCsv';\nimport { parsePreferenceAndPurposeValuesFromCsv } from './parsePreferenceAndPurposeValuesFromCsv';\nimport { checkIfPendingPreferenceUpdatesAreNoOp } from './checkIfPendingPreferenceUpdatesAreNoOp';\nimport { checkIfPendingPreferenceUpdatesCauseConflict } from './checkIfPendingPreferenceUpdatesCauseConflict';\n\n/**\n * Parse a file into the cache\n *\n *\n * @param options - Options\n * @param cache - The cache to store the parsed file in\n * @returns The cache with the parsed file\n */\nexport async function parsePreferenceManagementCsvWithCache(\n {\n file,\n sombra,\n purposeSlugs,\n preferenceTopics,\n partitionKey,\n skipExistingRecordCheck,\n forceTriggerWorkflows,\n }: {\n /** File to parse */\n file: string;\n /** The purpose slugs that are allowed to be updated */\n purposeSlugs: string[];\n /** The preference topics */\n preferenceTopics: PreferenceTopic[];\n /** Sombra got instance */\n sombra: Got;\n /** Partition key */\n partitionKey: string;\n /** Whether to skip the check for existing records. SHOULD ONLY BE USED FOR INITIAL UPLOAD */\n skipExistingRecordCheck: boolean;\n /** Wheather to force workflow triggers */\n forceTriggerWorkflows: boolean;\n },\n cache: PersistedState<typeof PreferenceState>,\n): Promise<void> {\n // Start the timer\n const t0 = new Date().getTime();\n\n // Get the current metadata\n const fileMetadata = cache.getValue('fileMetadata');\n\n // Read in the file\n logger.info(colors.magenta(`Reading in file: \"${file}\"`));\n let preferences = readCsv(file, t.record(t.string, t.string));\n\n // start building the cache, can use previous cache as well\n let currentState: FileMetadataState = {\n columnToPurposeName: {},\n pendingSafeUpdates: {},\n pendingConflictUpdates: {},\n skippedUpdates: {},\n // Load in the last fetched time\n ...((fileMetadata[file] || {}) as Partial<FileMetadataState>),\n lastFetchedAt: new Date().toISOString(),\n };\n\n // Validate that all timestamps are present in the file\n currentState = await parsePreferenceTimestampsFromCsv(\n preferences,\n currentState,\n );\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n\n // Validate that all identifiers are present and unique\n const result = await parsePreferenceIdentifiersFromCsv(\n preferences,\n currentState,\n );\n currentState = result.currentState;\n preferences = result.preferences;\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n\n // Ensure all other columns are mapped to purpose and preference\n // slug values\n currentState = await parsePreferenceAndPurposeValuesFromCsv(\n preferences,\n currentState,\n {\n preferenceTopics,\n purposeSlugs,\n forceTriggerWorkflows,\n },\n );\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n\n // Grab existing preference store records\n const identifiers = preferences.map(\n (pref) => pref[currentState.identifierColumn!],\n );\n const existingConsentRecords = skipExistingRecordCheck\n ? 
[]\n : await getPreferencesForIdentifiers(sombra, {\n identifiers: identifiers.map((x) => ({ value: x })),\n partitionKey,\n });\n const consentRecordByIdentifier = keyBy(existingConsentRecords, 'userId');\n\n // Clear out previous updates\n currentState.pendingConflictUpdates = {};\n currentState.pendingSafeUpdates = {};\n currentState.skippedUpdates = {};\n\n // Process each row\n preferences.forEach((pref) => {\n // Grab unique Id for the user\n const userId = pref[currentState.identifierColumn!];\n\n // determine updates for user\n const pendingUpdates = getPreferenceUpdatesFromRow({\n row: pref,\n columnToPurposeName: currentState.columnToPurposeName,\n preferenceTopics,\n purposeSlugs,\n });\n\n // Grab current state of the update\n const currentConsentRecord = consentRecordByIdentifier[userId];\n if (forceTriggerWorkflows && !currentConsentRecord) {\n throw new Error(\n `No existing consent record found for user with id: ${userId}. \n When 'forceTriggerWorkflows' is set all the user identifiers should contain a consent record`,\n );\n }\n // Check if the update can be skipped\n // this is the case if a record exists, and the purpose\n // and preference values are all in sync\n if (\n currentConsentRecord &&\n checkIfPendingPreferenceUpdatesAreNoOp({\n currentConsentRecord,\n pendingUpdates,\n preferenceTopics,\n }) &&\n !forceTriggerWorkflows\n ) {\n currentState.skippedUpdates[userId] = pref;\n return;\n }\n\n // Determine if there are any conflicts\n if (\n currentConsentRecord &&\n checkIfPendingPreferenceUpdatesCauseConflict({\n currentConsentRecord,\n pendingUpdates,\n preferenceTopics,\n })\n ) {\n currentState.pendingConflictUpdates[userId] = {\n row: pref,\n record: currentConsentRecord,\n };\n return;\n }\n\n // Add to pending updates\n currentState.pendingSafeUpdates[userId] = pref;\n });\n\n // Read in the file\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n const t1 = new Date().getTime();\n logger.info(\n colors.green(\n `Successfully pre-processed file: \"${file}\" in ${(t1 - t0) / 1000}s`,\n ),\n );\n}\n","import { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\nimport type { Got } from 'got';\nimport colors from 'colors';\nimport cliProgress from 'cli-progress';\nimport { chunk } from 'lodash-es';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { map } from 'bluebird';\nimport { logger } from '../../logger';\nimport { withPreferenceQueryRetry } from './withPreferenceQueryRetry';\nimport { ConsentPreferenceResponse } from './types';\n\n/**\n * Grab the current consent preference values for a list of identifiers\n *\n * @param sombra - Backend to make API call to\n * @param options - Options\n * @returns Plaintext context information\n */\nexport async function getPreferencesForIdentifiers(\n sombra: Got,\n {\n identifiers,\n partitionKey,\n skipLogging = false,\n concurrency = 40,\n }: {\n /** The list of identifiers to look up */\n identifiers: {\n /** The value of the identifier */\n value: string;\n }[];\n /** The partition key to look up */\n partitionKey: string;\n /** Whether to skip logging */\n skipLogging?: boolean;\n /** Concurrency for requests (default 40) */\n concurrency?: number;\n },\n): Promise<PreferenceQueryResponseItem[]> {\n const results: PreferenceQueryResponseItem[] = [];\n const groupedIdentifiers = chunk(identifiers, 100);\n\n // create a new progress bar instance and use shades_classic theme\n const t0 = new Date().getTime();\n const progressBar = new 
cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n if (!skipLogging) {\n progressBar.start(identifiers.length, 0);\n }\n\n let total = 0;\n await map(\n groupedIdentifiers,\n async (group) => {\n const rawResult = await withPreferenceQueryRetry(\n () =>\n sombra\n .post(`v1/preferences/${partitionKey}/query`, {\n json: {\n filter: { identifiers: group },\n limit: group.length,\n },\n })\n .json(),\n {\n onRetry: (attempt, _err, msg) => {\n logger.warn(\n colors.yellow(\n `[RETRY] group size=${group.length} partition=${partitionKey} attempt=${attempt}: ${msg}`,\n ),\n );\n },\n },\n );\n\n const result = decodeCodec(ConsentPreferenceResponse, rawResult);\n results.push(...result.nodes);\n total += group.length;\n progressBar.update(total);\n },\n {\n concurrency,\n },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n if (!skipLogging) {\n // Log completion time\n logger.info(\n colors.green(`Completed download in \"${totalTime / 1000}\" seconds.`),\n );\n }\n\n return results;\n}\n","import colors from 'colors';\nimport { logger } from '../../logger';\nimport { sleepPromise } from '../helpers';\n\n/**\n * Transient network / platform errors that merit a retry.\n * Keep this list short and specific to avoid masking real failures.\n */\nexport const RETRY_PREFERENCE_MSGS: string[] = [\n 'ENOTFOUND',\n 'ECONNRESET',\n 'ETIMEDOUT',\n '504 Gateway Time-out',\n 'Task timed out after',\n];\n\n/**\n * Options for retrying preference queries.\n */\nexport type RetryOptions = {\n /** Max attempts including the first try (default 3) */\n maxAttempts?: number;\n /** Initial backoff in ms (default 250) */\n baseDelayMs?: number;\n /** Optional custom predicate to decide if an error is retryable */\n isRetryable?: (err: unknown, message: string) => boolean;\n /** Optional hook to log on each retry */\n onRetry?: (attempt: number, err: unknown, message: string) => void;\n};\n\n/**\n * Run an async function with standardized retry behavior for preference queries.\n * Exponential backoff with jitter; only retries on known-transient messages.\n *\n * @param fn - Function to run\n * @param options - Retry options\n * @returns Result of the function\n */\nexport async function withPreferenceQueryRetry<T>(\n fn: () => Promise<T>,\n {\n maxAttempts = 3,\n baseDelayMs = 250,\n isRetryable = (_err, msg) =>\n RETRY_PREFERENCE_MSGS.some((m) => msg.includes(m)),\n onRetry,\n }: RetryOptions = {},\n): Promise<T> {\n let attempt = 0;\n // eslint-disable-next-line no-constant-condition\n while (true) {\n attempt += 1;\n try {\n return await fn();\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n } catch (err: any) {\n const msg: string =\n (err && (err.response?.body || err.message)) ??\n String(err ?? 
'Unknown error');\n const willRetry = attempt < maxAttempts && isRetryable(err, msg);\n if (!willRetry) {\n throw new Error(\n `Preference query failed after ${attempt} attempt(s): ${msg}`,\n );\n }\n onRetry?.(attempt, err, msg);\n\n const backoff = baseDelayMs * 2 ** (attempt - 1);\n const jitter = Math.floor(Math.random() * baseDelayMs);\n const delay = backoff + jitter;\n logger.warn(\n colors.yellow(\n `[retry] attempt ${attempt}/${\n maxAttempts - 1\n }; backing off ${delay}ms: ${msg}`,\n ),\n );\n await sleepPromise(delay);\n }\n }\n}\n","import { uniq, groupBy, difference } from 'lodash-es';\nimport colors from 'colors';\nimport inquirer from 'inquirer';\nimport { FileMetadataState } from './codecs';\nimport { logger } from '../../logger';\nimport { inquirerConfirmBoolean } from '../helpers';\n\n/* eslint-disable no-param-reassign */\n\n/**\n * Parse identifiers from a CSV list of preferences\n *\n * Ensures that all rows have a valid identifier\n * and that all identifiers are unique.\n *\n * @param preferences - List of preferences\n * @param currentState - The current file metadata state for parsing this list\n * @returns The updated file metadata state\n */\nexport async function parsePreferenceIdentifiersFromCsv(\n preferences: Record<string, string>[],\n currentState: FileMetadataState,\n): Promise<{\n /** The updated state */\n currentState: FileMetadataState;\n /** The updated preferences */\n preferences: Record<string, string>[];\n}> {\n // Determine columns to map\n const columnNames = uniq(preferences.map((x) => Object.keys(x)).flat());\n\n // Determine the columns that could potentially be used for identifier\n const remainingColumnsForIdentifier = difference(columnNames, [\n ...(currentState.identifierColumn ? [currentState.identifierColumn] : []),\n ...Object.keys(currentState.columnToPurposeName),\n ]);\n\n // Determine the identifier column to work off of\n if (!currentState.identifierColumn) {\n const { identifierName } = await inquirer.prompt<{\n /** Identifier name */\n identifierName: string;\n }>([\n {\n name: 'identifierName',\n message:\n 'Choose the column that will be used as the identifier to upload consent preferences by',\n type: 'list',\n default:\n remainingColumnsForIdentifier.find((col) =>\n col.toLowerCase().includes('email'),\n ) || remainingColumnsForIdentifier[0],\n choices: remainingColumnsForIdentifier,\n },\n ]);\n currentState.identifierColumn = identifierName;\n }\n logger.info(\n colors.magenta(\n `Using identifier column \"${currentState.identifierColumn}\"`,\n ),\n );\n\n // Validate that the identifier column is present for all rows and unique\n const identifierColumnsMissing = preferences\n .map((pref, ind) => (pref[currentState.identifierColumn!] ? 
null : [ind]))\n .filter((x): x is number[] => !!x)\n .flat();\n if (identifierColumnsMissing.length > 0) {\n const msg = `The identifier column \"${\n currentState.identifierColumn\n }\" is missing a value for the following rows: ${identifierColumnsMissing.join(\n ', ',\n )}`;\n logger.warn(colors.yellow(msg));\n\n // Ask user if they would like to skip rows missing an identifier\n const skip = await inquirerConfirmBoolean({\n message: 'Would you like to skip rows missing an identifier?',\n });\n if (!skip) {\n throw new Error(msg);\n }\n\n // Filter out rows missing an identifier\n const previous = preferences.length;\n preferences = preferences.filter(\n (pref) => pref[currentState.identifierColumn!],\n );\n logger.info(\n colors.yellow(\n `Skipped ${previous - preferences.length} rows missing an identifier`,\n ),\n );\n }\n logger.info(\n colors.magenta(\n `The identifier column \"${currentState.identifierColumn}\" is present for all rows`,\n ),\n );\n\n // Validate that all identifiers are unique\n const rowsByUserId = groupBy(preferences, currentState.identifierColumn);\n const duplicateIdentifiers = Object.entries(rowsByUserId).filter(\n ([, rows]) => rows.length > 1,\n );\n if (duplicateIdentifiers.length > 0) {\n const msg = `The identifier column \"${\n currentState.identifierColumn\n }\" has duplicate values for the following rows: ${duplicateIdentifiers\n .slice(0, 10)\n .map(([userId, rows]) => `${userId} (${rows.length})`)\n .join('\\n')}`;\n logger.warn(colors.yellow(msg));\n\n // Ask user if they would like to take the most recent update\n // for each duplicate identifier\n const skip = await inquirerConfirmBoolean({\n message: 'Would you like to automatically take the latest update?',\n });\n if (!skip) {\n throw new Error(msg);\n }\n preferences = Object.entries(rowsByUserId)\n .map(([, rows]) => {\n const sorted = rows.sort(\n (a, b) =>\n new Date(b[currentState.timestampColum!]).getTime() -\n new Date(a[currentState.timestampColum!]).getTime(),\n );\n return sorted[0];\n })\n .filter((x) => x);\n }\n\n return { currentState, preferences };\n}\n/* eslint-enable no-param-reassign */\n","import { uniq, difference } from 'lodash-es';\nimport colors from 'colors';\nimport inquirer from 'inquirer';\nimport { FileMetadataState } from './codecs';\nimport { logger } from '../../logger';\nimport { mapSeries } from 'bluebird';\nimport { PreferenceTopic } from '../graphql';\nimport { PreferenceTopicType } from '@transcend-io/privacy-types';\nimport { splitCsvToList } from '../requests';\n\n/* eslint-disable no-param-reassign */\n\n/**\n * Parse out the purpose.enabled and preference values from a CSV file\n *\n * @param preferences - List of preferences\n * @param currentState - The current file metadata state for parsing this list\n * @param options - Options\n * @returns The updated file metadata state\n */\nexport async function parsePreferenceAndPurposeValuesFromCsv(\n preferences: Record<string, string>[],\n currentState: FileMetadataState,\n {\n purposeSlugs,\n preferenceTopics,\n forceTriggerWorkflows,\n }: {\n /** The purpose slugs that are allowed to be updated */\n purposeSlugs: string[];\n /** The preference topics */\n preferenceTopics: PreferenceTopic[];\n /** Force workflow triggers */\n forceTriggerWorkflows: boolean;\n },\n): Promise<FileMetadataState> {\n // Determine columns to map\n const columnNames = uniq(preferences.map((x) => Object.keys(x)).flat());\n\n // Determine the columns that could potentially be used for identifier\n const otherColumns = 
difference(columnNames, [\n ...(currentState.identifierColumn ? [currentState.identifierColumn] : []),\n ...(currentState.timestampColum ? [currentState.timestampColum] : []),\n ]);\n if (otherColumns.length === 0) {\n if (forceTriggerWorkflows) {\n return currentState;\n }\n throw new Error('No other columns to process');\n }\n\n // The purpose and preferences to map to\n const purposeNames = [\n ...purposeSlugs,\n ...preferenceTopics.map((x) => `${x.purpose.trackingType}->${x.slug}`),\n ];\n\n // Ensure all columns are accounted for\n await mapSeries(otherColumns, async (col) => {\n // Determine the unique values to map in this column\n const uniqueValues = uniq(preferences.map((x) => x[col]));\n\n // Map the column to a purpose\n let purposeMapping = currentState.columnToPurposeName[col];\n if (purposeMapping) {\n logger.info(\n colors.magenta(\n `Column \"${col}\" is associated with purpose \"${purposeMapping.purpose}\"`,\n ),\n );\n } else {\n const { purposeName } = await inquirer.prompt<{\n /** purpose name */\n purposeName: string;\n }>([\n {\n name: 'purposeName',\n message: `Choose the purpose that column ${col} is associated with`,\n type: 'list',\n default: purposeNames.find((x) => x.startsWith(purposeSlugs[0])),\n choices: purposeNames,\n },\n ]);\n const [purposeSlug, preferenceSlug] = purposeName.split('->');\n purposeMapping = {\n purpose: purposeSlug,\n preference: preferenceSlug || null,\n valueMapping: {},\n };\n }\n\n // map each value to the purpose value\n await mapSeries(uniqueValues, async (value) => {\n if (purposeMapping.valueMapping[value] !== undefined) {\n logger.info(\n colors.magenta(\n `Value \"${value}\" is associated with purpose value \"${purposeMapping.valueMapping[value]}\"`,\n ),\n );\n return;\n }\n // if preference is null, this column is just for the purpose\n if (purposeMapping.preference === null) {\n const { purposeValue } = await inquirer.prompt<{\n /** purpose value */\n purposeValue: boolean;\n }>([\n {\n name: 'purposeValue',\n message: `Choose the purpose value for value \"${value}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'confirm',\n default: value !== 'false',\n },\n ]);\n purposeMapping.valueMapping[value] = purposeValue;\n }\n\n // if preference is not null, this column is for a specific preference\n if (purposeMapping.preference !== null) {\n const preferenceTopic = preferenceTopics.find(\n (x) => x.slug === purposeMapping.preference,\n );\n if (!preferenceTopic) {\n logger.error(\n colors.red(\n `Preference topic \"${purposeMapping.preference}\" not found`,\n ),\n );\n return;\n }\n const preferenceOptions = preferenceTopic.preferenceOptionValues.map(\n ({ slug }) => slug,\n );\n\n if (preferenceTopic.type === PreferenceTopicType.Boolean) {\n const { preferenceValue } = await inquirer.prompt<{\n /** purpose value */\n preferenceValue: boolean;\n }>([\n {\n name: 'preferenceValue',\n message:\n // eslint-disable-next-line max-len\n `Choose the preference value for \"${preferenceTopic.slug}\" value \"${value}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'confirm',\n default: value !== 'false',\n },\n ]);\n purposeMapping.valueMapping[value] = preferenceValue;\n return;\n }\n\n if (preferenceTopic.type === PreferenceTopicType.Select) {\n const { preferenceValue } = await inquirer.prompt<{\n /** purpose value */\n preferenceValue: boolean;\n }>([\n {\n name: 'preferenceValue',\n // eslint-disable-next-line max-len\n message: `Choose the preference value for \"${preferenceTopic.slug}\" value 
\"${value}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'list',\n choices: preferenceOptions,\n default: preferenceOptions.find((x) => x === value),\n },\n ]);\n purposeMapping.valueMapping[value] = preferenceValue;\n return;\n }\n\n if (preferenceTopic.type === PreferenceTopicType.MultiSelect) {\n const parsedValues = splitCsvToList(value);\n // need to do this serially\n await mapSeries(parsedValues, async (parsedValue) => {\n // if we already have a value, skip re-processing it again\n if (purposeMapping.valueMapping[parsedValue] !== undefined) {\n return;\n }\n const { preferenceValue } = await inquirer.prompt<{\n /** purpose value */\n preferenceValue: boolean;\n }>([\n {\n name: 'preferenceValue',\n // eslint-disable-next-line max-len\n message: `Choose the preference value for \"${preferenceTopic.slug}\" value \"${parsedValue}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'list',\n choices: preferenceOptions,\n default: preferenceOptions.find((x) => x === parsedValue),\n },\n ]);\n purposeMapping.valueMapping[parsedValue] = preferenceValue;\n });\n return;\n }\n\n throw new Error(\n `Unknown preference topic type: ${preferenceTopic.type}`,\n );\n }\n });\n\n currentState.columnToPurposeName[col] = purposeMapping;\n });\n\n return currentState;\n}\n/* eslint-enable no-param-reassign */\n","import {\n PreferenceQueryResponseItem,\n PreferenceUpdateItem,\n} from '@transcend-io/privacy-types';\nimport * as t from 'io-ts';\n\nexport const PurposeRowMapping = t.type({\n /**\n * The slug or trackingType of the purpose to map to\n *\n * e.g. `Marketing`\n */\n purpose: t.string,\n /**\n * If the column maps to a preference instead of a purpose\n * this is the slug of the purpose.\n *\n * null value indicates that this column maps to the true/false\n * value of the purpose\n */\n preference: t.union([t.string, t.null]),\n /**\n * The mapping between each row value and purpose/preference value.\n *\n * e.g. 
for a boolean preference or purpose\n * {\n * 'true': true,\n * 'false': false,\n * '': true,\n * }\n *\n * or for a single or multi select preference\n * {\n * '': true,\n * 'value1': 'Value1',\n * 'value2': 'Value2',\n * }\n */\n valueMapping: t.record(\n t.string,\n t.union([t.string, t.boolean, t.null, t.undefined]),\n ),\n});\n\n/** Override type */\nexport type PurposeRowMapping = t.TypeOf<typeof PurposeRowMapping>;\n\n/**\n * Mapping of column name to purpose row mapping.\n * This is used to map each column in the CSV to the relevant purpose and preference definitions in\n * transcend.\n */\nexport const ColumnPurposeMap = t.record(t.string, PurposeRowMapping);\n\n/** Override type */\nexport type ColumnPurposeMap = t.TypeOf<typeof ColumnPurposeMap>;\n\nexport const IdentifierMetadataForPreference = t.type({\n /** The identifier name */\n name: t.string,\n /** Is unique on preference store */\n isUniqueOnPreferenceStore: t.boolean,\n});\n\n/** Override type */\nexport type IdentifierMetadataForPreference = t.TypeOf<\n typeof IdentifierMetadataForPreference\n>;\n\n/**\n * Mapping of identifier name to the column name in the CSV file.\n * This is used to map each identifier name to the column in the CSV file.\n */\nexport const ColumnIdentifierMap = t.record(\n t.string,\n IdentifierMetadataForPreference,\n);\n\n/** Override type */\nexport type ColumnIdentifierMap = t.TypeOf<typeof ColumnIdentifierMap>;\n\nexport const FileMetadataState = t.intersection([\n t.type({\n /**\n * Definition of how to map each column in the CSV to\n * the relevant purpose and preference definitions in transcend\n */\n columnToPurposeName: t.record(t.string, PurposeRowMapping),\n /** Last time the file was last parsed at */\n lastFetchedAt: t.string,\n /**\n * Mapping of userId to the rows in the file that need to be uploaded\n * These uploads are overwriting non-existent preferences and are safe\n */\n pendingSafeUpdates: t.record(t.string, t.record(t.string, t.string)),\n /**\n * Mapping of userId to the rows in the file that need to be uploaded\n * these records have conflicts with existing consent preferences\n */\n pendingConflictUpdates: t.record(\n t.string,\n t.type({\n record: PreferenceQueryResponseItem,\n row: t.record(t.string, t.string),\n }),\n ),\n /**\n * Mapping of userId to the rows in the file that can be skipped because\n * their preferences are already in the store\n */\n skippedUpdates: t.record(t.string, t.record(t.string, t.string)),\n }),\n t.partial({\n /** Determine which column name in file maps to consent record identifier to upload on */\n identifierColumn: t.string,\n /** Determine which column name in file maps to the timestamp */\n timestampColum: t.string,\n }),\n]);\n\n/** Override type */\nexport type FileMetadataState = t.TypeOf<typeof FileMetadataState>;\n\n/**\n * This is the type of the receipts that are stored in the file\n * that is used to track the state of the upload process.\n * It is used to resume the upload process from where it left off.\n * It is used to persist the state of the upload process across multiple runs.\n */\nexport const PreferenceUpdateMap = t.record(\n t.string,\n // This can either be true to indicate the record is pending\n // or it can be an object showing the object\n // We only return a fixed number of results to avoid\n // making the JSON file too large\n t.union([t.boolean, PreferenceUpdateItem]),\n);\n\n/** Override type */\nexport type PreferenceUpdateMap = t.TypeOf<typeof PreferenceUpdateMap>;\n\n/**\n * This is the type of the 
pending updates that are safe to run without\n * conflicts with existing consent preferences.\n *\n * Key is primaryKey of the record in the file.\n * The value is the row in the file that is safe to upload.\n */\nexport const PendingSafePreferenceUpdates = t.record(\n t.string,\n // This can either be true to indicate the record is safe\n // or it can be an object showing the object\n // We only return a fixed number of results to avoid\n // making the JSON file too large\n t.union([t.boolean, t.record(t.string, t.string)]),\n);\n\n/** Override type */\nexport type PendingSafePreferenceUpdates = t.TypeOf<\n typeof PendingSafePreferenceUpdates\n>;\n\n/**\n * These are the updates that failed to be uploaded to the API.\n */\nexport const FailingPreferenceUpdates = t.record(\n t.string,\n t.type({\n /** Time upload ran at */\n uploadedAt: t.string,\n /** Attempts to upload that resulted in an error */\n error: t.string,\n /** The update body */\n update: PreferenceUpdateItem,\n }),\n);\n\n/** Override type */\nexport type FailingPreferenceUpdates = t.TypeOf<\n typeof FailingPreferenceUpdates\n>;\n\n/**\n * This is the type of the pending updates that are in conflict with existing consent preferences.\n *\n * Key is primaryKey of the record in the file.\n * The value is the row in the file that is pending upload.\n */\nexport const PendingWithConflictPreferenceUpdates = t.record(\n t.string,\n // We always return the conflicts for investigation\n t.type({\n /** Record to be inserted to transcend v1/preferences API */\n record: PreferenceQueryResponseItem,\n /** The row in the file that is pending upload */\n row: t.record(t.string, t.string),\n }),\n);\n\n/** Override type */\nexport type PendingWithConflictPreferenceUpdates = t.TypeOf<\n typeof PendingWithConflictPreferenceUpdates\n>;\n\n/**\n * The set of preference updates that are skipped\n * Key is primaryKey and value is the row in the CSV\n * that is skipped.\n *\n * This is usually because the preferences are already in the store\n * or there are duplicate rows in the CSV file that are identical.\n */\nexport const SkippedPreferenceUpdates = t.record(\n t.string,\n t.record(t.string, t.string),\n);\n\n/** Override type */\nexport type SkippedPreferenceUpdates = t.TypeOf<\n typeof SkippedPreferenceUpdates\n>;\n\n/** Persist this data between runs of the script */\nexport const PreferenceState = t.type({\n /**\n * Store a cache of previous files read in\n */\n fileMetadata: t.record(t.string, FileMetadataState),\n /**\n * The set of successful uploads to Transcend\n * Mapping from userId to the upload metadata\n */\n failingUpdates: t.record(\n t.string,\n t.type({\n /** Time upload ran at */\n uploadedAt: t.string,\n /** Attempts to upload that resulted in an error */\n error: t.string,\n /** The update body */\n update: PreferenceUpdateItem,\n }),\n ),\n /**\n * The set of pending uploads to Transcend\n * Mapping from userId to the upload metadata\n */\n pendingUpdates: t.record(t.string, PreferenceUpdateItem),\n});\n\n/** Override type */\nexport type PreferenceState = t.TypeOf<typeof PreferenceState>;\n"]}
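For reference, the `withPreferenceQueryRetry` helper shipped in the sources above retries only on the transient messages listed in RETRY_PREFERENCE_MSGS, backing off exponentially with jitter. A minimal usage sketch in TypeScript, assuming a Got instance and partition key wired up as in `getPreferencesForIdentifiers`; the identifier value is invented:

import type { Got } from 'got';
import { withPreferenceQueryRetry } from './withPreferenceQueryRetry';

/**
 * Illustrative sketch: run one preference query under the packaged retry policy.
 * Retries happen only when the error message matches a known-transient string;
 * everything else fails fast with the "failed after N attempt(s)" error.
 */
async function queryOnePreference(sombra: Got, partitionKey: string): Promise<unknown> {
  return withPreferenceQueryRetry(
    () =>
      sombra
        .post(`v1/preferences/${partitionKey}/query`, {
          json: { filter: { identifiers: [{ value: 'user@example.com' }] }, limit: 1 },
        })
        .json(),
    {
      maxAttempts: 5, // first try plus up to four retries
      baseDelayMs: 250, // delays of ~250ms, 500ms, 1s, 2s, each plus jitter
      onRetry: (attempt, _err, msg) => console.warn(`retry ${attempt}: ${msg}`),
    },
  );
}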
@@ -1,2 +1,2 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunk5PTFTN6Jcjs = require('./chunk-5PTFTN6J.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkNI434OIIcjs = require('./chunk-NI434OII.cjs');var _crypto = require('crypto'); var E = _interopRequireWildcard(_crypto);var _jsonwebtoken = require('jsonwebtoken'); var $ = _interopRequireWildcard(_jsonwebtoken);function A(c,p,t){let n=Buffer.from(t,"base64"),u=Buffer.from(p,"base64"),f="id-aes256-wrap-pad",l=Buffer.from("A65959A6","hex"),o=E.createCipheriv(f,u,l),s={encryptedIdentifier:Buffer.concat([o.update(c),o.final()]).toString("base64")};return $.sign(s,n,{algorithm:"HS384"})}var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _iots = require('io-ts'); var r = _interopRequireWildcard(_iots);var _bluebird = require('bluebird');var _cliprogress = require('cli-progress'); var _cliprogress2 = _interopRequireDefault(_cliprogress);var _typeutils = require('@transcend-io/type-utils');var x=/^[0-9][Y|N]([Y|N])[Y|N]$/,I= exports.c =r.record(r.string,r.union([r.boolean,r.literal("Auto")]));async function q({base64EncryptionKey:c,base64SigningKey:p,preferences:t,partition:n,concurrency:u=100,transcendUrl:f=_chunkNI434OIIcjs.s}){let l=_chunk5PTFTN6Jcjs.yc.call(void 0, f),o=t.filter(e=>e.usp&&!x.test(e.usp));if(o.length>0)throw new Error(`Received invalid usp strings: ${JSON.stringify(o,null,2)}`);let d=t.map((e,g)=>[e,g]).filter(([e])=>{if(!e.purposes)return!1;try{return _typeutils.decodeCodec.call(void 0, I,e.purposes),!1}catch (e2){return!0}});if(d.length>0)throw new Error(`Received invalid purpose maps: ${JSON.stringify(d,null,2)}`);let s=t.filter(e=>!e.usp&&!e.purposes);if(s.length>0)throw new Error(`Received invalid inputs, expected either purposes or usp to be defined: ${JSON.stringify(s,null,2)}`);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Uploading ${t.length} user preferences to partition ${n}`));let S=new Date().getTime(),m=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),v=0;m.start(t.length,0),await _bluebird.map.call(void 0, t,async({userId:e,confirmed:g="true",updated:C,prompted:P,purposes:N,...i})=>{let O=A(e,c,p),[,_]=i.usp?x.exec(i.usp)||[]:[],j={token:O,partition:n,consent:{confirmed:g==="true",purposes:N?_typeutils.decodeCodec.call(void 0, I,N):i.usp?{SaleOfInfo:_==="Y"}:{},...C?{updated:C==="true"}:{},...P?{prompted:P==="true"}:{},...i}};try{await l.post("sync",{json:j}).json()}catch(y){try{let h=JSON.parse(_optionalChain([y, 'optionalAccess', _2 => _2.response, 'optionalAccess', _3 => 
_3.body])||"{}");h.error&&_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Error: ${h.error}`))}catch (e3){}throw new Error(`Received an error from server: ${_optionalChain([y, 'optionalAccess', _4 => _4.response, 'optionalAccess', _5 => _5.body])||_optionalChain([y, 'optionalAccess', _6 => _6.message])}`)}v+=1,m.update(v)},{concurrency:u}),m.stop();let R=new Date().getTime()-S;_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully uploaded ${t.length} user preferences to partition ${n} in "${R/1e3}" seconds!`))}exports.a = A; exports.b = x; exports.c = I; exports.d = q;
- //# sourceMappingURL=chunk-F3BRFYKD.cjs.map
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkGC3HFDP4cjs = require('./chunk-GC3HFDP4.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunk6WIX75ZFcjs = require('./chunk-6WIX75ZF.cjs');var _crypto = require('crypto'); var E = _interopRequireWildcard(_crypto);var _jsonwebtoken = require('jsonwebtoken'); var $ = _interopRequireWildcard(_jsonwebtoken);function A(c,p,t){let n=Buffer.from(t,"base64"),u=Buffer.from(p,"base64"),f="id-aes256-wrap-pad",l=Buffer.from("A65959A6","hex"),o=E.createCipheriv(f,u,l),s={encryptedIdentifier:Buffer.concat([o.update(c),o.final()]).toString("base64")};return $.sign(s,n,{algorithm:"HS384"})}var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _iots = require('io-ts'); var r = _interopRequireWildcard(_iots);var _bluebird = require('bluebird');var _cliprogress = require('cli-progress'); var _cliprogress2 = _interopRequireDefault(_cliprogress);var _typeutils = require('@transcend-io/type-utils');var x=/^[0-9][Y|N]([Y|N])[Y|N]$/,I= exports.c =r.record(r.string,r.union([r.boolean,r.literal("Auto")]));async function q({base64EncryptionKey:c,base64SigningKey:p,preferences:t,partition:n,concurrency:u=100,transcendUrl:f=_chunk6WIX75ZFcjs.s}){let l=_chunkGC3HFDP4cjs.yc.call(void 0, f),o=t.filter(e=>e.usp&&!x.test(e.usp));if(o.length>0)throw new Error(`Received invalid usp strings: ${JSON.stringify(o,null,2)}`);let d=t.map((e,g)=>[e,g]).filter(([e])=>{if(!e.purposes)return!1;try{return _typeutils.decodeCodec.call(void 0, I,e.purposes),!1}catch (e2){return!0}});if(d.length>0)throw new Error(`Received invalid purpose maps: ${JSON.stringify(d,null,2)}`);let s=t.filter(e=>!e.usp&&!e.purposes);if(s.length>0)throw new Error(`Received invalid inputs, expected either purposes or usp to be defined: ${JSON.stringify(s,null,2)}`);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Uploading ${t.length} user preferences to partition ${n}`));let S=new Date().getTime(),m=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),v=0;m.start(t.length,0),await _bluebird.map.call(void 0, t,async({userId:e,confirmed:g="true",updated:C,prompted:P,purposes:N,...i})=>{let O=A(e,c,p),[,_]=i.usp?x.exec(i.usp)||[]:[],j={token:O,partition:n,consent:{confirmed:g==="true",purposes:N?_typeutils.decodeCodec.call(void 0, I,N):i.usp?{SaleOfInfo:_==="Y"}:{},...C?{updated:C==="true"}:{},...P?{prompted:P==="true"}:{},...i}};try{await l.post("sync",{json:j}).json()}catch(y){try{let h=JSON.parse(_optionalChain([y, 'optionalAccess', _2 => _2.response, 'optionalAccess', _3 => 
_3.body])||"{}");h.error&&_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Error: ${h.error}`))}catch (e3){}throw new Error(`Received an error from server: ${_optionalChain([y, 'optionalAccess', _4 => _4.response, 'optionalAccess', _5 => _5.body])||_optionalChain([y, 'optionalAccess', _6 => _6.message])}`)}v+=1,m.update(v)},{concurrency:u}),m.stop();let R=new Date().getTime()-S;_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully uploaded ${t.length} user preferences to partition ${n} in "${R/1e3}" seconds!`))}exports.a = A; exports.b = x; exports.c = I; exports.d = q;
+ //# sourceMappingURL=chunk-M2ARSCLF.cjs.map
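The renamed chunk above is the minified `uploadConsents` flow: each record may carry either a `purposes` map or a `usp` string, the latter validated against USP_STRING_REGEX with `SaleOfInfo` read from the captured third character. A small sketch of that parsing step, with the regex copied from the de-minified source in the sourcemap below and the sample values invented:

// Exported as USP_STRING_REGEX from uploadConsents.ts in the sources below.
const USP_STRING_REGEX = /^[0-9][Y|N]([Y|N])[Y|N]$/;

/** Illustrative sketch: derive the SaleOfInfo purpose from a usp string. */
function saleOfInfoFromUsp(usp: string): boolean {
  if (!USP_STRING_REGEX.test(usp)) {
    throw new Error(`Received invalid usp string: ${usp}`);
  }
  // Capture group 1 is the third character: the sale-of-info signal.
  const [, saleStatus] = USP_STRING_REGEX.exec(usp) || [];
  return saleStatus === 'Y';
}

saleOfInfoFromUsp('1YYN'); // true  -> consent.purposes = { SaleOfInfo: true }
saleOfInfoFromUsp('1YNN'); // false -> consent.purposes = { SaleOfInfo: false }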
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-F3BRFYKD.cjs","../src/lib/consent-manager/createConsentToken.ts","../src/lib/consent-manager/uploadConsents.ts"],"names":["createConsentToken","userId","base64EncryptionKey","base64SigningKey","signingKey","encryptionKey","encryptionAlgorithm","iv","cipher","jwtPayload","USP_STRING_REGEX","PurposeMap","uploadConsents","preferences","partition","concurrency","transcendUrl","DEFAULT_TRANSCEND_CONSENT_API","transcendConsentApi","createTranscendConsentGotInstance","invalidUspStrings","pref"],"mappings":"AAAA,u/BAAyC,wDAAoC,wDAAyC,0ECA9F,4FACH,SAWLA,CAAAA,CACdC,CAAAA,CACAC,CAAAA,CACAC,CAAAA,CACQ,CAER,IAAMC,CAAAA,CAAa,MAAA,CAAO,IAAA,CAAKD,CAAAA,CAAkB,QAAQ,CAAA,CACnDE,CAAAA,CAAgB,MAAA,CAAO,IAAA,CAAKH,CAAAA,CAAqB,QAAQ,CAAA,CAGzDI,CAAAA,CAAsB,oBAAA,CAEtBC,CAAAA,CAAK,MAAA,CAAO,IAAA,CAAK,UAAA,CAAY,KAAK,CAAA,CAElCC,CAAAA,CAAgB,CAAA,CAAA,cAAA,CAAeF,CAAAA,CAAqBD,CAAAA,CAAeE,CAAE,CAAA,CAYrEE,CAAAA,CAAa,CACjB,mBAAA,CAV0B,MAAA,CAAO,MAAA,CAAO,CACxCD,CAAAA,CAAO,MAAA,CAAOP,CAAM,CAAA,CACpBO,CAAAA,CAAO,KAAA,CAAM,CACf,CAAC,CAAA,CAAE,QAAA,CAAS,QAAQ,CAQpB,CAAA,CAOA,OAJyB,CAAA,CAAA,IAAA,CAAKC,CAAAA,CAAYL,CAAAA,CAAY,CACpD,SAAA,CAAW,OACb,CAAC,CAGH,CC/CA,gFAAmB,qEACA,oCAEC,qGAGI,qDACI,IAIfM,CAAAA,CAAmB,0BAAA,CAEnBC,CAAAA,aAAe,CAAA,CAAA,MAAA,CACxB,CAAA,CAAA,MAAA,CACA,CAAA,CAAA,KAAA,CAAM,CAAG,CAAA,CAAA,OAAA,CAAW,CAAA,CAAA,OAAA,CAAQ,MAAM,CAAC,CAAC,CACxC,CAAA,CAOA,MAAA,SAAsBC,CAAAA,CAAe,CACnC,mBAAA,CAAAV,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,WAAA,CAAAU,CAAAA,CACA,SAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CAAAA,CAAc,GAAA,CACd,YAAA,CAAAC,CAAAA,CAAeC,mBACjB,CAAA,CAakB,CAEhB,IAAMC,CAAAA,CAAsBC,kCAAAA,CAA8C,CAAA,CAGpEC,CAAAA,CAAoBP,CAAAA,CAAY,MAAA,CACnCQ,CAAAA,EAASA,CAAAA,CAAK,GAAA,EAAO,CAACX,CAAAA,CAAiB,IAAA,CAAKW,CAAAA,CAAK,GAAG,CACvD,CAAA,CACA,EAAA,CAAID,CAAAA,CAAkB,MAAA,CAAS,CAAA,CAC7B,MAAM,IAAI,KAAA,CACR,CAAA,8BAAA,EAAiC,IAAA,CAAK,SAAA,CACpCA,CAAAA,CACA,IAAA,CACA,CACF,CAAC,CAAA,CAAA","file":"/home/runner/work/cli/cli/dist/chunk-F3BRFYKD.cjs","sourcesContent":[null,"import * as crypto from 'crypto';\nimport * as jwt from 'jsonwebtoken';\n\n/**\n * Function to create a consent manager token\n *\n * @see https://docs.transcend.io/docs/consent/reference/managed-consent-database\n * @param userId - User ID\n * @param base64EncryptionKey - Encryption key\n * @param base64SigningKey - Signing key\n * @returns Token\n */\nexport function createConsentToken(\n userId: string,\n base64EncryptionKey: string,\n base64SigningKey: string,\n): string {\n // Read on for where to find these keys\n const signingKey = Buffer.from(base64SigningKey, 'base64');\n const encryptionKey = Buffer.from(base64EncryptionKey, 'base64');\n\n // NIST's AES-KWP implementation { aes 48 } - see https://tools.ietf.org/html/rfc5649\n const encryptionAlgorithm = 'id-aes256-wrap-pad';\n // Initial Value for AES-KWP integrity check - see https://tools.ietf.org/html/rfc5649#section-3\n const iv = Buffer.from('A65959A6', 'hex');\n // Set up encryption algorithm\n const cipher = crypto.createCipheriv(encryptionAlgorithm, encryptionKey, iv);\n\n // Encrypt the userId and base64-encode the result\n const encryptedIdentifier = Buffer.concat([\n cipher.update(userId),\n cipher.final(),\n ]).toString('base64');\n\n // Create the JWT content - jwt.sign will add a 'iat' (issued at) field to the payload\n // If you wanted to add something manually, consider\n // const issued: Date = new Date();\n // const isoDate = issued.toISOString();\n const jwtPayload = {\n encryptedIdentifier,\n };\n\n // Create a JSON web token and 
HMAC it with SHA-384\n const consentToken = jwt.sign(jwtPayload, signingKey, {\n algorithm: 'HS384',\n });\n\n return consentToken;\n}\n","import { createTranscendConsentGotInstance } from '../graphql';\nimport colors from 'colors';\nimport * as t from 'io-ts';\nimport { DEFAULT_TRANSCEND_CONSENT_API } from '../../constants';\nimport { map } from 'bluebird';\nimport { createConsentToken } from './createConsentToken';\nimport { logger } from '../../logger';\nimport cliProgress from 'cli-progress';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport type { ConsentPreferenceUpload } from './types';\nimport { ConsentPreferencesBody } from '@transcend-io/airgap.js-types';\n\nexport const USP_STRING_REGEX = /^[0-9][Y|N]([Y|N])[Y|N]$/;\n\nexport const PurposeMap = t.record(\n t.string,\n t.union([t.boolean, t.literal('Auto')]),\n);\n\n/**\n * Upload a set of consent preferences\n *\n * @param options - Options\n */\nexport async function uploadConsents({\n base64EncryptionKey,\n base64SigningKey,\n preferences,\n partition,\n concurrency = 100,\n transcendUrl = DEFAULT_TRANSCEND_CONSENT_API,\n}: {\n /** base64 encryption key */\n base64EncryptionKey: string;\n /** base64 signing key */\n base64SigningKey: string;\n /** Partition key */\n partition: string;\n /** Sombra API key authentication */\n preferences: ConsentPreferenceUpload[];\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Concurrency limit for approving */\n concurrency?: number;\n}): Promise<void> {\n // Create connection to API\n const transcendConsentApi = createTranscendConsentGotInstance(transcendUrl);\n\n // Ensure usp strings are valid\n const invalidUspStrings = preferences.filter(\n (pref) => pref.usp && !USP_STRING_REGEX.test(pref.usp),\n );\n if (invalidUspStrings.length > 0) {\n throw new Error(\n `Received invalid usp strings: ${JSON.stringify(\n invalidUspStrings,\n null,\n 2,\n )}`,\n );\n }\n\n // Ensure purpose maps are valid\n const invalidPurposeMaps = preferences\n .map((pref, ind) => [pref, ind] as [ConsentPreferenceUpload, number])\n .filter(([pref]) => {\n if (!pref.purposes) {\n return false;\n }\n try {\n decodeCodec(PurposeMap, pref.purposes);\n return false;\n } catch {\n return true;\n }\n });\n if (invalidPurposeMaps.length > 0) {\n throw new Error(\n `Received invalid purpose maps: ${JSON.stringify(\n invalidPurposeMaps,\n null,\n 2,\n )}`,\n );\n }\n\n // Ensure usp or preferences are provided\n const invalidInputs = preferences.filter(\n (pref) => !pref.usp && !pref.purposes,\n );\n if (invalidInputs.length > 0) {\n throw new Error(\n `Received invalid inputs, expected either purposes or usp to be defined: ${JSON.stringify(\n invalidInputs,\n null,\n 2,\n )}`,\n );\n }\n\n logger.info(\n colors.magenta(\n `Uploading ${preferences.length} user preferences to partition ${partition}`,\n ),\n );\n\n // Time duration\n const t0 = new Date().getTime();\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Build a GraphQL client\n let total = 0;\n progressBar.start(preferences.length, 0);\n await map(\n preferences,\n async ({\n userId,\n confirmed = 'true',\n updated,\n prompted,\n purposes,\n ...consent\n }) => {\n const token = createConsentToken(\n userId,\n base64EncryptionKey,\n base64SigningKey,\n );\n\n // parse usp string\n const [, saleStatus] = consent.usp\n ? 
USP_STRING_REGEX.exec(consent.usp) || []\n : [];\n\n const input = {\n token,\n partition,\n consent: {\n confirmed: confirmed === 'true',\n purposes: purposes\n ? decodeCodec(PurposeMap, purposes)\n : consent.usp\n ? { SaleOfInfo: saleStatus === 'Y' }\n : {},\n ...(updated ? { updated: updated === 'true' } : {}),\n ...(prompted ? { prompted: prompted === 'true' } : {}),\n ...consent,\n },\n } as ConsentPreferencesBody;\n\n // Make the request\n try {\n await transcendConsentApi\n .post('sync', {\n json: input,\n })\n .json();\n } catch (err) {\n try {\n const parsed = JSON.parse(err?.response?.body || '{}');\n if (parsed.error) {\n logger.error(colors.red(`Error: ${parsed.error}`));\n }\n } catch (e) {\n // continue\n }\n throw new Error(\n `Received an error from server: ${\n err?.response?.body || err?.message\n }`,\n );\n }\n\n total += 1;\n progressBar.update(total);\n },\n { concurrency },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully uploaded ${\n preferences.length\n } user preferences to partition ${partition} in \"${\n totalTime / 1000\n }\" seconds!`,\n ),\n );\n}\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-M2ARSCLF.cjs","../src/lib/consent-manager/createConsentToken.ts","../src/lib/consent-manager/uploadConsents.ts"],"names":["createConsentToken","userId","base64EncryptionKey","base64SigningKey","signingKey","encryptionKey","encryptionAlgorithm","iv","cipher","jwtPayload","USP_STRING_REGEX","PurposeMap","uploadConsents","preferences","partition","concurrency","transcendUrl","DEFAULT_TRANSCEND_CONSENT_API","transcendConsentApi","createTranscendConsentGotInstance","invalidUspStrings","pref"],"mappings":"AAAA,u/BAAyC,wDAAoC,wDAAyC,0ECA9F,4FACH,SAWLA,CAAAA,CACdC,CAAAA,CACAC,CAAAA,CACAC,CAAAA,CACQ,CAER,IAAMC,CAAAA,CAAa,MAAA,CAAO,IAAA,CAAKD,CAAAA,CAAkB,QAAQ,CAAA,CACnDE,CAAAA,CAAgB,MAAA,CAAO,IAAA,CAAKH,CAAAA,CAAqB,QAAQ,CAAA,CAGzDI,CAAAA,CAAsB,oBAAA,CAEtBC,CAAAA,CAAK,MAAA,CAAO,IAAA,CAAK,UAAA,CAAY,KAAK,CAAA,CAElCC,CAAAA,CAAgB,CAAA,CAAA,cAAA,CAAeF,CAAAA,CAAqBD,CAAAA,CAAeE,CAAE,CAAA,CAYrEE,CAAAA,CAAa,CACjB,mBAAA,CAV0B,MAAA,CAAO,MAAA,CAAO,CACxCD,CAAAA,CAAO,MAAA,CAAOP,CAAM,CAAA,CACpBO,CAAAA,CAAO,KAAA,CAAM,CACf,CAAC,CAAA,CAAE,QAAA,CAAS,QAAQ,CAQpB,CAAA,CAOA,OAJyB,CAAA,CAAA,IAAA,CAAKC,CAAAA,CAAYL,CAAAA,CAAY,CACpD,SAAA,CAAW,OACb,CAAC,CAGH,CC/CA,gFAAmB,qEACA,oCAEC,qGAGI,qDACI,IAIfM,CAAAA,CAAmB,0BAAA,CAEnBC,CAAAA,aAAe,CAAA,CAAA,MAAA,CACxB,CAAA,CAAA,MAAA,CACA,CAAA,CAAA,KAAA,CAAM,CAAG,CAAA,CAAA,OAAA,CAAW,CAAA,CAAA,OAAA,CAAQ,MAAM,CAAC,CAAC,CACxC,CAAA,CAOA,MAAA,SAAsBC,CAAAA,CAAe,CACnC,mBAAA,CAAAV,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,WAAA,CAAAU,CAAAA,CACA,SAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CAAAA,CAAc,GAAA,CACd,YAAA,CAAAC,CAAAA,CAAeC,mBACjB,CAAA,CAakB,CAEhB,IAAMC,CAAAA,CAAsBC,kCAAAA,CAA8C,CAAA,CAGpEC,CAAAA,CAAoBP,CAAAA,CAAY,MAAA,CACnCQ,CAAAA,EAASA,CAAAA,CAAK,GAAA,EAAO,CAACX,CAAAA,CAAiB,IAAA,CAAKW,CAAAA,CAAK,GAAG,CACvD,CAAA,CACA,EAAA,CAAID,CAAAA,CAAkB,MAAA,CAAS,CAAA,CAC7B,MAAM,IAAI,KAAA,CACR,CAAA,8BAAA,EAAiC,IAAA,CAAK,SAAA,CACpCA,CAAAA,CACA,IAAA,CACA,CACF,CAAC,CAAA,CAAA","file":"/home/runner/work/cli/cli/dist/chunk-M2ARSCLF.cjs","sourcesContent":[null,"import * as crypto from 'crypto';\nimport * as jwt from 'jsonwebtoken';\n\n/**\n * Function to create a consent manager token\n *\n * @see https://docs.transcend.io/docs/consent/reference/managed-consent-database\n * @param userId - User ID\n * @param base64EncryptionKey - Encryption key\n * @param base64SigningKey - Signing key\n * @returns Token\n */\nexport function createConsentToken(\n userId: string,\n base64EncryptionKey: string,\n base64SigningKey: string,\n): string {\n // Read on for where to find these keys\n const signingKey = Buffer.from(base64SigningKey, 'base64');\n const encryptionKey = Buffer.from(base64EncryptionKey, 'base64');\n\n // NIST's AES-KWP implementation { aes 48 } - see https://tools.ietf.org/html/rfc5649\n const encryptionAlgorithm = 'id-aes256-wrap-pad';\n // Initial Value for AES-KWP integrity check - see https://tools.ietf.org/html/rfc5649#section-3\n const iv = Buffer.from('A65959A6', 'hex');\n // Set up encryption algorithm\n const cipher = crypto.createCipheriv(encryptionAlgorithm, encryptionKey, iv);\n\n // Encrypt the userId and base64-encode the result\n const encryptedIdentifier = Buffer.concat([\n cipher.update(userId),\n cipher.final(),\n ]).toString('base64');\n\n // Create the JWT content - jwt.sign will add a 'iat' (issued at) field to the payload\n // If you wanted to add something manually, consider\n // const issued: Date = new Date();\n // const isoDate = issued.toISOString();\n const jwtPayload = {\n encryptedIdentifier,\n };\n\n // Create a JSON web token and 
HMAC it with SHA-384\n const consentToken = jwt.sign(jwtPayload, signingKey, {\n algorithm: 'HS384',\n });\n\n return consentToken;\n}\n","import { createTranscendConsentGotInstance } from '../graphql';\nimport colors from 'colors';\nimport * as t from 'io-ts';\nimport { DEFAULT_TRANSCEND_CONSENT_API } from '../../constants';\nimport { map } from 'bluebird';\nimport { createConsentToken } from './createConsentToken';\nimport { logger } from '../../logger';\nimport cliProgress from 'cli-progress';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport type { ConsentPreferenceUpload } from './types';\nimport { ConsentPreferencesBody } from '@transcend-io/airgap.js-types';\n\nexport const USP_STRING_REGEX = /^[0-9][Y|N]([Y|N])[Y|N]$/;\n\nexport const PurposeMap = t.record(\n t.string,\n t.union([t.boolean, t.literal('Auto')]),\n);\n\n/**\n * Upload a set of consent preferences\n *\n * @param options - Options\n */\nexport async function uploadConsents({\n base64EncryptionKey,\n base64SigningKey,\n preferences,\n partition,\n concurrency = 100,\n transcendUrl = DEFAULT_TRANSCEND_CONSENT_API,\n}: {\n /** base64 encryption key */\n base64EncryptionKey: string;\n /** base64 signing key */\n base64SigningKey: string;\n /** Partition key */\n partition: string;\n /** Sombra API key authentication */\n preferences: ConsentPreferenceUpload[];\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Concurrency limit for approving */\n concurrency?: number;\n}): Promise<void> {\n // Create connection to API\n const transcendConsentApi = createTranscendConsentGotInstance(transcendUrl);\n\n // Ensure usp strings are valid\n const invalidUspStrings = preferences.filter(\n (pref) => pref.usp && !USP_STRING_REGEX.test(pref.usp),\n );\n if (invalidUspStrings.length > 0) {\n throw new Error(\n `Received invalid usp strings: ${JSON.stringify(\n invalidUspStrings,\n null,\n 2,\n )}`,\n );\n }\n\n // Ensure purpose maps are valid\n const invalidPurposeMaps = preferences\n .map((pref, ind) => [pref, ind] as [ConsentPreferenceUpload, number])\n .filter(([pref]) => {\n if (!pref.purposes) {\n return false;\n }\n try {\n decodeCodec(PurposeMap, pref.purposes);\n return false;\n } catch {\n return true;\n }\n });\n if (invalidPurposeMaps.length > 0) {\n throw new Error(\n `Received invalid purpose maps: ${JSON.stringify(\n invalidPurposeMaps,\n null,\n 2,\n )}`,\n );\n }\n\n // Ensure usp or preferences are provided\n const invalidInputs = preferences.filter(\n (pref) => !pref.usp && !pref.purposes,\n );\n if (invalidInputs.length > 0) {\n throw new Error(\n `Received invalid inputs, expected either purposes or usp to be defined: ${JSON.stringify(\n invalidInputs,\n null,\n 2,\n )}`,\n );\n }\n\n logger.info(\n colors.magenta(\n `Uploading ${preferences.length} user preferences to partition ${partition}`,\n ),\n );\n\n // Time duration\n const t0 = new Date().getTime();\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Build a GraphQL client\n let total = 0;\n progressBar.start(preferences.length, 0);\n await map(\n preferences,\n async ({\n userId,\n confirmed = 'true',\n updated,\n prompted,\n purposes,\n ...consent\n }) => {\n const token = createConsentToken(\n userId,\n base64EncryptionKey,\n base64SigningKey,\n );\n\n // parse usp string\n const [, saleStatus] = consent.usp\n ? 
USP_STRING_REGEX.exec(consent.usp) || []\n : [];\n\n const input = {\n token,\n partition,\n consent: {\n confirmed: confirmed === 'true',\n purposes: purposes\n ? decodeCodec(PurposeMap, purposes)\n : consent.usp\n ? { SaleOfInfo: saleStatus === 'Y' }\n : {},\n ...(updated ? { updated: updated === 'true' } : {}),\n ...(prompted ? { prompted: prompted === 'true' } : {}),\n ...consent,\n },\n } as ConsentPreferencesBody;\n\n // Make the request\n try {\n await transcendConsentApi\n .post('sync', {\n json: input,\n })\n .json();\n } catch (err) {\n try {\n const parsed = JSON.parse(err?.response?.body || '{}');\n if (parsed.error) {\n logger.error(colors.red(`Error: ${parsed.error}`));\n }\n } catch (e) {\n // continue\n }\n throw new Error(\n `Received an error from server: ${\n err?.response?.body || err?.message\n }`,\n );\n }\n\n total += 1;\n progressBar.update(total);\n },\n { concurrency },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully uploaded ${\n preferences.length\n } user preferences to partition ${partition} in \"${\n totalTime / 1000\n }\" seconds!`,\n ),\n );\n}\n"]}
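The `createConsentToken` source above encrypts the user id with AES key wrapping (id-aes256-wrap-pad, per RFC 5649) and signs the wrapped identifier into an HS384 JWT. A usage sketch; the freshly generated keys are placeholders for the Managed Consent Database key material a real deployment would load:

import { randomBytes } from 'crypto';
import { createConsentToken } from './createConsentToken';

// Placeholder key material, sized for the algorithms used above:
const base64EncryptionKey = randomBytes(32).toString('base64'); // 256-bit AES-KWP key
const base64SigningKey = randomBytes(48).toString('base64'); // HMAC-SHA384 key

// Returns a JWT whose payload carries the AES-KWP-wrapped identifier.
const token = createConsentToken('user-123', base64EncryptionKey, base64SigningKey);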
@@ -1,4 +1,4 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunk5PTFTN6Jcjs = require('./chunk-5PTFTN6J.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkNI434OIIcjs = require('./chunk-NI434OII.cjs');var _privacytypes = require('@transcend-io/privacy-types');var _cliprogress = require('cli-progress'); var _cliprogress2 = _interopRequireDefault(_cliprogress);var _graphqlrequest = require('graphql-request');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _bluebird = require('bluebird');async function q(u,{dataSiloIds:e=[],includeGuessedCategories:l,includeAttributes:a,parentCategories:c=[],subCategories:t=[],pageSize:p=1e3}={}){let n=[],m=new Date().getTime(),d=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),s={...c.length>0?{category:c}:{},...t.length>0?{subCategoryIds:t}:{},...c.length+t.length>0&&!l?{status:_privacytypes.SubDataPointDataSubCategoryGuessStatus.Approved}:{},...e.length>0?{dataSilos:e}:{}},{subDataPoints:{totalCount:o}}=await _chunk5PTFTN6Jcjs.kg.call(void 0, u,_chunk5PTFTN6Jcjs.d,{filterBy:s});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("[Step 1/3] Pulling in all subdatapoints")),d.start(o,0);let y=0,D=!1,r,b=0;do try{let{subDataPoints:{nodes:P}}=await _chunk5PTFTN6Jcjs.kg.call(void 0, u,_graphqlrequest.gql`
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkGC3HFDP4cjs = require('./chunk-GC3HFDP4.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunk6WIX75ZFcjs = require('./chunk-6WIX75ZF.cjs');var _privacytypes = require('@transcend-io/privacy-types');var _cliprogress = require('cli-progress'); var _cliprogress2 = _interopRequireDefault(_cliprogress);var _graphqlrequest = require('graphql-request');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _bluebird = require('bluebird');async function q(u,{dataSiloIds:e=[],includeGuessedCategories:l,includeAttributes:a,parentCategories:c=[],subCategories:t=[],pageSize:p=1e3}={}){let n=[],m=new Date().getTime(),d=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),s={...c.length>0?{category:c}:{},...t.length>0?{subCategoryIds:t}:{},...c.length+t.length>0&&!l?{status:_privacytypes.SubDataPointDataSubCategoryGuessStatus.Approved}:{},...e.length>0?{dataSilos:e}:{}},{subDataPoints:{totalCount:o}}=await _chunkGC3HFDP4cjs.pg.call(void 0, u,_chunkGC3HFDP4cjs.d,{filterBy:s});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("[Step 1/3] Pulling in all subdatapoints")),d.start(o,0);let y=0,D=!1,r,b=0;do try{let{subDataPoints:{nodes:P}}=await _chunkGC3HFDP4cjs.pg.call(void 0, u,_graphqlrequest.gql`
  query TranscendCliSubDataPointCsvExport(
  $filterBy: SubDataPointFiltersInput
  $first: Int!
@@ -41,7 +41,7 @@
  }
  }
  }
- `,{first:p,offset:b,filterBy:{...s}});r=_optionalChain([P, 'access', _2 => _2[P.length-1], 'optionalAccess', _3 => _3.id]),n.push(...P),D=P.length===p,y+=P.length,b+=P.length,d.update(y)}catch(P){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${r} and offset ${b}`)),P}while(D);d.stop();let C=new Date().getTime()-m,g=_chunkNI434OIIcjs.g.call(void 0, n,"name");return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${g.length} subdatapoints in ${C/1e3} seconds!`)),g}async function F(u,{dataPointIds:e=[],pageSize:l=100}){let a=[],c=new Date().getTime(),t=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`[Step 2/3] Fetching metadata for ${e.length} datapoints`));let p=_chunkNI434OIIcjs.b.call(void 0, e,l);t.start(e.length,0);let n=0;await _bluebird.mapSeries.call(void 0, p,async s=>{try{let{dataPoints:{nodes:o}}=await _chunk5PTFTN6Jcjs.kg.call(void 0, u,_chunk5PTFTN6Jcjs.g,{first:l,filterBy:{ids:s}});a.push(...o),n+=s.length,t.update(n)}catch(o){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for IDs ${s.join(", ")}`)),o}}),t.stop();let d=new Date().getTime()-c;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${a.length} dataPoints in ${d/1e3} seconds!`)),a}async function Q(u,{dataSiloIds:e=[],pageSize:l=100}){let a=[],c=new Date().getTime(),t=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`[Step 3/3] Fetching metadata for ${e.length} data silos`));let p=_chunkNI434OIIcjs.b.call(void 0, e,l);t.start(e.length,0);let n=0;await _bluebird.mapSeries.call(void 0, p,async s=>{try{let{dataSilos:{nodes:o}}=await _chunk5PTFTN6Jcjs.kg.call(void 0, u,_chunk5PTFTN6Jcjs.j,{first:l,filterBy:{ids:s}});a.push(...o),n+=s.length,t.update(n)}catch(o){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching data silos for IDs ${s.join(", ")}`)),o}}),t.stop();let d=new Date().getTime()-c;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${a.length} data silos in ${d/1e3} seconds!`)),a}async function Y(u,{dataSiloIds:e=[],includeGuessedCategories:l,includeAttributes:a,parentCategories:c=[],subCategories:t=[],pageSize:p=1e3}={}){let n=await q(u,{dataSiloIds:e,includeGuessedCategories:l,includeAttributes:a,parentCategories:c,subCategories:t,pageSize:p}),m=_chunkNI434OIIcjs.j.call(void 0, n.map(r=>r.dataPointId)),d=await F(u,{dataPointIds:m}),s=_chunkNI434OIIcjs.e.call(void 0, d,"id"),o=_chunkNI434OIIcjs.j.call(void 0, n.map(r=>r.dataSiloId)),y=await Q(u,{dataSiloIds:o}),D=_chunkNI434OIIcjs.e.call(void 0, y,"id");return n.map(r=>({...r,dataPoint:s[r.dataPointId],dataSilo:D[r.dataSiloId]}))}async function nt(u,{dataSiloIds:e=[],status:l,subCategories:a=[],includeEncryptedSnippets:c,pageSize:t=100}={}){let p=[],n=new Date().getTime(),m=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),d={...a.length>0?{subCategoryIds:a}:{},...l?{status:l}:{},...e.length>0?{dataSilos:e}:{}},{unstructuredSubDataPointRecommendations:{totalCount:s}}=await _chunk5PTFTN6Jcjs.kg.call(void 0, u,_chunk5PTFTN6Jcjs.h,{filterBy:d});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("[Step 1/3] Pulling in all subdatapoints")),m.start(s,0);let o=0,y=!1,D,r=0;do try{let{unstructuredSubDataPointRecommendations:{nodes:g}}=await _chunk5PTFTN6Jcjs.kg.call(void 0, 
u,_graphqlrequest.gql`
+ `,{first:p,offset:b,filterBy:{...s}});r=_optionalChain([P, 'access', _2 => _2[P.length-1], 'optionalAccess', _3 => _3.id]),n.push(...P),D=P.length===p,y+=P.length,b+=P.length,d.update(y)}catch(P){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${r} and offset ${b}`)),P}while(D);d.stop();let C=new Date().getTime()-m,g=_chunk6WIX75ZFcjs.g.call(void 0, n,"name");return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${g.length} subdatapoints in ${C/1e3} seconds!`)),g}async function F(u,{dataPointIds:e=[],pageSize:l=100}){let a=[],c=new Date().getTime(),t=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`[Step 2/3] Fetching metadata for ${e.length} datapoints`));let p=_chunk6WIX75ZFcjs.b.call(void 0, e,l);t.start(e.length,0);let n=0;await _bluebird.mapSeries.call(void 0, p,async s=>{try{let{dataPoints:{nodes:o}}=await _chunkGC3HFDP4cjs.pg.call(void 0, u,_chunkGC3HFDP4cjs.g,{first:l,filterBy:{ids:s}});a.push(...o),n+=s.length,t.update(n)}catch(o){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for IDs ${s.join(", ")}`)),o}}),t.stop();let d=new Date().getTime()-c;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${a.length} dataPoints in ${d/1e3} seconds!`)),a}async function Q(u,{dataSiloIds:e=[],pageSize:l=100}){let a=[],c=new Date().getTime(),t=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`[Step 3/3] Fetching metadata for ${e.length} data silos`));let p=_chunk6WIX75ZFcjs.b.call(void 0, e,l);t.start(e.length,0);let n=0;await _bluebird.mapSeries.call(void 0, p,async s=>{try{let{dataSilos:{nodes:o}}=await _chunkGC3HFDP4cjs.pg.call(void 0, u,_chunkGC3HFDP4cjs.j,{first:l,filterBy:{ids:s}});a.push(...o),n+=s.length,t.update(n)}catch(o){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching data silos for IDs ${s.join(", ")}`)),o}}),t.stop();let d=new Date().getTime()-c;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${a.length} data silos in ${d/1e3} seconds!`)),a}async function Y(u,{dataSiloIds:e=[],includeGuessedCategories:l,includeAttributes:a,parentCategories:c=[],subCategories:t=[],pageSize:p=1e3}={}){let n=await q(u,{dataSiloIds:e,includeGuessedCategories:l,includeAttributes:a,parentCategories:c,subCategories:t,pageSize:p}),m=_chunk6WIX75ZFcjs.j.call(void 0, n.map(r=>r.dataPointId)),d=await F(u,{dataPointIds:m}),s=_chunk6WIX75ZFcjs.e.call(void 0, d,"id"),o=_chunk6WIX75ZFcjs.j.call(void 0, n.map(r=>r.dataSiloId)),y=await Q(u,{dataSiloIds:o}),D=_chunk6WIX75ZFcjs.e.call(void 0, y,"id");return n.map(r=>({...r,dataPoint:s[r.dataPointId],dataSilo:D[r.dataSiloId]}))}async function nt(u,{dataSiloIds:e=[],status:l,subCategories:a=[],includeEncryptedSnippets:c,pageSize:t=100}={}){let p=[],n=new Date().getTime(),m=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),d={...a.length>0?{subCategoryIds:a}:{},...l?{status:l}:{},...e.length>0?{dataSilos:e}:{}},{unstructuredSubDataPointRecommendations:{totalCount:s}}=await _chunkGC3HFDP4cjs.pg.call(void 0, u,_chunkGC3HFDP4cjs.h,{filterBy:d});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("[Step 1/3] Pulling in all subdatapoints")),m.start(s,0);let o=0,y=!1,D,r=0;do try{let{unstructuredSubDataPointRecommendations:{nodes:g}}=await _chunkGC3HFDP4cjs.pg.call(void 0, 
u,_graphqlrequest.gql`
  query TranscendCliUnstructuredSubDataPointRecommendationCsvExport(
  $filterBy: UnstructuredSubDataPointRecommendationsFilterInput
  $first: Int!
@@ -71,5 +71,5 @@
  }
  }
  }
- `,{first:t,offset:r,filterBy:{...d}});D=_optionalChain([g, 'access', _4 => _4[g.length-1], 'optionalAccess', _5 => _5.id]),p.push(...g),y=g.length===t,o+=g.length,r+=g.length,m.update(o)}catch(g){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${D} and offset ${r}`)),g}while(y);m.stop();let T=new Date().getTime()-n,C=_chunkNI434OIIcjs.g.call(void 0, p,"name");return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${C.length} subdatapoints in ${T/1e3} seconds!`)),C}exports.a = Y; exports.b = nt;
- //# sourceMappingURL=chunk-KYVDLURL.cjs.map
+ `,{first:t,offset:r,filterBy:{...d}});D=_optionalChain([g, 'access', _4 => _4[g.length-1], 'optionalAccess', _5 => _5.id]),p.push(...g),y=g.length===t,o+=g.length,r+=g.length,m.update(o)}catch(g){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${D} and offset ${r}`)),g}while(y);m.stop();let T=new Date().getTime()-n,C=_chunk6WIX75ZFcjs.g.call(void 0, p,"name");return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${C.length} subdatapoints in ${T/1e3} seconds!`)),C}exports.a = Y; exports.b = nt;
+ //# sourceMappingURL=chunk-OM2P5WDQ.cjs.map
package/dist/{chunk-KYVDLURL.cjs.map → chunk-OM2P5WDQ.cjs.map} +1 -1
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-KYVDLURL.cjs","../src/lib/data-inventory/pullAllDatapoints.ts","../src/lib/data-inventory/pullUnstructuredSubDataPointRecommendations.ts"],"names":["pullSubDatapoints","client","dataSiloIds","includeGuessedCategories","includeAttributes","parentCategories","subCategories","pageSize","subDataPoints","t0","progressBar","cliProgress","filterBy","SubDataPointDataSubCategoryGuessStatus","totalCount","makeGraphQLRequest","SUB_DATA_POINTS_COUNT","logger","colors","total","shouldContinue","cursor","offset","nodes","gql","err"],"mappings":"AAAA,quBAAqE,wDAAyC,wDAA8D,2DCKrK,qGACiB,iDACJ,gFACD,oCAWO,MAuE1B,SAAeA,CAAAA,CACbC,CAAAA,CACA,CACE,WAAA,CAAAC,CAAAA,CAAc,CAAC,CAAA,CACf,wBAAA,CAAAC,CAAAA,CACA,iBAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CAAmB,CAAC,CAAA,CACpB,aAAA,CAAAC,CAAAA,CAAgB,CAAC,CAAA,CACjB,QAAA,CAAAC,CAAAA,CAAW,GACb,CAAA,CAGI,CAAC,CAAA,CAC8B,CACnC,IAAMC,CAAAA,CAA0C,CAAC,CAAA,CAG3CC,CAAAA,CAAK,IAAI,IAAA,CAAK,CAAA,CAAE,OAAA,CAAQ,CAAA,CAGxBC,CAAAA,CAAc,IAAIC,qBAAAA,CAAY,SAAA,CAClC,CAAC,CAAA,CACDA,qBAAAA,CAAY,OAAA,CAAQ,cACtB,CAAA,CAGMC,CAAAA,CAAW,CACf,GAAIP,CAAAA,CAAiB,MAAA,CAAS,CAAA,CAAI,CAAE,QAAA,CAAUA,CAAiB,CAAA,CAAI,CAAC,CAAA,CACpE,GAAIC,CAAAA,CAAc,MAAA,CAAS,CAAA,CAAI,CAAE,cAAA,CAAgBA,CAAc,CAAA,CAAI,CAAC,CAAA,CAEpE,GAAID,CAAAA,CAAiB,MAAA,CAASC,CAAAA,CAAc,MAAA,CAAS,CAAA,EACrD,CAACH,CAAAA,CAEG,CAAE,MAAA,CAAQU,oDAAAA,CAAuC,QAAS,CAAA,CAC1D,CAAC,CAAA,CACL,GAAIX,CAAAA,CAAY,MAAA,CAAS,CAAA,CAAI,CAAE,SAAA,CAAWA,CAAY,CAAA,CAAI,CAAC,CAC7D,CAAA,CAGM,CACJ,aAAA,CAAe,CAAE,UAAA,CAAAY,CAAW,CAC9B,CAAA,CAAI,MAAMC,kCAAAA,CAMPd,CAAQe,mBAAAA,CAAuB,CAChC,QAAA,CAAAJ,CACF,CAAC,CAAA,CAEDK,mBAAAA,CAAO,IAAA,CAAKC,gBAAAA,CAAO,OAAA,CAAQ,yCAAyC,CAAC,CAAA,CAErER,CAAAA,CAAY,KAAA,CAAMI,CAAAA,CAAY,CAAC,CAAA,CAC/B,IAAIK,CAAAA,CAAQ,CAAA,CACRC,CAAAA,CAAiB,CAAA,CAAA,CACjBC,CAAAA,CACAC,CAAAA,CAAS,CAAA,CACb,GACE,GAAI,CACF,GAAM,CACJ,aAAA,CAAe,CAAE,KAAA,CAAAC,CAAM,CACzB,CAAA,CAAI,MAAMR,kCAAAA,CAORd,CACAuB,mBAAAA,CAAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAA,EA2BUrB,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAQA,EACN,CAAA;AAAA,gBAAA,EAEEC,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAMA,EACN,CAAA;AAAA;AAAA;AAAA;AAAA,QAAA,CAAA,CAKR,CACE,KAAA,CAAOG,CAAAA,CACP,MAAA,CAAAe,CAAAA,CACA,QAAA,CAAU,CACR,GAAGV,CAGL,CACF,CACF,CAAA,CAEAS,CAAAA,iBAASE,CAAAA,qBAAMA,CAAAA,CAAM,MAAA,CAAS,CAAC,CAAA,6BAAG,IAAA,CAClCf,CAAAA,CAAc,IAAA,CAAK,GAAGe,CAAK,CAAA,CAC3BH,CAAAA,CAAiBG,CAAAA,CAAM,MAAA,GAAWhB,CAAAA,CAClCY,CAAAA,EAASI,CAAAA,CAAM,MAAA,CACfD,CAAAA,EAAUC,CAAAA,CAAM,MAAA,CAChBb,CAAAA,CAAY,MAAA,CAAOS,CAAK,CAC1B,CAAA,KAAA,CAASM,CAAAA,CAAK,CACZ,MAAAR,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,2CAAA,EAA8CG,CAAM,CAAA,YAAA,EAAeC,CAAM,CAAA,CAAA;AC7G3E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBgD,gBAAA;AACU,gBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA+BiB,QAAA","file":"/home/runner/work/cli/cli/dist/chunk-KYVDLURL.cjs","sourcesContent":[null,"/* eslint-disable max-lines */\nimport { keyBy, uniq, chunk, sortBy } from 'lodash-es';\nimport {\n type DataCategoryType,\n SubDataPointDataSubCategoryGuessStatus,\n} from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport { gql } from 'graphql-request';\nimport colors from 'colors';\nimport type { GraphQLClient } from 'graphql-request';\nimport {\n DATAPOINT_EXPORT,\n DATA_SILO_EXPORT,\n type DataSiloAttributeValue,\n SUB_DATA_POINTS_COUNT,\n 
makeGraphQLRequest,\n} from '../graphql';\nimport { logger } from '../../logger';\nimport type { DataCategoryInput, ProcessingPurposeInput } from '../../codecs';\nimport { mapSeries } from 'bluebird';\n\nexport interface DataSiloCsvPreview {\n /** ID of dataSilo */\n id: string;\n /** Name of dataSilo */\n title: string;\n}\n\nexport interface DataPointCsvPreview {\n /** ID of dataPoint */\n id: string;\n /** The path to this data point */\n path: string[];\n /** Description */\n description: {\n /** Default message */\n defaultMessage: string;\n };\n /** Name */\n name: string;\n}\n\nexport interface SubDataPointCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Name (or key) of the subdatapoint */\n name: string;\n /** The description */\n description?: string;\n /** Personal data category */\n categories: DataCategoryInput[];\n /** Data point ID */\n dataPointId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** The processing purpose for this sub datapoint */\n purposes: ProcessingPurposeInput[];\n /** Attribute attached to subdatapoint */\n attributeValues?: DataSiloAttributeValue[];\n /** Data category guesses that are output by the classifier */\n pendingCategoryGuesses?: {\n /** Data category being guessed */\n category: DataCategoryInput;\n /** Status of guess */\n status: SubDataPointDataSubCategoryGuessStatus;\n /** classifier version that produced the guess */\n classifierVersion: number;\n }[];\n}\n\nexport interface DatapointFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Whether to include guessed categories, defaults to only approved categories */\n includeGuessedCategories?: boolean;\n /** Whether or not to include attributes */\n includeAttributes?: boolean;\n /** Parent categories to filter down for */\n parentCategories?: DataCategoryType[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n}\n\n/**\n * Pull subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The subdatapoints\n */\nasync function pullSubDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<SubDataPointCsvPreview[]> {\n const subDataPoints: SubDataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(parentCategories.length > 0 ? { category: parentCategories } : {}),\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n // if parentCategories or subCategories and not includeGuessedCategories\n ...(parentCategories.length + subCategories.length > 0 &&\n !includeGuessedCategories\n ? // then only show data points with approved data categories\n { status: SubDataPointDataSubCategoryGuessStatus.Approved }\n : {}),\n ...(dataSiloIds.length > 0 ? 
{ dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n subDataPoints: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** Count */\n totalCount: number;\n };\n }>(client, SUB_DATA_POINTS_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n subDataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** List of matches */\n nodes: SubDataPointCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliSubDataPointCsvExport(\n $filterBy: SubDataPointFiltersInput\n $first: Int!\n $offset: Int!\n ) {\n subDataPoints(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n name\n description\n dataPointId\n dataSiloId\n purposes {\n name\n purpose\n }\n categories {\n name\n category\n }\n ${\n includeGuessedCategories\n ? `pendingCategoryGuesses {\n category {\n name\n category\n }\n status\n classifierVersion\n }`\n : ''\n }\n ${\n includeAttributes\n ? `attributeValues {\n attributeKey {\n name\n }\n name\n }`\n : ''\n }\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n // TODO: https://transcend.height.app/T-40484 - add cursor support\n // ...(cursor ? { cursor: { id: cursor } } : {}),\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n subDataPoints.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(subDataPoints, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n\n/**\n * Pull datapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints\n */\nasync function pullDatapoints(\n client: GraphQLClient,\n {\n dataPointIds = [],\n pageSize = 100,\n }: {\n /** IDs of data points to filter down */\n dataPointIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataPointCsvPreview[]> {\n const dataPoints: DataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 2/3] Fetching metadata for ${dataPointIds.length} datapoints`,\n ),\n );\n\n // Group by 100\n const dataPointsGrouped = chunk(dataPointIds, pageSize);\n\n progressBar.start(dataPointIds.length, 0);\n let total = 0;\n await mapSeries(dataPointsGrouped, async (dataPointIdsGroup) => {\n try {\n const {\n dataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataPoints: {\n /** List of matches */\n nodes: DataPointCsvPreview[];\n };\n }>(client, DATAPOINT_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataPointIdsGroup,\n },\n });\n\n dataPoints.push(...nodes);\n 
total += dataPointIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for IDs ${dataPointIdsGroup.join(\n ', ',\n )}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataPoints.length} dataPoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataPoints;\n}\n\n/**\n * Pull data silo information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The data silos\n */\nasync function pullDataSilos(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n pageSize = 100,\n }: {\n /** IDs of data silos to filter down */\n dataSiloIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataSiloCsvPreview[]> {\n const dataSilos: DataSiloCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 3/3] Fetching metadata for ${dataSiloIds.length} data silos`,\n ),\n );\n\n // Group by 100\n const dataSilosGrouped = chunk(dataSiloIds, pageSize);\n\n progressBar.start(dataSiloIds.length, 0);\n let total = 0;\n await mapSeries(dataSilosGrouped, async (dataSiloIdsGroup) => {\n try {\n const {\n dataSilos: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataSilos: {\n /** List of matches */\n nodes: DataSiloCsvPreview[];\n };\n }>(client, DATA_SILO_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataSiloIdsGroup,\n },\n });\n\n dataSilos.push(...nodes);\n total += dataSiloIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching data silos for IDs ${dataSiloIdsGroup.join(', ')}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataSilos.length} data silos in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataSilos;\n}\n\n/**\n * Pull all datapoints from the data inventory.\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints and data silos\n */\nexport async function pullAllDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<\n (SubDataPointCsvPreview & {\n /** Data point information */\n dataPoint: DataPointCsvPreview;\n /** Data silo information */\n dataSilo: DataSiloCsvPreview;\n })[]\n> {\n // Subdatapoint information\n const subDatapoints = await pullSubDatapoints(client, {\n dataSiloIds,\n includeGuessedCategories,\n includeAttributes,\n parentCategories,\n subCategories,\n pageSize,\n });\n\n // The datapoint ids to grab\n const dataPointIds = uniq(subDatapoints.map((point) => point.dataPointId));\n const dataPoints = await pullDatapoints(client, {\n dataPointIds,\n });\n const dataPointById = keyBy(dataPoints, 'id');\n\n // The data silo IDs to grab\n const allDataSiloIds = uniq(subDatapoints.map((point) => point.dataSiloId));\n const dataSilos = await pullDataSilos(client, {\n 
dataSiloIds: allDataSiloIds,\n });\n const dataSiloById = keyBy(dataSilos, 'id');\n\n return subDatapoints.map((subDataPoint) => ({\n ...subDataPoint,\n dataPoint: dataPointById[subDataPoint.dataPointId],\n dataSilo: dataSiloById[subDataPoint.dataSiloId],\n }));\n}\n/* eslint-enable max-lines */\n","import type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport { gql, type GraphQLClient } from 'graphql-request';\nimport { sortBy } from 'lodash-es';\nimport type { DataCategoryInput } from '../../codecs';\nimport { ENTRY_COUNT, makeGraphQLRequest } from '../graphql';\nimport { logger } from '../../logger';\n\ninterface UnstructuredSubDataPointRecommendationCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Entry or Named Entity recognized by the classifier */\n name: string;\n /** Context snippet including entry */\n contextSnippet: string;\n /** Scanned object ID */\n scannedObjectId: string;\n /** Scanned object path ID */\n scannedObjectPathId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** Personal data category */\n dataSubCategory: DataCategoryInput;\n /** Classification Status */\n status: UnstructuredSubDataPointRecommendationStatus;\n /** Confidence */\n confidence: number;\n /** Classification method */\n classificationMethod: string;\n /** Classifier version */\n classifierVersion: string;\n}\n\ninterface EntryFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Parent categories to filter down for */\n status?: UnstructuredSubDataPointRecommendationStatus[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n /** Include entry and snippet */\n includeEncryptedSnippets?: boolean;\n /** Include encryptedSamplesS3Key */\n includeEncryptedSamplesS3Key?: boolean;\n}\n/**\n * Pull unstructured subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @param options.dataSiloIds - IDs of data silos to filter down\n * @param options.status - Parent categories to filter down for\n * @param options.subCategories - Sub categories to filter down for\n * @param options.includeEncryptedSnippets - Include entry and snippet\n * @param options.includeEncryptedSamplesS3Key - Include encryptedSamplesS3Key\n * @param options.pageSize - Page size to pull in\n * @returns A promise that resolves to an array of unstructured subdatapoint recommendations\n */\nexport async function pullUnstructuredSubDataPointRecommendations(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n status,\n subCategories = [],\n includeEncryptedSnippets,\n pageSize = 100,\n }: EntryFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<UnstructuredSubDataPointRecommendationCsvPreview[]> {\n const unstructuredSubDataPointRecommendations: UnstructuredSubDataPointRecommendationCsvPreview[] =\n [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n ...(status ? { status } : {}),\n ...(dataSiloIds.length > 0 ? 
{ dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n unstructuredSubDataPointRecommendations: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** Count */\n totalCount: number;\n };\n }>(client, ENTRY_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n unstructuredSubDataPointRecommendations: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** List of matches */\n nodes: UnstructuredSubDataPointRecommendationCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliUnstructuredSubDataPointRecommendationCsvExport(\n $filterBy: UnstructuredSubDataPointRecommendationsFilterInput\n $first: Int!\n $offset: Int!\n ) {\n unstructuredSubDataPointRecommendations(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n dataSiloId\n scannedObjectPathId\n scannedObjectId\n ${includeEncryptedSnippets ? 'name' : ''}\n ${includeEncryptedSnippets ? 'contextSnippet' : ''}\n dataSubCategory {\n name\n category\n }\n status\n confidence\n classificationMethod\n classifierVersion\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n unstructuredSubDataPointRecommendations.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(unstructuredSubDataPointRecommendations, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n"]}
+ {"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-OM2P5WDQ.cjs","../src/lib/data-inventory/pullAllDatapoints.ts","../src/lib/data-inventory/pullUnstructuredSubDataPointRecommendations.ts"],"names":["pullSubDatapoints","client","dataSiloIds","includeGuessedCategories","includeAttributes","parentCategories","subCategories","pageSize","subDataPoints","t0","progressBar","cliProgress","filterBy","SubDataPointDataSubCategoryGuessStatus","totalCount","makeGraphQLRequest","SUB_DATA_POINTS_COUNT","logger","colors","total","shouldContinue","cursor","offset","nodes","gql","err"],"mappings":"AAAA,quBAAqE,wDAAyC,wDAA8D,2DCKrK,qGACiB,iDACJ,gFACD,oCAWO,MAuE1B,SAAeA,CAAAA,CACbC,CAAAA,CACA,CACE,WAAA,CAAAC,CAAAA,CAAc,CAAC,CAAA,CACf,wBAAA,CAAAC,CAAAA,CACA,iBAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CAAmB,CAAC,CAAA,CACpB,aAAA,CAAAC,CAAAA,CAAgB,CAAC,CAAA,CACjB,QAAA,CAAAC,CAAAA,CAAW,GACb,CAAA,CAGI,CAAC,CAAA,CAC8B,CACnC,IAAMC,CAAAA,CAA0C,CAAC,CAAA,CAG3CC,CAAAA,CAAK,IAAI,IAAA,CAAK,CAAA,CAAE,OAAA,CAAQ,CAAA,CAGxBC,CAAAA,CAAc,IAAIC,qBAAAA,CAAY,SAAA,CAClC,CAAC,CAAA,CACDA,qBAAAA,CAAY,OAAA,CAAQ,cACtB,CAAA,CAGMC,CAAAA,CAAW,CACf,GAAIP,CAAAA,CAAiB,MAAA,CAAS,CAAA,CAAI,CAAE,QAAA,CAAUA,CAAiB,CAAA,CAAI,CAAC,CAAA,CACpE,GAAIC,CAAAA,CAAc,MAAA,CAAS,CAAA,CAAI,CAAE,cAAA,CAAgBA,CAAc,CAAA,CAAI,CAAC,CAAA,CAEpE,GAAID,CAAAA,CAAiB,MAAA,CAASC,CAAAA,CAAc,MAAA,CAAS,CAAA,EACrD,CAACH,CAAAA,CAEG,CAAE,MAAA,CAAQU,oDAAAA,CAAuC,QAAS,CAAA,CAC1D,CAAC,CAAA,CACL,GAAIX,CAAAA,CAAY,MAAA,CAAS,CAAA,CAAI,CAAE,SAAA,CAAWA,CAAY,CAAA,CAAI,CAAC,CAC7D,CAAA,CAGM,CACJ,aAAA,CAAe,CAAE,UAAA,CAAAY,CAAW,CAC9B,CAAA,CAAI,MAAMC,kCAAAA,CAMPd,CAAQe,mBAAAA,CAAuB,CAChC,QAAA,CAAAJ,CACF,CAAC,CAAA,CAEDK,mBAAAA,CAAO,IAAA,CAAKC,gBAAAA,CAAO,OAAA,CAAQ,yCAAyC,CAAC,CAAA,CAErER,CAAAA,CAAY,KAAA,CAAMI,CAAAA,CAAY,CAAC,CAAA,CAC/B,IAAIK,CAAAA,CAAQ,CAAA,CACRC,CAAAA,CAAiB,CAAA,CAAA,CACjBC,CAAAA,CACAC,CAAAA,CAAS,CAAA,CACb,GACE,GAAI,CACF,GAAM,CACJ,aAAA,CAAe,CAAE,KAAA,CAAAC,CAAM,CACzB,CAAA,CAAI,MAAMR,kCAAAA,CAORd,CACAuB,mBAAAA,CAAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAA,EA2BUrB,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAQA,EACN,CAAA;AAAA,gBAAA,EAEEC,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAMA,EACN,CAAA;AAAA;AAAA;AAAA;AAAA,QAAA,CAAA,CAKR,CACE,KAAA,CAAOG,CAAAA,CACP,MAAA,CAAAe,CAAAA,CACA,QAAA,CAAU,CACR,GAAGV,CAGL,CACF,CACF,CAAA,CAEAS,CAAAA,iBAASE,CAAAA,qBAAMA,CAAAA,CAAM,MAAA,CAAS,CAAC,CAAA,6BAAG,IAAA,CAClCf,CAAAA,CAAc,IAAA,CAAK,GAAGe,CAAK,CAAA,CAC3BH,CAAAA,CAAiBG,CAAAA,CAAM,MAAA,GAAWhB,CAAAA,CAClCY,CAAAA,EAASI,CAAAA,CAAM,MAAA,CACfD,CAAAA,EAAUC,CAAAA,CAAM,MAAA,CAChBb,CAAAA,CAAY,MAAA,CAAOS,CAAK,CAC1B,CAAA,KAAA,CAASM,CAAAA,CAAK,CACZ,MAAAR,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,2CAAA,EAA8CG,CAAM,CAAA,YAAA,EAAeC,CAAM,CAAA,CAAA;AC7G3E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBgD,gBAAA;AACU,gBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA+BiB,QAAA","file":"/home/runner/work/cli/cli/dist/chunk-OM2P5WDQ.cjs","sourcesContent":[null,"/* eslint-disable max-lines */\nimport { keyBy, uniq, chunk, sortBy } from 'lodash-es';\nimport {\n type DataCategoryType,\n SubDataPointDataSubCategoryGuessStatus,\n} from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport { gql } from 'graphql-request';\nimport colors from 'colors';\nimport type { GraphQLClient } from 'graphql-request';\nimport {\n DATAPOINT_EXPORT,\n DATA_SILO_EXPORT,\n type DataSiloAttributeValue,\n SUB_DATA_POINTS_COUNT,\n 
makeGraphQLRequest,\n} from '../graphql';\nimport { logger } from '../../logger';\nimport type { DataCategoryInput, ProcessingPurposeInput } from '../../codecs';\nimport { mapSeries } from 'bluebird';\n\nexport interface DataSiloCsvPreview {\n /** ID of dataSilo */\n id: string;\n /** Name of dataSilo */\n title: string;\n}\n\nexport interface DataPointCsvPreview {\n /** ID of dataPoint */\n id: string;\n /** The path to this data point */\n path: string[];\n /** Description */\n description: {\n /** Default message */\n defaultMessage: string;\n };\n /** Name */\n name: string;\n}\n\nexport interface SubDataPointCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Name (or key) of the subdatapoint */\n name: string;\n /** The description */\n description?: string;\n /** Personal data category */\n categories: DataCategoryInput[];\n /** Data point ID */\n dataPointId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** The processing purpose for this sub datapoint */\n purposes: ProcessingPurposeInput[];\n /** Attribute attached to subdatapoint */\n attributeValues?: DataSiloAttributeValue[];\n /** Data category guesses that are output by the classifier */\n pendingCategoryGuesses?: {\n /** Data category being guessed */\n category: DataCategoryInput;\n /** Status of guess */\n status: SubDataPointDataSubCategoryGuessStatus;\n /** classifier version that produced the guess */\n classifierVersion: number;\n }[];\n}\n\nexport interface DatapointFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Whether to include guessed categories, defaults to only approved categories */\n includeGuessedCategories?: boolean;\n /** Whether or not to include attributes */\n includeAttributes?: boolean;\n /** Parent categories to filter down for */\n parentCategories?: DataCategoryType[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n}\n\n/**\n * Pull subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The subdatapoints\n */\nasync function pullSubDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<SubDataPointCsvPreview[]> {\n const subDataPoints: SubDataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(parentCategories.length > 0 ? { category: parentCategories } : {}),\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n // if parentCategories or subCategories and not includeGuessedCategories\n ...(parentCategories.length + subCategories.length > 0 &&\n !includeGuessedCategories\n ? // then only show data points with approved data categories\n { status: SubDataPointDataSubCategoryGuessStatus.Approved }\n : {}),\n ...(dataSiloIds.length > 0 ? 
{ dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n subDataPoints: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** Count */\n totalCount: number;\n };\n }>(client, SUB_DATA_POINTS_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n subDataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** List of matches */\n nodes: SubDataPointCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliSubDataPointCsvExport(\n $filterBy: SubDataPointFiltersInput\n $first: Int!\n $offset: Int!\n ) {\n subDataPoints(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n name\n description\n dataPointId\n dataSiloId\n purposes {\n name\n purpose\n }\n categories {\n name\n category\n }\n ${\n includeGuessedCategories\n ? `pendingCategoryGuesses {\n category {\n name\n category\n }\n status\n classifierVersion\n }`\n : ''\n }\n ${\n includeAttributes\n ? `attributeValues {\n attributeKey {\n name\n }\n name\n }`\n : ''\n }\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n // TODO: https://transcend.height.app/T-40484 - add cursor support\n // ...(cursor ? { cursor: { id: cursor } } : {}),\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n subDataPoints.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(subDataPoints, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n\n/**\n * Pull datapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints\n */\nasync function pullDatapoints(\n client: GraphQLClient,\n {\n dataPointIds = [],\n pageSize = 100,\n }: {\n /** IDs of data points to filter down */\n dataPointIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataPointCsvPreview[]> {\n const dataPoints: DataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 2/3] Fetching metadata for ${dataPointIds.length} datapoints`,\n ),\n );\n\n // Group by 100\n const dataPointsGrouped = chunk(dataPointIds, pageSize);\n\n progressBar.start(dataPointIds.length, 0);\n let total = 0;\n await mapSeries(dataPointsGrouped, async (dataPointIdsGroup) => {\n try {\n const {\n dataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataPoints: {\n /** List of matches */\n nodes: DataPointCsvPreview[];\n };\n }>(client, DATAPOINT_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataPointIdsGroup,\n },\n });\n\n dataPoints.push(...nodes);\n 
total += dataPointIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for IDs ${dataPointIdsGroup.join(\n ', ',\n )}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataPoints.length} dataPoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataPoints;\n}\n\n/**\n * Pull data silo information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The data silos\n */\nasync function pullDataSilos(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n pageSize = 100,\n }: {\n /** IDs of data silos to filter down */\n dataSiloIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataSiloCsvPreview[]> {\n const dataSilos: DataSiloCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 3/3] Fetching metadata for ${dataSiloIds.length} data silos`,\n ),\n );\n\n // Group by 100\n const dataSilosGrouped = chunk(dataSiloIds, pageSize);\n\n progressBar.start(dataSiloIds.length, 0);\n let total = 0;\n await mapSeries(dataSilosGrouped, async (dataSiloIdsGroup) => {\n try {\n const {\n dataSilos: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataSilos: {\n /** List of matches */\n nodes: DataSiloCsvPreview[];\n };\n }>(client, DATA_SILO_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataSiloIdsGroup,\n },\n });\n\n dataSilos.push(...nodes);\n total += dataSiloIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching data silos for IDs ${dataSiloIdsGroup.join(', ')}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataSilos.length} data silos in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataSilos;\n}\n\n/**\n * Pull all datapoints from the data inventory.\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints and data silos\n */\nexport async function pullAllDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<\n (SubDataPointCsvPreview & {\n /** Data point information */\n dataPoint: DataPointCsvPreview;\n /** Data silo information */\n dataSilo: DataSiloCsvPreview;\n })[]\n> {\n // Subdatapoint information\n const subDatapoints = await pullSubDatapoints(client, {\n dataSiloIds,\n includeGuessedCategories,\n includeAttributes,\n parentCategories,\n subCategories,\n pageSize,\n });\n\n // The datapoint ids to grab\n const dataPointIds = uniq(subDatapoints.map((point) => point.dataPointId));\n const dataPoints = await pullDatapoints(client, {\n dataPointIds,\n });\n const dataPointById = keyBy(dataPoints, 'id');\n\n // The data silo IDs to grab\n const allDataSiloIds = uniq(subDatapoints.map((point) => point.dataSiloId));\n const dataSilos = await pullDataSilos(client, {\n 
dataSiloIds: allDataSiloIds,\n });\n const dataSiloById = keyBy(dataSilos, 'id');\n\n return subDatapoints.map((subDataPoint) => ({\n ...subDataPoint,\n dataPoint: dataPointById[subDataPoint.dataPointId],\n dataSilo: dataSiloById[subDataPoint.dataSiloId],\n }));\n}\n/* eslint-enable max-lines */\n","import type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport { gql, type GraphQLClient } from 'graphql-request';\nimport { sortBy } from 'lodash-es';\nimport type { DataCategoryInput } from '../../codecs';\nimport { ENTRY_COUNT, makeGraphQLRequest } from '../graphql';\nimport { logger } from '../../logger';\n\ninterface UnstructuredSubDataPointRecommendationCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Entry or Named Entity recognized by the classifier */\n name: string;\n /** Context snippet including entry */\n contextSnippet: string;\n /** Scanned object ID */\n scannedObjectId: string;\n /** Scanned object path ID */\n scannedObjectPathId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** Personal data category */\n dataSubCategory: DataCategoryInput;\n /** Classification Status */\n status: UnstructuredSubDataPointRecommendationStatus;\n /** Confidence */\n confidence: number;\n /** Classification method */\n classificationMethod: string;\n /** Classifier version */\n classifierVersion: string;\n}\n\ninterface EntryFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Parent categories to filter down for */\n status?: UnstructuredSubDataPointRecommendationStatus[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n /** Include entry and snippet */\n includeEncryptedSnippets?: boolean;\n /** Include encryptedSamplesS3Key */\n includeEncryptedSamplesS3Key?: boolean;\n}\n/**\n * Pull unstructured subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @param options.dataSiloIds - IDs of data silos to filter down\n * @param options.status - Parent categories to filter down for\n * @param options.subCategories - Sub categories to filter down for\n * @param options.includeEncryptedSnippets - Include entry and snippet\n * @param options.includeEncryptedSamplesS3Key - Include encryptedSamplesS3Key\n * @param options.pageSize - Page size to pull in\n * @returns A promise that resolves to an array of unstructured subdatapoint recommendations\n */\nexport async function pullUnstructuredSubDataPointRecommendations(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n status,\n subCategories = [],\n includeEncryptedSnippets,\n pageSize = 100,\n }: EntryFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<UnstructuredSubDataPointRecommendationCsvPreview[]> {\n const unstructuredSubDataPointRecommendations: UnstructuredSubDataPointRecommendationCsvPreview[] =\n [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n ...(status ? { status } : {}),\n ...(dataSiloIds.length > 0 ? 
{ dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n unstructuredSubDataPointRecommendations: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** Count */\n totalCount: number;\n };\n }>(client, ENTRY_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n unstructuredSubDataPointRecommendations: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** List of matches */\n nodes: UnstructuredSubDataPointRecommendationCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliUnstructuredSubDataPointRecommendationCsvExport(\n $filterBy: UnstructuredSubDataPointRecommendationsFilterInput\n $first: Int!\n $offset: Int!\n ) {\n unstructuredSubDataPointRecommendations(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n dataSiloId\n scannedObjectPathId\n scannedObjectId\n ${includeEncryptedSnippets ? 'name' : ''}\n ${includeEncryptedSnippets ? 'contextSnippet' : ''}\n dataSubCategory {\n name\n category\n }\n status\n confidence\n classificationMethod\n classifierVersion\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n unstructuredSubDataPointRecommendations.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(unstructuredSubDataPointRecommendations, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n"]}