@transcend-io/cli 8.25.3 → 8.25.4
This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/dist/bin/bash-complete.cjs +1 -1
- package/dist/bin/cli.cjs +1 -1
- package/dist/bin/deprecated-command.cjs +2 -2
- package/dist/{chunk-GLXV5XPP.cjs → chunk-25JGKUUE.cjs} +165 -162
- package/dist/chunk-25JGKUUE.cjs.map +1 -0
- package/dist/{chunk-FIBHN6CY.cjs → chunk-2HKH3MW5.cjs} +21 -21
- package/dist/{chunk-FIBHN6CY.cjs.map → chunk-2HKH3MW5.cjs.map} +1 -1
- package/dist/{chunk-UMAN3VT7.cjs → chunk-6PTR24XG.cjs} +2 -2
- package/dist/{chunk-UMAN3VT7.cjs.map → chunk-6PTR24XG.cjs.map} +1 -1
- package/dist/{chunk-G6QNUBBK.cjs → chunk-7YHV6PUI.cjs} +2 -2
- package/dist/{chunk-G6QNUBBK.cjs.map → chunk-7YHV6PUI.cjs.map} +1 -1
- package/dist/{chunk-C3ZJMA3A.cjs → chunk-AGAP55PJ.cjs} +2 -2
- package/dist/{chunk-C3ZJMA3A.cjs.map → chunk-AGAP55PJ.cjs.map} +1 -1
- package/dist/{chunk-GDPRENOQ.cjs → chunk-GZSDZRAF.cjs} +4 -4
- package/dist/{chunk-GDPRENOQ.cjs.map → chunk-GZSDZRAF.cjs.map} +1 -1
- package/dist/{chunk-WO3KAASS.cjs → chunk-NIACJFII.cjs} +7 -7
- package/dist/{chunk-WO3KAASS.cjs.map → chunk-NIACJFII.cjs.map} +1 -1
- package/dist/{chunk-DV57HBVO.cjs → chunk-V5WGGXWV.cjs} +4 -4
- package/dist/{chunk-DV57HBVO.cjs.map → chunk-V5WGGXWV.cjs.map} +1 -1
- package/dist/{chunk-HJTYLFGJ.cjs → chunk-WF7EGPWL.cjs} +2 -2
- package/dist/{chunk-HJTYLFGJ.cjs.map → chunk-WF7EGPWL.cjs.map} +1 -1
- package/dist/{chunk-EDVVHIFK.cjs → chunk-WV7PEYHE.cjs} +2 -2
- package/dist/{chunk-EDVVHIFK.cjs.map → chunk-WV7PEYHE.cjs.map} +1 -1
- package/dist/{chunk-HPERBM2R.cjs → chunk-YRLFFYWT.cjs} +2 -2
- package/dist/{chunk-HPERBM2R.cjs.map → chunk-YRLFFYWT.cjs.map} +1 -1
- package/dist/{chunk-FGBTRT4J.cjs → chunk-Z42WWJXP.cjs} +2 -2
- package/dist/{chunk-FGBTRT4J.cjs.map → chunk-Z42WWJXP.cjs.map} +1 -1
- package/dist/{chunk-5MIDKBL7.cjs → chunk-ZI4VPQTU.cjs} +2 -2
- package/dist/{chunk-5MIDKBL7.cjs.map → chunk-ZI4VPQTU.cjs.map} +1 -1
- package/dist/{chunk-7YR2TG7U.cjs → chunk-ZY2OEIDM.cjs} +2 -2
- package/dist/{chunk-7YR2TG7U.cjs.map → chunk-ZY2OEIDM.cjs.map} +1 -1
- package/dist/{impl-GRQJMVJE.cjs → impl-2LKYEIZI.cjs} +2 -2
- package/dist/{impl-GRQJMVJE.cjs.map → impl-2LKYEIZI.cjs.map} +1 -1
- package/dist/{impl-PVIVOTA4.cjs → impl-3YE4WAQP.cjs} +2 -2
- package/dist/{impl-PVIVOTA4.cjs.map → impl-3YE4WAQP.cjs.map} +1 -1
- package/dist/{impl-DRHYZQSB.cjs → impl-6QA74NVV.cjs} +2 -2
- package/dist/{impl-DRHYZQSB.cjs.map → impl-6QA74NVV.cjs.map} +1 -1
- package/dist/{impl-PAKRFLP6.cjs → impl-6WJQPEMB.cjs} +2 -2
- package/dist/{impl-PAKRFLP6.cjs.map → impl-6WJQPEMB.cjs.map} +1 -1
- package/dist/{impl-KJQDZ5CD.cjs → impl-7I7WLWJQ.cjs} +2 -2
- package/dist/{impl-KJQDZ5CD.cjs.map → impl-7I7WLWJQ.cjs.map} +1 -1
- package/dist/{impl-5Z557CRG.cjs → impl-ACVRWX3I.cjs} +2 -2
- package/dist/{impl-5Z557CRG.cjs.map → impl-ACVRWX3I.cjs.map} +1 -1
- package/dist/{impl-2YWGSU4V.cjs → impl-AUV3SVBV.cjs} +3 -3
- package/dist/{impl-2YWGSU4V.cjs.map → impl-AUV3SVBV.cjs.map} +1 -1
- package/dist/impl-BDFYLKR3.cjs +2 -0
- package/dist/{impl-C3XI35SQ.cjs.map → impl-BDFYLKR3.cjs.map} +1 -1
- package/dist/{impl-2HC6MNNN.cjs → impl-BLJACUHU.cjs} +2 -2
- package/dist/{impl-2HC6MNNN.cjs.map → impl-BLJACUHU.cjs.map} +1 -1
- package/dist/{impl-XCXP4S3J.cjs → impl-BSAQKPCH.cjs} +2 -2
- package/dist/{impl-XCXP4S3J.cjs.map → impl-BSAQKPCH.cjs.map} +1 -1
- package/dist/{impl-XUBKDJNI.cjs → impl-BYTZY3Z6.cjs} +2 -2
- package/dist/{impl-XUBKDJNI.cjs.map → impl-BYTZY3Z6.cjs.map} +1 -1
- package/dist/{impl-JTLAUUYX.cjs → impl-CRNRICMD.cjs} +2 -2
- package/dist/{impl-JTLAUUYX.cjs.map → impl-CRNRICMD.cjs.map} +1 -1
- package/dist/{impl-IQVMZJD7.cjs → impl-DHRIGZHG.cjs} +2 -2
- package/dist/{impl-IQVMZJD7.cjs.map → impl-DHRIGZHG.cjs.map} +1 -1
- package/dist/{impl-M6JBOLVE.cjs → impl-EQTULPSC.cjs} +2 -2
- package/dist/{impl-M6JBOLVE.cjs.map → impl-EQTULPSC.cjs.map} +1 -1
- package/dist/{impl-5H7LSGMO.cjs → impl-FPAX7DCW.cjs} +2 -2
- package/dist/{impl-5H7LSGMO.cjs.map → impl-FPAX7DCW.cjs.map} +1 -1
- package/dist/{impl-LVGKFMBD.cjs → impl-GW25TQMJ.cjs} +2 -2
- package/dist/{impl-LVGKFMBD.cjs.map → impl-GW25TQMJ.cjs.map} +1 -1
- package/dist/{impl-QTTAMUHI.cjs → impl-H4RE4D4S.cjs} +2 -2
- package/dist/{impl-QTTAMUHI.cjs.map → impl-H4RE4D4S.cjs.map} +1 -1
- package/dist/impl-I4OB5JJW.cjs +2 -0
- package/dist/{impl-O742JPYW.cjs.map → impl-I4OB5JJW.cjs.map} +1 -1
- package/dist/{impl-X6RMXDJS.cjs → impl-INNQGTTN.cjs} +2 -2
- package/dist/{impl-X6RMXDJS.cjs.map → impl-INNQGTTN.cjs.map} +1 -1
- package/dist/{impl-52NIQWXG.cjs → impl-JAZLFVKB.cjs} +2 -2
- package/dist/{impl-52NIQWXG.cjs.map → impl-JAZLFVKB.cjs.map} +1 -1
- package/dist/{impl-6BPL3Z3G.cjs → impl-JKCLI6YJ.cjs} +4 -4
- package/dist/{impl-6BPL3Z3G.cjs.map → impl-JKCLI6YJ.cjs.map} +1 -1
- package/dist/{impl-VINV2XOZ.cjs → impl-JL7GLOZF.cjs} +2 -2
- package/dist/{impl-VINV2XOZ.cjs.map → impl-JL7GLOZF.cjs.map} +1 -1
- package/dist/{impl-4CRBCTLC.cjs → impl-KXF7BCHQ.cjs} +2 -2
- package/dist/{impl-4CRBCTLC.cjs.map → impl-KXF7BCHQ.cjs.map} +1 -1
- package/dist/{impl-FJO7BPTB.cjs → impl-LPFK3Y6E.cjs} +2 -2
- package/dist/{impl-FJO7BPTB.cjs.map → impl-LPFK3Y6E.cjs.map} +1 -1
- package/dist/{impl-4MW5YXMP.cjs → impl-LWCLFXJE.cjs} +2 -2
- package/dist/{impl-4MW5YXMP.cjs.map → impl-LWCLFXJE.cjs.map} +1 -1
- package/dist/{impl-BPFDJMJY.cjs → impl-MLUSR6C2.cjs} +2 -2
- package/dist/{impl-BPFDJMJY.cjs.map → impl-MLUSR6C2.cjs.map} +1 -1
- package/dist/{impl-I6NMF5QP.cjs → impl-MOZKJUJD.cjs} +5 -5
- package/dist/{impl-I6NMF5QP.cjs.map → impl-MOZKJUJD.cjs.map} +1 -1
- package/dist/{impl-UDUTUFUT.cjs → impl-N5DU3D2L.cjs} +2 -2
- package/dist/{impl-UDUTUFUT.cjs.map → impl-N5DU3D2L.cjs.map} +1 -1
- package/dist/{impl-5FBCF5HY.cjs → impl-ND36KD2S.cjs} +2 -2
- package/dist/{impl-5FBCF5HY.cjs.map → impl-ND36KD2S.cjs.map} +1 -1
- package/dist/{impl-YM2OKSJ4.cjs → impl-OB57ILTQ.cjs} +2 -2
- package/dist/{impl-YM2OKSJ4.cjs.map → impl-OB57ILTQ.cjs.map} +1 -1
- package/dist/{impl-RVJLQVKI.cjs → impl-OZSXSSM2.cjs} +2 -2
- package/dist/{impl-RVJLQVKI.cjs.map → impl-OZSXSSM2.cjs.map} +1 -1
- package/dist/{impl-IXVIQGXK.cjs → impl-PTM53QZ4.cjs} +2 -2
- package/dist/{impl-IXVIQGXK.cjs.map → impl-PTM53QZ4.cjs.map} +1 -1
- package/dist/{impl-22JGFXXK.cjs → impl-S2ZHNQPA.cjs} +2 -2
- package/dist/{impl-22JGFXXK.cjs.map → impl-S2ZHNQPA.cjs.map} +1 -1
- package/dist/{impl-MSDEMRBZ.cjs → impl-U36BPXJJ.cjs} +2 -2
- package/dist/{impl-MSDEMRBZ.cjs.map → impl-U36BPXJJ.cjs.map} +1 -1
- package/dist/{impl-DVNUPZK7.cjs → impl-U52ECRAI.cjs} +2 -2
- package/dist/{impl-DVNUPZK7.cjs.map → impl-U52ECRAI.cjs.map} +1 -1
- package/dist/{impl-XWVPG2AO.cjs → impl-UFMS6SI7.cjs} +2 -2
- package/dist/{impl-XWVPG2AO.cjs.map → impl-UFMS6SI7.cjs.map} +1 -1
- package/dist/{impl-QYNAMOYS.cjs → impl-UFPWZRKN.cjs} +2 -2
- package/dist/{impl-QYNAMOYS.cjs.map → impl-UFPWZRKN.cjs.map} +1 -1
- package/dist/{impl-BA6B2C6M.cjs → impl-UXAQZEE7.cjs} +2 -2
- package/dist/{impl-BA6B2C6M.cjs.map → impl-UXAQZEE7.cjs.map} +1 -1
- package/dist/{impl-T7EXOBCA.cjs → impl-XJCUQGWV.cjs} +3 -3
- package/dist/{impl-T7EXOBCA.cjs.map → impl-XJCUQGWV.cjs.map} +1 -1
- package/dist/{impl-BGBHVFXR.cjs → impl-YRWRS6KV.cjs} +2 -2
- package/dist/{impl-BGBHVFXR.cjs.map → impl-YRWRS6KV.cjs.map} +1 -1
- package/dist/{impl-M7XLIXX7.cjs → impl-ZJ7OGSS6.cjs} +2 -2
- package/dist/{impl-M7XLIXX7.cjs.map → impl-ZJ7OGSS6.cjs.map} +1 -1
- package/dist/index.cjs +3 -3
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +20 -4
- package/package.json +2 -1
- package/dist/chunk-GLXV5XPP.cjs.map +0 -1
- package/dist/impl-C3XI35SQ.cjs +0 -2
- package/dist/impl-O742JPYW.cjs +0 -2
package/dist/{chunk-UMAN3VT7.cjs.map → chunk-6PTR24XG.cjs.map}
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-UMAN3VT7.cjs","../src/lib/code-scanning/integrations/cocoaPods.ts","../src/lib/code-scanning/integrations/gradle.ts","../src/lib/code-scanning/integrations/pubspec.ts","../src/lib/code-scanning/integrations/kotlin.ts"],"names":["POD_TARGET_REGEX","POD_PACKAGE_REGEX","cocoaPods","filePath","fileContents","readFileSync","targets","findAllWithRegex","packages","target","ind","CodePackageType","pkg","GRADLE_IMPLEMENTATION_REGEX","GRADLE_PLUGIN_REGEX","GRADLE_IMPLEMENTATION_GROUP_REGEX","GRADLE_APPLICATION_NAME_REGEX","gradle","directory","dirname","targetPlugins","targetGroups","applications"],"mappings":"AAAA,u/BAAwC,wDAAyC,wBCApD,qDAGI,2DACD,IAE1BA,CAAAA,CAAmB,wBAAA,CACnBC,CAAAA,CAAoB,4CAAA,CAEbC,CAAAA,CAAgC,CAC3C,cAAA,CAAgB,CAAC,SAAS,CAAA,CAC1B,UAAA,CAAY,CAAC,MAAM,CAAA,CACnB,YAAA,CAAeC,CAAAA,EAAa,CAC1B,IAAMC,CAAAA,CAAeC,8BAAAA,CAAaF,CAAU,OAAO,CAAA,CAE7CG,CAAAA,CAAUC,yCAAAA,CAEZ,KAAA,CAAO,IAAI,MAAA,CAAOP,CAAAA,CAAkB,GAAG,CAAA,CACvC,OAAA,CAAS,CAAC,QAAA,CAAU,MAAA,CAAQ,QAAQ,CACtC,CAAA,CACAI,CACF,CAAA,CACMI,CAAAA,CAAWD,yCAAAA,CAEb,KAAA,CAAO,IAAI,MAAA,CAAON,CAAAA,CAAmB,GAAG,CAAA,CACxC,OAAA,CAAS,CACP,QAAA,CACA,MAAA,CACA,QAAA,CACA,OAAA,CACA,QAAA,CACA,SAAA,CACA,QACF,CACF,CAAA,CACAG,CACF,CAAA,CAiBA,OAf+BE,CAAAA,CAAQ,GAAA,CAAI,CAACG,CAAAA,CAAQC,CAAAA,CAAAA,EAAAA,CAAS,CAC3D,IAAA,CAAMD,CAAAA,CAAO,IAAA,CACb,IAAA,CAAME,6BAAAA,CAAgB,SAAA,CACtB,uBAAA,CAAyBH,CAAAA,CACtB,MAAA,CACEI,CAAAA,EACCA,CAAAA,CAAI,UAAA,CAAaH,CAAAA,CAAO,UAAA,EAAA,CACvB,CAACH,CAAAA,CAAQI,CAAAA,CAAM,CAAC,CAAA,EAAKE,CAAAA,CAAI,UAAA,CAAaN,CAAAA,CAAQI,CAAAA,CAAM,CAAC,CAAA,CAAE,UAAA,CAC5D,CAAA,CACC,GAAA,CAAKE,CAAAA,EAAAA,CAAS,CACb,IAAA,CAAMA,CAAAA,CAAI,IAAA,CACV,OAAA,CAASA,CAAAA,CAAI,OACf,CAAA,CAAE,CACN,CAAA,CAAE,CAGJ,CACF,CAAA,CCvDA,4BAGwB,IAElBC,CAAAA,CACJ,gDAAA,CACIC,CAAAA,CAAsB,yCAAA,CACtBC,CAAAA,CACJ,uGAAA,CACIC,CAAAA,CAAgC,0BAAA,CAYzBC,CAAAA,CAA6B,CACxC,cAAA,CAAgB,CAAC,gBAAgB,CAAA,CACjC,UAAA,CAAY,CACV,oBAAA,CACA,oBAAA,CACA,2BACF,CAAA,CACA,YAAA,CAAed,CAAAA,EAAa,CAC1B,IAAMC,CAAAA,CAAeC,8BAAAA,CAAaF,CAAU,OAAO,CAAA,CAC7Ce,CAAAA,CAAYC,2BAAAA,CAAgB,CAAA,CAE5Bb,CAAAA,CAAUC,yCAAAA,CAEZ,KAAA,CAAO,IAAI,MAAA,CAAOM,CAAAA,CAA6B,GAAG,CAAA,CAClD,OAAA,CAAS,CAAC,OAAA,CAAS,QAAA,CAAU,MAAA,CAAQ,MAAA,CAAQ,SAAA,CAAW,QAAQ,CAClE,CAAA,CACAT,CACF,CAAA,CACMgB,CAAAA,CAAgBb,yCAAAA,CAElB,KAAA,CAAO,IAAI,MAAA,CAAOO,CAAAA,CAAqB,GAAG,CAAA,CAC1C,OAAA,CAAS,CAAC,QAAA,CAAU,MAAA,CAAQ,OAAA,CAAS,SAAA,CAAW,QAAQ,CAC1D,CAAA,CACAV,CACF,CAAA,CACMiB,CAAAA,CAAed,yCAAAA,CAEjB,KAAA,CAAO,IAAI,MAAA,CAAOQ,CAAAA,CAAmC,GAAG,CAAA,CACxD,OAAA,CAAS,CACP,QAAA,CACA,QAAA,CACA,OAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,MAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,SAAA,CACA,QACF,CACF,CAAA,CACAX,CACF,CAAA,CACMkB,CAAAA,CAAef,yCAAAA,CAEjB,KAAA,CAAO,IAAI,MAAA,CAAOS,CAAAA,CAA+B,GAAG,CAAA,CACpD,OAAA,CAAS,CAAC,OAAA,CAAS,MAAM,CAC3B,CAAA,CACAZ,CACF,CAAA,CACA,EAAA,CAAIkB,CAAAA,CAAa,MAAA,CAAS,CAAA,CACxB,MAAM,IAAI,KAAA,CAAM,CAAA,0CAAA,EAA6CnB,CAAQ,CAAA,CAAA;AC/CjE;AC3BD","file":"/home/runner/work/cli/cli/dist/chunk-UMAN3VT7.cjs","sourcesContent":[null,"import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { CodePackageSdk } from '../../../codecs';\nimport { findAllWithRegex } from '@transcend-io/type-utils';\nimport { CodePackageType } from '@transcend-io/privacy-types';\n\nconst POD_TARGET_REGEX = /target ('|\")(.*?)('|\")/;\nconst POD_PACKAGE_REGEX = /pod ('|\")(.*?)('|\")(, ('|\")~> (.+?)('|\")|)/;\n\nexport const cocoaPods: CodeScanningConfig = {\n supportedFiles: ['Podfile'],\n ignoreDirs: 
['Pods'],\n scanFunction: (filePath) => {\n const fileContents = readFileSync(filePath, 'utf-8');\n\n const targets = findAllWithRegex(\n {\n value: new RegExp(POD_TARGET_REGEX, 'g'),\n matches: ['quote1', 'name', 'quote2'],\n },\n fileContents,\n );\n const packages = findAllWithRegex(\n {\n value: new RegExp(POD_PACKAGE_REGEX, 'g'),\n matches: [\n 'quote1',\n 'name',\n 'quote2',\n 'extra',\n 'quote3',\n 'version',\n 'quote4',\n ],\n },\n fileContents,\n );\n\n const deps: CodePackageSdk[] = targets.map((target, ind) => ({\n name: target.name,\n type: CodePackageType.CocoaPods,\n softwareDevelopmentKits: packages\n .filter(\n (pkg) =>\n pkg.matchIndex > target.matchIndex &&\n (!targets[ind + 1] || pkg.matchIndex < targets[ind + 1].matchIndex),\n )\n .map((pkg) => ({\n name: pkg.name,\n version: pkg.version,\n })),\n }));\n\n return deps;\n },\n};\n","import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { findAllWithRegex } from '@transcend-io/type-utils';\nimport { dirname } from 'node:path';\n\nconst GRADLE_IMPLEMENTATION_REGEX =\n /implementation( *)('|\")(.+?):(.+?):(.+?|)('|\")/;\nconst GRADLE_PLUGIN_REGEX = /apply plugin: *('|\")(.+?)(:(.+?)|)('|\")/;\nconst GRADLE_IMPLEMENTATION_GROUP_REGEX =\n /implementation group:( *)('|\")(.+?)('|\"),( *)name:( *)('|\")(.+?)('|\"),( *)version:( *)('|\")(.+?)('|\")/;\nconst GRADLE_APPLICATION_NAME_REGEX = /applicationId( *)\"(.+?)\"/;\n\n/**\n * So far, there are three ways of defining dependencies that is supported\n * implementation group: 'org.eclipse.jdt', name: 'org.eclipse.jdt.core', version: '3.28.0'\n * or\n * implementation 'com.google.firebase:firebase-analytics:18.0.0'\n * or\n * apply plugin: 'com.google.gms.google-services'\n *\n * single and double quotes are both recognized\n */\nexport const gradle: CodeScanningConfig = {\n supportedFiles: ['build.gradle**'],\n ignoreDirs: [\n 'gradle-app.setting',\n 'gradle-wrapper.jar',\n 'gradle-wrapper.properties',\n ],\n scanFunction: (filePath) => {\n const fileContents = readFileSync(filePath, 'utf-8');\n const directory = dirname(filePath);\n\n const targets = findAllWithRegex(\n {\n value: new RegExp(GRADLE_IMPLEMENTATION_REGEX, 'g'),\n matches: ['space', 'quote1', 'name', 'path', 'version', 'quote2'],\n },\n fileContents,\n );\n const targetPlugins = findAllWithRegex(\n {\n value: new RegExp(GRADLE_PLUGIN_REGEX, 'g'),\n matches: ['quote1', 'name', 'group', 'version', 'quote2'],\n },\n fileContents,\n );\n const targetGroups = findAllWithRegex(\n {\n value: new RegExp(GRADLE_IMPLEMENTATION_GROUP_REGEX, 'g'),\n matches: [\n 'space1',\n 'quote1',\n 'group',\n 'quote2',\n 'space2',\n 'space3',\n 'quote3',\n 'name',\n 'quote4',\n 'space4',\n 'space5',\n 'quote5',\n 'version',\n 'quote6',\n ],\n },\n fileContents,\n );\n const applications = findAllWithRegex(\n {\n value: new RegExp(GRADLE_APPLICATION_NAME_REGEX, 'g'),\n matches: ['space', 'name'],\n },\n fileContents,\n );\n if (applications.length > 1) {\n throw new Error(`Expected only one applicationId per file: ${filePath}`);\n }\n\n return [\n {\n name: applications[0]?.name || directory.split('/').pop()!,\n softwareDevelopmentKits: [\n ...targets,\n ...targetGroups,\n ...targetPlugins,\n ].map((target) => ({\n name: target.name,\n version: target.version || undefined,\n })),\n },\n ];\n },\n};\n","import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { CodePackageType } from '@transcend-io/privacy-types';\nimport yaml from 'js-yaml';\nimport { dirname } 
from 'node:path';\n\n/**\n * Remove YAML comments from a string\n *\n * @param yamlString - YAML string\n * @returns String without comments\n */\nfunction removeYAMLComments(yamlString: string): string {\n return yamlString\n .split('\\n')\n .map((line) => {\n // Remove inline comments\n const commentIndex = line.indexOf('#');\n if (commentIndex > -1) {\n // Check if '#' is not inside a string\n if (\n !line.substring(0, commentIndex).includes('\"') &&\n !line.substring(0, commentIndex).includes(\"'\")\n ) {\n return line.substring(0, commentIndex).trim();\n }\n }\n return line;\n })\n .filter((line) => line.length > 0)\n .join('\\n');\n}\n\nexport const pubspec: CodeScanningConfig = {\n supportedFiles: ['pubspec.yml'],\n ignoreDirs: ['build'],\n scanFunction: (filePath) => {\n const directory = dirname(filePath);\n const fileContents = readFileSync(filePath, 'utf-8');\n const {\n name,\n description,\n dev_dependencies = {},\n dependencies = {},\n } = yaml.load(removeYAMLComments(fileContents)) as {\n /** Name */\n name?: string;\n /** Description */\n description?: string;\n /** Dev dependencies */\n dev_dependencies?: { [k in string]: number | Record<string, string> };\n /** Dependencies */\n dependencies?: { [k in string]: number | Record<string, string> };\n };\n return [\n {\n name: name || directory.split('/').pop()!,\n description,\n type: CodePackageType.RequirementsTxt,\n softwareDevelopmentKits: [\n ...Object.entries(dependencies).map(([name, version]) => ({\n name,\n version:\n typeof version === 'string'\n ? version\n : typeof version === 'number'\n ? version.toString()\n : version?.sdk,\n })),\n ...Object.entries(dev_dependencies).map(([name, version]) => ({\n name,\n version:\n typeof version === 'string'\n ? version\n : typeof version === 'number'\n ? version.toString()\n : version?.sdk,\n isDevDependency: true,\n })),\n ],\n },\n ];\n },\n};\n","import { readFileSync } from 'node:fs';\nimport { dirname } from 'node:path';\nimport { CodeScanningConfig } from '../types';\nimport { findAllWithRegex } from '@transcend-io/type-utils';\n\n/**\n * Kotlin DSL (build.gradle.kts) dependency & plugin parsing\n */\n\nconst KTS_DEP_CONFIGS =\n // eslint-disable-next-line max-len\n '(implementation|api|kapt|ksp|debugImplementation|releaseImplementation|androidTestImplementation|testImplementation|compileOnly|runtimeOnly)';\n\n// e.g. implementation(\"com.google.firebase:firebase-analytics:18.0.0\")\nconst KTS_DEP_STRING_COORDS_REGEX = new RegExp(\n `${KTS_DEP_CONFIGS}\\\\s*\\\\(\\\\s*[\"']([^\"':\\\\s]+):([^\"':\\\\s]+):?([^\"']*)[\"']\\\\s*\\\\)`,\n 'g',\n);\n// captures: [1]=config, [2]=group, [3]=artifact, [4]=version (may be '')\n\n// e.g. implementation(platform(\"com.google.firebase:firebase-bom:33.1.2\"))\nconst KTS_DEP_PLATFORM_REGEX = new RegExp(\n `${KTS_DEP_CONFIGS}\\\\s*\\\\(\\\\s*platform\\\\(\\\\s*[\"']([^\"':\\\\s]+):([^\"':\\\\s]+):?([^\"']*)[\"']\\\\s*\\\\)\\\\s*\\\\)`,\n 'g',\n);\n\n// e.g. 
implementation(libs.androidx.appcompat) / implementation(libs[\"androidx-core-ktx\"])\nconst KTS_DEP_LIBS_ALIAS_REGEX = new RegExp(\n `${KTS_DEP_CONFIGS}\\\\s*\\\\(\\\\s*libs(?:\\\\.[\\\\w\\\\-\\\\.]+|\\\\[[\"'][^\"']+[\"']\\\\])\\\\s*\\\\)`,\n 'g',\n);\n\n// Plugins:\n// plugins { id(\"com.google.gms.google-services\") version \"4.4.2\" apply false }\n// plugins { id(\"org.jetbrains.kotlin.android\") }\n// apply(plugin = \"newrelic\")\n// plugins { alias(libs.plugins.kotlin.android) }\nconst KTS_PLUGIN_ID_REGEX =\n /id\\s*\\(\\s*[\"']([^\"']+)[\"']\\s*\\)(?:\\s*version\\s*[\"']([^\"']+)[\"'])?/g;\nconst KTS_PLUGIN_APPLY_REGEX =\n /apply\\s*\\(\\s*plugin\\s*=\\s*[\"']([^\"']+)[\"']\\s*\\)/g;\nconst KTS_PLUGIN_ALIAS_REGEX =\n /plugins\\s*\\{[^}]*alias\\s*\\(\\s*libs(?:\\.plugins)?(?:\\.[\\w\\-.]+|\\[[\"'][^\"']+[\"']\\])\\s*\\)[^}]*\\}/g;\n\n// applicationId in Kotlin DSL:\n// applicationId = \"com.foo.bar\"\n// applicationId(\"com.foo.bar\")\nconst KTS_APPLICATION_ID_EQ_REGEX = /applicationId\\s*=\\s*[\"']([^\"']+)[\"']/g;\nconst KTS_APPLICATION_ID_CALL_REGEX =\n /applicationId\\s*\\(\\s*[\"']([^\"']+)[\"']\\s*\\)/g;\n\n/**\n * Input dep entry (partial)\n */\ntype DepInput = {\n /** Name of the dependency */\n name: string;\n /** Version of the dependency */\n version?: string;\n};\n\n/**\n * Helper to normalize a parsed dep entry\n *\n * @param name - name\n * @param version - version\n * @returns normalized entry\n */\nfunction depEntry(name: string, version?: string): DepInput {\n const v =\n version && version.trim().length > 0 && version !== '_'\n ? version.trim()\n : undefined;\n return { name, version: v };\n}\n\nexport const kotlin: CodeScanningConfig = {\n supportedFiles: ['**/build.gradle.kts', '**/*.gradle.kts'],\n ignoreDirs: [\n 'gradle-app.setting',\n 'gradle-wrapper.jar',\n 'gradle-wrapper.properties',\n ],\n scanFunction: (filePath) => {\n const fileContents = readFileSync(filePath, 'utf-8');\n const directory = dirname(filePath);\n\n // ---------- applicationId ----------\n const appIds = [\n ...findAllWithRegex(\n { value: KTS_APPLICATION_ID_EQ_REGEX, matches: ['name'] },\n fileContents,\n ),\n ...findAllWithRegex(\n { value: KTS_APPLICATION_ID_CALL_REGEX, matches: ['name'] },\n fileContents,\n ),\n ];\n if (appIds.length > 1) {\n throw new Error(`Expected only one applicationId per file: ${filePath}`);\n }\n const appName = appIds[0]?.name || directory.split('/').pop()!;\n\n // ---------- dependencies ----------\n const deps: Array<DepInput> = [];\n\n // \"group:artifact:version\"\n for (const m of fileContents.matchAll(KTS_DEP_STRING_COORDS_REGEX)) {\n const [, , group, artifact, version] = m;\n deps.push(depEntry(`${group}:${artifact}`, version));\n }\n\n // platform(\"group:artifact:version\")\n for (const m of fileContents.matchAll(KTS_DEP_PLATFORM_REGEX)) {\n const [, , group, artifact, version] = m;\n // Record as regular coord (you may prefer to tag as BoM separately)\n deps.push(depEntry(`${group}:${artifact}`, version));\n }\n\n // libs aliases (version catalogs) — keep alias as name, unknown version\n for (const m of fileContents.matchAll(KTS_DEP_LIBS_ALIAS_REGEX)) {\n // Grab the exact token as name (best-effort)\n const token = m[0]\n .replace(/^[^(]+\\(\\s*/, '')\n .replace(/\\)\\s*$/, '')\n .trim(); // e.g., libs.androidx.appcompat or libs[\"androidx-core-ktx\"]\n deps.push(depEntry(token));\n }\n\n // ---------- plugins ----------\n const plugins: Array<DepInput> = [];\n\n for (const m of fileContents.matchAll(KTS_PLUGIN_ID_REGEX)) {\n const [, pid, pver] 
= m;\n plugins.push(depEntry(pid, pver));\n }\n\n for (const m of fileContents.matchAll(KTS_PLUGIN_APPLY_REGEX)) {\n const [, pid] = m;\n plugins.push(depEntry(pid));\n }\n\n // alias(libs.plugins...) — keep alias token (no version)\n if (KTS_PLUGIN_ALIAS_REGEX.test(fileContents)) {\n // Collect all alias lines to preserve identifiers; light parse:\n const aliasMatches = fileContents.matchAll(\n /alias\\s*\\(\\s*(libs(?:\\.plugins)?(?:\\.[\\w\\-.]+|\\[[\"'][^\"']+[\"']\\]))\\s*\\)/g,\n );\n for (const m of aliasMatches) {\n plugins.push(depEntry(m[1]));\n }\n }\n\n // ---------- compose final list ----------\n // Merge deps + plugins as \"softwareDevelopmentKits\"\n const softwareDevelopmentKits = [...deps, ...plugins]\n // de-dup by name+version\n .reduce(\n (acc, cur) => {\n const key = `${cur.name}@@${cur.version || ''}`;\n if (!acc.map.has(key)) {\n acc.map.set(key, cur);\n acc.list.push(cur);\n }\n return acc;\n },\n {\n map: new Map<string, DepInput>(),\n list: [] as Array<DepInput>,\n },\n ).list;\n\n return [\n {\n name: appName,\n softwareDevelopmentKits,\n },\n ];\n },\n};\n"]}
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-6PTR24XG.cjs","../src/lib/code-scanning/integrations/cocoaPods.ts","../src/lib/code-scanning/integrations/gradle.ts","../src/lib/code-scanning/integrations/pubspec.ts","../src/lib/code-scanning/integrations/kotlin.ts"],"names":["POD_TARGET_REGEX","POD_PACKAGE_REGEX","cocoaPods","filePath","fileContents","readFileSync","targets","findAllWithRegex","packages","target","ind","CodePackageType","pkg","GRADLE_IMPLEMENTATION_REGEX","GRADLE_PLUGIN_REGEX","GRADLE_IMPLEMENTATION_GROUP_REGEX","GRADLE_APPLICATION_NAME_REGEX","gradle","directory","dirname","targetPlugins","targetGroups","applications"],"mappings":"AAAA,u/BAAwC,wDAAyC,wBCApD,qDAGI,2DACD,IAE1BA,CAAAA,CAAmB,wBAAA,CACnBC,CAAAA,CAAoB,4CAAA,CAEbC,CAAAA,CAAgC,CAC3C,cAAA,CAAgB,CAAC,SAAS,CAAA,CAC1B,UAAA,CAAY,CAAC,MAAM,CAAA,CACnB,YAAA,CAAeC,CAAAA,EAAa,CAC1B,IAAMC,CAAAA,CAAeC,8BAAAA,CAAaF,CAAU,OAAO,CAAA,CAE7CG,CAAAA,CAAUC,yCAAAA,CAEZ,KAAA,CAAO,IAAI,MAAA,CAAOP,CAAAA,CAAkB,GAAG,CAAA,CACvC,OAAA,CAAS,CAAC,QAAA,CAAU,MAAA,CAAQ,QAAQ,CACtC,CAAA,CACAI,CACF,CAAA,CACMI,CAAAA,CAAWD,yCAAAA,CAEb,KAAA,CAAO,IAAI,MAAA,CAAON,CAAAA,CAAmB,GAAG,CAAA,CACxC,OAAA,CAAS,CACP,QAAA,CACA,MAAA,CACA,QAAA,CACA,OAAA,CACA,QAAA,CACA,SAAA,CACA,QACF,CACF,CAAA,CACAG,CACF,CAAA,CAiBA,OAf+BE,CAAAA,CAAQ,GAAA,CAAI,CAACG,CAAAA,CAAQC,CAAAA,CAAAA,EAAAA,CAAS,CAC3D,IAAA,CAAMD,CAAAA,CAAO,IAAA,CACb,IAAA,CAAME,6BAAAA,CAAgB,SAAA,CACtB,uBAAA,CAAyBH,CAAAA,CACtB,MAAA,CACEI,CAAAA,EACCA,CAAAA,CAAI,UAAA,CAAaH,CAAAA,CAAO,UAAA,EAAA,CACvB,CAACH,CAAAA,CAAQI,CAAAA,CAAM,CAAC,CAAA,EAAKE,CAAAA,CAAI,UAAA,CAAaN,CAAAA,CAAQI,CAAAA,CAAM,CAAC,CAAA,CAAE,UAAA,CAC5D,CAAA,CACC,GAAA,CAAKE,CAAAA,EAAAA,CAAS,CACb,IAAA,CAAMA,CAAAA,CAAI,IAAA,CACV,OAAA,CAASA,CAAAA,CAAI,OACf,CAAA,CAAE,CACN,CAAA,CAAE,CAGJ,CACF,CAAA,CCvDA,4BAGwB,IAElBC,CAAAA,CACJ,gDAAA,CACIC,CAAAA,CAAsB,yCAAA,CACtBC,CAAAA,CACJ,uGAAA,CACIC,CAAAA,CAAgC,0BAAA,CAYzBC,CAAAA,CAA6B,CACxC,cAAA,CAAgB,CAAC,gBAAgB,CAAA,CACjC,UAAA,CAAY,CACV,oBAAA,CACA,oBAAA,CACA,2BACF,CAAA,CACA,YAAA,CAAed,CAAAA,EAAa,CAC1B,IAAMC,CAAAA,CAAeC,8BAAAA,CAAaF,CAAU,OAAO,CAAA,CAC7Ce,CAAAA,CAAYC,2BAAAA,CAAgB,CAAA,CAE5Bb,CAAAA,CAAUC,yCAAAA,CAEZ,KAAA,CAAO,IAAI,MAAA,CAAOM,CAAAA,CAA6B,GAAG,CAAA,CAClD,OAAA,CAAS,CAAC,OAAA,CAAS,QAAA,CAAU,MAAA,CAAQ,MAAA,CAAQ,SAAA,CAAW,QAAQ,CAClE,CAAA,CACAT,CACF,CAAA,CACMgB,CAAAA,CAAgBb,yCAAAA,CAElB,KAAA,CAAO,IAAI,MAAA,CAAOO,CAAAA,CAAqB,GAAG,CAAA,CAC1C,OAAA,CAAS,CAAC,QAAA,CAAU,MAAA,CAAQ,OAAA,CAAS,SAAA,CAAW,QAAQ,CAC1D,CAAA,CACAV,CACF,CAAA,CACMiB,CAAAA,CAAed,yCAAAA,CAEjB,KAAA,CAAO,IAAI,MAAA,CAAOQ,CAAAA,CAAmC,GAAG,CAAA,CACxD,OAAA,CAAS,CACP,QAAA,CACA,QAAA,CACA,OAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,MAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,SAAA,CACA,QACF,CACF,CAAA,CACAX,CACF,CAAA,CACMkB,CAAAA,CAAef,yCAAAA,CAEjB,KAAA,CAAO,IAAI,MAAA,CAAOS,CAAAA,CAA+B,GAAG,CAAA,CACpD,OAAA,CAAS,CAAC,OAAA,CAAS,MAAM,CAC3B,CAAA,CACAZ,CACF,CAAA,CACA,EAAA,CAAIkB,CAAAA,CAAa,MAAA,CAAS,CAAA,CACxB,MAAM,IAAI,KAAA,CAAM,CAAA,0CAAA,EAA6CnB,CAAQ,CAAA,CAAA;AC/CjE;AC3BD","file":"/home/runner/work/cli/cli/dist/chunk-6PTR24XG.cjs","sourcesContent":[null,"import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { CodePackageSdk } from '../../../codecs';\nimport { findAllWithRegex } from '@transcend-io/type-utils';\nimport { CodePackageType } from '@transcend-io/privacy-types';\n\nconst POD_TARGET_REGEX = /target ('|\")(.*?)('|\")/;\nconst POD_PACKAGE_REGEX = /pod ('|\")(.*?)('|\")(, ('|\")~> (.+?)('|\")|)/;\n\nexport const cocoaPods: CodeScanningConfig = {\n supportedFiles: ['Podfile'],\n ignoreDirs: 
['Pods'],\n scanFunction: (filePath) => {\n const fileContents = readFileSync(filePath, 'utf-8');\n\n const targets = findAllWithRegex(\n {\n value: new RegExp(POD_TARGET_REGEX, 'g'),\n matches: ['quote1', 'name', 'quote2'],\n },\n fileContents,\n );\n const packages = findAllWithRegex(\n {\n value: new RegExp(POD_PACKAGE_REGEX, 'g'),\n matches: [\n 'quote1',\n 'name',\n 'quote2',\n 'extra',\n 'quote3',\n 'version',\n 'quote4',\n ],\n },\n fileContents,\n );\n\n const deps: CodePackageSdk[] = targets.map((target, ind) => ({\n name: target.name,\n type: CodePackageType.CocoaPods,\n softwareDevelopmentKits: packages\n .filter(\n (pkg) =>\n pkg.matchIndex > target.matchIndex &&\n (!targets[ind + 1] || pkg.matchIndex < targets[ind + 1].matchIndex),\n )\n .map((pkg) => ({\n name: pkg.name,\n version: pkg.version,\n })),\n }));\n\n return deps;\n },\n};\n","import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { findAllWithRegex } from '@transcend-io/type-utils';\nimport { dirname } from 'node:path';\n\nconst GRADLE_IMPLEMENTATION_REGEX =\n /implementation( *)('|\")(.+?):(.+?):(.+?|)('|\")/;\nconst GRADLE_PLUGIN_REGEX = /apply plugin: *('|\")(.+?)(:(.+?)|)('|\")/;\nconst GRADLE_IMPLEMENTATION_GROUP_REGEX =\n /implementation group:( *)('|\")(.+?)('|\"),( *)name:( *)('|\")(.+?)('|\"),( *)version:( *)('|\")(.+?)('|\")/;\nconst GRADLE_APPLICATION_NAME_REGEX = /applicationId( *)\"(.+?)\"/;\n\n/**\n * So far, there are three ways of defining dependencies that is supported\n * implementation group: 'org.eclipse.jdt', name: 'org.eclipse.jdt.core', version: '3.28.0'\n * or\n * implementation 'com.google.firebase:firebase-analytics:18.0.0'\n * or\n * apply plugin: 'com.google.gms.google-services'\n *\n * single and double quotes are both recognized\n */\nexport const gradle: CodeScanningConfig = {\n supportedFiles: ['build.gradle**'],\n ignoreDirs: [\n 'gradle-app.setting',\n 'gradle-wrapper.jar',\n 'gradle-wrapper.properties',\n ],\n scanFunction: (filePath) => {\n const fileContents = readFileSync(filePath, 'utf-8');\n const directory = dirname(filePath);\n\n const targets = findAllWithRegex(\n {\n value: new RegExp(GRADLE_IMPLEMENTATION_REGEX, 'g'),\n matches: ['space', 'quote1', 'name', 'path', 'version', 'quote2'],\n },\n fileContents,\n );\n const targetPlugins = findAllWithRegex(\n {\n value: new RegExp(GRADLE_PLUGIN_REGEX, 'g'),\n matches: ['quote1', 'name', 'group', 'version', 'quote2'],\n },\n fileContents,\n );\n const targetGroups = findAllWithRegex(\n {\n value: new RegExp(GRADLE_IMPLEMENTATION_GROUP_REGEX, 'g'),\n matches: [\n 'space1',\n 'quote1',\n 'group',\n 'quote2',\n 'space2',\n 'space3',\n 'quote3',\n 'name',\n 'quote4',\n 'space4',\n 'space5',\n 'quote5',\n 'version',\n 'quote6',\n ],\n },\n fileContents,\n );\n const applications = findAllWithRegex(\n {\n value: new RegExp(GRADLE_APPLICATION_NAME_REGEX, 'g'),\n matches: ['space', 'name'],\n },\n fileContents,\n );\n if (applications.length > 1) {\n throw new Error(`Expected only one applicationId per file: ${filePath}`);\n }\n\n return [\n {\n name: applications[0]?.name || directory.split('/').pop()!,\n softwareDevelopmentKits: [\n ...targets,\n ...targetGroups,\n ...targetPlugins,\n ].map((target) => ({\n name: target.name,\n version: target.version || undefined,\n })),\n },\n ];\n },\n};\n","import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { CodePackageType } from '@transcend-io/privacy-types';\nimport yaml from 'js-yaml';\nimport { dirname } 
from 'node:path';\n\n/**\n * Remove YAML comments from a string\n *\n * @param yamlString - YAML string\n * @returns String without comments\n */\nfunction removeYAMLComments(yamlString: string): string {\n return yamlString\n .split('\\n')\n .map((line) => {\n // Remove inline comments\n const commentIndex = line.indexOf('#');\n if (commentIndex > -1) {\n // Check if '#' is not inside a string\n if (\n !line.substring(0, commentIndex).includes('\"') &&\n !line.substring(0, commentIndex).includes(\"'\")\n ) {\n return line.substring(0, commentIndex).trim();\n }\n }\n return line;\n })\n .filter((line) => line.length > 0)\n .join('\\n');\n}\n\nexport const pubspec: CodeScanningConfig = {\n supportedFiles: ['pubspec.yml'],\n ignoreDirs: ['build'],\n scanFunction: (filePath) => {\n const directory = dirname(filePath);\n const fileContents = readFileSync(filePath, 'utf-8');\n const {\n name,\n description,\n dev_dependencies = {},\n dependencies = {},\n } = yaml.load(removeYAMLComments(fileContents)) as {\n /** Name */\n name?: string;\n /** Description */\n description?: string;\n /** Dev dependencies */\n dev_dependencies?: { [k in string]: number | Record<string, string> };\n /** Dependencies */\n dependencies?: { [k in string]: number | Record<string, string> };\n };\n return [\n {\n name: name || directory.split('/').pop()!,\n description,\n type: CodePackageType.RequirementsTxt,\n softwareDevelopmentKits: [\n ...Object.entries(dependencies).map(([name, version]) => ({\n name,\n version:\n typeof version === 'string'\n ? version\n : typeof version === 'number'\n ? version.toString()\n : version?.sdk,\n })),\n ...Object.entries(dev_dependencies).map(([name, version]) => ({\n name,\n version:\n typeof version === 'string'\n ? version\n : typeof version === 'number'\n ? version.toString()\n : version?.sdk,\n isDevDependency: true,\n })),\n ],\n },\n ];\n },\n};\n","import { readFileSync } from 'node:fs';\nimport { dirname } from 'node:path';\nimport { CodeScanningConfig } from '../types';\nimport { findAllWithRegex } from '@transcend-io/type-utils';\n\n/**\n * Kotlin DSL (build.gradle.kts) dependency & plugin parsing\n */\n\nconst KTS_DEP_CONFIGS =\n // eslint-disable-next-line max-len\n '(implementation|api|kapt|ksp|debugImplementation|releaseImplementation|androidTestImplementation|testImplementation|compileOnly|runtimeOnly)';\n\n// e.g. implementation(\"com.google.firebase:firebase-analytics:18.0.0\")\nconst KTS_DEP_STRING_COORDS_REGEX = new RegExp(\n `${KTS_DEP_CONFIGS}\\\\s*\\\\(\\\\s*[\"']([^\"':\\\\s]+):([^\"':\\\\s]+):?([^\"']*)[\"']\\\\s*\\\\)`,\n 'g',\n);\n// captures: [1]=config, [2]=group, [3]=artifact, [4]=version (may be '')\n\n// e.g. implementation(platform(\"com.google.firebase:firebase-bom:33.1.2\"))\nconst KTS_DEP_PLATFORM_REGEX = new RegExp(\n `${KTS_DEP_CONFIGS}\\\\s*\\\\(\\\\s*platform\\\\(\\\\s*[\"']([^\"':\\\\s]+):([^\"':\\\\s]+):?([^\"']*)[\"']\\\\s*\\\\)\\\\s*\\\\)`,\n 'g',\n);\n\n// e.g. 
implementation(libs.androidx.appcompat) / implementation(libs[\"androidx-core-ktx\"])\nconst KTS_DEP_LIBS_ALIAS_REGEX = new RegExp(\n `${KTS_DEP_CONFIGS}\\\\s*\\\\(\\\\s*libs(?:\\\\.[\\\\w\\\\-\\\\.]+|\\\\[[\"'][^\"']+[\"']\\\\])\\\\s*\\\\)`,\n 'g',\n);\n\n// Plugins:\n// plugins { id(\"com.google.gms.google-services\") version \"4.4.2\" apply false }\n// plugins { id(\"org.jetbrains.kotlin.android\") }\n// apply(plugin = \"newrelic\")\n// plugins { alias(libs.plugins.kotlin.android) }\nconst KTS_PLUGIN_ID_REGEX =\n /id\\s*\\(\\s*[\"']([^\"']+)[\"']\\s*\\)(?:\\s*version\\s*[\"']([^\"']+)[\"'])?/g;\nconst KTS_PLUGIN_APPLY_REGEX =\n /apply\\s*\\(\\s*plugin\\s*=\\s*[\"']([^\"']+)[\"']\\s*\\)/g;\nconst KTS_PLUGIN_ALIAS_REGEX =\n /plugins\\s*\\{[^}]*alias\\s*\\(\\s*libs(?:\\.plugins)?(?:\\.[\\w\\-.]+|\\[[\"'][^\"']+[\"']\\])\\s*\\)[^}]*\\}/g;\n\n// applicationId in Kotlin DSL:\n// applicationId = \"com.foo.bar\"\n// applicationId(\"com.foo.bar\")\nconst KTS_APPLICATION_ID_EQ_REGEX = /applicationId\\s*=\\s*[\"']([^\"']+)[\"']/g;\nconst KTS_APPLICATION_ID_CALL_REGEX =\n /applicationId\\s*\\(\\s*[\"']([^\"']+)[\"']\\s*\\)/g;\n\n/**\n * Input dep entry (partial)\n */\ntype DepInput = {\n /** Name of the dependency */\n name: string;\n /** Version of the dependency */\n version?: string;\n};\n\n/**\n * Helper to normalize a parsed dep entry\n *\n * @param name - name\n * @param version - version\n * @returns normalized entry\n */\nfunction depEntry(name: string, version?: string): DepInput {\n const v =\n version && version.trim().length > 0 && version !== '_'\n ? version.trim()\n : undefined;\n return { name, version: v };\n}\n\nexport const kotlin: CodeScanningConfig = {\n supportedFiles: ['**/build.gradle.kts', '**/*.gradle.kts'],\n ignoreDirs: [\n 'gradle-app.setting',\n 'gradle-wrapper.jar',\n 'gradle-wrapper.properties',\n ],\n scanFunction: (filePath) => {\n const fileContents = readFileSync(filePath, 'utf-8');\n const directory = dirname(filePath);\n\n // ---------- applicationId ----------\n const appIds = [\n ...findAllWithRegex(\n { value: KTS_APPLICATION_ID_EQ_REGEX, matches: ['name'] },\n fileContents,\n ),\n ...findAllWithRegex(\n { value: KTS_APPLICATION_ID_CALL_REGEX, matches: ['name'] },\n fileContents,\n ),\n ];\n if (appIds.length > 1) {\n throw new Error(`Expected only one applicationId per file: ${filePath}`);\n }\n const appName = appIds[0]?.name || directory.split('/').pop()!;\n\n // ---------- dependencies ----------\n const deps: Array<DepInput> = [];\n\n // \"group:artifact:version\"\n for (const m of fileContents.matchAll(KTS_DEP_STRING_COORDS_REGEX)) {\n const [, , group, artifact, version] = m;\n deps.push(depEntry(`${group}:${artifact}`, version));\n }\n\n // platform(\"group:artifact:version\")\n for (const m of fileContents.matchAll(KTS_DEP_PLATFORM_REGEX)) {\n const [, , group, artifact, version] = m;\n // Record as regular coord (you may prefer to tag as BoM separately)\n deps.push(depEntry(`${group}:${artifact}`, version));\n }\n\n // libs aliases (version catalogs) — keep alias as name, unknown version\n for (const m of fileContents.matchAll(KTS_DEP_LIBS_ALIAS_REGEX)) {\n // Grab the exact token as name (best-effort)\n const token = m[0]\n .replace(/^[^(]+\\(\\s*/, '')\n .replace(/\\)\\s*$/, '')\n .trim(); // e.g., libs.androidx.appcompat or libs[\"androidx-core-ktx\"]\n deps.push(depEntry(token));\n }\n\n // ---------- plugins ----------\n const plugins: Array<DepInput> = [];\n\n for (const m of fileContents.matchAll(KTS_PLUGIN_ID_REGEX)) {\n const [, pid, pver] 
= m;\n plugins.push(depEntry(pid, pver));\n }\n\n for (const m of fileContents.matchAll(KTS_PLUGIN_APPLY_REGEX)) {\n const [, pid] = m;\n plugins.push(depEntry(pid));\n }\n\n // alias(libs.plugins...) — keep alias token (no version)\n if (KTS_PLUGIN_ALIAS_REGEX.test(fileContents)) {\n // Collect all alias lines to preserve identifiers; light parse:\n const aliasMatches = fileContents.matchAll(\n /alias\\s*\\(\\s*(libs(?:\\.plugins)?(?:\\.[\\w\\-.]+|\\[[\"'][^\"']+[\"']\\]))\\s*\\)/g,\n );\n for (const m of aliasMatches) {\n plugins.push(depEntry(m[1]));\n }\n }\n\n // ---------- compose final list ----------\n // Merge deps + plugins as \"softwareDevelopmentKits\"\n const softwareDevelopmentKits = [...deps, ...plugins]\n // de-dup by name+version\n .reduce(\n (acc, cur) => {\n const key = `${cur.name}@@${cur.version || ''}`;\n if (!acc.map.has(key)) {\n acc.map.set(key, cur);\n acc.list.push(cur);\n }\n return acc;\n },\n {\n map: new Map<string, DepInput>(),\n list: [] as Array<DepInput>,\n },\n ).list;\n\n return [\n {\n name: appName,\n softwareDevelopmentKits,\n },\n ];\n },\n};\n"]}
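The source map above embeds the readable TypeScript for the code-scanning integrations bundled into this chunk (cocoaPods, gradle, pubspec, kotlin). As orientation for the minified output, here is a minimal standalone sketch of the CocoaPods strategy those sources describe: scan a Podfile for `target`/`pod` declarations and attach each pod to the nearest preceding target. The sample Podfile and the plain `matchAll` loop are illustrative stand-ins for the package's `findAllWithRegex` helper; only the two regexes are copied from the source.

```ts
// Regexes copied from the embedded cocoaPods.ts source; 'g' added for matchAll
const POD_TARGET_REGEX = /target ('|")(.*?)('|")/g;
const POD_PACKAGE_REGEX = /pod ('|")(.*?)('|")(, ('|")~> (.+?)('|")|)/g;

// Hypothetical sample input for illustration
const podfile = `
target 'MyApp' do
  pod 'Firebase/Analytics', '~> 10.0'
  pod 'Alamofire'
end
`;

const targets = [...podfile.matchAll(POD_TARGET_REGEX)].map((m) => ({
  name: m[2],
  index: m.index!,
}));
const pods = [...podfile.matchAll(POD_PACKAGE_REGEX)].map((m) => ({
  name: m[2],
  version: m[6], // undefined when no '~> x.y' constraint is present
  index: m.index!,
}));

// Each pod belongs to the nearest preceding target, mirroring the
// matchIndex comparison in the bundled scanFunction
const byTarget = targets.map((target, ind) => ({
  target: target.name,
  pods: pods.filter(
    (p) =>
      p.index > target.index &&
      (!targets[ind + 1] || p.index < targets[ind + 1].index),
  ),
}));
console.log(JSON.stringify(byTarget, null, 2));
```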
package/dist/{chunk-G6QNUBBK.cjs → chunk-7YHV6PUI.cjs}
@@ -1,2 +1,2 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var
-
//# sourceMappingURL=chunk-
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunk25JGKUUEcjs = require('./chunk-25JGKUUE.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkWF7EGPWLcjs = require('./chunk-WF7EGPWL.cjs');var _crypto = require('crypto'); var E = _interopRequireWildcard(_crypto);var _jsonwebtoken = require('jsonwebtoken'); var $ = _interopRequireWildcard(_jsonwebtoken);function A(c,p,t){let n=Buffer.from(t,"base64"),u=Buffer.from(p,"base64"),f="id-aes256-wrap-pad",l=Buffer.from("A65959A6","hex"),o=E.createCipheriv(f,u,l),s={encryptedIdentifier:Buffer.concat([o.update(c),o.final()]).toString("base64")};return $.sign(s,n,{algorithm:"HS384"})}var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _iots = require('io-ts'); var r = _interopRequireWildcard(_iots);var _bluebird = require('bluebird');var _cliprogress = require('cli-progress'); var _cliprogress2 = _interopRequireDefault(_cliprogress);var _typeutils = require('@transcend-io/type-utils');var x=/^[0-9][Y|N]([Y|N])[Y|N]$/,I= exports.c =r.record(r.string,r.union([r.boolean,r.literal("Auto")]));async function q({base64EncryptionKey:c,base64SigningKey:p,preferences:t,partition:n,concurrency:u=100,transcendUrl:f=_chunkWF7EGPWLcjs.s}){let l=_chunk25JGKUUEcjs.yc.call(void 0, f),o=t.filter(e=>e.usp&&!x.test(e.usp));if(o.length>0)throw new Error(`Received invalid usp strings: ${JSON.stringify(o,null,2)}`);let d=t.map((e,g)=>[e,g]).filter(([e])=>{if(!e.purposes)return!1;try{return _typeutils.decodeCodec.call(void 0, I,e.purposes),!1}catch (e2){return!0}});if(d.length>0)throw new Error(`Received invalid purpose maps: ${JSON.stringify(d,null,2)}`);let s=t.filter(e=>!e.usp&&!e.purposes);if(s.length>0)throw new Error(`Received invalid inputs, expected either purposes or usp to be defined: ${JSON.stringify(s,null,2)}`);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Uploading ${t.length} user preferences to partition ${n}`));let S=new Date().getTime(),m=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),v=0;m.start(t.length,0),await _bluebird.map.call(void 0, t,async({userId:e,confirmed:g="true",updated:C,prompted:P,purposes:N,...i})=>{let O=A(e,c,p),[,_]=i.usp?x.exec(i.usp)||[]:[],j={token:O,partition:n,consent:{confirmed:g==="true",purposes:N?_typeutils.decodeCodec.call(void 0, I,N):i.usp?{SaleOfInfo:_==="Y"}:{},...C?{updated:C==="true"}:{},...P?{prompted:P==="true"}:{},...i}};try{await l.post("sync",{json:j}).json()}catch(y){try{let h=JSON.parse(_optionalChain([y, 'optionalAccess', _2 => _2.response, 'optionalAccess', _3 => 
_3.body])||"{}");h.error&&_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Error: ${h.error}`))}catch (e3){}throw new Error(`Received an error from server: ${_optionalChain([y, 'optionalAccess', _4 => _4.response, 'optionalAccess', _5 => _5.body])||_optionalChain([y, 'optionalAccess', _6 => _6.message])}`)}v+=1,m.update(v)},{concurrency:u}),m.stop();let R=new Date().getTime()-S;_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully uploaded ${t.length} user preferences to partition ${n} in "${R/1e3}" seconds!`))}exports.a = A; exports.b = x; exports.c = I; exports.d = q;
+
//# sourceMappingURL=chunk-7YHV6PUI.cjs.map
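The readable source for this minified chunk is embedded in its source map (next hunk): the consent token is built by wrapping a user ID with NIST AES-KWP (RFC 5649) and signing the result into an HS384 JWT. Restated from that embedded source as a self-contained sketch:

```ts
import * as crypto from 'crypto';
import * as jwt from 'jsonwebtoken';

// Restated from the createConsentToken source embedded in the adjacent map
export function createConsentToken(
  userId: string,
  base64EncryptionKey: string,
  base64SigningKey: string,
): string {
  const signingKey = Buffer.from(base64SigningKey, 'base64');
  // AES-KWP expects a 32-byte key here (aes256 wrap-pad)
  const encryptionKey = Buffer.from(base64EncryptionKey, 'base64');

  // NIST AES-KWP with the fixed integrity-check initial value from RFC 5649
  const cipher = crypto.createCipheriv(
    'id-aes256-wrap-pad',
    encryptionKey,
    Buffer.from('A65959A6', 'hex'),
  );
  const encryptedIdentifier = Buffer.concat([
    cipher.update(userId),
    cipher.final(),
  ]).toString('base64');

  // Sign the payload into a JWT, HMAC'd with SHA-384
  return jwt.sign({ encryptedIdentifier }, signingKey, { algorithm: 'HS384' });
}
```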
package/dist/{chunk-G6QNUBBK.cjs.map → chunk-7YHV6PUI.cjs.map}
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-7YHV6PUI.cjs","../src/lib/consent-manager/createConsentToken.ts","../src/lib/consent-manager/uploadConsents.ts"],"names":["createConsentToken","userId","base64EncryptionKey","base64SigningKey","signingKey","encryptionKey","encryptionAlgorithm","iv","cipher","jwtPayload","USP_STRING_REGEX","PurposeMap","uploadConsents","preferences","partition","concurrency","transcendUrl","DEFAULT_TRANSCEND_CONSENT_API","transcendConsentApi","createTranscendConsentGotInstance","invalidUspStrings","pref"],"mappings":"AAAA,u/BAAyC,wDAAoC,wDAAyC,0ECA9F,4FACH,SAWLA,CAAAA,CACdC,CAAAA,CACAC,CAAAA,CACAC,CAAAA,CACQ,CAER,IAAMC,CAAAA,CAAa,MAAA,CAAO,IAAA,CAAKD,CAAAA,CAAkB,QAAQ,CAAA,CACnDE,CAAAA,CAAgB,MAAA,CAAO,IAAA,CAAKH,CAAAA,CAAqB,QAAQ,CAAA,CAGzDI,CAAAA,CAAsB,oBAAA,CAEtBC,CAAAA,CAAK,MAAA,CAAO,IAAA,CAAK,UAAA,CAAY,KAAK,CAAA,CAElCC,CAAAA,CAAgB,CAAA,CAAA,cAAA,CAAeF,CAAAA,CAAqBD,CAAAA,CAAeE,CAAE,CAAA,CAYrEE,CAAAA,CAAa,CACjB,mBAAA,CAV0B,MAAA,CAAO,MAAA,CAAO,CACxCD,CAAAA,CAAO,MAAA,CAAOP,CAAM,CAAA,CACpBO,CAAAA,CAAO,KAAA,CAAM,CACf,CAAC,CAAA,CAAE,QAAA,CAAS,QAAQ,CAQpB,CAAA,CAOA,OAJyB,CAAA,CAAA,IAAA,CAAKC,CAAAA,CAAYL,CAAAA,CAAY,CACpD,SAAA,CAAW,OACb,CAAC,CAGH,CC/CA,gFAAmB,qEACA,oCAEC,qGAGI,qDACI,IAIfM,CAAAA,CAAmB,0BAAA,CAEnBC,CAAAA,aAAe,CAAA,CAAA,MAAA,CACxB,CAAA,CAAA,MAAA,CACA,CAAA,CAAA,KAAA,CAAM,CAAG,CAAA,CAAA,OAAA,CAAW,CAAA,CAAA,OAAA,CAAQ,MAAM,CAAC,CAAC,CACxC,CAAA,CAOA,MAAA,SAAsBC,CAAAA,CAAe,CACnC,mBAAA,CAAAV,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,WAAA,CAAAU,CAAAA,CACA,SAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CAAAA,CAAc,GAAA,CACd,YAAA,CAAAC,CAAAA,CAAeC,mBACjB,CAAA,CAakB,CAEhB,IAAMC,CAAAA,CAAsBC,kCAAAA,CAA8C,CAAA,CAGpEC,CAAAA,CAAoBP,CAAAA,CAAY,MAAA,CACnCQ,CAAAA,EAASA,CAAAA,CAAK,GAAA,EAAO,CAACX,CAAAA,CAAiB,IAAA,CAAKW,CAAAA,CAAK,GAAG,CACvD,CAAA,CACA,EAAA,CAAID,CAAAA,CAAkB,MAAA,CAAS,CAAA,CAC7B,MAAM,IAAI,KAAA,CACR,CAAA,8BAAA,EAAiC,IAAA,CAAK,SAAA,CACpCA,CAAAA,CACA,IAAA,CACA,CACF,CAAC,CAAA,CAAA","file":"/home/runner/work/cli/cli/dist/chunk-7YHV6PUI.cjs","sourcesContent":[null,"import * as crypto from 'crypto';\nimport * as jwt from 'jsonwebtoken';\n\n/**\n * Function to create a consent manager token\n *\n * @see https://docs.transcend.io/docs/consent/reference/managed-consent-database\n * @param userId - User ID\n * @param base64EncryptionKey - Encryption key\n * @param base64SigningKey - Signing key\n * @returns Token\n */\nexport function createConsentToken(\n userId: string,\n base64EncryptionKey: string,\n base64SigningKey: string,\n): string {\n // Read on for where to find these keys\n const signingKey = Buffer.from(base64SigningKey, 'base64');\n const encryptionKey = Buffer.from(base64EncryptionKey, 'base64');\n\n // NIST's AES-KWP implementation { aes 48 } - see https://tools.ietf.org/html/rfc5649\n const encryptionAlgorithm = 'id-aes256-wrap-pad';\n // Initial Value for AES-KWP integrity check - see https://tools.ietf.org/html/rfc5649#section-3\n const iv = Buffer.from('A65959A6', 'hex');\n // Set up encryption algorithm\n const cipher = crypto.createCipheriv(encryptionAlgorithm, encryptionKey, iv);\n\n // Encrypt the userId and base64-encode the result\n const encryptedIdentifier = Buffer.concat([\n cipher.update(userId),\n cipher.final(),\n ]).toString('base64');\n\n // Create the JWT content - jwt.sign will add a 'iat' (issued at) field to the payload\n // If you wanted to add something manually, consider\n // const issued: Date = new Date();\n // const isoDate = issued.toISOString();\n const jwtPayload = {\n encryptedIdentifier,\n };\n\n // Create a JSON web token and 
HMAC it with SHA-384\n const consentToken = jwt.sign(jwtPayload, signingKey, {\n algorithm: 'HS384',\n });\n\n return consentToken;\n}\n","import { createTranscendConsentGotInstance } from '../graphql';\nimport colors from 'colors';\nimport * as t from 'io-ts';\nimport { DEFAULT_TRANSCEND_CONSENT_API } from '../../constants';\nimport { map } from 'bluebird';\nimport { createConsentToken } from './createConsentToken';\nimport { logger } from '../../logger';\nimport cliProgress from 'cli-progress';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport type { ConsentPreferenceUpload } from './types';\nimport { ConsentPreferencesBody } from '@transcend-io/airgap.js-types';\n\nexport const USP_STRING_REGEX = /^[0-9][Y|N]([Y|N])[Y|N]$/;\n\nexport const PurposeMap = t.record(\n t.string,\n t.union([t.boolean, t.literal('Auto')]),\n);\n\n/**\n * Upload a set of consent preferences\n *\n * @param options - Options\n */\nexport async function uploadConsents({\n base64EncryptionKey,\n base64SigningKey,\n preferences,\n partition,\n concurrency = 100,\n transcendUrl = DEFAULT_TRANSCEND_CONSENT_API,\n}: {\n /** base64 encryption key */\n base64EncryptionKey: string;\n /** base64 signing key */\n base64SigningKey: string;\n /** Partition key */\n partition: string;\n /** Sombra API key authentication */\n preferences: ConsentPreferenceUpload[];\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Concurrency limit for approving */\n concurrency?: number;\n}): Promise<void> {\n // Create connection to API\n const transcendConsentApi = createTranscendConsentGotInstance(transcendUrl);\n\n // Ensure usp strings are valid\n const invalidUspStrings = preferences.filter(\n (pref) => pref.usp && !USP_STRING_REGEX.test(pref.usp),\n );\n if (invalidUspStrings.length > 0) {\n throw new Error(\n `Received invalid usp strings: ${JSON.stringify(\n invalidUspStrings,\n null,\n 2,\n )}`,\n );\n }\n\n // Ensure purpose maps are valid\n const invalidPurposeMaps = preferences\n .map((pref, ind) => [pref, ind] as [ConsentPreferenceUpload, number])\n .filter(([pref]) => {\n if (!pref.purposes) {\n return false;\n }\n try {\n decodeCodec(PurposeMap, pref.purposes);\n return false;\n } catch {\n return true;\n }\n });\n if (invalidPurposeMaps.length > 0) {\n throw new Error(\n `Received invalid purpose maps: ${JSON.stringify(\n invalidPurposeMaps,\n null,\n 2,\n )}`,\n );\n }\n\n // Ensure usp or preferences are provided\n const invalidInputs = preferences.filter(\n (pref) => !pref.usp && !pref.purposes,\n );\n if (invalidInputs.length > 0) {\n throw new Error(\n `Received invalid inputs, expected either purposes or usp to be defined: ${JSON.stringify(\n invalidInputs,\n null,\n 2,\n )}`,\n );\n }\n\n logger.info(\n colors.magenta(\n `Uploading ${preferences.length} user preferences to partition ${partition}`,\n ),\n );\n\n // Time duration\n const t0 = new Date().getTime();\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Build a GraphQL client\n let total = 0;\n progressBar.start(preferences.length, 0);\n await map(\n preferences,\n async ({\n userId,\n confirmed = 'true',\n updated,\n prompted,\n purposes,\n ...consent\n }) => {\n const token = createConsentToken(\n userId,\n base64EncryptionKey,\n base64SigningKey,\n );\n\n // parse usp string\n const [, saleStatus] = consent.usp\n ? 
USP_STRING_REGEX.exec(consent.usp) || []\n : [];\n\n const input = {\n token,\n partition,\n consent: {\n confirmed: confirmed === 'true',\n purposes: purposes\n ? decodeCodec(PurposeMap, purposes)\n : consent.usp\n ? { SaleOfInfo: saleStatus === 'Y' }\n : {},\n ...(updated ? { updated: updated === 'true' } : {}),\n ...(prompted ? { prompted: prompted === 'true' } : {}),\n ...consent,\n },\n } as ConsentPreferencesBody;\n\n // Make the request\n try {\n await transcendConsentApi\n .post('sync', {\n json: input,\n })\n .json();\n } catch (err) {\n try {\n const parsed = JSON.parse(err?.response?.body || '{}');\n if (parsed.error) {\n logger.error(colors.red(`Error: ${parsed.error}`));\n }\n } catch (e) {\n // continue\n }\n throw new Error(\n `Received an error from server: ${\n err?.response?.body || err?.message\n }`,\n );\n }\n\n total += 1;\n progressBar.update(total);\n },\n { concurrency },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully uploaded ${\n preferences.length\n } user preferences to partition ${partition} in \"${\n totalTime / 1000\n }\" seconds!`,\n ),\n );\n}\n"]}
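One detail worth pulling out of the `uploadConsents` source above: when a record supplies a US Privacy (USP) string rather than a purpose map, only the third character (the sale opt-out signal) is consulted. A sketch of that fallback, with the regex copied from the source; `purposesFromUsp` is a hypothetical helper name, not an export of the package:

```ts
// Copied from the source above; captures the third character (sale opt-out)
const USP_STRING_REGEX = /^[0-9][Y|N]([Y|N])[Y|N]$/;

// Mirrors the fallback branch in uploadConsents when no purpose map is given
function purposesFromUsp(usp: string): { SaleOfInfo: boolean } {
  const match = USP_STRING_REGEX.exec(usp);
  if (!match) {
    throw new Error(`Received invalid usp strings: ${usp}`);
  }
  const [, saleStatus] = match;
  return { SaleOfInfo: saleStatus === 'Y' };
}

console.log(purposesFromUsp('1YYN')); // { SaleOfInfo: true }
console.log(purposesFromUsp('1YNN')); // { SaleOfInfo: false }
```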
package/dist/{chunk-C3ZJMA3A.cjs → chunk-AGAP55PJ.cjs}
@@ -1,4 +1,4 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkWF7EGPWLcjs = require('./chunk-WF7EGPWL.cjs');var _chunkQ7I37FJVcjs = require('./chunk-Q7I37FJV.cjs');var _core = require('@stricli/core');var _privacytypes = require('@transcend-io/privacy-types');var _ms = require('ms'); var _ms2 = _interopRequireDefault(_ms);function S(e){if(!/^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test(e))throw new Error(`Invalid UUID format: ${e}`);return e}function n(e){try{return new URL(e).toString().replace(/\/$/,"")}catch (e2){throw new Error(`Invalid URL format: ${e}`)}}function T(e){return e.split(",").map(r=>r.trim()).filter(r=>r.length>0)}function y(e){let r=new Date(e);if(Number.isNaN(r.getTime()))throw new TypeError(`Invalid date: ${e}. Try using the ISO 8601 format (YYYY-MM-DDTHH:MM:SS.SSSZ)`);return r}function k(e){if(typeof e=="number"&&Number.isFinite(e))return Math.round(e*1e3);if(typeof e=="string"){let r=e.trim();if(r==="")throw new Error('Invalid duration. Examples: "45", "2d", "1h", "90 minutes", "10s".');let t=Number(r);if(r!==""&&Number.isFinite(t))return Math.round(t*1e3);let a;try{a=_ms2.default.call(void 0, r)}catch (e3){throw new Error('Invalid duration. Examples: "45", "2d", "1h", "90 minutes", "10s".')}if(typeof a=="number"&&Number.isFinite(a))return a}throw new Error('Invalid duration. Examples: "45", "2d", "1h", "90 minutes", "10s".')}var d=({scopes:e,requiresSiloScope:r=!1})=>{let t={kind:"parsed",parse:String,brief:"The Transcend API key."};return r&&(t.brief+=" This key must be associated with the data silo(s) being operated on."),e==="Varies"?{...t,brief:`${t.brief} The scopes required will vary depending on the operation performed. If in doubt, the ${_privacytypes.TRANSCEND_SCOPES[_privacytypes.ScopeName.FullAdmin].title} scope will always work.`}:e.length===0?{...t,brief:`${t.brief} No scopes are required for this command.`}:{...t,brief:`${t.brief} Requires scopes: ${e.map(a=>`"${_privacytypes.TRANSCEND_SCOPES[a].title}"`).join(", ")}`}},u= exports.f =(e=_chunkWF7EGPWLcjs.r)=>({kind:"parsed",parse:n,brief:"URL of the Transcend backend. Use https://api.us.transcend.io for US hosting",default:e}),x= exports.g =(e=_chunkWF7EGPWLcjs.s)=>({kind:"parsed",parse:n,brief:"URL of the Transcend consent backend. Use https://consent.us.transcend.io for US hosting",default:e}),A= exports.h =()=>({kind:"parsed",parse:String,brief:"The Sombra internal key, use for additional authentication when self-hosting Sombra",optional:!0});var g=["dataSilos","enrichers","templates","apiKeys"],I= exports.j =Object.values(_privacytypes.ConsentTrackerStatus),R= exports.k =_core.buildCommand.call(void 0, {loader:async()=>{let{pull:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-XJCUQGWV.cjs")));return e},parameters:{flags:{auth:d({scopes:"Varies"}),resources:{kind:"enum",values:["all",...Object.values(_chunkQ7I37FJVcjs.d)],brief:`The different resource types to pull in. 
Defaults to ${g.join(",")}.`,variadic:",",optional:!0},file:{kind:"parsed",parse:String,brief:"Path to the YAML file to pull into",default:"./transcend.yml"},transcendUrl:u(),dataSiloIds:{kind:"parsed",parse:String,variadic:",",brief:"The UUIDs of the data silos that should be pulled into the YAML file",optional:!0},integrationNames:{kind:"parsed",parse:String,variadic:",",brief:"The types of integrations to pull down",optional:!0},trackerStatuses:{kind:"enum",values:Object.values(_privacytypes.ConsentTrackerStatus),variadic:",",brief:"The statuses of consent manager trackers to pull down. Defaults to all statuses.",optional:!0},pageSize:{kind:"parsed",parse:_core.numberParser,brief:"The page size to use when paginating over the API",default:"50"},skipDatapoints:{kind:"boolean",brief:"When true, skip pulling in datapoints alongside data silo resource",default:!1},skipSubDatapoints:{kind:"boolean",brief:"When true, skip pulling in subDatapoints alongside data silo resource",default:!1},includeGuessedCategories:{kind:"boolean",brief:"When true, included guessed data categories that came from the content classifier",default:!1},debug:{kind:"boolean",brief:"Set to true to include debug logs while pulling the configuration",default:!1}}},docs:{brief:"Pull metadata from Transcend into transcend.yml",fullDescription:`Generates a transcend.yml by pulling the configuration from your Transcend instance.
The API key needs various scopes depending on the resources being pulled (see the CLI's README for more details).
@@ -6,4 +6,4 @@ This command can be helpful if you are looking to:
- Copy your data into another instance
- Generate a transcend.yml file as a starting point to maintain parts of your data inventory in code.`}});exports.a = S; exports.b = T; exports.c = y; exports.d = k; exports.e = d; exports.f = u; exports.g = x; exports.h = A; exports.i = g; exports.j = I; exports.k = R;
-
//# sourceMappingURL=chunk-
+
//# sourceMappingURL=chunk-AGAP55PJ.cjs.map
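The new `chunk-AGAP55PJ.cjs` above also carries a duration parser (function `k` in the minified output): finite numbers and bare numeric strings are read as seconds, anything else is delegated to the `ms` package, and the result is returned in milliseconds. A de-minified sketch; `durationParser` is a guessed name for the minified function:

```ts
import ms, { type StringValue } from 'ms';

const INVALID =
  'Invalid duration. Examples: "45", "2d", "1h", "90 minutes", "10s".';

// De-minified sketch of function `k` from the chunk above
function durationParser(input: string | number): number {
  // Finite numbers are interpreted as seconds
  if (typeof input === 'number' && Number.isFinite(input)) {
    return Math.round(input * 1000);
  }
  if (typeof input === 'string') {
    const trimmed = input.trim();
    if (trimmed === '') {
      throw new Error(INVALID);
    }
    // Bare numeric strings are also seconds
    const asNumber = Number(trimmed);
    if (Number.isFinite(asNumber)) {
      return Math.round(asNumber * 1000);
    }
    // Everything else goes through the `ms` package ("2d", "90 minutes", ...)
    let parsed: number | undefined;
    try {
      parsed = ms(trimmed as StringValue);
    } catch {
      throw new Error(INVALID);
    }
    if (typeof parsed === 'number' && Number.isFinite(parsed)) {
      return parsed;
    }
  }
  throw new Error(INVALID);
}

console.log(durationParser('45')); // 45000
console.log(durationParser('2d')); // 172800000
```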
package/dist/{chunk-C3ZJMA3A.cjs.map → chunk-AGAP55PJ.cjs.map}
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-AGAP55PJ.cjs","../src/commands/inventory/pull/command.ts","../src/lib/cli/parsers.ts"],"names":["uuidParser","input"],"mappings":"AAAA,mfAA0C,wDAAyC,qCCAxC,2DACN,gECDiB,SAStCA,CAAAA,CAAWC,CAAAA,CAAuB,CAGhD,EAAA,CAAI,CADF,4EAAA,CACa,IAAA,CAAKA,CAAK,CAAA,CACvB,MAAM,IAAI,KAAA,CAAM,CAAA,qBAAA,EAAwBA,CAAK,CAAA,CAAA;ADwF5B;AAAA;AAAA;AAAA;AAAA;AAAA;AASpB,qGAAA","file":"/home/runner/work/cli/cli/dist/chunk-AGAP55PJ.cjs","sourcesContent":[null,"import { buildCommand, numberParser } from '@stricli/core';\nimport { ConsentTrackerStatus } from '@transcend-io/privacy-types';\nimport {\n createAuthParameter,\n createTranscendUrlParameter,\n} from '../../../lib/cli/common-parameters';\nimport { TranscendPullResource } from '../../../enums';\n\nexport const DEFAULT_TRANSCEND_PULL_RESOURCES = [\n TranscendPullResource.DataSilos,\n TranscendPullResource.Enrichers,\n TranscendPullResource.Templates,\n TranscendPullResource.ApiKeys,\n];\n\nexport const DEFAULT_CONSENT_TRACKER_STATUSES =\n Object.values(ConsentTrackerStatus);\n\nexport const pullCommand = buildCommand({\n loader: async () => {\n const { pull } = await import('./impl');\n return pull;\n },\n parameters: {\n flags: {\n auth: createAuthParameter({\n scopes: 'Varies',\n }),\n resources: {\n kind: 'enum',\n values: ['all', ...Object.values(TranscendPullResource)],\n brief: `The different resource types to pull in. Defaults to ${DEFAULT_TRANSCEND_PULL_RESOURCES.join(\n ',',\n )}.`,\n variadic: ',',\n optional: true,\n },\n file: {\n kind: 'parsed',\n parse: String,\n brief: 'Path to the YAML file to pull into',\n default: './transcend.yml',\n },\n transcendUrl: createTranscendUrlParameter(),\n dataSiloIds: {\n kind: 'parsed',\n parse: String,\n variadic: ',',\n brief:\n 'The UUIDs of the data silos that should be pulled into the YAML file',\n optional: true,\n },\n integrationNames: {\n kind: 'parsed',\n parse: String,\n variadic: ',',\n brief: 'The types of integrations to pull down',\n optional: true,\n },\n trackerStatuses: {\n kind: 'enum',\n values: Object.values(ConsentTrackerStatus),\n variadic: ',',\n brief:\n 'The statuses of consent manager trackers to pull down. 
Defaults to all statuses.',\n optional: true,\n },\n pageSize: {\n kind: 'parsed',\n parse: numberParser,\n brief: 'The page size to use when paginating over the API',\n default: '50',\n },\n skipDatapoints: {\n kind: 'boolean',\n brief:\n 'When true, skip pulling in datapoints alongside data silo resource',\n default: false,\n },\n skipSubDatapoints: {\n kind: 'boolean',\n brief:\n 'When true, skip pulling in subDatapoints alongside data silo resource',\n default: false,\n },\n includeGuessedCategories: {\n kind: 'boolean',\n brief:\n 'When true, included guessed data categories that came from the content classifier',\n default: false,\n },\n debug: {\n kind: 'boolean',\n brief:\n 'Set to true to include debug logs while pulling the configuration',\n default: false,\n },\n },\n },\n docs: {\n brief: 'Pull metadata from Transcend into transcend.yml',\n fullDescription: `Generates a transcend.yml by pulling the configuration from your Transcend instance.\n\nThe API key needs various scopes depending on the resources being pulled (see the CLI's README for more details).\n\nThis command can be helpful if you are looking to:\n\n- Copy your data into another instance\n- Generate a transcend.yml file as a starting point to maintain parts of your data inventory in code.`,\n },\n});\n","import ms, { type StringValue as MsStringValue } from 'ms';\n\n/**\n * Validates and returns a UUID string.\n *\n * @param input - The input string to validate as UUID\n * @returns The validated UUID string\n * @throws Error if input is not a valid UUID\n */\nexport function uuidParser(input: string): string {\n const uuidRegex =\n /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;\n if (!uuidRegex.test(input)) {\n throw new Error(`Invalid UUID format: ${input}`);\n }\n return input;\n}\n\n/**\n * Validates and returns a URL string.\n *\n * @param input - The input string to validate as URL\n * @returns The validated URL string\n * @throws Error if input is not a valid URL\n */\nexport function urlParser(input: string): string {\n try {\n const url = new URL(input);\n return url.toString().replace(/\\/$/, '');\n } catch {\n throw new Error(`Invalid URL format: ${input}`);\n }\n}\n\n/**\n * Parse a comma-separated string to array.\n * NOTE: Prefer using `variadic` for list arguments instead of this function. This should only be used for arguments which have a default value.\n *\n * @param input - The comma-separated string to parse\n * @returns Array of trimmed, non-empty strings\n */\nexport function arrayParser(input: string): string[] {\n return input\n .split(',')\n .map((s) => s.trim())\n .filter((s) => s.length > 0);\n}\n\n/**\n * Parse a date string to a Date object.\n *\n * @param input - The date string to parse\n * @returns The parsed Date object\n * @throws TypeError if input is not a valid date\n */\nexport function dateParser(input: string): Date {\n const date = new Date(input);\n if (Number.isNaN(date.getTime())) {\n throw new TypeError(\n `Invalid date: ${input}. 
Try using the ISO 8601 format (YYYY-MM-DDTHH:MM:SS.SSSZ)`,\n );\n }\n return date;\n}\n\n/**\n * Parse a duration string to milliseconds.\n * Accepts concise/natural-ish strings (powered by `ms`) and returns milliseconds.\n * Examples: \"3600\", \"2d\", \"1h\", \"90 minutes\", \"10s\".\n *\n * @param input - The duration to parse\n * @returns The parsed duration in milliseconds\n * @throws Error if input is not a valid duration\n */\nexport function parseDurationToMs(input: unknown): number {\n if (typeof input === 'number' && Number.isFinite(input)) {\n // backward-compat: numbers => seconds\n return Math.round(input * 1000);\n }\n\n if (typeof input === 'string') {\n const trimmed = input.trim();\n // empty string → our standardized error (avoid ms throwing its own)\n if (trimmed === '') {\n throw new Error(\n 'Invalid duration. Examples: \"45\", \"2d\", \"1h\", \"90 minutes\", \"10s\".',\n );\n }\n\n // bare numeric string => seconds (backward-compat)\n const asNumber = Number(trimmed);\n if (trimmed !== '' && Number.isFinite(asNumber)) {\n return Math.round(asNumber * 1000);\n }\n\n // let ms parse human strings\n let parsed: number | undefined;\n try {\n parsed = ms(trimmed as MsStringValue);\n } catch {\n // normalize ms' error to ours\n throw new Error(\n 'Invalid duration. Examples: \"45\", \"2d\", \"1h\", \"90 minutes\", \"10s\".',\n );\n }\n if (typeof parsed === 'number' && Number.isFinite(parsed)) {\n return parsed;\n }\n }\n\n throw new Error(\n 'Invalid duration. Examples: \"45\", \"2d\", \"1h\", \"90 minutes\", \"10s\".',\n );\n}\n"]}
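For reference, the map's `sourcesContent` also restores the UUID and URL validators from `src/lib/cli/parsers.ts` (the URL parser backs parameters such as `transcendUrl` above). An illustrative condensation; these are internal helpers, not part of the package's public API:

```ts
// Condensed from src/lib/cli/parsers.ts as inlined above.
const UUID_REGEX =
  /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;

function uuidParser(input: string): string {
  if (!UUID_REGEX.test(input)) {
    throw new Error(`Invalid UUID format: ${input}`);
  }
  return input;
}

function urlParser(input: string): string {
  try {
    // new URL() validates and normalizes; the trailing slash is stripped
    // so backend URLs stay canonical.
    return new URL(input).toString().replace(/\/$/, '');
  } catch {
    throw new Error(`Invalid URL format: ${input}`);
  }
}

urlParser('https://api.us.transcend.io/'); // => 'https://api.us.transcend.io'
uuidParser('123e4567-e89b-12d3-a456-426614174000'); // => input, unchanged
```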
@@ -1,4 +1,4 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunk25JGKUUEcjs = require('./chunk-25JGKUUE.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkWF7EGPWLcjs = require('./chunk-WF7EGPWL.cjs');var _privacytypes = require('@transcend-io/privacy-types');var _cliprogress = require('cli-progress'); var _cliprogress2 = _interopRequireDefault(_cliprogress);var _graphqlrequest = require('graphql-request');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _bluebird = require('bluebird');async function q(u,{dataSiloIds:e=[],includeGuessedCategories:l,includeAttributes:a,parentCategories:c=[],subCategories:t=[],pageSize:p=1e3}={}){let n=[],m=new Date().getTime(),d=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),s={...c.length>0?{category:c}:{},...t.length>0?{subCategoryIds:t}:{},...c.length+t.length>0&&!l?{status:_privacytypes.SubDataPointDataSubCategoryGuessStatus.Approved}:{},...e.length>0?{dataSilos:e}:{}},{subDataPoints:{totalCount:o}}=await _chunk25JGKUUEcjs.rg.call(void 0, u,_chunk25JGKUUEcjs.d,{filterBy:s});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("[Step 1/3] Pulling in all subdatapoints")),d.start(o,0);let y=0,D=!1,r,b=0;do try{let{subDataPoints:{nodes:P}}=await _chunk25JGKUUEcjs.rg.call(void 0, u,_graphqlrequest.gql`
query TranscendCliSubDataPointCsvExport(
$filterBy: SubDataPointFiltersInput
$first: Int!
@@ -41,7 +41,7 @@
}
}
}
-
`,{first:p,offset:b,filterBy:{...s}});r=_optionalChain([P, 'access', _2 => _2[P.length-1], 'optionalAccess', _3 => _3.id]),n.push(...P),D=P.length===p,y+=P.length,b+=P.length,d.update(y)}catch(P){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${r} and offset ${b}`)),P}while(D);d.stop();let C=new Date().getTime()-m,g=
+
`,{first:p,offset:b,filterBy:{...s}});r=_optionalChain([P, 'access', _2 => _2[P.length-1], 'optionalAccess', _3 => _3.id]),n.push(...P),D=P.length===p,y+=P.length,b+=P.length,d.update(y)}catch(P){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${r} and offset ${b}`)),P}while(D);d.stop();let C=new Date().getTime()-m,g=_chunkWF7EGPWLcjs.g.call(void 0, n,"name");return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${g.length} subdatapoints in ${C/1e3} seconds!`)),g}async function F(u,{dataPointIds:e=[],pageSize:l=100}){let a=[],c=new Date().getTime(),t=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`[Step 2/3] Fetching metadata for ${e.length} datapoints`));let p=_chunkWF7EGPWLcjs.b.call(void 0, e,l);t.start(e.length,0);let n=0;await _bluebird.mapSeries.call(void 0, p,async s=>{try{let{dataPoints:{nodes:o}}=await _chunk25JGKUUEcjs.rg.call(void 0, u,_chunk25JGKUUEcjs.g,{first:l,filterBy:{ids:s}});a.push(...o),n+=s.length,t.update(n)}catch(o){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for IDs ${s.join(", ")}`)),o}}),t.stop();let d=new Date().getTime()-c;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${a.length} dataPoints in ${d/1e3} seconds!`)),a}async function Q(u,{dataSiloIds:e=[],pageSize:l=100}){let a=[],c=new Date().getTime(),t=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`[Step 3/3] Fetching metadata for ${e.length} data silos`));let p=_chunkWF7EGPWLcjs.b.call(void 0, e,l);t.start(e.length,0);let n=0;await _bluebird.mapSeries.call(void 0, p,async s=>{try{let{dataSilos:{nodes:o}}=await _chunk25JGKUUEcjs.rg.call(void 0, u,_chunk25JGKUUEcjs.j,{first:l,filterBy:{ids:s}});a.push(...o),n+=s.length,t.update(n)}catch(o){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching data silos for IDs ${s.join(", ")}`)),o}}),t.stop();let d=new Date().getTime()-c;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${a.length} data silos in ${d/1e3} seconds!`)),a}async function Y(u,{dataSiloIds:e=[],includeGuessedCategories:l,includeAttributes:a,parentCategories:c=[],subCategories:t=[],pageSize:p=1e3}={}){let n=await q(u,{dataSiloIds:e,includeGuessedCategories:l,includeAttributes:a,parentCategories:c,subCategories:t,pageSize:p}),m=_chunkWF7EGPWLcjs.j.call(void 0, n.map(r=>r.dataPointId)),d=await F(u,{dataPointIds:m}),s=_chunkWF7EGPWLcjs.e.call(void 0, d,"id"),o=_chunkWF7EGPWLcjs.j.call(void 0, n.map(r=>r.dataSiloId)),y=await Q(u,{dataSiloIds:o}),D=_chunkWF7EGPWLcjs.e.call(void 0, y,"id");return n.map(r=>({...r,dataPoint:s[r.dataPointId],dataSilo:D[r.dataSiloId]}))}async function nt(u,{dataSiloIds:e=[],status:l,subCategories:a=[],includeEncryptedSnippets:c,pageSize:t=100}={}){let p=[],n=new Date().getTime(),m=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),d={...a.length>0?{subCategoryIds:a}:{},...l?{status:l}:{},...e.length>0?{dataSilos:e}:{}},{unstructuredSubDataPointRecommendations:{totalCount:s}}=await _chunk25JGKUUEcjs.rg.call(void 0, u,_chunk25JGKUUEcjs.h,{filterBy:d});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("[Step 1/3] Pulling in all subdatapoints")),m.start(s,0);let o=0,y=!1,D,r=0;do try{let{unstructuredSubDataPointRecommendations:{nodes:g}}=await _chunk25JGKUUEcjs.rg.call(void 0, 
u,_graphqlrequest.gql`
query TranscendCliUnstructuredSubDataPointRecommendationCsvExport(
$filterBy: UnstructuredSubDataPointRecommendationsFilterInput
$first: Int!
@@ -71,5 +71,5 @@
}
}
}
-
`,{first:t,offset:r,filterBy:{...d}});D=_optionalChain([g, 'access', _4 => _4[g.length-1], 'optionalAccess', _5 => _5.id]),p.push(...g),y=g.length===t,o+=g.length,r+=g.length,m.update(o)}catch(g){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${D} and offset ${r}`)),g}while(y);m.stop();let T=new Date().getTime()-n,C=
-//# sourceMappingURL=chunk-GDPRENOQ.cjs.map
+
`,{first:t,offset:r,filterBy:{...d}});D=_optionalChain([g, 'access', _4 => _4[g.length-1], 'optionalAccess', _5 => _5.id]),p.push(...g),y=g.length===t,o+=g.length,r+=g.length,m.update(o)}catch(g){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${D} and offset ${r}`)),g}while(y);m.stop();let T=new Date().getTime()-n,C=_chunkWF7EGPWLcjs.g.call(void 0, p,"name");return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${C.length} subdatapoints in ${T/1e3} seconds!`)),C}exports.a = Y; exports.b = nt;
+//# sourceMappingURL=chunk-GZSDZRAF.cjs.map
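The hunks above suggest the only changes to this chunk are the rehashed `require` targets and the source-map URL; the export loop itself is unchanged. Its shape, per the inlined sources: request `pageSize` nodes per round trip, advance the offset by however many nodes actually arrived, and stop on the first short page. A generic, illustrative sketch (the real code additionally drives a cli-progress bar and tracks a cursor for a pending cursor-pagination TODO):

```ts
// Generic shape of the do/while offset pagination used by
// pullSubDatapoints and pullUnstructuredSubDataPointRecommendations.
// `fetchPage` stands in for the GraphQL round trip (illustrative name).
async function fetchAllPages<T>(
  fetchPage: (first: number, offset: number) => Promise<T[]>,
  pageSize = 1000,
): Promise<T[]> {
  const all: T[] = [];
  let offset = 0;
  let shouldContinue = false;
  do {
    const nodes = await fetchPage(pageSize, offset);
    all.push(...nodes);
    shouldContinue = nodes.length === pageSize; // a short page means done
    offset += nodes.length;
  } while (shouldContinue);
  return all;
}
```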
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-GDPRENOQ.cjs","../src/lib/data-inventory/pullAllDatapoints.ts","../src/lib/data-inventory/pullUnstructuredSubDataPointRecommendations.ts"],"names":["pullSubDatapoints","client","dataSiloIds","includeGuessedCategories","includeAttributes","parentCategories","subCategories","pageSize","subDataPoints","t0","progressBar","cliProgress","filterBy","SubDataPointDataSubCategoryGuessStatus","totalCount","makeGraphQLRequest","SUB_DATA_POINTS_COUNT","logger","colors","total","shouldContinue","cursor","offset","nodes","gql","err"],"mappings":"AAAA,quBAAqE,wDAAyC,wDAA8D,2DCKrK,qGACiB,iDACJ,gFACD,oCAWO,MAuE1B,SAAeA,CAAAA,CACbC,CAAAA,CACA,CACE,WAAA,CAAAC,CAAAA,CAAc,CAAC,CAAA,CACf,wBAAA,CAAAC,CAAAA,CACA,iBAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CAAmB,CAAC,CAAA,CACpB,aAAA,CAAAC,CAAAA,CAAgB,CAAC,CAAA,CACjB,QAAA,CAAAC,CAAAA,CAAW,GACb,CAAA,CAGI,CAAC,CAAA,CAC8B,CACnC,IAAMC,CAAAA,CAA0C,CAAC,CAAA,CAG3CC,CAAAA,CAAK,IAAI,IAAA,CAAK,CAAA,CAAE,OAAA,CAAQ,CAAA,CAGxBC,CAAAA,CAAc,IAAIC,qBAAAA,CAAY,SAAA,CAClC,CAAC,CAAA,CACDA,qBAAAA,CAAY,OAAA,CAAQ,cACtB,CAAA,CAGMC,CAAAA,CAAW,CACf,GAAIP,CAAAA,CAAiB,MAAA,CAAS,CAAA,CAAI,CAAE,QAAA,CAAUA,CAAiB,CAAA,CAAI,CAAC,CAAA,CACpE,GAAIC,CAAAA,CAAc,MAAA,CAAS,CAAA,CAAI,CAAE,cAAA,CAAgBA,CAAc,CAAA,CAAI,CAAC,CAAA,CAEpE,GAAID,CAAAA,CAAiB,MAAA,CAASC,CAAAA,CAAc,MAAA,CAAS,CAAA,EACrD,CAACH,CAAAA,CAEG,CAAE,MAAA,CAAQU,oDAAAA,CAAuC,QAAS,CAAA,CAC1D,CAAC,CAAA,CACL,GAAIX,CAAAA,CAAY,MAAA,CAAS,CAAA,CAAI,CAAE,SAAA,CAAWA,CAAY,CAAA,CAAI,CAAC,CAC7D,CAAA,CAGM,CACJ,aAAA,CAAe,CAAE,UAAA,CAAAY,CAAW,CAC9B,CAAA,CAAI,MAAMC,kCAAAA,CAMPd,CAAQe,mBAAAA,CAAuB,CAChC,QAAA,CAAAJ,CACF,CAAC,CAAA,CAEDK,mBAAAA,CAAO,IAAA,CAAKC,gBAAAA,CAAO,OAAA,CAAQ,yCAAyC,CAAC,CAAA,CAErER,CAAAA,CAAY,KAAA,CAAMI,CAAAA,CAAY,CAAC,CAAA,CAC/B,IAAIK,CAAAA,CAAQ,CAAA,CACRC,CAAAA,CAAiB,CAAA,CAAA,CACjBC,CAAAA,CACAC,CAAAA,CAAS,CAAA,CACb,GACE,GAAI,CACF,GAAM,CACJ,aAAA,CAAe,CAAE,KAAA,CAAAC,CAAM,CACzB,CAAA,CAAI,MAAMR,kCAAAA,CAORd,CACAuB,mBAAAA,CAAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAA,EA2BUrB,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAQA,EACN,CAAA;AAAA,gBAAA,EAEEC,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAMA,EACN,CAAA;AAAA;AAAA;AAAA;AAAA,QAAA,CAAA,CAKR,CACE,KAAA,CAAOG,CAAAA,CACP,MAAA,CAAAe,CAAAA,CACA,QAAA,CAAU,CACR,GAAGV,CAGL,CACF,CACF,CAAA,CAEAS,CAAAA,iBAASE,CAAAA,qBAAMA,CAAAA,CAAM,MAAA,CAAS,CAAC,CAAA,6BAAG,IAAA,CAClCf,CAAAA,CAAc,IAAA,CAAK,GAAGe,CAAK,CAAA,CAC3BH,CAAAA,CAAiBG,CAAAA,CAAM,MAAA,GAAWhB,CAAAA,CAClCY,CAAAA,EAASI,CAAAA,CAAM,MAAA,CACfD,CAAAA,EAAUC,CAAAA,CAAM,MAAA,CAChBb,CAAAA,CAAY,MAAA,CAAOS,CAAK,CAC1B,CAAA,KAAA,CAASM,CAAAA,CAAK,CACZ,MAAAR,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,2CAAA,EAA8CG,CAAM,CAAA,YAAA,EAAeC,CAAM,CAAA,CAAA;AC7G3E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBgD,gBAAA;AACU,gBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA+BiB,QAAA","file":"/home/runner/work/cli/cli/dist/chunk-GDPRENOQ.cjs","sourcesContent":[null,"/* eslint-disable max-lines */\nimport { keyBy, uniq, chunk, sortBy } from 'lodash-es';\nimport {\n type DataCategoryType,\n SubDataPointDataSubCategoryGuessStatus,\n} from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport { gql } from 'graphql-request';\nimport colors from 'colors';\nimport type { GraphQLClient } from 'graphql-request';\nimport {\n DATAPOINT_EXPORT,\n DATA_SILO_EXPORT,\n type DataSiloAttributeValue,\n SUB_DATA_POINTS_COUNT,\n 
makeGraphQLRequest,\n} from '../graphql';\nimport { logger } from '../../logger';\nimport type { DataCategoryInput, ProcessingPurposeInput } from '../../codecs';\nimport { mapSeries } from 'bluebird';\n\nexport interface DataSiloCsvPreview {\n /** ID of dataSilo */\n id: string;\n /** Name of dataSilo */\n title: string;\n}\n\nexport interface DataPointCsvPreview {\n /** ID of dataPoint */\n id: string;\n /** The path to this data point */\n path: string[];\n /** Description */\n description: {\n /** Default message */\n defaultMessage: string;\n };\n /** Name */\n name: string;\n}\n\nexport interface SubDataPointCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Name (or key) of the subdatapoint */\n name: string;\n /** The description */\n description?: string;\n /** Personal data category */\n categories: DataCategoryInput[];\n /** Data point ID */\n dataPointId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** The processing purpose for this sub datapoint */\n purposes: ProcessingPurposeInput[];\n /** Attribute attached to subdatapoint */\n attributeValues?: DataSiloAttributeValue[];\n /** Data category guesses that are output by the classifier */\n pendingCategoryGuesses?: {\n /** Data category being guessed */\n category: DataCategoryInput;\n /** Status of guess */\n status: SubDataPointDataSubCategoryGuessStatus;\n /** classifier version that produced the guess */\n classifierVersion: number;\n }[];\n}\n\nexport interface DatapointFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Whether to include guessed categories, defaults to only approved categories */\n includeGuessedCategories?: boolean;\n /** Whether or not to include attributes */\n includeAttributes?: boolean;\n /** Parent categories to filter down for */\n parentCategories?: DataCategoryType[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n}\n\n/**\n * Pull subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The subdatapoints\n */\nasync function pullSubDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<SubDataPointCsvPreview[]> {\n const subDataPoints: SubDataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(parentCategories.length > 0 ? { category: parentCategories } : {}),\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n // if parentCategories or subCategories and not includeGuessedCategories\n ...(parentCategories.length + subCategories.length > 0 &&\n !includeGuessedCategories\n ? // then only show data points with approved data categories\n { status: SubDataPointDataSubCategoryGuessStatus.Approved }\n : {}),\n ...(dataSiloIds.length > 0 ? 
{ dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n subDataPoints: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** Count */\n totalCount: number;\n };\n }>(client, SUB_DATA_POINTS_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n subDataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** List of matches */\n nodes: SubDataPointCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliSubDataPointCsvExport(\n $filterBy: SubDataPointFiltersInput\n $first: Int!\n $offset: Int!\n ) {\n subDataPoints(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n name\n description\n dataPointId\n dataSiloId\n purposes {\n name\n purpose\n }\n categories {\n name\n category\n }\n ${\n includeGuessedCategories\n ? `pendingCategoryGuesses {\n category {\n name\n category\n }\n status\n classifierVersion\n }`\n : ''\n }\n ${\n includeAttributes\n ? `attributeValues {\n attributeKey {\n name\n }\n name\n }`\n : ''\n }\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n // TODO: https://transcend.height.app/T-40484 - add cursor support\n // ...(cursor ? { cursor: { id: cursor } } : {}),\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n subDataPoints.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(subDataPoints, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n\n/**\n * Pull datapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints\n */\nasync function pullDatapoints(\n client: GraphQLClient,\n {\n dataPointIds = [],\n pageSize = 100,\n }: {\n /** IDs of data points to filter down */\n dataPointIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataPointCsvPreview[]> {\n const dataPoints: DataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 2/3] Fetching metadata for ${dataPointIds.length} datapoints`,\n ),\n );\n\n // Group by 100\n const dataPointsGrouped = chunk(dataPointIds, pageSize);\n\n progressBar.start(dataPointIds.length, 0);\n let total = 0;\n await mapSeries(dataPointsGrouped, async (dataPointIdsGroup) => {\n try {\n const {\n dataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataPoints: {\n /** List of matches */\n nodes: DataPointCsvPreview[];\n };\n }>(client, DATAPOINT_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataPointIdsGroup,\n },\n });\n\n dataPoints.push(...nodes);\n 
total += dataPointIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for IDs ${dataPointIdsGroup.join(\n ', ',\n )}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataPoints.length} dataPoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataPoints;\n}\n\n/**\n * Pull data silo information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The data silos\n */\nasync function pullDataSilos(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n pageSize = 100,\n }: {\n /** IDs of data silos to filter down */\n dataSiloIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataSiloCsvPreview[]> {\n const dataSilos: DataSiloCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 3/3] Fetching metadata for ${dataSiloIds.length} data silos`,\n ),\n );\n\n // Group by 100\n const dataSilosGrouped = chunk(dataSiloIds, pageSize);\n\n progressBar.start(dataSiloIds.length, 0);\n let total = 0;\n await mapSeries(dataSilosGrouped, async (dataSiloIdsGroup) => {\n try {\n const {\n dataSilos: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataSilos: {\n /** List of matches */\n nodes: DataSiloCsvPreview[];\n };\n }>(client, DATA_SILO_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataSiloIdsGroup,\n },\n });\n\n dataSilos.push(...nodes);\n total += dataSiloIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching data silos for IDs ${dataSiloIdsGroup.join(', ')}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataSilos.length} data silos in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataSilos;\n}\n\n/**\n * Pull all datapoints from the data inventory.\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints and data silos\n */\nexport async function pullAllDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<\n (SubDataPointCsvPreview & {\n /** Data point information */\n dataPoint: DataPointCsvPreview;\n /** Data silo information */\n dataSilo: DataSiloCsvPreview;\n })[]\n> {\n // Subdatapoint information\n const subDatapoints = await pullSubDatapoints(client, {\n dataSiloIds,\n includeGuessedCategories,\n includeAttributes,\n parentCategories,\n subCategories,\n pageSize,\n });\n\n // The datapoint ids to grab\n const dataPointIds = uniq(subDatapoints.map((point) => point.dataPointId));\n const dataPoints = await pullDatapoints(client, {\n dataPointIds,\n });\n const dataPointById = keyBy(dataPoints, 'id');\n\n // The data silo IDs to grab\n const allDataSiloIds = uniq(subDatapoints.map((point) => point.dataSiloId));\n const dataSilos = await pullDataSilos(client, {\n 
dataSiloIds: allDataSiloIds,\n });\n const dataSiloById = keyBy(dataSilos, 'id');\n\n return subDatapoints.map((subDataPoint) => ({\n ...subDataPoint,\n dataPoint: dataPointById[subDataPoint.dataPointId],\n dataSilo: dataSiloById[subDataPoint.dataSiloId],\n }));\n}\n/* eslint-enable max-lines */\n","import type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport { gql, type GraphQLClient } from 'graphql-request';\nimport { sortBy } from 'lodash-es';\nimport type { DataCategoryInput } from '../../codecs';\nimport { ENTRY_COUNT, makeGraphQLRequest } from '../graphql';\nimport { logger } from '../../logger';\n\ninterface UnstructuredSubDataPointRecommendationCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Entry or Named Entity recognized by the classifier */\n name: string;\n /** Context snippet including entry */\n contextSnippet: string;\n /** Scanned object ID */\n scannedObjectId: string;\n /** Scanned object path ID */\n scannedObjectPathId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** Personal data category */\n dataSubCategory: DataCategoryInput;\n /** Classification Status */\n status: UnstructuredSubDataPointRecommendationStatus;\n /** Confidence */\n confidence: number;\n /** Classification method */\n classificationMethod: string;\n /** Classifier version */\n classifierVersion: string;\n}\n\ninterface EntryFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Parent categories to filter down for */\n status?: UnstructuredSubDataPointRecommendationStatus[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n /** Include entry and snippet */\n includeEncryptedSnippets?: boolean;\n /** Include encryptedSamplesS3Key */\n includeEncryptedSamplesS3Key?: boolean;\n}\n/**\n * Pull unstructured subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @param options.dataSiloIds - IDs of data silos to filter down\n * @param options.status - Parent categories to filter down for\n * @param options.subCategories - Sub categories to filter down for\n * @param options.includeEncryptedSnippets - Include entry and snippet\n * @param options.includeEncryptedSamplesS3Key - Include encryptedSamplesS3Key\n * @param options.pageSize - Page size to pull in\n * @returns A promise that resolves to an array of unstructured subdatapoint recommendations\n */\nexport async function pullUnstructuredSubDataPointRecommendations(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n status,\n subCategories = [],\n includeEncryptedSnippets,\n pageSize = 100,\n }: EntryFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<UnstructuredSubDataPointRecommendationCsvPreview[]> {\n const unstructuredSubDataPointRecommendations: UnstructuredSubDataPointRecommendationCsvPreview[] =\n [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n ...(status ? { status } : {}),\n ...(dataSiloIds.length > 0 ? 
{ dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n unstructuredSubDataPointRecommendations: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** Count */\n totalCount: number;\n };\n }>(client, ENTRY_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n unstructuredSubDataPointRecommendations: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** List of matches */\n nodes: UnstructuredSubDataPointRecommendationCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliUnstructuredSubDataPointRecommendationCsvExport(\n $filterBy: UnstructuredSubDataPointRecommendationsFilterInput\n $first: Int!\n $offset: Int!\n ) {\n unstructuredSubDataPointRecommendations(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n dataSiloId\n scannedObjectPathId\n scannedObjectId\n ${includeEncryptedSnippets ? 'name' : ''}\n ${includeEncryptedSnippets ? 'contextSnippet' : ''}\n dataSubCategory {\n name\n category\n }\n status\n confidence\n classificationMethod\n classifierVersion\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n unstructuredSubDataPointRecommendations.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(unstructuredSubDataPointRecommendations, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n"]}
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-GZSDZRAF.cjs","../src/lib/data-inventory/pullAllDatapoints.ts","../src/lib/data-inventory/pullUnstructuredSubDataPointRecommendations.ts"],"names":["pullSubDatapoints","client","dataSiloIds","includeGuessedCategories","includeAttributes","parentCategories","subCategories","pageSize","subDataPoints","t0","progressBar","cliProgress","filterBy","SubDataPointDataSubCategoryGuessStatus","totalCount","makeGraphQLRequest","SUB_DATA_POINTS_COUNT","logger","colors","total","shouldContinue","cursor","offset","nodes","gql","err"],"mappings":"AAAA,quBAAqE,wDAAyC,wDAA8D,2DCKrK,qGACiB,iDACJ,gFACD,oCAWO,MAuE1B,SAAeA,CAAAA,CACbC,CAAAA,CACA,CACE,WAAA,CAAAC,CAAAA,CAAc,CAAC,CAAA,CACf,wBAAA,CAAAC,CAAAA,CACA,iBAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CAAmB,CAAC,CAAA,CACpB,aAAA,CAAAC,CAAAA,CAAgB,CAAC,CAAA,CACjB,QAAA,CAAAC,CAAAA,CAAW,GACb,CAAA,CAGI,CAAC,CAAA,CAC8B,CACnC,IAAMC,CAAAA,CAA0C,CAAC,CAAA,CAG3CC,CAAAA,CAAK,IAAI,IAAA,CAAK,CAAA,CAAE,OAAA,CAAQ,CAAA,CAGxBC,CAAAA,CAAc,IAAIC,qBAAAA,CAAY,SAAA,CAClC,CAAC,CAAA,CACDA,qBAAAA,CAAY,OAAA,CAAQ,cACtB,CAAA,CAGMC,CAAAA,CAAW,CACf,GAAIP,CAAAA,CAAiB,MAAA,CAAS,CAAA,CAAI,CAAE,QAAA,CAAUA,CAAiB,CAAA,CAAI,CAAC,CAAA,CACpE,GAAIC,CAAAA,CAAc,MAAA,CAAS,CAAA,CAAI,CAAE,cAAA,CAAgBA,CAAc,CAAA,CAAI,CAAC,CAAA,CAEpE,GAAID,CAAAA,CAAiB,MAAA,CAASC,CAAAA,CAAc,MAAA,CAAS,CAAA,EACrD,CAACH,CAAAA,CAEG,CAAE,MAAA,CAAQU,oDAAAA,CAAuC,QAAS,CAAA,CAC1D,CAAC,CAAA,CACL,GAAIX,CAAAA,CAAY,MAAA,CAAS,CAAA,CAAI,CAAE,SAAA,CAAWA,CAAY,CAAA,CAAI,CAAC,CAC7D,CAAA,CAGM,CACJ,aAAA,CAAe,CAAE,UAAA,CAAAY,CAAW,CAC9B,CAAA,CAAI,MAAMC,kCAAAA,CAMPd,CAAQe,mBAAAA,CAAuB,CAChC,QAAA,CAAAJ,CACF,CAAC,CAAA,CAEDK,mBAAAA,CAAO,IAAA,CAAKC,gBAAAA,CAAO,OAAA,CAAQ,yCAAyC,CAAC,CAAA,CAErER,CAAAA,CAAY,KAAA,CAAMI,CAAAA,CAAY,CAAC,CAAA,CAC/B,IAAIK,CAAAA,CAAQ,CAAA,CACRC,CAAAA,CAAiB,CAAA,CAAA,CACjBC,CAAAA,CACAC,CAAAA,CAAS,CAAA,CACb,GACE,GAAI,CACF,GAAM,CACJ,aAAA,CAAe,CAAE,KAAA,CAAAC,CAAM,CACzB,CAAA,CAAI,MAAMR,kCAAAA,CAORd,CACAuB,mBAAAA,CAAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAA,EA2BUrB,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAQA,EACN,CAAA;AAAA,gBAAA,EAEEC,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAMA,EACN,CAAA;AAAA;AAAA;AAAA;AAAA,QAAA,CAAA,CAKR,CACE,KAAA,CAAOG,CAAAA,CACP,MAAA,CAAAe,CAAAA,CACA,QAAA,CAAU,CACR,GAAGV,CAGL,CACF,CACF,CAAA,CAEAS,CAAAA,iBAASE,CAAAA,qBAAMA,CAAAA,CAAM,MAAA,CAAS,CAAC,CAAA,6BAAG,IAAA,CAClCf,CAAAA,CAAc,IAAA,CAAK,GAAGe,CAAK,CAAA,CAC3BH,CAAAA,CAAiBG,CAAAA,CAAM,MAAA,GAAWhB,CAAAA,CAClCY,CAAAA,EAASI,CAAAA,CAAM,MAAA,CACfD,CAAAA,EAAUC,CAAAA,CAAM,MAAA,CAChBb,CAAAA,CAAY,MAAA,CAAOS,CAAK,CAC1B,CAAA,KAAA,CAASM,CAAAA,CAAK,CACZ,MAAAR,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,2CAAA,EAA8CG,CAAM,CAAA,YAAA,EAAeC,CAAM,CAAA,CAAA;AC7G3E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBgD,gBAAA;AACU,gBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA+BiB,QAAA","file":"/home/runner/work/cli/cli/dist/chunk-GZSDZRAF.cjs","sourcesContent":[null,"/* eslint-disable max-lines */\nimport { keyBy, uniq, chunk, sortBy } from 'lodash-es';\nimport {\n type DataCategoryType,\n SubDataPointDataSubCategoryGuessStatus,\n} from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport { gql } from 'graphql-request';\nimport colors from 'colors';\nimport type { GraphQLClient } from 'graphql-request';\nimport {\n DATAPOINT_EXPORT,\n DATA_SILO_EXPORT,\n type DataSiloAttributeValue,\n SUB_DATA_POINTS_COUNT,\n 
makeGraphQLRequest,\n} from '../graphql';\nimport { logger } from '../../logger';\nimport type { DataCategoryInput, ProcessingPurposeInput } from '../../codecs';\nimport { mapSeries } from 'bluebird';\n\nexport interface DataSiloCsvPreview {\n /** ID of dataSilo */\n id: string;\n /** Name of dataSilo */\n title: string;\n}\n\nexport interface DataPointCsvPreview {\n /** ID of dataPoint */\n id: string;\n /** The path to this data point */\n path: string[];\n /** Description */\n description: {\n /** Default message */\n defaultMessage: string;\n };\n /** Name */\n name: string;\n}\n\nexport interface SubDataPointCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Name (or key) of the subdatapoint */\n name: string;\n /** The description */\n description?: string;\n /** Personal data category */\n categories: DataCategoryInput[];\n /** Data point ID */\n dataPointId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** The processing purpose for this sub datapoint */\n purposes: ProcessingPurposeInput[];\n /** Attribute attached to subdatapoint */\n attributeValues?: DataSiloAttributeValue[];\n /** Data category guesses that are output by the classifier */\n pendingCategoryGuesses?: {\n /** Data category being guessed */\n category: DataCategoryInput;\n /** Status of guess */\n status: SubDataPointDataSubCategoryGuessStatus;\n /** classifier version that produced the guess */\n classifierVersion: number;\n }[];\n}\n\nexport interface DatapointFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Whether to include guessed categories, defaults to only approved categories */\n includeGuessedCategories?: boolean;\n /** Whether or not to include attributes */\n includeAttributes?: boolean;\n /** Parent categories to filter down for */\n parentCategories?: DataCategoryType[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n}\n\n/**\n * Pull subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The subdatapoints\n */\nasync function pullSubDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<SubDataPointCsvPreview[]> {\n const subDataPoints: SubDataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(parentCategories.length > 0 ? { category: parentCategories } : {}),\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n // if parentCategories or subCategories and not includeGuessedCategories\n ...(parentCategories.length + subCategories.length > 0 &&\n !includeGuessedCategories\n ? // then only show data points with approved data categories\n { status: SubDataPointDataSubCategoryGuessStatus.Approved }\n : {}),\n ...(dataSiloIds.length > 0 ? 
{ dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n subDataPoints: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** Count */\n totalCount: number;\n };\n }>(client, SUB_DATA_POINTS_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n subDataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** List of matches */\n nodes: SubDataPointCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliSubDataPointCsvExport(\n $filterBy: SubDataPointFiltersInput\n $first: Int!\n $offset: Int!\n ) {\n subDataPoints(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n name\n description\n dataPointId\n dataSiloId\n purposes {\n name\n purpose\n }\n categories {\n name\n category\n }\n ${\n includeGuessedCategories\n ? `pendingCategoryGuesses {\n category {\n name\n category\n }\n status\n classifierVersion\n }`\n : ''\n }\n ${\n includeAttributes\n ? `attributeValues {\n attributeKey {\n name\n }\n name\n }`\n : ''\n }\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n // TODO: https://transcend.height.app/T-40484 - add cursor support\n // ...(cursor ? { cursor: { id: cursor } } : {}),\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n subDataPoints.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(subDataPoints, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n\n/**\n * Pull datapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints\n */\nasync function pullDatapoints(\n client: GraphQLClient,\n {\n dataPointIds = [],\n pageSize = 100,\n }: {\n /** IDs of data points to filter down */\n dataPointIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataPointCsvPreview[]> {\n const dataPoints: DataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 2/3] Fetching metadata for ${dataPointIds.length} datapoints`,\n ),\n );\n\n // Group by 100\n const dataPointsGrouped = chunk(dataPointIds, pageSize);\n\n progressBar.start(dataPointIds.length, 0);\n let total = 0;\n await mapSeries(dataPointsGrouped, async (dataPointIdsGroup) => {\n try {\n const {\n dataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataPoints: {\n /** List of matches */\n nodes: DataPointCsvPreview[];\n };\n }>(client, DATAPOINT_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataPointIdsGroup,\n },\n });\n\n dataPoints.push(...nodes);\n 
total += dataPointIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for IDs ${dataPointIdsGroup.join(\n ', ',\n )}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataPoints.length} dataPoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataPoints;\n}\n\n/**\n * Pull data silo information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The data silos\n */\nasync function pullDataSilos(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n pageSize = 100,\n }: {\n /** IDs of data silos to filter down */\n dataSiloIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataSiloCsvPreview[]> {\n const dataSilos: DataSiloCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 3/3] Fetching metadata for ${dataSiloIds.length} data silos`,\n ),\n );\n\n // Group by 100\n const dataSilosGrouped = chunk(dataSiloIds, pageSize);\n\n progressBar.start(dataSiloIds.length, 0);\n let total = 0;\n await mapSeries(dataSilosGrouped, async (dataSiloIdsGroup) => {\n try {\n const {\n dataSilos: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataSilos: {\n /** List of matches */\n nodes: DataSiloCsvPreview[];\n };\n }>(client, DATA_SILO_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataSiloIdsGroup,\n },\n });\n\n dataSilos.push(...nodes);\n total += dataSiloIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching data silos for IDs ${dataSiloIdsGroup.join(', ')}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataSilos.length} data silos in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataSilos;\n}\n\n/**\n * Pull all datapoints from the data inventory.\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints and data silos\n */\nexport async function pullAllDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<\n (SubDataPointCsvPreview & {\n /** Data point information */\n dataPoint: DataPointCsvPreview;\n /** Data silo information */\n dataSilo: DataSiloCsvPreview;\n })[]\n> {\n // Subdatapoint information\n const subDatapoints = await pullSubDatapoints(client, {\n dataSiloIds,\n includeGuessedCategories,\n includeAttributes,\n parentCategories,\n subCategories,\n pageSize,\n });\n\n // The datapoint ids to grab\n const dataPointIds = uniq(subDatapoints.map((point) => point.dataPointId));\n const dataPoints = await pullDatapoints(client, {\n dataPointIds,\n });\n const dataPointById = keyBy(dataPoints, 'id');\n\n // The data silo IDs to grab\n const allDataSiloIds = uniq(subDatapoints.map((point) => point.dataSiloId));\n const dataSilos = await pullDataSilos(client, {\n 
dataSiloIds: allDataSiloIds,\n });\n const dataSiloById = keyBy(dataSilos, 'id');\n\n return subDatapoints.map((subDataPoint) => ({\n ...subDataPoint,\n dataPoint: dataPointById[subDataPoint.dataPointId],\n dataSilo: dataSiloById[subDataPoint.dataSiloId],\n }));\n}\n/* eslint-enable max-lines */\n","import type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport { gql, type GraphQLClient } from 'graphql-request';\nimport { sortBy } from 'lodash-es';\nimport type { DataCategoryInput } from '../../codecs';\nimport { ENTRY_COUNT, makeGraphQLRequest } from '../graphql';\nimport { logger } from '../../logger';\n\ninterface UnstructuredSubDataPointRecommendationCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Entry or Named Entity recognized by the classifier */\n name: string;\n /** Context snippet including entry */\n contextSnippet: string;\n /** Scanned object ID */\n scannedObjectId: string;\n /** Scanned object path ID */\n scannedObjectPathId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** Personal data category */\n dataSubCategory: DataCategoryInput;\n /** Classification Status */\n status: UnstructuredSubDataPointRecommendationStatus;\n /** Confidence */\n confidence: number;\n /** Classification method */\n classificationMethod: string;\n /** Classifier version */\n classifierVersion: string;\n}\n\ninterface EntryFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Parent categories to filter down for */\n status?: UnstructuredSubDataPointRecommendationStatus[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n /** Include entry and snippet */\n includeEncryptedSnippets?: boolean;\n /** Include encryptedSamplesS3Key */\n includeEncryptedSamplesS3Key?: boolean;\n}\n/**\n * Pull unstructured subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @param options.dataSiloIds - IDs of data silos to filter down\n * @param options.status - Parent categories to filter down for\n * @param options.subCategories - Sub categories to filter down for\n * @param options.includeEncryptedSnippets - Include entry and snippet\n * @param options.includeEncryptedSamplesS3Key - Include encryptedSamplesS3Key\n * @param options.pageSize - Page size to pull in\n * @returns A promise that resolves to an array of unstructured subdatapoint recommendations\n */\nexport async function pullUnstructuredSubDataPointRecommendations(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n status,\n subCategories = [],\n includeEncryptedSnippets,\n pageSize = 100,\n }: EntryFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<UnstructuredSubDataPointRecommendationCsvPreview[]> {\n const unstructuredSubDataPointRecommendations: UnstructuredSubDataPointRecommendationCsvPreview[] =\n [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n ...(status ? { status } : {}),\n ...(dataSiloIds.length > 0 ? 
{ dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n unstructuredSubDataPointRecommendations: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** Count */\n totalCount: number;\n };\n }>(client, ENTRY_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n unstructuredSubDataPointRecommendations: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** List of matches */\n nodes: UnstructuredSubDataPointRecommendationCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliUnstructuredSubDataPointRecommendationCsvExport(\n $filterBy: UnstructuredSubDataPointRecommendationsFilterInput\n $first: Int!\n $offset: Int!\n ) {\n unstructuredSubDataPointRecommendations(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n dataSiloId\n scannedObjectPathId\n scannedObjectId\n ${includeEncryptedSnippets ? 'name' : ''}\n ${includeEncryptedSnippets ? 'contextSnippet' : ''}\n dataSubCategory {\n name\n category\n }\n status\n confidence\n classificationMethod\n classifierVersion\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n unstructuredSubDataPointRecommendations.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(unstructuredSubDataPointRecommendations, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n"]}
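One more detail recoverable from the inlined `pullAllDatapoints` source above: after the paginated pulls, subdatapoints are joined to their parent datapoints and data silos entirely in memory. Foreign keys are deduplicated with `uniq`, the parents are batch-fetched, and `keyBy` builds the lookup indexes. A simplified sketch of that join (types pared down to the fields it touches; `fetchDataPoints` and `fetchDataSilos` are illustrative stand-ins for the batched GraphQL calls):

```ts
import { keyBy, uniq } from 'lodash-es';

interface SubDataPointRef {
  dataPointId: string;
  dataSiloId: string;
}

function joinSubDataPoints<
  S extends SubDataPointRef,
  D extends { id: string },
  P extends { id: string },
>(
  subDataPoints: S[],
  fetchDataPoints: (ids: string[]) => D[],
  fetchDataSilos: (ids: string[]) => P[],
): (S & { dataPoint: D; dataSilo: P })[] {
  // Dedupe the foreign keys so each parent record is fetched only once
  const dataPointIds = uniq(subDataPoints.map((s) => s.dataPointId));
  const dataSiloIds = uniq(subDataPoints.map((s) => s.dataSiloId));
  const dataPointById = keyBy(fetchDataPoints(dataPointIds), 'id');
  const dataSiloById = keyBy(fetchDataSilos(dataSiloIds), 'id');
  return subDataPoints.map((s) => ({
    ...s,
    dataPoint: dataPointById[s.dataPointId],
    dataSilo: dataSiloById[s.dataSiloId],
  }));
}
```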
@@ -1,12 +1,12 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var
-
`)}`);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`The timestamp column "${a.timestampColum}" is present for all row`))}return a}async function ge(f,a){let y=
-
`)}`;if(_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(r)),!await
-
When 'forceTriggerWorkflows' is set all the user identifiers should contain a consent record`);if(C&&we({currentConsentRecord:C,pendingUpdates:
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunk25JGKUUEcjs = require('./chunk-25JGKUUE.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkWF7EGPWLcjs = require('./chunk-WF7EGPWL.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _bluebird = require('bluebird');var _cliprogress = require('cli-progress'); var _cliprogress2 = _interopRequireDefault(_cliprogress);var _persistedstate = require('@transcend-io/persisted-state');var _iots = require('io-ts'); var j = _interopRequireWildcard(_iots); var b = _interopRequireWildcard(_iots); var e = _interopRequireWildcard(_iots);var _typeutils = require('@transcend-io/type-utils');var Oe=["ENOTFOUND","ECONNRESET","ETIMEDOUT","504 Gateway Time-out","Task timed out after"];async function O(f,{maxAttempts:a=3,baseDelayMs:y=250,isRetryable:m=(l,s)=>Oe.some(r=>s.includes(r)),onRetry:c}={}){let l=0;for(;;){l+=1;try{return await f()}catch(s){let r=_nullishCoalesce((s&&(_optionalChain([s, 'access', _2 => _2.response, 'optionalAccess', _3 => _3.body])||s.message)), () => (String(_nullishCoalesce(s, () => ("Unknown error")))));if(!(l<a&&m(s,r)))throw new Error(`Preference query failed after ${l} attempt(s): ${r}`);_optionalChain([c, 'optionalCall', _4 => _4(l,s,r)]);let o=y*2**(l-1),n=Math.floor(Math.random()*y),t=o+n;_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`[retry] attempt ${l}/${a-1}; backing off ${t}ms: ${r}`)),await _chunk25JGKUUEcjs.Xf.call(void 0, t)}}}var _privacytypes = require('@transcend-io/privacy-types');var v=b.intersection([b.type({nodes:b.array(_privacytypes.PreferenceQueryResponseItem)}),b.partial({cursor:b.string})]);async function me(f,{identifiers:a,partitionKey:y,skipLogging:m=!1,concurrency:c=40}){let l=[],s=_chunkWF7EGPWLcjs.b.call(void 0, a,100),r=new Date().getTime(),i=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);m||i.start(a.length,0);let o=0;await _bluebird.map.call(void 0, s,async p=>{let g=await O(()=>f.post(`v1/preferences/${y}/query`,{json:{filter:{identifiers:p},limit:p.length}}).json(),{onRetry:(w,M,h)=>{_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`[RETRY] group size=${p.length} partition=${y} attempt=${w}: ${h}`))}}),d=_typeutils.decodeCodec.call(void 0, v,g);l.push(...d.nodes),o+=p.length,i.update(o)},{concurrency:c}),i.stop();let t=new Date().getTime()-r;return m||_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Completed download in "${t/1e3}" seconds.`)),l}function 
B({row:f,columnToPurposeName:a,purposeSlugs:y,preferenceTopics:m}){let c={};return Object.entries(a).forEach(([l,{purpose:s,preference:r,valueMapping:i}])=>{if(!y.includes(s))throw new Error(`Invalid purpose slug: ${s}, expected: ${y.join(", ")}`);let o=f[l];if(r){let n=m.find(t=>t.slug===r&&t.purpose.trackingType===s);if(!n){let t=m.filter(p=>p.purpose.trackingType===s).map(p=>p.slug);throw new Error(`Invalid preference slug: ${r} for purpose: ${s}. Allowed preference slugs for purpose are: ${t.join(",")}`)}switch(c[s]||(c[s]={preferences:[]}),c[s].preferences||(c[s].preferences=[]),n.type){case _privacytypes.PreferenceTopicType.Boolean:{let t=i[o];if(t===void 0&&o!=="")throw new Error(`No preference mapping found for value "${o}" in column "${l}" (purpose=${s}, preference=${r})`);if(t==null)return;if(typeof t!="boolean")throw new Error(`Invalid value for boolean preference: ${r}, expected boolean, got: ${o}`);c[s].preferences.push({topic:r,choice:{booleanValue:t}});break}case _privacytypes.PreferenceTopicType.Select:{let t=i[o];if(t===void 0&&o!=="")throw new Error(`No preference mapping found for value "${o}" in column "${l}" (purpose=${s}, preference=${r})`);if(t==null)return;if(typeof t!="string")throw new Error(`Invalid value for select preference: ${r}, expected string, got: ${o}`);let p=t.trim()||null;if(p&&!n.preferenceOptionValues.map(({slug:g})=>g).includes(p))throw new Error(`Invalid value for select preference: ${r}, expected one of: ${n.preferenceOptionValues.map(({slug:g})=>g).join(", ")}, got: ${o}`);c[s].preferences.push({topic:r,choice:{selectValue:p}});break}case _privacytypes.PreferenceTopicType.MultiSelect:{if(typeof o!="string")throw new Error(`Invalid value for multi select preference: ${r}, expected string, got: ${o}`);let t=_chunk25JGKUUEcjs.oc.call(void 0, o).map(p=>{let g=i[p];if(g===void 0&&o!=="")throw new Error(`No preference mapping found for multi select token "${o}" in column "${l}" (purpose=${s}, preference=${r})`);if(g==null)return null;if(typeof g!="string")throw new Error(`Invalid value for multi select preference: ${r}, expected one of: ${n.preferenceOptionValues.map(({slug:d})=>d).join(", ")}, got: ${p}`);return g}).filter(p=>p!==null).sort((p,g)=>p.localeCompare(g));t.length>0&&c[s].preferences.push({topic:r,choice:{selectValues:t}});break}default:throw new Error(`Unknown preference type: ${n.type}`)}}else{let n=i[o];if(n===void 0&&o!=="")throw new Error(`No preference mapping found for value "${o}" in column "${l}" (purpose=${s}, preference=\u2205)`);if(n===null)return;c[s]?c[s].enabled=n===!0:c[s]={enabled:n===!0}}}),_typeutils.apply.call(void 0, c,(l,s)=>{if(typeof l.enabled!="boolean")throw new Error(`No mapping provided for purpose.enabled=true/false value: ${s}`);return{...l,enabled:l.enabled}})}var _inquirer = require('inquirer'); var _inquirer2 = _interopRequireDefault(_inquirer);var G="[NONE]";async function de(f,a){let y=_chunkWF7EGPWLcjs.j.call(void 0, f.map(c=>Object.keys(c)).flat()),m=_chunkWF7EGPWLcjs.c.call(void 0, y,[...a.identifierColumn?[a.identifierColumn]:[],...Object.keys(a.columnToPurposeName)]);if(!a.timestampColum){let{timestampName:c}=await _inquirer2.default.prompt([{name:"timestampName",message:"Choose the column that will be used as the timestamp of last preference update",type:"list",default:m.find(l=>l.toLowerCase().includes("date"))||m.find(l=>l.toLowerCase().includes("time"))||m[0],choices:[...m,G]}]);a.timestampColum=c}if(_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Using timestamp column 
"${a.timestampColum}"`)),a.timestampColum!==G){let c=f.map((l,s)=>l[a.timestampColum]?null:[s]).filter(l=>!!l).flat();if(c.length>0)throw new Error(`The timestamp column "${a.timestampColum}" is missing a value for the following rows: ${c.join(`
[line 2, added]
`)}`);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`The timestamp column "${a.timestampColum}" is present for all row`))}return a}async function ge(f,a){let y=_chunkWF7EGPWLcjs.j.call(void 0, f.map(r=>Object.keys(r)).flat()),m=_chunkWF7EGPWLcjs.c.call(void 0, y,[...a.identifierColumn?[a.identifierColumn]:[],...Object.keys(a.columnToPurposeName)]);if(!a.identifierColumn){let{identifierName:r}=await _inquirer2.default.prompt([{name:"identifierName",message:"Choose the column that will be used as the identifier to upload consent preferences by",type:"list",default:m.find(i=>i.toLowerCase().includes("email"))||m[0],choices:m}]);a.identifierColumn=r}_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Using identifier column "${a.identifierColumn}"`));let c=f.map((r,i)=>r[a.identifierColumn]?null:[i]).filter(r=>!!r).flat();if(c.length>0){let r=`The identifier column "${a.identifierColumn}" is missing a value for the following rows: ${c.join(", ")}`;if(_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(r)),!await _chunk25JGKUUEcjs.Rf.call(void 0, {message:"Would you like to skip rows missing an identifier?"}))throw new Error(r);let o=f.length;f=f.filter(n=>n[a.identifierColumn]),_chunkZUNVPK23cjs.a.info(_colors2.default.yellow(`Skipped ${o-f.length} rows missing an identifier`))}_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`The identifier column "${a.identifierColumn}" is present for all rows`));let l=_chunkWF7EGPWLcjs.d.call(void 0, f,a.identifierColumn),s=Object.entries(l).filter(([,r])=>r.length>1);if(s.length>0){let r=`The identifier column "${a.identifierColumn}" has duplicate values for the following rows: ${s.slice(0,10).map(([o,n])=>`${o} (${n.length})`).join(`
[line 3, added]
`)}`;if(_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(r)),!await _chunk25JGKUUEcjs.Rf.call(void 0, {message:"Would you like to automatically take the latest update?"}))throw new Error(r);f=Object.entries(l).map(([,o])=>o.sort((t,p)=>new Date(p[a.timestampColum]).getTime()-new Date(t[a.timestampColum]).getTime())[0]).filter(o=>o)}return{currentState:a,preferences:f}}async function ye(f,a,{purposeSlugs:y,preferenceTopics:m,forceTriggerWorkflows:c}){let l=_chunkWF7EGPWLcjs.j.call(void 0, f.map(i=>Object.keys(i)).flat()),s=_chunkWF7EGPWLcjs.c.call(void 0, l,[...a.identifierColumn?[a.identifierColumn]:[],...a.timestampColum?[a.timestampColum]:[]]);if(s.length===0){if(c)return a;throw new Error("No other columns to process")}let r=[...y,...m.map(i=>`${i.purpose.trackingType}->${i.slug}`)];return await _bluebird.mapSeries.call(void 0, s,async i=>{let o=_chunkWF7EGPWLcjs.j.call(void 0, f.map(t=>t[i])),n=a.columnToPurposeName[i];if(n)_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Column "${i}" is associated with purpose "${n.purpose}"`));else{let{purposeName:t}=await _inquirer2.default.prompt([{name:"purposeName",message:`Choose the purpose that column ${i} is associated with`,type:"list",default:r.find(d=>d.startsWith(y[0])),choices:r}]),[p,g]=t.split("->");n={purpose:p,preference:g||null,valueMapping:{}}}await _bluebird.mapSeries.call(void 0, o,async t=>{if(n.valueMapping[t]!==void 0){_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Value "${t}" is associated with purpose value "${n.valueMapping[t]}"`));return}if(n.preference===null){let{purposeValue:p}=await _inquirer2.default.prompt([{name:"purposeValue",message:`Choose the purpose value for value "${t}" associated with purpose "${n.purpose}"`,type:"confirm",default:t!=="false"}]);n.valueMapping[t]=p}if(n.preference!==null){let p=m.find(d=>d.slug===n.preference);if(!p){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Preference topic "${n.preference}" not found`));return}let g=p.preferenceOptionValues.map(({slug:d})=>d);if(p.type===_privacytypes.PreferenceTopicType.Boolean){let{preferenceValue:d}=await _inquirer2.default.prompt([{name:"preferenceValue",message:`Choose the preference value for "${p.slug}" value "${t}" associated with purpose "${n.purpose}"`,type:"confirm",default:t!=="false"}]);n.valueMapping[t]=d;return}if(p.type===_privacytypes.PreferenceTopicType.Select){let{preferenceValue:d}=await _inquirer2.default.prompt([{name:"preferenceValue",message:`Choose the preference value for "${p.slug}" value "${t}" associated with purpose "${n.purpose}"`,type:"list",choices:g,default:g.find(w=>w===t)}]);n.valueMapping[t]=d;return}if(p.type===_privacytypes.PreferenceTopicType.MultiSelect){let d=_chunk25JGKUUEcjs.oc.call(void 0, t);await _bluebird.mapSeries.call(void 0, d,async w=>{if(n.valueMapping[w]!==void 0)return;let{preferenceValue:M}=await _inquirer2.default.prompt([{name:"preferenceValue",message:`Choose the preference value for "${p.slug}" value "${w}" associated with purpose "${n.purpose}"`,type:"list",choices:g,default:g.find(h=>h===w)}]);n.valueMapping[w]=M});return}throw new Error(`Unknown preference topic type: ${p.type}`)}}),a.columnToPurposeName[i]=n}),a}function we({currentConsentRecord:f,pendingUpdates:a,preferenceTopics:y}){return Object.entries(a).every(([m,{preferences:c=[],enabled:l}])=>{let s=f.purposes.find(i=>i.purpose===m);return!!s&&s.enabled===l?c.every(({topic:i,choice:o})=>s.preferences&&s.preferences.find(n=>{if(n.topic!==i)return!1;let t=y.find(p=>p.slug===i&&p.purpose.trackingType===m);if(!t)throw new 
Error(`Could not find preference topic for ${i}`);switch(t.type){case _privacytypes.PreferenceTopicType.Boolean:return n.choice.booleanValue===o.booleanValue;case _privacytypes.PreferenceTopicType.Select:return n.choice.selectValue===o.selectValue;case _privacytypes.PreferenceTopicType.MultiSelect:let p=(n.choice.selectValues||[]).sort(),g=(o.selectValues||[]).sort();return p.length===g.length&&p.every((d,w)=>d===g[w]);default:throw new Error(`Unknown preference topic type: ${t.type}`)}})):!1})}function he({currentConsentRecord:f,pendingUpdates:a,preferenceTopics:y,log:m}){return!!Object.entries(a).find(([c,{preferences:l=[],enabled:s}])=>{let r=f.purposes.find(i=>i.purpose===c);return r?r.enabled!==s?(m&&_chunkZUNVPK23cjs.a.warn(`Purpose ${c} enabled value conflict for user ${f.userId}. Pending Value: ${s}, Current Value: ${r.enabled}`),!0):!!l.find(({topic:i,choice:o})=>{let n=(r.preferences||[]).find(d=>d.topic===i);if(!n)return m&&_chunkZUNVPK23cjs.a.warn(`No existing preference found for topic ${i} in purpose ${c} for user ${f.userId}.`),!1;let t=y.find(d=>d.slug===i&&d.purpose.trackingType===c);if(!t)throw new Error(`Could not find preference topic for ${i}`);let p,g;switch(t.type){case _privacytypes.PreferenceTopicType.Boolean:return p=n.choice.booleanValue!==o.booleanValue,m&&_chunkZUNVPK23cjs.a.warn(`Preference topic ${i} boolean value conflict for user ${f.userId}. Expected: ${o.booleanValue}, Found: ${n.choice.booleanValue}`),p;case _privacytypes.PreferenceTopicType.Select:return g=n.choice.selectValue!==o.selectValue,m&&_chunkZUNVPK23cjs.a.warn(`Preference topic ${i} select value conflict for user ${f.userId}. Expected: ${o.selectValue}, Found: ${n.choice.selectValue}`),g;case _privacytypes.PreferenceTopicType.MultiSelect:let d=(n.choice.selectValues||[]).sort(),w=(o.selectValues||[]).sort();return g=d.length!==w.length||!d.every((M,h)=>M===w[h]),m&&_chunkZUNVPK23cjs.a.warn(`Preference topic ${i} multi-select value conflict for user ${f.userId}. Expected: ${w.join(", ")}, Found: ${d.join(", ")}`),g;default:throw new Error(`Unknown preference topic type: ${t.type}`)}}):(m&&_chunkZUNVPK23cjs.a.warn(`No existing purpose found for ${c} in consent record for ${f.userId}.`),!1)})}async function $e({file:f,sombra:a,purposeSlugs:y,preferenceTopics:m,partitionKey:c,skipExistingRecordCheck:l,forceTriggerWorkflows:s},r){let i=new Date().getTime(),o=r.getValue("fileMetadata");_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Reading in file: "${f}"`));let n=_chunk25JGKUUEcjs.rc.call(void 0, f,j.record(j.string,j.string)),t={columnToPurposeName:{},pendingSafeUpdates:{},pendingConflictUpdates:{},skippedUpdates:{},...o[f]||{},lastFetchedAt:new Date().toISOString()};t=await de(n,t),o[f]=t,await r.setValue(o,"fileMetadata");let p=await ge(n,t);t=p.currentState,n=p.preferences,o[f]=t,await r.setValue(o,"fileMetadata"),t=await ye(n,t,{preferenceTopics:m,purposeSlugs:y,forceTriggerWorkflows:s}),o[f]=t,await r.setValue(o,"fileMetadata");let g=n.map(h=>h[t.identifierColumn]),d=l?[]:await me(a,{identifiers:g.map(h=>({value:h})),partitionKey:c}),w=_chunkWF7EGPWLcjs.e.call(void 0, d,"userId");t.pendingConflictUpdates={},t.pendingSafeUpdates={},t.skippedUpdates={},n.forEach(h=>{let S=h[t.identifierColumn],F=B({row:h,columnToPurposeName:t.columnToPurposeName,preferenceTopics:m,purposeSlugs:y}),C=w[S];if(s&&!C)throw new Error(`No existing consent record found for user with id: ${S}.
[line 4, added]
When 'forceTriggerWorkflows' is set all the user identifiers should contain a consent record`);if(C&&we({currentConsentRecord:C,pendingUpdates:F,preferenceTopics:m})&&!s){t.skippedUpdates[S]=h;return}if(C&&he({currentConsentRecord:C,pendingUpdates:F,preferenceTopics:m})){t.pendingConflictUpdates[S]={row:h,record:C};return}t.pendingSafeUpdates[S]=h}),o[f]=t,await r.setValue(o,"fileMetadata");let M=new Date().getTime();_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pre-processed file: "${f}" in ${(M-i)/1e3}s`))}var Ce=e.type({purpose:e.string,preference:e.union([e.string,e.null]),valueMapping:e.record(e.string,e.union([e.string,e.boolean,e.null,e.undefined]))}),Xt=e.record(e.string,Ce),je=e.type({name:e.string,isUniqueOnPreferenceStore:e.boolean}),Zt=e.record(e.string,je),Ne=e.intersection([e.type({columnToPurposeName:e.record(e.string,Ce),lastFetchedAt:e.string,pendingSafeUpdates:e.record(e.string,e.record(e.string,e.string)),pendingConflictUpdates:e.record(e.string,e.type({record:_privacytypes.PreferenceQueryResponseItem,row:e.record(e.string,e.string)})),skippedUpdates:e.record(e.string,e.record(e.string,e.string))}),e.partial({identifierColumn:e.string,timestampColum:e.string})]),er=e.record(e.string,e.union([e.boolean,_privacytypes.PreferenceUpdateItem])),tr=e.record(e.string,e.union([e.boolean,e.record(e.string,e.string)])),rr=e.record(e.string,e.type({uploadedAt:e.string,error:e.string,update:_privacytypes.PreferenceUpdateItem})),or=e.record(e.string,e.type({record:_privacytypes.PreferenceQueryResponseItem,row:e.record(e.string,e.string)})),nr=e.record(e.string,e.record(e.string,e.string)),Te=e.type({fileMetadata:e.record(e.string,Ne),failingUpdates:e.record(e.string,e.type({uploadedAt:e.string,error:e.string,update:_privacytypes.PreferenceUpdateItem})),pendingUpdates:e.record(e.string,_privacytypes.PreferenceUpdateItem)});async function br({auth:f,sombraAuth:a,receiptFilepath:y,file:m,partition:c,isSilent:l=!0,dryRun:s=!1,skipWorkflowTriggers:r=!1,skipConflictUpdates:i=!1,skipExistingRecordCheck:o=!1,attributes:n=[],transcendUrl:t,forceTriggerWorkflows:p=!1}){let g=_chunk25JGKUUEcjs.qc.call(void 0, n),d=new (0, _persistedstate.PersistedState)(y,Te,{fileMetadata:{},failingUpdates:{},pendingUpdates:{}}),w=d.getValue("failingUpdates"),M=d.getValue("pendingUpdates"),h=d.getValue("fileMetadata");_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Restored cache, there are:
[line 5, unchanged]
${Object.values(w).length} failing requests to be retried
[line 6, unchanged]
${Object.values(M).length} pending requests to be processed
[line 7, unchanged]
The following files are stored in cache and will be used:
[line 8, removed]
${Object.keys(
[line 8, added]
${Object.keys(h).map(P=>P).join(`
[line 9, unchanged]
`)}
[line 10, unchanged]
The following file will be processed: ${m}
[line 11, removed]
`));let
[line 12, removed]
//# sourceMappingURL=chunk-
[line 11, added] (the upload flow and the exports.b / exports.c helpers on this line are also sketched after the diff)
`));let S=_chunk25JGKUUEcjs.wc.call(void 0, t,f),[F,C,Z]=await Promise.all([_chunk25JGKUUEcjs.xc.call(void 0, t,f,a),p?Promise.resolve([]):_chunk25JGKUUEcjs.fd.call(void 0, S),p?Promise.resolve([]):_chunk25JGKUUEcjs.bd.call(void 0, S)]);await $e({file:m,purposeSlugs:C.map(P=>P.trackingType),preferenceTopics:Z,sombra:F,partitionKey:c,skipExistingRecordCheck:o,forceTriggerWorkflows:p},d);let U={};h=d.getValue("fileMetadata");let k=h[m];if(_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Found ${Object.entries(k.pendingSafeUpdates).length} safe updates in ${m}`)),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Found ${Object.entries(k.pendingConflictUpdates).length} conflict updates in ${m}`)),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Found ${Object.entries(k.skippedUpdates).length} skipped updates in ${m}`)),Object.entries({...k.pendingSafeUpdates,...i?{}:_typeutils.apply.call(void 0, k.pendingConflictUpdates,({row:P})=>P)}).forEach(([P,$])=>{let N=k.timestampColum===G?new Date:new Date($[k.timestampColum]),x=B({row:$,columnToPurposeName:k.columnToPurposeName,preferenceTopics:Z,purposeSlugs:C.map(V=>V.trackingType)});U[P]={userId:P,partition:c,timestamp:N.toISOString(),purposes:Object.entries(x).map(([V,Ie])=>({...Ie,purpose:V,workflowSettings:{attributes:g,isSilent:l,skipWorkflowTrigger:r}}))}}),await d.setValue(U,"pendingUpdates"),await d.setValue({},"failingUpdates"),s){_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Dry run complete, exiting. ${Object.values(U).length} pending updates. Check file: ${y}`));return}_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Uploading ${Object.values(U).length} preferences to partition: ${c}`));let Se=new Date().getTime(),_=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),ee=0,L=Object.entries(U),ke=_chunkWF7EGPWLcjs.b.call(void 0, L,r?100:10);_.start(L.length,0),await _bluebird.map.call(void 0, ke,async P=>{try{await F.put("v1/preferences",{json:{records:P.map(([,$])=>$),skipWorkflowTriggers:r,forceTriggerWorkflows:p}}).json()}catch($){try{let x=JSON.parse(_optionalChain([$, 'optionalAccess', _5 => _5.response, 'optionalAccess', _6 => _6.body])||"{}");x.error&&_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Error: ${x.error}`))}catch (e2){}_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to upload ${P.length} user preferences to partition ${c}: ${_optionalChain([$, 'optionalAccess', _7 => _7.response, 'optionalAccess', _8 => _8.body])||_optionalChain([$, 'optionalAccess', _9 => _9.message])}`));let N=d.getValue("failingUpdates");P.forEach(([x,V])=>{N[x]={uploadedAt:new Date().toISOString(),update:V,error:_optionalChain([$, 'optionalAccess', _10 => _10.response, 'optionalAccess', _11 => _11.body])||_optionalChain([$, 'optionalAccess', _12 => _12.message])||"Unknown error"}}),await d.setValue(N,"failingUpdates")}ee+=P.length,_.update(ee)},{concurrency:40}),_.stop();let xe=new Date().getTime()-Se;_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully uploaded ${L.length} user preferences to partition ${c} in "${xe/1e3}" seconds!`))}function Tr({identifiers:f=[],purposes:a=[],metadata:y=[],consentManagement:m={},system:c={decryptionStatus:"DECRYPTED"},...l}){let s={...l,...c,...m};if(Array.isArray(f)){let r=new Map;for(let{name:i,value:o}of f)r.has(i)||r.set(i,new Set),o&&r.get(i).add(o);for(let[i,o]of r.entries())s[i]=Array.from(o).join(",")}if(Array.isArray(y)){let r=new Map;for(let{key:i,value:o}of y)r.has(i)||r.set(i,new Set),o&&r.get(i).add(o);for(let[i,o]of 
r.entries())s[`metadata_${i}`]=Array.from(o).join(",")}if(Array.isArray(a)){for(let{purpose:r,preferences:i,enabled:o}of a)if(s[r]=!!o,Array.isArray(i))for(let{topic:n,choice:t}of i){let p=`${r}_${n}`,g=null;Object.prototype.hasOwnProperty.call(t,"booleanValue")?g=!!t.booleanValue:Object.prototype.hasOwnProperty.call(t,"selectValue")?g=String(_nullishCoalesce(t.selectValue, () => (""))):Array.isArray(t.selectValues)?g=t.selectValues.map(w=>String(w)).filter(w=>w.length>0).join(","):g=null,s[p]=g}}return s}async function vr(f,{partition:a,filterBy:y={},limit:m=50}){let c=[],l,s=y&&(Object.keys(y).length>0||y.system&&Object.keys(y.system).length>0),r=Math.max(1,Math.min(50,_nullishCoalesce(m, () => (50))));for(;;){let i={limit:r};s&&(i.filter=y),l&&(i.cursor=l);let o=await O(()=>f.post(`v1/preferences/${a}/query`,{json:i}).json(),{onRetry:(p,g,d)=>{_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`Retry attempt ${p} for fetchConsentPreferences due to error: ${d}`))}}),{nodes:n,cursor:t}=_typeutils.decodeCodec.call(void 0, v,o);if(!n||n.length===0||(c.push(...n),!t))break;l=t}return c}exports.a = br; exports.b = Tr; exports.c = vr;
[line 12, added]
//# sourceMappingURL=chunk-NIACJFII.cjs.map
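
For readability, the rest of this entry sketches the main routines added in lines 1 and 11 of the new chunk. These are hand de-minified sketches, not the package's source: all function and variable names are ours, logging and io-ts decoding are omitted, and the bundle's internal helpers are replaced with plain Node equivalents.

First, the retry wrapper around each preference-store query. It retries only errors whose message matches a known-transient list, with exponential backoff plus random jitter:

```js
const RETRYABLE = [
  'ENOTFOUND',
  'ECONNRESET',
  'ETIMEDOUT',
  '504 Gateway Time-out',
  'Task timed out after',
];

// Mirrors the helper minified as `O` on line 1; the name is ours.
async function retryWithBackoff(fn, {
  maxAttempts = 3,
  baseDelayMs = 250,
  isRetryable = (err, msg) => RETRYABLE.some((code) => msg.includes(code)),
  onRetry,
} = {}) {
  let attempt = 0;
  for (;;) {
    attempt += 1;
    try {
      return await fn();
    } catch (err) {
      const msg =
        (err && (err.response?.body || err.message)) ??
        String(err ?? 'Unknown error');
      if (!(attempt < maxAttempts && isRetryable(err, msg))) {
        throw new Error(
          `Preference query failed after ${attempt} attempt(s): ${msg}`,
        );
      }
      onRetry?.(attempt, err, msg);
      // 250ms, 500ms, 1000ms, ... plus up to baseDelayMs of random jitter.
      const delayMs =
        baseDelayMs * 2 ** (attempt - 1) +
        Math.floor(Math.random() * baseDelayMs);
      // The bundle uses an internal sleep helper; a plain timeout works here.
      await new Promise((resolve) => setTimeout(resolve, delayMs));
    }
  }
}
```

In the bundle this wraps each POST to v1/preferences/{partition}/query. Requests are batched: downloads go out in groups of 100 identifiers with a concurrency of 40, and uploads send 10 records per request (100 when workflow triggers are skipped), also at concurrency 40.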
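
Line 1 also adds the row-mapping routine (minified as `B`) that converts one CSV row into purpose updates. A hypothetical columnToPurposeName config illustrates the shape it consumes; all column names, purpose slugs, and preference slugs below are invented:

```js
const columnToPurposeName = {
  marketing_opt_in: {
    purpose: 'Marketing',
    preference: null, // maps straight to the purpose's enabled flag
    valueMapping: { yes: true, no: false, '': null }, // null means "skip"
  },
  newsletter_frequency: {
    purpose: 'Marketing',
    preference: 'NewsletterFrequency', // a Select preference topic
    valueMapping: { weekly: 'Weekly', monthly: 'Monthly' },
  },
};

// A row { marketing_opt_in: 'yes', newsletter_frequency: 'weekly' }
// would produce:
// {
//   Marketing: {
//     enabled: true,
//     preferences: [
//       { topic: 'NewsletterFrequency', choice: { selectValue: 'Weekly' } },
//     ],
//   },
// }
```

Unmapped non-empty cell values are an error; Boolean, Select, and MultiSelect topics are validated against the topic definitions, and multi-select values are split, mapped token by token, and sorted.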
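
Lines 3 and 4 add the checks that bucket each parsed row as skipped (a no-op), conflicting, or safe to upload. A sketch of the no-op check (minified as `we`): an update is skippable only when every purpose's enabled flag and every preference choice already match the stored consent record. The TopicType values here stand in for the PreferenceTopicType enum from @transcend-io/privacy-types; the actual enum values may differ:

```js
// Stand-ins for PreferenceTopicType (assumption, not the library's values).
const TopicType = {
  Boolean: 'Boolean',
  Select: 'Select',
  MultiSelect: 'MultiSelect',
};

function updateIsNoop({ currentConsentRecord, pendingUpdates, preferenceTopics }) {
  return Object.entries(pendingUpdates).every(
    ([purpose, { preferences = [], enabled }]) => {
      const stored = currentConsentRecord.purposes.find(
        (p) => p.purpose === purpose,
      );
      // A missing purpose or a differing enabled flag means "not a no-op".
      if (!stored || stored.enabled !== enabled) return false;
      return preferences.every(({ topic, choice }) =>
        (stored.preferences || []).some((storedPref) => {
          if (storedPref.topic !== topic) return false;
          const topicDef = preferenceTopics.find(
            (t) => t.slug === topic && t.purpose.trackingType === purpose,
          );
          if (!topicDef) {
            throw new Error(`Could not find preference topic for ${topic}`);
          }
          switch (topicDef.type) {
            case TopicType.Boolean:
              return storedPref.choice.booleanValue === choice.booleanValue;
            case TopicType.Select:
              return storedPref.choice.selectValue === choice.selectValue;
            case TopicType.MultiSelect: {
              // Multi-select choices compare as order-insensitive sets.
              const a = [...(storedPref.choice.selectValues || [])].sort();
              const b = [...(choice.selectValues || [])].sort();
              return a.length === b.length && a.every((v, i) => v === b[i]);
            }
            default:
              throw new Error(`Unknown preference topic type: ${topicDef.type}`);
          }
        }),
      );
    },
  );
}
```

The companion conflict check (minified as `he`) applies the same comparisons but reports any mismatch, optionally logging which purpose or topic disagrees; rows that are neither no-ops nor conflicts land in pendingSafeUpdates.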
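
Line 11 exports a cursor-paginated fetch (exports.c) that pages through v1/preferences/{partition}/query until the server stops returning a cursor. A sketch, assuming a got/ky-style HTTP client with a .post(url, { json }).json() interface and reusing retryWithBackoff from above; the bundle additionally decodes each page with an io-ts codec:

```js
async function fetchConsentPreferences(sombra, {
  partition,
  filterBy = {},
  limit = 50,
}) {
  const results = [];
  let cursor;
  const hasFilter =
    Object.keys(filterBy).length > 0 ||
    (filterBy.system && Object.keys(filterBy.system).length > 0);
  // The bundle clamps the page size to the range 1..50.
  const pageSize = Math.max(1, Math.min(50, limit ?? 50));
  for (;;) {
    const body = { limit: pageSize };
    if (hasFilter) body.filter = filterBy;
    if (cursor) body.cursor = cursor;
    const page = await retryWithBackoff(() =>
      sombra.post(`v1/preferences/${partition}/query`, { json: body }).json(),
    );
    const { nodes, cursor: nextCursor } = page;
    // Stop on an empty page; otherwise accumulate and follow the cursor.
    if (!nodes || nodes.length === 0) break;
    results.push(...nodes);
    if (!nextCursor) break;
    cursor = nextCursor;
  }
  return results;
}
```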
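
Finally, line 11's exports.b flattens a consent record into a single flat object for CSV output: identifier values are de-duplicated per name and comma-joined, metadata keys get a metadata_ prefix, and each purpose becomes a boolean column with `${purpose}_${topic}` columns for its preferences. A sketch with our naming; the shapes follow the bundled code:

```js
function flattenConsentRecord({
  identifiers = [],
  purposes = [],
  metadata = [],
  consentManagement = {},
  system = { decryptionStatus: 'DECRYPTED' },
  ...rest
}) {
  const row = { ...rest, ...system, ...consentManagement };

  // De-duplicate identifier values per name and comma-join them.
  const idValues = new Map();
  for (const { name, value } of identifiers) {
    if (!idValues.has(name)) idValues.set(name, new Set());
    if (value) idValues.get(name).add(value);
  }
  for (const [name, values] of idValues) row[name] = [...values].join(',');

  // Metadata columns get a `metadata_` prefix to avoid collisions.
  const metaValues = new Map();
  for (const { key, value } of metadata) {
    if (!metaValues.has(key)) metaValues.set(key, new Set());
    if (value) metaValues.get(key).add(value);
  }
  for (const [key, values] of metaValues) {
    row[`metadata_${key}`] = [...values].join(',');
  }

  // One boolean column per purpose, plus one column per preference topic.
  for (const { purpose, preferences, enabled } of purposes) {
    row[purpose] = !!enabled;
    if (!Array.isArray(preferences)) continue;
    for (const { topic, choice } of preferences) {
      const column = `${purpose}_${topic}`;
      if (Object.prototype.hasOwnProperty.call(choice, 'booleanValue')) {
        row[column] = !!choice.booleanValue;
      } else if (Object.prototype.hasOwnProperty.call(choice, 'selectValue')) {
        row[column] = String(choice.selectValue ?? '');
      } else if (Array.isArray(choice.selectValues)) {
        row[column] = choice.selectValues
          .map(String)
          .filter((v) => v.length > 0)
          .join(',');
      } else {
        row[column] = null;
      }
    }
  }
  return row;
}
```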