@transcend-io/cli 7.0.0-alpha.12 → 7.0.0-alpha.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin/bash-complete.js +4 -0
- package/dist/bin/bash-complete.js.map +1 -0
- package/dist/bin/cli.js +3 -0
- package/dist/bin/cli.js.map +1 -0
- package/dist/bin/deprecated-command.js +7 -0
- package/dist/bin/deprecated-command.js.map +1 -0
- package/dist/chunk-24SSWBXM.js +4 -0
- package/dist/chunk-24SSWBXM.js.map +1 -0
- package/dist/chunk-347UQP43.js +2 -0
- package/dist/chunk-347UQP43.js.map +1 -0
- package/dist/chunk-43JWXG77.js +2 -0
- package/dist/chunk-43JWXG77.js.map +1 -0
- package/dist/chunk-4GLITB3Y.js +2 -0
- package/dist/chunk-4GLITB3Y.js.map +1 -0
- package/dist/chunk-6P4FW6XR.js +3 -0
- package/dist/chunk-6P4FW6XR.js.map +1 -0
- package/dist/chunk-72U6ETHG.js +2 -0
- package/dist/chunk-72U6ETHG.js.map +1 -0
- package/dist/chunk-7QHA6ZIV.js +2 -0
- package/dist/chunk-7QHA6ZIV.js.map +1 -0
- package/dist/chunk-ARVEJERC.js +2 -0
- package/dist/chunk-ARVEJERC.js.map +1 -0
- package/dist/chunk-CBAHSBSW.js +2 -0
- package/dist/chunk-CBAHSBSW.js.map +1 -0
- package/dist/chunk-HH2PQ3PQ.js +2 -0
- package/dist/chunk-HH2PQ3PQ.js.map +1 -0
- package/dist/chunk-INLBXSQE.js +9 -0
- package/dist/chunk-INLBXSQE.js.map +1 -0
- package/dist/chunk-KRN6Q433.js +75 -0
- package/dist/chunk-KRN6Q433.js.map +1 -0
- package/dist/chunk-L5ULN3IT.js +2 -0
- package/dist/chunk-L5ULN3IT.js.map +1 -0
- package/dist/chunk-L7ZIX4SU.js +2 -0
- package/dist/chunk-L7ZIX4SU.js.map +1 -0
- package/dist/chunk-LAYHULHH.js +2 -0
- package/dist/chunk-LAYHULHH.js.map +1 -0
- package/dist/chunk-MA4JWWRO.js +6 -0
- package/dist/chunk-MA4JWWRO.js.map +1 -0
- package/dist/chunk-MVDOKJ6J.js +2 -0
- package/dist/chunk-MVDOKJ6J.js.map +1 -0
- package/dist/chunk-OEB7WG3G.js +4 -0
- package/dist/chunk-OEB7WG3G.js.map +1 -0
- package/dist/chunk-SF46ZLPT.js +2 -0
- package/dist/chunk-SF46ZLPT.js.map +1 -0
- package/dist/chunk-TDBKATQK.js +2831 -0
- package/dist/chunk-TDBKATQK.js.map +1 -0
- package/dist/chunk-WSDWILYI.js +2 -0
- package/dist/chunk-WSDWILYI.js.map +1 -0
- package/dist/chunk-XNR74SBS.js +12 -0
- package/dist/chunk-XNR74SBS.js.map +1 -0
- package/dist/chunk-ZLRUIEVQ.js +94 -0
- package/dist/chunk-ZLRUIEVQ.js.map +1 -0
- package/dist/chunk-ZTD7APNF.js +2 -0
- package/dist/chunk-ZTD7APNF.js.map +1 -0
- package/dist/impl-25VWUB6L.js +2 -0
- package/dist/impl-25VWUB6L.js.map +1 -0
- package/dist/impl-3M5R6G5M.js +6 -0
- package/dist/impl-3M5R6G5M.js.map +1 -0
- package/dist/impl-5OEPVWPL.js +2 -0
- package/dist/impl-5OEPVWPL.js.map +1 -0
- package/dist/impl-5YV7K446.js +2 -0
- package/dist/impl-5YV7K446.js.map +1 -0
- package/dist/impl-AFRHPZGF.js +2 -0
- package/dist/impl-AFRHPZGF.js.map +1 -0
- package/dist/impl-CCUCFOCW.js +6 -0
- package/dist/impl-CCUCFOCW.js.map +1 -0
- package/dist/impl-E36SWF4Z.js +2 -0
- package/dist/impl-E36SWF4Z.js.map +1 -0
- package/dist/impl-E5WXNV47.js +2 -0
- package/dist/impl-E5WXNV47.js.map +1 -0
- package/dist/impl-EVICJMI3.js +2 -0
- package/dist/impl-EVICJMI3.js.map +1 -0
- package/dist/impl-G5TGSB4H.js +2 -0
- package/dist/impl-G5TGSB4H.js.map +1 -0
- package/dist/impl-GNG2DOKG.js +2 -0
- package/dist/impl-GNG2DOKG.js.map +1 -0
- package/dist/impl-GNSHZ3OL.js +2 -0
- package/dist/impl-GNSHZ3OL.js.map +1 -0
- package/dist/impl-GPCURY4M.js +7 -0
- package/dist/impl-GPCURY4M.js.map +1 -0
- package/dist/impl-GZRQOFY6.js +2 -0
- package/dist/impl-GZRQOFY6.js.map +1 -0
- package/dist/impl-HEC3SVYP.js +2 -0
- package/dist/impl-HEC3SVYP.js.map +1 -0
- package/dist/impl-HH24GIMG.js +2 -0
- package/dist/impl-HH24GIMG.js.map +1 -0
- package/dist/impl-I24OLEN5.js +2 -0
- package/dist/impl-I24OLEN5.js.map +1 -0
- package/dist/impl-IAXNYDJT.js +2 -0
- package/dist/impl-IAXNYDJT.js.map +1 -0
- package/dist/impl-J33PI3PK.js +2 -0
- package/dist/impl-J33PI3PK.js.map +1 -0
- package/dist/impl-JZDUGI7W.js +2 -0
- package/dist/impl-JZDUGI7W.js.map +1 -0
- package/dist/impl-LZ3HI26W.js +4 -0
- package/dist/impl-LZ3HI26W.js.map +1 -0
- package/dist/impl-MEDPDKAE.js +2 -0
- package/dist/impl-MEDPDKAE.js.map +1 -0
- package/dist/impl-MLS6TI7N.js +2 -0
- package/dist/impl-MLS6TI7N.js.map +1 -0
- package/dist/impl-NI7KSBSS.js +2 -0
- package/dist/impl-NI7KSBSS.js.map +1 -0
- package/dist/impl-OM6EKANE.js +9 -0
- package/dist/impl-OM6EKANE.js.map +1 -0
- package/dist/impl-T4WDJSWZ.js +2 -0
- package/dist/impl-T4WDJSWZ.js.map +1 -0
- package/dist/impl-U37YTCPW.js +2 -0
- package/dist/impl-U37YTCPW.js.map +1 -0
- package/dist/impl-U5555HGJ.js +12 -0
- package/dist/impl-U5555HGJ.js.map +1 -0
- package/dist/impl-UHFSVVIS.js +6 -0
- package/dist/impl-UHFSVVIS.js.map +1 -0
- package/dist/impl-UIVTSO57.js +2 -0
- package/dist/impl-UIVTSO57.js.map +1 -0
- package/dist/impl-UQYL5PXR.js +2 -0
- package/dist/impl-UQYL5PXR.js.map +1 -0
- package/dist/impl-V5QTKTU4.js +2 -0
- package/dist/impl-V5QTKTU4.js.map +1 -0
- package/dist/impl-WDPWOOFV.js +2 -0
- package/dist/impl-WDPWOOFV.js.map +1 -0
- package/dist/impl-WZAF2LD3.js +2 -0
- package/dist/impl-WZAF2LD3.js.map +1 -0
- package/dist/impl-XF26H3HG.js +2 -0
- package/dist/impl-XF26H3HG.js.map +1 -0
- package/dist/impl-XQY2Q5R6.js +2 -0
- package/dist/impl-XQY2Q5R6.js.map +1 -0
- package/dist/impl-YB2LON7S.js +2 -0
- package/dist/impl-YB2LON7S.js.map +1 -0
- package/dist/impl-YNGQIWW7.js +2 -0
- package/dist/impl-YNGQIWW7.js.map +1 -0
- package/dist/impl-ZA3PKNQN.js +2 -0
- package/dist/impl-ZA3PKNQN.js.map +1 -0
- package/dist/{index.d.cts → index.d.ts} +1 -1
- package/dist/index.js +5 -0
- package/dist/index.js.map +1 -0
- package/package.json +45 -46
- package/dist/bin/bash-complete.cjs +0 -4
- package/dist/bin/bash-complete.cjs.map +0 -1
- package/dist/bin/cli.cjs +0 -3
- package/dist/bin/cli.cjs.map +0 -1
- package/dist/bin/deprecated-command.cjs +0 -7
- package/dist/bin/deprecated-command.cjs.map +0 -1
- package/dist/chunk-BRVWR44K.cjs +0 -2
- package/dist/chunk-BRVWR44K.cjs.map +0 -1
- package/dist/chunk-BY7W4UQF.cjs +0 -2
- package/dist/chunk-BY7W4UQF.cjs.map +0 -1
- package/dist/chunk-CX2GRUPB.cjs +0 -2
- package/dist/chunk-CX2GRUPB.cjs.map +0 -1
- package/dist/chunk-DQHCGJTR.cjs +0 -2
- package/dist/chunk-DQHCGJTR.cjs.map +0 -1
- package/dist/chunk-EG4L6YAJ.cjs +0 -2
- package/dist/chunk-EG4L6YAJ.cjs.map +0 -1
- package/dist/chunk-IBTP5OXE.cjs +0 -2
- package/dist/chunk-IBTP5OXE.cjs.map +0 -1
- package/dist/chunk-JC7VDPVP.cjs +0 -2831
- package/dist/chunk-JC7VDPVP.cjs.map +0 -1
- package/dist/chunk-KAE73AXX.cjs +0 -2
- package/dist/chunk-KAE73AXX.cjs.map +0 -1
- package/dist/chunk-KEXUFX2J.cjs +0 -12
- package/dist/chunk-KEXUFX2J.cjs.map +0 -1
- package/dist/chunk-KOV2SQO2.cjs +0 -4
- package/dist/chunk-KOV2SQO2.cjs.map +0 -1
- package/dist/chunk-LOOIAAAW.cjs +0 -9
- package/dist/chunk-LOOIAAAW.cjs.map +0 -1
- package/dist/chunk-OKOJP5XU.cjs +0 -94
- package/dist/chunk-OKOJP5XU.cjs.map +0 -1
- package/dist/chunk-ORNBWSZL.cjs +0 -2
- package/dist/chunk-ORNBWSZL.cjs.map +0 -1
- package/dist/chunk-QJYHSHFA.cjs +0 -2
- package/dist/chunk-QJYHSHFA.cjs.map +0 -1
- package/dist/chunk-SAEKBZGF.cjs +0 -2
- package/dist/chunk-SAEKBZGF.cjs.map +0 -1
- package/dist/chunk-T462ONFX.cjs +0 -2
- package/dist/chunk-T462ONFX.cjs.map +0 -1
- package/dist/chunk-TD7ADMVO.cjs +0 -2
- package/dist/chunk-TD7ADMVO.cjs.map +0 -1
- package/dist/chunk-UEGX6GZ2.cjs +0 -2
- package/dist/chunk-UEGX6GZ2.cjs.map +0 -1
- package/dist/chunk-URT6VVOK.cjs +0 -3
- package/dist/chunk-URT6VVOK.cjs.map +0 -1
- package/dist/chunk-UYYOVK3W.cjs +0 -2
- package/dist/chunk-UYYOVK3W.cjs.map +0 -1
- package/dist/chunk-X4YTPQVY.cjs +0 -4
- package/dist/chunk-X4YTPQVY.cjs.map +0 -1
- package/dist/chunk-ZJDLK7C3.cjs +0 -75
- package/dist/chunk-ZJDLK7C3.cjs.map +0 -1
- package/dist/chunk-ZUNVPK23.cjs +0 -2
- package/dist/chunk-ZUNVPK23.cjs.map +0 -1
- package/dist/chunk-ZVK4HIDF.cjs +0 -6
- package/dist/chunk-ZVK4HIDF.cjs.map +0 -1
- package/dist/impl-2DZ5OV74.cjs +0 -2
- package/dist/impl-2DZ5OV74.cjs.map +0 -1
- package/dist/impl-2ILRPUCC.cjs +0 -9
- package/dist/impl-2ILRPUCC.cjs.map +0 -1
- package/dist/impl-2LBSGBBL.cjs +0 -2
- package/dist/impl-2LBSGBBL.cjs.map +0 -1
- package/dist/impl-3NMEM4QJ.cjs +0 -2
- package/dist/impl-3NMEM4QJ.cjs.map +0 -1
- package/dist/impl-3QGL5KFO.cjs +0 -2
- package/dist/impl-3QGL5KFO.cjs.map +0 -1
- package/dist/impl-56MNYVA5.cjs +0 -2
- package/dist/impl-56MNYVA5.cjs.map +0 -1
- package/dist/impl-6PKXIPAW.cjs +0 -2
- package/dist/impl-6PKXIPAW.cjs.map +0 -1
- package/dist/impl-73JOMLRW.cjs +0 -2
- package/dist/impl-73JOMLRW.cjs.map +0 -1
- package/dist/impl-ADTYWN4O.cjs +0 -2
- package/dist/impl-ADTYWN4O.cjs.map +0 -1
- package/dist/impl-BOLY4EOP.cjs +0 -7
- package/dist/impl-BOLY4EOP.cjs.map +0 -1
- package/dist/impl-DUSKH5V5.cjs +0 -2
- package/dist/impl-DUSKH5V5.cjs.map +0 -1
- package/dist/impl-F6IWO7FD.cjs +0 -2
- package/dist/impl-F6IWO7FD.cjs.map +0 -1
- package/dist/impl-GHDROQMO.cjs +0 -2
- package/dist/impl-GHDROQMO.cjs.map +0 -1
- package/dist/impl-HEJP2URY.cjs +0 -2
- package/dist/impl-HEJP2URY.cjs.map +0 -1
- package/dist/impl-IQ7A5Z4D.cjs +0 -6
- package/dist/impl-IQ7A5Z4D.cjs.map +0 -1
- package/dist/impl-JAJVO3ZW.cjs +0 -2
- package/dist/impl-JAJVO3ZW.cjs.map +0 -1
- package/dist/impl-JF27LEV3.cjs +0 -2
- package/dist/impl-JF27LEV3.cjs.map +0 -1
- package/dist/impl-JODNLRWN.cjs +0 -6
- package/dist/impl-JODNLRWN.cjs.map +0 -1
- package/dist/impl-KJUS5YHL.cjs +0 -6
- package/dist/impl-KJUS5YHL.cjs.map +0 -1
- package/dist/impl-KOKG6ZNB.cjs +0 -2
- package/dist/impl-KOKG6ZNB.cjs.map +0 -1
- package/dist/impl-KWJ7DKLD.cjs +0 -2
- package/dist/impl-KWJ7DKLD.cjs.map +0 -1
- package/dist/impl-LNCNTGHP.cjs +0 -2
- package/dist/impl-LNCNTGHP.cjs.map +0 -1
- package/dist/impl-MHS2Q5XQ.cjs +0 -2
- package/dist/impl-MHS2Q5XQ.cjs.map +0 -1
- package/dist/impl-MUMMGTPH.cjs +0 -2
- package/dist/impl-MUMMGTPH.cjs.map +0 -1
- package/dist/impl-NJ7B53DT.cjs +0 -2
- package/dist/impl-NJ7B53DT.cjs.map +0 -1
- package/dist/impl-OKINLHAG.cjs +0 -2
- package/dist/impl-OKINLHAG.cjs.map +0 -1
- package/dist/impl-PAGIGWUU.cjs +0 -2
- package/dist/impl-PAGIGWUU.cjs.map +0 -1
- package/dist/impl-PKVWUYYX.cjs +0 -2
- package/dist/impl-PKVWUYYX.cjs.map +0 -1
- package/dist/impl-QBPIYO6K.cjs +0 -2
- package/dist/impl-QBPIYO6K.cjs.map +0 -1
- package/dist/impl-QUC5AKPQ.cjs +0 -2
- package/dist/impl-QUC5AKPQ.cjs.map +0 -1
- package/dist/impl-RMVV72AB.cjs +0 -2
- package/dist/impl-RMVV72AB.cjs.map +0 -1
- package/dist/impl-S5EXF3MB.cjs +0 -2
- package/dist/impl-S5EXF3MB.cjs.map +0 -1
- package/dist/impl-SJGNXCXR.cjs +0 -4
- package/dist/impl-SJGNXCXR.cjs.map +0 -1
- package/dist/impl-SYM6RYJP.cjs +0 -2
- package/dist/impl-SYM6RYJP.cjs.map +0 -1
- package/dist/impl-UTZGKHAD.cjs +0 -2
- package/dist/impl-UTZGKHAD.cjs.map +0 -1
- package/dist/impl-VI3JMHFN.cjs +0 -2
- package/dist/impl-VI3JMHFN.cjs.map +0 -1
- package/dist/impl-VIX7AWHA.cjs +0 -2
- package/dist/impl-VIX7AWHA.cjs.map +0 -1
- package/dist/impl-W42Y6L6N.cjs +0 -2
- package/dist/impl-W42Y6L6N.cjs.map +0 -1
- package/dist/impl-XE5EV5SW.cjs +0 -12
- package/dist/impl-XE5EV5SW.cjs.map +0 -1
- package/dist/index.cjs +0 -5
- package/dist/index.cjs.map +0 -1
- /package/dist/bin/{bash-complete.d.cts → bash-complete.d.ts} +0 -0
- /package/dist/bin/{cli.d.cts → cli.d.ts} +0 -0
- /package/dist/bin/{deprecated-command.d.cts → deprecated-command.d.ts} +0 -0
@@ -1 +0,0 @@
{"version":3,"sources":["../package.json"],"names":["description","version"],"mappings":"AAGE,6EAAAA,CAAAA,CAAe,wEAAA,CACfC,CAAAA,aAAW,gBAAA,CAAA,6BAAA","file":"/Users/benbrook/transcend/cli/dist/chunk-UYYOVK3W.cjs","sourcesContent":["{\n \"author\": \"Transcend Inc.\",\n \"name\": \"@transcend-io/cli\",\n \"description\": \"A command line interface for programmatic operations across Transcend.\",\n \"version\": \"7.0.0-alpha.12\",\n \"homepage\": \"https://github.com/transcend-io/cli\",\n \"repository\": {\n \"type\": \"git\",\n \"url\": \"https://github.com/transcend-io/cli.git\"\n },\n \"type\": \"module\",\n \"license\": \"UNLICENSED\",\n \"main\": \"dist/index.cjs\",\n \"types\": \"dist/index.d.cts\",\n \"bin\": {\n \"@transcend-io/transcend\": \"dist/bin/cli.cjs\",\n \"__cli_bash_complete\": \"dist/bin/bash-complete.cjs\",\n \"__tr-build-xdi-sync-endpoint\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-consent-manager-service-json-to-yml\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-consent-managers-to-business-entities\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-cron-mark-identifiers-completed\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-cron-pull-identifiers\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-cron-pull-profiles\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-derive-data-silos-from-data-flows\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-derive-data-silos-from-data-flows-cross-instance\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-discover-silos\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-generate-api-keys\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-manual-enrichment-pull-identifiers\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-manual-enrichment-push-identifiers\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-mark-request-data-silos-completed\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-pull\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-pull-consent-metrics\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-pull-consent-preferences\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-pull-datapoints\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-pull-unstructured-discovery-files\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-push\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-request-approve\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-request-cancel\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-request-download-files\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-request-enricher-restart\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-request-export\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-request-mark-silent\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-request-notify-additional-time\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-request-reject-unverified-identifiers\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-request-restart\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-request-upload\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-retry-request-data-silos\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-scan-packages\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-skip-request-data-silos\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-sync-ot\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-update-consent-manager\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-upload-consent-preferences\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-upload-cookies-from-csv\": \"dist/bin/deprecated-command.cjs\",\n \"__tr-upload-data-flows-from-csv\": \"dist/bin/deprecated-command.cjs\",\n 
\"__tr-upload-preferences\": \"dist/bin/deprecated-command.cjs\"\n },\n \"engines\": {\n \"node\": \">=18\"\n },\n \"files\": [\n \"dist\"\n ],\n \"scripts\": {\n \"start\": \"./dist/bin/cli.cjs\",\n \"prebuild\": \"tsc -p tsconfig.json\",\n \"build\": \"tsup\",\n \"build:watch\": \"tsup --watch\",\n \"prepublishOnly\": \"pnpm lint && pnpm build && pnpm test && pnpm publint\",\n \"lint\": \"pnpm run --parallel --aggregate-output \\\"/^lint:*/\\\"\",\n \"lint:prettier\": \"prettier --check . --log-level warn\",\n \"lint:types\": \"tsc --noEmit\",\n \"lint:eslint\": \"eslint .\",\n \"format\": \"prettier --write .\",\n \"test\": \"vitest run\",\n \"script:transcend-json-schema\": \"tsx scripts/buildTranscendJsonSchema.ts && prettier ./transcend-yml-schema-*.json --write\",\n \"script:pathfinder-json-schema\": \"tsx scripts/buildPathfinderJsonSchema.ts && prettier ./pathfinder-policy-yml-schema.json --write\",\n \"script:build-readme-docs\": \"tsx scripts/buildReadmeDocs.ts\"\n },\n \"tsup\": {\n \"entry\": [\n \"src/bin/cli.ts\",\n \"src/bin/bash-complete.ts\",\n \"src/bin/deprecated-command.ts\",\n \"src/index.ts\"\n ],\n \"format\": [\n \"cjs\"\n ],\n \"sourcemap\": true,\n \"dts\": true,\n \"tsconfig\": \"tsconfig.json\",\n \"splitting\": true,\n \"clean\": true,\n \"minify\": true\n },\n \"dependencies\": {\n \"@stricli/auto-complete\": \"^1.2.0\",\n \"@stricli/core\": \"^1.2.0\",\n \"@transcend-io/airgap.js-types\": \"^12.12.1\",\n \"@transcend-io/handlebars-utils\": \"^1.1.0\",\n \"@transcend-io/internationalization\": \"^1.6.0\",\n \"@transcend-io/persisted-state\": \"^1.0.4\",\n \"@transcend-io/privacy-types\": \"^4.124.1\",\n \"@transcend-io/secret-value\": \"^1.2.0\",\n \"@transcend-io/type-utils\": \"^1.8.0\",\n \"JSONStream\": \"^1.3.5\",\n \"cli-progress\": \"^3.11.2\",\n \"colors\": \"^1.4.0\",\n \"csv-parse\": \"^5.6.0\",\n \"fast-csv\": \"^4.3.6\",\n \"fast-glob\": \"^3.2.12\",\n \"fp-ts\": \"^2.16.1\",\n \"fuzzysearch\": \"^1.0.3\",\n \"global-agent\": \"^3.0.0\",\n \"got\": \"^11.8.5\",\n \"graphql-request\": \"^5.0.0\",\n \"inquirer\": \"=7.3.3\",\n \"inquirer-autocomplete-prompt\": \"=1.3.0\",\n \"io-ts\": \"^2.2.21\",\n \"io-ts-types\": \"^0.5.16\",\n \"js-yaml\": \"^4.1.0\",\n \"jsonwebtoken\": \"^9.0.2\",\n \"lodash-es\": \"^4.17.21\",\n \"query-string\": \"=7.0.0\",\n \"semver\": \"^7.6.0\",\n \"undici\": \"^5.22.1\",\n \"yargs-parser\": \"^21.1.1\"\n },\n \"devDependencies\": {\n \"@types/JSONStream\": \"npm:@types/jsonstream@^0.8.33\",\n \"@types/cli-progress\": \"^3.11.0\",\n \"@types/colors\": \"^1.2.1\",\n \"@types/fuzzysearch\": \"^1.0.0\",\n \"@types/global-agent\": \"^2.1.1\",\n \"@types/inquirer\": \"^7.3.1\",\n \"@types/inquirer-autocomplete-prompt\": \"^3.0.0\",\n \"@types/js-yaml\": \"^4.0.5\",\n \"@types/json-schema\": \"^7.0.15\",\n \"@types/jsonwebtoken\": \"^9\",\n \"@types/lodash-es\": \"^4.17.12\",\n \"@types/node\": \"^18.15.11\",\n \"@types/semver\": \"^7\",\n \"@types/yargs-parser\": \"^21.0.0\",\n \"@typescript-eslint/eslint-plugin\": \"^5.58.0\",\n \"@typescript-eslint/parser\": \"^5.58.0\",\n \"depcheck\": \"^1.4.3\",\n \"doctoc\": \"^2.2.1\",\n \"eslint\": \"^8.38.0\",\n \"eslint-config-airbnb-base\": \"^15.0.0\",\n \"eslint-import-resolver-typescript\": \"^3.5.5\",\n \"eslint-plugin-eslint-comments\": \"^3.2.0\",\n \"eslint-plugin-import\": \"2.27.5\",\n \"eslint-plugin-jsdoc\": \"^41.1.1\",\n \"fdir\": \"^6.4.6\",\n \"prettier\": \"^2.8.7\",\n \"publint\": \"^0.3.12\",\n \"tsup\": \"^8.5.0\",\n \"tsx\": \"^4.20.3\",\n \"typescript\": 
\"^5.0.4\",\n \"vite-tsconfig-paths\": \"^5.1.4\",\n \"vitest\": \"^3.2.4\"\n },\n \"packageManager\": \"pnpm@10.12.4+sha512.5ea8b0deed94ed68691c9bad4c955492705c5eeb8a87ef86bc62c74a26b037b08ff9570f108b2e4dbd1dd1a9186fea925e527f141c648e85af45631074680184\"\n}\n"]}
package/dist/chunk-X4YTPQVY.cjs
DELETED
@@ -1,4 +0,0 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkCX2GRUPBcjs = require('./chunk-CX2GRUPB.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _fs = require('fs');var _typeutils = require('@transcend-io/type-utils');var _privacytypes = require('@transcend-io/privacy-types');var N=/target ('|")(.*?)('|")/,O=/pod ('|")(.*?)('|")(, ('|")~> (.+?)('|")|)/,y={supportedFiles:["Podfile"],ignoreDirs:["Pods"],scanFunction:e=>{let n=_fs.readFileSync.call(void 0, e,"utf-8"),o=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(N,"g"),matches:["quote1","name","quote2"]},n),c=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(O,"g"),matches:["quote1","name","quote2","extra","quote3","version","quote4"]},n);return o.map((t,p)=>({name:t.name,type:_privacytypes.CodePackageType.CocoaPods,softwareDevelopmentKits:c.filter(r=>r.matchIndex>t.matchIndex&&(!o[p+1]||r.matchIndex<o[p+1].matchIndex)).map(r=>({name:r.name,version:r.version}))}))}};var _path = require('path');var K=/implementation( *)('|")(.+?):(.+?):(.+?|)('|")/,M=/apply plugin: *('|")(.+?)(:(.+?)|)('|")/,j=/implementation group:( *)('|")(.+?)('|"),( *)name:( *)('|")(.+?)('|"),( *)version:( *)('|")(.+?)('|")/,L=/applicationId( *)"(.+?)"/,S={supportedFiles:["build.gradle**"],ignoreDirs:["gradle-app.setting","gradle-wrapper.jar","gradle-wrapper.properties"],scanFunction:e=>{let n=_fs.readFileSync.call(void 0, e,"utf-8"),o=_path.dirname.call(void 0, e),c=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(K,"g"),matches:["space","quote1","name","path","version","quote2"]},n),a=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(M,"g"),matches:["quote1","name","group","version","quote2"]},n),t=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(j,"g"),matches:["space1","quote1","group","quote2","space2","space3","quote3","name","quote4","space4","space5","quote5","version","quote6"]},n),p=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(L,"g"),matches:["space","name"]},n);if(p.length>1)throw new Error(`Expected only one applicationId per file: ${e}`);return[{name:_optionalChain([p, 'access', _2 => _2[0], 'optionalAccess', _3 => _3.name])||o.split("/").pop(),softwareDevelopmentKits:[...c,...t,...a].map(r=>({name:r.name,version:r.version||void 0}))}]}};var x={supportedFiles:["package.json"],ignoreDirs:["node_modules","serverless-build","lambda-build"],scanFunction:e=>{let n=_fs.readFileSync.call(void 0, e,"utf-8"),o=_path.dirname.call(void 0, 
e),c=JSON.parse(n),{name:a,description:t,dependencies:p={},devDependencies:r={},optionalDependencies:i={}}=c;return[{name:a||o.split("/").pop(),description:t,softwareDevelopmentKits:[...Object.entries(p).map(([s,m])=>({name:s,version:typeof m=="string"?m:void 0})),...Object.entries(r).map(([s,m])=>({name:s,version:typeof m=="string"?m:void 0,isDevDependency:!0})),...Object.entries(i).map(([s,m])=>({name:s,version:typeof m=="string"?m:void 0}))]}]}};var V=/(.+?)(=+)(.+)/,H=/name *= *('|")(.+?)('|")/,Q=/description *= *('|")(.+?)('|")/,P={supportedFiles:["requirements.txt"],ignoreDirs:["build","lib","lib64"],scanFunction:e=>{let n=_fs.readFileSync.call(void 0, e,"utf-8"),o=_path.dirname.call(void 0, e),a=_chunkCX2GRUPBcjs.c.call(void 0, o).find(s=>s==="setup.py"),t=a?_fs.readFileSync.call(void 0, _path.join.call(void 0, o,a),"utf-8"):void 0,p=t?(H.exec(t)||[])[2]:void 0,r=t?(Q.exec(t)||[])[2]:void 0,i=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(V,"g"),matches:["name","equals","version"]},n);return[{name:p||o.split("/").pop(),description:r||void 0,type:_privacytypes.CodePackageType.RequirementsTxt,softwareDevelopmentKits:i.map(s=>({name:s.name,version:s.version}))}]}};var ee=/gem *('|")(.+?)('|")(, *('|")(.+?)('|")|)/,ne=/spec\.name *= *('|")(.+?)('|")/,oe=/spec\.description *= *('|")(.+?)('|")/,te=/spec\.summary *= *('|")(.+?)('|")/,k={supportedFiles:["Gemfile"],ignoreDirs:["bin"],scanFunction:e=>{let n=_fs.readFileSync.call(void 0, e,"utf-8"),o=_path.dirname.call(void 0, e),a=_chunkCX2GRUPBcjs.c.call(void 0, o).find(s=>s===".gemspec"),t=a?_fs.readFileSync.call(void 0, a,"utf-8"):void 0,p=t?(ne.exec(t)||[])[2]:void 0,r=t?(oe.exec(t)||te.exec(t)||[])[1]:void 0,i=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(ee,"g"),matches:["quote1","name","quote2","hasVersion","quote3","version","quote4"]},n);return[{name:p||o.split("/").pop(),description:r||void 0,type:_privacytypes.CodePackageType.RequirementsTxt,softwareDevelopmentKits:i.map(s=>({name:s.name,version:s.version}))}]}};var _jsyaml = require('js-yaml'); var _jsyaml2 = _interopRequireDefault(_jsyaml);function pe(e){return e.split(`
`).map(n=>{let o=n.indexOf("#");return o>-1&&!n.substring(0,o).includes('"')&&!n.substring(0,o).includes("'")?n.substring(0,o).trim():n}).filter(n=>n.length>0).join(`
`)}var v={supportedFiles:["pubspec.yml"],ignoreDirs:["build"],scanFunction:e=>{let n=_path.dirname.call(void 0, e),o=_fs.readFileSync.call(void 0, e,"utf-8"),{name:c,description:a,dev_dependencies:t={},dependencies:p={}}=_jsyaml2.default.load(pe(o));return[{name:c||n.split("/").pop(),description:a,type:_privacytypes.CodePackageType.RequirementsTxt,softwareDevelopmentKits:[...Object.entries(p).map(([r,i])=>({name:r,version:typeof i=="string"?i:typeof i=="number"?i.toString():_optionalChain([i, 'optionalAccess', _4 => _4.sdk])})),...Object.entries(t).map(([r,i])=>({name:r,version:typeof i=="string"?i:typeof i=="number"?i.toString():_optionalChain([i, 'optionalAccess', _5 => _5.sdk]),isDevDependency:!0}))]}]}};var I={supportedFiles:["composer.json"],ignoreDirs:["vendor","node_modules","cache","build","dist"],scanFunction:e=>{let n=_fs.readFileSync.call(void 0, e,"utf-8"),o=_path.dirname.call(void 0, e),c=JSON.parse(n),{name:a,description:t,require:p={},"require-dev":r={}}=c;return[{name:a||o.split("/").pop(),description:t,softwareDevelopmentKits:[...Object.entries(p).map(([i,s])=>({name:i,version:typeof s=="string"?s:void 0})),...Object.entries(r).map(([i,s])=>({name:i,version:typeof s=="string"?s:void 0,isDevDependency:!0}))]}]}};var _iots = require('io-ts'); var d = _interopRequireWildcard(_iots);var le=d.type({pins:d.array(d.type({identity:d.string,kind:d.string,location:d.string,state:d.type({revision:d.string,version:d.string})})),version:d.number}),D={supportedFiles:["Package.resolved"],ignoreDirs:[],scanFunction:e=>{let n=_fs.readFileSync.call(void 0, e,"utf-8"),o=_typeutils.decodeCodec.call(void 0, le,n);return[{name:_path.dirname.call(void 0, e).split("/").pop()||"",type:_privacytypes.CodePackageType.CocoaPods,softwareDevelopmentKits:o.pins.map(c=>({name:c.identity,version:c.state.version}))}]}};var gn={cocoaPods:y,gradle:S,javascriptPackageJson:x,pythonRequirementsTxt:P,gemfile:k,pubspec:v,swift:D},_={[_privacytypes.CodePackageType.CocoaPods]:y,[_privacytypes.CodePackageType.Gradle]:S,[_privacytypes.CodePackageType.PackageJson]:x,[_privacytypes.CodePackageType.RequirementsTxt]:P,[_privacytypes.CodePackageType.Gemfile]:k,[_privacytypes.CodePackageType.Pubspec]:v,[_privacytypes.CodePackageType.ComposerJson]:I,[_privacytypes.CodePackageType.Swift]:D};var _fastglob = require('fast-glob'); var _fastglob2 = _interopRequireDefault(_fastglob);var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function Sn({scanPath:e,ignoreDirs:n=[],repositoryName:o}){return(await Promise.all(_typeutils.getEntries.call(void 0, _).map(async([a,t])=>{let{ignoreDirs:p,supportedFiles:r,scanFunction:i}=t,s=[...n,...p].filter(m=>m.length>0);try{let m=await _fastglob2.default.call(void 0, `${e}/**/${r.join("|")}`,{ignore:s.map(g=>`${e}/**/${g}`),unique:!0,onlyFiles:!0});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Scanning: ${m.length} files of type ${a}`));let C=m.map(g=>i(g).map(q=>({...q,relativePath:g.replace(`${e}/`,"")}))).flat();return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Found: ${C.length} packages and ${C.map(({softwareDevelopmentKits:g=[]})=>g).flat().length} sdks`)),C.map(g=>({...g,type:a,repositoryName:o}))}catch(m){throw new Error(`Error scanning globs ${r} with error: ${m}`)}}))).flat()}exports.a = gn; exports.b = Sn;
//# sourceMappingURL=chunk-X4YTPQVY.cjs.map
package/dist/chunk-X4YTPQVY.cjs.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["/Users/benbrook/transcend/cli/dist/chunk-X4YTPQVY.cjs","../src/lib/code-scanning/integrations/cocoaPods.ts","../src/lib/code-scanning/integrations/gradle.ts","../src/lib/code-scanning/integrations/pubspec.ts","../src/lib/code-scanning/constants.ts"],"names":["POD_TARGET_REGEX","POD_PACKAGE_REGEX","cocoaPods","filePath","fileContents","readFileSync","targets","findAllWithRegex","packages","target","ind","CodePackageType","pkg","GRADLE_IMPLEMENTATION_REGEX","GRADLE_PLUGIN_REGEX","GRADLE_IMPLEMENTATION_GROUP_REGEX","GRADLE_APPLICATION_NAME_REGEX","gradle","directory","dirname","targetPlugins","targetGroups","applications"],"mappings":"AAAA,u/BAAwC,wDAAyC,wBCApD,qDAGI,2DACD,IAE1BA,CAAAA,CAAmB,wBAAA,CACnBC,CAAAA,CAAoB,4CAAA,CAEbC,CAAAA,CAAgC,CAC3C,cAAA,CAAgB,CAAC,SAAS,CAAA,CAC1B,UAAA,CAAY,CAAC,MAAM,CAAA,CACnB,YAAA,CAAeC,CAAAA,EAAa,CAC1B,IAAMC,CAAAA,CAAeC,8BAAAA,CAAaF,CAAU,OAAO,CAAA,CAE7CG,CAAAA,CAAUC,yCAAAA,CAEZ,KAAA,CAAO,IAAI,MAAA,CAAOP,CAAAA,CAAkB,GAAG,CAAA,CACvC,OAAA,CAAS,CAAC,QAAA,CAAU,MAAA,CAAQ,QAAQ,CACtC,CAAA,CACAI,CACF,CAAA,CACMI,CAAAA,CAAWD,yCAAAA,CAEb,KAAA,CAAO,IAAI,MAAA,CAAON,CAAAA,CAAmB,GAAG,CAAA,CACxC,OAAA,CAAS,CACP,QAAA,CACA,MAAA,CACA,QAAA,CACA,OAAA,CACA,QAAA,CACA,SAAA,CACA,QACF,CACF,CAAA,CACAG,CACF,CAAA,CAiBA,OAf+BE,CAAAA,CAAQ,GAAA,CAAI,CAACG,CAAAA,CAAQC,CAAAA,CAAAA,EAAAA,CAAS,CAC3D,IAAA,CAAMD,CAAAA,CAAO,IAAA,CACb,IAAA,CAAME,6BAAAA,CAAgB,SAAA,CACtB,uBAAA,CAAyBH,CAAAA,CACtB,MAAA,CACEI,CAAAA,EACCA,CAAAA,CAAI,UAAA,CAAaH,CAAAA,CAAO,UAAA,EAAA,CACvB,CAACH,CAAAA,CAAQI,CAAAA,CAAM,CAAC,CAAA,EAAKE,CAAAA,CAAI,UAAA,CAAaN,CAAAA,CAAQI,CAAAA,CAAM,CAAC,CAAA,CAAE,UAAA,CAC5D,CAAA,CACC,GAAA,CAAKE,CAAAA,EAAAA,CAAS,CACb,IAAA,CAAMA,CAAAA,CAAI,IAAA,CACV,OAAA,CAASA,CAAAA,CAAI,OACf,CAAA,CAAE,CACN,CAAA,CAAE,CAGJ,CACF,CAAA,CCvDA,4BAGwB,IAElBC,CAAAA,CACJ,gDAAA,CACIC,CAAAA,CAAsB,yCAAA,CACtBC,CAAAA,CACJ,uGAAA,CACIC,CAAAA,CAAgC,0BAAA,CAYzBC,CAAAA,CAA6B,CACxC,cAAA,CAAgB,CAAC,gBAAgB,CAAA,CACjC,UAAA,CAAY,CACV,oBAAA,CACA,oBAAA,CACA,2BACF,CAAA,CACA,YAAA,CAAed,CAAAA,EAAa,CAC1B,IAAMC,CAAAA,CAAeC,8BAAAA,CAAaF,CAAU,OAAO,CAAA,CAC7Ce,CAAAA,CAAYC,2BAAAA,CAAgB,CAAA,CAE5Bb,CAAAA,CAAUC,yCAAAA,CAEZ,KAAA,CAAO,IAAI,MAAA,CAAOM,CAAAA,CAA6B,GAAG,CAAA,CAClD,OAAA,CAAS,CAAC,OAAA,CAAS,QAAA,CAAU,MAAA,CAAQ,MAAA,CAAQ,SAAA,CAAW,QAAQ,CAClE,CAAA,CACAT,CACF,CAAA,CACMgB,CAAAA,CAAgBb,yCAAAA,CAElB,KAAA,CAAO,IAAI,MAAA,CAAOO,CAAAA,CAAqB,GAAG,CAAA,CAC1C,OAAA,CAAS,CAAC,QAAA,CAAU,MAAA,CAAQ,OAAA,CAAS,SAAA,CAAW,QAAQ,CAC1D,CAAA,CACAV,CACF,CAAA,CACMiB,CAAAA,CAAed,yCAAAA,CAEjB,KAAA,CAAO,IAAI,MAAA,CAAOQ,CAAAA,CAAmC,GAAG,CAAA,CACxD,OAAA,CAAS,CACP,QAAA,CACA,QAAA,CACA,OAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,MAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,SAAA,CACA,QACF,CACF,CAAA,CACAX,CACF,CAAA,CACMkB,CAAAA,CAAef,yCAAAA,CAEjB,KAAA,CAAO,IAAI,MAAA,CAAOS,CAAAA,CAA+B,GAAG,CAAA,CACpD,OAAA,CAAS,CAAC,OAAA,CAAS,MAAM,CAC3B,CAAA,CACAZ,CACF,CAAA,CACA,EAAA,CAAIkB,CAAAA,CAAa,MAAA,CAAS,CAAA,CACxB,MAAM,IAAI,KAAA,CAAM,CAAA,0CAAA,EAA6CnB,CAAQ,CAAA,CAAA;AC/CjE;ACVR","file":"/Users/benbrook/transcend/cli/dist/chunk-X4YTPQVY.cjs","sourcesContent":[null,"import { readFileSync } from 'fs';\nimport { CodeScanningConfig } from '../types';\nimport { CodePackageSdk } from '../../../codecs';\nimport { findAllWithRegex } from '@transcend-io/type-utils';\nimport { CodePackageType } from '@transcend-io/privacy-types';\n\nconst POD_TARGET_REGEX = /target ('|\")(.*?)('|\")/;\nconst POD_PACKAGE_REGEX = /pod ('|\")(.*?)('|\")(, ('|\")~> (.+?)('|\")|)/;\n\nexport const cocoaPods: CodeScanningConfig = {\n supportedFiles: ['Podfile'],\n ignoreDirs: 
['Pods'],\n scanFunction: (filePath) => {\n const fileContents = readFileSync(filePath, 'utf-8');\n\n const targets = findAllWithRegex(\n {\n value: new RegExp(POD_TARGET_REGEX, 'g'),\n matches: ['quote1', 'name', 'quote2'],\n },\n fileContents,\n );\n const packages = findAllWithRegex(\n {\n value: new RegExp(POD_PACKAGE_REGEX, 'g'),\n matches: [\n 'quote1',\n 'name',\n 'quote2',\n 'extra',\n 'quote3',\n 'version',\n 'quote4',\n ],\n },\n fileContents,\n );\n\n const deps: CodePackageSdk[] = targets.map((target, ind) => ({\n name: target.name,\n type: CodePackageType.CocoaPods,\n softwareDevelopmentKits: packages\n .filter(\n (pkg) =>\n pkg.matchIndex > target.matchIndex &&\n (!targets[ind + 1] || pkg.matchIndex < targets[ind + 1].matchIndex),\n )\n .map((pkg) => ({\n name: pkg.name,\n version: pkg.version,\n })),\n }));\n\n return deps;\n },\n};\n","import { readFileSync } from 'fs';\nimport { CodeScanningConfig } from '../types';\nimport { findAllWithRegex } from '@transcend-io/type-utils';\nimport { dirname } from 'path';\n\nconst GRADLE_IMPLEMENTATION_REGEX =\n /implementation( *)('|\")(.+?):(.+?):(.+?|)('|\")/;\nconst GRADLE_PLUGIN_REGEX = /apply plugin: *('|\")(.+?)(:(.+?)|)('|\")/;\nconst GRADLE_IMPLEMENTATION_GROUP_REGEX =\n /implementation group:( *)('|\")(.+?)('|\"),( *)name:( *)('|\")(.+?)('|\"),( *)version:( *)('|\")(.+?)('|\")/;\nconst GRADLE_APPLICATION_NAME_REGEX = /applicationId( *)\"(.+?)\"/;\n\n/**\n * So far, there are three ways of defining dependencies that is supported\n * implementation group: 'org.eclipse.jdt', name: 'org.eclipse.jdt.core', version: '3.28.0'\n * or\n * implementation 'com.google.firebase:firebase-analytics:18.0.0'\n * or\n * apply plugin: 'com.google.gms.google-services'\n *\n * single and double quotes are both recognized\n */\nexport const gradle: CodeScanningConfig = {\n supportedFiles: ['build.gradle**'],\n ignoreDirs: [\n 'gradle-app.setting',\n 'gradle-wrapper.jar',\n 'gradle-wrapper.properties',\n ],\n scanFunction: (filePath) => {\n const fileContents = readFileSync(filePath, 'utf-8');\n const directory = dirname(filePath);\n\n const targets = findAllWithRegex(\n {\n value: new RegExp(GRADLE_IMPLEMENTATION_REGEX, 'g'),\n matches: ['space', 'quote1', 'name', 'path', 'version', 'quote2'],\n },\n fileContents,\n );\n const targetPlugins = findAllWithRegex(\n {\n value: new RegExp(GRADLE_PLUGIN_REGEX, 'g'),\n matches: ['quote1', 'name', 'group', 'version', 'quote2'],\n },\n fileContents,\n );\n const targetGroups = findAllWithRegex(\n {\n value: new RegExp(GRADLE_IMPLEMENTATION_GROUP_REGEX, 'g'),\n matches: [\n 'space1',\n 'quote1',\n 'group',\n 'quote2',\n 'space2',\n 'space3',\n 'quote3',\n 'name',\n 'quote4',\n 'space4',\n 'space5',\n 'quote5',\n 'version',\n 'quote6',\n ],\n },\n fileContents,\n );\n const applications = findAllWithRegex(\n {\n value: new RegExp(GRADLE_APPLICATION_NAME_REGEX, 'g'),\n matches: ['space', 'name'],\n },\n fileContents,\n );\n if (applications.length > 1) {\n throw new Error(`Expected only one applicationId per file: ${filePath}`);\n }\n\n return [\n {\n name: applications[0]?.name || directory.split('/').pop()!,\n softwareDevelopmentKits: [\n ...targets,\n ...targetGroups,\n ...targetPlugins,\n ].map((target) => ({\n name: target.name,\n version: target.version || undefined,\n })),\n },\n ];\n },\n};\n","import { readFileSync } from 'fs';\nimport { CodeScanningConfig } from '../types';\nimport { CodePackageType } from '@transcend-io/privacy-types';\nimport yaml from 'js-yaml';\nimport { dirname } from 
'path';\n\n/**\n * Remove YAML comments from a string\n *\n * @param yamlString - YAML string\n * @returns String without comments\n */\nfunction removeYAMLComments(yamlString: string): string {\n return yamlString\n .split('\\n')\n .map((line) => {\n // Remove inline comments\n const commentIndex = line.indexOf('#');\n if (commentIndex > -1) {\n // Check if '#' is not inside a string\n if (\n !line.substring(0, commentIndex).includes('\"') &&\n !line.substring(0, commentIndex).includes(\"'\")\n ) {\n return line.substring(0, commentIndex).trim();\n }\n }\n return line;\n })\n .filter((line) => line.length > 0)\n .join('\\n');\n}\n\nexport const pubspec: CodeScanningConfig = {\n supportedFiles: ['pubspec.yml'],\n ignoreDirs: ['build'],\n scanFunction: (filePath) => {\n const directory = dirname(filePath);\n const fileContents = readFileSync(filePath, 'utf-8');\n const {\n name,\n description,\n dev_dependencies = {},\n dependencies = {},\n } = yaml.load(removeYAMLComments(fileContents)) as {\n /** Name */\n name?: string;\n /** Description */\n description?: string;\n /** Dev dependencies */\n dev_dependencies?: { [k in string]: number | Record<string, string> };\n /** Dependencies */\n dependencies?: { [k in string]: number | Record<string, string> };\n };\n return [\n {\n name: name || directory.split('/').pop()!,\n description,\n type: CodePackageType.RequirementsTxt,\n softwareDevelopmentKits: [\n ...Object.entries(dependencies).map(([name, version]) => ({\n name,\n version:\n typeof version === 'string'\n ? version\n : typeof version === 'number'\n ? version.toString()\n : version?.sdk,\n })),\n ...Object.entries(dev_dependencies).map(([name, version]) => ({\n name,\n version:\n typeof version === 'string'\n ? version\n : typeof version === 'number'\n ? version.toString()\n : version?.sdk,\n isDevDependency: true,\n })),\n ],\n },\n ];\n },\n};\n","import { CodeScanningConfig } from './types';\nimport {\n cocoaPods,\n gradle,\n javascriptPackageJson,\n gemfile,\n composerJson,\n pubspec,\n swift,\n pythonRequirementsTxt,\n} from './integrations';\nimport { CodePackageType } from '@transcend-io/privacy-types';\n\n/**\n * @deprecated TODO: https://transcend.height.app/T-32325 - use code scanning instead\n */\nexport const SILO_DISCOVERY_CONFIGS: {\n [k in string]: CodeScanningConfig;\n} = {\n cocoaPods,\n gradle,\n javascriptPackageJson,\n pythonRequirementsTxt,\n gemfile,\n pubspec,\n swift,\n};\n\nexport const CODE_SCANNING_CONFIGS: {\n [k in CodePackageType]: CodeScanningConfig;\n} = {\n [CodePackageType.CocoaPods]: cocoaPods,\n [CodePackageType.Gradle]: gradle,\n [CodePackageType.PackageJson]: javascriptPackageJson,\n [CodePackageType.RequirementsTxt]: pythonRequirementsTxt,\n [CodePackageType.Gemfile]: gemfile,\n [CodePackageType.Pubspec]: pubspec,\n [CodePackageType.ComposerJson]: composerJson,\n [CodePackageType.Swift]: swift,\n};\n"]}
package/dist/chunk-ZJDLK7C3.cjs
DELETED
@@ -1,75 +0,0 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkJC7VDPVPcjs = require('./chunk-JC7VDPVP.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _lodashes = require('lodash-es');var _privacytypes = require('@transcend-io/privacy-types');var _cliprogress = require('cli-progress'); var _cliprogress2 = _interopRequireDefault(_cliprogress);var _graphqlrequest = require('graphql-request');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function q(u,{dataSiloIds:e=[],includeGuessedCategories:l,includeAttributes:a,parentCategories:c=[],subCategories:t=[],pageSize:p=1e3}={}){let n=[],m=new Date().getTime(),d=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),s={...c.length>0?{category:c}:{},...t.length>0?{subCategoryIds:t}:{},...c.length+t.length>0&&!l?{status:_privacytypes.SubDataPointDataSubCategoryGuessStatus.Approved}:{},...e.length>0?{dataSilos:e}:{}},{subDataPoints:{totalCount:o}}=await _chunkJC7VDPVPcjs._b.call(void 0, u,_chunkJC7VDPVPcjs.f,{filterBy:s});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("[Step 1/3] Pulling in all subdatapoints")),d.start(o,0);let y=0,D=!1,r,b=0;do try{let{subDataPoints:{nodes:P}}=await _chunkJC7VDPVPcjs._b.call(void 0, u,_graphqlrequest.gql`
query TranscendCliSubDataPointCsvExport(
$filterBy: SubDataPointFiltersInput
$first: Int!
$offset: Int!
) {
subDataPoints(
filterBy: $filterBy
first: $first
offset: $offset
useMaster: false
) {
nodes {
id
name
description
dataPointId
dataSiloId
purposes {
name
purpose
}
categories {
name
category
}
${l?`pendingCategoryGuesses {
category {
name
category
}
status
classifierVersion
}`:""}
${a?`attributeValues {
attributeKey {
name
}
name
}`:""}
}
}
}
`,{first:p,offset:b,filterBy:{...s}});r=_optionalChain([P, 'access', _2 => _2[P.length-1], 'optionalAccess', _3 => _3.id]),n.push(...P),D=P.length===p,y+=P.length,b+=P.length,d.update(y)}catch(P){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${r} and offset ${b}`)),P}while(D);d.stop();let C=new Date().getTime()-m,g=_lodashes.sortBy.call(void 0, n,"name");return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${g.length} subdatapoints in ${C/1e3} seconds!`)),g}async function F(u,{dataPointIds:e=[],pageSize:l=100}){let a=[],c=new Date().getTime(),t=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`[Step 2/3] Fetching metadata for ${e.length} datapoints`));let p=_lodashes.chunk.call(void 0, e,l);t.start(e.length,0);let n=0;await _chunkJC7VDPVPcjs.a.call(void 0, p,async s=>{try{let{dataPoints:{nodes:o}}=await _chunkJC7VDPVPcjs._b.call(void 0, u,_chunkJC7VDPVPcjs.i,{first:l,filterBy:{ids:s}});a.push(...o),n+=s.length,t.update(n)}catch(o){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for IDs ${s.join(", ")}`)),o}}),t.stop();let d=new Date().getTime()-c;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${a.length} dataPoints in ${d/1e3} seconds!`)),a}async function Q(u,{dataSiloIds:e=[],pageSize:l=100}){let a=[],c=new Date().getTime(),t=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`[Step 3/3] Fetching metadata for ${e.length} data silos`));let p=_lodashes.chunk.call(void 0, e,l);t.start(e.length,0);let n=0;await _chunkJC7VDPVPcjs.a.call(void 0, p,async s=>{try{let{dataSilos:{nodes:o}}=await _chunkJC7VDPVPcjs._b.call(void 0, u,_chunkJC7VDPVPcjs.l,{first:l,filterBy:{ids:s}});a.push(...o),n+=s.length,t.update(n)}catch(o){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching data silos for IDs ${s.join(", ")}`)),o}}),t.stop();let d=new Date().getTime()-c;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${a.length} data silos in ${d/1e3} seconds!`)),a}async function H(u,{dataSiloIds:e=[],includeGuessedCategories:l,includeAttributes:a,parentCategories:c=[],subCategories:t=[],pageSize:p=1e3}={}){let n=await q(u,{dataSiloIds:e,includeGuessedCategories:l,includeAttributes:a,parentCategories:c,subCategories:t,pageSize:p}),m=_lodashes.uniq.call(void 0, n.map(r=>r.dataPointId)),d=await F(u,{dataPointIds:m}),s=_lodashes.keyBy.call(void 0, d,"id"),o=_lodashes.uniq.call(void 0, n.map(r=>r.dataSiloId)),y=await Q(u,{dataSiloIds:o}),D=_lodashes.keyBy.call(void 0, y,"id");return n.map(r=>({...r,dataPoint:s[r.dataPointId],dataSilo:D[r.dataSiloId]}))}async function st(u,{dataSiloIds:e=[],status:l,subCategories:a=[],includeEncryptedSnippets:c,pageSize:t=100}={}){let p=[],n=new Date().getTime(),m=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),d={...a.length>0?{subCategoryIds:a}:{},...l?{status:l}:{},...e.length>0?{dataSilos:e}:{}},{unstructuredSubDataPointRecommendations:{totalCount:s}}=await _chunkJC7VDPVPcjs._b.call(void 0, u,_chunkJC7VDPVPcjs.j,{filterBy:d});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("[Step 1/3] Pulling in all subdatapoints")),m.start(s,0);let o=0,y=!1,D,r=0;do try{let{unstructuredSubDataPointRecommendations:{nodes:g}}=await _chunkJC7VDPVPcjs._b.call(void 0, u,_graphqlrequest.gql`
query TranscendCliUnstructuredSubDataPointRecommendationCsvExport(
$filterBy: UnstructuredSubDataPointRecommendationsFilterInput
$first: Int!
$offset: Int!
) {
unstructuredSubDataPointRecommendations(
filterBy: $filterBy
first: $first
offset: $offset
useMaster: false
) {
nodes {
id
dataSiloId
scannedObjectPathId
scannedObjectId
${c?"name":""}
${c?"contextSnippet":""}
dataSubCategory {
name
category
}
status
confidence
classificationMethod
classifierVersion
}
}
}
`,{first:t,offset:r,filterBy:{...d}});D=_optionalChain([g, 'access', _4 => _4[g.length-1], 'optionalAccess', _5 => _5.id]),p.push(...g),y=g.length===t,o+=g.length,r+=g.length,m.update(o)}catch(g){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${D} and offset ${r}`)),g}while(y);m.stop();let I=new Date().getTime()-n,C=_lodashes.sortBy.call(void 0, p,"name");return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${C.length} subdatapoints in ${I/1e3} seconds!`)),C}exports.a = H; exports.b = st;
//# sourceMappingURL=chunk-ZJDLK7C3.cjs.map
package/dist/chunk-ZJDLK7C3.cjs.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["/Users/benbrook/transcend/cli/dist/chunk-ZJDLK7C3.cjs","../src/lib/data-inventory/pullAllDatapoints.ts","../src/lib/data-inventory/pullUnstructuredSubDataPointRecommendations.ts"],"names":["pullSubDatapoints","client","dataSiloIds","includeGuessedCategories","includeAttributes","parentCategories","subCategories","pageSize","subDataPoints","t0","progressBar","cliProgress","filterBy","SubDataPointDataSubCategoryGuessStatus","totalCount","makeGraphQLRequest","SUB_DATA_POINTS_COUNT","logger","colors","total","shouldContinue","cursor","offset","nodes","gql","err"],"mappings":"AAAA,quBAA4E,wDAAyC,qCCC1E,2DAIpC,qGACiB,iDACJ,gFACD,MAkFnB,SAAeA,CAAAA,CACbC,CAAAA,CACA,CACE,WAAA,CAAAC,CAAAA,CAAc,CAAC,CAAA,CACf,wBAAA,CAAAC,CAAAA,CACA,iBAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CAAmB,CAAC,CAAA,CACpB,aAAA,CAAAC,CAAAA,CAAgB,CAAC,CAAA,CACjB,QAAA,CAAAC,CAAAA,CAAW,GACb,CAAA,CAGI,CAAC,CAAA,CAC8B,CACnC,IAAMC,CAAAA,CAA0C,CAAC,CAAA,CAG3CC,CAAAA,CAAK,IAAI,IAAA,CAAK,CAAA,CAAE,OAAA,CAAQ,CAAA,CAGxBC,CAAAA,CAAc,IAAIC,qBAAAA,CAAY,SAAA,CAClC,CAAC,CAAA,CACDA,qBAAAA,CAAY,OAAA,CAAQ,cACtB,CAAA,CAGMC,CAAAA,CAAW,CACf,GAAIP,CAAAA,CAAiB,MAAA,CAAS,CAAA,CAAI,CAAE,QAAA,CAAUA,CAAiB,CAAA,CAAI,CAAC,CAAA,CACpE,GAAIC,CAAAA,CAAc,MAAA,CAAS,CAAA,CAAI,CAAE,cAAA,CAAgBA,CAAc,CAAA,CAAI,CAAC,CAAA,CAEpE,GAAID,CAAAA,CAAiB,MAAA,CAASC,CAAAA,CAAc,MAAA,CAAS,CAAA,EACrD,CAACH,CAAAA,CAEG,CAAE,MAAA,CAAQU,oDAAAA,CAAuC,QAAS,CAAA,CAC1D,CAAC,CAAA,CACL,GAAIX,CAAAA,CAAY,MAAA,CAAS,CAAA,CAAI,CAAE,SAAA,CAAWA,CAAY,CAAA,CAAI,CAAC,CAC7D,CAAA,CAGM,CACJ,aAAA,CAAe,CAAE,UAAA,CAAAY,CAAW,CAC9B,CAAA,CAAI,MAAMC,kCAAAA,CAMPd,CAAQe,mBAAAA,CAAuB,CAChC,QAAA,CAAAJ,CACF,CAAC,CAAA,CAEDK,mBAAAA,CAAO,IAAA,CAAKC,gBAAAA,CAAO,OAAA,CAAQ,yCAAyC,CAAC,CAAA,CAErER,CAAAA,CAAY,KAAA,CAAMI,CAAAA,CAAY,CAAC,CAAA,CAC/B,IAAIK,CAAAA,CAAQ,CAAA,CACRC,CAAAA,CAAiB,CAAA,CAAA,CACjBC,CAAAA,CACAC,CAAAA,CAAS,CAAA,CACb,GACE,GAAI,CACF,GAAM,CACJ,aAAA,CAAe,CAAE,KAAA,CAAAC,CAAM,CACzB,CAAA,CAAI,MAAMR,kCAAAA,CAORd,CACAuB,mBAAAA,CAAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAA,EA2BUrB,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAQA,EACN,CAAA;AAAA,gBAAA,EAEEC,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAMA,EACN,CAAA;AAAA;AAAA;AAAA;AAAA,QAAA,CAAA,CAKR,CACE,KAAA,CAAOG,CAAAA,CACP,MAAA,CAAAe,CAAAA,CACA,QAAA,CAAU,CACR,GAAGV,CAGL,CACF,CACF,CAAA,CAEAS,CAAAA,iBAASE,CAAAA,qBAAMA,CAAAA,CAAM,MAAA,CAAS,CAAC,CAAA,6BAAG,IAAA,CAClCf,CAAAA,CAAc,IAAA,CAAK,GAAGe,CAAK,CAAA,CAC3BH,CAAAA,CAAiBG,CAAAA,CAAM,MAAA,GAAWhB,CAAAA,CAClCY,CAAAA,EAASI,CAAAA,CAAM,MAAA,CACfD,CAAAA,EAAUC,CAAAA,CAAM,MAAA,CAChBb,CAAAA,CAAY,MAAA,CAAOS,CAAK,CAC1B,CAAA,KAAA,CAASM,CAAAA,CAAK,CACZ,MAAAR,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,2CAAA,EAA8CG,CAAM,CAAA,YAAA,EAAeC,CAAM,CAAA,CAAA;AC7G3E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBgD,gBAAA;AACU,gBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA+BiB,QAAA","file":"/Users/benbrook/transcend/cli/dist/chunk-ZJDLK7C3.cjs","sourcesContent":[null,"/* eslint-disable max-lines */\nimport { keyBy, uniq, chunk, sortBy } from 'lodash-es';\nimport {\n type DataCategoryType,\n SubDataPointDataSubCategoryGuessStatus,\n} from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport { gql } from 'graphql-request';\nimport colors from 'colors';\nimport type { GraphQLClient } from 'graphql-request';\nimport {\n DATAPOINT_EXPORT,\n DATA_SILO_EXPORT,\n type DataSiloAttributeValue,\n SUB_DATA_POINTS_COUNT,\n 
makeGraphQLRequest,\n} from '../graphql';\nimport { logger } from '../../logger';\nimport type { DataCategoryInput, ProcessingPurposeInput } from '../../codecs';\nimport { mapSeries } from '@/lib/bluebird-replace';\n\nexport interface DataSiloCsvPreview {\n /** ID of dataSilo */\n id: string;\n /** Name of dataSilo */\n title: string;\n}\n\nexport interface DataPointCsvPreview {\n /** ID of dataPoint */\n id: string;\n /** The path to this data point */\n path: string[];\n /** Description */\n description: {\n /** Default message */\n defaultMessage: string;\n };\n /** Name */\n name: string;\n}\n\nexport interface SubDataPointCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Name (or key) of the subdatapoint */\n name: string;\n /** The description */\n description?: string;\n /** Personal data category */\n categories: DataCategoryInput[];\n /** Data point ID */\n dataPointId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** The processing purpose for this sub datapoint */\n purposes: ProcessingPurposeInput[];\n /** Attribute attached to subdatapoint */\n attributeValues?: DataSiloAttributeValue[];\n /** Data category guesses that are output by the classifier */\n pendingCategoryGuesses?: {\n /** Data category being guessed */\n category: DataCategoryInput;\n /** Status of guess */\n status: SubDataPointDataSubCategoryGuessStatus;\n /** classifier version that produced the guess */\n classifierVersion: number;\n }[];\n}\n\nexport interface DatapointFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Whether to include guessed categories, defaults to only approved categories */\n includeGuessedCategories?: boolean;\n /** Whether or not to include attributes */\n includeAttributes?: boolean;\n /** Parent categories to filter down for */\n parentCategories?: DataCategoryType[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n}\n\n/**\n * Pull subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The subdatapoints\n */\nasync function pullSubDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<SubDataPointCsvPreview[]> {\n const subDataPoints: SubDataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(parentCategories.length > 0 ? { category: parentCategories } : {}),\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n // if parentCategories or subCategories and not includeGuessedCategories\n ...(parentCategories.length + subCategories.length > 0 &&\n !includeGuessedCategories\n ? // then only show data points with approved data categories\n { status: SubDataPointDataSubCategoryGuessStatus.Approved }\n : {}),\n ...(dataSiloIds.length > 0 ? 
{ dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n subDataPoints: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** Count */\n totalCount: number;\n };\n }>(client, SUB_DATA_POINTS_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n subDataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** List of matches */\n nodes: SubDataPointCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliSubDataPointCsvExport(\n $filterBy: SubDataPointFiltersInput\n $first: Int!\n $offset: Int!\n ) {\n subDataPoints(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n name\n description\n dataPointId\n dataSiloId\n purposes {\n name\n purpose\n }\n categories {\n name\n category\n }\n ${\n includeGuessedCategories\n ? `pendingCategoryGuesses {\n category {\n name\n category\n }\n status\n classifierVersion\n }`\n : ''\n }\n ${\n includeAttributes\n ? `attributeValues {\n attributeKey {\n name\n }\n name\n }`\n : ''\n }\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n // TODO: https://transcend.height.app/T-40484 - add cursor support\n // ...(cursor ? { cursor: { id: cursor } } : {}),\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n subDataPoints.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(subDataPoints, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n\n/**\n * Pull datapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints\n */\nasync function pullDatapoints(\n client: GraphQLClient,\n {\n dataPointIds = [],\n pageSize = 100,\n }: {\n /** IDs of data points to filter down */\n dataPointIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataPointCsvPreview[]> {\n const dataPoints: DataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 2/3] Fetching metadata for ${dataPointIds.length} datapoints`,\n ),\n );\n\n // Group by 100\n const dataPointsGrouped = chunk(dataPointIds, pageSize);\n\n progressBar.start(dataPointIds.length, 0);\n let total = 0;\n await mapSeries(dataPointsGrouped, async (dataPointIdsGroup) => {\n try {\n const {\n dataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataPoints: {\n /** List of matches */\n nodes: DataPointCsvPreview[];\n };\n }>(client, DATAPOINT_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataPointIdsGroup,\n },\n });\n\n dataPoints.push(...nodes);\n 
total += dataPointIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for IDs ${dataPointIdsGroup.join(\n ', ',\n )}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataPoints.length} dataPoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataPoints;\n}\n\n/**\n * Pull data silo information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The data silos\n */\nasync function pullDataSilos(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n pageSize = 100,\n }: {\n /** IDs of data silos to filter down */\n dataSiloIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataSiloCsvPreview[]> {\n const dataSilos: DataSiloCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 3/3] Fetching metadata for ${dataSiloIds.length} data silos`,\n ),\n );\n\n // Group by 100\n const dataSilosGrouped = chunk(dataSiloIds, pageSize);\n\n progressBar.start(dataSiloIds.length, 0);\n let total = 0;\n await mapSeries(dataSilosGrouped, async (dataSiloIdsGroup) => {\n try {\n const {\n dataSilos: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataSilos: {\n /** List of matches */\n nodes: DataSiloCsvPreview[];\n };\n }>(client, DATA_SILO_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataSiloIdsGroup,\n },\n });\n\n dataSilos.push(...nodes);\n total += dataSiloIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching data silos for IDs ${dataSiloIdsGroup.join(', ')}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataSilos.length} data silos in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataSilos;\n}\n\n/**\n * Pull all datapoints from the data inventory.\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints and data silos\n */\nexport async function pullAllDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<\n (SubDataPointCsvPreview & {\n /** Data point information */\n dataPoint: DataPointCsvPreview;\n /** Data silo information */\n dataSilo: DataSiloCsvPreview;\n })[]\n> {\n // Subdatapoint information\n const subDatapoints = await pullSubDatapoints(client, {\n dataSiloIds,\n includeGuessedCategories,\n includeAttributes,\n parentCategories,\n subCategories,\n pageSize,\n });\n\n // The datapoint ids to grab\n const dataPointIds = uniq(subDatapoints.map((point) => point.dataPointId));\n const dataPoints = await pullDatapoints(client, {\n dataPointIds,\n });\n const dataPointById = keyBy(dataPoints, 'id');\n\n // The data silo IDs to grab\n const allDataSiloIds = uniq(subDatapoints.map((point) => point.dataSiloId));\n const dataSilos = await pullDataSilos(client, {\n 
dataSiloIds: allDataSiloIds,\n });\n const dataSiloById = keyBy(dataSilos, 'id');\n\n return subDatapoints.map((subDataPoint) => ({\n ...subDataPoint,\n dataPoint: dataPointById[subDataPoint.dataPointId],\n dataSilo: dataSiloById[subDataPoint.dataSiloId],\n }));\n}\n/* eslint-enable max-lines */\n","import type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport { gql, type GraphQLClient } from 'graphql-request';\nimport { sortBy } from 'lodash-es';\nimport type { DataCategoryInput } from '../../codecs';\nimport { ENTRY_COUNT, makeGraphQLRequest } from '../graphql';\nimport { logger } from '../../logger';\n\ninterface UnstructuredSubDataPointRecommendationCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Entry or Named Entity recognized by the classifier */\n name: string;\n /** Context snippet including entry */\n contextSnippet: string;\n /** Scanned object ID */\n scannedObjectId: string;\n /** Scanned object path ID */\n scannedObjectPathId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** Personal data category */\n dataSubCategory: DataCategoryInput;\n /** Classification Status */\n status: UnstructuredSubDataPointRecommendationStatus;\n /** Confidence */\n confidence: number;\n /** Classification method */\n classificationMethod: string;\n /** Classifier version */\n classifierVersion: string;\n}\n\ninterface EntryFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Parent categories to filter down for */\n status?: UnstructuredSubDataPointRecommendationStatus[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n /** Include entry and snippet */\n includeEncryptedSnippets?: boolean;\n /** Include encryptedSamplesS3Key */\n includeEncryptedSamplesS3Key?: boolean;\n}\n/**\n * Pull unstructured subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @param options.dataSiloIds - IDs of data silos to filter down\n * @param options.status - Parent categories to filter down for\n * @param options.subCategories - Sub categories to filter down for\n * @param options.includeEncryptedSnippets - Include entry and snippet\n * @param options.includeEncryptedSamplesS3Key - Include encryptedSamplesS3Key\n * @param options.pageSize - Page size to pull in\n * @returns A promise that resolves to an array of unstructured subdatapoint recommendations\n */\nexport async function pullUnstructuredSubDataPointRecommendations(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n status,\n subCategories = [],\n includeEncryptedSnippets,\n pageSize = 100,\n }: EntryFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<UnstructuredSubDataPointRecommendationCsvPreview[]> {\n const unstructuredSubDataPointRecommendations: UnstructuredSubDataPointRecommendationCsvPreview[] =\n [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n ...(status ? { status } : {}),\n ...(dataSiloIds.length > 0 ? 
{ dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n unstructuredSubDataPointRecommendations: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** Count */\n totalCount: number;\n };\n }>(client, ENTRY_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n unstructuredSubDataPointRecommendations: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** List of matches */\n nodes: UnstructuredSubDataPointRecommendationCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliUnstructuredSubDataPointRecommendationCsvExport(\n $filterBy: UnstructuredSubDataPointRecommendationsFilterInput\n $first: Int!\n $offset: Int!\n ) {\n unstructuredSubDataPointRecommendations(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n dataSiloId\n scannedObjectPathId\n scannedObjectId\n ${includeEncryptedSnippets ? 'name' : ''}\n ${includeEncryptedSnippets ? 'contextSnippet' : ''}\n dataSubCategory {\n name\n category\n }\n status\n confidence\n classificationMethod\n classifierVersion\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n unstructuredSubDataPointRecommendations.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(unstructuredSubDataPointRecommendations, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n"]}
package/dist/chunk-ZUNVPK23.cjs
DELETED
@@ -1,2 +0,0 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _globalagent = require('global-agent');var _undici = require('undici');var _yargsparser = require('yargs-parser'); var _yargsparser2 = _interopRequireDefault(_yargsparser);var i=console,{httpProxy:o=process.env.http_proxy}=_yargsparser2.default.call(void 0, process.argv.slice(2));o&&(i.info(_colors2.default.green(`Initializing proxy: ${o}`)),process.env.GLOBAL_AGENT_HTTP_PROXY=o,_globalagent.bootstrap.call(void 0, ),_undici.setGlobalDispatcher.call(void 0, new (0, _undici.ProxyAgent)(o)));exports.a = i;
//# sourceMappingURL=chunk-ZUNVPK23.cjs.map
package/dist/chunk-ZUNVPK23.cjs.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["../src/logger.ts"],"names":["logger","httpProxy","yargs","colors"],"mappings":"AAAA,yPAAmB,2CACO,gCACsB,qGAC9B,IAELA,CAAAA,CAAS,OAAA,CAGhB,CAAE,SAAA,CAAAC,CAAAA,CAAY,OAAA,CAAQ,GAAA,CAAI,UAAW,CAAA,CAAIC,mCAAAA,OAAM,CAAQ,IAAA,CAAK,KAAA,CAAM,CAAC,CAAC,CAAA,CACtED,CAAAA,EAAAA,CACFD,CAAAA,CAAO,IAAA,CAAKG,gBAAAA,CAAO,KAAA,CAAM,CAAA,oBAAA,EAAuBF,CAAS,CAAA,CAAA","file":"/Users/benbrook/transcend/cli/dist/chunk-ZUNVPK23.cjs","sourcesContent":["import colors from 'colors';\nimport { bootstrap } from 'global-agent';\nimport { ProxyAgent, setGlobalDispatcher } from 'undici';\nimport yargs from 'yargs-parser';\n\nexport const logger = console;\n\n// When the proxy env var of flag is specified, initiate the proxy\nconst { httpProxy = process.env.http_proxy } = yargs(process.argv.slice(2));\nif (httpProxy) {\n logger.info(colors.green(`Initializing proxy: ${httpProxy}`));\n\n // Use global-agent, which overrides `request` based requests\n process.env.GLOBAL_AGENT_HTTP_PROXY = httpProxy;\n bootstrap();\n\n // Use undici, which overrides `fetch` based requests\n setGlobalDispatcher(new ProxyAgent(httpProxy));\n}\n"]}
package/dist/chunk-ZVK4HIDF.cjs
DELETED
@@ -1,6 +0,0 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } }var _fastcsv = require('fast-csv'); var u = _interopRequireWildcard(_fastcsv);var _fs = require('fs');function m(n){return n.includes('"')||n.includes(",")||n.includes(`
`)?`"${n.replace(/"/g,'""')}"`:n}function d(n,t,i){let r=[];r.push(i),r.push(...t.map(e=>Object.values(e)));let s=r.map(e=>e.map(m).join(",")).join(`
`);_fs.writeFileSync.call(void 0, n,s)}function C(n,t){let r=t.map(s=>Object.values(s)).map(s=>s.map(m).join(",")).join(`
`);_fs.appendFileSync.call(void 0, n,`
${r}`)}async function p(n,t,i=!0){let r=_fs.createWriteStream.call(void 0, n);await new Promise((s,e)=>{try{u.write(t,{headers:i,objectMode:!0}).pipe(r).on("error",e).on("end",()=>s(!0))}catch(c){e(c)}})}function x(n){let t=n.lastIndexOf(".");return{baseName:t!==-1?n.substring(0,t):n,extension:t!==-1?n.substring(t):".csv"}}async function O(n,t,i=!0,r=1e5){if(t.length<=r)return await p(n,t,i),[n];let s=[],e=Math.ceil(t.length/r),{baseName:c,extension:b}=x(n);for(let o=0;o<e;o+=1){let a=o*r,l=Math.min(a+r,t.length),w=t.slice(a,l),y=String(o+1).padStart(String(e).length,"0"),g=`${c}_part${y}_of_${e}${b}`;await p(g,w,i),s.push(g)}return s}exports.a = d; exports.b = C; exports.c = p; exports.d = x; exports.e = O;
//# sourceMappingURL=chunk-ZVK4HIDF.cjs.map
package/dist/chunk-ZVK4HIDF.cjs.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["../src/lib/cron/writeCsv.ts"],"names":["escapeCsvValue","value","writeCsvSync","filePath","data","headers","rows","row","csvContent","writeFileSync","appendCsvSync","appendFileSync"],"mappings":"AAAA,yaAAyB,wBACwC,SAUxDA,CAAAA,CAAeC,CAAAA,CAAuB,CAC7C,OAAIA,CAAAA,CAAM,QAAA,CAAS,GAAG,CAAA,EAAKA,CAAAA,CAAM,QAAA,CAAS,GAAG,CAAA,EAAKA,CAAAA,CAAM,QAAA,CAAS,CAAA;AAAA,CAAI,CAAA,CAC5D,CAAA,CAAA,EAAIA,CAAAA,CAAM,OAAA,CAAQ,IAAA,CAAM,IAAI,CAAC,CAAA,CAAA,CAAA,CAE/BA,CACT,CAQO,SAASC,CAAAA,CACdC,CAAAA,CACAC,CAAAA,CACAC,CAAAA,CACM,CACN,IAAMC,CAAAA,CAAmB,CAAC,CAAA,CAE1BA,CAAAA,CAAK,IAAA,CAAKD,CAAO,CAAA,CACjBC,CAAAA,CAAK,IAAA,CAAK,GAAGF,CAAAA,CAAK,GAAA,CAAKG,CAAAA,EAAQ,MAAA,CAAO,MAAA,CAAOA,CAAG,CAAC,CAAC,CAAA,CAGlD,IAAMC,CAAAA,CAAaF,CAAAA,CAChB,GAAA,CAAKC,CAAAA,EAAQA,CAAAA,CAAI,GAAA,CAAIP,CAAc,CAAA,CAAE,IAAA,CAAK,GAAG,CAAC,CAAA,CAC9C,IAAA,CAAK,CAAA;AAAA,CAAI,CAAA,CAGZS,+BAAAA,CAAcN,CAAUK,CAAU,CACpC,CASO,SAASE,CAAAA,CAAcP,CAAAA,CAAkBC,CAAAA,CAA2B,CAKzE,IAAMI,CAAAA,CAHOJ,CAAAA,CAAK,GAAA,CAAKG,CAAAA,EAAQ,MAAA,CAAO,MAAA,CAAOA,CAAG,CAAC,CAAA,CAI9C,GAAA,CAAKA,CAAAA,EAAQA,CAAAA,CAAI,GAAA,CAAIP,CAAc,CAAA,CAAE,IAAA,CAAK,GAAG,CAAC,CAAA,CAC9C,IAAA,CAAK,CAAA;AAAA,CAAI,CAAA,CAGZW,gCAAAA,CAAeR,CAAU,CAAA;AAAA,EAAKK,CAAU,CAAA,CAAA","file":"/Users/benbrook/transcend/cli/dist/chunk-ZVK4HIDF.cjs","sourcesContent":["import * as fastcsv from 'fast-csv';\nimport { createWriteStream, writeFileSync, appendFileSync } from 'fs';\n\nimport { ObjByString } from '@transcend-io/type-utils';\n\n/**\n * Escape a CSV value\n *\n * @param value - Value to escape\n * @returns Escaped value\n */\nfunction escapeCsvValue(value: string): string {\n if (value.includes('\"') || value.includes(',') || value.includes('\\n')) {\n return `\"${value.replace(/\"/g, '\"\"')}\"`;\n }\n return value;\n}\n/**\n * Write a csv to file synchronously, overwriting any existing content\n *\n * @param filePath - File to write out to\n * @param data - Data to write\n * @param headers - Headers. If true, use object keys as headers. 
If array, use provided headers.\n */\nexport function writeCsvSync(\n filePath: string,\n data: ObjByString[],\n headers: string[],\n): void {\n const rows: string[][] = [];\n\n rows.push(headers);\n rows.push(...data.map((row) => Object.values(row)));\n\n // Build CSV content with proper escaping\n const csvContent = rows\n .map((row) => row.map(escapeCsvValue).join(','))\n .join('\\n');\n\n // Write to file, overwriting existing content\n writeFileSync(filePath, csvContent);\n}\n\n/**\n * Append data to an existing csv file synchronously\n * Assumes the data structure matches the existing file\n *\n * @param filePath - File to append to\n * @param data - Data to append\n */\nexport function appendCsvSync(filePath: string, data: ObjByString[]): void {\n // Convert data to CSV rows\n const rows = data.map((row) => Object.values(row));\n\n // Build CSV content with proper escaping\n const csvContent = rows\n .map((row) => row.map(escapeCsvValue).join(','))\n .join('\\n');\n\n // Append to file with leading newline\n appendFileSync(filePath, `\\n${csvContent}`);\n}\n\n/**\n * Write a csv to file asynchronously\n *\n * @param filePath - File to write out to\n * @param data - Data to write\n * @param headers - Headers\n */\nexport async function writeCsv(\n filePath: string,\n data: ObjByString[],\n headers: boolean | string[] = true,\n): Promise<void> {\n const ws = createWriteStream(filePath);\n await new Promise((resolve, reject) => {\n try {\n fastcsv\n .write(data, { headers, objectMode: true })\n .pipe(ws)\n .on('error', reject)\n .on('end', () => resolve(true));\n } catch (err) {\n reject(err);\n }\n });\n}\n\n/**\n * Parse a file path into a base name and extension\n *\n * @param filePath - File path to parse\n * @returns Base name and extension\n */\nexport function parseFilePath(filePath: string): {\n /** Base name of the file */\n baseName: string;\n /** Extension of the file */\n extension: string;\n} {\n const lastDotIndex = filePath.lastIndexOf('.');\n return {\n baseName:\n lastDotIndex !== -1 ? filePath.substring(0, lastDotIndex) : filePath,\n extension: lastDotIndex !== -1 ? 
filePath.substring(lastDotIndex) : '.csv',\n };\n}\n\n/**\n * Write a large CSV dataset to multiple files to avoid file size limits\n *\n * @param filePath - Base file path (will be modified to include chunk numbers)\n * @param data - Data to write\n * @param headers - Headers\n * @param chunkSize - Maximum number of rows per file (default 100000)\n * @returns Array of written file paths\n */\nexport async function writeLargeCsv(\n filePath: string,\n data: ObjByString[],\n headers: boolean | string[] = true,\n chunkSize = 100000,\n): Promise<string[]> {\n if (data.length <= chunkSize) {\n // If data is small enough, write to single file\n await writeCsv(filePath, data, headers);\n return [filePath];\n }\n\n // Split data into chunks and write to multiple files\n const writtenFiles: string[] = [];\n const totalChunks = Math.ceil(data.length / chunkSize);\n const { baseName, extension } = parseFilePath(filePath);\n\n for (let i = 0; i < totalChunks; i += 1) {\n const start = i * chunkSize;\n const end = Math.min(start + chunkSize, data.length);\n const chunk = data.slice(start, end);\n\n // Create filename with chunk number and zero-padding\n const chunkNumber = String(i + 1).padStart(String(totalChunks).length, '0');\n const chunkFilePath = `${baseName}_part${chunkNumber}_of_${totalChunks}${extension}`;\n\n await writeCsv(chunkFilePath, chunk, headers);\n writtenFiles.push(chunkFilePath);\n }\n\n return writtenFiles;\n}\n"]}
package/dist/impl-2DZ5OV74.cjs
DELETED
@@ -1,2 +0,0 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkZJDLK7C3cjs = require('./chunk-ZJDLK7C3.cjs');require('./chunk-KAE73AXX.cjs');var _chunkZVK4HIDFcjs = require('./chunk-ZVK4HIDF.cjs');require('./chunk-ORNBWSZL.cjs');var _chunkJC7VDPVPcjs = require('./chunk-JC7VDPVP.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');require('./chunk-EG4L6YAJ.cjs');require('./chunk-UYYOVK3W.cjs');require('./chunk-BY7W4UQF.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _lodashes = require('lodash-es');async function x({auth:l,file:s,transcendUrl:m,dataSiloIds:f,subCategories:p,status:g,includeEncryptedSnippets:a}){try{let o=_chunkJC7VDPVPcjs.pe.call(void 0, m,l),C=await _chunkZJDLK7C3cjs.b.call(void 0, o,{dataSiloIds:f,subCategories:p,status:g,includeEncryptedSnippets:a});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing unstructured discovery files to file "${s}"...`));let r=[],S=C.map(t=>{let n={"Entry ID":t.id,"Data Silo ID":t.dataSiloId,"Object Path ID":t.scannedObjectPathId,"Object ID":t.scannedObjectId,...a?{Entry:t.name,"Context Snippet":t.contextSnippet}:{},"Data Category":`${t.dataSubCategory.category}:${t.dataSubCategory.name}`,"Classification Status":t.status,"Confidence Score":t.confidence,"Classification Method":t.classificationMethod,"Classifier Version":t.classifierVersion};return r=_lodashes.uniq.call(void 0, [...r,...Object.keys(n)]),n});_chunkZVK4HIDFcjs.c.call(void 0, s,S,r)}catch(o){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error occurred syncing the unstructured discovery files: ${o.message}`)),process.exit(1)}_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully synced unstructured discovery files to disk at ${s}!`))}exports.pullUnstructuredDiscoveryFiles = x;
//# sourceMappingURL=impl-2DZ5OV74.cjs.map
package/dist/impl-2DZ5OV74.cjs.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["/Users/benbrook/transcend/cli/dist/impl-2DZ5OV74.cjs","../src/commands/inventory/pull-unstructured-discovery-files/impl.ts"],"names":["pullUnstructuredDiscoveryFiles","auth","file","transcendUrl","dataSiloIds","subCategories","status","includeEncryptedSnippets","client","buildTranscendGraphQLClient","entries","pullUnstructuredSubDataPointRecommendations","logger","colors","headers","inputs","entry","result"],"mappings":"AAAA,iOAAwC,gCAA6B,wDAAyC,gCAA6B,wDAA0C,wDAAyC,gCAA6B,gCAA6B,gCAA6B,gFCElS,qCACE,MAgBrB,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CAAAA,CACA,aAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,wBAAA,CAAAC,CACF,CAAA,CACe,CACf,GAAI,CAEF,IAAMC,CAAAA,CAASC,kCAAAA,CAA4BN,CAAcF,CAAI,CAAA,CAEvDS,CAAAA,CAAU,MAAMC,iCAAAA,CAA4CH,CAAQ,CACxE,WAAA,CAAAJ,CAAAA,CACA,aAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,wBAAA,CAAAC,CACF,CAAC,CAAA,CAEDK,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,OAAA,CACL,CAAA,8CAAA,EAAiDX,CAAI,CAAA,IAAA,CACvD,CACF,CAAA,CACA,IAAIY,CAAAA,CAAoB,CAAC,CAAA,CACnBC,CAAAA,CAASL,CAAAA,CAAQ,GAAA,CAAKM,CAAAA,EAAU,CACpC,IAAMC,CAAAA,CAAS,CACb,UAAA,CAAYD,CAAAA,CAAM,EAAA,CAClB,cAAA,CAAgBA,CAAAA,CAAM,UAAA,CACtB,gBAAA,CAAkBA,CAAAA,CAAM,mBAAA,CACxB,WAAA,CAAaA,CAAAA,CAAM,eAAA,CACnB,GAAIT,CAAAA,CACA,CAAE,KAAA,CAAOS,CAAAA,CAAM,IAAA,CAAM,iBAAA,CAAmBA,CAAAA,CAAM,cAAe,CAAA,CAC7D,CAAC,CAAA,CACL,eAAA,CAAiB,CAAA,EAAA","file":"/Users/benbrook/transcend/cli/dist/impl-2DZ5OV74.cjs","sourcesContent":[null,"import type { LocalContext } from '@/context';\nimport type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { uniq } from 'lodash-es';\nimport { writeCsv } from '@/lib/cron';\nimport { pullUnstructuredSubDataPointRecommendations } from '@/lib/data-inventory';\nimport { buildTranscendGraphQLClient } from '@/lib/graphql';\nimport { logger } from '@/logger';\n\ninterface PullUnstructuredDiscoveryFilesCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n subCategories?: string[];\n status?: UnstructuredSubDataPointRecommendationStatus[];\n includeEncryptedSnippets: boolean;\n}\n\nexport async function pullUnstructuredDiscoveryFiles(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n dataSiloIds,\n subCategories,\n status,\n includeEncryptedSnippets,\n }: PullUnstructuredDiscoveryFilesCommandFlags,\n): Promise<void> {\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const entries = await pullUnstructuredSubDataPointRecommendations(client, {\n dataSiloIds,\n subCategories, // TODO: https://transcend.height.app/T-40482 - do by name not ID\n status,\n includeEncryptedSnippets,\n });\n\n logger.info(\n colors.magenta(\n `Writing unstructured discovery files to file \"${file}\"...`,\n ),\n );\n let headers: string[] = [];\n const inputs = entries.map((entry) => {\n const result = {\n 'Entry ID': entry.id,\n 'Data Silo ID': entry.dataSiloId,\n 'Object Path ID': entry.scannedObjectPathId,\n 'Object ID': entry.scannedObjectId,\n ...(includeEncryptedSnippets\n ? 
{ Entry: entry.name, 'Context Snippet': entry.contextSnippet }\n : {}),\n 'Data Category': `${entry.dataSubCategory.category}:${entry.dataSubCategory.name}`,\n 'Classification Status': entry.status,\n 'Confidence Score': entry.confidence,\n 'Classification Method': entry.classificationMethod,\n 'Classifier Version': entry.classifierVersion,\n };\n headers = uniq([...headers, ...Object.keys(result)]);\n return result;\n });\n writeCsv(file, inputs, headers);\n } catch (err) {\n logger.error(\n colors.red(\n `An error occurred syncing the unstructured discovery files: ${err.message}`,\n ),\n );\n process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced unstructured discovery files to disk at ${file}!`,\n ),\n );\n}\n"]}
package/dist/impl-2ILRPUCC.cjs
DELETED
@@ -1,9 +0,0 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkCX2GRUPBcjs = require('./chunk-CX2GRUPB.cjs');require('./chunk-KAE73AXX.cjs');var _chunkZVK4HIDFcjs = require('./chunk-ZVK4HIDF.cjs');var _chunkKEXUFX2Jcjs = require('./chunk-KEXUFX2J.cjs');require('./chunk-QJYHSHFA.cjs');require('./chunk-ORNBWSZL.cjs');require('./chunk-UEGX6GZ2.cjs');require('./chunk-SAEKBZGF.cjs');var _chunkJC7VDPVPcjs = require('./chunk-JC7VDPVP.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkEG4L6YAJcjs = require('./chunk-EG4L6YAJ.cjs');require('./chunk-UYYOVK3W.cjs');require('./chunk-BY7W4UQF.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _path = require('path');var _fs = require('fs'); var _fs2 = _interopRequireDefault(_fs);async function q({auth:A,start:C,end:f,folder:r,bin:l,transcendUrl:N}){let m=await _chunkCX2GRUPBcjs.b.call(void 0, A);_fs2.default.existsSync(r)&&!_fs2.default.lstatSync(r).isDirectory()&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red('The provided argument "folder" was passed a file. expected: folder="./consent-metrics/"')),process.exit(1));let g=l;Object.values(_chunkJC7VDPVPcjs.sc).includes(g)||(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to parse argument "bin" with value "${l}"
Expected one of:
${Object.values(_chunkJC7VDPVPcjs.sc).join(`
`)}`)),process.exit(1));let i=new Date(C),o=f?new Date(f):new Date;if(Number.isNaN(i.getTime())&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Start date provided is invalid date. Got --start="${C}" expected --start="01/01/2023"`)),process.exit(1)),Number.isNaN(o.getTime())&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`End date provided is invalid date. Got --end="${f}" expected --end="01/01/2023"`)),process.exit(1)),i>o&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Got a start date "${i.toISOString()}" that was larger than the end date "${o.toISOString()}". Start date must be before end date.`)),process.exit(1)),_fs.existsSync.call(void 0, r)||_fs.mkdirSync.call(void 0, r),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Pulling consent metrics from start=${i.toString()} to end=${o.toISOString()} with bin size "${l}"`)),typeof m=="string"){try{let n=_chunkJC7VDPVPcjs.pe.call(void 0, N,m),s=await _chunkKEXUFX2Jcjs.d.call(void 0, n,{bin:g,start:i,end:o});Object.entries(s).forEach(([p,c])=>{c.forEach(({points:u,name:d})=>{let a=_path.join.call(void 0, r,`${p}_${d}.csv`);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing configuration to file "${a}"...`)),_chunkZVK4HIDFcjs.c.call(void 0, a,u.map(({key:$,value:h})=>({timestamp:$,value:h})))})})}catch(n){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error occurred syncing the schema: ${n.message}`)),process.exit(1)}_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully synced consent metrics to disk in folder "${r}"! View at ${_chunkEG4L6YAJcjs.c}`))}else{let n=[];await _chunkJC7VDPVPcjs.a.call(void 0, m,async(s,p)=>{let c=`[${p+1}/${m.length}][${s.organizationName}] `;_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`~~~
${c}Attempting to pull consent metrics...
~~~`));let u=_chunkJC7VDPVPcjs.pe.call(void 0, N,s.apiKey);try{let d=await _chunkKEXUFX2Jcjs.d.call(void 0, u,{bin:g,start:i,end:o}),a=_path.join.call(void 0, r,s.organizationName);_fs.existsSync.call(void 0, a)||_fs.mkdirSync.call(void 0, a),Object.entries(d).forEach(([$,h])=>{h.forEach(({points:I,name:F})=>{let D=_path.join.call(void 0, a,`${$}_${F}.csv`);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing configuration to file "${D}"...`)),_chunkZVK4HIDFcjs.c.call(void 0, D,I.map(({key:G,value:z})=>({timestamp:G,value:z})))})}),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`${c}Successfully pulled configuration!`))}catch (e2){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`${c}Failed to sync configuration.`)),n.push(s.organizationName)}}),n.length>0&&(_chunkZUNVPK23cjs.a.info(_colors2.default.red(`Sync encountered errors for "${n.join(",")}". View output above for more information, or check out ${_chunkEG4L6YAJcjs.c}`)),process.exit(1))}}exports.pullConsentMetrics = q;
//# sourceMappingURL=impl-2ILRPUCC.cjs.map
package/dist/impl-2ILRPUCC.cjs.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["/Users/benbrook/transcend/cli/dist/impl-2ILRPUCC.cjs","../src/commands/consent/pull-consent-metrics/impl.ts"],"names":["pullConsentMetrics","auth","start","end","folder","bin","transcendUrl","apiKeyOrList","validateTranscendAuth","fs","logger","colors","parsedBin","ConsentManagerMetricBin"],"mappings":"AAAA,iOAAwC,gCAA6B,wDAAyC,wDAAyC,gCAA6B,gCAA6B,gCAA6B,gCAA6B,wDAAyD,wDAAyC,wDAAyC,gCAA6B,gCAA6B,gFCE7b,4BAEE,gEACqB,MAmB1C,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,KAAA,CAAAC,CAAAA,CACA,GAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,GAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CACF,CAAA,CACe,CAEf,IAAMC,CAAAA,CAAe,MAAMC,iCAAAA,CAA0B,CAAA,CAGjDC,YAAAA,CAAG,UAAA,CAAWL,CAAM,CAAA,EAAK,CAACK,YAAAA,CAAG,SAAA,CAAUL,CAAM,CAAA,CAAE,WAAA,CAAY,CAAA,EAAA,CAC7DM,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,yFACF,CACF,CAAA,CACA,OAAA,CAAQ,IAAA,CAAK,CAAC,CAAA,CAAA,CAIhB,IAAMC,CAAAA,CAAYP,CAAAA,CACb,MAAA,CAAO,MAAA,CAAOQ,oBAAuB,CAAA,CAAE,QAAA,CAASD,CAAS,CAAA,EAAA,CAC5DF,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,2CAAA,EAA8CN,CAAG,CAAA;AAAA;AAAA,EACzB,MAAA,CAAO,MAAA,CAAOQ,oBAAuB,CAAA,CAAE,IAAA,CAC3D,CAAA;AAAA,CACF,CAAC,CAAA,CAAA;AAgGD;AAAgB;AAAA;AAAA,GAAA","file":"/Users/benbrook/transcend/cli/dist/impl-2ILRPUCC.cjs","sourcesContent":[null,"import type { LocalContext } from '@/context';\nimport { logger } from '@/logger';\nimport colors from 'colors';\nimport { mapSeries } from '@/lib/bluebird-replace';\nimport { join } from 'path';\nimport fs, { existsSync, mkdirSync } from 'fs';\nimport {\n buildTranscendGraphQLClient,\n ConsentManagerMetricBin,\n} from '@/lib/graphql';\nimport { validateTranscendAuth } from '@/lib/api-keys';\nimport { ADMIN_DASH_INTEGRATIONS } from '@/constants';\nimport { pullConsentManagerMetrics } from '@/lib/consent-manager';\nimport { writeCsv } from '@/lib/cron';\n\ninterface PullConsentMetricsCommandFlags {\n auth: string;\n start: Date;\n end?: Date;\n folder: string;\n bin: string;\n transcendUrl: string;\n}\n\nexport async function pullConsentMetrics(\n this: LocalContext,\n {\n auth,\n start,\n end,\n folder,\n bin,\n transcendUrl,\n }: PullConsentMetricsCommandFlags,\n): Promise<void> {\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Ensure folder either does not exist or is not a file\n if (fs.existsSync(folder) && !fs.lstatSync(folder).isDirectory()) {\n logger.error(\n colors.red(\n 'The provided argument \"folder\" was passed a file. expected: folder=\"./consent-metrics/\"',\n ),\n );\n process.exit(1);\n }\n\n // Validate bin\n const parsedBin = bin as ConsentManagerMetricBin;\n if (!Object.values(ConsentManagerMetricBin).includes(parsedBin)) {\n logger.error(\n colors.red(\n `Failed to parse argument \"bin\" with value \"${bin}\"\\n` +\n `Expected one of: \\n${Object.values(ConsentManagerMetricBin).join(\n '\\n',\n )}`,\n ),\n );\n process.exit(1);\n }\n\n // Parse the dates\n const startDate = new Date(start);\n const endDate = end ? new Date(end) : new Date();\n if (Number.isNaN(startDate.getTime())) {\n logger.error(\n colors.red(\n `Start date provided is invalid date. Got --start=\"${start}\" expected --start=\"01/01/2023\"`,\n ),\n );\n process.exit(1);\n }\n if (Number.isNaN(endDate.getTime())) {\n logger.error(\n colors.red(\n `End date provided is invalid date. 
Got --end=\"${end}\" expected --end=\"01/01/2023\"`,\n ),\n );\n process.exit(1);\n }\n if (startDate > endDate) {\n logger.error(\n colors.red(\n `Got a start date \"${startDate.toISOString()}\" that was larger than the end date \"${endDate.toISOString()}\". ` +\n 'Start date must be before end date.',\n ),\n );\n process.exit(1);\n }\n\n // Create the folder if it does not exist\n if (!existsSync(folder)) {\n mkdirSync(folder);\n }\n\n logger.info(\n colors.magenta(\n `Pulling consent metrics from start=${startDate.toString()} to end=${endDate.toISOString()} with bin size \"${bin}\"`,\n ),\n );\n\n // Sync to Disk\n if (typeof apiKeyOrList === 'string') {\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKeyOrList);\n\n // Pull the metrics\n const configuration = await pullConsentManagerMetrics(client, {\n bin: parsedBin,\n start: startDate,\n end: endDate,\n });\n\n // Write to file\n Object.entries(configuration).forEach(([metricName, metrics]) => {\n metrics.forEach(({ points, name }) => {\n const file = join(folder, `${metricName}_${name}.csv`);\n logger.info(\n colors.magenta(`Writing configuration to file \"${file}\"...`),\n );\n writeCsv(\n file,\n points.map(({ key, value }) => ({\n timestamp: key,\n value,\n })),\n );\n });\n });\n } catch (err) {\n logger.error(\n colors.red(`An error occurred syncing the schema: ${err.message}`),\n );\n process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced consent metrics to disk in folder \"${folder}\"! View at ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n } else {\n const encounteredErrors: string[] = [];\n await mapSeries(apiKeyOrList, async (apiKey, ind) => {\n const prefix = `[${ind + 1}/${apiKeyOrList.length}][${\n apiKey.organizationName\n }] `;\n logger.info(\n colors.magenta(\n `~~~\\n\\n${prefix}Attempting to pull consent metrics...\\n\\n~~~`,\n ),\n );\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKey.apiKey);\n\n try {\n const configuration = await pullConsentManagerMetrics(client, {\n bin: parsedBin,\n start: startDate,\n end: endDate,\n });\n\n // ensure folder exists for that organization\n const subFolder = join(folder, apiKey.organizationName);\n if (!existsSync(subFolder)) {\n mkdirSync(subFolder);\n }\n\n // Write to file\n Object.entries(configuration).forEach(([metricName, metrics]) => {\n metrics.forEach(({ points, name }) => {\n const file = join(subFolder, `${metricName}_${name}.csv`);\n logger.info(\n colors.magenta(`Writing configuration to file \"${file}\"...`),\n );\n writeCsv(\n file,\n points.map(({ key, value }) => ({\n timestamp: key,\n value,\n })),\n );\n });\n });\n\n logger.info(\n colors.green(`${prefix}Successfully pulled configuration!`),\n );\n } catch (err) {\n logger.error(colors.red(`${prefix}Failed to sync configuration.`));\n encounteredErrors.push(apiKey.organizationName);\n }\n });\n\n if (encounteredErrors.length > 0) {\n logger.info(\n colors.red(\n `Sync encountered errors for \"${encounteredErrors.join(\n ',',\n )}\". View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n process.exit(1);\n }\n }\n}\n"]}
package/dist/impl-2LBSGBBL.cjs
DELETED
@@ -1,2 +0,0 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkCX2GRUPBcjs = require('./chunk-CX2GRUPB.cjs');var _chunkKEXUFX2Jcjs = require('./chunk-KEXUFX2J.cjs');require('./chunk-QJYHSHFA.cjs');require('./chunk-ORNBWSZL.cjs');require('./chunk-UEGX6GZ2.cjs');require('./chunk-SAEKBZGF.cjs');var _chunkJC7VDPVPcjs = require('./chunk-JC7VDPVP.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');require('./chunk-EG4L6YAJ.cjs');require('./chunk-UYYOVK3W.cjs');require('./chunk-BY7W4UQF.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _privacytypes = require('@transcend-io/privacy-types');async function M({auth:f,bundleTypes:r=[_privacytypes.ConsentBundleType.Production,_privacytypes.ConsentBundleType.Test],deploy:i,transcendUrl:s}){let e=await _chunkCX2GRUPBcjs.b.call(void 0, f);typeof e=="string"?(await _chunkKEXUFX2Jcjs.a.call(void 0, {deploy:i,transcendUrl:s,auth:e,bundleTypes:r}),_chunkZUNVPK23cjs.a.info(_colors2.default.green("Successfully updated Consent Manager!"))):(await _chunkJC7VDPVPcjs.a.call(void 0, e,async o=>{_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Updating Consent Manager for organization "${o.organizationName}"...`)),await _chunkKEXUFX2Jcjs.a.call(void 0, {deploy:i,transcendUrl:s,auth:o.apiKey,bundleTypes:r}),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully updated Consent Manager for organization "${o.organizationName}"!`))}),_chunkZUNVPK23cjs.a.info(_colors2.default.green("Successfully updated Consent Managers!")))}exports.updateConsentManager = M;
//# sourceMappingURL=impl-2LBSGBBL.cjs.map
package/dist/impl-2LBSGBBL.cjs.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["/Users/benbrook/transcend/cli/dist/impl-2LBSGBBL.cjs","../src/commands/consent/update-consent-manager/impl.ts"],"names":["updateConsentManager","auth","bundleTypes","ConsentBundleType","deploy","transcendUrl","apiKeyOrList","validateTranscendAuth","updateConsentManagerVersionToLatest","logger","colors","mapSeries","apiKey"],"mappings":"AAAA,iOAAwC,wDAAyC,gCAA6B,gCAA6B,gCAA6B,gCAA6B,wDAAyC,wDAAyC,gCAA6B,gCAA6B,gCAA6B,gFCC3V,2DACe,MAclC,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CAAAA,CAAc,CAACC,+BAAAA,CAAkB,UAAA,CAAYA,+BAAAA,CAAkB,IAAI,CAAA,CACnE,MAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CACF,CAAA,CACe,CAEf,IAAMC,CAAAA,CAAe,MAAMC,iCAAAA,CAA0B,CAAA,CAGjD,OAAOD,CAAAA,EAAiB,QAAA,CAAA,CAE1B,MAAME,iCAAAA,CACJ,MAAA,CAAAJ,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,IAAA,CAAMC,CAAAA,CACN,WAAA,CAAAJ,CACF,CAAC,CAAA,CACDO,mBAAAA,CAAO,IAAA,CAAKC,gBAAAA,CAAO,KAAA,CAAM,uCAAuC,CAAC,CAAA,CAAA,CAAA,CAEjE,MAAMC,iCAAAA,CAAUL,CAAc,MAAOM,CAAAA,EAAW,CAC9CH,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,OAAA,CACL,CAAA,2CAAA,EAA8CE,CAAAA,CAAO,gBAAgB,CAAA,IAAA,CACvE,CACF,CAAA,CAEA,MAAMJ,iCAAAA,CACJ,MAAA,CAAAJ,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,IAAA,CAAMO,CAAAA,CAAO,MAAA,CACb,WAAA,CAAAV,CACF,CAAC,CAAA,CAEDO,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,KAAA,CACL,CAAA,uDAAA,EAA0DE,CAAAA,CAAO,gBAAgB,CAAA,EAAA,CACnF,CACF,CACF,CAAC,CAAA,CACDH,mBAAAA,CAAO,IAAA,CAAKC,gBAAAA,CAAO,KAAA,CAAM,wCAAwC,CAAC,CAAA,CAEtE,CAAA,iCAAA","file":"/Users/benbrook/transcend/cli/dist/impl-2LBSGBBL.cjs","sourcesContent":[null,"import type { LocalContext } from '@/context';\nimport colors from 'colors';\nimport { ConsentBundleType } from '@transcend-io/privacy-types';\nimport { mapSeries } from '@/lib/bluebird-replace';\n\nimport { logger } from '@/logger';\nimport { updateConsentManagerVersionToLatest } from '@/lib/consent-manager';\nimport { validateTranscendAuth } from '@/lib/api-keys';\n\ninterface UpdateConsentManagerCommandFlags {\n auth: string;\n bundleTypes: ConsentBundleType[];\n deploy: boolean;\n transcendUrl: string;\n}\n\nexport async function updateConsentManager(\n this: LocalContext,\n {\n auth,\n bundleTypes = [ConsentBundleType.Production, ConsentBundleType.Test],\n deploy,\n transcendUrl,\n }: UpdateConsentManagerCommandFlags,\n): Promise<void> {\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Handle single update\n if (typeof apiKeyOrList === 'string') {\n // Update consent manager\n await updateConsentManagerVersionToLatest({\n deploy,\n transcendUrl,\n auth: apiKeyOrList,\n bundleTypes,\n });\n logger.info(colors.green('Successfully updated Consent Manager!'));\n } else {\n await mapSeries(apiKeyOrList, async (apiKey) => {\n logger.info(\n colors.magenta(\n `Updating Consent Manager for organization \"${apiKey.organizationName}\"...`,\n ),\n );\n\n await updateConsentManagerVersionToLatest({\n deploy,\n transcendUrl,\n auth: apiKey.apiKey,\n bundleTypes,\n });\n\n logger.info(\n colors.green(\n `Successfully updated Consent Manager for organization \"${apiKey.organizationName}\"!`,\n ),\n );\n });\n logger.info(colors.green('Successfully updated Consent Managers!'));\n }\n}\n"]}
package/dist/impl-3NMEM4QJ.cjs
DELETED
@@ -1,2 +0,0 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkKOV2SQO2cjs = require('./chunk-KOV2SQO2.cjs');var _chunkCX2GRUPBcjs = require('./chunk-CX2GRUPB.cjs');var _chunkKEXUFX2Jcjs = require('./chunk-KEXUFX2J.cjs');require('./chunk-QJYHSHFA.cjs');require('./chunk-ORNBWSZL.cjs');require('./chunk-UEGX6GZ2.cjs');require('./chunk-SAEKBZGF.cjs');require('./chunk-JC7VDPVP.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');require('./chunk-EG4L6YAJ.cjs');require('./chunk-UYYOVK3W.cjs');require('./chunk-BY7W4UQF.cjs');var _path = require('path');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _fs = require('fs');function b({consentManagerYmlFolder:s,output:n}){(!_fs.existsSync.call(void 0, s)||!_fs.lstatSync.call(void 0, s).isDirectory())&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Folder does not exist: "${s}"`)),process.exit(1));let p=_chunkCX2GRUPBcjs.c.call(void 0, s).map(i=>{let{"consent-manager":f}=_chunkKOV2SQO2cjs.d.call(void 0, _path.join.call(void 0, s,i));return{name:i,input:f}}),e=_chunkKEXUFX2Jcjs.h.call(void 0, p);_chunkKOV2SQO2cjs.e.call(void 0, n,{"business-entities":e}),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully wrote ${e.length} business entities to file "${n}"`))}exports.consentManagersToBusinessEntities = b;
//# sourceMappingURL=impl-3NMEM4QJ.cjs.map
package/dist/impl-3NMEM4QJ.cjs.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["/Users/benbrook/transcend/cli/dist/impl-3NMEM4QJ.cjs","../src/commands/inventory/consent-managers-to-business-entities/impl.ts"],"names":["consentManagersToBusinessEntities","consentManagerYmlFolder","output","existsSync","lstatSync","logger","colors","inputs","listFiles","directory","consentManager","readTranscendYaml","join","businessEntities","writeTranscendYaml"],"mappings":"AAAA,iOAA+C,wDAAyC,wDAAyC,gCAA6B,gCAA6B,gCAA6B,gCAA6B,gCAA6B,wDAAyC,gCAA6B,gCAA6B,gCAA6B,4BCI7X,gFAEF,wBAEmB,SAOtBA,CAAAA,CAEd,CACE,uBAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CACF,CAAA,CACM,CAAA,CAGJ,CAACC,4BAAAA,CAAkC,CAAA,EACnC,CAACC,2BAAAA,CAAiC,CAAA,CAAE,WAAA,CAAY,CAAA,CAAA,EAAA,CAEhDC,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CAAI,CAAA,wBAAA,EAA2BL,CAAuB,CAAA,CAAA,CAAG,CAClE,CAAA,CACA,OAAA,CAAQ,IAAA,CAAK,CAAC,CAAA,CAAA,CAIhB,IAAMM,CAAAA,CAASC,iCAAAA,CAAiC,CAAA,CAAE,GAAA,CAAKC,CAAAA,EAAc,CACnE,GAAM,CAAE,iBAAA,CAAmBC,CAAe,CAAA,CAAIC,iCAAAA,wBAC5CC,CAAKX,CAAyBQ,CAAS,CACzC,CAAA,CACA,MAAO,CAAE,IAAA,CAAMA,CAAAA,CAAW,KAAA,CAAOC,CAAe,CAClD,CAAC,CAAA,CAGKG,CAAAA,CAAmBb,iCAAAA,CAA8C,CAAA,CAGvEc,iCAAAA,CAAmBZ,CAAQ,CACzB,mBAAA,CAAqBW,CACvB,CAAC,CAAA,CAEDR,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,KAAA,CACL,CAAA,mBAAA,EAAsBO,CAAAA,CAAiB,MAAM,CAAA,4BAAA,EAA+BX,CAAM,CAAA,CAAA,CACpF,CACF,CACF,CAAA,8CAAA","file":"/Users/benbrook/transcend/cli/dist/impl-3NMEM4QJ.cjs","sourcesContent":[null,"import type { LocalContext } from '@/context';\nimport { listFiles } from '@/lib/api-keys';\nimport { consentManagersToBusinessEntities as consentManagersToBusinessEntitiesHelper } from '@/lib/consent-manager';\nimport { readTranscendYaml, writeTranscendYaml } from '@/lib/readTranscendYaml';\nimport { join } from 'path';\n\nimport colors from 'colors';\nimport { logger } from '@/logger';\nimport { existsSync, lstatSync } from 'fs';\n\ninterface ConsentManagersToBusinessEntitiesCommandFlags {\n consentManagerYmlFolder: string;\n output: string;\n}\n\nexport function consentManagersToBusinessEntities(\n this: LocalContext,\n {\n consentManagerYmlFolder,\n output,\n }: ConsentManagersToBusinessEntitiesCommandFlags,\n): void {\n // Ensure folder is passed\n if (\n !existsSync(consentManagerYmlFolder) ||\n !lstatSync(consentManagerYmlFolder).isDirectory()\n ) {\n logger.error(\n colors.red(`Folder does not exist: \"${consentManagerYmlFolder}\"`),\n );\n process.exit(1);\n }\n\n // Read in each consent manager configuration\n const inputs = listFiles(consentManagerYmlFolder).map((directory) => {\n const { 'consent-manager': consentManager } = readTranscendYaml(\n join(consentManagerYmlFolder, directory),\n );\n return { name: directory, input: consentManager };\n });\n\n // Convert to business entities\n const businessEntities = consentManagersToBusinessEntitiesHelper(inputs);\n\n // write to disk\n writeTranscendYaml(output, {\n 'business-entities': businessEntities,\n });\n\n logger.info(\n colors.green(\n `Successfully wrote ${businessEntities.length} business entities to file \"${output}\"`,\n ),\n );\n}\n"]}
package/dist/impl-3QGL5KFO.cjs
DELETED
@@ -1,2 +0,0 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunkORNBWSZLcjs = require('./chunk-ORNBWSZL.cjs');require('./chunk-JC7VDPVP.cjs');require('./chunk-ZUNVPK23.cjs');require('./chunk-EG4L6YAJ.cjs');require('./chunk-UYYOVK3W.cjs');require('./chunk-BY7W4UQF.cjs');async function e({auth:i,transcendUrl:o,enricherIds:r}){await _chunkORNBWSZLcjs.R.call(void 0, {transcendUrl:o,auth:i,enricherIds:r})}exports.skipPreflightJobs = e;
//# sourceMappingURL=impl-3QGL5KFO.cjs.map
package/dist/impl-3QGL5KFO.cjs.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["/Users/benbrook/transcend/cli/dist/impl-3QGL5KFO.cjs","../src/commands/request/skip-preflight-jobs/impl.ts"],"names":["skipPreflightJobs","auth","transcendUrl","enricherIds"],"mappings":"AAAA,iIAAwC,gCAA6B,gCAA6B,gCAA6B,gCAA6B,gCAA6B,MCSzL,SAAsBA,CAAAA,CAEpB,CAAE,IAAA,CAAAC,CAAAA,CAAM,YAAA,CAAAC,CAAAA,CAAc,WAAA,CAAAC,CAAY,CAAA,CACnB,CACf,MAAMH,iCAAAA,CACJ,YAAA,CAAAE,CAAAA,CACA,IAAA,CAAAD,CAAAA,CACA,WAAA,CAAAE,CACF,CAAC,CACH,CAAA,8BAAA","file":"/Users/benbrook/transcend/cli/dist/impl-3QGL5KFO.cjs","sourcesContent":[null,"import type { LocalContext } from '@/context';\nimport { skipPreflightJobs as skipPreflightJobsHelper } from '@/lib/requests';\n\ninterface SkipPreflightJobsCommandFlags {\n auth: string;\n enricherIds: string[];\n transcendUrl: string;\n}\n\nexport async function skipPreflightJobs(\n this: LocalContext,\n { auth, transcendUrl, enricherIds }: SkipPreflightJobsCommandFlags,\n): Promise<void> {\n await skipPreflightJobsHelper({\n transcendUrl,\n auth,\n enricherIds,\n });\n}\n"]}
package/dist/impl-56MNYVA5.cjs
DELETED
@@ -1,2 +0,0 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunkORNBWSZLcjs = require('./chunk-ORNBWSZL.cjs');require('./chunk-JC7VDPVP.cjs');require('./chunk-ZUNVPK23.cjs');require('./chunk-EG4L6YAJ.cjs');require('./chunk-UYYOVK3W.cjs');require('./chunk-BY7W4UQF.cjs');async function m({auth:n,enricherId:s,actions:i,requestEnricherStatuses:a,requestIds:c,createdAtBefore:e,createdAtAfter:t,concurrency:o,transcendUrl:u}){await _chunkORNBWSZLcjs.S.call(void 0, {auth:n,enricherId:s,requestActions:i,requestEnricherStatuses:a,requestIds:c,createdAtBefore:e?new Date(e):void 0,createdAtAfter:t?new Date(t):void 0,concurrency:o,transcendUrl:u})}exports.enricherRestart = m;
//# sourceMappingURL=impl-56MNYVA5.cjs.map
package/dist/impl-56MNYVA5.cjs.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["/Users/benbrook/transcend/cli/dist/impl-56MNYVA5.cjs","../src/commands/request/enricher-restart/impl.ts"],"names":["enricherRestart","auth","enricherId","actions","requestEnricherStatuses","requestIds","createdAtBefore","createdAtAfter","concurrency","transcendUrl","bulkRetryEnrichers"],"mappings":"AAAA,iIAAwC,gCAA6B,gCAA6B,gCAA6B,gCAA6B,gCAA6B,MCmBzL,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,OAAA,CAAAC,CAAAA,CACA,uBAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,eAAA,CAAAC,CAAAA,CACA,cAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CACF,CAAA,CACe,CACf,MAAMC,iCAAAA,CACJ,IAAA,CAAAT,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,cAAA,CAAgBC,CAAAA,CAChB,uBAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,eAAA,CAAiBC,CAAAA,CAAkB,IAAI,IAAA,CAAKA,CAAe,CAAA,CAAI,KAAA,CAAA,CAC/D,cAAA,CAAgBC,CAAAA,CAAiB,IAAI,IAAA,CAAKA,CAAc,CAAA,CAAI,KAAA,CAAA,CAC5D,WAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CACF,CAAC,CACH,CAAA,4BAAA","file":"/Users/benbrook/transcend/cli/dist/impl-56MNYVA5.cjs","sourcesContent":[null,"import type { LocalContext } from '@/context';\nimport { bulkRetryEnrichers } from '@/lib/requests';\nimport type {\n RequestAction,\n RequestEnricherStatus,\n} from '@transcend-io/privacy-types';\n\ninterface EnricherRestartCommandFlags {\n auth: string;\n enricherId: string;\n actions?: RequestAction[];\n requestEnricherStatuses?: RequestEnricherStatus[];\n transcendUrl: string;\n concurrency: number;\n requestIds?: string[];\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n}\n\nexport async function enricherRestart(\n this: LocalContext,\n {\n auth,\n enricherId,\n actions,\n requestEnricherStatuses,\n requestIds,\n createdAtBefore,\n createdAtAfter,\n concurrency,\n transcendUrl,\n }: EnricherRestartCommandFlags,\n): Promise<void> {\n await bulkRetryEnrichers({\n auth,\n enricherId,\n requestActions: actions,\n requestEnricherStatuses,\n requestIds,\n createdAtBefore: createdAtBefore ? new Date(createdAtBefore) : undefined,\n createdAtAfter: createdAtAfter ? new Date(createdAtAfter) : undefined,\n concurrency,\n transcendUrl,\n });\n}\n"]}
package/dist/impl-6PKXIPAW.cjs
DELETED
@@ -1,2 +0,0 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkX4YTPQVYcjs = require('./chunk-X4YTPQVY.cjs');require('./chunk-CX2GRUPB.cjs');require('./chunk-UEGX6GZ2.cjs');require('./chunk-SAEKBZGF.cjs');var _chunkJC7VDPVPcjs = require('./chunk-JC7VDPVP.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkEG4L6YAJcjs = require('./chunk-EG4L6YAJ.cjs');require('./chunk-UYYOVK3W.cjs');require('./chunk-BY7W4UQF.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _child_process = require('child_process');var l='A repository name must be provided. You can specify using --repositoryName=$REPO_NAME or by ensuring the command "git config --get remote.origin.url" returns the name of the repository';async function S({auth:d,scanPath:o,ignoreDirs:f,repositoryName:u,transcendUrl:y}){let e=u;if(!e)try{let t=_child_process.execSync.call(void 0, `cd ${o} && git config --get remote.origin.url`).toString("utf-8").trim();[e]=t.includes("https:")?t.split("/").slice(3).join("/").split("."):(t.split(":").pop()||"").split("."),e||(_chunkZUNVPK23cjs.a.error(_colors2.default.red(l)),process.exit(1))}catch(a){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`${l} - Got error: ${a.message}`)),process.exit(1)}let h=_chunkJC7VDPVPcjs.pe.call(void 0, y,d),i=await _chunkX4YTPQVYcjs.b.call(void 0, {scanPath:o,ignoreDirs:f,repositoryName:e});await _chunkJC7VDPVPcjs.ke.call(void 0, h,i);let s=new URL(_chunkEG4L6YAJcjs.b);s.pathname="/code-scanning/code-packages",_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Scan found ${i.length} packages at ${o}! View results at '${s.href}'`))}exports.scanPackages = S;
//# sourceMappingURL=impl-6PKXIPAW.cjs.map
package/dist/impl-6PKXIPAW.cjs.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["/Users/benbrook/transcend/cli/dist/impl-6PKXIPAW.cjs","../src/commands/inventory/scan-packages/impl.ts"],"names":["REPO_ERROR","scanPackages","auth","scanPath","ignoreDirs","repositoryName","transcendUrl","gitRepositoryName","url","execSync","logger","colors","err"],"mappings":"AAAA,iOAAwC,gCAA6B,gCAA6B,gCAA6B,wDAAkD,wDAAyC,wDAAyC,gCAA6B,gCAA6B,gFCE1S,8CAIM,IAEnBA,CAAAA,CACJ,0LAAA,CAYF,MAAA,SAAsBC,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,QAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,cAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CACF,CAAA,CACe,CAEf,IAAIC,CAAAA,CAAoBF,CAAAA,CACxB,EAAA,CAAI,CAACE,CAAAA,CACH,GAAI,CAKF,IAAMC,CAAAA,CAJOC,qCAAAA,CACX,GAAA,EAAMN,CAAQ,CAAA,sCAAA,CAChB,CAAA,CAEiB,QAAA,CAAS,OAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CACxC,CAACI,CAAiB,CAAA,CAAKC,CAAAA,CAAI,QAAA,CAAS,QAAQ,CAAA,CAExCA,CAAAA,CAAI,KAAA,CAAM,GAAG,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,CAAE,IAAA,CAAK,GAAG,CAAA,CAAE,KAAA,CAAM,GAAG,CAAA,CAAA,CAD1CA,CAAAA,CAAI,KAAA,CAAM,GAAG,CAAA,CAAE,GAAA,CAAI,CAAA,EAAK,EAAA,CAAA,CAAI,KAAA,CAAM,GAAG,CAAA,CAErCD,CAAAA,EAAAA,CACHG,mBAAAA,CAAO,KAAA,CAAMC,gBAAAA,CAAO,GAAA,CAAIX,CAAU,CAAC,CAAA,CACnC,OAAA,CAAQ,IAAA,CAAK,CAAC,CAAA,CAElB,CAAA,KAAA,CAASY,CAAAA,CAAK,CACZF,mBAAAA,CAAO,KAAA,CAAMC,gBAAAA,CAAO,GAAA,CAAI,CAAA,EAAA","file":"/Users/benbrook/transcend/cli/dist/impl-6PKXIPAW.cjs","sourcesContent":[null,"import type { LocalContext } from '@/context';\nimport { logger } from '@/logger';\nimport colors from 'colors';\nimport { ADMIN_DASH } from '@/constants';\nimport { findCodePackagesInFolder } from '@/lib/code-scanning';\nimport { buildTranscendGraphQLClient, syncCodePackages } from '@/lib/graphql';\nimport { execSync } from 'child_process';\n\nconst REPO_ERROR =\n 'A repository name must be provided. ' +\n 'You can specify using --repositoryName=$REPO_NAME or by ensuring the ' +\n 'command \"git config --get remote.origin.url\" returns the name of the repository';\n\ninterface ScanPackagesCommandFlags {\n auth: string;\n scanPath: string;\n ignoreDirs?: string[];\n repositoryName?: string;\n transcendUrl: string;\n}\n\nexport async function scanPackages(\n this: LocalContext,\n {\n auth,\n scanPath,\n ignoreDirs,\n repositoryName,\n transcendUrl,\n }: ScanPackagesCommandFlags,\n): Promise<void> {\n // Ensure repository name is specified\n let gitRepositoryName = repositoryName;\n if (!gitRepositoryName) {\n try {\n const name = execSync(\n `cd ${scanPath} && git config --get remote.origin.url`,\n );\n // Trim and parse the URL\n const url = name.toString('utf-8').trim();\n [gitRepositoryName] = !url.includes('https:')\n ? (url.split(':').pop() || '').split('.')\n : url.split('/').slice(3).join('/').split('.');\n if (!gitRepositoryName) {\n logger.error(colors.red(REPO_ERROR));\n process.exit(1);\n }\n } catch (err) {\n logger.error(colors.red(`${REPO_ERROR} - Got error: ${err.message}`));\n process.exit(1);\n }\n }\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Scan the codebase to discovery packages\n const results = await findCodePackagesInFolder({\n scanPath,\n ignoreDirs,\n repositoryName: gitRepositoryName,\n });\n\n // Report scan to Transcend\n await syncCodePackages(client, results);\n\n const newUrl = new URL(ADMIN_DASH);\n newUrl.pathname = '/code-scanning/code-packages';\n\n // Indicate success\n logger.info(\n colors.green(\n `Scan found ${results.length} packages at ${scanPath}! ` +\n `View results at '${newUrl.href}'`,\n ),\n );\n}\n"]}
package/dist/impl-73JOMLRW.cjs
DELETED
@@ -1,2 +0,0 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunkURT6VVOKcjs = require('./chunk-URT6VVOK.cjs');require('./chunk-ZVK4HIDF.cjs');require('./chunk-ORNBWSZL.cjs');require('./chunk-JC7VDPVP.cjs');require('./chunk-ZUNVPK23.cjs');require('./chunk-EG4L6YAJ.cjs');require('./chunk-UYYOVK3W.cjs');require('./chunk-BY7W4UQF.cjs');async function c({auth:n,transcendUrl:e,file:i,concurrency:r,actions:o,sombraAuth:s}){await _chunkURT6VVOKcjs.a.call(void 0, {file:i,transcendUrl:e,concurrency:r,requestActions:o,auth:n,sombraAuth:s})}exports.pullIdentifiers = c;
//# sourceMappingURL=impl-73JOMLRW.cjs.map
package/dist/impl-73JOMLRW.cjs.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["/Users/benbrook/transcend/cli/dist/impl-73JOMLRW.cjs","../src/commands/request/preflight/pull-identifiers/impl.ts"],"names":["pullIdentifiers","auth","transcendUrl","file","concurrency","actions","sombraAuth","pullManualEnrichmentIdentifiersToCsv"],"mappings":"AAAA,iIAAwC,gCAA6B,gCAA6B,gCAA6B,gCAA6B,gCAA6B,gCAA6B,gCAA6B,MCanP,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CAAAA,CACA,OAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CACF,CAAA,CACe,CACf,MAAMC,iCAAAA,CACJ,IAAA,CAAAJ,CAAAA,CACA,YAAA,CAAAD,CAAAA,CACA,WAAA,CAAAE,CAAAA,CACA,cAAA,CAAgBC,CAAAA,CAChB,IAAA,CAAAJ,CAAAA,CACA,UAAA,CAAAK,CACF,CAAC,CACH,CAAA,4BAAA","file":"/Users/benbrook/transcend/cli/dist/impl-73JOMLRW.cjs","sourcesContent":[null,"import type { LocalContext } from '@/context';\nimport { pullManualEnrichmentIdentifiersToCsv } from '@/lib/manual-enrichment';\nimport type { RequestAction } from '@transcend-io/privacy-types';\n\ninterface PullIdentifiersCommandFlags {\n auth: string;\n sombraAuth?: string;\n transcendUrl: string;\n file: string;\n actions?: RequestAction[];\n concurrency: number;\n}\n\nexport async function pullIdentifiers(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n file,\n concurrency,\n actions,\n sombraAuth,\n }: PullIdentifiersCommandFlags,\n): Promise<void> {\n await pullManualEnrichmentIdentifiersToCsv({\n file,\n transcendUrl,\n concurrency,\n requestActions: actions,\n auth,\n sombraAuth,\n });\n}\n"]}
package/dist/impl-ADTYWN4O.cjs
DELETED
@@ -1,2 +0,0 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkKAE73AXXcjs = require('./chunk-KAE73AXX.cjs');var _chunkZVK4HIDFcjs = require('./chunk-ZVK4HIDF.cjs');require('./chunk-ORNBWSZL.cjs');require('./chunk-JC7VDPVP.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');require('./chunk-EG4L6YAJ.cjs');require('./chunk-UYYOVK3W.cjs');require('./chunk-BY7W4UQF.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _lodashes = require('lodash-es');async function N({file:d,transcendUrl:u,auth:p,sombraAuth:g,dataSiloId:c,actions:v,pageLimit:r,skipRequestCount:n,chunkSize:e}){n&&_chunkZUNVPK23cjs.a.info(_colors2.default.yellow("Skipping request count as requested. This may help speed up the call.")),(Number.isNaN(e)||e<=0||e%r!==0)&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Invalid chunk size: "${e}". Must be a positive integer that is a multiple of ${r}.`)),process.exit(1));let{baseName:b,extension:C}=_chunkZVK4HIDFcjs.d.call(void 0, d),a=0;await _chunkKAE73AXXcjs.g.call(void 0, {transcendUrl:u,apiPageSize:r,savePageSize:e,onSave:i=>{let s=`${b}-${a}${C}`;_chunkZUNVPK23cjs.a.info(_colors2.default.blue(`Saving ${i.length} identifiers to file "${s}"`));let $=_lodashes.uniq.call(void 0, i.map(I=>Object.keys(I)).flat());return _chunkZVK4HIDFcjs.c.call(void 0, s,i,$),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully wrote ${i.length} identifiers to file "${s}"`)),a+=1,Promise.resolve()},actions:v,auth:p,sombraAuth:g,dataSiloId:c,skipRequestCount:n})}exports.pullIdentifiers = N;
//# sourceMappingURL=impl-ADTYWN4O.cjs.map
package/dist/impl-ADTYWN4O.cjs.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["/Users/benbrook/transcend/cli/dist/impl-ADTYWN4O.cjs","../src/commands/request/cron/pull-identifiers/impl.ts"],"names":["pullIdentifiers","file","transcendUrl","auth","sombraAuth","dataSiloId","actions","pageLimit","skipRequestCount","chunkSize","logger","colors","baseName","extension","parseFilePath","fileCount","pullChunkedCustomSiloOutstandingIdentifiers","chunk","numberedFileName"],"mappings":"AAAA,iOAAwC,wDAAgD,gCAA6B,gCAA6B,wDAAyC,gCAA6B,gCAA6B,gCAA6B,gFCC/P,qCAGE,MAqBrB,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,OAAA,CAAAC,CAAAA,CACA,SAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,SAAA,CAAAC,CACF,CAAA,CACe,CACXD,CAAAA,EACFE,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,MAAA,CACL,uEACF,CACF,CAAA,CAAA,CAIA,MAAA,CAAO,KAAA,CAAMF,CAAS,CAAA,EACtBA,CAAAA,EAAa,CAAA,EACbA,CAAAA,CAAYF,CAAAA,GAAc,CAAA,CAAA,EAAA,CAE1BG,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,qBAAA,EAAwBF,CAAS,CAAA,oDAAA,EAAuDF,CAAS,CAAA,CAAA,CACnG,CACF,CAAA,CACA,OAAA,CAAQ,IAAA,CAAK,CAAC,CAAA,CAAA,CAGhB,GAAM,CAAE,QAAA,CAAAK,CAAAA,CAAU,SAAA,CAAAC,CAAU,CAAA,CAAIC,iCAAAA,CAAkB,CAAA,CAC9CC,CAAAA,CAAY,CAAA,CAsBhB,MAAMC,iCAAAA,CACJ,YAAA,CAAAd,CAAAA,CACA,WAAA,CAAaK,CAAAA,CACb,YAAA,CAAcE,CAAAA,CACd,MAAA,CAxBcQ,CAAAA,EAAmD,CACjE,IAAMC,CAAAA,CAAmB,CAAA,EAAA","file":"/Users/benbrook/transcend/cli/dist/impl-ADTYWN4O.cjs","sourcesContent":[null,"import type { LocalContext } from '@/context';\nimport colors from 'colors';\n\nimport { logger } from '@/logger';\nimport { uniq } from 'lodash-es';\nimport {\n CsvFormattedIdentifier,\n parseFilePath,\n pullChunkedCustomSiloOutstandingIdentifiers,\n writeCsv,\n} from '@/lib/cron';\nimport { RequestAction } from '@transcend-io/privacy-types';\n\ninterface PullIdentifiersCommandFlags {\n file: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n dataSiloId: string;\n actions: RequestAction[];\n pageLimit: number;\n skipRequestCount: boolean;\n chunkSize: number;\n}\n\nexport async function pullIdentifiers(\n this: LocalContext,\n {\n file,\n transcendUrl,\n auth,\n sombraAuth,\n dataSiloId,\n actions,\n pageLimit,\n skipRequestCount,\n chunkSize,\n }: PullIdentifiersCommandFlags,\n): Promise<void> {\n if (skipRequestCount) {\n logger.info(\n colors.yellow(\n 'Skipping request count as requested. This may help speed up the call.',\n ),\n );\n }\n\n if (\n Number.isNaN(chunkSize) ||\n chunkSize <= 0 ||\n chunkSize % pageLimit !== 0\n ) {\n logger.error(\n colors.red(\n `Invalid chunk size: \"${chunkSize}\". Must be a positive integer that is a multiple of ${pageLimit}.`,\n ),\n );\n process.exit(1);\n }\n\n const { baseName, extension } = parseFilePath(file);\n let fileCount = 0;\n\n const onSave = (chunk: CsvFormattedIdentifier[]): Promise<void> => {\n const numberedFileName = `${baseName}-${fileCount}${extension}`;\n logger.info(\n colors.blue(\n `Saving ${chunk.length} identifiers to file \"${numberedFileName}\"`,\n ),\n );\n\n const headers = uniq(chunk.map((d) => Object.keys(d)).flat());\n writeCsv(numberedFileName, chunk, headers);\n logger.info(\n colors.green(\n `Successfully wrote ${chunk.length} identifiers to file \"${numberedFileName}\"`,\n ),\n );\n fileCount += 1;\n return Promise.resolve();\n };\n\n // Pull down outstanding identifiers\n await pullChunkedCustomSiloOutstandingIdentifiers({\n transcendUrl,\n apiPageSize: pageLimit,\n savePageSize: chunkSize,\n onSave,\n actions,\n auth,\n sombraAuth,\n dataSiloId,\n skipRequestCount,\n });\n}\n"]}
package/dist/impl-BOLY4EOP.cjs
DELETED
@@ -1,7 +0,0 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkJC7VDPVPcjs = require('./chunk-JC7VDPVP.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');require('./chunk-UYYOVK3W.cjs');var _chunkBY7W4UQFcjs = require('./chunk-BY7W4UQF.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _got = require('got'); var _got2 = _interopRequireDefault(_got);var Q=({hostname:t,auth:e})=>_got2.default.extend({prefixUrl:`https://${t}`,headers:{accept:"application/json","content-type":"application/json",authorization:`Bearer ${e}`}});var _yargsparser = require('yargs-parser'); var _yargsparser2 = _interopRequireDefault(_yargsparser);var Ge=Object.values(_chunkBY7W4UQFcjs.b);var _fs = require('fs'); var _fs2 = _interopRequireDefault(_fs);var w=({assessment:t,index:e,total:s,wrap:r=!0})=>{let n="";(e===0||r)&&(n=`[
-
`);let m=JSON.stringify(t),o=s&&e<s-1&&!r?",":"";return n=`${n+m+o}
-
`,(s&&e===s-1||r)&&(n+=`
-
]`),n};var B=({file:t,assessment:e,index:s,total:r})=>{_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing enriched assessment ${s+1} of ${r} to file "${t}"...`)),s===0?_fs2.default.writeFileSync(t,w({assessment:e,index:s,total:r,wrap:!1})):_fs2.default.appendFileSync(t,w({assessment:e,index:s,total:r,wrap:!1}))};var _typeutils = require('@transcend-io/type-utils');var _privacytypes = require('@transcend-io/privacy-types');var q=async({oneTrust:t})=>{let e=0,s=1,r=0,n=[];for(;e<s;){let{body:m}=await t.get(`api/assessment/v2/assessments?page=${e}&size=2000`),{page:o,content:u}=_typeutils.decodeCodec.call(void 0, _privacytypes.OneTrustGetListOfAssessmentsResponse,m);n.push(..._nullishCoalesce(u, () => ([]))),e===0&&(s=_nullishCoalesce(_optionalChain([o, 'optionalAccess', _2 => _2.totalPages]), () => (0)),r=_nullishCoalesce(_optionalChain([o, 'optionalAccess', _3 => _3.totalElements]), () => (0))),e+=1,_chunkZUNVPK23cjs.a.info(`Fetched ${n.length} of ${r} assessments.`)}return n};var J=async({oneTrust:t,assessmentId:e})=>{let{body:s}=await t.get(`api/assessment/v2/assessments/${e}/export?ExcludeSkippedQuestions=false`);return _typeutils.decodeCodec.call(void 0, _privacytypes.OneTrustGetAssessmentResponse,s)};var K=async({oneTrust:t,riskId:e})=>{let{body:s}=await t.get(`api/risk/v2/risks/${e}`);return _typeutils.decodeCodec.call(void 0, _privacytypes.OneTrustGetRiskResponse,s)};var b=async({oneTrust:t,userId:e})=>{let{body:s}=await t.get(`api/scim/v2/Users/${e}`);return _typeutils.decodeCodec.call(void 0, _privacytypes.OneTrustGetUserResponse,s)};var _lodashes = require('lodash-es');var W=({assessment:t,assessmentDetails:e,riskDetails:s,creatorDetails:r,approversDetails:n,respondentsDetails:m})=>{let o=_lodashes.keyBy.call(void 0, s,"id"),{sections:u,createdBy:g,...h}=e,O=u.map(i=>{let{questions:A,...$}=i,x=A.map(E=>{let{risks:C,...G}=E,v=(_nullishCoalesce(C, () => ([]))).map(y=>{let S=o[y.riskId];return{...y,...S,level:y.level,impactLevel:_nullishCoalesce(y.impactLevel, () => (0))}});return{...G,risks:v}});return{...$,questions:x}}),p={...g,active:_nullishCoalesce(_optionalChain([r, 'optionalAccess', _4 => _4.active]), () => (!1)),userType:_nullishCoalesce(_optionalChain([r, 'optionalAccess', _5 => _5.userType]), () => ("Internal")),emails:_nullishCoalesce(_optionalChain([r, 'optionalAccess', _6 => _6.emails]), () => ([])),title:_nullishCoalesce(_optionalChain([r, 'optionalAccess', _7 => _7.title]), () => (null)),givenName:_nullishCoalesce(_optionalChain([r, 'optionalAccess', _8 => _8.name, 'access', _9 => _9.givenName]), () => (null)),familyName:_nullishCoalesce(_optionalChain([r, 'optionalAccess', _10 => _10.name, 'access', _11 => _11.familyName]), () => (null))},d=_lodashes.keyBy.call(void 0, n,"id"),l=e.approvers.flatMap(i=>d[i.id]?[{...i,approver:{...i.approver,active:d[i.id].active,userType:d[i.id].userType,emails:d[i.id].emails,title:d[i.id].title,givenName:_nullishCoalesce(d[i.id].name.givenName, () => (null)),familyName:_nullishCoalesce(d[i.id].name.familyName, () => (null))}}]:[]),T=_lodashes.keyBy.call(void 0, m,"id"),F=e.respondents.filter(i=>!i.name.includes("@")).flatMap(i=>T[i.id]?[{...i,active:T[i.id].active,userType:T[i.id].userType,emails:T[i.id].emails,title:T[i.id].title,givenName:_nullishCoalesce(T[i.id].name.givenName, () => (null)),familyName:_nullishCoalesce(T[i.id].name.familyName, () => (null))}]:[]);return{...t,...h,approvers:l,respondents:F,createdBy:p,sections:O}};var 
k=async({transcend:t,assessment:e,total:s,index:r})=>{_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing enriched assessment ${r+1} ${s?`of ${s} `:" "}to Transcend...`));let m={json:w({assessment:e,index:r,total:s})};try{await _chunkJC7VDPVPcjs._b.call(void 0, t,_chunkJC7VDPVPcjs.ha,{input:m})}catch (e2){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to sync assessment ${r+1} ${s?`of ${s} `:" "}to Transcend.
-
Assessment Title: ${e.name}. Template Title: ${e.template.name}
-
`))}};var z=async({oneTrust:t,file:e,dryRun:s,transcend:r})=>{_chunkZUNVPK23cjs.a.info("Getting list of all assessments from OneTrust...");let n=await q({oneTrust:t}),m={},o=5,u=Array.from({length:Math.ceil(n.length/o)},(g,h)=>n.slice(h*o,(h+1)*o));await _chunkJC7VDPVPcjs.a.call(void 0, u,async(g,h)=>{let O=[];await _chunkJC7VDPVPcjs.b.call(void 0, g,async(p,d)=>{let l=o*h+d+1;_chunkZUNVPK23cjs.a.info(`[assessment ${l} of ${n.length}]: fetching details...`);let{templateName:T,assessmentId:F}=p,i=await J({oneTrust:t,assessmentId:F}),A=i.createdBy.id,$=m[A];if(!$){_chunkZUNVPK23cjs.a.info(`[assessment ${l} of ${n.length}]: fetching creator...`);try{$=await b({oneTrust:t,userId:A}),m[A]=$}catch (e3){_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`[assessment ${l} of ${n.length}]: failed to fetch form creator. creatorId: ${A}. Assessment Title: ${p.name}. Template Title: ${T}`))}}let{approvers:x}=i,E=[];x.length>0&&(_chunkZUNVPK23cjs.a.info(`[assessment ${l} of ${n.length}]: fetching approvers...`),E=await _chunkJC7VDPVPcjs.b.call(void 0, x.map(({id:c})=>c),async c=>{try{let f=m[c];return f||(f=await b({oneTrust:t,userId:c}),m[c]=f),[f]}catch (e4){return _chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`[assessment ${l} of ${n.length}]: failed to fetch a form approver. approverId: ${c}. Assessment Title: ${p.name}. Template Title: ${T}`)),[]}},{concurrency:5}));let{respondents:C}=i,G=C.filter(c=>!c.name.includes("@")),v=[];G.length>0&&(_chunkZUNVPK23cjs.a.info(`[assessment ${l} of ${n.length}]: fetching respondents...`),v=await _chunkJC7VDPVPcjs.b.call(void 0, G.map(({id:c})=>c),async c=>{try{let f=m[c];return f||(f=await b({oneTrust:t,userId:c}),m[c]=f),[f]}catch (e5){return _chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`[assessment ${l} of ${n.length}]: failed to fetch a respondent. respondentId: ${c}. Assessment Title: ${p.name}. 
Template Title: ${T}`)),[]}},{concurrency:5}));let y=[],S=_lodashes.uniq.call(void 0, i.sections.flatMap(c=>c.questions.flatMap(f=>(_nullishCoalesce(f.risks, () => ([]))).flatMap(Z=>Z.riskId))));S.length>0&&(_chunkZUNVPK23cjs.a.info(`[assessment ${l} of ${n.length}]: fetching risks...`),y=await _chunkJC7VDPVPcjs.b.call(void 0, S,c=>K({oneTrust:t,riskId:c}),{concurrency:5}));let V=W({assessment:p,assessmentDetails:i,riskDetails:y,creatorDetails:$,approversDetails:E.flat(),respondentsDetails:v.flat()});O.push(V)},{concurrency:o}),await _chunkJC7VDPVPcjs.a.call(void 0, O,async(p,d)=>{let l=h*o+d;s&&e?B({assessment:p,index:l,total:n.length,file:e}):r&&await k({assessment:p,transcend:r,total:n.length,index:l})})})};var _JSONStream = require('JSONStream'); var _JSONStream2 = _interopRequireDefault(_JSONStream);var Y=({transcend:t,file:e})=>(_chunkZUNVPK23cjs.a.info(`Getting list of all assessments from file ${e}...`),new Promise((s,r)=>{let n=_fs.createReadStream.call(void 0, e,{encoding:"utf-8",highWaterMark:65536}),m=_JSONStream2.default.parse("*"),o=0;n.pipe(m),m.on("data",async u=>{try{m.pause();let g=_typeutils.decodeCodec.call(void 0, _privacytypes.OneTrustEnrichedAssessment,u);await k({assessment:g,transcend:t,index:o}),o+=1,m.resume()}catch(g){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to parse the assessment ${o} from file '${e}': ${g.message}.`))}}),m.on("end",()=>{_chunkZUNVPK23cjs.a.info(`Finished processing ${o} assessments from file ${e}`),s()}),m.on("error",u=>{_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Error parsing file '${e}': ${u.message}`)),r(u)}),n.on("error",u=>{_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Error reading file '${e}': ${u.message}`)),r(u)})}));async function _s({hostname:t,oneTrustAuth:e,source:s,transcendAuth:r,transcendUrl:n,resource:m,file:o,dryRun:u,debug:g}){if(!u&&!r)throw new Error('Must specify a "transcendAuth" parameter to sync resources to Transcend. e.g. --transcendAuth=${TRANSCEND_API_KEY}');if(u&&!o)throw new Error('Must set a "file" parameter when "dryRun" is "true". e.g. --file=./oneTrustAssessments.json');if(o){let p=o.split(".");if(p.length<2)throw new Error('The "file" parameter has an invalid format. Expected a path with extensions. e.g. --file=./pathToFile.json.');if(p.at(-1)!=="json")throw new Error(`Expected the format of the "file" parameters '${o}' to be 'json', but got '${p.at(-1)}'.`)}if(s==="oneTrust"){if(!t)throw new Error('Missing required parameter "hostname". e.g. --hostname=customer.my.onetrust.com');if(!e)throw new Error('Missing required parameter "oneTrustAuth". e.g. --oneTrustAuth=$ONE_TRUST_AUTH_TOKEN')}else{if(!o)throw new Error('Must specify a "file" parameter to read the OneTrust assessments from. e.g. --source=./oneTrustAssessments.json');if(u)throw new Error('Cannot read and write to a file simultaneously. Emit the "source" parameter or set it to oneTrust if "dryRun" is enabled.')}let h=t&&e?Q({hostname:t,auth:e}):void 0,O=n&&r?_chunkJC7VDPVPcjs.pe.call(void 0, n,r):void 0;try{m==="assessments"&&(s==="oneTrust"&&h?await z({oneTrust:h,file:o,dryRun:u,...O&&{transcend:O}}):s==="file"&&o&&O&&await Y({file:o,transcend:O}))}catch(p){throw new Error(`An error occurred syncing the resource ${m} from OneTrust: ${g?p.stack:p.message}`)}_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully synced OneTrust ${m} to ${u?`disk at "${o}"`:"Transcend"}!`))}exports.syncOt = _s;
-
//# sourceMappingURL=impl-BOLY4EOP.cjs.map
@@ -1 +0,0 @@
-
{"version":3,"sources":["/Users/benbrook/transcend/cli/dist/impl-BOLY4EOP.cjs","../src/commands/migration/sync-ot/impl.ts","../src/lib/oneTrust/createOneTrustGotInstance.ts","../src/lib/oneTrust/helpers/oneTrustAssessmentToJson.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentsFromOneTrust.ts","../src/lib/oneTrust/helpers/syncOneTrustAssessmentToTranscend.ts"],"names":["createOneTrustGotInstance","hostname","auth","got","index"],"mappings":"AAAA,y0BAAuE,wDAAoC,gCAA6B,wDAAyC,gFCE9J,oECFM,IAQZA,CAAAA,CAA4B,CAAC,CACxC,QAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CACF,CAAA,CAAA,EAMEC,aAAAA,CAAI,MAAA,CAAO,CACT,SAAA,CAAW,CAAA,QAAA,EAAWF,CAAQ,CAAA,CAAA;ACiBwB;AAIzC;ACtCE,CAAA;AC0E0E,mBAAA;ADFpEG","file":"/Users/benbrook/transcend/cli/dist/impl-BOLY4EOP.cjs","sourcesContent":[null,"import type { LocalContext } from '@/context';\nimport { logger } from '@/logger';\nimport colors from 'colors';\nimport { createOneTrustGotInstance } from '@/lib/oneTrust';\nimport {\n OneTrustFileFormat,\n OneTrustPullResource,\n OneTrustPullSource,\n} from '@/enums';\nimport { buildTranscendGraphQLClient } from '@/lib/graphql';\nimport {\n syncOneTrustAssessmentsFromFile,\n syncOneTrustAssessmentsFromOneTrust,\n} from '@/lib/oneTrust/helpers';\n\n// Command flag interface\ninterface SyncOtCommandFlags {\n hostname?: string;\n oneTrustAuth?: string;\n source: OneTrustPullSource;\n transcendAuth?: string;\n transcendUrl: string;\n file?: string;\n resource: OneTrustPullResource;\n dryRun: boolean;\n debug: boolean;\n}\n\n// Command implementation\nexport async function syncOt(\n this: LocalContext,\n {\n hostname,\n oneTrustAuth,\n source,\n transcendAuth,\n transcendUrl,\n resource,\n file,\n dryRun,\n debug,\n }: SyncOtCommandFlags,\n): Promise<void> {\n // Must be able to authenticate to transcend to sync resources to it\n if (!dryRun && !transcendAuth) {\n throw new Error(\n // eslint-disable-next-line no-template-curly-in-string\n 'Must specify a \"transcendAuth\" parameter to sync resources to Transcend. e.g. --transcendAuth=${TRANSCEND_API_KEY}',\n );\n }\n\n // If trying to sync to disk, must specify a file path\n if (dryRun && !file) {\n throw new Error(\n 'Must set a \"file\" parameter when \"dryRun\" is \"true\". e.g. --file=./oneTrustAssessments.json',\n );\n }\n\n if (file) {\n const splitFile = file.split('.');\n if (splitFile.length < 2) {\n throw new Error(\n 'The \"file\" parameter has an invalid format. Expected a path with extensions. e.g. --file=./pathToFile.json.',\n );\n }\n if (splitFile.at(-1) !== OneTrustFileFormat.Json) {\n throw new Error(\n `Expected the format of the \"file\" parameters '${file}' to be '${\n OneTrustFileFormat.Json\n }', but got '${splitFile.at(-1)}'.`,\n );\n }\n }\n\n // if reading assessments from a OneTrust\n if (source === OneTrustPullSource.OneTrust) {\n // must specify the OneTrust hostname\n if (!hostname) {\n throw new Error(\n 'Missing required parameter \"hostname\". e.g. --hostname=customer.my.onetrust.com',\n );\n }\n // must specify the OneTrust auth\n if (!oneTrustAuth) {\n throw new Error(\n 'Missing required parameter \"oneTrustAuth\". e.g. --oneTrustAuth=$ONE_TRUST_AUTH_TOKEN',\n );\n }\n } else {\n // if reading the assessments from a file, must specify a file to read from\n if (!file) {\n throw new Error(\n 'Must specify a \"file\" parameter to read the OneTrust assessments from. e.g. 
--source=./oneTrustAssessments.json',\n );\n }\n\n // Cannot try reading from file and save assessments to a file simultaneously\n if (dryRun) {\n throw new Error(\n 'Cannot read and write to a file simultaneously.' +\n ` Emit the \"source\" parameter or set it to ${OneTrustPullSource.OneTrust} if \"dryRun\" is enabled.`,\n );\n }\n }\n\n // instantiate a client to talk to OneTrust\n const oneTrust =\n hostname && oneTrustAuth\n ? createOneTrustGotInstance({\n hostname,\n auth: oneTrustAuth,\n })\n : undefined;\n\n // instantiate a client to talk to Transcend\n const transcend =\n transcendUrl && transcendAuth\n ? buildTranscendGraphQLClient(transcendUrl, transcendAuth)\n : undefined;\n\n try {\n if (resource === OneTrustPullResource.Assessments) {\n if (source === OneTrustPullSource.OneTrust && oneTrust) {\n await syncOneTrustAssessmentsFromOneTrust({\n oneTrust,\n file,\n dryRun,\n ...(transcend && { transcend }),\n });\n } else if (source === OneTrustPullSource.File && file && transcend) {\n await syncOneTrustAssessmentsFromFile({ file, transcend });\n }\n }\n } catch (err) {\n throw new Error(\n `An error occurred syncing the resource ${resource} from OneTrust: ${\n debug ? err.stack : err.message\n }`,\n );\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced OneTrust ${resource} to ${\n dryRun ? `disk at \"${file}\"` : 'Transcend'\n }!`,\n ),\n );\n}\n","import got, { Got } from 'got';\n\n/**\n * Instantiate an instance of got that is capable of making requests to OneTrust\n *\n * @param param - information about the OneTrust URL\n * @returns The instance of got that is capable of making requests to the customer ingress\n */\nexport const createOneTrustGotInstance = ({\n hostname,\n auth,\n}: {\n /** Hostname of the OneTrust API */\n hostname: string;\n /** The OAuth access token */\n auth: string;\n}): Got =>\n got.extend({\n prefixUrl: `https://${hostname}`,\n headers: {\n accept: 'application/json',\n 'content-type': 'application/json',\n authorization: `Bearer ${auth}`,\n },\n });\n","import { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\n\n/**\n * Converts the assessment into a json entry.\n *\n * @param param - information about the assessment and amount of entries\n * @returns a stringified json entry ready to be appended to a file\n */\nexport const oneTrustAssessmentToJson = ({\n assessment,\n index,\n total,\n wrap = true,\n}: {\n /** The assessment to convert */\n assessment: OneTrustEnrichedAssessment;\n /** The position of the assessment in the final Json object */\n index: number;\n /** The total amount of the assessments in the final Json object */\n total?: number;\n /** Whether to wrap every entry in brackets */\n wrap?: boolean;\n}): string => {\n let jsonEntry = '';\n // start with an opening bracket\n if (index === 0 || wrap) {\n jsonEntry = '[\\n';\n }\n\n const stringifiedAssessment = JSON.stringify(assessment);\n\n // Add comma for all items except the last one\n const comma = total && index < total - 1 && !wrap ? 
',' : '';\n\n // write to file\n jsonEntry = `${jsonEntry + stringifiedAssessment + comma}\\n`;\n\n // end with closing bracket\n if ((total && index === total - 1) || wrap) {\n jsonEntry += '\\n]';\n }\n\n return jsonEntry;\n};\n","import type { Got } from 'got';\nimport colors from 'colors';\nimport {\n getListOfOneTrustAssessments,\n getOneTrustAssessment,\n getOneTrustRisk,\n getOneTrustUser,\n} from '../endpoints';\nimport { mapSeries, map } from '@/lib/bluebird-replace';\nimport { logger } from '../../../logger';\nimport {\n OneTrustAssessmentQuestion,\n OneTrustAssessmentSection,\n OneTrustEnrichedAssessment,\n OneTrustGetRiskResponse,\n OneTrustGetUserResponse,\n} from '@transcend-io/privacy-types';\nimport { uniq } from 'lodash-es';\nimport { enrichOneTrustAssessment } from './enrichOneTrustAssessment';\nimport { syncOneTrustAssessmentToDisk } from './syncOneTrustAssessmentToDisk';\nimport { GraphQLClient } from 'graphql-request';\nimport { syncOneTrustAssessmentToTranscend } from './syncOneTrustAssessmentToTranscend';\n\nexport interface AssessmentForm {\n /** ID of Assessment Form */\n id: string;\n /** Title of Assessment Form */\n name: string;\n}\n\n/**\n * Reads all the assessments from a OneTrust instance and syncs them to Transcend or to Disk.\n *\n * @param param - the information about the assessment, its OneTrust source, and destination (disk or Transcend)\n */\nexport const syncOneTrustAssessmentsFromOneTrust = async ({\n oneTrust,\n file,\n dryRun,\n transcend,\n}: {\n /** the OneTrust client instance */\n oneTrust: Got;\n /** the Transcend client instance */\n transcend?: GraphQLClient;\n /** Whether to write to file instead of syncing to Transcend */\n dryRun: boolean;\n /** the path to the file in case dryRun is true */\n file?: string;\n}): Promise<void> => {\n // fetch the list of all assessments in the OneTrust organization\n logger.info('Getting list of all assessments from OneTrust...');\n const assessments = await getListOfOneTrustAssessments({ oneTrust });\n\n // a cache of OneTrust users so we avoid requesting already fetched users\n const oneTrustCachedUsers: Record<string, OneTrustGetUserResponse> = {};\n\n // split all assessments in batches, so we can process some of steps in parallel\n const BATCH_SIZE = 5;\n const assessmentBatches = Array.from(\n {\n length: Math.ceil(assessments.length / BATCH_SIZE),\n },\n (_, i) => assessments.slice(i * BATCH_SIZE, (i + 1) * BATCH_SIZE),\n );\n\n // process each batch and sync the batch right away so it's garbage collected and we don't run out of memory\n await mapSeries(assessmentBatches, async (assessmentBatch, batch) => {\n const batchEnrichedAssessments: OneTrustEnrichedAssessment[] = [];\n\n // fetch assessment details from OneTrust in parallel\n await map(\n assessmentBatch,\n async (assessment, index) => {\n const assessmentNumber = BATCH_SIZE * batch + index + 1;\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching details...`,\n );\n const { templateName, assessmentId } = assessment;\n const assessmentDetails = await getOneTrustAssessment({\n oneTrust,\n assessmentId,\n });\n // fetch assessment's creator information\n const creatorId = assessmentDetails.createdBy.id;\n let creator = oneTrustCachedUsers[creatorId];\n if (!creator) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching creator...`,\n );\n try {\n creator = await getOneTrustUser({\n oneTrust,\n userId: creatorId,\n });\n oneTrustCachedUsers[creatorId] = creator;\n } catch (e) 
{\n logger.warn(\n colors.yellow(\n `[assessment ${assessmentNumber} of ${assessments.length}]: failed to fetch form creator.` +\n `\\tcreatorId: ${creatorId}. Assessment Title: ${assessment.name}. Template Title: ${templateName}`,\n ),\n );\n }\n }\n\n // fetch assessment approvers information\n const { approvers } = assessmentDetails;\n let approversDetails: OneTrustGetUserResponse[][] = [];\n if (approvers.length > 0) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching approvers...`,\n );\n approversDetails = await map(\n approvers.map(({ id }) => id),\n async (userId) => {\n try {\n let approver = oneTrustCachedUsers[userId];\n if (!approver) {\n approver = await getOneTrustUser({ oneTrust, userId });\n oneTrustCachedUsers[userId] = approver;\n }\n return [approver];\n } catch (e) {\n logger.warn(\n colors.yellow(\n `[assessment ${assessmentNumber} of ${assessments.length}]: failed to fetch a form approver.` +\n `\\tapproverId: ${userId}. Assessment Title: ${assessment.name}. Template Title: ${templateName}`,\n ),\n );\n return [];\n }\n },\n { concurrency: 5 },\n );\n }\n\n // fetch assessment internal respondents information\n const { respondents } = assessmentDetails;\n // if a user is an internal respondents, their 'name' field can't be an email.\n const internalRespondents = respondents.filter(\n (r) => !r.name.includes('@'),\n );\n let respondentsDetails: OneTrustGetUserResponse[][] = [];\n if (internalRespondents.length > 0) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching respondents...`,\n );\n respondentsDetails = await map(\n internalRespondents.map(({ id }) => id),\n async (userId) => {\n try {\n let respondent = oneTrustCachedUsers[userId];\n if (!respondent) {\n respondent = await getOneTrustUser({ oneTrust, userId });\n oneTrustCachedUsers[userId] = respondent;\n }\n return [respondent];\n } catch (e) {\n logger.warn(\n colors.yellow(\n `[assessment ${assessmentNumber} of ${assessments.length}]: failed to fetch a respondent.` +\n `\\trespondentId: ${userId}. Assessment Title: ${assessment.name}. Template Title: ${templateName}`,\n ),\n );\n return [];\n }\n },\n { concurrency: 5 },\n );\n }\n\n // fetch assessment risk information\n let riskDetails: OneTrustGetRiskResponse[] = [];\n const riskIds = uniq(\n assessmentDetails.sections.flatMap((s: OneTrustAssessmentSection) =>\n s.questions.flatMap((q: OneTrustAssessmentQuestion) =>\n (q.risks ?? 
[]).flatMap((r) => r.riskId),\n ),\n ),\n );\n if (riskIds.length > 0) {\n logger.info(\n `[assessment ${assessmentNumber} of ${assessments.length}]: fetching risks...`,\n );\n riskDetails = await map(\n riskIds,\n (riskId) => getOneTrustRisk({ oneTrust, riskId: riskId as string }),\n {\n concurrency: 5,\n },\n );\n }\n\n // enrich the assessments with user and risk details\n const enrichedAssessment = enrichOneTrustAssessment({\n assessment,\n assessmentDetails,\n riskDetails,\n creatorDetails: creator,\n approversDetails: approversDetails.flat(),\n respondentsDetails: respondentsDetails.flat(),\n });\n\n batchEnrichedAssessments.push(enrichedAssessment);\n },\n { concurrency: BATCH_SIZE },\n );\n\n // sync assessments in series to avoid concurrency bugs\n await mapSeries(\n batchEnrichedAssessments,\n async (enrichedAssessment, index) => {\n // the assessment's global index takes its batch into consideration\n const globalIndex = batch * BATCH_SIZE + index;\n\n if (dryRun && file) {\n // sync to file\n syncOneTrustAssessmentToDisk({\n assessment: enrichedAssessment,\n index: globalIndex,\n total: assessments.length,\n file,\n });\n } else if (transcend) {\n // sync to transcend\n await syncOneTrustAssessmentToTranscend({\n assessment: enrichedAssessment,\n transcend,\n total: assessments.length,\n index: globalIndex,\n });\n }\n },\n );\n });\n};\n","import { logger } from '../../../logger';\nimport colors from 'colors';\nimport { GraphQLClient } from 'graphql-request';\nimport {\n IMPORT_ONE_TRUST_ASSESSMENT_FORMS,\n makeGraphQLRequest,\n} from '../../graphql';\nimport { ImportOnetrustAssessmentsInput } from '../../../codecs';\nimport { OneTrustEnrichedAssessment } from '@transcend-io/privacy-types';\nimport { oneTrustAssessmentToJson } from './oneTrustAssessmentToJson';\n\nexport interface AssessmentForm {\n /** ID of Assessment Form */\n id: string;\n /** Title of Assessment Form */\n name: string;\n}\n\n/**\n * Write the assessment to a Transcend instance.\n *\n *\n * @param param - information about the assessment and Transcend instance to write to\n */\nexport const syncOneTrustAssessmentToTranscend = async ({\n transcend,\n assessment,\n total,\n index,\n}: {\n /** the Transcend client instance */\n transcend: GraphQLClient;\n /** the assessment to sync to Transcend */\n assessment: OneTrustEnrichedAssessment;\n /** The index of the assessment being written to the file */\n index: number;\n /** The total amount of assessments that we will write */\n total?: number;\n}): Promise<void> => {\n logger.info(\n colors.magenta(\n `Writing enriched assessment ${index + 1} ${\n total ? `of ${total} ` : ' '\n }to Transcend...`,\n ),\n );\n\n // convert the OneTrust assessment object into a json record\n const json = oneTrustAssessmentToJson({\n assessment,\n index,\n total,\n });\n\n // transform the json record into a valid input to the mutation\n const input: ImportOnetrustAssessmentsInput = {\n json,\n };\n\n try {\n await makeGraphQLRequest<{\n /** the importOneTrustAssessmentForms mutation */\n importOneTrustAssessmentForms: {\n /** Created Assessment Forms */\n assessmentForms: AssessmentForm[];\n };\n }>(transcend, IMPORT_ONE_TRUST_ASSESSMENT_FORMS, {\n input,\n });\n } catch (e) {\n logger.error(\n colors.red(\n `Failed to sync assessment ${index + 1} ${\n total ? `of ${total} ` : ' '\n }to Transcend.\\n` +\n `\\tAssessment Title: ${assessment.name}. Template Title: ${assessment.template.name}\\n`,\n ),\n );\n }\n};\n"]}
package/dist/impl-DUSKH5V5.cjs
DELETED
@@ -1,2 +0,0 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunkQJYHSHFAcjs = require('./chunk-QJYHSHFA.cjs');var _chunkORNBWSZLcjs = require('./chunk-ORNBWSZL.cjs');require('./chunk-JC7VDPVP.cjs');require('./chunk-ZUNVPK23.cjs');require('./chunk-EG4L6YAJ.cjs');require('./chunk-UYYOVK3W.cjs');require('./chunk-BY7W4UQF.cjs');async function d({base64EncryptionKey:o,base64SigningKey:t,partition:s,file:i,consentUrl:a,concurrency:c}){let m=_chunkORNBWSZLcjs.q.call(void 0, i,_chunkQJYHSHFAcjs.b);await _chunkQJYHSHFAcjs.f.call(void 0, {base64EncryptionKey:o,base64SigningKey:t,preferences:m,partition:s,concurrency:c,transcendUrl:a})}exports.uploadConsentPreferences = d;
-
//# sourceMappingURL=impl-DUSKH5V5.cjs.map
@@ -1 +0,0 @@
-
{"version":3,"sources":["/Users/benbrook/transcend/cli/dist/impl-DUSKH5V5.cjs","../src/commands/consent/upload-consent-preferences/impl.ts"],"names":["uploadConsentPreferences","base64EncryptionKey","base64SigningKey","partition","file","consentUrl","concurrency","preferences","readCsv","ConsentPreferenceUpload","uploadConsents"],"mappings":"AAAA,iIAA+C,wDAAyC,gCAA6B,gCAA6B,gCAA6B,gCAA6B,gCAA6B,MCezO,SAAsBA,CAAAA,CAEpB,CACE,mBAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,SAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CACF,CAAA,CACe,CAEf,IAAMC,CAAAA,CAAcC,iCAAAA,CAAQJ,CAAMK,mBAAuB,CAAA,CAGzD,MAAMC,iCAAAA,CACJ,mBAAA,CAAAT,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,WAAA,CAAAK,CAAAA,CACA,SAAA,CAAAJ,CAAAA,CACA,WAAA,CAAAG,CAAAA,CACA,YAAA,CAAcD,CAChB,CAAC,CACH,CAAA,qCAAA","file":"/Users/benbrook/transcend/cli/dist/impl-DUSKH5V5.cjs","sourcesContent":[null,"import type { LocalContext } from '@/context';\n\nimport { uploadConsents } from '@/lib/consent-manager/uploadConsents';\nimport { ConsentPreferenceUpload } from '@/lib/consent-manager/types';\nimport { readCsv } from '@/lib/requests';\n\ninterface UploadConsentPreferencesCommandFlags {\n base64EncryptionKey: string;\n base64SigningKey: string;\n partition: string;\n file: string;\n consentUrl: string;\n concurrency: number;\n}\n\nexport async function uploadConsentPreferences(\n this: LocalContext,\n {\n base64EncryptionKey,\n base64SigningKey,\n partition,\n file,\n consentUrl,\n concurrency,\n }: UploadConsentPreferencesCommandFlags,\n): Promise<void> {\n // Load in preferences from csv\n const preferences = readCsv(file, ConsentPreferenceUpload);\n\n // Upload cookies\n await uploadConsents({\n base64EncryptionKey,\n base64SigningKey,\n preferences,\n partition,\n concurrency,\n transcendUrl: consentUrl,\n });\n}\n"]}
package/dist/impl-F6IWO7FD.cjs
DELETED
@@ -1,2 +0,0 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunkKAE73AXXcjs = require('./chunk-KAE73AXX.cjs');require('./chunk-ZVK4HIDF.cjs');require('./chunk-ORNBWSZL.cjs');require('./chunk-JC7VDPVP.cjs');require('./chunk-ZUNVPK23.cjs');require('./chunk-EG4L6YAJ.cjs');require('./chunk-UYYOVK3W.cjs');require('./chunk-BY7W4UQF.cjs');async function s({file:e,transcendUrl:r,auth:i,sombraAuth:o,dataSiloId:n}){await _chunkKAE73AXXcjs.e.call(void 0, {file:e,transcendUrl:r,auth:i,sombraAuth:o,dataSiloId:n})}exports.markIdentifiersCompleted = s;
-
//# sourceMappingURL=impl-F6IWO7FD.cjs.map
@@ -1 +0,0 @@
-
{"version":3,"sources":["/Users/benbrook/transcend/cli/dist/impl-F6IWO7FD.cjs","../src/commands/request/cron/mark-identifiers-completed/impl.ts"],"names":["markIdentifiersCompleted","file","transcendUrl","auth","sombraAuth","dataSiloId","pushCronIdentifiersFromCsv"],"mappings":"AAAA,iIAAwC,gCAA6B,gCAA6B,gCAA6B,gCAA6B,gCAA6B,gCAA6B,gCAA6B,MCWnP,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CACF,CAAA,CACe,CACf,MAAMC,iCAAAA,CACJ,IAAA,CAAAL,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CACF,CAAC,CACH,CAAA,qCAAA","file":"/Users/benbrook/transcend/cli/dist/impl-F6IWO7FD.cjs","sourcesContent":[null,"import type { LocalContext } from '@/context';\nimport { pushCronIdentifiersFromCsv } from '@/lib/cron';\n\ninterface MarkIdentifiersCompletedCommandFlags {\n file: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n dataSiloId: string;\n}\n\nexport async function markIdentifiersCompleted(\n this: LocalContext,\n {\n file,\n transcendUrl,\n auth,\n sombraAuth,\n dataSiloId,\n }: MarkIdentifiersCompletedCommandFlags,\n): Promise<void> {\n await pushCronIdentifiersFromCsv({\n file,\n transcendUrl,\n auth,\n sombraAuth,\n dataSiloId,\n });\n}\n"]}
package/dist/impl-GHDROQMO.cjs
DELETED
@@ -1,2 +0,0 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunkORNBWSZLcjs = require('./chunk-ORNBWSZL.cjs');require('./chunk-JC7VDPVP.cjs');require('./chunk-ZUNVPK23.cjs');require('./chunk-EG4L6YAJ.cjs');require('./chunk-UYYOVK3W.cjs');require('./chunk-BY7W4UQF.cjs');async function n({auth:s,dataSiloId:a,status:e,statuses:i,transcendUrl:o}){await _chunkORNBWSZLcjs.V.call(void 0, {transcendUrl:o,auth:s,status:e,dataSiloId:a,requestStatuses:i})}exports.skipRequestDataSilos = n;
-
//# sourceMappingURL=impl-GHDROQMO.cjs.map
@@ -1 +0,0 @@
-
{"version":3,"sources":["/Users/benbrook/transcend/cli/dist/impl-GHDROQMO.cjs","../src/commands/request/system/skip-request-data-silos/impl.ts"],"names":["skipRequestDataSilos","auth","dataSiloId","status","statuses","transcendUrl"],"mappings":"AAAA,iIAAwC,gCAA6B,gCAA6B,gCAA6B,gCAA6B,gCAA6B,MCYzL,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,QAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CACF,CAAA,CACe,CACf,MAAML,iCAAAA,CACJ,YAAA,CAAAK,CAAAA,CACA,IAAA,CAAAJ,CAAAA,CACA,MAAA,CAAAE,CAAAA,CACA,UAAA,CAAAD,CAAAA,CACA,eAAA,CAAiBE,CACnB,CAAC,CACH,CAAA,iCAAA","file":"/Users/benbrook/transcend/cli/dist/impl-GHDROQMO.cjs","sourcesContent":[null,"import type { LocalContext } from '@/context';\nimport type { RequestStatus } from '@transcend-io/privacy-types';\nimport { skipRequestDataSilos as skipRequestDataSilosHelper } from '@/lib/requests';\n\ninterface SkipRequestDataSilosCommandFlags {\n auth: string;\n dataSiloId: string;\n transcendUrl: string;\n statuses: RequestStatus[];\n status: 'SKIPPED' | 'RESOLVED';\n}\n\nexport async function skipRequestDataSilos(\n this: LocalContext,\n {\n auth,\n dataSiloId,\n status,\n statuses,\n transcendUrl,\n }: SkipRequestDataSilosCommandFlags,\n): Promise<void> {\n await skipRequestDataSilosHelper({\n transcendUrl,\n auth,\n status,\n dataSiloId,\n requestStatuses: statuses,\n });\n}\n"]}