@transcend-io/cli 7.0.2 → 7.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
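For reference, a comparable diff can be generated locally with npm's built-in diff command (npm v7 or later), pointed at the two versions shown here; the output formatting will differ from the registry view below:

npm diff --diff=@transcend-io/cli@7.0.2 --diff=@transcend-io/cli@7.0.4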
- package/README.md +1816 -158
- package/dist/bin/bash-complete.cjs +1 -1
- package/dist/bin/cli.cjs +1 -1
- package/dist/bin/deprecated-command.cjs +2 -2
- package/dist/chunk-3ZKZCSGD.cjs +2847 -0
- package/dist/chunk-3ZKZCSGD.cjs.map +1 -0
- package/dist/{chunk-QXKWSBYR.cjs → chunk-6DWFLWWT.cjs} +2 -2
- package/dist/{chunk-QXKWSBYR.cjs.map → chunk-6DWFLWWT.cjs.map} +1 -1
- package/dist/chunk-6FGYCQAQ.cjs +2 -0
- package/dist/chunk-6FGYCQAQ.cjs.map +1 -0
- package/dist/chunk-BF5T3SGE.cjs +3 -0
- package/dist/chunk-BF5T3SGE.cjs.map +1 -0
- package/dist/chunk-E3CF3RKX.cjs +2 -0
- package/dist/chunk-E3CF3RKX.cjs.map +1 -0
- package/dist/chunk-EVZLUL56.cjs +75 -0
- package/dist/chunk-EVZLUL56.cjs.map +1 -0
- package/dist/{chunk-W5T3VHKD.cjs → chunk-FPXWBUHS.cjs} +2 -2
- package/dist/{chunk-W5T3VHKD.cjs.map → chunk-FPXWBUHS.cjs.map} +1 -1
- package/dist/chunk-G2T7YVBG.cjs +113 -0
- package/dist/chunk-G2T7YVBG.cjs.map +1 -0
- package/dist/{chunk-WAYG2MDL.cjs → chunk-IG65EYJY.cjs} +2 -2
- package/dist/{chunk-WAYG2MDL.cjs.map → chunk-IG65EYJY.cjs.map} +1 -1
- package/dist/chunk-L7YT5ETS.cjs +2 -0
- package/dist/{chunk-VFP6EKTE.cjs.map → chunk-L7YT5ETS.cjs.map} +1 -1
- package/dist/{chunk-U6G3J6VJ.cjs → chunk-R4IKDXM5.cjs} +2 -2
- package/dist/{chunk-U6G3J6VJ.cjs.map → chunk-R4IKDXM5.cjs.map} +1 -1
- package/dist/chunk-TS5EYI4O.cjs +12 -0
- package/dist/chunk-TS5EYI4O.cjs.map +1 -0
- package/dist/chunk-WIXQSFS6.cjs +2 -0
- package/dist/chunk-WIXQSFS6.cjs.map +1 -0
- package/dist/chunk-WKCTKYN4.cjs +2 -0
- package/dist/chunk-WKCTKYN4.cjs.map +1 -0
- package/dist/chunk-Y4BWTFTX.cjs +15 -0
- package/dist/chunk-Y4BWTFTX.cjs.map +1 -0
- package/dist/{chunk-XHS3FR6L.cjs → chunk-YA5UZ3YM.cjs} +2 -2
- package/dist/{chunk-XHS3FR6L.cjs.map → chunk-YA5UZ3YM.cjs.map} +1 -1
- package/dist/impl-24MFRX5R.cjs +2 -0
- package/dist/impl-24MFRX5R.cjs.map +1 -0
- package/dist/impl-2G6FOZLU.cjs +2 -0
- package/dist/impl-2G6FOZLU.cjs.map +1 -0
- package/dist/impl-4EDFESYC.cjs +2 -0
- package/dist/impl-4EDFESYC.cjs.map +1 -0
- package/dist/impl-5FO2QEHJ.cjs +2 -0
- package/dist/impl-5FO2QEHJ.cjs.map +1 -0
- package/dist/impl-6B7JDOM5.cjs +3 -0
- package/dist/impl-6B7JDOM5.cjs.map +1 -0
- package/dist/impl-AUOR6D3Q.cjs +2 -0
- package/dist/impl-AUOR6D3Q.cjs.map +1 -0
- package/dist/impl-B5HQXUMH.cjs +2 -0
- package/dist/impl-B5HQXUMH.cjs.map +1 -0
- package/dist/impl-BNFADMTO.cjs +2 -0
- package/dist/impl-BNFADMTO.cjs.map +1 -0
- package/dist/impl-BPWMOF4U.cjs +2 -0
- package/dist/impl-BPWMOF4U.cjs.map +1 -0
- package/dist/impl-CNCC36M6.cjs +9 -0
- package/dist/impl-CNCC36M6.cjs.map +1 -0
- package/dist/impl-DBDL4Z23.cjs +2 -0
- package/dist/impl-DBDL4Z23.cjs.map +1 -0
- package/dist/impl-FG47LALL.cjs +2 -0
- package/dist/impl-FG47LALL.cjs.map +1 -0
- package/dist/impl-GNB7TDFU.cjs +2 -0
- package/dist/impl-GNB7TDFU.cjs.map +1 -0
- package/dist/impl-HB3R7YDP.cjs +2 -0
- package/dist/impl-HB3R7YDP.cjs.map +1 -0
- package/dist/impl-ICI7EQKE.cjs +2 -0
- package/dist/impl-ICI7EQKE.cjs.map +1 -0
- package/dist/impl-KOMGU55G.cjs +2 -0
- package/dist/impl-KOMGU55G.cjs.map +1 -0
- package/dist/impl-KP27234L.cjs +2 -0
- package/dist/impl-KP27234L.cjs.map +1 -0
- package/dist/impl-KZ2L66Q3.cjs +2 -0
- package/dist/impl-KZ2L66Q3.cjs.map +1 -0
- package/dist/impl-L3M67IGI.cjs +7 -0
- package/dist/{impl-JH4KIP5U.cjs.map → impl-L3M67IGI.cjs.map} +1 -1
- package/dist/impl-LTOV5CHF.cjs +2 -0
- package/dist/impl-LTOV5CHF.cjs.map +1 -0
- package/dist/impl-MO6AOGQM.cjs +2 -0
- package/dist/impl-MO6AOGQM.cjs.map +1 -0
- package/dist/impl-MPGDZ2M2.cjs +12 -0
- package/dist/impl-MPGDZ2M2.cjs.map +1 -0
- package/dist/impl-N54C7NPT.cjs +2 -0
- package/dist/impl-N54C7NPT.cjs.map +1 -0
- package/dist/impl-N6ML5K5S.cjs +2 -0
- package/dist/impl-N6ML5K5S.cjs.map +1 -0
- package/dist/impl-NKLZ5RG4.cjs +2 -0
- package/dist/impl-NKLZ5RG4.cjs.map +1 -0
- package/dist/impl-NNOWJ4Q4.cjs +6 -0
- package/dist/impl-NNOWJ4Q4.cjs.map +1 -0
- package/dist/impl-OLEEHZUA.cjs +6 -0
- package/dist/impl-OLEEHZUA.cjs.map +1 -0
- package/dist/impl-Q6EQYFKN.cjs +4 -0
- package/dist/impl-Q6EQYFKN.cjs.map +1 -0
- package/dist/impl-QHTG36G3.cjs +2 -0
- package/dist/impl-QHTG36G3.cjs.map +1 -0
- package/dist/impl-QITUCVEV.cjs +2 -0
- package/dist/impl-QITUCVEV.cjs.map +1 -0
- package/dist/impl-R3KCARSP.cjs +2 -0
- package/dist/impl-R3KCARSP.cjs.map +1 -0
- package/dist/impl-RSHVCDLE.cjs +2 -0
- package/dist/impl-RSHVCDLE.cjs.map +1 -0
- package/dist/impl-SNLQCZOR.cjs +2 -0
- package/dist/impl-SNLQCZOR.cjs.map +1 -0
- package/dist/impl-TPVA6DLJ.cjs +2 -0
- package/dist/impl-TPVA6DLJ.cjs.map +1 -0
- package/dist/impl-TVNBHOR4.cjs +2 -0
- package/dist/impl-TVNBHOR4.cjs.map +1 -0
- package/dist/impl-TWLWQSZG.cjs +2 -0
- package/dist/impl-TWLWQSZG.cjs.map +1 -0
- package/dist/impl-UKOAB6ED.cjs +2 -0
- package/dist/impl-UKOAB6ED.cjs.map +1 -0
- package/dist/impl-ZKBMWGMK.cjs +2 -0
- package/dist/impl-ZKBMWGMK.cjs.map +1 -0
- package/dist/impl-ZTWLYZZO.cjs +2 -0
- package/dist/impl-ZTWLYZZO.cjs.map +1 -0
- package/dist/index.cjs +3 -3
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +39 -7
- package/package.json +2 -1
- package/dist/chunk-25UGE4YY.cjs +0 -12
- package/dist/chunk-25UGE4YY.cjs.map +0 -1
- package/dist/chunk-EG4L6YAJ.cjs +0 -2
- package/dist/chunk-EG4L6YAJ.cjs.map +0 -1
- package/dist/chunk-FPTEK7BG.cjs +0 -2
- package/dist/chunk-FPTEK7BG.cjs.map +0 -1
- package/dist/chunk-HAOVN3XO.cjs +0 -75
- package/dist/chunk-HAOVN3XO.cjs.map +0 -1
- package/dist/chunk-KWG56BUX.cjs +0 -2
- package/dist/chunk-KWG56BUX.cjs.map +0 -1
- package/dist/chunk-NOF6QHE4.cjs +0 -2843
- package/dist/chunk-NOF6QHE4.cjs.map +0 -1
- package/dist/chunk-QY37PK62.cjs +0 -94
- package/dist/chunk-QY37PK62.cjs.map +0 -1
- package/dist/chunk-SHV6R64J.cjs +0 -2
- package/dist/chunk-SHV6R64J.cjs.map +0 -1
- package/dist/chunk-VFP6EKTE.cjs +0 -2
- package/dist/chunk-XBHJO2OX.cjs +0 -3
- package/dist/chunk-XBHJO2OX.cjs.map +0 -1
- package/dist/impl-2DZFILID.cjs +0 -2
- package/dist/impl-2DZFILID.cjs.map +0 -1
- package/dist/impl-3CTVL4TA.cjs +0 -2
- package/dist/impl-3CTVL4TA.cjs.map +0 -1
- package/dist/impl-3IN6AV44.cjs +0 -9
- package/dist/impl-3IN6AV44.cjs.map +0 -1
- package/dist/impl-3PSE2WEI.cjs +0 -2
- package/dist/impl-3PSE2WEI.cjs.map +0 -1
- package/dist/impl-4XMTTFXK.cjs +0 -2
- package/dist/impl-4XMTTFXK.cjs.map +0 -1
- package/dist/impl-5L4G73JT.cjs +0 -2
- package/dist/impl-5L4G73JT.cjs.map +0 -1
- package/dist/impl-6NO74W36.cjs +0 -2
- package/dist/impl-6NO74W36.cjs.map +0 -1
- package/dist/impl-6TJRZGRQ.cjs +0 -2
- package/dist/impl-6TJRZGRQ.cjs.map +0 -1
- package/dist/impl-7FHT7P3V.cjs +0 -2
- package/dist/impl-7FHT7P3V.cjs.map +0 -1
- package/dist/impl-7KOXTJKC.cjs +0 -2
- package/dist/impl-7KOXTJKC.cjs.map +0 -1
- package/dist/impl-BESS7RG2.cjs +0 -2
- package/dist/impl-BESS7RG2.cjs.map +0 -1
- package/dist/impl-COCUOYKI.cjs +0 -2
- package/dist/impl-COCUOYKI.cjs.map +0 -1
- package/dist/impl-ELOMI7JW.cjs +0 -6
- package/dist/impl-ELOMI7JW.cjs.map +0 -1
- package/dist/impl-EUJEQGOT.cjs +0 -2
- package/dist/impl-EUJEQGOT.cjs.map +0 -1
- package/dist/impl-F3OOKCK6.cjs +0 -2
- package/dist/impl-F3OOKCK6.cjs.map +0 -1
- package/dist/impl-GH42XS6I.cjs +0 -2
- package/dist/impl-GH42XS6I.cjs.map +0 -1
- package/dist/impl-GSD2LS72.cjs +0 -2
- package/dist/impl-GSD2LS72.cjs.map +0 -1
- package/dist/impl-IIP3SXEB.cjs +0 -6
- package/dist/impl-IIP3SXEB.cjs.map +0 -1
- package/dist/impl-JD4LU2UP.cjs +0 -4
- package/dist/impl-JD4LU2UP.cjs.map +0 -1
- package/dist/impl-JH4KIP5U.cjs +0 -7
- package/dist/impl-JVXU24DF.cjs +0 -2
- package/dist/impl-JVXU24DF.cjs.map +0 -1
- package/dist/impl-KO4JL735.cjs +0 -2
- package/dist/impl-KO4JL735.cjs.map +0 -1
- package/dist/impl-L464FWQF.cjs +0 -2
- package/dist/impl-L464FWQF.cjs.map +0 -1
- package/dist/impl-LH2NCUFY.cjs +0 -2
- package/dist/impl-LH2NCUFY.cjs.map +0 -1
- package/dist/impl-LTL4WYPE.cjs +0 -2
- package/dist/impl-LTL4WYPE.cjs.map +0 -1
- package/dist/impl-MHML3KTM.cjs +0 -2
- package/dist/impl-MHML3KTM.cjs.map +0 -1
- package/dist/impl-NKUI2BXG.cjs +0 -2
- package/dist/impl-NKUI2BXG.cjs.map +0 -1
- package/dist/impl-NXTGH73U.cjs +0 -2
- package/dist/impl-NXTGH73U.cjs.map +0 -1
- package/dist/impl-OAPUUSOI.cjs +0 -2
- package/dist/impl-OAPUUSOI.cjs.map +0 -1
- package/dist/impl-OCJDYVIR.cjs +0 -2
- package/dist/impl-OCJDYVIR.cjs.map +0 -1
- package/dist/impl-PDWZ4O5W.cjs +0 -2
- package/dist/impl-PDWZ4O5W.cjs.map +0 -1
- package/dist/impl-Q2MH6BDT.cjs +0 -2
- package/dist/impl-Q2MH6BDT.cjs.map +0 -1
- package/dist/impl-QV42ZSX6.cjs +0 -6
- package/dist/impl-QV42ZSX6.cjs.map +0 -1
- package/dist/impl-TEQOCEBB.cjs +0 -2
- package/dist/impl-TEQOCEBB.cjs.map +0 -1
- package/dist/impl-TXQ4XVDC.cjs +0 -12
- package/dist/impl-TXQ4XVDC.cjs.map +0 -1
- package/dist/impl-U744VBLW.cjs +0 -2
- package/dist/impl-U744VBLW.cjs.map +0 -1
- package/dist/impl-VEGPS7VZ.cjs +0 -2
- package/dist/impl-VEGPS7VZ.cjs.map +0 -1
- package/dist/impl-XBEBGY37.cjs +0 -2
- package/dist/impl-XBEBGY37.cjs.map +0 -1
- package/dist/impl-Y3YSZDVF.cjs +0 -2
- package/dist/impl-Y3YSZDVF.cjs.map +0 -1
package/dist/chunk-E3CF3RKX.cjs.map
@@ -0,0 +1 @@
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-E3CF3RKX.cjs","../src/constants.ts"],"names":["name","ADMIN_DASH","ADMIN_DASH_INTEGRATIONS"],"mappings":"AAAA,iIAAwC,2DCKjC,IAQMA,CAAAA,CAAO,WAAA,CAEPC,CAAAA,aAAa,0BAAA,CAEbC,CAAAA,aAA0B,CAAA,EAAA","file":"/home/runner/work/cli/cli/dist/chunk-E3CF3RKX.cjs","sourcesContent":[null,"import { keyBy } from 'lodash-es';\nimport {\n ScopeName,\n TRANSCEND_SCOPES,\n type ScopeDefinition,\n} from '@transcend-io/privacy-types';\nimport { TranscendPullResource } from './enums';\nimport { TranscendInput } from './codecs';\n\nexport { description, version } from '../package.json';\n/**\n * The name of the main binary for the CLI\n */\nexport const name = 'transcend';\n\nexport const ADMIN_DASH = 'https://app.transcend.io';\n\nexport const ADMIN_DASH_INTEGRATIONS = `${ADMIN_DASH}/infrastructure/integrations`;\nexport const ADMIN_DASH_DATAPOINTS = `${ADMIN_DASH}/data-map/data-inventory/data-points`;\n\n/**\n * Override default transcend API url using\n * TRANSCEND_API_URL=https://api.us.transcend.io transcend ...\n */\nexport const DEFAULT_TRANSCEND_API =\n process.env.TRANSCEND_API_URL || 'https://api.transcend.io';\n\n/**\n * Override default transcend API url using\n * TRANSCEND_CONSENT_API_URL=https://consent.us.transcend.io transcend ...\n */\nexport const DEFAULT_TRANSCEND_CONSENT_API =\n process.env.TRANSCEND_CONSENT_API_URL || 'https://consent.transcend.io';\n\n/**\n * Mapping between resource type and scopes required for cli\n */\nexport const TR_PUSH_RESOURCE_SCOPE_MAP: {\n [k in TranscendPullResource]: ScopeName[];\n} = {\n [TranscendPullResource.ApiKeys]: [ScopeName.ViewApiKeys],\n [TranscendPullResource.Templates]: [ScopeName.ManageEmailTemplates],\n [TranscendPullResource.DataSilos]: [\n ScopeName.ManageDataMap,\n ScopeName.ConnectDataSilos,\n ],\n [TranscendPullResource.Enrichers]: [ScopeName.ManageRequestIdentities],\n [TranscendPullResource.BusinessEntities]: [ScopeName.ManageDataInventory],\n [TranscendPullResource.Identifiers]: [ScopeName.ManageRequestIdentities],\n [TranscendPullResource.Attributes]: [ScopeName.ManageGlobalAttributes],\n [TranscendPullResource.DataFlows]: [ScopeName.ManageDataFlow],\n [TranscendPullResource.Cookies]: [ScopeName.ManageDataFlow],\n [TranscendPullResource.ConsentManager]: [\n ScopeName.ManageConsentManagerDeveloperSettings,\n ],\n [TranscendPullResource.Partitions]: [\n ScopeName.ManageConsentManagerDeveloperSettings,\n ],\n [TranscendPullResource.Actions]: [ScopeName.ManageDataSubjectRequestSettings],\n [TranscendPullResource.DataSubjects]: [\n ScopeName.ManageDataSubjectRequestSettings,\n ],\n [TranscendPullResource.Prompts]: [ScopeName.ManagePrompts],\n [TranscendPullResource.PromptPartials]: [ScopeName.ManagePrompts],\n [TranscendPullResource.PromptGroups]: [ScopeName.ManagePrompts],\n [TranscendPullResource.Agents]: [ScopeName.ManagePathfinder],\n [TranscendPullResource.AgentFunctions]: [ScopeName.ManagePathfinder],\n [TranscendPullResource.AgentFiles]: [ScopeName.ManagePathfinder],\n [TranscendPullResource.Vendors]: [ScopeName.ManageDataInventory],\n [TranscendPullResource.DataCategories]: [ScopeName.ManageDataInventory],\n [TranscendPullResource.ProcessingPurposes]: [ScopeName.ManageDataInventory],\n [TranscendPullResource.ActionItems]: [\n ScopeName.ManageAllActionItems,\n ScopeName.ViewGlobalAttributes,\n ],\n [TranscendPullResource.ActionItemCollections]: [\n ScopeName.ManageActionItemCollections,\n ],\n [TranscendPullResource.Teams]: [ScopeName.ManageAccessControl],\n 
[TranscendPullResource.Messages]: [ScopeName.ManageIntlMessages],\n [TranscendPullResource.PrivacyCenters]: [ScopeName.ManagePrivacyCenter],\n [TranscendPullResource.Policies]: [ScopeName.ManagePolicies],\n [TranscendPullResource.Assessments]: [ScopeName.ManageAssessments],\n [TranscendPullResource.AssessmentTemplates]: [ScopeName.ManageAssessments],\n [TranscendPullResource.Purposes]: [\n ScopeName.ManageConsentManager,\n ScopeName.ManagePreferenceStoreSettings,\n ],\n};\n\n/**\n * Mapping between resource type and scopes required for cli\n */\nexport const TR_PULL_RESOURCE_SCOPE_MAP: {\n [k in TranscendPullResource]: ScopeName[];\n} = {\n [TranscendPullResource.ApiKeys]: [ScopeName.ViewApiKeys],\n [TranscendPullResource.Templates]: [ScopeName.ViewEmailTemplates],\n [TranscendPullResource.DataSilos]: [\n ScopeName.ViewDataMap,\n ScopeName.ViewDataSubjectRequestSettings,\n ],\n [TranscendPullResource.Enrichers]: [ScopeName.ViewRequestIdentitySettings],\n [TranscendPullResource.BusinessEntities]: [ScopeName.ViewDataInventory],\n [TranscendPullResource.Identifiers]: [ScopeName.ViewRequestIdentitySettings],\n [TranscendPullResource.Attributes]: [ScopeName.ViewGlobalAttributes],\n [TranscendPullResource.DataFlows]: [ScopeName.ViewDataFlow],\n [TranscendPullResource.Cookies]: [ScopeName.ViewDataFlow],\n [TranscendPullResource.ConsentManager]: [ScopeName.ViewConsentManager],\n [TranscendPullResource.Partitions]: [ScopeName.ViewConsentManager],\n [TranscendPullResource.Actions]: [ScopeName.ViewDataSubjectRequestSettings],\n [TranscendPullResource.DataSubjects]: [\n ScopeName.ViewDataSubjectRequestSettings,\n ],\n [TranscendPullResource.Prompts]: [ScopeName.ViewPrompts],\n [TranscendPullResource.PromptPartials]: [ScopeName.ViewPrompts],\n [TranscendPullResource.PromptGroups]: [ScopeName.ViewPrompts],\n [TranscendPullResource.Agents]: [ScopeName.ViewPathfinder],\n [TranscendPullResource.AgentFunctions]: [ScopeName.ViewPathfinder],\n [TranscendPullResource.AgentFiles]: [ScopeName.ViewPathfinder],\n [TranscendPullResource.Vendors]: [ScopeName.ViewDataInventory],\n [TranscendPullResource.DataCategories]: [ScopeName.ViewDataInventory],\n [TranscendPullResource.ProcessingPurposes]: [ScopeName.ViewDataInventory],\n [TranscendPullResource.ActionItemCollections]: [ScopeName.ViewAllActionItems],\n [TranscendPullResource.ActionItems]: [ScopeName.ViewAllActionItems],\n [TranscendPullResource.Teams]: [ScopeName.ViewScopes],\n [TranscendPullResource.Messages]: [ScopeName.ViewIntlMessages],\n [TranscendPullResource.PrivacyCenters]: [ScopeName.ViewPrivacyCenter],\n [TranscendPullResource.Policies]: [ScopeName.ViewPolicies],\n [TranscendPullResource.Assessments]: [ScopeName.ViewAssessments],\n [TranscendPullResource.AssessmentTemplates]: [ScopeName.ViewAssessments],\n [TranscendPullResource.Purposes]: [\n ScopeName.ViewConsentManager,\n ScopeName.ViewPreferenceStoreSettings,\n ],\n};\n\nexport const TR_YML_RESOURCE_TO_FIELD_NAME: Record<\n TranscendPullResource,\n keyof TranscendInput\n> = {\n [TranscendPullResource.ApiKeys]: 'api-keys',\n [TranscendPullResource.Attributes]: 'attributes',\n [TranscendPullResource.DataFlows]: 'data-flows',\n [TranscendPullResource.Cookies]: 'cookies',\n [TranscendPullResource.ConsentManager]: 'consent-manager',\n [TranscendPullResource.Partitions]: 'partitions',\n [TranscendPullResource.Actions]: 'actions',\n [TranscendPullResource.DataSubjects]: 'data-subjects',\n [TranscendPullResource.BusinessEntities]: 'business-entities',\n [TranscendPullResource.Identifiers]: 'identifiers',\n 
[TranscendPullResource.Enrichers]: 'enrichers',\n [TranscendPullResource.DataSilos]: 'data-silos',\n [TranscendPullResource.Templates]: 'templates',\n [TranscendPullResource.Prompts]: 'prompts',\n [TranscendPullResource.PromptPartials]: 'prompt-partials',\n [TranscendPullResource.PromptGroups]: 'prompt-groups',\n [TranscendPullResource.Agents]: 'agents',\n [TranscendPullResource.AgentFunctions]: 'agent-functions',\n [TranscendPullResource.AgentFiles]: 'agent-files',\n [TranscendPullResource.Vendors]: 'vendors',\n [TranscendPullResource.DataCategories]: 'data-categories',\n [TranscendPullResource.ProcessingPurposes]: 'processing-purposes',\n [TranscendPullResource.ActionItems]: 'action-items',\n [TranscendPullResource.ActionItemCollections]: 'action-item-collections',\n [TranscendPullResource.Teams]: 'teams',\n [TranscendPullResource.Messages]: 'messages',\n [TranscendPullResource.PrivacyCenters]: 'privacy-center',\n [TranscendPullResource.Policies]: 'policies',\n [TranscendPullResource.Assessments]: 'assessments',\n [TranscendPullResource.AssessmentTemplates]: 'assessment-templates',\n [TranscendPullResource.Purposes]: 'purposes',\n};\n\nexport const SCOPES_BY_TITLE = keyBy(\n Object.entries(TRANSCEND_SCOPES).map(([name, value]) => ({\n ...value,\n name,\n })),\n 'title',\n) as Record<\n string,\n ScopeDefinition & {\n /** The camelCased name which identifies the scope */\n name: ScopeName;\n }\n>;\n\nexport const SCOPE_TITLES = Object.keys(SCOPES_BY_TITLE);\n"]}
package/dist/chunk-EVZLUL56.cjs
@@ -0,0 +1,75 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunk3ZKZCSGDcjs = require('./chunk-3ZKZCSGD.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkY4BWTFTXcjs = require('./chunk-Y4BWTFTX.cjs');var _privacytypes = require('@transcend-io/privacy-types');var _cliprogress = require('cli-progress'); var _cliprogress2 = _interopRequireDefault(_cliprogress);var _graphqlrequest = require('graphql-request');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function q(u,{dataSiloIds:e=[],includeGuessedCategories:l,includeAttributes:a,parentCategories:c=[],subCategories:t=[],pageSize:p=1e3}={}){let n=[],m=new Date().getTime(),d=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),s={...c.length>0?{category:c}:{},...t.length>0?{subCategoryIds:t}:{},...c.length+t.length>0&&!l?{status:_privacytypes.SubDataPointDataSubCategoryGuessStatus.Approved}:{},...e.length>0?{dataSilos:e}:{}},{subDataPoints:{totalCount:o}}=await _chunk3ZKZCSGDcjs.$b.call(void 0, u,_chunk3ZKZCSGDcjs.f,{filterBy:s});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("[Step 1/3] Pulling in all subdatapoints")),d.start(o,0);let y=0,D=!1,r,b=0;do try{let{subDataPoints:{nodes:P}}=await _chunk3ZKZCSGDcjs.$b.call(void 0, u,_graphqlrequest.gql`
query TranscendCliSubDataPointCsvExport(
  $filterBy: SubDataPointFiltersInput
  $first: Int!
  $offset: Int!
) {
  subDataPoints(
    filterBy: $filterBy
    first: $first
    offset: $offset
    useMaster: false
  ) {
    nodes {
      id
      name
      description
      dataPointId
      dataSiloId
      purposes {
        name
        purpose
      }
      categories {
        name
        category
      }
      ${l?`pendingCategoryGuesses {
        category {
          name
          category
        }
        status
        classifierVersion
      }`:""}
      ${a?`attributeValues {
        attributeKey {
          name
        }
        name
      }`:""}
    }
  }
}
`,{first:p,offset:b,filterBy:{...s}});r=_optionalChain([P, 'access', _2 => _2[P.length-1], 'optionalAccess', _3 => _3.id]),n.push(...P),D=P.length===p,y+=P.length,b+=P.length,d.update(y)}catch(P){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${r} and offset ${b}`)),P}while(D);d.stop();let C=new Date().getTime()-m,g=_chunkY4BWTFTXcjs.g.call(void 0, n,"name");return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${g.length} subdatapoints in ${C/1e3} seconds!`)),g}async function F(u,{dataPointIds:e=[],pageSize:l=100}){let a=[],c=new Date().getTime(),t=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`[Step 2/3] Fetching metadata for ${e.length} datapoints`));let p=_chunkY4BWTFTXcjs.b.call(void 0, e,l);t.start(e.length,0);let n=0;await _chunk3ZKZCSGDcjs.a.call(void 0, p,async s=>{try{let{dataPoints:{nodes:o}}=await _chunk3ZKZCSGDcjs.$b.call(void 0, u,_chunk3ZKZCSGDcjs.i,{first:l,filterBy:{ids:s}});a.push(...o),n+=s.length,t.update(n)}catch(o){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for IDs ${s.join(", ")}`)),o}}),t.stop();let d=new Date().getTime()-c;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${a.length} dataPoints in ${d/1e3} seconds!`)),a}async function Q(u,{dataSiloIds:e=[],pageSize:l=100}){let a=[],c=new Date().getTime(),t=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`[Step 3/3] Fetching metadata for ${e.length} data silos`));let p=_chunkY4BWTFTXcjs.b.call(void 0, e,l);t.start(e.length,0);let n=0;await _chunk3ZKZCSGDcjs.a.call(void 0, p,async s=>{try{let{dataSilos:{nodes:o}}=await _chunk3ZKZCSGDcjs.$b.call(void 0, u,_chunk3ZKZCSGDcjs.l,{first:l,filterBy:{ids:s}});a.push(...o),n+=s.length,t.update(n)}catch(o){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching data silos for IDs ${s.join(", ")}`)),o}}),t.stop();let d=new Date().getTime()-c;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${a.length} data silos in ${d/1e3} seconds!`)),a}async function Y(u,{dataSiloIds:e=[],includeGuessedCategories:l,includeAttributes:a,parentCategories:c=[],subCategories:t=[],pageSize:p=1e3}={}){let n=await q(u,{dataSiloIds:e,includeGuessedCategories:l,includeAttributes:a,parentCategories:c,subCategories:t,pageSize:p}),m=_chunkY4BWTFTXcjs.j.call(void 0, n.map(r=>r.dataPointId)),d=await F(u,{dataPointIds:m}),s=_chunkY4BWTFTXcjs.e.call(void 0, d,"id"),o=_chunkY4BWTFTXcjs.j.call(void 0, n.map(r=>r.dataSiloId)),y=await Q(u,{dataSiloIds:o}),D=_chunkY4BWTFTXcjs.e.call(void 0, y,"id");return n.map(r=>({...r,dataPoint:s[r.dataPointId],dataSilo:D[r.dataSiloId]}))}async function nt(u,{dataSiloIds:e=[],status:l,subCategories:a=[],includeEncryptedSnippets:c,pageSize:t=100}={}){let p=[],n=new Date().getTime(),m=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),d={...a.length>0?{subCategoryIds:a}:{},...l?{status:l}:{},...e.length>0?{dataSilos:e}:{}},{unstructuredSubDataPointRecommendations:{totalCount:s}}=await _chunk3ZKZCSGDcjs.$b.call(void 0, u,_chunk3ZKZCSGDcjs.j,{filterBy:d});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("[Step 1/3] Pulling in all subdatapoints")),m.start(s,0);let o=0,y=!1,D,r=0;do try{let{unstructuredSubDataPointRecommendations:{nodes:g}}=await _chunk3ZKZCSGDcjs.$b.call(void 0, 
u,_graphqlrequest.gql`
query TranscendCliUnstructuredSubDataPointRecommendationCsvExport(
  $filterBy: UnstructuredSubDataPointRecommendationsFilterInput
  $first: Int!
  $offset: Int!
) {
  unstructuredSubDataPointRecommendations(
    filterBy: $filterBy
    first: $first
    offset: $offset
    useMaster: false
  ) {
    nodes {
      id
      dataSiloId
      scannedObjectPathId
      scannedObjectId
      ${c?"name":""}
      ${c?"contextSnippet":""}
      dataSubCategory {
        name
        category
      }
      status
      confidence
      classificationMethod
      classifierVersion
    }
  }
}
`,{first:t,offset:r,filterBy:{...d}});D=_optionalChain([g, 'access', _4 => _4[g.length-1], 'optionalAccess', _5 => _5.id]),p.push(...g),y=g.length===t,o+=g.length,r+=g.length,m.update(o)}catch(g){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${D} and offset ${r}`)),g}while(y);m.stop();let R=new Date().getTime()-n,C=_chunkY4BWTFTXcjs.g.call(void 0, p,"name");return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${C.length} subdatapoints in ${R/1e3} seconds!`)),C}exports.a = Y; exports.b = nt;
//# sourceMappingURL=chunk-EVZLUL56.cjs.map
package/dist/chunk-EVZLUL56.cjs.map
@@ -0,0 +1 @@
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-EVZLUL56.cjs","../src/lib/data-inventory/pullAllDatapoints.ts","../src/lib/data-inventory/pullUnstructuredSubDataPointRecommendations.ts"],"names":["pullSubDatapoints","client","dataSiloIds","includeGuessedCategories","includeAttributes","parentCategories","subCategories","pageSize","subDataPoints","t0","progressBar","cliProgress","filterBy","SubDataPointDataSubCategoryGuessStatus","totalCount","makeGraphQLRequest","SUB_DATA_POINTS_COUNT","logger","colors","total","shouldContinue","cursor","offset","nodes","gql","err"],"mappings":"AAAA,quBAA4E,wDAAyC,wDAA8D,2DCK5K,qGACiB,iDACJ,gFACD,MAkFnB,SAAeA,CAAAA,CACbC,CAAAA,CACA,CACE,WAAA,CAAAC,CAAAA,CAAc,CAAC,CAAA,CACf,wBAAA,CAAAC,CAAAA,CACA,iBAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CAAmB,CAAC,CAAA,CACpB,aAAA,CAAAC,CAAAA,CAAgB,CAAC,CAAA,CACjB,QAAA,CAAAC,CAAAA,CAAW,GACb,CAAA,CAGI,CAAC,CAAA,CAC8B,CACnC,IAAMC,CAAAA,CAA0C,CAAC,CAAA,CAG3CC,CAAAA,CAAK,IAAI,IAAA,CAAK,CAAA,CAAE,OAAA,CAAQ,CAAA,CAGxBC,CAAAA,CAAc,IAAIC,qBAAAA,CAAY,SAAA,CAClC,CAAC,CAAA,CACDA,qBAAAA,CAAY,OAAA,CAAQ,cACtB,CAAA,CAGMC,CAAAA,CAAW,CACf,GAAIP,CAAAA,CAAiB,MAAA,CAAS,CAAA,CAAI,CAAE,QAAA,CAAUA,CAAiB,CAAA,CAAI,CAAC,CAAA,CACpE,GAAIC,CAAAA,CAAc,MAAA,CAAS,CAAA,CAAI,CAAE,cAAA,CAAgBA,CAAc,CAAA,CAAI,CAAC,CAAA,CAEpE,GAAID,CAAAA,CAAiB,MAAA,CAASC,CAAAA,CAAc,MAAA,CAAS,CAAA,EACrD,CAACH,CAAAA,CAEG,CAAE,MAAA,CAAQU,oDAAAA,CAAuC,QAAS,CAAA,CAC1D,CAAC,CAAA,CACL,GAAIX,CAAAA,CAAY,MAAA,CAAS,CAAA,CAAI,CAAE,SAAA,CAAWA,CAAY,CAAA,CAAI,CAAC,CAC7D,CAAA,CAGM,CACJ,aAAA,CAAe,CAAE,UAAA,CAAAY,CAAW,CAC9B,CAAA,CAAI,MAAMC,kCAAAA,CAMPd,CAAQe,mBAAAA,CAAuB,CAChC,QAAA,CAAAJ,CACF,CAAC,CAAA,CAEDK,mBAAAA,CAAO,IAAA,CAAKC,gBAAAA,CAAO,OAAA,CAAQ,yCAAyC,CAAC,CAAA,CAErER,CAAAA,CAAY,KAAA,CAAMI,CAAAA,CAAY,CAAC,CAAA,CAC/B,IAAIK,CAAAA,CAAQ,CAAA,CACRC,CAAAA,CAAiB,CAAA,CAAA,CACjBC,CAAAA,CACAC,CAAAA,CAAS,CAAA,CACb,GACE,GAAI,CACF,GAAM,CACJ,aAAA,CAAe,CAAE,KAAA,CAAAC,CAAM,CACzB,CAAA,CAAI,MAAMR,kCAAAA,CAORd,CACAuB,mBAAAA,CAAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAA,EA2BUrB,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAQA,EACN,CAAA;AAAA,gBAAA,EAEEC,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAMA,EACN,CAAA;AAAA;AAAA;AAAA;AAAA,QAAA,CAAA,CAKR,CACE,KAAA,CAAOG,CAAAA,CACP,MAAA,CAAAe,CAAAA,CACA,QAAA,CAAU,CACR,GAAGV,CAGL,CACF,CACF,CAAA,CAEAS,CAAAA,iBAASE,CAAAA,qBAAMA,CAAAA,CAAM,MAAA,CAAS,CAAC,CAAA,6BAAG,IAAA,CAClCf,CAAAA,CAAc,IAAA,CAAK,GAAGe,CAAK,CAAA,CAC3BH,CAAAA,CAAiBG,CAAAA,CAAM,MAAA,GAAWhB,CAAAA,CAClCY,CAAAA,EAASI,CAAAA,CAAM,MAAA,CACfD,CAAAA,EAAUC,CAAAA,CAAM,MAAA,CAChBb,CAAAA,CAAY,MAAA,CAAOS,CAAK,CAC1B,CAAA,KAAA,CAASM,CAAAA,CAAK,CACZ,MAAAR,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,2CAAA,EAA8CG,CAAM,CAAA,YAAA,EAAeC,CAAM,CAAA,CAAA;AC7G3E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBgD,gBAAA;AACU,gBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA+BiB,QAAA","file":"/home/runner/work/cli/cli/dist/chunk-EVZLUL56.cjs","sourcesContent":[null,"/* eslint-disable max-lines */\nimport { keyBy, uniq, chunk, sortBy } from 'lodash-es';\nimport {\n type DataCategoryType,\n SubDataPointDataSubCategoryGuessStatus,\n} from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport { gql } from 'graphql-request';\nimport colors from 'colors';\nimport type { GraphQLClient } from 'graphql-request';\nimport {\n DATAPOINT_EXPORT,\n DATA_SILO_EXPORT,\n type DataSiloAttributeValue,\n SUB_DATA_POINTS_COUNT,\n 
makeGraphQLRequest,\n} from '../graphql';\nimport { logger } from '../../logger';\nimport type { DataCategoryInput, ProcessingPurposeInput } from '../../codecs';\nimport { mapSeries } from '../bluebird-replace';\n\nexport interface DataSiloCsvPreview {\n /** ID of dataSilo */\n id: string;\n /** Name of dataSilo */\n title: string;\n}\n\nexport interface DataPointCsvPreview {\n /** ID of dataPoint */\n id: string;\n /** The path to this data point */\n path: string[];\n /** Description */\n description: {\n /** Default message */\n defaultMessage: string;\n };\n /** Name */\n name: string;\n}\n\nexport interface SubDataPointCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Name (or key) of the subdatapoint */\n name: string;\n /** The description */\n description?: string;\n /** Personal data category */\n categories: DataCategoryInput[];\n /** Data point ID */\n dataPointId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** The processing purpose for this sub datapoint */\n purposes: ProcessingPurposeInput[];\n /** Attribute attached to subdatapoint */\n attributeValues?: DataSiloAttributeValue[];\n /** Data category guesses that are output by the classifier */\n pendingCategoryGuesses?: {\n /** Data category being guessed */\n category: DataCategoryInput;\n /** Status of guess */\n status: SubDataPointDataSubCategoryGuessStatus;\n /** classifier version that produced the guess */\n classifierVersion: number;\n }[];\n}\n\nexport interface DatapointFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Whether to include guessed categories, defaults to only approved categories */\n includeGuessedCategories?: boolean;\n /** Whether or not to include attributes */\n includeAttributes?: boolean;\n /** Parent categories to filter down for */\n parentCategories?: DataCategoryType[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n}\n\n/**\n * Pull subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The subdatapoints\n */\nasync function pullSubDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<SubDataPointCsvPreview[]> {\n const subDataPoints: SubDataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(parentCategories.length > 0 ? { category: parentCategories } : {}),\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n // if parentCategories or subCategories and not includeGuessedCategories\n ...(parentCategories.length + subCategories.length > 0 &&\n !includeGuessedCategories\n ? // then only show data points with approved data categories\n { status: SubDataPointDataSubCategoryGuessStatus.Approved }\n : {}),\n ...(dataSiloIds.length > 0 ? 
{ dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n subDataPoints: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** Count */\n totalCount: number;\n };\n }>(client, SUB_DATA_POINTS_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n subDataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** List of matches */\n nodes: SubDataPointCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliSubDataPointCsvExport(\n $filterBy: SubDataPointFiltersInput\n $first: Int!\n $offset: Int!\n ) {\n subDataPoints(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n name\n description\n dataPointId\n dataSiloId\n purposes {\n name\n purpose\n }\n categories {\n name\n category\n }\n ${\n includeGuessedCategories\n ? `pendingCategoryGuesses {\n category {\n name\n category\n }\n status\n classifierVersion\n }`\n : ''\n }\n ${\n includeAttributes\n ? `attributeValues {\n attributeKey {\n name\n }\n name\n }`\n : ''\n }\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n // TODO: https://transcend.height.app/T-40484 - add cursor support\n // ...(cursor ? { cursor: { id: cursor } } : {}),\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n subDataPoints.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(subDataPoints, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n\n/**\n * Pull datapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints\n */\nasync function pullDatapoints(\n client: GraphQLClient,\n {\n dataPointIds = [],\n pageSize = 100,\n }: {\n /** IDs of data points to filter down */\n dataPointIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataPointCsvPreview[]> {\n const dataPoints: DataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 2/3] Fetching metadata for ${dataPointIds.length} datapoints`,\n ),\n );\n\n // Group by 100\n const dataPointsGrouped = chunk(dataPointIds, pageSize);\n\n progressBar.start(dataPointIds.length, 0);\n let total = 0;\n await mapSeries(dataPointsGrouped, async (dataPointIdsGroup) => {\n try {\n const {\n dataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataPoints: {\n /** List of matches */\n nodes: DataPointCsvPreview[];\n };\n }>(client, DATAPOINT_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataPointIdsGroup,\n },\n });\n\n dataPoints.push(...nodes);\n 
total += dataPointIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for IDs ${dataPointIdsGroup.join(\n ', ',\n )}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataPoints.length} dataPoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataPoints;\n}\n\n/**\n * Pull data silo information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The data silos\n */\nasync function pullDataSilos(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n pageSize = 100,\n }: {\n /** IDs of data silos to filter down */\n dataSiloIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataSiloCsvPreview[]> {\n const dataSilos: DataSiloCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 3/3] Fetching metadata for ${dataSiloIds.length} data silos`,\n ),\n );\n\n // Group by 100\n const dataSilosGrouped = chunk(dataSiloIds, pageSize);\n\n progressBar.start(dataSiloIds.length, 0);\n let total = 0;\n await mapSeries(dataSilosGrouped, async (dataSiloIdsGroup) => {\n try {\n const {\n dataSilos: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataSilos: {\n /** List of matches */\n nodes: DataSiloCsvPreview[];\n };\n }>(client, DATA_SILO_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataSiloIdsGroup,\n },\n });\n\n dataSilos.push(...nodes);\n total += dataSiloIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching data silos for IDs ${dataSiloIdsGroup.join(', ')}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataSilos.length} data silos in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataSilos;\n}\n\n/**\n * Pull all datapoints from the data inventory.\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints and data silos\n */\nexport async function pullAllDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<\n (SubDataPointCsvPreview & {\n /** Data point information */\n dataPoint: DataPointCsvPreview;\n /** Data silo information */\n dataSilo: DataSiloCsvPreview;\n })[]\n> {\n // Subdatapoint information\n const subDatapoints = await pullSubDatapoints(client, {\n dataSiloIds,\n includeGuessedCategories,\n includeAttributes,\n parentCategories,\n subCategories,\n pageSize,\n });\n\n // The datapoint ids to grab\n const dataPointIds = uniq(subDatapoints.map((point) => point.dataPointId));\n const dataPoints = await pullDatapoints(client, {\n dataPointIds,\n });\n const dataPointById = keyBy(dataPoints, 'id');\n\n // The data silo IDs to grab\n const allDataSiloIds = uniq(subDatapoints.map((point) => point.dataSiloId));\n const dataSilos = await pullDataSilos(client, {\n 
dataSiloIds: allDataSiloIds,\n });\n const dataSiloById = keyBy(dataSilos, 'id');\n\n return subDatapoints.map((subDataPoint) => ({\n ...subDataPoint,\n dataPoint: dataPointById[subDataPoint.dataPointId],\n dataSilo: dataSiloById[subDataPoint.dataSiloId],\n }));\n}\n/* eslint-enable max-lines */\n","import type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport { gql, type GraphQLClient } from 'graphql-request';\nimport { sortBy } from 'lodash-es';\nimport type { DataCategoryInput } from '../../codecs';\nimport { ENTRY_COUNT, makeGraphQLRequest } from '../graphql';\nimport { logger } from '../../logger';\n\ninterface UnstructuredSubDataPointRecommendationCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Entry or Named Entity recognized by the classifier */\n name: string;\n /** Context snippet including entry */\n contextSnippet: string;\n /** Scanned object ID */\n scannedObjectId: string;\n /** Scanned object path ID */\n scannedObjectPathId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** Personal data category */\n dataSubCategory: DataCategoryInput;\n /** Classification Status */\n status: UnstructuredSubDataPointRecommendationStatus;\n /** Confidence */\n confidence: number;\n /** Classification method */\n classificationMethod: string;\n /** Classifier version */\n classifierVersion: string;\n}\n\ninterface EntryFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Parent categories to filter down for */\n status?: UnstructuredSubDataPointRecommendationStatus[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n /** Include entry and snippet */\n includeEncryptedSnippets?: boolean;\n /** Include encryptedSamplesS3Key */\n includeEncryptedSamplesS3Key?: boolean;\n}\n/**\n * Pull unstructured subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @param options.dataSiloIds - IDs of data silos to filter down\n * @param options.status - Parent categories to filter down for\n * @param options.subCategories - Sub categories to filter down for\n * @param options.includeEncryptedSnippets - Include entry and snippet\n * @param options.includeEncryptedSamplesS3Key - Include encryptedSamplesS3Key\n * @param options.pageSize - Page size to pull in\n * @returns A promise that resolves to an array of unstructured subdatapoint recommendations\n */\nexport async function pullUnstructuredSubDataPointRecommendations(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n status,\n subCategories = [],\n includeEncryptedSnippets,\n pageSize = 100,\n }: EntryFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<UnstructuredSubDataPointRecommendationCsvPreview[]> {\n const unstructuredSubDataPointRecommendations: UnstructuredSubDataPointRecommendationCsvPreview[] =\n [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n ...(status ? { status } : {}),\n ...(dataSiloIds.length > 0 ? 
{ dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n unstructuredSubDataPointRecommendations: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** Count */\n totalCount: number;\n };\n }>(client, ENTRY_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n unstructuredSubDataPointRecommendations: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** List of matches */\n nodes: UnstructuredSubDataPointRecommendationCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliUnstructuredSubDataPointRecommendationCsvExport(\n $filterBy: UnstructuredSubDataPointRecommendationsFilterInput\n $first: Int!\n $offset: Int!\n ) {\n unstructuredSubDataPointRecommendations(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n dataSiloId\n scannedObjectPathId\n scannedObjectId\n ${includeEncryptedSnippets ? 'name' : ''}\n ${includeEncryptedSnippets ? 'contextSnippet' : ''}\n dataSubCategory {\n name\n category\n }\n status\n confidence\n classificationMethod\n classifierVersion\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n unstructuredSubDataPointRecommendations.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(unstructuredSubDataPointRecommendations, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n"]}
package/dist/{chunk-W5T3VHKD.cjs → chunk-FPXWBUHS.cjs}
@@ -1,4 +1,4 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } }var
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } }var _chunkE3CF3RKXcjs = require('./chunk-E3CF3RKX.cjs');var _chunkBY7W4UQFcjs = require('./chunk-BY7W4UQF.cjs');var _core = require('@stricli/core');var _privacytypes = require('@transcend-io/privacy-types');function h(e){if(!/^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test(e))throw new Error(`Invalid UUID format: ${e}`);return e}function a(e){try{return new URL(e).toString().replace(/\/$/,"")}catch (e2){throw new Error(`Invalid URL format: ${e}`)}}function b(e){return e.split(",").map(r=>r.trim()).filter(r=>r.length>0)}function T(e){let r=new Date(e);if(Number.isNaN(r.getTime()))throw new TypeError(`Invalid date: ${e}. Try using the ISO 8601 format (YYYY-MM-DDTHH:MM:SS.SSSZ)`);return r}var l=({scopes:e,requiresSiloScope:r=!1})=>{let t={kind:"parsed",parse:String,brief:"The Transcend API key."};return r&&(t.brief+=" This key must be associated with the data silo(s) being operated on."),e==="Varies"?{...t,brief:`${t.brief} The scopes required will vary depending on the operation performed. If in doubt, the ${_privacytypes.TRANSCEND_SCOPES[_privacytypes.ScopeName.FullAdmin].title} scope will always work.`}:e.length===0?{...t,brief:`${t.brief} No scopes are required for this command.`}:{...t,brief:`${t.brief} Requires scopes: ${e.map(p=>`"${_privacytypes.TRANSCEND_SCOPES[p].title}"`).join(", ")}`}},d= exports.e =(e=_chunkE3CF3RKXcjs.e)=>({kind:"parsed",parse:a,brief:"URL of the Transcend backend. Use https://api.us.transcend.io for US hosting",default:e}),A= exports.f =(e=_chunkE3CF3RKXcjs.f)=>({kind:"parsed",parse:a,brief:"URL of the Transcend consent backend. Use https://consent.us.transcend.io for US hosting",default:e}),U= exports.g =()=>({kind:"parsed",parse:String,brief:"The Sombra internal key, use for additional authentication when self-hosting Sombra",optional:!0});var g=["dataSilos","enrichers","templates","apiKeys"],N= exports.i =Object.values(_privacytypes.ConsentTrackerStatus),L= exports.j =_core.buildCommand.call(void 0, {loader:async()=>{let{pull:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-NNOWJ4Q4.cjs")));return e},parameters:{flags:{auth:l({scopes:"Varies"}),resources:{kind:"enum",values:["all",...Object.values(_chunkBY7W4UQFcjs.d)],brief:`The different resource types to pull in. Defaults to ${g.join(",")}.`,variadic:",",optional:!0},file:{kind:"parsed",parse:String,brief:"Path to the YAML file to pull into",default:"./transcend.yml"},transcendUrl:d(),dataSiloIds:{kind:"parsed",parse:String,variadic:",",brief:"The UUIDs of the data silos that should be pulled into the YAML file",optional:!0},integrationNames:{kind:"parsed",parse:String,variadic:",",brief:"The types of integrations to pull down",optional:!0},trackerStatuses:{kind:"enum",values:Object.values(_privacytypes.ConsentTrackerStatus),variadic:",",brief:"The statuses of consent manager trackers to pull down. 
Defaults to all statuses.",optional:!0},pageSize:{kind:"parsed",parse:_core.numberParser,brief:"The page size to use when paginating over the API",default:"50"},skipDatapoints:{kind:"boolean",brief:"When true, skip pulling in datapoints alongside data silo resource",default:!1},skipSubDatapoints:{kind:"boolean",brief:"When true, skip pulling in subDatapoints alongside data silo resource",default:!1},includeGuessedCategories:{kind:"boolean",brief:"When true, included guessed data categories that came from the content classifier",default:!1},debug:{kind:"boolean",brief:"Set to true to include debug logs while pulling the configuration",default:!1}}},docs:{brief:"Pull metadata from Transcend into transcend.yml",fullDescription:`Generates a transcend.yml by pulling the configuration from your Transcend instance.
The API key needs various scopes depending on the resources being pulled (see the CLI's README for more details).

@@ -6,4 +6,4 @@ This command can be helpful if you are looking to:

- Copy your data into another instance
- Generate a transcend.yml file as a starting point to maintain parts of your data inventory in code.`}});exports.a = h; exports.b = b; exports.c = T; exports.d = l; exports.e = d; exports.f = A; exports.g = U; exports.h = g; exports.i = N; exports.j = L;
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-FPXWBUHS.cjs.map
package/dist/{chunk-W5T3VHKD.cjs.map → chunk-FPXWBUHS.cjs.map}
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-FPXWBUHS.cjs","../src/commands/inventory/pull/command.ts","../src/lib/cli/parsers.ts"],"names":["uuidParser","input"],"mappings":"AAAA,mZAA+C,wDAAyC,qCCA7C,2DACN,SCMrBA,CAAAA,CAAWC,CAAAA,CAAuB,CAGhD,EAAA,CAAI,CADF,4EAAA,CACa,IAAA,CAAKA,CAAK,CAAA,CACvB,MAAM,IAAI,KAAA,CAAM,CAAA,qBAAA,EAAwBA,CAAK,CAAA,CAAA;AD0F5B;AAAA;AAAA;AAAA;AAAA;AAAA;AASpB,qGAAA","file":"/home/runner/work/cli/cli/dist/chunk-FPXWBUHS.cjs","sourcesContent":[null,"import { buildCommand, numberParser } from '@stricli/core';\nimport { ConsentTrackerStatus } from '@transcend-io/privacy-types';\nimport {\n createAuthParameter,\n createTranscendUrlParameter,\n} from '../../../lib/cli/common-parameters';\nimport { TranscendPullResource } from '../../../enums';\n\nexport const DEFAULT_TRANSCEND_PULL_RESOURCES = [\n TranscendPullResource.DataSilos,\n TranscendPullResource.Enrichers,\n TranscendPullResource.Templates,\n TranscendPullResource.ApiKeys,\n];\n\nexport const DEFAULT_CONSENT_TRACKER_STATUSES =\n Object.values(ConsentTrackerStatus);\n\nexport const pullCommand = buildCommand({\n loader: async () => {\n const { pull } = await import('./impl');\n return pull;\n },\n parameters: {\n flags: {\n auth: createAuthParameter({\n scopes: 'Varies',\n }),\n resources: {\n kind: 'enum',\n values: ['all', ...Object.values(TranscendPullResource)],\n brief: `The different resource types to pull in. Defaults to ${DEFAULT_TRANSCEND_PULL_RESOURCES.join(\n ',',\n )}.`,\n variadic: ',',\n optional: true,\n },\n file: {\n kind: 'parsed',\n parse: String,\n brief: 'Path to the YAML file to pull into',\n default: './transcend.yml',\n },\n transcendUrl: createTranscendUrlParameter(),\n dataSiloIds: {\n kind: 'parsed',\n parse: String,\n variadic: ',',\n brief:\n 'The UUIDs of the data silos that should be pulled into the YAML file',\n optional: true,\n },\n integrationNames: {\n kind: 'parsed',\n parse: String,\n variadic: ',',\n brief: 'The types of integrations to pull down',\n optional: true,\n },\n trackerStatuses: {\n kind: 'enum',\n values: Object.values(ConsentTrackerStatus),\n variadic: ',',\n brief:\n 'The statuses of consent manager trackers to pull down. 
Defaults to all statuses.',\n optional: true,\n },\n pageSize: {\n kind: 'parsed',\n parse: numberParser,\n brief: 'The page size to use when paginating over the API',\n default: '50',\n },\n skipDatapoints: {\n kind: 'boolean',\n brief:\n 'When true, skip pulling in datapoints alongside data silo resource',\n default: false,\n },\n skipSubDatapoints: {\n kind: 'boolean',\n brief:\n 'When true, skip pulling in subDatapoints alongside data silo resource',\n default: false,\n },\n includeGuessedCategories: {\n kind: 'boolean',\n brief:\n 'When true, included guessed data categories that came from the content classifier',\n default: false,\n },\n debug: {\n kind: 'boolean',\n brief:\n 'Set to true to include debug logs while pulling the configuration',\n default: false,\n },\n },\n },\n docs: {\n brief: 'Pull metadata from Transcend into transcend.yml',\n fullDescription: `Generates a transcend.yml by pulling the configuration from your Transcend instance.\n\nThe API key needs various scopes depending on the resources being pulled (see the CLI's README for more details).\n\nThis command can be helpful if you are looking to:\n\n- Copy your data into another instance\n- Generate a transcend.yml file as a starting point to maintain parts of your data inventory in code.`,\n },\n});\n","/**\n * Validates and returns a UUID string.\n *\n * @param input - The input string to validate as UUID\n * @returns The validated UUID string\n * @throws Error if input is not a valid UUID\n */\nexport function uuidParser(input: string): string {\n const uuidRegex =\n /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;\n if (!uuidRegex.test(input)) {\n throw new Error(`Invalid UUID format: ${input}`);\n }\n return input;\n}\n\n/**\n * Validates and returns a URL string.\n *\n * @param input - The input string to validate as URL\n * @returns The validated URL string\n * @throws Error if input is not a valid URL\n */\nexport function urlParser(input: string): string {\n try {\n const url = new URL(input);\n return url.toString().replace(/\\/$/, '');\n } catch {\n throw new Error(`Invalid URL format: ${input}`);\n }\n}\n\n/**\n * Parse a comma-separated string to array.\n * NOTE: Prefer using `variadic` for list arguments instead of this function. This should only be used for arguments which have a default value.\n *\n * @param input - The comma-separated string to parse\n * @returns Array of trimmed, non-empty strings\n */\nexport function arrayParser(input: string): string[] {\n return input\n .split(',')\n .map((s) => s.trim())\n .filter((s) => s.length > 0);\n}\n\n/**\n * Parse a date string to a Date object.\n *\n * @param input - The date string to parse\n * @returns The parsed Date object\n * @throws TypeError if input is not a valid date\n */\nexport function dateParser(input: string): Date {\n const date = new Date(input);\n if (Number.isNaN(date.getTime())) {\n throw new TypeError(\n `Invalid date: ${input}. Try using the ISO 8601 format (YYYY-MM-DDTHH:MM:SS.SSSZ)`,\n );\n }\n return date;\n}\n"]}
package/dist/chunk-G2T7YVBG.cjs
@@ -0,0 +1,113 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } }var _chunkFPXWBUHScjs = require('./chunk-FPXWBUHS.cjs');var _chunkE3CF3RKXcjs = require('./chunk-E3CF3RKX.cjs');var _chunkY4BWTFTXcjs = require('./chunk-Y4BWTFTX.cjs');var _chunkBY7W4UQFcjs = require('./chunk-BY7W4UQF.cjs');var _autocomplete = require('@stricli/auto-complete');var _core = require('@stricli/core');var A=_core.buildCommand.call(void 0, {loader:async()=>{let{generateApiKeys:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-6B7JDOM5.cjs")));return e},parameters:{flags:{email:{kind:"parsed",parse:String,brief:"The email address that you use to log into Transcend"},password:{kind:"parsed",parse:String,brief:"The password for your account login"},apiKeyTitle:{kind:"parsed",parse:String,brief:"The title of the API key being generated or destroyed"},file:{kind:"parsed",parse:String,brief:"The file where API keys should be written to"},scopes:{kind:"enum",values:_chunkE3CF3RKXcjs.k,variadic:",",brief:"The list of scopes that should be given to the API key"},deleteExistingApiKey:{kind:"boolean",brief:"When true, if an API key exists with the specified apiKeyTitle, the existing API key is deleted",default:!0},createNewApiKey:{kind:"boolean",brief:"When true, new API keys will be created. Set to false if you simply want to delete all API keys with a title",default:!0},parentOrganizationId:{kind:"parsed",parse:_chunkFPXWBUHScjs.a,brief:"Filter for only a specific organization by ID, returning all child accounts associated with that organization",optional:!0},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, )}},docs:{brief:"Generate API keys",fullDescription:`This command allows for creating API keys across multiple Transcend instances. This is useful for customers that are managing many Transcend instances and need to regularly create, cycle or delete API keys across all of their instances.
Unlike the other commands that rely on API key authentication, this command relies upon username/password authentication. This command will spit out the API keys into a JSON file, and that JSON file can be used in subsequent CLI commands.
Authentication requires your email and password for the Transcend account. This command will only generate API keys for Transcend instances where you have the permission to "Manage API Keys".`}});var I=_core.buildRouteMap.call(void 0, {routes:{"generate-api-keys":A},docs:{brief:"Admin commands"}});var _privacytypes = require('@transcend-io/privacy-types');var D=_core.buildCommand.call(void 0, {loader:async()=>{let{buildXdiSyncEndpoint:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-5FO2QEHJ.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ViewConsentManager]}),xdiLocation:{kind:"parsed",parse:String,brief:"The location of the XDI that will be loaded by the generated sync endpoint"},file:{kind:"parsed",parse:String,brief:"The HTML file path where the sync endpoint should be written",default:"./sync-endpoint.html"},removeIpAddresses:{kind:"boolean",brief:"When true, remove IP addresses from the domain list",default:!0},domainBlockList:{kind:"parsed",parse:_chunkFPXWBUHScjs.b,brief:"The set of domains that should be excluded from the sync endpoint. Comma-separated list.",default:"localhost"},xdiAllowedCommands:{kind:"parsed",parse:String,brief:"The allowed set of XDI commands",default:"ConsentManager:Sync"},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, )}},docs:{brief:"Build XDI sync endpoint",fullDescription:"This command allows for building of the XDI Sync Endpoint across a set of Transcend accounts."}});var R=_core.buildCommand.call(void 0, {loader:async()=>{let{pullConsentMetrics:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-CNCC36M6.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ViewConsentManager]}),start:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"The start date to pull metrics from"},end:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"The end date to pull metrics until",optional:!0},folder:{kind:"parsed",parse:String,brief:"The folder to save metrics to",default:"./consent-metrics/"},bin:{kind:"parsed",parse:String,brief:"The bin metric when pulling data (1h or 1d)",default:"1d"},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, )}},docs:{brief:"Pull consent metrics",fullDescription:"This command allows for pulling consent manager metrics for a Transcend account, or a set of Transcend accounts.\n\nBy default, the consent metrics will be written to a folder named `consent-metrics` within the directory where you run the command. You can override the location that these CSVs are written to using the flag `--folder=./my-folder/`. 
This folder will contain a set of CSV files:\n\n- `CONSENT_CHANGES_TIMESERIES_optIn.csv` -> this is a feed containing the number of explicit opt in events that happen - these are calls to `airgap.setConsent(event, { SaleOfInfo: true });`\n- `CONSENT_CHANGES_TIMESERIES_optOut.csv` -> this is a feed containing the number of explicit opt out events that happen - these are calls to `airgap.setConsent(event, { SaleOfInfo: false });`\n- `CONSENT_SESSIONS_BY_REGIME_Default.csv` -> this contains the number of sessions detected for the bin period\n- `PRIVACY_SIGNAL_TIMESERIES_DNT.csv` -> the number of DNT signals detected.\n- `PRIVACY_SIGNAL_TIMESERIES_GPC.csv` -> the number of GPC signals detected."}});var U=_core.buildCommand.call(void 0, {loader:async()=>{let{pullConsentPreferences:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-AUOR6D3Q.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ViewManagedConsentDatabaseAdminApi]}),partition:{kind:"parsed",parse:String,brief:"The partition key to download consent preferences to"},sombraAuth:_chunkFPXWBUHScjs.g.call(void 0, ),file:{kind:"parsed",parse:String,brief:"Path to the CSV file to save preferences to",default:"./preferences.csv"},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),timestampBefore:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Filter for consents updated this time",optional:!0},timestampAfter:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Filter for consents updated after this time",optional:!0},identifiers:{kind:"parsed",parse:String,variadic:",",brief:"Filter for specific identifiers",optional:!0},concurrency:{kind:"parsed",parse:_core.numberParser,brief:"The concurrency to use when downloading consents in parallel",default:"100"}}},docs:{brief:"Pull consent preferences",fullDescription:"This command allows for pull of consent preferences from the Managed Consent Database."}});var x=_core.buildCommand.call(void 0, {loader:async()=>{let{updateConsentManager:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-ICI7EQKE.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ManageConsentManagerDeveloperSettings]}),bundleTypes:{kind:"enum",values:Object.values(_privacytypes.ConsentBundleType),brief:"The bundle types to deploy. Defaults to PRODUCTION,TEST.",variadic:","},deploy:{kind:"boolean",brief:"When true, deploy the Consent Manager after updating the version",default:!1},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, )}},docs:{brief:"Update consent manager",fullDescription:"This command allows for updating Consent Manager to latest version. 
The Consent Manager bundle can also be deployed using this command."}});var F=_core.buildCommand.call(void 0, {loader:async()=>{let{uploadConsentPreferences:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-KOMGU55G.cjs")));return e},parameters:{flags:{base64EncryptionKey:{kind:"parsed",parse:String,brief:"The encryption key used to encrypt the userId"},base64SigningKey:{kind:"parsed",parse:String,brief:"The signing key used to prove authentication of consent request"},partition:{kind:"parsed",parse:String,brief:"The partition key to download consent preferences to"},file:{kind:"parsed",parse:String,brief:"The file to pull consent preferences from",default:"./preferences.csv"},consentUrl:_chunkFPXWBUHScjs.f.call(void 0, ),concurrency:{kind:"parsed",parse:_core.numberParser,brief:"The concurrency to use when uploading requests in parallel",default:"100"}}},docs:{brief:"Upload consent preferences to the Managed Consent Database",fullDescription:"This command allows for updating of consent preferences to the Managed Consent Database."}});var V=_core.buildCommand.call(void 0, {loader:async()=>{let{uploadCookiesFromCsv:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-DBDL4Z23.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ManageDataFlow]}),trackerStatus:{kind:"enum",values:Object.values(_privacytypes.ConsentTrackerStatus),brief:"The status of the cookies you will upload."},file:{kind:"parsed",parse:String,brief:"Path to the CSV file to upload",default:"./cookies.csv"},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, )}},docs:{brief:"Upload cookies from CSV",fullDescription:`Upload cookies from CSV. This command allows for uploading of cookies from CSV.
Step 1) Download the CSV of cookies that you want to edit from the Admin Dashboard under [Consent Management -> Cookies](https://app.transcend.io/consent-manager/cookies). You can download cookies from both the "Triage" and "Approved" tabs.
Step 2) You can edit the contents of the CSV file as needed. You may adjust the "Purpose" column, adjust the "Notes" column, add "Owners" and "Teams" or even add custom columns with additional metadata.
Step 3) Upload the modified CSV file back into the dashboard with this command.`}});var M=_core.buildCommand.call(void 0, {loader:async()=>{let{uploadDataFlowsFromCsv:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-KP27234L.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ManageDataFlow]}),trackerStatus:{kind:"enum",values:Object.values(_privacytypes.ConsentTrackerStatus),brief:"The status of the data flows you will upload."},file:{kind:"parsed",parse:String,brief:"Path to the CSV file to upload",default:"./data-flows.csv"},classifyService:{kind:"boolean",brief:"When true, automatically assign the service for a data flow based on the domain that is specified",default:!1},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, )}},docs:{brief:"Upload data flows from CSV",fullDescription:`Upload data flows from CSV. This command allows for uploading of data flows from CSV.
Step 1) Download the CSV of data flows that you want to edit from the Admin Dashboard under [Consent Management -> Data Flows](https://app.transcend.io/consent-manager/data-flows). You can download data flows from both the "Triage" and "Approved" tabs.
Step 2) You can edit the contents of the CSV file as needed. You may adjust the "Purpose" column, adjust the "Notes" column, add "Owners" and "Teams" or even add custom columns with additional metadata.
Step 3) Upload the modified CSV file back into the dashboard with this command.`}});var O=_core.buildCommand.call(void 0, {loader:async()=>{let{uploadPreferences:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-MPGDZ2M2.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ManageStoredPreferences,_privacytypes.ScopeName.ViewManagedConsentDatabaseAdminApi,_privacytypes.ScopeName.ViewPreferenceStoreSettings]}),partition:{kind:"parsed",parse:String,brief:"The partition key to download consent preferences to"},sombraAuth:_chunkFPXWBUHScjs.g.call(void 0, ),transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),file:{kind:"parsed",parse:String,brief:"Path to the CSV file to load preferences from",optional:!0},directory:{kind:"parsed",parse:String,brief:"Path to the directory of CSV files to load preferences from",optional:!0},dryRun:{kind:"boolean",brief:"Whether to do a dry run only - will write results to receiptFilepath without updating Transcend",default:!1},skipExistingRecordCheck:{kind:"boolean",brief:"Whether to skip the check for existing records. SHOULD ONLY BE USED FOR INITIAL UPLOAD",default:!1},receiptFileDir:{kind:"parsed",parse:String,brief:"Directory path where the response receipts should be saved",default:"./receipts"},skipWorkflowTriggers:{kind:"boolean",brief:"Whether to skip workflow triggers when uploading to preference store",default:!1},forceTriggerWorkflows:{kind:"boolean",brief:"Whether to force trigger workflows for existing consent records",default:!1},skipConflictUpdates:{kind:"boolean",brief:"Whether to skip uploading of any records where the preference store and file have a hard conflict",default:!1},isSilent:{kind:"boolean",brief:"Whether to skip sending emails in workflows",default:!0},attributes:{kind:"parsed",parse:String,brief:"Attributes to add to any DSR request if created. Comma-separated list of key:value pairs.",default:"Tags:transcend-cli,Source:transcend-cli"},receiptFilepath:{kind:"parsed",parse:String,brief:"Store resulting, continuing where left off",default:"./preference-management-upload-receipts.json"},concurrency:{kind:"parsed",parse:_core.numberParser,brief:"The concurrency to use when uploading in parallel",default:"10"}}},docs:{brief:"Upload preference management data to your Preference Store",fullDescription:`Upload preference management data to your Preference Store.
This command prompts you to map the shape of the CSV to the shape of the Transcend API. There is no requirement for the shape of the incoming CSV, as the script will handle the mapping process.
The script will also produce a JSON cache file that allows for the mappings to be preserved between runs.`}});var j=_core.buildRouteMap.call(void 0, {routes:{"build-xdi-sync-endpoint":D,"pull-consent-metrics":R,"pull-consent-preferences":U,"update-consent-manager":x,"upload-consent-preferences":F,"upload-cookies-from-csv":V,"upload-data-flows-from-csv":M,"upload-preferences":O},docs:{brief:"Consent commands"}});var N=_core.buildCommand.call(void 0, {loader:async()=>{let{deriveDataSilosFromDataFlowsCrossInstance:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-N6ML5K5S.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[]}),dataFlowsYmlFolder:{kind:"parsed",parse:String,brief:"The folder that contains data flow yml files"},output:{kind:"parsed",parse:String,brief:"The output transcend.yml file containing the data silo configurations",default:"./transcend.yml"},ignoreYmls:{kind:"parsed",parse:String,variadic:",",brief:"The set of yml files that should be skipped when uploading",optional:!0},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, )}},docs:{brief:"Derive data silos from data flows cross instance",fullDescription:"Given a folder of data flow transcend.yml configurations, convert those configurations to a single transcend.yml configurations of all related data silos."}});var E=_core.buildCommand.call(void 0, {loader:async()=>{let{deriveDataSilosFromDataFlows:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-BPWMOF4U.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[]}),dataFlowsYmlFolder:{kind:"parsed",parse:String,brief:"The folder that contains data flow yml files"},dataSilosYmlFolder:{kind:"parsed",parse:String,brief:"The folder that contains data silo yml files"},ignoreYmls:{kind:"parsed",parse:String,variadic:",",brief:"The set of yml files that should be skipped when uploading",optional:!0},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, )}},docs:{brief:"Derive data silos from data flows",fullDescription:"Given a folder of data flow transcend.yml configurations, convert those configurations to set of data silo transcend.yml configurations."}});var B=_core.buildCommand.call(void 0, {loader:async()=>{let{discoverSilos:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-Q6EQYFKN.cjs")));return e},parameters:{flags:{scanPath:{kind:"parsed",parse:String,brief:"File path in the project to scan"},dataSiloId:{kind:"parsed",parse:_chunkFPXWBUHScjs.a,brief:"The UUID of the corresponding data silo"},auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ManageAssignedDataInventory],requiresSiloScope:!0}),fileGlobs:{kind:"parsed",parse:String,brief:"You can pass a glob syntax pattern(s) to specify additional file paths to scan. Comma-separated list of globs.",default:""},ignoreDirs:{kind:"parsed",parse:String,brief:"Comma-separated list of directories to ignore.",default:""},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, )}},docs:{brief:"Scan dependency management files to discover new data silos.",fullDescription:`We support scanning for new data silos in JavaScript, Python, Gradle, and CocoaPods projects.
To get started, add a data silo for the corresponding project type with the "silo discovery" plugin enabled. For example, if you want to scan a JavaScript project, add a package.json data silo. Then, specify the data silo ID in the "--dataSiloId" parameter.`}});var $=_core.buildCommand.call(void 0, {loader:async()=>{let{pullDatapoints:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-MO6AOGQM.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ViewDataInventory]}),file:{kind:"parsed",parse:String,brief:"The file to save datapoints to",default:"./datapoints.csv"},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),dataSiloIds:{kind:"parsed",parse:String,variadic:",",brief:"List of data silo IDs to filter by",optional:!0},includeAttributes:{kind:"boolean",brief:"Whether to include attributes in the output",default:!1},includeGuessedCategories:{kind:"boolean",brief:"Whether to include guessed categories in the output",default:!1},parentCategories:{kind:"enum",values:Object.values(_privacytypes.DataCategoryType),brief:"List of parent categories to filter by",variadic:",",optional:!0},subCategories:{kind:"parsed",parse:String,brief:"List of subcategories to filter by",variadic:",",optional:!0}}},docs:{brief:"Export the datapoints from your Data Inventory into a CSV."}});var W=_core.buildCommand.call(void 0, {loader:async()=>{let{pullUnstructuredDiscoveryFiles:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-R3KCARSP.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ViewDataInventory]}),file:{kind:"parsed",parse:String,brief:"The file to save datapoints to",default:"./unstructured-discovery-files.csv"},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),dataSiloIds:{kind:"parsed",parse:String,brief:"List of data silo IDs to filter by",variadic:",",optional:!0},subCategories:{kind:"parsed",parse:String,brief:"List of data categories to filter by",variadic:",",optional:!0},status:{kind:"enum",values:Object.values(_privacytypes.UnstructuredSubDataPointRecommendationStatus),brief:"List of classification statuses to filter by",variadic:",",optional:!0},includeEncryptedSnippets:{kind:"boolean",brief:"Whether to include encrypted snippets of the entries classified",default:!1}}},docs:{brief:"Pull unstructured discovery files",fullDescription:"This command allows for pulling Unstructured Discovery into a CSV."}});var L=_core.buildCommand.call(void 0, {loader:async()=>{let{push:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-OLEEHZUA.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:"Varies"}),file:{kind:"parsed",parse:String,brief:"Path to the YAML file to push from",default:"./transcend.yml"},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),pageSize:{kind:"parsed",parse:_core.numberParser,brief:"The page size to use when paginating over the API",default:"50"},variables:{kind:"parsed",parse:String,brief:"The variables to template into the YAML file when pushing configuration. 
Comma-separated list of key:value pairs.",default:""},publishToPrivacyCenter:{kind:"boolean",brief:"When true, publish the configuration to the Privacy Center",default:!1},classifyService:{kind:"boolean",brief:"When true, automatically assign the service for a data flow based on the domain that is specified",default:!1},deleteExtraAttributeValues:{kind:"boolean",brief:"When true and syncing attributes, delete any extra attributes instead of just upserting",default:!1}}},docs:{brief:"Push metadata from transcend.yml to Transcend",fullDescription:"Given a transcend.yml file, sync the contents up to your Transcend instance."}});var Y=_core.buildCommand.call(void 0, {loader:async()=>{let{scanPackages:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-LTOV5CHF.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ManageCodeScanning]}),scanPath:{kind:"parsed",parse:String,brief:"File path in the project to scan",default:"./"},ignoreDirs:{kind:"parsed",parse:String,variadic:",",brief:"List of directories to ignore in scan",optional:!0},repositoryName:{kind:"parsed",parse:String,brief:"Name of the git repository that the package should be tied to",optional:!0},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, )}},docs:{brief:"Scan dependency management files to inventory code dependencies.",fullDescription:`Transcend scans packages and dependencies for the following frameworks:
- package.json
- requirements.txt & setup.py
- Podfile
- Package.resolved
- build.gradle
- pubspec.yaml
- Gemfile & .gemspec
- composer.json
This command will scan the folder you point at to look for any of these files. Once found, the build file will be parsed in search of dependencies. Those code packages and dependencies will be uploaded to Transcend. The information uploaded to Transcend is:
- repository name
- package names
- dependency names and versions
- package descriptions`}});function n(e,l,d){let i=e.join(" "),u=ft(l),{forceSingleLine:De=!1,argsIndent:y=2}=_nullishCoalesce(d, () => ({}));if(u.length===0)return`${_chunkE3CF3RKXcjs.a} ${i}`;let Re=`${_chunkE3CF3RKXcjs.a} ${i} ${u.join(" ")}`.length<=117&&!De?`${i} ${u.join(" ")}`:`${i} \\
${" ".repeat(y)}${u.join(` \\
${" ".repeat(y)}`)}`;return`${_chunkE3CF3RKXcjs.a} ${Re}`}function _(e,l=0){if(typeof e=="boolean")return e?"true":"false";if(typeof e=="number")return e.toString();if(e instanceof Date)return e.toISOString();if(Array.isArray(e)&&l===0){let d=e.map(i=>_(i,l+1));return d.every(i=>i.startsWith("$")||i.includes(" "))?`"${d.join(",")}"`:d.join(",")}if(typeof e=="string")return l===1?e.startsWith("$")?`\${${e.slice(1)}}`:e:e.startsWith("$")||e.includes(" ")?`"${e}"`:e;throw new Error(`Unsupported value type: ${typeof e}`)}function ft(e,l=0){return Object.entries(e).map(([d,i])=>{if(typeof i=="boolean"&&i)return`--${d}`;let u=_(i,l);return`--${d}=${u}`})}var G=_core.buildCommand.call(void 0, {loader:async()=>{let{consentManagerServiceJsonToYml:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-24MFRX5R.cjs")));return e},parameters:{flags:{file:{kind:"parsed",parse:String,brief:"Path to the services.json file, output of await airgap.getMetadata()",default:"./services.json"},output:{kind:"parsed",parse:String,brief:"Path to the output transcend.yml to write to",default:"./transcend.yml"}}},docs:{brief:"Convert consent manager services to transcend.yml",fullDescription:`Import the services from an airgap.js file into a Transcend instance.
1. Run \`await airgap.getMetadata()\` on a site with airgap
2. Right click on the printed object, and click \`Copy object\`
3. Place output of file in a file named \`services.json\`
4. Run:
${n(["inventory","consent-manager-service-json-to-yml"],{file:"./services.json",output:"./transcend.yml"},{argsIndent:5})}
5. Run:
${n(["inventory","push"],{auth:"$TRANSCEND_API_KEY",file:"./transcend.yml",classifyService:!0},{argsIndent:5})}`}});var J=_core.buildCommand.call(void 0, {loader:async()=>{let{consentManagersToBusinessEntities:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-QHTG36G3.cjs")));return e},parameters:{flags:{consentManagerYmlFolder:{kind:"parsed",parse:String,brief:"Path to the folder of Consent Manager transcend.yml files to combine"},output:{kind:"parsed",parse:String,brief:"Path to the output transcend.yml with business entity configuration",default:"./combined-business-entities.yml"}}},docs:{brief:"Convert consent managers to business entities",fullDescription:"This command allows for converting a folder or Consent Manager transcend.yml files into a single transcend.yml file where each consent manager configuration is a Business Entity in the data inventory."}});var z=_core.buildRouteMap.call(void 0, {routes:{pull:_chunkFPXWBUHScjs.j,push:L,"scan-packages":Y,"discover-silos":B,"pull-datapoints":$,"pull-unstructured-discovery-files":W,"derive-data-silos-from-data-flows":E,"derive-data-silos-from-data-flows-cross-instance":N,"consent-manager-service-json-to-yml":G,"consent-managers-to-business-entities":J},docs:{brief:"Inventory commands"}});var K=_core.buildCommand.call(void 0, {loader:async()=>{let{syncOt:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-L3M67IGI.cjs")));return e},parameters:{flags:{hostname:{kind:"parsed",parse:String,brief:"The domain of the OneTrust environment from which to pull the resource",optional:!0},oneTrustAuth:{kind:"parsed",parse:String,brief:"The OAuth access token with the scopes necessary to access the OneTrust Public APIs",optional:!0},source:{kind:"enum",values:Object.values(_chunkBY7W4UQFcjs.c),brief:"Whether to read the assessments from OneTrust or from a file",default:"oneTrust"},transcendAuth:{..._chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ManageAssessments]}),optional:!0},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),file:{kind:"parsed",parse:String,brief:"Path to the file to pull the resource into. Must be a json file!",optional:!0},resource:{kind:"enum",values:Object.values(_chunkBY7W4UQFcjs.b),brief:"The resource to pull from OneTrust. For now, only assessments is supported",default:"assessments"},dryRun:{kind:"boolean",brief:"Whether to export the resource to a file rather than sync to Transcend",default:!1},debug:{kind:"boolean",brief:"Whether to print detailed logs in case of error",default:!1}}},docs:{brief:"Sync OneTrust data",fullDescription:`Pulls resources from a OneTrust and syncs them to a Transcend instance. For now, it only supports retrieving OneTrust Assessments.
This command can be helpful if you are looking to:
- Pull resources from your OneTrust account.
- Migrate your resources from your OneTrust account to Transcend.
OneTrust authentication requires an OAuth Token with scope for accessing the assessment endpoints.
If syncing the resources to Transcend, you will also need to generate an API key on the Transcend Admin Dashboard.`}});var X=_core.buildRouteMap.call(void 0, {routes:{"sync-ot":K},docs:{brief:"Migration commands"}});var H=_core.buildCommand.call(void 0, {loader:async()=>{let{approve:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-ZTWLYZZO.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.RequestApproval,_privacytypes.ScopeName.ViewRequests,_privacytypes.ScopeName.ManageRequestCompilation]}),actions:{kind:"enum",values:Object.values(_privacytypes.RequestAction),variadic:",",brief:"The request actions to approve"},origins:{kind:"enum",values:Object.values(_privacytypes.RequestOrigin),variadic:",",brief:"The request origins to approve",optional:!0},silentModeBefore:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Any requests made before this date should be marked as silent mode",optional:!0},createdAtBefore:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Approve requests that were submitted before this time",optional:!0},createdAtAfter:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Approve requests that were submitted after this time",optional:!0},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),concurrency:{kind:"parsed",parse:_core.numberParser,brief:"The concurrency to use when uploading requests in parallel",default:"50"}}},docs:{brief:"Bulk approve a set of privacy requests",fullDescription:"Bulk approve a set of privacy requests from the DSR Automation -> Incoming Requests tab."}});var Z=_core.buildCommand.call(void 0, {loader:async()=>{let{cancel:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-TPVA6DLJ.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ViewRequests,_privacytypes.ScopeName.RequestApproval]}),actions:{kind:"enum",values:Object.values(_privacytypes.RequestAction),variadic:",",brief:"The request actions to cancel"},statuses:{kind:"enum",values:Object.values(_privacytypes.RequestStatus),variadic:",",brief:"The request statuses to cancel. 
Comma-separated list.",optional:!0},requestIds:{kind:"parsed",parse:String,variadic:",",brief:"Specify the specific request IDs to cancel",optional:!0},silentModeBefore:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Any requests made before this date should be marked as silent mode for canceling to skip email sending",optional:!0},createdAtBefore:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Cancel requests that were submitted before this time",optional:!0},createdAtAfter:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Cancel requests that were submitted after this time",optional:!0},cancellationTitle:{kind:"parsed",parse:String,brief:"The title of the email template that should be sent to the requests upon cancelation",default:"Request Canceled"},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),concurrency:{kind:"parsed",parse:_core.numberParser,brief:"The concurrency to use when uploading requests in parallel",default:"50"}}},docs:{brief:"Bulk cancel a set of privacy requests",fullDescription:"Bulk cancel a set of privacy requests from the DSR Automation -> Incoming Requests tab."}});var ee=_core.buildCommand.call(void 0, {loader:async()=>{let{markIdentifiersCompleted:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-UKOAB6ED.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[],requiresSiloScope:!0}),dataSiloId:{kind:"parsed",parse:_chunkFPXWBUHScjs.a,brief:"The ID of the data silo to pull in"},file:{kind:"parsed",parse:String,brief:"Path to the CSV file where identifiers will be written to",default:"./cron-identifiers.csv"},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),sombraAuth:_chunkFPXWBUHScjs.g.call(void 0, )}},docs:{brief:"Mark identifiers as completed after processing.",fullDescription:`This command takes the output of "${n(["request","cron","pull-identifiers"],{})}" and notifies Transcend that all of the requests in the CSV have been processed.
This is used in the workflow like:
1. Pull identifiers to CSV:
${n(["request","cron","pull-identifiers"],{auth:"$TRANSCEND_API_KEY",dataSiloId:"70810f2e-cf90-43f6-9776-901a5950599f",actions:[_privacytypes.RequestAction.Erasure],file:"./outstanding-requests.csv"},{argsIndent:5})}
2. Run your process to operate on that CSV of requests.
3. Notify Transcend of completion
${n(["request","cron","mark-identifiers-completed"],{auth:"$TRANSCEND_API_KEY",dataSiloId:"70810f2e-cf90-43f6-9776-901a5950599f",file:"./outstanding-requests.csv"},{argsIndent:5})}
Read more at https://docs.transcend.io/docs/integrations/cron-job-integration.`}});var re=_core.buildCommand.call(void 0, {loader:async()=>{let{pullIdentifiers:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-FG47LALL.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[],requiresSiloScope:!0}),dataSiloId:{kind:"parsed",parse:_chunkFPXWBUHScjs.a,brief:"The ID of the data silo to pull in"},actions:{kind:"enum",values:Object.values(_privacytypes.RequestAction),variadic:",",brief:"The request actions to restart"},file:{kind:"parsed",parse:String,brief:"Path to the CSV file where identifiers will be written to",default:"./cron-identifiers.csv"},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),sombraAuth:_chunkFPXWBUHScjs.g.call(void 0, ),pageLimit:{kind:"parsed",parse:_core.numberParser,brief:"The page limit to use when pulling in pages of identifiers",default:"100"},skipRequestCount:{kind:"boolean",brief:"Whether to skip the count of all outstanding requests. This is required to render the progress bar, but can take a long time to run if you have a large number of outstanding requests to process. In that case, we recommend setting skipRequestCount=true so that you can still proceed with fetching the identifiers",default:!1},chunkSize:{kind:"parsed",parse:_core.numberParser,brief:"Maximum number of rows per CSV file. For large datasets, the output will be automatically split into multiple files to avoid file system size limits. Each file will contain at most this many rows",default:"10000"}}},docs:{brief:"Pull identifiers of outstanding requests for a data silo to a CSV.",fullDescription:`If you are using the cron job integration, you can run this command to pull the outstanding identifiers for the data silo to a CSV.
For large datasets, the output will be automatically split into multiple CSV files to avoid file system size limits. Use the --chunkSize parameter to control the maximum number of rows per file.
Read more at https://docs.transcend.io/docs/integrations/cron-job-integration.`}});var oe=_core.buildCommand.call(void 0, {loader:async()=>{let{pullProfiles:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-SNLQCZOR.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[],requiresSiloScope:!0}),cronDataSiloId:{kind:"parsed",parse:_chunkFPXWBUHScjs.a,brief:"The ID of the cron data silo to pull in"},targetDataSiloId:{kind:"parsed",parse:_chunkFPXWBUHScjs.a,brief:"The ID of the target data silo to pull in"},actions:{kind:"enum",values:Object.values(_privacytypes.RequestAction),variadic:",",brief:"The request actions to restart"},file:{kind:"parsed",parse:String,brief:"Path to the CSV file where identifiers will be written to",default:"./cron-identifiers.csv"},fileTarget:{kind:"parsed",parse:String,brief:"Path to the CSV file where identifiers will be written to",default:"./cron-identifiers-target.csv"},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),sombraAuth:_chunkFPXWBUHScjs.g.call(void 0, ),pageLimit:{kind:"parsed",parse:_core.numberParser,brief:"The page limit to use when pulling in pages of identifiers",default:"100"},skipRequestCount:{kind:"boolean",brief:"Whether to skip the count of all outstanding requests. This is required to render the progress bar, but can take a long time to run if you have a large number of outstanding requests to process. In that case, we recommend setting skipRequestCount=true so that you can still proceed with fetching the identifiers",default:!1},chunkSize:{kind:"parsed",parse:_core.numberParser,brief:"Maximum number of rows per CSV file. For large datasets, the output will be automatically split into multiple files to avoid file system size limits. Each file will contain at most this many rows",default:"10000"}}},docs:{brief:"Pull profiles of outstanding requests for a data silo to a CSV.",fullDescription:`If you are using the cron job integration, you can run this command to pull the outstanding profiles for the data silo to a CSV.
For large datasets, the output will be automatically split into multiple CSV files to avoid file system size limits. Use the --chunkSize parameter to control the maximum number of rows per file.
Read more at https://docs.transcend.io/docs/integrations/cron-job-integration.`}});var se=_core.buildRouteMap.call(void 0, {routes:{"pull-identifiers":re,"pull-profiles":oe,"mark-identifiers-completed":ee},docs:{brief:"Cron commands",hideRoute:{"pull-profiles":!0}}});var ie=_core.buildCommand.call(void 0, {loader:async()=>{let{downloadFiles:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-B5HQXUMH.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ViewRequestCompilation,_privacytypes.ScopeName.ViewRequests,_privacytypes.ScopeName.RequestApproval]}),sombraAuth:_chunkFPXWBUHScjs.g.call(void 0, ),concurrency:{kind:"parsed",parse:_core.numberParser,brief:"The concurrency to use when downloading requests in parallel",default:"10"},requestIds:{kind:"parsed",parse:String,variadic:",",brief:"Specify the specific request IDs to download",optional:!0},statuses:{kind:"enum",values:Object.values(_privacytypes.RequestStatus),variadic:",",brief:"The request statuses to download. Comma-separated list. Defaults to APPROVING,DOWNLOADABLE.",optional:!0},folderPath:{kind:"parsed",parse:String,brief:"The folder to download files to",default:"./dsr-files"},createdAtBefore:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Download requests that were submitted before this time",optional:!0},createdAtAfter:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Download requests that were submitted after this time",optional:!0},approveAfterDownload:{kind:"boolean",brief:"If the request is in status=APPROVING, approve the request after its downloaded",default:!1},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, )}},docs:{brief:"Download the files associated with a Data Subject Access Request (DSAR)",fullDescription:"Download the files associated with a Data Subject Access Request (DSAR) from DSR Automation -> Incoming Requests tab."}});var ne=_core.buildCommand.call(void 0, {loader:async()=>{let{enricherRestart:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-ZKBMWGMK.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ManageRequestCompilation]}),enricherId:{kind:"parsed",parse:String,brief:"The ID of the enricher to restart"},actions:{kind:"enum",values:Object.values(_privacytypes.RequestAction),variadic:",",brief:"The request action to restart",optional:!0},requestEnricherStatuses:{kind:"enum",values:Object.values(_privacytypes.RequestEnricherStatus),variadic:",",brief:"The request enricher statuses to restart",optional:!0},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),concurrency:{kind:"parsed",parse:_core.numberParser,brief:"The concurrency to use when uploading requests in parallel",default:"15"},requestIds:{kind:"parsed",parse:String,variadic:",",brief:"Specify the specific request IDs to restart",optional:!0},createdAtBefore:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Restart requests that were submitted before this time",optional:!0},createdAtAfter:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Restart requests that were submitted after this time",optional:!0}}},docs:{brief:"Bulk restart a particular enricher across a series of DSRs",fullDescription:`Bulk restart a particular enricher across a series of DSRs.
The API key needs the following scopes:
- Manage Request Compilation`}});var ue=_core.buildCommand.call(void 0, {loader:async()=>{let{_export:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-QITUCVEV.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ViewRequests,_privacytypes.ScopeName.ViewRequestCompilation]}),sombraAuth:_chunkFPXWBUHScjs.g.call(void 0, ),actions:{kind:"enum",values:Object.values(_privacytypes.RequestAction),variadic:",",brief:"The request actions to export",optional:!0},statuses:{kind:"enum",values:Object.values(_privacytypes.RequestStatus),variadic:",",brief:"The request statuses to export",optional:!0},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),file:{kind:"parsed",parse:String,brief:"Path to the CSV file where identifiers will be written to",default:"./transcend-request-export.csv"},concurrency:{kind:"parsed",parse:_core.numberParser,brief:"The concurrency to use when uploading requests in parallel",default:"100"},createdAtBefore:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Pull requests that were submitted before this time",optional:!0},createdAtAfter:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Pull requests that were submitted after this time",optional:!0},showTests:{kind:"boolean",brief:"Filter for test requests or production requests - when not provided, pulls both",optional:!0},pageLimit:{kind:"parsed",parse:_core.numberParser,brief:"The page limit to use when pulling in pages of requests",default:"100"}}},docs:{brief:"Export privacy requests and request identifiers to a CSV file",fullDescription:"Export privacy requests and request identifiers to a CSV file."}});var me=_core.buildCommand.call(void 0, {loader:async()=>{let{markSilent:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-RSHVCDLE.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ManageRequestCompilation]}),actions:{kind:"enum",values:Object.values(_privacytypes.RequestAction),variadic:",",brief:"The request actions to mark silent"},statuses:{kind:"enum",values:Object.values(_privacytypes.RequestStatus),variadic:",",brief:"The request statuses to mark silent. Comma-separated list. 
Defaults to REQUEST_MADE,WAITING,ENRICHING,COMPILING,DELAYED,APPROVING,SECONDARY,SECONDARY_APPROVING.",optional:!0},requestIds:{kind:"parsed",parse:String,variadic:",",brief:"Specify the specific request IDs to mark silent",optional:!0},createdAtBefore:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Mark silent requests that were submitted before this time",optional:!0},createdAtAfter:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Mark silent requests that were submitted after this time",optional:!0},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),concurrency:{kind:"parsed",parse:_core.numberParser,brief:"The concurrency to use when uploading requests in parallel",default:"50"}}},docs:{brief:"Bulk update a set of privacy requests to be in silent mode",fullDescription:"Bulk update a set of privacy requests from the DSR Automation -> Incoming Requests tab to be in silent mode."}});var ce=_core.buildCommand.call(void 0, {loader:async()=>{let{notifyAdditionalTime:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-4EDFESYC.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ViewRequests,_privacytypes.ScopeName.RequestApproval]}),createdAtBefore:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Notify requests that are open but submitted before this time"},createdAtAfter:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Notify requests that are open but submitted after this time",optional:!0},actions:{kind:"enum",values:Object.values(_privacytypes.RequestAction),variadic:",",brief:"The request actions to notify",optional:!0},daysLeft:{kind:"parsed",parse:_core.numberParser,brief:"Only notify requests that have less than this number of days until they are considered expired",default:"10"},days:{kind:"parsed",parse:_core.numberParser,brief:"The number of days to adjust the expiration of the request to",default:"45"},requestIds:{kind:"parsed",parse:String,variadic:",",brief:"Specify the specific request IDs to notify",optional:!0},emailTemplate:{kind:"parsed",parse:String,brief:"The title of the email template that should be sent to the requests",default:"Additional Time Needed"},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),concurrency:{kind:"parsed",parse:_core.numberParser,brief:"The concurrency to use when uploading requests in parallel",default:"50"}}},docs:{brief:"Bulk notify a set of privacy requests that more time is needed",fullDescription:"Bulk notify a set of privacy requests from the DSR Automation -> Incoming Requests tab that more time is needed to complete the request. 
Note any request in silent mode will not be emailed."}});var he=_core.buildCommand.call(void 0, {loader:async()=>{let{pullIdentifiers:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-N54C7NPT.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ViewRequests,_privacytypes.ScopeName.ViewRequestCompilation]}),sombraAuth:_chunkFPXWBUHScjs.g.call(void 0, ),transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),file:{kind:"parsed",parse:String,brief:"Path to the CSV file where requests will be written to",default:"./manual-enrichment-identifiers.csv"},actions:{kind:"enum",values:Object.values(_privacytypes.RequestAction),variadic:",",brief:"The request actions to pull for",optional:!0},concurrency:{kind:"parsed",parse:_core.numberParser,brief:"The concurrency to use when uploading requests in parallel",default:"100"}}},docs:{brief:"Pull identifiers for manual enrichment",fullDescription:`This command pulls down the set of privacy requests that are currently pending manual enrichment.
This is useful for the following workflow:
1. Pull identifiers to CSV:
${n(["request","preflight","pull-identifiers"],{file:"./enrichment-requests.csv"},{argsIndent:5})}
2. Fill out the CSV with additional identifiers
3. Push updated back to Transcend:
${n(["request","preflight","push-identifiers"],{file:"./enrichment-requests.csv"},{argsIndent:5})}`}});var ge=_core.buildCommand.call(void 0, {loader:async()=>{let{pushIdentifiers:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-2G6FOZLU.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ManageRequestIdentities,_privacytypes.ScopeName.ManageRequestCompilation]}),enricherId:{kind:"parsed",parse:_chunkFPXWBUHScjs.a,brief:"The ID of the Request Enricher to upload to"},sombraAuth:_chunkFPXWBUHScjs.g.call(void 0, ),transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),file:{kind:"parsed",parse:String,brief:"Path to the CSV file where requests will be written to",default:"./manual-enrichment-identifiers.csv"},markSilent:{kind:"boolean",brief:"When true, set requests into silent mode before enriching",default:!1},concurrency:{kind:"parsed",parse:_core.numberParser,brief:"The concurrency to use when uploading requests in parallel",default:"100"}}},docs:{brief:"Push identifiers for manual enrichment",fullDescription:`This command push up a set of identifiers for a set of requests pending manual enrichment.
This is useful for the following workflow:
1. Pull identifiers to CSV:
${n(["request","preflight","pull-identifiers"],{file:"./enrichment-requests.csv"},{argsIndent:5})}
2. Fill out the CSV with additional identifiers
3. Push updated back to Transcend:
${n(["request","preflight","push-identifiers"],{file:"./enrichment-requests.csv"},{argsIndent:5})}`}});var ye=_core.buildRouteMap.call(void 0, {routes:{"pull-identifiers":he,"push-identifiers":ge},docs:{brief:"Preflight commands"}});var ke=_core.buildCommand.call(void 0, {loader:async()=>{let{rejectUnverifiedIdentifiers:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-KZ2L66Q3.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ManageRequestCompilation]}),identifierNames:{kind:"parsed",parse:String,variadic:",",brief:"The names of identifiers to clear out"},actions:{kind:"enum",values:Object.values(_privacytypes.RequestAction),variadic:",",brief:"The request action to restart",optional:!0},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, )}},docs:{brief:"Bulk clear out any request identifiers that are unverified",fullDescription:"Bulk clear out any request identifiers that are unverified."}});var Ce=_core.buildCommand.call(void 0, {loader:async()=>{let{restart:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-HB3R7YDP.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.MakeDataSubjectRequest,_privacytypes.ScopeName.ViewRequestCompilation]}),actions:{kind:"enum",values:Object.values(_privacytypes.RequestAction),variadic:",",brief:"The request actions to restart"},statuses:{kind:"enum",values:Object.values(_privacytypes.RequestStatus),variadic:",",brief:"The request statuses to restart"},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),requestReceiptFolder:{kind:"parsed",parse:String,brief:"The path to the folder where receipts of each upload are stored",default:"./privacy-request-upload-receipts"},sombraAuth:_chunkFPXWBUHScjs.g.call(void 0, ),concurrency:{kind:"parsed",parse:_core.numberParser,brief:"The concurrency to use when uploading requests in parallel",default:"15"},requestIds:{kind:"parsed",parse:String,variadic:",",brief:"Specify the specific request IDs to restart",optional:!0},emailIsVerified:{kind:"boolean",brief:"Indicate whether the primary email address is verified. 
Set to false to send a verification email",default:!0},createdAt:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Restart requests that were submitted before a specific date",optional:!0},silentModeBefore:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Requests older than this date should be marked as silent mode",optional:!0},createdAtBefore:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Restart requests that were submitted before this time",optional:!0},createdAtAfter:{kind:"parsed",parse:_chunkFPXWBUHScjs.c,brief:"Restart requests that were submitted after this time",optional:!0},sendEmailReceipt:{kind:"boolean",brief:"Send email receipts to the restarted requests",default:!1},copyIdentifiers:{kind:"boolean",brief:"Copy over all enriched identifiers from the initial request",default:!1},skipWaitingPeriod:{kind:"boolean",brief:"Skip queued state of request and go straight to compiling",default:!1}}},docs:{brief:"Bulk update a set of privacy requests based on a set of request filters",fullDescription:"Bulk update a set of privacy requests based on a set of request filters."}});var we=_core.buildCommand.call(void 0, {loader:async()=>{let{skipPreflightJobs:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-BNFADMTO.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ManageRequestCompilation]}),enricherIds:{kind:"parsed",parse:String,variadic:",",brief:"The ID of the enrichers to skip privacy request jobs for"},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, )}},docs:{brief:"Skip preflight jobs",fullDescription:"This command allows for bulk skipping preflight checks."}});var ve=_core.buildCommand.call(void 0, {loader:async()=>{let{markRequestDataSilosCompleted:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-NKLZ5RG4.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ManageRequestCompilation]}),dataSiloId:{kind:"parsed",parse:_chunkFPXWBUHScjs.a,brief:"The ID of the data silo to pull in"},file:{kind:"parsed",parse:String,brief:'Path to the CSV file where identifiers will be written to. The CSV is expected to have 1 column named "Request Id".',default:"./request-identifiers.csv"},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, )}},docs:{brief:"Mark request data silos as completed",fullDescription:`This command takes in a CSV of Request IDs as well as a Data Silo ID and marks all associated privacy request jobs as completed.
This command is useful with the "Bulk Response" UI. The CSV is expected to have 1 column named "Request Id".`}});var Te=_core.buildCommand.call(void 0, {loader:async()=>{let{retryRequestDataSilos:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-TVNBHOR4.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ManageRequestCompilation]}),dataSiloId:{kind:"parsed",parse:_chunkFPXWBUHScjs.a,brief:"The ID of the data silo to pull in"},actions:{kind:"enum",values:Object.values(_privacytypes.RequestAction),variadic:",",brief:"The request actions to restart"},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, )}},docs:{brief:"Retry request data silos",fullDescription:'This command allows for bulk restarting a set of data silos jobs for open privacy requests. This is equivalent to clicking the "Wipe and Retry" button for a particular data silo across a set of privacy requests.'}});var Pe=_core.buildCommand.call(void 0, {loader:async()=>{let{skipRequestDataSilos:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-TWLWQSZG.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.ManageRequestCompilation]}),dataSiloId:{kind:"parsed",parse:_chunkFPXWBUHScjs.a,brief:"The ID of the data silo to skip privacy request jobs for"},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),statuses:{kind:"enum",values:Object.values(_privacytypes.RequestStatus),variadic:",",brief:"The request statuses to skip"},status:{kind:"enum",values:[_privacytypes.RequestDataSiloStatus.Skipped,_privacytypes.RequestDataSiloStatus.Resolved],brief:"The status to set the request data silo job to",default:_privacytypes.RequestDataSiloStatus.Skipped}}},docs:{brief:"Skip request data silos",fullDescription:"This command allows for bulk skipping all open privacy request jobs for a particular data silo. This command is useful if you want to disable a data silo and then clear out any active privacy requests that are still queued up for that data silo."}});var qe=_core.buildRouteMap.call(void 0, {routes:{"mark-request-data-silos-completed":ve,"retry-request-data-silos":Te,"skip-request-data-silos":Pe},docs:{brief:"System commands"}});var Ae=_core.buildCommand.call(void 0, {loader:async()=>{let{upload:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-GNB7TDFU.cjs")));return e},parameters:{flags:{auth:_chunkFPXWBUHScjs.d.call(void 0, {scopes:[_privacytypes.ScopeName.MakeDataSubjectRequest,_privacytypes.ScopeName.ViewRequestIdentitySettings,_privacytypes.ScopeName.ViewGlobalAttributes]}),file:{kind:"parsed",parse:String,brief:"Path to the CSV file of requests to upload",default:"./requests.csv"},transcendUrl:_chunkFPXWBUHScjs.e.call(void 0, ),cacheFilepath:{kind:"parsed",parse:String,brief:"The path to the JSON file encoding the metadata used to map the CSV shape to Transcend API",default:"./transcend-privacy-requests-cache.json"},requestReceiptFolder:{kind:"parsed",parse:String,brief:"The path to the folder where receipts of each upload are stored",default:"./privacy-request-upload-receipts"},sombraAuth:_chunkFPXWBUHScjs.g.call(void 0, ),concurrency:{kind:"parsed",parse:_core.numberParser,brief:"The concurrency to use when uploading requests in parallel",default:"50"},attributes:{kind:"parsed",parse:String,brief:"Tag all of the requests with the following attributes. 
Format: key1:value1;value2,key2:value3;value4",default:"Tags:transcend-cli"},isTest:{kind:"boolean",brief:"Flag whether the requests being uploaded are test requests or regular requests",default:!1},isSilent:{kind:"boolean",brief:"Flag whether the requests being uploaded should be submitted in silent mode",default:!0},skipSendingReceipt:{kind:"boolean",brief:"Flag whether to skip sending of the receipt email",default:!1},emailIsVerified:{kind:"boolean",brief:"Indicate whether the email address being uploaded is pre-verified. Set to false to send a verification email",default:!0},skipFilterStep:{kind:"boolean",brief:"When true, skip the interactive step to filter down the CSV",default:!1},dryRun:{kind:"boolean",brief:"When true, perform a dry run of the upload instead of calling the API to submit the requests",default:!1},debug:{kind:"boolean",brief:"Debug logging",default:!1},defaultPhoneCountryCode:{kind:"parsed",parse:String,brief:"When uploading phone numbers, if the phone number is missing a country code, assume this country code",default:"1"}}},docs:{brief:"Upload a set of requests from a CSV",fullDescription:`Upload a set of requests from a CSV.
This command prompts you to map the shape of the CSV to the shape of the Transcend API. There is no requirement for the shape of the incoming CSV, as the script will handle the mapping process.
The script will also produce a JSON cache file that allows for the mappings to be preserved between runs.`}});var Ie=_core.buildRouteMap.call(void 0, {routes:{approve:H,upload:Ae,"download-files":ie,cancel:Z,restart:Ce,"notify-additional-time":ce,"mark-silent":me,"enricher-restart":ne,"reject-unverified-identifiers":ke,export:ue,"skip-preflight-jobs":we,system:qe,preflight:ye,cron:se},docs:{brief:"All commands related to DSR requests"}});var Ur=_core.buildRouteMap.call(void 0, {routes:{request:Ie,consent:j,inventory:z,admin:I,migration:X,install:_autocomplete.buildInstallCommand.call(void 0, "@transcend-io/transcend",{bash:"__@transcend-io/cli_bash_complete"}),uninstall:_autocomplete.buildUninstallCommand.call(void 0, "@transcend-io/transcend",{bash:!0})},docs:{brief:_chunkY4BWTFTXcjs.l,hideRoute:{install:!0,uninstall:!0}}}),en= exports.a =_core.buildApplication.call(void 0, Ur,{name:_chunkE3CF3RKXcjs.a,versionInfo:{currentVersion:_chunkY4BWTFTXcjs.m}});exports.a = en;
//# sourceMappingURL=chunk-G2T7YVBG.cjs.map