@transcend-io/cli 8.36.0 → 8.36.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (119)
  1. package/dist/{api-keys-C3yVviFj.cjs → api-keys-DB0BZSh5.cjs} +2 -2
  2. package/dist/{api-keys-C3yVviFj.cjs.map → api-keys-DB0BZSh5.cjs.map} +1 -1
  3. package/dist/{app-CnSMU1Jq.cjs → app-BOlZhpYh.cjs} +18 -18
  4. package/dist/{app-CnSMU1Jq.cjs.map → app-BOlZhpYh.cjs.map} +1 -1
  5. package/dist/bin/bash-complete.cjs +1 -1
  6. package/dist/bin/cli.cjs +1 -1
  7. package/dist/bin/deprecated-command.cjs +1 -1
  8. package/dist/{code-scanning-Cp3rJ0-7.cjs → code-scanning-Yc7rgoTY.cjs} +2 -2
  9. package/dist/{code-scanning-Cp3rJ0-7.cjs.map → code-scanning-Yc7rgoTY.cjs.map} +1 -1
  10. package/dist/{command-FrwAB8R3.cjs → command-41dUPvEa.cjs} +2 -2
  11. package/dist/{command-FrwAB8R3.cjs.map → command-41dUPvEa.cjs.map} +1 -1
  12. package/dist/{consent-manager-Dpgb-BKQ.cjs → consent-manager-BNMaKHId.cjs} +2 -2
  13. package/dist/{consent-manager-Dpgb-BKQ.cjs.map → consent-manager-BNMaKHId.cjs.map} +1 -1
  14. package/dist/{constants-CmHjougS.cjs → constants-JoCDv9ym.cjs} +2 -2
  15. package/dist/{constants-CmHjougS.cjs.map → constants-JoCDv9ym.cjs.map} +1 -1
  16. package/dist/{cron-Cug8byEO.cjs → cron-ClvgH2uD.cjs} +2 -2
  17. package/dist/{cron-Cug8byEO.cjs.map → cron-ClvgH2uD.cjs.map} +1 -1
  18. package/dist/{data-inventory-oLLOqkbm.cjs → data-inventory-BzEYoxaa.cjs} +2 -2
  19. package/dist/{data-inventory-oLLOqkbm.cjs.map → data-inventory-BzEYoxaa.cjs.map} +1 -1
  20. package/dist/{dataFlowsToDataSilos-BjIfea8i.cjs → dataFlowsToDataSilos-daBgPi7V.cjs} +2 -2
  21. package/dist/{dataFlowsToDataSilos-BjIfea8i.cjs.map → dataFlowsToDataSilos-daBgPi7V.cjs.map} +1 -1
  22. package/dist/{impl-1ibTRBn4.cjs → impl-4pmSDQvA.cjs} +2 -2
  23. package/dist/{impl-1ibTRBn4.cjs.map → impl-4pmSDQvA.cjs.map} +1 -1
  24. package/dist/{impl-B4f6W0BP.cjs → impl-8bci0gd2.cjs} +2 -2
  25. package/dist/{impl-B4f6W0BP.cjs.map → impl-8bci0gd2.cjs.map} +1 -1
  26. package/dist/{impl-Dsj6D_fG.cjs → impl-8zottXEQ.cjs} +2 -2
  27. package/dist/{impl-Dsj6D_fG.cjs.map → impl-8zottXEQ.cjs.map} +1 -1
  28. package/dist/{impl-C4VIGK1G.cjs → impl-BFOghVmx.cjs} +2 -2
  29. package/dist/{impl-C4VIGK1G.cjs.map → impl-BFOghVmx.cjs.map} +1 -1
  30. package/dist/{impl-BtfHdJB-.cjs → impl-BH0HWnIY.cjs} +2 -2
  31. package/dist/{impl-BtfHdJB-.cjs.map → impl-BH0HWnIY.cjs.map} +1 -1
  32. package/dist/{impl-BYBww_ic.cjs → impl-BIqdEXRo.cjs} +2 -2
  33. package/dist/{impl-BYBww_ic.cjs.map → impl-BIqdEXRo.cjs.map} +1 -1
  34. package/dist/{impl-CKASqgZA.cjs → impl-BMxxKrNz.cjs} +2 -2
  35. package/dist/{impl-CKASqgZA.cjs.map → impl-BMxxKrNz.cjs.map} +1 -1
  36. package/dist/{impl-BABdXCjW.cjs → impl-BRuqPhJU.cjs} +2 -2
  37. package/dist/{impl-BABdXCjW.cjs.map → impl-BRuqPhJU.cjs.map} +1 -1
  38. package/dist/{impl-D1MnxAqp.cjs → impl-BS8t24Z7.cjs} +2 -2
  39. package/dist/{impl-D1MnxAqp.cjs.map → impl-BS8t24Z7.cjs.map} +1 -1
  40. package/dist/{impl-DLeBB5Zk.cjs → impl-BWiNjS6v.cjs} +2 -2
  41. package/dist/{impl-DLeBB5Zk.cjs.map → impl-BWiNjS6v.cjs.map} +1 -1
  42. package/dist/{impl-Ij4ebiVO.cjs → impl-BXMfTm4K.cjs} +2 -2
  43. package/dist/{impl-Ij4ebiVO.cjs.map → impl-BXMfTm4K.cjs.map} +1 -1
  44. package/dist/{impl-CeoJtFk7.cjs → impl-BYK9pQs0.cjs} +2 -2
  45. package/dist/{impl-CeoJtFk7.cjs.map → impl-BYK9pQs0.cjs.map} +1 -1
  46. package/dist/{impl-rYMBoT4c.cjs → impl-BjOFvm4E.cjs} +2 -2
  47. package/dist/{impl-rYMBoT4c.cjs.map → impl-BjOFvm4E.cjs.map} +1 -1
  48. package/dist/{impl-cFJaGYul.cjs → impl-BouHRicT.cjs} +2 -2
  49. package/dist/{impl-cFJaGYul.cjs.map → impl-BouHRicT.cjs.map} +1 -1
  50. package/dist/{impl-DbD3dI7B.cjs → impl-Bv4gPikD.cjs} +2 -2
  51. package/dist/{impl-DbD3dI7B.cjs.map → impl-Bv4gPikD.cjs.map} +1 -1
  52. package/dist/{impl-CCuP7d9d.cjs → impl-BwAnSxkP.cjs} +2 -2
  53. package/dist/{impl-CCuP7d9d.cjs.map → impl-BwAnSxkP.cjs.map} +1 -1
  54. package/dist/{impl-B19iyoiR.cjs → impl-C-_9uRms.cjs} +2 -2
  55. package/dist/{impl-B19iyoiR.cjs.map → impl-C-_9uRms.cjs.map} +1 -1
  56. package/dist/{impl-CCPd6sqk.cjs → impl-C4D1tHjp.cjs} +2 -2
  57. package/dist/{impl-CCPd6sqk.cjs.map → impl-C4D1tHjp.cjs.map} +1 -1
  58. package/dist/{impl-JkokB3Un.cjs → impl-CA_CO88W.cjs} +2 -2
  59. package/dist/{impl-JkokB3Un.cjs.map → impl-CA_CO88W.cjs.map} +1 -1
  60. package/dist/{impl-C1jpyDxO.cjs → impl-CCOEjRnr.cjs} +2 -2
  61. package/dist/{impl-C1jpyDxO.cjs.map → impl-CCOEjRnr.cjs.map} +1 -1
  62. package/dist/{impl-CKCV2er8.cjs → impl-CFmFCVt8.cjs} +2 -2
  63. package/dist/{impl-CKCV2er8.cjs.map → impl-CFmFCVt8.cjs.map} +1 -1
  64. package/dist/{impl-Ym65hOwf.cjs → impl-CSZwdpH-.cjs} +2 -2
  65. package/dist/{impl-Ym65hOwf.cjs.map → impl-CSZwdpH-.cjs.map} +1 -1
  66. package/dist/{impl-Bd239j4R.cjs → impl-ChNLuyNq.cjs} +2 -2
  67. package/dist/{impl-Bd239j4R.cjs.map → impl-ChNLuyNq.cjs.map} +1 -1
  68. package/dist/{impl-BqwFuOOo.cjs → impl-ClDXxODZ.cjs} +2 -2
  69. package/dist/{impl-BqwFuOOo.cjs.map → impl-ClDXxODZ.cjs.map} +1 -1
  70. package/dist/{impl-TTDhp7Hu.cjs → impl-CoxOrlXu.cjs} +2 -2
  71. package/dist/{impl-TTDhp7Hu.cjs.map → impl-CoxOrlXu.cjs.map} +1 -1
  72. package/dist/{impl-CWLtV0NB.cjs → impl-Cp12F9tr.cjs} +2 -2
  73. package/dist/{impl-CWLtV0NB.cjs.map → impl-Cp12F9tr.cjs.map} +1 -1
  74. package/dist/{impl-DJMqsQ7L.cjs → impl-D3sI4I1g.cjs} +2 -2
  75. package/dist/{impl-DJMqsQ7L.cjs.map → impl-D3sI4I1g.cjs.map} +1 -1
  76. package/dist/{impl-D9_MKLLL.cjs → impl-D7WBmmHb.cjs} +2 -2
  77. package/dist/{impl-D9_MKLLL.cjs.map → impl-D7WBmmHb.cjs.map} +1 -1
  78. package/dist/{impl--0AUtPOO.cjs → impl-DU-WTXTY.cjs} +2 -2
  79. package/dist/{impl--0AUtPOO.cjs.map → impl-DU-WTXTY.cjs.map} +1 -1
  80. package/dist/{impl-CXnE8Ev7.cjs → impl-DhdY6lbj.cjs} +2 -2
  81. package/dist/{impl-CXnE8Ev7.cjs.map → impl-DhdY6lbj.cjs.map} +1 -1
  82. package/dist/{impl-BnkpDlXg.cjs → impl-DjVmOb9T.cjs} +2 -2
  83. package/dist/{impl-BnkpDlXg.cjs.map → impl-DjVmOb9T.cjs.map} +1 -1
  84. package/dist/{impl-DuVC84LE.cjs → impl-DpdGWdrr.cjs} +2 -2
  85. package/dist/{impl-DuVC84LE.cjs.map → impl-DpdGWdrr.cjs.map} +1 -1
  86. package/dist/{impl-Cq9g8eCi.cjs → impl-FlZPR0yd.cjs} +2 -2
  87. package/dist/{impl-Cq9g8eCi.cjs.map → impl-FlZPR0yd.cjs.map} +1 -1
  88. package/dist/{impl-3XEvFo-7.cjs → impl-J5fV8gbh.cjs} +2 -2
  89. package/dist/{impl-3XEvFo-7.cjs.map → impl-J5fV8gbh.cjs.map} +1 -1
  90. package/dist/{impl-BzfO2Fr-.cjs → impl-KgEJvOhE.cjs} +2 -2
  91. package/dist/{impl-BzfO2Fr-.cjs.map → impl-KgEJvOhE.cjs.map} +1 -1
  92. package/dist/{impl-9qObj_On.cjs → impl-NLEQuKnT.cjs} +2 -2
  93. package/dist/{impl-9qObj_On.cjs.map → impl-NLEQuKnT.cjs.map} +1 -1
  94. package/dist/{impl-CA649Byw.cjs → impl-SkiG9sWb.cjs} +2 -2
  95. package/dist/{impl-CA649Byw.cjs.map → impl-SkiG9sWb.cjs.map} +1 -1
  96. package/dist/{impl-Bm8T2x0z.cjs → impl-_dGu54cO.cjs} +2 -2
  97. package/dist/{impl-Bm8T2x0z.cjs.map → impl-_dGu54cO.cjs.map} +1 -1
  98. package/dist/{impl-DEsdC5Z4.cjs → impl-ay7i0K_5.cjs} +2 -2
  99. package/dist/{impl-DEsdC5Z4.cjs.map → impl-ay7i0K_5.cjs.map} +1 -1
  100. package/dist/{impl-Dwd8vWoq.cjs → impl-cfDtPbS9.cjs} +2 -2
  101. package/dist/{impl-Dwd8vWoq.cjs.map → impl-cfDtPbS9.cjs.map} +1 -1
  102. package/dist/{impl-Emn0jZNk.cjs → impl-hGu8uCC4.cjs} +2 -2
  103. package/dist/{impl-Emn0jZNk.cjs.map → impl-hGu8uCC4.cjs.map} +1 -1
  104. package/dist/{impl-TDjUVCrV.cjs → impl-yHutqfbd.cjs} +2 -2
  105. package/dist/{impl-TDjUVCrV.cjs.map → impl-yHutqfbd.cjs.map} +1 -1
  106. package/dist/index.cjs +1 -1
  107. package/dist/index.d.cts +9 -9
  108. package/dist/{manual-enrichment-Cz9Cb_fJ.cjs → manual-enrichment-D-0rM9KN.cjs} +2 -2
  109. package/dist/{manual-enrichment-Cz9Cb_fJ.cjs.map → manual-enrichment-D-0rM9KN.cjs.map} +1 -1
  110. package/dist/{pooling-C9Q0E6d3.cjs → pooling-DbIx6-i7.cjs} +2 -2
  111. package/dist/{pooling-C9Q0E6d3.cjs.map → pooling-DbIx6-i7.cjs.map} +1 -1
  112. package/dist/{preference-management-DhMwul_r.cjs → preference-management-Dapt3Li3.cjs} +4 -4
  113. package/dist/preference-management-Dapt3Li3.cjs.map +1 -0
  114. package/dist/{syncConfigurationToTranscend-Drb8I8lD.cjs → syncConfigurationToTranscend-D4PLA70C.cjs} +2 -2
  115. package/dist/{syncConfigurationToTranscend-Drb8I8lD.cjs.map → syncConfigurationToTranscend-D4PLA70C.cjs.map} +1 -1
  116. package/dist/{uploadConsents-Y2WZFgxP.cjs → uploadConsents-CN1JucL3.cjs} +2 -2
  117. package/dist/{uploadConsents-Y2WZFgxP.cjs.map → uploadConsents-CN1JucL3.cjs.map} +1 -1
  118. package/package.json +1 -1
  119. package/dist/preference-management-DhMwul_r.cjs.map +0 -1
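Note on the rename pattern above: nearly every entry is a content-hashed chunk picking up a new hash (e.g. api-keys-C3yVviFj.cjs → api-keys-DB0BZSh5.cjs), and the hunks reproduced below change only the hashed filenames inside require() calls and sourceMappingURL comments. This cascade is expected with Rollup-style content hashing: a chunk's filename is derived from its final contents, and those contents embed the hashed filenames of the chunks it requires, so one substantive edit (here, apparently in the preference-management chunk, whose source map was replaced outright rather than renamed) plus the package.json version bump can rename most of dist/. A minimal sketch of the mechanism; the hash helper and file contents are hypothetical stand-ins, not the bundler's actual algorithm:

const crypto = require('node:crypto');

// Hypothetical stand-in for a bundler's chunk-name hash.
const hash = (content) =>
  crypto.createHash('sha256').update(content).digest('base64url').slice(0, 8);

// Suppose one chunk's code actually changes between releases...
const prefMgmt = '/* updated preference-management code */';
const prefMgmtName = `preference-management-${hash(prefMgmt)}.cjs`;

// ...then every consumer embeds that new filename in a require() call, so the
// consumer's contents, and therefore its own hashed name, change too, even
// though none of its logic did.
const consumer = `const p = require('./${prefMgmtName}');`;
console.log(`impl-${hash(consumer)}.cjs`);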
@@ -1,2 +1,2 @@
- const e=require(`./enums-CBXlBJii.cjs`),t=require(`./constants-CmHjougS.cjs`),n=require(`./syncConfigurationToTranscend-Drb8I8lD.cjs`),r=require(`./logger-BaHHbWVd.cjs`),i=require(`./codecs-JSDJgtyL.cjs`);let a=require(`@transcend-io/type-utils`),o=require(`node:fs`),s=require(`node:path`),c=require(`colors`);c=e.s(c);let l=require(`io-ts`);l=e.s(l);async function u({email:e,password:i,scopes:a,apiKeyTitle:o,parentOrganizationId:s,deleteExistingApiKey:l=!0,createNewApiKey:u=!0,transcendUrl:d=t.a}){let f=await n.ni(d,{});r.t.info(c.default.magenta(`Logging in using email and password.`));let{roles:p,loginCookie:m}=await n.Hn(f,{email:e,password:i});r.t.info(c.default.green(`Successfully logged in and found ${p.length} role${p.length===1?``:`s`}!`));let h=s?p.filter(e=>e.organization.id===s||e.organization.parentOrganizationId===s):p;f.setHeaders({Cookie:m});let g=[],_=[];return r.t.info(c.default.magenta(`Generating API keys with title: ${o}, scopes: ${a.join(`,`)}.`)),await n.Es(h,async t=>{try{await n.Vn(f,{roleId:t.id,email:e}),r.t.info(c.default.magenta(`Checking if API key already exists in organization "${t.organization.name}" with title: "${o}".`));let[i]=await n.ar(f,[o]);if(i&&l)r.t.info(c.default.yellow(`Deleting existing API key in "${t.organization.name}" with title: "${o}".`)),await n.Bn(f,i.id),r.t.info(c.default.green(`Successfully deleted API key in "${t.organization.name}" with title: "${o}".`));else if(i)throw Error(`API key already exists with title: "${o}"`);if(u){r.t.info(c.default.magenta(`Creating API key in "${t.organization.name}" with title: "${o}".`));let{apiKey:e}=await n.zn(f,{title:o,scopes:a});g.push({organizationName:t.organization.name,organizationId:t.organization.id,apiKey:e}),r.t.info(c.default.green(`Successfully created API key in "${t.organization.name}" with title: "${o}".`))}else g.push({organizationName:t.organization.name,organizationId:t.organization.id,apiKey:``})}catch(e){r.t.error(c.default.red(`Failed to create API key in organization "${t.organization.name}"! - ${e.message}`)),_.push({organizationName:t.organization.name,organizationId:t.organization.id,error:e.message})}}),r.t.info(c.default.green(`Successfully created ${g.length} API key${g.length===1?``:`s`}`)),_.length>0&&r.t.error(c.default.red(`Failed to create ${_.length} API key${_.length===1?``:`s`}!`)),{errors:_,apiKeys:g}}function d(e){return e||(r.t.error(c.default.red(`A Transcend API key must be provided. You can specify using --auth=$TRANSCEND_API_KEY`)),process.exit(1)),(0,o.existsSync)(e)?(0,a.decodeCodec)(l.array(i.mt),(0,o.readFileSync)(e,`utf-8`)):e}function f(e,t,n=!1){if(!(0,o.existsSync)(e))return[];let r=(0,o.readdirSync)(e).filter(e=>t?t.filter(t=>e.endsWith(t)).length:!0).filter(e=>e.indexOf(`.`)>0);return n?r.map(e=>e.replace(/\.[^/.]+$/,``)):r}function p(e){return(0,o.readdirSync)(e).filter(t=>(0,o.statSync)((0,s.join)(e,t)).isDirectory())}Object.defineProperty(exports,`i`,{enumerable:!0,get:function(){return u}}),Object.defineProperty(exports,`n`,{enumerable:!0,get:function(){return f}}),Object.defineProperty(exports,`r`,{enumerable:!0,get:function(){return d}}),Object.defineProperty(exports,`t`,{enumerable:!0,get:function(){return p}});
- //# sourceMappingURL=api-keys-C3yVviFj.cjs.map
+ const e=require(`./enums-CBXlBJii.cjs`),t=require(`./constants-JoCDv9ym.cjs`),n=require(`./syncConfigurationToTranscend-D4PLA70C.cjs`),r=require(`./logger-BaHHbWVd.cjs`),i=require(`./codecs-JSDJgtyL.cjs`);let a=require(`@transcend-io/type-utils`),o=require(`node:fs`),s=require(`node:path`),c=require(`colors`);c=e.s(c);let l=require(`io-ts`);l=e.s(l);async function u({email:e,password:i,scopes:a,apiKeyTitle:o,parentOrganizationId:s,deleteExistingApiKey:l=!0,createNewApiKey:u=!0,transcendUrl:d=t.a}){let f=await n.ni(d,{});r.t.info(c.default.magenta(`Logging in using email and password.`));let{roles:p,loginCookie:m}=await n.Hn(f,{email:e,password:i});r.t.info(c.default.green(`Successfully logged in and found ${p.length} role${p.length===1?``:`s`}!`));let h=s?p.filter(e=>e.organization.id===s||e.organization.parentOrganizationId===s):p;f.setHeaders({Cookie:m});let g=[],_=[];return r.t.info(c.default.magenta(`Generating API keys with title: ${o}, scopes: ${a.join(`,`)}.`)),await n.Es(h,async t=>{try{await n.Vn(f,{roleId:t.id,email:e}),r.t.info(c.default.magenta(`Checking if API key already exists in organization "${t.organization.name}" with title: "${o}".`));let[i]=await n.ar(f,[o]);if(i&&l)r.t.info(c.default.yellow(`Deleting existing API key in "${t.organization.name}" with title: "${o}".`)),await n.Bn(f,i.id),r.t.info(c.default.green(`Successfully deleted API key in "${t.organization.name}" with title: "${o}".`));else if(i)throw Error(`API key already exists with title: "${o}"`);if(u){r.t.info(c.default.magenta(`Creating API key in "${t.organization.name}" with title: "${o}".`));let{apiKey:e}=await n.zn(f,{title:o,scopes:a});g.push({organizationName:t.organization.name,organizationId:t.organization.id,apiKey:e}),r.t.info(c.default.green(`Successfully created API key in "${t.organization.name}" with title: "${o}".`))}else g.push({organizationName:t.organization.name,organizationId:t.organization.id,apiKey:``})}catch(e){r.t.error(c.default.red(`Failed to create API key in organization "${t.organization.name}"! - ${e.message}`)),_.push({organizationName:t.organization.name,organizationId:t.organization.id,error:e.message})}}),r.t.info(c.default.green(`Successfully created ${g.length} API key${g.length===1?``:`s`}`)),_.length>0&&r.t.error(c.default.red(`Failed to create ${_.length} API key${_.length===1?``:`s`}!`)),{errors:_,apiKeys:g}}function d(e){return e||(r.t.error(c.default.red(`A Transcend API key must be provided. You can specify using --auth=$TRANSCEND_API_KEY`)),process.exit(1)),(0,o.existsSync)(e)?(0,a.decodeCodec)(l.array(i.mt),(0,o.readFileSync)(e,`utf-8`)):e}function f(e,t,n=!1){if(!(0,o.existsSync)(e))return[];let r=(0,o.readdirSync)(e).filter(e=>t?t.filter(t=>e.endsWith(t)).length:!0).filter(e=>e.indexOf(`.`)>0);return n?r.map(e=>e.replace(/\.[^/.]+$/,``)):r}function p(e){return(0,o.readdirSync)(e).filter(t=>(0,o.statSync)((0,s.join)(e,t)).isDirectory())}Object.defineProperty(exports,`i`,{enumerable:!0,get:function(){return u}}),Object.defineProperty(exports,`n`,{enumerable:!0,get:function(){return f}}),Object.defineProperty(exports,`r`,{enumerable:!0,get:function(){return d}}),Object.defineProperty(exports,`t`,{enumerable:!0,get:function(){return p}});
+ //# sourceMappingURL=api-keys-DB0BZSh5.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"api-keys-C3yVviFj.cjs","names":["DEFAULT_TRANSCEND_API","buildTranscendGraphQLClientGeneric","loginUser","mapSeries","assumeRole","fetchAllApiKeys","deleteApiKey","createApiKey","t","StoredApiKey"],"sources":["../src/lib/api-keys/generateCrossAccountApiKeys.ts","../src/lib/api-keys/validateTranscendAuth.ts","../src/lib/api-keys/listFiles.ts","../src/lib/api-keys/listDirectories.ts"],"sourcesContent":["import { mapSeries } from '../bluebird';\nimport {\n buildTranscendGraphQLClientGeneric,\n loginUser,\n createApiKey,\n fetchAllApiKeys,\n deleteApiKey,\n assumeRole,\n} from '../graphql';\nimport { ScopeName } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { StoredApiKey } from '../../codecs';\nimport { logger } from '../../logger';\nimport { DEFAULT_TRANSCEND_API } from '../../constants';\n\nexport interface ApiKeyGenerateError {\n /** Name of instance */\n organizationName: string;\n /** Error */\n error: string;\n /** Organization ID API key is for */\n organizationId: string;\n}\n\n/**\n * Generate API keys across multiple transcend accounts\n *\n * @param options - Options\n * @returns Number of API keys created\n */\nexport async function generateCrossAccountApiKeys({\n email,\n password,\n scopes,\n apiKeyTitle,\n parentOrganizationId,\n deleteExistingApiKey = true,\n createNewApiKey = true,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** Email address of user generating API keys */\n email: string;\n /** Password of user generating API keys */\n password: string;\n /** Filter for organizations that match this parent organization ID */\n parentOrganizationId?: string;\n /** Title of the API create to create */\n apiKeyTitle: string;\n /** Title of the API create to create */\n scopes: ScopeName[];\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** When true delete existing API keys with that title, if set to false an API key exists with that title, an error is thrown */\n deleteExistingApiKey?: boolean;\n /** When true, generate new API keys, otherwise only will delete past API keys */\n createNewApiKey?: boolean;\n}): Promise<{\n /** Successfully generated */\n apiKeys: StoredApiKey[];\n /** Error results */\n errors: ApiKeyGenerateError[];\n}> {\n // Create GraphQL client\n const client = await buildTranscendGraphQLClientGeneric(transcendUrl, {});\n\n // Login the user\n logger.info(colors.magenta('Logging in using email and password.'));\n const { roles, loginCookie } = await loginUser(client, { email, password });\n logger.info(\n colors.green(\n `Successfully logged in and found ${roles.length} role${\n roles.length === 1 ? '' : 's'\n }!`,\n ),\n );\n\n // Filter down by parentOrganizationId\n const filteredRoles = parentOrganizationId\n ? roles.filter(\n (role) =>\n role.organization.id === parentOrganizationId ||\n role.organization.parentOrganizationId === parentOrganizationId,\n )\n : roles;\n\n // Save cookie to call route subsequent times\n client.setHeaders({\n Cookie: loginCookie,\n });\n\n // Save the resulting API keys\n const results: StoredApiKey[] = [];\n const errors: ApiKeyGenerateError[] = [];\n\n // Generate API keys\n logger.info(\n colors.magenta(\n `Generating API keys with title: ${apiKeyTitle}, scopes: ${scopes.join(\n ',',\n )}.`,\n ),\n );\n\n // Map over each role\n await mapSeries(filteredRoles, async (role) => {\n try {\n // Log into the other instance\n await assumeRole(client, { roleId: role.id, email });\n\n // Grab API keys with that title\n logger.info(\n colors.magenta(\n `Checking if API key already exists in organization \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n\n // Delete existing API key\n const [apiKeyWithTitle] = await fetchAllApiKeys(client, [apiKeyTitle]);\n if (apiKeyWithTitle && deleteExistingApiKey) {\n logger.info(\n colors.yellow(\n `Deleting existing API key in \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n await deleteApiKey(client, apiKeyWithTitle.id);\n logger.info(\n colors.green(\n `Successfully deleted API key in \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n } else if (apiKeyWithTitle) {\n // throw error if one exists but not configured to delete\n throw new Error(`API key already exists with title: \"${apiKeyTitle}\"`);\n }\n\n // Create the API key\n if (createNewApiKey) {\n logger.info(\n colors.magenta(\n `Creating API key in \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n const { apiKey } = await createApiKey(client, {\n title: apiKeyTitle,\n scopes,\n });\n results.push({\n organizationName: role.organization.name,\n organizationId: role.organization.id,\n apiKey,\n });\n logger.info(\n colors.green(\n `Successfully created API key in \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n } else {\n // Delete only\n results.push({\n organizationName: role.organization.name,\n organizationId: role.organization.id,\n apiKey: '',\n });\n }\n } catch (err) {\n logger.error(\n colors.red(\n `Failed to create API key in organization \"${role.organization.name}\"! - ${err.message}`,\n ),\n );\n errors.push({\n organizationName: role.organization.name,\n organizationId: role.organization.id,\n error: err.message,\n });\n }\n });\n logger.info(\n colors.green(\n `Successfully created ${results.length} API key${\n results.length === 1 ? '' : 's'\n }`,\n ),\n );\n\n if (errors.length > 0) {\n logger.error(\n colors.red(\n `Failed to create ${errors.length} API key${\n errors.length === 1 ? '' : 's'\n }!`,\n ),\n );\n }\n\n return { errors, apiKeys: results };\n}\n","import { decodeCodec } from '@transcend-io/type-utils';\nimport colors from 'colors';\nimport * as t from 'io-ts';\nimport { logger } from '../../logger';\nimport { existsSync, readFileSync } from 'node:fs';\nimport { StoredApiKey } from '../../codecs';\n\n/**\n * Determine if the `--auth` parameter is an API key or a path to a JSON\n * file containing a list of API keys.\n *\n * @param auth - Raw auth parameter\n * @returns The API key or the list API keys\n */\nexport function validateTranscendAuth(auth: string): string | StoredApiKey[] {\n // Ensure auth is passed\n if (!auth) {\n logger.error(\n colors.red(\n 'A Transcend API key must be provided. You can specify using --auth=$TRANSCEND_API_KEY',\n ),\n );\n process.exit(1);\n }\n\n // Read from disk\n if (existsSync(auth)) {\n // validate that file is a list of API keys\n return decodeCodec(t.array(StoredApiKey), readFileSync(auth, 'utf-8'));\n }\n\n // Return as single API key\n return auth;\n}\n","import { existsSync, readdirSync } from 'node:fs';\n\n/**\n * List the files in a directory\n *\n * ```typescript\n * // The directory to search\n * const directory = '/User/test/transcend/my-app/app/containers';\n * // Returns ['test.js']\n * listFiles(directory);\n * ```\n *\n * @param directory - The directory to search\n * @param validExtensions - The list of valid extensions\n * @param removeExtensions - When true, remove the extensions from the listed files\n * @returns The list of files in the directory\n */\nexport function listFiles(\n directory: string,\n validExtensions?: string[],\n removeExtensions = false,\n): string[] {\n if (!existsSync(directory)) {\n return [];\n }\n\n const files = readdirSync(directory)\n .filter((fil) =>\n validExtensions\n ? validExtensions.filter((ext) => fil.endsWith(ext)).length\n : true,\n )\n .filter((fil) => fil.indexOf('.') > 0);\n\n return removeExtensions\n ? files.map((fil) => fil.replace(/\\.[^/.]+$/, ''))\n : files;\n}\n","import { readdirSync, statSync } from 'node:fs';\nimport { join } from 'node:path';\n\n/**\n * List the folders in a directory\n *\n * @param startDir - The base directory to list from\n * @returns The list of folders in that directory\n */\nexport function listDirectories(startDir: string): string[] {\n return readdirSync(startDir).filter((entryName) =>\n statSync(join(startDir, entryName)).isDirectory(),\n );\n}\n"],"mappings":"gWA8BA,eAAsB,EAA4B,CAChD,QACA,WACA,SACA,cACA,uBACA,uBAAuB,GACvB,kBAAkB,GAClB,eAAeA,EAAAA,GAuBd,CAED,IAAM,EAAS,MAAMC,EAAAA,GAAmC,EAAc,EAAE,CAAC,CAGzE,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,uCAAuC,CAAC,CACnE,GAAM,CAAE,QAAO,eAAgB,MAAMC,EAAAA,GAAU,EAAQ,CAAE,QAAO,WAAU,CAAC,CAC3E,EAAA,EAAO,KACL,EAAA,QAAO,MACL,oCAAoC,EAAM,OAAO,OAC/C,EAAM,SAAW,EAAI,GAAK,IAC3B,GACF,CACF,CAGD,IAAM,EAAgB,EAClB,EAAM,OACH,GACC,EAAK,aAAa,KAAO,GACzB,EAAK,aAAa,uBAAyB,EAC9C,CACD,EAGJ,EAAO,WAAW,CAChB,OAAQ,EACT,CAAC,CAGF,IAAM,EAA0B,EAAE,CAC5B,EAAgC,EAAE,CAuGxC,OApGA,EAAA,EAAO,KACL,EAAA,QAAO,QACL,mCAAmC,EAAY,YAAY,EAAO,KAChE,IACD,CAAC,GACH,CACF,CAGD,MAAMC,EAAAA,GAAU,EAAe,KAAO,IAAS,CAC7C,GAAI,CAEF,MAAMC,EAAAA,GAAW,EAAQ,CAAE,OAAQ,EAAK,GAAI,QAAO,CAAC,CAGpD,EAAA,EAAO,KACL,EAAA,QAAO,QACL,uDAAuD,EAAK,aAAa,KAAK,iBAAiB,EAAY,IAC5G,CACF,CAGD,GAAM,CAAC,GAAmB,MAAMC,EAAAA,GAAgB,EAAQ,CAAC,EAAY,CAAC,CACtE,GAAI,GAAmB,EACrB,EAAA,EAAO,KACL,EAAA,QAAO,OACL,iCAAiC,EAAK,aAAa,KAAK,iBAAiB,EAAY,IACtF,CACF,CACD,MAAMC,EAAAA,GAAa,EAAQ,EAAgB,GAAG,CAC9C,EAAA,EAAO,KACL,EAAA,QAAO,MACL,oCAAoC,EAAK,aAAa,KAAK,iBAAiB,EAAY,IACzF,CACF,SACQ,EAET,MAAU,MAAM,uCAAuC,EAAY,GAAG,CAIxE,GAAI,EAAiB,CACnB,EAAA,EAAO,KACL,EAAA,QAAO,QACL,wBAAwB,EAAK,aAAa,KAAK,iBAAiB,EAAY,IAC7E,CACF,CACD,GAAM,CAAE,UAAW,MAAMC,EAAAA,GAAa,EAAQ,CAC5C,MAAO,EACP,SACD,CAAC,CACF,EAAQ,KAAK,CACX,iBAAkB,EAAK,aAAa,KACpC,eAAgB,EAAK,aAAa,GAClC,SACD,CAAC,CACF,EAAA,EAAO,KACL,EAAA,QAAO,MACL,oCAAoC,EAAK,aAAa,KAAK,iBAAiB,EAAY,IACzF,CACF,MAGD,EAAQ,KAAK,CACX,iBAAkB,EAAK,aAAa,KACpC,eAAgB,EAAK,aAAa,GAClC,OAAQ,GACT,CAAC,OAEG,EAAK,CACZ,EAAA,EAAO,MACL,EAAA,QAAO,IACL,6CAA6C,EAAK,aAAa,KAAK,OAAO,EAAI,UAChF,CACF,CACD,EAAO,KAAK,CACV,iBAAkB,EAAK,aAAa,KACpC,eAAgB,EAAK,aAAa,GAClC,MAAO,EAAI,QACZ,CAAC,GAEJ,CACF,EAAA,EAAO,KACL,EAAA,QAAO,MACL,wBAAwB,EAAQ,OAAO,UACrC,EAAQ,SAAW,EAAI,GAAK,MAE/B,CACF,CAEG,EAAO,OAAS,GAClB,EAAA,EAAO,MACL,EAAA,QAAO,IACL,oBAAoB,EAAO,OAAO,UAChC,EAAO,SAAW,EAAI,GAAK,IAC5B,GACF,CACF,CAGI,CAAE,SAAQ,QAAS,EAAS,CCrLrC,SAAgB,EAAsB,EAAuC,CAkB3E,OAhBK,IACH,EAAA,EAAO,MACL,EAAA,QAAO,IACL,wFACD,CACF,CACD,QAAQ,KAAK,EAAE,GAIjB,EAAA,EAAA,YAAe,EAAK,EAElB,EAAA,EAAA,aAAmBC,EAAE,MAAMC,EAAAA,GAAa,EAAA,EAAA,EAAA,cAAe,EAAM,QAAQ,CAAC,CAIjE,ECfT,SAAgB,EACd,EACA,EACA,EAAmB,GACT,CACV,GAAI,EAAA,EAAA,EAAA,YAAY,EAAU,CACxB,MAAO,EAAE,CAGX,IAAM,GAAA,EAAA,EAAA,aAAoB,EAAU,CACjC,OAAQ,GACP,EACI,EAAgB,OAAQ,GAAQ,EAAI,SAAS,EAAI,CAAC,CAAC,OACnD,GACL,CACA,OAAQ,GAAQ,EAAI,QAAQ,IAAI,CAAG,EAAE,CAExC,OAAO,EACH,EAAM,IAAK,GAAQ,EAAI,QAAQ,YAAa,GAAG,CAAC,CAChD,EC3BN,SAAgB,EAAgB,EAA4B,CAC1D,OAAA,EAAA,EAAA,aAAmB,EAAS,CAAC,OAAQ,IAAA,EAAA,EAAA,WAAA,EAAA,EAAA,MACrB,EAAU,EAAU,CAAC,CAAC,aAAa,CAClD"}
+ {"version":3,"file":"api-keys-DB0BZSh5.cjs","names":["DEFAULT_TRANSCEND_API","buildTranscendGraphQLClientGeneric","loginUser","mapSeries","assumeRole","fetchAllApiKeys","deleteApiKey","createApiKey","t","StoredApiKey"],"sources":["../src/lib/api-keys/generateCrossAccountApiKeys.ts","../src/lib/api-keys/validateTranscendAuth.ts","../src/lib/api-keys/listFiles.ts","../src/lib/api-keys/listDirectories.ts"],"sourcesContent":["import { mapSeries } from '../bluebird';\nimport {\n buildTranscendGraphQLClientGeneric,\n loginUser,\n createApiKey,\n fetchAllApiKeys,\n deleteApiKey,\n assumeRole,\n} from '../graphql';\nimport { ScopeName } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { StoredApiKey } from '../../codecs';\nimport { logger } from '../../logger';\nimport { DEFAULT_TRANSCEND_API } from '../../constants';\n\nexport interface ApiKeyGenerateError {\n /** Name of instance */\n organizationName: string;\n /** Error */\n error: string;\n /** Organization ID API key is for */\n organizationId: string;\n}\n\n/**\n * Generate API keys across multiple transcend accounts\n *\n * @param options - Options\n * @returns Number of API keys created\n */\nexport async function generateCrossAccountApiKeys({\n email,\n password,\n scopes,\n apiKeyTitle,\n parentOrganizationId,\n deleteExistingApiKey = true,\n createNewApiKey = true,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** Email address of user generating API keys */\n email: string;\n /** Password of user generating API keys */\n password: string;\n /** Filter for organizations that match this parent organization ID */\n parentOrganizationId?: string;\n /** Title of the API create to create */\n apiKeyTitle: string;\n /** Title of the API create to create */\n scopes: ScopeName[];\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** When true delete existing API keys with that title, if set to false an API key exists with that title, an error is thrown */\n deleteExistingApiKey?: boolean;\n /** When true, generate new API keys, otherwise only will delete past API keys */\n createNewApiKey?: boolean;\n}): Promise<{\n /** Successfully generated */\n apiKeys: StoredApiKey[];\n /** Error results */\n errors: ApiKeyGenerateError[];\n}> {\n // Create GraphQL client\n const client = await buildTranscendGraphQLClientGeneric(transcendUrl, {});\n\n // Login the user\n logger.info(colors.magenta('Logging in using email and password.'));\n const { roles, loginCookie } = await loginUser(client, { email, password });\n logger.info(\n colors.green(\n `Successfully logged in and found ${roles.length} role${\n roles.length === 1 ? '' : 's'\n }!`,\n ),\n );\n\n // Filter down by parentOrganizationId\n const filteredRoles = parentOrganizationId\n ? roles.filter(\n (role) =>\n role.organization.id === parentOrganizationId ||\n role.organization.parentOrganizationId === parentOrganizationId,\n )\n : roles;\n\n // Save cookie to call route subsequent times\n client.setHeaders({\n Cookie: loginCookie,\n });\n\n // Save the resulting API keys\n const results: StoredApiKey[] = [];\n const errors: ApiKeyGenerateError[] = [];\n\n // Generate API keys\n logger.info(\n colors.magenta(\n `Generating API keys with title: ${apiKeyTitle}, scopes: ${scopes.join(\n ',',\n )}.`,\n ),\n );\n\n // Map over each role\n await mapSeries(filteredRoles, async (role) => {\n try {\n // Log into the other instance\n await assumeRole(client, { roleId: role.id, email });\n\n // Grab API keys with that title\n logger.info(\n colors.magenta(\n `Checking if API key already exists in organization \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n\n // Delete existing API key\n const [apiKeyWithTitle] = await fetchAllApiKeys(client, [apiKeyTitle]);\n if (apiKeyWithTitle && deleteExistingApiKey) {\n logger.info(\n colors.yellow(\n `Deleting existing API key in \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n await deleteApiKey(client, apiKeyWithTitle.id);\n logger.info(\n colors.green(\n `Successfully deleted API key in \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n } else if (apiKeyWithTitle) {\n // throw error if one exists but not configured to delete\n throw new Error(`API key already exists with title: \"${apiKeyTitle}\"`);\n }\n\n // Create the API key\n if (createNewApiKey) {\n logger.info(\n colors.magenta(\n `Creating API key in \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n const { apiKey } = await createApiKey(client, {\n title: apiKeyTitle,\n scopes,\n });\n results.push({\n organizationName: role.organization.name,\n organizationId: role.organization.id,\n apiKey,\n });\n logger.info(\n colors.green(\n `Successfully created API key in \"${role.organization.name}\" with title: \"${apiKeyTitle}\".`,\n ),\n );\n } else {\n // Delete only\n results.push({\n organizationName: role.organization.name,\n organizationId: role.organization.id,\n apiKey: '',\n });\n }\n } catch (err) {\n logger.error(\n colors.red(\n `Failed to create API key in organization \"${role.organization.name}\"! - ${err.message}`,\n ),\n );\n errors.push({\n organizationName: role.organization.name,\n organizationId: role.organization.id,\n error: err.message,\n });\n }\n });\n logger.info(\n colors.green(\n `Successfully created ${results.length} API key${\n results.length === 1 ? '' : 's'\n }`,\n ),\n );\n\n if (errors.length > 0) {\n logger.error(\n colors.red(\n `Failed to create ${errors.length} API key${\n errors.length === 1 ? '' : 's'\n }!`,\n ),\n );\n }\n\n return { errors, apiKeys: results };\n}\n","import { decodeCodec } from '@transcend-io/type-utils';\nimport colors from 'colors';\nimport * as t from 'io-ts';\nimport { logger } from '../../logger';\nimport { existsSync, readFileSync } from 'node:fs';\nimport { StoredApiKey } from '../../codecs';\n\n/**\n * Determine if the `--auth` parameter is an API key or a path to a JSON\n * file containing a list of API keys.\n *\n * @param auth - Raw auth parameter\n * @returns The API key or the list API keys\n */\nexport function validateTranscendAuth(auth: string): string | StoredApiKey[] {\n // Ensure auth is passed\n if (!auth) {\n logger.error(\n colors.red(\n 'A Transcend API key must be provided. You can specify using --auth=$TRANSCEND_API_KEY',\n ),\n );\n process.exit(1);\n }\n\n // Read from disk\n if (existsSync(auth)) {\n // validate that file is a list of API keys\n return decodeCodec(t.array(StoredApiKey), readFileSync(auth, 'utf-8'));\n }\n\n // Return as single API key\n return auth;\n}\n","import { existsSync, readdirSync } from 'node:fs';\n\n/**\n * List the files in a directory\n *\n * ```typescript\n * // The directory to search\n * const directory = '/User/test/transcend/my-app/app/containers';\n * // Returns ['test.js']\n * listFiles(directory);\n * ```\n *\n * @param directory - The directory to search\n * @param validExtensions - The list of valid extensions\n * @param removeExtensions - When true, remove the extensions from the listed files\n * @returns The list of files in the directory\n */\nexport function listFiles(\n directory: string,\n validExtensions?: string[],\n removeExtensions = false,\n): string[] {\n if (!existsSync(directory)) {\n return [];\n }\n\n const files = readdirSync(directory)\n .filter((fil) =>\n validExtensions\n ? validExtensions.filter((ext) => fil.endsWith(ext)).length\n : true,\n )\n .filter((fil) => fil.indexOf('.') > 0);\n\n return removeExtensions\n ? files.map((fil) => fil.replace(/\\.[^/.]+$/, ''))\n : files;\n}\n","import { readdirSync, statSync } from 'node:fs';\nimport { join } from 'node:path';\n\n/**\n * List the folders in a directory\n *\n * @param startDir - The base directory to list from\n * @returns The list of folders in that directory\n */\nexport function listDirectories(startDir: string): string[] {\n return readdirSync(startDir).filter((entryName) =>\n statSync(join(startDir, entryName)).isDirectory(),\n );\n}\n"],"mappings":"gWA8BA,eAAsB,EAA4B,CAChD,QACA,WACA,SACA,cACA,uBACA,uBAAuB,GACvB,kBAAkB,GAClB,eAAeA,EAAAA,GAuBd,CAED,IAAM,EAAS,MAAMC,EAAAA,GAAmC,EAAc,EAAE,CAAC,CAGzE,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,uCAAuC,CAAC,CACnE,GAAM,CAAE,QAAO,eAAgB,MAAMC,EAAAA,GAAU,EAAQ,CAAE,QAAO,WAAU,CAAC,CAC3E,EAAA,EAAO,KACL,EAAA,QAAO,MACL,oCAAoC,EAAM,OAAO,OAC/C,EAAM,SAAW,EAAI,GAAK,IAC3B,GACF,CACF,CAGD,IAAM,EAAgB,EAClB,EAAM,OACH,GACC,EAAK,aAAa,KAAO,GACzB,EAAK,aAAa,uBAAyB,EAC9C,CACD,EAGJ,EAAO,WAAW,CAChB,OAAQ,EACT,CAAC,CAGF,IAAM,EAA0B,EAAE,CAC5B,EAAgC,EAAE,CAuGxC,OApGA,EAAA,EAAO,KACL,EAAA,QAAO,QACL,mCAAmC,EAAY,YAAY,EAAO,KAChE,IACD,CAAC,GACH,CACF,CAGD,MAAMC,EAAAA,GAAU,EAAe,KAAO,IAAS,CAC7C,GAAI,CAEF,MAAMC,EAAAA,GAAW,EAAQ,CAAE,OAAQ,EAAK,GAAI,QAAO,CAAC,CAGpD,EAAA,EAAO,KACL,EAAA,QAAO,QACL,uDAAuD,EAAK,aAAa,KAAK,iBAAiB,EAAY,IAC5G,CACF,CAGD,GAAM,CAAC,GAAmB,MAAMC,EAAAA,GAAgB,EAAQ,CAAC,EAAY,CAAC,CACtE,GAAI,GAAmB,EACrB,EAAA,EAAO,KACL,EAAA,QAAO,OACL,iCAAiC,EAAK,aAAa,KAAK,iBAAiB,EAAY,IACtF,CACF,CACD,MAAMC,EAAAA,GAAa,EAAQ,EAAgB,GAAG,CAC9C,EAAA,EAAO,KACL,EAAA,QAAO,MACL,oCAAoC,EAAK,aAAa,KAAK,iBAAiB,EAAY,IACzF,CACF,SACQ,EAET,MAAU,MAAM,uCAAuC,EAAY,GAAG,CAIxE,GAAI,EAAiB,CACnB,EAAA,EAAO,KACL,EAAA,QAAO,QACL,wBAAwB,EAAK,aAAa,KAAK,iBAAiB,EAAY,IAC7E,CACF,CACD,GAAM,CAAE,UAAW,MAAMC,EAAAA,GAAa,EAAQ,CAC5C,MAAO,EACP,SACD,CAAC,CACF,EAAQ,KAAK,CACX,iBAAkB,EAAK,aAAa,KACpC,eAAgB,EAAK,aAAa,GAClC,SACD,CAAC,CACF,EAAA,EAAO,KACL,EAAA,QAAO,MACL,oCAAoC,EAAK,aAAa,KAAK,iBAAiB,EAAY,IACzF,CACF,MAGD,EAAQ,KAAK,CACX,iBAAkB,EAAK,aAAa,KACpC,eAAgB,EAAK,aAAa,GAClC,OAAQ,GACT,CAAC,OAEG,EAAK,CACZ,EAAA,EAAO,MACL,EAAA,QAAO,IACL,6CAA6C,EAAK,aAAa,KAAK,OAAO,EAAI,UAChF,CACF,CACD,EAAO,KAAK,CACV,iBAAkB,EAAK,aAAa,KACpC,eAAgB,EAAK,aAAa,GAClC,MAAO,EAAI,QACZ,CAAC,GAEJ,CACF,EAAA,EAAO,KACL,EAAA,QAAO,MACL,wBAAwB,EAAQ,OAAO,UACrC,EAAQ,SAAW,EAAI,GAAK,MAE/B,CACF,CAEG,EAAO,OAAS,GAClB,EAAA,EAAO,MACL,EAAA,QAAO,IACL,oBAAoB,EAAO,OAAO,UAChC,EAAO,SAAW,EAAI,GAAK,IAC5B,GACF,CACF,CAGI,CAAE,SAAQ,QAAS,EAAS,CCrLrC,SAAgB,EAAsB,EAAuC,CAkB3E,OAhBK,IACH,EAAA,EAAO,MACL,EAAA,QAAO,IACL,wFACD,CACF,CACD,QAAQ,KAAK,EAAE,GAIjB,EAAA,EAAA,YAAe,EAAK,EAElB,EAAA,EAAA,aAAmBC,EAAE,MAAMC,EAAAA,GAAa,EAAA,EAAA,EAAA,cAAe,EAAM,QAAQ,CAAC,CAIjE,ECfT,SAAgB,EACd,EACA,EACA,EAAmB,GACT,CACV,GAAI,EAAA,EAAA,EAAA,YAAY,EAAU,CACxB,MAAO,EAAE,CAGX,IAAM,GAAA,EAAA,EAAA,aAAoB,EAAU,CACjC,OAAQ,GACP,EACI,EAAgB,OAAQ,GAAQ,EAAI,SAAS,EAAI,CAAC,CAAC,OACnD,GACL,CACA,OAAQ,GAAQ,EAAI,QAAQ,IAAI,CAAG,EAAE,CAExC,OAAO,EACH,EAAM,IAAK,GAAQ,EAAI,QAAQ,YAAa,GAAG,CAAC,CAChD,EC3BN,SAAgB,EAAgB,EAA4B,CAC1D,OAAA,EAAA,EAAA,aAAmB,EAAS,CAAC,OAAQ,IAAA,EAAA,EAAA,WAAA,EAAA,EAAA,MACrB,EAAU,EAAU,CAAC,CAAC,aAAa,CAClD"}
@@ -1,33 +1,33 @@
- const e=require(`./enums-CBXlBJii.cjs`),t=require(`./command-FrwAB8R3.cjs`),n=require(`./constants-CmHjougS.cjs`);let r=require(`@stricli/core`),i=require(`@stricli/auto-complete`),a=require(`@transcend-io/privacy-types`);const o=(0,r.buildCommand)({loader:async()=>{let{generateApiKeys:e}=await Promise.resolve().then(()=>require(`./impl-DJMqsQ7L.cjs`));return e},parameters:{flags:{email:{kind:`parsed`,parse:String,brief:`The email address that you use to log into Transcend`},password:{kind:`parsed`,parse:String,brief:`The password for your account login`},apiKeyTitle:{kind:`parsed`,parse:String,brief:`The title of the API key being generated or destroyed`},file:{kind:`parsed`,parse:String,brief:`The file where API keys should be written to`},scopes:{kind:`enum`,values:n.l,variadic:`,`,brief:`The list of scopes that should be given to the API key`},deleteExistingApiKey:{kind:`boolean`,brief:`When true, if an API key exists with the specified apiKeyTitle, the existing API key is deleted`,default:!0},createNewApiKey:{kind:`boolean`,brief:`When true, new API keys will be created. Set to false if you simply want to delete all API keys with a title`,default:!0},parentOrganizationId:{kind:`parsed`,parse:t.d,brief:`Filter for only a specific organization by ID, returning all child accounts associated with that organization`,optional:!0},transcendUrl:t.s()}},docs:{brief:`Generate API keys`,fullDescription:`This command allows for creating API keys across multiple Transcend instances. This is useful for customers that are managing many Transcend instances and need to regularly create, cycle or delete API keys across all of their instances.
+ const e=require(`./enums-CBXlBJii.cjs`),t=require(`./command-41dUPvEa.cjs`),n=require(`./constants-JoCDv9ym.cjs`);let r=require(`@stricli/core`),i=require(`@stricli/auto-complete`),a=require(`@transcend-io/privacy-types`);const o=(0,r.buildCommand)({loader:async()=>{let{generateApiKeys:e}=await Promise.resolve().then(()=>require(`./impl-D3sI4I1g.cjs`));return e},parameters:{flags:{email:{kind:`parsed`,parse:String,brief:`The email address that you use to log into Transcend`},password:{kind:`parsed`,parse:String,brief:`The password for your account login`},apiKeyTitle:{kind:`parsed`,parse:String,brief:`The title of the API key being generated or destroyed`},file:{kind:`parsed`,parse:String,brief:`The file where API keys should be written to`},scopes:{kind:`enum`,values:n.l,variadic:`,`,brief:`The list of scopes that should be given to the API key`},deleteExistingApiKey:{kind:`boolean`,brief:`When true, if an API key exists with the specified apiKeyTitle, the existing API key is deleted`,default:!0},createNewApiKey:{kind:`boolean`,brief:`When true, new API keys will be created. Set to false if you simply want to delete all API keys with a title`,default:!0},parentOrganizationId:{kind:`parsed`,parse:t.d,brief:`Filter for only a specific organization by ID, returning all child accounts associated with that organization`,optional:!0},transcendUrl:t.s()}},docs:{brief:`Generate API keys`,fullDescription:`This command allows for creating API keys across multiple Transcend instances. This is useful for customers that are managing many Transcend instances and need to regularly create, cycle or delete API keys across all of their instances.
 
  Unlike the other commands that rely on API key authentication, this command relies upon username/password authentication. This command will spit out the API keys into a JSON file, and that JSON file can be used in subsequent CLI commands.
 
- Authentication requires your email and password for the Transcend account. This command will only generate API keys for Transcend instances where you have the permission to "Manage API Keys".`}}),s=(0,r.buildCommand)({loader:async()=>{let{chunkCsv:e}=await Promise.resolve().then(()=>require(`./impl-CA649Byw.cjs`));return e},parameters:{flags:{directory:{kind:`parsed`,parse:String,brief:`Directory containing CSV files to split (required)`},outputDir:{kind:`parsed`,parse:String,brief:`Directory to write chunk files (defaults to each input file's directory)`,optional:!0},clearOutputDir:{kind:`boolean`,brief:`Clear the output directory before writing chunks`,default:!0},chunkSizeMB:{kind:`parsed`,parse:e=>{let t=Number(e);if(!Number.isFinite(t)||t<=0)throw Error(`chunkSizeMB must be a positive number`);return t},brief:`Approximate chunk size in megabytes. Keep well under JS string size limits`,default:`10`},concurrency:{kind:`parsed`,parse:e=>Math.max(1,Number(e)||0),brief:`Max number of worker processes (defaults based on CPU and file count)`,optional:!0},viewerMode:{kind:`boolean`,brief:`Run in non-interactive viewer mode (no attach UI, auto-artifacts)`,default:!1}}},docs:{brief:`Chunk all CSVs in a directory into smaller CSV files`,fullDescription:`Streams every CSV in --directory and writes chunked files of approximately N MB each.
+ Authentication requires your email and password for the Transcend account. This command will only generate API keys for Transcend instances where you have the permission to "Manage API Keys".`}}),s=(0,r.buildCommand)({loader:async()=>{let{chunkCsv:e}=await Promise.resolve().then(()=>require(`./impl-SkiG9sWb.cjs`));return e},parameters:{flags:{directory:{kind:`parsed`,parse:String,brief:`Directory containing CSV files to split (required)`},outputDir:{kind:`parsed`,parse:String,brief:`Directory to write chunk files (defaults to each input file's directory)`,optional:!0},clearOutputDir:{kind:`boolean`,brief:`Clear the output directory before writing chunks`,default:!0},chunkSizeMB:{kind:`parsed`,parse:e=>{let t=Number(e);if(!Number.isFinite(t)||t<=0)throw Error(`chunkSizeMB must be a positive number`);return t},brief:`Approximate chunk size in megabytes. Keep well under JS string size limits`,default:`10`},concurrency:{kind:`parsed`,parse:e=>Math.max(1,Number(e)||0),brief:`Max number of worker processes (defaults based on CPU and file count)`,optional:!0},viewerMode:{kind:`boolean`,brief:`Run in non-interactive viewer mode (no attach UI, auto-artifacts)`,default:!1}}},docs:{brief:`Chunk all CSVs in a directory into smaller CSV files`,fullDescription:`Streams every CSV in --directory and writes chunked files of approximately N MB each.
  - Runs files in parallel across worker processes (configurable via --concurrency).
- - Validates row-length consistency against the header row; logs periodic progress and memory usage.`}}),c=(0,r.buildCommand)({loader:async()=>{let{parquetToCsv:e}=await Promise.resolve().then(()=>require(`./impl-BqwFuOOo.cjs`));return e},parameters:{flags:{directory:{kind:`parsed`,parse:String,brief:`Directory containing Parquet files to convert (required)`},outputDir:{kind:`parsed`,parse:String,brief:`Directory to write CSV files (defaults to each input file's directory)`,optional:!0},clearOutputDir:{kind:`boolean`,brief:`Clear the output directory before writing CSVs`,default:!0},concurrency:{kind:`parsed`,parse:e=>Math.max(1,Number(e)||0),brief:`Max number of worker processes (defaults based on CPU and file count)`,optional:!0},viewerMode:{kind:`boolean`,brief:`Run in non-interactive viewer mode (no attach UI, auto-artifacts)`,default:!1}}},docs:{brief:`Convert all Parquet files in a directory to CSV`,fullDescription:`Streams every .parquet in --directory and writes CSV output files
+ - Validates row-length consistency against the header row; logs periodic progress and memory usage.`}}),c=(0,r.buildCommand)({loader:async()=>{let{parquetToCsv:e}=await Promise.resolve().then(()=>require(`./impl-ClDXxODZ.cjs`));return e},parameters:{flags:{directory:{kind:`parsed`,parse:String,brief:`Directory containing Parquet files to convert (required)`},outputDir:{kind:`parsed`,parse:String,brief:`Directory to write CSV files (defaults to each input file's directory)`,optional:!0},clearOutputDir:{kind:`boolean`,brief:`Clear the output directory before writing CSVs`,default:!0},concurrency:{kind:`parsed`,parse:e=>Math.max(1,Number(e)||0),brief:`Max number of worker processes (defaults based on CPU and file count)`,optional:!0},viewerMode:{kind:`boolean`,brief:`Run in non-interactive viewer mode (no attach UI, auto-artifacts)`,default:!1}}},docs:{brief:`Convert all Parquet files in a directory to CSV`,fullDescription:`Streams every .parquet in --directory and writes CSV output files
  - Runs files in parallel across worker processes (configurable via --concurrency).
  - Validates row consistency; logs periodic progress and memory usage.
 
  This is a useful administrative tool for converting Parquet exports to CSV for the purposes
- of uploading DSRs or Consent Preferences. e.g. transcend consent upload-preferences ...`}}),l=(0,r.buildRouteMap)({routes:{"generate-api-keys":o,"chunk-csv":s,"parquet-to-csv":c},docs:{brief:`Admin commands`}}),u=(0,r.buildCommand)({loader:async()=>{let{buildXdiSyncEndpoint:e}=await Promise.resolve().then(()=>require(`./impl-CCuP7d9d.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewConsentManager]}),xdiLocation:{kind:`parsed`,parse:String,brief:`The location of the XDI that will be loaded by the generated sync endpoint`},file:{kind:`parsed`,parse:String,brief:`The HTML file path where the sync endpoint should be written`,default:`./sync-endpoint.html`},removeIpAddresses:{kind:`boolean`,brief:`When true, remove IP addresses from the domain list`,default:!0},domainBlockList:{kind:`parsed`,parse:t.c,brief:`The set of domains that should be excluded from the sync endpoint. Comma-separated list.`,default:`localhost`},xdiAllowedCommands:{kind:`parsed`,parse:String,brief:`The allowed set of XDI commands`,default:`ConsentManager:Sync`},transcendUrl:t.s()}},docs:{brief:`Build XDI sync endpoint`,fullDescription:`This command allows for building of the XDI Sync Endpoint across a set of Transcend accounts.`}}),d=(0,r.buildCommand)({loader:async()=>{let{pullConsentMetrics:e}=await Promise.resolve().then(()=>require(`./impl-BzfO2Fr-.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewConsentManager]}),start:{kind:`parsed`,parse:t.l,brief:`The start date to pull metrics from`},end:{kind:`parsed`,parse:t.l,brief:`The end date to pull metrics until`,optional:!0},folder:{kind:`parsed`,parse:String,brief:`The folder to save metrics to`,default:`./consent-metrics/`},bin:{kind:`parsed`,parse:String,brief:`The bin metric when pulling data (1h or 1d)`,default:`1d`},transcendUrl:t.s()}},docs:{brief:`Pull consent metrics`,fullDescription:"This command allows for pulling consent manager metrics for a Transcend account, or a set of Transcend accounts.\n\nBy default, the consent metrics will be written to a folder named `consent-metrics` within the directory where you run the command. You can override the location that these CSVs are written to using the flag `--folder=./my-folder/`. This folder will contain a set of CSV files:\n\n- `CONSENT_CHANGES_TIMESERIES_optIn.csv` -> this is a feed containing the number of explicit opt in events that happen - these are calls to `airgap.setConsent(event, { SaleOfInfo: true });`\n- `CONSENT_CHANGES_TIMESERIES_optOut.csv` -> this is a feed containing the number of explicit opt out events that happen - these are calls to `airgap.setConsent(event, { SaleOfInfo: false });`\n- `CONSENT_SESSIONS_BY_REGIME_Default.csv` -> this contains the number of sessions detected for the bin period\n- `PRIVACY_SIGNAL_TIMESERIES_DNT.csv` -> the number of DNT signals detected.\n- `PRIVACY_SIGNAL_TIMESERIES_GPC.csv` -> the number of GPC signals detected."}}),f=(0,r.buildCommand)({loader:async()=>{let{pullConsentPreferences:e}=await Promise.resolve().then(()=>require(`./impl-DEsdC5Z4.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewManagedConsentDatabaseAdminApi,a.ScopeName.ViewRequestIdentitySettings,a.ScopeName.ViewPreferenceStoreSettings]}),partition:{kind:`parsed`,parse:String,brief:`Partition ID to query in the Preference Store`},sombraAuth:t.o(),file:{kind:`parsed`,parse:String,brief:`Path to CSV output file`,default:`./preferences.csv`},transcendUrl:t.s(),timestampBefore:{kind:`parsed`,parse:t.l,brief:`Filter: preferences collected before this time (timestampBefore)`,optional:!0},timestampAfter:{kind:`parsed`,parse:t.l,brief:`Filter: preferences collected after this time (timestampAfter)`,optional:!0},updatedBefore:{kind:`parsed`,parse:t.l,brief:`Filter: preferences updated before this time (system.updatedAt)`,optional:!0},updatedAfter:{kind:`parsed`,parse:t.l,brief:`Filter: preferences updated after this time (system.updatedAt)`,optional:!0},identifiers:{kind:`parsed`,parse:String,variadic:`,`,brief:`Filter specific users by identifier(s) as "name:value". If name is omitted, defaults to "email". Multiple values separated by commas.`,optional:!0},concurrency:{kind:`parsed`,parse:r.numberParser,brief:`Page size / concurrency used when downloading (1–50 per API). Higher = fewer pages.`,default:`50`},shouldChunk:{kind:`boolean`,brief:`Whether to download requests in timestamp window chunks.`,default:!0},exportIdentifiersWithDelimiter:{kind:`parsed`,parse:String,brief:`Delimiter to use when combining multiple identifiers into a single column in the output CSV.`,default:`,`},windowConcurrency:{kind:`parsed`,parse:r.numberParser,brief:`When chunking, how many windows to download in parallel (higher = faster, but more load).`,default:`80`},maxChunks:{kind:`parsed`,parse:r.numberParser,brief:`Maximum number of chunks to download (higher = more data, but more load).`,default:`20000`},maxLookbackDays:{kind:`parsed`,parse:r.numberParser,brief:`Maximum lookback period in days for fetching consent preferences.`,default:`3650`}}},docs:{brief:`Pull consent preferences from the Managed Consent Database`,fullDescription:`Uses POST /v1/preferences/{partition}/query with cursor-based pagination. Supports filtering by identifiers, collection timestamps, and system.updatedAt.`}}),p=(0,r.buildCommand)({loader:async()=>{let{updateConsentManager:e}=await Promise.resolve().then(()=>require(`./impl-CCPd6sqk.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageConsentManagerDeveloperSettings]}),bundleTypes:{kind:`enum`,values:Object.values(a.ConsentBundleType),brief:`The bundle types to deploy. Defaults to PRODUCTION,TEST.`,variadic:`,`},deploy:{kind:`boolean`,brief:`When true, deploy the Consent Manager after updating the version`,default:!1},transcendUrl:t.s()}},docs:{brief:`Update consent manager`,fullDescription:`This command allows for updating Consent Manager to latest version. The Consent Manager bundle can also be deployed using this command.`}}),m=(0,r.buildCommand)({loader:async()=>{let{uploadConsentPreferences:e}=await Promise.resolve().then(()=>require(`./impl-CKCV2er8.cjs`));return e},parameters:{flags:{base64EncryptionKey:{kind:`parsed`,parse:String,brief:`The encryption key used to encrypt the userId`},base64SigningKey:{kind:`parsed`,parse:String,brief:`The signing key used to prove authentication of consent request`},partition:{kind:`parsed`,parse:String,brief:`The partition key to download consent preferences to`},file:{kind:`parsed`,parse:String,brief:`The file to pull consent preferences from`,default:`./preferences.csv`},consentUrl:t.a(),concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`100`}}},docs:{brief:`Upload consent preferences to the Managed Consent Database`,fullDescription:`This command allows for updating of consent preferences to the Managed Consent Database.`}}),h=(0,r.buildCommand)({loader:async()=>{let{uploadCookiesFromCsv:e}=await Promise.resolve().then(()=>require(`./impl-DbD3dI7B.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageDataFlow]}),trackerStatus:{kind:`enum`,values:Object.values(a.ConsentTrackerStatus),brief:`The status of the cookies you will upload.`},file:{kind:`parsed`,parse:String,brief:`Path to the CSV file to upload`,default:`./cookies.csv`},transcendUrl:t.s()}},docs:{brief:`Upload cookies from CSV`,fullDescription:`Upload cookies from CSV. This command allows for uploading of cookies from CSV.
+ of uploading DSRs or Consent Preferences. e.g. transcend consent upload-preferences ...`}}),l=(0,r.buildRouteMap)({routes:{"generate-api-keys":o,"chunk-csv":s,"parquet-to-csv":c},docs:{brief:`Admin commands`}}),u=(0,r.buildCommand)({loader:async()=>{let{buildXdiSyncEndpoint:e}=await Promise.resolve().then(()=>require(`./impl-BwAnSxkP.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewConsentManager]}),xdiLocation:{kind:`parsed`,parse:String,brief:`The location of the XDI that will be loaded by the generated sync endpoint`},file:{kind:`parsed`,parse:String,brief:`The HTML file path where the sync endpoint should be written`,default:`./sync-endpoint.html`},removeIpAddresses:{kind:`boolean`,brief:`When true, remove IP addresses from the domain list`,default:!0},domainBlockList:{kind:`parsed`,parse:t.c,brief:`The set of domains that should be excluded from the sync endpoint. Comma-separated list.`,default:`localhost`},xdiAllowedCommands:{kind:`parsed`,parse:String,brief:`The allowed set of XDI commands`,default:`ConsentManager:Sync`},transcendUrl:t.s()}},docs:{brief:`Build XDI sync endpoint`,fullDescription:`This command allows for building of the XDI Sync Endpoint across a set of Transcend accounts.`}}),d=(0,r.buildCommand)({loader:async()=>{let{pullConsentMetrics:e}=await Promise.resolve().then(()=>require(`./impl-KgEJvOhE.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewConsentManager]}),start:{kind:`parsed`,parse:t.l,brief:`The start date to pull metrics from`},end:{kind:`parsed`,parse:t.l,brief:`The end date to pull metrics until`,optional:!0},folder:{kind:`parsed`,parse:String,brief:`The folder to save metrics to`,default:`./consent-metrics/`},bin:{kind:`parsed`,parse:String,brief:`The bin metric when pulling data (1h or 1d)`,default:`1d`},transcendUrl:t.s()}},docs:{brief:`Pull consent metrics`,fullDescription:"This command allows for pulling consent manager metrics for a Transcend account, or a set of Transcend accounts.\n\nBy default, the consent metrics will be written to a folder named `consent-metrics` within the directory where you run the command. You can override the location that these CSVs are written to using the flag `--folder=./my-folder/`. This folder will contain a set of CSV files:\n\n- `CONSENT_CHANGES_TIMESERIES_optIn.csv` -> this is a feed containing the number of explicit opt in events that happen - these are calls to `airgap.setConsent(event, { SaleOfInfo: true });`\n- `CONSENT_CHANGES_TIMESERIES_optOut.csv` -> this is a feed containing the number of explicit opt out events that happen - these are calls to `airgap.setConsent(event, { SaleOfInfo: false });`\n- `CONSENT_SESSIONS_BY_REGIME_Default.csv` -> this contains the number of sessions detected for the bin period\n- `PRIVACY_SIGNAL_TIMESERIES_DNT.csv` -> the number of DNT signals detected.\n- `PRIVACY_SIGNAL_TIMESERIES_GPC.csv` -> the number of GPC signals detected."}}),f=(0,r.buildCommand)({loader:async()=>{let{pullConsentPreferences:e}=await Promise.resolve().then(()=>require(`./impl-ay7i0K_5.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewManagedConsentDatabaseAdminApi,a.ScopeName.ViewRequestIdentitySettings,a.ScopeName.ViewPreferenceStoreSettings]}),partition:{kind:`parsed`,parse:String,brief:`Partition ID to query in the Preference Store`},sombraAuth:t.o(),file:{kind:`parsed`,parse:String,brief:`Path to CSV output file`,default:`./preferences.csv`},transcendUrl:t.s(),timestampBefore:{kind:`parsed`,parse:t.l,brief:`Filter: preferences collected before this time (timestampBefore)`,optional:!0},timestampAfter:{kind:`parsed`,parse:t.l,brief:`Filter: preferences collected after this time (timestampAfter)`,optional:!0},updatedBefore:{kind:`parsed`,parse:t.l,brief:`Filter: preferences updated before this time (system.updatedAt)`,optional:!0},updatedAfter:{kind:`parsed`,parse:t.l,brief:`Filter: preferences updated after this time (system.updatedAt)`,optional:!0},identifiers:{kind:`parsed`,parse:String,variadic:`,`,brief:`Filter specific users by identifier(s) as "name:value". If name is omitted, defaults to "email". Multiple values separated by commas.`,optional:!0},concurrency:{kind:`parsed`,parse:r.numberParser,brief:`Page size / concurrency used when downloading (1–50 per API). Higher = fewer pages.`,default:`50`},shouldChunk:{kind:`boolean`,brief:`Whether to download requests in timestamp window chunks.`,default:!0},exportIdentifiersWithDelimiter:{kind:`parsed`,parse:String,brief:`Delimiter to use when combining multiple identifiers into a single column in the output CSV.`,default:`,`},windowConcurrency:{kind:`parsed`,parse:r.numberParser,brief:`When chunking, how many windows to download in parallel (higher = faster, but more load).`,default:`80`},maxChunks:{kind:`parsed`,parse:r.numberParser,brief:`Maximum number of chunks to download (higher = more data, but more load).`,default:`20000`},maxLookbackDays:{kind:`parsed`,parse:r.numberParser,brief:`Maximum lookback period in days for fetching consent preferences.`,default:`3650`}}},docs:{brief:`Pull consent preferences from the Managed Consent Database`,fullDescription:`Uses POST /v1/preferences/{partition}/query with cursor-based pagination. Supports filtering by identifiers, collection timestamps, and system.updatedAt.`}}),p=(0,r.buildCommand)({loader:async()=>{let{updateConsentManager:e}=await Promise.resolve().then(()=>require(`./impl-C4D1tHjp.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageConsentManagerDeveloperSettings]}),bundleTypes:{kind:`enum`,values:Object.values(a.ConsentBundleType),brief:`The bundle types to deploy. Defaults to PRODUCTION,TEST.`,variadic:`,`},deploy:{kind:`boolean`,brief:`When true, deploy the Consent Manager after updating the version`,default:!1},transcendUrl:t.s()}},docs:{brief:`Update consent manager`,fullDescription:`This command allows for updating Consent Manager to latest version. The Consent Manager bundle can also be deployed using this command.`}}),m=(0,r.buildCommand)({loader:async()=>{let{uploadConsentPreferences:e}=await Promise.resolve().then(()=>require(`./impl-CFmFCVt8.cjs`));return e},parameters:{flags:{base64EncryptionKey:{kind:`parsed`,parse:String,brief:`The encryption key used to encrypt the userId`},base64SigningKey:{kind:`parsed`,parse:String,brief:`The signing key used to prove authentication of consent request`},partition:{kind:`parsed`,parse:String,brief:`The partition key to download consent preferences to`},file:{kind:`parsed`,parse:String,brief:`The file to pull consent preferences from`,default:`./preferences.csv`},consentUrl:t.a(),concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`100`}}},docs:{brief:`Upload consent preferences to the Managed Consent Database`,fullDescription:`This command allows for updating of consent preferences to the Managed Consent Database.`}}),h=(0,r.buildCommand)({loader:async()=>{let{uploadCookiesFromCsv:e}=await Promise.resolve().then(()=>require(`./impl-Bv4gPikD.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageDataFlow]}),trackerStatus:{kind:`enum`,values:Object.values(a.ConsentTrackerStatus),brief:`The status of the cookies you will upload.`},file:{kind:`parsed`,parse:String,brief:`Path to the CSV file to upload`,default:`./cookies.csv`},transcendUrl:t.s()}},docs:{brief:`Upload cookies from CSV`,fullDescription:`Upload cookies from CSV. This command allows for uploading of cookies from CSV.

  Step 1) Download the CSV of cookies that you want to edit from the Admin Dashboard under [Consent Management -> Cookies](https://app.transcend.io/consent-manager/cookies). You can download cookies from both the "Triage" and "Approved" tabs.

  Step 2) You can edit the contents of the CSV file as needed. You may adjust the "Purpose" column, adjust the "Notes" column, add "Owners" and "Teams" or even add custom columns with additional metadata.

- Step 3) Upload the modified CSV file back into the dashboard with this command.`}}),g=(0,r.buildCommand)({loader:async()=>{let{uploadDataFlowsFromCsv:e}=await Promise.resolve().then(()=>require(`./impl-TTDhp7Hu.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageDataFlow]}),trackerStatus:{kind:`enum`,values:Object.values(a.ConsentTrackerStatus),brief:`The status of the data flows you will upload.`},file:{kind:`parsed`,parse:String,brief:`Path to the CSV file to upload`,default:`./data-flows.csv`},classifyService:{kind:`boolean`,brief:`When true, automatically assign the service for a data flow based on the domain that is specified`,default:!1},transcendUrl:t.s()}},docs:{brief:`Upload data flows from CSV`,fullDescription:`Upload data flows from CSV. This command allows for uploading of data flows from CSV.
+ Step 3) Upload the modified CSV file back into the dashboard with this command.`}}),g=(0,r.buildCommand)({loader:async()=>{let{uploadDataFlowsFromCsv:e}=await Promise.resolve().then(()=>require(`./impl-CoxOrlXu.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageDataFlow]}),trackerStatus:{kind:`enum`,values:Object.values(a.ConsentTrackerStatus),brief:`The status of the data flows you will upload.`},file:{kind:`parsed`,parse:String,brief:`Path to the CSV file to upload`,default:`./data-flows.csv`},classifyService:{kind:`boolean`,brief:`When true, automatically assign the service for a data flow based on the domain that is specified`,default:!1},transcendUrl:t.s()}},docs:{brief:`Upload data flows from CSV`,fullDescription:`Upload data flows from CSV. This command allows for uploading of data flows from CSV.
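As an illustrative sketch of the three-step cookies workflow above (assuming the CLI's --flag=value syntax; the trackerStatus value is a placeholder drawn from the ConsentTrackerStatus enum):

     transcend consent upload-cookies-from-csv --auth=$TRANSCEND_API_KEY --trackerStatus=LIVE --file=./cookies.csv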

  Step 1) Download the CSV of data flows that you want to edit from the Admin Dashboard under [Consent Management -> Data Flows](https://app.transcend.io/consent-manager/data-flows). You can download data flows from both the "Triage" and "Approved" tabs.

  Step 2) You can edit the contents of the CSV file as needed. You may adjust the "Purpose" column, adjust the "Notes" column, add "Owners" and "Teams" or even add custom columns with additional metadata.

- Step 3) Upload the modified CSV file back into the dashboard with this command.`}}),_=(0,r.buildCommand)({loader:async()=>{let{uploadPreferences:e}=await Promise.resolve().then(()=>require(`./impl-Ym65hOwf.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageStoredPreferences,a.ScopeName.ViewManagedConsentDatabaseAdminApi,a.ScopeName.ViewPreferenceStoreSettings]}),partition:{kind:`parsed`,parse:String,brief:`The partition key to download consent preferences to`},sombraAuth:t.o(),transcendUrl:t.s(),file:{kind:`parsed`,parse:String,brief:`Path to the CSV file to load preferences from`,optional:!0},directory:{kind:`parsed`,parse:String,brief:`Path to the directory of CSV files to load preferences from`,optional:!0},dryRun:{kind:`boolean`,brief:`Whether to do a dry run only - will write results to receiptFilepath without updating Transcend`,default:!1},skipExistingRecordCheck:{kind:`boolean`,brief:`Whether to skip the check for existing records. SHOULD ONLY BE USED FOR INITIAL UPLOAD`,default:!1},receiptFileDir:{kind:`parsed`,parse:String,brief:`Directory path where the response receipts should be saved`,default:`./receipts`},skipWorkflowTriggers:{kind:`boolean`,brief:`Whether to skip workflow triggers when uploading to preference store`,default:!1},forceTriggerWorkflows:{kind:`boolean`,brief:`Whether to force trigger workflows for existing consent records`,default:!1},skipConflictUpdates:{kind:`boolean`,brief:`Whether to skip uploading of any records where the preference store and file have a hard conflict`,default:!1},isSilent:{kind:`boolean`,brief:`Whether to skip sending emails in workflows`,default:!0},attributes:{kind:`parsed`,parse:String,brief:`Attributes to add to any DSR request if created. Comma-separated list of key:value pairs.`,default:`Tags:transcend-cli,Source:transcend-cli`},receiptFilepath:{kind:`parsed`,parse:String,brief:`Store resulting, continuing where left off`,default:`./preference-management-upload-receipts.json`},concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading in parallel`,default:`10`}}},docs:{brief:`Upload preference management data to your Preference Store`,fullDescription:`Upload preference management data to your Preference Store.
+ Step 3) Upload the modified CSV file back into the dashboard with this command.`}}),_=(0,r.buildCommand)({loader:async()=>{let{uploadPreferences:e}=await Promise.resolve().then(()=>require(`./impl-CSZwdpH-.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageStoredPreferences,a.ScopeName.ViewManagedConsentDatabaseAdminApi,a.ScopeName.ViewPreferenceStoreSettings]}),partition:{kind:`parsed`,parse:String,brief:`The partition key to download consent preferences to`},sombraAuth:t.o(),transcendUrl:t.s(),file:{kind:`parsed`,parse:String,brief:`Path to the CSV file to load preferences from`,optional:!0},directory:{kind:`parsed`,parse:String,brief:`Path to the directory of CSV files to load preferences from`,optional:!0},dryRun:{kind:`boolean`,brief:`Whether to do a dry run only - will write results to receiptFilepath without updating Transcend`,default:!1},skipExistingRecordCheck:{kind:`boolean`,brief:`Whether to skip the check for existing records. SHOULD ONLY BE USED FOR INITIAL UPLOAD`,default:!1},receiptFileDir:{kind:`parsed`,parse:String,brief:`Directory path where the response receipts should be saved`,default:`./receipts`},skipWorkflowTriggers:{kind:`boolean`,brief:`Whether to skip workflow triggers when uploading to preference store`,default:!1},forceTriggerWorkflows:{kind:`boolean`,brief:`Whether to force trigger workflows for existing consent records`,default:!1},skipConflictUpdates:{kind:`boolean`,brief:`Whether to skip uploading of any records where the preference store and file have a hard conflict`,default:!1},isSilent:{kind:`boolean`,brief:`Whether to skip sending emails in workflows`,default:!0},attributes:{kind:`parsed`,parse:String,brief:`Attributes to add to any DSR request if created. Comma-separated list of key:value pairs.`,default:`Tags:transcend-cli,Source:transcend-cli`},receiptFilepath:{kind:`parsed`,parse:String,brief:`Store resulting, continuing where left off`,default:`./preference-management-upload-receipts.json`},concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading in parallel`,default:`10`}}},docs:{brief:`Upload preference management data to your Preference Store`,fullDescription:`Upload preference management data to your Preference Store.
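A comparable sketch for the data flows workflow above, under the same assumptions (trackerStatus is again a placeholder; the command also defines a boolean --classifyService flag for auto-assigning services):

     transcend consent upload-data-flows-from-csv --auth=$TRANSCEND_API_KEY --trackerStatus=LIVE --file=./data-flows.csv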

  This command prompts you to map the shape of the CSV to the shape of the Transcend API. There is no requirement for the shape of the incoming CSV, as the script will handle the mapping process.

- The script will also produce a JSON cache file that allows for the mappings to be preserved between runs.`}}),v=(0,r.buildCommand)({loader:async()=>{let{generateAccessTokens:e}=await Promise.resolve().then(()=>require(`./impl-3XEvFo-7.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.GeneratePreferenceAccessTokens]}),file:{kind:`parsed`,parse:String,brief:`Path to the CSV file containing user identifiers to generate access tokens for`},subjectType:{kind:`parsed`,parse:String,brief:`Slug for the data subject that the user will be logged in as on the Privacy Center. e.g. "customer" or "employee"`},emailColumnName:{kind:`parsed`,parse:String,brief:`Name of the column in the CSV that contains user email addresses`,default:`email`},coreIdentifierColumnName:{kind:`parsed`,parse:String,optional:!0,brief:`Name of the column in the CSV that contains user core identifiers`},duration:{kind:`parsed`,parse:t.u,brief:'How long the access tokens should be valid. Accepts human-friendly values like "2 days", "10h", "90 minutes". A bare number is interpreted as seconds (e.g., "300" = 5 minutes). Powered by the `ms` library: https://github.com/vercel/ms',default:`1y`},transcendUrl:t.s()}},docs:{brief:`Generate access tokens`,fullDescription:`This command allows for the generation of access tokens for users specified in a CSV file.`}}),y=(0,r.buildCommand)({loader:async()=>{let{deletePreferenceRecords:e}=await Promise.resolve().then(()=>require(`./impl-CKASqgZA.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageStoredPreferences]}),sombraAuth:t.o(),partition:{kind:`parsed`,parse:String,brief:`Partition ID to used to delete preference records from`},timestamp:{kind:`parsed`,parse:t.l,brief:`The timestamp when the deletion operation is made. Used for logging purposes.`},file:{kind:`parsed`,parse:String,optional:!0,brief:`Path to the CSV file used to identify preference records to delete`},directory:{kind:`parsed`,parse:String,brief:`Path to the directory of CSV files to load preferences from`,optional:!0},transcendUrl:t.s(),maxItemsInChunk:{kind:`parsed`,parse:r.numberParser,brief:`When chunking, how many items to delete in a single chunk (higher = faster, but more load).`,default:`10`},maxConcurrency:{kind:`parsed`,parse:r.numberParser,brief:`Number of concurrent requests to make when deleting preference records. (Higher = faster, but more load and rate limiting errors).`,default:`10`},fileConcurrency:{kind:`parsed`,parse:r.numberParser,brief:`Number of files to process concurrently when deleting preference records from multiple files.`,default:`5`},receiptDirectory:{kind:`parsed`,parse:String,brief:`Directory to write receipts of failed deletions to.`,default:`./receipts`}}},docs:{brief:`Delete consent preference records in bulk from Preference Store`,fullDescription:`Uses POST /v1/preferences/{partition}/delete route on sombra to delete consent preference records in bulk from Preference Store based on a CSV file input. Refer to examples/cli-upload-preferences-example.csv for the expected format of the CSV file. The CSV expects the following headers: "name" and "value". The "name" field corresponds to the identifier name as defined on https://app.transcend.io/privacy-requests/identifiers The "value" refers to the actual identifier value for the user whose preference record is being deleted. 
For Large scale deletions, consider chunking the input CSV into smaller files and using the --directory option to process them concurrently.`}}),b=(0,r.buildRouteMap)({routes:{"build-xdi-sync-endpoint":u,"generate-access-tokens":v,"pull-consent-metrics":d,"pull-consent-preferences":f,"update-consent-manager":p,"upload-consent-preferences":m,"upload-cookies-from-csv":h,"upload-data-flows-from-csv":g,"upload-preferences":_,"delete-preference-records":y},docs:{brief:`Consent commands`}}),x=(0,r.buildCommand)({loader:async()=>{let{deriveDataSilosFromDataFlowsCrossInstance:e}=await Promise.resolve().then(()=>require(`./impl-cFJaGYul.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[]}),dataFlowsYmlFolder:{kind:`parsed`,parse:String,brief:`The folder that contains data flow yml files`},output:{kind:`parsed`,parse:String,brief:`The output transcend.yml file containing the data silo configurations`,default:`./transcend.yml`},ignoreYmls:{kind:`parsed`,parse:String,variadic:`,`,brief:`The set of yml files that should be skipped when uploading`,optional:!0},transcendUrl:t.s()}},docs:{brief:`Derive data silos from data flows cross instance`,fullDescription:`Given a folder of data flow transcend.yml configurations, convert those configurations to a single transcend.yml configurations of all related data silos.`}}),S=(0,r.buildCommand)({loader:async()=>{let{deriveDataSilosFromDataFlows:e}=await Promise.resolve().then(()=>require(`./impl-C1jpyDxO.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[]}),dataFlowsYmlFolder:{kind:`parsed`,parse:String,brief:`The folder that contains data flow yml files`},dataSilosYmlFolder:{kind:`parsed`,parse:String,brief:`The folder that contains data silo yml files`},ignoreYmls:{kind:`parsed`,parse:String,variadic:`,`,brief:`The set of yml files that should be skipped when uploading`,optional:!0},transcendUrl:t.s()}},docs:{brief:`Derive data silos from data flows`,fullDescription:`Given a folder of data flow transcend.yml configurations, convert those configurations to set of data silo transcend.yml configurations.`}}),C=(0,r.buildCommand)({loader:async()=>{let{discoverSilos:e}=await Promise.resolve().then(()=>require(`./impl-Cq9g8eCi.cjs`));return e},parameters:{flags:{scanPath:{kind:`parsed`,parse:String,brief:`File path in the project to scan`},dataSiloId:{kind:`parsed`,parse:t.d,brief:`The UUID of the corresponding data silo`},auth:t.i({scopes:[a.ScopeName.ManageAssignedDataInventory],requiresSiloScope:!0}),fileGlobs:{kind:`parsed`,parse:String,brief:`You can pass a glob syntax pattern(s) to specify additional file paths to scan. Comma-separated list of globs.`,default:``},ignoreDirs:{kind:`parsed`,parse:String,brief:`Comma-separated list of directories to ignore.`,default:``},transcendUrl:t.s()}},docs:{brief:`Scan dependency management files to discover new data silos.`,fullDescription:`We support scanning for new data silos in JavaScript, Python, Gradle, and CocoaPods projects.
+ The script will also produce a JSON cache file that allows for the mappings to be preserved between runs.`}}),v=(0,r.buildCommand)({loader:async()=>{let{generateAccessTokens:e}=await Promise.resolve().then(()=>require(`./impl-J5fV8gbh.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.GeneratePreferenceAccessTokens]}),file:{kind:`parsed`,parse:String,brief:`Path to the CSV file containing user identifiers to generate access tokens for`},subjectType:{kind:`parsed`,parse:String,brief:`Slug for the data subject that the user will be logged in as on the Privacy Center. e.g. "customer" or "employee"`},emailColumnName:{kind:`parsed`,parse:String,brief:`Name of the column in the CSV that contains user email addresses`,default:`email`},coreIdentifierColumnName:{kind:`parsed`,parse:String,optional:!0,brief:`Name of the column in the CSV that contains user core identifiers`},duration:{kind:`parsed`,parse:t.u,brief:'How long the access tokens should be valid. Accepts human-friendly values like "2 days", "10h", "90 minutes". A bare number is interpreted as seconds (e.g., "300" = 5 minutes). Powered by the `ms` library: https://github.com/vercel/ms',default:`1y`},transcendUrl:t.s()}},docs:{brief:`Generate access tokens`,fullDescription:`This command allows for the generation of access tokens for users specified in a CSV file.`}}),y=(0,r.buildCommand)({loader:async()=>{let{deletePreferenceRecords:e}=await Promise.resolve().then(()=>require(`./impl-BMxxKrNz.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageStoredPreferences]}),sombraAuth:t.o(),partition:{kind:`parsed`,parse:String,brief:`Partition ID to used to delete preference records from`},timestamp:{kind:`parsed`,parse:t.l,brief:`The timestamp when the deletion operation is made. Used for logging purposes.`},file:{kind:`parsed`,parse:String,optional:!0,brief:`Path to the CSV file used to identify preference records to delete`},directory:{kind:`parsed`,parse:String,brief:`Path to the directory of CSV files to load preferences from`,optional:!0},transcendUrl:t.s(),maxItemsInChunk:{kind:`parsed`,parse:r.numberParser,brief:`When chunking, how many items to delete in a single chunk (higher = faster, but more load).`,default:`10`},maxConcurrency:{kind:`parsed`,parse:r.numberParser,brief:`Number of concurrent requests to make when deleting preference records. (Higher = faster, but more load and rate limiting errors).`,default:`10`},fileConcurrency:{kind:`parsed`,parse:r.numberParser,brief:`Number of files to process concurrently when deleting preference records from multiple files.`,default:`5`},receiptDirectory:{kind:`parsed`,parse:String,brief:`Directory to write receipts of failed deletions to.`,default:`./receipts`}}},docs:{brief:`Delete consent preference records in bulk from Preference Store`,fullDescription:`Uses POST /v1/preferences/{partition}/delete route on sombra to delete consent preference records in bulk from Preference Store based on a CSV file input. Refer to examples/cli-upload-preferences-example.csv for the expected format of the CSV file. The CSV expects the following headers: "name" and "value". The "name" field corresponds to the identifier name as defined on https://app.transcend.io/privacy-requests/identifiers The "value" refers to the actual identifier value for the user whose preference record is being deleted. 
For Large scale deletions, consider chunking the input CSV into smaller files and using the --directory option to process them concurrently.`}}),b=(0,r.buildRouteMap)({routes:{"build-xdi-sync-endpoint":u,"generate-access-tokens":v,"pull-consent-metrics":d,"pull-consent-preferences":f,"update-consent-manager":p,"upload-consent-preferences":m,"upload-cookies-from-csv":h,"upload-data-flows-from-csv":g,"upload-preferences":_,"delete-preference-records":y},docs:{brief:`Consent commands`}}),x=(0,r.buildCommand)({loader:async()=>{let{deriveDataSilosFromDataFlowsCrossInstance:e}=await Promise.resolve().then(()=>require(`./impl-BouHRicT.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[]}),dataFlowsYmlFolder:{kind:`parsed`,parse:String,brief:`The folder that contains data flow yml files`},output:{kind:`parsed`,parse:String,brief:`The output transcend.yml file containing the data silo configurations`,default:`./transcend.yml`},ignoreYmls:{kind:`parsed`,parse:String,variadic:`,`,brief:`The set of yml files that should be skipped when uploading`,optional:!0},transcendUrl:t.s()}},docs:{brief:`Derive data silos from data flows cross instance`,fullDescription:`Given a folder of data flow transcend.yml configurations, convert those configurations to a single transcend.yml configurations of all related data silos.`}}),S=(0,r.buildCommand)({loader:async()=>{let{deriveDataSilosFromDataFlows:e}=await Promise.resolve().then(()=>require(`./impl-CCOEjRnr.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[]}),dataFlowsYmlFolder:{kind:`parsed`,parse:String,brief:`The folder that contains data flow yml files`},dataSilosYmlFolder:{kind:`parsed`,parse:String,brief:`The folder that contains data silo yml files`},ignoreYmls:{kind:`parsed`,parse:String,variadic:`,`,brief:`The set of yml files that should be skipped when uploading`,optional:!0},transcendUrl:t.s()}},docs:{brief:`Derive data silos from data flows`,fullDescription:`Given a folder of data flow transcend.yml configurations, convert those configurations to set of data silo transcend.yml configurations.`}}),C=(0,r.buildCommand)({loader:async()=>{let{discoverSilos:e}=await Promise.resolve().then(()=>require(`./impl-FlZPR0yd.cjs`));return e},parameters:{flags:{scanPath:{kind:`parsed`,parse:String,brief:`File path in the project to scan`},dataSiloId:{kind:`parsed`,parse:t.d,brief:`The UUID of the corresponding data silo`},auth:t.i({scopes:[a.ScopeName.ManageAssignedDataInventory],requiresSiloScope:!0}),fileGlobs:{kind:`parsed`,parse:String,brief:`You can pass a glob syntax pattern(s) to specify additional file paths to scan. Comma-separated list of globs.`,default:``},ignoreDirs:{kind:`parsed`,parse:String,brief:`Comma-separated list of directories to ignore.`,default:``},transcendUrl:t.s()}},docs:{brief:`Scan dependency management files to discover new data silos.`,fullDescription:`We support scanning for new data silos in JavaScript, Python, Gradle, and CocoaPods projects.
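A sketch of the chunked deletion flow that delete-preference-records describes above; angle-bracketed values are placeholders and the flag syntax is assumed:

     transcend consent delete-preference-records --auth=$TRANSCEND_API_KEY --partition=<partition-id> --directory=./chunks --timestamp=<ISO-8601-date>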
 
- To get started, add a data silo for the corresponding project type with the "silo discovery" plugin enabled. For example, if you want to scan a JavaScript project, add a package.json data silo. Then, specify the data silo ID in the "--dataSiloId" parameter.`}}),w=(0,r.buildCommand)({loader:async()=>{let{pullDatapoints:e}=await Promise.resolve().then(()=>require(`./impl-BABdXCjW.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewDataInventory]}),file:{kind:`parsed`,parse:String,brief:`The file to save datapoints to`,default:`./datapoints.csv`},transcendUrl:t.s(),dataSiloIds:{kind:`parsed`,parse:String,variadic:`,`,brief:`List of data silo IDs to filter by`,optional:!0},includeAttributes:{kind:`boolean`,brief:`Whether to include attributes in the output`,default:!1},includeGuessedCategories:{kind:`boolean`,brief:`Whether to include guessed categories in the output`,default:!1},parentCategories:{kind:`enum`,values:Object.values(a.DataCategoryType),brief:`List of parent categories to filter by`,variadic:`,`,optional:!0},subCategories:{kind:`parsed`,parse:String,brief:`List of subcategories to filter by`,variadic:`,`,optional:!0}}},docs:{brief:`Export the datapoints from your Data Inventory into a CSV.`}}),T=(0,r.buildCommand)({loader:async()=>{let{pullUnstructuredDiscoveryFiles:e}=await Promise.resolve().then(()=>require(`./impl-1ibTRBn4.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewDataInventory]}),file:{kind:`parsed`,parse:String,brief:`The file to save datapoints to`,default:`./unstructured-discovery-files.csv`},transcendUrl:t.s(),dataSiloIds:{kind:`parsed`,parse:String,brief:`List of data silo IDs to filter by`,variadic:`,`,optional:!0},subCategories:{kind:`parsed`,parse:String,brief:`List of data categories to filter by`,variadic:`,`,optional:!0},status:{kind:`enum`,values:Object.values(a.UnstructuredSubDataPointRecommendationStatus),brief:`List of classification statuses to filter by`,variadic:`,`,optional:!0},includeEncryptedSnippets:{kind:`boolean`,brief:`Whether to include encrypted snippets of the entries classified`,default:!1}}},docs:{brief:`Pull unstructured discovery files`,fullDescription:`This command allows for pulling Unstructured Discovery into a CSV.`}}),E=(0,r.buildCommand)({loader:async()=>{let{push:e}=await Promise.resolve().then(()=>require(`./impl-DuVC84LE.cjs`));return e},parameters:{flags:{auth:t.i({scopes:`Varies`}),file:{kind:`parsed`,parse:String,brief:`Path to the YAML file to push from`,default:`./transcend.yml`},transcendUrl:t.s(),pageSize:{kind:`parsed`,parse:r.numberParser,brief:`The page size to use when paginating over the API`,default:`50`},variables:{kind:`parsed`,parse:String,brief:`The variables to template into the YAML file when pushing configuration. 
Comma-separated list of key:value pairs.`,default:``},publishToPrivacyCenter:{kind:`boolean`,brief:`When true, publish the configuration to the Privacy Center`,default:!1},classifyService:{kind:`boolean`,brief:`When true, automatically assign the service for a data flow based on the domain that is specified`,default:!1},deleteExtraAttributeValues:{kind:`boolean`,brief:`When true and syncing attributes, delete any extra attributes instead of just upserting`,default:!1}}},docs:{brief:`Push metadata from transcend.yml to Transcend`,fullDescription:`Given a transcend.yml file, sync the contents up to your Transcend instance.`}}),D=(0,r.buildCommand)({loader:async()=>{let{scanPackages:e}=await Promise.resolve().then(()=>require(`./impl--0AUtPOO.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageCodeScanning]}),scanPath:{kind:`parsed`,parse:String,brief:`File path in the project to scan`,default:`./`},ignoreDirs:{kind:`parsed`,parse:String,variadic:`,`,brief:`List of directories to ignore in scan`,optional:!0},repositoryName:{kind:`parsed`,parse:String,brief:`Name of the git repository that the package should be tied to`,optional:!0},transcendUrl:t.s()}},docs:{brief:`Scan dependency management files to inventory code dependencies.`,fullDescription:`Transcend scans packages and dependencies for the following frameworks:
+ To get started, add a data silo for the corresponding project type with the "silo discovery" plugin enabled. For example, if you want to scan a JavaScript project, add a package.json data silo. Then, specify the data silo ID in the "--dataSiloId" parameter.`}}),w=(0,r.buildCommand)({loader:async()=>{let{pullDatapoints:e}=await Promise.resolve().then(()=>require(`./impl-BRuqPhJU.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewDataInventory]}),file:{kind:`parsed`,parse:String,brief:`The file to save datapoints to`,default:`./datapoints.csv`},transcendUrl:t.s(),dataSiloIds:{kind:`parsed`,parse:String,variadic:`,`,brief:`List of data silo IDs to filter by`,optional:!0},includeAttributes:{kind:`boolean`,brief:`Whether to include attributes in the output`,default:!1},includeGuessedCategories:{kind:`boolean`,brief:`Whether to include guessed categories in the output`,default:!1},parentCategories:{kind:`enum`,values:Object.values(a.DataCategoryType),brief:`List of parent categories to filter by`,variadic:`,`,optional:!0},subCategories:{kind:`parsed`,parse:String,brief:`List of subcategories to filter by`,variadic:`,`,optional:!0}}},docs:{brief:`Export the datapoints from your Data Inventory into a CSV.`}}),T=(0,r.buildCommand)({loader:async()=>{let{pullUnstructuredDiscoveryFiles:e}=await Promise.resolve().then(()=>require(`./impl-4pmSDQvA.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewDataInventory]}),file:{kind:`parsed`,parse:String,brief:`The file to save datapoints to`,default:`./unstructured-discovery-files.csv`},transcendUrl:t.s(),dataSiloIds:{kind:`parsed`,parse:String,brief:`List of data silo IDs to filter by`,variadic:`,`,optional:!0},subCategories:{kind:`parsed`,parse:String,brief:`List of data categories to filter by`,variadic:`,`,optional:!0},status:{kind:`enum`,values:Object.values(a.UnstructuredSubDataPointRecommendationStatus),brief:`List of classification statuses to filter by`,variadic:`,`,optional:!0},includeEncryptedSnippets:{kind:`boolean`,brief:`Whether to include encrypted snippets of the entries classified`,default:!1}}},docs:{brief:`Pull unstructured discovery files`,fullDescription:`This command allows for pulling Unstructured Discovery into a CSV.`}}),E=(0,r.buildCommand)({loader:async()=>{let{push:e}=await Promise.resolve().then(()=>require(`./impl-DpdGWdrr.cjs`));return e},parameters:{flags:{auth:t.i({scopes:`Varies`}),file:{kind:`parsed`,parse:String,brief:`Path to the YAML file to push from`,default:`./transcend.yml`},transcendUrl:t.s(),pageSize:{kind:`parsed`,parse:r.numberParser,brief:`The page size to use when paginating over the API`,default:`50`},variables:{kind:`parsed`,parse:String,brief:`The variables to template into the YAML file when pushing configuration. 
Comma-separated list of key:value pairs.`,default:``},publishToPrivacyCenter:{kind:`boolean`,brief:`When true, publish the configuration to the Privacy Center`,default:!1},classifyService:{kind:`boolean`,brief:`When true, automatically assign the service for a data flow based on the domain that is specified`,default:!1},deleteExtraAttributeValues:{kind:`boolean`,brief:`When true and syncing attributes, delete any extra attributes instead of just upserting`,default:!1}}},docs:{brief:`Push metadata from transcend.yml to Transcend`,fullDescription:`Given a transcend.yml file, sync the contents up to your Transcend instance.`}}),D=(0,r.buildCommand)({loader:async()=>{let{scanPackages:e}=await Promise.resolve().then(()=>require(`./impl-DU-WTXTY.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageCodeScanning]}),scanPath:{kind:`parsed`,parse:String,brief:`File path in the project to scan`,default:`./`},ignoreDirs:{kind:`parsed`,parse:String,variadic:`,`,brief:`List of directories to ignore in scan`,optional:!0},repositoryName:{kind:`parsed`,parse:String,brief:`Name of the git repository that the package should be tied to`,optional:!0},transcendUrl:t.s()}},docs:{brief:`Scan dependency management files to inventory code dependencies.`,fullDescription:`Transcend scans packages and dependencies for the following frameworks:

  - package.json
  - requirements.txt & setup.py
@@ -54,14 +54,14 @@ This command will scan the folder you point at to look for any of these files. O

  5. Run:

- ${O([`inventory`,`push`],{auth:`$TRANSCEND_API_KEY`,file:`./transcend.yml`,classifyService:!0},{argsIndent:5})}`}}),M=(0,r.buildCommand)({loader:async()=>{let{consentManagersToBusinessEntities:e}=await Promise.resolve().then(()=>require(`./impl-DLeBB5Zk.cjs`));return e},parameters:{flags:{consentManagerYmlFolder:{kind:`parsed`,parse:String,brief:`Path to the folder of Consent Manager transcend.yml files to combine`},output:{kind:`parsed`,parse:String,brief:`Path to the output transcend.yml with business entity configuration`,default:`./combined-business-entities.yml`}}},docs:{brief:`Convert consent managers to business entities`,fullDescription:`This command allows for converting a folder or Consent Manager transcend.yml files into a single transcend.yml file where each consent manager configuration is a Business Entity in the data inventory.`}}),N=(0,r.buildRouteMap)({routes:{pull:t.r,push:E,"scan-packages":D,"discover-silos":C,"pull-datapoints":w,"pull-unstructured-discovery-files":T,"derive-data-silos-from-data-flows":S,"derive-data-silos-from-data-flows-cross-instance":x,"consent-manager-service-json-to-yml":j,"consent-managers-to-business-entities":M},docs:{brief:`Inventory commands`}}),P=(0,r.buildCommand)({loader:async()=>{let{syncOt:e}=await Promise.resolve().then(()=>require(`./impl-B4f6W0BP.cjs`));return e},parameters:{flags:{hostname:{kind:`parsed`,parse:String,brief:`The domain of the OneTrust environment from which to pull the resource`,optional:!0},oneTrustAuth:{kind:`parsed`,parse:String,brief:`The OAuth access token with the scopes necessary to access the OneTrust Public APIs`,optional:!0},source:{kind:`enum`,values:Object.values(e.r),brief:`Whether to read the assessments from OneTrust or from a file`,default:e.r.OneTrust},transcendAuth:{...t.i({scopes:[a.ScopeName.ManageAssessments]}),optional:!0},transcendUrl:t.s(),file:{kind:`parsed`,parse:String,brief:`Path to the file to pull the resource into. Must be a json file!`,optional:!0},resource:{kind:`enum`,values:Object.values(e.n),brief:`The resource to pull from OneTrust. For now, only assessments is supported`,default:e.n.Assessments},dryRun:{kind:`boolean`,brief:`Whether to export the resource to a file rather than sync to Transcend`,default:!1},debug:{kind:`boolean`,brief:`Whether to print detailed logs in case of error`,default:!1}}},docs:{brief:`Sync OneTrust data`,fullDescription:`Pulls resources from a OneTrust and syncs them to a Transcend instance. For now, it only supports retrieving OneTrust Assessments.
+ ${O([`inventory`,`push`],{auth:`$TRANSCEND_API_KEY`,file:`./transcend.yml`,classifyService:!0},{argsIndent:5})}`}}),M=(0,r.buildCommand)({loader:async()=>{let{consentManagersToBusinessEntities:e}=await Promise.resolve().then(()=>require(`./impl-BWiNjS6v.cjs`));return e},parameters:{flags:{consentManagerYmlFolder:{kind:`parsed`,parse:String,brief:`Path to the folder of Consent Manager transcend.yml files to combine`},output:{kind:`parsed`,parse:String,brief:`Path to the output transcend.yml with business entity configuration`,default:`./combined-business-entities.yml`}}},docs:{brief:`Convert consent managers to business entities`,fullDescription:`This command allows for converting a folder or Consent Manager transcend.yml files into a single transcend.yml file where each consent manager configuration is a Business Entity in the data inventory.`}}),N=(0,r.buildRouteMap)({routes:{pull:t.r,push:E,"scan-packages":D,"discover-silos":C,"pull-datapoints":w,"pull-unstructured-discovery-files":T,"derive-data-silos-from-data-flows":S,"derive-data-silos-from-data-flows-cross-instance":x,"consent-manager-service-json-to-yml":j,"consent-managers-to-business-entities":M},docs:{brief:`Inventory commands`}}),P=(0,r.buildCommand)({loader:async()=>{let{syncOt:e}=await Promise.resolve().then(()=>require(`./impl-8bci0gd2.cjs`));return e},parameters:{flags:{hostname:{kind:`parsed`,parse:String,brief:`The domain of the OneTrust environment from which to pull the resource`,optional:!0},oneTrustAuth:{kind:`parsed`,parse:String,brief:`The OAuth access token with the scopes necessary to access the OneTrust Public APIs`,optional:!0},source:{kind:`enum`,values:Object.values(e.r),brief:`Whether to read the assessments from OneTrust or from a file`,default:e.r.OneTrust},transcendAuth:{...t.i({scopes:[a.ScopeName.ManageAssessments]}),optional:!0},transcendUrl:t.s(),file:{kind:`parsed`,parse:String,brief:`Path to the file to pull the resource into. Must be a json file!`,optional:!0},resource:{kind:`enum`,values:Object.values(e.n),brief:`The resource to pull from OneTrust. For now, only assessments is supported`,default:e.n.Assessments},dryRun:{kind:`boolean`,brief:`Whether to export the resource to a file rather than sync to Transcend`,default:!1},debug:{kind:`boolean`,brief:`Whether to print detailed logs in case of error`,default:!1}}},docs:{brief:`Sync OneTrust data`,fullDescription:`Pulls resources from a OneTrust and syncs them to a Transcend instance. For now, it only supports retrieving OneTrust Assessments.
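For the consent-managers-to-business-entities converter defined above, a minimal sketch (the input folder path is a placeholder; the output path is the default from the flag definition):

     transcend inventory consent-managers-to-business-entities --consentManagerYmlFolder=./consent-managers --output=./combined-business-entities.yml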

  This command can be helpful if you are looking to:
  - Pull resources from your OneTrust account.
  - Migrate your resources from your OneTrust account to Transcend.

  OneTrust authentication requires an OAuth Token with scope for accessing the assessment endpoints.
- If syncing the resources to Transcend, you will also need to generate an API key on the Transcend Admin Dashboard.`}}),ee=(0,r.buildRouteMap)({routes:{"sync-ot":P},docs:{brief:`Migration commands`}}),F=(0,r.buildCommand)({loader:async()=>{let{approve:e}=await Promise.resolve().then(()=>require(`./impl-9qObj_On.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.RequestApproval,a.ScopeName.ViewRequests,a.ScopeName.ManageRequestCompilation]}),actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to approve`},origins:{kind:`enum`,values:Object.values(a.RequestOrigin),variadic:`,`,brief:`The request origins to approve`,optional:!0},silentModeBefore:{kind:`parsed`,parse:t.l,brief:`Any requests made before this date should be marked as silent mode`,optional:!0},createdAtBefore:{kind:`parsed`,parse:t.l,brief:`Approve requests that were submitted before this time`,optional:!0},createdAtAfter:{kind:`parsed`,parse:t.l,brief:`Approve requests that were submitted after this time`,optional:!0},transcendUrl:t.s(),concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`50`}}},docs:{brief:`Bulk approve a set of privacy requests`,fullDescription:`Bulk approve a set of privacy requests from the DSR Automation -> Incoming Requests tab.`}}),I=(0,r.buildCommand)({loader:async()=>{let{cancel:e}=await Promise.resolve().then(()=>require(`./impl-Bd239j4R.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewRequests,a.ScopeName.RequestApproval]}),actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to cancel`},statuses:{kind:`enum`,values:Object.values(a.RequestStatus),variadic:`,`,brief:`The request statuses to cancel. Comma-separated list.`,optional:!0},requestIds:{kind:`parsed`,parse:String,variadic:`,`,brief:`Specify the specific request IDs to cancel`,optional:!0},silentModeBefore:{kind:`parsed`,parse:t.l,brief:`Any requests made before this date should be marked as silent mode for canceling to skip email sending`,optional:!0},createdAtBefore:{kind:`parsed`,parse:t.l,brief:`Cancel requests that were submitted before this time`,optional:!0},createdAtAfter:{kind:`parsed`,parse:t.l,brief:`Cancel requests that were submitted after this time`,optional:!0},cancellationTitle:{kind:`parsed`,parse:String,brief:`The title of the email template that should be sent to the requests upon cancelation`,default:`Request Canceled`},transcendUrl:t.s(),concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`50`}}},docs:{brief:`Bulk cancel a set of privacy requests`,fullDescription:`Bulk cancel a set of privacy requests from the DSR Automation -> Incoming Requests tab.`}}),L=(0,r.buildCommand)({loader:async()=>{let{markIdentifiersCompleted:e}=await Promise.resolve().then(()=>require(`./impl-Dsj6D_fG.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[],requiresSiloScope:!0}),dataSiloId:{kind:`parsed`,parse:t.d,brief:`The ID of the data silo to pull in`},file:{kind:`parsed`,parse:String,brief:`Path to the CSV file where identifiers will be written to`,default:`./cron-identifiers.csv`},transcendUrl:t.s(),sombraAuth:t.o()}},docs:{brief:`Mark identifiers as completed after processing.`,fullDescription:`This command takes the output of "${O([`request`,`cron`,`pull-identifiers`],{})}" and notifies Transcend that all of the requests in the CSV have been processed.
+ If syncing the resources to Transcend, you will also need to generate an API key on the Transcend Admin Dashboard.`}}),ee=(0,r.buildRouteMap)({routes:{"sync-ot":P},docs:{brief:`Migration commands`}}),F=(0,r.buildCommand)({loader:async()=>{let{approve:e}=await Promise.resolve().then(()=>require(`./impl-NLEQuKnT.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.RequestApproval,a.ScopeName.ViewRequests,a.ScopeName.ManageRequestCompilation]}),actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to approve`},origins:{kind:`enum`,values:Object.values(a.RequestOrigin),variadic:`,`,brief:`The request origins to approve`,optional:!0},silentModeBefore:{kind:`parsed`,parse:t.l,brief:`Any requests made before this date should be marked as silent mode`,optional:!0},createdAtBefore:{kind:`parsed`,parse:t.l,brief:`Approve requests that were submitted before this time`,optional:!0},createdAtAfter:{kind:`parsed`,parse:t.l,brief:`Approve requests that were submitted after this time`,optional:!0},transcendUrl:t.s(),concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`50`}}},docs:{brief:`Bulk approve a set of privacy requests`,fullDescription:`Bulk approve a set of privacy requests from the DSR Automation -> Incoming Requests tab.`}}),I=(0,r.buildCommand)({loader:async()=>{let{cancel:e}=await Promise.resolve().then(()=>require(`./impl-ChNLuyNq.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewRequests,a.ScopeName.RequestApproval]}),actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to cancel`},statuses:{kind:`enum`,values:Object.values(a.RequestStatus),variadic:`,`,brief:`The request statuses to cancel. Comma-separated list.`,optional:!0},requestIds:{kind:`parsed`,parse:String,variadic:`,`,brief:`Specify the specific request IDs to cancel`,optional:!0},silentModeBefore:{kind:`parsed`,parse:t.l,brief:`Any requests made before this date should be marked as silent mode for canceling to skip email sending`,optional:!0},createdAtBefore:{kind:`parsed`,parse:t.l,brief:`Cancel requests that were submitted before this time`,optional:!0},createdAtAfter:{kind:`parsed`,parse:t.l,brief:`Cancel requests that were submitted after this time`,optional:!0},cancellationTitle:{kind:`parsed`,parse:String,brief:`The title of the email template that should be sent to the requests upon cancelation`,default:`Request Canceled`},transcendUrl:t.s(),concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`50`}}},docs:{brief:`Bulk cancel a set of privacy requests`,fullDescription:`Bulk cancel a set of privacy requests from the DSR Automation -> Incoming Requests tab.`}}),L=(0,r.buildCommand)({loader:async()=>{let{markIdentifiersCompleted:e}=await Promise.resolve().then(()=>require(`./impl-8zottXEQ.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[],requiresSiloScope:!0}),dataSiloId:{kind:`parsed`,parse:t.d,brief:`The ID of the data silo to pull in`},file:{kind:`parsed`,parse:String,brief:`Path to the CSV file where identifiers will be written to`,default:`./cron-identifiers.csv`},transcendUrl:t.s(),sombraAuth:t.o()}},docs:{brief:`Mark identifiers as completed after processing.`,fullDescription:`This command takes the output of "${O([`request`,`cron`,`pull-identifiers`],{})}" and notifies Transcend that all of the requests in the CSV have been processed.
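A hedged sketch of the bulk-approve command defined above, assuming it is mounted under the request namespace like the cron commands (the action and date values are placeholders):

     transcend request approve --auth=$TRANSCEND_API_KEY --actions=ERASURE --createdAtBefore=<ISO-8601-date>
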
  This is used in the workflow like:

  1. Pull identifiers to CSV:
@@ -74,18 +74,18 @@ This is used in the workflow like:

  ${O([`request`,`cron`,`mark-identifiers-completed`],{auth:`$TRANSCEND_API_KEY`,dataSiloId:`70810f2e-cf90-43f6-9776-901a5950599f`,file:`./outstanding-requests.csv`},{argsIndent:5})}

- Read more at https://docs.transcend.io/docs/integrations/cron-job-integration.`}}),R=(0,r.buildCommand)({loader:async()=>{let{pullIdentifiers:e}=await Promise.resolve().then(()=>require(`./impl-Bm8T2x0z.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[],requiresSiloScope:!0}),dataSiloId:{kind:`parsed`,parse:t.d,brief:`The ID of the data silo to pull in`},actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to restart`},file:{kind:`parsed`,parse:String,brief:`Path to the CSV file where identifiers will be written to`,default:`./cron-identifiers.csv`},transcendUrl:t.s(),sombraAuth:t.o(),pageLimit:{kind:`parsed`,parse:r.numberParser,brief:`The page limit to use when pulling in pages of identifiers`,default:`100`},skipRequestCount:{kind:`boolean`,brief:`Whether to skip the count of all outstanding requests. This is required to render the progress bar, but can take a long time to run if you have a large number of outstanding requests to process. In that case, we recommend setting skipRequestCount=true so that you can still proceed with fetching the identifiers`,default:!1},chunkSize:{kind:`parsed`,parse:r.numberParser,brief:`Maximum number of rows per CSV file. For large datasets, the output will be automatically split into multiple files to avoid file system size limits. Each file will contain at most this many rows`,default:`10000`}}},docs:{brief:`Pull identifiers of outstanding requests for a data silo to a CSV.`,fullDescription:`If you are using the cron job integration, you can run this command to pull the outstanding identifiers for the data silo to a CSV.
+ Read more at https://docs.transcend.io/docs/integrations/cron-job-integration.`}}),R=(0,r.buildCommand)({loader:async()=>{let{pullIdentifiers:e}=await Promise.resolve().then(()=>require(`./impl-_dGu54cO.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[],requiresSiloScope:!0}),dataSiloId:{kind:`parsed`,parse:t.d,brief:`The ID of the data silo to pull in`},actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to restart`},file:{kind:`parsed`,parse:String,brief:`Path to the CSV file where identifiers will be written to`,default:`./cron-identifiers.csv`},transcendUrl:t.s(),sombraAuth:t.o(),pageLimit:{kind:`parsed`,parse:r.numberParser,brief:`The page limit to use when pulling in pages of identifiers`,default:`100`},skipRequestCount:{kind:`boolean`,brief:`Whether to skip the count of all outstanding requests. This is required to render the progress bar, but can take a long time to run if you have a large number of outstanding requests to process. In that case, we recommend setting skipRequestCount=true so that you can still proceed with fetching the identifiers`,default:!1},chunkSize:{kind:`parsed`,parse:r.numberParser,brief:`Maximum number of rows per CSV file. For large datasets, the output will be automatically split into multiple files to avoid file system size limits. Each file will contain at most this many rows`,default:`10000`}}},docs:{brief:`Pull identifiers of outstanding requests for a data silo to a CSV.`,fullDescription:`If you are using the cron job integration, you can run this command to pull the outstanding identifiers for the data silo to a CSV.
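An illustrative pull of outstanding identifiers, mirroring the mark-identifiers-completed example above (the data silo UUID and action are placeholders):

     transcend request cron pull-identifiers --auth=$TRANSCEND_API_KEY --dataSiloId=<data-silo-uuid> --actions=ERASURE --file=./cron-identifiers.csv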

  For large datasets, the output will be automatically split into multiple CSV files to avoid file system size limits. Use the --chunkSize parameter to control the maximum number of rows per file.

- Read more at https://docs.transcend.io/docs/integrations/cron-job-integration.`}}),z=(0,r.buildCommand)({loader:async()=>{let{pullProfiles:e}=await Promise.resolve().then(()=>require(`./impl-CXnE8Ev7.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[],requiresSiloScope:!0}),cronDataSiloId:{kind:`parsed`,parse:t.d,brief:`The ID of the cron data silo to pull in`},targetDataSiloId:{kind:`parsed`,parse:t.d,brief:`The ID of the target data silo to pull in`},actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to restart`},file:{kind:`parsed`,parse:String,brief:`Path to the CSV file where identifiers will be written to`,default:`./cron-identifiers.csv`},fileTarget:{kind:`parsed`,parse:String,brief:`Path to the CSV file where identifiers will be written to`,default:`./cron-identifiers-target.csv`},transcendUrl:t.s(),sombraAuth:t.o(),pageLimit:{kind:`parsed`,parse:r.numberParser,brief:`The page limit to use when pulling in pages of identifiers`,default:`100`},skipRequestCount:{kind:`boolean`,brief:`Whether to skip the count of all outstanding requests. This is required to render the progress bar, but can take a long time to run if you have a large number of outstanding requests to process. In that case, we recommend setting skipRequestCount=true so that you can still proceed with fetching the identifiers`,default:!1},chunkSize:{kind:`parsed`,parse:r.numberParser,brief:`Maximum number of rows per CSV file. For large datasets, the output will be automatically split into multiple files to avoid file system size limits. Each file will contain at most this many rows`,default:`10000`}}},docs:{brief:`Pull profiles of outstanding requests for a data silo to a CSV.`,fullDescription:`If you are using the cron job integration, you can run this command to pull the outstanding profiles for the data silo to a CSV.
+ Read more at https://docs.transcend.io/docs/integrations/cron-job-integration.`}}),z=(0,r.buildCommand)({loader:async()=>{let{pullProfiles:e}=await Promise.resolve().then(()=>require(`./impl-DhdY6lbj.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[],requiresSiloScope:!0}),cronDataSiloId:{kind:`parsed`,parse:t.d,brief:`The ID of the cron data silo to pull in`},targetDataSiloId:{kind:`parsed`,parse:t.d,brief:`The ID of the target data silo to pull in`},actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to restart`},file:{kind:`parsed`,parse:String,brief:`Path to the CSV file where identifiers will be written to`,default:`./cron-identifiers.csv`},fileTarget:{kind:`parsed`,parse:String,brief:`Path to the CSV file where identifiers will be written to`,default:`./cron-identifiers-target.csv`},transcendUrl:t.s(),sombraAuth:t.o(),pageLimit:{kind:`parsed`,parse:r.numberParser,brief:`The page limit to use when pulling in pages of identifiers`,default:`100`},skipRequestCount:{kind:`boolean`,brief:`Whether to skip the count of all outstanding requests. This is required to render the progress bar, but can take a long time to run if you have a large number of outstanding requests to process. In that case, we recommend setting skipRequestCount=true so that you can still proceed with fetching the identifiers`,default:!1},chunkSize:{kind:`parsed`,parse:r.numberParser,brief:`Maximum number of rows per CSV file. For large datasets, the output will be automatically split into multiple files to avoid file system size limits. Each file will contain at most this many rows`,default:`10000`}}},docs:{brief:`Pull profiles of outstanding requests for a data silo to a CSV.`,fullDescription:`If you are using the cron job integration, you can run this command to pull the outstanding profiles for the data silo to a CSV.

  For large datasets, the output will be automatically split into multiple CSV files to avoid file system size limits. Use the --chunkSize parameter to control the maximum number of rows per file.

- Read more at https://docs.transcend.io/docs/integrations/cron-job-integration.`}}),B=(0,r.buildRouteMap)({routes:{"pull-identifiers":R,"pull-profiles":z,"mark-identifiers-completed":L},docs:{brief:`Cron commands`,hideRoute:{"pull-profiles":!0}}}),V=(0,r.buildCommand)({loader:async()=>{let{downloadFiles:e}=await Promise.resolve().then(()=>require(`./impl-JkokB3Un.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewRequestCompilation,a.ScopeName.ViewRequests,a.ScopeName.RequestApproval]}),sombraAuth:t.o(),concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when downloading requests in parallel`,default:`10`},requestIds:{kind:`parsed`,parse:String,variadic:`,`,brief:`Specify the specific request IDs to download`,optional:!0},statuses:{kind:`enum`,values:Object.values(a.RequestStatus),variadic:`,`,brief:`The request statuses to download. Comma-separated list. Defaults to APPROVING,DOWNLOADABLE.`,optional:!0},folderPath:{kind:`parsed`,parse:String,brief:`The folder to download files to`,default:`./dsr-files`},createdAtBefore:{kind:`parsed`,parse:t.l,brief:`Download requests that were submitted before this time`,optional:!0},createdAtAfter:{kind:`parsed`,parse:t.l,brief:`Download requests that were submitted after this time`,optional:!0},approveAfterDownload:{kind:`boolean`,brief:`If the request is in status=APPROVING, approve the request after its downloaded`,default:!1},transcendUrl:t.s()}},docs:{brief:`Download the files associated with a Data Subject Access Request (DSAR)`,fullDescription:`Download the files associated with a Data Subject Access Request (DSAR) from DSR Automation -> Incoming Requests tab.`}}),H=(0,r.buildCommand)({loader:async()=>{let{enricherRestart:e}=await Promise.resolve().then(()=>require(`./impl-D9_MKLLL.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageRequestCompilation]}),enricherId:{kind:`parsed`,parse:String,brief:`The ID of the enricher to restart`},actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request action to restart`,optional:!0},requestEnricherStatuses:{kind:`enum`,values:Object.values(a.RequestEnricherStatus),variadic:`,`,brief:`The request enricher statuses to restart`,optional:!0},transcendUrl:t.s(),concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`15`},requestIds:{kind:`parsed`,parse:String,variadic:`,`,brief:`Specify the specific request IDs to restart`,optional:!0},createdAtBefore:{kind:`parsed`,parse:t.l,brief:`Restart requests that were submitted before this time`,optional:!0},createdAtAfter:{kind:`parsed`,parse:t.l,brief:`Restart requests that were submitted after this time`,optional:!0}}},docs:{brief:`Bulk restart a particular enricher across a series of DSRs`,fullDescription:`Bulk restart a particular enricher across a series of DSRs.
+ Read more at https://docs.transcend.io/docs/integrations/cron-job-integration.`}}),B=(0,r.buildRouteMap)({routes:{"pull-identifiers":R,"pull-profiles":z,"mark-identifiers-completed":L},docs:{brief:`Cron commands`,hideRoute:{"pull-profiles":!0}}}),V=(0,r.buildCommand)({loader:async()=>{let{downloadFiles:e}=await Promise.resolve().then(()=>require(`./impl-CA_CO88W.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewRequestCompilation,a.ScopeName.ViewRequests,a.ScopeName.RequestApproval]}),sombraAuth:t.o(),concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when downloading requests in parallel`,default:`10`},requestIds:{kind:`parsed`,parse:String,variadic:`,`,brief:`Specify the specific request IDs to download`,optional:!0},statuses:{kind:`enum`,values:Object.values(a.RequestStatus),variadic:`,`,brief:`The request statuses to download. Comma-separated list. Defaults to APPROVING,DOWNLOADABLE.`,optional:!0},folderPath:{kind:`parsed`,parse:String,brief:`The folder to download files to`,default:`./dsr-files`},createdAtBefore:{kind:`parsed`,parse:t.l,brief:`Download requests that were submitted before this time`,optional:!0},createdAtAfter:{kind:`parsed`,parse:t.l,brief:`Download requests that were submitted after this time`,optional:!0},approveAfterDownload:{kind:`boolean`,brief:`If the request is in status=APPROVING, approve the request after its downloaded`,default:!1},transcendUrl:t.s()}},docs:{brief:`Download the files associated with a Data Subject Access Request (DSAR)`,fullDescription:`Download the files associated with a Data Subject Access Request (DSAR) from DSR Automation -> Incoming Requests tab.`}}),H=(0,r.buildCommand)({loader:async()=>{let{enricherRestart:e}=await Promise.resolve().then(()=>require(`./impl-D7WBmmHb.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageRequestCompilation]}),enricherId:{kind:`parsed`,parse:String,brief:`The ID of the enricher to restart`},actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request action to restart`,optional:!0},requestEnricherStatuses:{kind:`enum`,values:Object.values(a.RequestEnricherStatus),variadic:`,`,brief:`The request enricher statuses to restart`,optional:!0},transcendUrl:t.s(),concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`15`},requestIds:{kind:`parsed`,parse:String,variadic:`,`,brief:`Specify the specific request IDs to restart`,optional:!0},createdAtBefore:{kind:`parsed`,parse:t.l,brief:`Restart requests that were submitted before this time`,optional:!0},createdAtAfter:{kind:`parsed`,parse:t.l,brief:`Restart requests that were submitted after this time`,optional:!0}}},docs:{brief:`Bulk restart a particular enricher across a series of DSRs`,fullDescription:`Bulk restart a particular enricher across a series of DSRs.
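A sketch of the DSAR file download defined above, again assuming the request namespace (the date is a placeholder; --folderPath shows its documented default):

     transcend request download-files --auth=$TRANSCEND_API_KEY --folderPath=./dsr-files --createdAtAfter=<ISO-8601-date>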

  The API key needs the following scopes:
- - Manage Request Compilation`}}),U=(0,r.buildCommand)({loader:async()=>{let{_export:e}=await Promise.resolve().then(()=>require(`./impl-CeoJtFk7.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewRequests,a.ScopeName.ViewRequestCompilation]}),sombraAuth:t.o(),actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to export`,optional:!0},statuses:{kind:`enum`,values:Object.values(a.RequestStatus),variadic:`,`,brief:`The request statuses to export`,optional:!0},transcendUrl:t.s(),file:{kind:`parsed`,parse:String,brief:`Path to the CSV file where identifiers will be written to`,default:`./transcend-request-export.csv`},concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`50`},skipRequestIdentifiers:{kind:`boolean`,brief:`Skip exporting request identifiers`,optional:!0},createdAtBefore:{kind:`parsed`,parse:t.l,brief:`Pull requests that were submitted before this time`,optional:!0},createdAtAfter:{kind:`parsed`,parse:t.l,brief:`Pull requests that were submitted after this time`,optional:!0},showTests:{kind:`boolean`,brief:`Filter for test requests or production requests - when not provided, pulls both`,optional:!0},pageLimit:{kind:`parsed`,parse:r.numberParser,brief:`The page limit to use when pulling in pages of requests`,default:`100`}}},docs:{brief:`Export privacy requests and request identifiers to a CSV file`,fullDescription:`Export privacy requests and request identifiers to a CSV file.`}}),W=(0,r.buildCommand)({loader:async()=>{let{markSilent:e}=await Promise.resolve().then(()=>require(`./impl-C4VIGK1G.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageRequestCompilation]}),actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to mark silent`},statuses:{kind:`enum`,values:Object.values(a.RequestStatus),variadic:`,`,brief:`The request statuses to mark silent. Comma-separated list. 
Defaults to REQUEST_MADE,WAITING,ENRICHING,COMPILING,DELAYED,APPROVING,SECONDARY,SECONDARY_APPROVING.`,optional:!0},requestIds:{kind:`parsed`,parse:String,variadic:`,`,brief:`Specify the specific request IDs to mark silent`,optional:!0},createdAtBefore:{kind:`parsed`,parse:t.l,brief:`Mark silent requests that were submitted before this time`,optional:!0},createdAtAfter:{kind:`parsed`,parse:t.l,brief:`Mark silent requests that were submitted after this time`,optional:!0},transcendUrl:t.s(),concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`50`}}},docs:{brief:`Bulk update a set of privacy requests to be in silent mode`,fullDescription:`Bulk update a set of privacy requests from the DSR Automation -> Incoming Requests tab to be in silent mode.`}}),G=(0,r.buildCommand)({loader:async()=>{let{notifyAdditionalTime:e}=await Promise.resolve().then(()=>require(`./impl-rYMBoT4c.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewRequests,a.ScopeName.RequestApproval]}),createdAtBefore:{kind:`parsed`,parse:t.l,brief:`Notify requests that are open but submitted before this time`},createdAtAfter:{kind:`parsed`,parse:t.l,brief:`Notify requests that are open but submitted after this time`,optional:!0},actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to notify`,optional:!0},daysLeft:{kind:`parsed`,parse:r.numberParser,brief:`Only notify requests that have less than this number of days until they are considered expired`,default:`10`},days:{kind:`parsed`,parse:r.numberParser,brief:`The number of days to adjust the expiration of the request to`,default:`45`},requestIds:{kind:`parsed`,parse:String,variadic:`,`,brief:`Specify the specific request IDs to notify`,optional:!0},emailTemplate:{kind:`parsed`,parse:String,brief:`The title of the email template that should be sent to the requests`,default:`Additional Time Needed`},transcendUrl:t.s(),concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`50`}}},docs:{brief:`Bulk notify a set of privacy requests that more time is needed`,fullDescription:`Bulk notify a set of privacy requests from the DSR Automation -> Incoming Requests tab that more time is needed to complete the request. Note any request in silent mode will not be emailed.`}}),K=(0,r.buildCommand)({loader:async()=>{let{pullIdentifiers:e}=await Promise.resolve().then(()=>require(`./impl-Ij4ebiVO.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewRequests,a.ScopeName.ViewRequestCompilation]}),sombraAuth:t.o(),transcendUrl:t.s(),file:{kind:`parsed`,parse:String,brief:`Path to the CSV file where requests will be written to`,default:`./manual-enrichment-identifiers.csv`},actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to pull for`,optional:!0},concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`100`}}},docs:{brief:`Pull identifiers for manual enrichment`,fullDescription:`This command pulls down the set of privacy requests that are currently pending manual enrichment.
+ - Manage Request Compilation`}}),U=(0,r.buildCommand)({loader:async()=>{let{_export:e}=await Promise.resolve().then(()=>require(`./impl-BYK9pQs0.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewRequests,a.ScopeName.ViewRequestCompilation]}),sombraAuth:t.o(),actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to export`,optional:!0},statuses:{kind:`enum`,values:Object.values(a.RequestStatus),variadic:`,`,brief:`The request statuses to export`,optional:!0},transcendUrl:t.s(),file:{kind:`parsed`,parse:String,brief:`Path to the CSV file where identifiers will be written to`,default:`./transcend-request-export.csv`},concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`50`},skipRequestIdentifiers:{kind:`boolean`,brief:`Skip exporting request identifiers`,optional:!0},createdAtBefore:{kind:`parsed`,parse:t.l,brief:`Pull requests that were submitted before this time`,optional:!0},createdAtAfter:{kind:`parsed`,parse:t.l,brief:`Pull requests that were submitted after this time`,optional:!0},showTests:{kind:`boolean`,brief:`Filter for test requests or production requests - when not provided, pulls both`,optional:!0},pageLimit:{kind:`parsed`,parse:r.numberParser,brief:`The page limit to use when pulling in pages of requests`,default:`100`}}},docs:{brief:`Export privacy requests and request identifiers to a CSV file`,fullDescription:`Export privacy requests and request identifiers to a CSV file.`}}),W=(0,r.buildCommand)({loader:async()=>{let{markSilent:e}=await Promise.resolve().then(()=>require(`./impl-BFOghVmx.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageRequestCompilation]}),actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to mark silent`},statuses:{kind:`enum`,values:Object.values(a.RequestStatus),variadic:`,`,brief:`The request statuses to mark silent. Comma-separated list. 
Defaults to REQUEST_MADE,WAITING,ENRICHING,COMPILING,DELAYED,APPROVING,SECONDARY,SECONDARY_APPROVING.`,optional:!0},requestIds:{kind:`parsed`,parse:String,variadic:`,`,brief:`Specify the specific request IDs to mark silent`,optional:!0},createdAtBefore:{kind:`parsed`,parse:t.l,brief:`Mark silent requests that were submitted before this time`,optional:!0},createdAtAfter:{kind:`parsed`,parse:t.l,brief:`Mark silent requests that were submitted after this time`,optional:!0},transcendUrl:t.s(),concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`50`}}},docs:{brief:`Bulk update a set of privacy requests to be in silent mode`,fullDescription:`Bulk update a set of privacy requests from the DSR Automation -> Incoming Requests tab to be in silent mode.`}}),G=(0,r.buildCommand)({loader:async()=>{let{notifyAdditionalTime:e}=await Promise.resolve().then(()=>require(`./impl-BjOFvm4E.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewRequests,a.ScopeName.RequestApproval]}),createdAtBefore:{kind:`parsed`,parse:t.l,brief:`Notify requests that are open but submitted before this time`},createdAtAfter:{kind:`parsed`,parse:t.l,brief:`Notify requests that are open but submitted after this time`,optional:!0},actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to notify`,optional:!0},daysLeft:{kind:`parsed`,parse:r.numberParser,brief:`Only notify requests that have less than this number of days until they are considered expired`,default:`10`},days:{kind:`parsed`,parse:r.numberParser,brief:`The number of days to adjust the expiration of the request to`,default:`45`},requestIds:{kind:`parsed`,parse:String,variadic:`,`,brief:`Specify the specific request IDs to notify`,optional:!0},emailTemplate:{kind:`parsed`,parse:String,brief:`The title of the email template that should be sent to the requests`,default:`Additional Time Needed`},transcendUrl:t.s(),concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`50`}}},docs:{brief:`Bulk notify a set of privacy requests that more time is needed`,fullDescription:`Bulk notify a set of privacy requests from the DSR Automation -> Incoming Requests tab that more time is needed to complete the request. Note any request in silent mode will not be emailed.`}}),K=(0,r.buildCommand)({loader:async()=>{let{pullIdentifiers:e}=await Promise.resolve().then(()=>require(`./impl-BXMfTm4K.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ViewRequests,a.ScopeName.ViewRequestCompilation]}),sombraAuth:t.o(),transcendUrl:t.s(),file:{kind:`parsed`,parse:String,brief:`Path to the CSV file where requests will be written to`,default:`./manual-enrichment-identifiers.csv`},actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to pull for`,optional:!0},concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`100`}}},docs:{brief:`Pull identifiers for manual enrichment`,fullDescription:`This command pulls down the set of privacy requests that are currently pending manual enrichment.
 
  This is useful for the following workflow:
 
@@ -97,7 +97,7 @@ This is useful for the following workflow:
 
  3. Push updated back to Transcend:
 
- ${O([`request`,`preflight`,`push-identifiers`],{file:`./enrichment-requests.csv`},{argsIndent:5})}`}}),q=(0,r.buildCommand)({loader:async()=>{let{pushIdentifiers:e}=await Promise.resolve().then(()=>require(`./impl-BYBww_ic.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageRequestIdentities,a.ScopeName.ManageRequestCompilation]}),enricherId:{kind:`parsed`,parse:t.d,brief:`The ID of the Request Enricher to upload to`},sombraAuth:t.o(),transcendUrl:t.s(),file:{kind:`parsed`,parse:String,brief:`Path to the CSV file where requests will be written to`,default:`./manual-enrichment-identifiers.csv`},markSilent:{kind:`boolean`,brief:`When true, set requests into silent mode before enriching`,default:!1},concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`100`}}},docs:{brief:`Push identifiers for manual enrichment`,fullDescription:`This command push up a set of identifiers for a set of requests pending manual enrichment.
+ ${O([`request`,`preflight`,`push-identifiers`],{file:`./enrichment-requests.csv`},{argsIndent:5})}`}}),q=(0,r.buildCommand)({loader:async()=>{let{pushIdentifiers:e}=await Promise.resolve().then(()=>require(`./impl-BIqdEXRo.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageRequestIdentities,a.ScopeName.ManageRequestCompilation]}),enricherId:{kind:`parsed`,parse:t.d,brief:`The ID of the Request Enricher to upload to`},sombraAuth:t.o(),transcendUrl:t.s(),file:{kind:`parsed`,parse:String,brief:`Path to the CSV file where requests will be written to`,default:`./manual-enrichment-identifiers.csv`},markSilent:{kind:`boolean`,brief:`When true, set requests into silent mode before enriching`,default:!1},concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`100`}}},docs:{brief:`Push identifiers for manual enrichment`,fullDescription:`This command push up a set of identifiers for a set of requests pending manual enrichment.
 
  This is useful for the following workflow:
 
@@ -109,10 +109,10 @@ This is useful for the following workflow:
 
  3. Push updated back to Transcend:
 
- ${O([`request`,`preflight`,`push-identifiers`],{file:`./enrichment-requests.csv`},{argsIndent:5})}`}}),J=(0,r.buildRouteMap)({routes:{"pull-identifiers":K,"push-identifiers":q},docs:{brief:`Preflight commands`}}),Y=(0,r.buildCommand)({loader:async()=>{let{rejectUnverifiedIdentifiers:e}=await Promise.resolve().then(()=>require(`./impl-D1MnxAqp.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageRequestCompilation]}),identifierNames:{kind:`parsed`,parse:String,variadic:`,`,brief:`The names of identifiers to clear out`},actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request action to restart`,optional:!0},transcendUrl:t.s()}},docs:{brief:`Bulk clear out any request identifiers that are unverified`,fullDescription:`Bulk clear out any request identifiers that are unverified.`}}),X=(0,r.buildCommand)({loader:async()=>{let{restart:e}=await Promise.resolve().then(()=>require(`./impl-B19iyoiR.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.MakeDataSubjectRequest,a.ScopeName.ViewRequestCompilation]}),actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to restart`},statuses:{kind:`enum`,values:Object.values(a.RequestStatus),variadic:`,`,brief:`The request statuses to restart`},transcendUrl:t.s(),requestReceiptFolder:{kind:`parsed`,parse:String,brief:`The path to the folder where receipts of each upload are stored`,default:`./privacy-request-upload-receipts`},sombraAuth:t.o(),concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`15`},requestIds:{kind:`parsed`,parse:String,variadic:`,`,brief:`Specify the specific request IDs to restart`,optional:!0},emailIsVerified:{kind:`boolean`,brief:`Indicate whether the primary email address is verified. 
Set to false to send a verification email`,default:!0},createdAt:{kind:`parsed`,parse:t.l,brief:`Restart requests that were submitted before a specific date`,optional:!0},silentModeBefore:{kind:`parsed`,parse:t.l,brief:`Requests older than this date should be marked as silent mode`,optional:!0},createdAtBefore:{kind:`parsed`,parse:t.l,brief:`Restart requests that were submitted before this time`,optional:!0},createdAtAfter:{kind:`parsed`,parse:t.l,brief:`Restart requests that were submitted after this time`,optional:!0},sendEmailReceipt:{kind:`boolean`,brief:`Send email receipts to the restarted requests`,default:!1},copyIdentifiers:{kind:`boolean`,brief:`Copy over all enriched identifiers from the initial request`,default:!1},skipWaitingPeriod:{kind:`boolean`,brief:`Skip queued state of request and go straight to compiling`,default:!1}}},docs:{brief:`Bulk update a set of privacy requests based on a set of request filters`,fullDescription:`Bulk update a set of privacy requests based on a set of request filters.`}}),Z=(0,r.buildCommand)({loader:async()=>{let{skipPreflightJobs:e}=await Promise.resolve().then(()=>require(`./impl-BtfHdJB-.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageRequestCompilation]}),enricherIds:{kind:`parsed`,parse:String,variadic:`,`,brief:`The ID of the enrichers to skip privacy request jobs for`},transcendUrl:t.s()}},docs:{brief:`Skip preflight jobs`,fullDescription:`This command allows for bulk skipping preflight checks.`}}),Q=(0,r.buildCommand)({loader:async()=>{let{markRequestDataSilosCompleted:e}=await Promise.resolve().then(()=>require(`./impl-Dwd8vWoq.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageRequestCompilation]}),dataSiloId:{kind:`parsed`,parse:t.d,brief:`The ID of the data silo to pull in`},file:{kind:`parsed`,parse:String,brief:`Path to the CSV file where identifiers will be written to. The CSV is expected to have 1 column named "Request Id".`,default:`./request-identifiers.csv`},transcendUrl:t.s()}},docs:{brief:`Mark request data silos as completed`,fullDescription:`This command takes in a CSV of Request IDs as well as a Data Silo ID and marks all associated privacy request jobs as completed.
- This command is useful with the "Bulk Response" UI. The CSV is expected to have 1 column named "Request Id".`}}),$=(0,r.buildCommand)({loader:async()=>{let{retryRequestDataSilos:e}=await Promise.resolve().then(()=>require(`./impl-Emn0jZNk.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageRequestCompilation]}),dataSiloId:{kind:`parsed`,parse:t.d,brief:`The ID of the data silo to pull in`},actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to restart`},transcendUrl:t.s()}},docs:{brief:`Retry request data silos`,fullDescription:`This command allows for bulk restarting a set of data silos jobs for open privacy requests. This is equivalent to clicking the "Wipe and Retry" button for a particular data silo across a set of privacy requests.`}}),te=(0,r.buildCommand)({loader:async()=>{let{skipRequestDataSilos:e}=await Promise.resolve().then(()=>require(`./impl-TDjUVCrV.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageRequestCompilation]}),dataSiloId:{kind:`parsed`,parse:t.d,brief:`The ID of the data silo to skip privacy request jobs for`},transcendUrl:t.s(),statuses:{kind:`enum`,values:Object.values(a.RequestStatus),variadic:`,`,brief:`The request statuses to skip`},status:{kind:`enum`,values:[a.RequestDataSiloStatus.Skipped,a.RequestDataSiloStatus.Resolved],brief:`The status to set the request data silo job to`,default:a.RequestDataSiloStatus.Skipped}}},docs:{brief:`Skip request data silos`,fullDescription:`This command allows for bulk skipping all open privacy request jobs for a particular data silo. This command is useful if you want to disable a data silo and then clear out any active privacy requests that are still queued up for that data silo.`}}),ne=(0,r.buildRouteMap)({routes:{"mark-request-data-silos-completed":Q,"retry-request-data-silos":$,"skip-request-data-silos":te},docs:{brief:`System commands`}}),re=(0,r.buildCommand)({loader:async()=>{let{upload:e}=await Promise.resolve().then(()=>require(`./impl-BnkpDlXg.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.MakeDataSubjectRequest,a.ScopeName.ViewRequestIdentitySettings,a.ScopeName.ViewGlobalAttributes]}),file:{kind:`parsed`,parse:String,brief:`Path to the CSV file of requests to upload`,default:`./requests.csv`},transcendUrl:t.s(),cacheFilepath:{kind:`parsed`,parse:String,brief:`The path to the JSON file encoding the metadata used to map the CSV shape to Transcend API`,default:`./transcend-privacy-requests-cache.json`},requestReceiptFolder:{kind:`parsed`,parse:String,brief:`The path to the folder where receipts of each upload are stored`,default:`./privacy-request-upload-receipts`},sombraAuth:t.o(),concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`50`},attributes:{kind:`parsed`,parse:String,brief:`Tag all of the requests with the following attributes. Format: key1:value1;value2,key2:value3;value4`,default:`Tags:transcend-cli`},isTest:{kind:`boolean`,brief:`Flag whether the requests being uploaded are test requests or regular requests`,default:!1},isSilent:{kind:`boolean`,brief:`Flag whether the requests being uploaded should be submitted in silent mode`,default:!0},skipSendingReceipt:{kind:`boolean`,brief:`Flag whether to skip sending of the receipt email`,default:!1},emailIsVerified:{kind:`boolean`,brief:`Indicate whether the email address being uploaded is pre-verified. 
Set to false to send a verification email`,default:!0},skipFilterStep:{kind:`boolean`,brief:`When true, skip the interactive step to filter down the CSV`,default:!1},dryRun:{kind:`boolean`,brief:`When true, perform a dry run of the upload instead of calling the API to submit the requests`,default:!1},debug:{kind:`boolean`,brief:`Debug logging`,default:!1},defaultPhoneCountryCode:{kind:`parsed`,parse:String,brief:`When uploading phone numbers, if the phone number is missing a country code, assume this country code`,default:`1`}}},docs:{brief:`Upload a set of requests from a CSV`,fullDescription:`Upload a set of requests from a CSV.
+ ${O([`request`,`preflight`,`push-identifiers`],{file:`./enrichment-requests.csv`},{argsIndent:5})}`}}),J=(0,r.buildRouteMap)({routes:{"pull-identifiers":K,"push-identifiers":q},docs:{brief:`Preflight commands`}}),Y=(0,r.buildCommand)({loader:async()=>{let{rejectUnverifiedIdentifiers:e}=await Promise.resolve().then(()=>require(`./impl-BS8t24Z7.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageRequestCompilation]}),identifierNames:{kind:`parsed`,parse:String,variadic:`,`,brief:`The names of identifiers to clear out`},actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request action to restart`,optional:!0},transcendUrl:t.s()}},docs:{brief:`Bulk clear out any request identifiers that are unverified`,fullDescription:`Bulk clear out any request identifiers that are unverified.`}}),X=(0,r.buildCommand)({loader:async()=>{let{restart:e}=await Promise.resolve().then(()=>require(`./impl-C-_9uRms.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.MakeDataSubjectRequest,a.ScopeName.ViewRequestCompilation]}),actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to restart`},statuses:{kind:`enum`,values:Object.values(a.RequestStatus),variadic:`,`,brief:`The request statuses to restart`},transcendUrl:t.s(),requestReceiptFolder:{kind:`parsed`,parse:String,brief:`The path to the folder where receipts of each upload are stored`,default:`./privacy-request-upload-receipts`},sombraAuth:t.o(),concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`15`},requestIds:{kind:`parsed`,parse:String,variadic:`,`,brief:`Specify the specific request IDs to restart`,optional:!0},emailIsVerified:{kind:`boolean`,brief:`Indicate whether the primary email address is verified. 
Set to false to send a verification email`,default:!0},createdAt:{kind:`parsed`,parse:t.l,brief:`Restart requests that were submitted before a specific date`,optional:!0},silentModeBefore:{kind:`parsed`,parse:t.l,brief:`Requests older than this date should be marked as silent mode`,optional:!0},createdAtBefore:{kind:`parsed`,parse:t.l,brief:`Restart requests that were submitted before this time`,optional:!0},createdAtAfter:{kind:`parsed`,parse:t.l,brief:`Restart requests that were submitted after this time`,optional:!0},sendEmailReceipt:{kind:`boolean`,brief:`Send email receipts to the restarted requests`,default:!1},copyIdentifiers:{kind:`boolean`,brief:`Copy over all enriched identifiers from the initial request`,default:!1},skipWaitingPeriod:{kind:`boolean`,brief:`Skip queued state of request and go straight to compiling`,default:!1}}},docs:{brief:`Bulk update a set of privacy requests based on a set of request filters`,fullDescription:`Bulk update a set of privacy requests based on a set of request filters.`}}),Z=(0,r.buildCommand)({loader:async()=>{let{skipPreflightJobs:e}=await Promise.resolve().then(()=>require(`./impl-BH0HWnIY.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageRequestCompilation]}),enricherIds:{kind:`parsed`,parse:String,variadic:`,`,brief:`The ID of the enrichers to skip privacy request jobs for`},transcendUrl:t.s()}},docs:{brief:`Skip preflight jobs`,fullDescription:`This command allows for bulk skipping preflight checks.`}}),Q=(0,r.buildCommand)({loader:async()=>{let{markRequestDataSilosCompleted:e}=await Promise.resolve().then(()=>require(`./impl-cfDtPbS9.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageRequestCompilation]}),dataSiloId:{kind:`parsed`,parse:t.d,brief:`The ID of the data silo to pull in`},file:{kind:`parsed`,parse:String,brief:`Path to the CSV file where identifiers will be written to. The CSV is expected to have 1 column named "Request Id".`,default:`./request-identifiers.csv`},transcendUrl:t.s()}},docs:{brief:`Mark request data silos as completed`,fullDescription:`This command takes in a CSV of Request IDs as well as a Data Silo ID and marks all associated privacy request jobs as completed.
+ This command is useful with the "Bulk Response" UI. The CSV is expected to have 1 column named "Request Id".`}}),$=(0,r.buildCommand)({loader:async()=>{let{retryRequestDataSilos:e}=await Promise.resolve().then(()=>require(`./impl-hGu8uCC4.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageRequestCompilation]}),dataSiloId:{kind:`parsed`,parse:t.d,brief:`The ID of the data silo to pull in`},actions:{kind:`enum`,values:Object.values(a.RequestAction),variadic:`,`,brief:`The request actions to restart`},transcendUrl:t.s()}},docs:{brief:`Retry request data silos`,fullDescription:`This command allows for bulk restarting a set of data silos jobs for open privacy requests. This is equivalent to clicking the "Wipe and Retry" button for a particular data silo across a set of privacy requests.`}}),te=(0,r.buildCommand)({loader:async()=>{let{skipRequestDataSilos:e}=await Promise.resolve().then(()=>require(`./impl-yHutqfbd.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.ManageRequestCompilation]}),dataSiloId:{kind:`parsed`,parse:t.d,brief:`The ID of the data silo to skip privacy request jobs for`},transcendUrl:t.s(),statuses:{kind:`enum`,values:Object.values(a.RequestStatus),variadic:`,`,brief:`The request statuses to skip`},status:{kind:`enum`,values:[a.RequestDataSiloStatus.Skipped,a.RequestDataSiloStatus.Resolved],brief:`The status to set the request data silo job to`,default:a.RequestDataSiloStatus.Skipped}}},docs:{brief:`Skip request data silos`,fullDescription:`This command allows for bulk skipping all open privacy request jobs for a particular data silo. This command is useful if you want to disable a data silo and then clear out any active privacy requests that are still queued up for that data silo.`}}),ne=(0,r.buildRouteMap)({routes:{"mark-request-data-silos-completed":Q,"retry-request-data-silos":$,"skip-request-data-silos":te},docs:{brief:`System commands`}}),re=(0,r.buildCommand)({loader:async()=>{let{upload:e}=await Promise.resolve().then(()=>require(`./impl-DjVmOb9T.cjs`));return e},parameters:{flags:{auth:t.i({scopes:[a.ScopeName.MakeDataSubjectRequest,a.ScopeName.ViewRequestIdentitySettings,a.ScopeName.ViewGlobalAttributes]}),file:{kind:`parsed`,parse:String,brief:`Path to the CSV file of requests to upload`,default:`./requests.csv`},transcendUrl:t.s(),cacheFilepath:{kind:`parsed`,parse:String,brief:`The path to the JSON file encoding the metadata used to map the CSV shape to Transcend API`,default:`./transcend-privacy-requests-cache.json`},requestReceiptFolder:{kind:`parsed`,parse:String,brief:`The path to the folder where receipts of each upload are stored`,default:`./privacy-request-upload-receipts`},sombraAuth:t.o(),concurrency:{kind:`parsed`,parse:r.numberParser,brief:`The concurrency to use when uploading requests in parallel`,default:`50`},attributes:{kind:`parsed`,parse:String,brief:`Tag all of the requests with the following attributes. Format: key1:value1;value2,key2:value3;value4`,default:`Tags:transcend-cli`},isTest:{kind:`boolean`,brief:`Flag whether the requests being uploaded are test requests or regular requests`,default:!1},isSilent:{kind:`boolean`,brief:`Flag whether the requests being uploaded should be submitted in silent mode`,default:!0},skipSendingReceipt:{kind:`boolean`,brief:`Flag whether to skip sending of the receipt email`,default:!1},emailIsVerified:{kind:`boolean`,brief:`Indicate whether the email address being uploaded is pre-verified. 
Set to false to send a verification email`,default:!0},skipFilterStep:{kind:`boolean`,brief:`When true, skip the interactive step to filter down the CSV`,default:!1},dryRun:{kind:`boolean`,brief:`When true, perform a dry run of the upload instead of calling the API to submit the requests`,default:!1},debug:{kind:`boolean`,brief:`Debug logging`,default:!1},defaultPhoneCountryCode:{kind:`parsed`,parse:String,brief:`When uploading phone numbers, if the phone number is missing a country code, assume this country code`,default:`1`}}},docs:{brief:`Upload a set of requests from a CSV`,fullDescription:`Upload a set of requests from a CSV.
 
  This command prompts you to map the shape of the CSV to the shape of the Transcend API. There is no requirement for the shape of the incoming CSV, as the script will handle the mapping process.
 
  The script will also produce a JSON cache file that allows for the mappings to be preserved between runs.`}}),ie=(0,r.buildRouteMap)({routes:{approve:F,upload:re,"download-files":V,cancel:I,restart:X,"notify-additional-time":G,"mark-silent":W,"enricher-restart":H,"reject-unverified-identifiers":Y,export:U,"skip-preflight-jobs":Z,system:ne,preflight:J,cron:B},docs:{brief:`All commands related to DSR requests`}}),ae=(0,r.buildRouteMap)({routes:{request:ie,consent:b,inventory:N,admin:l,migration:ee,install:(0,i.buildInstallCommand)(`@transcend-io/transcend`,{bash:`__@transcend-io/cli_bash_complete`}),uninstall:(0,i.buildUninstallCommand)(`@transcend-io/transcend`,{bash:!0})},docs:{brief:n.m,hideRoute:{install:!0,uninstall:!0}}}),oe=(0,r.buildApplication)(ae,{name:n.p,versionInfo:{currentVersion:n.h}});Object.defineProperty(exports,`t`,{enumerable:!0,get:function(){return oe}});
- //# sourceMappingURL=app-CnSMU1Jq.cjs.map
+ //# sourceMappingURL=app-BOlZhpYh.cjs.map
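Usage note: every `-`/`+` pair in the hunks above differs only in content-hashed chunk filenames (e.g. `impl-CeoJtFk7.cjs` → `impl-BYK9pQs0.cjs`); the command definitions themselves are unchanged. The `O(...)` calls embedded in the help strings render example invocations. A minimal sketch of the rendered preflight example, assuming the published binary is invoked as `transcend` (the bundle registers completions for `@transcend-io/transcend`) and that this Stricli-style command tree renders flags as `--name=value`:

    transcend request preflight push-identifiers \
      --file=./enrichment-requests.csv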