@transcend-io/cli 8.37.1 → 8.37.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
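One way to reproduce a diff like this locally (assuming npm 7 or newer, and that both versions are still published) is npm's built-in `diff` command:

```sh
# Fetch both published tarballs and print a unified diff of their contents.
npm diff --diff=@transcend-io/cli@8.37.1 --diff=@transcend-io/cli@8.37.2
```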
- package/dist/{api-keys-Bb2BbZQe.cjs → api-keys-Bvt2HbSv.cjs} +2 -2
- package/dist/{api-keys-Bb2BbZQe.cjs.map → api-keys-Bvt2HbSv.cjs.map} +1 -1
- package/dist/{app-C9jD-f87.cjs → app-CdWFyBYu.cjs} +18 -18
- package/dist/{app-C9jD-f87.cjs.map → app-CdWFyBYu.cjs.map} +1 -1
- package/dist/bin/bash-complete.cjs +1 -1
- package/dist/bin/cli.cjs +1 -1
- package/dist/bin/deprecated-command.cjs +1 -1
- package/dist/{code-scanning-4d0zlFxk.cjs → code-scanning-BZzwKEfY.cjs} +2 -2
- package/dist/{code-scanning-4d0zlFxk.cjs.map → code-scanning-BZzwKEfY.cjs.map} +1 -1
- package/dist/{command-XJ7XPQ04.cjs → command-DNcjQs8y.cjs} +2 -2
- package/dist/{command-XJ7XPQ04.cjs.map → command-DNcjQs8y.cjs.map} +1 -1
- package/dist/{consent-manager-CCyvzvY5.cjs → consent-manager-oip5m3XC.cjs} +2 -2
- package/dist/{consent-manager-CCyvzvY5.cjs.map → consent-manager-oip5m3XC.cjs.map} +1 -1
- package/dist/{constants-wkuhlP8d.cjs → constants-K6pQQtc7.cjs} +2 -2
- package/dist/{constants-wkuhlP8d.cjs.map → constants-K6pQQtc7.cjs.map} +1 -1
- package/dist/{cron-DfEGA7Rf.cjs → cron-lijiEqFA.cjs} +2 -2
- package/dist/{cron-DfEGA7Rf.cjs.map → cron-lijiEqFA.cjs.map} +1 -1
- package/dist/{data-inventory-C1eqZk1M.cjs → data-inventory-BKAQGjFN.cjs} +2 -2
- package/dist/{data-inventory-C1eqZk1M.cjs.map → data-inventory-BKAQGjFN.cjs.map} +1 -1
- package/dist/{dataFlowsToDataSilos-DXlFFHMV.cjs → dataFlowsToDataSilos-CnvG2jqy.cjs} +2 -2
- package/dist/{dataFlowsToDataSilos-DXlFFHMV.cjs.map → dataFlowsToDataSilos-CnvG2jqy.cjs.map} +1 -1
- package/dist/{impl-BOEoFzcB.cjs → impl-1-1sg4WF.cjs} +2 -2
- package/dist/{impl-BOEoFzcB.cjs.map → impl-1-1sg4WF.cjs.map} +1 -1
- package/dist/{impl-DwWoAbT_.cjs → impl-57HOh2c3.cjs} +2 -2
- package/dist/{impl-DwWoAbT_.cjs.map → impl-57HOh2c3.cjs.map} +1 -1
- package/dist/{impl-B_2CdctV.cjs → impl-58WnFNmn.cjs} +2 -2
- package/dist/{impl-B_2CdctV.cjs.map → impl-58WnFNmn.cjs.map} +1 -1
- package/dist/{impl-BOEjB3fo.cjs → impl-B4OVz7FC.cjs} +2 -2
- package/dist/{impl-BOEjB3fo.cjs.map → impl-B4OVz7FC.cjs.map} +1 -1
- package/dist/{impl-BfC5CRRX.cjs → impl-BFRrE04X.cjs} +2 -2
- package/dist/{impl-BfC5CRRX.cjs.map → impl-BFRrE04X.cjs.map} +1 -1
- package/dist/{impl-DqfyWyoV.cjs → impl-BSS_avMv.cjs} +2 -2
- package/dist/{impl-DqfyWyoV.cjs.map → impl-BSS_avMv.cjs.map} +1 -1
- package/dist/{impl-DNRsFfbU.cjs → impl-BVmw0mE4.cjs} +2 -2
- package/dist/{impl-DNRsFfbU.cjs.map → impl-BVmw0mE4.cjs.map} +1 -1
- package/dist/{impl-C6JjApDI.cjs → impl-Bf_hLViY.cjs} +2 -2
- package/dist/{impl-C6JjApDI.cjs.map → impl-Bf_hLViY.cjs.map} +1 -1
- package/dist/{impl-DxhyqjcY.cjs → impl-BkEg-Nm6.cjs} +2 -2
- package/dist/{impl-DxhyqjcY.cjs.map → impl-BkEg-Nm6.cjs.map} +1 -1
- package/dist/{impl-7LAuV25D.cjs → impl-Bmln6D88.cjs} +2 -2
- package/dist/{impl-7LAuV25D.cjs.map → impl-Bmln6D88.cjs.map} +1 -1
- package/dist/{impl-DhbV3bBZ.cjs → impl-BqIqzp40.cjs} +2 -2
- package/dist/{impl-DhbV3bBZ.cjs.map → impl-BqIqzp40.cjs.map} +1 -1
- package/dist/{impl-u8o3S8w2.cjs → impl-BszlCtcR.cjs} +2 -2
- package/dist/{impl-u8o3S8w2.cjs.map → impl-BszlCtcR.cjs.map} +1 -1
- package/dist/{impl-DetfC7CT.cjs → impl-BtuKKdl3.cjs} +2 -2
- package/dist/{impl-DetfC7CT.cjs.map → impl-BtuKKdl3.cjs.map} +1 -1
- package/dist/{impl-_QrpPIPw.cjs → impl-C2e4xVvX.cjs} +2 -2
- package/dist/{impl-_QrpPIPw.cjs.map → impl-C2e4xVvX.cjs.map} +1 -1
- package/dist/{impl-CyzGdwB1.cjs → impl-C65nk0G8.cjs} +2 -2
- package/dist/{impl-CyzGdwB1.cjs.map → impl-C65nk0G8.cjs.map} +1 -1
- package/dist/{impl-DjP2MJNK.cjs → impl-CCdxbRmg.cjs} +2 -2
- package/dist/{impl-DjP2MJNK.cjs.map → impl-CCdxbRmg.cjs.map} +1 -1
- package/dist/{impl-DmXYpp-M.cjs → impl-CMwmo2vR.cjs} +2 -2
- package/dist/{impl-DmXYpp-M.cjs.map → impl-CMwmo2vR.cjs.map} +1 -1
- package/dist/{impl-CR-wyJSg.cjs → impl-Cb64HwGx.cjs} +2 -2
- package/dist/{impl-CR-wyJSg.cjs.map → impl-Cb64HwGx.cjs.map} +1 -1
- package/dist/{impl-CV3axMeT.cjs → impl-CdfA8kxo.cjs} +2 -2
- package/dist/{impl-CV3axMeT.cjs.map → impl-CdfA8kxo.cjs.map} +1 -1
- package/dist/{impl-C-aKX3zu.cjs → impl-CkfOZzpI.cjs} +2 -2
- package/dist/{impl-C-aKX3zu.cjs.map → impl-CkfOZzpI.cjs.map} +1 -1
- package/dist/{impl-CKYwKeLz.cjs → impl-ClujxTb8.cjs} +2 -2
- package/dist/{impl-CKYwKeLz.cjs.map → impl-ClujxTb8.cjs.map} +1 -1
- package/dist/{impl-B04CctrY.cjs → impl-D-IWtHQi.cjs} +2 -2
- package/dist/{impl-B04CctrY.cjs.map → impl-D-IWtHQi.cjs.map} +1 -1
- package/dist/{impl-BGoAnVJu.cjs → impl-D9-ZQmJB.cjs} +2 -2
- package/dist/{impl-BGoAnVJu.cjs.map → impl-D9-ZQmJB.cjs.map} +1 -1
- package/dist/{impl-CmEsmnYZ.cjs → impl-DGel0ZLe.cjs} +2 -2
- package/dist/{impl-CmEsmnYZ.cjs.map → impl-DGel0ZLe.cjs.map} +1 -1
- package/dist/{impl-DHuguAlW.cjs → impl-DL2j8g1C.cjs} +2 -2
- package/dist/{impl-DHuguAlW.cjs.map → impl-DL2j8g1C.cjs.map} +1 -1
- package/dist/{impl-LgUGDTQK.cjs → impl-DSNgFKP_.cjs} +2 -2
- package/dist/{impl-LgUGDTQK.cjs.map → impl-DSNgFKP_.cjs.map} +1 -1
- package/dist/{impl-Dzq0t6mX.cjs → impl-DU85U1jO.cjs} +2 -2
- package/dist/{impl-Dzq0t6mX.cjs.map → impl-DU85U1jO.cjs.map} +1 -1
- package/dist/{impl-C4q9xHFr.cjs → impl-DV5f54rm.cjs} +2 -2
- package/dist/{impl-C4q9xHFr.cjs.map → impl-DV5f54rm.cjs.map} +1 -1
- package/dist/{impl-CLcnbVfj.cjs → impl-DXKJH0AZ.cjs} +2 -2
- package/dist/{impl-CLcnbVfj.cjs.map → impl-DXKJH0AZ.cjs.map} +1 -1
- package/dist/{impl-6TmoWv0o.cjs → impl-DbxzDk8h.cjs} +2 -2
- package/dist/{impl-6TmoWv0o.cjs.map → impl-DbxzDk8h.cjs.map} +1 -1
- package/dist/{impl-XyWPUpvw.cjs → impl-DhnCAbU-.cjs} +2 -2
- package/dist/{impl-XyWPUpvw.cjs.map → impl-DhnCAbU-.cjs.map} +1 -1
- package/dist/{impl-kxwq3OMk.cjs → impl-Dj2fTDNO.cjs} +2 -2
- package/dist/{impl-kxwq3OMk.cjs.map → impl-Dj2fTDNO.cjs.map} +1 -1
- package/dist/{impl-Cp7-Tctr.cjs → impl-KAorCmlT.cjs} +2 -2
- package/dist/{impl-Cp7-Tctr.cjs.map → impl-KAorCmlT.cjs.map} +1 -1
- package/dist/{impl-CnRqR4kw.cjs → impl-LMp29vxd.cjs} +2 -2
- package/dist/{impl-CnRqR4kw.cjs.map → impl-LMp29vxd.cjs.map} +1 -1
- package/dist/{impl-DC_YquN8.cjs → impl-MrsSr72p.cjs} +2 -2
- package/dist/{impl-DC_YquN8.cjs.map → impl-MrsSr72p.cjs.map} +1 -1
- package/dist/{impl-CgKn47V9.cjs → impl-SZp3iTUp.cjs} +2 -2
- package/dist/{impl-CgKn47V9.cjs.map → impl-SZp3iTUp.cjs.map} +1 -1
- package/dist/{impl-DrJj-l3s.cjs → impl-W6jE_UV0.cjs} +2 -2
- package/dist/{impl-DrJj-l3s.cjs.map → impl-W6jE_UV0.cjs.map} +1 -1
- package/dist/{impl-Dp3-sA6b.cjs → impl-XwC7A99P.cjs} +2 -2
- package/dist/{impl-Dp3-sA6b.cjs.map → impl-XwC7A99P.cjs.map} +1 -1
- package/dist/{impl-DQ8rr7Fv.cjs → impl-ebVxRYAc.cjs} +2 -2
- package/dist/{impl-DQ8rr7Fv.cjs.map → impl-ebVxRYAc.cjs.map} +1 -1
- package/dist/{impl-CsKfLxov.cjs → impl-k61p_VQY.cjs} +2 -2
- package/dist/{impl-CsKfLxov.cjs.map → impl-k61p_VQY.cjs.map} +1 -1
- package/dist/{impl-CqadSQOh.cjs → impl-kMebV10f.cjs} +2 -2
- package/dist/{impl-CqadSQOh.cjs.map → impl-kMebV10f.cjs.map} +1 -1
- package/dist/{impl-Dvoj_snk.cjs → impl-oYFKp06U.cjs} +2 -2
- package/dist/{impl-Dvoj_snk.cjs.map → impl-oYFKp06U.cjs.map} +1 -1
- package/dist/index.cjs +1 -1
- package/dist/index.d.cts +9 -9
- package/dist/{manual-enrichment-CzTpv-mM.cjs → manual-enrichment-C6h9gjY1.cjs} +2 -2
- package/dist/{manual-enrichment-CzTpv-mM.cjs.map → manual-enrichment-C6h9gjY1.cjs.map} +1 -1
- package/dist/{pooling-DA_LwUEp.cjs → pooling-DLEGcLtt.cjs} +5 -5
- package/dist/pooling-DLEGcLtt.cjs.map +1 -0
- package/dist/{preference-management-aOhuZCuE.cjs → preference-management-B36PQuMK.cjs} +2 -2
- package/dist/{preference-management-aOhuZCuE.cjs.map → preference-management-B36PQuMK.cjs.map} +1 -1
- package/dist/{syncConfigurationToTranscend-DuTZKIG8.cjs → syncConfigurationToTranscend-DKliAJhK.cjs} +2 -2
- package/dist/{syncConfigurationToTranscend-DuTZKIG8.cjs.map → syncConfigurationToTranscend-DKliAJhK.cjs.map} +1 -1
- package/dist/{uploadConsents-C9Pv8Awr.cjs → uploadConsents-MtgCk8B0.cjs} +2 -2
- package/dist/{uploadConsents-C9Pv8Awr.cjs.map → uploadConsents-MtgCk8B0.cjs.map} +1 -1
- package/package.json +1 -1
- package/dist/pooling-DA_LwUEp.cjs.map +0 -1

package/dist/{impl-DjP2MJNK.cjs.map → impl-CCdxbRmg.cjs.map}
@@ -1 +1 @@
- {"version":3,"file":"impl-
+
{"version":3,"file":"impl-CCdxbRmg.cjs","names":["file","map","uploadPreferenceManagementPreferencesInteractive","splitCsvToList"],"sources":["../src/commands/consent/upload-preferences/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport colors from 'colors';\n\nimport { logger } from '../../../logger';\nimport { uploadPreferenceManagementPreferencesInteractive } from '../../../lib/preference-management';\nimport { splitCsvToList } from '../../../lib/requests';\nimport { readdirSync } from 'node:fs';\nimport { map } from '../../../lib/bluebird';\nimport { basename, join } from 'node:path';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface UploadPreferencesCommandFlags {\n auth: string;\n partition: string;\n sombraAuth?: string;\n transcendUrl: string;\n file?: string;\n directory?: string;\n dryRun: boolean;\n skipExistingRecordCheck: boolean;\n receiptFileDir: string;\n skipWorkflowTriggers: boolean;\n forceTriggerWorkflows: boolean;\n skipConflictUpdates: boolean;\n isSilent: boolean;\n attributes: string;\n receiptFilepath: string;\n concurrency: number;\n}\n\nexport async function uploadPreferences(\n this: LocalContext,\n {\n auth,\n partition,\n sombraAuth,\n transcendUrl,\n file = '',\n directory,\n dryRun,\n skipExistingRecordCheck,\n receiptFileDir,\n skipWorkflowTriggers,\n forceTriggerWorkflows,\n skipConflictUpdates,\n isSilent,\n attributes,\n concurrency,\n }: UploadPreferencesCommandFlags,\n): Promise<void> {\n if (!!directory && !!file) {\n logger.error(\n colors.red(\n 'Cannot provide both a directory and a file. Please provide only one.',\n ),\n );\n this.process.exit(1);\n }\n\n if (!file && !directory) {\n logger.error(\n colors.red(\n 'A file or directory must be provided. 
Please provide one using --file=./preferences.csv or --directory=./preferences',\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n const files: string[] = [];\n\n if (directory) {\n try {\n const filesInDirectory = readdirSync(directory);\n const csvFiles = filesInDirectory.filter((file) => file.endsWith('.csv'));\n\n if (csvFiles.length === 0) {\n logger.error(\n colors.red(`No CSV files found in directory: ${directory}`),\n );\n this.process.exit(1);\n }\n\n // Add full paths for each CSV file\n files.push(...csvFiles.map((file) => join(directory, file)));\n } catch (err) {\n logger.error(colors.red(`Failed to read directory: ${directory}`));\n logger.error(colors.red((err as Error).message));\n this.process.exit(1);\n }\n } else {\n try {\n // Verify file exists and is a CSV\n if (!file.endsWith('.csv')) {\n logger.error(colors.red('File must be a CSV file'));\n this.process.exit(1);\n }\n files.push(file);\n } catch (err) {\n logger.error(colors.red(`Failed to access file: ${file}`));\n logger.error(colors.red((err as Error).message));\n this.process.exit(1);\n }\n }\n\n logger.info(\n colors.green(\n `Processing ${files.length} consent preferences files for partition: ${partition}`,\n ),\n );\n logger.debug(`Files to process: ${files.join(', ')}`);\n\n if (skipExistingRecordCheck) {\n logger.info(\n colors.bgYellow(\n `Skipping existing record check: ${skipExistingRecordCheck}`,\n ),\n );\n }\n\n await map(\n files,\n async (filePath) => {\n const fileName = basename(filePath).replace('.csv', '');\n await uploadPreferenceManagementPreferencesInteractive({\n receiptFilepath: join(receiptFileDir, `${fileName}-receipts.json`),\n auth,\n sombraAuth,\n file: filePath,\n partition,\n transcendUrl,\n skipConflictUpdates,\n skipWorkflowTriggers,\n skipExistingRecordCheck,\n isSilent,\n dryRun,\n attributes: splitCsvToList(attributes),\n forceTriggerWorkflows,\n });\n },\n { concurrency },\n );\n}\n"],"mappings":"4bA8BA,eAAsB,EAEpB,CACE,OACA,YACA,aACA,eACA,OAAO,GACP,YACA,SACA,0BACA,iBACA,uBACA,wBACA,sBACA,WACA,aACA,eAEa,CACT,GAAe,IACnB,EAAA,EAAO,MACL,EAAA,QAAO,IACL,uEACD,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGlB,CAAC,GAAQ,CAAC,IACZ,EAAA,EAAO,MACL,EAAA,QAAO,IACL,uHACD,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,IAAM,EAAkB,EAAE,CAE1B,GAAI,EACF,GAAI,CAEF,IAAM,GAAA,EAAA,EAAA,aAD+B,EAAU,CACb,OAAQ,GAASA,EAAK,SAAS,OAAO,CAAC,CAErE,EAAS,SAAW,IACtB,EAAA,EAAO,MACL,EAAA,QAAO,IAAI,oCAAoC,IAAY,CAC5D,CACD,KAAK,QAAQ,KAAK,EAAE,EAItB,EAAM,KAAK,GAAG,EAAS,IAAK,IAAA,EAAA,EAAA,MAAc,EAAWA,EAAK,CAAC,CAAC,OACrD,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,6BAA6B,IAAY,CAAC,CAClE,EAAA,EAAO,MAAM,EAAA,QAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,KAAK,QAAQ,KAAK,EAAE,MAGtB,GAAI,CAEG,EAAK,SAAS,OAAO,GACxB,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,0BAA0B,CAAC,CACnD,KAAK,QAAQ,KAAK,EAAE,EAEtB,EAAM,KAAK,EAAK,OACT,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,0BAA0B,IAAO,CAAC,CAC1D,EAAA,EAAO,MAAM,EAAA,QAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,KAAK,QAAQ,KAAK,EAAE,CAIxB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,cAAc,EAAM,OAAO,4CAA4C,IACxE,CACF,CACD,EAAA,EAAO,MAAM,qBAAqB,EAAM,KAAK,KAAK,GAAG,CAEjD,GACF,EAAA,EAAO,KACL,EAAA,QAAO,SACL,mCAAmC,IACpC,CACF,CAGH,MAAMC,EAAAA,GACJ,EACA,KAAO,IAAa,CAElB,MAAMC,EAAAA,EAAiD,CACrD,iBAAA,EAAA,EAAA,MAAsB,EAAgB,IAAA,EAAA,EAAA,UAFd,EAAS,CAAC,QAAQ,OAAQ,GAAG,CAEH,gBAAgB,CAClE,OACA,aACA,KAAM,EACN,YACA,eACA,sBACA,uBACA,0BACA,WACA,SACA,WAAYC,EAAAA,GAAe,EAAW,CACtC,wBACD,CAAC,EAEJ,CAAE,cAAa,CAChB"}

package/dist/{impl-DmXYpp-M.cjs → impl-CMwmo2vR.cjs}
@@ -1,2 +1,2 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-
- //# sourceMappingURL=impl-
+
const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-K6pQQtc7.cjs`);const t=require(`./syncConfigurationToTranscend-DKliAJhK.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./done-input-validation-DGckEJ5a.cjs`);let i=require(`@transcend-io/privacy-types`),a=require(`node:fs`),o=require(`colors`);o=e.t(o);let s=require(`io-ts`);s=e.t(s);let c=require(`cli-progress`);c=e.t(c);async function l({auth:e,file:l,transcendUrl:u,duration:d,subjectType:f,emailColumnName:p,coreIdentifierColumnName:m}){r.t(this.process.exit),(0,a.existsSync)(l)||(n.t.error(o.default.red(`File does not exist: "${l}". Please provide a valid path to a CSV file.`)),this.process.exit(1));try{let r=t.ti(u,e),a=t.oi(l,s.type({[p]:s.string,...m?{[m]:s.string}:{}}));if(!a.length)throw Error(`Input CSV is empty.`);let h=a.map((e,t)=>[e,t]).filter(([e])=>!e[p]?.trim());if(h.length){let e=h.map(([,e])=>e+2).join(`, `);throw Error(`The following rows are missing the required "${p}" column: ${e}`)}if(m){let e=a.map((e,t)=>[e,t]).filter(([e])=>!e[m]?.trim());if(e.length){let t=e.map(([,e])=>e+2).join(`, `);throw Error(`The following rows are missing the required "${m}" column: ${t}`)}}let g=Math.max(1,Math.floor(d/1e3)),_=a.map((e,t)=>{let n=e[p].trim(),r=m?e[m]?.trim():void 0;return{subjectType:f,scopes:[i.SombraStandardScope.PreferenceManagement],expiresIn:g,email:n,...r?{coreIdentifier:r}:{},index:t}}),v=new c.default.SingleBar({},c.default.Presets.shades_classic);v.start(_.length,0);let y=Date.now(),b=await t.Hr(r,_,e=>{v.update(e)});v.update(_.length),v.stop();let x=b.map(({accessToken:e,input:t})=>{if(typeof t.index!=`number`)throw Error(`Internal error: missing input index.`);return{...a[t.index],token:e}});n.t.info(o.default.magenta(`Writing access tokens to file "${l}"...`)),await t.l(l,x,!0);let S=Math.round((Date.now()-y)/1e3);n.t.info(o.default.green(`Successfully generated ${b.length} access tokens to "${l}" in ${S}s!`))}catch(e){n.t.error(o.default.red(`An error occurred while generating access tokens: ${e?.message||String(e)}`)),this.process.exit(1)}}exports.generateAccessTokens=l;
+ //# sourceMappingURL=impl-CMwmo2vR.cjs.map

package/dist/{impl-DmXYpp-M.cjs.map → impl-CMwmo2vR.cjs.map}
@@ -1 +1 @@
- {"version":3,"file":"impl-
+
{"version":3,"file":"impl-CMwmo2vR.cjs","names":["buildTranscendGraphQLClient","readCsv","t","SombraStandardScope","cliProgress","createPreferenceAccessTokens","writeCsv"],"sources":["../src/commands/consent/generate-access-tokens/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport * as t from 'io-ts';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { existsSync } from 'node:fs';\nimport cliProgress from 'cli-progress';\nimport {\n buildTranscendGraphQLClient,\n createPreferenceAccessTokens,\n type PreferenceAccessTokenInputWithIndex,\n} from '../../../lib/graphql';\nimport { readCsv } from '../../../lib/requests';\nimport { SombraStandardScope } from '@transcend-io/privacy-types';\nimport { writeCsv } from '../../../lib/helpers';\n\n/**\n * CLI flags accepted by the `generate-access-tokens` command.\n *\n * These are passed down from the CLI parser into the parent process.\n */\nexport type GenerateAccessTokenCommandFlags = {\n auth: string;\n file: string;\n duration: number;\n transcendUrl: string;\n subjectType: string;\n emailColumnName: string;\n coreIdentifierColumnName?: string;\n};\n\n/**\n * Take in a CSV of user identifiers and generate access tokens for each user.\n *\n * Expected CSV columns:\n * - [emailColumnName] (required)\n * - [coreIdentifierColumnName] (optional)\n *\n * @param this - Bound CLI context (provides process exit + logging).\n * @param flags - CLI options for the run.\n */\nexport async function generateAccessTokens(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n duration,\n subjectType,\n emailColumnName,\n coreIdentifierColumnName,\n }: GenerateAccessTokenCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n if (!existsSync(file)) {\n logger.error(\n colors.red(\n `File does not exist: \"${file}\". Please provide a valid path to a CSV file.`,\n ),\n );\n this.process.exit(1);\n }\n\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Read + parse CSV\n const codec = t.type({\n [emailColumnName]: t.string,\n ...(coreIdentifierColumnName\n ? 
{ [coreIdentifierColumnName]: t.string }\n : {}),\n });\n const rows: Array<Record<string, string>> = readCsv(file, codec);\n if (!rows.length) {\n throw new Error('Input CSV is empty.');\n }\n\n // Ensure emails and core identifiers exist\n const missingEmail = rows\n .map((r, i) => [r, i] as const)\n .filter(([r]) => !r[emailColumnName]?.trim());\n if (missingEmail.length) {\n const rowNumbers = missingEmail\n .map(([, i]) => i + 2) // +2 to account for header row and 0-indexing\n .join(', ');\n throw new Error(\n `The following rows are missing the required \"${emailColumnName}\" column: ${rowNumbers}`,\n );\n }\n if (coreIdentifierColumnName) {\n const missingCoreId = rows\n .map((r, i) => [r, i] as const)\n .filter(([r]) => !r[coreIdentifierColumnName]?.trim());\n if (missingCoreId.length) {\n const rowNumbers = missingCoreId\n .map(([, i]) => i + 2) // +2 to account for header row and 0-indexing\n .join(', ');\n throw new Error(\n `The following rows are missing the required \"${coreIdentifierColumnName}\" column: ${rowNumbers}`,\n );\n }\n }\n\n // Duration provided by CLI is in ms; GraphQL expects seconds\n const expiresInSeconds = Math.max(1, Math.floor(duration / 1000));\n\n // Build inputs for GraphQL\n const inputs = rows.map((r, index): PreferenceAccessTokenInputWithIndex => {\n const email = r[emailColumnName].trim();\n const coreIdentifier = coreIdentifierColumnName\n ? r[coreIdentifierColumnName]?.trim()\n : undefined;\n const scopes = [SombraStandardScope.PreferenceManagement];\n return {\n subjectType,\n scopes,\n expiresIn: expiresInSeconds,\n email,\n ...(coreIdentifier ? { coreIdentifier } : {}),\n index,\n };\n });\n\n // Progress bar\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n progressBar.start(inputs.length, 0);\n\n // Kick off token creation (batched internally)\n const t0 = Date.now();\n const results = await createPreferenceAccessTokens(\n client,\n inputs,\n (progress) => {\n progressBar.update(progress);\n },\n );\n progressBar.update(inputs.length);\n progressBar.stop();\n\n // Prepare output CSV rows\n const outputRows = results.map(({ accessToken, input }) => {\n if (typeof input.index !== 'number') {\n throw new Error('Internal error: missing input index.');\n }\n return {\n ...rows[input.index],\n token: accessToken,\n };\n });\n\n logger.info(colors.magenta(`Writing access tokens to file \"${file}\"...`));\n await writeCsv(file, outputRows, true);\n\n const totalTimeSec = Math.round((Date.now() - t0) / 1000);\n logger.info(\n colors.green(\n `Successfully generated ${results.length} access tokens to \"${file}\" in ${totalTimeSec}s!`,\n ),\n );\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n } catch (err: any) {\n logger.error(\n colors.red(\n `An error occurred while generating access tokens: ${\n err?.message || String(err)\n }`,\n ),\n );\n this.process.exit(1);\n 
}\n}\n"],"mappings":"meAyCA,eAAsB,EAEpB,CACE,OACA,OACA,eACA,WACA,cACA,kBACA,4BAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,EAClC,EAAA,EAAA,YAAY,EAAK,GACnB,EAAA,EAAO,MACL,EAAA,QAAO,IACL,yBAAyB,EAAK,+CAC/B,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,GAAI,CAEF,IAAM,EAASA,EAAAA,GAA4B,EAAc,EAAK,CASxD,EAAsCC,EAAAA,GAAQ,EANtCC,EAAE,KAAK,EAClB,GAAkBA,EAAE,OACrB,GAAI,EACA,EAAG,GAA2BA,EAAE,OAAQ,CACxC,EAAE,CACP,CAAC,CAC8D,CAChE,GAAI,CAAC,EAAK,OACR,MAAU,MAAM,sBAAsB,CAIxC,IAAM,EAAe,EAClB,KAAK,EAAG,IAAM,CAAC,EAAG,EAAE,CAAU,CAC9B,QAAQ,CAAC,KAAO,CAAC,EAAE,IAAkB,MAAM,CAAC,CAC/C,GAAI,EAAa,OAAQ,CACvB,IAAM,EAAa,EAChB,KAAK,EAAG,KAAO,EAAI,EAAE,CACrB,KAAK,KAAK,CACb,MAAU,MACR,gDAAgD,EAAgB,YAAY,IAC7E,CAEH,GAAI,EAA0B,CAC5B,IAAM,EAAgB,EACnB,KAAK,EAAG,IAAM,CAAC,EAAG,EAAE,CAAU,CAC9B,QAAQ,CAAC,KAAO,CAAC,EAAE,IAA2B,MAAM,CAAC,CACxD,GAAI,EAAc,OAAQ,CACxB,IAAM,EAAa,EAChB,KAAK,EAAG,KAAO,EAAI,EAAE,CACrB,KAAK,KAAK,CACb,MAAU,MACR,gDAAgD,EAAyB,YAAY,IACtF,EAKL,IAAM,EAAmB,KAAK,IAAI,EAAG,KAAK,MAAM,EAAW,IAAK,CAAC,CAG3D,EAAS,EAAK,KAAK,EAAG,IAA+C,CACzE,IAAM,EAAQ,EAAE,GAAiB,MAAM,CACjC,EAAiB,EACnB,EAAE,IAA2B,MAAM,CACnC,IAAA,GAEJ,MAAO,CACL,cACA,OAHa,CAACC,EAAAA,oBAAoB,qBAAqB,CAIvD,UAAW,EACX,QACA,GAAI,EAAiB,CAAE,iBAAgB,CAAG,EAAE,CAC5C,QACD,EACD,CAGI,EAAc,IAAIC,EAAAA,QAAY,UAClC,EAAE,CACFA,EAAAA,QAAY,QAAQ,eACrB,CACD,EAAY,MAAM,EAAO,OAAQ,EAAE,CAGnC,IAAM,EAAK,KAAK,KAAK,CACf,EAAU,MAAMC,EAAAA,GACpB,EACA,EACC,GAAa,CACZ,EAAY,OAAO,EAAS,EAE/B,CACD,EAAY,OAAO,EAAO,OAAO,CACjC,EAAY,MAAM,CAGlB,IAAM,EAAa,EAAQ,KAAK,CAAE,cAAa,WAAY,CACzD,GAAI,OAAO,EAAM,OAAU,SACzB,MAAU,MAAM,uCAAuC,CAEzD,MAAO,CACL,GAAG,EAAK,EAAM,OACd,MAAO,EACR,EACD,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,kCAAkC,EAAK,MAAM,CAAC,CACzE,MAAMC,EAAAA,EAAS,EAAM,EAAY,GAAK,CAEtC,IAAM,EAAe,KAAK,OAAO,KAAK,KAAK,CAAG,GAAM,IAAK,CACzD,EAAA,EAAO,KACL,EAAA,QAAO,MACL,0BAA0B,EAAQ,OAAO,qBAAqB,EAAK,OAAO,EAAa,IACxF,CACF,OAEM,EAAU,CACjB,EAAA,EAAO,MACL,EAAA,QAAO,IACL,qDACE,GAAK,SAAW,OAAO,EAAI,GAE9B,CACF,CACD,KAAK,QAAQ,KAAK,EAAE"}

package/dist/{impl-CR-wyJSg.cjs → impl-Cb64HwGx.cjs}
@@ -1,2 +1,2 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-
- //# sourceMappingURL=impl-
+
const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-K6pQQtc7.cjs`);const t=require(`./syncConfigurationToTranscend-DKliAJhK.cjs`);require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const n=require(`./done-input-validation-DGckEJ5a.cjs`);let r=require(`@transcend-io/privacy-types`);async function i({auth:e,transcendUrl:i,folderPath:a,requestIds:o,statuses:s=[r.RequestStatus.Approving,r.RequestStatus.Downloadable],concurrency:c,createdAtBefore:l,createdAtAfter:u,updatedAtBefore:d,updatedAtAfter:f,approveAfterDownload:p}){n.t(this.process.exit),await t.Z({transcendUrl:i,auth:e,folderPath:a,requestIds:o,statuses:s,concurrency:c,createdAtBefore:l,createdAtAfter:u,updatedAtBefore:d,updatedAtAfter:f,approveAfterDownload:p})}exports.downloadFiles=i;
+ //# sourceMappingURL=impl-Cb64HwGx.cjs.map

package/dist/{impl-CR-wyJSg.cjs.map → impl-Cb64HwGx.cjs.map}
@@ -1 +1 @@
- {"version":3,"file":"impl-
+
{"version":3,"file":"impl-Cb64HwGx.cjs","names":["RequestStatus","downloadPrivacyRequestFiles"],"sources":["../src/commands/request/download-files/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { downloadPrivacyRequestFiles } from '../../../lib/requests';\nimport { RequestStatus } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface DownloadFilesCommandFlags {\n auth: string;\n sombraAuth?: string;\n concurrency: number;\n requestIds?: string[];\n statuses?: RequestStatus[];\n folderPath: string;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n updatedAtBefore?: Date;\n updatedAtAfter?: Date;\n approveAfterDownload: boolean;\n transcendUrl: string;\n}\n\nexport async function downloadFiles(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n folderPath,\n requestIds,\n statuses = [RequestStatus.Approving, RequestStatus.Downloadable],\n concurrency,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n approveAfterDownload,\n }: DownloadFilesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await downloadPrivacyRequestFiles({\n transcendUrl,\n auth,\n folderPath,\n requestIds,\n statuses,\n concurrency,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n approveAfterDownload,\n });\n}\n"],"mappings":"kWAoBA,eAAsB,EAEpB,CACE,OACA,eACA,aACA,aACA,WAAW,CAACA,EAAAA,cAAc,UAAWA,EAAAA,cAAc,aAAa,CAChE,cACA,kBACA,iBACA,kBACA,iBACA,wBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMC,EAAAA,EAA4B,CAChC,eACA,OACA,aACA,aACA,WACA,cACA,kBACA,iBACA,kBACA,iBACA,uBACD,CAAC"}

package/dist/{impl-CV3axMeT.cjs → impl-CdfA8kxo.cjs}
@@ -1,2 +1,2 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-
- //# sourceMappingURL=impl-
+
const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-K6pQQtc7.cjs`);const t=require(`./syncConfigurationToTranscend-DKliAJhK.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./cron-lijiEqFA.cjs`),i=require(`./done-input-validation-DGckEJ5a.cjs`);let a=require(`colors`);a=e.t(a);let o=require(`io-ts`);o=e.t(o);const s=o.type({"Request Id":o.string});async function c({auth:e,dataSiloId:o,file:c,transcendUrl:l}){i.t(this.process.exit),n.t.info(a.default.magenta(`Reading "${c}" from disk`)),await r.n({requestIds:t.oi(c,s).map(e=>e[`Request Id`]),transcendUrl:l,auth:e,dataSiloId:o})}exports.markRequestDataSilosCompleted=c;
+ //# sourceMappingURL=impl-CdfA8kxo.cjs.map

package/dist/{impl-CV3axMeT.cjs.map → impl-CdfA8kxo.cjs.map}
@@ -1 +1 @@
- {"version":3,"file":"impl-
+
{"version":3,"file":"impl-CdfA8kxo.cjs","names":["t","markRequestDataSiloIdsCompleted","readCsv"],"sources":["../src/commands/request/system/mark-request-data-silos-completed/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../../context';\nimport colors from 'colors';\nimport * as t from 'io-ts';\n\nimport { logger } from '../../../../logger';\nimport { markRequestDataSiloIdsCompleted } from '../../../../lib/cron';\nimport { readCsv } from '../../../../lib/requests';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\n\nconst RequestIdRow = t.type({\n 'Request Id': t.string,\n});\n\nexport interface MarkRequestDataSilosCompletedCommandFlags {\n auth: string;\n dataSiloId: string;\n file: string;\n transcendUrl: string;\n}\n\nexport async function markRequestDataSilosCompleted(\n this: LocalContext,\n {\n auth,\n dataSiloId,\n file,\n transcendUrl,\n }: MarkRequestDataSilosCompletedCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n logger.info(colors.magenta(`Reading \"${file}\" from disk`));\n const activeResults = readCsv(file, RequestIdRow);\n\n await markRequestDataSiloIdsCompleted({\n requestIds: activeResults.map((request) => request['Request Id']),\n transcendUrl,\n auth,\n dataSiloId,\n });\n}\n"],"mappings":"+ZASA,MAAM,EAAeA,EAAE,KAAK,CAC1B,aAAcA,EAAE,OACjB,CAAC,CASF,eAAsB,EAEpB,CACE,OACA,aACA,OACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,YAAY,EAAK,aAAa,CAAC,CAG1D,MAAMC,EAAAA,EAAgC,CACpC,WAHoBC,EAAAA,GAAQ,EAAM,EAAa,CAGrB,IAAK,GAAY,EAAQ,cAAc,CACjE,eACA,OACA,aACD,CAAC"}

package/dist/{impl-C-aKX3zu.cjs → impl-CkfOZzpI.cjs}
@@ -1,2 +1,2 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./constants-
- //# sourceMappingURL=impl-
+
const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./constants-K6pQQtc7.cjs`),n=require(`./syncConfigurationToTranscend-DKliAJhK.cjs`);require(`./enums-BZulhPFa.cjs`);const r=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`),require(`./codecs-Bvmb8o9R.cjs`),require(`./api-keys-Bvt2HbSv.cjs`);const i=require(`./done-input-validation-DGckEJ5a.cjs`),a=require(`./code-scanning-BZzwKEfY.cjs`);let o=require(`colors`);o=e.t(o);let s=require(`child_process`);const c=`A repository name must be provided. You can specify using --repositoryName=$REPO_NAME or by ensuring the command "git config --get remote.origin.url" returns the name of the repository`;async function l({auth:e,scanPath:l,ignoreDirs:u,repositoryName:d,transcendUrl:f}){i.t(this.process.exit);let p=d;if(!p)try{let e=(0,s.execSync)(`cd ${l} && git config --get remote.origin.url`).toString(`utf-8`).trim();[p]=e.includes(`https:`)?e.split(`/`).slice(3).join(`/`).split(`.`):(e.split(`:`).pop()||``).split(`.`),p||(r.t.error(o.default.red(c)),this.process.exit(1))}catch(e){r.t.error(o.default.red(`${c} - Got error: ${e.message}`)),this.process.exit(1)}let m=n.ti(f,e),h=await a.t({scanPath:l,ignoreDirs:u,repositoryName:p});await n.zt(m,h);let g=new URL(t.t);g.pathname=`/code-scanning/code-packages`,r.t.info(o.default.green(`Scan found ${h.length} packages at ${l}! View results at '${g.href}'`))}exports.scanPackages=l;
+ //# sourceMappingURL=impl-CkfOZzpI.cjs.map

package/dist/{impl-C-aKX3zu.cjs.map → impl-CkfOZzpI.cjs.map}
@@ -1 +1 @@
- {"version":3,"file":"impl-
+
{"version":3,"file":"impl-CkfOZzpI.cjs","names":["buildTranscendGraphQLClient","findCodePackagesInFolder","syncCodePackages","ADMIN_DASH"],"sources":["../src/commands/inventory/scan-packages/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { ADMIN_DASH } from '../../../constants';\nimport { findCodePackagesInFolder } from '../../../lib/code-scanning';\nimport {\n buildTranscendGraphQLClient,\n syncCodePackages,\n} from '../../../lib/graphql';\nimport { execSync } from 'child_process';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nconst REPO_ERROR =\n 'A repository name must be provided. ' +\n 'You can specify using --repositoryName=$REPO_NAME or by ensuring the ' +\n 'command \"git config --get remote.origin.url\" returns the name of the repository';\n\nexport interface ScanPackagesCommandFlags {\n auth: string;\n scanPath: string;\n ignoreDirs?: string[];\n repositoryName?: string;\n transcendUrl: string;\n}\n\nexport async function scanPackages(\n this: LocalContext,\n {\n auth,\n scanPath,\n ignoreDirs,\n repositoryName,\n transcendUrl,\n }: ScanPackagesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Ensure repository name is specified\n let gitRepositoryName = repositoryName;\n if (!gitRepositoryName) {\n try {\n const name = execSync(\n `cd ${scanPath} && git config --get remote.origin.url`,\n );\n // Trim and parse the URL\n const url = name.toString('utf-8').trim();\n [gitRepositoryName] = !url.includes('https:')\n ? (url.split(':').pop() || '').split('.')\n : url.split('/').slice(3).join('/').split('.');\n if (!gitRepositoryName) {\n logger.error(colors.red(REPO_ERROR));\n this.process.exit(1);\n }\n } catch (err) {\n logger.error(colors.red(`${REPO_ERROR} - Got error: ${err.message}`));\n this.process.exit(1);\n }\n }\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Scan the codebase to discovery packages\n const results = await findCodePackagesInFolder({\n scanPath,\n ignoreDirs,\n repositoryName: gitRepositoryName,\n });\n\n // Report scan to Transcend\n await syncCodePackages(client, results);\n\n const newUrl = new URL(ADMIN_DASH);\n newUrl.pathname = '/code-scanning/code-packages';\n\n // Indicate success\n logger.info(\n colors.green(\n `Scan found ${results.length} packages at ${scanPath}! ` +\n `View results at '${newUrl.href}'`,\n ),\n );\n}\n"],"mappings":"ueAYA,MAAM,EACJ,2LAYF,eAAsB,EAEpB,CACE,OACA,WACA,aACA,iBACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAI,EAAoB,EACxB,GAAI,CAAC,EACH,GAAI,CAKF,IAAM,GAAA,EAAA,EAAA,UAHJ,MAAM,EAAS,wCAChB,CAEgB,SAAS,QAAQ,CAAC,MAAM,CACzC,CAAC,GAAsB,EAAI,SAAS,SAAS,CAEzC,EAAI,MAAM,IAAI,CAAC,MAAM,EAAE,CAAC,KAAK,IAAI,CAAC,MAAM,IAAI,EAD3C,EAAI,MAAM,IAAI,CAAC,KAAK,EAAI,IAAI,MAAM,IAAI,CAEtC,IACH,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,EAAW,CAAC,CACpC,KAAK,QAAQ,KAAK,EAAE,QAEf,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,GAAG,EAAW,gBAAgB,EAAI,UAAU,CAAC,CACrE,KAAK,QAAQ,KAAK,EAAE,CAKxB,IAAM,EAASA,EAAAA,GAA4B,EAAc,EAAK,CAGxD,EAAU,MAAMC,EAAAA,EAAyB,CAC7C,WACA,aACA,eAAgB,EACjB,CAAC,CAGF,MAAMC,EAAAA,GAAiB,EAAQ,EAAQ,CAEvC,IAAM,EAAS,IAAI,IAAIC,EAAAA,EAAW,CAClC,EAAO,SAAW,+BAGlB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,cAAc,EAAQ,OAAO,eAAe,EAAS,qBAC/B,EAAO,KAAK,GACnC,CACF"}

package/dist/{impl-CKYwKeLz.cjs → impl-ClujxTb8.cjs}
@@ -1,2 +1,2 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-
- //# sourceMappingURL=impl-
+
const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-K6pQQtc7.cjs`);const t=require(`./syncConfigurationToTranscend-DKliAJhK.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./data-inventory-BKAQGjFN.cjs`),i=require(`./done-input-validation-DGckEJ5a.cjs`);let a=require(`colors`);a=e.t(a);async function o({auth:e,file:o,transcendUrl:s,dataSiloIds:c,subCategories:l,status:u,includeEncryptedSnippets:d}){i.t(this.process.exit);try{let i=await r.t(t.ti(s,e),{dataSiloIds:c,subCategories:l,status:u,includeEncryptedSnippets:d});n.t.info(a.default.magenta(`Writing unstructured discovery files to file "${o}"...`));let f=[];await t.d(o,i.map(e=>{let n={"Entry ID":e.id,"Data Silo ID":e.dataSiloId,"Object Path ID":e.scannedObjectPathId,"Object ID":e.scannedObjectId,...d?{Entry:e.name,"Context Snippet":e.contextSnippet}:{},"Data Category":`${e.dataSubCategory.category}:${e.dataSubCategory.name}`,"Classification Status":e.status,"Confidence Score":e.confidence,"Classification Method":e.classificationMethod,"Classifier Version":e.classifierVersion};return f=t.Ds([...f,...Object.keys(n)]),n}),f)}catch(e){n.t.error(a.default.red(`An error occurred syncing the unstructured discovery files: ${e.message}`)),this.process.exit(1)}n.t.info(a.default.green(`Successfully synced unstructured discovery files to disk at ${o}!`))}exports.pullUnstructuredDiscoveryFiles=o;
+ //# sourceMappingURL=impl-ClujxTb8.cjs.map

package/dist/{impl-CKYwKeLz.cjs.map → impl-ClujxTb8.cjs.map}
@@ -1 +1 @@
- {"version":3,"file":"impl-
+
{"version":3,"file":"impl-ClujxTb8.cjs","names":["pullUnstructuredSubDataPointRecommendations","buildTranscendGraphQLClient","writeLargeCsv","uniq"],"sources":["../src/commands/inventory/pull-unstructured-discovery-files/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { uniq } from 'lodash-es';\nimport { pullUnstructuredSubDataPointRecommendations } from '../../../lib/data-inventory';\nimport { buildTranscendGraphQLClient } from '../../../lib/graphql';\nimport { logger } from '../../../logger';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { writeLargeCsv } from '../../../lib/helpers';\n\nexport interface PullUnstructuredDiscoveryFilesCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n subCategories?: string[];\n status?: UnstructuredSubDataPointRecommendationStatus[];\n includeEncryptedSnippets: boolean;\n}\n\nexport async function pullUnstructuredDiscoveryFiles(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n dataSiloIds,\n subCategories,\n status,\n includeEncryptedSnippets,\n }: PullUnstructuredDiscoveryFilesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const entries = await pullUnstructuredSubDataPointRecommendations(client, {\n dataSiloIds,\n subCategories, // TODO: https://transcend.height.app/T-40482 - do by name not ID\n status,\n includeEncryptedSnippets,\n });\n\n logger.info(\n colors.magenta(\n `Writing unstructured discovery files to file \"${file}\"...`,\n ),\n );\n let headers: string[] = [];\n const inputs = entries.map((entry) => {\n const result = {\n 'Entry ID': entry.id,\n 'Data Silo ID': entry.dataSiloId,\n 'Object Path ID': entry.scannedObjectPathId,\n 'Object ID': entry.scannedObjectId,\n ...(includeEncryptedSnippets\n ? 
{ Entry: entry.name, 'Context Snippet': entry.contextSnippet }\n : {}),\n 'Data Category': `${entry.dataSubCategory.category}:${entry.dataSubCategory.name}`,\n 'Classification Status': entry.status,\n 'Confidence Score': entry.confidence,\n 'Classification Method': entry.classificationMethod,\n 'Classifier Version': entry.classifierVersion,\n };\n headers = uniq([...headers, ...Object.keys(result)]);\n return result;\n });\n await writeLargeCsv(file, inputs, headers);\n } catch (err) {\n logger.error(\n colors.red(\n `An error occurred syncing the unstructured discovery files: ${err.message}`,\n ),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced unstructured discovery files to disk at ${file}!`,\n ),\n );\n}\n"],"mappings":"yYAoBA,eAAsB,EAEpB,CACE,OACA,OACA,eACA,cACA,gBACA,SACA,4BAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAI,CAIF,IAAM,EAAU,MAAMA,EAAAA,EAFPC,EAAAA,GAA4B,EAAc,EAAK,CAEY,CACxE,cACA,gBACA,SACA,2BACD,CAAC,CAEF,EAAA,EAAO,KACL,EAAA,QAAO,QACL,iDAAiD,EAAK,MACvD,CACF,CACD,IAAI,EAAoB,EAAE,CAmB1B,MAAMC,EAAAA,EAAc,EAlBL,EAAQ,IAAK,GAAU,CACpC,IAAM,EAAS,CACb,WAAY,EAAM,GAClB,eAAgB,EAAM,WACtB,iBAAkB,EAAM,oBACxB,YAAa,EAAM,gBACnB,GAAI,EACA,CAAE,MAAO,EAAM,KAAM,kBAAmB,EAAM,eAAgB,CAC9D,EAAE,CACN,gBAAiB,GAAG,EAAM,gBAAgB,SAAS,GAAG,EAAM,gBAAgB,OAC5E,wBAAyB,EAAM,OAC/B,mBAAoB,EAAM,WAC1B,wBAAyB,EAAM,qBAC/B,qBAAsB,EAAM,kBAC7B,CAED,MADA,GAAUC,EAAAA,GAAK,CAAC,GAAG,EAAS,GAAG,OAAO,KAAK,EAAO,CAAC,CAAC,CAC7C,GACP,CACgC,EAAQ,OACnC,EAAK,CACZ,EAAA,EAAO,MACL,EAAA,QAAO,IACL,+DAA+D,EAAI,UACpE,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,CAItB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,+DAA+D,EAAK,GACrE,CACF"}

package/dist/{impl-B04CctrY.cjs → impl-D-IWtHQi.cjs}
@@ -1,4 +1,4 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-
+
const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-K6pQQtc7.cjs`);const t=require(`./syncConfigurationToTranscend-DKliAJhK.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./done-input-validation-DGckEJ5a.cjs`),i=require(`./preference-management-B36PQuMK.cjs`);let a=require(`node:fs`),o=require(`node:path`),s=require(`colors`);s=e.t(s);let c=require(`cli-progress`);c=e.t(c);async function l({auth:e,partition:l,sombraAuth:u,file:d=``,directory:f,transcendUrl:p,timestamp:m,maxConcurrency:h,maxItemsInChunk:g,receiptDirectory:_,fileConcurrency:v}){f&&d&&(n.t.error(s.default.red(`Cannot provide both a directory and a file. Please provide only one.`)),this.process.exit(1)),!d&&!f&&(n.t.error(s.default.red(`A file or directory must be provided. Please provide one using --file=./preferences.csv or --directory=./preferences`)),this.process.exit(1)),r.t(this.process.exit);let y=[];if(f)try{let e=(0,a.readdirSync)(f).filter(e=>e.endsWith(`.csv`));e.length===0&&(n.t.error(s.default.red(`No CSV files found in directory: ${f}`)),this.process.exit(1)),y.push(...e.map(e=>(0,o.join)(f,e)))}catch(e){n.t.error(s.default.red(`Failed to read directory: ${f}`)),n.t.error(s.default.red(e.message)),this.process.exit(1)}else try{d.endsWith(`.csv`)||(n.t.error(s.default.red(`File must be a CSV file`)),this.process.exit(1)),y.push(d)}catch(e){n.t.error(s.default.red(`Failed to access file: ${d}`)),n.t.error(s.default.red(e.message)),this.process.exit(1)}n.t.debug(s.default.green(`Processing ${y.length} consent preferences files for partition: ${l}`)),n.t.debug(`\nFiles to process: ${y.join(`, `)}\n`);let b=await t.ei(p,e,u),x=new c.default.SingleBar({format:`Deletion Progress |${s.default.cyan(`[{bar}]`)}| Duration: ${s.default.red(`{duration_formatted}`)} | {value}/{total} Files Processed `},c.default.Presets.shades_classic);x.start(y.length,0);let S=await t.Ts(y,async e=>{let t=await i.t(b,{partition:l,filePath:e,timestamp:m,maxItemsInChunk:g,maxConcurrency:h});return x.increment(),t},{concurrency:v});x.stop();let C=S.flat(),w=``;C.length>0&&(w=(0,o.join)(_,`deletion-failures-${Date.now()}.csv`),t.l(w,C,!0)),n.t.info(s.default.green(`
 
 ==================================
 
@@ -9,4 +9,4 @@ const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-wkuhlP8d.cjs`);cons
 ==================================
 
 `))}exports.deletePreferenceRecords=l;
- //# sourceMappingURL=impl-
+ //# sourceMappingURL=impl-D-IWtHQi.cjs.map

package/dist/{impl-B04CctrY.cjs.map → impl-D-IWtHQi.cjs.map}
@@ -1 +1 @@
- {"version":3,"file":"impl-
+
{"version":3,"file":"impl-D-IWtHQi.cjs","names":["file","createSombraGotInstance","cliProgress","map","bulkDeletePreferenceRecords"],"sources":["../src/commands/consent/delete-preference-records/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport colors from 'colors';\n\nimport { createSombraGotInstance } from '../../../lib/graphql';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { logger } from '../../../logger';\nimport { readdirSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { map } from '../../../lib/bluebird';\nimport { bulkDeletePreferenceRecords } from '../../../lib/preference-management';\nimport cliProgress from 'cli-progress';\nimport { writeCsv } from '../../../lib/helpers';\n\nexport interface DeletePreferenceRecordsCommandFlags {\n /** Transcend API key for authentication */\n auth: string;\n /** Partition ID to delete preference records from */\n partition: string;\n /** Optional Sombra internal key for self-hosted instances */\n sombraAuth?: string;\n /** Path to the CSV file used to identify preference records to delete */\n file?: string;\n /** Path to the directory of CSV files to load preferences from */\n directory?: string;\n /** Base URL for the Transcend API */\n transcendUrl: string;\n /** The timestamp when the deletion operation is made. Used for logging purposes. */\n timestamp: Date;\n /** Maximum items to include in each deletion chunk */\n maxItemsInChunk: number;\n /** Maximum concurrency for deletion requests */\n maxConcurrency: number;\n /** Directory to write receipts of failed deletions to */\n receiptDirectory: string;\n /** Number of files to process concurrently when deleting preference records from multiple files */\n fileConcurrency: number;\n}\n\nexport async function deletePreferenceRecords(\n this: LocalContext,\n {\n auth,\n partition,\n sombraAuth,\n file = '',\n directory,\n transcendUrl,\n timestamp,\n maxConcurrency,\n maxItemsInChunk,\n receiptDirectory,\n fileConcurrency,\n }: DeletePreferenceRecordsCommandFlags,\n): Promise<void> {\n if (!!directory && !!file) {\n logger.error(\n colors.red(\n 'Cannot provide both a directory and a file. Please provide only one.',\n ),\n );\n this.process.exit(1);\n }\n\n if (!file && !directory) {\n logger.error(\n colors.red(\n 'A file or directory must be provided. 
Please provide one using --file=./preferences.csv or --directory=./preferences',\n ),\n );\n this.process.exit(1);\n }\n doneInputValidation(this.process.exit);\n\n const files: string[] = [];\n\n if (directory) {\n try {\n const filesInDirectory = readdirSync(directory);\n const csvFiles = filesInDirectory.filter((file) => file.endsWith('.csv'));\n\n if (csvFiles.length === 0) {\n logger.error(\n colors.red(`No CSV files found in directory: ${directory}`),\n );\n this.process.exit(1);\n }\n\n // Add full paths for each CSV file\n files.push(...csvFiles.map((file) => join(directory, file)));\n } catch (err) {\n logger.error(colors.red(`Failed to read directory: ${directory}`));\n logger.error(colors.red((err as Error).message));\n this.process.exit(1);\n }\n } else {\n try {\n // Verify file exists and is a CSV\n if (!file.endsWith('.csv')) {\n logger.error(colors.red('File must be a CSV file'));\n this.process.exit(1);\n }\n files.push(file);\n } catch (err) {\n logger.error(colors.red(`Failed to access file: ${file}`));\n logger.error(colors.red((err as Error).message));\n this.process.exit(1);\n }\n }\n\n logger.debug(\n colors.green(\n `Processing ${files.length} consent preferences files for partition: ${partition}`,\n ),\n );\n logger.debug(`\\nFiles to process: ${files.join(', ')}\\n`);\n\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n const globalProgressBar = new cliProgress.SingleBar(\n {\n format: `Deletion Progress |${colors.cyan(\n '[{bar}]',\n )}| Duration: ${colors.red(\n '{duration_formatted}',\n )} | {value}/{total} Files Processed `,\n },\n cliProgress.Presets.shades_classic,\n );\n globalProgressBar.start(files.length, 0);\n\n // Process batch of files with concurrency\n const failedResultsArrays = await map(\n files,\n async (filePath) => {\n const result = await bulkDeletePreferenceRecords(sombra, {\n partition,\n filePath,\n timestamp,\n maxItemsInChunk,\n maxConcurrency,\n });\n globalProgressBar.increment();\n return result;\n },\n { concurrency: fileConcurrency },\n );\n globalProgressBar.stop();\n const failedResults = failedResultsArrays.flat();\n\n // Check for failed results and write receipt if any\n let receiptPath = '';\n if (failedResults.length > 0) {\n receiptPath = join(receiptDirectory, `deletion-failures-${Date.now()}.csv`);\n writeCsv(receiptPath, failedResults, true);\n }\n\n logger.info(colors.green('\\n\\n ================================== \\n\\n'));\n logger.info(colors.green('\\n#### Deletion Summary Report #####\\n'));\n logger.info(\n colors.green(\n `📁 Total Files Processed: ${files.length} \\n` +\n `❌ Errors: ${failedResults.length} \\n` +\n `📝 Receipt Path: ${receiptPath || 'N/A'}`,\n ),\n );\n 
logger.info(colors.green('\\n\\n==================================\\n\\n'));\n}\n"],"mappings":"meAsCA,eAAsB,EAEpB,CACE,OACA,YACA,aACA,OAAO,GACP,YACA,eACA,YACA,iBACA,kBACA,mBACA,mBAEa,CACT,GAAe,IACnB,EAAA,EAAO,MACL,EAAA,QAAO,IACL,uEACD,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGlB,CAAC,GAAQ,CAAC,IACZ,EAAA,EAAO,MACL,EAAA,QAAO,IACL,uHACD,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAEtB,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,IAAM,EAAkB,EAAE,CAE1B,GAAI,EACF,GAAI,CAEF,IAAM,GAAA,EAAA,EAAA,aAD+B,EAAU,CACb,OAAQ,GAASA,EAAK,SAAS,OAAO,CAAC,CAErE,EAAS,SAAW,IACtB,EAAA,EAAO,MACL,EAAA,QAAO,IAAI,oCAAoC,IAAY,CAC5D,CACD,KAAK,QAAQ,KAAK,EAAE,EAItB,EAAM,KAAK,GAAG,EAAS,IAAK,IAAA,EAAA,EAAA,MAAc,EAAWA,EAAK,CAAC,CAAC,OACrD,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,6BAA6B,IAAY,CAAC,CAClE,EAAA,EAAO,MAAM,EAAA,QAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,KAAK,QAAQ,KAAK,EAAE,MAGtB,GAAI,CAEG,EAAK,SAAS,OAAO,GACxB,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,0BAA0B,CAAC,CACnD,KAAK,QAAQ,KAAK,EAAE,EAEtB,EAAM,KAAK,EAAK,OACT,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,0BAA0B,IAAO,CAAC,CAC1D,EAAA,EAAO,MAAM,EAAA,QAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,KAAK,QAAQ,KAAK,EAAE,CAIxB,EAAA,EAAO,MACL,EAAA,QAAO,MACL,cAAc,EAAM,OAAO,4CAA4C,IACxE,CACF,CACD,EAAA,EAAO,MAAM,uBAAuB,EAAM,KAAK,KAAK,CAAC,IAAI,CAGzD,IAAM,EAAS,MAAMC,EAAAA,GAAwB,EAAc,EAAM,EAAW,CACtE,EAAoB,IAAIC,EAAAA,QAAY,UACxC,CACE,OAAQ,sBAAsB,EAAA,QAAO,KACnC,UACD,CAAC,cAAc,EAAA,QAAO,IACrB,uBACD,CAAC,qCACH,CACDA,EAAAA,QAAY,QAAQ,eACrB,CACD,EAAkB,MAAM,EAAM,OAAQ,EAAE,CAGxC,IAAM,EAAsB,MAAMC,EAAAA,GAChC,EACA,KAAO,IAAa,CAClB,IAAM,EAAS,MAAMC,EAAAA,EAA4B,EAAQ,CACvD,YACA,WACA,YACA,kBACA,iBACD,CAAC,CAEF,OADA,EAAkB,WAAW,CACtB,GAET,CAAE,YAAa,EAAiB,CACjC,CACD,EAAkB,MAAM,CACxB,IAAM,EAAgB,EAAoB,MAAM,CAG5C,EAAc,GACd,EAAc,OAAS,IACzB,GAAA,EAAA,EAAA,MAAmB,EAAkB,qBAAqB,KAAK,KAAK,CAAC,MAAM,CAC3E,EAAA,EAAS,EAAa,EAAe,GAAK,EAG5C,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM;;;;EAA+C,CAAC,CACzE,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM;;EAAyC,CAAC,CACnE,EAAA,EAAO,KACL,EAAA,QAAO,MACL,6BAA6B,EAAM,OAAO,eAC3B,EAAc,OAAO,sBACd,GAAe,QACtC,CACF,CACD,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM;;;;EAA6C,CAAC"}

package/dist/{impl-BGoAnVJu.cjs → impl-D9-ZQmJB.cjs}
@@ -1,4 +1,4 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./constants-
+
const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./constants-K6pQQtc7.cjs`),n=require(`./syncConfigurationToTranscend-DKliAJhK.cjs`);require(`./enums-BZulhPFa.cjs`);const r=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`),require(`./codecs-Bvmb8o9R.cjs`),require(`./api-keys-Bvt2HbSv.cjs`);const i=require(`./done-input-validation-DGckEJ5a.cjs`),a=require(`./code-scanning-BZzwKEfY.cjs`);let o=require(`colors`);o=e.t(o);let s=require(`fast-glob`);s=e.t(s);let c=require(`query-string`);async function l({scanPath:e,fileGlobs:t,ignoreDirs:n,config:i}){let{ignoreDirs:a,supportedFiles:o,scanFunction:c}=i,u=t===``?o:o.concat(t.split(`,`)),d=[...n.split(`,`),...a].filter(e=>e.length>0);try{let t=await(0,s.default)(`${e}/**/${u.join(`|`)}`,{ignore:d.map(t=>`${e}/**/${t}`),unique:!0,onlyFiles:!0});r.t.info(`Scanning: ${t.length} files`);let n=t.map(e=>c(e)).flat().map(e=>e.softwareDevelopmentKits||[]).flat(),i=[...new Set(n.map(e=>e.name))];return r.t.info(`Found: ${i.length} unique dependencies`),i.map(t=>({name:t,resourceId:`${e}/**/${t}`,useStrictClassifier:!0}))}catch(e){throw Error(`Error scanning globs ${l} with error: ${e}`)}}async function u({scanPath:e,dataSiloId:s,auth:u,fileGlobs:d,ignoreDirs:f,transcendUrl:p}){i.t(this.process.exit);let m=n.ti(p,u),h=await n.Xr(m,s),g=a.n[h.dataSilo.type];g||(r.t.error(o.default.red(`This plugin "${h.dataSilo.type}" is not supported for offline silo discovery.`)),this.process.exit(1));let _=await l({scanPath:e,fileGlobs:d,ignoreDirs:f,config:g});await n.ut(m,h.id,_);let v=new URL(t.t);v.pathname=`/data-map/data-inventory/silo-discovery/triage`,v.search=(0,c.stringify)({filters:JSON.stringify({pluginIds:[h.id]})}),r.t.info(o.default.green(`Scan found ${_.length} potential data silos at ${e}! View at '${v.href}'
 
 NOTE: it may take 2-3 minutes for scan results to appear in the UI.`))}exports.discoverSilos=u;
- //# sourceMappingURL=impl-
+ //# sourceMappingURL=impl-D9-ZQmJB.cjs.map

package/dist/{impl-BGoAnVJu.cjs.map → impl-D9-ZQmJB.cjs.map}
@@ -1 +1 @@
- {"version":3,"file":"impl-
+
{"version":3,"file":"impl-D9-ZQmJB.cjs","names":["buildTranscendGraphQLClient","fetchActiveSiloDiscoPlugin","SILO_DISCOVERY_CONFIGS","uploadSiloDiscoveryResults","ADMIN_DASH"],"sources":["../src/lib/code-scanning/findFilesToScan.ts","../src/commands/inventory/discover-silos/impl.ts"],"sourcesContent":["import fastGlob from 'fast-glob';\nimport { logger } from '../../logger';\nimport { CodeScanningConfig } from './types';\n\nexport interface SiloDiscoveryRawResults {\n /** The name of the potential data silo entry */\n name: string;\n /** A unique UUID (represents the same resource across different silo discovery runs) */\n resourceId: string;\n /** Any hosts associated with the entry */\n host?: string;\n /** Type of data silo */\n type?: string | undefined;\n}\n\n/**\n * Helper to scan for data silos in all package.json files that it can find in a directory\n *\n * @deprecated TODO: https://transcend.height.app/T-32325 - use code scanning instead\n * @param options - Options\n * @returns the list of integrations\n */\nexport async function findFilesToScan({\n scanPath,\n fileGlobs,\n ignoreDirs,\n config,\n}: {\n /** Where to look for package.json files */\n scanPath: string;\n /** Globs to look for */\n fileGlobs: string;\n /** The directories to ignore (excludes node_modules and serverless-build) */\n ignoreDirs: string;\n /** Silo Discovery configuration */\n config: CodeScanningConfig;\n}): Promise<SiloDiscoveryRawResults[]> {\n const { ignoreDirs: IGNORE_DIRS, supportedFiles, scanFunction } = config;\n const globsToSupport =\n fileGlobs === ''\n ? supportedFiles\n : supportedFiles.concat(fileGlobs.split(','));\n const dirsToIgnore = [...ignoreDirs.split(','), ...IGNORE_DIRS].filter(\n (dir) => dir.length > 0,\n );\n try {\n const filesToScan: string[] = await fastGlob(\n `${scanPath}/**/${globsToSupport.join('|')}`,\n {\n ignore: dirsToIgnore.map((dir: string) => `${scanPath}/**/${dir}`),\n unique: true,\n onlyFiles: true,\n },\n );\n logger.info(`Scanning: ${filesToScan.length} files`);\n const allPackages = filesToScan\n .map((filePath: string) => scanFunction(filePath))\n .flat();\n const allSdks = allPackages\n .map((appPackage) => appPackage.softwareDevelopmentKits || [])\n .flat();\n const uniqueDeps = new Set(allSdks.map((sdk) => sdk.name));\n const deps = [...uniqueDeps];\n logger.info(`Found: ${deps.length} unique dependencies`);\n return deps.map((dep) => ({\n name: dep,\n resourceId: `${scanPath}/**/${dep}`,\n useStrictClassifier: true,\n }));\n } catch (error) {\n throw new Error(\n `Error scanning globs ${findFilesToScan} with error: ${error}`,\n );\n }\n}\n","import type { LocalContext } from '../../../context';\nimport { stringify } from 'query-string';\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { ADMIN_DASH } from '../../../constants';\nimport {\n fetchActiveSiloDiscoPlugin,\n buildTranscendGraphQLClient,\n uploadSiloDiscoveryResults,\n} from '../../../lib/graphql';\nimport { findFilesToScan } from '../../../lib/code-scanning/findFilesToScan';\nimport { SILO_DISCOVERY_CONFIGS } from '../../../lib/code-scanning';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface DiscoverSilosCommandFlags {\n scanPath: string;\n dataSiloId: string;\n auth: string;\n fileGlobs: string;\n ignoreDirs: string;\n transcendUrl: string;\n}\n\nexport async function discoverSilos(\n this: LocalContext,\n {\n scanPath,\n dataSiloId,\n auth,\n fileGlobs,\n ignoreDirs,\n transcendUrl,\n }: 
DiscoverSilosCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const plugin = await fetchActiveSiloDiscoPlugin(client, dataSiloId);\n\n const config = SILO_DISCOVERY_CONFIGS[plugin.dataSilo.type];\n if (!config) {\n logger.error(\n colors.red(\n `This plugin \"${plugin.dataSilo.type}\" is not supported for offline silo discovery.`,\n ),\n );\n this.process.exit(1);\n }\n\n const results = await findFilesToScan({\n scanPath,\n fileGlobs,\n ignoreDirs,\n config,\n });\n\n await uploadSiloDiscoveryResults(client, plugin.id, results);\n\n const newUrl = new URL(ADMIN_DASH);\n newUrl.pathname = '/data-map/data-inventory/silo-discovery/triage';\n newUrl.search = stringify({\n filters: JSON.stringify({ pluginIds: [plugin.id] }),\n });\n\n // Indicate success\n logger.info(\n colors.green(\n `Scan found ${results.length} potential data silos at ${scanPath}! ` +\n `View at '${newUrl.href}' ` +\n '\\n\\n NOTE: it may take 2-3 minutes for scan results to appear in the UI.',\n ),\n );\n}\n"],"mappings":"0gBAsBA,eAAsB,EAAgB,CACpC,WACA,YACA,aACA,UAUqC,CACrC,GAAM,CAAE,WAAY,EAAa,iBAAgB,gBAAiB,EAC5D,EACJ,IAAc,GACV,EACA,EAAe,OAAO,EAAU,MAAM,IAAI,CAAC,CAC3C,EAAe,CAAC,GAAG,EAAW,MAAM,IAAI,CAAE,GAAG,EAAY,CAAC,OAC7D,GAAQ,EAAI,OAAS,EACvB,CACD,GAAI,CACF,IAAM,EAAwB,MAAA,EAAA,EAAA,SAC5B,GAAG,EAAS,MAAM,EAAe,KAAK,IAAI,GAC1C,CACE,OAAQ,EAAa,IAAK,GAAgB,GAAG,EAAS,MAAM,IAAM,CAClE,OAAQ,GACR,UAAW,GACZ,CACF,CACD,EAAA,EAAO,KAAK,aAAa,EAAY,OAAO,QAAQ,CAIpD,IAAM,EAHc,EACjB,IAAK,GAAqB,EAAa,EAAS,CAAC,CACjD,MAAM,CAEN,IAAK,GAAe,EAAW,yBAA2B,EAAE,CAAC,CAC7D,MAAM,CAEH,EAAO,CAAC,GADK,IAAI,IAAI,EAAQ,IAAK,GAAQ,EAAI,KAAK,CAAC,CAC9B,CAE5B,OADA,EAAA,EAAO,KAAK,UAAU,EAAK,OAAO,sBAAsB,CACjD,EAAK,IAAK,IAAS,CACxB,KAAM,EACN,WAAY,GAAG,EAAS,MAAM,IAC9B,oBAAqB,GACtB,EAAE,OACI,EAAO,CACd,MAAU,MACR,wBAAwB,EAAgB,eAAe,IACxD,ECjDL,eAAsB,EAEpB,CACE,WACA,aACA,OACA,YACA,aACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAASA,EAAAA,GAA4B,EAAc,EAAK,CAExD,EAAS,MAAMC,EAAAA,GAA2B,EAAQ,EAAW,CAE7D,EAASC,EAAAA,EAAuB,EAAO,SAAS,MACjD,IACH,EAAA,EAAO,MACL,EAAA,QAAO,IACL,gBAAgB,EAAO,SAAS,KAAK,gDACtC,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,IAAM,EAAU,MAAM,EAAgB,CACpC,WACA,YACA,aACA,SACD,CAAC,CAEF,MAAMC,EAAAA,GAA2B,EAAQ,EAAO,GAAI,EAAQ,CAE5D,IAAM,EAAS,IAAI,IAAIC,EAAAA,EAAW,CAClC,EAAO,SAAW,iDAClB,EAAO,QAAA,EAAA,EAAA,WAAmB,CACxB,QAAS,KAAK,UAAU,CAAE,UAAW,CAAC,EAAO,GAAG,CAAE,CAAC,CACpD,CAAC,CAGF,EAAA,EAAO,KACL,EAAA,QAAO,MACL,cAAc,EAAQ,OAAO,2BAA2B,EAAS,aACnD,EAAO,KAAK;;sEAE3B,CACF"}
@@ -1,2 +1,2 @@
-const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./command-
-//# sourceMappingURL=impl-
+const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./command-DNcjQs8y.cjs`),n=require(`./constants-K6pQQtc7.cjs`),r=require(`./syncConfigurationToTranscend-DKliAJhK.cjs`),i=require(`./enums-BZulhPFa.cjs`),a=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`),require(`./codecs-Bvmb8o9R.cjs`);const o=require(`./readTranscendYaml-Cycz6RxW.cjs`),s=require(`./api-keys-Bvt2HbSv.cjs`),c=require(`./done-input-validation-DGckEJ5a.cjs`);let l=require(`node:fs`);l=e.t(l);let u=require(`node:path`),d=require(`colors`);d=e.t(d);async function f({auth:e,resources:f=t.n,file:p,transcendUrl:m,dataSiloIds:h=[],integrationNames:g=[],trackerStatuses:_=t.t,pageSize:v,skipDatapoints:y,skipSubDatapoints:b,includeGuessedCategories:x,debug:S}){c.t(this.process.exit);let C=await s.r(e),w=f.includes(`all`)?Object.values(i.o):f;if(typeof C==`string`){try{let e=await r._n(r.ti(m,C),{dataSiloIds:h,integrationNames:g,resources:w,pageSize:v,debug:S,skipDatapoints:y,skipSubDatapoints:b,includeGuessedCategories:x,trackerStatuses:_});a.t.info(d.default.magenta(`Writing configuration to file "${p}"...`)),o.a(p,e)}catch(e){a.t.error(d.default.red(`An error occurred syncing the schema: ${S?e.stack:e.message}`)),this.process.exit(1)}a.t.info(d.default.green(`Successfully synced yaml file to disk at ${p}! View at ${n.r}`))}else{if(!l.default.lstatSync(p).isDirectory())throw Error(`File is expected to be a folder when passing in a list of API keys to pull from. e.g. --file=./working/`);let e=[];await r.Es(C,async(t,n)=>{let i=`[${n+1}/${C.length}][${t.organizationName}] `;a.t.info(d.default.magenta(`~~~\n\n${i}Attempting to pull configuration...\n\n~~~`));let s=r.ti(m,t.apiKey);try{let e=await r._n(s,{dataSiloIds:h,integrationNames:g,resources:w,pageSize:v,debug:S,skipDatapoints:y,skipSubDatapoints:b,includeGuessedCategories:x,trackerStatuses:_}),n=(0,u.join)(p,`${t.organizationName}.yml`);a.t.info(d.default.magenta(`Writing configuration to file "${n}"...`)),o.a(n,e),a.t.info(d.default.green(`${i}Successfully pulled configuration!`))}catch(n){a.t.error(d.default.red(`${i}Failed to sync configuration. - ${n.message}`)),e.push(t.organizationName)}}),e.length>0&&(a.t.info(d.default.red(`Sync encountered errors for "${e.join(`,`)}". View output above for more information, or check out ${n.r}`)),this.process.exit(1))}}exports.pull=f;
+//# sourceMappingURL=impl-DGel0ZLe.cjs.map
@@ -1 +1 @@
-{"version":3,"file":"impl-
+{"version":3,"file":"impl-DGel0ZLe.cjs","names":["DEFAULT_TRANSCEND_PULL_RESOURCES","DEFAULT_CONSENT_TRACKER_STATUSES","validateTranscendAuth","TranscendPullResource","pullTranscendConfiguration","buildTranscendGraphQLClient","ADMIN_DASH_INTEGRATIONS","fs","mapSeries"],"sources":["../src/commands/inventory/pull/impl.ts"],"sourcesContent":["import { ConsentTrackerStatus } from '@transcend-io/privacy-types';\nimport type { LocalContext } from '../../../context';\nimport { TranscendPullResource } from '../../../enums';\nimport {\n DEFAULT_CONSENT_TRACKER_STATUSES,\n DEFAULT_TRANSCEND_PULL_RESOURCES,\n} from './command';\n\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { mapSeries } from '../../../lib/bluebird';\nimport { join } from 'node:path';\nimport fs from 'node:fs';\nimport {\n buildTranscendGraphQLClient,\n pullTranscendConfiguration,\n} from '../../../lib/graphql';\n\nimport { writeTranscendYaml } from '../../../lib/readTranscendYaml';\nimport { ADMIN_DASH_INTEGRATIONS } from '../../../constants';\nimport { validateTranscendAuth } from '../../../lib/api-keys';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface PullCommandFlags {\n auth: string;\n resources?: (TranscendPullResource | 'all')[];\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n integrationNames?: string[];\n trackerStatuses?: ConsentTrackerStatus[];\n pageSize: number;\n skipDatapoints: boolean;\n skipSubDatapoints: boolean;\n includeGuessedCategories: boolean;\n debug: boolean;\n}\n\nexport async function pull(\n this: LocalContext,\n {\n auth,\n resources = DEFAULT_TRANSCEND_PULL_RESOURCES,\n file,\n transcendUrl,\n dataSiloIds = [],\n integrationNames = [],\n trackerStatuses = DEFAULT_CONSENT_TRACKER_STATUSES,\n pageSize,\n skipDatapoints,\n skipSubDatapoints,\n includeGuessedCategories,\n debug,\n }: PullCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n const resourcesToPull: TranscendPullResource[] = resources.includes('all')\n ? Object.values(TranscendPullResource)\n : (resources as TranscendPullResource[]);\n\n // Sync to Disk\n if (typeof apiKeyOrList === 'string') {\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKeyOrList);\n\n const configuration = await pullTranscendConfiguration(client, {\n dataSiloIds,\n integrationNames,\n resources: resourcesToPull,\n pageSize,\n debug,\n skipDatapoints,\n skipSubDatapoints,\n includeGuessedCategories,\n trackerStatuses,\n });\n\n logger.info(colors.magenta(`Writing configuration to file \"${file}\"...`));\n writeTranscendYaml(file, configuration);\n } catch (err) {\n logger.error(\n colors.red(\n `An error occurred syncing the schema: ${\n debug ? err.stack : err.message\n }`,\n ),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced yaml file to disk at ${file}! View at ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n } else {\n if (!fs.lstatSync(file).isDirectory()) {\n throw new Error(\n 'File is expected to be a folder when passing in a list of API keys to pull from. e.g. --file=./working/',\n );\n }\n\n const encounteredErrors: string[] = [];\n await mapSeries(apiKeyOrList, async (apiKey, ind) => {\n const prefix = `[${ind + 1}/${apiKeyOrList.length}][${\n apiKey.organizationName\n }] `;\n logger.info(\n colors.magenta(\n `~~~\\n\\n${prefix}Attempting to pull configuration...\\n\\n~~~`,\n ),\n );\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKey.apiKey);\n\n try {\n const configuration = await pullTranscendConfiguration(client, {\n dataSiloIds,\n integrationNames,\n resources: resourcesToPull,\n pageSize,\n debug,\n skipDatapoints,\n skipSubDatapoints,\n includeGuessedCategories,\n trackerStatuses,\n });\n\n const filePath = join(file, `${apiKey.organizationName}.yml`);\n logger.info(\n colors.magenta(`Writing configuration to file \"${filePath}\"...`),\n );\n writeTranscendYaml(filePath, configuration);\n\n logger.info(\n colors.green(`${prefix}Successfully pulled configuration!`),\n );\n } catch (err) {\n logger.error(\n colors.red(`${prefix}Failed to sync configuration. - ${err.message}`),\n );\n encounteredErrors.push(apiKey.organizationName);\n }\n });\n\n if (encounteredErrors.length > 0) {\n logger.info(\n colors.red(\n `Sync encountered errors for \"${encounteredErrors.join(\n ',',\n )}\". View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n }\n}\n"],"mappings":"uiBAsCA,eAAsB,EAEpB,CACE,OACA,YAAYA,EAAAA,EACZ,OACA,eACA,cAAc,EAAE,CAChB,mBAAmB,EAAE,CACrB,kBAAkBC,EAAAA,EAClB,WACA,iBACA,oBACA,2BACA,SAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAAe,MAAMC,EAAAA,EAAsB,EAAK,CAEhD,EAA2C,EAAU,SAAS,MAAM,CACtE,OAAO,OAAOC,EAAAA,EAAsB,CACnC,EAGL,GAAI,OAAO,GAAiB,SAAU,CACpC,GAAI,CAIF,IAAM,EAAgB,MAAMC,EAAAA,GAFbC,EAAAA,GAA4B,EAAc,EAAa,CAEP,CAC7D,cACA,mBACA,UAAW,EACX,WACA,QACA,iBACA,oBACA,2BACA,kBACD,CAAC,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,kCAAkC,EAAK,MAAM,CAAC,CACzE,EAAA,EAAmB,EAAM,EAAc,OAChC,EAAK,CACZ,EAAA,EAAO,MACL,EAAA,QAAO,IACL,yCACE,EAAQ,EAAI,MAAQ,EAAI,UAE3B,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,CAItB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,4CAA4C,EAAK,YAAYC,EAAAA,IAC9D,CACF,KACI,CACL,GAAI,CAACC,EAAAA,QAAG,UAAU,EAAK,CAAC,aAAa,CACnC,MAAU,MACR,0GACD,CAGH,IAAM,EAA8B,EAAE,CACtC,MAAMC,EAAAA,GAAU,EAAc,MAAO,EAAQ,IAAQ,CACnD,IAAM,EAAS,IAAI,EAAM,EAAE,GAAG,EAAa,OAAO,IAChD,EAAO,iBACR,IACD,EAAA,EAAO,KACL,EAAA,QAAO,QACL,UAAU,EAAO,4CAClB,CACF,CAGD,IAAM,EAASH,EAAAA,GAA4B,EAAc,EAAO,OAAO,CAEvE,GAAI,CACF,IAAM,EAAgB,MAAMD,EAAAA,GAA2B,EAAQ,CAC7D,cACA,mBACA,UAAW,EACX,WACA,QACA,iBACA,oBACA,2BACA,kBACD,CAAC,CAEI,GAAA,EAAA,EAAA,MAAgB,EAAM,GAAG,EAAO,iBAAiB,MAAM,CAC7D,EAAA,EAAO,KACL,EAAA,QAAO,QAAQ,kCAAkC,EAAS,MAAM,CACjE,CACD,EAAA,EAAmB,EAAU,EAAc,CAE3C,EAAA,EAAO,KACL,EAAA,QAAO,MAAM,GAAG,EAAO,oCAAoC,CAC5D,OACM,EAAK,CACZ,EAAA,EAAO,MACL,EAAA,QAAO,IAAI,GAAG,EAAO,kCAAkC,EAAI,UAAU,CACtE,CACD,EAAkB,KAAK,EAAO,iBAAiB,GAEjD,CAEE,EAAkB,OAAS,IAC7B,EAAA,EAAO,KACL,EAAA,QAAO,IACL,gCAAgC,EAAkB,KAChD,IACD,CAAC,0DAA0DE,EAAAA,IAC7D,CACF,CAED,KAAK,QAAQ,KAAK,EAAE"}
@@ -1,2 +1,2 @@
-const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./constants-
-//# sourceMappingURL=impl-
+const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./constants-K6pQQtc7.cjs`),n=require(`./syncConfigurationToTranscend-DKliAJhK.cjs`);require(`./enums-BZulhPFa.cjs`);const r=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const i=require(`./data-inventory-BKAQGjFN.cjs`),a=require(`./done-input-validation-DGckEJ5a.cjs`);let o=require(`colors`);o=e.t(o);async function s({auth:e,file:s,transcendUrl:c,dataSiloIds:l,includeAttributes:u,includeGuessedCategories:d,parentCategories:f,subCategories:p=[]}){a.t(this.process.exit);try{let t=await i.n(n.ti(c,e),{dataSiloIds:l,includeGuessedCategories:d,parentCategories:f,includeAttributes:u,subCategories:p});r.t.info(o.default.magenta(`Writing datapoints to file "${s}"...`));let a=[];await n.d(s,t.map(e=>{let t={"Property ID":e.id,"Data Silo":e.dataSilo.title,Object:e.dataPoint.name,"Object Path":e.dataPoint.path.join(`.`),Property:e.name,"Property Description":e.description,"Data Categories":e.categories.map(e=>`${e.category}:${e.name}`).join(`, `),"Guessed Category":e.pendingCategoryGuesses?.[0]?`${e.pendingCategoryGuesses[0].category.category}:${e.pendingCategoryGuesses[0].category.name}`:``,"Processing Purposes":e.purposes.map(e=>`${e.purpose}:${e.name}`).join(`, `),...Object.entries(n.As(e.attributeValues||[],({attributeKey:e})=>e.name)).reduce((e,[t,n])=>(e[t]=n.map(e=>e.name).join(`,`),e),{})};return a=n.Ds([...a,...Object.keys(t)]),t}),a)}catch(e){r.t.error(o.default.red(`An error occurred syncing the datapoints: ${e.message}`)),this.process.exit(1)}r.t.info(o.default.green(`Successfully synced datapoints to disk at ${s}! View at ${t.n}`))}exports.pullDatapoints=s;
+//# sourceMappingURL=impl-DL2j8g1C.cjs.map
@@ -1 +1 @@
-{"version":3,"file":"impl-
+{"version":3,"file":"impl-DL2j8g1C.cjs","names":["pullAllDatapoints","buildTranscendGraphQLClient","writeLargeCsv","groupBy","uniq","ADMIN_DASH_DATAPOINTS"],"sources":["../src/commands/inventory/pull-datapoints/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { uniq, groupBy } from 'lodash-es';\n\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { buildTranscendGraphQLClient } from '../../../lib/graphql';\nimport { ADMIN_DASH_DATAPOINTS } from '../../../constants';\nimport { pullAllDatapoints } from '../../../lib/data-inventory';\nimport { DataCategoryType } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { writeLargeCsv } from '../../../lib/helpers';\n\nexport interface PullDatapointsCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n includeAttributes: boolean;\n includeGuessedCategories: boolean;\n parentCategories?: DataCategoryType[];\n subCategories?: string[];\n}\n\nexport async function pullDatapoints(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n dataSiloIds,\n includeAttributes,\n includeGuessedCategories,\n parentCategories,\n subCategories = [],\n }: PullDatapointsCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const dataPoints = await pullAllDatapoints(client, {\n dataSiloIds,\n includeGuessedCategories,\n parentCategories,\n includeAttributes,\n subCategories, // TODO: https://transcend.height.app/T-40482 - do by name not ID\n });\n\n logger.info(colors.magenta(`Writing datapoints to file \"${file}\"...`));\n let headers: string[] = [];\n const inputs = dataPoints.map((point) => {\n const result = {\n 'Property ID': point.id,\n 'Data Silo': point.dataSilo.title,\n Object: point.dataPoint.name,\n 'Object Path': point.dataPoint.path.join('.'),\n Property: point.name,\n 'Property Description': point.description,\n 'Data Categories': point.categories\n .map((category) => `${category.category}:${category.name}`)\n .join(', '),\n 'Guessed Category': point.pendingCategoryGuesses?.[0]\n ? `${point.pendingCategoryGuesses![0]!.category.category}:${\n point.pendingCategoryGuesses![0]!.category.name\n }`\n : '',\n 'Processing Purposes': point.purposes\n .map((purpose) => `${purpose.purpose}:${purpose.name}`)\n .join(', '),\n ...Object.entries(\n groupBy(\n point.attributeValues || [],\n ({ attributeKey }) => attributeKey.name,\n ),\n ).reduce((acc, [key, values]) => {\n acc[key] = values.map((value) => value.name).join(',');\n return acc;\n }, {} as Record<string, string>),\n };\n headers = uniq([...headers, ...Object.keys(result)]);\n return result;\n });\n await writeLargeCsv(file, inputs, headers);\n } catch (err) {\n logger.error(\n colors.red(`An error occurred syncing the datapoints: ${err.message}`),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced datapoints to disk at ${file}! View at ${ADMIN_DASH_DATAPOINTS}`,\n ),\n );\n}\n"],"mappings":"qYAuBA,eAAsB,EAEpB,CACE,OACA,OACA,eACA,cACA,oBACA,2BACA,mBACA,gBAAgB,EAAE,EAEL,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAI,CAIF,IAAM,EAAa,MAAMA,EAAAA,EAFVC,EAAAA,GAA4B,EAAc,EAAK,CAEX,CACjD,cACA,2BACA,mBACA,oBACA,gBACD,CAAC,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,+BAA+B,EAAK,MAAM,CAAC,CACtE,IAAI,EAAoB,EAAE,CAiC1B,MAAMC,EAAAA,EAAc,EAhCL,EAAW,IAAK,GAAU,CACvC,IAAM,EAAS,CACb,cAAe,EAAM,GACrB,YAAa,EAAM,SAAS,MAC5B,OAAQ,EAAM,UAAU,KACxB,cAAe,EAAM,UAAU,KAAK,KAAK,IAAI,CAC7C,SAAU,EAAM,KAChB,uBAAwB,EAAM,YAC9B,kBAAmB,EAAM,WACtB,IAAK,GAAa,GAAG,EAAS,SAAS,GAAG,EAAS,OAAO,CAC1D,KAAK,KAAK,CACb,mBAAoB,EAAM,yBAAyB,GAC/C,GAAG,EAAM,uBAAwB,GAAI,SAAS,SAAS,GACrD,EAAM,uBAAwB,GAAI,SAAS,OAE7C,GACJ,sBAAuB,EAAM,SAC1B,IAAK,GAAY,GAAG,EAAQ,QAAQ,GAAG,EAAQ,OAAO,CACtD,KAAK,KAAK,CACb,GAAG,OAAO,QACRC,EAAAA,GACE,EAAM,iBAAmB,EAAE,EAC1B,CAAE,kBAAmB,EAAa,KACpC,CACF,CAAC,QAAQ,EAAK,CAAC,EAAK,MACnB,EAAI,GAAO,EAAO,IAAK,GAAU,EAAM,KAAK,CAAC,KAAK,IAAI,CAC/C,GACN,EAAE,CAA2B,CACjC,CAED,MADA,GAAUC,EAAAA,GAAK,CAAC,GAAG,EAAS,GAAG,OAAO,KAAK,EAAO,CAAC,CAAC,CAC7C,GACP,CACgC,EAAQ,OACnC,EAAK,CACZ,EAAA,EAAO,MACL,EAAA,QAAO,IAAI,6CAA6C,EAAI,UAAU,CACvE,CACD,KAAK,QAAQ,KAAK,EAAE,CAItB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,6CAA6C,EAAK,YAAYC,EAAAA,IAC/D,CACF"}
@@ -1,2 +1,2 @@
-require(`./constants-
-//# sourceMappingURL=impl-
+require(`./constants-K6pQQtc7.cjs`);const e=require(`./syncConfigurationToTranscend-DKliAJhK.cjs`);require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const t=require(`./done-input-validation-DGckEJ5a.cjs`);async function n({auth:n,transcendUrl:r,identifierNames:i,actions:a=[]}){t.t(this.process.exit),await e.P({requestActions:a,transcendUrl:r,auth:n,identifierNames:i})}exports.rejectUnverifiedIdentifiers=n;
+//# sourceMappingURL=impl-DSNgFKP_.cjs.map
@@ -1 +1 @@
-{"version":3,"file":"impl-
+{"version":3,"file":"impl-DSNgFKP_.cjs","names":["removeUnverifiedRequestIdentifiers"],"sources":["../src/commands/request/reject-unverified-identifiers/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { removeUnverifiedRequestIdentifiers } from '../../../lib/requests';\nimport type { RequestAction } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface RejectUnverifiedIdentifiersCommandFlags {\n auth: string;\n identifierNames: string[];\n actions?: RequestAction[];\n transcendUrl: string;\n}\n\nexport async function rejectUnverifiedIdentifiers(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n identifierNames,\n actions = [],\n }: RejectUnverifiedIdentifiersCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await removeUnverifiedRequestIdentifiers({\n requestActions: actions,\n transcendUrl,\n auth,\n identifierNames,\n });\n}\n"],"mappings":"6QAYA,eAAsB,EAEpB,CACE,OACA,eACA,kBACA,UAAU,EAAE,EAEC,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAmC,CACvC,eAAgB,EAChB,eACA,OACA,kBACD,CAAC"}
@@ -1,2 +1,2 @@
-require(`./constants-
-//# sourceMappingURL=impl-
+require(`./constants-K6pQQtc7.cjs`);const e=require(`./syncConfigurationToTranscend-DKliAJhK.cjs`);require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const t=require(`./done-input-validation-DGckEJ5a.cjs`);async function n({auth:n,transcendUrl:r,enricherIds:i}){t.t(this.process.exit),await e.z({transcendUrl:r,auth:n,enricherIds:i})}exports.skipPreflightJobs=n;
+//# sourceMappingURL=impl-DU85U1jO.cjs.map
@@ -1 +1 @@
-{"version":3,"file":"impl-
+{"version":3,"file":"impl-DU85U1jO.cjs","names":["skipPreflightJobsHelper"],"sources":["../src/commands/request/skip-preflight-jobs/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { skipPreflightJobs as skipPreflightJobsHelper } from '../../../lib/requests';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface SkipPreflightJobsCommandFlags {\n auth: string;\n enricherIds: string[];\n transcendUrl: string;\n}\n\nexport async function skipPreflightJobs(\n this: LocalContext,\n { auth, transcendUrl, enricherIds }: SkipPreflightJobsCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await skipPreflightJobsHelper({\n transcendUrl,\n auth,\n enricherIds,\n });\n}\n"],"mappings":"6QAUA,eAAsB,EAEpB,CAAE,OAAM,eAAc,eACP,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAwB,CAC5B,eACA,OACA,cACD,CAAC"}
@@ -1,2 +1,2 @@
-require(`./constants-
-//# sourceMappingURL=impl-
+require(`./constants-K6pQQtc7.cjs`),require(`./syncConfigurationToTranscend-DKliAJhK.cjs`),require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const e=require(`./manual-enrichment-C6h9gjY1.cjs`),t=require(`./done-input-validation-DGckEJ5a.cjs`);async function n({auth:n,transcendUrl:r,file:i,enricherId:a,concurrency:o,markSilent:s,sombraAuth:c}){t.t(this.process.exit),await e.t({file:i,transcendUrl:r,enricherId:a,concurrency:o,markSilent:s,auth:n,sombraAuth:c})}exports.pushIdentifiers=n;
+//# sourceMappingURL=impl-DV5f54rm.cjs.map
@@ -1 +1 @@
-{"version":3,"file":"impl-
+{"version":3,"file":"impl-DV5f54rm.cjs","names":["pushManualEnrichmentIdentifiersFromCsv"],"sources":["../src/commands/request/preflight/push-identifiers/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../../context';\nimport { pushManualEnrichmentIdentifiersFromCsv } from '../../../../lib/manual-enrichment';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\n\nexport interface PushIdentifiersCommandFlags {\n auth: string;\n enricherId: string;\n sombraAuth?: string;\n transcendUrl: string;\n file: string;\n markSilent: boolean;\n concurrency: number;\n}\n\nexport async function pushIdentifiers(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n file,\n enricherId,\n concurrency,\n markSilent,\n sombraAuth,\n }: PushIdentifiersCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await pushManualEnrichmentIdentifiersFromCsv({\n file,\n transcendUrl,\n enricherId,\n concurrency,\n markSilent,\n auth,\n sombraAuth,\n });\n}\n"],"mappings":"mTAcA,eAAsB,EAEpB,CACE,OACA,eACA,OACA,aACA,cACA,aACA,cAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAuC,CAC3C,OACA,eACA,aACA,cACA,aACA,OACA,aACD,CAAC"}
@@ -1,2 +1,2 @@
-const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-
-//# sourceMappingURL=impl-
+const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-K6pQQtc7.cjs`);const t=require(`./syncConfigurationToTranscend-DKliAJhK.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./done-input-validation-DGckEJ5a.cjs`);let i=require(`colors`);i=e.t(i);async function a({auth:e,transcendUrl:a,file:o,pageLimit:s,actions:c,sombraAuth:l,skipRequestIdentifiers:u,statuses:d,createdAtBefore:f,createdAtAfter:p,updatedAtBefore:m,updatedAtAfter:h,showTests:g}){r.t(this.process.exit);let{requestsFormattedForCsv:_}=await t.I({transcendUrl:a,pageLimit:s,actions:c,skipRequestIdentifiers:u,statuses:d,auth:e,sombraAuth:l,createdAtBefore:f,createdAtAfter:p,updatedAtBefore:m,updatedAtAfter:h,isTest:g});await t.d(o,_,t.Ds(_.map(e=>Object.keys(e)).flat())),n.t.info(i.default.green(`Successfully wrote ${_.length} requests to file "${o}"`))}exports._export=a;
+//# sourceMappingURL=impl-DXKJH0AZ.cjs.map
@@ -1 +1 @@
-{"version":3,"file":"impl-
+{"version":3,"file":"impl-DXKJH0AZ.cjs","names":["pullPrivacyRequests","writeLargeCsv","uniq"],"sources":["../src/commands/request/export/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport colors from 'colors';\n\nimport { logger } from '../../../logger';\nimport { uniq } from 'lodash-es';\nimport { pullPrivacyRequests } from '../../../lib/requests';\nimport { writeLargeCsv } from '../../../lib/helpers';\nimport type { RequestAction, RequestStatus } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface ExportCommandFlags {\n auth: string;\n sombraAuth?: string;\n actions?: RequestAction[];\n statuses?: RequestStatus[];\n transcendUrl: string;\n file: string;\n concurrency: number;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n updatedAtBefore?: Date;\n updatedAtAfter?: Date;\n showTests?: boolean;\n skipRequestIdentifiers?: boolean;\n pageLimit: number;\n}\n\n// `export` is a reserved keyword, so we need to prefix it with an underscore\n// eslint-disable-next-line no-underscore-dangle\nexport async function _export(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n file,\n pageLimit,\n actions,\n sombraAuth,\n skipRequestIdentifiers,\n statuses,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n showTests,\n }: ExportCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const { requestsFormattedForCsv } = await pullPrivacyRequests({\n transcendUrl,\n pageLimit,\n actions,\n skipRequestIdentifiers,\n statuses,\n auth,\n sombraAuth,\n createdAtBefore,\n createdAtAfter,\n updatedAtBefore,\n updatedAtAfter,\n isTest: showTests,\n });\n\n // Write to CSV\n const headers = uniq(\n requestsFormattedForCsv.map((d) => Object.keys(d)).flat(),\n );\n await writeLargeCsv(file, requestsFormattedForCsv, headers);\n logger.info(\n colors.green(\n `Successfully wrote ${requestsFormattedForCsv.length} requests to file \"${file}\"`,\n ),\n );\n}\n"],"mappings":"8VA6BA,eAAsB,EAEpB,CACE,OACA,eACA,OACA,YACA,UACA,aACA,yBACA,WACA,kBACA,iBACA,kBACA,iBACA,aAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CAAE,2BAA4B,MAAMA,EAAAA,EAAoB,CAC5D,eACA,YACA,UACA,yBACA,WACA,OACA,aACA,kBACA,iBACA,kBACA,iBACA,OAAQ,EACT,CAAC,CAMF,MAAMC,EAAAA,EAAc,EAAM,EAHVC,EAAAA,GACd,EAAwB,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAC1D,CAC0D,CAC3D,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAAwB,OAAO,qBAAqB,EAAK,GAChF,CACF"}
@@ -1,2 +1,2 @@
-const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-
-//# sourceMappingURL=impl-
+const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-K6pQQtc7.cjs`);const t=require(`./syncConfigurationToTranscend-DKliAJhK.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./pooling-DLEGcLtt.cjs`),i=require(`./done-input-validation-DGckEJ5a.cjs`);let a=require(`colors`);a=e.t(a);async function o(){let e=Number(process.env.WORKER_ID||`0`);n.t.info(`[w${e}] ready pid=${process.pid}`),process.send?.({type:`ready`}),process.on(`message`,async r=>{if(!r||typeof r!=`object`||(r.type===`shutdown`&&process.exit(0),r.type!==`task`))return;let{filePath:i,options:a}=r.payload,{outputDir:o,clearOutputDir:s}=a;try{n.t.info(`[w${e}] processing ${i}`);let{DuckDBInstance:r}=await import(`@duckdb/node-api`);await t.b({filePath:i,outputDir:o,clearOutputDir:s,onProgress:(e,t)=>process.send?.({type:`progress`,payload:{filePath:i,processed:e,total:t}})},r),process.send?.({type:`result`,payload:{ok:!0,filePath:i}})}catch(r){let a=t.O(r);n.t.error(`[w${e}] ERROR ${i}: ${r.stack||a}`),process.send?.({type:`result`,payload:{ok:!1,filePath:i,error:a}})}}),await new Promise(()=>{})}function s(e){return r.r(e)}function c(e){return r.i(e)}const l={renderHeader:s,renderWorkers:c};function u(){return typeof __filename<`u`?__filename:process.argv[1]}async function d(e){i.t(this.process.exit);let{directory:o,outputDir:s,clearOutputDir:c,concurrency:d,viewerMode:f}=e,p=t.x(o,this),{poolSize:m,cpuCount:h}=r.s(d,p.length);n.t.info(a.default.green(`Converting ${p.length} Parquet file(s) → CSV with pool size ${m} (CPU=${h})`));let g=p.map(e=>({filePath:e,options:{outputDir:s,clearOutputDir:c}}));await r.n({title:`Parquet → CSV - ${o}`,baseDir:o||s||process.cwd(),childFlag:r.o,childModulePath:u(),poolSize:m,cpuCount:h,filesTotal:p.length,hooks:{nextTask:()=>g.shift(),taskLabel:e=>e.filePath,initTotals:()=>({}),initSlotProgress:()=>void 0,onProgress:e=>e,onResult:(e,t)=>({totals:e,ok:!!t.ok}),postProcess:async()=>{}},viewerMode:f,render:e=>r.a(e,l,f),extraKeyHandler:({logsBySlot:e,repaint:t,setPaused:n})=>r.t({logsBySlot:e,repaint:t,setPaused:n})})}process.argv.includes(r.o)&&o().catch(e=>{n.t.error(e),process.exit(1)}),exports.parquetToCsv=d;
+//# sourceMappingURL=impl-DbxzDk8h.cjs.map
@@ -1 +1 @@
-{"version":3,"file":"impl-
+{"version":3,"file":"impl-DbxzDk8h.cjs","names":["parquetToCsvOneFile","extractErrorMessage","makeHeader","makeWorkerRows","collectParquetFilesOrExit","computePoolSize","runPool","CHILD_FLAG","dashboardPlugin","createExtraKeyHandler"],"sources":["../src/commands/admin/parquet-to-csv/worker.ts","../src/commands/admin/parquet-to-csv/ui/plugin.ts","../src/commands/admin/parquet-to-csv/impl.ts"],"sourcesContent":["import { parquetToCsvOneFile, extractErrorMessage } from '../../../lib/helpers';\nimport type { ToWorker } from '../../../lib/pooling';\nimport { logger } from '../../../logger';\n\nexport type ParquetTask = {\n /** Absolute path of the Parquet file to convert. */\n filePath: string;\n options: {\n /** Optional directory where CSV output files should be written. */\n outputDir?: string;\n /** Whether to clear any pre-existing output before writing new ones. */\n clearOutputDir: boolean;\n };\n};\n\nexport type ParquetProgress = {\n /** File being processed by the worker. */\n filePath: string;\n /** Rows processed so far. */\n processed: number;\n /** Optional known total rows (not always available). */\n total?: number;\n};\n\nexport type ParquetResult = {\n ok: boolean;\n filePath: string;\n error?: string;\n};\n\n/**\n * Worker loop: convert a single Parquet file to one or more CSV files.\n */\nexport async function runChild(): Promise<void> {\n const workerId = Number(process.env.WORKER_ID || '0');\n logger.info(`[w${workerId}] ready pid=${process.pid}`);\n process.send?.({ type: 'ready' });\n\n process.on('message', async (msg: ToWorker<ParquetTask>) => {\n if (!msg || typeof msg !== 'object') return;\n\n if (msg.type === 'shutdown') {\n process.exit(0);\n }\n if (msg.type !== 'task') return;\n\n const { filePath, options } = msg.payload;\n const { outputDir, clearOutputDir } = options;\n\n try {\n logger.info(`[w${workerId}] processing ${filePath}`);\n const { DuckDBInstance } = await import('@duckdb/node-api');\n await parquetToCsvOneFile(\n {\n filePath,\n outputDir,\n clearOutputDir,\n onProgress: (processed, total) =>\n process.send?.({\n type: 'progress',\n payload: { filePath, processed, total },\n }),\n },\n DuckDBInstance,\n );\n\n process.send?.({\n type: 'result',\n payload: { ok: true, filePath },\n });\n } catch (err) {\n const message = extractErrorMessage(err);\n logger.error(`[w${workerId}] ERROR ${filePath}: ${err.stack || message}`);\n process.send?.({\n type: 'result',\n payload: { ok: false, filePath, error: message },\n });\n }\n });\n\n // keep alive until shutdown\n await new Promise<never>(() => {\n // Do nothing\n });\n}\n","import {\n makeHeader,\n makeWorkerRows,\n type ChunkSlotProgress,\n type CommonCtx,\n type DashboardPlugin,\n} from '../../../../lib/pooling';\n\n/**\n * Header for parquet-to-csv (no extra totals block).\n *\n * @param ctx - Dashboard context.\n * @returns Header lines.\n */\nfunction renderHeader<TTotals>(\n ctx: CommonCtx<TTotals, ChunkSlotProgress>,\n): string[] {\n // no extra lines — reuse the shared header as-is\n return makeHeader(ctx);\n}\n\n/**\n * Worker rows for parquet-to-csv — share the generic row renderer.\n *\n * @param ctx - Dashboard context.\n * @returns Array of strings, each representing one worker row.\n */\nfunction renderWorkers<TTotals>(\n ctx: CommonCtx<TTotals, ChunkSlotProgress>,\n): string[] {\n return makeWorkerRows(ctx);\n}\n\nexport const parquetToCsvPlugin: DashboardPlugin<unknown, ChunkSlotProgress> = {\n renderHeader,\n renderWorkers,\n // no extras\n};\n","import type { LocalContext } from '../../../context';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { collectParquetFilesOrExit } from '../../../lib/helpers';\nimport {\n computePoolSize,\n createExtraKeyHandler,\n CHILD_FLAG,\n type PoolHooks,\n runPool,\n dashboardPlugin,\n} from '../../../lib/pooling';\nimport {\n runChild,\n type ParquetProgress,\n type ParquetResult,\n type ParquetTask,\n} from './worker';\nimport { parquetToCsvPlugin } from './ui';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n/**\n * Returns the current module's path so the worker pool knows what file to re-exec.\n * In Node ESM, __filename is undefined, so we fall back to argv[1].\n *\n * @returns The current module's path.\n */\nfunction getCurrentModulePath(): string {\n if (typeof __filename !== 'undefined') {\n return __filename as unknown as string;\n }\n return process.argv[1];\n}\n\n/** No custom totals for the header; the runner’s built-ins suffice. */\ntype Totals = Record<string, never>;\n\nexport type ParquetToCsvCommandFlags = {\n directory: string;\n outputDir?: string;\n clearOutputDir: boolean;\n concurrency?: number;\n viewerMode: boolean;\n};\n\n/**\n * Convert all Parquet files in a directory to CSV, in parallel.\n *\n * @param flags - The command flags.\n */\nexport async function parquetToCsv(\n this: LocalContext,\n flags: ParquetToCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const { directory, outputDir, clearOutputDir, concurrency, viewerMode } =\n flags;\n\n /* 1) Discover .parquet inputs */\n const files = collectParquetFilesOrExit(directory, this);\n\n /* 2) Size the pool */\n const { poolSize, cpuCount } = computePoolSize(concurrency, files.length);\n\n logger.info(\n colors.green(\n `Converting ${files.length} Parquet file(s) → CSV with pool size ${poolSize} (CPU=${cpuCount})`,\n ),\n );\n\n /* 3) Build FIFO queue of tasks (one per file) */\n const queue = files.map<ParquetTask>((filePath) => ({\n filePath,\n options: { outputDir, clearOutputDir },\n }));\n\n /* 4) Pool hooks */\n const hooks: PoolHooks<ParquetTask, ParquetProgress, ParquetResult, Totals> =\n {\n nextTask: () => queue.shift(),\n taskLabel: (t) => t.filePath,\n initTotals: () => ({} as Totals),\n initSlotProgress: () => undefined,\n onProgress: (totals) => totals,\n onResult: (totals, res) => ({ totals, ok: !!res.ok }),\n postProcess: async () => {\n // nothing special post-run\n },\n };\n\n /* 5) Launch the pool runner with custom dashboard plugin */\n await runPool({\n title: `Parquet → CSV - ${directory}`,\n baseDir: directory || outputDir || process.cwd(),\n childFlag: CHILD_FLAG,\n childModulePath: getCurrentModulePath(),\n poolSize,\n cpuCount,\n filesTotal: files.length,\n hooks,\n viewerMode,\n render: (input) => dashboardPlugin(input, parquetToCsvPlugin, viewerMode),\n extraKeyHandler: ({ logsBySlot, repaint, setPaused }) =>\n createExtraKeyHandler({ logsBySlot, repaint, setPaused }),\n });\n}\n\n/* -------------------------------------------------------------------------------------------------\n * If invoked directly as a child process, enter worker loop\n * ------------------------------------------------------------------------------------------------- */\nif (process.argv.includes(CHILD_FLAG)) {\n runChild().catch((err) => {\n logger.error(err);\n process.exit(1);\n });\n}\n"],"mappings":"kYAiCA,eAAsB,GAA0B,CAC9C,IAAM,EAAW,OAAO,QAAQ,IAAI,WAAa,IAAI,CACrD,EAAA,EAAO,KAAK,KAAK,EAAS,cAAc,QAAQ,MAAM,CACtD,QAAQ,OAAO,CAAE,KAAM,QAAS,CAAC,CAEjC,QAAQ,GAAG,UAAW,KAAO,IAA+B,CAM1D,GALI,CAAC,GAAO,OAAO,GAAQ,WAEvB,EAAI,OAAS,YACf,QAAQ,KAAK,EAAE,CAEb,EAAI,OAAS,QAAQ,OAEzB,GAAM,CAAE,WAAU,WAAY,EAAI,QAC5B,CAAE,YAAW,kBAAmB,EAEtC,GAAI,CACF,EAAA,EAAO,KAAK,KAAK,EAAS,eAAe,IAAW,CACpD,GAAM,CAAE,kBAAmB,MAAM,OAAO,oBACxC,MAAMA,EAAAA,EACJ,CACE,WACA,YACA,iBACA,YAAa,EAAW,IACtB,QAAQ,OAAO,CACb,KAAM,WACN,QAAS,CAAE,WAAU,YAAW,QAAO,CACxC,CAAC,CACL,CACD,EACD,CAED,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAM,WAAU,CAChC,CAAC,OACK,EAAK,CACZ,IAAM,EAAUC,EAAAA,EAAoB,EAAI,CACxC,EAAA,EAAO,MAAM,KAAK,EAAS,UAAU,EAAS,IAAI,EAAI,OAAS,IAAU,CACzE,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAO,WAAU,MAAO,EAAS,CACjD,CAAC,GAEJ,CAGF,MAAM,IAAI,YAAqB,GAE7B,CCrEJ,SAAS,EACP,EACU,CAEV,OAAOC,EAAAA,EAAW,EAAI,CASxB,SAAS,EACP,EACU,CACV,OAAOC,EAAAA,EAAe,EAAI,CAG5B,MAAa,EAAkE,CAC7E,eACA,gBAED,CCVD,SAAS,GAA+B,CAItC,OAHI,OAAO,WAAe,IACjB,WAEF,QAAQ,KAAK,GAmBtB,eAAsB,EAEpB,EACe,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CAAE,YAAW,YAAW,iBAAgB,cAAa,cACzD,EAGI,EAAQC,EAAAA,EAA0B,EAAW,KAAK,CAGlD,CAAE,WAAU,YAAaC,EAAAA,EAAgB,EAAa,EAAM,OAAO,CAEzE,EAAA,EAAO,KACL,EAAA,QAAO,MACL,cAAc,EAAM,OAAO,wCAAwC,EAAS,QAAQ,EAAS,GAC9F,CACF,CAGD,IAAM,EAAQ,EAAM,IAAkB,IAAc,CAClD,WACA,QAAS,CAAE,YAAW,iBAAgB,CACvC,EAAE,CAiBH,MAAMC,EAAAA,EAAQ,CACZ,MAAO,mBAAmB,IAC1B,QAAS,GAAa,GAAa,QAAQ,KAAK,CAChD,UAAWC,EAAAA,EACX,gBAAiB,GAAsB,CACvC,WACA,WACA,WAAY,EAAM,OAClB,MArBA,CACE,aAAgB,EAAM,OAAO,CAC7B,UAAY,GAAM,EAAE,SACpB,gBAAmB,EAAE,EACrB,qBAAwB,IAAA,GACxB,WAAa,GAAW,EACxB,UAAW,EAAQ,KAAS,CAAE,SAAQ,GAAI,CAAC,CAAC,EAAI,GAAI,EACpD,YAAa,SAAY,GAG1B,CAYD,aACA,OAAS,GAAUC,EAAAA,EAAgB,EAAO,EAAoB,EAAW,CACzE,iBAAkB,CAAE,aAAY,UAAS,eACvCC,EAAAA,EAAsB,CAAE,aAAY,UAAS,YAAW,CAAC,CAC5D,CAAC,CAMA,QAAQ,KAAK,SAASF,EAAAA,EAAW,EACnC,GAAU,CAAC,MAAO,GAAQ,CACxB,EAAA,EAAO,MAAM,EAAI,CACjB,QAAQ,KAAK,EAAE,EACf"}