@transcend-io/cli 8.24.1 → 8.25.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +36 -15
- package/dist/bin/bash-complete.cjs +1 -1
- package/dist/bin/cli.cjs +1 -1
- package/dist/bin/deprecated-command.cjs +2 -2
- package/dist/{chunk-YFF6A56N.cjs → chunk-2Q7ADV5Y.cjs} +2 -2
- package/dist/{chunk-YFF6A56N.cjs.map → chunk-2Q7ADV5Y.cjs.map} +1 -1
- package/dist/{chunk-KKAJPI6O.cjs → chunk-5PTFTN6J.cjs} +11 -11
- package/dist/{chunk-KKAJPI6O.cjs.map → chunk-5PTFTN6J.cjs.map} +1 -1
- package/dist/chunk-7ZGJ4MJF.cjs +12 -0
- package/dist/{chunk-AOO3U7W3.cjs.map → chunk-7ZGJ4MJF.cjs.map} +1 -1
- package/dist/{chunk-T5QZTOZC.cjs → chunk-BGZ6TA6C.cjs} +2 -2
- package/dist/{chunk-T5QZTOZC.cjs.map → chunk-BGZ6TA6C.cjs.map} +1 -1
- package/dist/{chunk-NA6JTKNJ.cjs → chunk-COKHIMLO.cjs} +2 -2
- package/dist/{chunk-NA6JTKNJ.cjs.map → chunk-COKHIMLO.cjs.map} +1 -1
- package/dist/chunk-F3BRFYKD.cjs +2 -0
- package/dist/chunk-F3BRFYKD.cjs.map +1 -0
- package/dist/{chunk-TRQBFWZP.cjs → chunk-GG7P2RO6.cjs} +2 -2
- package/dist/{chunk-TRQBFWZP.cjs.map → chunk-GG7P2RO6.cjs.map} +1 -1
- package/dist/{chunk-WAXPOCXW.cjs → chunk-HRNCQV6G.cjs} +4 -4
- package/dist/{chunk-WAXPOCXW.cjs.map → chunk-HRNCQV6G.cjs.map} +1 -1
- package/dist/{chunk-NNHHVBQP.cjs → chunk-KYVDLURL.cjs} +4 -4
- package/dist/{chunk-NNHHVBQP.cjs.map → chunk-KYVDLURL.cjs.map} +1 -1
- package/dist/{chunk-O3EACWMC.cjs → chunk-MOWFESXN.cjs} +2 -2
- package/dist/{chunk-O3EACWMC.cjs.map → chunk-MOWFESXN.cjs.map} +1 -1
- package/dist/{chunk-MWZABOIC.cjs → chunk-MPMKY5NF.cjs} +22 -22
- package/dist/chunk-MPMKY5NF.cjs.map +1 -0
- package/dist/{chunk-VY55L6P5.cjs → chunk-NI434OII.cjs} +2 -2
- package/dist/{chunk-VY55L6P5.cjs.map → chunk-NI434OII.cjs.map} +1 -1
- package/dist/{chunk-VTYBZZQ5.cjs → chunk-RGBVP433.cjs} +2 -2
- package/dist/{chunk-VTYBZZQ5.cjs.map → chunk-RGBVP433.cjs.map} +1 -1
- package/dist/chunk-WWJHUHSQ.cjs +12 -0
- package/dist/chunk-WWJHUHSQ.cjs.map +1 -0
- package/dist/{impl-OPBD2KMY.cjs → impl-32ZRDOCN.cjs} +5 -5
- package/dist/{impl-OPBD2KMY.cjs.map → impl-32ZRDOCN.cjs.map} +1 -1
- package/dist/{impl-3PH5TSFY.cjs → impl-3VTN6SPE.cjs} +3 -3
- package/dist/{impl-3PH5TSFY.cjs.map → impl-3VTN6SPE.cjs.map} +1 -1
- package/dist/{impl-WIGDO7DS.cjs → impl-4YI4ERPI.cjs} +2 -2
- package/dist/{impl-WIGDO7DS.cjs.map → impl-4YI4ERPI.cjs.map} +1 -1
- package/dist/{impl-6HWW3LAY.cjs → impl-5H724WN5.cjs} +2 -2
- package/dist/{impl-6HWW3LAY.cjs.map → impl-5H724WN5.cjs.map} +1 -1
- package/dist/{impl-MAHW2W43.cjs → impl-6S3K72GP.cjs} +2 -2
- package/dist/{impl-MAHW2W43.cjs.map → impl-6S3K72GP.cjs.map} +1 -1
- package/dist/{impl-SMK2SUTD.cjs → impl-7G2TDJGL.cjs} +2 -2
- package/dist/{impl-SMK2SUTD.cjs.map → impl-7G2TDJGL.cjs.map} +1 -1
- package/dist/{impl-VMV24ZJH.cjs → impl-7WCN5RAN.cjs} +2 -2
- package/dist/{impl-VMV24ZJH.cjs.map → impl-7WCN5RAN.cjs.map} +1 -1
- package/dist/{impl-VXHUL55C.cjs → impl-7ZGTTYG5.cjs} +2 -2
- package/dist/{impl-VXHUL55C.cjs.map → impl-7ZGTTYG5.cjs.map} +1 -1
- package/dist/impl-AI276JNQ.cjs +2 -0
- package/dist/impl-AI276JNQ.cjs.map +1 -0
- package/dist/{impl-FO3XHC3T.cjs → impl-AQY6F4SC.cjs} +2 -2
- package/dist/{impl-FO3XHC3T.cjs.map → impl-AQY6F4SC.cjs.map} +1 -1
- package/dist/{impl-IESXEP2S.cjs → impl-CLA7WK6U.cjs} +2 -2
- package/dist/{impl-IESXEP2S.cjs.map → impl-CLA7WK6U.cjs.map} +1 -1
- package/dist/{impl-CUNZ7YNN.cjs → impl-CP2GLGVZ.cjs} +2 -2
- package/dist/{impl-CUNZ7YNN.cjs.map → impl-CP2GLGVZ.cjs.map} +1 -1
- package/dist/impl-CRQ4DYII.cjs +2 -0
- package/dist/impl-CRQ4DYII.cjs.map +1 -0
- package/dist/{impl-OOQNMHTX.cjs → impl-EQ4EH4QJ.cjs} +2 -2
- package/dist/{impl-OOQNMHTX.cjs.map → impl-EQ4EH4QJ.cjs.map} +1 -1
- package/dist/{impl-2YVWUXEC.cjs → impl-FD5CDY7Y.cjs} +2 -2
- package/dist/{impl-2YVWUXEC.cjs.map → impl-FD5CDY7Y.cjs.map} +1 -1
- package/dist/{impl-L6F4PTY3.cjs → impl-FJ4XWHLE.cjs} +2 -2
- package/dist/{impl-L6F4PTY3.cjs.map → impl-FJ4XWHLE.cjs.map} +1 -1
- package/dist/{impl-2TS5SALN.cjs → impl-G6WAGNDN.cjs} +2 -2
- package/dist/{impl-2TS5SALN.cjs.map → impl-G6WAGNDN.cjs.map} +1 -1
- package/dist/{impl-TXQ7EFQE.cjs → impl-GIPZ5EW5.cjs} +2 -2
- package/dist/{impl-TXQ7EFQE.cjs.map → impl-GIPZ5EW5.cjs.map} +1 -1
- package/dist/{impl-7YED2CRU.cjs → impl-HCY5MWEP.cjs} +2 -2
- package/dist/{impl-7YED2CRU.cjs.map → impl-HCY5MWEP.cjs.map} +1 -1
- package/dist/{impl-WLYOQTIL.cjs → impl-HEPMD6RG.cjs} +2 -2
- package/dist/{impl-WLYOQTIL.cjs.map → impl-HEPMD6RG.cjs.map} +1 -1
- package/dist/{impl-MCE7B6NW.cjs → impl-JREQ3SEN.cjs} +2 -2
- package/dist/{impl-MCE7B6NW.cjs.map → impl-JREQ3SEN.cjs.map} +1 -1
- package/dist/{impl-IBGULX7G.cjs → impl-K5LDQF7V.cjs} +2 -2
- package/dist/{impl-IBGULX7G.cjs.map → impl-K5LDQF7V.cjs.map} +1 -1
- package/dist/{impl-WKBZDQYY.cjs → impl-KPZSEHQG.cjs} +2 -2
- package/dist/{impl-WKBZDQYY.cjs.map → impl-KPZSEHQG.cjs.map} +1 -1
- package/dist/{impl-IZTOX7X3.cjs → impl-LGUMGRRA.cjs} +2 -2
- package/dist/{impl-IZTOX7X3.cjs.map → impl-LGUMGRRA.cjs.map} +1 -1
- package/dist/{impl-YEXHKEWQ.cjs → impl-LI3SHHAQ.cjs} +2 -2
- package/dist/{impl-YEXHKEWQ.cjs.map → impl-LI3SHHAQ.cjs.map} +1 -1
- package/dist/{impl-YPKYWIYP.cjs → impl-MH7OR57S.cjs} +2 -2
- package/dist/{impl-YPKYWIYP.cjs.map → impl-MH7OR57S.cjs.map} +1 -1
- package/dist/{impl-OCLMWYMP.cjs → impl-OIXVXSLU.cjs} +2 -2
- package/dist/{impl-OCLMWYMP.cjs.map → impl-OIXVXSLU.cjs.map} +1 -1
- package/dist/{impl-2Y7WAQGJ.cjs → impl-OXOU2A27.cjs} +2 -2
- package/dist/{impl-2Y7WAQGJ.cjs.map → impl-OXOU2A27.cjs.map} +1 -1
- package/dist/impl-PZ3JDEQN.cjs +2 -0
- package/dist/{impl-R3RC2YP7.cjs.map → impl-PZ3JDEQN.cjs.map} +1 -1
- package/dist/{impl-4EEEQLZS.cjs → impl-QSTK5NYY.cjs} +3 -3
- package/dist/{impl-4EEEQLZS.cjs.map → impl-QSTK5NYY.cjs.map} +1 -1
- package/dist/{impl-AKKWJJRW.cjs → impl-QZHQCQ3B.cjs} +2 -2
- package/dist/{impl-AKKWJJRW.cjs.map → impl-QZHQCQ3B.cjs.map} +1 -1
- package/dist/{impl-RHAYM5BX.cjs → impl-SYXFCDZC.cjs} +4 -4
- package/dist/{impl-RHAYM5BX.cjs.map → impl-SYXFCDZC.cjs.map} +1 -1
- package/dist/{impl-DDDGYCKH.cjs → impl-TCN6BRVK.cjs} +2 -2
- package/dist/{impl-DDDGYCKH.cjs.map → impl-TCN6BRVK.cjs.map} +1 -1
- package/dist/{impl-FGLNPBNN.cjs → impl-TFAARXUG.cjs} +2 -2
- package/dist/{impl-FGLNPBNN.cjs.map → impl-TFAARXUG.cjs.map} +1 -1
- package/dist/{impl-ZVMYU6PT.cjs → impl-VAQF5Y6W.cjs} +2 -2
- package/dist/{impl-ZVMYU6PT.cjs.map → impl-VAQF5Y6W.cjs.map} +1 -1
- package/dist/{impl-6OZ2ZIVM.cjs → impl-VGJPIZZY.cjs} +2 -2
- package/dist/{impl-6OZ2ZIVM.cjs.map → impl-VGJPIZZY.cjs.map} +1 -1
- package/dist/impl-VGOV6N6A.cjs +2 -0
- package/dist/impl-VGOV6N6A.cjs.map +1 -0
- package/dist/{impl-DJCQNX56.cjs → impl-X6Z7MKVJ.cjs} +2 -2
- package/dist/{impl-DJCQNX56.cjs.map → impl-X6Z7MKVJ.cjs.map} +1 -1
- package/dist/{impl-YHIIZRG6.cjs → impl-YIYW7STX.cjs} +2 -2
- package/dist/{impl-YHIIZRG6.cjs.map → impl-YIYW7STX.cjs.map} +1 -1
- package/dist/{impl-4RXGRXXS.cjs → impl-ZGACCLWF.cjs} +2 -2
- package/dist/{impl-4RXGRXXS.cjs.map → impl-ZGACCLWF.cjs.map} +1 -1
- package/dist/{impl-JIVQZGAO.cjs → impl-ZV4VREIU.cjs} +2 -2
- package/dist/{impl-JIVQZGAO.cjs.map → impl-ZV4VREIU.cjs.map} +1 -1
- package/dist/index.cjs +3 -3
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +43 -89
- package/package.json +1 -1
- package/dist/chunk-AOO3U7W3.cjs +0 -12
- package/dist/chunk-MWZABOIC.cjs.map +0 -1
- package/dist/chunk-ZUT7UIQU.cjs +0 -2
- package/dist/chunk-ZUT7UIQU.cjs.map +0 -1
- package/dist/impl-KUZ5CZKZ.cjs +0 -2
- package/dist/impl-KUZ5CZKZ.cjs.map +0 -1
- package/dist/impl-R3RC2YP7.cjs +0 -2
- package/dist/impl-TJIMDEBN.cjs +0 -12
- package/dist/impl-TJIMDEBN.cjs.map +0 -1
- package/dist/impl-VKEICKZ7.cjs +0 -2
- package/dist/impl-VKEICKZ7.cjs.map +0 -1
package/dist/{chunk-VTYBZZQ5.cjs → chunk-RGBVP433.cjs}
@@ -1,4 +1,4 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkNI434OIIcjs = require('./chunk-NI434OII.cjs');var _chunkQ7I37FJVcjs = require('./chunk-Q7I37FJV.cjs');var _core = require('@stricli/core');var _privacytypes = require('@transcend-io/privacy-types');var _ms = require('ms'); var _ms2 = _interopRequireDefault(_ms);function S(e){if(!/^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test(e))throw new Error(`Invalid UUID format: ${e}`);return e}function n(e){try{return new URL(e).toString().replace(/\/$/,"")}catch (e2){throw new Error(`Invalid URL format: ${e}`)}}function T(e){return e.split(",").map(r=>r.trim()).filter(r=>r.length>0)}function y(e){let r=new Date(e);if(Number.isNaN(r.getTime()))throw new TypeError(`Invalid date: ${e}. Try using the ISO 8601 format (YYYY-MM-DDTHH:MM:SS.SSSZ)`);return r}function k(e){if(typeof e=="number"&&Number.isFinite(e))return Math.round(e*1e3);if(typeof e=="string"){let r=e.trim();if(r==="")throw new Error('Invalid duration. Examples: "45", "2d", "1h", "90 minutes", "10s".');let t=Number(r);if(r!==""&&Number.isFinite(t))return Math.round(t*1e3);let a;try{a=_ms2.default.call(void 0, r)}catch (e3){throw new Error('Invalid duration. Examples: "45", "2d", "1h", "90 minutes", "10s".')}if(typeof a=="number"&&Number.isFinite(a))return a}throw new Error('Invalid duration. Examples: "45", "2d", "1h", "90 minutes", "10s".')}var d=({scopes:e,requiresSiloScope:r=!1})=>{let t={kind:"parsed",parse:String,brief:"The Transcend API key."};return r&&(t.brief+=" This key must be associated with the data silo(s) being operated on."),e==="Varies"?{...t,brief:`${t.brief} The scopes required will vary depending on the operation performed. If in doubt, the ${_privacytypes.TRANSCEND_SCOPES[_privacytypes.ScopeName.FullAdmin].title} scope will always work.`}:e.length===0?{...t,brief:`${t.brief} No scopes are required for this command.`}:{...t,brief:`${t.brief} Requires scopes: ${e.map(a=>`"${_privacytypes.TRANSCEND_SCOPES[a].title}"`).join(", ")}`}},u= exports.f =(e=_chunkNI434OIIcjs.r)=>({kind:"parsed",parse:n,brief:"URL of the Transcend backend. Use https://api.us.transcend.io for US hosting",default:e}),x= exports.g =(e=_chunkNI434OIIcjs.s)=>({kind:"parsed",parse:n,brief:"URL of the Transcend consent backend. Use https://consent.us.transcend.io for US hosting",default:e}),A= exports.h =()=>({kind:"parsed",parse:String,brief:"The Sombra internal key, use for additional authentication when self-hosting Sombra",optional:!0});var g=["dataSilos","enrichers","templates","apiKeys"],I= exports.j =Object.values(_privacytypes.ConsentTrackerStatus),R= exports.k =_core.buildCommand.call(void 0, {loader:async()=>{let{pull:e}=await Promise.resolve().then(() => _interopRequireWildcard(require("./impl-QSTK5NYY.cjs")));return e},parameters:{flags:{auth:d({scopes:"Varies"}),resources:{kind:"enum",values:["all",...Object.values(_chunkQ7I37FJVcjs.d)],brief:`The different resource types to pull in. 
Defaults to ${g.join(",")}.`,variadic:",",optional:!0},file:{kind:"parsed",parse:String,brief:"Path to the YAML file to pull into",default:"./transcend.yml"},transcendUrl:u(),dataSiloIds:{kind:"parsed",parse:String,variadic:",",brief:"The UUIDs of the data silos that should be pulled into the YAML file",optional:!0},integrationNames:{kind:"parsed",parse:String,variadic:",",brief:"The types of integrations to pull down",optional:!0},trackerStatuses:{kind:"enum",values:Object.values(_privacytypes.ConsentTrackerStatus),variadic:",",brief:"The statuses of consent manager trackers to pull down. Defaults to all statuses.",optional:!0},pageSize:{kind:"parsed",parse:_core.numberParser,brief:"The page size to use when paginating over the API",default:"50"},skipDatapoints:{kind:"boolean",brief:"When true, skip pulling in datapoints alongside data silo resource",default:!1},skipSubDatapoints:{kind:"boolean",brief:"When true, skip pulling in subDatapoints alongside data silo resource",default:!1},includeGuessedCategories:{kind:"boolean",brief:"When true, included guessed data categories that came from the content classifier",default:!1},debug:{kind:"boolean",brief:"Set to true to include debug logs while pulling the configuration",default:!1}}},docs:{brief:"Pull metadata from Transcend into transcend.yml",fullDescription:`Generates a transcend.yml by pulling the configuration from your Transcend instance.
The API key needs various scopes depending on the resources being pulled (see the CLI's README for more details).
@@ -6,4 +6,4 @@ This command can be helpful if you are looking to:
- Copy your data into another instance
- Generate a transcend.yml file as a starting point to maintain parts of your data inventory in code.`}});exports.a = S; exports.b = T; exports.c = y; exports.d = k; exports.e = d; exports.f = u; exports.g = x; exports.h = A; exports.i = g; exports.j = I; exports.k = R;
-
//# sourceMappingURL=chunk-
+
//# sourceMappingURL=chunk-RGBVP433.cjs.map
package/dist/{chunk-VTYBZZQ5.cjs.map → chunk-RGBVP433.cjs.map}
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-RGBVP433.cjs","../src/commands/inventory/pull/command.ts","../src/lib/cli/parsers.ts"],"names":["uuidParser","input"],"mappings":"AAAA,mfAA0C,wDAAyC,qCCAxC,2DACN,gECDiB,SAStCA,CAAAA,CAAWC,CAAAA,CAAuB,CAGhD,EAAA,CAAI,CADF,4EAAA,CACa,IAAA,CAAKA,CAAK,CAAA,CACvB,MAAM,IAAI,KAAA,CAAM,CAAA,qBAAA,EAAwBA,CAAK,CAAA,CAAA;ADwF5B;AAAA;AAAA;AAAA;AAAA;AAAA;AASpB,qGAAA","file":"/home/runner/work/cli/cli/dist/chunk-RGBVP433.cjs","sourcesContent":[null,"import { buildCommand, numberParser } from '@stricli/core';\nimport { ConsentTrackerStatus } from '@transcend-io/privacy-types';\nimport {\n createAuthParameter,\n createTranscendUrlParameter,\n} from '../../../lib/cli/common-parameters';\nimport { TranscendPullResource } from '../../../enums';\n\nexport const DEFAULT_TRANSCEND_PULL_RESOURCES = [\n TranscendPullResource.DataSilos,\n TranscendPullResource.Enrichers,\n TranscendPullResource.Templates,\n TranscendPullResource.ApiKeys,\n];\n\nexport const DEFAULT_CONSENT_TRACKER_STATUSES =\n Object.values(ConsentTrackerStatus);\n\nexport const pullCommand = buildCommand({\n loader: async () => {\n const { pull } = await import('./impl');\n return pull;\n },\n parameters: {\n flags: {\n auth: createAuthParameter({\n scopes: 'Varies',\n }),\n resources: {\n kind: 'enum',\n values: ['all', ...Object.values(TranscendPullResource)],\n brief: `The different resource types to pull in. Defaults to ${DEFAULT_TRANSCEND_PULL_RESOURCES.join(\n ',',\n )}.`,\n variadic: ',',\n optional: true,\n },\n file: {\n kind: 'parsed',\n parse: String,\n brief: 'Path to the YAML file to pull into',\n default: './transcend.yml',\n },\n transcendUrl: createTranscendUrlParameter(),\n dataSiloIds: {\n kind: 'parsed',\n parse: String,\n variadic: ',',\n brief:\n 'The UUIDs of the data silos that should be pulled into the YAML file',\n optional: true,\n },\n integrationNames: {\n kind: 'parsed',\n parse: String,\n variadic: ',',\n brief: 'The types of integrations to pull down',\n optional: true,\n },\n trackerStatuses: {\n kind: 'enum',\n values: Object.values(ConsentTrackerStatus),\n variadic: ',',\n brief:\n 'The statuses of consent manager trackers to pull down. 
Defaults to all statuses.',\n optional: true,\n },\n pageSize: {\n kind: 'parsed',\n parse: numberParser,\n brief: 'The page size to use when paginating over the API',\n default: '50',\n },\n skipDatapoints: {\n kind: 'boolean',\n brief:\n 'When true, skip pulling in datapoints alongside data silo resource',\n default: false,\n },\n skipSubDatapoints: {\n kind: 'boolean',\n brief:\n 'When true, skip pulling in subDatapoints alongside data silo resource',\n default: false,\n },\n includeGuessedCategories: {\n kind: 'boolean',\n brief:\n 'When true, included guessed data categories that came from the content classifier',\n default: false,\n },\n debug: {\n kind: 'boolean',\n brief:\n 'Set to true to include debug logs while pulling the configuration',\n default: false,\n },\n },\n },\n docs: {\n brief: 'Pull metadata from Transcend into transcend.yml',\n fullDescription: `Generates a transcend.yml by pulling the configuration from your Transcend instance.\n\nThe API key needs various scopes depending on the resources being pulled (see the CLI's README for more details).\n\nThis command can be helpful if you are looking to:\n\n- Copy your data into another instance\n- Generate a transcend.yml file as a starting point to maintain parts of your data inventory in code.`,\n },\n});\n","import ms, { type StringValue as MsStringValue } from 'ms';\n\n/**\n * Validates and returns a UUID string.\n *\n * @param input - The input string to validate as UUID\n * @returns The validated UUID string\n * @throws Error if input is not a valid UUID\n */\nexport function uuidParser(input: string): string {\n const uuidRegex =\n /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;\n if (!uuidRegex.test(input)) {\n throw new Error(`Invalid UUID format: ${input}`);\n }\n return input;\n}\n\n/**\n * Validates and returns a URL string.\n *\n * @param input - The input string to validate as URL\n * @returns The validated URL string\n * @throws Error if input is not a valid URL\n */\nexport function urlParser(input: string): string {\n try {\n const url = new URL(input);\n return url.toString().replace(/\\/$/, '');\n } catch {\n throw new Error(`Invalid URL format: ${input}`);\n }\n}\n\n/**\n * Parse a comma-separated string to array.\n * NOTE: Prefer using `variadic` for list arguments instead of this function. This should only be used for arguments which have a default value.\n *\n * @param input - The comma-separated string to parse\n * @returns Array of trimmed, non-empty strings\n */\nexport function arrayParser(input: string): string[] {\n return input\n .split(',')\n .map((s) => s.trim())\n .filter((s) => s.length > 0);\n}\n\n/**\n * Parse a date string to a Date object.\n *\n * @param input - The date string to parse\n * @returns The parsed Date object\n * @throws TypeError if input is not a valid date\n */\nexport function dateParser(input: string): Date {\n const date = new Date(input);\n if (Number.isNaN(date.getTime())) {\n throw new TypeError(\n `Invalid date: ${input}. 
Try using the ISO 8601 format (YYYY-MM-DDTHH:MM:SS.SSSZ)`,\n );\n }\n return date;\n}\n\n/**\n * Parse a duration string to milliseconds.\n * Accepts concise/natural-ish strings (powered by `ms`) and returns milliseconds.\n * Examples: \"3600\", \"2d\", \"1h\", \"90 minutes\", \"10s\".\n *\n * @param input - The duration to parse\n * @returns The parsed duration in milliseconds\n * @throws Error if input is not a valid duration\n */\nexport function parseDurationToMs(input: unknown): number {\n if (typeof input === 'number' && Number.isFinite(input)) {\n // backward-compat: numbers => seconds\n return Math.round(input * 1000);\n }\n\n if (typeof input === 'string') {\n const trimmed = input.trim();\n // empty string → our standardized error (avoid ms throwing its own)\n if (trimmed === '') {\n throw new Error(\n 'Invalid duration. Examples: \"45\", \"2d\", \"1h\", \"90 minutes\", \"10s\".',\n );\n }\n\n // bare numeric string => seconds (backward-compat)\n const asNumber = Number(trimmed);\n if (trimmed !== '' && Number.isFinite(asNumber)) {\n return Math.round(asNumber * 1000);\n }\n\n // let ms parse human strings\n let parsed: number | undefined;\n try {\n parsed = ms(trimmed as MsStringValue);\n } catch {\n // normalize ms' error to ours\n throw new Error(\n 'Invalid duration. Examples: \"45\", \"2d\", \"1h\", \"90 minutes\", \"10s\".',\n );\n }\n if (typeof parsed === 'number' && Number.isFinite(parsed)) {\n return parsed;\n }\n }\n\n throw new Error(\n 'Invalid duration. Examples: \"45\", \"2d\", \"1h\", \"90 minutes\", \"10s\".',\n );\n}\n"]}
package/dist/chunk-WWJHUHSQ.cjs
@@ -0,0 +1,12 @@
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunk5PTFTN6Jcjs = require('./chunk-5PTFTN6J.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkNI434OIIcjs = require('./chunk-NI434OII.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _bluebird = require('bluebird');var _cliprogress = require('cli-progress'); var _cliprogress2 = _interopRequireDefault(_cliprogress);var _persistedstate = require('@transcend-io/persisted-state');var _iots = require('io-ts'); var F = _interopRequireWildcard(_iots); var b = _interopRequireWildcard(_iots); var e = _interopRequireWildcard(_iots);var _typeutils = require('@transcend-io/type-utils');var Ve=["ENOTFOUND","ECONNRESET","ETIMEDOUT","504 Gateway Time-out","Task timed out after"];async function Q(f,{maxAttempts:a=3,baseDelayMs:y=250,isRetryable:m=(c,s)=>Ve.some(r=>s.includes(r)),onRetry:l}={}){let c=0;for(;;){c+=1;try{return await f()}catch(s){let r=_nullishCoalesce((s&&(_optionalChain([s, 'access', _2 => _2.response, 'optionalAccess', _3 => _3.body])||s.message)), () => (String(_nullishCoalesce(s, () => ("Unknown error")))));if(!(c<a&&m(s,r)))throw new Error(`Preference query failed after ${c} attempt(s): ${r}`);_optionalChain([l, 'optionalCall', _4 => _4(c,s,r)]);let o=y*2**(c-1),n=Math.floor(Math.random()*y),t=o+n;_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`[retry] attempt ${c}/${a-1}; backing off ${t}ms: ${r}`)),await _chunk5PTFTN6Jcjs.Xf.call(void 0, t)}}}var _privacytypes = require('@transcend-io/privacy-types');var D=b.intersection([b.type({nodes:b.array(_privacytypes.PreferenceQueryResponseItem)}),b.partial({cursor:b.string})]);async function me(f,{identifiers:a,partitionKey:y,skipLogging:m=!1,concurrency:l=40}){let c=[],s=_chunkNI434OIIcjs.b.call(void 0, a,100),r=new Date().getTime(),i=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);m||i.start(a.length,0);let o=0;await _bluebird.map.call(void 0, s,async p=>{let g=await Q(()=>f.post(`v1/preferences/${y}/query`,{json:{filter:{identifiers:p},limit:p.length}}).json(),{onRetry:(w,R,P)=>{_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`[RETRY] group size=${p.length} partition=${y} attempt=${w}: ${P}`))}}),u=_typeutils.decodeCodec.call(void 0, D,g);c.push(...u.nodes),o+=p.length,i.update(o)},{concurrency:l}),i.stop();let t=new Date().getTime()-r;return m||_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Completed download in "${t/1e3}" seconds.`)),c}function 
q({row:f,columnToPurposeName:a,purposeSlugs:y,preferenceTopics:m}){let l={};return Object.entries(a).forEach(([c,{purpose:s,preference:r,valueMapping:i}])=>{if(!y.includes(s))throw new Error(`Invalid purpose slug: ${s}, expected: ${y.join(", ")}`);let o=f[c];if(r){let n=m.find(t=>t.slug===r&&t.purpose.trackingType===s);if(!n){let t=m.filter(p=>p.purpose.trackingType===s).map(p=>p.slug);throw new Error(`Invalid preference slug: ${r} for purpose: ${s}. Allowed preference slugs for purpose are: ${t.join(",")}`)}switch(l[s]||(l[s]={preferences:[]}),l[s].preferences||(l[s].preferences=[]),n.type){case _privacytypes.PreferenceTopicType.Boolean:{let t=i[o];if(t===void 0&&o!=="")throw new Error(`No preference mapping found for value "${o}" in column "${c}" (purpose=${s}, preference=${r})`);if(t==null)return;if(typeof t!="boolean")throw new Error(`Invalid value for boolean preference: ${r}, expected boolean, got: ${o}`);l[s].preferences.push({topic:r,choice:{booleanValue:t}});break}case _privacytypes.PreferenceTopicType.Select:{let t=i[o];if(t===void 0&&o!=="")throw new Error(`No preference mapping found for value "${o}" in column "${c}" (purpose=${s}, preference=${r})`);if(t==null)return;if(typeof t!="string")throw new Error(`Invalid value for select preference: ${r}, expected string, got: ${o}`);let p=t.trim()||null;if(p&&!n.preferenceOptionValues.map(({slug:g})=>g).includes(p))throw new Error(`Invalid value for select preference: ${r}, expected one of: ${n.preferenceOptionValues.map(({slug:g})=>g).join(", ")}, got: ${o}`);l[s].preferences.push({topic:r,choice:{selectValue:p}});break}case _privacytypes.PreferenceTopicType.MultiSelect:{if(typeof o!="string")throw new Error(`Invalid value for multi select preference: ${r}, expected string, got: ${o}`);let t=_chunk5PTFTN6Jcjs.oc.call(void 0, o).map(p=>{let g=i[p];if(g===void 0&&o!=="")throw new Error(`No preference mapping found for multi select token "${o}" in column "${c}" (purpose=${s}, preference=${r})`);if(g==null)return null;if(typeof g!="string")throw new Error(`Invalid value for multi select preference: ${r}, expected one of: ${n.preferenceOptionValues.map(({slug:u})=>u).join(", ")}, got: ${p}`);return g}).filter(p=>p!==null).sort((p,g)=>p.localeCompare(g));t.length>0&&l[s].preferences.push({topic:r,choice:{selectValues:t}});break}default:throw new Error(`Unknown preference type: ${n.type}`)}}else{let n=i[o];if(n===void 0&&o!=="")throw new Error(`No preference mapping found for value "${o}" in column "${c}" (purpose=${s}, preference=\u2205)`);if(n===null)return;l[s]?l[s].enabled=n===!0:l[s]={enabled:n===!0}}}),_typeutils.apply.call(void 0, l,(c,s)=>{if(typeof c.enabled!="boolean")throw new Error(`No mapping provided for purpose.enabled=true/false value: ${s}`);return{...c,enabled:c.enabled}})}var _inquirer = require('inquirer'); var _inquirer2 = _interopRequireDefault(_inquirer);var B="[NONE]";async function de(f,a){let y=_chunkNI434OIIcjs.j.call(void 0, f.map(l=>Object.keys(l)).flat()),m=_chunkNI434OIIcjs.c.call(void 0, y,[...a.identifierColumn?[a.identifierColumn]:[],...Object.keys(a.columnToPurposeName)]);if(!a.timestampColum){let{timestampName:l}=await _inquirer2.default.prompt([{name:"timestampName",message:"Choose the column that will be used as the timestamp of last preference update",type:"list",default:m.find(c=>c.toLowerCase().includes("date"))||m.find(c=>c.toLowerCase().includes("time"))||m[0],choices:[...m,B]}]);a.timestampColum=l}if(_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Using timestamp column 
"${a.timestampColum}"`)),a.timestampColum!==B){let l=f.map((c,s)=>c[a.timestampColum]?null:[s]).filter(c=>!!c).flat();if(l.length>0)throw new Error(`The timestamp column "${a.timestampColum}" is missing a value for the following rows: ${l.join(`
+
`)}`);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`The timestamp column "${a.timestampColum}" is present for all row`))}return a}async function ge(f,a){let y=_chunkNI434OIIcjs.j.call(void 0, f.map(r=>Object.keys(r)).flat()),m=_chunkNI434OIIcjs.c.call(void 0, y,[...a.identifierColumn?[a.identifierColumn]:[],...Object.keys(a.columnToPurposeName)]);if(!a.identifierColumn){let{identifierName:r}=await _inquirer2.default.prompt([{name:"identifierName",message:"Choose the column that will be used as the identifier to upload consent preferences by",type:"list",default:m.find(i=>i.toLowerCase().includes("email"))||m[0],choices:m}]);a.identifierColumn=r}_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Using identifier column "${a.identifierColumn}"`));let l=f.map((r,i)=>r[a.identifierColumn]?null:[i]).filter(r=>!!r).flat();if(l.length>0){let r=`The identifier column "${a.identifierColumn}" is missing a value for the following rows: ${l.join(", ")}`;if(_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(r)),!await _chunk5PTFTN6Jcjs.Rf.call(void 0, {message:"Would you like to skip rows missing an identifier?"}))throw new Error(r);let o=f.length;f=f.filter(n=>n[a.identifierColumn]),_chunkZUNVPK23cjs.a.info(_colors2.default.yellow(`Skipped ${o-f.length} rows missing an identifier`))}_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`The identifier column "${a.identifierColumn}" is present for all rows`));let c=_chunkNI434OIIcjs.d.call(void 0, f,a.identifierColumn),s=Object.entries(c).filter(([,r])=>r.length>1);if(s.length>0){let r=`The identifier column "${a.identifierColumn}" has duplicate values for the following rows: ${s.slice(0,10).map(([o,n])=>`${o} (${n.length})`).join(`
+
`)}`;if(_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(r)),!await _chunk5PTFTN6Jcjs.Rf.call(void 0, {message:"Would you like to automatically take the latest update?"}))throw new Error(r);f=Object.entries(c).map(([,o])=>o.sort((t,p)=>new Date(p[a.timestampColum]).getTime()-new Date(t[a.timestampColum]).getTime())[0]).filter(o=>o)}return{currentState:a,preferences:f}}async function ye(f,a,{purposeSlugs:y,preferenceTopics:m,forceTriggerWorkflows:l}){let c=_chunkNI434OIIcjs.j.call(void 0, f.map(i=>Object.keys(i)).flat()),s=_chunkNI434OIIcjs.c.call(void 0, c,[...a.identifierColumn?[a.identifierColumn]:[],...a.timestampColum?[a.timestampColum]:[]]);if(s.length===0){if(l)return a;throw new Error("No other columns to process")}let r=[...y,...m.map(i=>`${i.purpose.trackingType}->${i.slug}`)];return await _bluebird.mapSeries.call(void 0, s,async i=>{let o=_chunkNI434OIIcjs.j.call(void 0, f.map(t=>t[i])),n=a.columnToPurposeName[i];if(n)_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Column "${i}" is associated with purpose "${n.purpose}"`));else{let{purposeName:t}=await _inquirer2.default.prompt([{name:"purposeName",message:`Choose the purpose that column ${i} is associated with`,type:"list",default:r.find(u=>u.startsWith(y[0])),choices:r}]),[p,g]=t.split("->");n={purpose:p,preference:g||null,valueMapping:{}}}await _bluebird.mapSeries.call(void 0, o,async t=>{if(n.valueMapping[t]!==void 0){_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Value "${t}" is associated with purpose value "${n.valueMapping[t]}"`));return}if(n.preference===null){let{purposeValue:p}=await _inquirer2.default.prompt([{name:"purposeValue",message:`Choose the purpose value for value "${t}" associated with purpose "${n.purpose}"`,type:"confirm",default:t!=="false"}]);n.valueMapping[t]=p}if(n.preference!==null){let p=m.find(u=>u.slug===n.preference);if(!p){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Preference topic "${n.preference}" not found`));return}let g=p.preferenceOptionValues.map(({slug:u})=>u);if(p.type===_privacytypes.PreferenceTopicType.Boolean){let{preferenceValue:u}=await _inquirer2.default.prompt([{name:"preferenceValue",message:`Choose the preference value for "${p.slug}" value "${t}" associated with purpose "${n.purpose}"`,type:"confirm",default:t!=="false"}]);n.valueMapping[t]=u;return}if(p.type===_privacytypes.PreferenceTopicType.Select){let{preferenceValue:u}=await _inquirer2.default.prompt([{name:"preferenceValue",message:`Choose the preference value for "${p.slug}" value "${t}" associated with purpose "${n.purpose}"`,type:"list",choices:g,default:g.find(w=>w===t)}]);n.valueMapping[t]=u;return}if(p.type===_privacytypes.PreferenceTopicType.MultiSelect){let u=_chunk5PTFTN6Jcjs.oc.call(void 0, t);await _bluebird.mapSeries.call(void 0, u,async w=>{if(n.valueMapping[w]!==void 0)return;let{preferenceValue:R}=await _inquirer2.default.prompt([{name:"preferenceValue",message:`Choose the preference value for "${p.slug}" value "${w}" associated with purpose "${n.purpose}"`,type:"list",choices:g,default:g.find(P=>P===w)}]);n.valueMapping[w]=R});return}throw new Error(`Unknown preference topic type: ${p.type}`)}}),a.columnToPurposeName[i]=n}),a}function we({currentConsentRecord:f,pendingUpdates:a,preferenceTopics:y}){return Object.entries(a).every(([m,{preferences:l=[],enabled:c}])=>{let s=f.purposes.find(i=>i.purpose===m);return!!s&&s.enabled===c?l.every(({topic:i,choice:o})=>s.preferences&&s.preferences.find(n=>{if(n.topic!==i)return!1;let t=y.find(p=>p.slug===i&&p.purpose.trackingType===m);if(!t)throw new 
Error(`Could not find preference topic for ${i}`);switch(t.type){case _privacytypes.PreferenceTopicType.Boolean:return n.choice.booleanValue===o.booleanValue;case _privacytypes.PreferenceTopicType.Select:return n.choice.selectValue===o.selectValue;case _privacytypes.PreferenceTopicType.MultiSelect:let p=(n.choice.selectValues||[]).sort(),g=(o.selectValues||[]).sort();return p.length===g.length&&p.every((u,w)=>u===g[w]);default:throw new Error(`Unknown preference topic type: ${t.type}`)}})):!1})}function Pe({currentConsentRecord:f,pendingUpdates:a,preferenceTopics:y,log:m}){return!!Object.entries(a).find(([l,{preferences:c=[],enabled:s}])=>{let r=f.purposes.find(i=>i.purpose===l);return r?r.enabled!==s?(m&&_chunkZUNVPK23cjs.a.warn(`Purpose ${l} enabled value conflict for user ${f.userId}. Pending Value: ${s}, Current Value: ${r.enabled}`),!0):!!c.find(({topic:i,choice:o})=>{let n=(r.preferences||[]).find(u=>u.topic===i);if(!n)return m&&_chunkZUNVPK23cjs.a.warn(`No existing preference found for topic ${i} in purpose ${l} for user ${f.userId}.`),!1;let t=y.find(u=>u.slug===i&&u.purpose.trackingType===l);if(!t)throw new Error(`Could not find preference topic for ${i}`);let p,g;switch(t.type){case _privacytypes.PreferenceTopicType.Boolean:return p=n.choice.booleanValue!==o.booleanValue,m&&_chunkZUNVPK23cjs.a.warn(`Preference topic ${i} boolean value conflict for user ${f.userId}. Expected: ${o.booleanValue}, Found: ${n.choice.booleanValue}`),p;case _privacytypes.PreferenceTopicType.Select:return g=n.choice.selectValue!==o.selectValue,m&&_chunkZUNVPK23cjs.a.warn(`Preference topic ${i} select value conflict for user ${f.userId}. Expected: ${o.selectValue}, Found: ${n.choice.selectValue}`),g;case _privacytypes.PreferenceTopicType.MultiSelect:let u=(n.choice.selectValues||[]).sort(),w=(o.selectValues||[]).sort();return g=u.length!==w.length||!u.every((R,P)=>R===w[P]),m&&_chunkZUNVPK23cjs.a.warn(`Preference topic ${i} multi-select value conflict for user ${f.userId}. Expected: ${w.join(", ")}, Found: ${u.join(", ")}`),g;default:throw new Error(`Unknown preference topic type: ${t.type}`)}}):(m&&_chunkZUNVPK23cjs.a.warn(`No existing purpose found for ${l} in consent record for ${f.userId}.`),!1)})}async function $e({file:f,sombra:a,purposeSlugs:y,preferenceTopics:m,partitionKey:l,skipExistingRecordCheck:c,forceTriggerWorkflows:s},r){let i=new Date().getTime(),o=r.getValue("fileMetadata");_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Reading in file: "${f}"`));let n=_chunk5PTFTN6Jcjs.rc.call(void 0, f,F.record(F.string,F.string)),t={columnToPurposeName:{},pendingSafeUpdates:{},pendingConflictUpdates:{},skippedUpdates:{},...o[f]||{},lastFetchedAt:new Date().toISOString()};t=await de(n,t),o[f]=t,await r.setValue(o,"fileMetadata");let p=await ge(n,t);t=p.currentState,n=p.preferences,o[f]=t,await r.setValue(o,"fileMetadata"),t=await ye(n,t,{preferenceTopics:m,purposeSlugs:y,forceTriggerWorkflows:s}),o[f]=t,await r.setValue(o,"fileMetadata");let g=n.map(P=>P[t.identifierColumn]),u=c?[]:await me(a,{identifiers:g.map(P=>({value:P})),partitionKey:l}),w=_chunkNI434OIIcjs.e.call(void 0, u,"userId");t.pendingConflictUpdates={},t.pendingSafeUpdates={},t.skippedUpdates={},n.forEach(P=>{let M=P[t.identifierColumn],V=q({row:P,columnToPurposeName:t.columnToPurposeName,preferenceTopics:m,purposeSlugs:y}),C=w[M];if(s&&!C)throw new Error(`No existing consent record found for user with id: ${M}.
+
When 'forceTriggerWorkflows' is set all the user identifiers should contain a consent record`);if(C&&we({currentConsentRecord:C,pendingUpdates:V,preferenceTopics:m})&&!s){t.skippedUpdates[M]=P;return}if(C&&Pe({currentConsentRecord:C,pendingUpdates:V,preferenceTopics:m})){t.pendingConflictUpdates[M]={row:P,record:C};return}t.pendingSafeUpdates[M]=P}),o[f]=t,await r.setValue(o,"fileMetadata");let R=new Date().getTime();_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pre-processed file: "${f}" in ${(R-i)/1e3}s`))}var Ce=e.type({purpose:e.string,preference:e.union([e.string,e.null]),valueMapping:e.record(e.string,e.union([e.string,e.boolean,e.null,e.undefined]))}),Xt=e.record(e.string,Ce),Ne=e.type({name:e.string,isUniqueOnPreferenceStore:e.boolean}),Zt=e.record(e.string,Ne),Ae=e.intersection([e.type({columnToPurposeName:e.record(e.string,Ce),lastFetchedAt:e.string,pendingSafeUpdates:e.record(e.string,e.record(e.string,e.string)),pendingConflictUpdates:e.record(e.string,e.type({record:_privacytypes.PreferenceQueryResponseItem,row:e.record(e.string,e.string)})),skippedUpdates:e.record(e.string,e.record(e.string,e.string))}),e.partial({identifierColumn:e.string,timestampColum:e.string})]),er=e.record(e.string,e.union([e.boolean,_privacytypes.PreferenceUpdateItem])),tr=e.record(e.string,e.union([e.boolean,e.record(e.string,e.string)])),rr=e.record(e.string,e.type({uploadedAt:e.string,error:e.string,update:_privacytypes.PreferenceUpdateItem})),or=e.record(e.string,e.type({record:_privacytypes.PreferenceQueryResponseItem,row:e.record(e.string,e.string)})),nr=e.record(e.string,e.record(e.string,e.string)),Te=e.type({fileMetadata:e.record(e.string,Ae),failingUpdates:e.record(e.string,e.type({uploadedAt:e.string,error:e.string,update:_privacytypes.PreferenceUpdateItem})),pendingUpdates:e.record(e.string,_privacytypes.PreferenceUpdateItem)});async function br({auth:f,sombraAuth:a,receiptFilepath:y,file:m,partition:l,isSilent:c=!0,dryRun:s=!1,skipWorkflowTriggers:r=!1,skipConflictUpdates:i=!1,skipExistingRecordCheck:o=!1,attributes:n=[],transcendUrl:t,forceTriggerWorkflows:p=!1}){let g=_chunk5PTFTN6Jcjs.qc.call(void 0, n),u=new (0, _persistedstate.PersistedState)(y,Te,{fileMetadata:{},failingUpdates:{},pendingUpdates:{}}),w=u.getValue("failingUpdates"),R=u.getValue("pendingUpdates"),P=u.getValue("fileMetadata");_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Restored cache, there are:
+
${Object.values(w).length} failing requests to be retried
+
${Object.values(R).length} pending requests to be processed
+
The following files are stored in cache and will be used:
+
${Object.keys(P).map(h=>h).join(`
+
`)}
+
The following file will be processed: ${m}
+
`));let M=_chunk5PTFTN6Jcjs.wc.call(void 0, t,f),[V,C,Z]=await Promise.all([_chunk5PTFTN6Jcjs.xc.call(void 0, t,f,a),p?Promise.resolve([]):_chunk5PTFTN6Jcjs.fd.call(void 0, M),p?Promise.resolve([]):_chunk5PTFTN6Jcjs.bd.call(void 0, M)]);await $e({file:m,purposeSlugs:C.map(h=>h.trackingType),preferenceTopics:Z,sombra:V,partitionKey:l,skipExistingRecordCheck:o,forceTriggerWorkflows:p},u);let O={};P=u.getValue("fileMetadata");let U=P[m];if(_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Found ${Object.entries(U.pendingSafeUpdates).length} safe updates in ${m}`)),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Found ${Object.entries(U.pendingConflictUpdates).length} conflict updates in ${m}`)),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Found ${Object.entries(U.skippedUpdates).length} skipped updates in ${m}`)),Object.entries({...U.pendingSafeUpdates,...i?{}:_typeutils.apply.call(void 0, U.pendingConflictUpdates,({row:h})=>h)}).forEach(([h,$])=>{let j=U.timestampColum===B?new Date:new Date($[U.timestampColum]),k=q({row:$,columnToPurposeName:U.columnToPurposeName,preferenceTopics:Z,purposeSlugs:C.map(I=>I.trackingType)});O[h]={userId:h,partition:l,timestamp:j.toISOString(),purposes:Object.entries(k).map(([I,xe])=>({...xe,purpose:I,workflowSettings:{attributes:g,isSilent:c,skipWorkflowTrigger:r}}))}}),await u.setValue(O,"pendingUpdates"),await u.setValue({},"failingUpdates"),s){_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Dry run complete, exiting. ${Object.values(O).length} pending updates. Check file: ${y}`));return}_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Uploading ${Object.values(O).length} preferences to partition: ${l}`));let Me=new Date().getTime(),G=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),ee=0,W=Object.entries(O),Ue=_chunkNI434OIIcjs.b.call(void 0, W,r?100:10);G.start(W.length,0),await _bluebird.map.call(void 0, Ue,async h=>{try{await V.put("v1/preferences",{json:{records:h.map(([,$])=>$),skipWorkflowTriggers:r,forceTriggerWorkflows:p}}).json()}catch($){try{let k=JSON.parse(_optionalChain([$, 'optionalAccess', _5 => _5.response, 'optionalAccess', _6 => _6.body])||"{}");k.error&&_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Error: ${k.error}`))}catch (e2){}_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to upload ${h.length} user preferences to partition ${l}: ${_optionalChain([$, 'optionalAccess', _7 => _7.response, 'optionalAccess', _8 => _8.body])||_optionalChain([$, 'optionalAccess', _9 => _9.message])}`));let j=u.getValue("failingUpdates");h.forEach(([k,I])=>{j[k]={uploadedAt:new Date().toISOString(),update:I,error:_optionalChain([$, 'optionalAccess', _10 => _10.response, 'optionalAccess', _11 => _11.body])||_optionalChain([$, 'optionalAccess', _12 => _12.message])||"Unknown error"}}),await u.setValue(j,"failingUpdates")}ee+=h.length,G.update(ee)},{concurrency:40}),G.stop();let ke=new Date().getTime()-Me;_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully uploaded ${W.length} user preferences to partition ${l} in "${ke/1e3}" seconds!`))}function Tr({identifiers:f=[],purposes:a=[],metadata:y=[],consentManagement:m={},system:l={decryptionStatus:"DECRYPTED"},...c}){let s={...c,...l,...m};if(Array.isArray(f)){let r=new Map;for(let{name:i,value:o}of f)r.has(i)||r.set(i,new Set),o&&r.get(i).add(o);for(let[i,o]of r.entries())s[i]=Array.from(o).join(",")}if(Array.isArray(y)){let r=new Map;for(let{key:i,value:o}of y)r.has(i)||r.set(i,new Set),o&&r.get(i).add(o);for(let[i,o]of 
r.entries())s[`metadata_${i}`]=Array.from(o).join(",")}if(Array.isArray(a)){for(let{purpose:r,preferences:i,enabled:o}of a)if(s[r]=!!o,Array.isArray(i))for(let{topic:n,choice:t}of i){let p=`${r}_${n}`,g=null;Object.prototype.hasOwnProperty.call(t,"booleanValue")?g=!!t.booleanValue:Object.prototype.hasOwnProperty.call(t,"selectValue")?g=String(_nullishCoalesce(t.selectValue, () => (""))):Array.isArray(t.selectValues)?g=t.selectValues.map(w=>String(w)).filter(w=>w.length>0).join(","):g=null,s[p]=g}}return s}async function Or(f,{partition:a,filterBy:y={},limit:m=50}){let l=[],c,s=y&&(Object.keys(y).length>0||y.system&&Object.keys(y.system).length>0),r=Math.max(1,Math.min(50,_nullishCoalesce(m, () => (50))));for(;;){let i={limit:r};s&&(i.filter=y),c&&(i.cursor=c);let o=await Q(()=>f.post(`v1/preferences/${a}/query`,{json:i}).json(),{onRetry:(p,g,u)=>{_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`Retry attempt ${p} for fetchConsentPreferences due to error: ${u}`))}}),{nodes:n,cursor:t}=_typeutils.decodeCodec.call(void 0, D,o);if(!n||n.length===0||(l.push(...n),!t))break;c=t}return l}exports.a = br; exports.b = Tr; exports.c = Or;
+
//# sourceMappingURL=chunk-WWJHUHSQ.cjs.map
package/dist/chunk-WWJHUHSQ.cjs.map
@@ -0,0 +1 @@
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-WWJHUHSQ.cjs","../src/lib/preference-management/uploadPreferenceManagementPreferencesInteractive.ts","../src/lib/preference-management/parsePreferenceManagementCsv.ts","../src/lib/preference-management/getPreferencesForIdentifiers.ts","../src/lib/preference-management/withPreferenceQueryRetry.ts","../src/lib/preference-management/parsePreferenceIdentifiersFromCsv.ts","../src/lib/preference-management/parsePreferenceAndPurposeValuesFromCsv.ts","../src/lib/preference-management/codecs.ts"],"names":["RETRY_PREFERENCE_MSGS","withPreferenceQueryRetry","fn","maxAttempts","baseDelayMs","isRetryable","_err","msg","m","onRetry","attempt","err","preferences","value"],"mappings":"AAAA,2lCAAgH,wDAAyC,wDAAuE,gFCQ7M,oCACC,qGAGI,+DAEO,qJCXZ,qDCES,ICGfA,EAAAA,CAAkC,CAC7C,WAAA,CACA,YAAA,CACA,WAAA,CACA,sBAAA,CACA,sBACF,CAAA,CAwBA,MAAA,SAAsBC,CAAAA,CACpBC,CAAAA,CACA,CACE,WAAA,CAAAC,CAAAA,CAAc,CAAA,CACd,WAAA,CAAAC,CAAAA,CAAc,GAAA,CACd,WAAA,CAAAC,CAAAA,CAAc,CAACC,CAAAA,CAAMC,CAAAA,CAAAA,EACnBP,EAAAA,CAAsB,IAAA,CAAMQ,CAAAA,EAAMD,CAAAA,CAAI,QAAA,CAASC,CAAC,CAAC,CAAA,CACnD,OAAA,CAAAC,CACF,CAAA,CAAkB,CAAC,CAAA,CACP,CACZ,IAAIC,CAAAA,CAAU,CAAA,CAEd,GAAA,CAAA,CAAA,CAAA,CAAa,CACXA,CAAAA,EAAW,CAAA,CACX,GAAI,CACF,OAAO,MAAMR,CAAAA,CAAG,CAElB,CAAA,KAAA,CAASS,CAAAA,CAAU,CACjB,IAAMJ,CAAAA,kBAAAA,CACHI,CAAAA,EAAAA,iBAAQA,CAAAA,qBAAI,QAAA,6BAAU,MAAA,EAAQA,CAAAA,CAAI,OAAA,CAAA,CAAA,SACnC,MAAA,kBAAOA,CAAAA,SAAO,iBAAe,GAAA,CAE/B,EAAA,CAAI,CAAA,CADcD,CAAAA,CAAUP,CAAAA,EAAeE,CAAAA,CAAYM,CAAAA,CAAKJ,CAAG,CAAA,CAAA,CAE7D,MAAM,IAAI,KAAA,CACR,CAAA,8BAAA,EAAiCG,CAAO,CAAA,aAAA,EAAgBH,CAAG,CAAA,CAAA;ACwBjEK;ACfU;ACuBV,oGAAA;ANJI;AAGA;AAAA;AAKQ;AAAK;AACgC,sCAAA;AAqF1CC","file":"/home/runner/work/cli/cli/dist/chunk-WWJHUHSQ.cjs","sourcesContent":[null,"import {\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchAllPurposes,\n fetchAllPreferenceTopics,\n PreferenceTopic,\n Purpose,\n} from '../graphql';\nimport colors from 'colors';\nimport { map } from 'bluebird';\nimport { chunk } from 'lodash-es';\nimport { logger } from '../../logger';\nimport cliProgress from 'cli-progress';\nimport { parseAttributesFromString } from '../requests';\nimport { PersistedState } from '@transcend-io/persisted-state';\nimport { parsePreferenceManagementCsvWithCache } from './parsePreferenceManagementCsv';\nimport { PreferenceState } from './codecs';\nimport { PreferenceUpdateItem } from '@transcend-io/privacy-types';\nimport { apply } from '@transcend-io/type-utils';\nimport { NONE_PREFERENCE_MAP } from './parsePreferenceTimestampsFromCsv';\nimport { getPreferenceUpdatesFromRow } from './getPreferenceUpdatesFromRow';\n\n/**\n * Upload a set of consent preferences\n *\n * @param options - Options\n */\nexport async function uploadPreferenceManagementPreferencesInteractive({\n auth,\n sombraAuth,\n receiptFilepath,\n file,\n partition,\n isSilent = true,\n dryRun = false,\n skipWorkflowTriggers = false,\n skipConflictUpdates = false,\n skipExistingRecordCheck = false,\n attributes = [],\n transcendUrl,\n forceTriggerWorkflows = false,\n}: {\n /** The Transcend API key */\n auth: string;\n /** Sombra API key authentication */\n sombraAuth?: string;\n /** Partition key */\n partition: string;\n /** File where to store receipt and continue from where left off */\n receiptFilepath: string;\n /** The file to process */\n file: string;\n /** API URL for Transcend backend */\n transcendUrl: string;\n /** Whether to do a dry run */\n dryRun?: boolean;\n /** Whether to upload as isSilent */\n 
isSilent?: boolean;\n /** Attributes string pre-parse. In format Key:Value */\n attributes?: string[];\n /** Skip workflow triggers */\n skipWorkflowTriggers?: boolean;\n /**\n * When true, only update preferences that do not conflict with existing\n * preferences. When false, update all preferences in CSV based on timestamp.\n */\n skipConflictUpdates?: boolean;\n /** Whether to skip the check for existing records. SHOULD ONLY BE USED FOR INITIAL UPLOAD */\n skipExistingRecordCheck?: boolean;\n /** Whether to force trigger workflows */\n forceTriggerWorkflows?: boolean;\n}): Promise<void> {\n // Parse out the extra attributes to apply to all requests uploaded\n const parsedAttributes = parseAttributesFromString(attributes);\n\n // Create a new state file to store the requests from this run\n const preferenceState = new PersistedState(receiptFilepath, PreferenceState, {\n fileMetadata: {},\n failingUpdates: {},\n pendingUpdates: {},\n });\n const failingRequests = preferenceState.getValue('failingUpdates');\n const pendingRequests = preferenceState.getValue('pendingUpdates');\n let fileMetadata = preferenceState.getValue('fileMetadata');\n\n logger.info(\n colors.magenta(\n 'Restored cache, there are: \\n' +\n `${\n Object.values(failingRequests).length\n } failing requests to be retried\\n` +\n `${\n Object.values(pendingRequests).length\n } pending requests to be processed\\n` +\n `The following files are stored in cache and will be used:\\n${Object.keys(\n fileMetadata,\n )\n .map((x) => x)\n .join('\\n')}\\n` +\n `The following file will be processed: ${file}\\n`,\n ),\n );\n\n // Create GraphQL client to connect to Transcend backend\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const [sombra, purposes, preferenceTopics] = await Promise.all([\n // Create sombra instance to communicate with\n createSombraGotInstance(transcendUrl, auth, sombraAuth),\n // get all purposes and topics\n forceTriggerWorkflows\n ? Promise.resolve([] as Purpose[])\n : fetchAllPurposes(client),\n forceTriggerWorkflows\n ? Promise.resolve([] as PreferenceTopic[])\n : fetchAllPreferenceTopics(client),\n ]);\n\n // Process the file\n await parsePreferenceManagementCsvWithCache(\n {\n file,\n purposeSlugs: purposes.map((x) => x.trackingType),\n preferenceTopics,\n sombra,\n partitionKey: partition,\n skipExistingRecordCheck,\n forceTriggerWorkflows,\n },\n preferenceState,\n );\n\n // Construct the pending updates\n const pendingUpdates: Record<string, PreferenceUpdateItem> = {};\n fileMetadata = preferenceState.getValue('fileMetadata');\n const metadata = fileMetadata[file];\n\n logger.info(\n colors.magenta(\n `Found ${\n Object.entries(metadata.pendingSafeUpdates).length\n } safe updates in ${file}`,\n ),\n );\n logger.info(\n colors.magenta(\n `Found ${\n Object.entries(metadata.pendingConflictUpdates).length\n } conflict updates in ${file}`,\n ),\n );\n logger.info(\n colors.magenta(\n `Found ${\n Object.entries(metadata.skippedUpdates).length\n } skipped updates in ${file}`,\n ),\n );\n\n // Update either safe updates only or safe + conflict\n Object.entries({\n ...metadata.pendingSafeUpdates,\n ...(skipConflictUpdates\n ? {}\n : apply(metadata.pendingConflictUpdates, ({ row }) => row)),\n }).forEach(([userId, update]) => {\n // Determine timestamp\n const timestamp =\n metadata.timestampColum === NONE_PREFERENCE_MAP\n ? 
new Date()\n : new Date(update[metadata.timestampColum!]);\n\n // Determine updates\n const updates = getPreferenceUpdatesFromRow({\n row: update,\n columnToPurposeName: metadata.columnToPurposeName,\n preferenceTopics,\n purposeSlugs: purposes.map((x) => x.trackingType),\n });\n pendingUpdates[userId] = {\n userId,\n partition,\n timestamp: timestamp.toISOString(),\n purposes: Object.entries(updates).map(([purpose, value]) => ({\n ...value,\n purpose,\n workflowSettings: {\n attributes: parsedAttributes,\n isSilent,\n skipWorkflowTrigger: skipWorkflowTriggers,\n },\n })),\n };\n });\n await preferenceState.setValue(pendingUpdates, 'pendingUpdates');\n await preferenceState.setValue({}, 'failingUpdates');\n\n // Exist early if dry run\n if (dryRun) {\n logger.info(\n colors.green(\n `Dry run complete, exiting. ${\n Object.values(pendingUpdates).length\n } pending updates. Check file: ${receiptFilepath}`,\n ),\n );\n return;\n }\n\n logger.info(\n colors.magenta(\n `Uploading ${\n Object.values(pendingUpdates).length\n } preferences to partition: ${partition}`,\n ),\n );\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Build a GraphQL client\n let total = 0;\n const updatesToRun = Object.entries(pendingUpdates);\n const chunkedUpdates = chunk(updatesToRun, skipWorkflowTriggers ? 100 : 10);\n progressBar.start(updatesToRun.length, 0);\n await map(\n chunkedUpdates,\n async (currentChunk) => {\n // Make the request\n try {\n await sombra\n .put('v1/preferences', {\n json: {\n records: currentChunk.map(([, update]) => update),\n skipWorkflowTriggers,\n forceTriggerWorkflows,\n },\n })\n .json();\n } catch (err) {\n try {\n const parsed = JSON.parse(err?.response?.body || '{}');\n if (parsed.error) {\n logger.error(colors.red(`Error: ${parsed.error}`));\n }\n } catch (e) {\n // continue\n }\n logger.error(\n colors.red(\n `Failed to upload ${\n currentChunk.length\n } user preferences to partition ${partition}: ${\n err?.response?.body || err?.message\n }`,\n ),\n );\n const failingUpdates = preferenceState.getValue('failingUpdates');\n currentChunk.forEach(([userId, update]) => {\n failingUpdates[userId] = {\n uploadedAt: new Date().toISOString(),\n update,\n error: err?.response?.body || err?.message || 'Unknown error',\n };\n });\n await preferenceState.setValue(failingUpdates, 'failingUpdates');\n }\n\n total += currentChunk.length;\n progressBar.update(total);\n },\n {\n concurrency: 40,\n },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n logger.info(\n colors.green(\n `Successfully uploaded ${\n updatesToRun.length\n } user preferences to partition ${partition} in \"${\n totalTime / 1000\n }\" seconds!`,\n ),\n );\n}\n","import { PersistedState } from '@transcend-io/persisted-state';\nimport type { Got } from 'got';\nimport { keyBy } from 'lodash-es';\nimport * as t from 'io-ts';\nimport colors from 'colors';\nimport { FileMetadataState, PreferenceState } from './codecs';\nimport { logger } from '../../logger';\nimport { readCsv } from '../requests';\nimport { getPreferencesForIdentifiers } from './getPreferencesForIdentifiers';\nimport { PreferenceTopic } from '../graphql';\nimport { getPreferenceUpdatesFromRow } from './getPreferenceUpdatesFromRow';\nimport { parsePreferenceTimestampsFromCsv } from './parsePreferenceTimestampsFromCsv';\nimport { 
parsePreferenceIdentifiersFromCsv } from './parsePreferenceIdentifiersFromCsv';\nimport { parsePreferenceAndPurposeValuesFromCsv } from './parsePreferenceAndPurposeValuesFromCsv';\nimport { checkIfPendingPreferenceUpdatesAreNoOp } from './checkIfPendingPreferenceUpdatesAreNoOp';\nimport { checkIfPendingPreferenceUpdatesCauseConflict } from './checkIfPendingPreferenceUpdatesCauseConflict';\n\n/**\n * Parse a file into the cache\n *\n *\n * @param options - Options\n * @param cache - The cache to store the parsed file in\n * @returns The cache with the parsed file\n */\nexport async function parsePreferenceManagementCsvWithCache(\n {\n file,\n sombra,\n purposeSlugs,\n preferenceTopics,\n partitionKey,\n skipExistingRecordCheck,\n forceTriggerWorkflows,\n }: {\n /** File to parse */\n file: string;\n /** The purpose slugs that are allowed to be updated */\n purposeSlugs: string[];\n /** The preference topics */\n preferenceTopics: PreferenceTopic[];\n /** Sombra got instance */\n sombra: Got;\n /** Partition key */\n partitionKey: string;\n /** Whether to skip the check for existing records. SHOULD ONLY BE USED FOR INITIAL UPLOAD */\n skipExistingRecordCheck: boolean;\n /** Wheather to force workflow triggers */\n forceTriggerWorkflows: boolean;\n },\n cache: PersistedState<typeof PreferenceState>,\n): Promise<void> {\n // Start the timer\n const t0 = new Date().getTime();\n\n // Get the current metadata\n const fileMetadata = cache.getValue('fileMetadata');\n\n // Read in the file\n logger.info(colors.magenta(`Reading in file: \"${file}\"`));\n let preferences = readCsv(file, t.record(t.string, t.string));\n\n // start building the cache, can use previous cache as well\n let currentState: FileMetadataState = {\n columnToPurposeName: {},\n pendingSafeUpdates: {},\n pendingConflictUpdates: {},\n skippedUpdates: {},\n // Load in the last fetched time\n ...((fileMetadata[file] || {}) as Partial<FileMetadataState>),\n lastFetchedAt: new Date().toISOString(),\n };\n\n // Validate that all timestamps are present in the file\n currentState = await parsePreferenceTimestampsFromCsv(\n preferences,\n currentState,\n );\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n\n // Validate that all identifiers are present and unique\n const result = await parsePreferenceIdentifiersFromCsv(\n preferences,\n currentState,\n );\n currentState = result.currentState;\n preferences = result.preferences;\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n\n // Ensure all other columns are mapped to purpose and preference\n // slug values\n currentState = await parsePreferenceAndPurposeValuesFromCsv(\n preferences,\n currentState,\n {\n preferenceTopics,\n purposeSlugs,\n forceTriggerWorkflows,\n },\n );\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n\n // Grab existing preference store records\n const identifiers = preferences.map(\n (pref) => pref[currentState.identifierColumn!],\n );\n const existingConsentRecords = skipExistingRecordCheck\n ? 
[]\n : await getPreferencesForIdentifiers(sombra, {\n identifiers: identifiers.map((x) => ({ value: x })),\n partitionKey,\n });\n const consentRecordByIdentifier = keyBy(existingConsentRecords, 'userId');\n\n // Clear out previous updates\n currentState.pendingConflictUpdates = {};\n currentState.pendingSafeUpdates = {};\n currentState.skippedUpdates = {};\n\n // Process each row\n preferences.forEach((pref) => {\n // Grab unique Id for the user\n const userId = pref[currentState.identifierColumn!];\n\n // determine updates for user\n const pendingUpdates = getPreferenceUpdatesFromRow({\n row: pref,\n columnToPurposeName: currentState.columnToPurposeName,\n preferenceTopics,\n purposeSlugs,\n });\n\n // Grab current state of the update\n const currentConsentRecord = consentRecordByIdentifier[userId];\n if (forceTriggerWorkflows && !currentConsentRecord) {\n throw new Error(\n `No existing consent record found for user with id: ${userId}. \n When 'forceTriggerWorkflows' is set all the user identifiers should contain a consent record`,\n );\n }\n // Check if the update can be skipped\n // this is the case if a record exists, and the purpose\n // and preference values are all in sync\n if (\n currentConsentRecord &&\n checkIfPendingPreferenceUpdatesAreNoOp({\n currentConsentRecord,\n pendingUpdates,\n preferenceTopics,\n }) &&\n !forceTriggerWorkflows\n ) {\n currentState.skippedUpdates[userId] = pref;\n return;\n }\n\n // Determine if there are any conflicts\n if (\n currentConsentRecord &&\n checkIfPendingPreferenceUpdatesCauseConflict({\n currentConsentRecord,\n pendingUpdates,\n preferenceTopics,\n })\n ) {\n currentState.pendingConflictUpdates[userId] = {\n row: pref,\n record: currentConsentRecord,\n };\n return;\n }\n\n // Add to pending updates\n currentState.pendingSafeUpdates[userId] = pref;\n });\n\n // Read in the file\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n const t1 = new Date().getTime();\n logger.info(\n colors.green(\n `Successfully pre-processed file: \"${file}\" in ${(t1 - t0) / 1000}s`,\n ),\n );\n}\n","import { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\nimport type { Got } from 'got';\nimport colors from 'colors';\nimport cliProgress from 'cli-progress';\nimport { chunk } from 'lodash-es';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { map } from 'bluebird';\nimport { logger } from '../../logger';\nimport { withPreferenceQueryRetry } from './withPreferenceQueryRetry';\nimport { ConsentPreferenceResponse } from './types';\n\n/**\n * Grab the current consent preference values for a list of identifiers\n *\n * @param sombra - Backend to make API call to\n * @param options - Options\n * @returns Plaintext context information\n */\nexport async function getPreferencesForIdentifiers(\n sombra: Got,\n {\n identifiers,\n partitionKey,\n skipLogging = false,\n concurrency = 40,\n }: {\n /** The list of identifiers to look up */\n identifiers: {\n /** The value of the identifier */\n value: string;\n }[];\n /** The partition key to look up */\n partitionKey: string;\n /** Whether to skip logging */\n skipLogging?: boolean;\n /** Concurrency for requests (default 40) */\n concurrency?: number;\n },\n): Promise<PreferenceQueryResponseItem[]> {\n const results: PreferenceQueryResponseItem[] = [];\n const groupedIdentifiers = chunk(identifiers, 100);\n\n // create a new progress bar instance and use shades_classic theme\n const t0 = new Date().getTime();\n const progressBar = new 
cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n if (!skipLogging) {\n progressBar.start(identifiers.length, 0);\n }\n\n let total = 0;\n await map(\n groupedIdentifiers,\n async (group) => {\n const rawResult = await withPreferenceQueryRetry(\n () =>\n sombra\n .post(`v1/preferences/${partitionKey}/query`, {\n json: {\n filter: { identifiers: group },\n limit: group.length,\n },\n })\n .json(),\n {\n onRetry: (attempt, _err, msg) => {\n logger.warn(\n colors.yellow(\n `[RETRY] group size=${group.length} partition=${partitionKey} attempt=${attempt}: ${msg}`,\n ),\n );\n },\n },\n );\n\n const result = decodeCodec(ConsentPreferenceResponse, rawResult);\n results.push(...result.nodes);\n total += group.length;\n progressBar.update(total);\n },\n {\n concurrency,\n },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n if (!skipLogging) {\n // Log completion time\n logger.info(\n colors.green(`Completed download in \"${totalTime / 1000}\" seconds.`),\n );\n }\n\n return results;\n}\n","import colors from 'colors';\nimport { logger } from '../../logger';\nimport { sleepPromise } from '../helpers';\n\n/**\n * Transient network / platform errors that merit a retry.\n * Keep this list short and specific to avoid masking real failures.\n */\nexport const RETRY_PREFERENCE_MSGS: string[] = [\n 'ENOTFOUND',\n 'ECONNRESET',\n 'ETIMEDOUT',\n '504 Gateway Time-out',\n 'Task timed out after',\n];\n\n/**\n * Options for retrying preference queries.\n */\nexport type RetryOptions = {\n /** Max attempts including the first try (default 3) */\n maxAttempts?: number;\n /** Initial backoff in ms (default 250) */\n baseDelayMs?: number;\n /** Optional custom predicate to decide if an error is retryable */\n isRetryable?: (err: unknown, message: string) => boolean;\n /** Optional hook to log on each retry */\n onRetry?: (attempt: number, err: unknown, message: string) => void;\n};\n\n/**\n * Run an async function with standardized retry behavior for preference queries.\n * Exponential backoff with jitter; only retries on known-transient messages.\n *\n * @param fn - Function to run\n * @param options - Retry options\n * @returns Result of the function\n */\nexport async function withPreferenceQueryRetry<T>(\n fn: () => Promise<T>,\n {\n maxAttempts = 3,\n baseDelayMs = 250,\n isRetryable = (_err, msg) =>\n RETRY_PREFERENCE_MSGS.some((m) => msg.includes(m)),\n onRetry,\n }: RetryOptions = {},\n): Promise<T> {\n let attempt = 0;\n // eslint-disable-next-line no-constant-condition\n while (true) {\n attempt += 1;\n try {\n return await fn();\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n } catch (err: any) {\n const msg: string =\n (err && (err.response?.body || err.message)) ??\n String(err ?? 
'Unknown error');\n const willRetry = attempt < maxAttempts && isRetryable(err, msg);\n if (!willRetry) {\n throw new Error(\n `Preference query failed after ${attempt} attempt(s): ${msg}`,\n );\n }\n onRetry?.(attempt, err, msg);\n\n const backoff = baseDelayMs * 2 ** (attempt - 1);\n const jitter = Math.floor(Math.random() * baseDelayMs);\n const delay = backoff + jitter;\n logger.warn(\n colors.yellow(\n `[retry] attempt ${attempt}/${\n maxAttempts - 1\n }; backing off ${delay}ms: ${msg}`,\n ),\n );\n await sleepPromise(delay);\n }\n }\n}\n","import { uniq, groupBy, difference } from 'lodash-es';\nimport colors from 'colors';\nimport inquirer from 'inquirer';\nimport { FileMetadataState } from './codecs';\nimport { logger } from '../../logger';\nimport { inquirerConfirmBoolean } from '../helpers';\n\n/* eslint-disable no-param-reassign */\n\n/**\n * Parse identifiers from a CSV list of preferences\n *\n * Ensures that all rows have a valid identifier\n * and that all identifiers are unique.\n *\n * @param preferences - List of preferences\n * @param currentState - The current file metadata state for parsing this list\n * @returns The updated file metadata state\n */\nexport async function parsePreferenceIdentifiersFromCsv(\n preferences: Record<string, string>[],\n currentState: FileMetadataState,\n): Promise<{\n /** The updated state */\n currentState: FileMetadataState;\n /** The updated preferences */\n preferences: Record<string, string>[];\n}> {\n // Determine columns to map\n const columnNames = uniq(preferences.map((x) => Object.keys(x)).flat());\n\n // Determine the columns that could potentially be used for identifier\n const remainingColumnsForIdentifier = difference(columnNames, [\n ...(currentState.identifierColumn ? [currentState.identifierColumn] : []),\n ...Object.keys(currentState.columnToPurposeName),\n ]);\n\n // Determine the identifier column to work off of\n if (!currentState.identifierColumn) {\n const { identifierName } = await inquirer.prompt<{\n /** Identifier name */\n identifierName: string;\n }>([\n {\n name: 'identifierName',\n message:\n 'Choose the column that will be used as the identifier to upload consent preferences by',\n type: 'list',\n default:\n remainingColumnsForIdentifier.find((col) =>\n col.toLowerCase().includes('email'),\n ) || remainingColumnsForIdentifier[0],\n choices: remainingColumnsForIdentifier,\n },\n ]);\n currentState.identifierColumn = identifierName;\n }\n logger.info(\n colors.magenta(\n `Using identifier column \"${currentState.identifierColumn}\"`,\n ),\n );\n\n // Validate that the identifier column is present for all rows and unique\n const identifierColumnsMissing = preferences\n .map((pref, ind) => (pref[currentState.identifierColumn!] ? 
null : [ind]))\n .filter((x): x is number[] => !!x)\n .flat();\n if (identifierColumnsMissing.length > 0) {\n const msg = `The identifier column \"${\n currentState.identifierColumn\n }\" is missing a value for the following rows: ${identifierColumnsMissing.join(\n ', ',\n )}`;\n logger.warn(colors.yellow(msg));\n\n // Ask user if they would like to skip rows missing an identifier\n const skip = await inquirerConfirmBoolean({\n message: 'Would you like to skip rows missing an identifier?',\n });\n if (!skip) {\n throw new Error(msg);\n }\n\n // Filter out rows missing an identifier\n const previous = preferences.length;\n preferences = preferences.filter(\n (pref) => pref[currentState.identifierColumn!],\n );\n logger.info(\n colors.yellow(\n `Skipped ${previous - preferences.length} rows missing an identifier`,\n ),\n );\n }\n logger.info(\n colors.magenta(\n `The identifier column \"${currentState.identifierColumn}\" is present for all rows`,\n ),\n );\n\n // Validate that all identifiers are unique\n const rowsByUserId = groupBy(preferences, currentState.identifierColumn);\n const duplicateIdentifiers = Object.entries(rowsByUserId).filter(\n ([, rows]) => rows.length > 1,\n );\n if (duplicateIdentifiers.length > 0) {\n const msg = `The identifier column \"${\n currentState.identifierColumn\n }\" has duplicate values for the following rows: ${duplicateIdentifiers\n .slice(0, 10)\n .map(([userId, rows]) => `${userId} (${rows.length})`)\n .join('\\n')}`;\n logger.warn(colors.yellow(msg));\n\n // Ask user if they would like to take the most recent update\n // for each duplicate identifier\n const skip = await inquirerConfirmBoolean({\n message: 'Would you like to automatically take the latest update?',\n });\n if (!skip) {\n throw new Error(msg);\n }\n preferences = Object.entries(rowsByUserId)\n .map(([, rows]) => {\n const sorted = rows.sort(\n (a, b) =>\n new Date(b[currentState.timestampColum!]).getTime() -\n new Date(a[currentState.timestampColum!]).getTime(),\n );\n return sorted[0];\n })\n .filter((x) => x);\n }\n\n return { currentState, preferences };\n}\n/* eslint-enable no-param-reassign */\n","import { uniq, difference } from 'lodash-es';\nimport colors from 'colors';\nimport inquirer from 'inquirer';\nimport { FileMetadataState } from './codecs';\nimport { logger } from '../../logger';\nimport { mapSeries } from 'bluebird';\nimport { PreferenceTopic } from '../graphql';\nimport { PreferenceTopicType } from '@transcend-io/privacy-types';\nimport { splitCsvToList } from '../requests';\n\n/* eslint-disable no-param-reassign */\n\n/**\n * Parse out the purpose.enabled and preference values from a CSV file\n *\n * @param preferences - List of preferences\n * @param currentState - The current file metadata state for parsing this list\n * @param options - Options\n * @returns The updated file metadata state\n */\nexport async function parsePreferenceAndPurposeValuesFromCsv(\n preferences: Record<string, string>[],\n currentState: FileMetadataState,\n {\n purposeSlugs,\n preferenceTopics,\n forceTriggerWorkflows,\n }: {\n /** The purpose slugs that are allowed to be updated */\n purposeSlugs: string[];\n /** The preference topics */\n preferenceTopics: PreferenceTopic[];\n /** Force workflow triggers */\n forceTriggerWorkflows: boolean;\n },\n): Promise<FileMetadataState> {\n // Determine columns to map\n const columnNames = uniq(preferences.map((x) => Object.keys(x)).flat());\n\n // Determine the columns that could potentially be used for identifier\n const otherColumns = 
difference(columnNames, [\n ...(currentState.identifierColumn ? [currentState.identifierColumn] : []),\n ...(currentState.timestampColum ? [currentState.timestampColum] : []),\n ]);\n if (otherColumns.length === 0) {\n if (forceTriggerWorkflows) {\n return currentState;\n }\n throw new Error('No other columns to process');\n }\n\n // The purpose and preferences to map to\n const purposeNames = [\n ...purposeSlugs,\n ...preferenceTopics.map((x) => `${x.purpose.trackingType}->${x.slug}`),\n ];\n\n // Ensure all columns are accounted for\n await mapSeries(otherColumns, async (col) => {\n // Determine the unique values to map in this column\n const uniqueValues = uniq(preferences.map((x) => x[col]));\n\n // Map the column to a purpose\n let purposeMapping = currentState.columnToPurposeName[col];\n if (purposeMapping) {\n logger.info(\n colors.magenta(\n `Column \"${col}\" is associated with purpose \"${purposeMapping.purpose}\"`,\n ),\n );\n } else {\n const { purposeName } = await inquirer.prompt<{\n /** purpose name */\n purposeName: string;\n }>([\n {\n name: 'purposeName',\n message: `Choose the purpose that column ${col} is associated with`,\n type: 'list',\n default: purposeNames.find((x) => x.startsWith(purposeSlugs[0])),\n choices: purposeNames,\n },\n ]);\n const [purposeSlug, preferenceSlug] = purposeName.split('->');\n purposeMapping = {\n purpose: purposeSlug,\n preference: preferenceSlug || null,\n valueMapping: {},\n };\n }\n\n // map each value to the purpose value\n await mapSeries(uniqueValues, async (value) => {\n if (purposeMapping.valueMapping[value] !== undefined) {\n logger.info(\n colors.magenta(\n `Value \"${value}\" is associated with purpose value \"${purposeMapping.valueMapping[value]}\"`,\n ),\n );\n return;\n }\n // if preference is null, this column is just for the purpose\n if (purposeMapping.preference === null) {\n const { purposeValue } = await inquirer.prompt<{\n /** purpose value */\n purposeValue: boolean;\n }>([\n {\n name: 'purposeValue',\n message: `Choose the purpose value for value \"${value}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'confirm',\n default: value !== 'false',\n },\n ]);\n purposeMapping.valueMapping[value] = purposeValue;\n }\n\n // if preference is not null, this column is for a specific preference\n if (purposeMapping.preference !== null) {\n const preferenceTopic = preferenceTopics.find(\n (x) => x.slug === purposeMapping.preference,\n );\n if (!preferenceTopic) {\n logger.error(\n colors.red(\n `Preference topic \"${purposeMapping.preference}\" not found`,\n ),\n );\n return;\n }\n const preferenceOptions = preferenceTopic.preferenceOptionValues.map(\n ({ slug }) => slug,\n );\n\n if (preferenceTopic.type === PreferenceTopicType.Boolean) {\n const { preferenceValue } = await inquirer.prompt<{\n /** purpose value */\n preferenceValue: boolean;\n }>([\n {\n name: 'preferenceValue',\n message:\n // eslint-disable-next-line max-len\n `Choose the preference value for \"${preferenceTopic.slug}\" value \"${value}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'confirm',\n default: value !== 'false',\n },\n ]);\n purposeMapping.valueMapping[value] = preferenceValue;\n return;\n }\n\n if (preferenceTopic.type === PreferenceTopicType.Select) {\n const { preferenceValue } = await inquirer.prompt<{\n /** purpose value */\n preferenceValue: boolean;\n }>([\n {\n name: 'preferenceValue',\n // eslint-disable-next-line max-len\n message: `Choose the preference value for \"${preferenceTopic.slug}\" value 
\"${value}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'list',\n choices: preferenceOptions,\n default: preferenceOptions.find((x) => x === value),\n },\n ]);\n purposeMapping.valueMapping[value] = preferenceValue;\n return;\n }\n\n if (preferenceTopic.type === PreferenceTopicType.MultiSelect) {\n const parsedValues = splitCsvToList(value);\n // need to do this serially\n await mapSeries(parsedValues, async (parsedValue) => {\n // if we already have a value, skip re-processing it again\n if (purposeMapping.valueMapping[parsedValue] !== undefined) {\n return;\n }\n const { preferenceValue } = await inquirer.prompt<{\n /** purpose value */\n preferenceValue: boolean;\n }>([\n {\n name: 'preferenceValue',\n // eslint-disable-next-line max-len\n message: `Choose the preference value for \"${preferenceTopic.slug}\" value \"${parsedValue}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'list',\n choices: preferenceOptions,\n default: preferenceOptions.find((x) => x === parsedValue),\n },\n ]);\n purposeMapping.valueMapping[parsedValue] = preferenceValue;\n });\n return;\n }\n\n throw new Error(\n `Unknown preference topic type: ${preferenceTopic.type}`,\n );\n }\n });\n\n currentState.columnToPurposeName[col] = purposeMapping;\n });\n\n return currentState;\n}\n/* eslint-enable no-param-reassign */\n","import {\n PreferenceQueryResponseItem,\n PreferenceUpdateItem,\n} from '@transcend-io/privacy-types';\nimport * as t from 'io-ts';\n\nexport const PurposeRowMapping = t.type({\n /**\n * The slug or trackingType of the purpose to map to\n *\n * e.g. `Marketing`\n */\n purpose: t.string,\n /**\n * If the column maps to a preference instead of a purpose\n * this is the slug of the purpose.\n *\n * null value indicates that this column maps to the true/false\n * value of the purpose\n */\n preference: t.union([t.string, t.null]),\n /**\n * The mapping between each row value and purpose/preference value.\n *\n * e.g. 
for a boolean preference or purpose\n * {\n * 'true': true,\n * 'false': false,\n * '': true,\n * }\n *\n * or for a single or multi select preference\n * {\n * '': true,\n * 'value1': 'Value1',\n * 'value2': 'Value2',\n * }\n */\n valueMapping: t.record(\n t.string,\n t.union([t.string, t.boolean, t.null, t.undefined]),\n ),\n});\n\n/** Override type */\nexport type PurposeRowMapping = t.TypeOf<typeof PurposeRowMapping>;\n\n/**\n * Mapping of column name to purpose row mapping.\n * This is used to map each column in the CSV to the relevant purpose and preference definitions in\n * transcend.\n */\nexport const ColumnPurposeMap = t.record(t.string, PurposeRowMapping);\n\n/** Override type */\nexport type ColumnPurposeMap = t.TypeOf<typeof ColumnPurposeMap>;\n\nexport const IdentifierMetadataForPreference = t.type({\n /** The identifier name */\n name: t.string,\n /** Is unique on preference store */\n isUniqueOnPreferenceStore: t.boolean,\n});\n\n/** Override type */\nexport type IdentifierMetadataForPreference = t.TypeOf<\n typeof IdentifierMetadataForPreference\n>;\n\n/**\n * Mapping of identifier name to the column name in the CSV file.\n * This is used to map each identifier name to the column in the CSV file.\n */\nexport const ColumnIdentifierMap = t.record(\n t.string,\n IdentifierMetadataForPreference,\n);\n\n/** Override type */\nexport type ColumnIdentifierMap = t.TypeOf<typeof ColumnIdentifierMap>;\n\nexport const FileMetadataState = t.intersection([\n t.type({\n /**\n * Definition of how to map each column in the CSV to\n * the relevant purpose and preference definitions in transcend\n */\n columnToPurposeName: t.record(t.string, PurposeRowMapping),\n /** Last time the file was last parsed at */\n lastFetchedAt: t.string,\n /**\n * Mapping of userId to the rows in the file that need to be uploaded\n * These uploads are overwriting non-existent preferences and are safe\n */\n pendingSafeUpdates: t.record(t.string, t.record(t.string, t.string)),\n /**\n * Mapping of userId to the rows in the file that need to be uploaded\n * these records have conflicts with existing consent preferences\n */\n pendingConflictUpdates: t.record(\n t.string,\n t.type({\n record: PreferenceQueryResponseItem,\n row: t.record(t.string, t.string),\n }),\n ),\n /**\n * Mapping of userId to the rows in the file that can be skipped because\n * their preferences are already in the store\n */\n skippedUpdates: t.record(t.string, t.record(t.string, t.string)),\n }),\n t.partial({\n /** Determine which column name in file maps to consent record identifier to upload on */\n identifierColumn: t.string,\n /** Determine which column name in file maps to the timestamp */\n timestampColum: t.string,\n }),\n]);\n\n/** Override type */\nexport type FileMetadataState = t.TypeOf<typeof FileMetadataState>;\n\n/**\n * This is the type of the receipts that are stored in the file\n * that is used to track the state of the upload process.\n * It is used to resume the upload process from where it left off.\n * It is used to persist the state of the upload process across multiple runs.\n */\nexport const PreferenceUpdateMap = t.record(\n t.string,\n // This can either be true to indicate the record is pending\n // or it can be an object showing the object\n // We only return a fixed number of results to avoid\n // making the JSON file too large\n t.union([t.boolean, PreferenceUpdateItem]),\n);\n\n/** Override type */\nexport type PreferenceUpdateMap = t.TypeOf<typeof PreferenceUpdateMap>;\n\n/**\n * This is the type of the 
pending updates that are safe to run without\n * conflicts with existing consent preferences.\n *\n * Key is primaryKey of the record in the file.\n * The value is the row in the file that is safe to upload.\n */\nexport const PendingSafePreferenceUpdates = t.record(\n t.string,\n // This can either be true to indicate the record is safe\n // or it can be an object showing the object\n // We only return a fixed number of results to avoid\n // making the JSON file too large\n t.union([t.boolean, t.record(t.string, t.string)]),\n);\n\n/** Override type */\nexport type PendingSafePreferenceUpdates = t.TypeOf<\n typeof PendingSafePreferenceUpdates\n>;\n\n/**\n * These are the updates that failed to be uploaded to the API.\n */\nexport const FailingPreferenceUpdates = t.record(\n t.string,\n t.type({\n /** Time upload ran at */\n uploadedAt: t.string,\n /** Attempts to upload that resulted in an error */\n error: t.string,\n /** The update body */\n update: PreferenceUpdateItem,\n }),\n);\n\n/** Override type */\nexport type FailingPreferenceUpdates = t.TypeOf<\n typeof FailingPreferenceUpdates\n>;\n\n/**\n * This is the type of the pending updates that are in conflict with existing consent preferences.\n *\n * Key is primaryKey of the record in the file.\n * The value is the row in the file that is pending upload.\n */\nexport const PendingWithConflictPreferenceUpdates = t.record(\n t.string,\n // We always return the conflicts for investigation\n t.type({\n /** Record to be inserted to transcend v1/preferences API */\n record: PreferenceQueryResponseItem,\n /** The row in the file that is pending upload */\n row: t.record(t.string, t.string),\n }),\n);\n\n/** Override type */\nexport type PendingWithConflictPreferenceUpdates = t.TypeOf<\n typeof PendingWithConflictPreferenceUpdates\n>;\n\n/**\n * The set of preference updates that are skipped\n * Key is primaryKey and value is the row in the CSV\n * that is skipped.\n *\n * This is usually because the preferences are already in the store\n * or there are duplicate rows in the CSV file that are identical.\n */\nexport const SkippedPreferenceUpdates = t.record(\n t.string,\n t.record(t.string, t.string),\n);\n\n/** Override type */\nexport type SkippedPreferenceUpdates = t.TypeOf<\n typeof SkippedPreferenceUpdates\n>;\n\n/** Persist this data between runs of the script */\nexport const PreferenceState = t.type({\n /**\n * Store a cache of previous files read in\n */\n fileMetadata: t.record(t.string, FileMetadataState),\n /**\n * The set of successful uploads to Transcend\n * Mapping from userId to the upload metadata\n */\n failingUpdates: t.record(\n t.string,\n t.type({\n /** Time upload ran at */\n uploadedAt: t.string,\n /** Attempts to upload that resulted in an error */\n error: t.string,\n /** The update body */\n update: PreferenceUpdateItem,\n }),\n ),\n /**\n * The set of pending uploads to Transcend\n * Mapping from userId to the upload metadata\n */\n pendingUpdates: t.record(t.string, PreferenceUpdateItem),\n});\n\n/** Override type */\nexport type PreferenceState = t.TypeOf<typeof PreferenceState>;\n"]}
@@ -1,9 +1,9 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }require('./chunk-
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }require('./chunk-COKHIMLO.cjs');var _chunkLR3CPNDMcjs = require('./chunk-LR3CPNDM.cjs');var _chunk7ZGJ4MJFcjs = require('./chunk-7ZGJ4MJF.cjs');require('./chunk-F3BRFYKD.cjs');var _chunkMOWFESXNcjs = require('./chunk-MOWFESXN.cjs');require('./chunk-5UBGZNDC.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunk5PTFTN6Jcjs = require('./chunk-5PTFTN6J.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkNI434OIIcjs = require('./chunk-NI434OII.cjs');require('./chunk-Q7I37FJV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _bluebird = require('bluebird');var _path = require('path');var _fs = require('fs'); var _fs2 = _interopRequireDefault(_fs);async function U({auth:T,start:C,end:f,folder:r,bin:p,transcendUrl:N}){let l=p;Object.values(_chunk5PTFTN6Jcjs.Sc).includes(l)||(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to parse argument "bin" with value "${p}"
Expected one of:
-
${Object.values(
-
`)}`)),this.process.exit(1));let n=new Date(C),o=f?new Date(f):new Date;Number.isNaN(n.getTime())&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Start date provided is invalid date. Got --start="${C}" expected --start="01/01/2023"`)),this.process.exit(1)),Number.isNaN(o.getTime())&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`End date provided is invalid date. Got --end="${f}" expected --end="01/01/2023"`)),this.process.exit(1)),n>o&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Got a start date "${n.toISOString()}" that was larger than the end date "${o.toISOString()}". Start date must be before end date.`)),this.process.exit(1)),_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);let m=await
+
${Object.values(_chunk5PTFTN6Jcjs.Sc).join(`
+
`)}`)),this.process.exit(1));let n=new Date(C),o=f?new Date(f):new Date;Number.isNaN(n.getTime())&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Start date provided is invalid date. Got --start="${C}" expected --start="01/01/2023"`)),this.process.exit(1)),Number.isNaN(o.getTime())&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`End date provided is invalid date. Got --end="${f}" expected --end="01/01/2023"`)),this.process.exit(1)),n>o&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Got a start date "${n.toISOString()}" that was larger than the end date "${o.toISOString()}". Start date must be before end date.`)),this.process.exit(1)),_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);let m=await _chunkMOWFESXNcjs.b.call(void 0, T);if(_fs2.default.existsSync(r)&&!_fs2.default.lstatSync(r).isDirectory()&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red('The provided argument "folder" was passed a file. expected: folder="./consent-metrics/"')),this.process.exit(1)),_fs.existsSync.call(void 0, r)||_fs.mkdirSync.call(void 0, r),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Pulling consent metrics from start=${n.toString()} to end=${o.toISOString()} with bin size "${p}"`)),typeof m=="string"){try{let i=_chunk5PTFTN6Jcjs.wc.call(void 0, N,m),s=await _chunk7ZGJ4MJFcjs.d.call(void 0, i,{bin:l,start:n,end:o});Object.entries(s).forEach(([g,c])=>{c.forEach(({points:u,name:d})=>{let a=_path.join.call(void 0, r,`${g}_${d}.csv`);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing configuration to file "${a}"...`)),_chunkLR3CPNDMcjs.c.call(void 0, a,u.map(({key:h,value:$})=>({timestamp:h,value:$})))})})}catch(i){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error occurred syncing the schema: ${i.message}`)),this.process.exit(1)}_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully synced consent metrics to disk in folder "${r}"! View at ${_chunkNI434OIIcjs.p}`))}else{let i=[];await _bluebird.mapSeries.call(void 0, m,async(s,g)=>{let c=`[${g+1}/${m.length}][${s.organizationName}] `;_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`~~~
${c}Attempting to pull consent metrics...
-
~~~`));let u=
-
//# sourceMappingURL=impl-
+
~~~`));let u=_chunk5PTFTN6Jcjs.wc.call(void 0, N,s.apiKey);try{let d=await _chunk7ZGJ4MJFcjs.d.call(void 0, u,{bin:l,start:n,end:o}),a=_path.join.call(void 0, r,s.organizationName);_fs.existsSync.call(void 0, a)||_fs.mkdirSync.call(void 0, a),Object.entries(d).forEach(([h,$])=>{$.forEach(({points:A,name:F})=>{let D=_path.join.call(void 0, a,`${h}_${F}.csv`);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing configuration to file "${D}"...`)),_chunkLR3CPNDMcjs.c.call(void 0, D,A.map(({key:G,value:z})=>({timestamp:G,value:z})))})}),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`${c}Successfully pulled configuration!`))}catch (e2){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`${c}Failed to sync configuration.`)),i.push(s.organizationName)}}),i.length>0&&(_chunkZUNVPK23cjs.a.info(_colors2.default.red(`Sync encountered errors for "${i.join(",")}". View output above for more information, or check out ${_chunkNI434OIIcjs.p}`)),this.process.exit(1))}}exports.pullConsentMetrics = U;
+
//# sourceMappingURL=impl-32ZRDOCN.cjs.map
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-32ZRDOCN.cjs","../src/commands/consent/pull-consent-metrics/impl.ts"],"names":["pullConsentMetrics","auth","start","end","folder","bin","transcendUrl","parsedBin","ConsentManagerMetricBin","logger","colors"],"mappings":"AAAA,yMAA4B,wDAAyC,wDAAyC,gCAA6B,wDAAyC,gCAA6B,wDAAyC,wDAAkD,gCAA6B,wDAAyC,wDAAyC,gCAA6B,gFCEra,oCACO,4BACL,gEACqB,MAoB1C,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,KAAA,CAAAC,CAAAA,CACA,GAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,GAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CACF,CAAA,CACe,CAEf,IAAMC,CAAAA,CAAYF,CAAAA,CACb,MAAA,CAAO,MAAA,CAAOG,oBAAuB,CAAA,CAAE,QAAA,CAASD,CAAS,CAAA,EAAA,CAC5DE,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,2CAAA,EAA8CL,CAAG,CAAA;AAAA;AAAA,EACzB,MAAA,CAAO,MAAA,CAAOG,oBAAuB,CAAA,CAAE,IAAA,CAC3D,CAAA;AAAA,CACF,CAAC,CAAA,CAAA;AA+GD;AAAgB;AAAA;AAAA,GAAA","file":"/home/runner/work/cli/cli/dist/impl-32ZRDOCN.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { mapSeries } from 'bluebird';\nimport { join } from 'node:path';\nimport fs, { existsSync, mkdirSync } from 'node:fs';\nimport {\n buildTranscendGraphQLClient,\n ConsentManagerMetricBin,\n} from '../../../lib/graphql';\nimport { validateTranscendAuth } from '../../../lib/api-keys';\nimport { ADMIN_DASH_INTEGRATIONS } from '../../../constants';\nimport { pullConsentManagerMetrics } from '../../../lib/consent-manager';\nimport { writeCsv } from '../../../lib/cron';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface PullConsentMetricsCommandFlags {\n auth: string;\n start: Date;\n end?: Date;\n folder: string;\n bin: string;\n transcendUrl: string;\n}\n\nexport async function pullConsentMetrics(\n this: LocalContext,\n {\n auth,\n start,\n end,\n folder,\n bin,\n transcendUrl,\n }: PullConsentMetricsCommandFlags,\n): Promise<void> {\n // Validate bin\n const parsedBin = bin as ConsentManagerMetricBin;\n if (!Object.values(ConsentManagerMetricBin).includes(parsedBin)) {\n logger.error(\n colors.red(\n `Failed to parse argument \"bin\" with value \"${bin}\"\\n` +\n `Expected one of: \\n${Object.values(ConsentManagerMetricBin).join(\n '\\n',\n )}`,\n ),\n );\n this.process.exit(1);\n }\n\n // Parse the dates\n const startDate = new Date(start);\n const endDate = end ? new Date(end) : new Date();\n if (Number.isNaN(startDate.getTime())) {\n logger.error(\n colors.red(\n `Start date provided is invalid date. Got --start=\"${start}\" expected --start=\"01/01/2023\"`,\n ),\n );\n this.process.exit(1);\n }\n if (Number.isNaN(endDate.getTime())) {\n logger.error(\n colors.red(\n `End date provided is invalid date. Got --end=\"${end}\" expected --end=\"01/01/2023\"`,\n ),\n );\n this.process.exit(1);\n }\n if (startDate > endDate) {\n logger.error(\n colors.red(\n `Got a start date \"${startDate.toISOString()}\" that was larger than the end date \"${endDate.toISOString()}\". ` +\n 'Start date must be before end date.',\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Ensure folder either does not exist or is not a file\n if (fs.existsSync(folder) && !fs.lstatSync(folder).isDirectory()) {\n logger.error(\n colors.red(\n 'The provided argument \"folder\" was passed a file. 
expected: folder=\"./consent-metrics/\"',\n ),\n );\n this.process.exit(1);\n }\n\n // Create the folder if it does not exist\n if (!existsSync(folder)) {\n mkdirSync(folder);\n }\n\n logger.info(\n colors.magenta(\n `Pulling consent metrics from start=${startDate.toString()} to end=${endDate.toISOString()} with bin size \"${bin}\"`,\n ),\n );\n\n // Sync to Disk\n if (typeof apiKeyOrList === 'string') {\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKeyOrList);\n\n // Pull the metrics\n const configuration = await pullConsentManagerMetrics(client, {\n bin: parsedBin,\n start: startDate,\n end: endDate,\n });\n\n // Write to file\n Object.entries(configuration).forEach(([metricName, metrics]) => {\n metrics.forEach(({ points, name }) => {\n const file = join(folder, `${metricName}_${name}.csv`);\n logger.info(\n colors.magenta(`Writing configuration to file \"${file}\"...`),\n );\n writeCsv(\n file,\n points.map(({ key, value }) => ({\n timestamp: key,\n value,\n })),\n );\n });\n });\n } catch (err) {\n logger.error(\n colors.red(`An error occurred syncing the schema: ${err.message}`),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced consent metrics to disk in folder \"${folder}\"! View at ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n } else {\n const encounteredErrors: string[] = [];\n await mapSeries(apiKeyOrList, async (apiKey, ind) => {\n const prefix = `[${ind + 1}/${apiKeyOrList.length}][${\n apiKey.organizationName\n }] `;\n logger.info(\n colors.magenta(\n `~~~\\n\\n${prefix}Attempting to pull consent metrics...\\n\\n~~~`,\n ),\n );\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKey.apiKey);\n\n try {\n const configuration = await pullConsentManagerMetrics(client, {\n bin: parsedBin,\n start: startDate,\n end: endDate,\n });\n\n // ensure folder exists for that organization\n const subFolder = join(folder, apiKey.organizationName);\n if (!existsSync(subFolder)) {\n mkdirSync(subFolder);\n }\n\n // Write to file\n Object.entries(configuration).forEach(([metricName, metrics]) => {\n metrics.forEach(({ points, name }) => {\n const file = join(subFolder, `${metricName}_${name}.csv`);\n logger.info(\n colors.magenta(`Writing configuration to file \"${file}\"...`),\n );\n writeCsv(\n file,\n points.map(({ key, value }) => ({\n timestamp: key,\n value,\n })),\n );\n });\n });\n\n logger.info(\n colors.green(`${prefix}Successfully pulled configuration!`),\n );\n } catch (err) {\n logger.error(colors.red(`${prefix}Failed to sync configuration.`));\n encounteredErrors.push(apiKey.organizationName);\n }\n });\n\n if (encounteredErrors.length > 0) {\n logger.info(\n colors.red(\n `Sync encountered errors for \"${encounteredErrors.join(\n ',',\n )}\". View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n }\n}\n"]}
@@ -1,6 +1,6 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkIBTP5OXEcjs = require('./chunk-IBTP5OXE.cjs');var _chunkRE5YALEOcjs = require('./chunk-RE5YALEO.cjs');var
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkIBTP5OXEcjs = require('./chunk-IBTP5OXE.cjs');var _chunkRE5YALEOcjs = require('./chunk-RE5YALEO.cjs');var _chunkMOWFESXNcjs = require('./chunk-MOWFESXN.cjs');require('./chunk-5UBGZNDC.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunk5PTFTN6Jcjs = require('./chunk-5PTFTN6J.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkNI434OIIcjs = require('./chunk-NI434OII.cjs');require('./chunk-Q7I37FJV.cjs');var _bluebird = require('bluebird');var _fs = require('fs');var _path = require('path');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function I({transcendUrl:i,auth:c,pageSize:h,publishToPrivacyCenter:y,contents:l,deleteExtraAttributeValues:m=!1,classifyService:f=!1}){let u=_chunk5PTFTN6Jcjs.wc.call(void 0, i,c);try{return!await _chunk5PTFTN6Jcjs.ng.call(void 0, l,u,{pageSize:h,publishToPrivacyCenter:y,classifyService:f,deleteExtraAttributeValues:m})}catch(o){return _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An unexpected error occurred syncing the schema: ${o.message}`)),!1}}async function U({file:i="./transcend.yml",transcendUrl:c,auth:h,variables:y,pageSize:l,publishToPrivacyCenter:m,classifyService:f,deleteExtraAttributeValues:u}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);let o=await _chunkMOWFESXNcjs.b.call(void 0, h),N=_chunk5PTFTN6Jcjs.Uf.call(void 0, y),p;if(Array.isArray(o)&&_fs.lstatSync.call(void 0, i).isDirectory()?p=_chunkMOWFESXNcjs.c.call(void 0, i).map(e=>_path.join.call(void 0, i,e)):p=i.split(","),p.length<1)throw new Error("No file specified!");let s=p.map(e=>{_fs.existsSync.call(void 0, e)?_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Reading file "${e}"...`)):(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`The file path does not exist on disk: ${e}. You can specify the filepath using --file=./examples/transcend.yml`)),this.process.exit(1));try{let r=_chunkRE5YALEOcjs.d.call(void 0, e,N);return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully read in "${e}"`)),{content:r,name:e.split("/").pop().replace(".yml","")}}catch(r){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`The shape of your yaml file is invalid with the following errors: ${r.message}`)),this.process.exit(1)}});if(typeof o=="string"){let[e,...r]=s.map(({content:d})=>d),$=_chunkIBTP5OXEcjs.a.call(void 0, e,...r);await I({transcendUrl:c,auth:o,contents:$,publishToPrivacyCenter:m,deleteExtraAttributeValues:u,pageSize:l,classifyService:!!f})||(_chunkZUNVPK23cjs.a.info(_colors2.default.red(`Sync encountered errors. 
View output above for more information, or check out ${_chunkNI434OIIcjs.p}`)),this.process.exit(1))}else{if(s.length!==1&&s.length!==o.length)throw new Error(`Expected list of yml files to be equal to the list of API keys.Got ${s.length} YML file${s.length===1?"":"s"} and ${o.length} API key${o.length===1?"":"s"}`);let e=[];await _bluebird.mapSeries.call(void 0, o,async(r,$)=>{let a=`[${$+1}/${o.length}][${r.organizationName}] `;_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`~~~
${a}Attempting to push configuration...
-
~~~`));let d=s.length===1?s[0].content:_optionalChain([s, 'access', _ => _.find, 'call', _2 => _2(E=>E.name===r.organizationName), 'optionalAccess', _3 => _3.content]);if(!d){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`${a}Failed to find transcend.yml file for organization: "${r.organizationName}".`)),e.push(r.organizationName);return}await I({transcendUrl:c,auth:r.apiKey,contents:d,pageSize:l,publishToPrivacyCenter:m,deleteExtraAttributeValues:u,classifyService:f})?_chunkZUNVPK23cjs.a.info(_colors2.default.green(`${a}Successfully pushed configuration!`)):(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`${a}Failed to sync configuration.`)),e.push(r.organizationName))}),e.length>0&&(_chunkZUNVPK23cjs.a.info(_colors2.default.red(`Sync encountered errors for "${e.join(",")}". View output above for more information, or check out ${
-
//# sourceMappingURL=impl-
+
~~~`));let d=s.length===1?s[0].content:_optionalChain([s, 'access', _ => _.find, 'call', _2 => _2(E=>E.name===r.organizationName), 'optionalAccess', _3 => _3.content]);if(!d){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`${a}Failed to find transcend.yml file for organization: "${r.organizationName}".`)),e.push(r.organizationName);return}await I({transcendUrl:c,auth:r.apiKey,contents:d,pageSize:l,publishToPrivacyCenter:m,deleteExtraAttributeValues:u,classifyService:f})?_chunkZUNVPK23cjs.a.info(_colors2.default.green(`${a}Successfully pushed configuration!`)):(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`${a}Failed to sync configuration.`)),e.push(r.organizationName))}),e.length>0&&(_chunkZUNVPK23cjs.a.info(_colors2.default.red(`Sync encountered errors for "${e.join(",")}". View output above for more information, or check out ${_chunkNI434OIIcjs.p}`)),this.process.exit(1))}_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully synced yaml file to Transcend! View at ${_chunkNI434OIIcjs.p}`))}exports.push = U;
+
//# sourceMappingURL=impl-3VTN6SPE.cjs.map
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-3VTN6SPE.cjs","../src/commands/inventory/push/impl.ts"],"names":["syncConfiguration","transcendUrl","auth","pageSize","publishToPrivacyCenter","contents","deleteExtraAttributeValues","classifyService","client","buildTranscendGraphQLClient","syncConfigurationToTranscend","err","logger","colors"],"mappings":"AAAA,quBAAwC,wDAAyC,wDAAgD,gCAA6B,wDAAyC,wDAA0D,gCAA6B,wDAAyC,wDAAyC,gCAA6B,oCCGnX,wBACY,4BACjB,gFAEF,MAmBnB,SAAeA,CAAAA,CAAkB,CAC/B,YAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,QAAA,CAAAC,CAAAA,CACA,sBAAA,CAAAC,CAAAA,CACA,QAAA,CAAAC,CAAAA,CACA,0BAAA,CAAAC,CAAAA,CAA6B,CAAA,CAAA,CAC7B,eAAA,CAAAC,CAAAA,CAAkB,CAAA,CACpB,CAAA,CAeqB,CACnB,IAAMC,CAAAA,CAASC,kCAAAA,CAA4BR,CAAcC,CAAI,CAAA,CAG7D,GAAI,CAWF,MAAO,CAVkB,MAAMQ,kCAAAA,CAC7BL,CACAG,CAAAA,CACA,CACE,QAAA,CAAAL,CAAAA,CACA,sBAAA,CAAAC,CAAAA,CACA,eAAA,CAAAG,CAAAA,CACA,0BAAA,CAAAD,CACF,CACF,CAEF,CAAA,KAAA,CAASK,CAAAA,CAAK,CACZ,OAAAC,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,iDAAA,EAAoDF,CAAAA,CAAI,OAAO,CAAA,CAAA;AAsI7D;AAAgB;AAAA;AA8D1B,GAAA","file":"/home/runner/work/cli/cli/dist/impl-3VTN6SPE.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\n\nimport { logger } from '../../../logger';\nimport { mapSeries } from 'bluebird';\nimport { existsSync, lstatSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { readTranscendYaml } from '../../../lib/readTranscendYaml';\nimport colors from 'colors';\nimport {\n buildTranscendGraphQLClient,\n syncConfigurationToTranscend,\n} from '../../../lib/graphql';\n\nimport { ADMIN_DASH_INTEGRATIONS } from '../../../constants';\nimport { TranscendInput } from '../../../codecs';\nimport { validateTranscendAuth, listFiles } from '../../../lib/api-keys';\nimport { mergeTranscendInputs } from '../../../lib/mergeTranscendInputs';\nimport { parseVariablesFromString } from '../../../lib/helpers/parseVariablesFromString';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n/**\n * Sync configuration to Transcend\n *\n * @param options - Options\n * @returns True if synced successfully, false if error occurs\n */\nasync function syncConfiguration({\n transcendUrl,\n auth,\n pageSize,\n publishToPrivacyCenter,\n contents,\n deleteExtraAttributeValues = false,\n classifyService = false,\n}: {\n /** Transcend YAML */\n contents: TranscendInput;\n /** Transcend URL */\n transcendUrl: string;\n /** API key */\n auth: string;\n /** Page size */\n pageSize: number;\n /** Skip privacy center publish step */\n publishToPrivacyCenter: boolean;\n /** classify data flow service if missing */\n classifyService?: boolean;\n /** Delete attributes when syncing */\n deleteExtraAttributeValues?: boolean;\n}): Promise<boolean> {\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Sync to Transcend\n try {\n const encounteredError = await syncConfigurationToTranscend(\n contents,\n client,\n {\n pageSize,\n publishToPrivacyCenter,\n classifyService,\n deleteExtraAttributeValues,\n },\n );\n return !encounteredError;\n } catch (err) {\n logger.error(\n colors.red(\n `An unexpected error occurred syncing the schema: ${err.message}`,\n ),\n );\n return false;\n }\n}\n\nexport interface PushCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n pageSize: number;\n variables: string;\n publishToPrivacyCenter: boolean;\n classifyService: boolean;\n deleteExtraAttributeValues: boolean;\n}\n\nexport async function push(\n this: LocalContext,\n {\n file = 
'./transcend.yml',\n transcendUrl,\n auth,\n variables,\n pageSize,\n publishToPrivacyCenter,\n classifyService,\n deleteExtraAttributeValues,\n }: PushCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Parse out the variables\n const vars = parseVariablesFromString(variables);\n\n // check if we are being passed a list of API keys and a list of files\n let fileList: string[];\n if (Array.isArray(apiKeyOrList) && lstatSync(file).isDirectory()) {\n fileList = listFiles(file).map((filePath) => join(file, filePath));\n } else {\n fileList = file.split(',');\n }\n\n // Ensure at least one file is parsed\n if (fileList.length < 1) {\n throw new Error('No file specified!');\n }\n\n // eslint-disable-next-line array-callback-return,consistent-return\n const transcendInputs = fileList.map((filePath) => {\n // Ensure yaml file exists on disk\n if (!existsSync(filePath)) {\n logger.error(\n colors.red(\n `The file path does not exist on disk: ${filePath}. You can specify the filepath using --file=./examples/transcend.yml`,\n ),\n );\n this.process.exit(1);\n } else {\n logger.info(colors.magenta(`Reading file \"${filePath}\"...`));\n }\n\n try {\n // Read in the yaml file and validate it's shape\n const newContents = readTranscendYaml(filePath, vars);\n logger.info(colors.green(`Successfully read in \"${filePath}\"`));\n return {\n content: newContents,\n name: filePath.split('/').pop()!.replace('.yml', ''),\n };\n } catch (err) {\n logger.error(\n colors.red(\n `The shape of your yaml file is invalid with the following errors: ${err.message}`,\n ),\n );\n this.process.exit(1);\n }\n });\n\n // process a single API key\n if (typeof apiKeyOrList === 'string') {\n // if passed multiple inputs, merge them together\n const [base, ...rest] = transcendInputs.map(({ content }) => content);\n const contents = mergeTranscendInputs(base, ...rest);\n\n // sync the configuration\n const success = await syncConfiguration({\n transcendUrl,\n auth: apiKeyOrList,\n contents,\n publishToPrivacyCenter,\n deleteExtraAttributeValues,\n pageSize,\n classifyService: !!classifyService,\n });\n\n // exist with error code\n if (!success) {\n logger.info(\n colors.red(\n `Sync encountered errors. View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n } else {\n // if passed multiple inputs, expect them to be one per instance\n if (\n transcendInputs.length !== 1 &&\n transcendInputs.length !== apiKeyOrList.length\n ) {\n throw new Error(\n 'Expected list of yml files to be equal to the list of API keys.' +\n `Got ${transcendInputs.length} YML file${\n transcendInputs.length === 1 ? '' : 's'\n } and ${apiKeyOrList.length} API key${\n apiKeyOrList.length === 1 ? '' : 's'\n }`,\n );\n }\n\n const encounteredErrors: string[] = [];\n await mapSeries(apiKeyOrList, async (apiKey, ind) => {\n const prefix = `[${ind + 1}/${apiKeyOrList.length}][${\n apiKey.organizationName\n }] `;\n logger.info(\n colors.magenta(\n `~~~\\n\\n${prefix}Attempting to push configuration...\\n\\n~~~`,\n ),\n );\n\n // use the merged contents if 1 yml passed, else use the contents that map to that organization\n const useContents =\n transcendInputs.length === 1\n ? 
transcendInputs[0].content\n : transcendInputs.find(\n (input) => input.name === apiKey.organizationName,\n )?.content;\n\n // Throw error if cannot find a yml file matching that organization name\n if (!useContents) {\n logger.error(\n colors.red(\n `${prefix}Failed to find transcend.yml file for organization: \"${apiKey.organizationName}\".`,\n ),\n );\n encounteredErrors.push(apiKey.organizationName);\n return;\n }\n\n const success = await syncConfiguration({\n transcendUrl,\n auth: apiKey.apiKey,\n contents: useContents,\n pageSize,\n publishToPrivacyCenter,\n deleteExtraAttributeValues,\n classifyService,\n });\n\n if (success) {\n logger.info(\n colors.green(`${prefix}Successfully pushed configuration!`),\n );\n } else {\n logger.error(colors.red(`${prefix}Failed to sync configuration.`));\n encounteredErrors.push(apiKey.organizationName);\n }\n });\n\n if (encounteredErrors.length > 0) {\n logger.info(\n colors.red(\n `Sync encountered errors for \"${encounteredErrors.join(\n ',',\n )}\". View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced yaml file to Transcend! View at ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n}\n"]}
@@ -1,2 +1,2 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true});var
-
//# sourceMappingURL=impl-
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunkGG7P2RO6cjs = require('./chunk-GG7P2RO6.cjs');require('./chunk-LR3CPNDM.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');require('./chunk-5PTFTN6J.cjs');require('./chunk-LCDYXJN6.cjs');require('./chunk-ZUNVPK23.cjs');require('./chunk-NI434OII.cjs');require('./chunk-Q7I37FJV.cjs');async function u({auth:r,transcendUrl:i,file:e,enricherId:o,concurrency:s,markSilent:a,sombraAuth:m}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit),await _chunkGG7P2RO6cjs.d.call(void 0, {file:e,transcendUrl:i,enricherId:o,concurrency:s,markSilent:a,auth:r,sombraAuth:m})}exports.pushIdentifiers = u;
+
//# sourceMappingURL=impl-4YI4ERPI.cjs.map
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-4YI4ERPI.cjs","../src/commands/request/preflight/push-identifiers/impl.ts"],"names":["pushIdentifiers","auth","transcendUrl","file","enricherId","concurrency","markSilent","sombraAuth","doneInputValidation","pushManualEnrichmentIdentifiersFromCsv"],"mappings":"AAAA,iIAAwC,gCAA6B,wDAAyC,gCAA6B,gCAA6B,gCAA6B,gCAA6B,gCAA6B,MCc/P,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CACF,CAAA,CACe,CACfC,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAErC,MAAMC,iCAAAA,CACJ,IAAA,CAAAN,CAAAA,CACA,YAAA,CAAAD,CAAAA,CACA,UAAA,CAAAE,CAAAA,CACA,WAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,IAAA,CAAAL,CAAAA,CACA,UAAA,CAAAM,CACF,CAAC,CACH,CAAA,4BAAA","file":"/home/runner/work/cli/cli/dist/impl-4YI4ERPI.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../../context';\nimport { pushManualEnrichmentIdentifiersFromCsv } from '../../../../lib/manual-enrichment';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\n\nexport interface PushIdentifiersCommandFlags {\n auth: string;\n enricherId: string;\n sombraAuth?: string;\n transcendUrl: string;\n file: string;\n markSilent: boolean;\n concurrency: number;\n}\n\nexport async function pushIdentifiers(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n file,\n enricherId,\n concurrency,\n markSilent,\n sombraAuth,\n }: PushIdentifiersCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await pushManualEnrichmentIdentifiersFromCsv({\n file,\n transcendUrl,\n enricherId,\n concurrency,\n markSilent,\n auth,\n sombraAuth,\n });\n}\n"]}
@@ -1,2 +1,2 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var
-
//# sourceMappingURL=impl-
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkKYVDLURLcjs = require('./chunk-KYVDLURL.cjs');require('./chunk-COKHIMLO.cjs');var _chunkLR3CPNDMcjs = require('./chunk-LR3CPNDM.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunk5PTFTN6Jcjs = require('./chunk-5PTFTN6J.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkNI434OIIcjs = require('./chunk-NI434OII.cjs');require('./chunk-Q7I37FJV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function j({auth:p,file:o,transcendUrl:f,dataSiloIds:g,subCategories:C,status:S,includeEncryptedSnippets:n}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit);try{let s=_chunk5PTFTN6Jcjs.wc.call(void 0, f,p),b=await _chunkKYVDLURLcjs.b.call(void 0, s,{dataSiloIds:g,subCategories:C,status:S,includeEncryptedSnippets:n});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Writing unstructured discovery files to file "${o}"...`));let i=[],h=b.map(t=>{let a={"Entry ID":t.id,"Data Silo ID":t.dataSiloId,"Object Path ID":t.scannedObjectPathId,"Object ID":t.scannedObjectId,...n?{Entry:t.name,"Context Snippet":t.contextSnippet}:{},"Data Category":`${t.dataSubCategory.category}:${t.dataSubCategory.name}`,"Classification Status":t.status,"Confidence Score":t.confidence,"Classification Method":t.classificationMethod,"Classifier Version":t.classifierVersion};return i=_chunkNI434OIIcjs.j.call(void 0, [...i,...Object.keys(a)]),a});_chunkLR3CPNDMcjs.c.call(void 0, o,h,i)}catch(s){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error occurred syncing the unstructured discovery files: ${s.message}`)),this.process.exit(1)}_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully synced unstructured discovery files to disk at ${o}!`))}exports.pullUnstructuredDiscoveryFiles = j;
+
//# sourceMappingURL=impl-5H724WN5.cjs.map
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-5H724WN5.cjs","../src/commands/inventory/pull-unstructured-discovery-files/impl.ts"],"names":["pullUnstructuredDiscoveryFiles","auth","file","transcendUrl","dataSiloIds","subCategories","status","includeEncryptedSnippets","doneInputValidation","client","buildTranscendGraphQLClient","entries","pullUnstructuredSubDataPointRecommendations","logger","colors","headers","inputs","entry","result"],"mappings":"AAAA,iOAAwC,gCAA6B,wDAAyC,wDAAyC,wDAA0C,gCAA6B,wDAAyC,wDAAyC,gCAA6B,gFCE1T,MAkBnB,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,YAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CAAAA,CACA,aAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,wBAAA,CAAAC,CACF,CAAA,CACe,CACfC,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAErC,GAAI,CAEF,IAAMC,CAAAA,CAASC,kCAAAA,CAA4BP,CAAcF,CAAI,CAAA,CAEvDU,CAAAA,CAAU,MAAMC,iCAAAA,CAA4CH,CAAQ,CACxE,WAAA,CAAAL,CAAAA,CACA,aAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,wBAAA,CAAAC,CACF,CAAC,CAAA,CAEDM,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,OAAA,CACL,CAAA,8CAAA,EAAiDZ,CAAI,CAAA,IAAA,CACvD,CACF,CAAA,CACA,IAAIa,CAAAA,CAAoB,CAAC,CAAA,CACnBC,CAAAA,CAASL,CAAAA,CAAQ,GAAA,CAAKM,CAAAA,EAAU,CACpC,IAAMC,CAAAA,CAAS,CACb,UAAA,CAAYD,CAAAA,CAAM,EAAA,CAClB,cAAA,CAAgBA,CAAAA,CAAM,UAAA,CACtB,gBAAA,CAAkBA,CAAAA,CAAM,mBAAA,CACxB,WAAA,CAAaA,CAAAA,CAAM,eAAA,CACnB,GAAIV,CAAAA,CACA,CAAE,KAAA,CAAOU,CAAAA,CAAM,IAAA,CAAM,iBAAA,CAAmBA,CAAAA,CAAM,cAAe,CAAA,CAC7D,CAAC,CAAA,CACL,eAAA,CAAiB,CAAA,EAAA","file":"/home/runner/work/cli/cli/dist/impl-5H724WN5.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { uniq } from 'lodash-es';\nimport { writeCsv } from '../../../lib/cron';\nimport { pullUnstructuredSubDataPointRecommendations } from '../../../lib/data-inventory';\nimport { buildTranscendGraphQLClient } from '../../../lib/graphql';\nimport { logger } from '../../../logger';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface PullUnstructuredDiscoveryFilesCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n subCategories?: string[];\n status?: UnstructuredSubDataPointRecommendationStatus[];\n includeEncryptedSnippets: boolean;\n}\n\nexport async function pullUnstructuredDiscoveryFiles(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n dataSiloIds,\n subCategories,\n status,\n includeEncryptedSnippets,\n }: PullUnstructuredDiscoveryFilesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const entries = await pullUnstructuredSubDataPointRecommendations(client, {\n dataSiloIds,\n subCategories, // TODO: https://transcend.height.app/T-40482 - do by name not ID\n status,\n includeEncryptedSnippets,\n });\n\n logger.info(\n colors.magenta(\n `Writing unstructured discovery files to file \"${file}\"...`,\n ),\n );\n let headers: string[] = [];\n const inputs = entries.map((entry) => {\n const result = {\n 'Entry ID': entry.id,\n 'Data Silo ID': entry.dataSiloId,\n 'Object Path ID': entry.scannedObjectPathId,\n 'Object ID': entry.scannedObjectId,\n ...(includeEncryptedSnippets\n ? 
{ Entry: entry.name, 'Context Snippet': entry.contextSnippet }\n : {}),\n 'Data Category': `${entry.dataSubCategory.category}:${entry.dataSubCategory.name}`,\n 'Classification Status': entry.status,\n 'Confidence Score': entry.confidence,\n 'Classification Method': entry.classificationMethod,\n 'Classifier Version': entry.classifierVersion,\n };\n headers = uniq([...headers, ...Object.keys(result)]);\n return result;\n });\n writeCsv(file, inputs, headers);\n } catch (err) {\n logger.error(\n colors.red(\n `An error occurred syncing the unstructured discovery files: ${err.message}`,\n ),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced unstructured discovery files to disk at ${file}!`,\n ),\n );\n}\n"]}
@@ -1,2 +1,2 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var
-
//# sourceMappingURL=impl-
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkBGZ6TA6Ccjs = require('./chunk-BGZ6TA6C.cjs');var _chunkRE5YALEOcjs = require('./chunk-RE5YALEO.cjs');var _chunkMOWFESXNcjs = require('./chunk-MOWFESXN.cjs');require('./chunk-5UBGZNDC.cjs');var _chunkWKCTKYN4cjs = require('./chunk-WKCTKYN4.cjs');var _chunk5PTFTN6Jcjs = require('./chunk-5PTFTN6J.cjs');require('./chunk-LCDYXJN6.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkNI434OIIcjs = require('./chunk-NI434OII.cjs');require('./chunk-Q7I37FJV.cjs');var _path = require('path');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _fs = require('fs');async function J({auth:F,dataFlowsYmlFolder:r,output:x,ignoreYmls:C=[],transcendUrl:N}){_chunkWKCTKYN4cjs.a.call(void 0, this.process.exit),(!_fs.existsSync.call(void 0, r)||!_fs.lstatSync.call(void 0, r).isDirectory())&&(_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Folder does not exist: "${r}"`)),this.process.exit(1));let k=C.map(t=>t.split(".")[0]),l=_chunkMOWFESXNcjs.c.call(void 0, r).map(t=>{let{"data-flows":o=[]}=_chunkRE5YALEOcjs.d.call(void 0, _path.join.call(void 0, r,t)),{adTechDataSilos:m,siteTechDataSilos:i}=_chunkBGZ6TA6Ccjs.a.call(void 0, o,{serviceToSupportedIntegration:d,serviceToTitle:u});return{adTechDataSilos:m,siteTechDataSilos:i,organizationName:t.split(".")[0]}}),a={};l.forEach(({adTechDataSilos:t,siteTechDataSilos:o,organizationName:m})=>{[...t,...o].forEach(e=>{let n=e["outer-type"]||e.integrationName;a[n]||(a[n]=[]),a[n].push(m),a[n]=[...new Set(a[n])]})});let p=[...new Set(l.map(({adTechDataSilos:t})=>t.map(o=>o["outer-type"]||o.integrationName)).flat())],f=_chunkNI434OIIcjs.c.call(void 0, [...new Set(l.map(({siteTechDataSilos:t})=>t.map(o=>o["outer-type"]||o.integrationName)).flat())],p),s={};l.forEach(({adTechDataSilos:t,siteTechDataSilos:o})=>{[...t,...o].forEach(i=>{let e=i["outer-type"]||i.integrationName,n=_optionalChain([i, 'access', _ => _.attributes, 'optionalAccess', _2 => _2.find, 'call', _3 => _3(E=>E.key==="Found On Domain")]);s[e]||(s[e]=[]),s[e].push(..._optionalChain([n, 'optionalAccess', _4 => _4.values])||[]),s[e]=[...new Set(s[e])]})});let A=_chunk5PTFTN6Jcjs.wc.call(void 0, N,F),{serviceToTitle:u,serviceToSupportedIntegration:d}=await _chunk5PTFTN6Jcjs.ud.call(void 0, A),h=[...p,...f].map(t=>({title:u[t],...d[t]?{integrationName:t}:{integrationName:"promptAPerson","outer-type":t},attributes:[{key:"Tech Type",values:["Ad Tech"]},{key:"Business Units",values:_chunkNI434OIIcjs.c.call(void 0, a[t]||[],k)},{key:"Found On Domain",values:s[t]||[]}]}));_chunkZUNVPK23cjs.a.log(`Total Services: ${h.length}`),_chunkZUNVPK23cjs.a.log(`Ad Tech Services: ${p.length}`),_chunkZUNVPK23cjs.a.log(`Site Tech Services: ${f.length}`),_chunkRE5YALEOcjs.e.call(void 0, x,{"data-silos":h})}exports.deriveDataSilosFromDataFlowsCrossInstance = J;
+
//# sourceMappingURL=impl-6S3K72GP.cjs.map
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-MAHW2W43.cjs","../src/commands/inventory/derive-data-silos-from-data-flows-cross-instance/impl.ts"],"names":["deriveDataSilosFromDataFlowsCrossInstance","auth","dataFlowsYmlFolder","output","ignoreYmls","transcendUrl","doneInputValidation","existsSync","lstatSync","logger","colors","instancesToIgnore","x","dataSiloInputs","listFiles","directory","dataFlows","readTranscendYaml","join","adTechDataSilos","siteTechDataSilos","dataFlowsToDataSilos","serviceToSupportedIntegration","serviceToTitle","serviceToInstance","organizationName","dataSilo","service","adTechIntegrations","silo","siteTechIntegrations","difference_default","serviceToFoundOnDomain","foundOnDomain","attr","client","buildTranscendGraphQLClient","fetchAndIndexCatalogs","dataSilos"],"mappings":"AAAA,quBAAwC,wDAAgD,wDAAyC,gCAA6B,wDAAyC,wDAAkD,gCAA6B,wDAAyC,wDAAyC,gCAA6B,4BCKhX,gFAEF,wBAImB,MAgBtC,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,kBAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CAAa,CAAC,CAAA,CACd,YAAA,CAAAC,CACF,CAAA,CACe,CACfC,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAAA,CAInC,CAACC,4BAAAA,CAA6B,CAAA,EAC9B,CAACC,2BAAAA,CAA4B,CAAA,CAAE,WAAA,CAAY,CAAA,CAAA,EAAA,CAE3CC,mBAAAA,CAAO,KAAA,CAAMC,gBAAAA,CAAO,GAAA,CAAI,CAAA,wBAAA,EAA2BR,CAAkB,CAAA,CAAA,CAAG,CAAC,CAAA,CACzE,IAAA,CAAK,OAAA,CAAQ,IAAA,CAAK,CAAC,CAAA,CAAA,CAIrB,IAAMS,CAAAA,CAAoBP,CAAAA,CAAW,GAAA,CAAKQ,CAAAA,EAAMA,CAAAA,CAAE,KAAA,CAAM,GAAG,CAAA,CAAE,CAAC,CAAC,CAAA,CAGzDC,CAAAA,CAAiBC,iCAAAA,CAA4B,CAAA,CAAE,GAAA,CAAKC,CAAAA,EAAc,CAEtE,GAAM,CAAE,YAAA,CAAcC,CAAAA,CAAY,CAAC,CAAE,CAAA,CAAIC,iCAAAA,wBACvCC,CAAKhB,CAAoBa,CAAS,CACpC,CAAA,CAGM,CAAE,eAAA,CAAAI,CAAAA,CAAiB,iBAAA,CAAAC,CAAkB,CAAA,CAAIC,iCAAAA,CAC7CL,CACA,CACE,6BAAA,CAAAM,CAAAA,CACA,cAAA,CAAAC,CACF,CACF,CAAA,CAEA,MAAO,CACL,eAAA,CAAAJ,CAAAA,CACA,iBAAA,CAAAC,CAAAA,CACA,gBAAA,CAAkBL,CAAAA,CAAU,KAAA,CAAM,GAAG,CAAA,CAAE,CAAC,CAC1C,CACF,CAAC,CAAA,CAGKS,CAAAA,CAAiD,CAAC,CAAA,CACxDX,CAAAA,CAAe,OAAA,CACb,CAAC,CAAE,eAAA,CAAAM,CAAAA,CAAiB,iBAAA,CAAAC,CAAAA,CAAmB,gBAAA,CAAAK,CAAiB,CAAA,CAAA,EAAM,CACvC,CAAC,GAAGN,CAAAA,CAAiB,GAAGC,CAAiB,CAAA,CACjD,OAAA,CAASM,CAAAA,EAAa,CACjC,IAAMC,CAAAA,CAAUD,CAAAA,CAAS,YAAY,CAAA,EAAKA,CAAAA,CAAS,eAAA,CAE9CF,CAAAA,CAAkBG,CAAO,CAAA,EAAA,CAC5BH,CAAAA,CAAkBG,CAAO,CAAA,CAAI,CAAC,CAAA,CAAA,CAEhCH,CAAAA,CAAkBG,CAAO,CAAA,CAAG,IAAA,CAAKF,CAAgB,CAAA,CACjDD,CAAAA,CAAkBG,CAAO,CAAA,CAAI,CAAC,GAAG,IAAI,GAAA,CAAIH,CAAAA,CAAkBG,CAAO,CAAC,CAAC,CACtE,CAAC,CACH,CACF,CAAA,CAGA,IAAMC,CAAAA,CAAqB,CACzB,GAAG,IAAI,GAAA,CACLf,CAAAA,CACG,GAAA,CAAI,CAAC,CAAE,eAAA,CAAAM,CAAgB,CAAA,CAAA,EACtBA,CAAAA,CAAgB,GAAA,CACbU,CAAAA,EAASA,CAAAA,CAAK,YAAY,CAAA,EAAKA,CAAAA,CAAK,eACvC,CACF,CAAA,CACC,IAAA,CAAK,CACV,CACF,CAAA,CAGMC,CAAAA,CAAuBC,iCAAAA,CAEzB,GAAG,IAAI,GAAA,CACLlB,CAAAA,CACG,GAAA,CAAI,CAAC,CAAE,iBAAA,CAAAO,CAAkB,CAAA,CAAA,EACxBA,CAAAA,CAAkB,GAAA,CACfS,CAAAA,EAASA,CAAAA,CAAK,YAAY,CAAA,EAAKA,CAAAA,CAAK,eACvC,CACF,CAAA,CACC,IAAA,CAAK,CACV,CACF,CAAA,CACAD,CACF,CAAA,CAGMI,CAAAA,CAAsD,CAAC,CAAA,CAC7DnB,CAAAA,CAAe,OAAA,CAAQ,CAAC,CAAE,eAAA,CAAAM,CAAAA,CAAiB,iBAAA,CAAAC,CAAkB,CAAA,CAAA,EAAM,CAC5C,CAAC,GAAGD,CAAAA,CAAiB,GAAGC,CAAiB,CAAA,CACjD,OAAA,CAASM,CAAAA,EAAa,CACjC,IAAMC,CAAAA,CAAUD,CAAAA,CAAS,YAAY,CAAA,EAAKA,CAAAA,CAAS,eAAA,CAC7CO,CAAAA,iBAAgBP,CAAAA,mBAAS,UAAA,6BAAY,IAAA,mBACxCQ,CAAAA,EAASA,CAAAA,CAAK,GAAA,GAAQ,iBACzB,GAAA,CAEKF,CAAAA,CAAuBL,CAAO,CAAA,EAAA,CACjCK,CAAAA,CAAuBL,CAAO,CAAA,CAAI,CAAC,CAAA,CAAA,CAErCK,CAAAA,CAAuBL,CAAO,CAAA,CAAG,IAAA,CAAK,mBAAIM,CAAAA,6BAAe,QAAA,EAAU,CAAC,CAAE,CAAA,CACtED,CAAAA,CAAuBL,CAAO,CAAA,CAAI,CAChC,GAAG,IAAI,GAAA,CAAI
K,CAAAA,CAAuBL,CAAO,CAAC,CAC5C,CACF,CAAC,CACH,CAAC,CAAA,CAGD,IAAMQ,CAAAA,CAASC,kCAAAA,CAA4B/B,CAAcJ,CAAI,CAAA,CACvD,CAAE,cAAA,CAAAsB,CAAAA,CAAgB,6BAAA,CAAAD,CAA8B,CAAA,CACpD,MAAMe,kCAAAA,CAA4B,CAAA,CAG9BC,CAAAA,CAAY,CAAC,GAAGV,CAAAA,CAAoB,GAAGE,CAAoB,CAAA,CAAE,GAAA,CAChEH,CAAAA,EAAAA,CAAa,CACZ,KAAA,CAAOJ,CAAAA,CAAeI,CAAO,CAAA,CAC7B,GAAIL,CAAAA,CAA8BK,CAAO,CAAA,CACrC,CAAE,eAAA,CAAiBA,CAAQ,CAAA,CAC3B,CAAE,eAAA,CAAiB,eAAA,CAAiB,YAAA,CAAcA,CAAQ,CAAA,CAC9D,UAAA,CAAY,CACV,CACE,GAAA,CAAK,WAAA,CACL,MAAA,CAAQ,CAAC,SAAS,CACpB,CAAA,CACA,CACE,GAAA,CAAK,gBAAA,CACL,MAAA,CAAQI,iCAAAA,CACNP,CAAkBG,CAAO,CAAA,EAAK,CAAC,CAAA,CAC/BhB,CACF,CACF,CAAA,CACA,CACE,GAAA,CAAK,iBAAA,CACL,MAAA,CAAQqB,CAAAA,CAAuBL,CAAO,CAAA,EAAK,CAAC,CAC9C,CACF,CACF,CAAA,CACF,CAAA,CAGAlB,mBAAAA,CAAO,GAAA,CAAI,CAAA,gBAAA,EAAmB6B,CAAAA,CAAU,MAAM,CAAA,CAAA","file":"/home/runner/work/cli/cli/dist/impl-MAHW2W43.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport {\n fetchAndIndexCatalogs,\n buildTranscendGraphQLClient,\n} from '../../../lib/graphql';\nimport { join } from 'node:path';\nimport { difference } from 'lodash-es';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { dataFlowsToDataSilos } from '../../../lib/consent-manager/dataFlowsToDataSilos';\nimport { DataFlowInput } from '../../../codecs';\nimport { existsSync, lstatSync } from 'node:fs';\nimport { listFiles } from '../../../lib/api-keys';\nimport {\n readTranscendYaml,\n writeTranscendYaml,\n} from '../../../lib/readTranscendYaml';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface DeriveDataSilosFromDataFlowsCrossInstanceCommandFlags {\n auth: string;\n dataFlowsYmlFolder: string;\n output: string;\n ignoreYmls?: string[];\n transcendUrl: string;\n}\n\nexport async function deriveDataSilosFromDataFlowsCrossInstance(\n this: LocalContext,\n {\n auth,\n dataFlowsYmlFolder,\n output,\n ignoreYmls = [],\n transcendUrl,\n }: DeriveDataSilosFromDataFlowsCrossInstanceCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Ensure folder is passed\n if (\n !existsSync(dataFlowsYmlFolder) ||\n !lstatSync(dataFlowsYmlFolder).isDirectory()\n ) {\n logger.error(colors.red(`Folder does not exist: \"${dataFlowsYmlFolder}\"`));\n this.process.exit(1);\n }\n\n // Ignore the data flows in these yml files\n const instancesToIgnore = ignoreYmls.map((x) => x.split('.')[0]);\n\n // Map over each data flow yml file and convert to data silo configurations\n const dataSiloInputs = listFiles(dataFlowsYmlFolder).map((directory) => {\n // read in the data flows for a specific instance\n const { 'data-flows': dataFlows = [] } = readTranscendYaml(\n join(dataFlowsYmlFolder, directory),\n );\n\n // map the data flows to data silos\n const { adTechDataSilos, siteTechDataSilos } = dataFlowsToDataSilos(\n dataFlows as DataFlowInput[],\n {\n serviceToSupportedIntegration,\n serviceToTitle,\n },\n );\n\n return {\n adTechDataSilos,\n siteTechDataSilos,\n organizationName: directory.split('.')[0],\n };\n });\n\n // Mapping from service name to instances that have that service\n const serviceToInstance: { [k in string]: string[] } = {};\n dataSiloInputs.forEach(\n ({ adTechDataSilos, siteTechDataSilos, organizationName }) => {\n const allDataSilos = [...adTechDataSilos, ...siteTechDataSilos];\n allDataSilos.forEach((dataSilo) => {\n const service = dataSilo['outer-type'] || dataSilo.integrationName;\n // create mapping to instance\n if (!serviceToInstance[service]) {\n 
serviceToInstance[service] = [];\n }\n serviceToInstance[service]!.push(organizationName);\n serviceToInstance[service] = [...new Set(serviceToInstance[service])];\n });\n },\n );\n\n // List of ad tech integrations\n const adTechIntegrations = [\n ...new Set(\n dataSiloInputs\n .map(({ adTechDataSilos }) =>\n adTechDataSilos.map(\n (silo) => silo['outer-type'] || silo.integrationName,\n ),\n )\n .flat(),\n ),\n ];\n\n // List of site tech integrations\n const siteTechIntegrations = difference(\n [\n ...new Set(\n dataSiloInputs\n .map(({ siteTechDataSilos }) =>\n siteTechDataSilos.map(\n (silo) => silo['outer-type'] || silo.integrationName,\n ),\n )\n .flat(),\n ),\n ],\n adTechIntegrations,\n );\n\n // Mapping from service name to list of\n const serviceToFoundOnDomain: { [k in string]: string[] } = {};\n dataSiloInputs.forEach(({ adTechDataSilos, siteTechDataSilos }) => {\n const allDataSilos = [...adTechDataSilos, ...siteTechDataSilos];\n allDataSilos.forEach((dataSilo) => {\n const service = dataSilo['outer-type'] || dataSilo.integrationName;\n const foundOnDomain = dataSilo.attributes?.find(\n (attr) => attr.key === 'Found On Domain',\n );\n // create mapping to instance\n if (!serviceToFoundOnDomain[service]) {\n serviceToFoundOnDomain[service] = [];\n }\n serviceToFoundOnDomain[service]!.push(...(foundOnDomain?.values || []));\n serviceToFoundOnDomain[service] = [\n ...new Set(serviceToFoundOnDomain[service]),\n ];\n });\n });\n\n // Fetch all integrations in the catalog\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const { serviceToTitle, serviceToSupportedIntegration } =\n await fetchAndIndexCatalogs(client);\n\n // construct the aggregated data silo inputs\n const dataSilos = [...adTechIntegrations, ...siteTechIntegrations].map(\n (service) => ({\n title: serviceToTitle[service],\n ...(serviceToSupportedIntegration[service]\n ? { integrationName: service }\n : { integrationName: 'promptAPerson', 'outer-type': service }),\n attributes: [\n {\n key: 'Tech Type',\n values: ['Ad Tech'],\n },\n {\n key: 'Business Units',\n values: difference(\n serviceToInstance[service] || [],\n instancesToIgnore,\n ),\n },\n {\n key: 'Found On Domain',\n values: serviceToFoundOnDomain[service] || [],\n },\n ],\n }),\n );\n\n // Log output\n logger.log(`Total Services: ${dataSilos.length}`);\n logger.log(`Ad Tech Services: ${adTechIntegrations.length}`);\n logger.log(`Site Tech Services: ${siteTechIntegrations.length}`);\n\n // Write to yaml\n writeTranscendYaml(output, {\n 'data-silos': dataSilos,\n });\n}\n"]}
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/impl-6S3K72GP.cjs","../src/commands/inventory/derive-data-silos-from-data-flows-cross-instance/impl.ts"],"names":["deriveDataSilosFromDataFlowsCrossInstance","auth","dataFlowsYmlFolder","output","ignoreYmls","transcendUrl","doneInputValidation","existsSync","lstatSync","logger","colors","instancesToIgnore","x","dataSiloInputs","listFiles","directory","dataFlows","readTranscendYaml","join","adTechDataSilos","siteTechDataSilos","dataFlowsToDataSilos","serviceToSupportedIntegration","serviceToTitle","serviceToInstance","organizationName","dataSilo","service","adTechIntegrations","silo","siteTechIntegrations","difference_default","serviceToFoundOnDomain","foundOnDomain","attr","client","buildTranscendGraphQLClient","fetchAndIndexCatalogs","dataSilos"],"mappings":"AAAA,quBAAwC,wDAAgD,wDAAyC,gCAA6B,wDAAyC,wDAAkD,gCAA6B,wDAAyC,wDAAyC,gCAA6B,4BCKhX,gFAEF,wBAImB,MAgBtC,SAAsBA,CAAAA,CAEpB,CACE,IAAA,CAAAC,CAAAA,CACA,kBAAA,CAAAC,CAAAA,CACA,MAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CAAa,CAAC,CAAA,CACd,YAAA,CAAAC,CACF,CAAA,CACe,CACfC,iCAAAA,IAAoB,CAAK,OAAA,CAAQ,IAAI,CAAA,CAAA,CAInC,CAACC,4BAAAA,CAA6B,CAAA,EAC9B,CAACC,2BAAAA,CAA4B,CAAA,CAAE,WAAA,CAAY,CAAA,CAAA,EAAA,CAE3CC,mBAAAA,CAAO,KAAA,CAAMC,gBAAAA,CAAO,GAAA,CAAI,CAAA,wBAAA,EAA2BR,CAAkB,CAAA,CAAA,CAAG,CAAC,CAAA,CACzE,IAAA,CAAK,OAAA,CAAQ,IAAA,CAAK,CAAC,CAAA,CAAA,CAIrB,IAAMS,CAAAA,CAAoBP,CAAAA,CAAW,GAAA,CAAKQ,CAAAA,EAAMA,CAAAA,CAAE,KAAA,CAAM,GAAG,CAAA,CAAE,CAAC,CAAC,CAAA,CAGzDC,CAAAA,CAAiBC,iCAAAA,CAA4B,CAAA,CAAE,GAAA,CAAKC,CAAAA,EAAc,CAEtE,GAAM,CAAE,YAAA,CAAcC,CAAAA,CAAY,CAAC,CAAE,CAAA,CAAIC,iCAAAA,wBACvCC,CAAKhB,CAAoBa,CAAS,CACpC,CAAA,CAGM,CAAE,eAAA,CAAAI,CAAAA,CAAiB,iBAAA,CAAAC,CAAkB,CAAA,CAAIC,iCAAAA,CAC7CL,CACA,CACE,6BAAA,CAAAM,CAAAA,CACA,cAAA,CAAAC,CACF,CACF,CAAA,CAEA,MAAO,CACL,eAAA,CAAAJ,CAAAA,CACA,iBAAA,CAAAC,CAAAA,CACA,gBAAA,CAAkBL,CAAAA,CAAU,KAAA,CAAM,GAAG,CAAA,CAAE,CAAC,CAC1C,CACF,CAAC,CAAA,CAGKS,CAAAA,CAAiD,CAAC,CAAA,CACxDX,CAAAA,CAAe,OAAA,CACb,CAAC,CAAE,eAAA,CAAAM,CAAAA,CAAiB,iBAAA,CAAAC,CAAAA,CAAmB,gBAAA,CAAAK,CAAiB,CAAA,CAAA,EAAM,CACvC,CAAC,GAAGN,CAAAA,CAAiB,GAAGC,CAAiB,CAAA,CACjD,OAAA,CAASM,CAAAA,EAAa,CACjC,IAAMC,CAAAA,CAAUD,CAAAA,CAAS,YAAY,CAAA,EAAKA,CAAAA,CAAS,eAAA,CAE9CF,CAAAA,CAAkBG,CAAO,CAAA,EAAA,CAC5BH,CAAAA,CAAkBG,CAAO,CAAA,CAAI,CAAC,CAAA,CAAA,CAEhCH,CAAAA,CAAkBG,CAAO,CAAA,CAAG,IAAA,CAAKF,CAAgB,CAAA,CACjDD,CAAAA,CAAkBG,CAAO,CAAA,CAAI,CAAC,GAAG,IAAI,GAAA,CAAIH,CAAAA,CAAkBG,CAAO,CAAC,CAAC,CACtE,CAAC,CACH,CACF,CAAA,CAGA,IAAMC,CAAAA,CAAqB,CACzB,GAAG,IAAI,GAAA,CACLf,CAAAA,CACG,GAAA,CAAI,CAAC,CAAE,eAAA,CAAAM,CAAgB,CAAA,CAAA,EACtBA,CAAAA,CAAgB,GAAA,CACbU,CAAAA,EAASA,CAAAA,CAAK,YAAY,CAAA,EAAKA,CAAAA,CAAK,eACvC,CACF,CAAA,CACC,IAAA,CAAK,CACV,CACF,CAAA,CAGMC,CAAAA,CAAuBC,iCAAAA,CAEzB,GAAG,IAAI,GAAA,CACLlB,CAAAA,CACG,GAAA,CAAI,CAAC,CAAE,iBAAA,CAAAO,CAAkB,CAAA,CAAA,EACxBA,CAAAA,CAAkB,GAAA,CACfS,CAAAA,EAASA,CAAAA,CAAK,YAAY,CAAA,EAAKA,CAAAA,CAAK,eACvC,CACF,CAAA,CACC,IAAA,CAAK,CACV,CACF,CAAA,CACAD,CACF,CAAA,CAGMI,CAAAA,CAAsD,CAAC,CAAA,CAC7DnB,CAAAA,CAAe,OAAA,CAAQ,CAAC,CAAE,eAAA,CAAAM,CAAAA,CAAiB,iBAAA,CAAAC,CAAkB,CAAA,CAAA,EAAM,CAC5C,CAAC,GAAGD,CAAAA,CAAiB,GAAGC,CAAiB,CAAA,CACjD,OAAA,CAASM,CAAAA,EAAa,CACjC,IAAMC,CAAAA,CAAUD,CAAAA,CAAS,YAAY,CAAA,EAAKA,CAAAA,CAAS,eAAA,CAC7CO,CAAAA,iBAAgBP,CAAAA,mBAAS,UAAA,6BAAY,IAAA,mBACxCQ,CAAAA,EAASA,CAAAA,CAAK,GAAA,GAAQ,iBACzB,GAAA,CAEKF,CAAAA,CAAuBL,CAAO,CAAA,EAAA,CACjCK,CAAAA,CAAuBL,CAAO,CAAA,CAAI,CAAC,CAAA,CAAA,CAErCK,CAAAA,CAAuBL,CAAO,CAAA,CAAG,IAAA,CAAK,mBAAIM,CAAAA,6BAAe,QAAA,EAAU,CAAC,CAAE,CAAA,CACtED,CAAAA,CAAuBL,CAAO,CAAA,CAAI,CAChC,GAAG,IAAI,GAAA,CAAI
K,CAAAA,CAAuBL,CAAO,CAAC,CAC5C,CACF,CAAC,CACH,CAAC,CAAA,CAGD,IAAMQ,CAAAA,CAASC,kCAAAA,CAA4B/B,CAAcJ,CAAI,CAAA,CACvD,CAAE,cAAA,CAAAsB,CAAAA,CAAgB,6BAAA,CAAAD,CAA8B,CAAA,CACpD,MAAMe,kCAAAA,CAA4B,CAAA,CAG9BC,CAAAA,CAAY,CAAC,GAAGV,CAAAA,CAAoB,GAAGE,CAAoB,CAAA,CAAE,GAAA,CAChEH,CAAAA,EAAAA,CAAa,CACZ,KAAA,CAAOJ,CAAAA,CAAeI,CAAO,CAAA,CAC7B,GAAIL,CAAAA,CAA8BK,CAAO,CAAA,CACrC,CAAE,eAAA,CAAiBA,CAAQ,CAAA,CAC3B,CAAE,eAAA,CAAiB,eAAA,CAAiB,YAAA,CAAcA,CAAQ,CAAA,CAC9D,UAAA,CAAY,CACV,CACE,GAAA,CAAK,WAAA,CACL,MAAA,CAAQ,CAAC,SAAS,CACpB,CAAA,CACA,CACE,GAAA,CAAK,gBAAA,CACL,MAAA,CAAQI,iCAAAA,CACNP,CAAkBG,CAAO,CAAA,EAAK,CAAC,CAAA,CAC/BhB,CACF,CACF,CAAA,CACA,CACE,GAAA,CAAK,iBAAA,CACL,MAAA,CAAQqB,CAAAA,CAAuBL,CAAO,CAAA,EAAK,CAAC,CAC9C,CACF,CACF,CAAA,CACF,CAAA,CAGAlB,mBAAAA,CAAO,GAAA,CAAI,CAAA,gBAAA,EAAmB6B,CAAAA,CAAU,MAAM,CAAA,CAAA","file":"/home/runner/work/cli/cli/dist/impl-6S3K72GP.cjs","sourcesContent":[null,"import type { LocalContext } from '../../../context';\nimport {\n fetchAndIndexCatalogs,\n buildTranscendGraphQLClient,\n} from '../../../lib/graphql';\nimport { join } from 'node:path';\nimport { difference } from 'lodash-es';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { dataFlowsToDataSilos } from '../../../lib/consent-manager/dataFlowsToDataSilos';\nimport { DataFlowInput } from '../../../codecs';\nimport { existsSync, lstatSync } from 'node:fs';\nimport { listFiles } from '../../../lib/api-keys';\nimport {\n readTranscendYaml,\n writeTranscendYaml,\n} from '../../../lib/readTranscendYaml';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface DeriveDataSilosFromDataFlowsCrossInstanceCommandFlags {\n auth: string;\n dataFlowsYmlFolder: string;\n output: string;\n ignoreYmls?: string[];\n transcendUrl: string;\n}\n\nexport async function deriveDataSilosFromDataFlowsCrossInstance(\n this: LocalContext,\n {\n auth,\n dataFlowsYmlFolder,\n output,\n ignoreYmls = [],\n transcendUrl,\n }: DeriveDataSilosFromDataFlowsCrossInstanceCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Ensure folder is passed\n if (\n !existsSync(dataFlowsYmlFolder) ||\n !lstatSync(dataFlowsYmlFolder).isDirectory()\n ) {\n logger.error(colors.red(`Folder does not exist: \"${dataFlowsYmlFolder}\"`));\n this.process.exit(1);\n }\n\n // Ignore the data flows in these yml files\n const instancesToIgnore = ignoreYmls.map((x) => x.split('.')[0]);\n\n // Map over each data flow yml file and convert to data silo configurations\n const dataSiloInputs = listFiles(dataFlowsYmlFolder).map((directory) => {\n // read in the data flows for a specific instance\n const { 'data-flows': dataFlows = [] } = readTranscendYaml(\n join(dataFlowsYmlFolder, directory),\n );\n\n // map the data flows to data silos\n const { adTechDataSilos, siteTechDataSilos } = dataFlowsToDataSilos(\n dataFlows as DataFlowInput[],\n {\n serviceToSupportedIntegration,\n serviceToTitle,\n },\n );\n\n return {\n adTechDataSilos,\n siteTechDataSilos,\n organizationName: directory.split('.')[0],\n };\n });\n\n // Mapping from service name to instances that have that service\n const serviceToInstance: { [k in string]: string[] } = {};\n dataSiloInputs.forEach(\n ({ adTechDataSilos, siteTechDataSilos, organizationName }) => {\n const allDataSilos = [...adTechDataSilos, ...siteTechDataSilos];\n allDataSilos.forEach((dataSilo) => {\n const service = dataSilo['outer-type'] || dataSilo.integrationName;\n // create mapping to instance\n if (!serviceToInstance[service]) {\n 
serviceToInstance[service] = [];\n }\n serviceToInstance[service]!.push(organizationName);\n serviceToInstance[service] = [...new Set(serviceToInstance[service])];\n });\n },\n );\n\n // List of ad tech integrations\n const adTechIntegrations = [\n ...new Set(\n dataSiloInputs\n .map(({ adTechDataSilos }) =>\n adTechDataSilos.map(\n (silo) => silo['outer-type'] || silo.integrationName,\n ),\n )\n .flat(),\n ),\n ];\n\n // List of site tech integrations\n const siteTechIntegrations = difference(\n [\n ...new Set(\n dataSiloInputs\n .map(({ siteTechDataSilos }) =>\n siteTechDataSilos.map(\n (silo) => silo['outer-type'] || silo.integrationName,\n ),\n )\n .flat(),\n ),\n ],\n adTechIntegrations,\n );\n\n // Mapping from service name to list of\n const serviceToFoundOnDomain: { [k in string]: string[] } = {};\n dataSiloInputs.forEach(({ adTechDataSilos, siteTechDataSilos }) => {\n const allDataSilos = [...adTechDataSilos, ...siteTechDataSilos];\n allDataSilos.forEach((dataSilo) => {\n const service = dataSilo['outer-type'] || dataSilo.integrationName;\n const foundOnDomain = dataSilo.attributes?.find(\n (attr) => attr.key === 'Found On Domain',\n );\n // create mapping to instance\n if (!serviceToFoundOnDomain[service]) {\n serviceToFoundOnDomain[service] = [];\n }\n serviceToFoundOnDomain[service]!.push(...(foundOnDomain?.values || []));\n serviceToFoundOnDomain[service] = [\n ...new Set(serviceToFoundOnDomain[service]),\n ];\n });\n });\n\n // Fetch all integrations in the catalog\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const { serviceToTitle, serviceToSupportedIntegration } =\n await fetchAndIndexCatalogs(client);\n\n // construct the aggregated data silo inputs\n const dataSilos = [...adTechIntegrations, ...siteTechIntegrations].map(\n (service) => ({\n title: serviceToTitle[service],\n ...(serviceToSupportedIntegration[service]\n ? { integrationName: service }\n : { integrationName: 'promptAPerson', 'outer-type': service }),\n attributes: [\n {\n key: 'Tech Type',\n values: ['Ad Tech'],\n },\n {\n key: 'Business Units',\n values: difference(\n serviceToInstance[service] || [],\n instancesToIgnore,\n ),\n },\n {\n key: 'Found On Domain',\n values: serviceToFoundOnDomain[service] || [],\n },\n ],\n }),\n );\n\n // Log output\n logger.log(`Total Services: ${dataSilos.length}`);\n logger.log(`Ad Tech Services: ${adTechIntegrations.length}`);\n logger.log(`Site Tech Services: ${siteTechIntegrations.length}`);\n\n // Write to yaml\n writeTranscendYaml(output, {\n 'data-silos': dataSilos,\n });\n}\n"]}