@transcend-io/cli 8.32.6 → 8.32.7
This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/README.md +46 -46
- package/dist/bin/bash-complete.cjs +1 -1
- package/dist/bin/cli.cjs +1 -1
- package/dist/bin/deprecated-command.cjs +2 -2
- package/dist/{chunk-PCK2N4IA.cjs → chunk-4GILGOAM.cjs} +4 -4
- package/dist/{chunk-PCK2N4IA.cjs.map → chunk-4GILGOAM.cjs.map} +1 -1
- package/dist/{chunk-MLH6KKUK.cjs → chunk-4NEO6Q2F.cjs} +2 -2
- package/dist/{chunk-MLH6KKUK.cjs.map → chunk-4NEO6Q2F.cjs.map} +1 -1
- package/dist/{chunk-X2P2BWT7.cjs → chunk-4OEVQFNG.cjs} +21 -21
- package/dist/{chunk-X2P2BWT7.cjs.map → chunk-4OEVQFNG.cjs.map} +1 -1
- package/dist/{chunk-GJ6V5BHG.cjs → chunk-5MG2CEZV.cjs} +2 -2
- package/dist/{chunk-GJ6V5BHG.cjs.map → chunk-5MG2CEZV.cjs.map} +1 -1
- package/dist/{chunk-SZ7K447J.cjs → chunk-ATEDOYYC.cjs} +2 -2
- package/dist/{chunk-SZ7K447J.cjs.map → chunk-ATEDOYYC.cjs.map} +1 -1
- package/dist/{chunk-LE6FS55Q.cjs → chunk-EMLESF76.cjs} +6 -6
- package/dist/{chunk-LE6FS55Q.cjs.map → chunk-EMLESF76.cjs.map} +1 -1
- package/dist/{chunk-I6GMLKI6.cjs → chunk-ETSLJPTD.cjs} +2 -2
- package/dist/{chunk-I6GMLKI6.cjs.map → chunk-ETSLJPTD.cjs.map} +1 -1
- package/dist/{chunk-SFPZ6GEN.cjs → chunk-GCTE43TS.cjs} +4 -4
- package/dist/{chunk-SFPZ6GEN.cjs.map → chunk-GCTE43TS.cjs.map} +1 -1
- package/dist/{chunk-CDX243U3.cjs → chunk-H3EBIKX6.cjs} +2 -2
- package/dist/{chunk-CDX243U3.cjs.map → chunk-H3EBIKX6.cjs.map} +1 -1
- package/dist/{chunk-NZEATKWL.cjs → chunk-HZU3YROT.cjs} +2 -2
- package/dist/{chunk-NZEATKWL.cjs.map → chunk-HZU3YROT.cjs.map} +1 -1
- package/dist/{chunk-O7SJYOEK.cjs → chunk-ILBF5XZN.cjs} +2 -2
- package/dist/{chunk-O7SJYOEK.cjs.map → chunk-ILBF5XZN.cjs.map} +1 -1
- package/dist/{chunk-WJ3RFUZV.cjs → chunk-JRYG6ZXI.cjs} +2 -2
- package/dist/{chunk-WJ3RFUZV.cjs.map → chunk-JRYG6ZXI.cjs.map} +1 -1
- package/dist/{chunk-VWN5MN3U.cjs → chunk-KYOQWIIS.cjs} +4 -4
- package/dist/{chunk-VWN5MN3U.cjs.map → chunk-KYOQWIIS.cjs.map} +1 -1
- package/dist/{chunk-5UWKWV3X.cjs → chunk-ZNTCC7Y7.cjs} +2 -2
- package/dist/{chunk-5UWKWV3X.cjs.map → chunk-ZNTCC7Y7.cjs.map} +1 -1
- package/dist/{impl-UKTKLJSZ.cjs → impl-2NLTX2DX.cjs} +2 -2
- package/dist/{impl-UKTKLJSZ.cjs.map → impl-2NLTX2DX.cjs.map} +1 -1
- package/dist/{impl-DVIXGQJR.cjs → impl-2Z37FQLH.cjs} +2 -2
- package/dist/{impl-DVIXGQJR.cjs.map → impl-2Z37FQLH.cjs.map} +1 -1
- package/dist/{impl-ZUAB2R6X.cjs → impl-3G7CA7V7.cjs} +2 -2
- package/dist/{impl-ZUAB2R6X.cjs.map → impl-3G7CA7V7.cjs.map} +1 -1
- package/dist/{impl-MO2EAW2B.cjs → impl-43DHFRH3.cjs} +2 -2
- package/dist/{impl-MO2EAW2B.cjs.map → impl-43DHFRH3.cjs.map} +1 -1
- package/dist/{impl-G7YD5U53.cjs → impl-4GESIKYG.cjs} +2 -2
- package/dist/{impl-G7YD5U53.cjs.map → impl-4GESIKYG.cjs.map} +1 -1
- package/dist/{impl-PHXT2QG7.cjs → impl-4LNG6EWX.cjs} +2 -2
- package/dist/{impl-PHXT2QG7.cjs.map → impl-4LNG6EWX.cjs.map} +1 -1
- package/dist/{impl-EAI3VXKU.cjs → impl-5BSN73TG.cjs} +4 -4
- package/dist/{impl-EAI3VXKU.cjs.map → impl-5BSN73TG.cjs.map} +1 -1
- package/dist/{impl-RAZVKALS.cjs → impl-AB7CBUNN.cjs} +2 -2
- package/dist/{impl-RAZVKALS.cjs.map → impl-AB7CBUNN.cjs.map} +1 -1
- package/dist/{impl-CL5OTH3R.cjs → impl-ANZ5Q2QW.cjs} +2 -2
- package/dist/{impl-CL5OTH3R.cjs.map → impl-ANZ5Q2QW.cjs.map} +1 -1
- package/dist/{impl-575YOEHZ.cjs → impl-B2KHIHOI.cjs} +2 -2
- package/dist/{impl-575YOEHZ.cjs.map → impl-B2KHIHOI.cjs.map} +1 -1
- package/dist/{impl-D7RH4J5E.cjs → impl-BCRCUBC6.cjs} +2 -2
- package/dist/{impl-D7RH4J5E.cjs.map → impl-BCRCUBC6.cjs.map} +1 -1
- package/dist/{impl-SUS5VKKB.cjs → impl-BFSM3FLJ.cjs} +2 -2
- package/dist/{impl-SUS5VKKB.cjs.map → impl-BFSM3FLJ.cjs.map} +1 -1
- package/dist/{impl-QMDXFUAO.cjs → impl-C3RL4PL5.cjs} +2 -2
- package/dist/{impl-QMDXFUAO.cjs.map → impl-C3RL4PL5.cjs.map} +1 -1
- package/dist/{impl-PHHRQWTE.cjs → impl-C3YOETUJ.cjs} +2 -2
- package/dist/{impl-PHHRQWTE.cjs.map → impl-C3YOETUJ.cjs.map} +1 -1
- package/dist/{impl-CV6MBTAL.cjs → impl-CUQGOWYL.cjs} +2 -2
- package/dist/{impl-CV6MBTAL.cjs.map → impl-CUQGOWYL.cjs.map} +1 -1
- package/dist/{impl-RZSXBVAC.cjs → impl-CWWJOV4W.cjs} +2 -2
- package/dist/{impl-RZSXBVAC.cjs.map → impl-CWWJOV4W.cjs.map} +1 -1
- package/dist/{impl-T3KGRFUR.cjs → impl-D27XUTVP.cjs} +2 -2
- package/dist/{impl-T3KGRFUR.cjs.map → impl-D27XUTVP.cjs.map} +1 -1
- package/dist/{impl-IC4KAL33.cjs → impl-D6D7MZ4F.cjs} +2 -2
- package/dist/{impl-IC4KAL33.cjs.map → impl-D6D7MZ4F.cjs.map} +1 -1
- package/dist/{impl-TXBSRO6N.cjs → impl-EOWKDUEB.cjs} +2 -2
- package/dist/{impl-TXBSRO6N.cjs.map → impl-EOWKDUEB.cjs.map} +1 -1
- package/dist/impl-EQL4NORJ.cjs +2 -0
- package/dist/{impl-DNVWNGPJ.cjs.map → impl-EQL4NORJ.cjs.map} +1 -1
- package/dist/{impl-T6FTWSLY.cjs → impl-FAE3G5FE.cjs} +2 -2
- package/dist/{impl-T6FTWSLY.cjs.map → impl-FAE3G5FE.cjs.map} +1 -1
- package/dist/{impl-DGZB5IDM.cjs → impl-FVEGOBQM.cjs} +2 -2
- package/dist/{impl-DGZB5IDM.cjs.map → impl-FVEGOBQM.cjs.map} +1 -1
- package/dist/{impl-SA4YQJID.cjs → impl-HNWZZEKE.cjs} +2 -2
- package/dist/{impl-SA4YQJID.cjs.map → impl-HNWZZEKE.cjs.map} +1 -1
- package/dist/{impl-5LMTXEQA.cjs → impl-JJ56CQTW.cjs} +2 -2
- package/dist/{impl-5LMTXEQA.cjs.map → impl-JJ56CQTW.cjs.map} +1 -1
- package/dist/{impl-UU5CW4E4.cjs → impl-JNDENIQD.cjs} +2 -2
- package/dist/{impl-UU5CW4E4.cjs.map → impl-JNDENIQD.cjs.map} +1 -1
- package/dist/{impl-XT4Q54W2.cjs → impl-JOK5WPTD.cjs} +2 -2
- package/dist/{impl-XT4Q54W2.cjs.map → impl-JOK5WPTD.cjs.map} +1 -1
- package/dist/{impl-G422JWSA.cjs → impl-KJTD4WO2.cjs} +3 -3
- package/dist/{impl-G422JWSA.cjs.map → impl-KJTD4WO2.cjs.map} +1 -1
- package/dist/{impl-UYKI3NKQ.cjs → impl-KRMOTRYI.cjs} +2 -2
- package/dist/{impl-UYKI3NKQ.cjs.map → impl-KRMOTRYI.cjs.map} +1 -1
- package/dist/{impl-M2JWCIOX.cjs → impl-LX4ZUDUY.cjs} +2 -2
- package/dist/{impl-M2JWCIOX.cjs.map → impl-LX4ZUDUY.cjs.map} +1 -1
- package/dist/{impl-Y6ENZCUI.cjs → impl-MBWE7OAC.cjs} +2 -2
- package/dist/{impl-Y6ENZCUI.cjs.map → impl-MBWE7OAC.cjs.map} +1 -1
- package/dist/{impl-ZV7OZ5BN.cjs → impl-O4WDS4V2.cjs} +2 -2
- package/dist/{impl-ZV7OZ5BN.cjs.map → impl-O4WDS4V2.cjs.map} +1 -1
- package/dist/{impl-C6TDBSVQ.cjs → impl-PISNT2E4.cjs} +2 -2
- package/dist/{impl-C6TDBSVQ.cjs.map → impl-PISNT2E4.cjs.map} +1 -1
- package/dist/{impl-VVICNSEP.cjs → impl-PS3WRA65.cjs} +2 -2
- package/dist/{impl-VVICNSEP.cjs.map → impl-PS3WRA65.cjs.map} +1 -1
- package/dist/{impl-R5PX6MIE.cjs → impl-TBYACXLK.cjs} +5 -5
- package/dist/{impl-R5PX6MIE.cjs.map → impl-TBYACXLK.cjs.map} +1 -1
- package/dist/{impl-YQXGFOOO.cjs → impl-TD5BKXIH.cjs} +2 -2
- package/dist/{impl-YQXGFOOO.cjs.map → impl-TD5BKXIH.cjs.map} +1 -1
- package/dist/{impl-XOHK7EPT.cjs → impl-VBS6LHXD.cjs} +2 -2
- package/dist/{impl-XOHK7EPT.cjs.map → impl-VBS6LHXD.cjs.map} +1 -1
- package/dist/impl-WPM2STGX.cjs +2 -0
- package/dist/{impl-WY3A5X7O.cjs.map → impl-WPM2STGX.cjs.map} +1 -1
- package/dist/{impl-56QEHP2B.cjs → impl-WQF6YIDK.cjs} +3 -3
- package/dist/{impl-56QEHP2B.cjs.map → impl-WQF6YIDK.cjs.map} +1 -1
- package/dist/{impl-LJBAH4YS.cjs → impl-WWYWKXMK.cjs} +2 -2
- package/dist/{impl-LJBAH4YS.cjs.map → impl-WWYWKXMK.cjs.map} +1 -1
- package/dist/{impl-JT7MI4YS.cjs → impl-Y5TP56IB.cjs} +2 -2
- package/dist/{impl-JT7MI4YS.cjs.map → impl-Y5TP56IB.cjs.map} +1 -1
- package/dist/{impl-7DIFXY6N.cjs → impl-YOSAEFO2.cjs} +3 -3
- package/dist/{impl-7DIFXY6N.cjs.map → impl-YOSAEFO2.cjs.map} +1 -1
- package/dist/{impl-MCIWWT2M.cjs → impl-ZIBUPJJN.cjs} +2 -2
- package/dist/{impl-MCIWWT2M.cjs.map → impl-ZIBUPJJN.cjs.map} +1 -1
- package/dist/index.cjs +3 -3
- package/dist/index.d.cts +1 -1
- package/package.json +1 -1
- package/dist/impl-DNVWNGPJ.cjs +0 -2
- package/dist/impl-WY3A5X7O.cjs +0 -2
package/dist/{chunk-SZ7K447J.cjs → chunk-ATEDOYYC.cjs}
@@ -1,4 +1,4 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkZNTCC7Y7cjs = require('./chunk-ZNTCC7Y7.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _fs = require('fs');var _typeutils = require('@transcend-io/type-utils');var _privacytypes = require('@transcend-io/privacy-types');var j=/target ('|")(.*?)('|")/,M=/pod ('|")(.*?)('|")(, ('|")~> (.+?)('|")|)/,S={supportedFiles:["Podfile"],ignoreDirs:["Pods","Build"],scanFunction:s=>{let e=_fs.readFileSync.call(void 0, s,"utf-8"),i=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(j,"g"),matches:["quote1","name","quote2"]},e),p=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(M,"g"),matches:["quote1","name","quote2","extra","quote3","version","quote4"]},e);return i.map((r,c)=>({name:r.name,type:_privacytypes.CodePackageType.CocoaPods,softwareDevelopmentKits:p.filter(m=>m.matchIndex>r.matchIndex&&(!i[c+1]||m.matchIndex<i[c+1].matchIndex)).map(m=>({name:m.name,version:m.version}))}))}};var _path = require('path');var U=/implementation( *)('|")(.+?):(.+?):(.+?|)('|")/,W=/apply plugin: *('|")(.+?)(:(.+?)|)('|")/,Y=/implementation group:( *)('|")(.+?)('|"),( *)name:( *)('|")(.+?)('|"),( *)version:( *)('|")(.+?)('|")/,V=/applicationId( *)"(.+?)"/,_={supportedFiles:["build.gradle**"],ignoreDirs:["gradle-app.setting","gradle-wrapper.jar","gradle-wrapper.properties"],scanFunction:s=>{let e=_fs.readFileSync.call(void 0, s,"utf-8"),i=_path.dirname.call(void 0, s),p=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(U,"g"),matches:["space","quote1","name","path","version","quote2"]},e),a=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(W,"g"),matches:["quote1","name","group","version","quote2"]},e),r=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(Y,"g"),matches:["space1","quote1","group","quote2","space2","space3","quote3","name","quote4","space4","space5","quote5","version","quote6"]},e),c=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(V,"g"),matches:["space","name"]},e);if(c.length>1)throw new Error(`Expected only one applicationId per file: ${s}`);return[{name:_optionalChain([c, 'access', _2 => _2[0], 'optionalAccess', _3 => _3.name])||i.split("/").pop(),softwareDevelopmentKits:[...p,...r,...a].map(m=>({name:m.name,version:m.version||void 0}))}]}};var I={supportedFiles:["package.json"],ignoreDirs:["node_modules","serverless-build","lambda-build"],scanFunction:s=>{let e=_fs.readFileSync.call(void 0, s,"utf-8"),i=_path.dirname.call(void 0, 
s),p=JSON.parse(e),{name:a,description:r,dependencies:c={},devDependencies:m={},optionalDependencies:t={}}=p;return[{name:a||i.split("/").pop(),description:r,softwareDevelopmentKits:[...Object.entries(c).map(([o,d])=>({name:o,version:typeof d=="string"?d:void 0})),...Object.entries(m).map(([o,d])=>({name:o,version:typeof d=="string"?d:void 0,isDevDependency:!0})),...Object.entries(t).map(([o,d])=>({name:o,version:typeof d=="string"?d:void 0}))]}]}};var ne=/(.+?)(=+)(.+)/,te=/name *= *('|")(.+?)('|")/,oe=/description *= *('|")(.+?)('|")/,x={supportedFiles:["requirements.txt"],ignoreDirs:["build","lib","lib64"],scanFunction:s=>{let e=_fs.readFileSync.call(void 0, s,"utf-8"),i=_path.dirname.call(void 0, s),a=_chunkZNTCC7Y7cjs.c.call(void 0, i).find(o=>o==="setup.py"),r=a?_fs.readFileSync.call(void 0, _path.join.call(void 0, i,a),"utf-8"):void 0,c=r?(te.exec(r)||[])[2]:void 0,m=r?(oe.exec(r)||[])[2]:void 0,t=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(ne,"g"),matches:["name","equals","version"]},e);return[{name:c||i.split("/").pop(),description:m||void 0,type:_privacytypes.CodePackageType.RequirementsTxt,softwareDevelopmentKits:t.map(o=>({name:o.name,version:o.version}))}]}};var pe=/gem *('|")(.+?)('|")(, *('|")(.+?)('|")|)/,ae=/spec\.name *= *('|")(.+?)('|")/,ce=/spec\.description *= *('|")(.+?)('|")/,me=/spec\.summary *= *('|")(.+?)('|")/,D={supportedFiles:["Gemfile"],ignoreDirs:["bin"],scanFunction:s=>{let e=_fs.readFileSync.call(void 0, s,"utf-8"),i=_path.dirname.call(void 0, s),a=_chunkZNTCC7Y7cjs.c.call(void 0, i).find(o=>o===".gemspec"),r=a?_fs.readFileSync.call(void 0, a,"utf-8"):void 0,c=r?(ae.exec(r)||[])[2]:void 0,m=r?(ce.exec(r)||me.exec(r)||[])[1]:void 0,t=_typeutils.findAllWithRegex.call(void 0, {value:new RegExp(pe,"g"),matches:["quote1","name","quote2","hasVersion","quote3","version","quote4"]},e);return[{name:c||i.split("/").pop(),description:m||void 0,type:_privacytypes.CodePackageType.RequirementsTxt,softwareDevelopmentKits:t.map(o=>({name:o.name,version:o.version}))}]}};var _jsyaml = require('js-yaml'); var _jsyaml2 = _interopRequireDefault(_jsyaml);function ue(s){return s.split(`
`).map(e=>{let i=e.indexOf("#");return i>-1&&!e.substring(0,i).includes('"')&&!e.substring(0,i).includes("'")?e.substring(0,i).trim():e}).filter(e=>e.length>0).join(`
`)}var v={supportedFiles:["pubspec.yml"],ignoreDirs:["build"],scanFunction:s=>{let e=_path.dirname.call(void 0, s),i=_fs.readFileSync.call(void 0, s,"utf-8"),{name:p,description:a,dev_dependencies:r={},dependencies:c={}}=_jsyaml2.default.load(ue(i));return[{name:p||e.split("/").pop(),description:a,type:_privacytypes.CodePackageType.RequirementsTxt,softwareDevelopmentKits:[...Object.entries(c).map(([m,t])=>({name:m,version:typeof t=="string"?t:typeof t=="number"?t.toString():_optionalChain([t, 'optionalAccess', _4 => _4.sdk])})),...Object.entries(r).map(([m,t])=>({name:m,version:typeof t=="string"?t:typeof t=="number"?t.toString():_optionalChain([t, 'optionalAccess', _5 => _5.sdk]),isDevDependency:!0}))]}]}};var G={supportedFiles:["composer.json"],ignoreDirs:["vendor","node_modules","cache","build","dist"],scanFunction:s=>{let e=_fs.readFileSync.call(void 0, s,"utf-8"),i=_path.dirname.call(void 0, s),p=JSON.parse(e),{name:a,description:r,require:c={},"require-dev":m={}}=p;return[{name:a||i.split("/").pop(),description:r,softwareDevelopmentKits:[...Object.entries(c).map(([t,o])=>({name:t,version:typeof o=="string"?o:void 0})),...Object.entries(m).map(([t,o])=>({name:t,version:typeof o=="string"?o:void 0,isDevDependency:!0}))]}]}};var _iots = require('io-ts'); var n = _interopRequireWildcard(_iots);var Se=n.type({pins:n.array(n.type({identity:n.string,kind:n.string,location:n.string,state:n.intersection([n.type({revision:n.string}),n.partial({version:n.union([n.string,n.undefined,n.null])})])})),version:n.number}),_e=n.type({object:n.type({pins:n.array(n.type({package:n.string,repositoryURL:n.string,state:n.intersection([n.type({branch:n.union([n.string,n.undefined,n.null]),revision:n.string}),n.partial({version:n.union([n.string,n.undefined,n.null])})])}))}),version:n.number}),P={supportedFiles:["Package.resolved"],ignoreDirs:[],scanFunction:s=>{let e=_fs.readFileSync.call(void 0, s,"utf-8");try{let i=_typeutils.decodeCodec.call(void 0, Se,e),p=_path.dirname.call(void 0, s).split("/"),a=p[p.length-1],r=a;return r==="swiftpm"&&(r=p[p.length-2],r==="xcshareddata"?r=p[p.length-3]:r||(r=a),r==="project.xcworkspace"&&(r=p[p.length-4])),[{name:r,type:_privacytypes.CodePackageType.Swift,softwareDevelopmentKits:i.pins.map(c=>({name:c.identity,version:c.state.version||void 0}))}]}catch(i){if(!_optionalChain([i, 'optionalAccess', _6 => _6.message, 'optionalAccess', _7 => _7.includes, 'call', _8 => _8("Failed to decode codec")]))throw i;try{let p=_typeutils.decodeCodec.call(void 0, _e,e);return[{name:_path.dirname.call(void 0, s).split("/").pop()||"",type:_privacytypes.CodePackageType.Swift,softwareDevelopmentKits:p.object.pins.map(a=>({name:a.package,version:a.state.version||void 0}))}]}catch(p){throw _optionalChain([p, 'optionalAccess', _9 => _9.message, 'optionalAccess', _10 => _10.includes, 'call', _11 => _11("Failed to decode codec")])?i:p}}}};var A="(implementation|api|kapt|ksp|debugImplementation|releaseImplementation|androidTestImplementation|testImplementation|compileOnly|runtimeOnly)",De=new RegExp(`${A}\\s*\\(\\s*["']([^"':\\s]+):([^"':\\s]+):?([^"']*)["']\\s*\\)`,"g"),ve=new RegExp(`${A}\\s*\\(\\s*platform\\(\\s*["']([^"':\\s]+):([^"':\\s]+):?([^"']*)["']\\s*\\)\\s*\\)`,"g"),Pe=new 
RegExp(`${A}\\s*\\(\\s*libs(?:\\.[\\w\\-\\.]+|\\[["'][^"']+["']\\])\\s*\\)`,"g"),Ae=/id\s*\(\s*["']([^"']+)["']\s*\)(?:\s*version\s*["']([^"']+)["'])?/g,Re=/apply\s*\(\s*plugin\s*=\s*["']([^"']+)["']\s*\)/g,ke=/plugins\s*\{[^}]*alias\s*\(\s*libs(?:\.plugins)?(?:\.[\w\-.]+|\[["'][^"']+["']\])\s*\)[^}]*\}/g,he=/applicationId\s*=\s*["']([^"']+)["']/g,Ge=/applicationId\s*\(\s*["']([^"']+)["']\s*\)/g;function u(s,e){let i=e&&e.trim().length>0&&e!=="_"?e.trim():void 0;return{name:s,version:i}}var N={supportedFiles:["**/build.gradle.kts","**/*.gradle.kts"],ignoreDirs:["gradle-app.setting","gradle-wrapper.jar","gradle-wrapper.properties"],scanFunction:s=>{let e=_fs.readFileSync.call(void 0, s,"utf-8"),i=_path.dirname.call(void 0, s),p=[..._typeutils.findAllWithRegex.call(void 0, {value:he,matches:["name"]},e),..._typeutils.findAllWithRegex.call(void 0, {value:Ge,matches:["name"]},e)];if(p.length>1)throw new Error(`Expected only one applicationId per file: ${s}`);let a=_optionalChain([p, 'access', _12 => _12[0], 'optionalAccess', _13 => _13.name])||i.split("/").pop(),r=[];for(let t of e.matchAll(De)){let[,,o,d,f]=t;r.push(u(`${o}:${d}`,f))}for(let t of e.matchAll(ve)){let[,,o,d,f]=t;r.push(u(`${o}:${d}`,f))}for(let t of e.matchAll(Pe)){let o=t[0].replace(/^[^(]+\(\s*/,"").replace(/\)\s*$/,"").trim();r.push(u(o))}let c=[];for(let t of e.matchAll(Ae)){let[,o,d]=t;c.push(u(o,d))}for(let t of e.matchAll(Re)){let[,o]=t;c.push(u(o))}if(ke.test(e)){let t=e.matchAll(/alias\s*\(\s*(libs(?:\.plugins)?(?:\.[\w\-.]+|\[["'][^"']+["']\]))\s*\)/g);for(let o of t)c.push(u(o[1]))}let m=[...r,...c].reduce((t,o)=>{let d=`${o.name}@@${o.version||""}`;return t.map.has(d)||(t.map.set(d,o),t.list.push(o)),t},{map:new Map,list:[]}).list;return[{name:a,softwareDevelopmentKits:m}]}};var Fn={cocoaPods:S,gradle:_,javascriptPackageJson:I,pythonRequirementsTxt:x,gemfile:D,pubspec:v,swift:P},q={[_privacytypes.CodePackageType.CocoaPods]:S,[_privacytypes.CodePackageType.Gradle]:_,[_privacytypes.CodePackageType.PackageJson]:I,[_privacytypes.CodePackageType.RequirementsTxt]:x,[_privacytypes.CodePackageType.Gemfile]:D,[_privacytypes.CodePackageType.Pubspec]:v,[_privacytypes.CodePackageType.ComposerJson]:G,[_privacytypes.CodePackageType.Swift]:P,[_privacytypes.CodePackageType.Kotlin]:N};var _fastglob = require('fast-glob'); var _fastglob2 = _interopRequireDefault(_fastglob);var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function Ln({scanPath:s,ignoreDirs:e=[],repositoryName:i}){return(await Promise.all(_typeutils.getEntries.call(void 0, q).map(async([a,r])=>{let{ignoreDirs:c,supportedFiles:m,scanFunction:t}=r,o=[...e,...c].filter(d=>d.length>0);try{let d=await _fastglob2.default.call(void 0, `${s}/**/${m.join("|")}`,{ignore:o.map(l=>`${s}/**/${l}`),unique:!0,onlyFiles:!0});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Scanning: ${d.length} files of type ${a}`));let f=d.map(l=>t(l).map(K=>({...K,relativePath:l.replace(`${s}/`,"")}))).flat();return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Found: ${f.length} packages and ${f.map(({softwareDevelopmentKits:l=[]})=>l).flat().length} sdks`)),f.map(l=>({...l,type:a,repositoryName:i}))}catch(d){throw new Error(`Error scanning globs ${m} with error: ${d}`)}}))).flat()}exports.a = Fn; exports.b = Ln;
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-ATEDOYYC.cjs.map
package/dist/{chunk-SZ7K447J.cjs.map → chunk-ATEDOYYC.cjs.map}
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-SZ7K447J.cjs","../src/lib/code-scanning/integrations/cocoaPods.ts","../src/lib/code-scanning/integrations/gradle.ts","../src/lib/code-scanning/integrations/pubspec.ts","../src/lib/code-scanning/integrations/swift.ts"],"names":["POD_TARGET_REGEX","POD_PACKAGE_REGEX","cocoaPods","filePath","fileContents","readFileSync","targets","findAllWithRegex","packages","target","ind","CodePackageType","pkg","GRADLE_IMPLEMENTATION_REGEX","GRADLE_PLUGIN_REGEX","GRADLE_IMPLEMENTATION_GROUP_REGEX","GRADLE_APPLICATION_NAME_REGEX","gradle","directory","dirname","targetPlugins","targetGroups","applications"],"mappings":"AAAA,u/BAAwC,wDAAyC,wBCApD,qDAGI,2DACD,IAE1BA,CAAAA,CAAmB,wBAAA,CACnBC,CAAAA,CAAoB,4CAAA,CAEbC,CAAAA,CAAgC,CAC3C,cAAA,CAAgB,CAAC,SAAS,CAAA,CAC1B,UAAA,CAAY,CAAC,MAAA,CAAQ,OAAO,CAAA,CAC5B,YAAA,CAAeC,CAAAA,EAAa,CAC1B,IAAMC,CAAAA,CAAeC,8BAAAA,CAAaF,CAAU,OAAO,CAAA,CAE7CG,CAAAA,CAAUC,yCAAAA,CAEZ,KAAA,CAAO,IAAI,MAAA,CAAOP,CAAAA,CAAkB,GAAG,CAAA,CACvC,OAAA,CAAS,CAAC,QAAA,CAAU,MAAA,CAAQ,QAAQ,CACtC,CAAA,CACAI,CACF,CAAA,CACMI,CAAAA,CAAWD,yCAAAA,CAEb,KAAA,CAAO,IAAI,MAAA,CAAON,CAAAA,CAAmB,GAAG,CAAA,CACxC,OAAA,CAAS,CACP,QAAA,CACA,MAAA,CACA,QAAA,CACA,OAAA,CACA,QAAA,CACA,SAAA,CACA,QACF,CACF,CAAA,CACAG,CACF,CAAA,CAiBA,OAf+BE,CAAAA,CAAQ,GAAA,CAAI,CAACG,CAAAA,CAAQC,CAAAA,CAAAA,EAAAA,CAAS,CAC3D,IAAA,CAAMD,CAAAA,CAAO,IAAA,CACb,IAAA,CAAME,6BAAAA,CAAgB,SAAA,CACtB,uBAAA,CAAyBH,CAAAA,CACtB,MAAA,CACEI,CAAAA,EACCA,CAAAA,CAAI,UAAA,CAAaH,CAAAA,CAAO,UAAA,EAAA,CACvB,CAACH,CAAAA,CAAQI,CAAAA,CAAM,CAAC,CAAA,EAAKE,CAAAA,CAAI,UAAA,CAAaN,CAAAA,CAAQI,CAAAA,CAAM,CAAC,CAAA,CAAE,UAAA,CAC5D,CAAA,CACC,GAAA,CAAKE,CAAAA,EAAAA,CAAS,CACb,IAAA,CAAMA,CAAAA,CAAI,IAAA,CACV,OAAA,CAASA,CAAAA,CAAI,OACf,CAAA,CAAE,CACN,CAAA,CAAE,CAGJ,CACF,CAAA,CCvDA,4BAGwB,IAElBC,CAAAA,CACJ,gDAAA,CACIC,CAAAA,CAAsB,yCAAA,CACtBC,CAAAA,CACJ,uGAAA,CACIC,CAAAA,CAAgC,0BAAA,CAYzBC,CAAAA,CAA6B,CACxC,cAAA,CAAgB,CAAC,gBAAgB,CAAA,CACjC,UAAA,CAAY,CACV,oBAAA,CACA,oBAAA,CACA,2BACF,CAAA,CACA,YAAA,CAAed,CAAAA,EAAa,CAC1B,IAAMC,CAAAA,CAAeC,8BAAAA,CAAaF,CAAU,OAAO,CAAA,CAC7Ce,CAAAA,CAAYC,2BAAAA,CAAgB,CAAA,CAE5Bb,CAAAA,CAAUC,yCAAAA,CAEZ,KAAA,CAAO,IAAI,MAAA,CAAOM,CAAAA,CAA6B,GAAG,CAAA,CAClD,OAAA,CAAS,CAAC,OAAA,CAAS,QAAA,CAAU,MAAA,CAAQ,MAAA,CAAQ,SAAA,CAAW,QAAQ,CAClE,CAAA,CACAT,CACF,CAAA,CACMgB,CAAAA,CAAgBb,yCAAAA,CAElB,KAAA,CAAO,IAAI,MAAA,CAAOO,CAAAA,CAAqB,GAAG,CAAA,CAC1C,OAAA,CAAS,CAAC,QAAA,CAAU,MAAA,CAAQ,OAAA,CAAS,SAAA,CAAW,QAAQ,CAC1D,CAAA,CACAV,CACF,CAAA,CACMiB,CAAAA,CAAed,yCAAAA,CAEjB,KAAA,CAAO,IAAI,MAAA,CAAOQ,CAAAA,CAAmC,GAAG,CAAA,CACxD,OAAA,CAAS,CACP,QAAA,CACA,QAAA,CACA,OAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,MAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,SAAA,CACA,QACF,CACF,CAAA,CACAX,CACF,CAAA,CACMkB,CAAAA,CAAef,yCAAAA,CAEjB,KAAA,CAAO,IAAI,MAAA,CAAOS,CAAAA,CAA+B,GAAG,CAAA,CACpD,OAAA,CAAS,CAAC,OAAA,CAAS,MAAM,CAC3B,CAAA,CACAZ,CACF,CAAA,CACA,EAAA,CAAIkB,CAAAA,CAAa,MAAA,CAAS,CAAA,CACxB,MAAM,IAAI,KAAA,CAAM,CAAA,0CAAA,EAA6CnB,CAAQ,CAAA,CAAA;AC/CjE;ACkBR","file":"/home/runner/work/cli/cli/dist/chunk-SZ7K447J.cjs","sourcesContent":[null,"import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { CodePackageSdk } from '../../../codecs';\nimport { findAllWithRegex } from '@transcend-io/type-utils';\nimport { CodePackageType } from '@transcend-io/privacy-types';\n\nconst POD_TARGET_REGEX = /target ('|\")(.*?)('|\")/;\nconst POD_PACKAGE_REGEX = /pod ('|\")(.*?)('|\")(, ('|\")~> (.+?)('|\")|)/;\n\nexport const cocoaPods: CodeScanningConfig = {\n supportedFiles: ['Podfile'],\n 
ignoreDirs: ['Pods', 'Build'],\n scanFunction: (filePath) => {\n const fileContents = readFileSync(filePath, 'utf-8');\n\n const targets = findAllWithRegex(\n {\n value: new RegExp(POD_TARGET_REGEX, 'g'),\n matches: ['quote1', 'name', 'quote2'],\n },\n fileContents,\n );\n const packages = findAllWithRegex(\n {\n value: new RegExp(POD_PACKAGE_REGEX, 'g'),\n matches: [\n 'quote1',\n 'name',\n 'quote2',\n 'extra',\n 'quote3',\n 'version',\n 'quote4',\n ],\n },\n fileContents,\n );\n\n const deps: CodePackageSdk[] = targets.map((target, ind) => ({\n name: target.name,\n type: CodePackageType.CocoaPods,\n softwareDevelopmentKits: packages\n .filter(\n (pkg) =>\n pkg.matchIndex > target.matchIndex &&\n (!targets[ind + 1] || pkg.matchIndex < targets[ind + 1].matchIndex),\n )\n .map((pkg) => ({\n name: pkg.name,\n version: pkg.version,\n })),\n }));\n\n return deps;\n },\n};\n","import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { findAllWithRegex } from '@transcend-io/type-utils';\nimport { dirname } from 'node:path';\n\nconst GRADLE_IMPLEMENTATION_REGEX =\n /implementation( *)('|\")(.+?):(.+?):(.+?|)('|\")/;\nconst GRADLE_PLUGIN_REGEX = /apply plugin: *('|\")(.+?)(:(.+?)|)('|\")/;\nconst GRADLE_IMPLEMENTATION_GROUP_REGEX =\n /implementation group:( *)('|\")(.+?)('|\"),( *)name:( *)('|\")(.+?)('|\"),( *)version:( *)('|\")(.+?)('|\")/;\nconst GRADLE_APPLICATION_NAME_REGEX = /applicationId( *)\"(.+?)\"/;\n\n/**\n * So far, there are three ways of defining dependencies that is supported\n * implementation group: 'org.eclipse.jdt', name: 'org.eclipse.jdt.core', version: '3.28.0'\n * or\n * implementation 'com.google.firebase:firebase-analytics:18.0.0'\n * or\n * apply plugin: 'com.google.gms.google-services'\n *\n * single and double quotes are both recognized\n */\nexport const gradle: CodeScanningConfig = {\n supportedFiles: ['build.gradle**'],\n ignoreDirs: [\n 'gradle-app.setting',\n 'gradle-wrapper.jar',\n 'gradle-wrapper.properties',\n ],\n scanFunction: (filePath) => {\n const fileContents = readFileSync(filePath, 'utf-8');\n const directory = dirname(filePath);\n\n const targets = findAllWithRegex(\n {\n value: new RegExp(GRADLE_IMPLEMENTATION_REGEX, 'g'),\n matches: ['space', 'quote1', 'name', 'path', 'version', 'quote2'],\n },\n fileContents,\n );\n const targetPlugins = findAllWithRegex(\n {\n value: new RegExp(GRADLE_PLUGIN_REGEX, 'g'),\n matches: ['quote1', 'name', 'group', 'version', 'quote2'],\n },\n fileContents,\n );\n const targetGroups = findAllWithRegex(\n {\n value: new RegExp(GRADLE_IMPLEMENTATION_GROUP_REGEX, 'g'),\n matches: [\n 'space1',\n 'quote1',\n 'group',\n 'quote2',\n 'space2',\n 'space3',\n 'quote3',\n 'name',\n 'quote4',\n 'space4',\n 'space5',\n 'quote5',\n 'version',\n 'quote6',\n ],\n },\n fileContents,\n );\n const applications = findAllWithRegex(\n {\n value: new RegExp(GRADLE_APPLICATION_NAME_REGEX, 'g'),\n matches: ['space', 'name'],\n },\n fileContents,\n );\n if (applications.length > 1) {\n throw new Error(`Expected only one applicationId per file: ${filePath}`);\n }\n\n return [\n {\n name: applications[0]?.name || directory.split('/').pop()!,\n softwareDevelopmentKits: [\n ...targets,\n ...targetGroups,\n ...targetPlugins,\n ].map((target) => ({\n name: target.name,\n version: target.version || undefined,\n })),\n },\n ];\n },\n};\n","import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { CodePackageType } from '@transcend-io/privacy-types';\nimport yaml from 
'js-yaml';\nimport { dirname } from 'node:path';\n\n/**\n * Remove YAML comments from a string\n *\n * @param yamlString - YAML string\n * @returns String without comments\n */\nfunction removeYAMLComments(yamlString: string): string {\n return yamlString\n .split('\\n')\n .map((line) => {\n // Remove inline comments\n const commentIndex = line.indexOf('#');\n if (commentIndex > -1) {\n // Check if '#' is not inside a string\n if (\n !line.substring(0, commentIndex).includes('\"') &&\n !line.substring(0, commentIndex).includes(\"'\")\n ) {\n return line.substring(0, commentIndex).trim();\n }\n }\n return line;\n })\n .filter((line) => line.length > 0)\n .join('\\n');\n}\n\nexport const pubspec: CodeScanningConfig = {\n supportedFiles: ['pubspec.yml'],\n ignoreDirs: ['build'],\n scanFunction: (filePath) => {\n const directory = dirname(filePath);\n const fileContents = readFileSync(filePath, 'utf-8');\n const {\n name,\n description,\n dev_dependencies = {},\n dependencies = {},\n } = yaml.load(removeYAMLComments(fileContents)) as {\n /** Name */\n name?: string;\n /** Description */\n description?: string;\n /** Dev dependencies */\n dev_dependencies?: { [k in string]: number | Record<string, string> };\n /** Dependencies */\n dependencies?: { [k in string]: number | Record<string, string> };\n };\n return [\n {\n name: name || directory.split('/').pop()!,\n description,\n type: CodePackageType.RequirementsTxt,\n softwareDevelopmentKits: [\n ...Object.entries(dependencies).map(([name, version]) => ({\n name,\n version:\n typeof version === 'string'\n ? version\n : typeof version === 'number'\n ? version.toString()\n : version?.sdk,\n })),\n ...Object.entries(dev_dependencies).map(([name, version]) => ({\n name,\n version:\n typeof version === 'string'\n ? version\n : typeof version === 'number'\n ? 
version.toString()\n : version?.sdk,\n isDevDependency: true,\n })),\n ],\n },\n ];\n },\n};\n","import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { CodePackageType } from '@transcend-io/privacy-types';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport * as t from 'io-ts';\nimport { dirname } from 'node:path';\n\nconst SwiftPackage = t.type({\n pins: t.array(\n t.type({\n identity: t.string,\n kind: t.string,\n location: t.string,\n state: t.intersection([\n t.type({\n revision: t.string,\n }),\n t.partial({\n version: t.union([t.string, t.undefined, t.null]),\n }),\n ]),\n }),\n ),\n version: t.number,\n});\n\nconst SwiftPackageV1 = t.type({\n object: t.type({\n pins: t.array(\n t.type({\n package: t.string,\n repositoryURL: t.string,\n state: t.intersection([\n t.type({\n branch: t.union([t.string, t.undefined, t.null]),\n revision: t.string,\n }),\n t.partial({\n version: t.union([t.string, t.undefined, t.null]),\n }),\n ]),\n }),\n ),\n }),\n version: t.number,\n});\n\nexport const swift: CodeScanningConfig = {\n supportedFiles: ['Package.resolved'],\n ignoreDirs: [],\n scanFunction: (filePath) => {\n const fileContents = readFileSync(filePath, 'utf-8');\n\n // Attempt latest version first\n try {\n const parsed = decodeCodec(SwiftPackage, fileContents);\n const splitPath = dirname(filePath).split('/');\n const originalName = splitPath[splitPath.length - 1];\n let name = originalName;\n if (name === 'swiftpm') {\n name = splitPath[splitPath.length - 2];\n if (name === 'xcshareddata') {\n name = splitPath[splitPath.length - 3];\n } else if (!name) {\n name = originalName;\n }\n if (name === 'project.xcworkspace') {\n name = splitPath[splitPath.length - 4];\n }\n }\n return [\n {\n name,\n type: CodePackageType.Swift,\n softwareDevelopmentKits: parsed.pins.map((target) => ({\n name: target.identity,\n version: target.state.version || undefined,\n })),\n },\n ];\n } catch (e) {\n // Throw non codec errors\n if (!e?.message?.includes('Failed to decode codec')) {\n throw e;\n }\n\n // Attempt v1\n try {\n const parsed = decodeCodec(SwiftPackageV1, fileContents);\n return [\n {\n name: dirname(filePath).split('/').pop() || '', // TODO pull from Package.swift ->> name if possible\n type: CodePackageType.Swift,\n softwareDevelopmentKits: parsed.object.pins.map((target) => ({\n name: target.package,\n version: target.state.version || undefined,\n })),\n },\n ];\n } catch (e2) {\n if (!e2?.message?.includes('Failed to decode codec')) {\n throw e2;\n }\n throw e;\n }\n }\n },\n};\n"]}
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-ATEDOYYC.cjs","../src/lib/code-scanning/integrations/cocoaPods.ts","../src/lib/code-scanning/integrations/gradle.ts","../src/lib/code-scanning/integrations/pubspec.ts","../src/lib/code-scanning/integrations/swift.ts"],"names":["POD_TARGET_REGEX","POD_PACKAGE_REGEX","cocoaPods","filePath","fileContents","readFileSync","targets","findAllWithRegex","packages","target","ind","CodePackageType","pkg","GRADLE_IMPLEMENTATION_REGEX","GRADLE_PLUGIN_REGEX","GRADLE_IMPLEMENTATION_GROUP_REGEX","GRADLE_APPLICATION_NAME_REGEX","gradle","directory","dirname","targetPlugins","targetGroups","applications"],"mappings":"AAAA,u/BAAwC,wDAAyC,wBCApD,qDAGI,2DACD,IAE1BA,CAAAA,CAAmB,wBAAA,CACnBC,CAAAA,CAAoB,4CAAA,CAEbC,CAAAA,CAAgC,CAC3C,cAAA,CAAgB,CAAC,SAAS,CAAA,CAC1B,UAAA,CAAY,CAAC,MAAA,CAAQ,OAAO,CAAA,CAC5B,YAAA,CAAeC,CAAAA,EAAa,CAC1B,IAAMC,CAAAA,CAAeC,8BAAAA,CAAaF,CAAU,OAAO,CAAA,CAE7CG,CAAAA,CAAUC,yCAAAA,CAEZ,KAAA,CAAO,IAAI,MAAA,CAAOP,CAAAA,CAAkB,GAAG,CAAA,CACvC,OAAA,CAAS,CAAC,QAAA,CAAU,MAAA,CAAQ,QAAQ,CACtC,CAAA,CACAI,CACF,CAAA,CACMI,CAAAA,CAAWD,yCAAAA,CAEb,KAAA,CAAO,IAAI,MAAA,CAAON,CAAAA,CAAmB,GAAG,CAAA,CACxC,OAAA,CAAS,CACP,QAAA,CACA,MAAA,CACA,QAAA,CACA,OAAA,CACA,QAAA,CACA,SAAA,CACA,QACF,CACF,CAAA,CACAG,CACF,CAAA,CAiBA,OAf+BE,CAAAA,CAAQ,GAAA,CAAI,CAACG,CAAAA,CAAQC,CAAAA,CAAAA,EAAAA,CAAS,CAC3D,IAAA,CAAMD,CAAAA,CAAO,IAAA,CACb,IAAA,CAAME,6BAAAA,CAAgB,SAAA,CACtB,uBAAA,CAAyBH,CAAAA,CACtB,MAAA,CACEI,CAAAA,EACCA,CAAAA,CAAI,UAAA,CAAaH,CAAAA,CAAO,UAAA,EAAA,CACvB,CAACH,CAAAA,CAAQI,CAAAA,CAAM,CAAC,CAAA,EAAKE,CAAAA,CAAI,UAAA,CAAaN,CAAAA,CAAQI,CAAAA,CAAM,CAAC,CAAA,CAAE,UAAA,CAC5D,CAAA,CACC,GAAA,CAAKE,CAAAA,EAAAA,CAAS,CACb,IAAA,CAAMA,CAAAA,CAAI,IAAA,CACV,OAAA,CAASA,CAAAA,CAAI,OACf,CAAA,CAAE,CACN,CAAA,CAAE,CAGJ,CACF,CAAA,CCvDA,4BAGwB,IAElBC,CAAAA,CACJ,gDAAA,CACIC,CAAAA,CAAsB,yCAAA,CACtBC,CAAAA,CACJ,uGAAA,CACIC,CAAAA,CAAgC,0BAAA,CAYzBC,CAAAA,CAA6B,CACxC,cAAA,CAAgB,CAAC,gBAAgB,CAAA,CACjC,UAAA,CAAY,CACV,oBAAA,CACA,oBAAA,CACA,2BACF,CAAA,CACA,YAAA,CAAed,CAAAA,EAAa,CAC1B,IAAMC,CAAAA,CAAeC,8BAAAA,CAAaF,CAAU,OAAO,CAAA,CAC7Ce,CAAAA,CAAYC,2BAAAA,CAAgB,CAAA,CAE5Bb,CAAAA,CAAUC,yCAAAA,CAEZ,KAAA,CAAO,IAAI,MAAA,CAAOM,CAAAA,CAA6B,GAAG,CAAA,CAClD,OAAA,CAAS,CAAC,OAAA,CAAS,QAAA,CAAU,MAAA,CAAQ,MAAA,CAAQ,SAAA,CAAW,QAAQ,CAClE,CAAA,CACAT,CACF,CAAA,CACMgB,CAAAA,CAAgBb,yCAAAA,CAElB,KAAA,CAAO,IAAI,MAAA,CAAOO,CAAAA,CAAqB,GAAG,CAAA,CAC1C,OAAA,CAAS,CAAC,QAAA,CAAU,MAAA,CAAQ,OAAA,CAAS,SAAA,CAAW,QAAQ,CAC1D,CAAA,CACAV,CACF,CAAA,CACMiB,CAAAA,CAAed,yCAAAA,CAEjB,KAAA,CAAO,IAAI,MAAA,CAAOQ,CAAAA,CAAmC,GAAG,CAAA,CACxD,OAAA,CAAS,CACP,QAAA,CACA,QAAA,CACA,OAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,MAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,QAAA,CACA,SAAA,CACA,QACF,CACF,CAAA,CACAX,CACF,CAAA,CACMkB,CAAAA,CAAef,yCAAAA,CAEjB,KAAA,CAAO,IAAI,MAAA,CAAOS,CAAAA,CAA+B,GAAG,CAAA,CACpD,OAAA,CAAS,CAAC,OAAA,CAAS,MAAM,CAC3B,CAAA,CACAZ,CACF,CAAA,CACA,EAAA,CAAIkB,CAAAA,CAAa,MAAA,CAAS,CAAA,CACxB,MAAM,IAAI,KAAA,CAAM,CAAA,0CAAA,EAA6CnB,CAAQ,CAAA,CAAA;AC/CjE;ACkBR","file":"/home/runner/work/cli/cli/dist/chunk-ATEDOYYC.cjs","sourcesContent":[null,"import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { CodePackageSdk } from '../../../codecs';\nimport { findAllWithRegex } from '@transcend-io/type-utils';\nimport { CodePackageType } from '@transcend-io/privacy-types';\n\nconst POD_TARGET_REGEX = /target ('|\")(.*?)('|\")/;\nconst POD_PACKAGE_REGEX = /pod ('|\")(.*?)('|\")(, ('|\")~> (.+?)('|\")|)/;\n\nexport const cocoaPods: CodeScanningConfig = {\n supportedFiles: ['Podfile'],\n 
ignoreDirs: ['Pods', 'Build'],\n scanFunction: (filePath) => {\n const fileContents = readFileSync(filePath, 'utf-8');\n\n const targets = findAllWithRegex(\n {\n value: new RegExp(POD_TARGET_REGEX, 'g'),\n matches: ['quote1', 'name', 'quote2'],\n },\n fileContents,\n );\n const packages = findAllWithRegex(\n {\n value: new RegExp(POD_PACKAGE_REGEX, 'g'),\n matches: [\n 'quote1',\n 'name',\n 'quote2',\n 'extra',\n 'quote3',\n 'version',\n 'quote4',\n ],\n },\n fileContents,\n );\n\n const deps: CodePackageSdk[] = targets.map((target, ind) => ({\n name: target.name,\n type: CodePackageType.CocoaPods,\n softwareDevelopmentKits: packages\n .filter(\n (pkg) =>\n pkg.matchIndex > target.matchIndex &&\n (!targets[ind + 1] || pkg.matchIndex < targets[ind + 1].matchIndex),\n )\n .map((pkg) => ({\n name: pkg.name,\n version: pkg.version,\n })),\n }));\n\n return deps;\n },\n};\n","import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { findAllWithRegex } from '@transcend-io/type-utils';\nimport { dirname } from 'node:path';\n\nconst GRADLE_IMPLEMENTATION_REGEX =\n /implementation( *)('|\")(.+?):(.+?):(.+?|)('|\")/;\nconst GRADLE_PLUGIN_REGEX = /apply plugin: *('|\")(.+?)(:(.+?)|)('|\")/;\nconst GRADLE_IMPLEMENTATION_GROUP_REGEX =\n /implementation group:( *)('|\")(.+?)('|\"),( *)name:( *)('|\")(.+?)('|\"),( *)version:( *)('|\")(.+?)('|\")/;\nconst GRADLE_APPLICATION_NAME_REGEX = /applicationId( *)\"(.+?)\"/;\n\n/**\n * So far, there are three ways of defining dependencies that is supported\n * implementation group: 'org.eclipse.jdt', name: 'org.eclipse.jdt.core', version: '3.28.0'\n * or\n * implementation 'com.google.firebase:firebase-analytics:18.0.0'\n * or\n * apply plugin: 'com.google.gms.google-services'\n *\n * single and double quotes are both recognized\n */\nexport const gradle: CodeScanningConfig = {\n supportedFiles: ['build.gradle**'],\n ignoreDirs: [\n 'gradle-app.setting',\n 'gradle-wrapper.jar',\n 'gradle-wrapper.properties',\n ],\n scanFunction: (filePath) => {\n const fileContents = readFileSync(filePath, 'utf-8');\n const directory = dirname(filePath);\n\n const targets = findAllWithRegex(\n {\n value: new RegExp(GRADLE_IMPLEMENTATION_REGEX, 'g'),\n matches: ['space', 'quote1', 'name', 'path', 'version', 'quote2'],\n },\n fileContents,\n );\n const targetPlugins = findAllWithRegex(\n {\n value: new RegExp(GRADLE_PLUGIN_REGEX, 'g'),\n matches: ['quote1', 'name', 'group', 'version', 'quote2'],\n },\n fileContents,\n );\n const targetGroups = findAllWithRegex(\n {\n value: new RegExp(GRADLE_IMPLEMENTATION_GROUP_REGEX, 'g'),\n matches: [\n 'space1',\n 'quote1',\n 'group',\n 'quote2',\n 'space2',\n 'space3',\n 'quote3',\n 'name',\n 'quote4',\n 'space4',\n 'space5',\n 'quote5',\n 'version',\n 'quote6',\n ],\n },\n fileContents,\n );\n const applications = findAllWithRegex(\n {\n value: new RegExp(GRADLE_APPLICATION_NAME_REGEX, 'g'),\n matches: ['space', 'name'],\n },\n fileContents,\n );\n if (applications.length > 1) {\n throw new Error(`Expected only one applicationId per file: ${filePath}`);\n }\n\n return [\n {\n name: applications[0]?.name || directory.split('/').pop()!,\n softwareDevelopmentKits: [\n ...targets,\n ...targetGroups,\n ...targetPlugins,\n ].map((target) => ({\n name: target.name,\n version: target.version || undefined,\n })),\n },\n ];\n },\n};\n","import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { CodePackageType } from '@transcend-io/privacy-types';\nimport yaml from 
'js-yaml';\nimport { dirname } from 'node:path';\n\n/**\n * Remove YAML comments from a string\n *\n * @param yamlString - YAML string\n * @returns String without comments\n */\nfunction removeYAMLComments(yamlString: string): string {\n return yamlString\n .split('\\n')\n .map((line) => {\n // Remove inline comments\n const commentIndex = line.indexOf('#');\n if (commentIndex > -1) {\n // Check if '#' is not inside a string\n if (\n !line.substring(0, commentIndex).includes('\"') &&\n !line.substring(0, commentIndex).includes(\"'\")\n ) {\n return line.substring(0, commentIndex).trim();\n }\n }\n return line;\n })\n .filter((line) => line.length > 0)\n .join('\\n');\n}\n\nexport const pubspec: CodeScanningConfig = {\n supportedFiles: ['pubspec.yml'],\n ignoreDirs: ['build'],\n scanFunction: (filePath) => {\n const directory = dirname(filePath);\n const fileContents = readFileSync(filePath, 'utf-8');\n const {\n name,\n description,\n dev_dependencies = {},\n dependencies = {},\n } = yaml.load(removeYAMLComments(fileContents)) as {\n /** Name */\n name?: string;\n /** Description */\n description?: string;\n /** Dev dependencies */\n dev_dependencies?: { [k in string]: number | Record<string, string> };\n /** Dependencies */\n dependencies?: { [k in string]: number | Record<string, string> };\n };\n return [\n {\n name: name || directory.split('/').pop()!,\n description,\n type: CodePackageType.RequirementsTxt,\n softwareDevelopmentKits: [\n ...Object.entries(dependencies).map(([name, version]) => ({\n name,\n version:\n typeof version === 'string'\n ? version\n : typeof version === 'number'\n ? version.toString()\n : version?.sdk,\n })),\n ...Object.entries(dev_dependencies).map(([name, version]) => ({\n name,\n version:\n typeof version === 'string'\n ? version\n : typeof version === 'number'\n ? 
version.toString()\n : version?.sdk,\n isDevDependency: true,\n })),\n ],\n },\n ];\n },\n};\n","import { readFileSync } from 'node:fs';\nimport { CodeScanningConfig } from '../types';\nimport { CodePackageType } from '@transcend-io/privacy-types';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport * as t from 'io-ts';\nimport { dirname } from 'node:path';\n\nconst SwiftPackage = t.type({\n pins: t.array(\n t.type({\n identity: t.string,\n kind: t.string,\n location: t.string,\n state: t.intersection([\n t.type({\n revision: t.string,\n }),\n t.partial({\n version: t.union([t.string, t.undefined, t.null]),\n }),\n ]),\n }),\n ),\n version: t.number,\n});\n\nconst SwiftPackageV1 = t.type({\n object: t.type({\n pins: t.array(\n t.type({\n package: t.string,\n repositoryURL: t.string,\n state: t.intersection([\n t.type({\n branch: t.union([t.string, t.undefined, t.null]),\n revision: t.string,\n }),\n t.partial({\n version: t.union([t.string, t.undefined, t.null]),\n }),\n ]),\n }),\n ),\n }),\n version: t.number,\n});\n\nexport const swift: CodeScanningConfig = {\n supportedFiles: ['Package.resolved'],\n ignoreDirs: [],\n scanFunction: (filePath) => {\n const fileContents = readFileSync(filePath, 'utf-8');\n\n // Attempt latest version first\n try {\n const parsed = decodeCodec(SwiftPackage, fileContents);\n const splitPath = dirname(filePath).split('/');\n const originalName = splitPath[splitPath.length - 1];\n let name = originalName;\n if (name === 'swiftpm') {\n name = splitPath[splitPath.length - 2];\n if (name === 'xcshareddata') {\n name = splitPath[splitPath.length - 3];\n } else if (!name) {\n name = originalName;\n }\n if (name === 'project.xcworkspace') {\n name = splitPath[splitPath.length - 4];\n }\n }\n return [\n {\n name,\n type: CodePackageType.Swift,\n softwareDevelopmentKits: parsed.pins.map((target) => ({\n name: target.identity,\n version: target.state.version || undefined,\n })),\n },\n ];\n } catch (e) {\n // Throw non codec errors\n if (!e?.message?.includes('Failed to decode codec')) {\n throw e;\n }\n\n // Attempt v1\n try {\n const parsed = decodeCodec(SwiftPackageV1, fileContents);\n return [\n {\n name: dirname(filePath).split('/').pop() || '', // TODO pull from Package.swift ->> name if possible\n type: CodePackageType.Swift,\n softwareDevelopmentKits: parsed.object.pins.map((target) => ({\n name: target.package,\n version: target.state.version || undefined,\n })),\n },\n ];\n } catch (e2) {\n if (!e2?.message?.includes('Failed to decode codec')) {\n throw e2;\n }\n throw e;\n }\n }\n },\n};\n"]}
@@ -1,12 +1,12 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var
-
`)}`);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`The timestamp column "${t.timestampColum}" is present for all row`))}return t}async function xe(p,t){let f=
-
`)}`;if(_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(o)),!await
-
When 'forceTriggerWorkflows' is set all the user identifiers should contain a consent record`);if(T&&Oe({currentConsentRecord:T,pendingUpdates:C,preferenceTopics:d})&&!s){r.skippedUpdates[P]=w;return}if(T&&Fe({currentConsentRecord:T,pendingUpdates:C,preferenceTopics:d})){r.pendingConflictUpdates[P]={row:w,record:T};return}r.pendingSafeUpdates[P]=w}),n[p]=r,await o.setValue(n,"fileMetadata");let b=new Date().getTime();_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pre-processed file: "${p}" in ${(b-i)/1e3}s`))}var Ee=e.type({purpose:e.string,preference:e.union([e.string,e.null]),valueMapping:e.record(e.string,e.union([e.string,e.boolean,e.null,e.undefined]))}),Rr=e.record(e.string,Ee),ot=e.type({name:e.string,isUniqueOnPreferenceStore:e.boolean}),Tr=e.record(e.string,ot),nt=e.intersection([e.type({columnToPurposeName:e.record(e.string,Ee),lastFetchedAt:e.string,pendingSafeUpdates:e.record(e.string,e.record(e.string,e.string)),pendingConflictUpdates:e.record(e.string,e.type({record:_privacytypes.PreferenceQueryResponseItem,row:e.record(e.string,e.string)})),skippedUpdates:e.record(e.string,e.record(e.string,e.string))}),e.partial({identifierColumn:e.string,timestampColum:e.string})]),Sr=e.record(e.string,e.union([e.boolean,_privacytypes.PreferenceUpdateItem])),kr=e.record(e.string,e.union([e.boolean,e.record(e.string,e.string)])),Mr=e.record(e.string,e.type({uploadedAt:e.string,error:e.string,update:_privacytypes.PreferenceUpdateItem})),xr=e.record(e.string,e.type({record:_privacytypes.PreferenceQueryResponseItem,row:e.record(e.string,e.string)})),Ir=e.record(e.string,e.record(e.string,e.string)),Ve=e.type({fileMetadata:e.record(e.string,nt),failingUpdates:e.record(e.string,e.type({uploadedAt:e.string,error:e.string,update:_privacytypes.PreferenceUpdateItem})),pendingUpdates:e.record(e.string,_privacytypes.PreferenceUpdateItem)}),Or=e.type({records:e.array(e.type({anchorIdentifier:_privacytypes.PreferenceStoreIdentifier,timestamp:e.string}))}),Ae=e.intersection([e.type({records:e.array(e.intersection([e.type({success:e.boolean}),e.partial({errorMessage:e.string})])),failures:e.array(e.type({index:e.number,error:e.string}))}),e.partial({errors:e.array(e.string)})]),Qe=e.type({name:e.string,value:e.string});async function Wr({auth:p,sombraAuth:t,receiptFilepath:f,file:d,partition:m,isSilent:u=!0,dryRun:s=!1,skipWorkflowTriggers:o=!1,skipConflictUpdates:i=!1,skipExistingRecordCheck:n=!1,attributes:a=[],transcendUrl:r,forceTriggerWorkflows:l=!1}){let y=
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkKYOQWIIScjs = require('./chunk-KYOQWIIS.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunk5MG2CEZVcjs = require('./chunk-5MG2CEZV.cjs');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _cliprogress = require('cli-progress'); var _cliprogress2 = _interopRequireDefault(_cliprogress);var _persistedstate = require('@transcend-io/persisted-state');var _iots = require('io-ts'); var L = _interopRequireWildcard(_iots); var M = _interopRequireWildcard(_iots); var e = _interopRequireWildcard(_iots);var _typeutils = require('@transcend-io/type-utils');var He=["ENOTFOUND","ECONNRESET","ETIMEDOUT","502 Bad Gateway","504 Gateway Time-out","429","Rate limit exceeded","Task timed out after","unknown request error"].map(p=>p.toLowerCase());async function x(p,t,{maxAttempts:f=3,baseDelayMs:d=250,isRetryable:m=(s,o)=>He.some(i=>o.toLowerCase().includes(i)),onRetry:u}={}){let s=0;for(;;){s+=1;try{return await t()}catch(o){let i=_nullishCoalesce((o&&(_optionalChain([o, 'access', _2 => _2.response, 'optionalAccess', _3 => _3.body])||o.message)), () => (String(_nullishCoalesce(o, () => ("Unknown error")))));if(!(s<f&&m(o,i)))throw new Error(`${p} failed after ${s} attempt(s): ${i}`);_optionalChain([u, 'optionalCall', _4 => _4(s,o,i)]);let a=d*2**(s-1),r=Math.floor(Math.random()*d),l=a+r;_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`[retry] attempt ${s}/${f-1}; backing off ${l}ms: ${i}`)),await _chunkKYOQWIIScjs.$f.call(void 0, l)}}}var _privacytypes = require('@transcend-io/privacy-types');var V=M.intersection([M.type({nodes:M.array(_privacytypes.PreferenceQueryResponseItem)}),M.partial({cursor:M.string})]);async function Se(p,{identifiers:t,partitionKey:f,skipLogging:d=!1,concurrency:m=40}){let u=[],s=_chunk5MG2CEZVcjs.b.call(void 0, t,100),o=new Date().getTime(),i=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);d||i.start(t.length,0);let n=0;await _chunkKYOQWIIScjs.a.call(void 0, s,async l=>{let y=await x("Preference Query",()=>p.post(`v1/preferences/${f}/query`,{json:{filter:{identifiers:l},limit:l.length}}).json(),{onRetry:(h,b,w)=>{_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`[RETRY] group size=${l.length} partition=${f} attempt=${h}: ${w}`))}}),g=_typeutils.decodeCodec.call(void 0, V,y);u.push(...g.nodes),n+=l.length,i.update(n)},{concurrency:m}),i.stop();let r=new Date().getTime()-o;return d||_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Completed 
download in "${r/1e3}" seconds.`)),u}function X({row:p,columnToPurposeName:t,purposeSlugs:f,preferenceTopics:d}){let m={};return Object.entries(t).forEach(([u,{purpose:s,preference:o,valueMapping:i}])=>{if(!f.includes(s))throw new Error(`Invalid purpose slug: ${s}, expected: ${f.join(", ")}`);let n=p[u];if(o){let a=d.find(r=>r.slug===o&&r.purpose.trackingType===s);if(!a){let r=d.filter(l=>l.purpose.trackingType===s).map(l=>l.slug);throw new Error(`Invalid preference slug: ${o} for purpose: ${s}. Allowed preference slugs for purpose are: ${r.join(",")}`)}switch(m[s]||(m[s]={preferences:[]}),m[s].preferences||(m[s].preferences=[]),a.type){case _privacytypes.PreferenceTopicType.Boolean:{let r=i[n];if(r===void 0&&n!=="")throw new Error(`No preference mapping found for value "${n}" in column "${u}" (purpose=${s}, preference=${o})`);if(r==null)return;if(typeof r!="boolean")throw new Error(`Invalid value for boolean preference: ${o}, expected boolean, got: ${n}`);m[s].preferences.push({topic:o,choice:{booleanValue:r}});break}case _privacytypes.PreferenceTopicType.Select:{let r=i[n];if(r===void 0&&n!=="")throw new Error(`No preference mapping found for value "${n}" in column "${u}" (purpose=${s}, preference=${o})`);if(r==null)return;if(typeof r!="string")throw new Error(`Invalid value for select preference: ${o}, expected string, got: ${n}`);let l=r.trim()||null;if(l&&!a.preferenceOptionValues.map(({slug:y})=>y).includes(l))throw new Error(`Invalid value for select preference: ${o}, expected one of: ${a.preferenceOptionValues.map(({slug:y})=>y).join(", ")}, got: ${n}`);m[s].preferences.push({topic:o,choice:{selectValue:l}});break}case _privacytypes.PreferenceTopicType.MultiSelect:{if(typeof n!="string")throw new Error(`Invalid value for multi select preference: ${o}, expected string, got: ${n}`);let r=_chunkKYOQWIIScjs.rc.call(void 0, n).map(l=>{let y=i[l];if(y===void 0&&n!=="")throw new Error(`No preference mapping found for multi select token "${n}" in column "${u}" (purpose=${s}, preference=${o})`);if(y==null)return null;if(typeof y!="string")throw new Error(`Invalid value for multi select preference: ${o}, expected one of: ${a.preferenceOptionValues.map(({slug:g})=>g).join(", ")}, got: ${l}`);return y}).filter(l=>l!==null).sort((l,y)=>l.localeCompare(y));r.length>0&&m[s].preferences.push({topic:o,choice:{selectValues:r}});break}default:throw new Error(`Unknown preference type: ${a.type}`)}}else{let a=i[n];if(a===void 0&&n!=="")throw new Error(`No preference mapping found for value "${n}" in column "${u}" (purpose=${s}, preference=\u2205)`);if(a===null)return;m[s]?m[s].enabled=a===!0:m[s]={enabled:a===!0}}}),_typeutils.apply.call(void 0, m,(u,s)=>{if(typeof u.enabled!="boolean")throw new Error(`No mapping provided for purpose.enabled=true/false value: ${s}`);return{...u,enabled:u.enabled}})}var _inquirer = require('inquirer'); var _inquirer2 = _interopRequireDefault(_inquirer);var Z="[NONE]";async function Me(p,t){let f=_chunk5MG2CEZVcjs.j.call(void 0, p.map(m=>Object.keys(m)).flat()),d=_chunk5MG2CEZVcjs.c.call(void 0, f,[...t.identifierColumn?[t.identifierColumn]:[],...Object.keys(t.columnToPurposeName)]);if(!t.timestampColum){let{timestampName:m}=await _inquirer2.default.prompt([{name:"timestampName",message:"Choose the column that will be used as the timestamp of last preference 
update",type:"list",default:d.find(u=>u.toLowerCase().includes("date"))||d.find(u=>u.toLowerCase().includes("time"))||d[0],choices:[...d,Z]}]);t.timestampColum=m}if(_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Using timestamp column "${t.timestampColum}"`)),t.timestampColum!==Z){let m=p.map((u,s)=>u[t.timestampColum]?null:[s]).filter(u=>!!u).flat();if(m.length>0)throw new Error(`The timestamp column "${t.timestampColum}" is missing a value for the following rows: ${m.join(`
+
`)}`);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`The timestamp column "${t.timestampColum}" is present for all row`))}return t}async function xe(p,t){let f=_chunk5MG2CEZVcjs.j.call(void 0, p.map(o=>Object.keys(o)).flat()),d=_chunk5MG2CEZVcjs.c.call(void 0, f,[...t.identifierColumn?[t.identifierColumn]:[],...Object.keys(t.columnToPurposeName)]);if(!t.identifierColumn){let{identifierName:o}=await _inquirer2.default.prompt([{name:"identifierName",message:"Choose the column that will be used as the identifier to upload consent preferences by",type:"list",default:d.find(i=>i.toLowerCase().includes("email"))||d[0],choices:d}]);t.identifierColumn=o}_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Using identifier column "${t.identifierColumn}"`));let m=p.map((o,i)=>o[t.identifierColumn]?null:[i]).filter(o=>!!o).flat();if(m.length>0){let o=`The identifier column "${t.identifierColumn}" is missing a value for the following rows: ${m.join(", ")}`;if(_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(o)),!await _chunkKYOQWIIScjs.Vf.call(void 0, {message:"Would you like to skip rows missing an identifier?"}))throw new Error(o);let n=p.length;p=p.filter(a=>a[t.identifierColumn]),_chunkZUNVPK23cjs.a.info(_colors2.default.yellow(`Skipped ${n-p.length} rows missing an identifier`))}_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`The identifier column "${t.identifierColumn}" is present for all rows`));let u=_chunk5MG2CEZVcjs.d.call(void 0, p,t.identifierColumn),s=Object.entries(u).filter(([,o])=>o.length>1);if(s.length>0){let o=`The identifier column "${t.identifierColumn}" has duplicate values for the following rows: ${s.slice(0,10).map(([n,a])=>`${n} (${a.length})`).join(`
`)}`;if(_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(o)),!await _chunkKYOQWIIScjs.Vf.call(void 0, {message:"Would you like to automatically take the latest update?"}))throw new Error(o);p=Object.entries(u).map(([,n])=>n.sort((r,l)=>new Date(l[t.timestampColum]).getTime()-new Date(r[t.timestampColum]).getTime())[0]).filter(n=>n)}return{currentState:t,preferences:p}}async function Ie(p,t,{purposeSlugs:f,preferenceTopics:d,forceTriggerWorkflows:m}){let u=_chunk5MG2CEZVcjs.j.call(void 0, p.map(i=>Object.keys(i)).flat()),s=_chunk5MG2CEZVcjs.c.call(void 0, u,[...t.identifierColumn?[t.identifierColumn]:[],...t.timestampColum?[t.timestampColum]:[]]);if(s.length===0){if(m)return t;throw new Error("No other columns to process")}let o=[...f,...d.map(i=>`${i.purpose.trackingType}->${i.slug}`)];return await _chunkKYOQWIIScjs.b.call(void 0, s,async i=>{let n=_chunk5MG2CEZVcjs.j.call(void 0, p.map(r=>r[i])),a=t.columnToPurposeName[i];if(a)_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Column "${i}" is associated with purpose "${a.purpose}"`));else{let{purposeName:r}=await _inquirer2.default.prompt([{name:"purposeName",message:`Choose the purpose that column ${i} is associated with`,type:"list",default:o.find(g=>g.startsWith(f[0])),choices:o}]),[l,y]=r.split("->");a={purpose:l,preference:y||null,valueMapping:{}}}await _chunkKYOQWIIScjs.b.call(void 0, n,async r=>{if(a.valueMapping[r]!==void 0){_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Value "${r}" is associated with purpose value "${a.valueMapping[r]}"`));return}if(a.preference===null){let{purposeValue:l}=await _inquirer2.default.prompt([{name:"purposeValue",message:`Choose the purpose value for value "${r}" associated with purpose "${a.purpose}"`,type:"confirm",default:r!=="false"}]);a.valueMapping[r]=l}if(a.preference!==null){let l=d.find(g=>g.slug===a.preference);if(!l){_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Preference topic "${a.preference}" not found`));return}let y=l.preferenceOptionValues.map(({slug:g})=>g);if(l.type===_privacytypes.PreferenceTopicType.Boolean){let{preferenceValue:g}=await _inquirer2.default.prompt([{name:"preferenceValue",message:`Choose the preference value for "${l.slug}" value "${r}" associated with purpose "${a.purpose}"`,type:"confirm",default:r!=="false"}]);a.valueMapping[r]=g;return}if(l.type===_privacytypes.PreferenceTopicType.Select){let{preferenceValue:g}=await _inquirer2.default.prompt([{name:"preferenceValue",message:`Choose the preference value for "${l.slug}" value "${r}" associated with purpose "${a.purpose}"`,type:"list",choices:y,default:y.find(h=>h===r)}]);a.valueMapping[r]=g;return}if(l.type===_privacytypes.PreferenceTopicType.MultiSelect){let g=_chunkKYOQWIIScjs.rc.call(void 0, r);await _chunkKYOQWIIScjs.b.call(void 0, g,async h=>{if(a.valueMapping[h]!==void 0)return;let{preferenceValue:b}=await _inquirer2.default.prompt([{name:"preferenceValue",message:`Choose the preference value for "${l.slug}" value "${h}" associated with purpose "${a.purpose}"`,type:"list",choices:y,default:y.find(w=>w===h)}]);a.valueMapping[h]=b});return}throw new Error(`Unknown preference topic type: ${l.type}`)}}),t.columnToPurposeName[i]=a}),t}function Oe({currentConsentRecord:p,pendingUpdates:t,preferenceTopics:f}){return Object.entries(t).every(([d,{preferences:m=[],enabled:u}])=>{let s=p.purposes.find(i=>i.purpose===d);return!!s&&s.enabled===u?m.every(({topic:i,choice:n})=>s.preferences&&s.preferences.find(a=>{if(a.topic!==i)return!1;let r=f.find(l=>l.slug===i&&l.purpose.trackingType===d);if(!r)throw new 
Error(`Could not find preference topic for ${i}`);switch(r.type){case _privacytypes.PreferenceTopicType.Boolean:return a.choice.booleanValue===n.booleanValue;case _privacytypes.PreferenceTopicType.Select:return a.choice.selectValue===n.selectValue;case _privacytypes.PreferenceTopicType.MultiSelect:let l=(a.choice.selectValues||[]).sort(),y=(n.selectValues||[]).sort();return l.length===y.length&&l.every((g,h)=>g===y[h]);default:throw new Error(`Unknown preference topic type: ${r.type}`)}})):!1})}function Fe({currentConsentRecord:p,pendingUpdates:t,preferenceTopics:f,log:d}){return!!Object.entries(t).find(([m,{preferences:u=[],enabled:s}])=>{let o=p.purposes.find(i=>i.purpose===m);return o?o.enabled!==s?(d&&_chunkZUNVPK23cjs.a.warn(`Purpose ${m} enabled value conflict for user ${p.userId}. Pending Value: ${s}, Current Value: ${o.enabled}`),!0):!!u.find(({topic:i,choice:n})=>{let a=(o.preferences||[]).find(g=>g.topic===i);if(!a)return d&&_chunkZUNVPK23cjs.a.warn(`No existing preference found for topic ${i} in purpose ${m} for user ${p.userId}.`),!1;let r=f.find(g=>g.slug===i&&g.purpose.trackingType===m);if(!r)throw new Error(`Could not find preference topic for ${i}`);let l,y;switch(r.type){case _privacytypes.PreferenceTopicType.Boolean:return l=a.choice.booleanValue!==n.booleanValue,d&&_chunkZUNVPK23cjs.a.warn(`Preference topic ${i} boolean value conflict for user ${p.userId}. Expected: ${n.booleanValue}, Found: ${a.choice.booleanValue}`),l;case _privacytypes.PreferenceTopicType.Select:return y=a.choice.selectValue!==n.selectValue,d&&_chunkZUNVPK23cjs.a.warn(`Preference topic ${i} select value conflict for user ${p.userId}. Expected: ${n.selectValue}, Found: ${a.choice.selectValue}`),y;case _privacytypes.PreferenceTopicType.MultiSelect:let g=(a.choice.selectValues||[]).sort(),h=(n.selectValues||[]).sort();return y=g.length!==h.length||!g.every((b,w)=>b===h[w]),d&&_chunkZUNVPK23cjs.a.warn(`Preference topic ${i} multi-select value conflict for user ${p.userId}. Expected: ${h.join(", ")}, Found: ${g.join(", ")}`),y;default:throw new Error(`Unknown preference topic type: ${r.type}`)}}):(d&&_chunkZUNVPK23cjs.a.warn(`No existing purpose found for ${m} in consent record for ${p.userId}.`),!1)})}async function De({file:p,sombra:t,purposeSlugs:f,preferenceTopics:d,partitionKey:m,skipExistingRecordCheck:u,forceTriggerWorkflows:s},o){let i=new Date().getTime(),n=o.getValue("fileMetadata");_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Reading in file: "${p}"`));let a=_chunkKYOQWIIScjs.uc.call(void 0, p,L.record(L.string,L.string)),r={columnToPurposeName:{},pendingSafeUpdates:{},pendingConflictUpdates:{},skippedUpdates:{},...n[p]||{},lastFetchedAt:new Date().toISOString()};r=await Me(a,r),n[p]=r,await o.setValue(n,"fileMetadata");let l=await xe(a,r);r=l.currentState,a=l.preferences,n[p]=r,await o.setValue(n,"fileMetadata"),r=await Ie(a,r,{preferenceTopics:d,purposeSlugs:f,forceTriggerWorkflows:s}),n[p]=r,await o.setValue(n,"fileMetadata");let y=a.map(w=>w[r.identifierColumn]),g=u?[]:await Se(t,{identifiers:y.map(w=>({value:w})),partitionKey:m}),h=_chunk5MG2CEZVcjs.e.call(void 0, g,"userId");r.pendingConflictUpdates={},r.pendingSafeUpdates={},r.skippedUpdates={},a.forEach(w=>{let P=w[r.identifierColumn],C=X({row:w,columnToPurposeName:r.columnToPurposeName,preferenceTopics:d,purposeSlugs:f}),T=h[P];if(s&&!T)throw new Error(`No existing consent record found for user with id: ${P}.
When 'forceTriggerWorkflows' is set all the user identifiers should contain a consent record`);if(T&&Oe({currentConsentRecord:T,pendingUpdates:C,preferenceTopics:d})&&!s){r.skippedUpdates[P]=w;return}if(T&&Fe({currentConsentRecord:T,pendingUpdates:C,preferenceTopics:d})){r.pendingConflictUpdates[P]={row:w,record:T};return}r.pendingSafeUpdates[P]=w}),n[p]=r,await o.setValue(n,"fileMetadata");let b=new Date().getTime();_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pre-processed file: "${p}" in ${(b-i)/1e3}s`))}var Ee=e.type({purpose:e.string,preference:e.union([e.string,e.null]),valueMapping:e.record(e.string,e.union([e.string,e.boolean,e.null,e.undefined]))}),Rr=e.record(e.string,Ee),ot=e.type({name:e.string,isUniqueOnPreferenceStore:e.boolean}),Tr=e.record(e.string,ot),nt=e.intersection([e.type({columnToPurposeName:e.record(e.string,Ee),lastFetchedAt:e.string,pendingSafeUpdates:e.record(e.string,e.record(e.string,e.string)),pendingConflictUpdates:e.record(e.string,e.type({record:_privacytypes.PreferenceQueryResponseItem,row:e.record(e.string,e.string)})),skippedUpdates:e.record(e.string,e.record(e.string,e.string))}),e.partial({identifierColumn:e.string,timestampColum:e.string})]),Sr=e.record(e.string,e.union([e.boolean,_privacytypes.PreferenceUpdateItem])),kr=e.record(e.string,e.union([e.boolean,e.record(e.string,e.string)])),Mr=e.record(e.string,e.type({uploadedAt:e.string,error:e.string,update:_privacytypes.PreferenceUpdateItem})),xr=e.record(e.string,e.type({record:_privacytypes.PreferenceQueryResponseItem,row:e.record(e.string,e.string)})),Ir=e.record(e.string,e.record(e.string,e.string)),Ve=e.type({fileMetadata:e.record(e.string,nt),failingUpdates:e.record(e.string,e.type({uploadedAt:e.string,error:e.string,update:_privacytypes.PreferenceUpdateItem})),pendingUpdates:e.record(e.string,_privacytypes.PreferenceUpdateItem)}),Or=e.type({records:e.array(e.type({anchorIdentifier:_privacytypes.PreferenceStoreIdentifier,timestamp:e.string}))}),Ae=e.intersection([e.type({records:e.array(e.intersection([e.type({success:e.boolean}),e.partial({errorMessage:e.string})])),failures:e.array(e.type({index:e.number,error:e.string}))}),e.partial({errors:e.array(e.string)})]),Qe=e.type({name:e.string,value:e.string});async function Wr({auth:p,sombraAuth:t,receiptFilepath:f,file:d,partition:m,isSilent:u=!0,dryRun:s=!1,skipWorkflowTriggers:o=!1,skipConflictUpdates:i=!1,skipExistingRecordCheck:n=!1,attributes:a=[],transcendUrl:r,forceTriggerWorkflows:l=!1}){let y=_chunkKYOQWIIScjs.tc.call(void 0, a),g=new (0, _persistedstate.PersistedState)(f,Ve,{fileMetadata:{},failingUpdates:{},pendingUpdates:{}}),h=g.getValue("failingUpdates"),b=g.getValue("pendingUpdates"),w=g.getValue("fileMetadata");_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Restored cache, there are:
${Object.values(h).length} failing requests to be retried
${Object.values(b).length} pending requests to be processed
The following files are stored in cache and will be used:
${Object.keys(w).map(S=>S).join(`
`)}
The following file will be processed: ${d}
-
`));let P=_chunkVWN5MN3Ucjs.zc.call(void 0, r,p),[C,T,me]=await Promise.all([_chunkVWN5MN3Ucjs.Ac.call(void 0, r,p,t),_chunkVWN5MN3Ucjs.id.call(void 0, P),_chunkVWN5MN3Ucjs.ed.call(void 0, P)]);await De({file:d,purposeSlugs:T.map(S=>S.trackingType),preferenceTopics:me,sombra:C,partitionKey:m,skipExistingRecordCheck:n,forceTriggerWorkflows:l},g);let j={};w=g.getValue("fileMetadata");let F=w[d];if(_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Found ${Object.entries(F.pendingSafeUpdates).length} safe updates in ${d}`)),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Found ${Object.entries(F.pendingConflictUpdates).length} conflict updates in ${d}`)),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Found ${Object.entries(F.skippedUpdates).length} skipped updates in ${d}`)),Object.entries({...F.pendingSafeUpdates,...i?{}:_typeutils.apply.call(void 0, F.pendingConflictUpdates,({row:S})=>S)}).forEach(([S,k])=>{let z=F.timestampColum===Z?new Date:new Date(k[F.timestampColum]),D=X({row:k,columnToPurposeName:F.columnToPurposeName,preferenceTopics:me,purposeSlugs:T.map(B=>B.trackingType)});j[S]={userId:S,partition:m,timestamp:z.toISOString(),purposes:Object.entries(D).map(([B,Ye])=>({...Ye,purpose:B,workflowSettings:{attributes:y,isSilent:u,skipWorkflowTrigger:o,...l?{forceTriggerWorkflow:l}:{}}}))}}),await g.setValue(j,"pendingUpdates"),await g.setValue({},"failingUpdates"),s){_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Dry run complete, exiting. ${Object.values(j).length} pending updates. Check file: ${f}`));return}_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Uploading ${Object.values(j).length} preferences to partition: ${m}`));let We=new Date().getTime(),oe=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),le=0,ne=Object.entries(j),_e=_chunkGJ6V5BHGcjs.b.call(void 0, ne,o?100:10);oe.start(ne.length,0),await _chunkVWN5MN3Ucjs.a.call(void 0, _e,async S=>{try{await C.put("v1/preferences",{json:{records:S.map(([,k])=>k),skipWorkflowTriggers:o}}).json()}catch(k){try{let D=JSON.parse(_optionalChain([k, 'optionalAccess', _5 => _5.response, 'optionalAccess', _6 => _6.body])||"{}");D.error&&_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Error: ${D.error}`))}catch (e2){}_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to upload ${S.length} user preferences to partition ${m}: ${_optionalChain([k, 'optionalAccess', _7 => _7.response, 'optionalAccess', _8 => _8.body])||_optionalChain([k, 'optionalAccess', _9 => _9.message])}`));let z=g.getValue("failingUpdates");S.forEach(([D,B])=>{z[D]={uploadedAt:new Date().toISOString(),update:B,error:_optionalChain([k, 'optionalAccess', _10 => _10.response, 'optionalAccess', _11 => _11.body])||_optionalChain([k, 'optionalAccess', _12 => _12.message])||"Unknown error"}}),await g.setValue(z,"failingUpdates")}le+=S.length,oe.update(le)},{concurrency:40}),oe.stop();let ze=new Date().getTime()-We;_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully uploaded ${ne.length} user preferences to partition ${m} in "${ze/1e3}" seconds!`))}function zr({identifiers:p=[],purposes:t=[],metadata:f=[],consentManagement:d={},system:m={decryptionStatus:"DECRYPTED"},...u}){let s={...u,...m,...d};if(Array.isArray(p)){let o=new Map;for(let{name:i,value:n}of p)o.has(i)||o.set(i,new Set),n&&o.get(i).add(n);for(let[i,n]of o.entries())s[i]=Array.from(n).join(",")}if(Array.isArray(f)&&(s.metadata=JSON.stringify(f.reduce((o,{key:i,value:n})=>(o[i]=n,o),{}))),Array.isArray(t)){for(let{purpose:o,preferences:i,enabled:n}of 
t)if(s[o]=!!n,Array.isArray(i))for(let{topic:a,choice:r}of i){let l=`${o}_${a}`,y=null;typeof r.booleanValue=="boolean"?y=r.booleanValue:r.selectValue?y=r.selectValue:Array.isArray(r.selectValues)?y=r.selectValues.filter(h=>h.length>0).join(","):y=null,s[l]=y}}return s}async function*te(p,t,f,d){let m;for(;;){let u={limit:d};f&&Object.keys(f).length&&(u.filter=f),m&&(u.cursor=m);let s=await x("Preference Query",()=>p.post(`v1/preferences/${t}/query`,{json:u}).json(),{onRetry:(n,a,r)=>{_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`Retry attempt ${n} for fetchConsentPreferences due to error: ${r}`))}}),{nodes:o,cursor:i}=_typeutils.decodeCodec.call(void 0, V,s);if(!_optionalChain([o, 'optionalAccess', _13 => _13.length])||(yield o,!i))break;m=i}}function re(p){return!!p.timestampAfter||!!p.timestampBefore?"timestamp":"updated"}function A(p,t){return p==="timestamp"?new Date(t.timestamp):_optionalChain([t, 'access', _14 => _14.system, 'optionalAccess', _15 => _15.updatedAt])?new Date(t.system.updatedAt):new Date}function je(p,t){if(p==="timestamp")return{after:t.timestampAfter?new Date(t.timestampAfter):void 0,before:t.timestampBefore?new Date(t.timestampBefore):void 0};let f=_nullishCoalesce(t.system, () => ({}));return{after:f.updatedAfter?new Date(f.updatedAfter):void 0,before:f.updatedBefore?new Date(f.updatedBefore):void 0}}function W(p,t,f){return p==="timestamp"?{...t,timestampBefore:_nullishCoalesce(f, () => (t.timestampBefore))}:{...t,system:{...t.system||{},...f?{updatedBefore:f}:{}},timestampAfter:void 0,timestampBefore:void 0}}async function _(p,t,f){_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Single-record probe with filter: ${JSON.stringify(f)}`));let m=await te(p,t,f,1).next();if(m.done||!m.value||m.value.length===0)return _chunkZUNVPK23cjs.a.info(_colors2.default.yellow("Probe result: no record")),null;let u=m.value[0];return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Probe result: found record at ${A(re(f),u).toISOString()}`)),u}async function Be(p,t){let{partition:f,mode:d,baseFilter:m,maxLookbackDays:u=3650}=t,s=await _(p,f,W(d,m));if(!s)return _chunkZUNVPK23cjs.a.info(_colors2.default.yellow("No records found; defaulting earliest day to today.")),_chunkVWN5MN3Ucjs.lg.call(void 0, new Date);let o=A(d,s);_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Newest instant: ${o.toISOString()}`));let i=[1,7,30],n=0,a=i[0]*864e5,r=o,l=null;for(;;){let w=n<i.length?new Date(o.getTime()-i[n]*864e5):new Date(o.getTime()-a);if((_chunkVWN5MN3Ucjs.lg.call(void 0, new Date).getTime()-_chunkVWN5MN3Ucjs.lg.call(void 0, w).getTime())/864e5>u){_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`Exponential jump exceeded maxLookbackDays=${u}. 
Using current bounds.`)),l=w;break}_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Probing before=${w.toISOString()} (jump step ${n<i.length?`${i[n]}d`:`${Math.round(a/864e5)}d`})\u2026`));let C=await _(p,f,W(d,m,w.toISOString()));if(C){r=A(d,C),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Found older record at ${r.toISOString()} \u2014 continue jumping back.`)),n<i.length-1?(n+=1,a=i[n]*864e5):n===i.length-1?(n+=1,a=i[i.length-1]*2*864e5):a*=2;continue}l=w,_chunkZUNVPK23cjs.a.info(_colors2.default.green(`No record before ${w.toISOString()} \u2014 established empty lower bound.`));break}l||(l=new Date(r.getTime()-864e5));let y=l,g=r,h=Math.max(864e5,Math.floor((g.getTime()-y.getTime())/64));_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Exponential forward-from-empty start: empty=${y.toISOString()} found=${g.toISOString()} step=${Math.round(h/864e5)}d`));for(let w=0;w<8;w+=1){let P=new Date(y.getTime()+h);if(P.getTime()>=g.getTime())break;_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Forward gallop probe before=${P.toISOString()}\u2026`));let C=await _(p,f,W(d,m,P.toISOString()));if(C?(g=A(d,C),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Gallop hit at ${g.toISOString()} \u2014 tightening found bound. Next step halves.`)),h=Math.max(864e5,Math.floor(h/2))):(y.setTime(P.getTime()),_chunkZUNVPK23cjs.a.info(_colors2.default.yellow(`Gallop miss \u2014 advancing empty bound to ${y.toISOString()}. Next step doubles.`)),h=Math.min(g.getTime()-y.getTime(),h*2),h<864e5&&(h=864e5)),g.getTime()-y.getTime()<=864e5)break}for(;g.getTime()-y.getTime()>864e5;){let w=new Date(y.getTime()+Math.floor((g.getTime()-y.getTime())/2));_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Binary probe before=${w.toISOString()}\u2026`));let P=await _(p,f,W(d,m,w.toISOString()));if(P){let C=A(d,P);_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Binary probe found record at ${C.toISOString()}.`)),g=C}else _chunkZUNVPK23cjs.a.info(_colors2.default.yellow("Binary probe found no record.")),y=w}let b=_chunkVWN5MN3Ucjs.lg.call(void 0, g);return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Earliest day (UTC) resolved to ${b.toISOString()} (instant \u2248 ${g.toISOString()}).`)),b}async function Ge(p,t){let{partition:f,mode:d,baseFilter:m}=t;_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("Latest-day discovery: probing newest record\u2026"));let u=await _(p,f,W(d,m));if(!u)return _chunkZUNVPK23cjs.a.info(_colors2.default.yellow("No records found at all; defaulting latest day to today.")),_chunkVWN5MN3Ucjs.lg.call(void 0, new Date);let s=A(d,u);_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Newest record instant is ${s.toISOString()}.`));let o=_chunkVWN5MN3Ucjs.lg.call(void 0, s);return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Latest day (UTC) resolved to ${o.toISOString()} from instant ${s.toISOString()}.`)),o}function qe(p,t,f,d=5e3){let m=Math.max(0,f.getTime()-t.getTime());if(m===0)return[];let u=new Date(Math.floor(t.getTime()/3e5)*3e5),s=Math.ceil(m/Math.max(1,d)),o=Math.max(3e5,s),i=Math.ceil((f.getTime()-u.getTime())/o),n=[];for(let a=0;a<i;a+=1){let r=u.getTime()+a*o,y=Math.min(f.getTime(),r+o)-1,g=Math.max(r,y),h=new Date(r).toISOString(),b=new Date(g).toISOString();p==="timestamp"?n.push({timestampAfter:h,timestampBefore:b}):n.push({system:{updatedAfter:h,updatedBefore:b}})}return n}function ft(p,t,f){return p==="timestamp"?{...t,timestampAfter:_nullishCoalesce(f.timestampAfter, () => (t.timestampAfter)),timestampBefore:_nullishCoalesce(f.timestampBefore, () => 
(t.timestampBefore)),system:void 0}:{...t,system:{...t.system||{},..._optionalChain([f, 'access', _16 => _16.system, 'optionalAccess', _17 => _17.updatedAfter])?{updatedAfter:f.system.updatedAfter}:{},..._optionalChain([f, 'access', _18 => _18.system, 'optionalAccess', _19 => _19.updatedBefore])?{updatedBefore:f.system.updatedBefore}:{}},timestampAfter:void 0,timestampBefore:void 0}}async function Ro(p,{partition:t,filterBy:f={},limit:d=50,windowConcurrency:m=25,maxChunks:u=5e3,maxLookbackDays:s=3650,onItems:o}){let i=re(f);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Fetching consent preferences in chunks by ${i==="timestamp"?"timestamp":"system.updatedAt"}...`));let{after:n,before:a}=je(i,f);if(_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Initial bounds: after=${_nullishCoalesce(_optionalChain([n, 'optionalAccess', _20 => _20.toISOString, 'call', _21 => _21()]), () => ("undefined"))} before=${_nullishCoalesce(_optionalChain([a, 'optionalAccess', _22 => _22.toISOString, 'call', _23 => _23()]), () => ("undefined"))}`)),(!n||!a)&&(n||(_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Discovering earliest day with data for partition ${t}...`)),n=await Be(p,{partition:t,mode:i,baseFilter:f,maxLookbackDays:s}),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Discovered earliest day with data: ${n.toISOString()}`))),!a)){_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Discovering latest day with data for partition ${t}...`));let P=await Ge(p,{partition:t,mode:i,baseFilter:f,earliest:n});a=_chunkVWN5MN3Ucjs.og.call(void 0, P,1),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Discovered latest day with data: ${P.toISOString()}`))}_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Final bounds (UTC): after=${n.toISOString()} before=${a.toISOString()}`));let r=qe(i,n,a,u);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Fetching consent preferences from partition ${t} in ${r.length} chunks...`));let l=new _cliprogress2.default.SingleBar({format:"Downloading [{bar}] {percentage}% | chunks {value}/{total} | fetched {fetched}"},_cliprogress2.default.Presets.shades_classic),y=0,g=0;l.start(r.length,0,{fetched:g});let h=Date.now(),b=_chunkVWN5MN3Ucjs.kg.call(void 0, d),w=[];return await _chunkVWN5MN3Ucjs.a.call(void 0, r.map((P,C)=>({windowFilter:P,idx:C})),async({windowFilter:P})=>{let C=ft(i,f,P);for await(let T of te(p,t,C,b))g+=T.length,l.update(y,{fetched:g}),o?await o(T):w.push(...T);y+=1,l.update(y,{fetched:g})},{concurrency:Math.max(1,m)}),l.update(y,{fetched:g}),l.stop(),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Fetched ${g} consent preference records from partition ${t} in ${(Date.now()-h)/1e3}s.`)),o?[]:w}async function Fo(p,{partition:t,filterBy:f={},limit:d=50,onItems:m}){let u=[],s,o=f&&(Object.keys(f).length>0||f.system&&Object.keys(f.system).length>0),i=Math.max(1,Math.min(50,_nullishCoalesce(d, () => (50))));for(;;){let n={limit:i};o&&(n.filter=f),s&&(n.cursor=s);let a=await x("Preference Query",()=>p.post(`v1/preferences/${t}/query`,{json:n}).json(),{onRetry:(y,g,h)=>{_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`Retry attempt ${y} for fetchConsentPreferences due to error: ${h}`))}}),{nodes:r,cursor:l}=_typeutils.decodeCodec.call(void 0, V,a);if(!r||r.length===0||(m?await m(r):u.push(...r),!l))break;s=l}return m?[]:u}async function ut(p,{partition:t,identifierChunk:f,timestamp:d}){try{let m=await x("Delete Preference 
Records",()=>p.post(`v1/preferences/${t}/delete`,{json:{records:f.map(s=>({anchorIdentifier:s,timestamp:d.toISOString()}))}}).json(),{maxAttempts:3,onRetry:(s,o,i)=>{_chunkZUNVPK23cjs.a.debug(_colors2.default.yellow(`Attempt ${s} to delete preference records failed: ${i}`))}}),{failures:u}=_typeutils.decodeCodec.call(void 0, Ae,m);return u.length>0?u.map(({index:s,error:o})=>({...f[s],error:o})):[]}catch(m){return f.map(u=>({...u,error:m.message}))}}async function Bo(p,{partition:t,filePath:f,timestamp:d,maxItemsInChunk:m,maxConcurrency:u}){let s=_chunkVWN5MN3Ucjs.uc.call(void 0, f,Qe),o=_chunkGJ6V5BHGcjs.b.call(void 0, s,m);return(await _chunkVWN5MN3Ucjs.a.call(void 0, o,async n=>await ut(p,{partition:t,identifierChunk:n,timestamp:d}),{concurrency:u})).flat()}exports.a = Wr; exports.b = zr; exports.c = Ro; exports.d = Fo; exports.e = Bo;
-
//# sourceMappingURL=chunk-
+
`));let P=_chunkKYOQWIIScjs.zc.call(void 0, r,p),[C,T,me]=await Promise.all([_chunkKYOQWIIScjs.Ac.call(void 0, r,p,t),_chunkKYOQWIIScjs.id.call(void 0, P),_chunkKYOQWIIScjs.ed.call(void 0, P)]);await De({file:d,purposeSlugs:T.map(S=>S.trackingType),preferenceTopics:me,sombra:C,partitionKey:m,skipExistingRecordCheck:n,forceTriggerWorkflows:l},g);let j={};w=g.getValue("fileMetadata");let F=w[d];if(_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Found ${Object.entries(F.pendingSafeUpdates).length} safe updates in ${d}`)),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Found ${Object.entries(F.pendingConflictUpdates).length} conflict updates in ${d}`)),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Found ${Object.entries(F.skippedUpdates).length} skipped updates in ${d}`)),Object.entries({...F.pendingSafeUpdates,...i?{}:_typeutils.apply.call(void 0, F.pendingConflictUpdates,({row:S})=>S)}).forEach(([S,k])=>{let z=F.timestampColum===Z?new Date:new Date(k[F.timestampColum]),D=X({row:k,columnToPurposeName:F.columnToPurposeName,preferenceTopics:me,purposeSlugs:T.map(B=>B.trackingType)});j[S]={userId:S,partition:m,timestamp:z.toISOString(),purposes:Object.entries(D).map(([B,Ye])=>({...Ye,purpose:B,workflowSettings:{attributes:y,isSilent:u,skipWorkflowTrigger:o,...l?{forceTriggerWorkflow:l}:{}}}))}}),await g.setValue(j,"pendingUpdates"),await g.setValue({},"failingUpdates"),s){_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Dry run complete, exiting. ${Object.values(j).length} pending updates. Check file: ${f}`));return}_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Uploading ${Object.values(j).length} preferences to partition: ${m}`));let We=new Date().getTime(),oe=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),le=0,ne=Object.entries(j),_e=_chunk5MG2CEZVcjs.b.call(void 0, ne,o?100:10);oe.start(ne.length,0),await _chunkKYOQWIIScjs.a.call(void 0, _e,async S=>{try{await C.put("v1/preferences",{json:{records:S.map(([,k])=>k),skipWorkflowTriggers:o}}).json()}catch(k){try{let D=JSON.parse(_optionalChain([k, 'optionalAccess', _5 => _5.response, 'optionalAccess', _6 => _6.body])||"{}");D.error&&_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Error: ${D.error}`))}catch (e2){}_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to upload ${S.length} user preferences to partition ${m}: ${_optionalChain([k, 'optionalAccess', _7 => _7.response, 'optionalAccess', _8 => _8.body])||_optionalChain([k, 'optionalAccess', _9 => _9.message])}`));let z=g.getValue("failingUpdates");S.forEach(([D,B])=>{z[D]={uploadedAt:new Date().toISOString(),update:B,error:_optionalChain([k, 'optionalAccess', _10 => _10.response, 'optionalAccess', _11 => _11.body])||_optionalChain([k, 'optionalAccess', _12 => _12.message])||"Unknown error"}}),await g.setValue(z,"failingUpdates")}le+=S.length,oe.update(le)},{concurrency:40}),oe.stop();let ze=new Date().getTime()-We;_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully uploaded ${ne.length} user preferences to partition ${m} in "${ze/1e3}" seconds!`))}function zr({identifiers:p=[],purposes:t=[],metadata:f=[],consentManagement:d={},system:m={decryptionStatus:"DECRYPTED"},...u}){let s={...u,...m,...d};if(Array.isArray(p)){let o=new Map;for(let{name:i,value:n}of p)o.has(i)||o.set(i,new Set),n&&o.get(i).add(n);for(let[i,n]of o.entries())s[i]=Array.from(n).join(",")}if(Array.isArray(f)&&(s.metadata=JSON.stringify(f.reduce((o,{key:i,value:n})=>(o[i]=n,o),{}))),Array.isArray(t)){for(let{purpose:o,preferences:i,enabled:n}of 
t)if(s[o]=!!n,Array.isArray(i))for(let{topic:a,choice:r}of i){let l=`${o}_${a}`,y=null;typeof r.booleanValue=="boolean"?y=r.booleanValue:r.selectValue?y=r.selectValue:Array.isArray(r.selectValues)?y=r.selectValues.filter(h=>h.length>0).join(","):y=null,s[l]=y}}return s}async function*te(p,t,f,d){let m;for(;;){let u={limit:d};f&&Object.keys(f).length&&(u.filter=f),m&&(u.cursor=m);let s=await x("Preference Query",()=>p.post(`v1/preferences/${t}/query`,{json:u}).json(),{onRetry:(n,a,r)=>{_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`Retry attempt ${n} for fetchConsentPreferences due to error: ${r}`))}}),{nodes:o,cursor:i}=_typeutils.decodeCodec.call(void 0, V,s);if(!_optionalChain([o, 'optionalAccess', _13 => _13.length])||(yield o,!i))break;m=i}}function re(p){return!!p.timestampAfter||!!p.timestampBefore?"timestamp":"updated"}function A(p,t){return p==="timestamp"?new Date(t.timestamp):_optionalChain([t, 'access', _14 => _14.system, 'optionalAccess', _15 => _15.updatedAt])?new Date(t.system.updatedAt):new Date}function je(p,t){if(p==="timestamp")return{after:t.timestampAfter?new Date(t.timestampAfter):void 0,before:t.timestampBefore?new Date(t.timestampBefore):void 0};let f=_nullishCoalesce(t.system, () => ({}));return{after:f.updatedAfter?new Date(f.updatedAfter):void 0,before:f.updatedBefore?new Date(f.updatedBefore):void 0}}function W(p,t,f){return p==="timestamp"?{...t,timestampBefore:_nullishCoalesce(f, () => (t.timestampBefore))}:{...t,system:{...t.system||{},...f?{updatedBefore:f}:{}},timestampAfter:void 0,timestampBefore:void 0}}async function _(p,t,f){_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Single-record probe with filter: ${JSON.stringify(f)}`));let m=await te(p,t,f,1).next();if(m.done||!m.value||m.value.length===0)return _chunkZUNVPK23cjs.a.info(_colors2.default.yellow("Probe result: no record")),null;let u=m.value[0];return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Probe result: found record at ${A(re(f),u).toISOString()}`)),u}async function Be(p,t){let{partition:f,mode:d,baseFilter:m,maxLookbackDays:u=3650}=t,s=await _(p,f,W(d,m));if(!s)return _chunkZUNVPK23cjs.a.info(_colors2.default.yellow("No records found; defaulting earliest day to today.")),_chunkKYOQWIIScjs.lg.call(void 0, new Date);let o=A(d,s);_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Newest instant: ${o.toISOString()}`));let i=[1,7,30],n=0,a=i[0]*864e5,r=o,l=null;for(;;){let w=n<i.length?new Date(o.getTime()-i[n]*864e5):new Date(o.getTime()-a);if((_chunkKYOQWIIScjs.lg.call(void 0, new Date).getTime()-_chunkKYOQWIIScjs.lg.call(void 0, w).getTime())/864e5>u){_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`Exponential jump exceeded maxLookbackDays=${u}. 
Using current bounds.`)),l=w;break}_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Probing before=${w.toISOString()} (jump step ${n<i.length?`${i[n]}d`:`${Math.round(a/864e5)}d`})\u2026`));let C=await _(p,f,W(d,m,w.toISOString()));if(C){r=A(d,C),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Found older record at ${r.toISOString()} \u2014 continue jumping back.`)),n<i.length-1?(n+=1,a=i[n]*864e5):n===i.length-1?(n+=1,a=i[i.length-1]*2*864e5):a*=2;continue}l=w,_chunkZUNVPK23cjs.a.info(_colors2.default.green(`No record before ${w.toISOString()} \u2014 established empty lower bound.`));break}l||(l=new Date(r.getTime()-864e5));let y=l,g=r,h=Math.max(864e5,Math.floor((g.getTime()-y.getTime())/64));_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Exponential forward-from-empty start: empty=${y.toISOString()} found=${g.toISOString()} step=${Math.round(h/864e5)}d`));for(let w=0;w<8;w+=1){let P=new Date(y.getTime()+h);if(P.getTime()>=g.getTime())break;_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Forward gallop probe before=${P.toISOString()}\u2026`));let C=await _(p,f,W(d,m,P.toISOString()));if(C?(g=A(d,C),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Gallop hit at ${g.toISOString()} \u2014 tightening found bound. Next step halves.`)),h=Math.max(864e5,Math.floor(h/2))):(y.setTime(P.getTime()),_chunkZUNVPK23cjs.a.info(_colors2.default.yellow(`Gallop miss \u2014 advancing empty bound to ${y.toISOString()}. Next step doubles.`)),h=Math.min(g.getTime()-y.getTime(),h*2),h<864e5&&(h=864e5)),g.getTime()-y.getTime()<=864e5)break}for(;g.getTime()-y.getTime()>864e5;){let w=new Date(y.getTime()+Math.floor((g.getTime()-y.getTime())/2));_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Binary probe before=${w.toISOString()}\u2026`));let P=await _(p,f,W(d,m,w.toISOString()));if(P){let C=A(d,P);_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Binary probe found record at ${C.toISOString()}.`)),g=C}else _chunkZUNVPK23cjs.a.info(_colors2.default.yellow("Binary probe found no record.")),y=w}let b=_chunkKYOQWIIScjs.lg.call(void 0, g);return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Earliest day (UTC) resolved to ${b.toISOString()} (instant \u2248 ${g.toISOString()}).`)),b}async function Ge(p,t){let{partition:f,mode:d,baseFilter:m}=t;_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("Latest-day discovery: probing newest record\u2026"));let u=await _(p,f,W(d,m));if(!u)return _chunkZUNVPK23cjs.a.info(_colors2.default.yellow("No records found at all; defaulting latest day to today.")),_chunkKYOQWIIScjs.lg.call(void 0, new Date);let s=A(d,u);_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Newest record instant is ${s.toISOString()}.`));let o=_chunkKYOQWIIScjs.lg.call(void 0, s);return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Latest day (UTC) resolved to ${o.toISOString()} from instant ${s.toISOString()}.`)),o}function qe(p,t,f,d=5e3){let m=Math.max(0,f.getTime()-t.getTime());if(m===0)return[];let u=new Date(Math.floor(t.getTime()/3e5)*3e5),s=Math.ceil(m/Math.max(1,d)),o=Math.max(3e5,s),i=Math.ceil((f.getTime()-u.getTime())/o),n=[];for(let a=0;a<i;a+=1){let r=u.getTime()+a*o,y=Math.min(f.getTime(),r+o)-1,g=Math.max(r,y),h=new Date(r).toISOString(),b=new Date(g).toISOString();p==="timestamp"?n.push({timestampAfter:h,timestampBefore:b}):n.push({system:{updatedAfter:h,updatedBefore:b}})}return n}function ft(p,t,f){return p==="timestamp"?{...t,timestampAfter:_nullishCoalesce(f.timestampAfter, () => (t.timestampAfter)),timestampBefore:_nullishCoalesce(f.timestampBefore, () => 
(t.timestampBefore)),system:void 0}:{...t,system:{...t.system||{},..._optionalChain([f, 'access', _16 => _16.system, 'optionalAccess', _17 => _17.updatedAfter])?{updatedAfter:f.system.updatedAfter}:{},..._optionalChain([f, 'access', _18 => _18.system, 'optionalAccess', _19 => _19.updatedBefore])?{updatedBefore:f.system.updatedBefore}:{}},timestampAfter:void 0,timestampBefore:void 0}}async function Ro(p,{partition:t,filterBy:f={},limit:d=50,windowConcurrency:m=25,maxChunks:u=5e3,maxLookbackDays:s=3650,onItems:o}){let i=re(f);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Fetching consent preferences in chunks by ${i==="timestamp"?"timestamp":"system.updatedAt"}...`));let{after:n,before:a}=je(i,f);if(_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Initial bounds: after=${_nullishCoalesce(_optionalChain([n, 'optionalAccess', _20 => _20.toISOString, 'call', _21 => _21()]), () => ("undefined"))} before=${_nullishCoalesce(_optionalChain([a, 'optionalAccess', _22 => _22.toISOString, 'call', _23 => _23()]), () => ("undefined"))}`)),(!n||!a)&&(n||(_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Discovering earliest day with data for partition ${t}...`)),n=await Be(p,{partition:t,mode:i,baseFilter:f,maxLookbackDays:s}),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Discovered earliest day with data: ${n.toISOString()}`))),!a)){_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Discovering latest day with data for partition ${t}...`));let P=await Ge(p,{partition:t,mode:i,baseFilter:f,earliest:n});a=_chunkKYOQWIIScjs.og.call(void 0, P,1),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Discovered latest day with data: ${P.toISOString()}`))}_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Final bounds (UTC): after=${n.toISOString()} before=${a.toISOString()}`));let r=qe(i,n,a,u);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Fetching consent preferences from partition ${t} in ${r.length} chunks...`));let l=new _cliprogress2.default.SingleBar({format:"Downloading [{bar}] {percentage}% | chunks {value}/{total} | fetched {fetched}"},_cliprogress2.default.Presets.shades_classic),y=0,g=0;l.start(r.length,0,{fetched:g});let h=Date.now(),b=_chunkKYOQWIIScjs.kg.call(void 0, d),w=[];return await _chunkKYOQWIIScjs.a.call(void 0, r.map((P,C)=>({windowFilter:P,idx:C})),async({windowFilter:P})=>{let C=ft(i,f,P);for await(let T of te(p,t,C,b))g+=T.length,l.update(y,{fetched:g}),o?await o(T):w.push(...T);y+=1,l.update(y,{fetched:g})},{concurrency:Math.max(1,m)}),l.update(y,{fetched:g}),l.stop(),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Fetched ${g} consent preference records from partition ${t} in ${(Date.now()-h)/1e3}s.`)),o?[]:w}async function Fo(p,{partition:t,filterBy:f={},limit:d=50,onItems:m}){let u=[],s,o=f&&(Object.keys(f).length>0||f.system&&Object.keys(f.system).length>0),i=Math.max(1,Math.min(50,_nullishCoalesce(d, () => (50))));for(;;){let n={limit:i};o&&(n.filter=f),s&&(n.cursor=s);let a=await x("Preference Query",()=>p.post(`v1/preferences/${t}/query`,{json:n}).json(),{onRetry:(y,g,h)=>{_chunkZUNVPK23cjs.a.warn(_colors2.default.yellow(`Retry attempt ${y} for fetchConsentPreferences due to error: ${h}`))}}),{nodes:r,cursor:l}=_typeutils.decodeCodec.call(void 0, V,a);if(!r||r.length===0||(m?await m(r):u.push(...r),!l))break;s=l}return m?[]:u}async function ut(p,{partition:t,identifierChunk:f,timestamp:d}){try{let m=await x("Delete Preference 
Records",()=>p.post(`v1/preferences/${t}/delete`,{json:{records:f.map(s=>({anchorIdentifier:s,timestamp:d.toISOString()}))}}).json(),{maxAttempts:3,onRetry:(s,o,i)=>{_chunkZUNVPK23cjs.a.debug(_colors2.default.yellow(`Attempt ${s} to delete preference records failed: ${i}`))}}),{failures:u}=_typeutils.decodeCodec.call(void 0, Ae,m);return u.length>0?u.map(({index:s,error:o})=>({...f[s],error:o})):[]}catch(m){return f.map(u=>({...u,error:m.message}))}}async function Bo(p,{partition:t,filePath:f,timestamp:d,maxItemsInChunk:m,maxConcurrency:u}){let s=_chunkKYOQWIIScjs.uc.call(void 0, f,Qe),o=_chunk5MG2CEZVcjs.b.call(void 0, s,m);return(await _chunkKYOQWIIScjs.a.call(void 0, o,async n=>await ut(p,{partition:t,identifierChunk:n,timestamp:d}),{concurrency:u})).flat()}exports.a = Wr; exports.b = zr; exports.c = Ro; exports.d = Fo; exports.e = Bo;
+
//# sourceMappingURL=chunk-EMLESF76.cjs.map
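The renamed chunk above bundles the interactive preference-upload command together with a `withPreferenceRetry` helper; its original TypeScript source is embedded in the source map diffed below (retry only on a short list of known-transient error messages, exponential backoff with jitter, capped attempts). The following is a minimal standalone sketch of that retry pattern for orientation only — the `retryWithBackoff` name and the trimmed error list are illustrative assumptions, not the package's actual exports.

```ts
// Illustrative sketch, not the package's API. Mirrors the retry behaviour
// documented in the bundled withPreferenceRetry source embedded in the map
// below: retry only on known-transient messages, backoff = baseDelayMs *
// 2 ** (attempt - 1) plus random jitter.
const TRANSIENT = ['ENOTFOUND', 'ECONNRESET', 'ETIMEDOUT', '429'].map((s) =>
  s.toLowerCase(),
);

async function retryWithBackoff<T>(
  fn: () => Promise<T>,
  maxAttempts = 3,
  baseDelayMs = 250,
): Promise<T> {
  let attempt = 0;
  // eslint-disable-next-line no-constant-condition
  while (true) {
    attempt += 1;
    try {
      return await fn();
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      const retryable = TRANSIENT.some((m) => msg.toLowerCase().includes(m));
      if (attempt >= maxAttempts || !retryable) {
        throw new Error(`failed after ${attempt} attempt(s): ${msg}`);
      }
      // Exponential backoff with jitter before the next attempt.
      const delay =
        baseDelayMs * 2 ** (attempt - 1) +
        Math.floor(Math.random() * baseDelayMs);
      await new Promise((resolve) => setTimeout(resolve, delay));
    }
  }
}
```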
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-LE6FS55Q.cjs","../src/lib/preference-management/uploadPreferenceManagementPreferencesInteractive.ts","../src/lib/preference-management/parsePreferenceManagementCsv.ts","../src/lib/preference-management/getPreferencesForIdentifiers.ts","../src/lib/preference-management/withPreferenceRetry.ts","../src/lib/preference-management/parsePreferenceIdentifiersFromCsv.ts","../src/lib/preference-management/parsePreferenceAndPurposeValuesFromCsv.ts","../src/lib/preference-management/codecs.ts"],"names":["RETRY_PREFERENCE_MSGS","s","withPreferenceRetry","name","fn","maxAttempts","baseDelayMs","isRetryable","_err","msg","m","onRetry","attempt","err"],"mappings":"AAAA,2lCAAwK,wDAAyC,wDAAuE,gFCMrQ,qGAIK,+DAEO,qJCTZ,qDCES,ICGfA,EAAAA,CAAkC,CAC7C,WAAA,CACA,YAAA,CACA,WAAA,CACA,iBAAA,CACA,sBAAA,CACA,KAAA,CACA,qBAAA,CACA,sBAAA,CACA,uBACF,CAAA,CAAE,GAAA,CAAKC,CAAAA,EAAMA,CAAAA,CAAE,WAAA,CAAY,CAAC,CAAA,CAyB5B,MAAA,SAAsBC,CAAAA,CACpBC,CAAAA,CACAC,CAAAA,CACA,CACE,WAAA,CAAAC,CAAAA,CAAc,CAAA,CACd,WAAA,CAAAC,CAAAA,CAAc,GAAA,CACd,WAAA,CAAAC,CAAAA,CAAc,CAACC,CAAAA,CAAMC,CAAAA,CAAAA,EACnBT,EAAAA,CAAsB,IAAA,CAAMU,CAAAA,EAAMD,CAAAA,CAAI,WAAA,CAAY,CAAA,CAAE,QAAA,CAASC,CAAC,CAAC,CAAA,CACjE,OAAA,CAAAC,CACF,CAAA,CAAkB,CAAC,CAAA,CACP,CACZ,IAAIC,CAAAA,CAAU,CAAA,CAEd,GAAA,CAAA,CAAA,CAAA,CAAa,CACXA,CAAAA,EAAW,CAAA,CACX,GAAI,CACF,OAAO,MAAMR,CAAAA,CAAG,CAElB,CAAA,KAAA,CAASS,CAAAA,CAAU,CACjB,IAAMJ,CAAAA,kBAAAA,CACHI,CAAAA,EAAAA,iBAAQA,CAAAA,qBAAI,QAAA,6BAAU,MAAA,EAAQA,CAAAA,CAAI,OAAA,CAAA,CAAA,SACnC,MAAA,kBAAOA,CAAAA,SAAO,iBAAe,GAAA,CAE/B,EAAA,CAAI,CAAA,CADcD,CAAAA,CAAUP,CAAAA,EAAeE,CAAAA,CAAYM,CAAAA,CAAKJ,CAAG,CAAA,CAAA,CAE7D,MAAM,IAAI,KAAA,CAAM,CAAA,EAAA;ACuBX;ACbM;ACkBO,oGAAA;ANPlB;AAGA;AAAA;AAKQ;AAAK;AACgC,sCAAA;AAwFrC","file":"/home/runner/work/cli/cli/dist/chunk-LE6FS55Q.cjs","sourcesContent":[null,"import {\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchAllPurposes,\n fetchAllPreferenceTopics,\n} from '../graphql';\nimport colors from 'colors';\nimport { map } from '../bluebird';\nimport { chunk } from 'lodash-es';\nimport { logger } from '../../logger';\nimport cliProgress from 'cli-progress';\nimport { parseAttributesFromString } from '../requests';\nimport { PersistedState } from '@transcend-io/persisted-state';\nimport { parsePreferenceManagementCsvWithCache } from './parsePreferenceManagementCsv';\nimport { PreferenceState } from './codecs';\nimport { PreferenceUpdateItem } from '@transcend-io/privacy-types';\nimport { apply } from '@transcend-io/type-utils';\nimport { NONE_PREFERENCE_MAP } from './parsePreferenceTimestampsFromCsv';\nimport { getPreferenceUpdatesFromRow } from './getPreferenceUpdatesFromRow';\n\n/**\n * Upload a set of consent preferences\n *\n * @param options - Options\n */\nexport async function uploadPreferenceManagementPreferencesInteractive({\n auth,\n sombraAuth,\n receiptFilepath,\n file,\n partition,\n isSilent = true,\n dryRun = false,\n skipWorkflowTriggers = false,\n skipConflictUpdates = false,\n skipExistingRecordCheck = false,\n attributes = [],\n transcendUrl,\n forceTriggerWorkflows = false,\n}: {\n /** The Transcend API key */\n auth: string;\n /** Sombra API key authentication */\n sombraAuth?: string;\n /** Partition key */\n partition: string;\n /** File where to store receipt and continue from where left off */\n receiptFilepath: string;\n /** The file to process */\n file: string;\n /** API URL for Transcend backend */\n transcendUrl: string;\n /** Whether to do a dry run */\n dryRun?: boolean;\n /** 
Whether to upload as isSilent */\n isSilent?: boolean;\n /** Attributes string pre-parse. In format Key:Value */\n attributes?: string[];\n /** Skip workflow triggers */\n skipWorkflowTriggers?: boolean;\n /**\n * When true, only update preferences that do not conflict with existing\n * preferences. When false, update all preferences in CSV based on timestamp.\n */\n skipConflictUpdates?: boolean;\n /** Whether to skip the check for existing records. SHOULD ONLY BE USED FOR INITIAL UPLOAD */\n skipExistingRecordCheck?: boolean;\n /** Whether to force trigger workflows */\n forceTriggerWorkflows?: boolean;\n}): Promise<void> {\n // Parse out the extra attributes to apply to all requests uploaded\n const parsedAttributes = parseAttributesFromString(attributes);\n\n // Create a new state file to store the requests from this run\n const preferenceState = new PersistedState(receiptFilepath, PreferenceState, {\n fileMetadata: {},\n failingUpdates: {},\n pendingUpdates: {},\n });\n const failingRequests = preferenceState.getValue('failingUpdates');\n const pendingRequests = preferenceState.getValue('pendingUpdates');\n let fileMetadata = preferenceState.getValue('fileMetadata');\n\n logger.info(\n colors.magenta(\n 'Restored cache, there are: \\n' +\n `${\n Object.values(failingRequests).length\n } failing requests to be retried\\n` +\n `${\n Object.values(pendingRequests).length\n } pending requests to be processed\\n` +\n `The following files are stored in cache and will be used:\\n${Object.keys(\n fileMetadata,\n )\n .map((x) => x)\n .join('\\n')}\\n` +\n `The following file will be processed: ${file}\\n`,\n ),\n );\n\n // Create GraphQL client to connect to Transcend backend\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const [sombra, purposes, preferenceTopics] = await Promise.all([\n // Create sombra instance to communicate with\n createSombraGotInstance(transcendUrl, auth, sombraAuth),\n // get all purposes and topics\n fetchAllPurposes(client),\n fetchAllPreferenceTopics(client),\n ]);\n\n // Process the file\n await parsePreferenceManagementCsvWithCache(\n {\n file,\n purposeSlugs: purposes.map((x) => x.trackingType),\n preferenceTopics,\n sombra,\n partitionKey: partition,\n skipExistingRecordCheck,\n forceTriggerWorkflows,\n },\n preferenceState,\n );\n\n // Construct the pending updates\n const pendingUpdates: Record<string, PreferenceUpdateItem> = {};\n fileMetadata = preferenceState.getValue('fileMetadata');\n const metadata = fileMetadata[file];\n\n logger.info(\n colors.magenta(\n `Found ${\n Object.entries(metadata.pendingSafeUpdates).length\n } safe updates in ${file}`,\n ),\n );\n logger.info(\n colors.magenta(\n `Found ${\n Object.entries(metadata.pendingConflictUpdates).length\n } conflict updates in ${file}`,\n ),\n );\n logger.info(\n colors.magenta(\n `Found ${\n Object.entries(metadata.skippedUpdates).length\n } skipped updates in ${file}`,\n ),\n );\n\n // Update either safe updates only or safe + conflict\n Object.entries({\n ...metadata.pendingSafeUpdates,\n ...(skipConflictUpdates\n ? {}\n : apply(metadata.pendingConflictUpdates, ({ row }) => row)),\n }).forEach(([userId, update]) => {\n // Determine timestamp\n const timestamp =\n metadata.timestampColum === NONE_PREFERENCE_MAP\n ? 
new Date()\n : new Date(update[metadata.timestampColum!]);\n\n // Determine updates\n const updates = getPreferenceUpdatesFromRow({\n row: update,\n columnToPurposeName: metadata.columnToPurposeName,\n preferenceTopics,\n purposeSlugs: purposes.map((x) => x.trackingType),\n });\n pendingUpdates[userId] = {\n userId,\n partition,\n timestamp: timestamp.toISOString(),\n purposes: Object.entries(updates).map(([purpose, value]) => ({\n ...value,\n purpose,\n workflowSettings: {\n attributes: parsedAttributes,\n isSilent,\n skipWorkflowTrigger: skipWorkflowTriggers,\n ...(forceTriggerWorkflows\n ? { forceTriggerWorkflow: forceTriggerWorkflows }\n : {}),\n },\n })),\n };\n });\n await preferenceState.setValue(pendingUpdates, 'pendingUpdates');\n await preferenceState.setValue({}, 'failingUpdates');\n\n // Exist early if dry run\n if (dryRun) {\n logger.info(\n colors.green(\n `Dry run complete, exiting. ${\n Object.values(pendingUpdates).length\n } pending updates. Check file: ${receiptFilepath}`,\n ),\n );\n return;\n }\n\n logger.info(\n colors.magenta(\n `Uploading ${\n Object.values(pendingUpdates).length\n } preferences to partition: ${partition}`,\n ),\n );\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Build a GraphQL client\n let total = 0;\n const updatesToRun = Object.entries(pendingUpdates);\n const chunkedUpdates = chunk(updatesToRun, skipWorkflowTriggers ? 100 : 10);\n progressBar.start(updatesToRun.length, 0);\n await map(\n chunkedUpdates,\n async (currentChunk) => {\n // Make the request\n try {\n await sombra\n .put('v1/preferences', {\n json: {\n records: currentChunk.map(([, update]) => update),\n skipWorkflowTriggers,\n },\n })\n .json();\n } catch (err) {\n try {\n const parsed = JSON.parse(err?.response?.body || '{}');\n if (parsed.error) {\n logger.error(colors.red(`Error: ${parsed.error}`));\n }\n } catch (e) {\n // continue\n }\n logger.error(\n colors.red(\n `Failed to upload ${\n currentChunk.length\n } user preferences to partition ${partition}: ${\n err?.response?.body || err?.message\n }`,\n ),\n );\n const failingUpdates = preferenceState.getValue('failingUpdates');\n currentChunk.forEach(([userId, update]) => {\n failingUpdates[userId] = {\n uploadedAt: new Date().toISOString(),\n update,\n error: err?.response?.body || err?.message || 'Unknown error',\n };\n });\n await preferenceState.setValue(failingUpdates, 'failingUpdates');\n }\n\n total += currentChunk.length;\n progressBar.update(total);\n },\n {\n concurrency: 40,\n },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n logger.info(\n colors.green(\n `Successfully uploaded ${\n updatesToRun.length\n } user preferences to partition ${partition} in \"${\n totalTime / 1000\n }\" seconds!`,\n ),\n );\n}\n","import { PersistedState } from '@transcend-io/persisted-state';\nimport type { Got } from 'got';\nimport { keyBy } from 'lodash-es';\nimport * as t from 'io-ts';\nimport colors from 'colors';\nimport { FileMetadataState, PreferenceState } from './codecs';\nimport { logger } from '../../logger';\nimport { readCsv } from '../requests';\nimport { getPreferencesForIdentifiers } from './getPreferencesForIdentifiers';\nimport { PreferenceTopic } from '../graphql';\nimport { getPreferenceUpdatesFromRow } from './getPreferenceUpdatesFromRow';\nimport { parsePreferenceTimestampsFromCsv } 
from './parsePreferenceTimestampsFromCsv';\nimport { parsePreferenceIdentifiersFromCsv } from './parsePreferenceIdentifiersFromCsv';\nimport { parsePreferenceAndPurposeValuesFromCsv } from './parsePreferenceAndPurposeValuesFromCsv';\nimport { checkIfPendingPreferenceUpdatesAreNoOp } from './checkIfPendingPreferenceUpdatesAreNoOp';\nimport { checkIfPendingPreferenceUpdatesCauseConflict } from './checkIfPendingPreferenceUpdatesCauseConflict';\n\n/**\n * Parse a file into the cache\n *\n *\n * @param options - Options\n * @param cache - The cache to store the parsed file in\n * @returns The cache with the parsed file\n */\nexport async function parsePreferenceManagementCsvWithCache(\n {\n file,\n sombra,\n purposeSlugs,\n preferenceTopics,\n partitionKey,\n skipExistingRecordCheck,\n forceTriggerWorkflows,\n }: {\n /** File to parse */\n file: string;\n /** The purpose slugs that are allowed to be updated */\n purposeSlugs: string[];\n /** The preference topics */\n preferenceTopics: PreferenceTopic[];\n /** Sombra got instance */\n sombra: Got;\n /** Partition key */\n partitionKey: string;\n /** Whether to skip the check for existing records. SHOULD ONLY BE USED FOR INITIAL UPLOAD */\n skipExistingRecordCheck: boolean;\n /** Whether to force workflow triggers */\n forceTriggerWorkflows: boolean;\n },\n cache: PersistedState<typeof PreferenceState>,\n): Promise<void> {\n // Start the timer\n const t0 = new Date().getTime();\n\n // Get the current metadata\n const fileMetadata = cache.getValue('fileMetadata');\n\n // Read in the file\n logger.info(colors.magenta(`Reading in file: \"${file}\"`));\n let preferences = readCsv(file, t.record(t.string, t.string));\n\n // start building the cache, can use previous cache as well\n let currentState: FileMetadataState = {\n columnToPurposeName: {},\n pendingSafeUpdates: {},\n pendingConflictUpdates: {},\n skippedUpdates: {},\n // Load in the last fetched time\n ...((fileMetadata[file] || {}) as Partial<FileMetadataState>),\n lastFetchedAt: new Date().toISOString(),\n };\n\n // Validate that all timestamps are present in the file\n currentState = await parsePreferenceTimestampsFromCsv(\n preferences,\n currentState,\n );\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n\n // Validate that all identifiers are present and unique\n const result = await parsePreferenceIdentifiersFromCsv(\n preferences,\n currentState,\n );\n currentState = result.currentState;\n preferences = result.preferences;\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n\n // Ensure all other columns are mapped to purpose and preference\n // slug values\n currentState = await parsePreferenceAndPurposeValuesFromCsv(\n preferences,\n currentState,\n {\n preferenceTopics,\n purposeSlugs,\n forceTriggerWorkflows,\n },\n );\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n\n // Grab existing preference store records\n const identifiers = preferences.map(\n (pref) => pref[currentState.identifierColumn!],\n );\n const existingConsentRecords = skipExistingRecordCheck\n ? 
[]\n : await getPreferencesForIdentifiers(sombra, {\n identifiers: identifiers.map((x) => ({ value: x })),\n partitionKey,\n });\n const consentRecordByIdentifier = keyBy(existingConsentRecords, 'userId');\n\n // Clear out previous updates\n currentState.pendingConflictUpdates = {};\n currentState.pendingSafeUpdates = {};\n currentState.skippedUpdates = {};\n\n // Process each row\n preferences.forEach((pref) => {\n // Grab unique Id for the user\n const userId = pref[currentState.identifierColumn!];\n\n // determine updates for user\n const pendingUpdates = getPreferenceUpdatesFromRow({\n row: pref,\n columnToPurposeName: currentState.columnToPurposeName,\n preferenceTopics,\n purposeSlugs,\n });\n\n // Grab current state of the update\n const currentConsentRecord = consentRecordByIdentifier[userId];\n if (forceTriggerWorkflows && !currentConsentRecord) {\n throw new Error(\n `No existing consent record found for user with id: ${userId}.\n When 'forceTriggerWorkflows' is set all the user identifiers should contain a consent record`,\n );\n }\n // Check if the update can be skipped\n // this is the case if a record exists, and the purpose\n // and preference values are all in sync\n if (\n currentConsentRecord &&\n checkIfPendingPreferenceUpdatesAreNoOp({\n currentConsentRecord,\n pendingUpdates,\n preferenceTopics,\n }) &&\n !forceTriggerWorkflows\n ) {\n currentState.skippedUpdates[userId] = pref;\n return;\n }\n\n // Determine if there are any conflicts\n if (\n currentConsentRecord &&\n checkIfPendingPreferenceUpdatesCauseConflict({\n currentConsentRecord,\n pendingUpdates,\n preferenceTopics,\n })\n ) {\n currentState.pendingConflictUpdates[userId] = {\n row: pref,\n record: currentConsentRecord,\n };\n return;\n }\n\n // Add to pending updates\n currentState.pendingSafeUpdates[userId] = pref;\n });\n\n // Read in the file\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n const t1 = new Date().getTime();\n logger.info(\n colors.green(\n `Successfully pre-processed file: \"${file}\" in ${(t1 - t0) / 1000}s`,\n ),\n );\n}\n","import { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\nimport type { Got } from 'got';\nimport colors from 'colors';\nimport cliProgress from 'cli-progress';\nimport { chunk } from 'lodash-es';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { map } from '../bluebird';\nimport { logger } from '../../logger';\nimport { withPreferenceRetry } from './withPreferenceRetry';\nimport { ConsentPreferenceResponse } from './types';\n\n/**\n * Grab the current consent preference values for a list of identifiers\n *\n * @param sombra - Backend to make API call to\n * @param options - Options\n * @returns Plaintext context information\n */\nexport async function getPreferencesForIdentifiers(\n sombra: Got,\n {\n identifiers,\n partitionKey,\n skipLogging = false,\n concurrency = 40,\n }: {\n /** The list of identifiers to look up */\n identifiers: {\n /** The value of the identifier */\n value: string;\n }[];\n /** The partition key to look up */\n partitionKey: string;\n /** Whether to skip logging */\n skipLogging?: boolean;\n /** Concurrency for requests (default 40) */\n concurrency?: number;\n },\n): Promise<PreferenceQueryResponseItem[]> {\n const results: PreferenceQueryResponseItem[] = [];\n const groupedIdentifiers = chunk(identifiers, 100);\n\n // create a new progress bar instance and use shades_classic theme\n const t0 = new Date().getTime();\n const progressBar = new 
cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n if (!skipLogging) {\n progressBar.start(identifiers.length, 0);\n }\n\n let total = 0;\n await map(\n groupedIdentifiers,\n async (group) => {\n const rawResult = await withPreferenceRetry(\n 'Preference Query',\n () =>\n sombra\n .post(`v1/preferences/${partitionKey}/query`, {\n json: {\n filter: { identifiers: group },\n limit: group.length,\n },\n })\n .json(),\n {\n onRetry: (attempt, _err, msg) => {\n logger.warn(\n colors.yellow(\n `[RETRY] group size=${group.length} partition=${partitionKey} attempt=${attempt}: ${msg}`,\n ),\n );\n },\n },\n );\n\n const result = decodeCodec(ConsentPreferenceResponse, rawResult);\n results.push(...result.nodes);\n total += group.length;\n progressBar.update(total);\n },\n {\n concurrency,\n },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n if (!skipLogging) {\n // Log completion time\n logger.info(\n colors.green(`Completed download in \"${totalTime / 1000}\" seconds.`),\n );\n }\n\n return results;\n}\n","import colors from 'colors';\nimport { logger } from '../../logger';\nimport { sleepPromise } from '../helpers';\n\n/**\n * Transient network / platform errors that merit a retry.\n * Keep this list short and specific to avoid masking real failures.\n */\nexport const RETRY_PREFERENCE_MSGS: string[] = [\n 'ENOTFOUND',\n 'ECONNRESET',\n 'ETIMEDOUT',\n '502 Bad Gateway',\n '504 Gateway Time-out',\n '429',\n 'Rate limit exceeded',\n 'Task timed out after',\n 'unknown request error',\n].map((s) => s.toLowerCase());\n\n/**\n * Options for retrying preference operations.\n */\nexport type RetryOptions = {\n /** Max attempts including the first try (default 3) */\n maxAttempts?: number;\n /** Initial backoff in ms (default 250) */\n baseDelayMs?: number;\n /** Optional custom predicate to decide if an error is retryable */\n isRetryable?: (err: unknown, message: string) => boolean;\n /** Optional hook to log on each retry */\n onRetry?: (attempt: number, err: unknown, message: string) => void;\n};\n\n/**\n * Run an async function with standardized retry behavior for preference operations.\n * Exponential backoff with jitter; only retries on known-transient messages.\n *\n * @param name - Name of the operation (for logging)\n * @param fn - Function to run\n * @param options - Retry options\n * @returns Result of the function\n */\nexport async function withPreferenceRetry<T>(\n name: string,\n fn: () => Promise<T>,\n {\n maxAttempts = 3,\n baseDelayMs = 250,\n isRetryable = (_err, msg) =>\n RETRY_PREFERENCE_MSGS.some((m) => msg.toLowerCase().includes(m)),\n onRetry,\n }: RetryOptions = {},\n): Promise<T> {\n let attempt = 0;\n // eslint-disable-next-line no-constant-condition\n while (true) {\n attempt += 1;\n try {\n return await fn();\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n } catch (err: any) {\n const msg: string =\n (err && (err.response?.body || err.message)) ??\n String(err ?? 
'Unknown error');\n const willRetry = attempt < maxAttempts && isRetryable(err, msg);\n if (!willRetry) {\n throw new Error(`${name} failed after ${attempt} attempt(s): ${msg}`);\n }\n onRetry?.(attempt, err, msg);\n\n const backoff = baseDelayMs * 2 ** (attempt - 1);\n const jitter = Math.floor(Math.random() * baseDelayMs);\n const delay = backoff + jitter;\n logger.warn(\n colors.yellow(\n `[retry] attempt ${attempt}/${\n maxAttempts - 1\n }; backing off ${delay}ms: ${msg}`,\n ),\n );\n await sleepPromise(delay);\n }\n }\n}\n","import { uniq, groupBy, difference } from 'lodash-es';\nimport colors from 'colors';\nimport inquirer from 'inquirer';\nimport { FileMetadataState } from './codecs';\nimport { logger } from '../../logger';\nimport { inquirerConfirmBoolean } from '../helpers';\n\n/* eslint-disable no-param-reassign */\n\n/**\n * Parse identifiers from a CSV list of preferences\n *\n * Ensures that all rows have a valid identifier\n * and that all identifiers are unique.\n *\n * @param preferences - List of preferences\n * @param currentState - The current file metadata state for parsing this list\n * @returns The updated file metadata state\n */\nexport async function parsePreferenceIdentifiersFromCsv(\n preferences: Record<string, string>[],\n currentState: FileMetadataState,\n): Promise<{\n /** The updated state */\n currentState: FileMetadataState;\n /** The updated preferences */\n preferences: Record<string, string>[];\n}> {\n // Determine columns to map\n const columnNames = uniq(preferences.map((x) => Object.keys(x)).flat());\n\n // Determine the columns that could potentially be used for identifier\n const remainingColumnsForIdentifier = difference(columnNames, [\n ...(currentState.identifierColumn ? [currentState.identifierColumn] : []),\n ...Object.keys(currentState.columnToPurposeName),\n ]);\n\n // Determine the identifier column to work off of\n if (!currentState.identifierColumn) {\n const { identifierName } = await inquirer.prompt<{\n /** Identifier name */\n identifierName: string;\n }>([\n {\n name: 'identifierName',\n message:\n 'Choose the column that will be used as the identifier to upload consent preferences by',\n type: 'list',\n default:\n remainingColumnsForIdentifier.find((col) =>\n col.toLowerCase().includes('email'),\n ) || remainingColumnsForIdentifier[0],\n choices: remainingColumnsForIdentifier,\n },\n ]);\n currentState.identifierColumn = identifierName;\n }\n logger.info(\n colors.magenta(\n `Using identifier column \"${currentState.identifierColumn}\"`,\n ),\n );\n\n // Validate that the identifier column is present for all rows and unique\n const identifierColumnsMissing = preferences\n .map((pref, ind) => (pref[currentState.identifierColumn!] ? 
null : [ind]))\n .filter((x): x is number[] => !!x)\n .flat();\n if (identifierColumnsMissing.length > 0) {\n const msg = `The identifier column \"${\n currentState.identifierColumn\n }\" is missing a value for the following rows: ${identifierColumnsMissing.join(\n ', ',\n )}`;\n logger.warn(colors.yellow(msg));\n\n // Ask user if they would like to skip rows missing an identifier\n const skip = await inquirerConfirmBoolean({\n message: 'Would you like to skip rows missing an identifier?',\n });\n if (!skip) {\n throw new Error(msg);\n }\n\n // Filter out rows missing an identifier\n const previous = preferences.length;\n preferences = preferences.filter(\n (pref) => pref[currentState.identifierColumn!],\n );\n logger.info(\n colors.yellow(\n `Skipped ${previous - preferences.length} rows missing an identifier`,\n ),\n );\n }\n logger.info(\n colors.magenta(\n `The identifier column \"${currentState.identifierColumn}\" is present for all rows`,\n ),\n );\n\n // Validate that all identifiers are unique\n const rowsByUserId = groupBy(preferences, currentState.identifierColumn);\n const duplicateIdentifiers = Object.entries(rowsByUserId).filter(\n ([, rows]) => rows.length > 1,\n );\n if (duplicateIdentifiers.length > 0) {\n const msg = `The identifier column \"${\n currentState.identifierColumn\n }\" has duplicate values for the following rows: ${duplicateIdentifiers\n .slice(0, 10)\n .map(([userId, rows]) => `${userId} (${rows.length})`)\n .join('\\n')}`;\n logger.warn(colors.yellow(msg));\n\n // Ask user if they would like to take the most recent update\n // for each duplicate identifier\n const skip = await inquirerConfirmBoolean({\n message: 'Would you like to automatically take the latest update?',\n });\n if (!skip) {\n throw new Error(msg);\n }\n preferences = Object.entries(rowsByUserId)\n .map(([, rows]) => {\n const sorted = rows.sort(\n (a, b) =>\n new Date(b[currentState.timestampColum!]).getTime() -\n new Date(a[currentState.timestampColum!]).getTime(),\n );\n return sorted[0];\n })\n .filter((x) => x);\n }\n\n return { currentState, preferences };\n}\n/* eslint-enable no-param-reassign */\n","import { uniq, difference } from 'lodash-es';\nimport colors from 'colors';\nimport inquirer from 'inquirer';\nimport { FileMetadataState } from './codecs';\nimport { logger } from '../../logger';\nimport { mapSeries } from '../bluebird';\nimport { PreferenceTopic } from '../graphql';\nimport { PreferenceTopicType } from '@transcend-io/privacy-types';\nimport { splitCsvToList } from '../requests';\n\n/* eslint-disable no-param-reassign */\n\n/**\n * Parse out the purpose.enabled and preference values from a CSV file\n *\n * @param preferences - List of preferences\n * @param currentState - The current file metadata state for parsing this list\n * @param options - Options\n * @returns The updated file metadata state\n */\nexport async function parsePreferenceAndPurposeValuesFromCsv(\n preferences: Record<string, string>[],\n currentState: FileMetadataState,\n {\n purposeSlugs,\n preferenceTopics,\n forceTriggerWorkflows,\n }: {\n /** The purpose slugs that are allowed to be updated */\n purposeSlugs: string[];\n /** The preference topics */\n preferenceTopics: PreferenceTopic[];\n /** Force workflow triggers */\n forceTriggerWorkflows: boolean;\n },\n): Promise<FileMetadataState> {\n // Determine columns to map\n const columnNames = uniq(preferences.map((x) => Object.keys(x)).flat());\n\n // Determine the columns that could potentially be used for identifier\n const otherColumns = 
difference(columnNames, [\n ...(currentState.identifierColumn ? [currentState.identifierColumn] : []),\n ...(currentState.timestampColum ? [currentState.timestampColum] : []),\n ]);\n if (otherColumns.length === 0) {\n if (forceTriggerWorkflows) {\n return currentState;\n }\n throw new Error('No other columns to process');\n }\n\n // The purpose and preferences to map to\n const purposeNames = [\n ...purposeSlugs,\n ...preferenceTopics.map((x) => `${x.purpose.trackingType}->${x.slug}`),\n ];\n\n // Ensure all columns are accounted for\n await mapSeries(otherColumns, async (col) => {\n // Determine the unique values to map in this column\n const uniqueValues = uniq(preferences.map((x) => x[col]));\n\n // Map the column to a purpose\n let purposeMapping = currentState.columnToPurposeName[col];\n if (purposeMapping) {\n logger.info(\n colors.magenta(\n `Column \"${col}\" is associated with purpose \"${purposeMapping.purpose}\"`,\n ),\n );\n } else {\n const { purposeName } = await inquirer.prompt<{\n /** purpose name */\n purposeName: string;\n }>([\n {\n name: 'purposeName',\n message: `Choose the purpose that column ${col} is associated with`,\n type: 'list',\n default: purposeNames.find((x) => x.startsWith(purposeSlugs[0])),\n choices: purposeNames,\n },\n ]);\n const [purposeSlug, preferenceSlug] = purposeName.split('->');\n purposeMapping = {\n purpose: purposeSlug,\n preference: preferenceSlug || null,\n valueMapping: {},\n };\n }\n\n // map each value to the purpose value\n await mapSeries(uniqueValues, async (value) => {\n if (purposeMapping.valueMapping[value] !== undefined) {\n logger.info(\n colors.magenta(\n `Value \"${value}\" is associated with purpose value \"${purposeMapping.valueMapping[value]}\"`,\n ),\n );\n return;\n }\n // if preference is null, this column is just for the purpose\n if (purposeMapping.preference === null) {\n const { purposeValue } = await inquirer.prompt<{\n /** purpose value */\n purposeValue: boolean;\n }>([\n {\n name: 'purposeValue',\n message: `Choose the purpose value for value \"${value}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'confirm',\n default: value !== 'false',\n },\n ]);\n purposeMapping.valueMapping[value] = purposeValue;\n }\n\n // if preference is not null, this column is for a specific preference\n if (purposeMapping.preference !== null) {\n const preferenceTopic = preferenceTopics.find(\n (x) => x.slug === purposeMapping.preference,\n );\n if (!preferenceTopic) {\n logger.error(\n colors.red(\n `Preference topic \"${purposeMapping.preference}\" not found`,\n ),\n );\n return;\n }\n const preferenceOptions = preferenceTopic.preferenceOptionValues.map(\n ({ slug }) => slug,\n );\n\n if (preferenceTopic.type === PreferenceTopicType.Boolean) {\n const { preferenceValue } = await inquirer.prompt<{\n /** purpose value */\n preferenceValue: boolean;\n }>([\n {\n name: 'preferenceValue',\n message:\n // eslint-disable-next-line max-len\n `Choose the preference value for \"${preferenceTopic.slug}\" value \"${value}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'confirm',\n default: value !== 'false',\n },\n ]);\n purposeMapping.valueMapping[value] = preferenceValue;\n return;\n }\n\n if (preferenceTopic.type === PreferenceTopicType.Select) {\n const { preferenceValue } = await inquirer.prompt<{\n /** purpose value */\n preferenceValue: boolean;\n }>([\n {\n name: 'preferenceValue',\n // eslint-disable-next-line max-len\n message: `Choose the preference value for \"${preferenceTopic.slug}\" value 
\"${value}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'list',\n choices: preferenceOptions,\n default: preferenceOptions.find((x) => x === value),\n },\n ]);\n purposeMapping.valueMapping[value] = preferenceValue;\n return;\n }\n\n if (preferenceTopic.type === PreferenceTopicType.MultiSelect) {\n const parsedValues = splitCsvToList(value);\n // need to do this serially\n await mapSeries(parsedValues, async (parsedValue) => {\n // if we already have a value, skip re-processing it again\n if (purposeMapping.valueMapping[parsedValue] !== undefined) {\n return;\n }\n const { preferenceValue } = await inquirer.prompt<{\n /** purpose value */\n preferenceValue: boolean;\n }>([\n {\n name: 'preferenceValue',\n // eslint-disable-next-line max-len\n message: `Choose the preference value for \"${preferenceTopic.slug}\" value \"${parsedValue}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'list',\n choices: preferenceOptions,\n default: preferenceOptions.find((x) => x === parsedValue),\n },\n ]);\n purposeMapping.valueMapping[parsedValue] = preferenceValue;\n });\n return;\n }\n\n throw new Error(\n `Unknown preference topic type: ${preferenceTopic.type}`,\n );\n }\n });\n\n currentState.columnToPurposeName[col] = purposeMapping;\n });\n\n return currentState;\n}\n/* eslint-enable no-param-reassign */\n","import {\n PreferenceQueryResponseItem,\n PreferenceStoreIdentifier,\n PreferenceUpdateItem,\n} from '@transcend-io/privacy-types';\nimport * as t from 'io-ts';\n\nexport const PurposeRowMapping = t.type({\n /**\n * The slug or trackingType of the purpose to map to\n *\n * e.g. `Marketing`\n */\n purpose: t.string,\n /**\n * If the column maps to a preference instead of a purpose\n * this is the slug of the purpose.\n *\n * null value indicates that this column maps to the true/false\n * value of the purpose\n */\n preference: t.union([t.string, t.null]),\n /**\n * The mapping between each row value and purpose/preference value.\n *\n * e.g. 
for a boolean preference or purpose\n * {\n * 'true': true,\n * 'false': false,\n * '': true,\n * }\n *\n * or for a single or multi select preference\n * {\n * '': true,\n * 'value1': 'Value1',\n * 'value2': 'Value2',\n * }\n */\n valueMapping: t.record(\n t.string,\n t.union([t.string, t.boolean, t.null, t.undefined]),\n ),\n});\n\n/** Override type */\nexport type PurposeRowMapping = t.TypeOf<typeof PurposeRowMapping>;\n\n/**\n * Mapping of column name to purpose row mapping.\n * This is used to map each column in the CSV to the relevant purpose and preference definitions in\n * transcend.\n */\nexport const ColumnPurposeMap = t.record(t.string, PurposeRowMapping);\n\n/** Override type */\nexport type ColumnPurposeMap = t.TypeOf<typeof ColumnPurposeMap>;\n\nexport const IdentifierMetadataForPreference = t.type({\n /** The identifier name */\n name: t.string,\n /** Is unique on preference store */\n isUniqueOnPreferenceStore: t.boolean,\n});\n\n/** Override type */\nexport type IdentifierMetadataForPreference = t.TypeOf<\n typeof IdentifierMetadataForPreference\n>;\n\n/**\n * Mapping of identifier name to the column name in the CSV file.\n * This is used to map each identifier name to the column in the CSV file.\n */\nexport const ColumnIdentifierMap = t.record(\n t.string,\n IdentifierMetadataForPreference,\n);\n\n/** Override type */\nexport type ColumnIdentifierMap = t.TypeOf<typeof ColumnIdentifierMap>;\n\nexport const FileMetadataState = t.intersection([\n t.type({\n /**\n * Definition of how to map each column in the CSV to\n * the relevant purpose and preference definitions in transcend\n */\n columnToPurposeName: t.record(t.string, PurposeRowMapping),\n /** Last time the file was last parsed at */\n lastFetchedAt: t.string,\n /**\n * Mapping of userId to the rows in the file that need to be uploaded\n * These uploads are overwriting non-existent preferences and are safe\n */\n pendingSafeUpdates: t.record(t.string, t.record(t.string, t.string)),\n /**\n * Mapping of userId to the rows in the file that need to be uploaded\n * these records have conflicts with existing consent preferences\n */\n pendingConflictUpdates: t.record(\n t.string,\n t.type({\n record: PreferenceQueryResponseItem,\n row: t.record(t.string, t.string),\n }),\n ),\n /**\n * Mapping of userId to the rows in the file that can be skipped because\n * their preferences are already in the store\n */\n skippedUpdates: t.record(t.string, t.record(t.string, t.string)),\n }),\n t.partial({\n /** Determine which column name in file maps to consent record identifier to upload on */\n identifierColumn: t.string,\n /** Determine which column name in file maps to the timestamp */\n timestampColum: t.string,\n }),\n]);\n\n/** Override type */\nexport type FileMetadataState = t.TypeOf<typeof FileMetadataState>;\n\n/**\n * This is the type of the receipts that are stored in the file\n * that is used to track the state of the upload process.\n * It is used to resume the upload process from where it left off.\n * It is used to persist the state of the upload process across multiple runs.\n */\nexport const PreferenceUpdateMap = t.record(\n t.string,\n // This can either be true to indicate the record is pending\n // or it can be an object showing the object\n // We only return a fixed number of results to avoid\n // making the JSON file too large\n t.union([t.boolean, PreferenceUpdateItem]),\n);\n\n/** Override type */\nexport type PreferenceUpdateMap = t.TypeOf<typeof PreferenceUpdateMap>;\n\n/**\n * This is the type of the 
pending updates that are safe to run without\n * conflicts with existing consent preferences.\n *\n * Key is primaryKey of the record in the file.\n * The value is the row in the file that is safe to upload.\n */\nexport const PendingSafePreferenceUpdates = t.record(\n t.string,\n // This can either be true to indicate the record is safe\n // or it can be an object showing the object\n // We only return a fixed number of results to avoid\n // making the JSON file too large\n t.union([t.boolean, t.record(t.string, t.string)]),\n);\n\n/** Override type */\nexport type PendingSafePreferenceUpdates = t.TypeOf<\n typeof PendingSafePreferenceUpdates\n>;\n\n/**\n * These are the updates that failed to be uploaded to the API.\n */\nexport const FailingPreferenceUpdates = t.record(\n t.string,\n t.type({\n /** Time upload ran at */\n uploadedAt: t.string,\n /** Attempts to upload that resulted in an error */\n error: t.string,\n /** The update body */\n update: PreferenceUpdateItem,\n }),\n);\n\n/** Override type */\nexport type FailingPreferenceUpdates = t.TypeOf<\n typeof FailingPreferenceUpdates\n>;\n\n/**\n * This is the type of the pending updates that are in conflict with existing consent preferences.\n *\n * Key is primaryKey of the record in the file.\n * The value is the row in the file that is pending upload.\n */\nexport const PendingWithConflictPreferenceUpdates = t.record(\n t.string,\n // We always return the conflicts for investigation\n t.type({\n /** Record to be inserted to transcend v1/preferences API */\n record: PreferenceQueryResponseItem,\n /** The row in the file that is pending upload */\n row: t.record(t.string, t.string),\n }),\n);\n\n/** Override type */\nexport type PendingWithConflictPreferenceUpdates = t.TypeOf<\n typeof PendingWithConflictPreferenceUpdates\n>;\n\n/**\n * The set of preference updates that are skipped\n * Key is primaryKey and value is the row in the CSV\n * that is skipped.\n *\n * This is usually because the preferences are already in the store\n * or there are duplicate rows in the CSV file that are identical.\n */\nexport const SkippedPreferenceUpdates = t.record(\n t.string,\n t.record(t.string, t.string),\n);\n\n/** Override type */\nexport type SkippedPreferenceUpdates = t.TypeOf<\n typeof SkippedPreferenceUpdates\n>;\n\n/** Persist this data between runs of the script */\nexport const PreferenceState = t.type({\n /**\n * Store a cache of previous files read in\n */\n fileMetadata: t.record(t.string, FileMetadataState),\n /**\n * The set of successful uploads to Transcend\n * Mapping from userId to the upload metadata\n */\n failingUpdates: t.record(\n t.string,\n t.type({\n /** Time upload ran at */\n uploadedAt: t.string,\n /** Attempts to upload that resulted in an error */\n error: t.string,\n /** The update body */\n update: PreferenceUpdateItem,\n }),\n ),\n /**\n * The set of pending uploads to Transcend\n * Mapping from userId to the upload metadata\n */\n pendingUpdates: t.record(t.string, PreferenceUpdateItem),\n});\n\n/** Override type */\nexport type PreferenceState = t.TypeOf<typeof PreferenceState>;\n\nexport const DeletePreferenceRecordsInput = t.type({\n /** Array of consent preference records to delete */\n records: t.array(\n t.type({\n /** The anchor identifier to locate the consent record */\n anchorIdentifier: PreferenceStoreIdentifier,\n /** The ISO 8601 timestamp of when the deletion is requested */\n timestamp: t.string,\n }),\n ),\n});\n\n/** Override type */\nexport type DeletePreferenceRecordsInput = t.TypeOf<\n 
typeof DeletePreferenceRecordsInput\n>;\n\nexport const DeletePreferenceRecordsResponse = t.intersection([\n t.type({\n /** Array of results for each preference record deletion */\n records: t.array(\n t.intersection([\n t.type({\n /** Whether the deletion was successful */\n success: t.boolean,\n }),\n t.partial({\n /** An error message if the deletion failed */\n errorMessage: t.string,\n }),\n ]),\n ),\n /** The list of failed deletions with their respective errors */\n failures: t.array(\n t.type({\n /** The index of the failed update in the original request */\n index: t.number,\n /** The error message associated with the failure */\n error: t.string,\n }),\n ),\n }),\n t.partial({\n /** Any general errors that occurred during the operation */\n errors: t.array(t.string),\n }),\n]);\n\n/** Override type */\nexport type DeletePreferenceRecordsResponse = t.TypeOf<\n typeof DeletePreferenceRecordsResponse\n>;\n\n/** CLI CSV Row for deleting preference records */\nexport const DeletePreferenceRecordCliCsvRow = t.type({\n /** The name of the identifier type (e.g., email, userId) */\n name: t.string,\n /** The value of the identifier */\n value: t.string,\n});\n\n/** Override type */\nexport type DeletePreferenceRecordCliCsvRow = t.TypeOf<\n typeof DeletePreferenceRecordCliCsvRow\n>;\n"]}
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-EMLESF76.cjs","../src/lib/preference-management/uploadPreferenceManagementPreferencesInteractive.ts","../src/lib/preference-management/parsePreferenceManagementCsv.ts","../src/lib/preference-management/getPreferencesForIdentifiers.ts","../src/lib/preference-management/withPreferenceRetry.ts","../src/lib/preference-management/parsePreferenceIdentifiersFromCsv.ts","../src/lib/preference-management/parsePreferenceAndPurposeValuesFromCsv.ts","../src/lib/preference-management/codecs.ts"],"names":["RETRY_PREFERENCE_MSGS","s","withPreferenceRetry","name","fn","maxAttempts","baseDelayMs","isRetryable","_err","msg","m","onRetry","attempt","err"],"mappings":"AAAA,2lCAAwK,wDAAyC,wDAAuE,gFCMrQ,qGAIK,+DAEO,qJCTZ,qDCES,ICGfA,EAAAA,CAAkC,CAC7C,WAAA,CACA,YAAA,CACA,WAAA,CACA,iBAAA,CACA,sBAAA,CACA,KAAA,CACA,qBAAA,CACA,sBAAA,CACA,uBACF,CAAA,CAAE,GAAA,CAAKC,CAAAA,EAAMA,CAAAA,CAAE,WAAA,CAAY,CAAC,CAAA,CAyB5B,MAAA,SAAsBC,CAAAA,CACpBC,CAAAA,CACAC,CAAAA,CACA,CACE,WAAA,CAAAC,CAAAA,CAAc,CAAA,CACd,WAAA,CAAAC,CAAAA,CAAc,GAAA,CACd,WAAA,CAAAC,CAAAA,CAAc,CAACC,CAAAA,CAAMC,CAAAA,CAAAA,EACnBT,EAAAA,CAAsB,IAAA,CAAMU,CAAAA,EAAMD,CAAAA,CAAI,WAAA,CAAY,CAAA,CAAE,QAAA,CAASC,CAAC,CAAC,CAAA,CACjE,OAAA,CAAAC,CACF,CAAA,CAAkB,CAAC,CAAA,CACP,CACZ,IAAIC,CAAAA,CAAU,CAAA,CAEd,GAAA,CAAA,CAAA,CAAA,CAAa,CACXA,CAAAA,EAAW,CAAA,CACX,GAAI,CACF,OAAO,MAAMR,CAAAA,CAAG,CAElB,CAAA,KAAA,CAASS,CAAAA,CAAU,CACjB,IAAMJ,CAAAA,kBAAAA,CACHI,CAAAA,EAAAA,iBAAQA,CAAAA,qBAAI,QAAA,6BAAU,MAAA,EAAQA,CAAAA,CAAI,OAAA,CAAA,CAAA,SACnC,MAAA,kBAAOA,CAAAA,SAAO,iBAAe,GAAA,CAE/B,EAAA,CAAI,CAAA,CADcD,CAAAA,CAAUP,CAAAA,EAAeE,CAAAA,CAAYM,CAAAA,CAAKJ,CAAG,CAAA,CAAA,CAE7D,MAAM,IAAI,KAAA,CAAM,CAAA,EAAA;ACuBX;ACbM;ACkBO,oGAAA;ANPlB;AAGA;AAAA;AAKQ;AAAK;AACgC,sCAAA;AAwFrC","file":"/home/runner/work/cli/cli/dist/chunk-EMLESF76.cjs","sourcesContent":[null,"import {\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchAllPurposes,\n fetchAllPreferenceTopics,\n} from '../graphql';\nimport colors from 'colors';\nimport { map } from '../bluebird';\nimport { chunk } from 'lodash-es';\nimport { logger } from '../../logger';\nimport cliProgress from 'cli-progress';\nimport { parseAttributesFromString } from '../requests';\nimport { PersistedState } from '@transcend-io/persisted-state';\nimport { parsePreferenceManagementCsvWithCache } from './parsePreferenceManagementCsv';\nimport { PreferenceState } from './codecs';\nimport { PreferenceUpdateItem } from '@transcend-io/privacy-types';\nimport { apply } from '@transcend-io/type-utils';\nimport { NONE_PREFERENCE_MAP } from './parsePreferenceTimestampsFromCsv';\nimport { getPreferenceUpdatesFromRow } from './getPreferenceUpdatesFromRow';\n\n/**\n * Upload a set of consent preferences\n *\n * @param options - Options\n */\nexport async function uploadPreferenceManagementPreferencesInteractive({\n auth,\n sombraAuth,\n receiptFilepath,\n file,\n partition,\n isSilent = true,\n dryRun = false,\n skipWorkflowTriggers = false,\n skipConflictUpdates = false,\n skipExistingRecordCheck = false,\n attributes = [],\n transcendUrl,\n forceTriggerWorkflows = false,\n}: {\n /** The Transcend API key */\n auth: string;\n /** Sombra API key authentication */\n sombraAuth?: string;\n /** Partition key */\n partition: string;\n /** File where to store receipt and continue from where left off */\n receiptFilepath: string;\n /** The file to process */\n file: string;\n /** API URL for Transcend backend */\n transcendUrl: string;\n /** Whether to do a dry run */\n dryRun?: boolean;\n /** 
Whether to upload as isSilent */\n isSilent?: boolean;\n /** Attributes string pre-parse. In format Key:Value */\n attributes?: string[];\n /** Skip workflow triggers */\n skipWorkflowTriggers?: boolean;\n /**\n * When true, only update preferences that do not conflict with existing\n * preferences. When false, update all preferences in CSV based on timestamp.\n */\n skipConflictUpdates?: boolean;\n /** Whether to skip the check for existing records. SHOULD ONLY BE USED FOR INITIAL UPLOAD */\n skipExistingRecordCheck?: boolean;\n /** Whether to force trigger workflows */\n forceTriggerWorkflows?: boolean;\n}): Promise<void> {\n // Parse out the extra attributes to apply to all requests uploaded\n const parsedAttributes = parseAttributesFromString(attributes);\n\n // Create a new state file to store the requests from this run\n const preferenceState = new PersistedState(receiptFilepath, PreferenceState, {\n fileMetadata: {},\n failingUpdates: {},\n pendingUpdates: {},\n });\n const failingRequests = preferenceState.getValue('failingUpdates');\n const pendingRequests = preferenceState.getValue('pendingUpdates');\n let fileMetadata = preferenceState.getValue('fileMetadata');\n\n logger.info(\n colors.magenta(\n 'Restored cache, there are: \\n' +\n `${\n Object.values(failingRequests).length\n } failing requests to be retried\\n` +\n `${\n Object.values(pendingRequests).length\n } pending requests to be processed\\n` +\n `The following files are stored in cache and will be used:\\n${Object.keys(\n fileMetadata,\n )\n .map((x) => x)\n .join('\\n')}\\n` +\n `The following file will be processed: ${file}\\n`,\n ),\n );\n\n // Create GraphQL client to connect to Transcend backend\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const [sombra, purposes, preferenceTopics] = await Promise.all([\n // Create sombra instance to communicate with\n createSombraGotInstance(transcendUrl, auth, sombraAuth),\n // get all purposes and topics\n fetchAllPurposes(client),\n fetchAllPreferenceTopics(client),\n ]);\n\n // Process the file\n await parsePreferenceManagementCsvWithCache(\n {\n file,\n purposeSlugs: purposes.map((x) => x.trackingType),\n preferenceTopics,\n sombra,\n partitionKey: partition,\n skipExistingRecordCheck,\n forceTriggerWorkflows,\n },\n preferenceState,\n );\n\n // Construct the pending updates\n const pendingUpdates: Record<string, PreferenceUpdateItem> = {};\n fileMetadata = preferenceState.getValue('fileMetadata');\n const metadata = fileMetadata[file];\n\n logger.info(\n colors.magenta(\n `Found ${\n Object.entries(metadata.pendingSafeUpdates).length\n } safe updates in ${file}`,\n ),\n );\n logger.info(\n colors.magenta(\n `Found ${\n Object.entries(metadata.pendingConflictUpdates).length\n } conflict updates in ${file}`,\n ),\n );\n logger.info(\n colors.magenta(\n `Found ${\n Object.entries(metadata.skippedUpdates).length\n } skipped updates in ${file}`,\n ),\n );\n\n // Update either safe updates only or safe + conflict\n Object.entries({\n ...metadata.pendingSafeUpdates,\n ...(skipConflictUpdates\n ? {}\n : apply(metadata.pendingConflictUpdates, ({ row }) => row)),\n }).forEach(([userId, update]) => {\n // Determine timestamp\n const timestamp =\n metadata.timestampColum === NONE_PREFERENCE_MAP\n ? 
new Date()\n : new Date(update[metadata.timestampColum!]);\n\n // Determine updates\n const updates = getPreferenceUpdatesFromRow({\n row: update,\n columnToPurposeName: metadata.columnToPurposeName,\n preferenceTopics,\n purposeSlugs: purposes.map((x) => x.trackingType),\n });\n pendingUpdates[userId] = {\n userId,\n partition,\n timestamp: timestamp.toISOString(),\n purposes: Object.entries(updates).map(([purpose, value]) => ({\n ...value,\n purpose,\n workflowSettings: {\n attributes: parsedAttributes,\n isSilent,\n skipWorkflowTrigger: skipWorkflowTriggers,\n ...(forceTriggerWorkflows\n ? { forceTriggerWorkflow: forceTriggerWorkflows }\n : {}),\n },\n })),\n };\n });\n await preferenceState.setValue(pendingUpdates, 'pendingUpdates');\n await preferenceState.setValue({}, 'failingUpdates');\n\n // Exist early if dry run\n if (dryRun) {\n logger.info(\n colors.green(\n `Dry run complete, exiting. ${\n Object.values(pendingUpdates).length\n } pending updates. Check file: ${receiptFilepath}`,\n ),\n );\n return;\n }\n\n logger.info(\n colors.magenta(\n `Uploading ${\n Object.values(pendingUpdates).length\n } preferences to partition: ${partition}`,\n ),\n );\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Build a GraphQL client\n let total = 0;\n const updatesToRun = Object.entries(pendingUpdates);\n const chunkedUpdates = chunk(updatesToRun, skipWorkflowTriggers ? 100 : 10);\n progressBar.start(updatesToRun.length, 0);\n await map(\n chunkedUpdates,\n async (currentChunk) => {\n // Make the request\n try {\n await sombra\n .put('v1/preferences', {\n json: {\n records: currentChunk.map(([, update]) => update),\n skipWorkflowTriggers,\n },\n })\n .json();\n } catch (err) {\n try {\n const parsed = JSON.parse(err?.response?.body || '{}');\n if (parsed.error) {\n logger.error(colors.red(`Error: ${parsed.error}`));\n }\n } catch (e) {\n // continue\n }\n logger.error(\n colors.red(\n `Failed to upload ${\n currentChunk.length\n } user preferences to partition ${partition}: ${\n err?.response?.body || err?.message\n }`,\n ),\n );\n const failingUpdates = preferenceState.getValue('failingUpdates');\n currentChunk.forEach(([userId, update]) => {\n failingUpdates[userId] = {\n uploadedAt: new Date().toISOString(),\n update,\n error: err?.response?.body || err?.message || 'Unknown error',\n };\n });\n await preferenceState.setValue(failingUpdates, 'failingUpdates');\n }\n\n total += currentChunk.length;\n progressBar.update(total);\n },\n {\n concurrency: 40,\n },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n logger.info(\n colors.green(\n `Successfully uploaded ${\n updatesToRun.length\n } user preferences to partition ${partition} in \"${\n totalTime / 1000\n }\" seconds!`,\n ),\n );\n}\n","import { PersistedState } from '@transcend-io/persisted-state';\nimport type { Got } from 'got';\nimport { keyBy } from 'lodash-es';\nimport * as t from 'io-ts';\nimport colors from 'colors';\nimport { FileMetadataState, PreferenceState } from './codecs';\nimport { logger } from '../../logger';\nimport { readCsv } from '../requests';\nimport { getPreferencesForIdentifiers } from './getPreferencesForIdentifiers';\nimport { PreferenceTopic } from '../graphql';\nimport { getPreferenceUpdatesFromRow } from './getPreferenceUpdatesFromRow';\nimport { parsePreferenceTimestampsFromCsv } 
from './parsePreferenceTimestampsFromCsv';\nimport { parsePreferenceIdentifiersFromCsv } from './parsePreferenceIdentifiersFromCsv';\nimport { parsePreferenceAndPurposeValuesFromCsv } from './parsePreferenceAndPurposeValuesFromCsv';\nimport { checkIfPendingPreferenceUpdatesAreNoOp } from './checkIfPendingPreferenceUpdatesAreNoOp';\nimport { checkIfPendingPreferenceUpdatesCauseConflict } from './checkIfPendingPreferenceUpdatesCauseConflict';\n\n/**\n * Parse a file into the cache\n *\n *\n * @param options - Options\n * @param cache - The cache to store the parsed file in\n * @returns The cache with the parsed file\n */\nexport async function parsePreferenceManagementCsvWithCache(\n {\n file,\n sombra,\n purposeSlugs,\n preferenceTopics,\n partitionKey,\n skipExistingRecordCheck,\n forceTriggerWorkflows,\n }: {\n /** File to parse */\n file: string;\n /** The purpose slugs that are allowed to be updated */\n purposeSlugs: string[];\n /** The preference topics */\n preferenceTopics: PreferenceTopic[];\n /** Sombra got instance */\n sombra: Got;\n /** Partition key */\n partitionKey: string;\n /** Whether to skip the check for existing records. SHOULD ONLY BE USED FOR INITIAL UPLOAD */\n skipExistingRecordCheck: boolean;\n /** Whether to force workflow triggers */\n forceTriggerWorkflows: boolean;\n },\n cache: PersistedState<typeof PreferenceState>,\n): Promise<void> {\n // Start the timer\n const t0 = new Date().getTime();\n\n // Get the current metadata\n const fileMetadata = cache.getValue('fileMetadata');\n\n // Read in the file\n logger.info(colors.magenta(`Reading in file: \"${file}\"`));\n let preferences = readCsv(file, t.record(t.string, t.string));\n\n // start building the cache, can use previous cache as well\n let currentState: FileMetadataState = {\n columnToPurposeName: {},\n pendingSafeUpdates: {},\n pendingConflictUpdates: {},\n skippedUpdates: {},\n // Load in the last fetched time\n ...((fileMetadata[file] || {}) as Partial<FileMetadataState>),\n lastFetchedAt: new Date().toISOString(),\n };\n\n // Validate that all timestamps are present in the file\n currentState = await parsePreferenceTimestampsFromCsv(\n preferences,\n currentState,\n );\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n\n // Validate that all identifiers are present and unique\n const result = await parsePreferenceIdentifiersFromCsv(\n preferences,\n currentState,\n );\n currentState = result.currentState;\n preferences = result.preferences;\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n\n // Ensure all other columns are mapped to purpose and preference\n // slug values\n currentState = await parsePreferenceAndPurposeValuesFromCsv(\n preferences,\n currentState,\n {\n preferenceTopics,\n purposeSlugs,\n forceTriggerWorkflows,\n },\n );\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n\n // Grab existing preference store records\n const identifiers = preferences.map(\n (pref) => pref[currentState.identifierColumn!],\n );\n const existingConsentRecords = skipExistingRecordCheck\n ? 
[]\n : await getPreferencesForIdentifiers(sombra, {\n identifiers: identifiers.map((x) => ({ value: x })),\n partitionKey,\n });\n const consentRecordByIdentifier = keyBy(existingConsentRecords, 'userId');\n\n // Clear out previous updates\n currentState.pendingConflictUpdates = {};\n currentState.pendingSafeUpdates = {};\n currentState.skippedUpdates = {};\n\n // Process each row\n preferences.forEach((pref) => {\n // Grab unique Id for the user\n const userId = pref[currentState.identifierColumn!];\n\n // determine updates for user\n const pendingUpdates = getPreferenceUpdatesFromRow({\n row: pref,\n columnToPurposeName: currentState.columnToPurposeName,\n preferenceTopics,\n purposeSlugs,\n });\n\n // Grab current state of the update\n const currentConsentRecord = consentRecordByIdentifier[userId];\n if (forceTriggerWorkflows && !currentConsentRecord) {\n throw new Error(\n `No existing consent record found for user with id: ${userId}.\n When 'forceTriggerWorkflows' is set all the user identifiers should contain a consent record`,\n );\n }\n // Check if the update can be skipped\n // this is the case if a record exists, and the purpose\n // and preference values are all in sync\n if (\n currentConsentRecord &&\n checkIfPendingPreferenceUpdatesAreNoOp({\n currentConsentRecord,\n pendingUpdates,\n preferenceTopics,\n }) &&\n !forceTriggerWorkflows\n ) {\n currentState.skippedUpdates[userId] = pref;\n return;\n }\n\n // Determine if there are any conflicts\n if (\n currentConsentRecord &&\n checkIfPendingPreferenceUpdatesCauseConflict({\n currentConsentRecord,\n pendingUpdates,\n preferenceTopics,\n })\n ) {\n currentState.pendingConflictUpdates[userId] = {\n row: pref,\n record: currentConsentRecord,\n };\n return;\n }\n\n // Add to pending updates\n currentState.pendingSafeUpdates[userId] = pref;\n });\n\n // Read in the file\n fileMetadata[file] = currentState;\n await cache.setValue(fileMetadata, 'fileMetadata');\n const t1 = new Date().getTime();\n logger.info(\n colors.green(\n `Successfully pre-processed file: \"${file}\" in ${(t1 - t0) / 1000}s`,\n ),\n );\n}\n","import { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\nimport type { Got } from 'got';\nimport colors from 'colors';\nimport cliProgress from 'cli-progress';\nimport { chunk } from 'lodash-es';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport { map } from '../bluebird';\nimport { logger } from '../../logger';\nimport { withPreferenceRetry } from './withPreferenceRetry';\nimport { ConsentPreferenceResponse } from './types';\n\n/**\n * Grab the current consent preference values for a list of identifiers\n *\n * @param sombra - Backend to make API call to\n * @param options - Options\n * @returns Plaintext context information\n */\nexport async function getPreferencesForIdentifiers(\n sombra: Got,\n {\n identifiers,\n partitionKey,\n skipLogging = false,\n concurrency = 40,\n }: {\n /** The list of identifiers to look up */\n identifiers: {\n /** The value of the identifier */\n value: string;\n }[];\n /** The partition key to look up */\n partitionKey: string;\n /** Whether to skip logging */\n skipLogging?: boolean;\n /** Concurrency for requests (default 40) */\n concurrency?: number;\n },\n): Promise<PreferenceQueryResponseItem[]> {\n const results: PreferenceQueryResponseItem[] = [];\n const groupedIdentifiers = chunk(identifiers, 100);\n\n // create a new progress bar instance and use shades_classic theme\n const t0 = new Date().getTime();\n const progressBar = new 
cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n if (!skipLogging) {\n progressBar.start(identifiers.length, 0);\n }\n\n let total = 0;\n await map(\n groupedIdentifiers,\n async (group) => {\n const rawResult = await withPreferenceRetry(\n 'Preference Query',\n () =>\n sombra\n .post(`v1/preferences/${partitionKey}/query`, {\n json: {\n filter: { identifiers: group },\n limit: group.length,\n },\n })\n .json(),\n {\n onRetry: (attempt, _err, msg) => {\n logger.warn(\n colors.yellow(\n `[RETRY] group size=${group.length} partition=${partitionKey} attempt=${attempt}: ${msg}`,\n ),\n );\n },\n },\n );\n\n const result = decodeCodec(ConsentPreferenceResponse, rawResult);\n results.push(...result.nodes);\n total += group.length;\n progressBar.update(total);\n },\n {\n concurrency,\n },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n if (!skipLogging) {\n // Log completion time\n logger.info(\n colors.green(`Completed download in \"${totalTime / 1000}\" seconds.`),\n );\n }\n\n return results;\n}\n","import colors from 'colors';\nimport { logger } from '../../logger';\nimport { sleepPromise } from '../helpers';\n\n/**\n * Transient network / platform errors that merit a retry.\n * Keep this list short and specific to avoid masking real failures.\n */\nexport const RETRY_PREFERENCE_MSGS: string[] = [\n 'ENOTFOUND',\n 'ECONNRESET',\n 'ETIMEDOUT',\n '502 Bad Gateway',\n '504 Gateway Time-out',\n '429',\n 'Rate limit exceeded',\n 'Task timed out after',\n 'unknown request error',\n].map((s) => s.toLowerCase());\n\n/**\n * Options for retrying preference operations.\n */\nexport type RetryOptions = {\n /** Max attempts including the first try (default 3) */\n maxAttempts?: number;\n /** Initial backoff in ms (default 250) */\n baseDelayMs?: number;\n /** Optional custom predicate to decide if an error is retryable */\n isRetryable?: (err: unknown, message: string) => boolean;\n /** Optional hook to log on each retry */\n onRetry?: (attempt: number, err: unknown, message: string) => void;\n};\n\n/**\n * Run an async function with standardized retry behavior for preference operations.\n * Exponential backoff with jitter; only retries on known-transient messages.\n *\n * @param name - Name of the operation (for logging)\n * @param fn - Function to run\n * @param options - Retry options\n * @returns Result of the function\n */\nexport async function withPreferenceRetry<T>(\n name: string,\n fn: () => Promise<T>,\n {\n maxAttempts = 3,\n baseDelayMs = 250,\n isRetryable = (_err, msg) =>\n RETRY_PREFERENCE_MSGS.some((m) => msg.toLowerCase().includes(m)),\n onRetry,\n }: RetryOptions = {},\n): Promise<T> {\n let attempt = 0;\n // eslint-disable-next-line no-constant-condition\n while (true) {\n attempt += 1;\n try {\n return await fn();\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n } catch (err: any) {\n const msg: string =\n (err && (err.response?.body || err.message)) ??\n String(err ?? 
'Unknown error');\n const willRetry = attempt < maxAttempts && isRetryable(err, msg);\n if (!willRetry) {\n throw new Error(`${name} failed after ${attempt} attempt(s): ${msg}`);\n }\n onRetry?.(attempt, err, msg);\n\n const backoff = baseDelayMs * 2 ** (attempt - 1);\n const jitter = Math.floor(Math.random() * baseDelayMs);\n const delay = backoff + jitter;\n logger.warn(\n colors.yellow(\n `[retry] attempt ${attempt}/${\n maxAttempts - 1\n }; backing off ${delay}ms: ${msg}`,\n ),\n );\n await sleepPromise(delay);\n }\n }\n}\n","import { uniq, groupBy, difference } from 'lodash-es';\nimport colors from 'colors';\nimport inquirer from 'inquirer';\nimport { FileMetadataState } from './codecs';\nimport { logger } from '../../logger';\nimport { inquirerConfirmBoolean } from '../helpers';\n\n/* eslint-disable no-param-reassign */\n\n/**\n * Parse identifiers from a CSV list of preferences\n *\n * Ensures that all rows have a valid identifier\n * and that all identifiers are unique.\n *\n * @param preferences - List of preferences\n * @param currentState - The current file metadata state for parsing this list\n * @returns The updated file metadata state\n */\nexport async function parsePreferenceIdentifiersFromCsv(\n preferences: Record<string, string>[],\n currentState: FileMetadataState,\n): Promise<{\n /** The updated state */\n currentState: FileMetadataState;\n /** The updated preferences */\n preferences: Record<string, string>[];\n}> {\n // Determine columns to map\n const columnNames = uniq(preferences.map((x) => Object.keys(x)).flat());\n\n // Determine the columns that could potentially be used for identifier\n const remainingColumnsForIdentifier = difference(columnNames, [\n ...(currentState.identifierColumn ? [currentState.identifierColumn] : []),\n ...Object.keys(currentState.columnToPurposeName),\n ]);\n\n // Determine the identifier column to work off of\n if (!currentState.identifierColumn) {\n const { identifierName } = await inquirer.prompt<{\n /** Identifier name */\n identifierName: string;\n }>([\n {\n name: 'identifierName',\n message:\n 'Choose the column that will be used as the identifier to upload consent preferences by',\n type: 'list',\n default:\n remainingColumnsForIdentifier.find((col) =>\n col.toLowerCase().includes('email'),\n ) || remainingColumnsForIdentifier[0],\n choices: remainingColumnsForIdentifier,\n },\n ]);\n currentState.identifierColumn = identifierName;\n }\n logger.info(\n colors.magenta(\n `Using identifier column \"${currentState.identifierColumn}\"`,\n ),\n );\n\n // Validate that the identifier column is present for all rows and unique\n const identifierColumnsMissing = preferences\n .map((pref, ind) => (pref[currentState.identifierColumn!] ? 
null : [ind]))\n .filter((x): x is number[] => !!x)\n .flat();\n if (identifierColumnsMissing.length > 0) {\n const msg = `The identifier column \"${\n currentState.identifierColumn\n }\" is missing a value for the following rows: ${identifierColumnsMissing.join(\n ', ',\n )}`;\n logger.warn(colors.yellow(msg));\n\n // Ask user if they would like to skip rows missing an identifier\n const skip = await inquirerConfirmBoolean({\n message: 'Would you like to skip rows missing an identifier?',\n });\n if (!skip) {\n throw new Error(msg);\n }\n\n // Filter out rows missing an identifier\n const previous = preferences.length;\n preferences = preferences.filter(\n (pref) => pref[currentState.identifierColumn!],\n );\n logger.info(\n colors.yellow(\n `Skipped ${previous - preferences.length} rows missing an identifier`,\n ),\n );\n }\n logger.info(\n colors.magenta(\n `The identifier column \"${currentState.identifierColumn}\" is present for all rows`,\n ),\n );\n\n // Validate that all identifiers are unique\n const rowsByUserId = groupBy(preferences, currentState.identifierColumn);\n const duplicateIdentifiers = Object.entries(rowsByUserId).filter(\n ([, rows]) => rows.length > 1,\n );\n if (duplicateIdentifiers.length > 0) {\n const msg = `The identifier column \"${\n currentState.identifierColumn\n }\" has duplicate values for the following rows: ${duplicateIdentifiers\n .slice(0, 10)\n .map(([userId, rows]) => `${userId} (${rows.length})`)\n .join('\\n')}`;\n logger.warn(colors.yellow(msg));\n\n // Ask user if they would like to take the most recent update\n // for each duplicate identifier\n const skip = await inquirerConfirmBoolean({\n message: 'Would you like to automatically take the latest update?',\n });\n if (!skip) {\n throw new Error(msg);\n }\n preferences = Object.entries(rowsByUserId)\n .map(([, rows]) => {\n const sorted = rows.sort(\n (a, b) =>\n new Date(b[currentState.timestampColum!]).getTime() -\n new Date(a[currentState.timestampColum!]).getTime(),\n );\n return sorted[0];\n })\n .filter((x) => x);\n }\n\n return { currentState, preferences };\n}\n/* eslint-enable no-param-reassign */\n","import { uniq, difference } from 'lodash-es';\nimport colors from 'colors';\nimport inquirer from 'inquirer';\nimport { FileMetadataState } from './codecs';\nimport { logger } from '../../logger';\nimport { mapSeries } from '../bluebird';\nimport { PreferenceTopic } from '../graphql';\nimport { PreferenceTopicType } from '@transcend-io/privacy-types';\nimport { splitCsvToList } from '../requests';\n\n/* eslint-disable no-param-reassign */\n\n/**\n * Parse out the purpose.enabled and preference values from a CSV file\n *\n * @param preferences - List of preferences\n * @param currentState - The current file metadata state for parsing this list\n * @param options - Options\n * @returns The updated file metadata state\n */\nexport async function parsePreferenceAndPurposeValuesFromCsv(\n preferences: Record<string, string>[],\n currentState: FileMetadataState,\n {\n purposeSlugs,\n preferenceTopics,\n forceTriggerWorkflows,\n }: {\n /** The purpose slugs that are allowed to be updated */\n purposeSlugs: string[];\n /** The preference topics */\n preferenceTopics: PreferenceTopic[];\n /** Force workflow triggers */\n forceTriggerWorkflows: boolean;\n },\n): Promise<FileMetadataState> {\n // Determine columns to map\n const columnNames = uniq(preferences.map((x) => Object.keys(x)).flat());\n\n // Determine the columns that could potentially be used for identifier\n const otherColumns = 
difference(columnNames, [\n ...(currentState.identifierColumn ? [currentState.identifierColumn] : []),\n ...(currentState.timestampColum ? [currentState.timestampColum] : []),\n ]);\n if (otherColumns.length === 0) {\n if (forceTriggerWorkflows) {\n return currentState;\n }\n throw new Error('No other columns to process');\n }\n\n // The purpose and preferences to map to\n const purposeNames = [\n ...purposeSlugs,\n ...preferenceTopics.map((x) => `${x.purpose.trackingType}->${x.slug}`),\n ];\n\n // Ensure all columns are accounted for\n await mapSeries(otherColumns, async (col) => {\n // Determine the unique values to map in this column\n const uniqueValues = uniq(preferences.map((x) => x[col]));\n\n // Map the column to a purpose\n let purposeMapping = currentState.columnToPurposeName[col];\n if (purposeMapping) {\n logger.info(\n colors.magenta(\n `Column \"${col}\" is associated with purpose \"${purposeMapping.purpose}\"`,\n ),\n );\n } else {\n const { purposeName } = await inquirer.prompt<{\n /** purpose name */\n purposeName: string;\n }>([\n {\n name: 'purposeName',\n message: `Choose the purpose that column ${col} is associated with`,\n type: 'list',\n default: purposeNames.find((x) => x.startsWith(purposeSlugs[0])),\n choices: purposeNames,\n },\n ]);\n const [purposeSlug, preferenceSlug] = purposeName.split('->');\n purposeMapping = {\n purpose: purposeSlug,\n preference: preferenceSlug || null,\n valueMapping: {},\n };\n }\n\n // map each value to the purpose value\n await mapSeries(uniqueValues, async (value) => {\n if (purposeMapping.valueMapping[value] !== undefined) {\n logger.info(\n colors.magenta(\n `Value \"${value}\" is associated with purpose value \"${purposeMapping.valueMapping[value]}\"`,\n ),\n );\n return;\n }\n // if preference is null, this column is just for the purpose\n if (purposeMapping.preference === null) {\n const { purposeValue } = await inquirer.prompt<{\n /** purpose value */\n purposeValue: boolean;\n }>([\n {\n name: 'purposeValue',\n message: `Choose the purpose value for value \"${value}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'confirm',\n default: value !== 'false',\n },\n ]);\n purposeMapping.valueMapping[value] = purposeValue;\n }\n\n // if preference is not null, this column is for a specific preference\n if (purposeMapping.preference !== null) {\n const preferenceTopic = preferenceTopics.find(\n (x) => x.slug === purposeMapping.preference,\n );\n if (!preferenceTopic) {\n logger.error(\n colors.red(\n `Preference topic \"${purposeMapping.preference}\" not found`,\n ),\n );\n return;\n }\n const preferenceOptions = preferenceTopic.preferenceOptionValues.map(\n ({ slug }) => slug,\n );\n\n if (preferenceTopic.type === PreferenceTopicType.Boolean) {\n const { preferenceValue } = await inquirer.prompt<{\n /** purpose value */\n preferenceValue: boolean;\n }>([\n {\n name: 'preferenceValue',\n message:\n // eslint-disable-next-line max-len\n `Choose the preference value for \"${preferenceTopic.slug}\" value \"${value}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'confirm',\n default: value !== 'false',\n },\n ]);\n purposeMapping.valueMapping[value] = preferenceValue;\n return;\n }\n\n if (preferenceTopic.type === PreferenceTopicType.Select) {\n const { preferenceValue } = await inquirer.prompt<{\n /** purpose value */\n preferenceValue: boolean;\n }>([\n {\n name: 'preferenceValue',\n // eslint-disable-next-line max-len\n message: `Choose the preference value for \"${preferenceTopic.slug}\" value 
\"${value}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'list',\n choices: preferenceOptions,\n default: preferenceOptions.find((x) => x === value),\n },\n ]);\n purposeMapping.valueMapping[value] = preferenceValue;\n return;\n }\n\n if (preferenceTopic.type === PreferenceTopicType.MultiSelect) {\n const parsedValues = splitCsvToList(value);\n // need to do this serially\n await mapSeries(parsedValues, async (parsedValue) => {\n // if we already have a value, skip re-processing it again\n if (purposeMapping.valueMapping[parsedValue] !== undefined) {\n return;\n }\n const { preferenceValue } = await inquirer.prompt<{\n /** purpose value */\n preferenceValue: boolean;\n }>([\n {\n name: 'preferenceValue',\n // eslint-disable-next-line max-len\n message: `Choose the preference value for \"${preferenceTopic.slug}\" value \"${parsedValue}\" associated with purpose \"${purposeMapping.purpose}\"`,\n type: 'list',\n choices: preferenceOptions,\n default: preferenceOptions.find((x) => x === parsedValue),\n },\n ]);\n purposeMapping.valueMapping[parsedValue] = preferenceValue;\n });\n return;\n }\n\n throw new Error(\n `Unknown preference topic type: ${preferenceTopic.type}`,\n );\n }\n });\n\n currentState.columnToPurposeName[col] = purposeMapping;\n });\n\n return currentState;\n}\n/* eslint-enable no-param-reassign */\n","import {\n PreferenceQueryResponseItem,\n PreferenceStoreIdentifier,\n PreferenceUpdateItem,\n} from '@transcend-io/privacy-types';\nimport * as t from 'io-ts';\n\nexport const PurposeRowMapping = t.type({\n /**\n * The slug or trackingType of the purpose to map to\n *\n * e.g. `Marketing`\n */\n purpose: t.string,\n /**\n * If the column maps to a preference instead of a purpose\n * this is the slug of the purpose.\n *\n * null value indicates that this column maps to the true/false\n * value of the purpose\n */\n preference: t.union([t.string, t.null]),\n /**\n * The mapping between each row value and purpose/preference value.\n *\n * e.g. 
for a boolean preference or purpose\n * {\n * 'true': true,\n * 'false': false,\n * '': true,\n * }\n *\n * or for a single or multi select preference\n * {\n * '': true,\n * 'value1': 'Value1',\n * 'value2': 'Value2',\n * }\n */\n valueMapping: t.record(\n t.string,\n t.union([t.string, t.boolean, t.null, t.undefined]),\n ),\n});\n\n/** Override type */\nexport type PurposeRowMapping = t.TypeOf<typeof PurposeRowMapping>;\n\n/**\n * Mapping of column name to purpose row mapping.\n * This is used to map each column in the CSV to the relevant purpose and preference definitions in\n * transcend.\n */\nexport const ColumnPurposeMap = t.record(t.string, PurposeRowMapping);\n\n/** Override type */\nexport type ColumnPurposeMap = t.TypeOf<typeof ColumnPurposeMap>;\n\nexport const IdentifierMetadataForPreference = t.type({\n /** The identifier name */\n name: t.string,\n /** Is unique on preference store */\n isUniqueOnPreferenceStore: t.boolean,\n});\n\n/** Override type */\nexport type IdentifierMetadataForPreference = t.TypeOf<\n typeof IdentifierMetadataForPreference\n>;\n\n/**\n * Mapping of identifier name to the column name in the CSV file.\n * This is used to map each identifier name to the column in the CSV file.\n */\nexport const ColumnIdentifierMap = t.record(\n t.string,\n IdentifierMetadataForPreference,\n);\n\n/** Override type */\nexport type ColumnIdentifierMap = t.TypeOf<typeof ColumnIdentifierMap>;\n\nexport const FileMetadataState = t.intersection([\n t.type({\n /**\n * Definition of how to map each column in the CSV to\n * the relevant purpose and preference definitions in transcend\n */\n columnToPurposeName: t.record(t.string, PurposeRowMapping),\n /** Last time the file was last parsed at */\n lastFetchedAt: t.string,\n /**\n * Mapping of userId to the rows in the file that need to be uploaded\n * These uploads are overwriting non-existent preferences and are safe\n */\n pendingSafeUpdates: t.record(t.string, t.record(t.string, t.string)),\n /**\n * Mapping of userId to the rows in the file that need to be uploaded\n * these records have conflicts with existing consent preferences\n */\n pendingConflictUpdates: t.record(\n t.string,\n t.type({\n record: PreferenceQueryResponseItem,\n row: t.record(t.string, t.string),\n }),\n ),\n /**\n * Mapping of userId to the rows in the file that can be skipped because\n * their preferences are already in the store\n */\n skippedUpdates: t.record(t.string, t.record(t.string, t.string)),\n }),\n t.partial({\n /** Determine which column name in file maps to consent record identifier to upload on */\n identifierColumn: t.string,\n /** Determine which column name in file maps to the timestamp */\n timestampColum: t.string,\n }),\n]);\n\n/** Override type */\nexport type FileMetadataState = t.TypeOf<typeof FileMetadataState>;\n\n/**\n * This is the type of the receipts that are stored in the file\n * that is used to track the state of the upload process.\n * It is used to resume the upload process from where it left off.\n * It is used to persist the state of the upload process across multiple runs.\n */\nexport const PreferenceUpdateMap = t.record(\n t.string,\n // This can either be true to indicate the record is pending\n // or it can be an object showing the object\n // We only return a fixed number of results to avoid\n // making the JSON file too large\n t.union([t.boolean, PreferenceUpdateItem]),\n);\n\n/** Override type */\nexport type PreferenceUpdateMap = t.TypeOf<typeof PreferenceUpdateMap>;\n\n/**\n * This is the type of the 
pending updates that are safe to run without\n * conflicts with existing consent preferences.\n *\n * Key is primaryKey of the record in the file.\n * The value is the row in the file that is safe to upload.\n */\nexport const PendingSafePreferenceUpdates = t.record(\n t.string,\n // This can either be true to indicate the record is safe\n // or it can be an object showing the object\n // We only return a fixed number of results to avoid\n // making the JSON file too large\n t.union([t.boolean, t.record(t.string, t.string)]),\n);\n\n/** Override type */\nexport type PendingSafePreferenceUpdates = t.TypeOf<\n typeof PendingSafePreferenceUpdates\n>;\n\n/**\n * These are the updates that failed to be uploaded to the API.\n */\nexport const FailingPreferenceUpdates = t.record(\n t.string,\n t.type({\n /** Time upload ran at */\n uploadedAt: t.string,\n /** Attempts to upload that resulted in an error */\n error: t.string,\n /** The update body */\n update: PreferenceUpdateItem,\n }),\n);\n\n/** Override type */\nexport type FailingPreferenceUpdates = t.TypeOf<\n typeof FailingPreferenceUpdates\n>;\n\n/**\n * This is the type of the pending updates that are in conflict with existing consent preferences.\n *\n * Key is primaryKey of the record in the file.\n * The value is the row in the file that is pending upload.\n */\nexport const PendingWithConflictPreferenceUpdates = t.record(\n t.string,\n // We always return the conflicts for investigation\n t.type({\n /** Record to be inserted to transcend v1/preferences API */\n record: PreferenceQueryResponseItem,\n /** The row in the file that is pending upload */\n row: t.record(t.string, t.string),\n }),\n);\n\n/** Override type */\nexport type PendingWithConflictPreferenceUpdates = t.TypeOf<\n typeof PendingWithConflictPreferenceUpdates\n>;\n\n/**\n * The set of preference updates that are skipped\n * Key is primaryKey and value is the row in the CSV\n * that is skipped.\n *\n * This is usually because the preferences are already in the store\n * or there are duplicate rows in the CSV file that are identical.\n */\nexport const SkippedPreferenceUpdates = t.record(\n t.string,\n t.record(t.string, t.string),\n);\n\n/** Override type */\nexport type SkippedPreferenceUpdates = t.TypeOf<\n typeof SkippedPreferenceUpdates\n>;\n\n/** Persist this data between runs of the script */\nexport const PreferenceState = t.type({\n /**\n * Store a cache of previous files read in\n */\n fileMetadata: t.record(t.string, FileMetadataState),\n /**\n * The set of successful uploads to Transcend\n * Mapping from userId to the upload metadata\n */\n failingUpdates: t.record(\n t.string,\n t.type({\n /** Time upload ran at */\n uploadedAt: t.string,\n /** Attempts to upload that resulted in an error */\n error: t.string,\n /** The update body */\n update: PreferenceUpdateItem,\n }),\n ),\n /**\n * The set of pending uploads to Transcend\n * Mapping from userId to the upload metadata\n */\n pendingUpdates: t.record(t.string, PreferenceUpdateItem),\n});\n\n/** Override type */\nexport type PreferenceState = t.TypeOf<typeof PreferenceState>;\n\nexport const DeletePreferenceRecordsInput = t.type({\n /** Array of consent preference records to delete */\n records: t.array(\n t.type({\n /** The anchor identifier to locate the consent record */\n anchorIdentifier: PreferenceStoreIdentifier,\n /** The ISO 8601 timestamp of when the deletion is requested */\n timestamp: t.string,\n }),\n ),\n});\n\n/** Override type */\nexport type DeletePreferenceRecordsInput = t.TypeOf<\n 
typeof DeletePreferenceRecordsInput\n>;\n\nexport const DeletePreferenceRecordsResponse = t.intersection([\n t.type({\n /** Array of results for each preference record deletion */\n records: t.array(\n t.intersection([\n t.type({\n /** Whether the deletion was successful */\n success: t.boolean,\n }),\n t.partial({\n /** An error message if the deletion failed */\n errorMessage: t.string,\n }),\n ]),\n ),\n /** The list of failed deletions with their respective errors */\n failures: t.array(\n t.type({\n /** The index of the failed update in the original request */\n index: t.number,\n /** The error message associated with the failure */\n error: t.string,\n }),\n ),\n }),\n t.partial({\n /** Any general errors that occurred during the operation */\n errors: t.array(t.string),\n }),\n]);\n\n/** Override type */\nexport type DeletePreferenceRecordsResponse = t.TypeOf<\n typeof DeletePreferenceRecordsResponse\n>;\n\n/** CLI CSV Row for deleting preference records */\nexport const DeletePreferenceRecordCliCsvRow = t.type({\n /** The name of the identifier type (e.g., email, userId) */\n name: t.string,\n /** The value of the identifier */\n value: t.string,\n});\n\n/** Override type */\nexport type DeletePreferenceRecordCliCsvRow = t.TypeOf<\n typeof DeletePreferenceRecordCliCsvRow\n>;\n"]}
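The preference-management codecs carried in the sourcesContent above are io-ts runtime types. As a minimal sketch of how one of them validates a CSV row, here is the `DeletePreferenceRecordCliCsvRow` shape (the only codec fully visible in this diff) re-declared locally; the import paths and sample values are illustrative and not taken from the package:

```ts
import * as t from 'io-ts';
import { isRight } from 'fp-ts/Either';

// Re-declared for illustration; mirrors the codec shown in the diff above:
// the name of an identifier type (e.g. 'email') plus its value.
const DeletePreferenceRecordCliCsvRow = t.type({
  name: t.string,
  value: t.string,
});

// Hypothetical CSV row; decode() returns an Either that is Right on success.
const result = DeletePreferenceRecordCliCsvRow.decode({
  name: 'email',
  value: 'user@example.com',
});

if (isRight(result)) {
  // result.right is narrowed to { name: string; value: string }
  console.log(`delete by ${result.right.name}: ${result.right.value}`);
}
```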
@@ -1,2 +1,2 @@
-"use strict";Object.defineProperty(exports, "__esModule", {value: true});var
-//# sourceMappingURL=chunk-
+"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunk5MG2CEZVcjs = require('./chunk-5MG2CEZV.cjs');function f(p,{adTechPurposes:m=["SaleOfInfo"],serviceToTitle:s,serviceToSupportedIntegration:r}){let e=[],i=[],n={};p.forEach(t=>{let{service:a,attributes:c=[]}=t;if(!a||a==="internalService")return;let u=c.find(o=>o.key==="Found on Domain");u&&(n[a]||(n[a]=[]),n[a].push(...u.values.map(o=>o.replace("https://","").replace("http://",""))),n[a]=[...new Set(n[a])]),_chunk5MG2CEZVcjs.i.call(void 0, t.trackingPurposes,m).length>0?(i.push(a),e.includes(a)&&(e=e.filter(o=>o!==a))):i.includes(a)||e.push(a)});let h=[...new Set(i)].map(t=>({title:s[t],...r[t]?{integrationName:t}:{integrationName:"promptAPerson","outer-type":t},attributes:[{key:"Tech Type",values:["Ad Tech"]},{key:"Found On Domain",values:n[t]||[]}]}));return{siteTechDataSilos:[...new Set(e)].map(t=>({title:s[t],...r[t]?{integrationName:t}:{integrationName:"promptAPerson",outerType:t},attributes:[{key:"Tech Type",values:["Site Tech"]},{key:"Found On Domain",values:n[t]||[]}]})),adTechDataSilos:h}}exports.a = f;
+//# sourceMappingURL=chunk-ETSLJPTD.cjs.map
@@ -1 +1 @@
-{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-ETSLJPTD.cjs","../src/lib/consent-manager/dataFlowsToDataSilos.ts"],"names":["dataFlowsToDataSilos","inputs","adTechPurposes","serviceToTitle","serviceToSupportedIntegration","siteTechIntegrations","adTechIntegrations","serviceToFoundOnDomain","flow","service","attributes","foundOnDomain","attr","v","union_default","s","adTechDataSilos"],"mappings":"AAAA,iIAAwC,SCWxBA,CAAAA,CACdC,CAAAA,CACA,CACE,cAAA,CAAAC,CAAAA,CAAiB,CAAC,YAAY,CAAA,CAC9B,cAAA,CAAAC,CAAAA,CACA,6BAAA,CAAAC,CACF,CAAA,CASA,CAEA,IAAIC,CAAAA,CAAiC,CAAC,CAAA,CAGhCC,CAAAA,CAA+B,CAAC,CAAA,CAGhCC,CAAAA,CAAsD,CAAC,CAAA,CAG7DN,CAAAA,CAAO,OAAA,CAASO,CAAAA,EAAS,CAEvB,GAAM,CAAE,OAAA,CAAAC,CAAAA,CAAS,UAAA,CAAAC,CAAAA,CAAa,CAAC,CAAE,CAAA,CAAIF,CAAAA,CACrC,EAAA,CAAI,CAACC,CAAAA,EAAWA,CAAAA,GAAY,iBAAA,CAC1B,MAAA,CAIF,IAAME,CAAAA,CAAgBD,CAAAA,CAAW,IAAA,CAC9BE,CAAAA,EAASA,CAAAA,CAAK,GAAA,GAAQ,iBACzB,CAAA,CAGID,CAAAA,EAAAA,CACGJ,CAAAA,CAAuBE,CAAO,CAAA,EAAA,CACjCF,CAAAA,CAAuBE,CAAO,CAAA,CAAI,CAAC,CAAA,CAAA,CAErCF,CAAAA,CAAuBE,CAAO,CAAA,CAAG,IAAA,CAC/B,GAAGE,CAAAA,CAAc,MAAA,CAAO,GAAA,CAAKE,CAAAA,EAC3BA,CAAAA,CAAE,OAAA,CAAQ,UAAA,CAAY,EAAE,CAAA,CAAE,OAAA,CAAQ,SAAA,CAAW,EAAE,CACjD,CACF,CAAA,CACAN,CAAAA,CAAuBE,CAAO,CAAA,CAAI,CAChC,GAAG,IAAI,GAAA,CAAIF,CAAAA,CAAuBE,CAAO,CAAC,CAC5C,CAAA,CAAA,CAIEK,iCAAAA,CAAMN,CAAK,gBAAA,CAAkBN,CAAc,CAAA,CAAE,MAAA,CAAS,CAAA,CAAA,CAExDI,CAAAA,CAAmB,IAAA,CAAKG,CAAO,CAAA,CAG3BJ,CAAAA,CAAqB,QAAA,CAASI,CAAO,CAAA,EAAA,CACvCJ,CAAAA,CAAuBA,CAAAA,CAAqB,MAAA,CACzCU,CAAAA,EAAMA,CAAAA,GAAMN,CACf,CAAA,CAAA,CAAA,CAEQH,CAAAA,CAAmB,QAAA,CAASG,CAAO,CAAA,EAE7CJ,CAAAA,CAAqB,IAAA,CAAKI,CAAO,CAErC,CAAC,CAAA,CAGD,IAAMO,CAAAA,CAAkB,CAAC,GAAG,IAAI,GAAA,CAAIV,CAAkB,CAAC,CAAA,CAAE,GAAA,CAAKG,CAAAA,EAAAA,CAAa,CACzE,KAAA,CAAON,CAAAA,CAAeM,CAAO,CAAA,CAC7B,GAAIL,CAAAA,CAA8BK,CAAO,CAAA,CACrC,CAAE,eAAA,CAAiBA,CAAQ,CAAA,CAC3B,CAAE,eAAA,CAAiB,eAAA,CAAiB,YAAA,CAAcA,CAAQ,CAAA,CAC9D,UAAA,CAAY,CACV,CACE,GAAA,CAAK,WAAA,CACL,MAAA,CAAQ,CAAC,SAAS,CACpB,CAAA,CACA,CACE,GAAA,CAAK,iBAAA,CACL,MAAA,CAAQF,CAAAA,CAAuBE,CAAO,CAAA,EAAK,CAAC,CAC9C,CACF,CACF,CAAA,CAAE,CAAA,CAsBF,MAAO,CACL,iBAAA,CApBwB,CAAC,GAAG,IAAI,GAAA,CAAIJ,CAAoB,CAAC,CAAA,CAAE,GAAA,CAC1DI,CAAAA,EAAAA,CAAa,CACZ,KAAA,CAAON,CAAAA,CAAeM,CAAO,CAAA,CAC7B,GAAIL,CAAAA,CAA8BK,CAAO,CAAA,CACrC,CAAE,eAAA,CAAiBA,CAAQ,CAAA,CAC3B,CAAE,eAAA,CAAiB,eAAA,CAAiB,SAAA,CAAWA,CAAQ,CAAA,CAC3D,UAAA,CAAY,CACV,CACE,GAAA,CAAK,WAAA,CACL,MAAA,CAAQ,CAAC,WAAW,CACtB,CAAA,CACA,CACE,GAAA,CAAK,iBAAA,CACL,MAAA,CAAQF,CAAAA,CAAuBE,CAAO,CAAA,EAAK,CAAC,CAC9C,CACF,CACF,CAAA,CACF,CAAA,CAIE,eAAA,CAAAO,CACF,CACF,CAAA,cAAA","file":"/home/runner/work/cli/cli/dist/chunk-ETSLJPTD.cjs","sourcesContent":[null,"import { DataFlowInput, DataSiloInput } from '../../codecs';\nimport { union } from 'lodash-es';\nimport { IndexedCatalogs } from '../graphql';\n\n/**\n * Convert data flow configurations into a set of data silo configurations\n *\n * @param inputs - Data flow input to convert to data silos\n * @param options - Additional options\n * @returns Business entity configuration input\n */\nexport function dataFlowsToDataSilos(\n inputs: DataFlowInput[],\n {\n adTechPurposes = ['SaleOfInfo'],\n serviceToTitle,\n serviceToSupportedIntegration,\n }: IndexedCatalogs & {\n /** List of purposes that are considered \"Ad Tech\" */\n adTechPurposes?: string[];\n },\n): {\n /** List of data silo configurations for site-tech services */\n siteTechDataSilos: DataSiloInput[];\n /** List of data silo configurations for ad-tech services */\n adTechDataSilos: DataSiloInput[];\n} {\n // List of site tech 
integrations\n let siteTechIntegrations: string[] = [];\n\n // List of ad tech integrations\n const adTechIntegrations: string[] = [];\n\n // Mapping from service name to list of\n const serviceToFoundOnDomain: { [k in string]: string[] } = {};\n\n // iterate over each flow\n inputs.forEach((flow) => {\n // process data flows with services\n const { service, attributes = [] } = flow;\n if (!service || service === 'internalService') {\n return;\n }\n\n // create mapping to found on domain\n const foundOnDomain = attributes.find(\n (attr) => attr.key === 'Found on Domain',\n );\n\n // Create a list of all domains where the data flow was found\n if (foundOnDomain) {\n if (!serviceToFoundOnDomain[service]) {\n serviceToFoundOnDomain[service] = [];\n }\n serviceToFoundOnDomain[service]!.push(\n ...foundOnDomain.values.map((v) =>\n v.replace('https://', '').replace('http://', ''),\n ),\n );\n serviceToFoundOnDomain[service] = [\n ...new Set(serviceToFoundOnDomain[service]),\n ];\n }\n\n // Keep track of ad tech\n if (union(flow.trackingPurposes, adTechPurposes).length > 0) {\n // add service to ad tech list\n adTechIntegrations.push(service);\n\n // remove from site tech list\n if (siteTechIntegrations.includes(service)) {\n siteTechIntegrations = siteTechIntegrations.filter(\n (s) => s !== service,\n );\n }\n } else if (!adTechIntegrations.includes(service)) {\n // add to site tech list\n siteTechIntegrations.push(service);\n }\n });\n\n // create the list of ad tech integrations\n const adTechDataSilos = [...new Set(adTechIntegrations)].map((service) => ({\n title: serviceToTitle[service],\n ...(serviceToSupportedIntegration[service]\n ? { integrationName: service }\n : { integrationName: 'promptAPerson', 'outer-type': service }),\n attributes: [\n {\n key: 'Tech Type',\n values: ['Ad Tech'],\n },\n {\n key: 'Found On Domain',\n values: serviceToFoundOnDomain[service] || [],\n },\n ],\n }));\n\n // create the list of site tech integrations\n const siteTechDataSilos = [...new Set(siteTechIntegrations)].map(\n (service) => ({\n title: serviceToTitle[service],\n ...(serviceToSupportedIntegration[service]\n ? { integrationName: service }\n : { integrationName: 'promptAPerson', outerType: service }),\n attributes: [\n {\n key: 'Tech Type',\n values: ['Site Tech'],\n },\n {\n key: 'Found On Domain',\n values: serviceToFoundOnDomain[service] || [],\n },\n ],\n }),\n );\n\n return {\n siteTechDataSilos,\n adTechDataSilos,\n };\n}\n"]}
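The renamed chunk above bundles `dataFlowsToDataSilos` from `src/lib/consent-manager/dataFlowsToDataSilos.ts`, whose readable source is embedded in the sourcemap's sourcesContent. Below is a minimal sketch of a call matching the signature shown there; the service names, catalog entries, and the `as` casts are assumptions, since `DataFlowInput` and `IndexedCatalogs` are not defined in this diff:

```ts
import { dataFlowsToDataSilos } from './lib/consent-manager/dataFlowsToDataSilos'; // path per the sourcemap; the public export point is not confirmed by this diff

// Hypothetical data flows and catalogs; only the fields the function reads
// (service, attributes, trackingPurposes, serviceToTitle,
// serviceToSupportedIntegration) are filled in.
const { adTechDataSilos, siteTechDataSilos } = dataFlowsToDataSilos(
  [
    {
      service: 'googleAds',
      trackingPurposes: ['SaleOfInfo'],
      attributes: [{ key: 'Found on Domain', values: ['https://example.com'] }],
    },
    { service: 'intercom', trackingPurposes: ['Functional'] },
  ] as any[], // DataFlowInput has additional fields not shown in this diff
  {
    serviceToTitle: { googleAds: 'Google Ads', intercom: 'Intercom' },
    serviceToSupportedIntegration: { intercom: true },
  } as any, // IndexedCatalogs shape is not included in this diff
);

// Each returned silo carries a title, an integrationName (or a
// 'promptAPerson' fallback), and 'Tech Type' / 'Found On Domain' attributes.
console.log(adTechDataSilos.length, siteTechDataSilos.length);
```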