@transcend-io/cli 7.0.5 → 7.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +11 -4
- package/dist/bin/bash-complete.cjs +1 -1
- package/dist/bin/cli.cjs +1 -1
- package/dist/bin/deprecated-command.cjs +2 -2
- package/dist/{chunk-L7NHDBBN.cjs → chunk-2FBBA65B.cjs} +3 -3
- package/dist/{chunk-L7NHDBBN.cjs.map → chunk-2FBBA65B.cjs.map} +1 -1
- package/dist/{chunk-K4QC24DY.cjs → chunk-327HQXYV.cjs} +2 -2
- package/dist/{chunk-K4QC24DY.cjs.map → chunk-327HQXYV.cjs.map} +1 -1
- package/dist/{chunk-XWMFHJGK.cjs → chunk-3G77IGKQ.cjs} +2 -2
- package/dist/{chunk-XWMFHJGK.cjs.map → chunk-3G77IGKQ.cjs.map} +1 -1
- package/dist/{chunk-EUDUZQOO.cjs → chunk-5JXP3N7U.cjs} +4 -4
- package/dist/{chunk-EUDUZQOO.cjs.map → chunk-5JXP3N7U.cjs.map} +1 -1
- package/dist/{chunk-XXVZA7HN.cjs → chunk-74OAET6D.cjs} +2 -2
- package/dist/{chunk-XXVZA7HN.cjs.map → chunk-74OAET6D.cjs.map} +1 -1
- package/dist/{chunk-RQWXNYMW.cjs → chunk-AFYBKLWT.cjs} +2 -2
- package/dist/{chunk-RQWXNYMW.cjs.map → chunk-AFYBKLWT.cjs.map} +1 -1
- package/dist/{chunk-QBBOYJ3O.cjs → chunk-EAJZX4TX.cjs} +2 -2
- package/dist/{chunk-QBBOYJ3O.cjs.map → chunk-EAJZX4TX.cjs.map} +1 -1
- package/dist/{chunk-UTG3LZO7.cjs → chunk-I4XVTF53.cjs} +2 -2
- package/dist/{chunk-UTG3LZO7.cjs.map → chunk-I4XVTF53.cjs.map} +1 -1
- package/dist/{chunk-SAEKBZGF.cjs → chunk-LCDYXJN6.cjs} +2 -2
- package/dist/{chunk-SAEKBZGF.cjs.map → chunk-LCDYXJN6.cjs.map} +1 -1
- package/dist/{chunk-YGAK4NBT.cjs → chunk-NNMCZMFJ.cjs} +2 -2
- package/dist/{chunk-YGAK4NBT.cjs.map → chunk-NNMCZMFJ.cjs.map} +1 -1
- package/dist/{chunk-NSKHXTBW.cjs → chunk-NW3O6OPS.cjs} +2 -2
- package/dist/{chunk-NSKHXTBW.cjs.map → chunk-NW3O6OPS.cjs.map} +1 -1
- package/dist/{chunk-6W7CSPHF.cjs → chunk-PIJPXUK5.cjs} +3 -3
- package/dist/{chunk-6W7CSPHF.cjs.map → chunk-PIJPXUK5.cjs.map} +1 -1
- package/dist/chunk-Q7I37FJV.cjs +2 -0
- package/dist/chunk-Q7I37FJV.cjs.map +1 -0
- package/dist/chunk-R77JR6OQ.cjs +2919 -0
- package/dist/chunk-R77JR6OQ.cjs.map +1 -0
- package/dist/chunk-UWUR56CO.cjs +2 -0
- package/dist/chunk-UWUR56CO.cjs.map +1 -0
- package/dist/chunk-VIPFYWRR.cjs +2 -0
- package/dist/chunk-VIPFYWRR.cjs.map +1 -0
- package/dist/{chunk-X37NM7FZ.cjs → chunk-VMGXBH6E.cjs} +19 -19
- package/dist/{chunk-X37NM7FZ.cjs.map → chunk-VMGXBH6E.cjs.map} +1 -1
- package/dist/{chunk-N27TJLBG.cjs → chunk-YK5YCHD6.cjs} +2 -2
- package/dist/{chunk-N27TJLBG.cjs.map → chunk-YK5YCHD6.cjs.map} +1 -1
- package/dist/{chunk-TET6I2QK.cjs → chunk-Z2PA2QDO.cjs} +2 -2
- package/dist/{chunk-TET6I2QK.cjs.map → chunk-Z2PA2QDO.cjs.map} +1 -1
- package/dist/{impl-O2M7RVTK.cjs → impl-275S73PA.cjs} +2 -2
- package/dist/{impl-O2M7RVTK.cjs.map → impl-275S73PA.cjs.map} +1 -1
- package/dist/{impl-JG5TMU6H.cjs → impl-2ZHRCHKL.cjs} +2 -2
- package/dist/{impl-JG5TMU6H.cjs.map → impl-2ZHRCHKL.cjs.map} +1 -1
- package/dist/impl-3HQ63RRZ.cjs +2 -0
- package/dist/{impl-YL6NFYNB.cjs.map → impl-3HQ63RRZ.cjs.map} +1 -1
- package/dist/impl-3ON47TMW.cjs +2 -0
- package/dist/{impl-LDTQPPG4.cjs.map → impl-3ON47TMW.cjs.map} +1 -1
- package/dist/{impl-CL6KPZLE.cjs → impl-42EGVNC4.cjs} +3 -3
- package/dist/{impl-CL6KPZLE.cjs.map → impl-42EGVNC4.cjs.map} +1 -1
- package/dist/{impl-ZVQSMGTX.cjs → impl-57FO3PX7.cjs} +5 -5
- package/dist/{impl-ZVQSMGTX.cjs.map → impl-57FO3PX7.cjs.map} +1 -1
- package/dist/{impl-LZV3EW2T.cjs → impl-5RAQCIIT.cjs} +2 -2
- package/dist/{impl-LZV3EW2T.cjs.map → impl-5RAQCIIT.cjs.map} +1 -1
- package/dist/{impl-D5TZDNRA.cjs → impl-6ALA6KXM.cjs} +2 -2
- package/dist/{impl-D5TZDNRA.cjs.map → impl-6ALA6KXM.cjs.map} +1 -1
- package/dist/{impl-OCCJLM7J.cjs → impl-6FMDP53G.cjs} +2 -2
- package/dist/{impl-OCCJLM7J.cjs.map → impl-6FMDP53G.cjs.map} +1 -1
- package/dist/{impl-U2RBVO73.cjs → impl-7L5YW5CG.cjs} +2 -2
- package/dist/{impl-U2RBVO73.cjs.map → impl-7L5YW5CG.cjs.map} +1 -1
- package/dist/{impl-2JBFAO77.cjs → impl-B5GIP2VU.cjs} +2 -2
- package/dist/{impl-2JBFAO77.cjs.map → impl-B5GIP2VU.cjs.map} +1 -1
- package/dist/{impl-QQKQBRMF.cjs → impl-BSJJNNCI.cjs} +2 -2
- package/dist/{impl-QQKQBRMF.cjs.map → impl-BSJJNNCI.cjs.map} +1 -1
- package/dist/{impl-TGEXS5L4.cjs → impl-C7VBO6EQ.cjs} +2 -2
- package/dist/{impl-TGEXS5L4.cjs.map → impl-C7VBO6EQ.cjs.map} +1 -1
- package/dist/impl-CNNTC625.cjs +2 -0
- package/dist/{impl-6KCSMCD6.cjs.map → impl-CNNTC625.cjs.map} +1 -1
- package/dist/{impl-RKBK7S5H.cjs → impl-CSVPLCDE.cjs} +2 -2
- package/dist/{impl-RKBK7S5H.cjs.map → impl-CSVPLCDE.cjs.map} +1 -1
- package/dist/{impl-5VEW37CW.cjs → impl-EKGVCWVS.cjs} +4 -4
- package/dist/{impl-5VEW37CW.cjs.map → impl-EKGVCWVS.cjs.map} +1 -1
- package/dist/impl-EZEGBKX7.cjs +2 -0
- package/dist/{impl-7Q5BF5OS.cjs.map → impl-EZEGBKX7.cjs.map} +1 -1
- package/dist/{impl-6FXWJOHN.cjs → impl-FPDL23FO.cjs} +2 -2
- package/dist/{impl-6FXWJOHN.cjs.map → impl-FPDL23FO.cjs.map} +1 -1
- package/dist/{impl-2ETA24UG.cjs → impl-HI3ZAAZJ.cjs} +6 -6
- package/dist/{impl-2ETA24UG.cjs.map → impl-HI3ZAAZJ.cjs.map} +1 -1
- package/dist/{impl-CD3OZA7N.cjs → impl-HKEYENUI.cjs} +2 -2
- package/dist/{impl-CD3OZA7N.cjs.map → impl-HKEYENUI.cjs.map} +1 -1
- package/dist/impl-IZ6KZF27.cjs +2 -0
- package/dist/{impl-6O3K3VET.cjs.map → impl-IZ6KZF27.cjs.map} +1 -1
- package/dist/{impl-CVJ7ESYK.cjs → impl-JDJ4CQXV.cjs} +2 -2
- package/dist/{impl-CVJ7ESYK.cjs.map → impl-JDJ4CQXV.cjs.map} +1 -1
- package/dist/{impl-6FE4QJ4J.cjs → impl-JKSZNCWA.cjs} +2 -2
- package/dist/{impl-6FE4QJ4J.cjs.map → impl-JKSZNCWA.cjs.map} +1 -1
- package/dist/{impl-XK5I264J.cjs → impl-MEK2DTKK.cjs} +2 -2
- package/dist/{impl-XK5I264J.cjs.map → impl-MEK2DTKK.cjs.map} +1 -1
- package/dist/{impl-KE2PQIWC.cjs → impl-NL7N7YF4.cjs} +2 -2
- package/dist/{impl-KE2PQIWC.cjs.map → impl-NL7N7YF4.cjs.map} +1 -1
- package/dist/{impl-DQN5SZPW.cjs → impl-OBAYFQ23.cjs} +2 -2
- package/dist/{impl-DQN5SZPW.cjs.map → impl-OBAYFQ23.cjs.map} +1 -1
- package/dist/impl-OJ5LZMML.cjs +2 -0
- package/dist/{impl-SPMWIV5D.cjs.map → impl-OJ5LZMML.cjs.map} +1 -1
- package/dist/impl-OKB3HF7H.cjs +2 -0
- package/dist/{impl-7HFUP3UR.cjs.map → impl-OKB3HF7H.cjs.map} +1 -1
- package/dist/{impl-5TMNTUZT.cjs → impl-OOKYV6NI.cjs} +3 -3
- package/dist/{impl-5TMNTUZT.cjs.map → impl-OOKYV6NI.cjs.map} +1 -1
- package/dist/{impl-BMAMFZHY.cjs → impl-P77UGVDU.cjs} +2 -2
- package/dist/{impl-BMAMFZHY.cjs.map → impl-P77UGVDU.cjs.map} +1 -1
- package/dist/impl-PJF6PVKZ.cjs +2 -0
- package/dist/{impl-DDY5QERH.cjs.map → impl-PJF6PVKZ.cjs.map} +1 -1
- package/dist/impl-RTDKCEU3.cjs +2 -0
- package/dist/{impl-IGRDFRB5.cjs.map → impl-RTDKCEU3.cjs.map} +1 -1
- package/dist/impl-S7SJFITO.cjs +2 -0
- package/dist/{impl-KEOLBOR2.cjs.map → impl-S7SJFITO.cjs.map} +1 -1
- package/dist/{impl-24MFRX5R.cjs → impl-U64CK4IR.cjs} +2 -2
- package/dist/{impl-24MFRX5R.cjs.map → impl-U64CK4IR.cjs.map} +1 -1
- package/dist/{impl-IXNKHZFW.cjs → impl-V3TJ5WS2.cjs} +2 -2
- package/dist/{impl-IXNKHZFW.cjs.map → impl-V3TJ5WS2.cjs.map} +1 -1
- package/dist/{impl-3AJDNF67.cjs → impl-VOTMQS3M.cjs} +2 -2
- package/dist/{impl-3AJDNF67.cjs.map → impl-VOTMQS3M.cjs.map} +1 -1
- package/dist/impl-WMJ7ZSI2.cjs +2 -0
- package/dist/{impl-4L4ETIBX.cjs.map → impl-WMJ7ZSI2.cjs.map} +1 -1
- package/dist/impl-Y2UFIDZG.cjs +2 -0
- package/dist/{impl-GNUG6ZDE.cjs.map → impl-Y2UFIDZG.cjs.map} +1 -1
- package/dist/impl-ZP5ZWANC.cjs +2 -0
- package/dist/{impl-QGSF3JGN.cjs.map → impl-ZP5ZWANC.cjs.map} +1 -1
- package/dist/index.cjs +3 -3
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +33442 -6116
- package/package.json +4 -3
- package/dist/chunk-35HDA5WV.cjs +0 -2
- package/dist/chunk-35HDA5WV.cjs.map +0 -1
- package/dist/chunk-AWMP5TQB.cjs +0 -2847
- package/dist/chunk-AWMP5TQB.cjs.map +0 -1
- package/dist/chunk-BY7W4UQF.cjs +0 -2
- package/dist/chunk-BY7W4UQF.cjs.map +0 -1
- package/dist/chunk-ZD2HOHJI.cjs +0 -2
- package/dist/chunk-ZD2HOHJI.cjs.map +0 -1
- package/dist/impl-4L4ETIBX.cjs +0 -2
- package/dist/impl-6KCSMCD6.cjs +0 -2
- package/dist/impl-6O3K3VET.cjs +0 -2
- package/dist/impl-7HFUP3UR.cjs +0 -2
- package/dist/impl-7Q5BF5OS.cjs +0 -2
- package/dist/impl-DDY5QERH.cjs +0 -2
- package/dist/impl-GNUG6ZDE.cjs +0 -2
- package/dist/impl-IGRDFRB5.cjs +0 -2
- package/dist/impl-KEOLBOR2.cjs +0 -2
- package/dist/impl-LDTQPPG4.cjs +0 -2
- package/dist/impl-QGSF3JGN.cjs +0 -2
- package/dist/impl-SPMWIV5D.cjs +0 -2
- package/dist/impl-YL6NFYNB.cjs +0 -2
package/README.md
CHANGED
@@ -2150,7 +2150,7 @@ transcend consent upload-preferences \
```txt
USAGE
-
transcend inventory pull (--auth value) [--resources all|apiKeys|customFields|templates|dataSilos|enrichers|dataFlows|businessEntities|actions|dataSubjects|identifiers|cookies|consentManager|partitions|prompts|promptPartials|promptGroups|agents|agentFunctions|agentFiles|vendors|dataCategories|processingPurposes|actionItems|actionItemCollections|teams|privacyCenters|policies|messages|assessments|assessmentTemplates|purposes] [--file value] [--transcendUrl value] [--dataSiloIds value]... [--integrationNames value]... [--trackerStatuses LIVE|NEEDS_REVIEW] [--pageSize value] [--skipDatapoints] [--skipSubDatapoints] [--includeGuessedCategories] [--debug]
+
transcend inventory pull (--auth value) [--resources all|apiKeys|customFields|templates|dataSilos|enrichers|dataFlows|businessEntities|processingActivities|actions|dataSubjects|identifiers|cookies|consentManager|partitions|prompts|promptPartials|promptGroups|agents|agentFunctions|agentFiles|vendors|dataCategories|processingPurposes|actionItems|actionItemCollections|teams|privacyCenters|policies|messages|assessments|assessmentTemplates|purposes] [--file value] [--transcendUrl value] [--dataSiloIds value]... [--integrationNames value]... [--trackerStatuses LIVE|NEEDS_REVIEW] [--pageSize value] [--skipDatapoints] [--skipSubDatapoints] [--includeGuessedCategories] [--debug]
transcend inventory pull --help
Generates a transcend.yml by pulling the configuration from your Transcend instance.
@@ -2164,7 +2164,7 @@ This command can be helpful if you are looking to:
FLAGS
--auth The Transcend API key. The scopes required will vary depending on the operation performed. If in doubt, the Full Admin scope will always work.
-
[--resources] The different resource types to pull in. Defaults to dataSilos,enrichers,templates,apiKeys. [all|apiKeys|customFields|templates|dataSilos|enrichers|dataFlows|businessEntities|actions|dataSubjects|identifiers|cookies|consentManager|partitions|prompts|promptPartials|promptGroups|agents|agentFunctions|agentFiles|vendors|dataCategories|processingPurposes|actionItems|actionItemCollections|teams|privacyCenters|policies|messages|assessments|assessmentTemplates|purposes, separator = ,]
+
[--resources] The different resource types to pull in. Defaults to dataSilos,enrichers,templates,apiKeys. [all|apiKeys|customFields|templates|dataSilos|enrichers|dataFlows|businessEntities|processingActivities|actions|dataSubjects|identifiers|cookies|consentManager|partitions|prompts|promptPartials|promptGroups|agents|agentFunctions|agentFiles|vendors|dataCategories|processingPurposes|actionItems|actionItemCollections|teams|privacyCenters|policies|messages|assessments|assessmentTemplates|purposes, separator = ,]
[--file] Path to the YAML file to pull into [default = ./transcend.yml]
[--transcendUrl] URL of the Transcend backend. Use https://api.us.transcend.io for US hosting [default = https://api.transcend.io]
[--dataSiloIds]... The UUIDs of the data silos that should be pulled into the YAML file [separator = ,]
@@ -2191,6 +2191,7 @@ The API key permissions for this command vary based on the `resources` argument:
| enrichers | The Privacy Request enricher configurations. | View Identity Verification Settings | [DSR Automation -> Identifiers](https://app.transcend.io/privacy-requests/identifiers) |
| dataFlows | Consent Manager Data Flow definitions. | View Data Flows | [Consent Management -> Data Flows](https://app.transcend.io/consent-manager/data-flows/approved) |
| businessEntities | The business entities in the data inventory. | View Data Inventory | [Data Inventory -> Business Entities](https://app.transcend.io/data-map/data-inventory/business-entities) |
+
| processingActivities | The processing activities in the data inventory. | View Data Inventory | [Data Inventory -> Processing Activities](https://app.transcend.io/data-map/data-inventory/processing-activities) |
| actions | The Privacy Request action settings. | View Data Subject Request Settings | [DSR Automation -> Request Settings](https://app.transcend.io/privacy-requests/settings) |
| dataSubjects | The Privacy Request data subject settings. | View Data Subject Request Settings | [DSR Automation -> Request Settings](https://app.transcend.io/privacy-requests/settings) |
| identifiers | The Privacy Request identifier configurations. | View Identity Verification Settings | [DSR Automation -> Identifiers](https://app.transcend.io/privacy-requests/identifiers) |
@@ -2287,6 +2288,12 @@ transcend inventory pull --auth="$TRANSCEND_API_KEY" --resources=customFields
transcend inventory pull --auth="$TRANSCEND_API_KEY" --resources=businessEntities
```
+
**Pull processing activities only (see [this example](./examples/processing-activities.yml))**
+
+
```sh
+
transcend inventory pull --auth="$TRANSCEND_API_KEY" --resources=processingActivities
+
```
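Since `--resources` accepts a comma-separated list (separator = `,` per the flag description above), the new resource can also be pulled alongside existing ones. An illustrative invocation, not taken from the README; the resource combination and file path are placeholders:

```sh
transcend inventory pull \
  --auth="$TRANSCEND_API_KEY" \
  --resources=businessEntities,processingActivities \
  --file=./transcend.yml
```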
+
**Pull enrichers and identifiers (see [this example](./examples/enrichers.yml))**
```sh
@@ -2985,7 +2992,7 @@ transcend inventory consent-managers-to-business-entities \
```txt
USAGE
-
transcend admin generate-api-keys (--email value) (--password value) (--apiKeyTitle value) (--file value) (--scopes View Only|Full Admin|Rotate Hosted Sombra keys|Manage Global Attributes|Manage Access Controls|Manage Billing|Manage SSO|Manage API Keys|Manage Organization Information|Manage Email Domains|View Customer Data in Privacy Requests|View Customer Data in Data Mapping|View API Keys|View Audit Events|View SSO|View Scopes|View All Action Items|Manage All Action Items|View Employees|View Email Domains|View Global Attributes|View Legal Hold|Manage Legal Holds|Manage Request Security|Manage Request Compilation|Manage Assigned Privacy Requests|Submit New Data Subject Request|Manage Data Subject Request Settings|Manage Email Templates|Manage Request Identity Verification|Publish Privacy Center|Manage Data Map|Manage Privacy Center Layout|Manage Policies|View Policies|Manage Internationalization Messages|View Internationalization Messages|Request Approval and Communication|View Data Subject Request Settings|View the Request Compilation|View Identity Verification Settings|View Incoming Requests|View Assigned Privacy Requests|View Privacy Center Layout|View Email Templates|Connect Data Silos|Manage Data Inventory|Manage Assigned Data Inventory|Manage Assigned Integrations|View Data Map|View Assigned Integrations|View Assigned Data Inventory|View Data Inventory|Manage Consent Manager|Manage Consent Manager Developer Settings|Manage Consent Manager Display Settings|Deploy Test Consent Manager|Deploy Consent Manager|Manage Assigned Consent Manager|Manage Data Flows|View Data Flows|View Assigned Consent Manager|View Consent Manager|View Assessments|Manage Assessments|View Assigned Assessments|Manage Assigned Assessments|
+
transcend admin generate-api-keys (--email value) (--password value) (--apiKeyTitle value) (--file value) (--scopes View Only|Full Admin|Rotate Hosted Sombra keys|Manage Global Attributes|Manage Access Controls|Manage Billing|Manage SSO|Manage API Keys|Manage Organization Information|Manage Email Domains|Manage Data Sub Categories|View Customer Data in Privacy Requests|View Customer Data in Data Mapping|View API Keys|View Audit Events|View SSO|View Scopes|View All Action Items|Manage All Action Items|View Employees|View Email Domains|View Global Attributes|View Legal Hold|Manage Legal Holds|Manage Request Security|Manage Request Compilation|Manage Assigned Privacy Requests|Submit New Data Subject Request|Manage Data Subject Request Settings|Manage Email Templates|Manage Request Identity Verification|Publish Privacy Center|Manage Data Map|Manage Privacy Center Layout|Manage Policies|View Policies|Manage Internationalization Messages|View Internationalization Messages|Request Approval and Communication|View Data Subject Request Settings|View the Request Compilation|View Identity Verification Settings|View Incoming Requests|View Assigned Privacy Requests|View Privacy Center Layout|View Email Templates|Connect Data Silos|Manage Data Inventory|Manage Assigned Data Inventory|Manage Assigned Integrations|View Data Map|View Assigned Integrations|View Assigned Data Inventory|View Data Inventory|Manage Consent Manager|Manage Consent Manager Developer Settings|Manage Consent Manager Display Settings|Deploy Test Consent Manager|Deploy Consent Manager|Manage Assigned Consent Manager|Manage Data Flows|View Data Flows|View Assigned Consent Manager|View Consent Manager|View Assessments|Manage Assessments|View Assigned Assessments|Manage Assigned Assessments|View Pathfinder|Manage Pathfinder|View Contract Scanning|Manage Contract Scanning|View Prompts|Manage Prompts|View Prompt Runs|Manage Prompt Runs|View Code Scanning|Manage Code Scanning|Execute Prompt|View Auditor Runs|Manage Auditor Runs and Schedules|Execute Auditor|Approve Prompts|Manage Action Item Collections|View Managed Consent Database Admin API|Modify User Stored Preferences|Manage Preference Store Settings|View Preference Store Settings|LLM Log Transfer|Manage Workflows|View Data Sub Categories) [--deleteExistingApiKey] [--createNewApiKey] [--parentOrganizationId value] [--transcendUrl value]
transcend admin generate-api-keys --help
This command allows for creating API keys across multiple Transcend instances. This is useful for customers that are managing many Transcend instances and need to regularly create, cycle or delete API keys across all of their instances.
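# Illustrative invocation: generating a key that carries two of the newly added scopes.
# Flag names come from the USAGE line above; the key title, output file, and scope
# selection are placeholders (scopes are comma-separated).
transcend admin generate-api-keys \
  --email="$TRANSCEND_EMAIL" \
  --password="$TRANSCEND_PASSWORD" \
  --apiKeyTitle="CLI generated key" \
  --file=./transcend-api-keys.json \
  --scopes="View Data Inventory,Manage Workflows"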
@@ -2999,7 +3006,7 @@ FLAGS
--password The password for your account login
--apiKeyTitle The title of the API key being generated or destroyed
--file The file where API keys should be written to
-
--scopes The list of scopes that should be given to the API key [View Only|Full Admin|Rotate Hosted Sombra keys|Manage Global Attributes|Manage Access Controls|Manage Billing|Manage SSO|Manage API Keys|Manage Organization Information|Manage Email Domains|View Customer Data in Privacy Requests|View Customer Data in Data Mapping|View API Keys|View Audit Events|View SSO|View Scopes|View All Action Items|Manage All Action Items|View Employees|View Email Domains|View Global Attributes|View Legal Hold|Manage Legal Holds|Manage Request Security|Manage Request Compilation|Manage Assigned Privacy Requests|Submit New Data Subject Request|Manage Data Subject Request Settings|Manage Email Templates|Manage Request Identity Verification|Publish Privacy Center|Manage Data Map|Manage Privacy Center Layout|Manage Policies|View Policies|Manage Internationalization Messages|View Internationalization Messages|Request Approval and Communication|View Data Subject Request Settings|View the Request Compilation|View Identity Verification Settings|View Incoming Requests|View Assigned Privacy Requests|View Privacy Center Layout|View Email Templates|Connect Data Silos|Manage Data Inventory|Manage Assigned Data Inventory|Manage Assigned Integrations|View Data Map|View Assigned Integrations|View Assigned Data Inventory|View Data Inventory|Manage Consent Manager|Manage Consent Manager Developer Settings|Manage Consent Manager Display Settings|Deploy Test Consent Manager|Deploy Consent Manager|Manage Assigned Consent Manager|Manage Data Flows|View Data Flows|View Assigned Consent Manager|View Consent Manager|View Assessments|Manage Assessments|View Assigned Assessments|Manage Assigned Assessments|
+
--scopes The list of scopes that should be given to the API key [View Only|Full Admin|Rotate Hosted Sombra keys|Manage Global Attributes|Manage Access Controls|Manage Billing|Manage SSO|Manage API Keys|Manage Organization Information|Manage Email Domains|Manage Data Sub Categories|View Customer Data in Privacy Requests|View Customer Data in Data Mapping|View API Keys|View Audit Events|View SSO|View Scopes|View All Action Items|Manage All Action Items|View Employees|View Email Domains|View Global Attributes|View Legal Hold|Manage Legal Holds|Manage Request Security|Manage Request Compilation|Manage Assigned Privacy Requests|Submit New Data Subject Request|Manage Data Subject Request Settings|Manage Email Templates|Manage Request Identity Verification|Publish Privacy Center|Manage Data Map|Manage Privacy Center Layout|Manage Policies|View Policies|Manage Internationalization Messages|View Internationalization Messages|Request Approval and Communication|View Data Subject Request Settings|View the Request Compilation|View Identity Verification Settings|View Incoming Requests|View Assigned Privacy Requests|View Privacy Center Layout|View Email Templates|Connect Data Silos|Manage Data Inventory|Manage Assigned Data Inventory|Manage Assigned Integrations|View Data Map|View Assigned Integrations|View Assigned Data Inventory|View Data Inventory|Manage Consent Manager|Manage Consent Manager Developer Settings|Manage Consent Manager Display Settings|Deploy Test Consent Manager|Deploy Consent Manager|Manage Assigned Consent Manager|Manage Data Flows|View Data Flows|View Assigned Consent Manager|View Consent Manager|View Assessments|Manage Assessments|View Assigned Assessments|Manage Assigned Assessments|View Pathfinder|Manage Pathfinder|View Contract Scanning|Manage Contract Scanning|View Prompts|Manage Prompts|View Prompt Runs|Manage Prompt Runs|View Code Scanning|Manage Code Scanning|Execute Prompt|View Auditor Runs|Manage Auditor Runs and Schedules|Execute Auditor|Approve Prompts|Manage Action Item Collections|View Managed Consent Database Admin API|Modify User Stored Preferences|Manage Preference Store Settings|View Preference Store Settings|LLM Log Transfer|Manage Workflows|View Data Sub Categories, separator = ,]
[--deleteExistingApiKey/--noDeleteExistingApiKey] When true, if an API key exists with the specified apiKeyTitle, the existing API key is deleted [default = true]
[--createNewApiKey/--noCreateNewApiKey] When true, new API keys will be created. Set to false if you simply want to delete all API keys with a title [default = true]
[--parentOrganizationId] Filter for only a specific organization by ID, returning all child accounts associated with that organization
package/dist/bin/bash-complete.cjs
CHANGED
@@ -1,4 +1,4 @@
#!/usr/bin/env node
-
"use strict"; function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkT462ONFXcjs = require('../chunk-T462ONFX.cjs');var
+
"use strict"; function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkT462ONFXcjs = require('../chunk-T462ONFX.cjs');var _chunkVMGXBH6Ecjs = require('../chunk-VMGXBH6E.cjs');require('../chunk-AFYBKLWT.cjs');require('../chunk-VIPFYWRR.cjs');require('../chunk-EAJZX4TX.cjs');require('../chunk-Q7I37FJV.cjs');var _core = require('@stricli/core');async function e(){let o=process.argv.slice(3);_optionalChain([process, 'access', _ => _.env, 'access', _2 => _2.COMP_LINE, 'optionalAccess', _3 => _3.endsWith, 'call', _4 => _4(" ")])&&o.push(""),await _core.proposeCompletions.call(void 0, _chunkVMGXBH6Ecjs.a,o,_chunkT462ONFXcjs.a.call(void 0, process));try{for(let{completion:r}of await _core.proposeCompletions.call(void 0, _chunkVMGXBH6Ecjs.a,o,_chunkT462ONFXcjs.a.call(void 0, process)))process.stdout.write(`${r}
`)}catch (e2){}}e();
//# sourceMappingURL=bash-complete.cjs.map
package/dist/bin/cli.cjs
CHANGED
@@ -1,3 +1,3 @@
#!/usr/bin/env node
-
"use strict";var _chunkT462ONFXcjs = require('../chunk-T462ONFX.cjs');var
+
"use strict";var _chunkT462ONFXcjs = require('../chunk-T462ONFX.cjs');var _chunkVMGXBH6Ecjs = require('../chunk-VMGXBH6E.cjs');require('../chunk-AFYBKLWT.cjs');require('../chunk-VIPFYWRR.cjs');require('../chunk-EAJZX4TX.cjs');require('../chunk-Q7I37FJV.cjs');var _core = require('@stricli/core');async function n(){await _core.run.call(void 0, _chunkVMGXBH6Ecjs.a,process.argv.slice(2),_chunkT462ONFXcjs.a.call(void 0, process))}n();
//# sourceMappingURL=cli.cjs.map
package/dist/bin/deprecated-command.cjs
CHANGED
@@ -1,7 +1,7 @@
#!/usr/bin/env node
-
"use strict"; function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var
+
"use strict"; function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkVMGXBH6Ecjs = require('../chunk-VMGXBH6E.cjs');require('../chunk-AFYBKLWT.cjs');var _chunkZUNVPK23cjs = require('../chunk-ZUNVPK23.cjs');require('../chunk-VIPFYWRR.cjs');require('../chunk-EAJZX4TX.cjs');require('../chunk-Q7I37FJV.cjs');var _core = require('@stricli/core');var o={"tr-build-xdi-sync-endpoint":["consent","build-xdi-sync-endpoint"],"tr-consent-manager-service-json-to-yml":["inventory","consent-manager-service-json-to-yml"],"tr-consent-managers-to-business-entities":["inventory","consent-managers-to-business-entities"],"tr-cron-mark-identifiers-completed":["request","cron","mark-identifiers-completed"],"tr-cron-pull-identifiers":["request","cron","pull-identifiers"],"tr-derive-data-silos-from-data-flows":["inventory","derive-data-silos-from-data-flows"],"tr-derive-data-silos-from-data-flows-cross-instance":["inventory","derive-data-silos-from-data-flows-cross-instance"],"tr-discover-silos":["inventory","discover-silos"],"tr-generate-api-keys":["admin","generate-api-keys"],"tr-manual-enrichment-pull-identifiers":["request","preflight","pull-identifiers"],"tr-manual-enrichment-push-identifiers":["request","preflight","push-identifiers"],"tr-mark-request-data-silos-completed":["request","system","mark-request-data-silos-completed"],"tr-pull":["inventory","pull"],"tr-pull-consent-metrics":["consent","pull-consent-metrics"],"tr-pull-consent-preferences":["consent","pull-consent-preferences"],"tr-pull-datapoints":["inventory","pull-datapoints"],"tr-pull-pull-unstructured-discovery-files":["inventory","pull-unstructured-discovery-files"],"tr-push":["inventory","push"],"tr-request-approve":["request","approve"],"tr-request-cancel":["request","cancel"],"tr-request-download-files":["request","download-files"],"tr-request-enricher-restart":["request","enricher-restart"],"tr-request-export":["request","export"],"tr-request-mark-silent":["request","mark-silent"],"tr-request-notify-additional-time":["request","notify-additional-time"],"tr-request-reject-unverified-identifiers":["request","reject-unverified-identifiers"],"tr-request-restart":["request","restart"],"tr-request-upload":["request","upload"],"tr-retry-request-data-silos":["request","system","retry-request-data-silos"],"tr-scan-packages":["inventory","scan-packages"],"tr-skip-preflight-jobs":["request","skip-preflight-jobs"],"tr-skip-request-data-silos":["request","system","skip-request-data-silos"],"tr-sync-ot":["migration","sync-ot"],"tr-update-consent-manager":["consent","update-consent-manager"],"tr-upload-consent-preferences":["consent","upload-consent-preferences"],"tr-upload-cookies-from-csv":["consent","upload-cookies-from-csv"],"tr-upload-data-flows-from-csv":["consent","upload-data-flows-from-csv"],"tr-upload-preferences":["consent","upload-preferences"]};function p(t){return _optionalChain([_core.generateHelpTextForAllCommands.call(void 0, _chunkVMGXBH6Ecjs.a), 'access', _ => _.find, 'call', _2 => _2(s=>s[0]===`${_chunkVMGXBH6Ecjs.a.config.name} ${t.join(" ")}`), 'optionalAccess', _3 => 
_3[1]])}function i(t){_chunkZUNVPK23cjs.a.log("[DEPRECATION NOTICE]");let e=o[t];if(!e){let a=Object.entries(o).map(([l,d])=>`\`${l}\` -> \`${_chunkVMGXBH6Ecjs.a.config.name} ${d.join(" ")}\``).join(`
`);_chunkZUNVPK23cjs.a.log(`This command is deprecated as of v7.0.0. Here is a list of new commands, mapped to their legacy command names:
${a}`);return}_chunkZUNVPK23cjs.a.log(`\`${t}\` is deprecated as of v7.0.0.
-
Use \`${
+
Use \`${_chunkVMGXBH6Ecjs.a.config.name} ${e.join(" ")}\` instead.
`);let s=p(e);if(!s)throw new Error(`Failed to get help text for command: \`${e.join(" ")}\``);_chunkZUNVPK23cjs.a.log(s)}function m(){let e=_optionalChain([process, 'access', _4 => _4.argv, 'access', _5 => _5.at, 'call', _6 => _6(-1), 'optionalAccess', _7 => _7.split, 'call', _8 => _8("/"), 'access', _9 => _9.pop, 'call', _10 => _10(), 'optionalAccess', _11 => _11.trim, 'call', _12 => _12()]);if(e)i(e);else throw new Error("Deprecated command");process.exit(1)}m();
//# sourceMappingURL=deprecated-command.cjs.map
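The mapping object in the added line above routes each legacy `tr-*` binary to its replacement subcommand, prints the matching help text, and exits with code 1. A short sketch of what that remapping means in practice (the final invocation is illustrative):

```sh
# From the legacy-command mapping in deprecated-command.cjs:
#   tr-pull               -> transcend inventory pull
#   tr-generate-api-keys  -> transcend admin generate-api-keys
#   tr-upload-preferences -> transcend consent upload-preferences
# The legacy names now only print a deprecation notice, so call the new subcommand directly:
transcend inventory pull --auth="$TRANSCEND_API_KEY"
```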
package/dist/{chunk-L7NHDBBN.cjs → chunk-2FBBA65B.cjs}
CHANGED
@@ -1,4 +1,4 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkUWUR56COcjs = require('./chunk-UWUR56CO.cjs');var _typeutils = require('@transcend-io/type-utils');var _jsyaml = require('js-yaml'); var _jsyaml2 = _interopRequireDefault(_jsyaml);var _fs = require('fs');var s=/<<parameters\.(.+?)>>/,i= exports.b ="parameters";function u(e,t,a=""){let r=e;if(Object.entries(t).forEach(([n,p])=>{r=r.split(`<<${i}.${n}>>`).join(p)}),s.test(r)){let[,n]=s.exec(r)||[];throw new Error(`Found variable that was not set: ${n}.
Make sure you are passing all parameters through the --${i}=${n}:value-for-param flag.
-
${a}`)}return r}function b(e,t={}){let a=_fs.readFileSync.call(void 0, e,"utf-8"),r=u(a,t,`Also check that there are no extra variables defined in your yaml: ${e}`);return _typeutils.decodeCodec.call(void 0,
-
//# sourceMappingURL=chunk-
+
${a}`)}return r}function b(e,t={}){let a=_fs.readFileSync.call(void 0, e,"utf-8"),r=u(a,t,`Also check that there are no extra variables defined in your yaml: ${e}`);return _typeutils.decodeCodec.call(void 0, _chunkUWUR56COcjs.ga,_jsyaml2.default.load(r))}function h(e,t){_fs.writeFileSync.call(void 0, e,_jsyaml2.default.dump(_typeutils.decodeCodec.call(void 0, _chunkUWUR56COcjs.ga,t)))}exports.a = s; exports.b = i; exports.c = u; exports.d = b; exports.e = h;
+
//# sourceMappingURL=chunk-2FBBA65B.cjs.map
package/dist/{chunk-L7NHDBBN.cjs.map → chunk-2FBBA65B.cjs.map}
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-2FBBA65B.cjs","../src/lib/readTranscendYaml.ts"],"names":["VARIABLE_PARAMETERS_REGEXP","VARIABLE_PARAMETERS_NAME","replaceVariablesInYaml","input","variables","extraErrorMessage","contents","name","value"],"mappings":"AAAA,iOAAyC,qDCAA,iFACxB,wBAC2B,IAG/BA,CAAAA,CAA6B,uBAAA,CAC7BC,CAAAA,aAA2B,YAAA,CAWjC,SAASC,CAAAA,CACdC,CAAAA,CACAC,CAAAA,CACAC,CAAAA,CAAoB,EAAA,CACZ,CACR,IAAIC,CAAAA,CAAWH,CAAAA,CASf,EAAA,CAPA,MAAA,CAAO,OAAA,CAAQC,CAAS,CAAA,CAAE,OAAA,CAAQ,CAAC,CAACG,CAAAA,CAAMC,CAAK,CAAA,CAAA,EAAM,CACnDF,CAAAA,CAAWA,CAAAA,CACR,KAAA,CAAM,CAAA,EAAA,EAAKL,CAAwB,CAAA,CAAA,EAAIM,CAAI,CAAA,EAAA,CAAI,CAAA,CAC/C,IAAA,CAAKC,CAAK,CACf,CAAC,CAAA,CAGGR,CAAAA,CAA2B,IAAA,CAAKM,CAAQ,CAAA,CAAG,CAC7C,GAAM,CAAC,CAAEC,CAAI,CAAA,CAAIP,CAAAA,CAA2B,IAAA,CAAKM,CAAQ,CAAA,EAAK,CAAC,CAAA,CAC/D,MAAM,IAAI,KAAA,CACR,CAAA,iCAAA,EAAoCC,CAAI,CAAA;AAAA,uDAAA,EACWN,CAAwB,CAAA,CAAA,EAAIM,CAAI,CAAA;AAAA,EACvFF,CAAiB,CAAA,CAAA","file":"/home/runner/work/cli/cli/dist/chunk-2FBBA65B.cjs","sourcesContent":[null,"import { decodeCodec, ObjByString } from '@transcend-io/type-utils';\nimport yaml from 'js-yaml';\nimport { readFileSync, writeFileSync } from 'fs';\nimport { TranscendInput } from '../codecs';\n\nexport const VARIABLE_PARAMETERS_REGEXP = /<<parameters\\.(.+?)>>/;\nexport const VARIABLE_PARAMETERS_NAME = 'parameters';\n\n/**\n * Function that replaces variables in a text file.\n * Throws error if there are variables that have not been replaced\n *\n * @param input - Input text\n * @param variables - Variables to replace\n * @param extraErrorMessage - Additional error message text\n * @returns Output text\n */\nexport function replaceVariablesInYaml(\n input: string,\n variables: ObjByString,\n extraErrorMessage = '',\n): string {\n let contents = input;\n // Replace variables\n Object.entries(variables).forEach(([name, value]) => {\n contents = contents\n .split(`<<${VARIABLE_PARAMETERS_NAME}.${name}>>`)\n .join(value);\n });\n\n // Throw error if unfilled variables\n if (VARIABLE_PARAMETERS_REGEXP.test(contents)) {\n const [, name] = VARIABLE_PARAMETERS_REGEXP.exec(contents) || [];\n throw new Error(\n `Found variable that was not set: ${name}.\nMake sure you are passing all parameters through the --${VARIABLE_PARAMETERS_NAME}=${name}:value-for-param flag.\n${extraErrorMessage}`,\n );\n }\n\n return contents;\n}\n\n/**\n * Read in the contents of a yaml file and validate that the shape\n * of the yaml file matches the codec API\n *\n * @param filePath - Path to yaml file\n * @param variables - Variables to fill in\n * @returns The contents of the yaml file, type-checked\n */\nexport function readTranscendYaml(\n filePath: string,\n variables: ObjByString = {},\n): TranscendInput {\n // Read in contents\n const fileContents = readFileSync(filePath, 'utf-8');\n\n // Replace variables\n const replacedVariables = replaceVariablesInYaml(\n fileContents,\n variables,\n `Also check that there are no extra variables defined in your yaml: ${filePath}`,\n );\n\n // Validate shape\n return decodeCodec(TranscendInput, yaml.load(replacedVariables));\n}\n\n/**\n * Write a Transcend configuration to disk\n *\n * @param filePath - Path to yaml file\n * @param input - The input to write out\n */\nexport function writeTranscendYaml(\n filePath: string,\n input: TranscendInput,\n): void {\n writeFileSync(filePath, yaml.dump(decodeCodec(TranscendInput, input)));\n}\n"]}
package/dist/{chunk-K4QC24DY.cjs → chunk-327HQXYV.cjs}
CHANGED
@@ -1,3 +1,3 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkZVK4HIDFcjs = require('./chunk-ZVK4HIDF.cjs');var
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _chunkZVK4HIDFcjs = require('./chunk-ZVK4HIDF.cjs');var _chunkNW3O6OPScjs = require('./chunk-NW3O6OPS.cjs');var _chunkR77JR6OQcjs = require('./chunk-R77JR6OQ.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkVIPFYWRRcjs = require('./chunk-VIPFYWRR.cjs');var _chunkEAJZX4TXcjs = require('./chunk-EAJZX4TX.cjs');var _privacytypes = require('@transcend-io/privacy-types');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function re({file:u,auth:s,sombraAuth:g,requestActions:f=[],concurrency:p=100,transcendUrl:n=_chunkVIPFYWRRcjs.e}){let o=_chunkR77JR6OQcjs.Wd.call(void 0, n,s),e=await _chunkR77JR6OQcjs.Xd.call(void 0, n,s,g);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Pulling manual enrichment requests, filtered for actions: ${f.join(",")}`));let d=await _chunkR77JR6OQcjs.ne.call(void 0, o,{actions:f,statuses:[_privacytypes.RequestStatus.Enriching]}),i=[];await _chunkR77JR6OQcjs.b.call(void 0, d,async t=>{let c=await _chunkR77JR6OQcjs.ie.call(void 0, o,{requestId:t.id});if(c.filter(({status:m})=>m==="ACTION_REQUIRED")){let m=await _chunkR77JR6OQcjs.le.call(void 0, o,e,{requestId:t.id});i.push({...t,requestIdentifiers:m,requestEnrichers:c})}},{concurrency:p});let l=i.map(({attributeValues:t,requestIdentifiers:c,requestEnrichers:w,...m})=>({...m,...Object.entries(_chunkEAJZX4TXcjs.d.call(void 0, c,"name")).reduce((P,[$,C])=>Object.assign(P,{[$]:C.map(({value:A})=>A).join(",")}),{}),...Object.entries(_chunkEAJZX4TXcjs.d.call(void 0, t,"attributeKey.name")).reduce((P,[$,C])=>Object.assign(P,{[$]:C.map(({name:A})=>A).join(",")}),{})})),a=_chunkEAJZX4TXcjs.j.call(void 0, l.map(t=>Object.keys(t)).flat());return _chunkZVK4HIDFcjs.c.call(void 0, u,l,a),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully wrote ${i.length} requests to file "${u}"`)),i}var _iots = require('io-ts'); var R = _interopRequireWildcard(_iots);var j="https://app.transcend.io/privacy-requests/incoming-requests/",M= exports.b =R.record(R.string,R.string);async function N(u,{id:s,...g},f,p){if(!s){let e=`Request ID must be provided to enricher request.${p?` Found error in row: ${p}`:""}`;throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(e)),new Error(e)}let n=s.toLowerCase(),o=Object.entries(g).reduce((e,[d,i])=>_chunkEAJZX4TXcjs.j.call(void 0, _chunkNW3O6OPScjs.n.call(void 0, i)).length===0?e:Object.assign(e,{[d]:_chunkEAJZX4TXcjs.j.call(void 0, _chunkNW3O6OPScjs.n.call(void 0, i)).map(a=>({value:d==="email"?a.toLowerCase():a}))}),{});try{return await u.post("v1/enrich-identifiers",{headers:{"x-transcend-request-id":n,"x-transcend-enricher-id":f},json:{enrichedIdentifiers:o}}).json(),_chunkZUNVPK23cjs.a.error(_colors2.default.green(`Successfully enriched request: ${j}${n}`)),!0}catch(e){if(typeof e.response.body=="string"&&e.response.body.includes("Cannot update a resolved RequestEnricher"))return _chunkZUNVPK23cjs.a.warn(_colors2.default.magenta(`Skipped enrichment for request: ${j}${n}, request is no longer in the enriching phase.`)),!1;throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`Failed to enricher 
identifiers for request with id: ${j}${n} - ${e.message} - ${e.response.body}`)),e}}async function he({file:u,auth:s,sombraAuth:g,enricherId:f,markSilent:p,concurrency:n=100,transcendUrl:o=_chunkVIPFYWRRcjs.e}){let e=await _chunkR77JR6OQcjs.Xd.call(void 0, o,s,g),d=_chunkR77JR6OQcjs.Wd.call(void 0, o,s);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Reading "${u}" from disk`));let i=_chunkNW3O6OPScjs.q.call(void 0, u,M);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Enriching "${i.length}" privacy requests.`));let l=0,a=0,t=0;if(await _chunkR77JR6OQcjs.b.call(void 0, i,async(c,w)=>{try{p&&(await _chunkR77JR6OQcjs.cc.call(void 0, d,_chunkR77JR6OQcjs._,{input:{id:c.id,isSilent:!0}}),_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Mark request as silent mode - ${c.id}`))),await N(e,c,f,w)?l+=1:a+=1}catch (e2){t+=1}},{concurrency:n}),_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully notified Transcend!
Success count: ${l}.`)),a>0&&_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Skipped count: ${a}.`)),t>0)throw _chunkZUNVPK23cjs.a.info(_colors2.default.red(`Error Count: ${t}.`)),new Error(`Failed to enrich: ${t} requests.`);return i.length}exports.a = re; exports.b = M; exports.c = N; exports.d = he;
-
//# sourceMappingURL=chunk-
+
//# sourceMappingURL=chunk-327HQXYV.cjs.map
package/dist/{chunk-K4QC24DY.cjs.map → chunk-327HQXYV.cjs.map}
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-327HQXYV.cjs","../src/lib/manual-enrichment/pullManualEnrichmentIdentifiersToCsv.ts","../src/lib/manual-enrichment/pushManualEnrichmentIdentifiersFromCsv.ts"],"names":["pullManualEnrichmentIdentifiersToCsv","file","auth","sombraAuth","requestActions","concurrency","transcendUrl","DEFAULT_TRANSCEND_API","client","buildTranscendGraphQLClient","sombra","createSombraGotInstance","logger","colors"],"mappings":"AAAA,mfAAwC,wDAAgD,wDAAgG,wDAAyC,wDAAyC,wDAAgD,2DCA7Q,gFAE1B,MA6BnB,SAAsBA,EAAAA,CAAqC,CACzD,IAAA,CAAAC,CAAAA,CACA,IAAA,CAAAC,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,cAAA,CAAAC,CAAAA,CAAiB,CAAC,CAAA,CAClB,WAAA,CAAAC,CAAAA,CAAc,GAAA,CACd,YAAA,CAAAC,CAAAA,CAAeC,mBACjB,CAAA,CAa6C,CAE3C,IAAMC,CAAAA,CAASC,kCAAAA,CAA4BH,CAAcJ,CAAI,CAAA,CACvDQ,CAAAA,CAAS,MAAMC,kCAAAA,CAAwBL,CAAcJ,CAAAA,CAAMC,CAAU,CAAA,CAE3ES,mBAAAA,CAAO,IAAA,CACLC,gBAAAA,CAAO,OAAA,CACL,CAAA,0DAAA,EAA6DT,CAAAA,CAAe,IAAA,CAC1E,GACF,CAAC,CAAA,CAAA;ACuDP,gBAAA","file":"/home/runner/work/cli/cli/dist/chunk-327HQXYV.cjs","sourcesContent":[null,"import { RequestAction, RequestStatus } from '@transcend-io/privacy-types';\nimport { map } from '../bluebird-replace';\nimport colors from 'colors';\nimport { groupBy, uniq } from 'lodash-es';\nimport { DEFAULT_TRANSCEND_API } from '../../constants';\nimport { writeCsv } from '../cron/writeCsv';\nimport {\n PrivacyRequest,\n RequestEnricher,\n RequestIdentifier,\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchAllRequestEnrichers,\n fetchAllRequestIdentifiers,\n fetchAllRequests,\n} from '../graphql';\nimport { logger } from '../../logger';\n\nexport interface PrivacyRequestWithIdentifiers extends PrivacyRequest {\n /** Request Enrichers */\n requestEnrichers: RequestEnricher[];\n /** Request Identifiers */\n requestIdentifiers: RequestIdentifier[];\n}\n\n/**\n * Pull the set of manual enrichment jobs to CSV\n *\n * @param options - Options\n * @returns List of requests with identifiers\n */\nexport async function pullManualEnrichmentIdentifiersToCsv({\n file,\n auth,\n sombraAuth,\n requestActions = [],\n concurrency = 100,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** CSV file path */\n file: string;\n /** Transcend API key authentication */\n auth: string;\n /** Sombra API key */\n sombraAuth?: string;\n /** Concurrency */\n concurrency?: number;\n /** The request actions to fetch */\n requestActions?: RequestAction[];\n /** API URL for Transcend backend */\n transcendUrl?: string;\n}): Promise<PrivacyRequestWithIdentifiers[]> {\n // Find all requests made before createdAt that are in a removing data state\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n\n logger.info(\n colors.magenta(\n `Pulling manual enrichment requests, filtered for actions: ${requestActions.join(\n ',',\n )}`,\n ),\n );\n\n // Pull all privacy requests\n const allRequests = await fetchAllRequests(client, {\n actions: requestActions,\n statuses: [RequestStatus.Enriching],\n });\n\n // Requests to save\n const savedRequests: PrivacyRequestWithIdentifiers[] = [];\n\n // Filter down requests to what is needed\n await map(\n allRequests,\n async (request) => {\n // Fetch enrichers\n const requestEnrichers = await fetchAllRequestEnrichers(client, {\n requestId: request.id,\n });\n\n // Check if manual enrichment exists for that request\n const hasManualEnrichment = requestEnrichers.filter(\n ({ status }) => status === 'ACTION_REQUIRED',\n );\n\n // 
Save request to queue\n if (hasManualEnrichment) {\n const requestIdentifiers = await fetchAllRequestIdentifiers(\n client,\n sombra,\n {\n requestId: request.id,\n },\n );\n savedRequests.push({\n ...request,\n requestIdentifiers,\n requestEnrichers,\n });\n }\n },\n {\n concurrency,\n },\n );\n\n const data = savedRequests.map(\n ({\n attributeValues,\n requestIdentifiers,\n requestEnrichers, // eslint-disable-line @typescript-eslint/no-unused-vars\n ...request\n }) => ({\n ...request,\n // flatten identifiers\n ...Object.entries(groupBy(requestIdentifiers, 'name')).reduce(\n (acc, [key, values]) =>\n Object.assign(acc, {\n [key]: values.map(({ value }) => value).join(','),\n }),\n {},\n ),\n // flatten attributes\n ...Object.entries(groupBy(attributeValues, 'attributeKey.name')).reduce(\n (acc, [key, values]) =>\n Object.assign(acc, {\n [key]: values.map(({ name }) => name).join(','),\n }),\n {},\n ),\n }),\n );\n\n // Write out to CSV\n const headers = uniq(data.map((d) => Object.keys(d)).flat());\n writeCsv(file, data, headers);\n\n logger.info(\n colors.green(\n `Successfully wrote ${savedRequests.length} requests to file \"${file}\"`,\n ),\n );\n\n return savedRequests;\n}\n","import colors from 'colors';\nimport { map } from '../bluebird-replace';\nimport { logger } from '../../logger';\nimport {\n UPDATE_PRIVACY_REQUEST,\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n makeGraphQLRequest,\n} from '../graphql';\nimport {\n enrichPrivacyRequest,\n EnrichPrivacyRequest,\n} from './enrichPrivacyRequest';\nimport { readCsv } from '../requests';\nimport { DEFAULT_TRANSCEND_API } from '../../constants';\n\n/**\n * Push a CSV of enriched requests back into Transcend\n *\n * @param options - Options\n * @returns Number of items processed\n */\nexport async function pushManualEnrichmentIdentifiersFromCsv({\n file,\n auth,\n sombraAuth,\n enricherId,\n markSilent,\n concurrency = 100,\n transcendUrl = DEFAULT_TRANSCEND_API,\n}: {\n /** CSV file path */\n file: string;\n /** Transcend API key authentication */\n auth: string;\n /** ID of enricher being uploaded to */\n enricherId: string;\n /** Sombra API key authentication */\n sombraAuth?: string;\n /** Concurrency */\n concurrency?: number;\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Mark requests in silent mode before enriching */\n markSilent?: boolean;\n}): Promise<number> {\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Read from CSV\n logger.info(colors.magenta(`Reading \"${file}\" from disk`));\n const activeResults = readCsv(file, EnrichPrivacyRequest);\n\n // Notify Transcend\n logger.info(\n colors.magenta(`Enriching \"${activeResults.length}\" privacy requests.`),\n );\n\n let successCount = 0;\n let skippedCount = 0;\n let errorCount = 0;\n\n await map(\n activeResults,\n async (request, index) => {\n try {\n // Mark requests in silent mode before a certain date\n if (markSilent) {\n await makeGraphQLRequest(client, UPDATE_PRIVACY_REQUEST, {\n input: {\n id: request.id,\n isSilent: true,\n },\n });\n\n logger.info(\n colors.magenta(`Mark request as silent mode - ${request.id}`),\n );\n }\n\n const result = await enrichPrivacyRequest(\n sombra,\n request,\n enricherId,\n index,\n );\n if (result) {\n successCount += 1;\n } else {\n skippedCount += 1;\n }\n } catch (err) {\n errorCount += 1;\n }\n },\n { concurrency },\n );\n\n logger.info(\n 
colors.green(\n `Successfully notified Transcend! \\n Success count: ${successCount}.`,\n ),\n );\n\n if (skippedCount > 0) {\n logger.info(colors.magenta(`Skipped count: ${skippedCount}.`));\n }\n\n if (errorCount > 0) {\n logger.info(colors.red(`Error Count: ${errorCount}.`));\n throw new Error(`Failed to enrich: ${errorCount} requests.`);\n }\n\n return activeResults.length;\n}\n"]}
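This map carries the full source of `pullManualEnrichmentIdentifiersToCsv` and `pushManualEnrichmentIdentifiersFromCsv`: the first exports requests in the Enriching state, with their identifiers and enrichers, to CSV; the second reads that CSV back and posts enriched identifiers to `v1/enrich-identifiers` through Sombra. A hedged sketch of the round trip from the CLI, using the subcommand paths recorded in the legacy-command mapping above; the flag names mirror the option names in this source and are assumptions:

```sh
# tr-manual-enrichment-pull-identifiers -> transcend request preflight pull-identifiers
transcend request preflight pull-identifiers \
  --auth="$TRANSCEND_API_KEY" --file=./enrichment.csv
# fill in the identifier columns, then push the enriched rows back for one enricher
transcend request preflight push-identifiers \
  --auth="$TRANSCEND_API_KEY" --file=./enrichment.csv --enricherId="<enricher-id>"
```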
package/dist/{chunk-XWMFHJGK.cjs → chunk-3G77IGKQ.cjs}
CHANGED
@@ -1,2 +1,2 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true});var
-
//# sourceMappingURL=chunk-
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunkEAJZX4TXcjs = require('./chunk-EAJZX4TX.cjs');function f(p,{adTechPurposes:m=["SaleOfInfo"],serviceToTitle:s,serviceToSupportedIntegration:r}){let e=[],i=[],n={};p.forEach(t=>{let{service:a,attributes:c=[]}=t;if(!a||a==="internalService")return;let u=c.find(o=>o.key==="Found on Domain");u&&(n[a]||(n[a]=[]),n[a].push(...u.values.map(o=>o.replace("https://","").replace("http://",""))),n[a]=[...new Set(n[a])]),_chunkEAJZX4TXcjs.i.call(void 0, t.trackingPurposes,m).length>0?(i.push(a),e.includes(a)&&(e=e.filter(o=>o!==a))):i.includes(a)||e.push(a)});let h=[...new Set(i)].map(t=>({title:s[t],...r[t]?{integrationName:t}:{integrationName:"promptAPerson","outer-type":t},attributes:[{key:"Tech Type",values:["Ad Tech"]},{key:"Found On Domain",values:n[t]||[]}]}));return{siteTechDataSilos:[...new Set(e)].map(t=>({title:s[t],...r[t]?{integrationName:t}:{integrationName:"promptAPerson",outerType:t},attributes:[{key:"Tech Type",values:["Site Tech"]},{key:"Found On Domain",values:n[t]||[]}]})),adTechDataSilos:h}}exports.a = f;
+
//# sourceMappingURL=chunk-3G77IGKQ.cjs.map
package/dist/{chunk-XWMFHJGK.cjs.map → chunk-3G77IGKQ.cjs.map}
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-3G77IGKQ.cjs","../src/lib/consent-manager/dataFlowsToDataSilos.ts"],"names":["dataFlowsToDataSilos","inputs","adTechPurposes","serviceToTitle","serviceToSupportedIntegration","siteTechIntegrations","adTechIntegrations","serviceToFoundOnDomain","flow","service","attributes","foundOnDomain","attr","v","union_default","s","adTechDataSilos"],"mappings":"AAAA,iIAAwC,SCWxBA,CAAAA,CACdC,CAAAA,CACA,CACE,cAAA,CAAAC,CAAAA,CAAiB,CAAC,YAAY,CAAA,CAC9B,cAAA,CAAAC,CAAAA,CACA,6BAAA,CAAAC,CACF,CAAA,CASA,CAEA,IAAIC,CAAAA,CAAiC,CAAC,CAAA,CAGhCC,CAAAA,CAA+B,CAAC,CAAA,CAGhCC,CAAAA,CAAsD,CAAC,CAAA,CAG7DN,CAAAA,CAAO,OAAA,CAASO,CAAAA,EAAS,CAEvB,GAAM,CAAE,OAAA,CAAAC,CAAAA,CAAS,UAAA,CAAAC,CAAAA,CAAa,CAAC,CAAE,CAAA,CAAIF,CAAAA,CACrC,EAAA,CAAI,CAACC,CAAAA,EAAWA,CAAAA,GAAY,iBAAA,CAC1B,MAAA,CAIF,IAAME,CAAAA,CAAgBD,CAAAA,CAAW,IAAA,CAC9BE,CAAAA,EAASA,CAAAA,CAAK,GAAA,GAAQ,iBACzB,CAAA,CAGID,CAAAA,EAAAA,CACGJ,CAAAA,CAAuBE,CAAO,CAAA,EAAA,CACjCF,CAAAA,CAAuBE,CAAO,CAAA,CAAI,CAAC,CAAA,CAAA,CAErCF,CAAAA,CAAuBE,CAAO,CAAA,CAAG,IAAA,CAC/B,GAAGE,CAAAA,CAAc,MAAA,CAAO,GAAA,CAAKE,CAAAA,EAC3BA,CAAAA,CAAE,OAAA,CAAQ,UAAA,CAAY,EAAE,CAAA,CAAE,OAAA,CAAQ,SAAA,CAAW,EAAE,CACjD,CACF,CAAA,CACAN,CAAAA,CAAuBE,CAAO,CAAA,CAAI,CAChC,GAAG,IAAI,GAAA,CAAIF,CAAAA,CAAuBE,CAAO,CAAC,CAC5C,CAAA,CAAA,CAIEK,iCAAAA,CAAMN,CAAK,gBAAA,CAAkBN,CAAc,CAAA,CAAE,MAAA,CAAS,CAAA,CAAA,CAExDI,CAAAA,CAAmB,IAAA,CAAKG,CAAO,CAAA,CAG3BJ,CAAAA,CAAqB,QAAA,CAASI,CAAO,CAAA,EAAA,CACvCJ,CAAAA,CAAuBA,CAAAA,CAAqB,MAAA,CACzCU,CAAAA,EAAMA,CAAAA,GAAMN,CACf,CAAA,CAAA,CAAA,CAEQH,CAAAA,CAAmB,QAAA,CAASG,CAAO,CAAA,EAE7CJ,CAAAA,CAAqB,IAAA,CAAKI,CAAO,CAErC,CAAC,CAAA,CAGD,IAAMO,CAAAA,CAAkB,CAAC,GAAG,IAAI,GAAA,CAAIV,CAAkB,CAAC,CAAA,CAAE,GAAA,CAAKG,CAAAA,EAAAA,CAAa,CACzE,KAAA,CAAON,CAAAA,CAAeM,CAAO,CAAA,CAC7B,GAAIL,CAAAA,CAA8BK,CAAO,CAAA,CACrC,CAAE,eAAA,CAAiBA,CAAQ,CAAA,CAC3B,CAAE,eAAA,CAAiB,eAAA,CAAiB,YAAA,CAAcA,CAAQ,CAAA,CAC9D,UAAA,CAAY,CACV,CACE,GAAA,CAAK,WAAA,CACL,MAAA,CAAQ,CAAC,SAAS,CACpB,CAAA,CACA,CACE,GAAA,CAAK,iBAAA,CACL,MAAA,CAAQF,CAAAA,CAAuBE,CAAO,CAAA,EAAK,CAAC,CAC9C,CACF,CACF,CAAA,CAAE,CAAA,CAsBF,MAAO,CACL,iBAAA,CApBwB,CAAC,GAAG,IAAI,GAAA,CAAIJ,CAAoB,CAAC,CAAA,CAAE,GAAA,CAC1DI,CAAAA,EAAAA,CAAa,CACZ,KAAA,CAAON,CAAAA,CAAeM,CAAO,CAAA,CAC7B,GAAIL,CAAAA,CAA8BK,CAAO,CAAA,CACrC,CAAE,eAAA,CAAiBA,CAAQ,CAAA,CAC3B,CAAE,eAAA,CAAiB,eAAA,CAAiB,SAAA,CAAWA,CAAQ,CAAA,CAC3D,UAAA,CAAY,CACV,CACE,GAAA,CAAK,WAAA,CACL,MAAA,CAAQ,CAAC,WAAW,CACtB,CAAA,CACA,CACE,GAAA,CAAK,iBAAA,CACL,MAAA,CAAQF,CAAAA,CAAuBE,CAAO,CAAA,EAAK,CAAC,CAC9C,CACF,CACF,CAAA,CACF,CAAA,CAIE,eAAA,CAAAO,CACF,CACF,CAAA,cAAA","file":"/home/runner/work/cli/cli/dist/chunk-3G77IGKQ.cjs","sourcesContent":[null,"import { DataFlowInput, DataSiloInput } from '../../codecs';\nimport { union } from 'lodash-es';\nimport { IndexedCatalogs } from '../graphql';\n\n/**\n * Convert data flow configurations into a set of data silo configurations\n *\n * @param inputs - Data flow input to convert to data silos\n * @param options - Additional options\n * @returns Business entity configuration input\n */\nexport function dataFlowsToDataSilos(\n inputs: DataFlowInput[],\n {\n adTechPurposes = ['SaleOfInfo'],\n serviceToTitle,\n serviceToSupportedIntegration,\n }: IndexedCatalogs & {\n /** List of purposes that are considered \"Ad Tech\" */\n adTechPurposes?: string[];\n },\n): {\n /** List of data silo configurations for site-tech services */\n siteTechDataSilos: DataSiloInput[];\n /** List of data silo configurations for ad-tech services */\n adTechDataSilos: DataSiloInput[];\n} {\n // List of site tech 
integrations\n let siteTechIntegrations: string[] = [];\n\n // List of ad tech integrations\n const adTechIntegrations: string[] = [];\n\n // Mapping from service name to list of\n const serviceToFoundOnDomain: { [k in string]: string[] } = {};\n\n // iterate over each flow\n inputs.forEach((flow) => {\n // process data flows with services\n const { service, attributes = [] } = flow;\n if (!service || service === 'internalService') {\n return;\n }\n\n // create mapping to found on domain\n const foundOnDomain = attributes.find(\n (attr) => attr.key === 'Found on Domain',\n );\n\n // Create a list of all domains where the data flow was found\n if (foundOnDomain) {\n if (!serviceToFoundOnDomain[service]) {\n serviceToFoundOnDomain[service] = [];\n }\n serviceToFoundOnDomain[service]!.push(\n ...foundOnDomain.values.map((v) =>\n v.replace('https://', '').replace('http://', ''),\n ),\n );\n serviceToFoundOnDomain[service] = [\n ...new Set(serviceToFoundOnDomain[service]),\n ];\n }\n\n // Keep track of ad tech\n if (union(flow.trackingPurposes, adTechPurposes).length > 0) {\n // add service to ad tech list\n adTechIntegrations.push(service);\n\n // remove from site tech list\n if (siteTechIntegrations.includes(service)) {\n siteTechIntegrations = siteTechIntegrations.filter(\n (s) => s !== service,\n );\n }\n } else if (!adTechIntegrations.includes(service)) {\n // add to site tech list\n siteTechIntegrations.push(service);\n }\n });\n\n // create the list of ad tech integrations\n const adTechDataSilos = [...new Set(adTechIntegrations)].map((service) => ({\n title: serviceToTitle[service],\n ...(serviceToSupportedIntegration[service]\n ? { integrationName: service }\n : { integrationName: 'promptAPerson', 'outer-type': service }),\n attributes: [\n {\n key: 'Tech Type',\n values: ['Ad Tech'],\n },\n {\n key: 'Found On Domain',\n values: serviceToFoundOnDomain[service] || [],\n },\n ],\n }));\n\n // create the list of site tech integrations\n const siteTechDataSilos = [...new Set(siteTechIntegrations)].map(\n (service) => ({\n title: serviceToTitle[service],\n ...(serviceToSupportedIntegration[service]\n ? { integrationName: service }\n : { integrationName: 'promptAPerson', outerType: service }),\n attributes: [\n {\n key: 'Tech Type',\n values: ['Site Tech'],\n },\n {\n key: 'Found On Domain',\n values: serviceToFoundOnDomain[service] || [],\n },\n ],\n }),\n );\n\n return {\n siteTechDataSilos,\n adTechDataSilos,\n };\n}\n"]}
@@ -1,4 +1,4 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkR77JR6OQcjs = require('./chunk-R77JR6OQ.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkEAJZX4TXcjs = require('./chunk-EAJZX4TX.cjs');var _privacytypes = require('@transcend-io/privacy-types');var _cliprogress = require('cli-progress'); var _cliprogress2 = _interopRequireDefault(_cliprogress);var _graphqlrequest = require('graphql-request');var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);async function q(u,{dataSiloIds:e=[],includeGuessedCategories:l,includeAttributes:a,parentCategories:c=[],subCategories:t=[],pageSize:p=1e3}={}){let n=[],m=new Date().getTime(),d=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),s={...c.length>0?{category:c}:{},...t.length>0?{subCategoryIds:t}:{},...c.length+t.length>0&&!l?{status:_privacytypes.SubDataPointDataSubCategoryGuessStatus.Approved}:{},...e.length>0?{dataSilos:e}:{}},{subDataPoints:{totalCount:o}}=await _chunkR77JR6OQcjs.cc.call(void 0, u,_chunkR77JR6OQcjs.f,{filterBy:s});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("[Step 1/3] Pulling in all subdatapoints")),d.start(o,0);let y=0,D=!1,r,b=0;do try{let{subDataPoints:{nodes:P}}=await _chunkR77JR6OQcjs.cc.call(void 0, u,_graphqlrequest.gql`
query TranscendCliSubDataPointCsvExport(
$filterBy: SubDataPointFiltersInput
$first: Int!
@@ -41,7 +41,7 @@
}
}
}
-
`,{first:p,offset:b,filterBy:{...s}});r=_optionalChain([P, 'access', _2 => _2[P.length-1], 'optionalAccess', _3 => _3.id]),n.push(...P),D=P.length===p,y+=P.length,b+=P.length,d.update(y)}catch(P){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${r} and offset ${b}`)),P}while(D);d.stop();let C=new Date().getTime()-m,g=
+
`,{first:p,offset:b,filterBy:{...s}});r=_optionalChain([P, 'access', _2 => _2[P.length-1], 'optionalAccess', _3 => _3.id]),n.push(...P),D=P.length===p,y+=P.length,b+=P.length,d.update(y)}catch(P){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${r} and offset ${b}`)),P}while(D);d.stop();let C=new Date().getTime()-m,g=_chunkEAJZX4TXcjs.g.call(void 0, n,"name");return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${g.length} subdatapoints in ${C/1e3} seconds!`)),g}async function F(u,{dataPointIds:e=[],pageSize:l=100}){let a=[],c=new Date().getTime(),t=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`[Step 2/3] Fetching metadata for ${e.length} datapoints`));let p=_chunkEAJZX4TXcjs.b.call(void 0, e,l);t.start(e.length,0);let n=0;await _chunkR77JR6OQcjs.a.call(void 0, p,async s=>{try{let{dataPoints:{nodes:o}}=await _chunkR77JR6OQcjs.cc.call(void 0, u,_chunkR77JR6OQcjs.i,{first:l,filterBy:{ids:s}});a.push(...o),n+=s.length,t.update(n)}catch(o){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for IDs ${s.join(", ")}`)),o}}),t.stop();let d=new Date().getTime()-c;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${a.length} dataPoints in ${d/1e3} seconds!`)),a}async function Q(u,{dataSiloIds:e=[],pageSize:l=100}){let a=[],c=new Date().getTime(),t=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`[Step 3/3] Fetching metadata for ${e.length} data silos`));let p=_chunkEAJZX4TXcjs.b.call(void 0, e,l);t.start(e.length,0);let n=0;await _chunkR77JR6OQcjs.a.call(void 0, p,async s=>{try{let{dataSilos:{nodes:o}}=await _chunkR77JR6OQcjs.cc.call(void 0, u,_chunkR77JR6OQcjs.l,{first:l,filterBy:{ids:s}});a.push(...o),n+=s.length,t.update(n)}catch(o){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching data silos for IDs ${s.join(", ")}`)),o}}),t.stop();let d=new Date().getTime()-c;return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${a.length} data silos in ${d/1e3} seconds!`)),a}async function Y(u,{dataSiloIds:e=[],includeGuessedCategories:l,includeAttributes:a,parentCategories:c=[],subCategories:t=[],pageSize:p=1e3}={}){let n=await q(u,{dataSiloIds:e,includeGuessedCategories:l,includeAttributes:a,parentCategories:c,subCategories:t,pageSize:p}),m=_chunkEAJZX4TXcjs.j.call(void 0, n.map(r=>r.dataPointId)),d=await F(u,{dataPointIds:m}),s=_chunkEAJZX4TXcjs.e.call(void 0, d,"id"),o=_chunkEAJZX4TXcjs.j.call(void 0, n.map(r=>r.dataSiloId)),y=await Q(u,{dataSiloIds:o}),D=_chunkEAJZX4TXcjs.e.call(void 0, y,"id");return n.map(r=>({...r,dataPoint:s[r.dataPointId],dataSilo:D[r.dataSiloId]}))}async function nt(u,{dataSiloIds:e=[],status:l,subCategories:a=[],includeEncryptedSnippets:c,pageSize:t=100}={}){let p=[],n=new Date().getTime(),m=new _cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),d={...a.length>0?{subCategoryIds:a}:{},...l?{status:l}:{},...e.length>0?{dataSilos:e}:{}},{unstructuredSubDataPointRecommendations:{totalCount:s}}=await _chunkR77JR6OQcjs.cc.call(void 0, u,_chunkR77JR6OQcjs.j,{filterBy:d});_chunkZUNVPK23cjs.a.info(_colors2.default.magenta("[Step 1/3] Pulling in all subdatapoints")),m.start(s,0);let o=0,y=!1,D,r=0;do try{let{unstructuredSubDataPointRecommendations:{nodes:g}}=await _chunkR77JR6OQcjs.cc.call(void 0, 
u,_graphqlrequest.gql`
query TranscendCliUnstructuredSubDataPointRecommendationCsvExport(
$filterBy: UnstructuredSubDataPointRecommendationsFilterInput
$first: Int!
@@ -71,5 +71,5 @@
}
}
}
-
`,{first:t,offset:r,filterBy:{...d}});D=_optionalChain([g, 'access', _4 => _4[g.length-1], 'optionalAccess', _5 => _5.id]),p.push(...g),y=g.length===t,o+=g.length,r+=g.length,m.update(o)}catch(g){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${D} and offset ${r}`)),g}while(y);m.stop();let R=new Date().getTime()-n,C=
-
//# sourceMappingURL=chunk-
+
`,{first:t,offset:r,filterBy:{...d}});D=_optionalChain([g, 'access', _4 => _4[g.length-1], 'optionalAccess', _5 => _5.id]),p.push(...g),y=g.length===t,o+=g.length,r+=g.length,m.update(o)}catch(g){throw _chunkZUNVPK23cjs.a.error(_colors2.default.red(`An error fetching subdatapoints for cursor ${D} and offset ${r}`)),g}while(y);m.stop();let R=new Date().getTime()-n,C=_chunkEAJZX4TXcjs.g.call(void 0, p,"name");return _chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully pulled in ${C.length} subdatapoints in ${R/1e3} seconds!`)),C}exports.a = Y; exports.b = nt;
+
//# sourceMappingURL=chunk-5JXP3N7U.cjs.map
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-EUDUZQOO.cjs","../src/lib/data-inventory/pullAllDatapoints.ts","../src/lib/data-inventory/pullUnstructuredSubDataPointRecommendations.ts"],"names":["pullSubDatapoints","client","dataSiloIds","includeGuessedCategories","includeAttributes","parentCategories","subCategories","pageSize","subDataPoints","t0","progressBar","cliProgress","filterBy","SubDataPointDataSubCategoryGuessStatus","totalCount","makeGraphQLRequest","SUB_DATA_POINTS_COUNT","logger","colors","total","shouldContinue","cursor","offset","nodes","gql","err"],"mappings":"AAAA,quBAA4E,wDAAyC,wDAA8D,2DCK5K,qGACiB,iDACJ,gFACD,MAkFnB,SAAeA,CAAAA,CACbC,CAAAA,CACA,CACE,WAAA,CAAAC,CAAAA,CAAc,CAAC,CAAA,CACf,wBAAA,CAAAC,CAAAA,CACA,iBAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CAAmB,CAAC,CAAA,CACpB,aAAA,CAAAC,CAAAA,CAAgB,CAAC,CAAA,CACjB,QAAA,CAAAC,CAAAA,CAAW,GACb,CAAA,CAGI,CAAC,CAAA,CAC8B,CACnC,IAAMC,CAAAA,CAA0C,CAAC,CAAA,CAG3CC,CAAAA,CAAK,IAAI,IAAA,CAAK,CAAA,CAAE,OAAA,CAAQ,CAAA,CAGxBC,CAAAA,CAAc,IAAIC,qBAAAA,CAAY,SAAA,CAClC,CAAC,CAAA,CACDA,qBAAAA,CAAY,OAAA,CAAQ,cACtB,CAAA,CAGMC,CAAAA,CAAW,CACf,GAAIP,CAAAA,CAAiB,MAAA,CAAS,CAAA,CAAI,CAAE,QAAA,CAAUA,CAAiB,CAAA,CAAI,CAAC,CAAA,CACpE,GAAIC,CAAAA,CAAc,MAAA,CAAS,CAAA,CAAI,CAAE,cAAA,CAAgBA,CAAc,CAAA,CAAI,CAAC,CAAA,CAEpE,GAAID,CAAAA,CAAiB,MAAA,CAASC,CAAAA,CAAc,MAAA,CAAS,CAAA,EACrD,CAACH,CAAAA,CAEG,CAAE,MAAA,CAAQU,oDAAAA,CAAuC,QAAS,CAAA,CAC1D,CAAC,CAAA,CACL,GAAIX,CAAAA,CAAY,MAAA,CAAS,CAAA,CAAI,CAAE,SAAA,CAAWA,CAAY,CAAA,CAAI,CAAC,CAC7D,CAAA,CAGM,CACJ,aAAA,CAAe,CAAE,UAAA,CAAAY,CAAW,CAC9B,CAAA,CAAI,MAAMC,kCAAAA,CAMPd,CAAQe,mBAAAA,CAAuB,CAChC,QAAA,CAAAJ,CACF,CAAC,CAAA,CAEDK,mBAAAA,CAAO,IAAA,CAAKC,gBAAAA,CAAO,OAAA,CAAQ,yCAAyC,CAAC,CAAA,CAErER,CAAAA,CAAY,KAAA,CAAMI,CAAAA,CAAY,CAAC,CAAA,CAC/B,IAAIK,CAAAA,CAAQ,CAAA,CACRC,CAAAA,CAAiB,CAAA,CAAA,CACjBC,CAAAA,CACAC,CAAAA,CAAS,CAAA,CACb,GACE,GAAI,CACF,GAAM,CACJ,aAAA,CAAe,CAAE,KAAA,CAAAC,CAAM,CACzB,CAAA,CAAI,MAAMR,kCAAAA,CAORd,CACAuB,mBAAAA,CAAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAA,EA2BUrB,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAQA,EACN,CAAA;AAAA,gBAAA,EAEEC,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAMA,EACN,CAAA;AAAA;AAAA;AAAA;AAAA,QAAA,CAAA,CAKR,CACE,KAAA,CAAOG,CAAAA,CACP,MAAA,CAAAe,CAAAA,CACA,QAAA,CAAU,CACR,GAAGV,CAGL,CACF,CACF,CAAA,CAEAS,CAAAA,iBAASE,CAAAA,qBAAMA,CAAAA,CAAM,MAAA,CAAS,CAAC,CAAA,6BAAG,IAAA,CAClCf,CAAAA,CAAc,IAAA,CAAK,GAAGe,CAAK,CAAA,CAC3BH,CAAAA,CAAiBG,CAAAA,CAAM,MAAA,GAAWhB,CAAAA,CAClCY,CAAAA,EAASI,CAAAA,CAAM,MAAA,CACfD,CAAAA,EAAUC,CAAAA,CAAM,MAAA,CAChBb,CAAAA,CAAY,MAAA,CAAOS,CAAK,CAC1B,CAAA,KAAA,CAASM,CAAAA,CAAK,CACZ,MAAAR,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,2CAAA,EAA8CG,CAAM,CAAA,YAAA,EAAeC,CAAM,CAAA,CAAA;AC7G3E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBgD,gBAAA;AACU,gBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA+BiB,QAAA","file":"/home/runner/work/cli/cli/dist/chunk-EUDUZQOO.cjs","sourcesContent":[null,"/* eslint-disable max-lines */\nimport { keyBy, uniq, chunk, sortBy } from 'lodash-es';\nimport {\n type DataCategoryType,\n SubDataPointDataSubCategoryGuessStatus,\n} from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport { gql } from 'graphql-request';\nimport colors from 'colors';\nimport type { GraphQLClient } from 'graphql-request';\nimport {\n DATAPOINT_EXPORT,\n DATA_SILO_EXPORT,\n type DataSiloAttributeValue,\n SUB_DATA_POINTS_COUNT,\n 
makeGraphQLRequest,\n} from '../graphql';\nimport { logger } from '../../logger';\nimport type { DataCategoryInput, ProcessingPurposeInput } from '../../codecs';\nimport { mapSeries } from '../bluebird-replace';\n\nexport interface DataSiloCsvPreview {\n /** ID of dataSilo */\n id: string;\n /** Name of dataSilo */\n title: string;\n}\n\nexport interface DataPointCsvPreview {\n /** ID of dataPoint */\n id: string;\n /** The path to this data point */\n path: string[];\n /** Description */\n description: {\n /** Default message */\n defaultMessage: string;\n };\n /** Name */\n name: string;\n}\n\nexport interface SubDataPointCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Name (or key) of the subdatapoint */\n name: string;\n /** The description */\n description?: string;\n /** Personal data category */\n categories: DataCategoryInput[];\n /** Data point ID */\n dataPointId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** The processing purpose for this sub datapoint */\n purposes: ProcessingPurposeInput[];\n /** Attribute attached to subdatapoint */\n attributeValues?: DataSiloAttributeValue[];\n /** Data category guesses that are output by the classifier */\n pendingCategoryGuesses?: {\n /** Data category being guessed */\n category: DataCategoryInput;\n /** Status of guess */\n status: SubDataPointDataSubCategoryGuessStatus;\n /** classifier version that produced the guess */\n classifierVersion: number;\n }[];\n}\n\nexport interface DatapointFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Whether to include guessed categories, defaults to only approved categories */\n includeGuessedCategories?: boolean;\n /** Whether or not to include attributes */\n includeAttributes?: boolean;\n /** Parent categories to filter down for */\n parentCategories?: DataCategoryType[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n}\n\n/**\n * Pull subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The subdatapoints\n */\nasync function pullSubDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<SubDataPointCsvPreview[]> {\n const subDataPoints: SubDataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(parentCategories.length > 0 ? { category: parentCategories } : {}),\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n // if parentCategories or subCategories and not includeGuessedCategories\n ...(parentCategories.length + subCategories.length > 0 &&\n !includeGuessedCategories\n ? // then only show data points with approved data categories\n { status: SubDataPointDataSubCategoryGuessStatus.Approved }\n : {}),\n ...(dataSiloIds.length > 0 ? 
{ dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n subDataPoints: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** Count */\n totalCount: number;\n };\n }>(client, SUB_DATA_POINTS_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n subDataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** List of matches */\n nodes: SubDataPointCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliSubDataPointCsvExport(\n $filterBy: SubDataPointFiltersInput\n $first: Int!\n $offset: Int!\n ) {\n subDataPoints(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n name\n description\n dataPointId\n dataSiloId\n purposes {\n name\n purpose\n }\n categories {\n name\n category\n }\n ${\n includeGuessedCategories\n ? `pendingCategoryGuesses {\n category {\n name\n category\n }\n status\n classifierVersion\n }`\n : ''\n }\n ${\n includeAttributes\n ? `attributeValues {\n attributeKey {\n name\n }\n name\n }`\n : ''\n }\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n // TODO: https://transcend.height.app/T-40484 - add cursor support\n // ...(cursor ? { cursor: { id: cursor } } : {}),\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n subDataPoints.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(subDataPoints, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n\n/**\n * Pull datapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints\n */\nasync function pullDatapoints(\n client: GraphQLClient,\n {\n dataPointIds = [],\n pageSize = 100,\n }: {\n /** IDs of data points to filter down */\n dataPointIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataPointCsvPreview[]> {\n const dataPoints: DataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 2/3] Fetching metadata for ${dataPointIds.length} datapoints`,\n ),\n );\n\n // Group by 100\n const dataPointsGrouped = chunk(dataPointIds, pageSize);\n\n progressBar.start(dataPointIds.length, 0);\n let total = 0;\n await mapSeries(dataPointsGrouped, async (dataPointIdsGroup) => {\n try {\n const {\n dataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataPoints: {\n /** List of matches */\n nodes: DataPointCsvPreview[];\n };\n }>(client, DATAPOINT_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataPointIdsGroup,\n },\n });\n\n dataPoints.push(...nodes);\n 
total += dataPointIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for IDs ${dataPointIdsGroup.join(\n ', ',\n )}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataPoints.length} dataPoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataPoints;\n}\n\n/**\n * Pull data silo information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The data silos\n */\nasync function pullDataSilos(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n pageSize = 100,\n }: {\n /** IDs of data silos to filter down */\n dataSiloIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataSiloCsvPreview[]> {\n const dataSilos: DataSiloCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 3/3] Fetching metadata for ${dataSiloIds.length} data silos`,\n ),\n );\n\n // Group by 100\n const dataSilosGrouped = chunk(dataSiloIds, pageSize);\n\n progressBar.start(dataSiloIds.length, 0);\n let total = 0;\n await mapSeries(dataSilosGrouped, async (dataSiloIdsGroup) => {\n try {\n const {\n dataSilos: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataSilos: {\n /** List of matches */\n nodes: DataSiloCsvPreview[];\n };\n }>(client, DATA_SILO_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataSiloIdsGroup,\n },\n });\n\n dataSilos.push(...nodes);\n total += dataSiloIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching data silos for IDs ${dataSiloIdsGroup.join(', ')}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataSilos.length} data silos in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataSilos;\n}\n\n/**\n * Pull all datapoints from the data inventory.\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints and data silos\n */\nexport async function pullAllDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<\n (SubDataPointCsvPreview & {\n /** Data point information */\n dataPoint: DataPointCsvPreview;\n /** Data silo information */\n dataSilo: DataSiloCsvPreview;\n })[]\n> {\n // Subdatapoint information\n const subDatapoints = await pullSubDatapoints(client, {\n dataSiloIds,\n includeGuessedCategories,\n includeAttributes,\n parentCategories,\n subCategories,\n pageSize,\n });\n\n // The datapoint ids to grab\n const dataPointIds = uniq(subDatapoints.map((point) => point.dataPointId));\n const dataPoints = await pullDatapoints(client, {\n dataPointIds,\n });\n const dataPointById = keyBy(dataPoints, 'id');\n\n // The data silo IDs to grab\n const allDataSiloIds = uniq(subDatapoints.map((point) => point.dataSiloId));\n const dataSilos = await pullDataSilos(client, {\n 
dataSiloIds: allDataSiloIds,\n });\n const dataSiloById = keyBy(dataSilos, 'id');\n\n return subDatapoints.map((subDataPoint) => ({\n ...subDataPoint,\n dataPoint: dataPointById[subDataPoint.dataPointId],\n dataSilo: dataSiloById[subDataPoint.dataSiloId],\n }));\n}\n/* eslint-enable max-lines */\n","import type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport { gql, type GraphQLClient } from 'graphql-request';\nimport { sortBy } from 'lodash-es';\nimport type { DataCategoryInput } from '../../codecs';\nimport { ENTRY_COUNT, makeGraphQLRequest } from '../graphql';\nimport { logger } from '../../logger';\n\ninterface UnstructuredSubDataPointRecommendationCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Entry or Named Entity recognized by the classifier */\n name: string;\n /** Context snippet including entry */\n contextSnippet: string;\n /** Scanned object ID */\n scannedObjectId: string;\n /** Scanned object path ID */\n scannedObjectPathId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** Personal data category */\n dataSubCategory: DataCategoryInput;\n /** Classification Status */\n status: UnstructuredSubDataPointRecommendationStatus;\n /** Confidence */\n confidence: number;\n /** Classification method */\n classificationMethod: string;\n /** Classifier version */\n classifierVersion: string;\n}\n\ninterface EntryFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Parent categories to filter down for */\n status?: UnstructuredSubDataPointRecommendationStatus[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n /** Include entry and snippet */\n includeEncryptedSnippets?: boolean;\n /** Include encryptedSamplesS3Key */\n includeEncryptedSamplesS3Key?: boolean;\n}\n/**\n * Pull unstructured subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @param options.dataSiloIds - IDs of data silos to filter down\n * @param options.status - Parent categories to filter down for\n * @param options.subCategories - Sub categories to filter down for\n * @param options.includeEncryptedSnippets - Include entry and snippet\n * @param options.includeEncryptedSamplesS3Key - Include encryptedSamplesS3Key\n * @param options.pageSize - Page size to pull in\n * @returns A promise that resolves to an array of unstructured subdatapoint recommendations\n */\nexport async function pullUnstructuredSubDataPointRecommendations(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n status,\n subCategories = [],\n includeEncryptedSnippets,\n pageSize = 100,\n }: EntryFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<UnstructuredSubDataPointRecommendationCsvPreview[]> {\n const unstructuredSubDataPointRecommendations: UnstructuredSubDataPointRecommendationCsvPreview[] =\n [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n ...(status ? { status } : {}),\n ...(dataSiloIds.length > 0 ? 
{ dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n unstructuredSubDataPointRecommendations: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** Count */\n totalCount: number;\n };\n }>(client, ENTRY_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n unstructuredSubDataPointRecommendations: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** List of matches */\n nodes: UnstructuredSubDataPointRecommendationCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliUnstructuredSubDataPointRecommendationCsvExport(\n $filterBy: UnstructuredSubDataPointRecommendationsFilterInput\n $first: Int!\n $offset: Int!\n ) {\n unstructuredSubDataPointRecommendations(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n dataSiloId\n scannedObjectPathId\n scannedObjectId\n ${includeEncryptedSnippets ? 'name' : ''}\n ${includeEncryptedSnippets ? 'contextSnippet' : ''}\n dataSubCategory {\n name\n category\n }\n status\n confidence\n classificationMethod\n classifierVersion\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n unstructuredSubDataPointRecommendations.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(unstructuredSubDataPointRecommendations, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n"]}
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-5JXP3N7U.cjs","../src/lib/data-inventory/pullAllDatapoints.ts","../src/lib/data-inventory/pullUnstructuredSubDataPointRecommendations.ts"],"names":["pullSubDatapoints","client","dataSiloIds","includeGuessedCategories","includeAttributes","parentCategories","subCategories","pageSize","subDataPoints","t0","progressBar","cliProgress","filterBy","SubDataPointDataSubCategoryGuessStatus","totalCount","makeGraphQLRequest","SUB_DATA_POINTS_COUNT","logger","colors","total","shouldContinue","cursor","offset","nodes","gql","err"],"mappings":"AAAA,quBAA4E,wDAAyC,wDAA8D,2DCK5K,qGACiB,iDACJ,gFACD,MAkFnB,SAAeA,CAAAA,CACbC,CAAAA,CACA,CACE,WAAA,CAAAC,CAAAA,CAAc,CAAC,CAAA,CACf,wBAAA,CAAAC,CAAAA,CACA,iBAAA,CAAAC,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CAAmB,CAAC,CAAA,CACpB,aAAA,CAAAC,CAAAA,CAAgB,CAAC,CAAA,CACjB,QAAA,CAAAC,CAAAA,CAAW,GACb,CAAA,CAGI,CAAC,CAAA,CAC8B,CACnC,IAAMC,CAAAA,CAA0C,CAAC,CAAA,CAG3CC,CAAAA,CAAK,IAAI,IAAA,CAAK,CAAA,CAAE,OAAA,CAAQ,CAAA,CAGxBC,CAAAA,CAAc,IAAIC,qBAAAA,CAAY,SAAA,CAClC,CAAC,CAAA,CACDA,qBAAAA,CAAY,OAAA,CAAQ,cACtB,CAAA,CAGMC,CAAAA,CAAW,CACf,GAAIP,CAAAA,CAAiB,MAAA,CAAS,CAAA,CAAI,CAAE,QAAA,CAAUA,CAAiB,CAAA,CAAI,CAAC,CAAA,CACpE,GAAIC,CAAAA,CAAc,MAAA,CAAS,CAAA,CAAI,CAAE,cAAA,CAAgBA,CAAc,CAAA,CAAI,CAAC,CAAA,CAEpE,GAAID,CAAAA,CAAiB,MAAA,CAASC,CAAAA,CAAc,MAAA,CAAS,CAAA,EACrD,CAACH,CAAAA,CAEG,CAAE,MAAA,CAAQU,oDAAAA,CAAuC,QAAS,CAAA,CAC1D,CAAC,CAAA,CACL,GAAIX,CAAAA,CAAY,MAAA,CAAS,CAAA,CAAI,CAAE,SAAA,CAAWA,CAAY,CAAA,CAAI,CAAC,CAC7D,CAAA,CAGM,CACJ,aAAA,CAAe,CAAE,UAAA,CAAAY,CAAW,CAC9B,CAAA,CAAI,MAAMC,kCAAAA,CAMPd,CAAQe,mBAAAA,CAAuB,CAChC,QAAA,CAAAJ,CACF,CAAC,CAAA,CAEDK,mBAAAA,CAAO,IAAA,CAAKC,gBAAAA,CAAO,OAAA,CAAQ,yCAAyC,CAAC,CAAA,CAErER,CAAAA,CAAY,KAAA,CAAMI,CAAAA,CAAY,CAAC,CAAA,CAC/B,IAAIK,CAAAA,CAAQ,CAAA,CACRC,CAAAA,CAAiB,CAAA,CAAA,CACjBC,CAAAA,CACAC,CAAAA,CAAS,CAAA,CACb,GACE,GAAI,CACF,GAAM,CACJ,aAAA,CAAe,CAAE,KAAA,CAAAC,CAAM,CACzB,CAAA,CAAI,MAAMR,kCAAAA,CAORd,CACAuB,mBAAAA,CAAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAA,EA2BUrB,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAQA,EACN,CAAA;AAAA,gBAAA,EAEEC,CAAAA,CACI,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAA,CAAA,CAMA,EACN,CAAA;AAAA;AAAA;AAAA;AAAA,QAAA,CAAA,CAKR,CACE,KAAA,CAAOG,CAAAA,CACP,MAAA,CAAAe,CAAAA,CACA,QAAA,CAAU,CACR,GAAGV,CAGL,CACF,CACF,CAAA,CAEAS,CAAAA,iBAASE,CAAAA,qBAAMA,CAAAA,CAAM,MAAA,CAAS,CAAC,CAAA,6BAAG,IAAA,CAClCf,CAAAA,CAAc,IAAA,CAAK,GAAGe,CAAK,CAAA,CAC3BH,CAAAA,CAAiBG,CAAAA,CAAM,MAAA,GAAWhB,CAAAA,CAClCY,CAAAA,EAASI,CAAAA,CAAM,MAAA,CACfD,CAAAA,EAAUC,CAAAA,CAAM,MAAA,CAChBb,CAAAA,CAAY,MAAA,CAAOS,CAAK,CAC1B,CAAA,KAAA,CAASM,CAAAA,CAAK,CACZ,MAAAR,mBAAAA,CAAO,KAAA,CACLC,gBAAAA,CAAO,GAAA,CACL,CAAA,2CAAA,EAA8CG,CAAM,CAAA,YAAA,EAAeC,CAAM,CAAA,CAAA;AC7G3E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBgD,gBAAA;AACU,gBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA+BiB,QAAA","file":"/home/runner/work/cli/cli/dist/chunk-5JXP3N7U.cjs","sourcesContent":[null,"/* eslint-disable max-lines */\nimport { keyBy, uniq, chunk, sortBy } from 'lodash-es';\nimport {\n type DataCategoryType,\n SubDataPointDataSubCategoryGuessStatus,\n} from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport { gql } from 'graphql-request';\nimport colors from 'colors';\nimport type { GraphQLClient } from 'graphql-request';\nimport {\n DATAPOINT_EXPORT,\n DATA_SILO_EXPORT,\n type DataSiloAttributeValue,\n SUB_DATA_POINTS_COUNT,\n 
makeGraphQLRequest,\n} from '../graphql';\nimport { logger } from '../../logger';\nimport type { DataCategoryInput, ProcessingPurposeInput } from '../../codecs';\nimport { mapSeries } from '../bluebird-replace';\n\nexport interface DataSiloCsvPreview {\n /** ID of dataSilo */\n id: string;\n /** Name of dataSilo */\n title: string;\n}\n\nexport interface DataPointCsvPreview {\n /** ID of dataPoint */\n id: string;\n /** The path to this data point */\n path: string[];\n /** Description */\n description: {\n /** Default message */\n defaultMessage: string;\n };\n /** Name */\n name: string;\n}\n\nexport interface SubDataPointCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Name (or key) of the subdatapoint */\n name: string;\n /** The description */\n description?: string;\n /** Personal data category */\n categories: DataCategoryInput[];\n /** Data point ID */\n dataPointId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** The processing purpose for this sub datapoint */\n purposes: ProcessingPurposeInput[];\n /** Attribute attached to subdatapoint */\n attributeValues?: DataSiloAttributeValue[];\n /** Data category guesses that are output by the classifier */\n pendingCategoryGuesses?: {\n /** Data category being guessed */\n category: DataCategoryInput;\n /** Status of guess */\n status: SubDataPointDataSubCategoryGuessStatus;\n /** classifier version that produced the guess */\n classifierVersion: number;\n }[];\n}\n\nexport interface DatapointFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Whether to include guessed categories, defaults to only approved categories */\n includeGuessedCategories?: boolean;\n /** Whether or not to include attributes */\n includeAttributes?: boolean;\n /** Parent categories to filter down for */\n parentCategories?: DataCategoryType[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n}\n\n/**\n * Pull subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The subdatapoints\n */\nasync function pullSubDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<SubDataPointCsvPreview[]> {\n const subDataPoints: SubDataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(parentCategories.length > 0 ? { category: parentCategories } : {}),\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n // if parentCategories or subCategories and not includeGuessedCategories\n ...(parentCategories.length + subCategories.length > 0 &&\n !includeGuessedCategories\n ? // then only show data points with approved data categories\n { status: SubDataPointDataSubCategoryGuessStatus.Approved }\n : {}),\n ...(dataSiloIds.length > 0 ? 
{ dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n subDataPoints: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** Count */\n totalCount: number;\n };\n }>(client, SUB_DATA_POINTS_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n subDataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n subDataPoints: {\n /** List of matches */\n nodes: SubDataPointCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliSubDataPointCsvExport(\n $filterBy: SubDataPointFiltersInput\n $first: Int!\n $offset: Int!\n ) {\n subDataPoints(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n name\n description\n dataPointId\n dataSiloId\n purposes {\n name\n purpose\n }\n categories {\n name\n category\n }\n ${\n includeGuessedCategories\n ? `pendingCategoryGuesses {\n category {\n name\n category\n }\n status\n classifierVersion\n }`\n : ''\n }\n ${\n includeAttributes\n ? `attributeValues {\n attributeKey {\n name\n }\n name\n }`\n : ''\n }\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n // TODO: https://transcend.height.app/T-40484 - add cursor support\n // ...(cursor ? { cursor: { id: cursor } } : {}),\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n subDataPoints.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(subDataPoints, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n\n/**\n * Pull datapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints\n */\nasync function pullDatapoints(\n client: GraphQLClient,\n {\n dataPointIds = [],\n pageSize = 100,\n }: {\n /** IDs of data points to filter down */\n dataPointIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataPointCsvPreview[]> {\n const dataPoints: DataPointCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 2/3] Fetching metadata for ${dataPointIds.length} datapoints`,\n ),\n );\n\n // Group by 100\n const dataPointsGrouped = chunk(dataPointIds, pageSize);\n\n progressBar.start(dataPointIds.length, 0);\n let total = 0;\n await mapSeries(dataPointsGrouped, async (dataPointIdsGroup) => {\n try {\n const {\n dataPoints: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataPoints: {\n /** List of matches */\n nodes: DataPointCsvPreview[];\n };\n }>(client, DATAPOINT_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataPointIdsGroup,\n },\n });\n\n dataPoints.push(...nodes);\n 
total += dataPointIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for IDs ${dataPointIdsGroup.join(\n ', ',\n )}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataPoints.length} dataPoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataPoints;\n}\n\n/**\n * Pull data silo information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The data silos\n */\nasync function pullDataSilos(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n pageSize = 100,\n }: {\n /** IDs of data silos to filter down */\n dataSiloIds: string[];\n /** Page size to pull in */\n pageSize?: number;\n },\n): Promise<DataSiloCsvPreview[]> {\n const dataSilos: DataSiloCsvPreview[] = [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n logger.info(\n colors.magenta(\n `[Step 3/3] Fetching metadata for ${dataSiloIds.length} data silos`,\n ),\n );\n\n // Group by 100\n const dataSilosGrouped = chunk(dataSiloIds, pageSize);\n\n progressBar.start(dataSiloIds.length, 0);\n let total = 0;\n await mapSeries(dataSilosGrouped, async (dataSiloIdsGroup) => {\n try {\n const {\n dataSilos: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n dataSilos: {\n /** List of matches */\n nodes: DataSiloCsvPreview[];\n };\n }>(client, DATA_SILO_EXPORT, {\n first: pageSize,\n filterBy: {\n ids: dataSiloIdsGroup,\n },\n });\n\n dataSilos.push(...nodes);\n total += dataSiloIdsGroup.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching data silos for IDs ${dataSiloIdsGroup.join(', ')}`,\n ),\n );\n throw err;\n }\n });\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully pulled in ${dataSilos.length} data silos in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return dataSilos;\n}\n\n/**\n * Pull all datapoints from the data inventory.\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @returns The datapoints and data silos\n */\nexport async function pullAllDatapoints(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n includeGuessedCategories,\n includeAttributes,\n parentCategories = [],\n subCategories = [],\n pageSize = 1000,\n }: DatapointFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<\n (SubDataPointCsvPreview & {\n /** Data point information */\n dataPoint: DataPointCsvPreview;\n /** Data silo information */\n dataSilo: DataSiloCsvPreview;\n })[]\n> {\n // Subdatapoint information\n const subDatapoints = await pullSubDatapoints(client, {\n dataSiloIds,\n includeGuessedCategories,\n includeAttributes,\n parentCategories,\n subCategories,\n pageSize,\n });\n\n // The datapoint ids to grab\n const dataPointIds = uniq(subDatapoints.map((point) => point.dataPointId));\n const dataPoints = await pullDatapoints(client, {\n dataPointIds,\n });\n const dataPointById = keyBy(dataPoints, 'id');\n\n // The data silo IDs to grab\n const allDataSiloIds = uniq(subDatapoints.map((point) => point.dataSiloId));\n const dataSilos = await pullDataSilos(client, {\n 
dataSiloIds: allDataSiloIds,\n });\n const dataSiloById = keyBy(dataSilos, 'id');\n\n return subDatapoints.map((subDataPoint) => ({\n ...subDataPoint,\n dataPoint: dataPointById[subDataPoint.dataPointId],\n dataSilo: dataSiloById[subDataPoint.dataSiloId],\n }));\n}\n/* eslint-enable max-lines */\n","import type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport cliProgress from 'cli-progress';\nimport colors from 'colors';\nimport { gql, type GraphQLClient } from 'graphql-request';\nimport { sortBy } from 'lodash-es';\nimport type { DataCategoryInput } from '../../codecs';\nimport { ENTRY_COUNT, makeGraphQLRequest } from '../graphql';\nimport { logger } from '../../logger';\n\ninterface UnstructuredSubDataPointRecommendationCsvPreview {\n /** ID of subDatapoint */\n id: string;\n /** Entry or Named Entity recognized by the classifier */\n name: string;\n /** Context snippet including entry */\n contextSnippet: string;\n /** Scanned object ID */\n scannedObjectId: string;\n /** Scanned object path ID */\n scannedObjectPathId: string;\n /** The data silo ID */\n dataSiloId: string;\n /** Personal data category */\n dataSubCategory: DataCategoryInput;\n /** Classification Status */\n status: UnstructuredSubDataPointRecommendationStatus;\n /** Confidence */\n confidence: number;\n /** Classification method */\n classificationMethod: string;\n /** Classifier version */\n classifierVersion: string;\n}\n\ninterface EntryFilterOptions {\n /** IDs of data silos to filter down */\n dataSiloIds?: string[];\n /** Parent categories to filter down for */\n status?: UnstructuredSubDataPointRecommendationStatus[];\n /** Sub categories to filter down for */\n subCategories?: string[]; // TODO: https://transcend.height.app/T-40482 - do by name not ID\n /** Include entry and snippet */\n includeEncryptedSnippets?: boolean;\n /** Include encryptedSamplesS3Key */\n includeEncryptedSamplesS3Key?: boolean;\n}\n/**\n * Pull unstructured subdatapoint information\n *\n * @param client - Client to use for the request\n * @param options - Options\n * @param options.dataSiloIds - IDs of data silos to filter down\n * @param options.status - Parent categories to filter down for\n * @param options.subCategories - Sub categories to filter down for\n * @param options.includeEncryptedSnippets - Include entry and snippet\n * @param options.includeEncryptedSamplesS3Key - Include encryptedSamplesS3Key\n * @param options.pageSize - Page size to pull in\n * @returns A promise that resolves to an array of unstructured subdatapoint recommendations\n */\nexport async function pullUnstructuredSubDataPointRecommendations(\n client: GraphQLClient,\n {\n dataSiloIds = [],\n status,\n subCategories = [],\n includeEncryptedSnippets,\n pageSize = 100,\n }: EntryFilterOptions & {\n /** Page size to pull in */\n pageSize?: number;\n } = {},\n): Promise<UnstructuredSubDataPointRecommendationCsvPreview[]> {\n const unstructuredSubDataPointRecommendations: UnstructuredSubDataPointRecommendationCsvPreview[] =\n [];\n\n // Time duration\n const t0 = new Date().getTime();\n\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Filters\n const filterBy = {\n ...(subCategories.length > 0 ? { subCategoryIds: subCategories } : {}),\n ...(status ? { status } : {}),\n ...(dataSiloIds.length > 0 ? 
{ dataSilos: dataSiloIds } : {}),\n };\n\n // Build a GraphQL client\n const {\n unstructuredSubDataPointRecommendations: { totalCount },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** Count */\n totalCount: number;\n };\n }>(client, ENTRY_COUNT, {\n filterBy,\n });\n\n logger.info(colors.magenta('[Step 1/3] Pulling in all subdatapoints'));\n\n progressBar.start(totalCount, 0);\n let total = 0;\n let shouldContinue = false;\n let cursor: string | undefined;\n let offset = 0;\n do {\n try {\n const {\n unstructuredSubDataPointRecommendations: { nodes },\n } = await makeGraphQLRequest<{\n /** Query response */\n unstructuredSubDataPointRecommendations: {\n /** List of matches */\n nodes: UnstructuredSubDataPointRecommendationCsvPreview[];\n };\n }>(\n client,\n gql`\n query TranscendCliUnstructuredSubDataPointRecommendationCsvExport(\n $filterBy: UnstructuredSubDataPointRecommendationsFilterInput\n $first: Int!\n $offset: Int!\n ) {\n unstructuredSubDataPointRecommendations(\n filterBy: $filterBy\n first: $first\n offset: $offset\n useMaster: false\n ) {\n nodes {\n id\n dataSiloId\n scannedObjectPathId\n scannedObjectId\n ${includeEncryptedSnippets ? 'name' : ''}\n ${includeEncryptedSnippets ? 'contextSnippet' : ''}\n dataSubCategory {\n name\n category\n }\n status\n confidence\n classificationMethod\n classifierVersion\n }\n }\n }\n `,\n {\n first: pageSize,\n offset,\n filterBy: {\n ...filterBy,\n },\n },\n );\n\n cursor = nodes[nodes.length - 1]?.id as string;\n unstructuredSubDataPointRecommendations.push(...nodes);\n shouldContinue = nodes.length === pageSize;\n total += nodes.length;\n offset += nodes.length;\n progressBar.update(total);\n } catch (err) {\n logger.error(\n colors.red(\n `An error fetching subdatapoints for cursor ${cursor} and offset ${offset}`,\n ),\n );\n throw err;\n }\n } while (shouldContinue);\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n const sorted = sortBy(unstructuredSubDataPointRecommendations, 'name');\n\n logger.info(\n colors.green(\n `Successfully pulled in ${sorted.length} subdatapoints in ${\n totalTime / 1000\n } seconds!`,\n ),\n );\n return sorted;\n}\n"]}
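The renamed chunk above (chunk-EUDUZQOO to chunk-5JXP3N7U) bundles pullAllDatapoints and pullUnstructuredSubDataPointRecommendations; per the TypeScript embedded in its source map, both pull rows with an offset-paginated do/while loop that stops on the first short page. A minimal sketch of that loop follows, with `fetchPage` standing in for the GraphQL request (an assumed helper, not a package API).

```ts
// Minimal sketch of the offset pagination used by pullSubDatapoints and
// pullUnstructuredSubDataPointRecommendations in the chunk above: pages of
// `pageSize` rows are requested until a short page signals the end.
// `fetchPage` is an assumed stand-in for the GraphQL request, not a real API.
async function pullAllPages<T>(
  fetchPage: (offset: number, first: number) => Promise<T[]>,
  pageSize = 1000,
): Promise<T[]> {
  const results: T[] = [];
  let offset = 0;
  let shouldContinue = false;
  do {
    const nodes = await fetchPage(offset, pageSize);
    results.push(...nodes);
    shouldContinue = nodes.length === pageSize; // full page => maybe more rows
    offset += nodes.length;
  } while (shouldContinue);
  return results;
}
```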
@@ -1,2 +1,2 @@
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var
-
//# sourceMappingURL=chunk-
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkR77JR6OQcjs = require('./chunk-R77JR6OQ.cjs');var _chunkZUNVPK23cjs = require('./chunk-ZUNVPK23.cjs');var _chunkVIPFYWRRcjs = require('./chunk-VIPFYWRR.cjs');var _crypto = require('crypto'); var E = _interopRequireWildcard(_crypto);var _jsonwebtoken = require('jsonwebtoken'); var $ = _interopRequireWildcard(_jsonwebtoken);function B(c,l,n){let o=Buffer.from(n,"base64"),f=Buffer.from(l,"base64"),u="id-aes256-wrap-pad",d=Buffer.from("A65959A6","hex"),s=E.createCipheriv(u,f,d),i={encryptedIdentifier:Buffer.concat([s.update(c),s.final()]).toString("base64")};return $.sign(i,o,{algorithm:"HS384"})}var _iots = require('io-ts'); var e = _interopRequireWildcard(_iots); var r = _interopRequireWildcard(_iots);var O=e.intersection([e.type({userId:e.string,timestamp:e.string}),e.partial({confirmed:e.union([e.literal("true"),e.literal("false")]),updated:e.union([e.literal("true"),e.literal("false")]),prompted:e.union([e.literal("true"),e.literal("false")]),metadata:e.string,usp:e.union([e.string,e.null]),gpp:e.union([e.string,e.null])})]),Y= exports.b =e.intersection([O,e.partial({purposes:e.string})]),G= exports.c =e.intersection([O,e.type({partition:e.string}),e.partial({tcf:e.union([e.string,e.null]),purposes:e.record(e.string,e.union([e.boolean,e.string]))})]);var _colors = require('colors'); var _colors2 = _interopRequireDefault(_colors);var _cliprogress = require('cli-progress'); var _cliprogress2 = _interopRequireDefault(_cliprogress);var _typeutils = require('@transcend-io/type-utils');var R=/^[0-9][Y|N]([Y|N])[Y|N]$/,_= exports.e =r.record(r.string,r.union([r.boolean,r.literal("Auto")]));async function Z({base64EncryptionKey:c,base64SigningKey:l,preferences:n,partition:o,concurrency:f=100,transcendUrl:u=_chunkVIPFYWRRcjs.f}){let d=_chunkR77JR6OQcjs.Yd.call(void 0, u),s=n.filter(t=>t.usp&&!R.test(t.usp));if(s.length>0)throw new Error(`Received invalid usp strings: ${JSON.stringify(s,null,2)}`);let m=n.map((t,y)=>[t,y]).filter(([t])=>{if(!t.purposes)return!1;try{return _typeutils.decodeCodec.call(void 0, _,t.purposes),!1}catch (e2){return!0}});if(m.length>0)throw new Error(`Received invalid purpose maps: ${JSON.stringify(m,null,2)}`);let i=n.filter(t=>!t.usp&&!t.purposes);if(i.length>0)throw new Error(`Received invalid inputs, expected either purposes or usp to be defined: ${JSON.stringify(i,null,2)}`);_chunkZUNVPK23cjs.a.info(_colors2.default.magenta(`Uploading ${n.length} user preferences to partition ${o}`));let w=new Date().getTime(),g=new 
_cliprogress2.default.SingleBar({},_cliprogress2.default.Presets.shades_classic),S=0;g.start(n.length,0),await _chunkR77JR6OQcjs.b.call(void 0, n,async({userId:t,confirmed:y="true",updated:T,prompted:v,purposes:x,...p})=>{let k=B(t,c,l),[,D]=p.usp?R.exec(p.usp)||[]:[],F={token:k,partition:o,consent:{confirmed:y==="true",purposes:x?_typeutils.decodeCodec.call(void 0, _,x):p.usp?{SaleOfInfo:D==="Y"}:{},...T?{updated:T==="true"}:{},...v?{prompted:v==="true"}:{},...p}};try{await d.post("sync",{json:F}).json()}catch(h){try{let C=JSON.parse(_optionalChain([h, 'optionalAccess', _2 => _2.response, 'optionalAccess', _3 => _3.body])||"{}");C.error&&_chunkZUNVPK23cjs.a.error(_colors2.default.red(`Error: ${C.error}`))}catch (e3){}throw new Error(`Received an error from server: ${_optionalChain([h, 'optionalAccess', _4 => _4.response, 'optionalAccess', _5 => _5.body])||_optionalChain([h, 'optionalAccess', _6 => _6.message])}`)}S+=1,g.update(S)},{concurrency:f}),g.stop();let j=new Date().getTime()-w;_chunkZUNVPK23cjs.a.info(_colors2.default.green(`Successfully uploaded ${n.length} user preferences to partition ${o} in "${j/1e3}" seconds!`))}exports.a = B; exports.b = Y; exports.c = G; exports.d = R; exports.e = _; exports.f = Z;
+
//# sourceMappingURL=chunk-74OAET6D.cjs.map
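The chunk above (chunk-74OAET6D) bundles the consent upload helpers: the minified function B is createConsentToken, which wraps the user ID with AES-256 key wrapping and signs it into an HS384 JWT before uploadConsents posts preferences to the consent API. The sketch below mirrors that token construction, reusing the crypto and jsonwebtoken calls visible in the bundle; it is not an additional export of the CLI.

```ts
// Sketch of the consent token construction bundled above as function B:
// the user ID is encrypted with AES-256-KWP ('id-aes256-wrap-pad', IV A65959A6)
// using the base64 encryption key, then signed into an HS384 JWT with the
// base64 signing key. Mirrors the bundled code; not an extra export of the CLI.
import * as crypto from 'crypto';
import * as jwt from 'jsonwebtoken';

function buildConsentToken(
  userId: string,
  base64EncryptionKey: string,
  base64SigningKey: string,
): string {
  const encryptionKey = Buffer.from(base64EncryptionKey, 'base64');
  const signingKey = Buffer.from(base64SigningKey, 'base64');
  const iv = Buffer.from('A65959A6', 'hex'); // AES-KWP integrity check value (RFC 5649)
  const cipher = crypto.createCipheriv('id-aes256-wrap-pad', encryptionKey, iv);
  const encryptedIdentifier = Buffer.concat([
    cipher.update(userId),
    cipher.final(),
  ]).toString('base64');
  return jwt.sign({ encryptedIdentifier }, signingKey, { algorithm: 'HS384' });
}
```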
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-XXVZA7HN.cjs","../src/lib/consent-manager/createConsentToken.ts","../src/lib/consent-manager/types.ts","../src/lib/consent-manager/uploadConsents.ts"],"names":["createConsentToken","userId","base64EncryptionKey","base64SigningKey","signingKey","encryptionKey","encryptionAlgorithm","iv","cipher","jwtPayload","ConsentPreferenceBase","ConsentPreferenceUpload","ConsentPreferenceFetch","USP_STRING_REGEX","PurposeMap","uploadConsents","preferences","partition","concurrency","transcendUrl","DEFAULT_TRANSCEND_CONSENT_API","transcendConsentApi","createTranscendConsentGotInstance","invalidUspStrings","pref"],"mappings":"AAAA,u/BAA2C,wDAAoC,wDAAyC,0ECAhG,4FACH,SAWLA,CAAAA,CACdC,CAAAA,CACAC,CAAAA,CACAC,CAAAA,CACQ,CAER,IAAMC,CAAAA,CAAa,MAAA,CAAO,IAAA,CAAKD,CAAAA,CAAkB,QAAQ,CAAA,CACnDE,CAAAA,CAAgB,MAAA,CAAO,IAAA,CAAKH,CAAAA,CAAqB,QAAQ,CAAA,CAGzDI,CAAAA,CAAsB,oBAAA,CAEtBC,CAAAA,CAAK,MAAA,CAAO,IAAA,CAAK,UAAA,CAAY,KAAK,CAAA,CAElCC,CAAAA,CAAgB,CAAA,CAAA,cAAA,CAAeF,CAAAA,CAAqBD,CAAAA,CAAeE,CAAE,CAAA,CAYrEE,CAAAA,CAAa,CACjB,mBAAA,CAV0B,MAAA,CAAO,MAAA,CAAO,CACxCD,CAAAA,CAAO,MAAA,CAAOP,CAAM,CAAA,CACpBO,CAAAA,CAAO,KAAA,CAAM,CACf,CAAC,CAAA,CAAE,QAAA,CAAS,QAAQ,CAQpB,CAAA,CAOA,OAJyB,CAAA,CAAA,IAAA,CAAKC,CAAAA,CAAYL,CAAAA,CAAY,CACpD,SAAA,CAAW,OACb,CAAC,CAGH,CChDA,6GAAmB,IAENM,CAAAA,CAA0B,CAAA,CAAA,YAAA,CAAa,CAChD,CAAA,CAAA,IAAA,CAAK,CAEL,MAAA,CAAU,CAAA,CAAA,MAAA,CAEV,SAAA,CAAa,CAAA,CAAA,MACf,CAAC,CAAA,CACC,CAAA,CAAA,OAAA,CAAQ,CAER,SAAA,CAAa,CAAA,CAAA,KAAA,CAAM,CAAG,CAAA,CAAA,OAAA,CAAQ,MAAM,CAAA,CAAK,CAAA,CAAA,OAAA,CAAQ,OAAO,CAAC,CAAC,CAAA,CAI1D,OAAA,CAAW,CAAA,CAAA,KAAA,CAAM,CAAG,CAAA,CAAA,OAAA,CAAQ,MAAM,CAAA,CAAK,CAAA,CAAA,OAAA,CAAQ,OAAO,CAAC,CAAC,CAAA,CAIxD,QAAA,CAAY,CAAA,CAAA,KAAA,CAAM,CAAG,CAAA,CAAA,OAAA,CAAQ,MAAM,CAAA,CAAK,CAAA,CAAA,OAAA,CAAQ,OAAO,CAAC,CAAC,CAAA,CAEzD,QAAA,CAAY,CAAA,CAAA,MAAA,CAEZ,GAAA,CAAO,CAAA,CAAA,KAAA,CAAM,CAAG,CAAA,CAAA,MAAA,CAAU,CAAA,CAAA,IAAI,CAAC,CAAA,CAE/B,GAAA,CAAO,CAAA,CAAA,KAAA,CAAM,CAAG,CAAA,CAAA,MAAA,CAAU,CAAA,CAAA,IAAI,CAAC,CACjC,CAAC,CACH,CAAC,CAAA,CAKYC,CAAAA,aAA4B,CAAA,CAAA,YAAA,CAAa,CACpDD,CAAAA,CACE,CAAA,CAAA,OAAA,CAAQ,CAKR,QAAA,CAAY,CAAA,CAAA,MACd,CAAC,CACH,CAAC,CAAA,CAKYE,CAAAA,aAA2B,CAAA,CAAA,YAAA,CAAa,CACnDF,CAAAA,CACE,CAAA,CAAA,IAAA,CAAK,CAEL,SAAA,CAAa,CAAA,CAAA,MACf,CAAC,CAAA,CACC,CAAA,CAAA,OAAA,CAAQ,CAER,GAAA,CAAO,CAAA,CAAA,KAAA,CAAM,CAAG,CAAA,CAAA,MAAA,CAAU,CAAA,CAAA,IAAI,CAAC,CAAA,CAK/B,QAAA,CAAY,CAAA,CAAA,MAAA,CAAS,CAAA,CAAA,MAAA,CAAU,CAAA,CAAA,KAAA,CAAM,CAAG,CAAA,CAAA,OAAA,CAAW,CAAA,CAAA,MAAM,CAAC,CAAC,CAC7D,CAAC,CACH,CAAC,CAAA,CC5DD,gFAAmB,qGAMK,qDACI,IAIfG,CAAAA,CAAmB,0BAAA,CAEnBC,CAAAA,aAAe,CAAA,CAAA,MAAA,CACxB,CAAA,CAAA,MAAA,CACA,CAAA,CAAA,KAAA,CAAM,CAAG,CAAA,CAAA,OAAA,CAAW,CAAA,CAAA,OAAA,CAAQ,MAAM,CAAC,CAAC,CACxC,CAAA,CAOA,MAAA,SAAsBC,CAAAA,CAAe,CACnC,mBAAA,CAAAb,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,WAAA,CAAAa,CAAAA,CACA,SAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CAAAA,CAAc,GAAA,CACd,YAAA,CAAAC,CAAAA,CAAeC,mBACjB,CAAA,CAakB,CAEhB,IAAMC,CAAAA,CAAsBC,kCAAAA,CAA8C,CAAA,CAGpEC,CAAAA,CAAoBP,CAAAA,CAAY,MAAA,CACnCQ,CAAAA,EAASA,CAAAA,CAAK,GAAA,EAAO,CAACX,CAAAA,CAAiB,IAAA,CAAKW,CAAAA,CAAK,GAAG,CACvD,CAAA,CACA,EAAA,CAAID,CAAAA,CAAkB,MAAA,CAAS,CAAA,CAC7B,MAAM,IAAI,KAAA,CACR,CAAA,8BAAA,EAAiC,IAAA,CAAK,SAAA,CACpCA,CAAAA,CACA,IAAA,CACA,CACF,CAAC,CAAA,CAAA","file":"/home/runner/work/cli/cli/dist/chunk-XXVZA7HN.cjs","sourcesContent":[null,"import * as crypto from 'crypto';\nimport * as jwt from 'jsonwebtoken';\n\n/**\n * Function to create a consent manager token\n *\n * @see 
https://docs.transcend.io/docs/consent/reference/managed-consent-database\n * @param userId - User ID\n * @param base64EncryptionKey - Encryption key\n * @param base64SigningKey - Signing key\n * @returns Token\n */\nexport function createConsentToken(\n userId: string,\n base64EncryptionKey: string,\n base64SigningKey: string,\n): string {\n // Read on for where to find these keys\n const signingKey = Buffer.from(base64SigningKey, 'base64');\n const encryptionKey = Buffer.from(base64EncryptionKey, 'base64');\n\n // NIST's AES-KWP implementation { aes 48 } - see https://tools.ietf.org/html/rfc5649\n const encryptionAlgorithm = 'id-aes256-wrap-pad';\n // Initial Value for AES-KWP integrity check - see https://tools.ietf.org/html/rfc5649#section-3\n const iv = Buffer.from('A65959A6', 'hex');\n // Set up encryption algorithm\n const cipher = crypto.createCipheriv(encryptionAlgorithm, encryptionKey, iv);\n\n // Encrypt the userId and base64-encode the result\n const encryptedIdentifier = Buffer.concat([\n cipher.update(userId),\n cipher.final(),\n ]).toString('base64');\n\n // Create the JWT content - jwt.sign will add a 'iat' (issued at) field to the payload\n // If you wanted to add something manually, consider\n // const issued: Date = new Date();\n // const isoDate = issued.toISOString();\n const jwtPayload = {\n encryptedIdentifier,\n };\n\n // Create a JSON web token and HMAC it with SHA-384\n const consentToken = jwt.sign(jwtPayload, signingKey, {\n algorithm: 'HS384',\n });\n\n return consentToken;\n}\n","import * as t from 'io-ts';\n\nexport const ConsentPreferenceBase = t.intersection([\n t.type({\n /** User ID */\n userId: t.string,\n /** Has the consent been updated (including no-change confirmation) since default resolution */\n timestamp: t.string,\n }),\n t.partial({\n /** Was tracking consent confirmed by the user? 
If this is false, the consent was resolved from defaults & is not yet confirmed */\n confirmed: t.union([t.literal('true'), t.literal('false')]),\n /**\n * Has the consent been updated (including no-change confirmation) since default resolution\n */\n updated: t.union([t.literal('true'), t.literal('false')]),\n /**\n * Whether or not the UI has been shown to the end-user (undefined in older versions of airgap.js)\n */\n prompted: t.union([t.literal('true'), t.literal('false')]),\n /** Consent metadata */\n metadata: t.string,\n /** US Privacy (USP) String */\n usp: t.union([t.string, t.null]),\n /** IAB GPP String */\n gpp: t.union([t.string, t.null]),\n }),\n]);\n\n/** Type override */\nexport type ConsentPreferenceBase = t.TypeOf<typeof ConsentPreferenceUpload>;\n\nexport const ConsentPreferenceUpload = t.intersection([\n ConsentPreferenceBase,\n t.partial({\n /**\n * Purpose map\n * This is a stringified JSON object with keys as purpose names and values as booleans or 'Auto'\n */\n purposes: t.string,\n }),\n]);\n\n/** Type override */\nexport type ConsentPreferenceUpload = t.TypeOf<typeof ConsentPreferenceUpload>;\n\nexport const ConsentPreferenceFetch = t.intersection([\n ConsentPreferenceBase,\n t.type({\n /** This is the partition key used for the dynamo entry */\n partition: t.string,\n }),\n t.partial({\n /** IAB TCF String */\n tcf: t.union([t.string, t.null]),\n /**\n * Purpose map\n * This is a JSON object with keys as purpose names and values as booleans or 'Auto'\n */\n purposes: t.record(t.string, t.union([t.boolean, t.string])),\n }),\n]);\n\n/** Type override */\nexport type ConsentPreferenceFetch = t.TypeOf<typeof ConsentPreferenceFetch>;\n","import { createTranscendConsentGotInstance } from '../graphql';\nimport colors from 'colors';\nimport * as t from 'io-ts';\nimport { DEFAULT_TRANSCEND_CONSENT_API } from '../../constants';\nimport { map } from '../bluebird-replace';\nimport { createConsentToken } from './createConsentToken';\nimport { logger } from '../../logger';\nimport cliProgress from 'cli-progress';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport type { ConsentPreferenceUpload } from './types';\nimport { ConsentPreferencesBody } from '@transcend-io/airgap.js-types';\n\nexport const USP_STRING_REGEX = /^[0-9][Y|N]([Y|N])[Y|N]$/;\n\nexport const PurposeMap = t.record(\n t.string,\n t.union([t.boolean, t.literal('Auto')]),\n);\n\n/**\n * Upload a set of consent preferences\n *\n * @param options - Options\n */\nexport async function uploadConsents({\n base64EncryptionKey,\n base64SigningKey,\n preferences,\n partition,\n concurrency = 100,\n transcendUrl = DEFAULT_TRANSCEND_CONSENT_API,\n}: {\n /** base64 encryption key */\n base64EncryptionKey: string;\n /** base64 signing key */\n base64SigningKey: string;\n /** Partition key */\n partition: string;\n /** Sombra API key authentication */\n preferences: ConsentPreferenceUpload[];\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Concurrency limit for approving */\n concurrency?: number;\n}): Promise<void> {\n // Create connection to API\n const transcendConsentApi = createTranscendConsentGotInstance(transcendUrl);\n\n // Ensure usp strings are valid\n const invalidUspStrings = preferences.filter(\n (pref) => pref.usp && !USP_STRING_REGEX.test(pref.usp),\n );\n if (invalidUspStrings.length > 0) {\n throw new Error(\n `Received invalid usp strings: ${JSON.stringify(\n invalidUspStrings,\n null,\n 2,\n )}`,\n );\n }\n\n // Ensure purpose maps are valid\n const invalidPurposeMaps = 
preferences\n .map((pref, ind) => [pref, ind] as [ConsentPreferenceUpload, number])\n .filter(([pref]) => {\n if (!pref.purposes) {\n return false;\n }\n try {\n decodeCodec(PurposeMap, pref.purposes);\n return false;\n } catch {\n return true;\n }\n });\n if (invalidPurposeMaps.length > 0) {\n throw new Error(\n `Received invalid purpose maps: ${JSON.stringify(\n invalidPurposeMaps,\n null,\n 2,\n )}`,\n );\n }\n\n // Ensure usp or preferences are provided\n const invalidInputs = preferences.filter(\n (pref) => !pref.usp && !pref.purposes,\n );\n if (invalidInputs.length > 0) {\n throw new Error(\n `Received invalid inputs, expected either purposes or usp to be defined: ${JSON.stringify(\n invalidInputs,\n null,\n 2,\n )}`,\n );\n }\n\n logger.info(\n colors.magenta(\n `Uploading ${preferences.length} user preferences to partition ${partition}`,\n ),\n );\n\n // Time duration\n const t0 = new Date().getTime();\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Build a GraphQL client\n let total = 0;\n progressBar.start(preferences.length, 0);\n await map(\n preferences,\n async ({\n userId,\n confirmed = 'true',\n updated,\n prompted,\n purposes,\n ...consent\n }) => {\n const token = createConsentToken(\n userId,\n base64EncryptionKey,\n base64SigningKey,\n );\n\n // parse usp string\n const [, saleStatus] = consent.usp\n ? USP_STRING_REGEX.exec(consent.usp) || []\n : [];\n\n const input = {\n token,\n partition,\n consent: {\n confirmed: confirmed === 'true',\n purposes: purposes\n ? decodeCodec(PurposeMap, purposes)\n : consent.usp\n ? { SaleOfInfo: saleStatus === 'Y' }\n : {},\n ...(updated ? { updated: updated === 'true' } : {}),\n ...(prompted ? { prompted: prompted === 'true' } : {}),\n ...consent,\n },\n } as ConsentPreferencesBody;\n\n // Make the request\n try {\n await transcendConsentApi\n .post('sync', {\n json: input,\n })\n .json();\n } catch (err) {\n try {\n const parsed = JSON.parse(err?.response?.body || '{}');\n if (parsed.error) {\n logger.error(colors.red(`Error: ${parsed.error}`));\n }\n } catch (e) {\n // continue\n }\n throw new Error(\n `Received an error from server: ${\n err?.response?.body || err?.message\n }`,\n );\n }\n\n total += 1;\n progressBar.update(total);\n },\n { concurrency },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully uploaded ${\n preferences.length\n } user preferences to partition ${partition} in \"${\n totalTime / 1000\n }\" seconds!`,\n ),\n );\n}\n"]}
+
{"version":3,"sources":["/home/runner/work/cli/cli/dist/chunk-74OAET6D.cjs","../src/lib/consent-manager/createConsentToken.ts","../src/lib/consent-manager/types.ts","../src/lib/consent-manager/uploadConsents.ts"],"names":["createConsentToken","userId","base64EncryptionKey","base64SigningKey","signingKey","encryptionKey","encryptionAlgorithm","iv","cipher","jwtPayload","ConsentPreferenceBase","ConsentPreferenceUpload","ConsentPreferenceFetch","USP_STRING_REGEX","PurposeMap","uploadConsents","preferences","partition","concurrency","transcendUrl","DEFAULT_TRANSCEND_CONSENT_API","transcendConsentApi","createTranscendConsentGotInstance","invalidUspStrings","pref"],"mappings":"AAAA,u/BAA2C,wDAAoC,wDAAyC,0ECAhG,4FACH,SAWLA,CAAAA,CACdC,CAAAA,CACAC,CAAAA,CACAC,CAAAA,CACQ,CAER,IAAMC,CAAAA,CAAa,MAAA,CAAO,IAAA,CAAKD,CAAAA,CAAkB,QAAQ,CAAA,CACnDE,CAAAA,CAAgB,MAAA,CAAO,IAAA,CAAKH,CAAAA,CAAqB,QAAQ,CAAA,CAGzDI,CAAAA,CAAsB,oBAAA,CAEtBC,CAAAA,CAAK,MAAA,CAAO,IAAA,CAAK,UAAA,CAAY,KAAK,CAAA,CAElCC,CAAAA,CAAgB,CAAA,CAAA,cAAA,CAAeF,CAAAA,CAAqBD,CAAAA,CAAeE,CAAE,CAAA,CAYrEE,CAAAA,CAAa,CACjB,mBAAA,CAV0B,MAAA,CAAO,MAAA,CAAO,CACxCD,CAAAA,CAAO,MAAA,CAAOP,CAAM,CAAA,CACpBO,CAAAA,CAAO,KAAA,CAAM,CACf,CAAC,CAAA,CAAE,QAAA,CAAS,QAAQ,CAQpB,CAAA,CAOA,OAJyB,CAAA,CAAA,IAAA,CAAKC,CAAAA,CAAYL,CAAAA,CAAY,CACpD,SAAA,CAAW,OACb,CAAC,CAGH,CChDA,6GAAmB,IAENM,CAAAA,CAA0B,CAAA,CAAA,YAAA,CAAa,CAChD,CAAA,CAAA,IAAA,CAAK,CAEL,MAAA,CAAU,CAAA,CAAA,MAAA,CAEV,SAAA,CAAa,CAAA,CAAA,MACf,CAAC,CAAA,CACC,CAAA,CAAA,OAAA,CAAQ,CAER,SAAA,CAAa,CAAA,CAAA,KAAA,CAAM,CAAG,CAAA,CAAA,OAAA,CAAQ,MAAM,CAAA,CAAK,CAAA,CAAA,OAAA,CAAQ,OAAO,CAAC,CAAC,CAAA,CAI1D,OAAA,CAAW,CAAA,CAAA,KAAA,CAAM,CAAG,CAAA,CAAA,OAAA,CAAQ,MAAM,CAAA,CAAK,CAAA,CAAA,OAAA,CAAQ,OAAO,CAAC,CAAC,CAAA,CAIxD,QAAA,CAAY,CAAA,CAAA,KAAA,CAAM,CAAG,CAAA,CAAA,OAAA,CAAQ,MAAM,CAAA,CAAK,CAAA,CAAA,OAAA,CAAQ,OAAO,CAAC,CAAC,CAAA,CAEzD,QAAA,CAAY,CAAA,CAAA,MAAA,CAEZ,GAAA,CAAO,CAAA,CAAA,KAAA,CAAM,CAAG,CAAA,CAAA,MAAA,CAAU,CAAA,CAAA,IAAI,CAAC,CAAA,CAE/B,GAAA,CAAO,CAAA,CAAA,KAAA,CAAM,CAAG,CAAA,CAAA,MAAA,CAAU,CAAA,CAAA,IAAI,CAAC,CACjC,CAAC,CACH,CAAC,CAAA,CAKYC,CAAAA,aAA4B,CAAA,CAAA,YAAA,CAAa,CACpDD,CAAAA,CACE,CAAA,CAAA,OAAA,CAAQ,CAKR,QAAA,CAAY,CAAA,CAAA,MACd,CAAC,CACH,CAAC,CAAA,CAKYE,CAAAA,aAA2B,CAAA,CAAA,YAAA,CAAa,CACnDF,CAAAA,CACE,CAAA,CAAA,IAAA,CAAK,CAEL,SAAA,CAAa,CAAA,CAAA,MACf,CAAC,CAAA,CACC,CAAA,CAAA,OAAA,CAAQ,CAER,GAAA,CAAO,CAAA,CAAA,KAAA,CAAM,CAAG,CAAA,CAAA,MAAA,CAAU,CAAA,CAAA,IAAI,CAAC,CAAA,CAK/B,QAAA,CAAY,CAAA,CAAA,MAAA,CAAS,CAAA,CAAA,MAAA,CAAU,CAAA,CAAA,KAAA,CAAM,CAAG,CAAA,CAAA,OAAA,CAAW,CAAA,CAAA,MAAM,CAAC,CAAC,CAC7D,CAAC,CACH,CAAC,CAAA,CC5DD,gFAAmB,qGAMK,qDACI,IAIfG,CAAAA,CAAmB,0BAAA,CAEnBC,CAAAA,aAAe,CAAA,CAAA,MAAA,CACxB,CAAA,CAAA,MAAA,CACA,CAAA,CAAA,KAAA,CAAM,CAAG,CAAA,CAAA,OAAA,CAAW,CAAA,CAAA,OAAA,CAAQ,MAAM,CAAC,CAAC,CACxC,CAAA,CAOA,MAAA,SAAsBC,CAAAA,CAAe,CACnC,mBAAA,CAAAb,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,WAAA,CAAAa,CAAAA,CACA,SAAA,CAAAC,CAAAA,CACA,WAAA,CAAAC,CAAAA,CAAc,GAAA,CACd,YAAA,CAAAC,CAAAA,CAAeC,mBACjB,CAAA,CAakB,CAEhB,IAAMC,CAAAA,CAAsBC,kCAAAA,CAA8C,CAAA,CAGpEC,CAAAA,CAAoBP,CAAAA,CAAY,MAAA,CACnCQ,CAAAA,EAASA,CAAAA,CAAK,GAAA,EAAO,CAACX,CAAAA,CAAiB,IAAA,CAAKW,CAAAA,CAAK,GAAG,CACvD,CAAA,CACA,EAAA,CAAID,CAAAA,CAAkB,MAAA,CAAS,CAAA,CAC7B,MAAM,IAAI,KAAA,CACR,CAAA,8BAAA,EAAiC,IAAA,CAAK,SAAA,CACpCA,CAAAA,CACA,IAAA,CACA,CACF,CAAC,CAAA,CAAA","file":"/home/runner/work/cli/cli/dist/chunk-74OAET6D.cjs","sourcesContent":[null,"import * as crypto from 'crypto';\nimport * as jwt from 'jsonwebtoken';\n\n/**\n * Function to create a consent manager token\n *\n * @see 
https://docs.transcend.io/docs/consent/reference/managed-consent-database\n * @param userId - User ID\n * @param base64EncryptionKey - Encryption key\n * @param base64SigningKey - Signing key\n * @returns Token\n */\nexport function createConsentToken(\n userId: string,\n base64EncryptionKey: string,\n base64SigningKey: string,\n): string {\n // Read on for where to find these keys\n const signingKey = Buffer.from(base64SigningKey, 'base64');\n const encryptionKey = Buffer.from(base64EncryptionKey, 'base64');\n\n // NIST's AES-KWP implementation { aes 48 } - see https://tools.ietf.org/html/rfc5649\n const encryptionAlgorithm = 'id-aes256-wrap-pad';\n // Initial Value for AES-KWP integrity check - see https://tools.ietf.org/html/rfc5649#section-3\n const iv = Buffer.from('A65959A6', 'hex');\n // Set up encryption algorithm\n const cipher = crypto.createCipheriv(encryptionAlgorithm, encryptionKey, iv);\n\n // Encrypt the userId and base64-encode the result\n const encryptedIdentifier = Buffer.concat([\n cipher.update(userId),\n cipher.final(),\n ]).toString('base64');\n\n // Create the JWT content - jwt.sign will add a 'iat' (issued at) field to the payload\n // If you wanted to add something manually, consider\n // const issued: Date = new Date();\n // const isoDate = issued.toISOString();\n const jwtPayload = {\n encryptedIdentifier,\n };\n\n // Create a JSON web token and HMAC it with SHA-384\n const consentToken = jwt.sign(jwtPayload, signingKey, {\n algorithm: 'HS384',\n });\n\n return consentToken;\n}\n","import * as t from 'io-ts';\n\nexport const ConsentPreferenceBase = t.intersection([\n t.type({\n /** User ID */\n userId: t.string,\n /** Has the consent been updated (including no-change confirmation) since default resolution */\n timestamp: t.string,\n }),\n t.partial({\n /** Was tracking consent confirmed by the user? 
If this is false, the consent was resolved from defaults & is not yet confirmed */\n confirmed: t.union([t.literal('true'), t.literal('false')]),\n /**\n * Has the consent been updated (including no-change confirmation) since default resolution\n */\n updated: t.union([t.literal('true'), t.literal('false')]),\n /**\n * Whether or not the UI has been shown to the end-user (undefined in older versions of airgap.js)\n */\n prompted: t.union([t.literal('true'), t.literal('false')]),\n /** Consent metadata */\n metadata: t.string,\n /** US Privacy (USP) String */\n usp: t.union([t.string, t.null]),\n /** IAB GPP String */\n gpp: t.union([t.string, t.null]),\n }),\n]);\n\n/** Type override */\nexport type ConsentPreferenceBase = t.TypeOf<typeof ConsentPreferenceUpload>;\n\nexport const ConsentPreferenceUpload = t.intersection([\n ConsentPreferenceBase,\n t.partial({\n /**\n * Purpose map\n * This is a stringified JSON object with keys as purpose names and values as booleans or 'Auto'\n */\n purposes: t.string,\n }),\n]);\n\n/** Type override */\nexport type ConsentPreferenceUpload = t.TypeOf<typeof ConsentPreferenceUpload>;\n\nexport const ConsentPreferenceFetch = t.intersection([\n ConsentPreferenceBase,\n t.type({\n /** This is the partition key used for the dynamo entry */\n partition: t.string,\n }),\n t.partial({\n /** IAB TCF String */\n tcf: t.union([t.string, t.null]),\n /**\n * Purpose map\n * This is a JSON object with keys as purpose names and values as booleans or 'Auto'\n */\n purposes: t.record(t.string, t.union([t.boolean, t.string])),\n }),\n]);\n\n/** Type override */\nexport type ConsentPreferenceFetch = t.TypeOf<typeof ConsentPreferenceFetch>;\n","import { createTranscendConsentGotInstance } from '../graphql';\nimport colors from 'colors';\nimport * as t from 'io-ts';\nimport { DEFAULT_TRANSCEND_CONSENT_API } from '../../constants';\nimport { map } from '../bluebird-replace';\nimport { createConsentToken } from './createConsentToken';\nimport { logger } from '../../logger';\nimport cliProgress from 'cli-progress';\nimport { decodeCodec } from '@transcend-io/type-utils';\nimport type { ConsentPreferenceUpload } from './types';\nimport { ConsentPreferencesBody } from '@transcend-io/airgap.js-types';\n\nexport const USP_STRING_REGEX = /^[0-9][Y|N]([Y|N])[Y|N]$/;\n\nexport const PurposeMap = t.record(\n t.string,\n t.union([t.boolean, t.literal('Auto')]),\n);\n\n/**\n * Upload a set of consent preferences\n *\n * @param options - Options\n */\nexport async function uploadConsents({\n base64EncryptionKey,\n base64SigningKey,\n preferences,\n partition,\n concurrency = 100,\n transcendUrl = DEFAULT_TRANSCEND_CONSENT_API,\n}: {\n /** base64 encryption key */\n base64EncryptionKey: string;\n /** base64 signing key */\n base64SigningKey: string;\n /** Partition key */\n partition: string;\n /** Sombra API key authentication */\n preferences: ConsentPreferenceUpload[];\n /** API URL for Transcend backend */\n transcendUrl?: string;\n /** Concurrency limit for approving */\n concurrency?: number;\n}): Promise<void> {\n // Create connection to API\n const transcendConsentApi = createTranscendConsentGotInstance(transcendUrl);\n\n // Ensure usp strings are valid\n const invalidUspStrings = preferences.filter(\n (pref) => pref.usp && !USP_STRING_REGEX.test(pref.usp),\n );\n if (invalidUspStrings.length > 0) {\n throw new Error(\n `Received invalid usp strings: ${JSON.stringify(\n invalidUspStrings,\n null,\n 2,\n )}`,\n );\n }\n\n // Ensure purpose maps are valid\n const invalidPurposeMaps = 
preferences\n .map((pref, ind) => [pref, ind] as [ConsentPreferenceUpload, number])\n .filter(([pref]) => {\n if (!pref.purposes) {\n return false;\n }\n try {\n decodeCodec(PurposeMap, pref.purposes);\n return false;\n } catch {\n return true;\n }\n });\n if (invalidPurposeMaps.length > 0) {\n throw new Error(\n `Received invalid purpose maps: ${JSON.stringify(\n invalidPurposeMaps,\n null,\n 2,\n )}`,\n );\n }\n\n // Ensure usp or preferences are provided\n const invalidInputs = preferences.filter(\n (pref) => !pref.usp && !pref.purposes,\n );\n if (invalidInputs.length > 0) {\n throw new Error(\n `Received invalid inputs, expected either purposes or usp to be defined: ${JSON.stringify(\n invalidInputs,\n null,\n 2,\n )}`,\n );\n }\n\n logger.info(\n colors.magenta(\n `Uploading ${preferences.length} user preferences to partition ${partition}`,\n ),\n );\n\n // Time duration\n const t0 = new Date().getTime();\n // create a new progress bar instance and use shades_classic theme\n const progressBar = new cliProgress.SingleBar(\n {},\n cliProgress.Presets.shades_classic,\n );\n\n // Build a GraphQL client\n let total = 0;\n progressBar.start(preferences.length, 0);\n await map(\n preferences,\n async ({\n userId,\n confirmed = 'true',\n updated,\n prompted,\n purposes,\n ...consent\n }) => {\n const token = createConsentToken(\n userId,\n base64EncryptionKey,\n base64SigningKey,\n );\n\n // parse usp string\n const [, saleStatus] = consent.usp\n ? USP_STRING_REGEX.exec(consent.usp) || []\n : [];\n\n const input = {\n token,\n partition,\n consent: {\n confirmed: confirmed === 'true',\n purposes: purposes\n ? decodeCodec(PurposeMap, purposes)\n : consent.usp\n ? { SaleOfInfo: saleStatus === 'Y' }\n : {},\n ...(updated ? { updated: updated === 'true' } : {}),\n ...(prompted ? { prompted: prompted === 'true' } : {}),\n ...consent,\n },\n } as ConsentPreferencesBody;\n\n // Make the request\n try {\n await transcendConsentApi\n .post('sync', {\n json: input,\n })\n .json();\n } catch (err) {\n try {\n const parsed = JSON.parse(err?.response?.body || '{}');\n if (parsed.error) {\n logger.error(colors.red(`Error: ${parsed.error}`));\n }\n } catch (e) {\n // continue\n }\n throw new Error(\n `Received an error from server: ${\n err?.response?.body || err?.message\n }`,\n );\n }\n\n total += 1;\n progressBar.update(total);\n },\n { concurrency },\n );\n\n progressBar.stop();\n const t1 = new Date().getTime();\n const totalTime = t1 - t0;\n\n logger.info(\n colors.green(\n `Successfully uploaded ${\n preferences.length\n } user preferences to partition ${partition} in \"${\n totalTime / 1000\n }\" seconds!`,\n ),\n );\n}\n"]}
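For orientation, the sourcesContent embedded in the map above carries the TypeScript behind this chunk (src/lib/consent-manager): createConsentToken AES-KWP-wraps a user ID and signs it as an HS384 JWT, and uploadConsents validates each preference's USP string or purpose map before POSTing it to the consent API's `sync` endpoint. Below is a minimal sketch of invoking those two exports; the import paths, environment-variable names, partition value, and user data are illustrative assumptions and are not part of this diff.

```ts
// Illustrative sketch only - not part of the published package diff.
// Import paths, key sources, partition, and user data are placeholder assumptions.
import { createConsentToken } from './createConsentToken';
import { uploadConsents } from './uploadConsents';

async function main(): Promise<void> {
  // Placeholder env vars holding the base64-encoded keys
  const base64EncryptionKey = process.env.TRANSCEND_ENCRYPTION_KEY ?? '';
  const base64SigningKey = process.env.TRANSCEND_SIGNING_KEY ?? '';

  // AES-KWP-wraps the user ID, then signs the payload as an HS384 JWT
  const token = createConsentToken('user-123', base64EncryptionKey, base64SigningKey);
  console.log(`consent token: ${token}`);

  // Validates USP strings / purpose maps, then POSTs each record to the
  // consent API 'sync' endpoint with a per-user token
  await uploadConsents({
    base64EncryptionKey,
    base64SigningKey,
    partition: 'example-partition-key', // placeholder partition key
    preferences: [
      {
        userId: 'user-123',
        timestamp: new Date().toISOString(),
        confirmed: 'true',
        usp: '1YNN', // must match USP_STRING_REGEX
      },
    ],
    concurrency: 10,
  });
}

void main();
```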