@transcend-io/cli 8.37.1 → 8.38.0

This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (121)
  1. package/dist/{api-keys-Bb2BbZQe.cjs → api-keys-DKB-2PVX.cjs} +2 -2
  2. package/dist/{api-keys-Bb2BbZQe.cjs.map → api-keys-DKB-2PVX.cjs.map} +1 -1
  3. package/dist/{app-C9jD-f87.cjs → app-BUVlVXyg.cjs} +18 -18
  4. package/dist/{app-C9jD-f87.cjs.map → app-BUVlVXyg.cjs.map} +1 -1
  5. package/dist/bin/bash-complete.cjs +1 -1
  6. package/dist/bin/cli.cjs +1 -1
  7. package/dist/bin/deprecated-command.cjs +1 -1
  8. package/dist/{code-scanning-4d0zlFxk.cjs → code-scanning-BAPEq8Xx.cjs} +2 -2
  9. package/dist/{code-scanning-4d0zlFxk.cjs.map → code-scanning-BAPEq8Xx.cjs.map} +1 -1
  10. package/dist/{command-XJ7XPQ04.cjs → command-DM9vqmJO.cjs} +2 -2
  11. package/dist/{command-XJ7XPQ04.cjs.map → command-DM9vqmJO.cjs.map} +1 -1
  12. package/dist/{consent-manager-CCyvzvY5.cjs → consent-manager-C6jfR89E.cjs} +2 -2
  13. package/dist/{consent-manager-CCyvzvY5.cjs.map → consent-manager-C6jfR89E.cjs.map} +1 -1
  14. package/dist/{constants-wkuhlP8d.cjs → constants-BvCUdOlO.cjs} +2 -2
  15. package/dist/{constants-wkuhlP8d.cjs.map → constants-BvCUdOlO.cjs.map} +1 -1
  16. package/dist/{cron-DfEGA7Rf.cjs → cron-BO5TvkmX.cjs} +2 -2
  17. package/dist/{cron-DfEGA7Rf.cjs.map → cron-BO5TvkmX.cjs.map} +1 -1
  18. package/dist/{data-inventory-C1eqZk1M.cjs → data-inventory-Bc9DbWkU.cjs} +2 -2
  19. package/dist/{data-inventory-C1eqZk1M.cjs.map → data-inventory-Bc9DbWkU.cjs.map} +1 -1
  20. package/dist/{dataFlowsToDataSilos-DXlFFHMV.cjs → dataFlowsToDataSilos-CRqkUmje.cjs} +2 -2
  21. package/dist/{dataFlowsToDataSilos-DXlFFHMV.cjs.map → dataFlowsToDataSilos-CRqkUmje.cjs.map} +1 -1
  22. package/dist/{impl-BfC5CRRX.cjs → impl-B-JHBYrZ.cjs} +2 -2
  23. package/dist/{impl-BfC5CRRX.cjs.map → impl-B-JHBYrZ.cjs.map} +1 -1
  24. package/dist/{impl-LgUGDTQK.cjs → impl-B-N9AGP8.cjs} +2 -2
  25. package/dist/{impl-LgUGDTQK.cjs.map → impl-B-N9AGP8.cjs.map} +1 -1
  26. package/dist/{impl-DqfyWyoV.cjs → impl-B2rIghgi.cjs} +2 -2
  27. package/dist/{impl-DqfyWyoV.cjs.map → impl-B2rIghgi.cjs.map} +1 -1
  28. package/dist/{impl-CnRqR4kw.cjs → impl-B73GDL8C.cjs} +2 -2
  29. package/dist/{impl-CnRqR4kw.cjs.map → impl-B73GDL8C.cjs.map} +1 -1
  30. package/dist/{impl-CsKfLxov.cjs → impl-BCKGKwcc.cjs} +2 -2
  31. package/dist/{impl-CsKfLxov.cjs.map → impl-BCKGKwcc.cjs.map} +1 -1
  32. package/dist/{impl-Dvoj_snk.cjs → impl-BJAX3aRj.cjs} +2 -2
  33. package/dist/{impl-Dvoj_snk.cjs.map → impl-BJAX3aRj.cjs.map} +1 -1
  34. package/dist/{impl-BOEjB3fo.cjs → impl-BgTpF43h.cjs} +2 -2
  35. package/dist/{impl-BOEjB3fo.cjs.map → impl-BgTpF43h.cjs.map} +1 -1
  36. package/dist/{impl-DmXYpp-M.cjs → impl-Bm3SCfN0.cjs} +2 -2
  37. package/dist/{impl-DmXYpp-M.cjs.map → impl-Bm3SCfN0.cjs.map} +1 -1
  38. package/dist/{impl-Dzq0t6mX.cjs → impl-BwRZ2Uh4.cjs} +2 -2
  39. package/dist/{impl-Dzq0t6mX.cjs.map → impl-BwRZ2Uh4.cjs.map} +1 -1
  40. package/dist/{impl-CyzGdwB1.cjs → impl-ByMM0BtF.cjs} +2 -2
  41. package/dist/{impl-CyzGdwB1.cjs.map → impl-ByMM0BtF.cjs.map} +1 -1
  42. package/dist/{impl-CLcnbVfj.cjs → impl-C0Uwz7KR.cjs} +2 -2
  43. package/dist/{impl-CLcnbVfj.cjs.map → impl-C0Uwz7KR.cjs.map} +1 -1
  44. package/dist/{impl-kxwq3OMk.cjs → impl-C3SuSalX.cjs} +2 -2
  45. package/dist/{impl-kxwq3OMk.cjs.map → impl-C3SuSalX.cjs.map} +1 -1
  46. package/dist/{impl-DhbV3bBZ.cjs → impl-C3h9aEfs.cjs} +2 -2
  47. package/dist/{impl-DhbV3bBZ.cjs.map → impl-C3h9aEfs.cjs.map} +1 -1
  48. package/dist/{impl-6TmoWv0o.cjs → impl-C7tMMwk6.cjs} +2 -2
  49. package/dist/{impl-6TmoWv0o.cjs.map → impl-C7tMMwk6.cjs.map} +1 -1
  50. package/dist/{impl-DetfC7CT.cjs → impl-CBxlf2zy.cjs} +2 -2
  51. package/dist/{impl-DetfC7CT.cjs.map → impl-CBxlf2zy.cjs.map} +1 -1
  52. package/dist/{impl-DHuguAlW.cjs → impl-CEUSknYB.cjs} +2 -2
  53. package/dist/{impl-DHuguAlW.cjs.map → impl-CEUSknYB.cjs.map} +1 -1
  54. package/dist/{impl-C-aKX3zu.cjs → impl-CO2YHWDs.cjs} +2 -2
  55. package/dist/{impl-C-aKX3zu.cjs.map → impl-CO2YHWDs.cjs.map} +1 -1
  56. package/dist/{impl-DjP2MJNK.cjs → impl-CPVqTQlt.cjs} +2 -2
  57. package/dist/{impl-DjP2MJNK.cjs.map → impl-CPVqTQlt.cjs.map} +1 -1
  58. package/dist/{impl-BGoAnVJu.cjs → impl-CS2vnMmW.cjs} +2 -2
  59. package/dist/{impl-BGoAnVJu.cjs.map → impl-CS2vnMmW.cjs.map} +1 -1
  60. package/dist/{impl-u8o3S8w2.cjs → impl-C_Ap9B9z.cjs} +2 -2
  61. package/dist/{impl-u8o3S8w2.cjs.map → impl-C_Ap9B9z.cjs.map} +1 -1
  62. package/dist/{impl-C6JjApDI.cjs → impl-Cd8q5X4H.cjs} +2 -2
  63. package/dist/{impl-C6JjApDI.cjs.map → impl-Cd8q5X4H.cjs.map} +1 -1
  64. package/dist/{impl-B_2CdctV.cjs → impl-CdxiD7Yy.cjs} +2 -2
  65. package/dist/{impl-B_2CdctV.cjs.map → impl-CdxiD7Yy.cjs.map} +1 -1
  66. package/dist/{impl-Dp3-sA6b.cjs → impl-CniCF9_X.cjs} +2 -2
  67. package/dist/{impl-Dp3-sA6b.cjs.map → impl-CniCF9_X.cjs.map} +1 -1
  68. package/dist/{impl-CmEsmnYZ.cjs → impl-CpJSuHLj.cjs} +2 -2
  69. package/dist/{impl-CmEsmnYZ.cjs.map → impl-CpJSuHLj.cjs.map} +1 -1
  70. package/dist/{impl-_QrpPIPw.cjs → impl-CrUSLLov.cjs} +2 -2
  71. package/dist/{impl-_QrpPIPw.cjs.map → impl-CrUSLLov.cjs.map} +1 -1
  72. package/dist/{impl-CKYwKeLz.cjs → impl-CvNge9fm.cjs} +2 -2
  73. package/dist/{impl-CKYwKeLz.cjs.map → impl-CvNge9fm.cjs.map} +1 -1
  74. package/dist/{impl-C4q9xHFr.cjs → impl-CwX4DDy7.cjs} +2 -2
  75. package/dist/{impl-C4q9xHFr.cjs.map → impl-CwX4DDy7.cjs.map} +1 -1
  76. package/dist/{impl-7LAuV25D.cjs → impl-D5YmaznJ.cjs} +2 -2
  77. package/dist/{impl-7LAuV25D.cjs.map → impl-D5YmaznJ.cjs.map} +1 -1
  78. package/dist/{impl-DwWoAbT_.cjs → impl-DIQtkPMh.cjs} +2 -2
  79. package/dist/{impl-DwWoAbT_.cjs.map → impl-DIQtkPMh.cjs.map} +1 -1
  80. package/dist/{impl-CgKn47V9.cjs → impl-DN-bpfF3.cjs} +2 -2
  81. package/dist/{impl-CgKn47V9.cjs.map → impl-DN-bpfF3.cjs.map} +1 -1
  82. package/dist/{impl-DQ8rr7Fv.cjs → impl-DSnwLiWc.cjs} +2 -2
  83. package/dist/{impl-DQ8rr7Fv.cjs.map → impl-DSnwLiWc.cjs.map} +1 -1
  84. package/dist/{impl-DC_YquN8.cjs → impl-DeGnhjhF.cjs} +2 -2
  85. package/dist/{impl-DC_YquN8.cjs.map → impl-DeGnhjhF.cjs.map} +1 -1
  86. package/dist/{impl-CV3axMeT.cjs → impl-DmV67HHX.cjs} +2 -2
  87. package/dist/{impl-CV3axMeT.cjs.map → impl-DmV67HHX.cjs.map} +1 -1
  88. package/dist/{impl-B04CctrY.cjs → impl-DnAz_55s.cjs} +2 -2
  89. package/dist/{impl-B04CctrY.cjs.map → impl-DnAz_55s.cjs.map} +1 -1
  90. package/dist/{impl-DrJj-l3s.cjs → impl-DxCRgxen.cjs} +2 -2
  91. package/dist/{impl-DrJj-l3s.cjs.map → impl-DxCRgxen.cjs.map} +1 -1
  92. package/dist/{impl-CR-wyJSg.cjs → impl-DyNE7_Z9.cjs} +2 -2
  93. package/dist/{impl-CR-wyJSg.cjs.map → impl-DyNE7_Z9.cjs.map} +1 -1
  94. package/dist/{impl-XyWPUpvw.cjs → impl-GcBLFRSN.cjs} +2 -2
  95. package/dist/{impl-XyWPUpvw.cjs.map → impl-GcBLFRSN.cjs.map} +1 -1
  96. package/dist/{impl-BOEoFzcB.cjs → impl-NaNBTYBy.cjs} +2 -2
  97. package/dist/{impl-BOEoFzcB.cjs.map → impl-NaNBTYBy.cjs.map} +1 -1
  98. package/dist/{impl-DxhyqjcY.cjs → impl-OpdHSaSE.cjs} +2 -2
  99. package/dist/{impl-DxhyqjcY.cjs.map → impl-OpdHSaSE.cjs.map} +1 -1
  100. package/dist/{impl-DNRsFfbU.cjs → impl-UF7LI32t.cjs} +2 -2
  101. package/dist/{impl-DNRsFfbU.cjs.map → impl-UF7LI32t.cjs.map} +1 -1
  102. package/dist/{impl-CqadSQOh.cjs → impl-VkDcqzOv.cjs} +2 -2
  103. package/dist/{impl-CqadSQOh.cjs.map → impl-VkDcqzOv.cjs.map} +1 -1
  104. package/dist/{impl-Cp7-Tctr.cjs → impl-eo3bGO0E.cjs} +2 -2
  105. package/dist/{impl-Cp7-Tctr.cjs.map → impl-eo3bGO0E.cjs.map} +1 -1
  106. package/dist/index.cjs +1 -1
  107. package/dist/index.d.cts +9 -9
  108. package/dist/{manual-enrichment-CzTpv-mM.cjs → manual-enrichment-D_kDV9gc.cjs} +2 -2
  109. package/dist/{manual-enrichment-CzTpv-mM.cjs.map → manual-enrichment-D_kDV9gc.cjs.map} +1 -1
  110. package/dist/{pooling-DA_LwUEp.cjs → pooling-D9eTOOjE.cjs} +5 -5
  111. package/dist/pooling-D9eTOOjE.cjs.map +1 -0
  112. package/dist/{preference-management-aOhuZCuE.cjs → preference-management-CE64qiAB.cjs} +2 -2
  113. package/dist/preference-management-CE64qiAB.cjs.map +1 -0
  114. package/dist/{syncConfigurationToTranscend-DuTZKIG8.cjs → syncConfigurationToTranscend-BipGaTT0.cjs} +9 -5
  115. package/dist/syncConfigurationToTranscend-BipGaTT0.cjs.map +1 -0
  116. package/dist/{uploadConsents-C9Pv8Awr.cjs → uploadConsents-C7SPWTIr.cjs} +2 -2
  117. package/dist/{uploadConsents-C9Pv8Awr.cjs.map → uploadConsents-C7SPWTIr.cjs.map} +1 -1
  118. package/package.json +1 -1
  119. package/dist/pooling-DA_LwUEp.cjs.map +0 -1
  120. package/dist/preference-management-aOhuZCuE.cjs.map +0 -1
  121. package/dist/syncConfigurationToTranscend-DuTZKIG8.cjs.map +0 -1
@@ -1 +1 @@
- {"version":3,"file":"impl-u8o3S8w2.cjs","names":["fastcsv","Parser","Transform","extractErrorMessage","makeHeader","makeWorkerRows","computePoolSize","runPool","CHILD_FLAG","dashboardPlugin","createExtraKeyHandler"],"sources":["../src/lib/helpers/collectCsvFilesOrExit.ts","../src/lib/helpers/chunkOneCsvFile.ts","../src/commands/admin/chunk-csv/worker.ts","../src/commands/admin/chunk-csv/ui/plugin.ts","../src/commands/admin/chunk-csv/impl.ts"],"sourcesContent":["import { join } from 'node:path';\nimport { readdirSync, statSync } from 'node:fs';\nimport colors from 'colors';\nimport { logger } from '../../logger';\nimport type { LocalContext } from '../../context';\n\n/**\n * Validate flags and collect CSV file paths from a directory.\n * On validation error, the provided `exit` function is called.\n *\n * @param directory - the directory containing CSV files\n * @param localContext - the context of the command, used for logging and exit\n * @returns an array of valid CSV file paths\n */\nexport function collectCsvFilesOrExit(\n directory: string | undefined,\n localContext: LocalContext,\n): string[] {\n if (!directory) {\n logger.error(colors.red('A --directory must be provided.'));\n localContext.process.exit(1);\n }\n\n let files: string[] = [];\n try {\n const entries = readdirSync(directory);\n files = entries\n .filter((f) => f.endsWith('.csv'))\n .map((f) => join(directory, f))\n .filter((p) => {\n try {\n return statSync(p).isFile();\n } catch {\n return false;\n }\n });\n } catch (err) {\n logger.error(colors.red(`Failed to read directory: ${directory}`));\n logger.error(colors.red((err as Error).message));\n localContext.process.exit(1);\n }\n\n if (files.length === 0) {\n logger.error(colors.red(`No CSV files found in directory: ${directory}`));\n localContext.process.exit(1);\n }\n logger.info(colors.green(`Found: ${files.join(', ')} CSV files`));\n return files;\n}\n","import { createReadStream, createWriteStream } from 'node:fs';\nimport { mkdir, readdir, unlink, stat } from 'node:fs/promises';\nimport { pipeline } from 'node:stream/promises';\nimport { Transform } from 'node:stream';\nimport { once } from 'node:events';\nimport { Parser } from 'csv-parse';\nimport { basename, dirname, join } from 'node:path';\nimport colors from 'colors';\nimport * as fastcsv from 'fast-csv';\nimport { logger } from '../../logger';\n\n/**\n * Options for chunking a single CSV file\n */\nexport type ChunkOpts = {\n /** Path to the CSV file to chunk */\n filePath: string;\n /** Output directory for chunk files; defaults to the same directory as the input file */\n outputDir?: string;\n /** Clear output directory before starting */\n clearOutputDir: boolean;\n /** Chunk size in MB */\n chunkSizeMB: number;\n /** Optional report interval in milliseconds for progress updates */\n reportEveryMs?: number;\n /** Callback for progress updates */\n onProgress: (processed: number, total?: number) => void;\n};\n\n/**\n * Create a CSV writer (fast-csv formatter piped to a write stream) that writes\n * a header line first, and then accepts object rows. 
Returns a tiny API to\n * write rows with backpressure handling and to close the file cleanly.\n *\n * @param filePath - The path to the output CSV file\n * @param headers - The headers for the CSV file\n * @returns An object with `write` and `end` methods\n */\nfunction createCsvChunkWriter(\n filePath: string,\n headers: string[],\n): {\n /** Write a row object to the CSV file */\n write: (row: Record<string, unknown>) => Promise<void>;\n /** Close the CSV file, ensuring all data is flushed */\n end: () => Promise<void>;\n} {\n const ws = createWriteStream(filePath);\n const csv = fastcsv.format({ headers, writeHeaders: true, objectMode: true });\n // Pipe csv → file stream\n csv.pipe(ws);\n\n return {\n /**\n * Write a row object to the CSV file.\n *\n * @param row - The row data as an object\n */\n async write(row) {\n // Respect backpressure from fast-csv formatter\n const ok = csv.write(row);\n if (!ok) {\n await once(csv, 'drain');\n }\n },\n /**\n * Close the CSV file, ensuring all data is flushed.\n */\n async end() {\n // End formatter; wait for underlying file stream to finish flush/close\n const finished = Promise.all([once(ws, 'finish')]);\n csv.end();\n await finished;\n },\n };\n}\n\n/**\n * Zero-pad chunk numbers to four digits (e.g., 1 → \"0001\").\n *\n * @param n - The chunk number to pad\n * @returns The padded chunk number as a string\n */\nfunction pad4(n: number): string {\n return String(n).padStart(4, '0');\n}\n\n/**\n * Approximate row size in bytes using comma-joined field values.\n *\n * @param obj - The row object to estimate size for\n * @returns Approximate byte size of the row when serialized as CSV\n */\nfunction approxRowBytes(obj: Record<string, unknown>): number {\n // naive but fast; adequate for chunk rollover thresholding\n return Buffer.byteLength(\n Object.values(obj)\n .map((v) => (v == null ? '' : String(v)))\n .join(','),\n 'utf8',\n );\n}\n\n/**\n * Stream a single CSV file and write chunk files of roughly chunkSizeMB.\n * - Writes header to each chunk.\n * - Logs periodic progress via onProgress.\n *\n * @param opts - Options for chunking the file\n * @returns Promise that resolves when done\n */\nexport async function chunkOneCsvFile(opts: ChunkOpts): Promise<void> {\n const {\n filePath,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n onProgress,\n reportEveryMs = 500,\n } = opts;\n const { size: fileBytes } = await stat(filePath); // total bytes on disk\n let lastTick = 0;\n\n logger.info(\n colors.magenta(`Chunking ${filePath} into ~${chunkSizeMB}MB files...`),\n );\n\n const chunkSizeBytes = Math.floor(chunkSizeMB * 1024 * 1024);\n const baseName = basename(filePath, '.csv');\n const outDir = outputDir || dirname(filePath);\n logger.info(colors.magenta(`Output directory: ${outDir}`));\n await mkdir(outDir, { recursive: true });\n\n // Clear previous chunk files for this base\n if (clearOutputDir) {\n logger.warn(colors.yellow(`Clearing output directory: ${outDir}`));\n const files = await readdir(outDir);\n await Promise.all(\n files\n .filter((f) => f.startsWith(`${baseName}_chunk_`) && f.endsWith('.csv'))\n .map((f) => unlink(join(outDir, f))),\n );\n }\n\n let headerRow: string[] | null = null;\n let expectedCols: number | null = null;\n let totalLines = 0;\n let currentChunk = 1;\n let currentSize = 0;\n\n const parser = new Parser({\n columns: false,\n skip_empty_lines: true,\n });\n\n // running sample to estimate avg row bytes\n let sampleBytes = 0;\n let sampleRows = 0;\n\n const emit = (): void => {\n const avg = sampleRows > 0 ? 
sampleBytes / sampleRows : 0;\n const estTotal =\n avg > 0 ? Math.max(totalLines, Math.ceil(fileBytes / avg)) : undefined;\n onProgress(totalLines, estTotal); // <-- now has total\n lastTick = Date.now();\n };\n\n // seed an initial 0/N as soon as we start\n emit();\n\n // Current active chunk writer; created after we know headers\n let writer: {\n /** Write a row object to the current chunk file */\n write: (row: Record<string, unknown>) => Promise<void>;\n /** Close the current chunk file */\n end: () => Promise<void>;\n } | null = null;\n\n // Returns current chunk file path — chunk number is always 4-digit padded\n const currentChunkPath = (): string =>\n join(outDir, `${baseName}_chunk_${pad4(currentChunk)}.csv`);\n\n const t = new Transform({\n objectMode: true,\n /**\n * Transform each row of the CSV file into a chunk.\n *\n * @param row - The current row being processed\n * @param _enc - Encoding (not used)\n * @param cb - Callback to signal completion or error\n */\n async transform(row: string[], _enc, cb) {\n try {\n // First row is the header\n if (!headerRow) {\n headerRow = row.slice(0);\n expectedCols = headerRow.length;\n\n // Open first chunk with header asynchronously\n writer = createCsvChunkWriter(currentChunkPath(), headerRow);\n cb();\n return;\n }\n\n // sanity check rows (non-fatal)\n if (expectedCols !== null && row.length !== expectedCols) {\n // optionally log a warning or collect metrics\n logger.warn(\n colors.yellow(\n `Row has ${row.length} cols; expected ${expectedCols}`,\n ),\n );\n }\n\n totalLines += 1;\n if (totalLines % 250_000 === 0) {\n onProgress(totalLines);\n }\n\n // Build row object using the original header\n const obj = Object.fromEntries(headerRow!.map((h, i) => [h, row[i]]));\n\n // Determine the row size up-front\n const rowBytes = approxRowBytes(obj);\n sampleBytes += rowBytes;\n sampleRows += 1;\n\n // time-based throttle for UI updates\n if (Date.now() - lastTick >= reportEveryMs) emit();\n\n // If adding this row would exceed the threshold, roll first,\n // so this row becomes the first row in the next chunk.\n if (\n writer &&\n currentSize > 0 &&\n currentSize + rowBytes > chunkSizeBytes\n ) {\n await writer.end();\n currentChunk += 1;\n currentSize = 0;\n logger.info(\n colors.green(\n `Rolling to chunk ${currentChunk} after ${totalLines.toLocaleString()} rows.`,\n ),\n );\n writer = createCsvChunkWriter(currentChunkPath(), headerRow!);\n }\n\n // Ensure writer exists (should after header)\n if (!writer) {\n writer = createCsvChunkWriter(currentChunkPath(), headerRow!);\n }\n\n // Write row and update approximate size\n await writer.write(obj);\n currentSize += rowBytes;\n\n cb();\n } catch (e) {\n cb(e as Error);\n }\n },\n\n // Ensure final file is closed\n /**\n * Flush is called when the readable has ended; we close any open writer.\n *\n * @param cb - Callback to signal completion or error\n */\n async flush(cb) {\n try {\n if (writer) {\n await writer.end();\n writer = null;\n }\n emit(); // Final progress tick\n cb();\n } catch (e) {\n cb(e as Error);\n }\n },\n });\n\n const rs = createReadStream(filePath);\n await pipeline(rs, parser, t);\n\n // Final progress tick\n onProgress(totalLines);\n logger.info(\n colors.green(\n `Chunked ${filePath} into ${currentChunk} file(s); processed ${totalLines.toLocaleString()} rows.`,\n ),\n );\n}\n","import { extractErrorMessage } from '../../../lib/helpers';\nimport { chunkOneCsvFile } from '../../../lib/helpers/chunkOneCsvFile';\nimport type { ToWorker } from 
'../../../lib/pooling';\nimport { logger } from '../../../logger';\n\n/**\n * A unit of work: instructs a worker to chunk a single CSV file.\n */\nexport type ChunkTask = {\n /** Absolute path of the CSV file to chunk. */\n filePath: string;\n /** Options controlling output and chunk size. */\n options: {\n /** Optional directory where chunked output files should be written. */\n outputDir?: string;\n /** Whether to clear any pre-existing output chunks before writing new ones. */\n clearOutputDir: boolean;\n /** Approximate target chunk size in MB (well under Node’s string size limits). */\n chunkSizeMB: number;\n };\n};\n\n/**\n * Per-worker progress snapshot for the chunk-csv command.\n */\nexport type ChunkProgress = {\n /** File being processed by the worker. */\n filePath: string;\n /** Number of rows processed so far. */\n processed: number;\n /** Optional total rows in the file (not always known). */\n total?: number;\n};\n\n/**\n * Worker result message once a file has finished processing.\n */\nexport type ChunkResult = {\n /** Whether the file completed successfully. */\n ok: boolean;\n /** File path for which this result applies. */\n filePath: string;\n /** Optional error message if the file failed to chunk. */\n error?: string;\n};\n\n/**\n * Worker entrypoint.\n *\n * Lifecycle:\n * 1) Announce readiness to the parent via `{ type: 'ready' }`.\n * 2) Wait for `{ type: 'task' }` messages; for each, call `chunkOneCsvFile(...)`.\n * - While chunking, forward progress to the parent via `{ type: 'progress' }`.\n * - On completion, send `{ type: 'result', ok: true }`.\n * - On error, send `{ type: 'result', ok: false, error }` and exit(1).\n * 3) On `{ type: 'shutdown' }`, exit(0) gracefully.\n *\n * Notes:\n * - This process is typically spawned by a pool manager that assigns file paths to workers.\n * - The long-lived promise at the end keeps the worker alive between tasks until the parent\n * sends an explicit shutdown.\n */\nexport async function runChild(): Promise<void> {\n const workerId = Number(process.env.WORKER_ID || '0');\n logger.info(`[w${workerId}] ready pid=${process.pid}`);\n\n // Notify the parent that the worker is ready to receive tasks.\n process.send?.({ type: 'ready' });\n\n // Main message loop: receive tasks and shutdown requests from the parent.\n process.on('message', async (msg: ToWorker<ChunkTask>) => {\n if (!msg || typeof msg !== 'object') return;\n\n // Graceful shutdown: let the parent control lifecycle.\n if (msg.type === 'shutdown') {\n process.exit(0);\n }\n\n // Only handle task messages here.\n if (msg.type !== 'task') return;\n\n const { filePath, options } = msg.payload;\n const { outputDir, clearOutputDir, chunkSizeMB } = options;\n\n try {\n // Stream the input CSV and write chunk files asynchronously.\n await chunkOneCsvFile({\n filePath,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n // Propagate incremental progress to the parent.\n onProgress: (processed, total) =>\n process.send?.({\n type: 'progress',\n payload: { filePath, processed, total },\n }),\n });\n\n // Report success to the parent.\n process.send?.({\n type: 'result',\n payload: { ok: true, filePath },\n });\n } catch (err) {\n // Log locally and report failure upstream; exit the worker with error code.\n const message = extractErrorMessage(err);\n logger.error(`[w${workerId}] ERROR ${filePath}: ${message}`);\n process.send?.({\n type: 'result',\n payload: { ok: false, filePath, error: message },\n });\n }\n });\n\n // keep alive\n await new Promise<never>(() => {\n // This 
promise never resolves, keeping the worker alive indefinitely\n // until the parent process instructs shutdown.\n });\n}\n","import {\n makeHeader,\n makeWorkerRows,\n type ChunkSlotProgress,\n type CommonCtx,\n type DashboardPlugin,\n} from '../../../../lib/pooling';\n\n/**\n * Header for chunk-csv (no extra totals block).\n *\n * @param ctx - Dashboard context.\n * @returns Header lines.\n */\nfunction renderHeader<TTotals>(\n ctx: CommonCtx<TTotals, ChunkSlotProgress>,\n): string[] {\n // no extra lines — reuse the shared header as-is\n return makeHeader(ctx);\n}\n\n/**\n * Worker rows for chunk-csv — share the generic row renderer.\n *\n * @param ctx - Dashboard context.\n * @returns Array of strings, each representing one worker row.\n */\nfunction renderWorkers<TTotals>(\n ctx: CommonCtx<TTotals, ChunkSlotProgress>,\n): string[] {\n return makeWorkerRows(ctx);\n}\n\nexport const chunkCsvPlugin: DashboardPlugin<unknown, ChunkSlotProgress> = {\n renderHeader,\n renderWorkers,\n // no extras\n};\n","import type { LocalContext } from '../../../context';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { collectCsvFilesOrExit } from '../../../lib/helpers/collectCsvFilesOrExit';\nimport {\n computePoolSize,\n createExtraKeyHandler,\n CHILD_FLAG,\n type PoolHooks,\n runPool,\n dashboardPlugin,\n} from '../../../lib/pooling';\nimport {\n runChild,\n type ChunkProgress,\n type ChunkResult,\n type ChunkTask,\n} from './worker';\nimport { chunkCsvPlugin } from './ui';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n/**\n * Returns the current module's path so the worker pool knows what file to re-exec.\n * In Node ESM, __filename is undefined, so we fall back to argv[1].\n *\n * @returns The current module's path as a string\n */\nfunction getCurrentModulePath(): string {\n if (typeof __filename !== 'undefined') {\n return __filename as unknown as string;\n }\n return process.argv[1];\n}\n\n/**\n * Totals aggregate for this command.\n * We don’t need custom counters since the runner already tracks\n * completed/failed counts in its header — so we just use an empty record.\n */\ntype Totals = Record<string, never>;\n\n/**\n * CLI flags accepted by the `chunk-csv` command.\n *\n * These are passed down from the CLI parser into the parent process.\n */\nexport type ChunkCsvCommandFlags = {\n directory: string;\n outputDir?: string;\n clearOutputDir: boolean;\n chunkSizeMB: number;\n concurrency?: number;\n viewerMode: boolean;\n};\n\n/**\n * Parent entrypoint for chunking many CSVs in parallel using the worker pool runner.\n *\n * Lifecycle:\n * 1) Discover CSV inputs (exit if none).\n * 2) Compute pool size (CPU-count heuristic or --concurrency).\n * 3) Build a FIFO queue of `ChunkTask`s.\n * 4) Define pool hooks to drive task assignment, progress, and result handling.\n * 5) Launch the pool with `runPool`, rendering via the `chunkCsvPlugin`.\n *\n * @param this - Bound CLI context (provides process exit + logging).\n * @param flags - CLI options for the run.\n */\nexport async function chunkCsv(\n this: LocalContext,\n flags: ChunkCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const {\n directory,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n concurrency,\n viewerMode,\n } = flags;\n\n /* 1) Discover CSV inputs */\n const files = collectCsvFilesOrExit(directory, this);\n\n /* 2) Size the pool */\n const { poolSize, cpuCount } = computePoolSize(concurrency, files.length);\n\n logger.info(\n 
colors.green(\n `Chunking ${files.length} CSV file(s) with pool size ${poolSize} (CPU=${cpuCount})`,\n ),\n );\n\n /* 3) Prepare a simple FIFO queue of tasks (one per file). */\n const queue = files.map<ChunkTask>((filePath) => ({\n filePath,\n options: { outputDir, clearOutputDir, chunkSizeMB },\n }));\n\n /* 4) Define pool hooks to adapt runner to this command. */\n const hooks: PoolHooks<ChunkTask, ChunkProgress, ChunkResult, Totals> = {\n nextTask: () => queue.shift(),\n taskLabel: (t) => t.filePath,\n initTotals: () => ({} as Totals),\n initSlotProgress: () => undefined,\n onProgress: (totals) => totals,\n onResult: (totals, res) => ({ totals, ok: !!res.ok }),\n // postProcess receives log context when viewerMode=true — we don’t need it here.\n postProcess: async () => {\n // nothing extra for chunk-csv\n },\n };\n\n /* 5) Launch the pool runner with our hooks and custom dashboard plugin. */\n await runPool({\n title: `Chunk CSV - ${directory}`,\n baseDir: directory || outputDir || process.cwd(),\n childFlag: CHILD_FLAG,\n childModulePath: getCurrentModulePath(),\n poolSize,\n cpuCount,\n filesTotal: files.length,\n hooks,\n viewerMode,\n render: (input) => dashboardPlugin(input, chunkCsvPlugin, viewerMode),\n extraKeyHandler: ({ logsBySlot, repaint, setPaused }) =>\n createExtraKeyHandler({\n logsBySlot,\n repaint,\n setPaused,\n }),\n });\n}\n\n/* -------------------------------------------------------------------------------------------------\n * If invoked directly as a child process, enter worker loop\n * ------------------------------------------------------------------------------------------------- */\nif (process.argv.includes(CHILD_FLAG)) {\n runChild().catch((err) => {\n logger.error(err);\n process.exit(1);\n });\n}\n"],"mappings":"8lBAcA,SAAgB,EACd,EACA,EACU,CACL,IACH,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,kCAAkC,CAAC,CAC3D,EAAa,QAAQ,KAAK,EAAE,EAG9B,IAAI,EAAkB,EAAE,CACxB,GAAI,CAEF,GAAA,EAAA,EAAA,aAD4B,EAAU,CAEnC,OAAQ,GAAM,EAAE,SAAS,OAAO,CAAC,CACjC,IAAK,IAAA,EAAA,EAAA,MAAW,EAAW,EAAE,CAAC,CAC9B,OAAQ,GAAM,CACb,GAAI,CACF,OAAA,EAAA,EAAA,UAAgB,EAAE,CAAC,QAAQ,MACrB,CACN,MAAO,KAET,OACG,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,6BAA6B,IAAY,CAAC,CAClE,EAAA,EAAO,MAAM,EAAA,QAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,EAAa,QAAQ,KAAK,EAAE,CAQ9B,OALI,EAAM,SAAW,IACnB,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,oCAAoC,IAAY,CAAC,CACzE,EAAa,QAAQ,KAAK,EAAE,EAE9B,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,UAAU,EAAM,KAAK,KAAK,CAAC,YAAY,CAAC,CAC1D,ECTT,SAAS,EACP,EACA,EAMA,CACA,IAAM,GAAA,EAAA,EAAA,mBAAuB,EAAS,CAChC,EAAMA,EAAQ,OAAO,CAAE,UAAS,aAAc,GAAM,WAAY,GAAM,CAAC,CAI7E,OAFA,EAAI,KAAK,EAAG,CAEL,CAML,MAAM,MAAM,EAAK,CAEJ,EAAI,MAAM,EAAI,EAEvB,MAAA,EAAA,EAAA,MAAW,EAAK,QAAQ,EAM5B,MAAM,KAAM,CAEV,IAAM,EAAW,QAAQ,IAAI,EAAA,EAAA,EAAA,MAAM,EAAI,SAAS,CAAC,CAAC,CAClD,EAAI,KAAK,CACT,MAAM,GAET,CASH,SAAS,EAAK,EAAmB,CAC/B,OAAO,OAAO,EAAE,CAAC,SAAS,EAAG,IAAI,CASnC,SAAS,EAAe,EAAsC,CAE5D,OAAO,OAAO,WACZ,OAAO,OAAO,EAAI,CACf,IAAK,GAAO,GAAK,KAAO,GAAK,OAAO,EAAE,CAAE,CACxC,KAAK,IAAI,CACZ,OACD,CAWH,eAAsB,EAAgB,EAAgC,CACpE,GAAM,CACJ,WACA,YACA,iBACA,cACA,aACA,gBAAgB,KACd,EACE,CAAE,KAAM,GAAc,MAAA,EAAA,EAAA,MAAW,EAAS,CAC5C,EAAW,EAEf,EAAA,EAAO,KACL,EAAA,QAAO,QAAQ,YAAY,EAAS,SAAS,EAAY,aAAa,CACvE,CAED,IAAM,EAAiB,KAAK,MAAM,EAAc,KAAO,KAAK,CACtD,GAAA,EAAA,EAAA,UAAoB,EAAU,OAAO,CACrC,EAAS,IAAA,EAAA,EAAA,SAAqB,EAAS,CAK7C,GAJA,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,qBAAqB,IAAS,CAAC,CAC1D,MAAA,EAAA,EAAA,OAAY,EAAQ,CAAE,UAAW,GAAM,CAAC,CAGpC,EAAgB,CAClB,EAAA,EAAO,KAAK,EAAA,QAAO,OAAO,8BAA8B,IAAS,CAAC,CAClE,IAAM,EAAQ,MAAA,EAAA,EAAA,SAAc,EAAO,CACnC,MAAM,QAAQ,IACZ,EACG,OAAQ,GAAM,EAAE,WAAW,GAAG,EAAS,SAAS,E
AAI,EAAE,SAAS,OAAO,CAAC,CACvE,IAAK,IAAA,EAAA,EAAA,SAAA,EAAA,EAAA,MAAkB,EAAQ,EAAE,CAAC,CAAC,CACvC,CAGH,IAAI,EAA6B,KAC7B,EAA8B,KAC9B,EAAa,EACb,EAAe,EACf,EAAc,EAEZ,EAAS,IAAIC,EAAAA,OAAO,CACxB,QAAS,GACT,iBAAkB,GACnB,CAAC,CAGE,EAAc,EACd,EAAa,EAEX,MAAmB,CACvB,IAAM,EAAM,EAAa,EAAI,EAAc,EAAa,EAClD,EACJ,EAAM,EAAI,KAAK,IAAI,EAAY,KAAK,KAAK,EAAY,EAAI,CAAC,CAAG,IAAA,GAC/D,EAAW,EAAY,EAAS,CAChC,EAAW,KAAK,KAAK,EAIvB,GAAM,CAGN,IAAI,EAKO,KAGL,OAAA,EAAA,EAAA,MACC,EAAQ,GAAG,EAAS,SAAS,EAAK,EAAa,CAAC,MAAM,CAEvD,EAAI,IAAIC,EAAAA,UAAU,CACtB,WAAY,GAQZ,MAAM,UAAU,EAAe,EAAM,EAAI,CACvC,GAAI,CAEF,GAAI,CAAC,EAAW,CACd,EAAY,EAAI,MAAM,EAAE,CACxB,EAAe,EAAU,OAGzB,EAAS,EAAqB,GAAkB,CAAE,EAAU,CAC5D,GAAI,CACJ,OAIE,IAAiB,MAAQ,EAAI,SAAW,GAE1C,EAAA,EAAO,KACL,EAAA,QAAO,OACL,WAAW,EAAI,OAAO,kBAAkB,IACzC,CACF,CAGH,GAAc,EACV,EAAa,MAAY,GAC3B,EAAW,EAAW,CAIxB,IAAM,EAAM,OAAO,YAAY,EAAW,KAAK,EAAG,IAAM,CAAC,EAAG,EAAI,GAAG,CAAC,CAAC,CAG/D,EAAW,EAAe,EAAI,CACpC,GAAe,EACf,GAAc,EAGV,KAAK,KAAK,CAAG,GAAY,GAAe,GAAM,CAKhD,GACA,EAAc,GACd,EAAc,EAAW,IAEzB,MAAM,EAAO,KAAK,CAClB,GAAgB,EAChB,EAAc,EACd,EAAA,EAAO,KACL,EAAA,QAAO,MACL,oBAAoB,EAAa,SAAS,EAAW,gBAAgB,CAAC,QACvE,CACF,CACD,EAAS,EAAqB,GAAkB,CAAE,EAAW,EAI/D,AACE,IAAS,EAAqB,GAAkB,CAAE,EAAW,CAI/D,MAAM,EAAO,MAAM,EAAI,CACvB,GAAe,EAEf,GAAI,OACG,EAAG,CACV,EAAG,EAAW,GAUlB,MAAM,MAAM,EAAI,CACd,GAAI,CACF,AAEE,KADA,MAAM,EAAO,KAAK,CACT,MAEX,GAAM,CACN,GAAI,OACG,EAAG,CACV,EAAG,EAAW,GAGnB,CAAC,CAGF,MAAA,EAAA,EAAA,WAAA,EAAA,EAAA,kBAD4B,EAAS,CAClB,EAAQ,EAAE,CAG7B,EAAW,EAAW,CACtB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,WAAW,EAAS,QAAQ,EAAa,sBAAsB,EAAW,gBAAgB,CAAC,QAC5F,CACF,CCtOH,eAAsB,GAA0B,CAC9C,IAAM,EAAW,OAAO,QAAQ,IAAI,WAAa,IAAI,CACrD,EAAA,EAAO,KAAK,KAAK,EAAS,cAAc,QAAQ,MAAM,CAGtD,QAAQ,OAAO,CAAE,KAAM,QAAS,CAAC,CAGjC,QAAQ,GAAG,UAAW,KAAO,IAA6B,CASxD,GARI,CAAC,GAAO,OAAO,GAAQ,WAGvB,EAAI,OAAS,YACf,QAAQ,KAAK,EAAE,CAIb,EAAI,OAAS,QAAQ,OAEzB,GAAM,CAAE,WAAU,WAAY,EAAI,QAC5B,CAAE,YAAW,iBAAgB,eAAgB,EAEnD,GAAI,CAEF,MAAM,EAAgB,CACpB,WACA,YACA,iBACA,cAEA,YAAa,EAAW,IACtB,QAAQ,OAAO,CACb,KAAM,WACN,QAAS,CAAE,WAAU,YAAW,QAAO,CACxC,CAAC,CACL,CAAC,CAGF,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAM,WAAU,CAChC,CAAC,OACK,EAAK,CAEZ,IAAM,EAAUC,EAAAA,EAAoB,EAAI,CACxC,EAAA,EAAO,MAAM,KAAK,EAAS,UAAU,EAAS,IAAI,IAAU,CAC5D,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAO,WAAU,MAAO,EAAS,CACjD,CAAC,GAEJ,CAGF,MAAM,IAAI,YAAqB,GAG7B,CCzGJ,SAAS,EACP,EACU,CAEV,OAAOC,EAAAA,EAAW,EAAI,CASxB,SAAS,EACP,EACU,CACV,OAAOC,EAAAA,EAAe,EAAI,CAG5B,MAAa,EAA8D,CACzE,eACA,gBAED,CCVD,SAAS,GAA+B,CAItC,OAHI,OAAO,WAAe,IACjB,WAEF,QAAQ,KAAK,GAqCtB,eAAsB,EAEpB,EACe,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CACJ,YACA,YACA,iBACA,cACA,cACA,cACE,EAGE,EAAQ,EAAsB,EAAW,KAAK,CAG9C,CAAE,WAAU,YAAaC,EAAAA,EAAgB,EAAa,EAAM,OAAO,CAEzE,EAAA,EAAO,KACL,EAAA,QAAO,MACL,YAAY,EAAM,OAAO,8BAA8B,EAAS,QAAQ,EAAS,GAClF,CACF,CAGD,IAAM,EAAQ,EAAM,IAAgB,IAAc,CAChD,WACA,QAAS,CAAE,YAAW,iBAAgB,cAAa,CACpD,EAAE,CAiBH,MAAMC,EAAAA,EAAQ,CACZ,MAAO,eAAe,IACtB,QAAS,GAAa,GAAa,QAAQ,KAAK,CAChD,UAAWC,EAAAA,EACX,gBAAiB,GAAsB,CACvC,WACA,WACA,WAAY,EAAM,OAClB,MAtBsE,CACtE,aAAgB,EAAM,OAAO,CAC7B,UAAY,GAAM,EAAE,SACpB,gBAAmB,EAAE,EACrB,qBAAwB,IAAA,GACxB,WAAa,GAAW,EACxB,UAAW,EAAQ,KAAS,CAAE,SAAQ,GAAI,CAAC,CAAC,EAAI,GAAI,EAEpD,YAAa,SAAY,GAG1B,CAYC,aACA,OAAS,GAAUC,EAAAA,EAAgB,EAAO,EAAgB,EAAW,CACrE,iBAAkB,CAAE,aAAY,UAAS,eACvCC,EAAAA,EAAsB,CACpB,aACA,UACA,YACD,CAAC,CACL,CAAC,CAMA,QAAQ,KAAK,SAASF,EAAAA,EAAW,EACnC,GAAU,CAAC,MAAO,GAAQ,CACxB,EAAA,EAAO,MAAM,EAAI,CACjB,QAAQ,KAAK,EAAE,EACf"}
+ {"version":3,"file":"impl-C_Ap9B9z.cjs","names":["fastcsv","Parser","Transform","extractErrorMessage","makeHeader","makeWorkerRows","computePoolSize","runPool","CHILD_FLAG","dashboardPlugin","createExtraKeyHandler"],"sources":["../src/lib/helpers/collectCsvFilesOrExit.ts","../src/lib/helpers/chunkOneCsvFile.ts","../src/commands/admin/chunk-csv/worker.ts","../src/commands/admin/chunk-csv/ui/plugin.ts","../src/commands/admin/chunk-csv/impl.ts"],"sourcesContent":["import { join } from 'node:path';\nimport { readdirSync, statSync } from 'node:fs';\nimport colors from 'colors';\nimport { logger } from '../../logger';\nimport type { LocalContext } from '../../context';\n\n/**\n * Validate flags and collect CSV file paths from a directory.\n * On validation error, the provided `exit` function is called.\n *\n * @param directory - the directory containing CSV files\n * @param localContext - the context of the command, used for logging and exit\n * @returns an array of valid CSV file paths\n */\nexport function collectCsvFilesOrExit(\n directory: string | undefined,\n localContext: LocalContext,\n): string[] {\n if (!directory) {\n logger.error(colors.red('A --directory must be provided.'));\n localContext.process.exit(1);\n }\n\n let files: string[] = [];\n try {\n const entries = readdirSync(directory);\n files = entries\n .filter((f) => f.endsWith('.csv'))\n .map((f) => join(directory, f))\n .filter((p) => {\n try {\n return statSync(p).isFile();\n } catch {\n return false;\n }\n });\n } catch (err) {\n logger.error(colors.red(`Failed to read directory: ${directory}`));\n logger.error(colors.red((err as Error).message));\n localContext.process.exit(1);\n }\n\n if (files.length === 0) {\n logger.error(colors.red(`No CSV files found in directory: ${directory}`));\n localContext.process.exit(1);\n }\n logger.info(colors.green(`Found: ${files.join(', ')} CSV files`));\n return files;\n}\n","import { createReadStream, createWriteStream } from 'node:fs';\nimport { mkdir, readdir, unlink, stat } from 'node:fs/promises';\nimport { pipeline } from 'node:stream/promises';\nimport { Transform } from 'node:stream';\nimport { once } from 'node:events';\nimport { Parser } from 'csv-parse';\nimport { basename, dirname, join } from 'node:path';\nimport colors from 'colors';\nimport * as fastcsv from 'fast-csv';\nimport { logger } from '../../logger';\n\n/**\n * Options for chunking a single CSV file\n */\nexport type ChunkOpts = {\n /** Path to the CSV file to chunk */\n filePath: string;\n /** Output directory for chunk files; defaults to the same directory as the input file */\n outputDir?: string;\n /** Clear output directory before starting */\n clearOutputDir: boolean;\n /** Chunk size in MB */\n chunkSizeMB: number;\n /** Optional report interval in milliseconds for progress updates */\n reportEveryMs?: number;\n /** Callback for progress updates */\n onProgress: (processed: number, total?: number) => void;\n};\n\n/**\n * Create a CSV writer (fast-csv formatter piped to a write stream) that writes\n * a header line first, and then accepts object rows. 
Returns a tiny API to\n * write rows with backpressure handling and to close the file cleanly.\n *\n * @param filePath - The path to the output CSV file\n * @param headers - The headers for the CSV file\n * @returns An object with `write` and `end` methods\n */\nfunction createCsvChunkWriter(\n filePath: string,\n headers: string[],\n): {\n /** Write a row object to the CSV file */\n write: (row: Record<string, unknown>) => Promise<void>;\n /** Close the CSV file, ensuring all data is flushed */\n end: () => Promise<void>;\n} {\n const ws = createWriteStream(filePath);\n const csv = fastcsv.format({ headers, writeHeaders: true, objectMode: true });\n // Pipe csv → file stream\n csv.pipe(ws);\n\n return {\n /**\n * Write a row object to the CSV file.\n *\n * @param row - The row data as an object\n */\n async write(row) {\n // Respect backpressure from fast-csv formatter\n const ok = csv.write(row);\n if (!ok) {\n await once(csv, 'drain');\n }\n },\n /**\n * Close the CSV file, ensuring all data is flushed.\n */\n async end() {\n // End formatter; wait for underlying file stream to finish flush/close\n const finished = Promise.all([once(ws, 'finish')]);\n csv.end();\n await finished;\n },\n };\n}\n\n/**\n * Zero-pad chunk numbers to four digits (e.g., 1 → \"0001\").\n *\n * @param n - The chunk number to pad\n * @returns The padded chunk number as a string\n */\nfunction pad4(n: number): string {\n return String(n).padStart(4, '0');\n}\n\n/**\n * Approximate row size in bytes using comma-joined field values.\n *\n * @param obj - The row object to estimate size for\n * @returns Approximate byte size of the row when serialized as CSV\n */\nfunction approxRowBytes(obj: Record<string, unknown>): number {\n // naive but fast; adequate for chunk rollover thresholding\n return Buffer.byteLength(\n Object.values(obj)\n .map((v) => (v == null ? '' : String(v)))\n .join(','),\n 'utf8',\n );\n}\n\n/**\n * Stream a single CSV file and write chunk files of roughly chunkSizeMB.\n * - Writes header to each chunk.\n * - Logs periodic progress via onProgress.\n *\n * @param opts - Options for chunking the file\n * @returns Promise that resolves when done\n */\nexport async function chunkOneCsvFile(opts: ChunkOpts): Promise<void> {\n const {\n filePath,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n onProgress,\n reportEveryMs = 500,\n } = opts;\n const { size: fileBytes } = await stat(filePath); // total bytes on disk\n let lastTick = 0;\n\n logger.info(\n colors.magenta(`Chunking ${filePath} into ~${chunkSizeMB}MB files...`),\n );\n\n const chunkSizeBytes = Math.floor(chunkSizeMB * 1024 * 1024);\n const baseName = basename(filePath, '.csv');\n const outDir = outputDir || dirname(filePath);\n logger.info(colors.magenta(`Output directory: ${outDir}`));\n await mkdir(outDir, { recursive: true });\n\n // Clear previous chunk files for this base\n if (clearOutputDir) {\n logger.warn(colors.yellow(`Clearing output directory: ${outDir}`));\n const files = await readdir(outDir);\n await Promise.all(\n files\n .filter((f) => f.startsWith(`${baseName}_chunk_`) && f.endsWith('.csv'))\n .map((f) => unlink(join(outDir, f))),\n );\n }\n\n let headerRow: string[] | null = null;\n let expectedCols: number | null = null;\n let totalLines = 0;\n let currentChunk = 1;\n let currentSize = 0;\n\n const parser = new Parser({\n columns: false,\n skip_empty_lines: true,\n });\n\n // running sample to estimate avg row bytes\n let sampleBytes = 0;\n let sampleRows = 0;\n\n const emit = (): void => {\n const avg = sampleRows > 0 ? 
sampleBytes / sampleRows : 0;\n const estTotal =\n avg > 0 ? Math.max(totalLines, Math.ceil(fileBytes / avg)) : undefined;\n onProgress(totalLines, estTotal); // <-- now has total\n lastTick = Date.now();\n };\n\n // seed an initial 0/N as soon as we start\n emit();\n\n // Current active chunk writer; created after we know headers\n let writer: {\n /** Write a row object to the current chunk file */\n write: (row: Record<string, unknown>) => Promise<void>;\n /** Close the current chunk file */\n end: () => Promise<void>;\n } | null = null;\n\n // Returns current chunk file path — chunk number is always 4-digit padded\n const currentChunkPath = (): string =>\n join(outDir, `${baseName}_chunk_${pad4(currentChunk)}.csv`);\n\n const t = new Transform({\n objectMode: true,\n /**\n * Transform each row of the CSV file into a chunk.\n *\n * @param row - The current row being processed\n * @param _enc - Encoding (not used)\n * @param cb - Callback to signal completion or error\n */\n async transform(row: string[], _enc, cb) {\n try {\n // First row is the header\n if (!headerRow) {\n headerRow = row.slice(0);\n expectedCols = headerRow.length;\n\n // Open first chunk with header asynchronously\n writer = createCsvChunkWriter(currentChunkPath(), headerRow);\n cb();\n return;\n }\n\n // sanity check rows (non-fatal)\n if (expectedCols !== null && row.length !== expectedCols) {\n // optionally log a warning or collect metrics\n logger.warn(\n colors.yellow(\n `Row has ${row.length} cols; expected ${expectedCols}`,\n ),\n );\n }\n\n totalLines += 1;\n if (totalLines % 250_000 === 0) {\n onProgress(totalLines);\n }\n\n // Build row object using the original header\n const obj = Object.fromEntries(headerRow!.map((h, i) => [h, row[i]]));\n\n // Determine the row size up-front\n const rowBytes = approxRowBytes(obj);\n sampleBytes += rowBytes;\n sampleRows += 1;\n\n // time-based throttle for UI updates\n if (Date.now() - lastTick >= reportEveryMs) emit();\n\n // If adding this row would exceed the threshold, roll first,\n // so this row becomes the first row in the next chunk.\n if (\n writer &&\n currentSize > 0 &&\n currentSize + rowBytes > chunkSizeBytes\n ) {\n await writer.end();\n currentChunk += 1;\n currentSize = 0;\n logger.info(\n colors.green(\n `Rolling to chunk ${currentChunk} after ${totalLines.toLocaleString()} rows.`,\n ),\n );\n writer = createCsvChunkWriter(currentChunkPath(), headerRow!);\n }\n\n // Ensure writer exists (should after header)\n if (!writer) {\n writer = createCsvChunkWriter(currentChunkPath(), headerRow!);\n }\n\n // Write row and update approximate size\n await writer.write(obj);\n currentSize += rowBytes;\n\n cb();\n } catch (e) {\n cb(e as Error);\n }\n },\n\n // Ensure final file is closed\n /**\n * Flush is called when the readable has ended; we close any open writer.\n *\n * @param cb - Callback to signal completion or error\n */\n async flush(cb) {\n try {\n if (writer) {\n await writer.end();\n writer = null;\n }\n emit(); // Final progress tick\n cb();\n } catch (e) {\n cb(e as Error);\n }\n },\n });\n\n const rs = createReadStream(filePath);\n await pipeline(rs, parser, t);\n\n // Final progress tick\n onProgress(totalLines);\n logger.info(\n colors.green(\n `Chunked ${filePath} into ${currentChunk} file(s); processed ${totalLines.toLocaleString()} rows.`,\n ),\n );\n}\n","import { extractErrorMessage } from '../../../lib/helpers';\nimport { chunkOneCsvFile } from '../../../lib/helpers/chunkOneCsvFile';\nimport type { ToWorker } from 
'../../../lib/pooling';\nimport { logger } from '../../../logger';\n\n/**\n * A unit of work: instructs a worker to chunk a single CSV file.\n */\nexport type ChunkTask = {\n /** Absolute path of the CSV file to chunk. */\n filePath: string;\n /** Options controlling output and chunk size. */\n options: {\n /** Optional directory where chunked output files should be written. */\n outputDir?: string;\n /** Whether to clear any pre-existing output chunks before writing new ones. */\n clearOutputDir: boolean;\n /** Approximate target chunk size in MB (well under Node’s string size limits). */\n chunkSizeMB: number;\n };\n};\n\n/**\n * Per-worker progress snapshot for the chunk-csv command.\n */\nexport type ChunkProgress = {\n /** File being processed by the worker. */\n filePath: string;\n /** Number of rows processed so far. */\n processed: number;\n /** Optional total rows in the file (not always known). */\n total?: number;\n};\n\n/**\n * Worker result message once a file has finished processing.\n */\nexport type ChunkResult = {\n /** Whether the file completed successfully. */\n ok: boolean;\n /** File path for which this result applies. */\n filePath: string;\n /** Optional error message if the file failed to chunk. */\n error?: string;\n};\n\n/**\n * Worker entrypoint.\n *\n * Lifecycle:\n * 1) Announce readiness to the parent via `{ type: 'ready' }`.\n * 2) Wait for `{ type: 'task' }` messages; for each, call `chunkOneCsvFile(...)`.\n * - While chunking, forward progress to the parent via `{ type: 'progress' }`.\n * - On completion, send `{ type: 'result', ok: true }`.\n * - On error, send `{ type: 'result', ok: false, error }` and exit(1).\n * 3) On `{ type: 'shutdown' }`, exit(0) gracefully.\n *\n * Notes:\n * - This process is typically spawned by a pool manager that assigns file paths to workers.\n * - The long-lived promise at the end keeps the worker alive between tasks until the parent\n * sends an explicit shutdown.\n */\nexport async function runChild(): Promise<void> {\n const workerId = Number(process.env.WORKER_ID || '0');\n logger.info(`[w${workerId}] ready pid=${process.pid}`);\n\n // Notify the parent that the worker is ready to receive tasks.\n process.send?.({ type: 'ready' });\n\n // Main message loop: receive tasks and shutdown requests from the parent.\n process.on('message', async (msg: ToWorker<ChunkTask>) => {\n if (!msg || typeof msg !== 'object') return;\n\n // Graceful shutdown: let the parent control lifecycle.\n if (msg.type === 'shutdown') {\n process.exit(0);\n }\n\n // Only handle task messages here.\n if (msg.type !== 'task') return;\n\n const { filePath, options } = msg.payload;\n const { outputDir, clearOutputDir, chunkSizeMB } = options;\n\n try {\n // Stream the input CSV and write chunk files asynchronously.\n await chunkOneCsvFile({\n filePath,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n // Propagate incremental progress to the parent.\n onProgress: (processed, total) =>\n process.send?.({\n type: 'progress',\n payload: { filePath, processed, total },\n }),\n });\n\n // Report success to the parent.\n process.send?.({\n type: 'result',\n payload: { ok: true, filePath },\n });\n } catch (err) {\n // Log locally and report failure upstream; exit the worker with error code.\n const message = extractErrorMessage(err);\n logger.error(`[w${workerId}] ERROR ${filePath}: ${message}`);\n process.send?.({\n type: 'result',\n payload: { ok: false, filePath, error: message },\n });\n }\n });\n\n // keep alive\n await new Promise<never>(() => {\n // This 
promise never resolves, keeping the worker alive indefinitely\n // until the parent process instructs shutdown.\n });\n}\n","import {\n makeHeader,\n makeWorkerRows,\n type ChunkSlotProgress,\n type CommonCtx,\n type DashboardPlugin,\n} from '../../../../lib/pooling';\n\n/**\n * Header for chunk-csv (no extra totals block).\n *\n * @param ctx - Dashboard context.\n * @returns Header lines.\n */\nfunction renderHeader<TTotals>(\n ctx: CommonCtx<TTotals, ChunkSlotProgress>,\n): string[] {\n // no extra lines — reuse the shared header as-is\n return makeHeader(ctx);\n}\n\n/**\n * Worker rows for chunk-csv — share the generic row renderer.\n *\n * @param ctx - Dashboard context.\n * @returns Array of strings, each representing one worker row.\n */\nfunction renderWorkers<TTotals>(\n ctx: CommonCtx<TTotals, ChunkSlotProgress>,\n): string[] {\n return makeWorkerRows(ctx);\n}\n\nexport const chunkCsvPlugin: DashboardPlugin<unknown, ChunkSlotProgress> = {\n renderHeader,\n renderWorkers,\n // no extras\n};\n","import type { LocalContext } from '../../../context';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { collectCsvFilesOrExit } from '../../../lib/helpers/collectCsvFilesOrExit';\nimport {\n computePoolSize,\n createExtraKeyHandler,\n CHILD_FLAG,\n type PoolHooks,\n runPool,\n dashboardPlugin,\n} from '../../../lib/pooling';\nimport {\n runChild,\n type ChunkProgress,\n type ChunkResult,\n type ChunkTask,\n} from './worker';\nimport { chunkCsvPlugin } from './ui';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n/**\n * Returns the current module's path so the worker pool knows what file to re-exec.\n * In Node ESM, __filename is undefined, so we fall back to argv[1].\n *\n * @returns The current module's path as a string\n */\nfunction getCurrentModulePath(): string {\n if (typeof __filename !== 'undefined') {\n return __filename as unknown as string;\n }\n return process.argv[1];\n}\n\n/**\n * Totals aggregate for this command.\n * We don’t need custom counters since the runner already tracks\n * completed/failed counts in its header — so we just use an empty record.\n */\ntype Totals = Record<string, never>;\n\n/**\n * CLI flags accepted by the `chunk-csv` command.\n *\n * These are passed down from the CLI parser into the parent process.\n */\nexport type ChunkCsvCommandFlags = {\n directory: string;\n outputDir?: string;\n clearOutputDir: boolean;\n chunkSizeMB: number;\n concurrency?: number;\n viewerMode: boolean;\n};\n\n/**\n * Parent entrypoint for chunking many CSVs in parallel using the worker pool runner.\n *\n * Lifecycle:\n * 1) Discover CSV inputs (exit if none).\n * 2) Compute pool size (CPU-count heuristic or --concurrency).\n * 3) Build a FIFO queue of `ChunkTask`s.\n * 4) Define pool hooks to drive task assignment, progress, and result handling.\n * 5) Launch the pool with `runPool`, rendering via the `chunkCsvPlugin`.\n *\n * @param this - Bound CLI context (provides process exit + logging).\n * @param flags - CLI options for the run.\n */\nexport async function chunkCsv(\n this: LocalContext,\n flags: ChunkCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const {\n directory,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n concurrency,\n viewerMode,\n } = flags;\n\n /* 1) Discover CSV inputs */\n const files = collectCsvFilesOrExit(directory, this);\n\n /* 2) Size the pool */\n const { poolSize, cpuCount } = computePoolSize(concurrency, files.length);\n\n logger.info(\n 
colors.green(\n `Chunking ${files.length} CSV file(s) with pool size ${poolSize} (CPU=${cpuCount})`,\n ),\n );\n\n /* 3) Prepare a simple FIFO queue of tasks (one per file). */\n const queue = files.map<ChunkTask>((filePath) => ({\n filePath,\n options: { outputDir, clearOutputDir, chunkSizeMB },\n }));\n\n /* 4) Define pool hooks to adapt runner to this command. */\n const hooks: PoolHooks<ChunkTask, ChunkProgress, ChunkResult, Totals> = {\n nextTask: () => queue.shift(),\n taskLabel: (t) => t.filePath,\n initTotals: () => ({} as Totals),\n initSlotProgress: () => undefined,\n onProgress: (totals) => totals,\n onResult: (totals, res) => ({ totals, ok: !!res.ok }),\n // postProcess receives log context when viewerMode=true — we don’t need it here.\n postProcess: async () => {\n // nothing extra for chunk-csv\n },\n };\n\n /* 5) Launch the pool runner with our hooks and custom dashboard plugin. */\n await runPool({\n title: `Chunk CSV - ${directory}`,\n baseDir: directory || outputDir || process.cwd(),\n childFlag: CHILD_FLAG,\n childModulePath: getCurrentModulePath(),\n poolSize,\n cpuCount,\n filesTotal: files.length,\n hooks,\n viewerMode,\n render: (input) => dashboardPlugin(input, chunkCsvPlugin, viewerMode),\n extraKeyHandler: ({ logsBySlot, repaint, setPaused }) =>\n createExtraKeyHandler({\n logsBySlot,\n repaint,\n setPaused,\n }),\n });\n}\n\n/* -------------------------------------------------------------------------------------------------\n * If invoked directly as a child process, enter worker loop\n * ------------------------------------------------------------------------------------------------- */\nif (process.argv.includes(CHILD_FLAG)) {\n runChild().catch((err) => {\n logger.error(err);\n process.exit(1);\n });\n}\n"],"mappings":"8lBAcA,SAAgB,EACd,EACA,EACU,CACL,IACH,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,kCAAkC,CAAC,CAC3D,EAAa,QAAQ,KAAK,EAAE,EAG9B,IAAI,EAAkB,EAAE,CACxB,GAAI,CAEF,GAAA,EAAA,EAAA,aAD4B,EAAU,CAEnC,OAAQ,GAAM,EAAE,SAAS,OAAO,CAAC,CACjC,IAAK,IAAA,EAAA,EAAA,MAAW,EAAW,EAAE,CAAC,CAC9B,OAAQ,GAAM,CACb,GAAI,CACF,OAAA,EAAA,EAAA,UAAgB,EAAE,CAAC,QAAQ,MACrB,CACN,MAAO,KAET,OACG,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,6BAA6B,IAAY,CAAC,CAClE,EAAA,EAAO,MAAM,EAAA,QAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,EAAa,QAAQ,KAAK,EAAE,CAQ9B,OALI,EAAM,SAAW,IACnB,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,oCAAoC,IAAY,CAAC,CACzE,EAAa,QAAQ,KAAK,EAAE,EAE9B,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,UAAU,EAAM,KAAK,KAAK,CAAC,YAAY,CAAC,CAC1D,ECTT,SAAS,EACP,EACA,EAMA,CACA,IAAM,GAAA,EAAA,EAAA,mBAAuB,EAAS,CAChC,EAAMA,EAAQ,OAAO,CAAE,UAAS,aAAc,GAAM,WAAY,GAAM,CAAC,CAI7E,OAFA,EAAI,KAAK,EAAG,CAEL,CAML,MAAM,MAAM,EAAK,CAEJ,EAAI,MAAM,EAAI,EAEvB,MAAA,EAAA,EAAA,MAAW,EAAK,QAAQ,EAM5B,MAAM,KAAM,CAEV,IAAM,EAAW,QAAQ,IAAI,EAAA,EAAA,EAAA,MAAM,EAAI,SAAS,CAAC,CAAC,CAClD,EAAI,KAAK,CACT,MAAM,GAET,CASH,SAAS,EAAK,EAAmB,CAC/B,OAAO,OAAO,EAAE,CAAC,SAAS,EAAG,IAAI,CASnC,SAAS,EAAe,EAAsC,CAE5D,OAAO,OAAO,WACZ,OAAO,OAAO,EAAI,CACf,IAAK,GAAO,GAAK,KAAO,GAAK,OAAO,EAAE,CAAE,CACxC,KAAK,IAAI,CACZ,OACD,CAWH,eAAsB,EAAgB,EAAgC,CACpE,GAAM,CACJ,WACA,YACA,iBACA,cACA,aACA,gBAAgB,KACd,EACE,CAAE,KAAM,GAAc,MAAA,EAAA,EAAA,MAAW,EAAS,CAC5C,EAAW,EAEf,EAAA,EAAO,KACL,EAAA,QAAO,QAAQ,YAAY,EAAS,SAAS,EAAY,aAAa,CACvE,CAED,IAAM,EAAiB,KAAK,MAAM,EAAc,KAAO,KAAK,CACtD,GAAA,EAAA,EAAA,UAAoB,EAAU,OAAO,CACrC,EAAS,IAAA,EAAA,EAAA,SAAqB,EAAS,CAK7C,GAJA,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,qBAAqB,IAAS,CAAC,CAC1D,MAAA,EAAA,EAAA,OAAY,EAAQ,CAAE,UAAW,GAAM,CAAC,CAGpC,EAAgB,CAClB,EAAA,EAAO,KAAK,EAAA,QAAO,OAAO,8BAA8B,IAAS,CAAC,CAClE,IAAM,EAAQ,MAAA,EAAA,EAAA,SAAc,EAAO,CACnC,MAAM,QAAQ,IACZ,EACG,OAAQ,GAAM,EAAE,WAAW,GAAG,EAAS,SAAS,E
AAI,EAAE,SAAS,OAAO,CAAC,CACvE,IAAK,IAAA,EAAA,EAAA,SAAA,EAAA,EAAA,MAAkB,EAAQ,EAAE,CAAC,CAAC,CACvC,CAGH,IAAI,EAA6B,KAC7B,EAA8B,KAC9B,EAAa,EACb,EAAe,EACf,EAAc,EAEZ,EAAS,IAAIC,EAAAA,OAAO,CACxB,QAAS,GACT,iBAAkB,GACnB,CAAC,CAGE,EAAc,EACd,EAAa,EAEX,MAAmB,CACvB,IAAM,EAAM,EAAa,EAAI,EAAc,EAAa,EAClD,EACJ,EAAM,EAAI,KAAK,IAAI,EAAY,KAAK,KAAK,EAAY,EAAI,CAAC,CAAG,IAAA,GAC/D,EAAW,EAAY,EAAS,CAChC,EAAW,KAAK,KAAK,EAIvB,GAAM,CAGN,IAAI,EAKO,KAGL,OAAA,EAAA,EAAA,MACC,EAAQ,GAAG,EAAS,SAAS,EAAK,EAAa,CAAC,MAAM,CAEvD,EAAI,IAAIC,EAAAA,UAAU,CACtB,WAAY,GAQZ,MAAM,UAAU,EAAe,EAAM,EAAI,CACvC,GAAI,CAEF,GAAI,CAAC,EAAW,CACd,EAAY,EAAI,MAAM,EAAE,CACxB,EAAe,EAAU,OAGzB,EAAS,EAAqB,GAAkB,CAAE,EAAU,CAC5D,GAAI,CACJ,OAIE,IAAiB,MAAQ,EAAI,SAAW,GAE1C,EAAA,EAAO,KACL,EAAA,QAAO,OACL,WAAW,EAAI,OAAO,kBAAkB,IACzC,CACF,CAGH,GAAc,EACV,EAAa,MAAY,GAC3B,EAAW,EAAW,CAIxB,IAAM,EAAM,OAAO,YAAY,EAAW,KAAK,EAAG,IAAM,CAAC,EAAG,EAAI,GAAG,CAAC,CAAC,CAG/D,EAAW,EAAe,EAAI,CACpC,GAAe,EACf,GAAc,EAGV,KAAK,KAAK,CAAG,GAAY,GAAe,GAAM,CAKhD,GACA,EAAc,GACd,EAAc,EAAW,IAEzB,MAAM,EAAO,KAAK,CAClB,GAAgB,EAChB,EAAc,EACd,EAAA,EAAO,KACL,EAAA,QAAO,MACL,oBAAoB,EAAa,SAAS,EAAW,gBAAgB,CAAC,QACvE,CACF,CACD,EAAS,EAAqB,GAAkB,CAAE,EAAW,EAI/D,AACE,IAAS,EAAqB,GAAkB,CAAE,EAAW,CAI/D,MAAM,EAAO,MAAM,EAAI,CACvB,GAAe,EAEf,GAAI,OACG,EAAG,CACV,EAAG,EAAW,GAUlB,MAAM,MAAM,EAAI,CACd,GAAI,CACF,AAEE,KADA,MAAM,EAAO,KAAK,CACT,MAEX,GAAM,CACN,GAAI,OACG,EAAG,CACV,EAAG,EAAW,GAGnB,CAAC,CAGF,MAAA,EAAA,EAAA,WAAA,EAAA,EAAA,kBAD4B,EAAS,CAClB,EAAQ,EAAE,CAG7B,EAAW,EAAW,CACtB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,WAAW,EAAS,QAAQ,EAAa,sBAAsB,EAAW,gBAAgB,CAAC,QAC5F,CACF,CCtOH,eAAsB,GAA0B,CAC9C,IAAM,EAAW,OAAO,QAAQ,IAAI,WAAa,IAAI,CACrD,EAAA,EAAO,KAAK,KAAK,EAAS,cAAc,QAAQ,MAAM,CAGtD,QAAQ,OAAO,CAAE,KAAM,QAAS,CAAC,CAGjC,QAAQ,GAAG,UAAW,KAAO,IAA6B,CASxD,GARI,CAAC,GAAO,OAAO,GAAQ,WAGvB,EAAI,OAAS,YACf,QAAQ,KAAK,EAAE,CAIb,EAAI,OAAS,QAAQ,OAEzB,GAAM,CAAE,WAAU,WAAY,EAAI,QAC5B,CAAE,YAAW,iBAAgB,eAAgB,EAEnD,GAAI,CAEF,MAAM,EAAgB,CACpB,WACA,YACA,iBACA,cAEA,YAAa,EAAW,IACtB,QAAQ,OAAO,CACb,KAAM,WACN,QAAS,CAAE,WAAU,YAAW,QAAO,CACxC,CAAC,CACL,CAAC,CAGF,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAM,WAAU,CAChC,CAAC,OACK,EAAK,CAEZ,IAAM,EAAUC,EAAAA,EAAoB,EAAI,CACxC,EAAA,EAAO,MAAM,KAAK,EAAS,UAAU,EAAS,IAAI,IAAU,CAC5D,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAO,WAAU,MAAO,EAAS,CACjD,CAAC,GAEJ,CAGF,MAAM,IAAI,YAAqB,GAG7B,CCzGJ,SAAS,EACP,EACU,CAEV,OAAOC,EAAAA,EAAW,EAAI,CASxB,SAAS,EACP,EACU,CACV,OAAOC,EAAAA,EAAe,EAAI,CAG5B,MAAa,EAA8D,CACzE,eACA,gBAED,CCVD,SAAS,GAA+B,CAItC,OAHI,OAAO,WAAe,IACjB,WAEF,QAAQ,KAAK,GAqCtB,eAAsB,EAEpB,EACe,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CACJ,YACA,YACA,iBACA,cACA,cACA,cACE,EAGE,EAAQ,EAAsB,EAAW,KAAK,CAG9C,CAAE,WAAU,YAAaC,EAAAA,EAAgB,EAAa,EAAM,OAAO,CAEzE,EAAA,EAAO,KACL,EAAA,QAAO,MACL,YAAY,EAAM,OAAO,8BAA8B,EAAS,QAAQ,EAAS,GAClF,CACF,CAGD,IAAM,EAAQ,EAAM,IAAgB,IAAc,CAChD,WACA,QAAS,CAAE,YAAW,iBAAgB,cAAa,CACpD,EAAE,CAiBH,MAAMC,EAAAA,EAAQ,CACZ,MAAO,eAAe,IACtB,QAAS,GAAa,GAAa,QAAQ,KAAK,CAChD,UAAWC,EAAAA,EACX,gBAAiB,GAAsB,CACvC,WACA,WACA,WAAY,EAAM,OAClB,MAtBsE,CACtE,aAAgB,EAAM,OAAO,CAC7B,UAAY,GAAM,EAAE,SACpB,gBAAmB,EAAE,EACrB,qBAAwB,IAAA,GACxB,WAAa,GAAW,EACxB,UAAW,EAAQ,KAAS,CAAE,SAAQ,GAAI,CAAC,CAAC,EAAI,GAAI,EAEpD,YAAa,SAAY,GAG1B,CAYC,aACA,OAAS,GAAUC,EAAAA,EAAgB,EAAO,EAAgB,EAAW,CACrE,iBAAkB,CAAE,aAAY,UAAS,eACvCC,EAAAA,EAAsB,CACpB,aACA,UACA,YACD,CAAC,CACL,CAAC,CAMA,QAAQ,KAAK,SAASF,EAAAA,EAAW,EACnC,GAAU,CAAC,MAAO,GAAQ,CACxB,EAAA,EAAO,MAAM,EAAI,CACjB,QAAQ,KAAK,EAAE,EACf"}
@@ -1,2 +1,2 @@
- require(`./constants-wkuhlP8d.cjs`),require(`./syncConfigurationToTranscend-DuTZKIG8.cjs`),require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`),require(`./codecs-Bvmb8o9R.cjs`);const e=require(`./consent-manager-CCyvzvY5.cjs`);require(`./uploadConsents-C9Pv8Awr.cjs`);const t=require(`./done-input-validation-DGckEJ5a.cjs`);async function n({auth:n,trackerStatus:r,file:i,transcendUrl:a}){t.t(this.process.exit),await e.o({auth:n,trackerStatus:r,file:i,transcendUrl:a})}exports.uploadCookiesFromCsv=n;
- //# sourceMappingURL=impl-C6JjApDI.cjs.map
+ require(`./constants-BvCUdOlO.cjs`),require(`./syncConfigurationToTranscend-BipGaTT0.cjs`),require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`),require(`./codecs-Bvmb8o9R.cjs`);const e=require(`./consent-manager-C6jfR89E.cjs`);require(`./uploadConsents-C7SPWTIr.cjs`);const t=require(`./done-input-validation-DGckEJ5a.cjs`);async function n({auth:n,trackerStatus:r,file:i,transcendUrl:a}){t.t(this.process.exit),await e.o({auth:n,trackerStatus:r,file:i,transcendUrl:a})}exports.uploadCookiesFromCsv=n;
+ //# sourceMappingURL=impl-Cd8q5X4H.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-C6JjApDI.cjs","names":["uploadCookiesFromCsvHelper"],"sources":["../src/commands/consent/upload-cookies-from-csv/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { uploadCookiesFromCsv as uploadCookiesFromCsvHelper } from '../../../lib/consent-manager';\nimport { ConsentTrackerStatus } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface UploadCookiesFromCsvCommandFlags {\n auth: string;\n trackerStatus: ConsentTrackerStatus;\n file: string;\n transcendUrl: string;\n}\n\nexport async function uploadCookiesFromCsv(\n this: LocalContext,\n { auth, trackerStatus, file, transcendUrl }: UploadCookiesFromCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Upload cookies\n await uploadCookiesFromCsvHelper({\n auth,\n trackerStatus,\n file,\n transcendUrl,\n });\n}\n"],"mappings":"iYAYA,eAAsB,EAEpB,CAAE,OAAM,gBAAe,OAAM,gBACd,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,MAAMA,EAAAA,EAA2B,CAC/B,OACA,gBACA,OACA,eACD,CAAC"}
+ {"version":3,"file":"impl-Cd8q5X4H.cjs","names":["uploadCookiesFromCsvHelper"],"sources":["../src/commands/consent/upload-cookies-from-csv/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { uploadCookiesFromCsv as uploadCookiesFromCsvHelper } from '../../../lib/consent-manager';\nimport { ConsentTrackerStatus } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface UploadCookiesFromCsvCommandFlags {\n auth: string;\n trackerStatus: ConsentTrackerStatus;\n file: string;\n transcendUrl: string;\n}\n\nexport async function uploadCookiesFromCsv(\n this: LocalContext,\n { auth, trackerStatus, file, transcendUrl }: UploadCookiesFromCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Upload cookies\n await uploadCookiesFromCsvHelper({\n auth,\n trackerStatus,\n file,\n transcendUrl,\n });\n}\n"],"mappings":"iYAYA,eAAsB,EAEpB,CAAE,OAAM,gBAAe,OAAM,gBACd,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,MAAMA,EAAAA,EAA2B,CAC/B,OACA,gBACA,OACA,eACD,CAAC"}
@@ -1,2 +1,2 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-wkuhlP8d.cjs`);const t=require(`./syncConfigurationToTranscend-DuTZKIG8.cjs`),n=require(`./dataFlowsToDataSilos-DXlFFHMV.cjs`);require(`./enums-BZulhPFa.cjs`);const r=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`),require(`./codecs-Bvmb8o9R.cjs`);const i=require(`./readTranscendYaml-Cycz6RxW.cjs`),a=require(`./api-keys-Bb2BbZQe.cjs`),o=require(`./done-input-validation-DGckEJ5a.cjs`);let s=require(`node:fs`),c=require(`node:path`),l=require(`colors`);l=e.t(l);async function u({auth:e,dataFlowsYmlFolder:u,dataSilosYmlFolder:d,ignoreYmls:f=[],transcendUrl:p}){o.t(this.process.exit),(!(0,s.existsSync)(u)||!(0,s.lstatSync)(u).isDirectory())&&(r.t.error(l.default.red(`Folder does not exist: "${u}"`)),this.process.exit(1)),(!(0,s.existsSync)(d)||!(0,s.lstatSync)(d).isDirectory())&&(r.t.error(l.default.red(`Folder does not exist: "${d}"`)),this.process.exit(1));let{serviceToTitle:m,serviceToSupportedIntegration:h}=await t.ir(t.ti(p,e));a.n(u).forEach(e=>{let{"data-flows":t=[]}=i.r((0,c.join)(u,e)),{adTechDataSilos:a,siteTechDataSilos:o}=n.t(t,{serviceToSupportedIntegration:h,serviceToTitle:m}),s=[...a,...o];r.t.log(`Total Services: ${s.length}`),r.t.log(`Ad Tech Services: ${a.length}`),r.t.log(`Site Tech Services: ${o.length}`),i.a((0,c.join)(d,e),{"data-silos":f.includes(e)?[]:s})})}exports.deriveDataSilosFromDataFlows=u;
- //# sourceMappingURL=impl-B_2CdctV.cjs.map
+ const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-BvCUdOlO.cjs`);const t=require(`./syncConfigurationToTranscend-BipGaTT0.cjs`),n=require(`./dataFlowsToDataSilos-CRqkUmje.cjs`);require(`./enums-BZulhPFa.cjs`);const r=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`),require(`./codecs-Bvmb8o9R.cjs`);const i=require(`./readTranscendYaml-Cycz6RxW.cjs`),a=require(`./api-keys-DKB-2PVX.cjs`),o=require(`./done-input-validation-DGckEJ5a.cjs`);let s=require(`node:fs`),c=require(`node:path`),l=require(`colors`);l=e.t(l);async function u({auth:e,dataFlowsYmlFolder:u,dataSilosYmlFolder:d,ignoreYmls:f=[],transcendUrl:p}){o.t(this.process.exit),(!(0,s.existsSync)(u)||!(0,s.lstatSync)(u).isDirectory())&&(r.t.error(l.default.red(`Folder does not exist: "${u}"`)),this.process.exit(1)),(!(0,s.existsSync)(d)||!(0,s.lstatSync)(d).isDirectory())&&(r.t.error(l.default.red(`Folder does not exist: "${d}"`)),this.process.exit(1));let{serviceToTitle:m,serviceToSupportedIntegration:h}=await t.ir(t.ti(p,e));a.n(u).forEach(e=>{let{"data-flows":t=[]}=i.r((0,c.join)(u,e)),{adTechDataSilos:a,siteTechDataSilos:o}=n.t(t,{serviceToSupportedIntegration:h,serviceToTitle:m}),s=[...a,...o];r.t.log(`Total Services: ${s.length}`),r.t.log(`Ad Tech Services: ${a.length}`),r.t.log(`Site Tech Services: ${o.length}`),i.a((0,c.join)(d,e),{"data-silos":f.includes(e)?[]:s})})}exports.deriveDataSilosFromDataFlows=u;
+ //# sourceMappingURL=impl-CdxiD7Yy.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-B_2CdctV.cjs","names":["fetchAndIndexCatalogs","buildTranscendGraphQLClient","readTranscendYaml","dataFlowsToDataSilos"],"sources":["../src/commands/inventory/derive-data-silos-from-data-flows/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport {\n fetchAndIndexCatalogs,\n buildTranscendGraphQLClient,\n} from '../../../lib/graphql';\nimport { join } from 'node:path';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { dataFlowsToDataSilos } from '../../../lib/consent-manager/dataFlowsToDataSilos';\nimport { DataFlowInput } from '../../../codecs';\nimport { existsSync, lstatSync } from 'node:fs';\nimport { listFiles } from '../../../lib/api-keys';\nimport {\n readTranscendYaml,\n writeTranscendYaml,\n} from '../../../lib/readTranscendYaml';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface DeriveDataSilosFromDataFlowsCommandFlags {\n auth: string;\n dataFlowsYmlFolder: string;\n dataSilosYmlFolder: string;\n ignoreYmls?: string[];\n transcendUrl: string;\n}\n\nexport async function deriveDataSilosFromDataFlows(\n this: LocalContext,\n {\n auth,\n dataFlowsYmlFolder,\n dataSilosYmlFolder,\n ignoreYmls = [],\n transcendUrl,\n }: DeriveDataSilosFromDataFlowsCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Ensure folder is passed\n if (\n !existsSync(dataFlowsYmlFolder) ||\n !lstatSync(dataFlowsYmlFolder).isDirectory()\n ) {\n logger.error(colors.red(`Folder does not exist: \"${dataFlowsYmlFolder}\"`));\n this.process.exit(1);\n }\n\n // Ensure folder is passed\n if (\n !existsSync(dataSilosYmlFolder) ||\n !lstatSync(dataSilosYmlFolder).isDirectory()\n ) {\n logger.error(colors.red(`Folder does not exist: \"${dataSilosYmlFolder}\"`));\n this.process.exit(1);\n }\n\n // Fetch all integrations in the catalog\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const { serviceToTitle, serviceToSupportedIntegration } =\n await fetchAndIndexCatalogs(client);\n\n // List of each data flow yml file\n listFiles(dataFlowsYmlFolder).forEach((directory) => {\n // read in the data flows for a specific instance\n const { 'data-flows': dataFlows = [] } = readTranscendYaml(\n join(dataFlowsYmlFolder, directory),\n );\n\n // map the data flows to data silos\n const { adTechDataSilos, siteTechDataSilos } = dataFlowsToDataSilos(\n dataFlows as DataFlowInput[],\n {\n serviceToSupportedIntegration,\n serviceToTitle,\n },\n );\n\n // combine and write to yml file\n const dataSilos = [...adTechDataSilos, ...siteTechDataSilos];\n logger.log(`Total Services: ${dataSilos.length}`);\n logger.log(`Ad Tech Services: ${adTechDataSilos.length}`);\n logger.log(`Site Tech Services: ${siteTechDataSilos.length}`);\n writeTranscendYaml(join(dataSilosYmlFolder, directory), {\n 'data-silos': ignoreYmls.includes(directory) ? 
[] : dataSilos,\n });\n });\n}\n"],"mappings":"+iBA0BA,eAAsB,EAEpB,CACE,OACA,qBACA,qBACA,aAAa,EAAE,CACf,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,EAIpC,EAAA,EAAA,EAAA,YAAY,EAAmB,EAC/B,EAAA,EAAA,EAAA,WAAW,EAAmB,CAAC,aAAa,IAE5C,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,2BAA2B,EAAmB,GAAG,CAAC,CAC1E,KAAK,QAAQ,KAAK,EAAE,GAKpB,EAAA,EAAA,EAAA,YAAY,EAAmB,EAC/B,EAAA,EAAA,EAAA,WAAW,EAAmB,CAAC,aAAa,IAE5C,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,2BAA2B,EAAmB,GAAG,CAAC,CAC1E,KAAK,QAAQ,KAAK,EAAE,EAKtB,GAAM,CAAE,iBAAgB,iCACtB,MAAMA,EAAAA,GAFOC,EAAAA,GAA4B,EAAc,EAAK,CAEzB,CAGrC,EAAA,EAAU,EAAmB,CAAC,QAAS,GAAc,CAEnD,GAAM,CAAE,aAAc,EAAY,EAAE,EAAKC,EAAAA,GAAAA,EAAAA,EAAAA,MAClC,EAAoB,EAAU,CACpC,CAGK,CAAE,kBAAiB,qBAAsBC,EAAAA,EAC7C,EACA,CACE,gCACA,iBACD,CACF,CAGK,EAAY,CAAC,GAAG,EAAiB,GAAG,EAAkB,CAC5D,EAAA,EAAO,IAAI,mBAAmB,EAAU,SAAS,CACjD,EAAA,EAAO,IAAI,qBAAqB,EAAgB,SAAS,CACzD,EAAA,EAAO,IAAI,uBAAuB,EAAkB,SAAS,CAC7D,EAAA,GAAA,EAAA,EAAA,MAAwB,EAAoB,EAAU,CAAE,CACtD,aAAc,EAAW,SAAS,EAAU,CAAG,EAAE,CAAG,EACrD,CAAC,EACF"}
+ {"version":3,"file":"impl-CdxiD7Yy.cjs","names":["fetchAndIndexCatalogs","buildTranscendGraphQLClient","readTranscendYaml","dataFlowsToDataSilos"],"sources":["../src/commands/inventory/derive-data-silos-from-data-flows/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport {\n fetchAndIndexCatalogs,\n buildTranscendGraphQLClient,\n} from '../../../lib/graphql';\nimport { join } from 'node:path';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { dataFlowsToDataSilos } from '../../../lib/consent-manager/dataFlowsToDataSilos';\nimport { DataFlowInput } from '../../../codecs';\nimport { existsSync, lstatSync } from 'node:fs';\nimport { listFiles } from '../../../lib/api-keys';\nimport {\n readTranscendYaml,\n writeTranscendYaml,\n} from '../../../lib/readTranscendYaml';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface DeriveDataSilosFromDataFlowsCommandFlags {\n auth: string;\n dataFlowsYmlFolder: string;\n dataSilosYmlFolder: string;\n ignoreYmls?: string[];\n transcendUrl: string;\n}\n\nexport async function deriveDataSilosFromDataFlows(\n this: LocalContext,\n {\n auth,\n dataFlowsYmlFolder,\n dataSilosYmlFolder,\n ignoreYmls = [],\n transcendUrl,\n }: DeriveDataSilosFromDataFlowsCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Ensure folder is passed\n if (\n !existsSync(dataFlowsYmlFolder) ||\n !lstatSync(dataFlowsYmlFolder).isDirectory()\n ) {\n logger.error(colors.red(`Folder does not exist: \"${dataFlowsYmlFolder}\"`));\n this.process.exit(1);\n }\n\n // Ensure folder is passed\n if (\n !existsSync(dataSilosYmlFolder) ||\n !lstatSync(dataSilosYmlFolder).isDirectory()\n ) {\n logger.error(colors.red(`Folder does not exist: \"${dataSilosYmlFolder}\"`));\n this.process.exit(1);\n }\n\n // Fetch all integrations in the catalog\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const { serviceToTitle, serviceToSupportedIntegration } =\n await fetchAndIndexCatalogs(client);\n\n // List of each data flow yml file\n listFiles(dataFlowsYmlFolder).forEach((directory) => {\n // read in the data flows for a specific instance\n const { 'data-flows': dataFlows = [] } = readTranscendYaml(\n join(dataFlowsYmlFolder, directory),\n );\n\n // map the data flows to data silos\n const { adTechDataSilos, siteTechDataSilos } = dataFlowsToDataSilos(\n dataFlows as DataFlowInput[],\n {\n serviceToSupportedIntegration,\n serviceToTitle,\n },\n );\n\n // combine and write to yml file\n const dataSilos = [...adTechDataSilos, ...siteTechDataSilos];\n logger.log(`Total Services: ${dataSilos.length}`);\n logger.log(`Ad Tech Services: ${adTechDataSilos.length}`);\n logger.log(`Site Tech Services: ${siteTechDataSilos.length}`);\n writeTranscendYaml(join(dataSilosYmlFolder, directory), {\n 'data-silos': ignoreYmls.includes(directory) ? 
[] : dataSilos,\n });\n });\n}\n"],"mappings":"+iBA0BA,eAAsB,EAEpB,CACE,OACA,qBACA,qBACA,aAAa,EAAE,CACf,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,EAIpC,EAAA,EAAA,EAAA,YAAY,EAAmB,EAC/B,EAAA,EAAA,EAAA,WAAW,EAAmB,CAAC,aAAa,IAE5C,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,2BAA2B,EAAmB,GAAG,CAAC,CAC1E,KAAK,QAAQ,KAAK,EAAE,GAKpB,EAAA,EAAA,EAAA,YAAY,EAAmB,EAC/B,EAAA,EAAA,EAAA,WAAW,EAAmB,CAAC,aAAa,IAE5C,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,2BAA2B,EAAmB,GAAG,CAAC,CAC1E,KAAK,QAAQ,KAAK,EAAE,EAKtB,GAAM,CAAE,iBAAgB,iCACtB,MAAMA,EAAAA,GAFOC,EAAAA,GAA4B,EAAc,EAAK,CAEzB,CAGrC,EAAA,EAAU,EAAmB,CAAC,QAAS,GAAc,CAEnD,GAAM,CAAE,aAAc,EAAY,EAAE,EAAKC,EAAAA,GAAAA,EAAAA,EAAAA,MAClC,EAAoB,EAAU,CACpC,CAGK,CAAE,kBAAiB,qBAAsBC,EAAAA,EAC7C,EACA,CACE,gCACA,iBACD,CACF,CAGK,EAAY,CAAC,GAAG,EAAiB,GAAG,EAAkB,CAC5D,EAAA,EAAO,IAAI,mBAAmB,EAAU,SAAS,CACjD,EAAA,EAAO,IAAI,qBAAqB,EAAgB,SAAS,CACzD,EAAA,EAAO,IAAI,uBAAuB,EAAkB,SAAS,CAC7D,EAAA,GAAA,EAAA,EAAA,MAAwB,EAAoB,EAAU,CAAE,CACtD,aAAc,EAAW,SAAS,EAAU,CAAG,EAAE,CAAG,EACrD,CAAC,EACF"}
@@ -1,2 +1,2 @@
- require(`./constants-wkuhlP8d.cjs`);const e=require(`./syncConfigurationToTranscend-DuTZKIG8.cjs`);require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const t=require(`./done-input-validation-DGckEJ5a.cjs`);async function n({auth:n,dataSiloId:r,status:i,statuses:a,transcendUrl:o}){t.t(this.process.exit),await e.F({transcendUrl:o,auth:n,status:i,dataSiloId:r,requestStatuses:a})}exports.skipRequestDataSilos=n;
- //# sourceMappingURL=impl-Dp3-sA6b.cjs.map
+ require(`./constants-BvCUdOlO.cjs`);const e=require(`./syncConfigurationToTranscend-BipGaTT0.cjs`);require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const t=require(`./done-input-validation-DGckEJ5a.cjs`);async function n({auth:n,dataSiloId:r,status:i,statuses:a,transcendUrl:o}){t.t(this.process.exit),await e.F({transcendUrl:o,auth:n,status:i,dataSiloId:r,requestStatuses:a})}exports.skipRequestDataSilos=n;
+ //# sourceMappingURL=impl-CniCF9_X.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-Dp3-sA6b.cjs","names":["skipRequestDataSilosHelper"],"sources":["../src/commands/request/system/skip-request-data-silos/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../../context';\nimport type {\n RequestDataSiloStatus,\n RequestStatus,\n} from '@transcend-io/privacy-types';\nimport { skipRequestDataSilos as skipRequestDataSilosHelper } from '../../../../lib/requests';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\n\nexport interface SkipRequestDataSilosCommandFlags {\n auth: string;\n dataSiloId: string;\n transcendUrl: string;\n statuses: RequestStatus[];\n status:\n | (typeof RequestDataSiloStatus)['Skipped']\n | (typeof RequestDataSiloStatus)['Resolved'];\n}\n\nexport async function skipRequestDataSilos(\n this: LocalContext,\n {\n auth,\n dataSiloId,\n status,\n statuses,\n transcendUrl,\n }: SkipRequestDataSilosCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await skipRequestDataSilosHelper({\n transcendUrl,\n auth,\n status,\n dataSiloId,\n requestStatuses: statuses,\n });\n}\n"],"mappings":"6QAkBA,eAAsB,EAEpB,CACE,OACA,aACA,SACA,WACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAA2B,CAC/B,eACA,OACA,SACA,aACA,gBAAiB,EAClB,CAAC"}
+ {"version":3,"file":"impl-CniCF9_X.cjs","names":["skipRequestDataSilosHelper"],"sources":["../src/commands/request/system/skip-request-data-silos/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../../context';\nimport type {\n RequestDataSiloStatus,\n RequestStatus,\n} from '@transcend-io/privacy-types';\nimport { skipRequestDataSilos as skipRequestDataSilosHelper } from '../../../../lib/requests';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\n\nexport interface SkipRequestDataSilosCommandFlags {\n auth: string;\n dataSiloId: string;\n transcendUrl: string;\n statuses: RequestStatus[];\n status:\n | (typeof RequestDataSiloStatus)['Skipped']\n | (typeof RequestDataSiloStatus)['Resolved'];\n}\n\nexport async function skipRequestDataSilos(\n this: LocalContext,\n {\n auth,\n dataSiloId,\n status,\n statuses,\n transcendUrl,\n }: SkipRequestDataSilosCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await skipRequestDataSilosHelper({\n transcendUrl,\n auth,\n status,\n dataSiloId,\n requestStatuses: statuses,\n });\n}\n"],"mappings":"6QAkBA,eAAsB,EAEpB,CACE,OACA,aACA,SACA,WACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAA2B,CAC/B,eACA,OACA,SACA,aACA,gBAAiB,EAClB,CAAC"}
@@ -1,2 +1,2 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./command-XJ7XPQ04.cjs`),n=require(`./constants-wkuhlP8d.cjs`),r=require(`./syncConfigurationToTranscend-DuTZKIG8.cjs`),i=require(`./enums-BZulhPFa.cjs`),a=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`),require(`./codecs-Bvmb8o9R.cjs`);const o=require(`./readTranscendYaml-Cycz6RxW.cjs`),s=require(`./api-keys-Bb2BbZQe.cjs`),c=require(`./done-input-validation-DGckEJ5a.cjs`);let l=require(`node:fs`);l=e.t(l);let u=require(`node:path`),d=require(`colors`);d=e.t(d);async function f({auth:e,resources:f=t.n,file:p,transcendUrl:m,dataSiloIds:h=[],integrationNames:g=[],trackerStatuses:_=t.t,pageSize:v,skipDatapoints:y,skipSubDatapoints:b,includeGuessedCategories:x,debug:S}){c.t(this.process.exit);let C=await s.r(e),w=f.includes(`all`)?Object.values(i.o):f;if(typeof C==`string`){try{let e=await r._n(r.ti(m,C),{dataSiloIds:h,integrationNames:g,resources:w,pageSize:v,debug:S,skipDatapoints:y,skipSubDatapoints:b,includeGuessedCategories:x,trackerStatuses:_});a.t.info(d.default.magenta(`Writing configuration to file "${p}"...`)),o.a(p,e)}catch(e){a.t.error(d.default.red(`An error occurred syncing the schema: ${S?e.stack:e.message}`)),this.process.exit(1)}a.t.info(d.default.green(`Successfully synced yaml file to disk at ${p}! View at ${n.r}`))}else{if(!l.default.lstatSync(p).isDirectory())throw Error(`File is expected to be a folder when passing in a list of API keys to pull from. e.g. --file=./working/`);let e=[];await r.Es(C,async(t,n)=>{let i=`[${n+1}/${C.length}][${t.organizationName}] `;a.t.info(d.default.magenta(`~~~\n\n${i}Attempting to pull configuration...\n\n~~~`));let s=r.ti(m,t.apiKey);try{let e=await r._n(s,{dataSiloIds:h,integrationNames:g,resources:w,pageSize:v,debug:S,skipDatapoints:y,skipSubDatapoints:b,includeGuessedCategories:x,trackerStatuses:_}),n=(0,u.join)(p,`${t.organizationName}.yml`);a.t.info(d.default.magenta(`Writing configuration to file "${n}"...`)),o.a(n,e),a.t.info(d.default.green(`${i}Successfully pulled configuration!`))}catch(n){a.t.error(d.default.red(`${i}Failed to sync configuration. - ${n.message}`)),e.push(t.organizationName)}}),e.length>0&&(a.t.info(d.default.red(`Sync encountered errors for "${e.join(`,`)}". View output above for more information, or check out ${n.r}`)),this.process.exit(1))}}exports.pull=f;
- //# sourceMappingURL=impl-CmEsmnYZ.cjs.map
+ const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./command-DM9vqmJO.cjs`),n=require(`./constants-BvCUdOlO.cjs`),r=require(`./syncConfigurationToTranscend-BipGaTT0.cjs`),i=require(`./enums-BZulhPFa.cjs`),a=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`),require(`./codecs-Bvmb8o9R.cjs`);const o=require(`./readTranscendYaml-Cycz6RxW.cjs`),s=require(`./api-keys-DKB-2PVX.cjs`),c=require(`./done-input-validation-DGckEJ5a.cjs`);let l=require(`node:fs`);l=e.t(l);let u=require(`node:path`),d=require(`colors`);d=e.t(d);async function f({auth:e,resources:f=t.n,file:p,transcendUrl:m,dataSiloIds:h=[],integrationNames:g=[],trackerStatuses:_=t.t,pageSize:v,skipDatapoints:y,skipSubDatapoints:b,includeGuessedCategories:x,debug:S}){c.t(this.process.exit);let C=await s.r(e),w=f.includes(`all`)?Object.values(i.o):f;if(typeof C==`string`){try{let e=await r._n(r.ti(m,C),{dataSiloIds:h,integrationNames:g,resources:w,pageSize:v,debug:S,skipDatapoints:y,skipSubDatapoints:b,includeGuessedCategories:x,trackerStatuses:_});a.t.info(d.default.magenta(`Writing configuration to file "${p}"...`)),o.a(p,e)}catch(e){a.t.error(d.default.red(`An error occurred syncing the schema: ${S?e.stack:e.message}`)),this.process.exit(1)}a.t.info(d.default.green(`Successfully synced yaml file to disk at ${p}! View at ${n.r}`))}else{if(!l.default.lstatSync(p).isDirectory())throw Error(`File is expected to be a folder when passing in a list of API keys to pull from. e.g. --file=./working/`);let e=[];await r.Es(C,async(t,n)=>{let i=`[${n+1}/${C.length}][${t.organizationName}] `;a.t.info(d.default.magenta(`~~~\n\n${i}Attempting to pull configuration...\n\n~~~`));let s=r.ti(m,t.apiKey);try{let e=await r._n(s,{dataSiloIds:h,integrationNames:g,resources:w,pageSize:v,debug:S,skipDatapoints:y,skipSubDatapoints:b,includeGuessedCategories:x,trackerStatuses:_}),n=(0,u.join)(p,`${t.organizationName}.yml`);a.t.info(d.default.magenta(`Writing configuration to file "${n}"...`)),o.a(n,e),a.t.info(d.default.green(`${i}Successfully pulled configuration!`))}catch(n){a.t.error(d.default.red(`${i}Failed to sync configuration. - ${n.message}`)),e.push(t.organizationName)}}),e.length>0&&(a.t.info(d.default.red(`Sync encountered errors for "${e.join(`,`)}". View output above for more information, or check out ${n.r}`)),this.process.exit(1))}}exports.pull=f;
+ //# sourceMappingURL=impl-CpJSuHLj.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-CmEsmnYZ.cjs","names":["DEFAULT_TRANSCEND_PULL_RESOURCES","DEFAULT_CONSENT_TRACKER_STATUSES","validateTranscendAuth","TranscendPullResource","pullTranscendConfiguration","buildTranscendGraphQLClient","ADMIN_DASH_INTEGRATIONS","fs","mapSeries"],"sources":["../src/commands/inventory/pull/impl.ts"],"sourcesContent":["import { ConsentTrackerStatus } from '@transcend-io/privacy-types';\nimport type { LocalContext } from '../../../context';\nimport { TranscendPullResource } from '../../../enums';\nimport {\n DEFAULT_CONSENT_TRACKER_STATUSES,\n DEFAULT_TRANSCEND_PULL_RESOURCES,\n} from './command';\n\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { mapSeries } from '../../../lib/bluebird';\nimport { join } from 'node:path';\nimport fs from 'node:fs';\nimport {\n buildTranscendGraphQLClient,\n pullTranscendConfiguration,\n} from '../../../lib/graphql';\n\nimport { writeTranscendYaml } from '../../../lib/readTranscendYaml';\nimport { ADMIN_DASH_INTEGRATIONS } from '../../../constants';\nimport { validateTranscendAuth } from '../../../lib/api-keys';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface PullCommandFlags {\n auth: string;\n resources?: (TranscendPullResource | 'all')[];\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n integrationNames?: string[];\n trackerStatuses?: ConsentTrackerStatus[];\n pageSize: number;\n skipDatapoints: boolean;\n skipSubDatapoints: boolean;\n includeGuessedCategories: boolean;\n debug: boolean;\n}\n\nexport async function pull(\n this: LocalContext,\n {\n auth,\n resources = DEFAULT_TRANSCEND_PULL_RESOURCES,\n file,\n transcendUrl,\n dataSiloIds = [],\n integrationNames = [],\n trackerStatuses = DEFAULT_CONSENT_TRACKER_STATUSES,\n pageSize,\n skipDatapoints,\n skipSubDatapoints,\n includeGuessedCategories,\n debug,\n }: PullCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n const resourcesToPull: TranscendPullResource[] = resources.includes('all')\n ? Object.values(TranscendPullResource)\n : (resources as TranscendPullResource[]);\n\n // Sync to Disk\n if (typeof apiKeyOrList === 'string') {\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKeyOrList);\n\n const configuration = await pullTranscendConfiguration(client, {\n dataSiloIds,\n integrationNames,\n resources: resourcesToPull,\n pageSize,\n debug,\n skipDatapoints,\n skipSubDatapoints,\n includeGuessedCategories,\n trackerStatuses,\n });\n\n logger.info(colors.magenta(`Writing configuration to file \"${file}\"...`));\n writeTranscendYaml(file, configuration);\n } catch (err) {\n logger.error(\n colors.red(\n `An error occurred syncing the schema: ${\n debug ? err.stack : err.message\n }`,\n ),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced yaml file to disk at ${file}! View at ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n } else {\n if (!fs.lstatSync(file).isDirectory()) {\n throw new Error(\n 'File is expected to be a folder when passing in a list of API keys to pull from. e.g. 
--file=./working/',\n );\n }\n\n const encounteredErrors: string[] = [];\n await mapSeries(apiKeyOrList, async (apiKey, ind) => {\n const prefix = `[${ind + 1}/${apiKeyOrList.length}][${\n apiKey.organizationName\n }] `;\n logger.info(\n colors.magenta(\n `~~~\\n\\n${prefix}Attempting to pull configuration...\\n\\n~~~`,\n ),\n );\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKey.apiKey);\n\n try {\n const configuration = await pullTranscendConfiguration(client, {\n dataSiloIds,\n integrationNames,\n resources: resourcesToPull,\n pageSize,\n debug,\n skipDatapoints,\n skipSubDatapoints,\n includeGuessedCategories,\n trackerStatuses,\n });\n\n const filePath = join(file, `${apiKey.organizationName}.yml`);\n logger.info(\n colors.magenta(`Writing configuration to file \"${filePath}\"...`),\n );\n writeTranscendYaml(filePath, configuration);\n\n logger.info(\n colors.green(`${prefix}Successfully pulled configuration!`),\n );\n } catch (err) {\n logger.error(\n colors.red(`${prefix}Failed to sync configuration. - ${err.message}`),\n );\n encounteredErrors.push(apiKey.organizationName);\n }\n });\n\n if (encounteredErrors.length > 0) {\n logger.info(\n colors.red(\n `Sync encountered errors for \"${encounteredErrors.join(\n ',',\n )}\". View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n }\n}\n"],"mappings":"uiBAsCA,eAAsB,EAEpB,CACE,OACA,YAAYA,EAAAA,EACZ,OACA,eACA,cAAc,EAAE,CAChB,mBAAmB,EAAE,CACrB,kBAAkBC,EAAAA,EAClB,WACA,iBACA,oBACA,2BACA,SAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAAe,MAAMC,EAAAA,EAAsB,EAAK,CAEhD,EAA2C,EAAU,SAAS,MAAM,CACtE,OAAO,OAAOC,EAAAA,EAAsB,CACnC,EAGL,GAAI,OAAO,GAAiB,SAAU,CACpC,GAAI,CAIF,IAAM,EAAgB,MAAMC,EAAAA,GAFbC,EAAAA,GAA4B,EAAc,EAAa,CAEP,CAC7D,cACA,mBACA,UAAW,EACX,WACA,QACA,iBACA,oBACA,2BACA,kBACD,CAAC,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,kCAAkC,EAAK,MAAM,CAAC,CACzE,EAAA,EAAmB,EAAM,EAAc,OAChC,EAAK,CACZ,EAAA,EAAO,MACL,EAAA,QAAO,IACL,yCACE,EAAQ,EAAI,MAAQ,EAAI,UAE3B,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,CAItB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,4CAA4C,EAAK,YAAYC,EAAAA,IAC9D,CACF,KACI,CACL,GAAI,CAACC,EAAAA,QAAG,UAAU,EAAK,CAAC,aAAa,CACnC,MAAU,MACR,0GACD,CAGH,IAAM,EAA8B,EAAE,CACtC,MAAMC,EAAAA,GAAU,EAAc,MAAO,EAAQ,IAAQ,CACnD,IAAM,EAAS,IAAI,EAAM,EAAE,GAAG,EAAa,OAAO,IAChD,EAAO,iBACR,IACD,EAAA,EAAO,KACL,EAAA,QAAO,QACL,UAAU,EAAO,4CAClB,CACF,CAGD,IAAM,EAASH,EAAAA,GAA4B,EAAc,EAAO,OAAO,CAEvE,GAAI,CACF,IAAM,EAAgB,MAAMD,EAAAA,GAA2B,EAAQ,CAC7D,cACA,mBACA,UAAW,EACX,WACA,QACA,iBACA,oBACA,2BACA,kBACD,CAAC,CAEI,GAAA,EAAA,EAAA,MAAgB,EAAM,GAAG,EAAO,iBAAiB,MAAM,CAC7D,EAAA,EAAO,KACL,EAAA,QAAO,QAAQ,kCAAkC,EAAS,MAAM,CACjE,CACD,EAAA,EAAmB,EAAU,EAAc,CAE3C,EAAA,EAAO,KACL,EAAA,QAAO,MAAM,GAAG,EAAO,oCAAoC,CAC5D,OACM,EAAK,CACZ,EAAA,EAAO,MACL,EAAA,QAAO,IAAI,GAAG,EAAO,kCAAkC,EAAI,UAAU,CACtE,CACD,EAAkB,KAAK,EAAO,iBAAiB,GAEjD,CAEE,EAAkB,OAAS,IAC7B,EAAA,EAAO,KACL,EAAA,QAAO,IACL,gCAAgC,EAAkB,KAChD,IACD,CAAC,0DAA0DE,EAAAA,IAC7D,CACF,CAED,KAAK,QAAQ,KAAK,EAAE"}
+ {"version":3,"file":"impl-CpJSuHLj.cjs","names":["DEFAULT_TRANSCEND_PULL_RESOURCES","DEFAULT_CONSENT_TRACKER_STATUSES","validateTranscendAuth","TranscendPullResource","pullTranscendConfiguration","buildTranscendGraphQLClient","ADMIN_DASH_INTEGRATIONS","fs","mapSeries"],"sources":["../src/commands/inventory/pull/impl.ts"],"sourcesContent":["import { ConsentTrackerStatus } from '@transcend-io/privacy-types';\nimport type { LocalContext } from '../../../context';\nimport { TranscendPullResource } from '../../../enums';\nimport {\n DEFAULT_CONSENT_TRACKER_STATUSES,\n DEFAULT_TRANSCEND_PULL_RESOURCES,\n} from './command';\n\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { mapSeries } from '../../../lib/bluebird';\nimport { join } from 'node:path';\nimport fs from 'node:fs';\nimport {\n buildTranscendGraphQLClient,\n pullTranscendConfiguration,\n} from '../../../lib/graphql';\n\nimport { writeTranscendYaml } from '../../../lib/readTranscendYaml';\nimport { ADMIN_DASH_INTEGRATIONS } from '../../../constants';\nimport { validateTranscendAuth } from '../../../lib/api-keys';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface PullCommandFlags {\n auth: string;\n resources?: (TranscendPullResource | 'all')[];\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n integrationNames?: string[];\n trackerStatuses?: ConsentTrackerStatus[];\n pageSize: number;\n skipDatapoints: boolean;\n skipSubDatapoints: boolean;\n includeGuessedCategories: boolean;\n debug: boolean;\n}\n\nexport async function pull(\n this: LocalContext,\n {\n auth,\n resources = DEFAULT_TRANSCEND_PULL_RESOURCES,\n file,\n transcendUrl,\n dataSiloIds = [],\n integrationNames = [],\n trackerStatuses = DEFAULT_CONSENT_TRACKER_STATUSES,\n pageSize,\n skipDatapoints,\n skipSubDatapoints,\n includeGuessedCategories,\n debug,\n }: PullCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n const resourcesToPull: TranscendPullResource[] = resources.includes('all')\n ? Object.values(TranscendPullResource)\n : (resources as TranscendPullResource[]);\n\n // Sync to Disk\n if (typeof apiKeyOrList === 'string') {\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKeyOrList);\n\n const configuration = await pullTranscendConfiguration(client, {\n dataSiloIds,\n integrationNames,\n resources: resourcesToPull,\n pageSize,\n debug,\n skipDatapoints,\n skipSubDatapoints,\n includeGuessedCategories,\n trackerStatuses,\n });\n\n logger.info(colors.magenta(`Writing configuration to file \"${file}\"...`));\n writeTranscendYaml(file, configuration);\n } catch (err) {\n logger.error(\n colors.red(\n `An error occurred syncing the schema: ${\n debug ? err.stack : err.message\n }`,\n ),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced yaml file to disk at ${file}! View at ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n } else {\n if (!fs.lstatSync(file).isDirectory()) {\n throw new Error(\n 'File is expected to be a folder when passing in a list of API keys to pull from. e.g. 
--file=./working/',\n );\n }\n\n const encounteredErrors: string[] = [];\n await mapSeries(apiKeyOrList, async (apiKey, ind) => {\n const prefix = `[${ind + 1}/${apiKeyOrList.length}][${\n apiKey.organizationName\n }] `;\n logger.info(\n colors.magenta(\n `~~~\\n\\n${prefix}Attempting to pull configuration...\\n\\n~~~`,\n ),\n );\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, apiKey.apiKey);\n\n try {\n const configuration = await pullTranscendConfiguration(client, {\n dataSiloIds,\n integrationNames,\n resources: resourcesToPull,\n pageSize,\n debug,\n skipDatapoints,\n skipSubDatapoints,\n includeGuessedCategories,\n trackerStatuses,\n });\n\n const filePath = join(file, `${apiKey.organizationName}.yml`);\n logger.info(\n colors.magenta(`Writing configuration to file \"${filePath}\"...`),\n );\n writeTranscendYaml(filePath, configuration);\n\n logger.info(\n colors.green(`${prefix}Successfully pulled configuration!`),\n );\n } catch (err) {\n logger.error(\n colors.red(`${prefix}Failed to sync configuration. - ${err.message}`),\n );\n encounteredErrors.push(apiKey.organizationName);\n }\n });\n\n if (encounteredErrors.length > 0) {\n logger.info(\n colors.red(\n `Sync encountered errors for \"${encounteredErrors.join(\n ',',\n )}\". View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n }\n}\n"],"mappings":"uiBAsCA,eAAsB,EAEpB,CACE,OACA,YAAYA,EAAAA,EACZ,OACA,eACA,cAAc,EAAE,CAChB,mBAAmB,EAAE,CACrB,kBAAkBC,EAAAA,EAClB,WACA,iBACA,oBACA,2BACA,SAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAAe,MAAMC,EAAAA,EAAsB,EAAK,CAEhD,EAA2C,EAAU,SAAS,MAAM,CACtE,OAAO,OAAOC,EAAAA,EAAsB,CACnC,EAGL,GAAI,OAAO,GAAiB,SAAU,CACpC,GAAI,CAIF,IAAM,EAAgB,MAAMC,EAAAA,GAFbC,EAAAA,GAA4B,EAAc,EAAa,CAEP,CAC7D,cACA,mBACA,UAAW,EACX,WACA,QACA,iBACA,oBACA,2BACA,kBACD,CAAC,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,kCAAkC,EAAK,MAAM,CAAC,CACzE,EAAA,EAAmB,EAAM,EAAc,OAChC,EAAK,CACZ,EAAA,EAAO,MACL,EAAA,QAAO,IACL,yCACE,EAAQ,EAAI,MAAQ,EAAI,UAE3B,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,CAItB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,4CAA4C,EAAK,YAAYC,EAAAA,IAC9D,CACF,KACI,CACL,GAAI,CAACC,EAAAA,QAAG,UAAU,EAAK,CAAC,aAAa,CACnC,MAAU,MACR,0GACD,CAGH,IAAM,EAA8B,EAAE,CACtC,MAAMC,EAAAA,GAAU,EAAc,MAAO,EAAQ,IAAQ,CACnD,IAAM,EAAS,IAAI,EAAM,EAAE,GAAG,EAAa,OAAO,IAChD,EAAO,iBACR,IACD,EAAA,EAAO,KACL,EAAA,QAAO,QACL,UAAU,EAAO,4CAClB,CACF,CAGD,IAAM,EAASH,EAAAA,GAA4B,EAAc,EAAO,OAAO,CAEvE,GAAI,CACF,IAAM,EAAgB,MAAMD,EAAAA,GAA2B,EAAQ,CAC7D,cACA,mBACA,UAAW,EACX,WACA,QACA,iBACA,oBACA,2BACA,kBACD,CAAC,CAEI,GAAA,EAAA,EAAA,MAAgB,EAAM,GAAG,EAAO,iBAAiB,MAAM,CAC7D,EAAA,EAAO,KACL,EAAA,QAAO,QAAQ,kCAAkC,EAAS,MAAM,CACjE,CACD,EAAA,EAAmB,EAAU,EAAc,CAE3C,EAAA,EAAO,KACL,EAAA,QAAO,MAAM,GAAG,EAAO,oCAAoC,CAC5D,OACM,EAAK,CACZ,EAAA,EAAO,MACL,EAAA,QAAO,IAAI,GAAG,EAAO,kCAAkC,EAAI,UAAU,CACtE,CACD,EAAkB,KAAK,EAAO,iBAAiB,GAEjD,CAEE,EAAkB,OAAS,IAC7B,EAAA,EAAO,KACL,EAAA,QAAO,IACL,gCAAgC,EAAkB,KAChD,IACD,CAAC,0DAA0DE,EAAAA,IAC7D,CACF,CAED,KAAK,QAAQ,KAAK,EAAE"}
@@ -1,2 +1,2 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./constants-wkuhlP8d.cjs`);require(`./syncConfigurationToTranscend-DuTZKIG8.cjs`),require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`),require(`./codecs-Bvmb8o9R.cjs`);const n=require(`./api-keys-Bb2BbZQe.cjs`),r=require(`./done-input-validation-DGckEJ5a.cjs`);let i=require(`node:fs`);async function a({email:e,password:a,apiKeyTitle:o,file:s,scopes:c,deleteExistingApiKey:l,createNewApiKey:u,parentOrganizationId:d,transcendUrl:f}){r.t(this.process.exit);let{errors:p,apiKeys:m}=await n.i({transcendUrl:f,password:a,email:e,parentOrganizationId:d,deleteExistingApiKey:l,createNewApiKey:u,apiKeyTitle:o,scopes:c.map(e=>t.c[e].name)});(0,i.writeFileSync)(s,`${JSON.stringify(m,null,2)}\n`),p.length>0&&this.process.exit(1)}exports.generateApiKeys=a;
- //# sourceMappingURL=impl-_QrpPIPw.cjs.map
+ const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./constants-BvCUdOlO.cjs`);require(`./syncConfigurationToTranscend-BipGaTT0.cjs`),require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`),require(`./codecs-Bvmb8o9R.cjs`);const n=require(`./api-keys-DKB-2PVX.cjs`),r=require(`./done-input-validation-DGckEJ5a.cjs`);let i=require(`node:fs`);async function a({email:e,password:a,apiKeyTitle:o,file:s,scopes:c,deleteExistingApiKey:l,createNewApiKey:u,parentOrganizationId:d,transcendUrl:f}){r.t(this.process.exit);let{errors:p,apiKeys:m}=await n.i({transcendUrl:f,password:a,email:e,parentOrganizationId:d,deleteExistingApiKey:l,createNewApiKey:u,apiKeyTitle:o,scopes:c.map(e=>t.c[e].name)});(0,i.writeFileSync)(s,`${JSON.stringify(m,null,2)}\n`),p.length>0&&this.process.exit(1)}exports.generateApiKeys=a;
+ //# sourceMappingURL=impl-CrUSLLov.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-_QrpPIPw.cjs","names":["generateCrossAccountApiKeys","SCOPES_BY_TITLE"],"sources":["../src/commands/admin/generate-api-keys/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { writeFileSync } from 'node:fs';\n\nimport { ScopeName } from '@transcend-io/privacy-types';\n\nimport { generateCrossAccountApiKeys } from '../../../lib/api-keys';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { SCOPES_BY_TITLE } from '../../../constants';\n\n// Command flag interface\nexport interface GenerateApiKeysCommandFlags {\n email: string;\n password: string;\n apiKeyTitle: string;\n file: string;\n scopes: string[];\n deleteExistingApiKey: boolean;\n createNewApiKey: boolean;\n parentOrganizationId?: string;\n transcendUrl: string;\n}\n\n// Command implementation\nexport async function generateApiKeys(\n this: LocalContext,\n {\n email,\n password,\n apiKeyTitle,\n file,\n scopes,\n deleteExistingApiKey,\n createNewApiKey,\n parentOrganizationId,\n transcendUrl,\n }: GenerateApiKeysCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const scopeNames = scopes.map(\n (scopeTitle) => SCOPES_BY_TITLE[scopeTitle].name as ScopeName,\n );\n\n // Upload privacy requests\n const { errors, apiKeys } = await generateCrossAccountApiKeys({\n transcendUrl,\n password,\n email,\n parentOrganizationId,\n deleteExistingApiKey,\n createNewApiKey,\n apiKeyTitle,\n scopes: scopeNames,\n });\n\n // Write to disk\n writeFileSync(file, `${JSON.stringify(apiKeys, null, 2)}\\n`);\n if (errors.length > 0) {\n this.process.exit(1);\n }\n}\n"],"mappings":"8YAuBA,eAAsB,EAEpB,CACE,QACA,WACA,cACA,OACA,SACA,uBACA,kBACA,uBACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAOtC,GAAM,CAAE,SAAQ,WAAY,MAAMA,EAAAA,EAA4B,CAC5D,eACA,WACA,QACA,uBACA,uBACA,kBACA,cACA,OAbiB,EAAO,IACvB,GAAeC,EAAAA,EAAgB,GAAY,KAC7C,CAYA,CAAC,EAGF,EAAA,EAAA,eAAc,EAAM,GAAG,KAAK,UAAU,EAAS,KAAM,EAAE,CAAC,IAAI,CACxD,EAAO,OAAS,GAClB,KAAK,QAAQ,KAAK,EAAE"}
+ {"version":3,"file":"impl-CrUSLLov.cjs","names":["generateCrossAccountApiKeys","SCOPES_BY_TITLE"],"sources":["../src/commands/admin/generate-api-keys/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { writeFileSync } from 'node:fs';\n\nimport { ScopeName } from '@transcend-io/privacy-types';\n\nimport { generateCrossAccountApiKeys } from '../../../lib/api-keys';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { SCOPES_BY_TITLE } from '../../../constants';\n\n// Command flag interface\nexport interface GenerateApiKeysCommandFlags {\n email: string;\n password: string;\n apiKeyTitle: string;\n file: string;\n scopes: string[];\n deleteExistingApiKey: boolean;\n createNewApiKey: boolean;\n parentOrganizationId?: string;\n transcendUrl: string;\n}\n\n// Command implementation\nexport async function generateApiKeys(\n this: LocalContext,\n {\n email,\n password,\n apiKeyTitle,\n file,\n scopes,\n deleteExistingApiKey,\n createNewApiKey,\n parentOrganizationId,\n transcendUrl,\n }: GenerateApiKeysCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const scopeNames = scopes.map(\n (scopeTitle) => SCOPES_BY_TITLE[scopeTitle].name as ScopeName,\n );\n\n // Upload privacy requests\n const { errors, apiKeys } = await generateCrossAccountApiKeys({\n transcendUrl,\n password,\n email,\n parentOrganizationId,\n deleteExistingApiKey,\n createNewApiKey,\n apiKeyTitle,\n scopes: scopeNames,\n });\n\n // Write to disk\n writeFileSync(file, `${JSON.stringify(apiKeys, null, 2)}\\n`);\n if (errors.length > 0) {\n this.process.exit(1);\n }\n}\n"],"mappings":"8YAuBA,eAAsB,EAEpB,CACE,QACA,WACA,cACA,OACA,SACA,uBACA,kBACA,uBACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAOtC,GAAM,CAAE,SAAQ,WAAY,MAAMA,EAAAA,EAA4B,CAC5D,eACA,WACA,QACA,uBACA,uBACA,kBACA,cACA,OAbiB,EAAO,IACvB,GAAeC,EAAAA,EAAgB,GAAY,KAC7C,CAYA,CAAC,EAGF,EAAA,EAAA,eAAc,EAAM,GAAG,KAAK,UAAU,EAAS,KAAM,EAAE,CAAC,IAAI,CACxD,EAAO,OAAS,GAClB,KAAK,QAAQ,KAAK,EAAE"}
@@ -1,2 +1,2 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-wkuhlP8d.cjs`);const t=require(`./syncConfigurationToTranscend-DuTZKIG8.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./data-inventory-C1eqZk1M.cjs`),i=require(`./done-input-validation-DGckEJ5a.cjs`);let a=require(`colors`);a=e.t(a);async function o({auth:e,file:o,transcendUrl:s,dataSiloIds:c,subCategories:l,status:u,includeEncryptedSnippets:d}){i.t(this.process.exit);try{let i=await r.t(t.ti(s,e),{dataSiloIds:c,subCategories:l,status:u,includeEncryptedSnippets:d});n.t.info(a.default.magenta(`Writing unstructured discovery files to file "${o}"...`));let f=[];await t.d(o,i.map(e=>{let n={"Entry ID":e.id,"Data Silo ID":e.dataSiloId,"Object Path ID":e.scannedObjectPathId,"Object ID":e.scannedObjectId,...d?{Entry:e.name,"Context Snippet":e.contextSnippet}:{},"Data Category":`${e.dataSubCategory.category}:${e.dataSubCategory.name}`,"Classification Status":e.status,"Confidence Score":e.confidence,"Classification Method":e.classificationMethod,"Classifier Version":e.classifierVersion};return f=t.Ds([...f,...Object.keys(n)]),n}),f)}catch(e){n.t.error(a.default.red(`An error occurred syncing the unstructured discovery files: ${e.message}`)),this.process.exit(1)}n.t.info(a.default.green(`Successfully synced unstructured discovery files to disk at ${o}!`))}exports.pullUnstructuredDiscoveryFiles=o;
- //# sourceMappingURL=impl-CKYwKeLz.cjs.map
+ const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-BvCUdOlO.cjs`);const t=require(`./syncConfigurationToTranscend-BipGaTT0.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./data-inventory-Bc9DbWkU.cjs`),i=require(`./done-input-validation-DGckEJ5a.cjs`);let a=require(`colors`);a=e.t(a);async function o({auth:e,file:o,transcendUrl:s,dataSiloIds:c,subCategories:l,status:u,includeEncryptedSnippets:d}){i.t(this.process.exit);try{let i=await r.t(t.ti(s,e),{dataSiloIds:c,subCategories:l,status:u,includeEncryptedSnippets:d});n.t.info(a.default.magenta(`Writing unstructured discovery files to file "${o}"...`));let f=[];await t.d(o,i.map(e=>{let n={"Entry ID":e.id,"Data Silo ID":e.dataSiloId,"Object Path ID":e.scannedObjectPathId,"Object ID":e.scannedObjectId,...d?{Entry:e.name,"Context Snippet":e.contextSnippet}:{},"Data Category":`${e.dataSubCategory.category}:${e.dataSubCategory.name}`,"Classification Status":e.status,"Confidence Score":e.confidence,"Classification Method":e.classificationMethod,"Classifier Version":e.classifierVersion};return f=t.Ds([...f,...Object.keys(n)]),n}),f)}catch(e){n.t.error(a.default.red(`An error occurred syncing the unstructured discovery files: ${e.message}`)),this.process.exit(1)}n.t.info(a.default.green(`Successfully synced unstructured discovery files to disk at ${o}!`))}exports.pullUnstructuredDiscoveryFiles=o;
+ //# sourceMappingURL=impl-CvNge9fm.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-CKYwKeLz.cjs","names":["pullUnstructuredSubDataPointRecommendations","buildTranscendGraphQLClient","writeLargeCsv","uniq"],"sources":["../src/commands/inventory/pull-unstructured-discovery-files/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { uniq } from 'lodash-es';\nimport { pullUnstructuredSubDataPointRecommendations } from '../../../lib/data-inventory';\nimport { buildTranscendGraphQLClient } from '../../../lib/graphql';\nimport { logger } from '../../../logger';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { writeLargeCsv } from '../../../lib/helpers';\n\nexport interface PullUnstructuredDiscoveryFilesCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n subCategories?: string[];\n status?: UnstructuredSubDataPointRecommendationStatus[];\n includeEncryptedSnippets: boolean;\n}\n\nexport async function pullUnstructuredDiscoveryFiles(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n dataSiloIds,\n subCategories,\n status,\n includeEncryptedSnippets,\n }: PullUnstructuredDiscoveryFilesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const entries = await pullUnstructuredSubDataPointRecommendations(client, {\n dataSiloIds,\n subCategories, // TODO: https://transcend.height.app/T-40482 - do by name not ID\n status,\n includeEncryptedSnippets,\n });\n\n logger.info(\n colors.magenta(\n `Writing unstructured discovery files to file \"${file}\"...`,\n ),\n );\n let headers: string[] = [];\n const inputs = entries.map((entry) => {\n const result = {\n 'Entry ID': entry.id,\n 'Data Silo ID': entry.dataSiloId,\n 'Object Path ID': entry.scannedObjectPathId,\n 'Object ID': entry.scannedObjectId,\n ...(includeEncryptedSnippets\n ? 
{ Entry: entry.name, 'Context Snippet': entry.contextSnippet }\n : {}),\n 'Data Category': `${entry.dataSubCategory.category}:${entry.dataSubCategory.name}`,\n 'Classification Status': entry.status,\n 'Confidence Score': entry.confidence,\n 'Classification Method': entry.classificationMethod,\n 'Classifier Version': entry.classifierVersion,\n };\n headers = uniq([...headers, ...Object.keys(result)]);\n return result;\n });\n await writeLargeCsv(file, inputs, headers);\n } catch (err) {\n logger.error(\n colors.red(\n `An error occurred syncing the unstructured discovery files: ${err.message}`,\n ),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced unstructured discovery files to disk at ${file}!`,\n ),\n );\n}\n"],"mappings":"yYAoBA,eAAsB,EAEpB,CACE,OACA,OACA,eACA,cACA,gBACA,SACA,4BAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAI,CAIF,IAAM,EAAU,MAAMA,EAAAA,EAFPC,EAAAA,GAA4B,EAAc,EAAK,CAEY,CACxE,cACA,gBACA,SACA,2BACD,CAAC,CAEF,EAAA,EAAO,KACL,EAAA,QAAO,QACL,iDAAiD,EAAK,MACvD,CACF,CACD,IAAI,EAAoB,EAAE,CAmB1B,MAAMC,EAAAA,EAAc,EAlBL,EAAQ,IAAK,GAAU,CACpC,IAAM,EAAS,CACb,WAAY,EAAM,GAClB,eAAgB,EAAM,WACtB,iBAAkB,EAAM,oBACxB,YAAa,EAAM,gBACnB,GAAI,EACA,CAAE,MAAO,EAAM,KAAM,kBAAmB,EAAM,eAAgB,CAC9D,EAAE,CACN,gBAAiB,GAAG,EAAM,gBAAgB,SAAS,GAAG,EAAM,gBAAgB,OAC5E,wBAAyB,EAAM,OAC/B,mBAAoB,EAAM,WAC1B,wBAAyB,EAAM,qBAC/B,qBAAsB,EAAM,kBAC7B,CAED,MADA,GAAUC,EAAAA,GAAK,CAAC,GAAG,EAAS,GAAG,OAAO,KAAK,EAAO,CAAC,CAAC,CAC7C,GACP,CACgC,EAAQ,OACnC,EAAK,CACZ,EAAA,EAAO,MACL,EAAA,QAAO,IACL,+DAA+D,EAAI,UACpE,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,CAItB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,+DAA+D,EAAK,GACrE,CACF"}
+ {"version":3,"file":"impl-CvNge9fm.cjs","names":["pullUnstructuredSubDataPointRecommendations","buildTranscendGraphQLClient","writeLargeCsv","uniq"],"sources":["../src/commands/inventory/pull-unstructured-discovery-files/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { uniq } from 'lodash-es';\nimport { pullUnstructuredSubDataPointRecommendations } from '../../../lib/data-inventory';\nimport { buildTranscendGraphQLClient } from '../../../lib/graphql';\nimport { logger } from '../../../logger';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { writeLargeCsv } from '../../../lib/helpers';\n\nexport interface PullUnstructuredDiscoveryFilesCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n subCategories?: string[];\n status?: UnstructuredSubDataPointRecommendationStatus[];\n includeEncryptedSnippets: boolean;\n}\n\nexport async function pullUnstructuredDiscoveryFiles(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n dataSiloIds,\n subCategories,\n status,\n includeEncryptedSnippets,\n }: PullUnstructuredDiscoveryFilesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const entries = await pullUnstructuredSubDataPointRecommendations(client, {\n dataSiloIds,\n subCategories, // TODO: https://transcend.height.app/T-40482 - do by name not ID\n status,\n includeEncryptedSnippets,\n });\n\n logger.info(\n colors.magenta(\n `Writing unstructured discovery files to file \"${file}\"...`,\n ),\n );\n let headers: string[] = [];\n const inputs = entries.map((entry) => {\n const result = {\n 'Entry ID': entry.id,\n 'Data Silo ID': entry.dataSiloId,\n 'Object Path ID': entry.scannedObjectPathId,\n 'Object ID': entry.scannedObjectId,\n ...(includeEncryptedSnippets\n ? 
{ Entry: entry.name, 'Context Snippet': entry.contextSnippet }\n : {}),\n 'Data Category': `${entry.dataSubCategory.category}:${entry.dataSubCategory.name}`,\n 'Classification Status': entry.status,\n 'Confidence Score': entry.confidence,\n 'Classification Method': entry.classificationMethod,\n 'Classifier Version': entry.classifierVersion,\n };\n headers = uniq([...headers, ...Object.keys(result)]);\n return result;\n });\n await writeLargeCsv(file, inputs, headers);\n } catch (err) {\n logger.error(\n colors.red(\n `An error occurred syncing the unstructured discovery files: ${err.message}`,\n ),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced unstructured discovery files to disk at ${file}!`,\n ),\n );\n}\n"],"mappings":"yYAoBA,eAAsB,EAEpB,CACE,OACA,OACA,eACA,cACA,gBACA,SACA,4BAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAI,CAIF,IAAM,EAAU,MAAMA,EAAAA,EAFPC,EAAAA,GAA4B,EAAc,EAAK,CAEY,CACxE,cACA,gBACA,SACA,2BACD,CAAC,CAEF,EAAA,EAAO,KACL,EAAA,QAAO,QACL,iDAAiD,EAAK,MACvD,CACF,CACD,IAAI,EAAoB,EAAE,CAmB1B,MAAMC,EAAAA,EAAc,EAlBL,EAAQ,IAAK,GAAU,CACpC,IAAM,EAAS,CACb,WAAY,EAAM,GAClB,eAAgB,EAAM,WACtB,iBAAkB,EAAM,oBACxB,YAAa,EAAM,gBACnB,GAAI,EACA,CAAE,MAAO,EAAM,KAAM,kBAAmB,EAAM,eAAgB,CAC9D,EAAE,CACN,gBAAiB,GAAG,EAAM,gBAAgB,SAAS,GAAG,EAAM,gBAAgB,OAC5E,wBAAyB,EAAM,OAC/B,mBAAoB,EAAM,WAC1B,wBAAyB,EAAM,qBAC/B,qBAAsB,EAAM,kBAC7B,CAED,MADA,GAAUC,EAAAA,GAAK,CAAC,GAAG,EAAS,GAAG,OAAO,KAAK,EAAO,CAAC,CAAC,CAC7C,GACP,CACgC,EAAQ,OACnC,EAAK,CACZ,EAAA,EAAO,MACL,EAAA,QAAO,IACL,+DAA+D,EAAI,UACpE,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,CAItB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,+DAA+D,EAAK,GACrE,CACF"}
@@ -1,2 +1,2 @@
- require(`./constants-wkuhlP8d.cjs`),require(`./syncConfigurationToTranscend-DuTZKIG8.cjs`),require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const e=require(`./manual-enrichment-CzTpv-mM.cjs`),t=require(`./done-input-validation-DGckEJ5a.cjs`);async function n({auth:n,transcendUrl:r,file:i,enricherId:a,concurrency:o,markSilent:s,sombraAuth:c}){t.t(this.process.exit),await e.t({file:i,transcendUrl:r,enricherId:a,concurrency:o,markSilent:s,auth:n,sombraAuth:c})}exports.pushIdentifiers=n;
- //# sourceMappingURL=impl-C4q9xHFr.cjs.map
+ require(`./constants-BvCUdOlO.cjs`),require(`./syncConfigurationToTranscend-BipGaTT0.cjs`),require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const e=require(`./manual-enrichment-D_kDV9gc.cjs`),t=require(`./done-input-validation-DGckEJ5a.cjs`);async function n({auth:n,transcendUrl:r,file:i,enricherId:a,concurrency:o,markSilent:s,sombraAuth:c}){t.t(this.process.exit),await e.t({file:i,transcendUrl:r,enricherId:a,concurrency:o,markSilent:s,auth:n,sombraAuth:c})}exports.pushIdentifiers=n;
+ //# sourceMappingURL=impl-CwX4DDy7.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-C4q9xHFr.cjs","names":["pushManualEnrichmentIdentifiersFromCsv"],"sources":["../src/commands/request/preflight/push-identifiers/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../../context';\nimport { pushManualEnrichmentIdentifiersFromCsv } from '../../../../lib/manual-enrichment';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\n\nexport interface PushIdentifiersCommandFlags {\n auth: string;\n enricherId: string;\n sombraAuth?: string;\n transcendUrl: string;\n file: string;\n markSilent: boolean;\n concurrency: number;\n}\n\nexport async function pushIdentifiers(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n file,\n enricherId,\n concurrency,\n markSilent,\n sombraAuth,\n }: PushIdentifiersCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await pushManualEnrichmentIdentifiersFromCsv({\n file,\n transcendUrl,\n enricherId,\n concurrency,\n markSilent,\n auth,\n sombraAuth,\n });\n}\n"],"mappings":"mTAcA,eAAsB,EAEpB,CACE,OACA,eACA,OACA,aACA,cACA,aACA,cAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAuC,CAC3C,OACA,eACA,aACA,cACA,aACA,OACA,aACD,CAAC"}
+ {"version":3,"file":"impl-CwX4DDy7.cjs","names":["pushManualEnrichmentIdentifiersFromCsv"],"sources":["../src/commands/request/preflight/push-identifiers/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../../context';\nimport { pushManualEnrichmentIdentifiersFromCsv } from '../../../../lib/manual-enrichment';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\n\nexport interface PushIdentifiersCommandFlags {\n auth: string;\n enricherId: string;\n sombraAuth?: string;\n transcendUrl: string;\n file: string;\n markSilent: boolean;\n concurrency: number;\n}\n\nexport async function pushIdentifiers(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n file,\n enricherId,\n concurrency,\n markSilent,\n sombraAuth,\n }: PushIdentifiersCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await pushManualEnrichmentIdentifiersFromCsv({\n file,\n transcendUrl,\n enricherId,\n concurrency,\n markSilent,\n auth,\n sombraAuth,\n });\n}\n"],"mappings":"mTAcA,eAAsB,EAEpB,CACE,OACA,eACA,OACA,aACA,cACA,aACA,cAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAuC,CAC3C,OACA,eACA,aACA,cACA,aACA,OACA,aACD,CAAC"}
@@ -1,2 +1,2 @@
- require(`./constants-wkuhlP8d.cjs`);const e=require(`./syncConfigurationToTranscend-DuTZKIG8.cjs`);require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const t=require(`./done-input-validation-DGckEJ5a.cjs`);async function n({auth:n,file:r,transcendUrl:i,cacheFilepath:a,requestReceiptFolder:o,sombraAuth:s,concurrency:c,attributes:l,isTest:u,isSilent:d,skipSendingReceipt:f,emailIsVerified:p,skipFilterStep:m,dryRun:h,debug:g,defaultPhoneCountryCode:_}){t.t(this.process.exit),await e.H({cacheFilepath:a,requestReceiptFolder:o,file:r,auth:n,sombraAuth:s,concurrency:c,transcendUrl:i,defaultPhoneCountryCode:_,attributes:e.li(l),debug:g,skipFilterStep:m,isSilent:d,skipSendingReceipt:f,emailIsVerified:p,isTest:u,dryRun:h})}exports.upload=n;
- //# sourceMappingURL=impl-7LAuV25D.cjs.map
+ require(`./constants-BvCUdOlO.cjs`);const e=require(`./syncConfigurationToTranscend-BipGaTT0.cjs`);require(`./enums-BZulhPFa.cjs`),require(`./logger-DQwEYtSS.cjs`),require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const t=require(`./done-input-validation-DGckEJ5a.cjs`);async function n({auth:n,file:r,transcendUrl:i,cacheFilepath:a,requestReceiptFolder:o,sombraAuth:s,concurrency:c,attributes:l,isTest:u,isSilent:d,skipSendingReceipt:f,emailIsVerified:p,skipFilterStep:m,dryRun:h,debug:g,defaultPhoneCountryCode:_}){t.t(this.process.exit),await e.H({cacheFilepath:a,requestReceiptFolder:o,file:r,auth:n,sombraAuth:s,concurrency:c,transcendUrl:i,defaultPhoneCountryCode:_,attributes:e.li(l),debug:g,skipFilterStep:m,isSilent:d,skipSendingReceipt:f,emailIsVerified:p,isTest:u,dryRun:h})}exports.upload=n;
+ //# sourceMappingURL=impl-D5YmaznJ.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-7LAuV25D.cjs","names":["uploadPrivacyRequestsFromCsv","splitCsvToList"],"sources":["../src/commands/request/upload/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport {\n splitCsvToList,\n uploadPrivacyRequestsFromCsv,\n} from '../../../lib/requests';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface UploadCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n cacheFilepath: string;\n requestReceiptFolder: string;\n sombraAuth?: string;\n concurrency: number;\n attributes: string;\n isTest: boolean;\n isSilent: boolean;\n skipSendingReceipt: boolean;\n emailIsVerified: boolean;\n skipFilterStep: boolean;\n dryRun: boolean;\n debug: boolean;\n defaultPhoneCountryCode: string;\n}\n\nexport async function upload(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n cacheFilepath,\n requestReceiptFolder,\n sombraAuth,\n concurrency,\n attributes,\n isTest,\n isSilent,\n skipSendingReceipt,\n emailIsVerified,\n skipFilterStep,\n dryRun,\n debug,\n defaultPhoneCountryCode,\n }: UploadCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await uploadPrivacyRequestsFromCsv({\n cacheFilepath,\n requestReceiptFolder,\n file,\n auth,\n sombraAuth,\n concurrency,\n transcendUrl,\n defaultPhoneCountryCode,\n attributes: splitCsvToList(attributes),\n debug,\n skipFilterStep,\n isSilent,\n skipSendingReceipt,\n emailIsVerified,\n isTest,\n dryRun,\n });\n}\n"],"mappings":"6QA0BA,eAAsB,EAEpB,CACE,OACA,OACA,eACA,gBACA,uBACA,aACA,cACA,aACA,SACA,WACA,qBACA,kBACA,iBACA,SACA,QACA,2BAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAA6B,CACjC,gBACA,uBACA,OACA,OACA,aACA,cACA,eACA,0BACA,WAAYC,EAAAA,GAAe,EAAW,CACtC,QACA,iBACA,WACA,qBACA,kBACA,SACA,SACD,CAAC"}
+ {"version":3,"file":"impl-D5YmaznJ.cjs","names":["uploadPrivacyRequestsFromCsv","splitCsvToList"],"sources":["../src/commands/request/upload/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport {\n splitCsvToList,\n uploadPrivacyRequestsFromCsv,\n} from '../../../lib/requests';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface UploadCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n cacheFilepath: string;\n requestReceiptFolder: string;\n sombraAuth?: string;\n concurrency: number;\n attributes: string;\n isTest: boolean;\n isSilent: boolean;\n skipSendingReceipt: boolean;\n emailIsVerified: boolean;\n skipFilterStep: boolean;\n dryRun: boolean;\n debug: boolean;\n defaultPhoneCountryCode: string;\n}\n\nexport async function upload(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n cacheFilepath,\n requestReceiptFolder,\n sombraAuth,\n concurrency,\n attributes,\n isTest,\n isSilent,\n skipSendingReceipt,\n emailIsVerified,\n skipFilterStep,\n dryRun,\n debug,\n defaultPhoneCountryCode,\n }: UploadCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await uploadPrivacyRequestsFromCsv({\n cacheFilepath,\n requestReceiptFolder,\n file,\n auth,\n sombraAuth,\n concurrency,\n transcendUrl,\n defaultPhoneCountryCode,\n attributes: splitCsvToList(attributes),\n debug,\n skipFilterStep,\n isSilent,\n skipSendingReceipt,\n emailIsVerified,\n isTest,\n dryRun,\n });\n}\n"],"mappings":"6QA0BA,eAAsB,EAEpB,CACE,OACA,OACA,eACA,gBACA,uBACA,aACA,cACA,aACA,SACA,WACA,qBACA,kBACA,iBACA,SACA,QACA,2BAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAA6B,CACjC,gBACA,uBACA,OACA,OACA,aACA,cACA,eACA,0BACA,WAAYC,EAAAA,GAAe,EAAW,CACtC,QACA,iBACA,WACA,qBACA,kBACA,SACA,SACD,CAAC"}
@@ -1,2 +1,2 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-wkuhlP8d.cjs`);const t=require(`./syncConfigurationToTranscend-DuTZKIG8.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./cron-DfEGA7Rf.cjs`),i=require(`./done-input-validation-DGckEJ5a.cjs`);let a=require(`colors`);a=e.t(a);async function o({file:e,fileTarget:o,transcendUrl:s,auth:c,sombraAuth:l,cronDataSiloId:u,targetDataSiloId:d,actions:f,skipRequestCount:p,pageLimit:m,chunkSize:h}){p&&n.t.info(a.default.yellow(`Skipping request count as requested. This may help speed up the call.`)),(Number.isNaN(h)||h<=0||h%m!==0)&&(n.t.error(a.default.red(`Invalid chunk size: "${h}". Must be a positive integer that is a multiple of ${m}.`)),this.process.exit(1)),i.t(this.process.exit);let g=t.ti(s,c),{baseName:_,extension:v}=t.c(e),{baseName:y,extension:b}=t.c(o),x=0,S=0,C=0;await r.t({dataSiloId:u,auth:c,sombraAuth:l,actions:f,apiPageSize:m,savePageSize:h,onSave:async r=>{x+=r.length;let i=await t.Ts(t.Ns(t.Ds(r.map(e=>e.requestId)),m),async e=>(n.t.info(a.default.magenta(`Fetching target identifiers for ${e.length} requests`)),(await t.Un(g,m*2,{requestIds:e,dataSiloIds:[d]})).map(({fileName:e,remoteId:t})=>{if(!t)throw Error(`Failed to find remoteId for ${e}`);return{RecordId:t,Object:e.replace(`.json`,``).split(`/`).pop()?.replace(` Information`,``),Comment:`Customer data deletion request submitted via transcend.io`}})),{concurrency:1});S+=i.flat().length;let s=t.Ds(r.map(e=>Object.keys(e)).flat()),c=`${_}-${C}${v}`,l=`${y}-${C}${b}`;await t.d(c,r,s),n.t.info(a.default.green(`Successfully wrote ${r.length} identifiers to file "${e}"`));let u=i.flat();await t.d(l,u,t.Ds(u.map(e=>Object.keys(e)).flat())),n.t.info(a.default.green(`Successfully wrote ${u.length} identifiers to file "${o}"`)),n.t.info(a.default.blue(`Processed chunk of ${t.Ns.length} identifiers, found ${u.length} target identifiers`)),C+=1},transcendUrl:s,skipRequestCount:p}),n.t.info(a.default.green(`Successfully wrote ${x} identifiers to file "${e}"`)),n.t.info(a.default.green(`Successfully wrote ${S} identifiers to file "${o}"`))}exports.pullProfiles=o;
- //# sourceMappingURL=impl-DwWoAbT_.cjs.map
+ const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-BvCUdOlO.cjs`);const t=require(`./syncConfigurationToTranscend-BipGaTT0.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./cron-BO5TvkmX.cjs`),i=require(`./done-input-validation-DGckEJ5a.cjs`);let a=require(`colors`);a=e.t(a);async function o({file:e,fileTarget:o,transcendUrl:s,auth:c,sombraAuth:l,cronDataSiloId:u,targetDataSiloId:d,actions:f,skipRequestCount:p,pageLimit:m,chunkSize:h}){p&&n.t.info(a.default.yellow(`Skipping request count as requested. This may help speed up the call.`)),(Number.isNaN(h)||h<=0||h%m!==0)&&(n.t.error(a.default.red(`Invalid chunk size: "${h}". Must be a positive integer that is a multiple of ${m}.`)),this.process.exit(1)),i.t(this.process.exit);let g=t.ti(s,c),{baseName:_,extension:v}=t.c(e),{baseName:y,extension:b}=t.c(o),x=0,S=0,C=0;await r.t({dataSiloId:u,auth:c,sombraAuth:l,actions:f,apiPageSize:m,savePageSize:h,onSave:async r=>{x+=r.length;let i=await t.Ts(t.Ns(t.Ds(r.map(e=>e.requestId)),m),async e=>(n.t.info(a.default.magenta(`Fetching target identifiers for ${e.length} requests`)),(await t.Un(g,m*2,{requestIds:e,dataSiloIds:[d]})).map(({fileName:e,remoteId:t})=>{if(!t)throw Error(`Failed to find remoteId for ${e}`);return{RecordId:t,Object:e.replace(`.json`,``).split(`/`).pop()?.replace(` Information`,``),Comment:`Customer data deletion request submitted via transcend.io`}})),{concurrency:1});S+=i.flat().length;let s=t.Ds(r.map(e=>Object.keys(e)).flat()),c=`${_}-${C}${v}`,l=`${y}-${C}${b}`;await t.d(c,r,s),n.t.info(a.default.green(`Successfully wrote ${r.length} identifiers to file "${e}"`));let u=i.flat();await t.d(l,u,t.Ds(u.map(e=>Object.keys(e)).flat())),n.t.info(a.default.green(`Successfully wrote ${u.length} identifiers to file "${o}"`)),n.t.info(a.default.blue(`Processed chunk of ${t.Ns.length} identifiers, found ${u.length} target identifiers`)),C+=1},transcendUrl:s,skipRequestCount:p}),n.t.info(a.default.green(`Successfully wrote ${x} identifiers to file "${e}"`)),n.t.info(a.default.green(`Successfully wrote ${S} identifiers to file "${o}"`))}exports.pullProfiles=o;
+ //# sourceMappingURL=impl-DIQtkPMh.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-DwWoAbT_.cjs","names":["buildTranscendGraphQLClient","parseFilePath","map","chunk","uniq","fetchRequestFilesForRequest","writeLargeCsv","pullChunkedCustomSiloOutstandingIdentifiers"],"sources":["../src/commands/request/cron/pull-profiles/impl.ts"],"sourcesContent":["import type { RequestAction } from '@transcend-io/privacy-types';\nimport { logger } from '../../../../logger';\nimport colors from 'colors';\nimport { uniq, chunk } from 'lodash-es';\nimport { map } from '../../../../lib/bluebird';\nimport {\n buildTranscendGraphQLClient,\n fetchRequestFilesForRequest,\n} from '../../../../lib/graphql';\nimport type { LocalContext } from '../../../../context';\nimport {\n pullChunkedCustomSiloOutstandingIdentifiers,\n type CsvFormattedIdentifier,\n} from '../../../../lib/cron';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\nimport { parseFilePath, writeLargeCsv } from '../../../../lib/helpers';\n\nexport interface PullProfilesCommandFlags {\n file: string;\n fileTarget: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n cronDataSiloId: string;\n targetDataSiloId: string;\n actions: RequestAction[];\n skipRequestCount: boolean;\n pageLimit: number;\n chunkSize: number;\n}\n\nexport async function pullProfiles(\n this: LocalContext,\n {\n file,\n fileTarget,\n transcendUrl,\n auth,\n sombraAuth,\n cronDataSiloId,\n targetDataSiloId,\n actions,\n skipRequestCount,\n pageLimit,\n chunkSize,\n }: PullProfilesCommandFlags,\n): Promise<void> {\n if (skipRequestCount) {\n logger.info(\n colors.yellow(\n 'Skipping request count as requested. This may help speed up the call.',\n ),\n );\n }\n\n if (\n Number.isNaN(chunkSize) ||\n chunkSize <= 0 ||\n chunkSize % pageLimit !== 0\n ) {\n logger.error(\n colors.red(\n `Invalid chunk size: \"${chunkSize}\". 
Must be a positive integer that is a multiple of ${pageLimit}.`,\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n // Create GraphQL client to connect to Transcend backend\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const { baseName, extension } = parseFilePath(file);\n const { baseName: baseNameTarget, extension: extensionTarget } =\n parseFilePath(fileTarget);\n\n let allIdentifiersCount = 0;\n let allTargetIdentifiersCount = 0;\n let fileCount = 0;\n // Create onSave callback to handle chunked processing\n const onSave = async (\n chunkToSave: CsvFormattedIdentifier[],\n ): Promise<void> => {\n // Add to all identifiers\n allIdentifiersCount += chunkToSave.length;\n\n // Get unique request IDs from this chunk\n const requestIds = chunkToSave.map((d) => d.requestId as string);\n const uniqueRequestIds = uniq(requestIds);\n\n // Pull down target identifiers for this chunk\n const chunkedRequestIds = chunk(uniqueRequestIds, pageLimit);\n const results = await map(\n chunkedRequestIds,\n async (requestIds) => {\n logger.info(\n colors.magenta(\n `Fetching target identifiers for ${requestIds.length} requests`,\n ),\n );\n const results = await fetchRequestFilesForRequest(\n client,\n pageLimit * 2,\n {\n requestIds,\n dataSiloIds: [targetDataSiloId],\n },\n );\n return results.map(({ fileName, remoteId }) => {\n if (!remoteId) {\n throw new Error(`Failed to find remoteId for ${fileName}`);\n }\n return {\n RecordId: remoteId,\n Object: fileName\n .replace('.json', '')\n .split('/')\n .pop()\n ?.replace(' Information', ''),\n Comment:\n 'Customer data deletion request submitted via transcend.io',\n };\n });\n },\n // We are grabbing all the request files for the 'pageLimit' # of requests at a time\n {\n concurrency: 1,\n },\n );\n\n allTargetIdentifiersCount += results.flat().length;\n\n // Write the identifiers and target identifiers to CSV\n const headers = uniq(chunkToSave.map((d) => Object.keys(d)).flat());\n const numberedFileName = `${baseName}-${fileCount}${extension}`;\n const numberedFileNameTarget = `${baseNameTarget}-${fileCount}${extensionTarget}`;\n await writeLargeCsv(numberedFileName, chunkToSave, headers);\n logger.info(\n colors.green(\n `Successfully wrote ${chunkToSave.length} identifiers to file \"${file}\"`,\n ),\n );\n\n const targetIdentifiers = results.flat();\n const headers2 = uniq(targetIdentifiers.map((d) => Object.keys(d)).flat());\n await writeLargeCsv(numberedFileNameTarget, targetIdentifiers, headers2);\n logger.info(\n colors.green(\n `Successfully wrote ${targetIdentifiers.length} identifiers to file \"${fileTarget}\"`,\n ),\n );\n\n logger.info(\n colors.blue(\n `Processed chunk of ${chunk.length} identifiers, found ${targetIdentifiers.length} target identifiers`,\n ),\n );\n fileCount += 1;\n };\n\n // Pull down outstanding identifiers using the new chunked function\n await pullChunkedCustomSiloOutstandingIdentifiers({\n dataSiloId: cronDataSiloId,\n auth,\n sombraAuth,\n actions,\n apiPageSize: pageLimit,\n savePageSize: chunkSize,\n onSave,\n transcendUrl,\n skipRequestCount,\n });\n\n logger.info(\n colors.green(\n `Successfully wrote ${allIdentifiersCount} identifiers to file \"${file}\"`,\n ),\n );\n logger.info(\n colors.green(\n `Successfully wrote ${allTargetIdentifiersCount} identifiers to file \"${fileTarget}\"`,\n ),\n 
);\n}\n"],"mappings":"+XA+BA,eAAsB,EAEpB,CACE,OACA,aACA,eACA,OACA,aACA,iBACA,mBACA,UACA,mBACA,YACA,aAEa,CACX,GACF,EAAA,EAAO,KACL,EAAA,QAAO,OACL,wEACD,CACF,EAID,OAAO,MAAM,EAAU,EACvB,GAAa,GACb,EAAY,IAAc,KAE1B,EAAA,EAAO,MACL,EAAA,QAAO,IACL,wBAAwB,EAAU,sDAAsD,EAAU,GACnG,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAASA,EAAAA,GAA4B,EAAc,EAAK,CACxD,CAAE,WAAU,aAAcC,EAAAA,EAAc,EAAK,CAC7C,CAAE,SAAU,EAAgB,UAAW,GAC3CA,EAAAA,EAAc,EAAW,CAEvB,EAAsB,EACtB,EAA4B,EAC5B,EAAY,EAmFhB,MAAMM,EAAAA,EAA4C,CAChD,WAAY,EACZ,OACA,aACA,UACA,YAAa,EACb,aAAc,EACd,OAxFa,KACb,IACkB,CAElB,GAAuB,EAAY,OAQnC,IAAM,EAAU,MAAML,EAAAA,GADIC,EAAAA,GAHDC,EAAAA,GADN,EAAY,IAAK,GAAM,EAAE,UAAoB,CACvB,CAGS,EAAU,CAG1D,KAAO,KACL,EAAA,EAAO,KACL,EAAA,QAAO,QACL,mCAAmC,EAAW,OAAO,WACtD,CACF,EACe,MAAMC,EAAAA,GACpB,EACA,EAAY,EACZ,CACE,aACA,YAAa,CAAC,EAAiB,CAChC,CACF,EACc,KAAK,CAAE,WAAU,cAAe,CAC7C,GAAI,CAAC,EACH,MAAU,MAAM,+BAA+B,IAAW,CAE5D,MAAO,CACL,SAAU,EACV,OAAQ,EACL,QAAQ,QAAS,GAAG,CACpB,MAAM,IAAI,CACV,KAAK,EACJ,QAAQ,eAAgB,GAAG,CAC/B,QACE,4DACH,EACD,EAGJ,CACE,YAAa,EACd,CACF,CAED,GAA6B,EAAQ,MAAM,CAAC,OAG5C,IAAM,EAAUD,EAAAA,GAAK,EAAY,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CAC7D,EAAmB,GAAG,EAAS,GAAG,IAAY,IAC9C,EAAyB,GAAG,EAAe,GAAG,IAAY,IAChE,MAAME,EAAAA,EAAc,EAAkB,EAAa,EAAQ,CAC3D,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAAY,OAAO,wBAAwB,EAAK,GACvE,CACF,CAED,IAAM,EAAoB,EAAQ,MAAM,CAExC,MAAMA,EAAAA,EAAc,EAAwB,EAD3BF,EAAAA,GAAK,EAAkB,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CACF,CACxE,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAAkB,OAAO,wBAAwB,EAAW,GACnF,CACF,CAED,EAAA,EAAO,KACL,EAAA,QAAO,KACL,sBAAsBD,EAAAA,GAAM,OAAO,sBAAsB,EAAkB,OAAO,qBACnF,CACF,CACD,GAAa,GAYb,eACA,mBACD,CAAC,CAEF,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAAoB,wBAAwB,EAAK,GACxE,CACF,CACD,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAA0B,wBAAwB,EAAW,GACpF,CACF"}
+ {"version":3,"file":"impl-DIQtkPMh.cjs","names":["buildTranscendGraphQLClient","parseFilePath","map","chunk","uniq","fetchRequestFilesForRequest","writeLargeCsv","pullChunkedCustomSiloOutstandingIdentifiers"],"sources":["../src/commands/request/cron/pull-profiles/impl.ts"],"sourcesContent":["import type { RequestAction } from '@transcend-io/privacy-types';\nimport { logger } from '../../../../logger';\nimport colors from 'colors';\nimport { uniq, chunk } from 'lodash-es';\nimport { map } from '../../../../lib/bluebird';\nimport {\n buildTranscendGraphQLClient,\n fetchRequestFilesForRequest,\n} from '../../../../lib/graphql';\nimport type { LocalContext } from '../../../../context';\nimport {\n pullChunkedCustomSiloOutstandingIdentifiers,\n type CsvFormattedIdentifier,\n} from '../../../../lib/cron';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\nimport { parseFilePath, writeLargeCsv } from '../../../../lib/helpers';\n\nexport interface PullProfilesCommandFlags {\n file: string;\n fileTarget: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n cronDataSiloId: string;\n targetDataSiloId: string;\n actions: RequestAction[];\n skipRequestCount: boolean;\n pageLimit: number;\n chunkSize: number;\n}\n\nexport async function pullProfiles(\n this: LocalContext,\n {\n file,\n fileTarget,\n transcendUrl,\n auth,\n sombraAuth,\n cronDataSiloId,\n targetDataSiloId,\n actions,\n skipRequestCount,\n pageLimit,\n chunkSize,\n }: PullProfilesCommandFlags,\n): Promise<void> {\n if (skipRequestCount) {\n logger.info(\n colors.yellow(\n 'Skipping request count as requested. This may help speed up the call.',\n ),\n );\n }\n\n if (\n Number.isNaN(chunkSize) ||\n chunkSize <= 0 ||\n chunkSize % pageLimit !== 0\n ) {\n logger.error(\n colors.red(\n `Invalid chunk size: \"${chunkSize}\". 
Must be a positive integer that is a multiple of ${pageLimit}.`,\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n // Create GraphQL client to connect to Transcend backend\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const { baseName, extension } = parseFilePath(file);\n const { baseName: baseNameTarget, extension: extensionTarget } =\n parseFilePath(fileTarget);\n\n let allIdentifiersCount = 0;\n let allTargetIdentifiersCount = 0;\n let fileCount = 0;\n // Create onSave callback to handle chunked processing\n const onSave = async (\n chunkToSave: CsvFormattedIdentifier[],\n ): Promise<void> => {\n // Add to all identifiers\n allIdentifiersCount += chunkToSave.length;\n\n // Get unique request IDs from this chunk\n const requestIds = chunkToSave.map((d) => d.requestId as string);\n const uniqueRequestIds = uniq(requestIds);\n\n // Pull down target identifiers for this chunk\n const chunkedRequestIds = chunk(uniqueRequestIds, pageLimit);\n const results = await map(\n chunkedRequestIds,\n async (requestIds) => {\n logger.info(\n colors.magenta(\n `Fetching target identifiers for ${requestIds.length} requests`,\n ),\n );\n const results = await fetchRequestFilesForRequest(\n client,\n pageLimit * 2,\n {\n requestIds,\n dataSiloIds: [targetDataSiloId],\n },\n );\n return results.map(({ fileName, remoteId }) => {\n if (!remoteId) {\n throw new Error(`Failed to find remoteId for ${fileName}`);\n }\n return {\n RecordId: remoteId,\n Object: fileName\n .replace('.json', '')\n .split('/')\n .pop()\n ?.replace(' Information', ''),\n Comment:\n 'Customer data deletion request submitted via transcend.io',\n };\n });\n },\n // We are grabbing all the request files for the 'pageLimit' # of requests at a time\n {\n concurrency: 1,\n },\n );\n\n allTargetIdentifiersCount += results.flat().length;\n\n // Write the identifiers and target identifiers to CSV\n const headers = uniq(chunkToSave.map((d) => Object.keys(d)).flat());\n const numberedFileName = `${baseName}-${fileCount}${extension}`;\n const numberedFileNameTarget = `${baseNameTarget}-${fileCount}${extensionTarget}`;\n await writeLargeCsv(numberedFileName, chunkToSave, headers);\n logger.info(\n colors.green(\n `Successfully wrote ${chunkToSave.length} identifiers to file \"${file}\"`,\n ),\n );\n\n const targetIdentifiers = results.flat();\n const headers2 = uniq(targetIdentifiers.map((d) => Object.keys(d)).flat());\n await writeLargeCsv(numberedFileNameTarget, targetIdentifiers, headers2);\n logger.info(\n colors.green(\n `Successfully wrote ${targetIdentifiers.length} identifiers to file \"${fileTarget}\"`,\n ),\n );\n\n logger.info(\n colors.blue(\n `Processed chunk of ${chunk.length} identifiers, found ${targetIdentifiers.length} target identifiers`,\n ),\n );\n fileCount += 1;\n };\n\n // Pull down outstanding identifiers using the new chunked function\n await pullChunkedCustomSiloOutstandingIdentifiers({\n dataSiloId: cronDataSiloId,\n auth,\n sombraAuth,\n actions,\n apiPageSize: pageLimit,\n savePageSize: chunkSize,\n onSave,\n transcendUrl,\n skipRequestCount,\n });\n\n logger.info(\n colors.green(\n `Successfully wrote ${allIdentifiersCount} identifiers to file \"${file}\"`,\n ),\n );\n logger.info(\n colors.green(\n `Successfully wrote ${allTargetIdentifiersCount} identifiers to file \"${fileTarget}\"`,\n ),\n 
);\n}\n"],"mappings":"+XA+BA,eAAsB,EAEpB,CACE,OACA,aACA,eACA,OACA,aACA,iBACA,mBACA,UACA,mBACA,YACA,aAEa,CACX,GACF,EAAA,EAAO,KACL,EAAA,QAAO,OACL,wEACD,CACF,EAID,OAAO,MAAM,EAAU,EACvB,GAAa,GACb,EAAY,IAAc,KAE1B,EAAA,EAAO,MACL,EAAA,QAAO,IACL,wBAAwB,EAAU,sDAAsD,EAAU,GACnG,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAASA,EAAAA,GAA4B,EAAc,EAAK,CACxD,CAAE,WAAU,aAAcC,EAAAA,EAAc,EAAK,CAC7C,CAAE,SAAU,EAAgB,UAAW,GAC3CA,EAAAA,EAAc,EAAW,CAEvB,EAAsB,EACtB,EAA4B,EAC5B,EAAY,EAmFhB,MAAMM,EAAAA,EAA4C,CAChD,WAAY,EACZ,OACA,aACA,UACA,YAAa,EACb,aAAc,EACd,OAxFa,KACb,IACkB,CAElB,GAAuB,EAAY,OAQnC,IAAM,EAAU,MAAML,EAAAA,GADIC,EAAAA,GAHDC,EAAAA,GADN,EAAY,IAAK,GAAM,EAAE,UAAoB,CACvB,CAGS,EAAU,CAG1D,KAAO,KACL,EAAA,EAAO,KACL,EAAA,QAAO,QACL,mCAAmC,EAAW,OAAO,WACtD,CACF,EACe,MAAMC,EAAAA,GACpB,EACA,EAAY,EACZ,CACE,aACA,YAAa,CAAC,EAAiB,CAChC,CACF,EACc,KAAK,CAAE,WAAU,cAAe,CAC7C,GAAI,CAAC,EACH,MAAU,MAAM,+BAA+B,IAAW,CAE5D,MAAO,CACL,SAAU,EACV,OAAQ,EACL,QAAQ,QAAS,GAAG,CACpB,MAAM,IAAI,CACV,KAAK,EACJ,QAAQ,eAAgB,GAAG,CAC/B,QACE,4DACH,EACD,EAGJ,CACE,YAAa,EACd,CACF,CAED,GAA6B,EAAQ,MAAM,CAAC,OAG5C,IAAM,EAAUD,EAAAA,GAAK,EAAY,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CAC7D,EAAmB,GAAG,EAAS,GAAG,IAAY,IAC9C,EAAyB,GAAG,EAAe,GAAG,IAAY,IAChE,MAAME,EAAAA,EAAc,EAAkB,EAAa,EAAQ,CAC3D,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAAY,OAAO,wBAAwB,EAAK,GACvE,CACF,CAED,IAAM,EAAoB,EAAQ,MAAM,CAExC,MAAMA,EAAAA,EAAc,EAAwB,EAD3BF,EAAAA,GAAK,EAAkB,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CACF,CACxE,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAAkB,OAAO,wBAAwB,EAAW,GACnF,CACF,CAED,EAAA,EAAO,KACL,EAAA,QAAO,KACL,sBAAsBD,EAAAA,GAAM,OAAO,sBAAsB,EAAkB,OAAO,qBACnF,CACF,CACD,GAAa,GAYb,eACA,mBACD,CAAC,CAEF,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAAoB,wBAAwB,EAAK,GACxE,CACF,CACD,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAA0B,wBAAwB,EAAW,GACpF,CACF"}
@@ -1,2 +1,2 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./constants-wkuhlP8d.cjs`),n=require(`./syncConfigurationToTranscend-DuTZKIG8.cjs`);require(`./enums-BZulhPFa.cjs`);const r=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`),require(`./codecs-Bvmb8o9R.cjs`);const i=require(`./readTranscendYaml-Cycz6RxW.cjs`),a=require(`./mergeTranscendInputs-BIBCYbug.cjs`),o=require(`./api-keys-Bb2BbZQe.cjs`),s=require(`./done-input-validation-DGckEJ5a.cjs`);let c=require(`node:fs`),l=require(`node:path`),u=require(`colors`);u=e.t(u);async function d({transcendUrl:e,auth:t,pageSize:i,publishToPrivacyCenter:a,contents:o,deleteExtraAttributeValues:s=!1,classifyService:c=!1}){let l=n.ti(e,t);try{return!await n.t(o,l,{pageSize:i,publishToPrivacyCenter:a,classifyService:c,deleteExtraAttributeValues:s})}catch(e){return r.t.error(u.default.red(`An unexpected error occurred syncing the schema: ${e.message}`)),!1}}async function f({file:e=`./transcend.yml`,transcendUrl:f,auth:p,variables:m,pageSize:h,publishToPrivacyCenter:g,classifyService:_,deleteExtraAttributeValues:v}){s.t(this.process.exit);let y=await o.r(p),b=n.A(m),x;if(x=Array.isArray(y)&&(0,c.lstatSync)(e).isDirectory()?o.n(e).map(t=>(0,l.join)(e,t)):e.split(`,`),x.length<1)throw Error(`No file specified!`);let S=x.map(e=>{(0,c.existsSync)(e)?r.t.info(u.default.magenta(`Reading file "${e}"...`)):(r.t.error(u.default.red(`The file path does not exist on disk: ${e}. You can specify the filepath using --file=./examples/transcend.yml`)),this.process.exit(1));try{let t=i.r(e,b);return r.t.info(u.default.green(`Successfully read in "${e}"`)),{content:t,name:e.split(`/`).pop().replace(`.yml`,``)}}catch(e){r.t.error(u.default.red(`The shape of your yaml file is invalid with the following errors: ${e.message}`)),this.process.exit(1)}});if(typeof y==`string`){let[e,...n]=S.map(({content:e})=>e);await d({transcendUrl:f,auth:y,contents:a.t(e,...n),publishToPrivacyCenter:g,deleteExtraAttributeValues:v,pageSize:h,classifyService:!!_})||(r.t.info(u.default.red(`Sync encountered errors. View output above for more information, or check out ${t.r}`)),this.process.exit(1))}else{if(S.length!==1&&S.length!==y.length)throw Error(`Expected list of yml files to be equal to the list of API keys.Got ${S.length} YML file${S.length===1?``:`s`} and ${y.length} API key${y.length===1?``:`s`}`);let e=[];await n.Es(y,async(t,n)=>{let i=`[${n+1}/${y.length}][${t.organizationName}] `;r.t.info(u.default.magenta(`~~~\n\n${i}Attempting to push configuration...\n\n~~~`));let a=S.length===1?S[0].content:S.find(e=>e.name===t.organizationName)?.content;if(!a){r.t.error(u.default.red(`${i}Failed to find transcend.yml file for organization: "${t.organizationName}".`)),e.push(t.organizationName);return}await d({transcendUrl:f,auth:t.apiKey,contents:a,pageSize:h,publishToPrivacyCenter:g,deleteExtraAttributeValues:v,classifyService:_})?r.t.info(u.default.green(`${i}Successfully pushed configuration!`)):(r.t.error(u.default.red(`${i}Failed to sync configuration.`)),e.push(t.organizationName))}),e.length>0&&(r.t.info(u.default.red(`Sync encountered errors for "${e.join(`,`)}". View output above for more information, or check out ${t.r}`)),this.process.exit(1))}r.t.info(u.default.green(`Successfully synced yaml file to Transcend! View at ${t.r}`))}exports.push=f;
- //# sourceMappingURL=impl-CgKn47V9.cjs.map
+ const e=require(`./chunk-Bmb41Sf3.cjs`),t=require(`./constants-BvCUdOlO.cjs`),n=require(`./syncConfigurationToTranscend-BipGaTT0.cjs`);require(`./enums-BZulhPFa.cjs`);const r=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`),require(`./codecs-Bvmb8o9R.cjs`);const i=require(`./readTranscendYaml-Cycz6RxW.cjs`),a=require(`./mergeTranscendInputs-BIBCYbug.cjs`),o=require(`./api-keys-DKB-2PVX.cjs`),s=require(`./done-input-validation-DGckEJ5a.cjs`);let c=require(`node:fs`),l=require(`node:path`),u=require(`colors`);u=e.t(u);async function d({transcendUrl:e,auth:t,pageSize:i,publishToPrivacyCenter:a,contents:o,deleteExtraAttributeValues:s=!1,classifyService:c=!1}){let l=n.ti(e,t);try{return!await n.t(o,l,{pageSize:i,publishToPrivacyCenter:a,classifyService:c,deleteExtraAttributeValues:s})}catch(e){return r.t.error(u.default.red(`An unexpected error occurred syncing the schema: ${e.message}`)),!1}}async function f({file:e=`./transcend.yml`,transcendUrl:f,auth:p,variables:m,pageSize:h,publishToPrivacyCenter:g,classifyService:_,deleteExtraAttributeValues:v}){s.t(this.process.exit);let y=await o.r(p),b=n.A(m),x;if(x=Array.isArray(y)&&(0,c.lstatSync)(e).isDirectory()?o.n(e).map(t=>(0,l.join)(e,t)):e.split(`,`),x.length<1)throw Error(`No file specified!`);let S=x.map(e=>{(0,c.existsSync)(e)?r.t.info(u.default.magenta(`Reading file "${e}"...`)):(r.t.error(u.default.red(`The file path does not exist on disk: ${e}. You can specify the filepath using --file=./examples/transcend.yml`)),this.process.exit(1));try{let t=i.r(e,b);return r.t.info(u.default.green(`Successfully read in "${e}"`)),{content:t,name:e.split(`/`).pop().replace(`.yml`,``)}}catch(e){r.t.error(u.default.red(`The shape of your yaml file is invalid with the following errors: ${e.message}`)),this.process.exit(1)}});if(typeof y==`string`){let[e,...n]=S.map(({content:e})=>e);await d({transcendUrl:f,auth:y,contents:a.t(e,...n),publishToPrivacyCenter:g,deleteExtraAttributeValues:v,pageSize:h,classifyService:!!_})||(r.t.info(u.default.red(`Sync encountered errors. View output above for more information, or check out ${t.r}`)),this.process.exit(1))}else{if(S.length!==1&&S.length!==y.length)throw Error(`Expected list of yml files to be equal to the list of API keys.Got ${S.length} YML file${S.length===1?``:`s`} and ${y.length} API key${y.length===1?``:`s`}`);let e=[];await n.Es(y,async(t,n)=>{let i=`[${n+1}/${y.length}][${t.organizationName}] `;r.t.info(u.default.magenta(`~~~\n\n${i}Attempting to push configuration...\n\n~~~`));let a=S.length===1?S[0].content:S.find(e=>e.name===t.organizationName)?.content;if(!a){r.t.error(u.default.red(`${i}Failed to find transcend.yml file for organization: "${t.organizationName}".`)),e.push(t.organizationName);return}await d({transcendUrl:f,auth:t.apiKey,contents:a,pageSize:h,publishToPrivacyCenter:g,deleteExtraAttributeValues:v,classifyService:_})?r.t.info(u.default.green(`${i}Successfully pushed configuration!`)):(r.t.error(u.default.red(`${i}Failed to sync configuration.`)),e.push(t.organizationName))}),e.length>0&&(r.t.info(u.default.red(`Sync encountered errors for "${e.join(`,`)}". View output above for more information, or check out ${t.r}`)),this.process.exit(1))}r.t.info(u.default.green(`Successfully synced yaml file to Transcend! View at ${t.r}`))}exports.push=f;
+ //# sourceMappingURL=impl-DN-bpfF3.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-CgKn47V9.cjs","names":["buildTranscendGraphQLClient","syncConfigurationToTranscend","validateTranscendAuth","parseVariablesFromString","listFiles","readTranscendYaml","mergeTranscendInputs","ADMIN_DASH_INTEGRATIONS","mapSeries"],"sources":["../src/commands/inventory/push/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\n\nimport { logger } from '../../../logger';\nimport { mapSeries } from '../../../lib/bluebird';\nimport { existsSync, lstatSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { readTranscendYaml } from '../../../lib/readTranscendYaml';\nimport colors from 'colors';\nimport {\n buildTranscendGraphQLClient,\n syncConfigurationToTranscend,\n} from '../../../lib/graphql';\n\nimport { ADMIN_DASH_INTEGRATIONS } from '../../../constants';\nimport { TranscendInput } from '../../../codecs';\nimport { validateTranscendAuth, listFiles } from '../../../lib/api-keys';\nimport { mergeTranscendInputs } from '../../../lib/mergeTranscendInputs';\nimport { parseVariablesFromString } from '../../../lib/helpers/parseVariablesFromString';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n/**\n * Sync configuration to Transcend\n *\n * @param options - Options\n * @returns True if synced successfully, false if error occurs\n */\nasync function syncConfiguration({\n transcendUrl,\n auth,\n pageSize,\n publishToPrivacyCenter,\n contents,\n deleteExtraAttributeValues = false,\n classifyService = false,\n}: {\n /** Transcend YAML */\n contents: TranscendInput;\n /** Transcend URL */\n transcendUrl: string;\n /** API key */\n auth: string;\n /** Page size */\n pageSize: number;\n /** Skip privacy center publish step */\n publishToPrivacyCenter: boolean;\n /** classify data flow service if missing */\n classifyService?: boolean;\n /** Delete attributes when syncing */\n deleteExtraAttributeValues?: boolean;\n}): Promise<boolean> {\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Sync to Transcend\n try {\n const encounteredError = await syncConfigurationToTranscend(\n contents,\n client,\n {\n pageSize,\n publishToPrivacyCenter,\n classifyService,\n deleteExtraAttributeValues,\n },\n );\n return !encounteredError;\n } catch (err) {\n logger.error(\n colors.red(\n `An unexpected error occurred syncing the schema: ${err.message}`,\n ),\n );\n return false;\n }\n}\n\nexport interface PushCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n pageSize: number;\n variables: string;\n publishToPrivacyCenter: boolean;\n classifyService: boolean;\n deleteExtraAttributeValues: boolean;\n}\n\nexport async function push(\n this: LocalContext,\n {\n file = './transcend.yml',\n transcendUrl,\n auth,\n variables,\n pageSize,\n publishToPrivacyCenter,\n classifyService,\n deleteExtraAttributeValues,\n }: PushCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Parse out the variables\n const vars = parseVariablesFromString(variables);\n\n // check if we are being passed a list of API keys and a list of files\n let fileList: string[];\n if (Array.isArray(apiKeyOrList) && lstatSync(file).isDirectory()) {\n fileList = listFiles(file).map((filePath) => join(file, filePath));\n } else {\n fileList = file.split(',');\n }\n\n // Ensure at least one file is parsed\n if (fileList.length < 1) {\n throw new Error('No file 
specified!');\n }\n\n // eslint-disable-next-line array-callback-return,consistent-return\n const transcendInputs = fileList.map((filePath) => {\n // Ensure yaml file exists on disk\n if (!existsSync(filePath)) {\n logger.error(\n colors.red(\n `The file path does not exist on disk: ${filePath}. You can specify the filepath using --file=./examples/transcend.yml`,\n ),\n );\n this.process.exit(1);\n } else {\n logger.info(colors.magenta(`Reading file \"${filePath}\"...`));\n }\n\n try {\n // Read in the yaml file and validate it's shape\n const newContents = readTranscendYaml(filePath, vars);\n logger.info(colors.green(`Successfully read in \"${filePath}\"`));\n return {\n content: newContents,\n name: filePath.split('/').pop()!.replace('.yml', ''),\n };\n } catch (err) {\n logger.error(\n colors.red(\n `The shape of your yaml file is invalid with the following errors: ${err.message}`,\n ),\n );\n this.process.exit(1);\n }\n });\n\n // process a single API key\n if (typeof apiKeyOrList === 'string') {\n // if passed multiple inputs, merge them together\n const [base, ...rest] = transcendInputs.map(({ content }) => content);\n const contents = mergeTranscendInputs(base, ...rest);\n\n // sync the configuration\n const success = await syncConfiguration({\n transcendUrl,\n auth: apiKeyOrList,\n contents,\n publishToPrivacyCenter,\n deleteExtraAttributeValues,\n pageSize,\n classifyService: !!classifyService,\n });\n\n // exist with error code\n if (!success) {\n logger.info(\n colors.red(\n `Sync encountered errors. View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n } else {\n // if passed multiple inputs, expect them to be one per instance\n if (\n transcendInputs.length !== 1 &&\n transcendInputs.length !== apiKeyOrList.length\n ) {\n throw new Error(\n 'Expected list of yml files to be equal to the list of API keys.' +\n `Got ${transcendInputs.length} YML file${\n transcendInputs.length === 1 ? '' : 's'\n } and ${apiKeyOrList.length} API key${\n apiKeyOrList.length === 1 ? '' : 's'\n }`,\n );\n }\n\n const encounteredErrors: string[] = [];\n await mapSeries(apiKeyOrList, async (apiKey, ind) => {\n const prefix = `[${ind + 1}/${apiKeyOrList.length}][${\n apiKey.organizationName\n }] `;\n logger.info(\n colors.magenta(\n `~~~\\n\\n${prefix}Attempting to push configuration...\\n\\n~~~`,\n ),\n );\n\n // use the merged contents if 1 yml passed, else use the contents that map to that organization\n const useContents =\n transcendInputs.length === 1\n ? 
transcendInputs[0].content\n : transcendInputs.find(\n (input) => input.name === apiKey.organizationName,\n )?.content;\n\n // Throw error if cannot find a yml file matching that organization name\n if (!useContents) {\n logger.error(\n colors.red(\n `${prefix}Failed to find transcend.yml file for organization: \"${apiKey.organizationName}\".`,\n ),\n );\n encounteredErrors.push(apiKey.organizationName);\n return;\n }\n\n const success = await syncConfiguration({\n transcendUrl,\n auth: apiKey.apiKey,\n contents: useContents,\n pageSize,\n publishToPrivacyCenter,\n deleteExtraAttributeValues,\n classifyService,\n });\n\n if (success) {\n logger.info(\n colors.green(`${prefix}Successfully pushed configuration!`),\n );\n } else {\n logger.error(colors.red(`${prefix}Failed to sync configuration.`));\n encounteredErrors.push(apiKey.organizationName);\n }\n });\n\n if (encounteredErrors.length > 0) {\n logger.info(\n colors.red(\n `Sync encountered errors for \"${encounteredErrors.join(\n ',',\n )}\". View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced yaml file to Transcend! View at ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n}\n"],"mappings":"2iBA0BA,eAAe,EAAkB,CAC/B,eACA,OACA,WACA,yBACA,WACA,6BAA6B,GAC7B,kBAAkB,IAgBC,CACnB,IAAM,EAASA,EAAAA,GAA4B,EAAc,EAAK,CAG9D,GAAI,CAWF,MAAO,CAVkB,MAAMC,EAAAA,EAC7B,EACA,EACA,CACE,WACA,yBACA,kBACA,6BACD,CACF,OAEM,EAAK,CAMZ,OALA,EAAA,EAAO,MACL,EAAA,QAAO,IACL,oDAAoD,EAAI,UACzD,CACF,CACM,IAeX,eAAsB,EAEpB,CACE,OAAO,kBACP,eACA,OACA,YACA,WACA,yBACA,kBACA,8BAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAAe,MAAMC,EAAAA,EAAsB,EAAK,CAGhD,EAAOC,EAAAA,EAAyB,EAAU,CAG5C,EAQJ,GAPA,AAGE,EAHE,MAAM,QAAQ,EAAa,GAAA,EAAA,EAAA,WAAc,EAAK,CAAC,aAAa,CACnDC,EAAAA,EAAU,EAAK,CAAC,IAAK,IAAA,EAAA,EAAA,MAAkB,EAAM,EAAS,CAAC,CAEvD,EAAK,MAAM,IAAI,CAIxB,EAAS,OAAS,EACpB,MAAU,MAAM,qBAAqB,CAIvC,IAAM,EAAkB,EAAS,IAAK,GAAa,EAE7C,EAAA,EAAA,YAAY,EAAS,CAQvB,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,iBAAiB,EAAS,MAAM,CAAC,EAP5D,EAAA,EAAO,MACL,EAAA,QAAO,IACL,yCAAyC,EAAS,sEACnD,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAKtB,GAAI,CAEF,IAAM,EAAcC,EAAAA,EAAkB,EAAU,EAAK,CAErD,OADA,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,yBAAyB,EAAS,GAAG,CAAC,CACxD,CACL,QAAS,EACT,KAAM,EAAS,MAAM,IAAI,CAAC,KAAK,CAAE,QAAQ,OAAQ,GAAG,CACrD,OACM,EAAK,CACZ,EAAA,EAAO,MACL,EAAA,QAAO,IACL,qEAAqE,EAAI,UAC1E,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,GAEtB,CAGF,GAAI,OAAO,GAAiB,SAAU,CAEpC,GAAM,CAAC,EAAM,GAAG,GAAQ,EAAgB,KAAK,CAAE,aAAc,EAAQ,CAIrD,MAAM,EAAkB,CACtC,eACA,KAAM,EACN,SANeC,EAAAA,EAAqB,EAAM,GAAG,EAAK,CAOlD,yBACA,6BACA,WACA,gBAAiB,CAAC,CAAC,EACpB,CAAC,GAIA,EAAA,EAAO,KACL,EAAA,QAAO,IACL,iFAAiFC,EAAAA,IAClF,CACF,CAED,KAAK,QAAQ,KAAK,EAAE,MAEjB,CAEL,GACE,EAAgB,SAAW,GAC3B,EAAgB,SAAW,EAAa,OAExC,MAAU,MACR,sEACS,EAAgB,OAAO,WAC5B,EAAgB,SAAW,EAAI,GAAK,IACrC,OAAO,EAAa,OAAO,UAC1B,EAAa,SAAW,EAAI,GAAK,MAEtC,CAGH,IAAM,EAA8B,EAAE,CACtC,MAAMC,EAAAA,GAAU,EAAc,MAAO,EAAQ,IAAQ,CACnD,IAAM,EAAS,IAAI,EAAM,EAAE,GAAG,EAAa,OAAO,IAChD,EAAO,iBACR,IACD,EAAA,EAAO,KACL,EAAA,QAAO,QACL,UAAU,EAAO,4CAClB,CACF,CAGD,IAAM,EACJ,EAAgB,SAAW,EACvB,EAAgB,GAAG,QACnB,EAAgB,KACb,GAAU,EAAM,OAAS,EAAO,iBAClC,EAAE,QAGT,GAAI,CAAC,EAAa,CAChB,EAAA,EAAO,MACL,EAAA,QAAO,IACL,GAAG,EAAO,uDAAuD,EAAO,iBAAiB,IAC1F,CACF,CACD,EAAkB,KAAK,EAAO,iBAAiB,CAC/C,OAGc,MAAM,EAAkB,CACtC,eACA,KAAM,EAAO,OACb,SAAU,EACV,WACA,yBACA,6BACA,kBACD,CAAC,CAGA,EAAA,EAAO,KACL,EAAA,QAAO,MAAM,GAAG,EAAO,oCAAoC,CAC5D,EAED,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,GAAG,EAAO,+BAA+B,CAAC,CAClE,EAAkB,KAAK,EAAO,iBAAiB,GAEjD,CAEE,EAAkB,OAAS
,IAC7B,EAAA,EAAO,KACL,EAAA,QAAO,IACL,gCAAgC,EAAkB,KAChD,IACD,CAAC,0DAA0DD,EAAAA,IAC7D,CACF,CAED,KAAK,QAAQ,KAAK,EAAE,EAKxB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,uDAAuDA,EAAAA,IACxD,CACF"}
+ {"version":3,"file":"impl-DN-bpfF3.cjs","names":["buildTranscendGraphQLClient","syncConfigurationToTranscend","validateTranscendAuth","parseVariablesFromString","listFiles","readTranscendYaml","mergeTranscendInputs","ADMIN_DASH_INTEGRATIONS","mapSeries"],"sources":["../src/commands/inventory/push/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\n\nimport { logger } from '../../../logger';\nimport { mapSeries } from '../../../lib/bluebird';\nimport { existsSync, lstatSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { readTranscendYaml } from '../../../lib/readTranscendYaml';\nimport colors from 'colors';\nimport {\n buildTranscendGraphQLClient,\n syncConfigurationToTranscend,\n} from '../../../lib/graphql';\n\nimport { ADMIN_DASH_INTEGRATIONS } from '../../../constants';\nimport { TranscendInput } from '../../../codecs';\nimport { validateTranscendAuth, listFiles } from '../../../lib/api-keys';\nimport { mergeTranscendInputs } from '../../../lib/mergeTranscendInputs';\nimport { parseVariablesFromString } from '../../../lib/helpers/parseVariablesFromString';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n/**\n * Sync configuration to Transcend\n *\n * @param options - Options\n * @returns True if synced successfully, false if error occurs\n */\nasync function syncConfiguration({\n transcendUrl,\n auth,\n pageSize,\n publishToPrivacyCenter,\n contents,\n deleteExtraAttributeValues = false,\n classifyService = false,\n}: {\n /** Transcend YAML */\n contents: TranscendInput;\n /** Transcend URL */\n transcendUrl: string;\n /** API key */\n auth: string;\n /** Page size */\n pageSize: number;\n /** Skip privacy center publish step */\n publishToPrivacyCenter: boolean;\n /** classify data flow service if missing */\n classifyService?: boolean;\n /** Delete attributes when syncing */\n deleteExtraAttributeValues?: boolean;\n}): Promise<boolean> {\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Sync to Transcend\n try {\n const encounteredError = await syncConfigurationToTranscend(\n contents,\n client,\n {\n pageSize,\n publishToPrivacyCenter,\n classifyService,\n deleteExtraAttributeValues,\n },\n );\n return !encounteredError;\n } catch (err) {\n logger.error(\n colors.red(\n `An unexpected error occurred syncing the schema: ${err.message}`,\n ),\n );\n return false;\n }\n}\n\nexport interface PushCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n pageSize: number;\n variables: string;\n publishToPrivacyCenter: boolean;\n classifyService: boolean;\n deleteExtraAttributeValues: boolean;\n}\n\nexport async function push(\n this: LocalContext,\n {\n file = './transcend.yml',\n transcendUrl,\n auth,\n variables,\n pageSize,\n publishToPrivacyCenter,\n classifyService,\n deleteExtraAttributeValues,\n }: PushCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Parse out the variables\n const vars = parseVariablesFromString(variables);\n\n // check if we are being passed a list of API keys and a list of files\n let fileList: string[];\n if (Array.isArray(apiKeyOrList) && lstatSync(file).isDirectory()) {\n fileList = listFiles(file).map((filePath) => join(file, filePath));\n } else {\n fileList = file.split(',');\n }\n\n // Ensure at least one file is parsed\n if (fileList.length < 1) {\n throw new Error('No file 
specified!');\n }\n\n // eslint-disable-next-line array-callback-return,consistent-return\n const transcendInputs = fileList.map((filePath) => {\n // Ensure yaml file exists on disk\n if (!existsSync(filePath)) {\n logger.error(\n colors.red(\n `The file path does not exist on disk: ${filePath}. You can specify the filepath using --file=./examples/transcend.yml`,\n ),\n );\n this.process.exit(1);\n } else {\n logger.info(colors.magenta(`Reading file \"${filePath}\"...`));\n }\n\n try {\n // Read in the yaml file and validate it's shape\n const newContents = readTranscendYaml(filePath, vars);\n logger.info(colors.green(`Successfully read in \"${filePath}\"`));\n return {\n content: newContents,\n name: filePath.split('/').pop()!.replace('.yml', ''),\n };\n } catch (err) {\n logger.error(\n colors.red(\n `The shape of your yaml file is invalid with the following errors: ${err.message}`,\n ),\n );\n this.process.exit(1);\n }\n });\n\n // process a single API key\n if (typeof apiKeyOrList === 'string') {\n // if passed multiple inputs, merge them together\n const [base, ...rest] = transcendInputs.map(({ content }) => content);\n const contents = mergeTranscendInputs(base, ...rest);\n\n // sync the configuration\n const success = await syncConfiguration({\n transcendUrl,\n auth: apiKeyOrList,\n contents,\n publishToPrivacyCenter,\n deleteExtraAttributeValues,\n pageSize,\n classifyService: !!classifyService,\n });\n\n // exist with error code\n if (!success) {\n logger.info(\n colors.red(\n `Sync encountered errors. View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n } else {\n // if passed multiple inputs, expect them to be one per instance\n if (\n transcendInputs.length !== 1 &&\n transcendInputs.length !== apiKeyOrList.length\n ) {\n throw new Error(\n 'Expected list of yml files to be equal to the list of API keys.' +\n `Got ${transcendInputs.length} YML file${\n transcendInputs.length === 1 ? '' : 's'\n } and ${apiKeyOrList.length} API key${\n apiKeyOrList.length === 1 ? '' : 's'\n }`,\n );\n }\n\n const encounteredErrors: string[] = [];\n await mapSeries(apiKeyOrList, async (apiKey, ind) => {\n const prefix = `[${ind + 1}/${apiKeyOrList.length}][${\n apiKey.organizationName\n }] `;\n logger.info(\n colors.magenta(\n `~~~\\n\\n${prefix}Attempting to push configuration...\\n\\n~~~`,\n ),\n );\n\n // use the merged contents if 1 yml passed, else use the contents that map to that organization\n const useContents =\n transcendInputs.length === 1\n ? 
transcendInputs[0].content\n : transcendInputs.find(\n (input) => input.name === apiKey.organizationName,\n )?.content;\n\n // Throw error if cannot find a yml file matching that organization name\n if (!useContents) {\n logger.error(\n colors.red(\n `${prefix}Failed to find transcend.yml file for organization: \"${apiKey.organizationName}\".`,\n ),\n );\n encounteredErrors.push(apiKey.organizationName);\n return;\n }\n\n const success = await syncConfiguration({\n transcendUrl,\n auth: apiKey.apiKey,\n contents: useContents,\n pageSize,\n publishToPrivacyCenter,\n deleteExtraAttributeValues,\n classifyService,\n });\n\n if (success) {\n logger.info(\n colors.green(`${prefix}Successfully pushed configuration!`),\n );\n } else {\n logger.error(colors.red(`${prefix}Failed to sync configuration.`));\n encounteredErrors.push(apiKey.organizationName);\n }\n });\n\n if (encounteredErrors.length > 0) {\n logger.info(\n colors.red(\n `Sync encountered errors for \"${encounteredErrors.join(\n ',',\n )}\". View output above for more information, or check out ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n\n this.process.exit(1);\n }\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced yaml file to Transcend! View at ${ADMIN_DASH_INTEGRATIONS}`,\n ),\n );\n}\n"],"mappings":"2iBA0BA,eAAe,EAAkB,CAC/B,eACA,OACA,WACA,yBACA,WACA,6BAA6B,GAC7B,kBAAkB,IAgBC,CACnB,IAAM,EAASA,EAAAA,GAA4B,EAAc,EAAK,CAG9D,GAAI,CAWF,MAAO,CAVkB,MAAMC,EAAAA,EAC7B,EACA,EACA,CACE,WACA,yBACA,kBACA,6BACD,CACF,OAEM,EAAK,CAMZ,OALA,EAAA,EAAO,MACL,EAAA,QAAO,IACL,oDAAoD,EAAI,UACzD,CACF,CACM,IAeX,eAAsB,EAEpB,CACE,OAAO,kBACP,eACA,OACA,YACA,WACA,yBACA,kBACA,8BAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAAe,MAAMC,EAAAA,EAAsB,EAAK,CAGhD,EAAOC,EAAAA,EAAyB,EAAU,CAG5C,EAQJ,GAPA,AAGE,EAHE,MAAM,QAAQ,EAAa,GAAA,EAAA,EAAA,WAAc,EAAK,CAAC,aAAa,CACnDC,EAAAA,EAAU,EAAK,CAAC,IAAK,IAAA,EAAA,EAAA,MAAkB,EAAM,EAAS,CAAC,CAEvD,EAAK,MAAM,IAAI,CAIxB,EAAS,OAAS,EACpB,MAAU,MAAM,qBAAqB,CAIvC,IAAM,EAAkB,EAAS,IAAK,GAAa,EAE7C,EAAA,EAAA,YAAY,EAAS,CAQvB,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,iBAAiB,EAAS,MAAM,CAAC,EAP5D,EAAA,EAAO,MACL,EAAA,QAAO,IACL,yCAAyC,EAAS,sEACnD,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAKtB,GAAI,CAEF,IAAM,EAAcC,EAAAA,EAAkB,EAAU,EAAK,CAErD,OADA,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,yBAAyB,EAAS,GAAG,CAAC,CACxD,CACL,QAAS,EACT,KAAM,EAAS,MAAM,IAAI,CAAC,KAAK,CAAE,QAAQ,OAAQ,GAAG,CACrD,OACM,EAAK,CACZ,EAAA,EAAO,MACL,EAAA,QAAO,IACL,qEAAqE,EAAI,UAC1E,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,GAEtB,CAGF,GAAI,OAAO,GAAiB,SAAU,CAEpC,GAAM,CAAC,EAAM,GAAG,GAAQ,EAAgB,KAAK,CAAE,aAAc,EAAQ,CAIrD,MAAM,EAAkB,CACtC,eACA,KAAM,EACN,SANeC,EAAAA,EAAqB,EAAM,GAAG,EAAK,CAOlD,yBACA,6BACA,WACA,gBAAiB,CAAC,CAAC,EACpB,CAAC,GAIA,EAAA,EAAO,KACL,EAAA,QAAO,IACL,iFAAiFC,EAAAA,IAClF,CACF,CAED,KAAK,QAAQ,KAAK,EAAE,MAEjB,CAEL,GACE,EAAgB,SAAW,GAC3B,EAAgB,SAAW,EAAa,OAExC,MAAU,MACR,sEACS,EAAgB,OAAO,WAC5B,EAAgB,SAAW,EAAI,GAAK,IACrC,OAAO,EAAa,OAAO,UAC1B,EAAa,SAAW,EAAI,GAAK,MAEtC,CAGH,IAAM,EAA8B,EAAE,CACtC,MAAMC,EAAAA,GAAU,EAAc,MAAO,EAAQ,IAAQ,CACnD,IAAM,EAAS,IAAI,EAAM,EAAE,GAAG,EAAa,OAAO,IAChD,EAAO,iBACR,IACD,EAAA,EAAO,KACL,EAAA,QAAO,QACL,UAAU,EAAO,4CAClB,CACF,CAGD,IAAM,EACJ,EAAgB,SAAW,EACvB,EAAgB,GAAG,QACnB,EAAgB,KACb,GAAU,EAAM,OAAS,EAAO,iBAClC,EAAE,QAGT,GAAI,CAAC,EAAa,CAChB,EAAA,EAAO,MACL,EAAA,QAAO,IACL,GAAG,EAAO,uDAAuD,EAAO,iBAAiB,IAC1F,CACF,CACD,EAAkB,KAAK,EAAO,iBAAiB,CAC/C,OAGc,MAAM,EAAkB,CACtC,eACA,KAAM,EAAO,OACb,SAAU,EACV,WACA,yBACA,6BACA,kBACD,CAAC,CAGA,EAAA,EAAO,KACL,EAAA,QAAO,MAAM,GAAG,EAAO,oCAAoC,CAC5D,EAED,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,GAAG,EAAO,+BAA+B,CAAC,CAClE,EAAkB,KAAK,EAAO,iBAAiB,GAEjD,CAEE,EAAkB,OAAS
,IAC7B,EAAA,EAAO,KACL,EAAA,QAAO,IACL,gCAAgC,EAAkB,KAChD,IACD,CAAC,0DAA0DD,EAAAA,IAC7D,CACF,CAED,KAAK,QAAQ,KAAK,EAAE,EAKxB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,uDAAuDA,EAAAA,IACxD,CACF"}
@@ -1,2 +1,2 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-wkuhlP8d.cjs`);const t=require(`./syncConfigurationToTranscend-DuTZKIG8.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./done-input-validation-DGckEJ5a.cjs`),i=require(`./preference-management-aOhuZCuE.cjs`);let a=require(`colors`);a=e.t(a);const o=[`userId`,`timestamp`,`partition`,`decryptionStatus`,`updatedAt`,`usp`,`gpp`,`tcf`,`airgapVersion`,`metadata`,`metadataTimestamp`];async function s({auth:e,partition:s,sombraAuth:c,file:l,transcendUrl:u,timestampBefore:d,timestampAfter:f,updatedBefore:p,updatedAfter:m,identifiers:h=[],concurrency:g,shouldChunk:_,windowConcurrency:v,maxChunks:y,exportIdentifiersWithDelimiter:b,maxLookbackDays:x}){r.t(this.process.exit);let S=await t.ei(u,e,c),C=t.ti(u,e),w=h.map(e=>{if(!e.includes(`:`))return{name:`email`,value:e};let[t,n]=e.split(`:`);return{name:t,value:n}}),T={...d?{timestampBefore:d.toISOString()}:{},...f?{timestampAfter:f.toISOString()}:{},...m||p?{system:{...p?{updatedBefore:p.toISOString()}:{},...m?{updatedAfter:m.toISOString()}:{}}}:{},...w.length>0?{identifiers:w}:{}};n.t.info(`Fetching consent preferences from partition ${s}, using mode=${_?`chunked-stream`:`paged-stream`}...`),n.t.info(a.default.magenta(`Preparing CSV at: ${l}`));let[E,D]=await Promise.all([t._r(C),t.n(C)]),O=D.map(e=>e.name),k=Array.from(new Set(E.flatMap(e=>e.topics?.map(t=>`${e.trackingType}_${t.slug}`)??[]))).sort((e,t)=>e.localeCompare(t)),A=Array.from(new Set(E.map(e=>e.trackingType))).sort((e,t)=>e.localeCompare(t)),j=[...o,...O,...A,...k],M=null,N=!1,P=e=>{if(!e||e.length===0)return;let n=e.map(e=>i.i(e,b));if(!N){let e=Object.keys(n[0]??{}),r=new Set;M=[...j,...e].filter(e=>e===void 0||r.has(e)?!1:(r.add(e),!0)),t.s(l,M),N=!0}t.a(l,n,M)};if(_){await i.r(S,{partition:s,filterBy:T,limit:g,windowConcurrency:v,maxChunks:y,maxLookbackDays:x,onItems:e=>P(e)}),n.t.info(a.default.green(`Finished writing CSV to ${l}`));return}await i.n(S,{partition:s,filterBy:T,limit:g,onItems:e=>P(e)}),n.t.info(a.default.green(`Finished writing CSV to ${l}`))}exports.pullConsentPreferences=s;
- //# sourceMappingURL=impl-DQ8rr7Fv.cjs.map
+ const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-BvCUdOlO.cjs`);const t=require(`./syncConfigurationToTranscend-BipGaTT0.cjs`);require(`./enums-BZulhPFa.cjs`);const n=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`);const r=require(`./done-input-validation-DGckEJ5a.cjs`),i=require(`./preference-management-CE64qiAB.cjs`);let a=require(`colors`);a=e.t(a);const o=[`userId`,`timestamp`,`partition`,`decryptionStatus`,`updatedAt`,`usp`,`gpp`,`tcf`,`airgapVersion`,`metadata`,`metadataTimestamp`];async function s({auth:e,partition:s,sombraAuth:c,file:l,transcendUrl:u,timestampBefore:d,timestampAfter:f,updatedBefore:p,updatedAfter:m,identifiers:h=[],concurrency:g,shouldChunk:_,windowConcurrency:v,maxChunks:y,exportIdentifiersWithDelimiter:b,maxLookbackDays:x}){r.t(this.process.exit);let S=await t.ei(u,e,c),C=t.ti(u,e),w=h.map(e=>{if(!e.includes(`:`))return{name:`email`,value:e};let[t,n]=e.split(`:`);return{name:t,value:n}}),T={...d?{timestampBefore:d.toISOString()}:{},...f?{timestampAfter:f.toISOString()}:{},...m||p?{system:{...p?{updatedBefore:p.toISOString()}:{},...m?{updatedAfter:m.toISOString()}:{}}}:{},...w.length>0?{identifiers:w}:{}};n.t.info(`Fetching consent preferences from partition ${s}, using mode=${_?`chunked-stream`:`paged-stream`}...`),n.t.info(a.default.magenta(`Preparing CSV at: ${l}`));let[E,D]=await Promise.all([t._r(C),t.n(C)]),O=D.map(e=>e.name),k=Array.from(new Set(E.flatMap(e=>e.topics?.map(t=>`${e.trackingType}_${t.slug}`)??[]))).sort((e,t)=>e.localeCompare(t)),A=Array.from(new Set(E.map(e=>e.trackingType))).sort((e,t)=>e.localeCompare(t)),j=[...o,...O,...A,...k],M=null,N=!1,P=e=>{if(!e||e.length===0)return;let n=e.map(e=>i.i(e,b));if(!N){let e=Object.keys(n[0]??{}),r=new Set;M=[...j,...e].filter(e=>e===void 0||r.has(e)?!1:(r.add(e),!0)),t.s(l,M),N=!0}t.a(l,n,M)};if(_){await i.r(S,{partition:s,filterBy:T,limit:g,windowConcurrency:v,maxChunks:y,maxLookbackDays:x,onItems:e=>P(e)}),n.t.info(a.default.green(`Finished writing CSV to ${l}`));return}await i.n(S,{partition:s,filterBy:T,limit:g,onItems:e=>P(e)}),n.t.info(a.default.green(`Finished writing CSV to ${l}`))}exports.pullConsentPreferences=s;
+ //# sourceMappingURL=impl-DSnwLiWc.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-DQ8rr7Fv.cjs","names":["createSombraGotInstance","buildTranscendGraphQLClient","fetchAllPurposesAndPreferences","fetchAllIdentifiers","transformPreferenceRecordToCsv","fetchConsentPreferencesChunked","fetchConsentPreferences"],"sources":["../src/commands/consent/pull-consent-preferences/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport colors from 'colors';\n\nimport {\n fetchConsentPreferences,\n fetchConsentPreferencesChunked,\n transformPreferenceRecordToCsv,\n type PreferenceIdentifier,\n} from '../../../lib/preference-management';\nimport {\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchAllIdentifiers,\n fetchAllPurposesAndPreferences,\n} from '../../../lib/graphql';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { logger } from '../../../logger';\nimport { initCsvFile, appendCsvRowsOrdered } from '../../../lib/helpers';\nimport type { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\n\n// Known “core” columns your transformer usually produces up front.\n// Leave this list conservative; we’ll still union with transformer keys.\nconst CORE_COLS = [\n 'userId',\n 'timestamp',\n 'partition',\n 'decryptionStatus',\n 'updatedAt',\n 'usp',\n 'gpp',\n 'tcf',\n 'airgapVersion',\n 'metadata',\n 'metadataTimestamp',\n];\n\nexport interface PullConsentPreferencesCommandFlags {\n auth: string;\n partition: string;\n sombraAuth?: string;\n file: string;\n transcendUrl: string;\n timestampBefore?: Date;\n exportIdentifiersWithDelimiter: string;\n timestampAfter?: Date;\n updatedBefore?: Date;\n updatedAfter?: Date;\n identifiers?: string[];\n concurrency: number;\n shouldChunk: boolean;\n windowConcurrency: number;\n maxChunks: number;\n maxLookbackDays: number;\n}\n\nexport async function pullConsentPreferences(\n this: LocalContext,\n {\n auth,\n partition,\n sombraAuth,\n file,\n transcendUrl,\n timestampBefore,\n timestampAfter,\n updatedBefore,\n updatedAfter,\n identifiers = [],\n concurrency,\n shouldChunk,\n windowConcurrency,\n maxChunks,\n exportIdentifiersWithDelimiter,\n maxLookbackDays,\n }: PullConsentPreferencesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Identifiers are key:value, parse to PreferenceIdentifier[]\n const parsedIdentifiers = identifiers.map(\n (identifier): PreferenceIdentifier => {\n if (!identifier.includes(':')) {\n return {\n name: 'email',\n value: identifier,\n };\n }\n const [name, value] = identifier.split(':');\n return { name, value };\n },\n );\n\n // Build filter\n const filterBy = {\n ...(timestampBefore\n ? { timestampBefore: timestampBefore.toISOString() }\n : {}),\n ...(timestampAfter ? { timestampAfter: timestampAfter.toISOString() } : {}),\n ...(updatedAfter || updatedBefore\n ? {\n system: {\n ...(updatedBefore\n ? { updatedBefore: updatedBefore.toISOString() }\n : {}),\n ...(updatedAfter\n ? { updatedAfter: updatedAfter.toISOString() }\n : {}),\n },\n }\n : {}),\n ...(parsedIdentifiers.length > 0 ? { identifiers: parsedIdentifiers } : {}),\n };\n\n logger.info(\n `Fetching consent preferences from partition ${partition}, using mode=${\n shouldChunk ? 
'chunked-stream' : 'paged-stream'\n }...`,\n );\n\n logger.info(colors.magenta(`Preparing CSV at: ${file}`));\n\n // Fetch full sets (purposes+topics, identifiers) to ensure header completeness\n const [purposesWithTopics, allIdentifiers] = await Promise.all([\n fetchAllPurposesAndPreferences(client),\n fetchAllIdentifiers(client),\n ]);\n\n // Identifier columns: exactly the identifier names\n const identifierCols = allIdentifiers.map((i) => i.name);\n\n // Preference topic columns: topic names (de-duped)\n const topicCols = Array.from(\n new Set(\n purposesWithTopics.flatMap(\n (p) => p.topics?.map((t) => `${p.trackingType}_${t.slug}`) ?? [],\n ),\n ),\n ).sort((a, b) => a.localeCompare(b));\n\n // Some setups also want a per-purpose boolean column (e.g., “Email”, “Sms”).\n // If your transformer includes those, list them here, derived from purposes:\n const purposeCols = Array.from(\n new Set(purposesWithTopics.map((p) => p.trackingType)),\n ).sort((a, b) => a.localeCompare(b));\n\n // Build the complete header skeleton.\n // We’ll still union with the first transformed row’s keys to be safe.\n const completeHeadersList = [\n ...CORE_COLS,\n ...identifierCols,\n ...purposeCols,\n ...topicCols,\n ];\n\n // Lazily initialize CSV header order from the first transformed row.\n let headerOrder: string[] | null = null;\n let wroteHeader = false;\n const writeRows = (items: PreferenceQueryResponseItem[]): void => {\n if (!items || items.length === 0) return;\n const rows = items.map((row) =>\n transformPreferenceRecordToCsv(row, exportIdentifiersWithDelimiter),\n );\n if (!wroteHeader) {\n const firstKeys = Object.keys(rows[0] ?? {});\n const seen = new Set<string>();\n headerOrder = [...completeHeadersList, ...firstKeys].filter((k) => {\n if (k === undefined) return false;\n if (seen.has(k)) return false;\n seen.add(k);\n return true;\n });\n initCsvFile(file, headerOrder);\n wroteHeader = true;\n }\n appendCsvRowsOrdered(file, rows, headerOrder!);\n };\n\n if (shouldChunk) {\n // Stream via chunked fetcher with page callback\n await fetchConsentPreferencesChunked(sombra, {\n partition,\n filterBy,\n limit: concurrency,\n windowConcurrency,\n maxChunks,\n maxLookbackDays,\n onItems: (items) => writeRows(items),\n });\n\n logger.info(colors.green(`Finished writing CSV to ${file}`));\n return;\n }\n\n // Non-chunked path: still stream page-by-page via onItems (no in-memory accumulation)\n await fetchConsentPreferences(sombra, {\n partition,\n filterBy,\n limit: concurrency, // page size (API max 50 enforced internally)\n onItems: (items) => writeRows(items),\n });\n\n logger.info(colors.green(`Finished writing CSV to 
${file}`));\n}\n"],"mappings":"gZAsBA,MAAM,EAAY,CAChB,SACA,YACA,YACA,mBACA,YACA,MACA,MACA,MACA,gBACA,WACA,oBACD,CAqBD,eAAsB,EAEpB,CACE,OACA,YACA,aACA,OACA,eACA,kBACA,iBACA,gBACA,eACA,cAAc,EAAE,CAChB,cACA,cACA,oBACA,YACA,iCACA,mBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAAS,MAAMA,EAAAA,GAAwB,EAAc,EAAM,EAAW,CACtE,EAASC,EAAAA,GAA4B,EAAc,EAAK,CAGxD,EAAoB,EAAY,IACnC,GAAqC,CACpC,GAAI,CAAC,EAAW,SAAS,IAAI,CAC3B,MAAO,CACL,KAAM,QACN,MAAO,EACR,CAEH,GAAM,CAAC,EAAM,GAAS,EAAW,MAAM,IAAI,CAC3C,MAAO,CAAE,OAAM,QAAO,EAEzB,CAGK,EAAW,CACf,GAAI,EACA,CAAE,gBAAiB,EAAgB,aAAa,CAAE,CAClD,EAAE,CACN,GAAI,EAAiB,CAAE,eAAgB,EAAe,aAAa,CAAE,CAAG,EAAE,CAC1E,GAAI,GAAgB,EAChB,CACE,OAAQ,CACN,GAAI,EACA,CAAE,cAAe,EAAc,aAAa,CAAE,CAC9C,EAAE,CACN,GAAI,EACA,CAAE,aAAc,EAAa,aAAa,CAAE,CAC5C,EAAE,CACP,CACF,CACD,EAAE,CACN,GAAI,EAAkB,OAAS,EAAI,CAAE,YAAa,EAAmB,CAAG,EAAE,CAC3E,CAED,EAAA,EAAO,KACL,+CAA+C,EAAU,eACvD,EAAc,iBAAmB,eAClC,KACF,CAED,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,qBAAqB,IAAO,CAAC,CAGxD,GAAM,CAAC,EAAoB,GAAkB,MAAM,QAAQ,IAAI,CAC7DC,EAAAA,GAA+B,EAAO,CACtCC,EAAAA,EAAoB,EAAO,CAC5B,CAAC,CAGI,EAAiB,EAAe,IAAK,GAAM,EAAE,KAAK,CAGlD,EAAY,MAAM,KACtB,IAAI,IACF,EAAmB,QAChB,GAAM,EAAE,QAAQ,IAAK,GAAM,GAAG,EAAE,aAAa,GAAG,EAAE,OAAO,EAAI,EAAE,CACjE,CACF,CACF,CAAC,MAAM,EAAG,IAAM,EAAE,cAAc,EAAE,CAAC,CAI9B,EAAc,MAAM,KACxB,IAAI,IAAI,EAAmB,IAAK,GAAM,EAAE,aAAa,CAAC,CACvD,CAAC,MAAM,EAAG,IAAM,EAAE,cAAc,EAAE,CAAC,CAI9B,EAAsB,CAC1B,GAAG,EACH,GAAG,EACH,GAAG,EACH,GAAG,EACJ,CAGG,EAA+B,KAC/B,EAAc,GACZ,EAAa,GAA+C,CAChE,GAAI,CAAC,GAAS,EAAM,SAAW,EAAG,OAClC,IAAM,EAAO,EAAM,IAAK,GACtBC,EAAAA,EAA+B,EAAK,EAA+B,CACpE,CACD,GAAI,CAAC,EAAa,CAChB,IAAM,EAAY,OAAO,KAAK,EAAK,IAAM,EAAE,CAAC,CACtC,EAAO,IAAI,IACjB,EAAc,CAAC,GAAG,EAAqB,GAAG,EAAU,CAAC,OAAQ,GACvD,IAAM,IAAA,IACN,EAAK,IAAI,EAAE,CAAS,IACxB,EAAK,IAAI,EAAE,CACJ,IACP,CACF,EAAA,EAAY,EAAM,EAAY,CAC9B,EAAc,GAEhB,EAAA,EAAqB,EAAM,EAAM,EAAa,EAGhD,GAAI,EAAa,CAEf,MAAMC,EAAAA,EAA+B,EAAQ,CAC3C,YACA,WACA,MAAO,EACP,oBACA,YACA,kBACA,QAAU,GAAU,EAAU,EAAM,CACrC,CAAC,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,2BAA2B,IAAO,CAAC,CAC5D,OAIF,MAAMC,EAAAA,EAAwB,EAAQ,CACpC,YACA,WACA,MAAO,EACP,QAAU,GAAU,EAAU,EAAM,CACrC,CAAC,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,2BAA2B,IAAO,CAAC"}
+ {"version":3,"file":"impl-DSnwLiWc.cjs","names":["createSombraGotInstance","buildTranscendGraphQLClient","fetchAllPurposesAndPreferences","fetchAllIdentifiers","transformPreferenceRecordToCsv","fetchConsentPreferencesChunked","fetchConsentPreferences"],"sources":["../src/commands/consent/pull-consent-preferences/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport colors from 'colors';\n\nimport {\n fetchConsentPreferences,\n fetchConsentPreferencesChunked,\n transformPreferenceRecordToCsv,\n type PreferenceIdentifier,\n} from '../../../lib/preference-management';\nimport {\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchAllIdentifiers,\n fetchAllPurposesAndPreferences,\n} from '../../../lib/graphql';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { logger } from '../../../logger';\nimport { initCsvFile, appendCsvRowsOrdered } from '../../../lib/helpers';\nimport type { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\n\n// Known “core” columns your transformer usually produces up front.\n// Leave this list conservative; we’ll still union with transformer keys.\nconst CORE_COLS = [\n 'userId',\n 'timestamp',\n 'partition',\n 'decryptionStatus',\n 'updatedAt',\n 'usp',\n 'gpp',\n 'tcf',\n 'airgapVersion',\n 'metadata',\n 'metadataTimestamp',\n];\n\nexport interface PullConsentPreferencesCommandFlags {\n auth: string;\n partition: string;\n sombraAuth?: string;\n file: string;\n transcendUrl: string;\n timestampBefore?: Date;\n exportIdentifiersWithDelimiter: string;\n timestampAfter?: Date;\n updatedBefore?: Date;\n updatedAfter?: Date;\n identifiers?: string[];\n concurrency: number;\n shouldChunk: boolean;\n windowConcurrency: number;\n maxChunks: number;\n maxLookbackDays: number;\n}\n\nexport async function pullConsentPreferences(\n this: LocalContext,\n {\n auth,\n partition,\n sombraAuth,\n file,\n transcendUrl,\n timestampBefore,\n timestampAfter,\n updatedBefore,\n updatedAfter,\n identifiers = [],\n concurrency,\n shouldChunk,\n windowConcurrency,\n maxChunks,\n exportIdentifiersWithDelimiter,\n maxLookbackDays,\n }: PullConsentPreferencesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Identifiers are key:value, parse to PreferenceIdentifier[]\n const parsedIdentifiers = identifiers.map(\n (identifier): PreferenceIdentifier => {\n if (!identifier.includes(':')) {\n return {\n name: 'email',\n value: identifier,\n };\n }\n const [name, value] = identifier.split(':');\n return { name, value };\n },\n );\n\n // Build filter\n const filterBy = {\n ...(timestampBefore\n ? { timestampBefore: timestampBefore.toISOString() }\n : {}),\n ...(timestampAfter ? { timestampAfter: timestampAfter.toISOString() } : {}),\n ...(updatedAfter || updatedBefore\n ? {\n system: {\n ...(updatedBefore\n ? { updatedBefore: updatedBefore.toISOString() }\n : {}),\n ...(updatedAfter\n ? { updatedAfter: updatedAfter.toISOString() }\n : {}),\n },\n }\n : {}),\n ...(parsedIdentifiers.length > 0 ? { identifiers: parsedIdentifiers } : {}),\n };\n\n logger.info(\n `Fetching consent preferences from partition ${partition}, using mode=${\n shouldChunk ? 'chunked-stream' : 'paged-stream'\n }...`,\n );\n\n logger.info(colors.magenta(`Preparing CSV at: ${file}`));\n\n // Fetch full sets (purposes+topics, identifiers) to ensure header completeness\n const [purposesWithTopics, allIdentifiers] = await Promise.all([\n fetchAllPurposesAndPreferences(client),\n fetchAllIdentifiers(client),\n ]);\n\n // Identifier columns: exactly the identifier names\n const identifierCols = allIdentifiers.map((i) => i.name);\n\n // Preference topic columns: topic names (de-duped)\n const topicCols = Array.from(\n new Set(\n purposesWithTopics.flatMap(\n (p) => p.topics?.map((t) => `${p.trackingType}_${t.slug}`) ?? [],\n ),\n ),\n ).sort((a, b) => a.localeCompare(b));\n\n // Some setups also want a per-purpose boolean column (e.g., “Email”, “Sms”).\n // If your transformer includes those, list them here, derived from purposes:\n const purposeCols = Array.from(\n new Set(purposesWithTopics.map((p) => p.trackingType)),\n ).sort((a, b) => a.localeCompare(b));\n\n // Build the complete header skeleton.\n // We’ll still union with the first transformed row’s keys to be safe.\n const completeHeadersList = [\n ...CORE_COLS,\n ...identifierCols,\n ...purposeCols,\n ...topicCols,\n ];\n\n // Lazily initialize CSV header order from the first transformed row.\n let headerOrder: string[] | null = null;\n let wroteHeader = false;\n const writeRows = (items: PreferenceQueryResponseItem[]): void => {\n if (!items || items.length === 0) return;\n const rows = items.map((row) =>\n transformPreferenceRecordToCsv(row, exportIdentifiersWithDelimiter),\n );\n if (!wroteHeader) {\n const firstKeys = Object.keys(rows[0] ?? {});\n const seen = new Set<string>();\n headerOrder = [...completeHeadersList, ...firstKeys].filter((k) => {\n if (k === undefined) return false;\n if (seen.has(k)) return false;\n seen.add(k);\n return true;\n });\n initCsvFile(file, headerOrder);\n wroteHeader = true;\n }\n appendCsvRowsOrdered(file, rows, headerOrder!);\n };\n\n if (shouldChunk) {\n // Stream via chunked fetcher with page callback\n await fetchConsentPreferencesChunked(sombra, {\n partition,\n filterBy,\n limit: concurrency,\n windowConcurrency,\n maxChunks,\n maxLookbackDays,\n onItems: (items) => writeRows(items),\n });\n\n logger.info(colors.green(`Finished writing CSV to ${file}`));\n return;\n }\n\n // Non-chunked path: still stream page-by-page via onItems (no in-memory accumulation)\n await fetchConsentPreferences(sombra, {\n partition,\n filterBy,\n limit: concurrency, // page size (API max 50 enforced internally)\n onItems: (items) => writeRows(items),\n });\n\n logger.info(colors.green(`Finished writing CSV to ${file}`));\n}\n"],"mappings":"gZAsBA,MAAM,EAAY,CAChB,SACA,YACA,YACA,mBACA,YACA,MACA,MACA,MACA,gBACA,WACA,oBACD,CAqBD,eAAsB,EAEpB,CACE,OACA,YACA,aACA,OACA,eACA,kBACA,iBACA,gBACA,eACA,cAAc,EAAE,CAChB,cACA,cACA,oBACA,YACA,iCACA,mBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAAS,MAAMA,EAAAA,GAAwB,EAAc,EAAM,EAAW,CACtE,EAASC,EAAAA,GAA4B,EAAc,EAAK,CAGxD,EAAoB,EAAY,IACnC,GAAqC,CACpC,GAAI,CAAC,EAAW,SAAS,IAAI,CAC3B,MAAO,CACL,KAAM,QACN,MAAO,EACR,CAEH,GAAM,CAAC,EAAM,GAAS,EAAW,MAAM,IAAI,CAC3C,MAAO,CAAE,OAAM,QAAO,EAEzB,CAGK,EAAW,CACf,GAAI,EACA,CAAE,gBAAiB,EAAgB,aAAa,CAAE,CAClD,EAAE,CACN,GAAI,EAAiB,CAAE,eAAgB,EAAe,aAAa,CAAE,CAAG,EAAE,CAC1E,GAAI,GAAgB,EAChB,CACE,OAAQ,CACN,GAAI,EACA,CAAE,cAAe,EAAc,aAAa,CAAE,CAC9C,EAAE,CACN,GAAI,EACA,CAAE,aAAc,EAAa,aAAa,CAAE,CAC5C,EAAE,CACP,CACF,CACD,EAAE,CACN,GAAI,EAAkB,OAAS,EAAI,CAAE,YAAa,EAAmB,CAAG,EAAE,CAC3E,CAED,EAAA,EAAO,KACL,+CAA+C,EAAU,eACvD,EAAc,iBAAmB,eAClC,KACF,CAED,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,qBAAqB,IAAO,CAAC,CAGxD,GAAM,CAAC,EAAoB,GAAkB,MAAM,QAAQ,IAAI,CAC7DC,EAAAA,GAA+B,EAAO,CACtCC,EAAAA,EAAoB,EAAO,CAC5B,CAAC,CAGI,EAAiB,EAAe,IAAK,GAAM,EAAE,KAAK,CAGlD,EAAY,MAAM,KACtB,IAAI,IACF,EAAmB,QAChB,GAAM,EAAE,QAAQ,IAAK,GAAM,GAAG,EAAE,aAAa,GAAG,EAAE,OAAO,EAAI,EAAE,CACjE,CACF,CACF,CAAC,MAAM,EAAG,IAAM,EAAE,cAAc,EAAE,CAAC,CAI9B,EAAc,MAAM,KACxB,IAAI,IAAI,EAAmB,IAAK,GAAM,EAAE,aAAa,CAAC,CACvD,CAAC,MAAM,EAAG,IAAM,EAAE,cAAc,EAAE,CAAC,CAI9B,EAAsB,CAC1B,GAAG,EACH,GAAG,EACH,GAAG,EACH,GAAG,EACJ,CAGG,EAA+B,KAC/B,EAAc,GACZ,EAAa,GAA+C,CAChE,GAAI,CAAC,GAAS,EAAM,SAAW,EAAG,OAClC,IAAM,EAAO,EAAM,IAAK,GACtBC,EAAAA,EAA+B,EAAK,EAA+B,CACpE,CACD,GAAI,CAAC,EAAa,CAChB,IAAM,EAAY,OAAO,KAAK,EAAK,IAAM,EAAE,CAAC,CACtC,EAAO,IAAI,IACjB,EAAc,CAAC,GAAG,EAAqB,GAAG,EAAU,CAAC,OAAQ,GACvD,IAAM,IAAA,IACN,EAAK,IAAI,EAAE,CAAS,IACxB,EAAK,IAAI,EAAE,CACJ,IACP,CACF,EAAA,EAAY,EAAM,EAAY,CAC9B,EAAc,GAEhB,EAAA,EAAqB,EAAM,EAAM,EAAa,EAGhD,GAAI,EAAa,CAEf,MAAMC,EAAAA,EAA+B,EAAQ,CAC3C,YACA,WACA,MAAO,EACP,oBACA,YACA,kBACA,QAAU,GAAU,EAAU,EAAM,CACrC,CAAC,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,2BAA2B,IAAO,CAAC,CAC5D,OAIF,MAAMC,EAAAA,EAAwB,EAAQ,CACpC,YACA,WACA,MAAO,EACP,QAAU,GAAU,EAAU,EAAM,CACrC,CAAC,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,2BAA2B,IAAO,CAAC"}
@@ -1,2 +1,2 @@
- const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-wkuhlP8d.cjs`),require(`./syncConfigurationToTranscend-DuTZKIG8.cjs`),require(`./enums-BZulhPFa.cjs`);const t=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`),require(`./codecs-Bvmb8o9R.cjs`);const n=require(`./consent-manager-CCyvzvY5.cjs`);require(`./uploadConsents-C9Pv8Awr.cjs`);const r=require(`./api-keys-Bb2BbZQe.cjs`),i=require(`./done-input-validation-DGckEJ5a.cjs`);let a=require(`node:fs`),o=require(`colors`);o=e.t(o);async function s({auth:e,xdiLocation:s,file:c,removeIpAddresses:l,domainBlockList:u,xdiAllowedCommands:d,transcendUrl:f}){i.t(this.process.exit);let{syncGroups:p,html:m}=await n.r(await r.r(e),{xdiLocation:s,transcendUrl:f,removeIpAddresses:l,domainBlockList:u.length>0?u:void 0,xdiAllowedCommands:d});t.t.info(o.default.green(`Successfully constructed sync endpoint for sync groups: ${JSON.stringify(p,null,2)}`)),(0,a.writeFileSync)(c,m),t.t.info(o.default.green(`Wrote configuration to file "${c}"!`))}exports.buildXdiSyncEndpoint=s;
- //# sourceMappingURL=impl-DC_YquN8.cjs.map
+ const e=require(`./chunk-Bmb41Sf3.cjs`);require(`./constants-BvCUdOlO.cjs`),require(`./syncConfigurationToTranscend-BipGaTT0.cjs`),require(`./enums-BZulhPFa.cjs`);const t=require(`./logger-DQwEYtSS.cjs`);require(`./buildAIIntegrationType-BwuCYR-o.cjs`),require(`./codecs-Bvmb8o9R.cjs`);const n=require(`./consent-manager-C6jfR89E.cjs`);require(`./uploadConsents-C7SPWTIr.cjs`);const r=require(`./api-keys-DKB-2PVX.cjs`),i=require(`./done-input-validation-DGckEJ5a.cjs`);let a=require(`node:fs`),o=require(`colors`);o=e.t(o);async function s({auth:e,xdiLocation:s,file:c,removeIpAddresses:l,domainBlockList:u,xdiAllowedCommands:d,transcendUrl:f}){i.t(this.process.exit);let{syncGroups:p,html:m}=await n.r(await r.r(e),{xdiLocation:s,transcendUrl:f,removeIpAddresses:l,domainBlockList:u.length>0?u:void 0,xdiAllowedCommands:d});t.t.info(o.default.green(`Successfully constructed sync endpoint for sync groups: ${JSON.stringify(p,null,2)}`)),(0,a.writeFileSync)(c,m),t.t.info(o.default.green(`Wrote configuration to file "${c}"!`))}exports.buildXdiSyncEndpoint=s;
+ //# sourceMappingURL=impl-DeGnhjhF.cjs.map