@transcend-io/cli 8.34.0 → 8.34.2

This diff compares the published contents of two package versions as they appear in their public registry. It is provided for informational purposes only.
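A diff like this can be reproduced locally with npm's built-in diff command (npm 7+), assuming both versions are still published to the public registry:

  npm diff --diff=@transcend-io/cli@8.34.0 --diff=@transcend-io/cli@8.34.2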
Files changed (122)
  1. package/README.md +2 -2
  2. package/dist/{api-keys-CDp8NUhN.cjs → api-keys-DQKR5mr6.cjs} +2 -2
  3. package/dist/{api-keys-CDp8NUhN.cjs.map → api-keys-DQKR5mr6.cjs.map} +1 -1
  4. package/dist/{app-rVGy2Ks-.cjs → app-7Nt3fjr3.cjs} +18 -18
  5. package/dist/{app-rVGy2Ks-.cjs.map → app-7Nt3fjr3.cjs.map} +1 -1
  6. package/dist/bin/bash-complete.cjs +1 -1
  7. package/dist/bin/cli.cjs +1 -1
  8. package/dist/bin/deprecated-command.cjs +1 -1
  9. package/dist/{code-scanning-BwfVNIHr.cjs → code-scanning-Dw_M6bvD.cjs} +2 -2
  10. package/dist/{code-scanning-BwfVNIHr.cjs.map → code-scanning-Dw_M6bvD.cjs.map} +1 -1
  11. package/dist/{command-3fhEz5PC.cjs → command-DR1-zcHn.cjs} +2 -2
  12. package/dist/{command-3fhEz5PC.cjs.map → command-DR1-zcHn.cjs.map} +1 -1
  13. package/dist/{consent-manager-DXWjvCtI.cjs → consent-manager-yfT0ldzq.cjs} +2 -2
  14. package/dist/{consent-manager-DXWjvCtI.cjs.map → consent-manager-yfT0ldzq.cjs.map} +1 -1
  15. package/dist/{constants-B-TmLA0w.cjs → constants-Ck15_dOP.cjs} +2 -2
  16. package/dist/{constants-B-TmLA0w.cjs.map → constants-Ck15_dOP.cjs.map} +1 -1
  17. package/dist/{cron-DQHN57v7.cjs → cron-DjzQakpu.cjs} +2 -2
  18. package/dist/{cron-DQHN57v7.cjs.map → cron-DjzQakpu.cjs.map} +1 -1
  19. package/dist/{data-inventory-flXV6qPl.cjs → data-inventory-CvILSgb2.cjs} +2 -2
  20. package/dist/{data-inventory-flXV6qPl.cjs.map → data-inventory-CvILSgb2.cjs.map} +1 -1
  21. package/dist/{dataFlowsToDataSilos-Lk7WQ39V.cjs → dataFlowsToDataSilos-MmEqa0DG.cjs} +2 -2
  22. package/dist/{dataFlowsToDataSilos-Lk7WQ39V.cjs.map → dataFlowsToDataSilos-MmEqa0DG.cjs.map} +1 -1
  23. package/dist/{impl-B6qG10UZ.cjs → impl-5_JaY8jW.cjs} +2 -2
  24. package/dist/{impl-B6qG10UZ.cjs.map → impl-5_JaY8jW.cjs.map} +1 -1
  25. package/dist/{impl-EEKe6HmF.cjs → impl-6CHrtJ1S.cjs} +2 -2
  26. package/dist/{impl-EEKe6HmF.cjs.map → impl-6CHrtJ1S.cjs.map} +1 -1
  27. package/dist/{impl-CzO7dqsL.cjs → impl-B0MJjjNA.cjs} +2 -2
  28. package/dist/{impl-CzO7dqsL.cjs.map → impl-B0MJjjNA.cjs.map} +1 -1
  29. package/dist/{impl-Cw7Jxx0V.cjs → impl-B5L80zX3.cjs} +2 -2
  30. package/dist/{impl-Cw7Jxx0V.cjs.map → impl-B5L80zX3.cjs.map} +1 -1
  31. package/dist/{impl-DBnRvkUi.cjs → impl-BALncBPL.cjs} +2 -2
  32. package/dist/{impl-DBnRvkUi.cjs.map → impl-BALncBPL.cjs.map} +1 -1
  33. package/dist/{impl-C2_oQebA.cjs → impl-BE7hvm-o.cjs} +2 -2
  34. package/dist/{impl-C2_oQebA.cjs.map → impl-BE7hvm-o.cjs.map} +1 -1
  35. package/dist/{impl-DoP4FUJI.cjs → impl-BEtIBM3H.cjs} +2 -2
  36. package/dist/{impl-DoP4FUJI.cjs.map → impl-BEtIBM3H.cjs.map} +1 -1
  37. package/dist/{impl-CpoSlP1o.cjs → impl-BL1UK3c7.cjs} +2 -2
  38. package/dist/{impl-CpoSlP1o.cjs.map → impl-BL1UK3c7.cjs.map} +1 -1
  39. package/dist/{impl-BJ8i_gqQ.cjs → impl-BQE6Uay6.cjs} +2 -2
  40. package/dist/{impl-BJ8i_gqQ.cjs.map → impl-BQE6Uay6.cjs.map} +1 -1
  41. package/dist/{impl-DOmKR8yz.cjs → impl-BWAPcxmK.cjs} +2 -2
  42. package/dist/{impl-DOmKR8yz.cjs.map → impl-BWAPcxmK.cjs.map} +1 -1
  43. package/dist/{impl-Cc-Lfiig.cjs → impl-Baci0G6w.cjs} +2 -2
  44. package/dist/{impl-Cc-Lfiig.cjs.map → impl-Baci0G6w.cjs.map} +1 -1
  45. package/dist/{impl-DjTjLgew.cjs → impl-Bl-9i-F0.cjs} +2 -2
  46. package/dist/{impl-DjTjLgew.cjs.map → impl-Bl-9i-F0.cjs.map} +1 -1
  47. package/dist/{impl-Bsqlw8_g.cjs → impl-Bpiix3Up.cjs} +2 -2
  48. package/dist/{impl-Bsqlw8_g.cjs.map → impl-Bpiix3Up.cjs.map} +1 -1
  49. package/dist/{impl-BqyO4vYa.cjs → impl-ByCMcwi7.cjs} +2 -2
  50. package/dist/{impl-BqyO4vYa.cjs.map → impl-ByCMcwi7.cjs.map} +1 -1
  51. package/dist/{impl-B1p9GNrM.cjs → impl-C6Flxs5Z.cjs} +2 -2
  52. package/dist/{impl-B1p9GNrM.cjs.map → impl-C6Flxs5Z.cjs.map} +1 -1
  53. package/dist/{impl-DrNWIvMG.cjs → impl-C7yf9HJB.cjs} +2 -2
  54. package/dist/{impl-DrNWIvMG.cjs.map → impl-C7yf9HJB.cjs.map} +1 -1
  55. package/dist/{impl-CrsHy3BZ.cjs → impl-CJ6efbwZ.cjs} +2 -2
  56. package/dist/{impl-CrsHy3BZ.cjs.map → impl-CJ6efbwZ.cjs.map} +1 -1
  57. package/dist/{impl-D0r4dSxM.cjs → impl-CKfTAVk5.cjs} +2 -2
  58. package/dist/{impl-D0r4dSxM.cjs.map → impl-CKfTAVk5.cjs.map} +1 -1
  59. package/dist/{impl-b6KwZ74o.cjs → impl-CU1jfTc6.cjs} +2 -2
  60. package/dist/{impl-b6KwZ74o.cjs.map → impl-CU1jfTc6.cjs.map} +1 -1
  61. package/dist/{impl-TQVXJemY.cjs → impl-Cb2y0rmA.cjs} +2 -2
  62. package/dist/{impl-TQVXJemY.cjs.map → impl-Cb2y0rmA.cjs.map} +1 -1
  63. package/dist/{impl-2u3q0rji.cjs → impl-Cdwey5Ad.cjs} +2 -2
  64. package/dist/{impl-2u3q0rji.cjs.map → impl-Cdwey5Ad.cjs.map} +1 -1
  65. package/dist/{impl-CZmlwib3.cjs → impl-CqpWJ35V.cjs} +2 -2
  66. package/dist/{impl-CZmlwib3.cjs.map → impl-CqpWJ35V.cjs.map} +1 -1
  67. package/dist/{impl-BjsBvvGF.cjs → impl-D0kWgQ25.cjs} +2 -2
  68. package/dist/{impl-BjsBvvGF.cjs.map → impl-D0kWgQ25.cjs.map} +1 -1
  69. package/dist/impl-D2btoDa0.cjs +2 -0
  70. package/dist/impl-D2btoDa0.cjs.map +1 -0
  71. package/dist/{impl-Bc9DMV-V.cjs → impl-D6VaotKB.cjs} +2 -2
  72. package/dist/{impl-Bc9DMV-V.cjs.map → impl-D6VaotKB.cjs.map} +1 -1
  73. package/dist/{impl-DvlAq8xf.cjs → impl-DEv-oWoY.cjs} +2 -2
  74. package/dist/{impl-DvlAq8xf.cjs.map → impl-DEv-oWoY.cjs.map} +1 -1
  75. package/dist/{impl-DcQ_HfDZ.cjs → impl-DGRFzFu_.cjs} +2 -2
  76. package/dist/{impl-DcQ_HfDZ.cjs.map → impl-DGRFzFu_.cjs.map} +1 -1
  77. package/dist/{impl-CUkxcZrf.cjs → impl-DGRQPEpr.cjs} +2 -2
  78. package/dist/{impl-CUkxcZrf.cjs.map → impl-DGRQPEpr.cjs.map} +1 -1
  79. package/dist/{impl-DX7gLoTo.cjs → impl-DSwZ_Zpg.cjs} +2 -2
  80. package/dist/{impl-DX7gLoTo.cjs.map → impl-DSwZ_Zpg.cjs.map} +1 -1
  81. package/dist/{impl-CaSO2LPb.cjs → impl-DXYFSbdT.cjs} +2 -2
  82. package/dist/{impl-CaSO2LPb.cjs.map → impl-DXYFSbdT.cjs.map} +1 -1
  83. package/dist/{impl-CaUSDPuW.cjs → impl-DcZxvv9d.cjs} +2 -2
  84. package/dist/{impl-CaUSDPuW.cjs.map → impl-DcZxvv9d.cjs.map} +1 -1
  85. package/dist/{impl-BuJNWbOW.cjs → impl-DhGAHD6N.cjs} +2 -2
  86. package/dist/{impl-BuJNWbOW.cjs.map → impl-DhGAHD6N.cjs.map} +1 -1
  87. package/dist/{impl-8PlQ3Cvy.cjs → impl-DilrEold.cjs} +2 -2
  88. package/dist/{impl-8PlQ3Cvy.cjs.map → impl-DilrEold.cjs.map} +1 -1
  89. package/dist/{impl-E1vzeNmp.cjs → impl-Gy2MhlbR.cjs} +2 -2
  90. package/dist/{impl-E1vzeNmp.cjs.map → impl-Gy2MhlbR.cjs.map} +1 -1
  91. package/dist/{impl-B4iI3rcF.cjs → impl-KtaRdVpP.cjs} +2 -2
  92. package/dist/{impl-B4iI3rcF.cjs.map → impl-KtaRdVpP.cjs.map} +1 -1
  93. package/dist/{impl-t_fZSUcj.cjs → impl-M-kD-8Qh.cjs} +2 -2
  94. package/dist/{impl-t_fZSUcj.cjs.map → impl-M-kD-8Qh.cjs.map} +1 -1
  95. package/dist/{impl-CwPRkBc0.cjs → impl-NwB7y3aW.cjs} +2 -2
  96. package/dist/{impl-CwPRkBc0.cjs.map → impl-NwB7y3aW.cjs.map} +1 -1
  97. package/dist/{impl-80GXtjmz.cjs → impl-PU77PoPR.cjs} +2 -2
  98. package/dist/{impl-80GXtjmz.cjs.map → impl-PU77PoPR.cjs.map} +1 -1
  99. package/dist/{impl-CtMVi5m1.cjs → impl-keqxvDu-.cjs} +2 -2
  100. package/dist/{impl-CtMVi5m1.cjs.map → impl-keqxvDu-.cjs.map} +1 -1
  101. package/dist/{impl-CCeEUy6z.cjs → impl-miCFpR_U.cjs} +2 -2
  102. package/dist/{impl-CCeEUy6z.cjs.map → impl-miCFpR_U.cjs.map} +1 -1
  103. package/dist/{impl-3ih-x09b.cjs → impl-vDiQmycv.cjs} +2 -2
  104. package/dist/{impl-3ih-x09b.cjs.map → impl-vDiQmycv.cjs.map} +1 -1
  105. package/dist/{impl-BZc5cmdE.cjs → impl-xU8wyFx4.cjs} +2 -2
  106. package/dist/{impl-BZc5cmdE.cjs.map → impl-xU8wyFx4.cjs.map} +1 -1
  107. package/dist/index.cjs +1 -1
  108. package/dist/index.d.cts +529 -312
  109. package/dist/{manual-enrichment-Y_BQaSZQ.cjs → manual-enrichment-CkdYjftx.cjs} +2 -2
  110. package/dist/{manual-enrichment-Y_BQaSZQ.cjs.map → manual-enrichment-CkdYjftx.cjs.map} +1 -1
  111. package/dist/{pooling-Ct83vfEh.cjs → pooling-CE5JT_os.cjs} +2 -2
  112. package/dist/{pooling-Ct83vfEh.cjs.map → pooling-CE5JT_os.cjs.map} +1 -1
  113. package/dist/{preference-management-5uJDKuMK.cjs → preference-management-Dsrpuzbl.cjs} +2 -2
  114. package/dist/{preference-management-5uJDKuMK.cjs.map → preference-management-Dsrpuzbl.cjs.map} +1 -1
  115. package/dist/{syncConfigurationToTranscend-Bpge5AcC.cjs → syncConfigurationToTranscend-f6E4-H5W.cjs} +176 -176
  116. package/dist/syncConfigurationToTranscend-f6E4-H5W.cjs.map +1 -0
  117. package/dist/{uploadConsents-CJc_6Qwd.cjs → uploadConsents-BwTW5VNv.cjs} +2 -2
  118. package/dist/{uploadConsents-CJc_6Qwd.cjs.map → uploadConsents-BwTW5VNv.cjs.map} +1 -1
  119. package/package.json +8 -8
  120. package/dist/impl-NdV_MRsm.cjs +0 -2
  121. package/dist/impl-NdV_MRsm.cjs.map +0 -1
  122. package/dist/syncConfigurationToTranscend-Bpge5AcC.cjs.map +0 -1
package/dist/{impl-DcQ_HfDZ.cjs.map → impl-DGRFzFu_.cjs.map}
@@ -1 +1 @@
- {"version":3,"file":"impl-DcQ_HfDZ.cjs","names":["fastcsv","Parser","Transform","extractErrorMessage","makeHeader","makeWorkerRows","computePoolSize","runPool","CHILD_FLAG","dashboardPlugin","createExtraKeyHandler"],"sources":["../src/lib/helpers/collectCsvFilesOrExit.ts","../src/lib/helpers/chunkOneCsvFile.ts","../src/commands/admin/chunk-csv/worker.ts","../src/commands/admin/chunk-csv/ui/plugin.ts","../src/commands/admin/chunk-csv/impl.ts"],"sourcesContent":["import { join } from 'node:path';\nimport { readdirSync, statSync } from 'node:fs';\nimport colors from 'colors';\nimport { logger } from '../../logger';\nimport type { LocalContext } from '../../context';\n\n/**\n * Validate flags and collect CSV file paths from a directory.\n * On validation error, the provided `exit` function is called.\n *\n * @param directory - the directory containing CSV files\n * @param localContext - the context of the command, used for logging and exit\n * @returns an array of valid CSV file paths\n */\nexport function collectCsvFilesOrExit(\n directory: string | undefined,\n localContext: LocalContext,\n): string[] {\n if (!directory) {\n logger.error(colors.red('A --directory must be provided.'));\n localContext.process.exit(1);\n }\n\n let files: string[] = [];\n try {\n const entries = readdirSync(directory);\n files = entries\n .filter((f) => f.endsWith('.csv'))\n .map((f) => join(directory, f))\n .filter((p) => {\n try {\n return statSync(p).isFile();\n } catch {\n return false;\n }\n });\n } catch (err) {\n logger.error(colors.red(`Failed to read directory: ${directory}`));\n logger.error(colors.red((err as Error).message));\n localContext.process.exit(1);\n }\n\n if (files.length === 0) {\n logger.error(colors.red(`No CSV files found in directory: ${directory}`));\n localContext.process.exit(1);\n }\n logger.info(colors.green(`Found: ${files.join(', ')} CSV files`));\n return files;\n}\n","import { createReadStream, createWriteStream } from 'node:fs';\nimport { mkdir, readdir, unlink, stat } from 'node:fs/promises';\nimport { pipeline } from 'node:stream/promises';\nimport { Transform } from 'node:stream';\nimport { once } from 'node:events';\nimport { Parser } from 'csv-parse';\nimport { basename, dirname, join } from 'node:path';\nimport colors from 'colors';\nimport * as fastcsv from 'fast-csv';\nimport { logger } from '../../logger';\n\n/**\n * Options for chunking a single CSV file\n */\nexport type ChunkOpts = {\n /** Path to the CSV file to chunk */\n filePath: string;\n /** Output directory for chunk files; defaults to the same directory as the input file */\n outputDir?: string;\n /** Clear output directory before starting */\n clearOutputDir: boolean;\n /** Chunk size in MB */\n chunkSizeMB: number;\n /** Optional report interval in milliseconds for progress updates */\n reportEveryMs?: number;\n /** Callback for progress updates */\n onProgress: (processed: number, total?: number) => void;\n};\n\n/**\n * Create a CSV writer (fast-csv formatter piped to a write stream) that writes\n * a header line first, and then accepts object rows. 
Returns a tiny API to\n * write rows with backpressure handling and to close the file cleanly.\n *\n * @param filePath - The path to the output CSV file\n * @param headers - The headers for the CSV file\n * @returns An object with `write` and `end` methods\n */\nfunction createCsvChunkWriter(\n filePath: string,\n headers: string[],\n): {\n /** Write a row object to the CSV file */\n write: (row: Record<string, unknown>) => Promise<void>;\n /** Close the CSV file, ensuring all data is flushed */\n end: () => Promise<void>;\n} {\n const ws = createWriteStream(filePath);\n const csv = fastcsv.format({ headers, writeHeaders: true, objectMode: true });\n // Pipe csv → file stream\n csv.pipe(ws);\n\n return {\n /**\n * Write a row object to the CSV file.\n *\n * @param row - The row data as an object\n */\n async write(row) {\n // Respect backpressure from fast-csv formatter\n const ok = csv.write(row);\n if (!ok) {\n await once(csv, 'drain');\n }\n },\n /**\n * Close the CSV file, ensuring all data is flushed.\n */\n async end() {\n // End formatter; wait for underlying file stream to finish flush/close\n const finished = Promise.all([once(ws, 'finish')]);\n csv.end();\n await finished;\n },\n };\n}\n\n/**\n * Zero-pad chunk numbers to four digits (e.g., 1 → \"0001\").\n *\n * @param n - The chunk number to pad\n * @returns The padded chunk number as a string\n */\nfunction pad4(n: number): string {\n return String(n).padStart(4, '0');\n}\n\n/**\n * Approximate row size in bytes using comma-joined field values.\n *\n * @param obj - The row object to estimate size for\n * @returns Approximate byte size of the row when serialized as CSV\n */\nfunction approxRowBytes(obj: Record<string, unknown>): number {\n // naive but fast; adequate for chunk rollover thresholding\n return Buffer.byteLength(\n Object.values(obj)\n .map((v) => (v == null ? '' : String(v)))\n .join(','),\n 'utf8',\n );\n}\n\n/**\n * Stream a single CSV file and write chunk files of roughly chunkSizeMB.\n * - Writes header to each chunk.\n * - Logs periodic progress via onProgress.\n *\n * @param opts - Options for chunking the file\n * @returns Promise that resolves when done\n */\nexport async function chunkOneCsvFile(opts: ChunkOpts): Promise<void> {\n const {\n filePath,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n onProgress,\n reportEveryMs = 500,\n } = opts;\n const { size: fileBytes } = await stat(filePath); // total bytes on disk\n let lastTick = 0;\n\n logger.info(\n colors.magenta(`Chunking ${filePath} into ~${chunkSizeMB}MB files...`),\n );\n\n const chunkSizeBytes = Math.floor(chunkSizeMB * 1024 * 1024);\n const baseName = basename(filePath, '.csv');\n const outDir = outputDir || dirname(filePath);\n logger.info(colors.magenta(`Output directory: ${outDir}`));\n await mkdir(outDir, { recursive: true });\n\n // Clear previous chunk files for this base\n if (clearOutputDir) {\n logger.warn(colors.yellow(`Clearing output directory: ${outDir}`));\n const files = await readdir(outDir);\n await Promise.all(\n files\n .filter((f) => f.startsWith(`${baseName}_chunk_`) && f.endsWith('.csv'))\n .map((f) => unlink(join(outDir, f))),\n );\n }\n\n let headerRow: string[] | null = null;\n let expectedCols: number | null = null;\n let totalLines = 0;\n let currentChunk = 1;\n let currentSize = 0;\n\n const parser = new Parser({\n columns: false,\n skip_empty_lines: true,\n });\n\n // running sample to estimate avg row bytes\n let sampleBytes = 0;\n let sampleRows = 0;\n\n const emit = (): void => {\n const avg = sampleRows > 0 ? 
sampleBytes / sampleRows : 0;\n const estTotal =\n avg > 0 ? Math.max(totalLines, Math.ceil(fileBytes / avg)) : undefined;\n onProgress(totalLines, estTotal); // <-- now has total\n lastTick = Date.now();\n };\n\n // seed an initial 0/N as soon as we start\n emit();\n\n // Current active chunk writer; created after we know headers\n let writer: {\n /** Write a row object to the current chunk file */\n write: (row: Record<string, unknown>) => Promise<void>;\n /** Close the current chunk file */\n end: () => Promise<void>;\n } | null = null;\n\n // Returns current chunk file path — chunk number is always 4-digit padded\n const currentChunkPath = (): string =>\n join(outDir, `${baseName}_chunk_${pad4(currentChunk)}.csv`);\n\n const t = new Transform({\n objectMode: true,\n /**\n * Transform each row of the CSV file into a chunk.\n *\n * @param row - The current row being processed\n * @param _enc - Encoding (not used)\n * @param cb - Callback to signal completion or error\n */\n async transform(row: string[], _enc, cb) {\n try {\n // First row is the header\n if (!headerRow) {\n headerRow = row.slice(0);\n expectedCols = headerRow.length;\n\n // Open first chunk with header asynchronously\n writer = createCsvChunkWriter(currentChunkPath(), headerRow);\n cb();\n return;\n }\n\n // sanity check rows (non-fatal)\n if (expectedCols !== null && row.length !== expectedCols) {\n // optionally log a warning or collect metrics\n logger.warn(\n colors.yellow(\n `Row has ${row.length} cols; expected ${expectedCols}`,\n ),\n );\n }\n\n totalLines += 1;\n if (totalLines % 250_000 === 0) {\n onProgress(totalLines);\n }\n\n // Build row object using the original header\n const obj = Object.fromEntries(headerRow!.map((h, i) => [h, row[i]]));\n\n // Determine the row size up-front\n const rowBytes = approxRowBytes(obj);\n sampleBytes += rowBytes;\n sampleRows += 1;\n\n // time-based throttle for UI updates\n if (Date.now() - lastTick >= reportEveryMs) emit();\n\n // If adding this row would exceed the threshold, roll first,\n // so this row becomes the first row in the next chunk.\n if (\n writer &&\n currentSize > 0 &&\n currentSize + rowBytes > chunkSizeBytes\n ) {\n await writer.end();\n currentChunk += 1;\n currentSize = 0;\n logger.info(\n colors.green(\n `Rolling to chunk ${currentChunk} after ${totalLines.toLocaleString()} rows.`,\n ),\n );\n writer = createCsvChunkWriter(currentChunkPath(), headerRow!);\n }\n\n // Ensure writer exists (should after header)\n if (!writer) {\n writer = createCsvChunkWriter(currentChunkPath(), headerRow!);\n }\n\n // Write row and update approximate size\n await writer.write(obj);\n currentSize += rowBytes;\n\n cb();\n } catch (e) {\n cb(e as Error);\n }\n },\n\n // Ensure final file is closed\n /**\n * Flush is called when the readable has ended; we close any open writer.\n *\n * @param cb - Callback to signal completion or error\n */\n async flush(cb) {\n try {\n if (writer) {\n await writer.end();\n writer = null;\n }\n emit(); // Final progress tick\n cb();\n } catch (e) {\n cb(e as Error);\n }\n },\n });\n\n const rs = createReadStream(filePath);\n await pipeline(rs, parser, t);\n\n // Final progress tick\n onProgress(totalLines);\n logger.info(\n colors.green(\n `Chunked ${filePath} into ${currentChunk} file(s); processed ${totalLines.toLocaleString()} rows.`,\n ),\n );\n}\n","import { extractErrorMessage } from '../../../lib/helpers';\nimport { chunkOneCsvFile } from '../../../lib/helpers/chunkOneCsvFile';\nimport type { ToWorker } from 
'../../../lib/pooling';\nimport { logger } from '../../../logger';\n\n/**\n * A unit of work: instructs a worker to chunk a single CSV file.\n */\nexport type ChunkTask = {\n /** Absolute path of the CSV file to chunk. */\n filePath: string;\n /** Options controlling output and chunk size. */\n options: {\n /** Optional directory where chunked output files should be written. */\n outputDir?: string;\n /** Whether to clear any pre-existing output chunks before writing new ones. */\n clearOutputDir: boolean;\n /** Approximate target chunk size in MB (well under Node’s string size limits). */\n chunkSizeMB: number;\n };\n};\n\n/**\n * Per-worker progress snapshot for the chunk-csv command.\n */\nexport type ChunkProgress = {\n /** File being processed by the worker. */\n filePath: string;\n /** Number of rows processed so far. */\n processed: number;\n /** Optional total rows in the file (not always known). */\n total?: number;\n};\n\n/**\n * Worker result message once a file has finished processing.\n */\nexport type ChunkResult = {\n /** Whether the file completed successfully. */\n ok: boolean;\n /** File path for which this result applies. */\n filePath: string;\n /** Optional error message if the file failed to chunk. */\n error?: string;\n};\n\n/**\n * Worker entrypoint.\n *\n * Lifecycle:\n * 1) Announce readiness to the parent via `{ type: 'ready' }`.\n * 2) Wait for `{ type: 'task' }` messages; for each, call `chunkOneCsvFile(...)`.\n * - While chunking, forward progress to the parent via `{ type: 'progress' }`.\n * - On completion, send `{ type: 'result', ok: true }`.\n * - On error, send `{ type: 'result', ok: false, error }` and exit(1).\n * 3) On `{ type: 'shutdown' }`, exit(0) gracefully.\n *\n * Notes:\n * - This process is typically spawned by a pool manager that assigns file paths to workers.\n * - The long-lived promise at the end keeps the worker alive between tasks until the parent\n * sends an explicit shutdown.\n */\nexport async function runChild(): Promise<void> {\n const workerId = Number(process.env.WORKER_ID || '0');\n logger.info(`[w${workerId}] ready pid=${process.pid}`);\n\n // Notify the parent that the worker is ready to receive tasks.\n process.send?.({ type: 'ready' });\n\n // Main message loop: receive tasks and shutdown requests from the parent.\n process.on('message', async (msg: ToWorker<ChunkTask>) => {\n if (!msg || typeof msg !== 'object') return;\n\n // Graceful shutdown: let the parent control lifecycle.\n if (msg.type === 'shutdown') {\n process.exit(0);\n }\n\n // Only handle task messages here.\n if (msg.type !== 'task') return;\n\n const { filePath, options } = msg.payload;\n const { outputDir, clearOutputDir, chunkSizeMB } = options;\n\n try {\n // Stream the input CSV and write chunk files asynchronously.\n await chunkOneCsvFile({\n filePath,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n // Propagate incremental progress to the parent.\n onProgress: (processed, total) =>\n process.send?.({\n type: 'progress',\n payload: { filePath, processed, total },\n }),\n });\n\n // Report success to the parent.\n process.send?.({\n type: 'result',\n payload: { ok: true, filePath },\n });\n } catch (err) {\n // Log locally and report failure upstream; exit the worker with error code.\n const message = extractErrorMessage(err);\n logger.error(`[w${workerId}] ERROR ${filePath}: ${message}`);\n process.send?.({\n type: 'result',\n payload: { ok: false, filePath, error: message },\n });\n }\n });\n\n // keep alive\n await new Promise<never>(() => {\n // This 
promise never resolves, keeping the worker alive indefinitely\n // until the parent process instructs shutdown.\n });\n}\n","import {\n makeHeader,\n makeWorkerRows,\n type ChunkSlotProgress,\n type CommonCtx,\n type DashboardPlugin,\n} from '../../../../lib/pooling';\n\n/**\n * Header for chunk-csv (no extra totals block).\n *\n * @param ctx - Dashboard context.\n * @returns Header lines.\n */\nfunction renderHeader<TTotals>(\n ctx: CommonCtx<TTotals, ChunkSlotProgress>,\n): string[] {\n // no extra lines — reuse the shared header as-is\n return makeHeader(ctx);\n}\n\n/**\n * Worker rows for chunk-csv — share the generic row renderer.\n *\n * @param ctx - Dashboard context.\n * @returns Array of strings, each representing one worker row.\n */\nfunction renderWorkers<TTotals>(\n ctx: CommonCtx<TTotals, ChunkSlotProgress>,\n): string[] {\n return makeWorkerRows(ctx);\n}\n\nexport const chunkCsvPlugin: DashboardPlugin<unknown, ChunkSlotProgress> = {\n renderHeader,\n renderWorkers,\n // no extras\n};\n","import type { LocalContext } from '../../../context';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { collectCsvFilesOrExit } from '../../../lib/helpers/collectCsvFilesOrExit';\nimport {\n computePoolSize,\n createExtraKeyHandler,\n CHILD_FLAG,\n type PoolHooks,\n runPool,\n dashboardPlugin,\n} from '../../../lib/pooling';\nimport {\n runChild,\n type ChunkProgress,\n type ChunkResult,\n type ChunkTask,\n} from './worker';\nimport { chunkCsvPlugin } from './ui';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n/**\n * Returns the current module's path so the worker pool knows what file to re-exec.\n * In Node ESM, __filename is undefined, so we fall back to argv[1].\n *\n * @returns The current module's path as a string\n */\nfunction getCurrentModulePath(): string {\n if (typeof __filename !== 'undefined') {\n return __filename as unknown as string;\n }\n return process.argv[1];\n}\n\n/**\n * Totals aggregate for this command.\n * We don’t need custom counters since the runner already tracks\n * completed/failed counts in its header — so we just use an empty record.\n */\ntype Totals = Record<string, never>;\n\n/**\n * CLI flags accepted by the `chunk-csv` command.\n *\n * These are passed down from the CLI parser into the parent process.\n */\nexport type ChunkCsvCommandFlags = {\n directory: string;\n outputDir?: string;\n clearOutputDir: boolean;\n chunkSizeMB: number;\n concurrency?: number;\n viewerMode: boolean;\n};\n\n/**\n * Parent entrypoint for chunking many CSVs in parallel using the worker pool runner.\n *\n * Lifecycle:\n * 1) Discover CSV inputs (exit if none).\n * 2) Compute pool size (CPU-count heuristic or --concurrency).\n * 3) Build a FIFO queue of `ChunkTask`s.\n * 4) Define pool hooks to drive task assignment, progress, and result handling.\n * 5) Launch the pool with `runPool`, rendering via the `chunkCsvPlugin`.\n *\n * @param this - Bound CLI context (provides process exit + logging).\n * @param flags - CLI options for the run.\n */\nexport async function chunkCsv(\n this: LocalContext,\n flags: ChunkCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const {\n directory,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n concurrency,\n viewerMode,\n } = flags;\n\n /* 1) Discover CSV inputs */\n const files = collectCsvFilesOrExit(directory, this);\n\n /* 2) Size the pool */\n const { poolSize, cpuCount } = computePoolSize(concurrency, files.length);\n\n logger.info(\n 
colors.green(\n `Chunking ${files.length} CSV file(s) with pool size ${poolSize} (CPU=${cpuCount})`,\n ),\n );\n\n /* 3) Prepare a simple FIFO queue of tasks (one per file). */\n const queue = files.map<ChunkTask>((filePath) => ({\n filePath,\n options: { outputDir, clearOutputDir, chunkSizeMB },\n }));\n\n /* 4) Define pool hooks to adapt runner to this command. */\n const hooks: PoolHooks<ChunkTask, ChunkProgress, ChunkResult, Totals> = {\n nextTask: () => queue.shift(),\n taskLabel: (t) => t.filePath,\n initTotals: () => ({} as Totals),\n initSlotProgress: () => undefined,\n onProgress: (totals) => totals,\n onResult: (totals, res) => ({ totals, ok: !!res.ok }),\n // postProcess receives log context when viewerMode=true — we don’t need it here.\n postProcess: async () => {\n // nothing extra for chunk-csv\n },\n };\n\n /* 5) Launch the pool runner with our hooks and custom dashboard plugin. */\n await runPool({\n title: `Chunk CSV - ${directory}`,\n baseDir: directory || outputDir || process.cwd(),\n childFlag: CHILD_FLAG,\n childModulePath: getCurrentModulePath(),\n poolSize,\n cpuCount,\n filesTotal: files.length,\n hooks,\n viewerMode,\n render: (input) => dashboardPlugin(input, chunkCsvPlugin, viewerMode),\n extraKeyHandler: ({ logsBySlot, repaint, setPaused }) =>\n createExtraKeyHandler({\n logsBySlot,\n repaint,\n setPaused,\n }),\n });\n}\n\n/* -------------------------------------------------------------------------------------------------\n * If invoked directly as a child process, enter worker loop\n * ------------------------------------------------------------------------------------------------- */\nif (process.argv.includes(CHILD_FLAG)) {\n runChild().catch((err) => {\n logger.error(err);\n process.exit(1);\n });\n}\n"],"mappings":"wjBAcA,SAAgB,EACd,EACA,EACU,CACL,IACH,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,kCAAkC,CAAC,CAC3D,EAAa,QAAQ,KAAK,EAAE,EAG9B,IAAI,EAAkB,EAAE,CACxB,GAAI,CAEF,GAAA,EAAA,EAAA,aAD4B,EAAU,CAEnC,OAAQ,GAAM,EAAE,SAAS,OAAO,CAAC,CACjC,IAAK,IAAA,EAAA,EAAA,MAAW,EAAW,EAAE,CAAC,CAC9B,OAAQ,GAAM,CACb,GAAI,CACF,OAAA,EAAA,EAAA,UAAgB,EAAE,CAAC,QAAQ,MACrB,CACN,MAAO,KAET,OACG,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,6BAA6B,IAAY,CAAC,CAClE,EAAA,EAAO,MAAM,EAAA,QAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,EAAa,QAAQ,KAAK,EAAE,CAQ9B,OALI,EAAM,SAAW,IACnB,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,oCAAoC,IAAY,CAAC,CACzE,EAAa,QAAQ,KAAK,EAAE,EAE9B,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,UAAU,EAAM,KAAK,KAAK,CAAC,YAAY,CAAC,CAC1D,ECTT,SAAS,EACP,EACA,EAMA,CACA,IAAM,GAAA,EAAA,EAAA,mBAAuB,EAAS,CAChC,EAAMA,EAAQ,OAAO,CAAE,UAAS,aAAc,GAAM,WAAY,GAAM,CAAC,CAI7E,OAFA,EAAI,KAAK,EAAG,CAEL,CAML,MAAM,MAAM,EAAK,CAEJ,EAAI,MAAM,EAAI,EAEvB,MAAA,EAAA,EAAA,MAAW,EAAK,QAAQ,EAM5B,MAAM,KAAM,CAEV,IAAM,EAAW,QAAQ,IAAI,EAAA,EAAA,EAAA,MAAM,EAAI,SAAS,CAAC,CAAC,CAClD,EAAI,KAAK,CACT,MAAM,GAET,CASH,SAAS,EAAK,EAAmB,CAC/B,OAAO,OAAO,EAAE,CAAC,SAAS,EAAG,IAAI,CASnC,SAAS,EAAe,EAAsC,CAE5D,OAAO,OAAO,WACZ,OAAO,OAAO,EAAI,CACf,IAAK,GAAO,GAAK,KAAO,GAAK,OAAO,EAAE,CAAE,CACxC,KAAK,IAAI,CACZ,OACD,CAWH,eAAsB,EAAgB,EAAgC,CACpE,GAAM,CACJ,WACA,YACA,iBACA,cACA,aACA,gBAAgB,KACd,EACE,CAAE,KAAM,GAAc,MAAA,EAAA,EAAA,MAAW,EAAS,CAC5C,EAAW,EAEf,EAAA,EAAO,KACL,EAAA,QAAO,QAAQ,YAAY,EAAS,SAAS,EAAY,aAAa,CACvE,CAED,IAAM,EAAiB,KAAK,MAAM,EAAc,KAAO,KAAK,CACtD,GAAA,EAAA,EAAA,UAAoB,EAAU,OAAO,CACrC,EAAS,IAAA,EAAA,EAAA,SAAqB,EAAS,CAK7C,GAJA,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,qBAAqB,IAAS,CAAC,CAC1D,MAAA,EAAA,EAAA,OAAY,EAAQ,CAAE,UAAW,GAAM,CAAC,CAGpC,EAAgB,CAClB,EAAA,EAAO,KAAK,EAAA,QAAO,OAAO,8BAA8B,IAAS,CAAC,CAClE,IAAM,EAAQ,MAAA,EAAA,EAAA,SAAc,EAAO,CACnC,MAAM,QAAQ,IACZ,EACG,OAAQ,GAAM,EAAE,WAAW,GAAG,EAAS,SAAS,E
AAI,EAAE,SAAS,OAAO,CAAC,CACvE,IAAK,IAAA,EAAA,EAAA,SAAA,EAAA,EAAA,MAAkB,EAAQ,EAAE,CAAC,CAAC,CACvC,CAGH,IAAI,EAA6B,KAC7B,EAA8B,KAC9B,EAAa,EACb,EAAe,EACf,EAAc,EAEZ,EAAS,IAAIC,EAAAA,OAAO,CACxB,QAAS,GACT,iBAAkB,GACnB,CAAC,CAGE,EAAc,EACd,EAAa,EAEX,MAAmB,CACvB,IAAM,EAAM,EAAa,EAAI,EAAc,EAAa,EAClD,EACJ,EAAM,EAAI,KAAK,IAAI,EAAY,KAAK,KAAK,EAAY,EAAI,CAAC,CAAG,IAAA,GAC/D,EAAW,EAAY,EAAS,CAChC,EAAW,KAAK,KAAK,EAIvB,GAAM,CAGN,IAAI,EAKO,KAGL,OAAA,EAAA,EAAA,MACC,EAAQ,GAAG,EAAS,SAAS,EAAK,EAAa,CAAC,MAAM,CAEvD,EAAI,IAAIC,EAAAA,UAAU,CACtB,WAAY,GAQZ,MAAM,UAAU,EAAe,EAAM,EAAI,CACvC,GAAI,CAEF,GAAI,CAAC,EAAW,CACd,EAAY,EAAI,MAAM,EAAE,CACxB,EAAe,EAAU,OAGzB,EAAS,EAAqB,GAAkB,CAAE,EAAU,CAC5D,GAAI,CACJ,OAIE,IAAiB,MAAQ,EAAI,SAAW,GAE1C,EAAA,EAAO,KACL,EAAA,QAAO,OACL,WAAW,EAAI,OAAO,kBAAkB,IACzC,CACF,CAGH,GAAc,EACV,EAAa,MAAY,GAC3B,EAAW,EAAW,CAIxB,IAAM,EAAM,OAAO,YAAY,EAAW,KAAK,EAAG,IAAM,CAAC,EAAG,EAAI,GAAG,CAAC,CAAC,CAG/D,EAAW,EAAe,EAAI,CACpC,GAAe,EACf,GAAc,EAGV,KAAK,KAAK,CAAG,GAAY,GAAe,GAAM,CAKhD,GACA,EAAc,GACd,EAAc,EAAW,IAEzB,MAAM,EAAO,KAAK,CAClB,GAAgB,EAChB,EAAc,EACd,EAAA,EAAO,KACL,EAAA,QAAO,MACL,oBAAoB,EAAa,SAAS,EAAW,gBAAgB,CAAC,QACvE,CACF,CACD,EAAS,EAAqB,GAAkB,CAAE,EAAW,EAI/D,AACE,IAAS,EAAqB,GAAkB,CAAE,EAAW,CAI/D,MAAM,EAAO,MAAM,EAAI,CACvB,GAAe,EAEf,GAAI,OACG,EAAG,CACV,EAAG,EAAW,GAUlB,MAAM,MAAM,EAAI,CACd,GAAI,CACF,AAEE,KADA,MAAM,EAAO,KAAK,CACT,MAEX,GAAM,CACN,GAAI,OACG,EAAG,CACV,EAAG,EAAW,GAGnB,CAAC,CAGF,MAAA,EAAA,EAAA,WAAA,EAAA,EAAA,kBAD4B,EAAS,CAClB,EAAQ,EAAE,CAG7B,EAAW,EAAW,CACtB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,WAAW,EAAS,QAAQ,EAAa,sBAAsB,EAAW,gBAAgB,CAAC,QAC5F,CACF,CCtOH,eAAsB,GAA0B,CAC9C,IAAM,EAAW,OAAO,QAAQ,IAAI,WAAa,IAAI,CACrD,EAAA,EAAO,KAAK,KAAK,EAAS,cAAc,QAAQ,MAAM,CAGtD,QAAQ,OAAO,CAAE,KAAM,QAAS,CAAC,CAGjC,QAAQ,GAAG,UAAW,KAAO,IAA6B,CASxD,GARI,CAAC,GAAO,OAAO,GAAQ,WAGvB,EAAI,OAAS,YACf,QAAQ,KAAK,EAAE,CAIb,EAAI,OAAS,QAAQ,OAEzB,GAAM,CAAE,WAAU,WAAY,EAAI,QAC5B,CAAE,YAAW,iBAAgB,eAAgB,EAEnD,GAAI,CAEF,MAAM,EAAgB,CACpB,WACA,YACA,iBACA,cAEA,YAAa,EAAW,IACtB,QAAQ,OAAO,CACb,KAAM,WACN,QAAS,CAAE,WAAU,YAAW,QAAO,CACxC,CAAC,CACL,CAAC,CAGF,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAM,WAAU,CAChC,CAAC,OACK,EAAK,CAEZ,IAAM,EAAUC,EAAAA,EAAoB,EAAI,CACxC,EAAA,EAAO,MAAM,KAAK,EAAS,UAAU,EAAS,IAAI,IAAU,CAC5D,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAO,WAAU,MAAO,EAAS,CACjD,CAAC,GAEJ,CAGF,MAAM,IAAI,YAAqB,GAG7B,CCzGJ,SAAS,EACP,EACU,CAEV,OAAOC,EAAAA,EAAW,EAAI,CASxB,SAAS,EACP,EACU,CACV,OAAOC,EAAAA,EAAe,EAAI,CAG5B,MAAa,EAA8D,CACzE,eACA,gBAED,CCVD,SAAS,GAA+B,CAItC,OAHI,OAAO,WAAe,IACjB,WAEF,QAAQ,KAAK,GAqCtB,eAAsB,EAEpB,EACe,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CACJ,YACA,YACA,iBACA,cACA,cACA,cACE,EAGE,EAAQ,EAAsB,EAAW,KAAK,CAG9C,CAAE,WAAU,YAAaC,EAAAA,EAAgB,EAAa,EAAM,OAAO,CAEzE,EAAA,EAAO,KACL,EAAA,QAAO,MACL,YAAY,EAAM,OAAO,8BAA8B,EAAS,QAAQ,EAAS,GAClF,CACF,CAGD,IAAM,EAAQ,EAAM,IAAgB,IAAc,CAChD,WACA,QAAS,CAAE,YAAW,iBAAgB,cAAa,CACpD,EAAE,CAiBH,MAAMC,EAAAA,EAAQ,CACZ,MAAO,eAAe,IACtB,QAAS,GAAa,GAAa,QAAQ,KAAK,CAChD,UAAWC,EAAAA,EACX,gBAAiB,GAAsB,CACvC,WACA,WACA,WAAY,EAAM,OAClB,MAtBsE,CACtE,aAAgB,EAAM,OAAO,CAC7B,UAAY,GAAM,EAAE,SACpB,gBAAmB,EAAE,EACrB,qBAAwB,IAAA,GACxB,WAAa,GAAW,EACxB,UAAW,EAAQ,KAAS,CAAE,SAAQ,GAAI,CAAC,CAAC,EAAI,GAAI,EAEpD,YAAa,SAAY,GAG1B,CAYC,aACA,OAAS,GAAUC,EAAAA,EAAgB,EAAO,EAAgB,EAAW,CACrE,iBAAkB,CAAE,aAAY,UAAS,eACvCC,EAAAA,EAAsB,CACpB,aACA,UACA,YACD,CAAC,CACL,CAAC,CAMA,QAAQ,KAAK,SAASF,EAAAA,EAAW,EACnC,GAAU,CAAC,MAAO,GAAQ,CACxB,EAAA,EAAO,MAAM,EAAI,CACjB,QAAQ,KAAK,EAAE,EACf"}
+ {"version":3,"file":"impl-DGRFzFu_.cjs","names":["fastcsv","Parser","Transform","extractErrorMessage","makeHeader","makeWorkerRows","computePoolSize","runPool","CHILD_FLAG","dashboardPlugin","createExtraKeyHandler"],"sources":["../src/lib/helpers/collectCsvFilesOrExit.ts","../src/lib/helpers/chunkOneCsvFile.ts","../src/commands/admin/chunk-csv/worker.ts","../src/commands/admin/chunk-csv/ui/plugin.ts","../src/commands/admin/chunk-csv/impl.ts"],"sourcesContent":["import { join } from 'node:path';\nimport { readdirSync, statSync } from 'node:fs';\nimport colors from 'colors';\nimport { logger } from '../../logger';\nimport type { LocalContext } from '../../context';\n\n/**\n * Validate flags and collect CSV file paths from a directory.\n * On validation error, the provided `exit` function is called.\n *\n * @param directory - the directory containing CSV files\n * @param localContext - the context of the command, used for logging and exit\n * @returns an array of valid CSV file paths\n */\nexport function collectCsvFilesOrExit(\n directory: string | undefined,\n localContext: LocalContext,\n): string[] {\n if (!directory) {\n logger.error(colors.red('A --directory must be provided.'));\n localContext.process.exit(1);\n }\n\n let files: string[] = [];\n try {\n const entries = readdirSync(directory);\n files = entries\n .filter((f) => f.endsWith('.csv'))\n .map((f) => join(directory, f))\n .filter((p) => {\n try {\n return statSync(p).isFile();\n } catch {\n return false;\n }\n });\n } catch (err) {\n logger.error(colors.red(`Failed to read directory: ${directory}`));\n logger.error(colors.red((err as Error).message));\n localContext.process.exit(1);\n }\n\n if (files.length === 0) {\n logger.error(colors.red(`No CSV files found in directory: ${directory}`));\n localContext.process.exit(1);\n }\n logger.info(colors.green(`Found: ${files.join(', ')} CSV files`));\n return files;\n}\n","import { createReadStream, createWriteStream } from 'node:fs';\nimport { mkdir, readdir, unlink, stat } from 'node:fs/promises';\nimport { pipeline } from 'node:stream/promises';\nimport { Transform } from 'node:stream';\nimport { once } from 'node:events';\nimport { Parser } from 'csv-parse';\nimport { basename, dirname, join } from 'node:path';\nimport colors from 'colors';\nimport * as fastcsv from 'fast-csv';\nimport { logger } from '../../logger';\n\n/**\n * Options for chunking a single CSV file\n */\nexport type ChunkOpts = {\n /** Path to the CSV file to chunk */\n filePath: string;\n /** Output directory for chunk files; defaults to the same directory as the input file */\n outputDir?: string;\n /** Clear output directory before starting */\n clearOutputDir: boolean;\n /** Chunk size in MB */\n chunkSizeMB: number;\n /** Optional report interval in milliseconds for progress updates */\n reportEveryMs?: number;\n /** Callback for progress updates */\n onProgress: (processed: number, total?: number) => void;\n};\n\n/**\n * Create a CSV writer (fast-csv formatter piped to a write stream) that writes\n * a header line first, and then accepts object rows. 
Returns a tiny API to\n * write rows with backpressure handling and to close the file cleanly.\n *\n * @param filePath - The path to the output CSV file\n * @param headers - The headers for the CSV file\n * @returns An object with `write` and `end` methods\n */\nfunction createCsvChunkWriter(\n filePath: string,\n headers: string[],\n): {\n /** Write a row object to the CSV file */\n write: (row: Record<string, unknown>) => Promise<void>;\n /** Close the CSV file, ensuring all data is flushed */\n end: () => Promise<void>;\n} {\n const ws = createWriteStream(filePath);\n const csv = fastcsv.format({ headers, writeHeaders: true, objectMode: true });\n // Pipe csv → file stream\n csv.pipe(ws);\n\n return {\n /**\n * Write a row object to the CSV file.\n *\n * @param row - The row data as an object\n */\n async write(row) {\n // Respect backpressure from fast-csv formatter\n const ok = csv.write(row);\n if (!ok) {\n await once(csv, 'drain');\n }\n },\n /**\n * Close the CSV file, ensuring all data is flushed.\n */\n async end() {\n // End formatter; wait for underlying file stream to finish flush/close\n const finished = Promise.all([once(ws, 'finish')]);\n csv.end();\n await finished;\n },\n };\n}\n\n/**\n * Zero-pad chunk numbers to four digits (e.g., 1 → \"0001\").\n *\n * @param n - The chunk number to pad\n * @returns The padded chunk number as a string\n */\nfunction pad4(n: number): string {\n return String(n).padStart(4, '0');\n}\n\n/**\n * Approximate row size in bytes using comma-joined field values.\n *\n * @param obj - The row object to estimate size for\n * @returns Approximate byte size of the row when serialized as CSV\n */\nfunction approxRowBytes(obj: Record<string, unknown>): number {\n // naive but fast; adequate for chunk rollover thresholding\n return Buffer.byteLength(\n Object.values(obj)\n .map((v) => (v == null ? '' : String(v)))\n .join(','),\n 'utf8',\n );\n}\n\n/**\n * Stream a single CSV file and write chunk files of roughly chunkSizeMB.\n * - Writes header to each chunk.\n * - Logs periodic progress via onProgress.\n *\n * @param opts - Options for chunking the file\n * @returns Promise that resolves when done\n */\nexport async function chunkOneCsvFile(opts: ChunkOpts): Promise<void> {\n const {\n filePath,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n onProgress,\n reportEveryMs = 500,\n } = opts;\n const { size: fileBytes } = await stat(filePath); // total bytes on disk\n let lastTick = 0;\n\n logger.info(\n colors.magenta(`Chunking ${filePath} into ~${chunkSizeMB}MB files...`),\n );\n\n const chunkSizeBytes = Math.floor(chunkSizeMB * 1024 * 1024);\n const baseName = basename(filePath, '.csv');\n const outDir = outputDir || dirname(filePath);\n logger.info(colors.magenta(`Output directory: ${outDir}`));\n await mkdir(outDir, { recursive: true });\n\n // Clear previous chunk files for this base\n if (clearOutputDir) {\n logger.warn(colors.yellow(`Clearing output directory: ${outDir}`));\n const files = await readdir(outDir);\n await Promise.all(\n files\n .filter((f) => f.startsWith(`${baseName}_chunk_`) && f.endsWith('.csv'))\n .map((f) => unlink(join(outDir, f))),\n );\n }\n\n let headerRow: string[] | null = null;\n let expectedCols: number | null = null;\n let totalLines = 0;\n let currentChunk = 1;\n let currentSize = 0;\n\n const parser = new Parser({\n columns: false,\n skip_empty_lines: true,\n });\n\n // running sample to estimate avg row bytes\n let sampleBytes = 0;\n let sampleRows = 0;\n\n const emit = (): void => {\n const avg = sampleRows > 0 ? 
sampleBytes / sampleRows : 0;\n const estTotal =\n avg > 0 ? Math.max(totalLines, Math.ceil(fileBytes / avg)) : undefined;\n onProgress(totalLines, estTotal); // <-- now has total\n lastTick = Date.now();\n };\n\n // seed an initial 0/N as soon as we start\n emit();\n\n // Current active chunk writer; created after we know headers\n let writer: {\n /** Write a row object to the current chunk file */\n write: (row: Record<string, unknown>) => Promise<void>;\n /** Close the current chunk file */\n end: () => Promise<void>;\n } | null = null;\n\n // Returns current chunk file path — chunk number is always 4-digit padded\n const currentChunkPath = (): string =>\n join(outDir, `${baseName}_chunk_${pad4(currentChunk)}.csv`);\n\n const t = new Transform({\n objectMode: true,\n /**\n * Transform each row of the CSV file into a chunk.\n *\n * @param row - The current row being processed\n * @param _enc - Encoding (not used)\n * @param cb - Callback to signal completion or error\n */\n async transform(row: string[], _enc, cb) {\n try {\n // First row is the header\n if (!headerRow) {\n headerRow = row.slice(0);\n expectedCols = headerRow.length;\n\n // Open first chunk with header asynchronously\n writer = createCsvChunkWriter(currentChunkPath(), headerRow);\n cb();\n return;\n }\n\n // sanity check rows (non-fatal)\n if (expectedCols !== null && row.length !== expectedCols) {\n // optionally log a warning or collect metrics\n logger.warn(\n colors.yellow(\n `Row has ${row.length} cols; expected ${expectedCols}`,\n ),\n );\n }\n\n totalLines += 1;\n if (totalLines % 250_000 === 0) {\n onProgress(totalLines);\n }\n\n // Build row object using the original header\n const obj = Object.fromEntries(headerRow!.map((h, i) => [h, row[i]]));\n\n // Determine the row size up-front\n const rowBytes = approxRowBytes(obj);\n sampleBytes += rowBytes;\n sampleRows += 1;\n\n // time-based throttle for UI updates\n if (Date.now() - lastTick >= reportEveryMs) emit();\n\n // If adding this row would exceed the threshold, roll first,\n // so this row becomes the first row in the next chunk.\n if (\n writer &&\n currentSize > 0 &&\n currentSize + rowBytes > chunkSizeBytes\n ) {\n await writer.end();\n currentChunk += 1;\n currentSize = 0;\n logger.info(\n colors.green(\n `Rolling to chunk ${currentChunk} after ${totalLines.toLocaleString()} rows.`,\n ),\n );\n writer = createCsvChunkWriter(currentChunkPath(), headerRow!);\n }\n\n // Ensure writer exists (should after header)\n if (!writer) {\n writer = createCsvChunkWriter(currentChunkPath(), headerRow!);\n }\n\n // Write row and update approximate size\n await writer.write(obj);\n currentSize += rowBytes;\n\n cb();\n } catch (e) {\n cb(e as Error);\n }\n },\n\n // Ensure final file is closed\n /**\n * Flush is called when the readable has ended; we close any open writer.\n *\n * @param cb - Callback to signal completion or error\n */\n async flush(cb) {\n try {\n if (writer) {\n await writer.end();\n writer = null;\n }\n emit(); // Final progress tick\n cb();\n } catch (e) {\n cb(e as Error);\n }\n },\n });\n\n const rs = createReadStream(filePath);\n await pipeline(rs, parser, t);\n\n // Final progress tick\n onProgress(totalLines);\n logger.info(\n colors.green(\n `Chunked ${filePath} into ${currentChunk} file(s); processed ${totalLines.toLocaleString()} rows.`,\n ),\n );\n}\n","import { extractErrorMessage } from '../../../lib/helpers';\nimport { chunkOneCsvFile } from '../../../lib/helpers/chunkOneCsvFile';\nimport type { ToWorker } from 
'../../../lib/pooling';\nimport { logger } from '../../../logger';\n\n/**\n * A unit of work: instructs a worker to chunk a single CSV file.\n */\nexport type ChunkTask = {\n /** Absolute path of the CSV file to chunk. */\n filePath: string;\n /** Options controlling output and chunk size. */\n options: {\n /** Optional directory where chunked output files should be written. */\n outputDir?: string;\n /** Whether to clear any pre-existing output chunks before writing new ones. */\n clearOutputDir: boolean;\n /** Approximate target chunk size in MB (well under Node’s string size limits). */\n chunkSizeMB: number;\n };\n};\n\n/**\n * Per-worker progress snapshot for the chunk-csv command.\n */\nexport type ChunkProgress = {\n /** File being processed by the worker. */\n filePath: string;\n /** Number of rows processed so far. */\n processed: number;\n /** Optional total rows in the file (not always known). */\n total?: number;\n};\n\n/**\n * Worker result message once a file has finished processing.\n */\nexport type ChunkResult = {\n /** Whether the file completed successfully. */\n ok: boolean;\n /** File path for which this result applies. */\n filePath: string;\n /** Optional error message if the file failed to chunk. */\n error?: string;\n};\n\n/**\n * Worker entrypoint.\n *\n * Lifecycle:\n * 1) Announce readiness to the parent via `{ type: 'ready' }`.\n * 2) Wait for `{ type: 'task' }` messages; for each, call `chunkOneCsvFile(...)`.\n * - While chunking, forward progress to the parent via `{ type: 'progress' }`.\n * - On completion, send `{ type: 'result', ok: true }`.\n * - On error, send `{ type: 'result', ok: false, error }` and exit(1).\n * 3) On `{ type: 'shutdown' }`, exit(0) gracefully.\n *\n * Notes:\n * - This process is typically spawned by a pool manager that assigns file paths to workers.\n * - The long-lived promise at the end keeps the worker alive between tasks until the parent\n * sends an explicit shutdown.\n */\nexport async function runChild(): Promise<void> {\n const workerId = Number(process.env.WORKER_ID || '0');\n logger.info(`[w${workerId}] ready pid=${process.pid}`);\n\n // Notify the parent that the worker is ready to receive tasks.\n process.send?.({ type: 'ready' });\n\n // Main message loop: receive tasks and shutdown requests from the parent.\n process.on('message', async (msg: ToWorker<ChunkTask>) => {\n if (!msg || typeof msg !== 'object') return;\n\n // Graceful shutdown: let the parent control lifecycle.\n if (msg.type === 'shutdown') {\n process.exit(0);\n }\n\n // Only handle task messages here.\n if (msg.type !== 'task') return;\n\n const { filePath, options } = msg.payload;\n const { outputDir, clearOutputDir, chunkSizeMB } = options;\n\n try {\n // Stream the input CSV and write chunk files asynchronously.\n await chunkOneCsvFile({\n filePath,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n // Propagate incremental progress to the parent.\n onProgress: (processed, total) =>\n process.send?.({\n type: 'progress',\n payload: { filePath, processed, total },\n }),\n });\n\n // Report success to the parent.\n process.send?.({\n type: 'result',\n payload: { ok: true, filePath },\n });\n } catch (err) {\n // Log locally and report failure upstream; exit the worker with error code.\n const message = extractErrorMessage(err);\n logger.error(`[w${workerId}] ERROR ${filePath}: ${message}`);\n process.send?.({\n type: 'result',\n payload: { ok: false, filePath, error: message },\n });\n }\n });\n\n // keep alive\n await new Promise<never>(() => {\n // This 
promise never resolves, keeping the worker alive indefinitely\n // until the parent process instructs shutdown.\n });\n}\n","import {\n makeHeader,\n makeWorkerRows,\n type ChunkSlotProgress,\n type CommonCtx,\n type DashboardPlugin,\n} from '../../../../lib/pooling';\n\n/**\n * Header for chunk-csv (no extra totals block).\n *\n * @param ctx - Dashboard context.\n * @returns Header lines.\n */\nfunction renderHeader<TTotals>(\n ctx: CommonCtx<TTotals, ChunkSlotProgress>,\n): string[] {\n // no extra lines — reuse the shared header as-is\n return makeHeader(ctx);\n}\n\n/**\n * Worker rows for chunk-csv — share the generic row renderer.\n *\n * @param ctx - Dashboard context.\n * @returns Array of strings, each representing one worker row.\n */\nfunction renderWorkers<TTotals>(\n ctx: CommonCtx<TTotals, ChunkSlotProgress>,\n): string[] {\n return makeWorkerRows(ctx);\n}\n\nexport const chunkCsvPlugin: DashboardPlugin<unknown, ChunkSlotProgress> = {\n renderHeader,\n renderWorkers,\n // no extras\n};\n","import type { LocalContext } from '../../../context';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { collectCsvFilesOrExit } from '../../../lib/helpers/collectCsvFilesOrExit';\nimport {\n computePoolSize,\n createExtraKeyHandler,\n CHILD_FLAG,\n type PoolHooks,\n runPool,\n dashboardPlugin,\n} from '../../../lib/pooling';\nimport {\n runChild,\n type ChunkProgress,\n type ChunkResult,\n type ChunkTask,\n} from './worker';\nimport { chunkCsvPlugin } from './ui';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n/**\n * Returns the current module's path so the worker pool knows what file to re-exec.\n * In Node ESM, __filename is undefined, so we fall back to argv[1].\n *\n * @returns The current module's path as a string\n */\nfunction getCurrentModulePath(): string {\n if (typeof __filename !== 'undefined') {\n return __filename as unknown as string;\n }\n return process.argv[1];\n}\n\n/**\n * Totals aggregate for this command.\n * We don’t need custom counters since the runner already tracks\n * completed/failed counts in its header — so we just use an empty record.\n */\ntype Totals = Record<string, never>;\n\n/**\n * CLI flags accepted by the `chunk-csv` command.\n *\n * These are passed down from the CLI parser into the parent process.\n */\nexport type ChunkCsvCommandFlags = {\n directory: string;\n outputDir?: string;\n clearOutputDir: boolean;\n chunkSizeMB: number;\n concurrency?: number;\n viewerMode: boolean;\n};\n\n/**\n * Parent entrypoint for chunking many CSVs in parallel using the worker pool runner.\n *\n * Lifecycle:\n * 1) Discover CSV inputs (exit if none).\n * 2) Compute pool size (CPU-count heuristic or --concurrency).\n * 3) Build a FIFO queue of `ChunkTask`s.\n * 4) Define pool hooks to drive task assignment, progress, and result handling.\n * 5) Launch the pool with `runPool`, rendering via the `chunkCsvPlugin`.\n *\n * @param this - Bound CLI context (provides process exit + logging).\n * @param flags - CLI options for the run.\n */\nexport async function chunkCsv(\n this: LocalContext,\n flags: ChunkCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const {\n directory,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n concurrency,\n viewerMode,\n } = flags;\n\n /* 1) Discover CSV inputs */\n const files = collectCsvFilesOrExit(directory, this);\n\n /* 2) Size the pool */\n const { poolSize, cpuCount } = computePoolSize(concurrency, files.length);\n\n logger.info(\n 
colors.green(\n `Chunking ${files.length} CSV file(s) with pool size ${poolSize} (CPU=${cpuCount})`,\n ),\n );\n\n /* 3) Prepare a simple FIFO queue of tasks (one per file). */\n const queue = files.map<ChunkTask>((filePath) => ({\n filePath,\n options: { outputDir, clearOutputDir, chunkSizeMB },\n }));\n\n /* 4) Define pool hooks to adapt runner to this command. */\n const hooks: PoolHooks<ChunkTask, ChunkProgress, ChunkResult, Totals> = {\n nextTask: () => queue.shift(),\n taskLabel: (t) => t.filePath,\n initTotals: () => ({} as Totals),\n initSlotProgress: () => undefined,\n onProgress: (totals) => totals,\n onResult: (totals, res) => ({ totals, ok: !!res.ok }),\n // postProcess receives log context when viewerMode=true — we don’t need it here.\n postProcess: async () => {\n // nothing extra for chunk-csv\n },\n };\n\n /* 5) Launch the pool runner with our hooks and custom dashboard plugin. */\n await runPool({\n title: `Chunk CSV - ${directory}`,\n baseDir: directory || outputDir || process.cwd(),\n childFlag: CHILD_FLAG,\n childModulePath: getCurrentModulePath(),\n poolSize,\n cpuCount,\n filesTotal: files.length,\n hooks,\n viewerMode,\n render: (input) => dashboardPlugin(input, chunkCsvPlugin, viewerMode),\n extraKeyHandler: ({ logsBySlot, repaint, setPaused }) =>\n createExtraKeyHandler({\n logsBySlot,\n repaint,\n setPaused,\n }),\n });\n}\n\n/* -------------------------------------------------------------------------------------------------\n * If invoked directly as a child process, enter worker loop\n * ------------------------------------------------------------------------------------------------- */\nif (process.argv.includes(CHILD_FLAG)) {\n runChild().catch((err) => {\n logger.error(err);\n process.exit(1);\n });\n}\n"],"mappings":"wjBAcA,SAAgB,EACd,EACA,EACU,CACL,IACH,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,kCAAkC,CAAC,CAC3D,EAAa,QAAQ,KAAK,EAAE,EAG9B,IAAI,EAAkB,EAAE,CACxB,GAAI,CAEF,GAAA,EAAA,EAAA,aAD4B,EAAU,CAEnC,OAAQ,GAAM,EAAE,SAAS,OAAO,CAAC,CACjC,IAAK,IAAA,EAAA,EAAA,MAAW,EAAW,EAAE,CAAC,CAC9B,OAAQ,GAAM,CACb,GAAI,CACF,OAAA,EAAA,EAAA,UAAgB,EAAE,CAAC,QAAQ,MACrB,CACN,MAAO,KAET,OACG,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,6BAA6B,IAAY,CAAC,CAClE,EAAA,EAAO,MAAM,EAAA,QAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,EAAa,QAAQ,KAAK,EAAE,CAQ9B,OALI,EAAM,SAAW,IACnB,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,oCAAoC,IAAY,CAAC,CACzE,EAAa,QAAQ,KAAK,EAAE,EAE9B,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,UAAU,EAAM,KAAK,KAAK,CAAC,YAAY,CAAC,CAC1D,ECTT,SAAS,EACP,EACA,EAMA,CACA,IAAM,GAAA,EAAA,EAAA,mBAAuB,EAAS,CAChC,EAAMA,EAAQ,OAAO,CAAE,UAAS,aAAc,GAAM,WAAY,GAAM,CAAC,CAI7E,OAFA,EAAI,KAAK,EAAG,CAEL,CAML,MAAM,MAAM,EAAK,CAEJ,EAAI,MAAM,EAAI,EAEvB,MAAA,EAAA,EAAA,MAAW,EAAK,QAAQ,EAM5B,MAAM,KAAM,CAEV,IAAM,EAAW,QAAQ,IAAI,EAAA,EAAA,EAAA,MAAM,EAAI,SAAS,CAAC,CAAC,CAClD,EAAI,KAAK,CACT,MAAM,GAET,CASH,SAAS,EAAK,EAAmB,CAC/B,OAAO,OAAO,EAAE,CAAC,SAAS,EAAG,IAAI,CASnC,SAAS,EAAe,EAAsC,CAE5D,OAAO,OAAO,WACZ,OAAO,OAAO,EAAI,CACf,IAAK,GAAO,GAAK,KAAO,GAAK,OAAO,EAAE,CAAE,CACxC,KAAK,IAAI,CACZ,OACD,CAWH,eAAsB,EAAgB,EAAgC,CACpE,GAAM,CACJ,WACA,YACA,iBACA,cACA,aACA,gBAAgB,KACd,EACE,CAAE,KAAM,GAAc,MAAA,EAAA,EAAA,MAAW,EAAS,CAC5C,EAAW,EAEf,EAAA,EAAO,KACL,EAAA,QAAO,QAAQ,YAAY,EAAS,SAAS,EAAY,aAAa,CACvE,CAED,IAAM,EAAiB,KAAK,MAAM,EAAc,KAAO,KAAK,CACtD,GAAA,EAAA,EAAA,UAAoB,EAAU,OAAO,CACrC,EAAS,IAAA,EAAA,EAAA,SAAqB,EAAS,CAK7C,GAJA,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,qBAAqB,IAAS,CAAC,CAC1D,MAAA,EAAA,EAAA,OAAY,EAAQ,CAAE,UAAW,GAAM,CAAC,CAGpC,EAAgB,CAClB,EAAA,EAAO,KAAK,EAAA,QAAO,OAAO,8BAA8B,IAAS,CAAC,CAClE,IAAM,EAAQ,MAAA,EAAA,EAAA,SAAc,EAAO,CACnC,MAAM,QAAQ,IACZ,EACG,OAAQ,GAAM,EAAE,WAAW,GAAG,EAAS,SAAS,E
AAI,EAAE,SAAS,OAAO,CAAC,CACvE,IAAK,IAAA,EAAA,EAAA,SAAA,EAAA,EAAA,MAAkB,EAAQ,EAAE,CAAC,CAAC,CACvC,CAGH,IAAI,EAA6B,KAC7B,EAA8B,KAC9B,EAAa,EACb,EAAe,EACf,EAAc,EAEZ,EAAS,IAAIC,EAAAA,OAAO,CACxB,QAAS,GACT,iBAAkB,GACnB,CAAC,CAGE,EAAc,EACd,EAAa,EAEX,MAAmB,CACvB,IAAM,EAAM,EAAa,EAAI,EAAc,EAAa,EAClD,EACJ,EAAM,EAAI,KAAK,IAAI,EAAY,KAAK,KAAK,EAAY,EAAI,CAAC,CAAG,IAAA,GAC/D,EAAW,EAAY,EAAS,CAChC,EAAW,KAAK,KAAK,EAIvB,GAAM,CAGN,IAAI,EAKO,KAGL,OAAA,EAAA,EAAA,MACC,EAAQ,GAAG,EAAS,SAAS,EAAK,EAAa,CAAC,MAAM,CAEvD,EAAI,IAAIC,EAAAA,UAAU,CACtB,WAAY,GAQZ,MAAM,UAAU,EAAe,EAAM,EAAI,CACvC,GAAI,CAEF,GAAI,CAAC,EAAW,CACd,EAAY,EAAI,MAAM,EAAE,CACxB,EAAe,EAAU,OAGzB,EAAS,EAAqB,GAAkB,CAAE,EAAU,CAC5D,GAAI,CACJ,OAIE,IAAiB,MAAQ,EAAI,SAAW,GAE1C,EAAA,EAAO,KACL,EAAA,QAAO,OACL,WAAW,EAAI,OAAO,kBAAkB,IACzC,CACF,CAGH,GAAc,EACV,EAAa,MAAY,GAC3B,EAAW,EAAW,CAIxB,IAAM,EAAM,OAAO,YAAY,EAAW,KAAK,EAAG,IAAM,CAAC,EAAG,EAAI,GAAG,CAAC,CAAC,CAG/D,EAAW,EAAe,EAAI,CACpC,GAAe,EACf,GAAc,EAGV,KAAK,KAAK,CAAG,GAAY,GAAe,GAAM,CAKhD,GACA,EAAc,GACd,EAAc,EAAW,IAEzB,MAAM,EAAO,KAAK,CAClB,GAAgB,EAChB,EAAc,EACd,EAAA,EAAO,KACL,EAAA,QAAO,MACL,oBAAoB,EAAa,SAAS,EAAW,gBAAgB,CAAC,QACvE,CACF,CACD,EAAS,EAAqB,GAAkB,CAAE,EAAW,EAI/D,AACE,IAAS,EAAqB,GAAkB,CAAE,EAAW,CAI/D,MAAM,EAAO,MAAM,EAAI,CACvB,GAAe,EAEf,GAAI,OACG,EAAG,CACV,EAAG,EAAW,GAUlB,MAAM,MAAM,EAAI,CACd,GAAI,CACF,AAEE,KADA,MAAM,EAAO,KAAK,CACT,MAEX,GAAM,CACN,GAAI,OACG,EAAG,CACV,EAAG,EAAW,GAGnB,CAAC,CAGF,MAAA,EAAA,EAAA,WAAA,EAAA,EAAA,kBAD4B,EAAS,CAClB,EAAQ,EAAE,CAG7B,EAAW,EAAW,CACtB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,WAAW,EAAS,QAAQ,EAAa,sBAAsB,EAAW,gBAAgB,CAAC,QAC5F,CACF,CCtOH,eAAsB,GAA0B,CAC9C,IAAM,EAAW,OAAO,QAAQ,IAAI,WAAa,IAAI,CACrD,EAAA,EAAO,KAAK,KAAK,EAAS,cAAc,QAAQ,MAAM,CAGtD,QAAQ,OAAO,CAAE,KAAM,QAAS,CAAC,CAGjC,QAAQ,GAAG,UAAW,KAAO,IAA6B,CASxD,GARI,CAAC,GAAO,OAAO,GAAQ,WAGvB,EAAI,OAAS,YACf,QAAQ,KAAK,EAAE,CAIb,EAAI,OAAS,QAAQ,OAEzB,GAAM,CAAE,WAAU,WAAY,EAAI,QAC5B,CAAE,YAAW,iBAAgB,eAAgB,EAEnD,GAAI,CAEF,MAAM,EAAgB,CACpB,WACA,YACA,iBACA,cAEA,YAAa,EAAW,IACtB,QAAQ,OAAO,CACb,KAAM,WACN,QAAS,CAAE,WAAU,YAAW,QAAO,CACxC,CAAC,CACL,CAAC,CAGF,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAM,WAAU,CAChC,CAAC,OACK,EAAK,CAEZ,IAAM,EAAUC,EAAAA,EAAoB,EAAI,CACxC,EAAA,EAAO,MAAM,KAAK,EAAS,UAAU,EAAS,IAAI,IAAU,CAC5D,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAO,WAAU,MAAO,EAAS,CACjD,CAAC,GAEJ,CAGF,MAAM,IAAI,YAAqB,GAG7B,CCzGJ,SAAS,EACP,EACU,CAEV,OAAOC,EAAAA,EAAW,EAAI,CASxB,SAAS,EACP,EACU,CACV,OAAOC,EAAAA,EAAe,EAAI,CAG5B,MAAa,EAA8D,CACzE,eACA,gBAED,CCVD,SAAS,GAA+B,CAItC,OAHI,OAAO,WAAe,IACjB,WAEF,QAAQ,KAAK,GAqCtB,eAAsB,EAEpB,EACe,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CACJ,YACA,YACA,iBACA,cACA,cACA,cACE,EAGE,EAAQ,EAAsB,EAAW,KAAK,CAG9C,CAAE,WAAU,YAAaC,EAAAA,EAAgB,EAAa,EAAM,OAAO,CAEzE,EAAA,EAAO,KACL,EAAA,QAAO,MACL,YAAY,EAAM,OAAO,8BAA8B,EAAS,QAAQ,EAAS,GAClF,CACF,CAGD,IAAM,EAAQ,EAAM,IAAgB,IAAc,CAChD,WACA,QAAS,CAAE,YAAW,iBAAgB,cAAa,CACpD,EAAE,CAiBH,MAAMC,EAAAA,EAAQ,CACZ,MAAO,eAAe,IACtB,QAAS,GAAa,GAAa,QAAQ,KAAK,CAChD,UAAWC,EAAAA,EACX,gBAAiB,GAAsB,CACvC,WACA,WACA,WAAY,EAAM,OAClB,MAtBsE,CACtE,aAAgB,EAAM,OAAO,CAC7B,UAAY,GAAM,EAAE,SACpB,gBAAmB,EAAE,EACrB,qBAAwB,IAAA,GACxB,WAAa,GAAW,EACxB,UAAW,EAAQ,KAAS,CAAE,SAAQ,GAAI,CAAC,CAAC,EAAI,GAAI,EAEpD,YAAa,SAAY,GAG1B,CAYC,aACA,OAAS,GAAUC,EAAAA,EAAgB,EAAO,EAAgB,EAAW,CACrE,iBAAkB,CAAE,aAAY,UAAS,eACvCC,EAAAA,EAAsB,CACpB,aACA,UACA,YACD,CAAC,CACL,CAAC,CAMA,QAAQ,KAAK,SAASF,EAAAA,EAAW,EACnC,GAAU,CAAC,MAAO,GAAQ,CACxB,EAAA,EAAO,MAAM,EAAI,CACjB,QAAQ,KAAK,EAAE,EACf"}
@@ -1,2 +1,2 @@
- const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-B-TmLA0w.cjs`);const t=require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`),n=require(`./logger-BaHHbWVd.cjs`);require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const r=require(`./done-input-validation-Cgk5kNBs.cjs`),i=require(`./preference-management-5uJDKuMK.cjs`);let a=require(`colors`);a=e.s(a);const o=[`userId`,`timestamp`,`partition`,`decryptionStatus`,`updatedAt`,`usp`,`gpp`,`tcf`,`airgapVersion`,`metadata`,`metadataTimestamp`];async function s({auth:e,partition:s,sombraAuth:c,file:l,transcendUrl:u,timestampBefore:d,timestampAfter:f,updatedBefore:p,updatedAfter:m,identifiers:h=[],concurrency:g,shouldChunk:_,windowConcurrency:v,maxChunks:y,exportIdentifiersWithDelimiter:b,maxLookbackDays:x}){r.t(this.process.exit);let S=await t.ei(u,e,c),C=t.ti(u,e),w=h.map(e=>{if(!e.includes(`:`))return{name:`email`,value:e};let[t,n]=e.split(`:`);return{name:t,value:n}}),T={...d?{timestampBefore:d.toISOString()}:{},...f?{timestampAfter:f.toISOString()}:{},...m||p?{system:{...p?{updatedBefore:p.toISOString()}:{},...m?{updatedAfter:m.toISOString()}:{}}}:{},...w.length>0?{identifiers:w}:{}};n.t.info(`Fetching consent preferences from partition ${s}, using mode=${_?`chunked-stream`:`paged-stream`}...`),n.t.info(a.default.magenta(`Preparing CSV at: ${l}`));let[E,D]=await Promise.all([t._r(C),t.n(C)]),O=D.map(e=>e.name),k=Array.from(new Set(E.flatMap(e=>e.topics?.map(t=>`${e.trackingType}_${t.slug}`)??[]))).sort((e,t)=>e.localeCompare(t)),A=Array.from(new Set(E.map(e=>e.trackingType))).sort((e,t)=>e.localeCompare(t)),j=[...o,...O,...A,...k],M=null,N=!1,P=e=>{if(!e||e.length===0)return;let n=e.map(e=>i.i(e,b));if(!N){let e=Object.keys(n[0]??{}),r=new Set;M=[...j,...e].filter(e=>e===void 0||r.has(e)?!1:(r.add(e),!0)),t.s(l,M),N=!0}t.a(l,n,M)};if(_){await i.r(S,{partition:s,filterBy:T,limit:g,windowConcurrency:v,maxChunks:y,maxLookbackDays:x,onItems:e=>P(e)}),n.t.info(a.default.green(`Finished writing CSV to ${l}`));return}await i.n(S,{partition:s,filterBy:T,limit:g,onItems:e=>P(e)}),n.t.info(a.default.green(`Finished writing CSV to ${l}`))}exports.pullConsentPreferences=s;
- //# sourceMappingURL=impl-CUkxcZrf.cjs.map
+ const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-Ck15_dOP.cjs`);const t=require(`./syncConfigurationToTranscend-f6E4-H5W.cjs`),n=require(`./logger-BaHHbWVd.cjs`);require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const r=require(`./done-input-validation-Cgk5kNBs.cjs`),i=require(`./preference-management-Dsrpuzbl.cjs`);let a=require(`colors`);a=e.s(a);const o=[`userId`,`timestamp`,`partition`,`decryptionStatus`,`updatedAt`,`usp`,`gpp`,`tcf`,`airgapVersion`,`metadata`,`metadataTimestamp`];async function s({auth:e,partition:s,sombraAuth:c,file:l,transcendUrl:u,timestampBefore:d,timestampAfter:f,updatedBefore:p,updatedAfter:m,identifiers:h=[],concurrency:g,shouldChunk:_,windowConcurrency:v,maxChunks:y,exportIdentifiersWithDelimiter:b,maxLookbackDays:x}){r.t(this.process.exit);let S=await t.ei(u,e,c),C=t.ti(u,e),w=h.map(e=>{if(!e.includes(`:`))return{name:`email`,value:e};let[t,n]=e.split(`:`);return{name:t,value:n}}),T={...d?{timestampBefore:d.toISOString()}:{},...f?{timestampAfter:f.toISOString()}:{},...m||p?{system:{...p?{updatedBefore:p.toISOString()}:{},...m?{updatedAfter:m.toISOString()}:{}}}:{},...w.length>0?{identifiers:w}:{}};n.t.info(`Fetching consent preferences from partition ${s}, using mode=${_?`chunked-stream`:`paged-stream`}...`),n.t.info(a.default.magenta(`Preparing CSV at: ${l}`));let[E,D]=await Promise.all([t._r(C),t.n(C)]),O=D.map(e=>e.name),k=Array.from(new Set(E.flatMap(e=>e.topics?.map(t=>`${e.trackingType}_${t.slug}`)??[]))).sort((e,t)=>e.localeCompare(t)),A=Array.from(new Set(E.map(e=>e.trackingType))).sort((e,t)=>e.localeCompare(t)),j=[...o,...O,...A,...k],M=null,N=!1,P=e=>{if(!e||e.length===0)return;let n=e.map(e=>i.i(e,b));if(!N){let e=Object.keys(n[0]??{}),r=new Set;M=[...j,...e].filter(e=>e===void 0||r.has(e)?!1:(r.add(e),!0)),t.s(l,M),N=!0}t.a(l,n,M)};if(_){await i.r(S,{partition:s,filterBy:T,limit:g,windowConcurrency:v,maxChunks:y,maxLookbackDays:x,onItems:e=>P(e)}),n.t.info(a.default.green(`Finished writing CSV to ${l}`));return}await i.n(S,{partition:s,filterBy:T,limit:g,onItems:e=>P(e)}),n.t.info(a.default.green(`Finished writing CSV to ${l}`))}exports.pullConsentPreferences=s;
+ //# sourceMappingURL=impl-DGRQPEpr.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-CUkxcZrf.cjs","names":["createSombraGotInstance","buildTranscendGraphQLClient","fetchAllPurposesAndPreferences","fetchAllIdentifiers","transformPreferenceRecordToCsv","fetchConsentPreferencesChunked","fetchConsentPreferences"],"sources":["../src/commands/consent/pull-consent-preferences/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport colors from 'colors';\n\nimport {\n fetchConsentPreferences,\n fetchConsentPreferencesChunked,\n transformPreferenceRecordToCsv,\n type PreferenceIdentifier,\n} from '../../../lib/preference-management';\nimport {\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchAllIdentifiers,\n fetchAllPurposesAndPreferences,\n} from '../../../lib/graphql';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { logger } from '../../../logger';\nimport { initCsvFile, appendCsvRowsOrdered } from '../../../lib/helpers';\nimport type { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\n\n// Known “core” columns your transformer usually produces up front.\n// Leave this list conservative; we’ll still union with transformer keys.\nconst CORE_COLS = [\n 'userId',\n 'timestamp',\n 'partition',\n 'decryptionStatus',\n 'updatedAt',\n 'usp',\n 'gpp',\n 'tcf',\n 'airgapVersion',\n 'metadata',\n 'metadataTimestamp',\n];\n\nexport interface PullConsentPreferencesCommandFlags {\n auth: string;\n partition: string;\n sombraAuth?: string;\n file: string;\n transcendUrl: string;\n timestampBefore?: Date;\n exportIdentifiersWithDelimiter: string;\n timestampAfter?: Date;\n updatedBefore?: Date;\n updatedAfter?: Date;\n identifiers?: string[];\n concurrency: number;\n shouldChunk: boolean;\n windowConcurrency: number;\n maxChunks: number;\n maxLookbackDays: number;\n}\n\nexport async function pullConsentPreferences(\n this: LocalContext,\n {\n auth,\n partition,\n sombraAuth,\n file,\n transcendUrl,\n timestampBefore,\n timestampAfter,\n updatedBefore,\n updatedAfter,\n identifiers = [],\n concurrency,\n shouldChunk,\n windowConcurrency,\n maxChunks,\n exportIdentifiersWithDelimiter,\n maxLookbackDays,\n }: PullConsentPreferencesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Identifiers are key:value, parse to PreferenceIdentifier[]\n const parsedIdentifiers = identifiers.map(\n (identifier): PreferenceIdentifier => {\n if (!identifier.includes(':')) {\n return {\n name: 'email',\n value: identifier,\n };\n }\n const [name, value] = identifier.split(':');\n return { name, value };\n },\n );\n\n // Build filter\n const filterBy = {\n ...(timestampBefore\n ? { timestampBefore: timestampBefore.toISOString() }\n : {}),\n ...(timestampAfter ? { timestampAfter: timestampAfter.toISOString() } : {}),\n ...(updatedAfter || updatedBefore\n ? {\n system: {\n ...(updatedBefore\n ? { updatedBefore: updatedBefore.toISOString() }\n : {}),\n ...(updatedAfter\n ? { updatedAfter: updatedAfter.toISOString() }\n : {}),\n },\n }\n : {}),\n ...(parsedIdentifiers.length > 0 ? { identifiers: parsedIdentifiers } : {}),\n };\n\n logger.info(\n `Fetching consent preferences from partition ${partition}, using mode=${\n shouldChunk ? 
'chunked-stream' : 'paged-stream'\n }...`,\n );\n\n logger.info(colors.magenta(`Preparing CSV at: ${file}`));\n\n // Fetch full sets (purposes+topics, identifiers) to ensure header completeness\n const [purposesWithTopics, allIdentifiers] = await Promise.all([\n fetchAllPurposesAndPreferences(client),\n fetchAllIdentifiers(client),\n ]);\n\n // Identifier columns: exactly the identifier names\n const identifierCols = allIdentifiers.map((i) => i.name);\n\n // Preference topic columns: topic names (de-duped)\n const topicCols = Array.from(\n new Set(\n purposesWithTopics.flatMap(\n (p) => p.topics?.map((t) => `${p.trackingType}_${t.slug}`) ?? [],\n ),\n ),\n ).sort((a, b) => a.localeCompare(b));\n\n // Some setups also want a per-purpose boolean column (e.g., “Email”, “Sms”).\n // If your transformer includes those, list them here, derived from purposes:\n const purposeCols = Array.from(\n new Set(purposesWithTopics.map((p) => p.trackingType)),\n ).sort((a, b) => a.localeCompare(b));\n\n // Build the complete header skeleton.\n // We’ll still union with the first transformed row’s keys to be safe.\n const completeHeadersList = [\n ...CORE_COLS,\n ...identifierCols,\n ...purposeCols,\n ...topicCols,\n ];\n\n // Lazily initialize CSV header order from the first transformed row.\n let headerOrder: string[] | null = null;\n let wroteHeader = false;\n const writeRows = (items: PreferenceQueryResponseItem[]): void => {\n if (!items || items.length === 0) return;\n const rows = items.map((row) =>\n transformPreferenceRecordToCsv(row, exportIdentifiersWithDelimiter),\n );\n if (!wroteHeader) {\n const firstKeys = Object.keys(rows[0] ?? {});\n const seen = new Set<string>();\n headerOrder = [...completeHeadersList, ...firstKeys].filter((k) => {\n if (k === undefined) return false;\n if (seen.has(k)) return false;\n seen.add(k);\n return true;\n });\n initCsvFile(file, headerOrder);\n wroteHeader = true;\n }\n appendCsvRowsOrdered(file, rows, headerOrder!);\n };\n\n if (shouldChunk) {\n // Stream via chunked fetcher with page callback\n await fetchConsentPreferencesChunked(sombra, {\n partition,\n filterBy,\n limit: concurrency,\n windowConcurrency,\n maxChunks,\n maxLookbackDays,\n onItems: (items) => writeRows(items),\n });\n\n logger.info(colors.green(`Finished writing CSV to ${file}`));\n return;\n }\n\n // Non-chunked path: still stream page-by-page via onItems (no in-memory accumulation)\n await fetchConsentPreferences(sombra, {\n partition,\n filterBy,\n limit: concurrency, // page size (API max 50 enforced internally)\n onItems: (items) => writeRows(items),\n });\n\n logger.info(colors.green(`Finished writing CSV to 
${file}`));\n}\n"],"mappings":"0WAsBA,MAAM,EAAY,CAChB,SACA,YACA,YACA,mBACA,YACA,MACA,MACA,MACA,gBACA,WACA,oBACD,CAqBD,eAAsB,EAEpB,CACE,OACA,YACA,aACA,OACA,eACA,kBACA,iBACA,gBACA,eACA,cAAc,EAAE,CAChB,cACA,cACA,oBACA,YACA,iCACA,mBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAAS,MAAMA,EAAAA,GAAwB,EAAc,EAAM,EAAW,CACtE,EAASC,EAAAA,GAA4B,EAAc,EAAK,CAGxD,EAAoB,EAAY,IACnC,GAAqC,CACpC,GAAI,CAAC,EAAW,SAAS,IAAI,CAC3B,MAAO,CACL,KAAM,QACN,MAAO,EACR,CAEH,GAAM,CAAC,EAAM,GAAS,EAAW,MAAM,IAAI,CAC3C,MAAO,CAAE,OAAM,QAAO,EAEzB,CAGK,EAAW,CACf,GAAI,EACA,CAAE,gBAAiB,EAAgB,aAAa,CAAE,CAClD,EAAE,CACN,GAAI,EAAiB,CAAE,eAAgB,EAAe,aAAa,CAAE,CAAG,EAAE,CAC1E,GAAI,GAAgB,EAChB,CACE,OAAQ,CACN,GAAI,EACA,CAAE,cAAe,EAAc,aAAa,CAAE,CAC9C,EAAE,CACN,GAAI,EACA,CAAE,aAAc,EAAa,aAAa,CAAE,CAC5C,EAAE,CACP,CACF,CACD,EAAE,CACN,GAAI,EAAkB,OAAS,EAAI,CAAE,YAAa,EAAmB,CAAG,EAAE,CAC3E,CAED,EAAA,EAAO,KACL,+CAA+C,EAAU,eACvD,EAAc,iBAAmB,eAClC,KACF,CAED,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,qBAAqB,IAAO,CAAC,CAGxD,GAAM,CAAC,EAAoB,GAAkB,MAAM,QAAQ,IAAI,CAC7DC,EAAAA,GAA+B,EAAO,CACtCC,EAAAA,EAAoB,EAAO,CAC5B,CAAC,CAGI,EAAiB,EAAe,IAAK,GAAM,EAAE,KAAK,CAGlD,EAAY,MAAM,KACtB,IAAI,IACF,EAAmB,QAChB,GAAM,EAAE,QAAQ,IAAK,GAAM,GAAG,EAAE,aAAa,GAAG,EAAE,OAAO,EAAI,EAAE,CACjE,CACF,CACF,CAAC,MAAM,EAAG,IAAM,EAAE,cAAc,EAAE,CAAC,CAI9B,EAAc,MAAM,KACxB,IAAI,IAAI,EAAmB,IAAK,GAAM,EAAE,aAAa,CAAC,CACvD,CAAC,MAAM,EAAG,IAAM,EAAE,cAAc,EAAE,CAAC,CAI9B,EAAsB,CAC1B,GAAG,EACH,GAAG,EACH,GAAG,EACH,GAAG,EACJ,CAGG,EAA+B,KAC/B,EAAc,GACZ,EAAa,GAA+C,CAChE,GAAI,CAAC,GAAS,EAAM,SAAW,EAAG,OAClC,IAAM,EAAO,EAAM,IAAK,GACtBC,EAAAA,EAA+B,EAAK,EAA+B,CACpE,CACD,GAAI,CAAC,EAAa,CAChB,IAAM,EAAY,OAAO,KAAK,EAAK,IAAM,EAAE,CAAC,CACtC,EAAO,IAAI,IACjB,EAAc,CAAC,GAAG,EAAqB,GAAG,EAAU,CAAC,OAAQ,GACvD,IAAM,IAAA,IACN,EAAK,IAAI,EAAE,CAAS,IACxB,EAAK,IAAI,EAAE,CACJ,IACP,CACF,EAAA,EAAY,EAAM,EAAY,CAC9B,EAAc,GAEhB,EAAA,EAAqB,EAAM,EAAM,EAAa,EAGhD,GAAI,EAAa,CAEf,MAAMC,EAAAA,EAA+B,EAAQ,CAC3C,YACA,WACA,MAAO,EACP,oBACA,YACA,kBACA,QAAU,GAAU,EAAU,EAAM,CACrC,CAAC,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,2BAA2B,IAAO,CAAC,CAC5D,OAIF,MAAMC,EAAAA,EAAwB,EAAQ,CACpC,YACA,WACA,MAAO,EACP,QAAU,GAAU,EAAU,EAAM,CACrC,CAAC,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,2BAA2B,IAAO,CAAC"}
+ {"version":3,"file":"impl-DGRQPEpr.cjs","names":["createSombraGotInstance","buildTranscendGraphQLClient","fetchAllPurposesAndPreferences","fetchAllIdentifiers","transformPreferenceRecordToCsv","fetchConsentPreferencesChunked","fetchConsentPreferences"],"sources":["../src/commands/consent/pull-consent-preferences/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport colors from 'colors';\n\nimport {\n fetchConsentPreferences,\n fetchConsentPreferencesChunked,\n transformPreferenceRecordToCsv,\n type PreferenceIdentifier,\n} from '../../../lib/preference-management';\nimport {\n buildTranscendGraphQLClient,\n createSombraGotInstance,\n fetchAllIdentifiers,\n fetchAllPurposesAndPreferences,\n} from '../../../lib/graphql';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { logger } from '../../../logger';\nimport { initCsvFile, appendCsvRowsOrdered } from '../../../lib/helpers';\nimport type { PreferenceQueryResponseItem } from '@transcend-io/privacy-types';\n\n// Known “core” columns your transformer usually produces up front.\n// Leave this list conservative; we’ll still union with transformer keys.\nconst CORE_COLS = [\n 'userId',\n 'timestamp',\n 'partition',\n 'decryptionStatus',\n 'updatedAt',\n 'usp',\n 'gpp',\n 'tcf',\n 'airgapVersion',\n 'metadata',\n 'metadataTimestamp',\n];\n\nexport interface PullConsentPreferencesCommandFlags {\n auth: string;\n partition: string;\n sombraAuth?: string;\n file: string;\n transcendUrl: string;\n timestampBefore?: Date;\n exportIdentifiersWithDelimiter: string;\n timestampAfter?: Date;\n updatedBefore?: Date;\n updatedAfter?: Date;\n identifiers?: string[];\n concurrency: number;\n shouldChunk: boolean;\n windowConcurrency: number;\n maxChunks: number;\n maxLookbackDays: number;\n}\n\nexport async function pullConsentPreferences(\n this: LocalContext,\n {\n auth,\n partition,\n sombraAuth,\n file,\n transcendUrl,\n timestampBefore,\n timestampAfter,\n updatedBefore,\n updatedAfter,\n identifiers = [],\n concurrency,\n shouldChunk,\n windowConcurrency,\n maxChunks,\n exportIdentifiersWithDelimiter,\n maxLookbackDays,\n }: PullConsentPreferencesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Identifiers are key:value, parse to PreferenceIdentifier[]\n const parsedIdentifiers = identifiers.map(\n (identifier): PreferenceIdentifier => {\n if (!identifier.includes(':')) {\n return {\n name: 'email',\n value: identifier,\n };\n }\n const [name, value] = identifier.split(':');\n return { name, value };\n },\n );\n\n // Build filter\n const filterBy = {\n ...(timestampBefore\n ? { timestampBefore: timestampBefore.toISOString() }\n : {}),\n ...(timestampAfter ? { timestampAfter: timestampAfter.toISOString() } : {}),\n ...(updatedAfter || updatedBefore\n ? {\n system: {\n ...(updatedBefore\n ? { updatedBefore: updatedBefore.toISOString() }\n : {}),\n ...(updatedAfter\n ? { updatedAfter: updatedAfter.toISOString() }\n : {}),\n },\n }\n : {}),\n ...(parsedIdentifiers.length > 0 ? { identifiers: parsedIdentifiers } : {}),\n };\n\n logger.info(\n `Fetching consent preferences from partition ${partition}, using mode=${\n shouldChunk ? 
'chunked-stream' : 'paged-stream'\n }...`,\n );\n\n logger.info(colors.magenta(`Preparing CSV at: ${file}`));\n\n // Fetch full sets (purposes+topics, identifiers) to ensure header completeness\n const [purposesWithTopics, allIdentifiers] = await Promise.all([\n fetchAllPurposesAndPreferences(client),\n fetchAllIdentifiers(client),\n ]);\n\n // Identifier columns: exactly the identifier names\n const identifierCols = allIdentifiers.map((i) => i.name);\n\n // Preference topic columns: topic names (de-duped)\n const topicCols = Array.from(\n new Set(\n purposesWithTopics.flatMap(\n (p) => p.topics?.map((t) => `${p.trackingType}_${t.slug}`) ?? [],\n ),\n ),\n ).sort((a, b) => a.localeCompare(b));\n\n // Some setups also want a per-purpose boolean column (e.g., “Email”, “Sms”).\n // If your transformer includes those, list them here, derived from purposes:\n const purposeCols = Array.from(\n new Set(purposesWithTopics.map((p) => p.trackingType)),\n ).sort((a, b) => a.localeCompare(b));\n\n // Build the complete header skeleton.\n // We’ll still union with the first transformed row’s keys to be safe.\n const completeHeadersList = [\n ...CORE_COLS,\n ...identifierCols,\n ...purposeCols,\n ...topicCols,\n ];\n\n // Lazily initialize CSV header order from the first transformed row.\n let headerOrder: string[] | null = null;\n let wroteHeader = false;\n const writeRows = (items: PreferenceQueryResponseItem[]): void => {\n if (!items || items.length === 0) return;\n const rows = items.map((row) =>\n transformPreferenceRecordToCsv(row, exportIdentifiersWithDelimiter),\n );\n if (!wroteHeader) {\n const firstKeys = Object.keys(rows[0] ?? {});\n const seen = new Set<string>();\n headerOrder = [...completeHeadersList, ...firstKeys].filter((k) => {\n if (k === undefined) return false;\n if (seen.has(k)) return false;\n seen.add(k);\n return true;\n });\n initCsvFile(file, headerOrder);\n wroteHeader = true;\n }\n appendCsvRowsOrdered(file, rows, headerOrder!);\n };\n\n if (shouldChunk) {\n // Stream via chunked fetcher with page callback\n await fetchConsentPreferencesChunked(sombra, {\n partition,\n filterBy,\n limit: concurrency,\n windowConcurrency,\n maxChunks,\n maxLookbackDays,\n onItems: (items) => writeRows(items),\n });\n\n logger.info(colors.green(`Finished writing CSV to ${file}`));\n return;\n }\n\n // Non-chunked path: still stream page-by-page via onItems (no in-memory accumulation)\n await fetchConsentPreferences(sombra, {\n partition,\n filterBy,\n limit: concurrency, // page size (API max 50 enforced internally)\n onItems: (items) => writeRows(items),\n });\n\n logger.info(colors.green(`Finished writing CSV to 
${file}`));\n}\n"],"mappings":"0WAsBA,MAAM,EAAY,CAChB,SACA,YACA,YACA,mBACA,YACA,MACA,MACA,MACA,gBACA,WACA,oBACD,CAqBD,eAAsB,EAEpB,CACE,OACA,YACA,aACA,OACA,eACA,kBACA,iBACA,gBACA,eACA,cAAc,EAAE,CAChB,cACA,cACA,oBACA,YACA,iCACA,mBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAAS,MAAMA,EAAAA,GAAwB,EAAc,EAAM,EAAW,CACtE,EAASC,EAAAA,GAA4B,EAAc,EAAK,CAGxD,EAAoB,EAAY,IACnC,GAAqC,CACpC,GAAI,CAAC,EAAW,SAAS,IAAI,CAC3B,MAAO,CACL,KAAM,QACN,MAAO,EACR,CAEH,GAAM,CAAC,EAAM,GAAS,EAAW,MAAM,IAAI,CAC3C,MAAO,CAAE,OAAM,QAAO,EAEzB,CAGK,EAAW,CACf,GAAI,EACA,CAAE,gBAAiB,EAAgB,aAAa,CAAE,CAClD,EAAE,CACN,GAAI,EAAiB,CAAE,eAAgB,EAAe,aAAa,CAAE,CAAG,EAAE,CAC1E,GAAI,GAAgB,EAChB,CACE,OAAQ,CACN,GAAI,EACA,CAAE,cAAe,EAAc,aAAa,CAAE,CAC9C,EAAE,CACN,GAAI,EACA,CAAE,aAAc,EAAa,aAAa,CAAE,CAC5C,EAAE,CACP,CACF,CACD,EAAE,CACN,GAAI,EAAkB,OAAS,EAAI,CAAE,YAAa,EAAmB,CAAG,EAAE,CAC3E,CAED,EAAA,EAAO,KACL,+CAA+C,EAAU,eACvD,EAAc,iBAAmB,eAClC,KACF,CAED,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,qBAAqB,IAAO,CAAC,CAGxD,GAAM,CAAC,EAAoB,GAAkB,MAAM,QAAQ,IAAI,CAC7DC,EAAAA,GAA+B,EAAO,CACtCC,EAAAA,EAAoB,EAAO,CAC5B,CAAC,CAGI,EAAiB,EAAe,IAAK,GAAM,EAAE,KAAK,CAGlD,EAAY,MAAM,KACtB,IAAI,IACF,EAAmB,QAChB,GAAM,EAAE,QAAQ,IAAK,GAAM,GAAG,EAAE,aAAa,GAAG,EAAE,OAAO,EAAI,EAAE,CACjE,CACF,CACF,CAAC,MAAM,EAAG,IAAM,EAAE,cAAc,EAAE,CAAC,CAI9B,EAAc,MAAM,KACxB,IAAI,IAAI,EAAmB,IAAK,GAAM,EAAE,aAAa,CAAC,CACvD,CAAC,MAAM,EAAG,IAAM,EAAE,cAAc,EAAE,CAAC,CAI9B,EAAsB,CAC1B,GAAG,EACH,GAAG,EACH,GAAG,EACH,GAAG,EACJ,CAGG,EAA+B,KAC/B,EAAc,GACZ,EAAa,GAA+C,CAChE,GAAI,CAAC,GAAS,EAAM,SAAW,EAAG,OAClC,IAAM,EAAO,EAAM,IAAK,GACtBC,EAAAA,EAA+B,EAAK,EAA+B,CACpE,CACD,GAAI,CAAC,EAAa,CAChB,IAAM,EAAY,OAAO,KAAK,EAAK,IAAM,EAAE,CAAC,CACtC,EAAO,IAAI,IACjB,EAAc,CAAC,GAAG,EAAqB,GAAG,EAAU,CAAC,OAAQ,GACvD,IAAM,IAAA,IACN,EAAK,IAAI,EAAE,CAAS,IACxB,EAAK,IAAI,EAAE,CACJ,IACP,CACF,EAAA,EAAY,EAAM,EAAY,CAC9B,EAAc,GAEhB,EAAA,EAAqB,EAAM,EAAM,EAAa,EAGhD,GAAI,EAAa,CAEf,MAAMC,EAAAA,EAA+B,EAAQ,CAC3C,YACA,WACA,MAAO,EACP,oBACA,YACA,kBACA,QAAU,GAAU,EAAU,EAAM,CACrC,CAAC,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,2BAA2B,IAAO,CAAC,CAC5D,OAIF,MAAMC,EAAAA,EAAwB,EAAQ,CACpC,YACA,WACA,MAAO,EACP,QAAU,GAAU,EAAU,EAAM,CACrC,CAAC,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,2BAA2B,IAAO,CAAC"}
@@ -1,2 +1,2 @@
- const e=require(`./enums-CBXlBJii.cjs`),t=require(`./constants-B-TmLA0w.cjs`),n=require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`),r=require(`./logger-BaHHbWVd.cjs`);require(`./buildAIIntegrationType-n_Qlv8wG.cjs`),require(`./codecs-JSDJgtyL.cjs`),require(`./api-keys-CDp8NUhN.cjs`);const i=require(`./done-input-validation-Cgk5kNBs.cjs`),a=require(`./code-scanning-BwfVNIHr.cjs`);let o=require(`colors`);o=e.s(o);let s=require(`child_process`);const c=`A repository name must be provided. You can specify using --repositoryName=$REPO_NAME or by ensuring the command "git config --get remote.origin.url" returns the name of the repository`;async function l({auth:e,scanPath:l,ignoreDirs:u,repositoryName:d,transcendUrl:f}){i.t(this.process.exit);let p=d;if(!p)try{let e=(0,s.execSync)(`cd ${l} && git config --get remote.origin.url`).toString(`utf-8`).trim();[p]=e.includes(`https:`)?e.split(`/`).slice(3).join(`/`).split(`.`):(e.split(`:`).pop()||``).split(`.`),p||(r.t.error(o.default.red(c)),this.process.exit(1))}catch(e){r.t.error(o.default.red(`${c} - Got error: ${e.message}`)),this.process.exit(1)}let m=n.ti(f,e),h=await a.t({scanPath:l,ignoreDirs:u,repositoryName:p});await n.zt(m,h);let g=new URL(t.t);g.pathname=`/code-scanning/code-packages`,r.t.info(o.default.green(`Scan found ${h.length} packages at ${l}! View results at '${g.href}'`))}exports.scanPackages=l;
- //# sourceMappingURL=impl-DX7gLoTo.cjs.map
+ const e=require(`./enums-CBXlBJii.cjs`),t=require(`./constants-Ck15_dOP.cjs`),n=require(`./syncConfigurationToTranscend-f6E4-H5W.cjs`),r=require(`./logger-BaHHbWVd.cjs`);require(`./buildAIIntegrationType-n_Qlv8wG.cjs`),require(`./codecs-JSDJgtyL.cjs`),require(`./api-keys-DQKR5mr6.cjs`);const i=require(`./done-input-validation-Cgk5kNBs.cjs`),a=require(`./code-scanning-Dw_M6bvD.cjs`);let o=require(`colors`);o=e.s(o);let s=require(`child_process`);const c=`A repository name must be provided. You can specify using --repositoryName=$REPO_NAME or by ensuring the command "git config --get remote.origin.url" returns the name of the repository`;async function l({auth:e,scanPath:l,ignoreDirs:u,repositoryName:d,transcendUrl:f}){i.t(this.process.exit);let p=d;if(!p)try{let e=(0,s.execSync)(`cd ${l} && git config --get remote.origin.url`).toString(`utf-8`).trim();[p]=e.includes(`https:`)?e.split(`/`).slice(3).join(`/`).split(`.`):(e.split(`:`).pop()||``).split(`.`),p||(r.t.error(o.default.red(c)),this.process.exit(1))}catch(e){r.t.error(o.default.red(`${c} - Got error: ${e.message}`)),this.process.exit(1)}let m=n.ti(f,e),h=await a.t({scanPath:l,ignoreDirs:u,repositoryName:p});await n.zt(m,h);let g=new URL(t.t);g.pathname=`/code-scanning/code-packages`,r.t.info(o.default.green(`Scan found ${h.length} packages at ${l}! View results at '${g.href}'`))}exports.scanPackages=l;
+ //# sourceMappingURL=impl-DSwZ_Zpg.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-DX7gLoTo.cjs","names":["buildTranscendGraphQLClient","findCodePackagesInFolder","syncCodePackages","ADMIN_DASH"],"sources":["../src/commands/inventory/scan-packages/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { ADMIN_DASH } from '../../../constants';\nimport { findCodePackagesInFolder } from '../../../lib/code-scanning';\nimport {\n buildTranscendGraphQLClient,\n syncCodePackages,\n} from '../../../lib/graphql';\nimport { execSync } from 'child_process';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nconst REPO_ERROR =\n 'A repository name must be provided. ' +\n 'You can specify using --repositoryName=$REPO_NAME or by ensuring the ' +\n 'command \"git config --get remote.origin.url\" returns the name of the repository';\n\nexport interface ScanPackagesCommandFlags {\n auth: string;\n scanPath: string;\n ignoreDirs?: string[];\n repositoryName?: string;\n transcendUrl: string;\n}\n\nexport async function scanPackages(\n this: LocalContext,\n {\n auth,\n scanPath,\n ignoreDirs,\n repositoryName,\n transcendUrl,\n }: ScanPackagesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Ensure repository name is specified\n let gitRepositoryName = repositoryName;\n if (!gitRepositoryName) {\n try {\n const name = execSync(\n `cd ${scanPath} && git config --get remote.origin.url`,\n );\n // Trim and parse the URL\n const url = name.toString('utf-8').trim();\n [gitRepositoryName] = !url.includes('https:')\n ? (url.split(':').pop() || '').split('.')\n : url.split('/').slice(3).join('/').split('.');\n if (!gitRepositoryName) {\n logger.error(colors.red(REPO_ERROR));\n this.process.exit(1);\n }\n } catch (err) {\n logger.error(colors.red(`${REPO_ERROR} - Got error: ${err.message}`));\n this.process.exit(1);\n }\n }\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Scan the codebase to discovery packages\n const results = await findCodePackagesInFolder({\n scanPath,\n ignoreDirs,\n repositoryName: gitRepositoryName,\n });\n\n // Report scan to Transcend\n await syncCodePackages(client, results);\n\n const newUrl = new URL(ADMIN_DASH);\n newUrl.pathname = '/code-scanning/code-packages';\n\n // Indicate success\n logger.info(\n colors.green(\n `Scan found ${results.length} packages at ${scanPath}! ` +\n `View results at '${newUrl.href}'`,\n ),\n );\n}\n"],"mappings":"icAYA,MAAM,EACJ,2LAYF,eAAsB,EAEpB,CACE,OACA,WACA,aACA,iBACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAI,EAAoB,EACxB,GAAI,CAAC,EACH,GAAI,CAKF,IAAM,GAAA,EAAA,EAAA,UAHJ,MAAM,EAAS,wCAChB,CAEgB,SAAS,QAAQ,CAAC,MAAM,CACzC,CAAC,GAAsB,EAAI,SAAS,SAAS,CAEzC,EAAI,MAAM,IAAI,CAAC,MAAM,EAAE,CAAC,KAAK,IAAI,CAAC,MAAM,IAAI,EAD3C,EAAI,MAAM,IAAI,CAAC,KAAK,EAAI,IAAI,MAAM,IAAI,CAEtC,IACH,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,EAAW,CAAC,CACpC,KAAK,QAAQ,KAAK,EAAE,QAEf,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,GAAG,EAAW,gBAAgB,EAAI,UAAU,CAAC,CACrE,KAAK,QAAQ,KAAK,EAAE,CAKxB,IAAM,EAASA,EAAAA,GAA4B,EAAc,EAAK,CAGxD,EAAU,MAAMC,EAAAA,EAAyB,CAC7C,WACA,aACA,eAAgB,EACjB,CAAC,CAGF,MAAMC,EAAAA,GAAiB,EAAQ,EAAQ,CAEvC,IAAM,EAAS,IAAI,IAAIC,EAAAA,EAAW,CAClC,EAAO,SAAW,+BAGlB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,cAAc,EAAQ,OAAO,eAAe,EAAS,qBAC/B,EAAO,KAAK,GACnC,CACF"}
+ {"version":3,"file":"impl-DSwZ_Zpg.cjs","names":["buildTranscendGraphQLClient","findCodePackagesInFolder","syncCodePackages","ADMIN_DASH"],"sources":["../src/commands/inventory/scan-packages/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { ADMIN_DASH } from '../../../constants';\nimport { findCodePackagesInFolder } from '../../../lib/code-scanning';\nimport {\n buildTranscendGraphQLClient,\n syncCodePackages,\n} from '../../../lib/graphql';\nimport { execSync } from 'child_process';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nconst REPO_ERROR =\n 'A repository name must be provided. ' +\n 'You can specify using --repositoryName=$REPO_NAME or by ensuring the ' +\n 'command \"git config --get remote.origin.url\" returns the name of the repository';\n\nexport interface ScanPackagesCommandFlags {\n auth: string;\n scanPath: string;\n ignoreDirs?: string[];\n repositoryName?: string;\n transcendUrl: string;\n}\n\nexport async function scanPackages(\n this: LocalContext,\n {\n auth,\n scanPath,\n ignoreDirs,\n repositoryName,\n transcendUrl,\n }: ScanPackagesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Ensure repository name is specified\n let gitRepositoryName = repositoryName;\n if (!gitRepositoryName) {\n try {\n const name = execSync(\n `cd ${scanPath} && git config --get remote.origin.url`,\n );\n // Trim and parse the URL\n const url = name.toString('utf-8').trim();\n [gitRepositoryName] = !url.includes('https:')\n ? (url.split(':').pop() || '').split('.')\n : url.split('/').slice(3).join('/').split('.');\n if (!gitRepositoryName) {\n logger.error(colors.red(REPO_ERROR));\n this.process.exit(1);\n }\n } catch (err) {\n logger.error(colors.red(`${REPO_ERROR} - Got error: ${err.message}`));\n this.process.exit(1);\n }\n }\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n // Scan the codebase to discovery packages\n const results = await findCodePackagesInFolder({\n scanPath,\n ignoreDirs,\n repositoryName: gitRepositoryName,\n });\n\n // Report scan to Transcend\n await syncCodePackages(client, results);\n\n const newUrl = new URL(ADMIN_DASH);\n newUrl.pathname = '/code-scanning/code-packages';\n\n // Indicate success\n logger.info(\n colors.green(\n `Scan found ${results.length} packages at ${scanPath}! ` +\n `View results at '${newUrl.href}'`,\n ),\n );\n}\n"],"mappings":"icAYA,MAAM,EACJ,2LAYF,eAAsB,EAEpB,CACE,OACA,WACA,aACA,iBACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAI,EAAoB,EACxB,GAAI,CAAC,EACH,GAAI,CAKF,IAAM,GAAA,EAAA,EAAA,UAHJ,MAAM,EAAS,wCAChB,CAEgB,SAAS,QAAQ,CAAC,MAAM,CACzC,CAAC,GAAsB,EAAI,SAAS,SAAS,CAEzC,EAAI,MAAM,IAAI,CAAC,MAAM,EAAE,CAAC,KAAK,IAAI,CAAC,MAAM,IAAI,EAD3C,EAAI,MAAM,IAAI,CAAC,KAAK,EAAI,IAAI,MAAM,IAAI,CAEtC,IACH,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,EAAW,CAAC,CACpC,KAAK,QAAQ,KAAK,EAAE,QAEf,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,GAAG,EAAW,gBAAgB,EAAI,UAAU,CAAC,CACrE,KAAK,QAAQ,KAAK,EAAE,CAKxB,IAAM,EAASA,EAAAA,GAA4B,EAAc,EAAK,CAGxD,EAAU,MAAMC,EAAAA,EAAyB,CAC7C,WACA,aACA,eAAgB,EACjB,CAAC,CAGF,MAAMC,EAAAA,GAAiB,EAAQ,EAAQ,CAEvC,IAAM,EAAS,IAAI,IAAIC,EAAAA,EAAW,CAClC,EAAO,SAAW,+BAGlB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,cAAc,EAAQ,OAAO,eAAe,EAAS,qBAC/B,EAAO,KAAK,GACnC,CACF"}
@@ -1,2 +1,2 @@
- require(`./enums-CBXlBJii.cjs`),require(`./constants-B-TmLA0w.cjs`);const e=require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`);require(`./logger-BaHHbWVd.cjs`),require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const t=require(`./done-input-validation-Cgk5kNBs.cjs`);async function n({auth:n,transcendUrl:r,identifierNames:i,actions:a=[]}){t.t(this.process.exit),await e.P({requestActions:a,transcendUrl:r,auth:n,identifierNames:i})}exports.rejectUnverifiedIdentifiers=n;
- //# sourceMappingURL=impl-CaSO2LPb.cjs.map
+ require(`./enums-CBXlBJii.cjs`),require(`./constants-Ck15_dOP.cjs`);const e=require(`./syncConfigurationToTranscend-f6E4-H5W.cjs`);require(`./logger-BaHHbWVd.cjs`),require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const t=require(`./done-input-validation-Cgk5kNBs.cjs`);async function n({auth:n,transcendUrl:r,identifierNames:i,actions:a=[]}){t.t(this.process.exit),await e.P({requestActions:a,transcendUrl:r,auth:n,identifierNames:i})}exports.rejectUnverifiedIdentifiers=n;
+ //# sourceMappingURL=impl-DXYFSbdT.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-CaSO2LPb.cjs","names":["removeUnverifiedRequestIdentifiers"],"sources":["../src/commands/request/reject-unverified-identifiers/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { removeUnverifiedRequestIdentifiers } from '../../../lib/requests';\nimport type { RequestAction } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface RejectUnverifiedIdentifiersCommandFlags {\n auth: string;\n identifierNames: string[];\n actions?: RequestAction[];\n transcendUrl: string;\n}\n\nexport async function rejectUnverifiedIdentifiers(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n identifierNames,\n actions = [],\n }: RejectUnverifiedIdentifiersCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await removeUnverifiedRequestIdentifiers({\n requestActions: actions,\n transcendUrl,\n auth,\n identifierNames,\n });\n}\n"],"mappings":"6QAYA,eAAsB,EAEpB,CACE,OACA,eACA,kBACA,UAAU,EAAE,EAEC,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAmC,CACvC,eAAgB,EAChB,eACA,OACA,kBACD,CAAC"}
+ {"version":3,"file":"impl-DXYFSbdT.cjs","names":["removeUnverifiedRequestIdentifiers"],"sources":["../src/commands/request/reject-unverified-identifiers/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { removeUnverifiedRequestIdentifiers } from '../../../lib/requests';\nimport type { RequestAction } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface RejectUnverifiedIdentifiersCommandFlags {\n auth: string;\n identifierNames: string[];\n actions?: RequestAction[];\n transcendUrl: string;\n}\n\nexport async function rejectUnverifiedIdentifiers(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n identifierNames,\n actions = [],\n }: RejectUnverifiedIdentifiersCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await removeUnverifiedRequestIdentifiers({\n requestActions: actions,\n transcendUrl,\n auth,\n identifierNames,\n });\n}\n"],"mappings":"6QAYA,eAAsB,EAEpB,CACE,OACA,eACA,kBACA,UAAU,EAAE,EAEC,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAmC,CACvC,eAAgB,EAChB,eACA,OACA,kBACD,CAAC"}
@@ -1,2 +1,2 @@
- require(`./enums-CBXlBJii.cjs`),require(`./constants-B-TmLA0w.cjs`),require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`),require(`./logger-BaHHbWVd.cjs`),require(`./buildAIIntegrationType-n_Qlv8wG.cjs`),require(`./codecs-JSDJgtyL.cjs`);const e=require(`./consent-manager-DXWjvCtI.cjs`);require(`./uploadConsents-CJc_6Qwd.cjs`);const t=require(`./done-input-validation-Cgk5kNBs.cjs`);async function n({auth:n,trackerStatus:r,file:i,transcendUrl:a}){t.t(this.process.exit),await e.o({auth:n,trackerStatus:r,file:i,transcendUrl:a})}exports.uploadCookiesFromCsv=n;
- //# sourceMappingURL=impl-CaUSDPuW.cjs.map
+ require(`./enums-CBXlBJii.cjs`),require(`./constants-Ck15_dOP.cjs`),require(`./syncConfigurationToTranscend-f6E4-H5W.cjs`),require(`./logger-BaHHbWVd.cjs`),require(`./buildAIIntegrationType-n_Qlv8wG.cjs`),require(`./codecs-JSDJgtyL.cjs`);const e=require(`./consent-manager-yfT0ldzq.cjs`);require(`./uploadConsents-BwTW5VNv.cjs`);const t=require(`./done-input-validation-Cgk5kNBs.cjs`);async function n({auth:n,trackerStatus:r,file:i,transcendUrl:a}){t.t(this.process.exit),await e.o({auth:n,trackerStatus:r,file:i,transcendUrl:a})}exports.uploadCookiesFromCsv=n;
+ //# sourceMappingURL=impl-DcZxvv9d.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-CaUSDPuW.cjs","names":["uploadCookiesFromCsvHelper"],"sources":["../src/commands/consent/upload-cookies-from-csv/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { uploadCookiesFromCsv as uploadCookiesFromCsvHelper } from '../../../lib/consent-manager';\nimport { ConsentTrackerStatus } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface UploadCookiesFromCsvCommandFlags {\n auth: string;\n trackerStatus: ConsentTrackerStatus;\n file: string;\n transcendUrl: string;\n}\n\nexport async function uploadCookiesFromCsv(\n this: LocalContext,\n { auth, trackerStatus, file, transcendUrl }: UploadCookiesFromCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Upload cookies\n await uploadCookiesFromCsvHelper({\n auth,\n trackerStatus,\n file,\n transcendUrl,\n });\n}\n"],"mappings":"iYAYA,eAAsB,EAEpB,CAAE,OAAM,gBAAe,OAAM,gBACd,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,MAAMA,EAAAA,EAA2B,CAC/B,OACA,gBACA,OACA,eACD,CAAC"}
+ {"version":3,"file":"impl-DcZxvv9d.cjs","names":["uploadCookiesFromCsvHelper"],"sources":["../src/commands/consent/upload-cookies-from-csv/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { uploadCookiesFromCsv as uploadCookiesFromCsvHelper } from '../../../lib/consent-manager';\nimport { ConsentTrackerStatus } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface UploadCookiesFromCsvCommandFlags {\n auth: string;\n trackerStatus: ConsentTrackerStatus;\n file: string;\n transcendUrl: string;\n}\n\nexport async function uploadCookiesFromCsv(\n this: LocalContext,\n { auth, trackerStatus, file, transcendUrl }: UploadCookiesFromCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Upload cookies\n await uploadCookiesFromCsvHelper({\n auth,\n trackerStatus,\n file,\n transcendUrl,\n });\n}\n"],"mappings":"iYAYA,eAAsB,EAEpB,CAAE,OAAM,gBAAe,OAAM,gBACd,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,MAAMA,EAAAA,EAA2B,CAC/B,OACA,gBACA,OACA,eACD,CAAC"}
@@ -1,2 +1,2 @@
- require(`./enums-CBXlBJii.cjs`),require(`./constants-B-TmLA0w.cjs`);const e=require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`);require(`./logger-BaHHbWVd.cjs`),require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const t=require(`./done-input-validation-Cgk5kNBs.cjs`);async function n({auth:n,enricherId:r,actions:i,requestEnricherStatuses:a,requestIds:o,createdAtBefore:s,createdAtAfter:c,concurrency:l,transcendUrl:u}){t.t(this.process.exit),await e.R({auth:n,enricherId:r,requestActions:i,requestEnricherStatuses:a,requestIds:o,createdAtBefore:s?new Date(s):void 0,createdAtAfter:c?new Date(c):void 0,concurrency:l,transcendUrl:u})}exports.enricherRestart=n;
- //# sourceMappingURL=impl-BuJNWbOW.cjs.map
+ require(`./enums-CBXlBJii.cjs`),require(`./constants-Ck15_dOP.cjs`);const e=require(`./syncConfigurationToTranscend-f6E4-H5W.cjs`);require(`./logger-BaHHbWVd.cjs`),require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const t=require(`./done-input-validation-Cgk5kNBs.cjs`);async function n({auth:n,enricherId:r,actions:i,requestEnricherStatuses:a,requestIds:o,createdAtBefore:s,createdAtAfter:c,concurrency:l,transcendUrl:u}){t.t(this.process.exit),await e.R({auth:n,enricherId:r,requestActions:i,requestEnricherStatuses:a,requestIds:o,createdAtBefore:s?new Date(s):void 0,createdAtAfter:c?new Date(c):void 0,concurrency:l,transcendUrl:u})}exports.enricherRestart=n;
+ //# sourceMappingURL=impl-DhGAHD6N.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-BuJNWbOW.cjs","names":["bulkRetryEnrichers"],"sources":["../src/commands/request/enricher-restart/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { bulkRetryEnrichers } from '../../../lib/requests';\nimport type {\n RequestAction,\n RequestEnricherStatus,\n} from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface EnricherRestartCommandFlags {\n auth: string;\n enricherId: string;\n actions?: RequestAction[];\n requestEnricherStatuses?: RequestEnricherStatus[];\n transcendUrl: string;\n concurrency: number;\n requestIds?: string[];\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n}\n\nexport async function enricherRestart(\n this: LocalContext,\n {\n auth,\n enricherId,\n actions,\n requestEnricherStatuses,\n requestIds,\n createdAtBefore,\n createdAtAfter,\n concurrency,\n transcendUrl,\n }: EnricherRestartCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await bulkRetryEnrichers({\n auth,\n enricherId,\n requestActions: actions,\n requestEnricherStatuses,\n requestIds,\n createdAtBefore: createdAtBefore ? new Date(createdAtBefore) : undefined,\n createdAtAfter: createdAtAfter ? new Date(createdAtAfter) : undefined,\n concurrency,\n transcendUrl,\n });\n}\n"],"mappings":"6QAoBA,eAAsB,EAEpB,CACE,OACA,aACA,UACA,0BACA,aACA,kBACA,iBACA,cACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAmB,CACvB,OACA,aACA,eAAgB,EAChB,0BACA,aACA,gBAAiB,EAAkB,IAAI,KAAK,EAAgB,CAAG,IAAA,GAC/D,eAAgB,EAAiB,IAAI,KAAK,EAAe,CAAG,IAAA,GAC5D,cACA,eACD,CAAC"}
+ {"version":3,"file":"impl-DhGAHD6N.cjs","names":["bulkRetryEnrichers"],"sources":["../src/commands/request/enricher-restart/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { bulkRetryEnrichers } from '../../../lib/requests';\nimport type {\n RequestAction,\n RequestEnricherStatus,\n} from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface EnricherRestartCommandFlags {\n auth: string;\n enricherId: string;\n actions?: RequestAction[];\n requestEnricherStatuses?: RequestEnricherStatus[];\n transcendUrl: string;\n concurrency: number;\n requestIds?: string[];\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n}\n\nexport async function enricherRestart(\n this: LocalContext,\n {\n auth,\n enricherId,\n actions,\n requestEnricherStatuses,\n requestIds,\n createdAtBefore,\n createdAtAfter,\n concurrency,\n transcendUrl,\n }: EnricherRestartCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await bulkRetryEnrichers({\n auth,\n enricherId,\n requestActions: actions,\n requestEnricherStatuses,\n requestIds,\n createdAtBefore: createdAtBefore ? new Date(createdAtBefore) : undefined,\n createdAtAfter: createdAtAfter ? new Date(createdAtAfter) : undefined,\n concurrency,\n transcendUrl,\n });\n}\n"],"mappings":"6QAoBA,eAAsB,EAEpB,CACE,OACA,aACA,UACA,0BACA,aACA,kBACA,iBACA,cACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAmB,CACvB,OACA,aACA,eAAgB,EAChB,0BACA,aACA,gBAAiB,EAAkB,IAAI,KAAK,EAAgB,CAAG,IAAA,GAC/D,eAAgB,EAAiB,IAAI,KAAK,EAAe,CAAG,IAAA,GAC5D,cACA,eACD,CAAC"}
@@ -1,2 +1,2 @@
- const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-B-TmLA0w.cjs`);const t=require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`);require(`./logger-BaHHbWVd.cjs`),require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const n=require(`./uploadConsents-CJc_6Qwd.cjs`),r=require(`./done-input-validation-Cgk5kNBs.cjs`);let i=require(`io-ts`);i=e.s(i);const a=i.intersection([i.type({userId:i.string,timestamp:i.string}),i.partial({confirmed:i.union([i.literal(`true`),i.literal(`false`)]),updated:i.union([i.literal(`true`),i.literal(`false`)]),prompted:i.union([i.literal(`true`),i.literal(`false`)]),metadata:i.string,usp:i.union([i.string,i.null]),gpp:i.union([i.string,i.null]),purposes:i.string})]);async function o({base64EncryptionKey:e,base64SigningKey:i,partition:o,file:s,consentUrl:c,concurrency:l}){r.t(this.process.exit),await n.r({base64EncryptionKey:e,base64SigningKey:i,preferences:t.oi(s,a),partition:o,concurrency:l,transcendUrl:c})}exports.uploadConsentPreferences=o;
- //# sourceMappingURL=impl-8PlQ3Cvy.cjs.map
+ const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-Ck15_dOP.cjs`);const t=require(`./syncConfigurationToTranscend-f6E4-H5W.cjs`);require(`./logger-BaHHbWVd.cjs`),require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const n=require(`./uploadConsents-BwTW5VNv.cjs`),r=require(`./done-input-validation-Cgk5kNBs.cjs`);let i=require(`io-ts`);i=e.s(i);const a=i.intersection([i.type({userId:i.string,timestamp:i.string}),i.partial({confirmed:i.union([i.literal(`true`),i.literal(`false`)]),updated:i.union([i.literal(`true`),i.literal(`false`)]),prompted:i.union([i.literal(`true`),i.literal(`false`)]),metadata:i.string,usp:i.union([i.string,i.null]),gpp:i.union([i.string,i.null]),purposes:i.string})]);async function o({base64EncryptionKey:e,base64SigningKey:i,partition:o,file:s,consentUrl:c,concurrency:l}){r.t(this.process.exit),await n.r({base64EncryptionKey:e,base64SigningKey:i,preferences:t.oi(s,a),partition:o,concurrency:l,transcendUrl:c})}exports.uploadConsentPreferences=o;
+ //# sourceMappingURL=impl-DilrEold.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-8PlQ3Cvy.cjs","names":["t","uploadConsents","readCsv"],"sources":["../src/lib/consent-manager/types.ts","../src/commands/consent/upload-consent-preferences/impl.ts"],"sourcesContent":["import * as t from 'io-ts';\n\nexport const ConsentPreferenceUpload = t.intersection([\n t.type({\n /** User ID */\n userId: t.string,\n /** Has the consent been updated (including no-change confirmation) since default resolution */\n timestamp: t.string,\n }),\n t.partial({\n /** Was tracking consent confirmed by the user? If this is false, the consent was resolved from defaults & is not yet confirmed */\n confirmed: t.union([t.literal('true'), t.literal('false')]),\n /**\n * Has the consent been updated (including no-change confirmation) since default resolution\n */\n updated: t.union([t.literal('true'), t.literal('false')]),\n /**\n * Whether or not the UI has been shown to the end-user (undefined in older versions of airgap.js)\n */\n prompted: t.union([t.literal('true'), t.literal('false')]),\n /** Consent metadata */\n metadata: t.string,\n /** US Privacy (USP) String */\n usp: t.union([t.string, t.null]),\n /** IAB GPP String */\n gpp: t.union([t.string, t.null]),\n /**\n * Purpose map\n * This is a stringified JSON object with keys as purpose names and values as booleans or 'Auto'\n */\n purposes: t.string,\n }),\n]);\n\n/** Type override */\nexport type ConsentPreferenceUpload = t.TypeOf<typeof ConsentPreferenceUpload>;\n","import type { LocalContext } from '../../../context';\n\nimport { uploadConsents } from '../../../lib/consent-manager/uploadConsents';\nimport { ConsentPreferenceUpload } from '../../../lib/consent-manager/types';\nimport { readCsv } from '../../../lib/requests';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface UploadConsentPreferencesCommandFlags {\n base64EncryptionKey: string;\n base64SigningKey: string;\n partition: string;\n file: string;\n consentUrl: string;\n concurrency: number;\n}\n\nexport async function uploadConsentPreferences(\n this: LocalContext,\n {\n base64EncryptionKey,\n base64SigningKey,\n partition,\n file,\n consentUrl,\n concurrency,\n }: UploadConsentPreferencesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Load in preferences from csv\n const preferences = readCsv(file, ConsentPreferenceUpload);\n\n // Upload cookies\n await uploadConsents({\n base64EncryptionKey,\n base64SigningKey,\n preferences,\n partition,\n concurrency,\n transcendUrl: consentUrl,\n });\n}\n"],"mappings":"gWAEA,MAAa,EAA0BA,EAAE,aAAa,CACpDA,EAAE,KAAK,CAEL,OAAQA,EAAE,OAEV,UAAWA,EAAE,OACd,CAAC,CACFA,EAAE,QAAQ,CAER,UAAWA,EAAE,MAAM,CAACA,EAAE,QAAQ,OAAO,CAAEA,EAAE,QAAQ,QAAQ,CAAC,CAAC,CAI3D,QAASA,EAAE,MAAM,CAACA,EAAE,QAAQ,OAAO,CAAEA,EAAE,QAAQ,QAAQ,CAAC,CAAC,CAIzD,SAAUA,EAAE,MAAM,CAACA,EAAE,QAAQ,OAAO,CAAEA,EAAE,QAAQ,QAAQ,CAAC,CAAC,CAE1D,SAAUA,EAAE,OAEZ,IAAKA,EAAE,MAAM,CAACA,EAAE,OAAQA,EAAE,KAAK,CAAC,CAEhC,IAAKA,EAAE,MAAM,CAACA,EAAE,OAAQA,EAAE,KAAK,CAAC,CAKhC,SAAUA,EAAE,OACb,CAAC,CACH,CAAC,CChBF,eAAsB,EAEpB,CACE,sBACA,mBACA,YACA,OACA,aACA,eAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAMtC,MAAMC,EAAAA,EAAe,CACnB,sBACA,mBACA,YANkBC,EAAAA,GAAQ,EAAM,EAAwB,CAOxD,YACA,cACA,aAAc,EACf,CAAC"}
+ {"version":3,"file":"impl-DilrEold.cjs","names":["t","uploadConsents","readCsv"],"sources":["../src/lib/consent-manager/types.ts","../src/commands/consent/upload-consent-preferences/impl.ts"],"sourcesContent":["import * as t from 'io-ts';\n\nexport const ConsentPreferenceUpload = t.intersection([\n t.type({\n /** User ID */\n userId: t.string,\n /** Has the consent been updated (including no-change confirmation) since default resolution */\n timestamp: t.string,\n }),\n t.partial({\n /** Was tracking consent confirmed by the user? If this is false, the consent was resolved from defaults & is not yet confirmed */\n confirmed: t.union([t.literal('true'), t.literal('false')]),\n /**\n * Has the consent been updated (including no-change confirmation) since default resolution\n */\n updated: t.union([t.literal('true'), t.literal('false')]),\n /**\n * Whether or not the UI has been shown to the end-user (undefined in older versions of airgap.js)\n */\n prompted: t.union([t.literal('true'), t.literal('false')]),\n /** Consent metadata */\n metadata: t.string,\n /** US Privacy (USP) String */\n usp: t.union([t.string, t.null]),\n /** IAB GPP String */\n gpp: t.union([t.string, t.null]),\n /**\n * Purpose map\n * This is a stringified JSON object with keys as purpose names and values as booleans or 'Auto'\n */\n purposes: t.string,\n }),\n]);\n\n/** Type override */\nexport type ConsentPreferenceUpload = t.TypeOf<typeof ConsentPreferenceUpload>;\n","import type { LocalContext } from '../../../context';\n\nimport { uploadConsents } from '../../../lib/consent-manager/uploadConsents';\nimport { ConsentPreferenceUpload } from '../../../lib/consent-manager/types';\nimport { readCsv } from '../../../lib/requests';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface UploadConsentPreferencesCommandFlags {\n base64EncryptionKey: string;\n base64SigningKey: string;\n partition: string;\n file: string;\n consentUrl: string;\n concurrency: number;\n}\n\nexport async function uploadConsentPreferences(\n this: LocalContext,\n {\n base64EncryptionKey,\n base64SigningKey,\n partition,\n file,\n consentUrl,\n concurrency,\n }: UploadConsentPreferencesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Load in preferences from csv\n const preferences = readCsv(file, ConsentPreferenceUpload);\n\n // Upload cookies\n await uploadConsents({\n base64EncryptionKey,\n base64SigningKey,\n preferences,\n partition,\n concurrency,\n transcendUrl: consentUrl,\n });\n}\n"],"mappings":"gWAEA,MAAa,EAA0BA,EAAE,aAAa,CACpDA,EAAE,KAAK,CAEL,OAAQA,EAAE,OAEV,UAAWA,EAAE,OACd,CAAC,CACFA,EAAE,QAAQ,CAER,UAAWA,EAAE,MAAM,CAACA,EAAE,QAAQ,OAAO,CAAEA,EAAE,QAAQ,QAAQ,CAAC,CAAC,CAI3D,QAASA,EAAE,MAAM,CAACA,EAAE,QAAQ,OAAO,CAAEA,EAAE,QAAQ,QAAQ,CAAC,CAAC,CAIzD,SAAUA,EAAE,MAAM,CAACA,EAAE,QAAQ,OAAO,CAAEA,EAAE,QAAQ,QAAQ,CAAC,CAAC,CAE1D,SAAUA,EAAE,OAEZ,IAAKA,EAAE,MAAM,CAACA,EAAE,OAAQA,EAAE,KAAK,CAAC,CAEhC,IAAKA,EAAE,MAAM,CAACA,EAAE,OAAQA,EAAE,KAAK,CAAC,CAKhC,SAAUA,EAAE,OACb,CAAC,CACH,CAAC,CChBF,eAAsB,EAEpB,CACE,sBACA,mBACA,YACA,OACA,aACA,eAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAMtC,MAAMC,EAAAA,EAAe,CACnB,sBACA,mBACA,YANkBC,EAAAA,GAAQ,EAAM,EAAwB,CAOxD,YACA,cACA,aAAc,EACf,CAAC"}
@@ -1,4 +1,4 @@
- const e=require(`./enums-CBXlBJii.cjs`),t=require(`./constants-B-TmLA0w.cjs`),n=require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`),r=require(`./logger-BaHHbWVd.cjs`);require(`./buildAIIntegrationType-n_Qlv8wG.cjs`),require(`./codecs-JSDJgtyL.cjs`),require(`./api-keys-CDp8NUhN.cjs`);const i=require(`./done-input-validation-Cgk5kNBs.cjs`),a=require(`./code-scanning-BwfVNIHr.cjs`);let o=require(`colors`);o=e.s(o);let s=require(`fast-glob`);s=e.s(s);let c=require(`query-string`);async function l({scanPath:e,fileGlobs:t,ignoreDirs:n,config:i}){let{ignoreDirs:a,supportedFiles:o,scanFunction:c}=i,u=t===``?o:o.concat(t.split(`,`)),d=[...n.split(`,`),...a].filter(e=>e.length>0);try{let t=await(0,s.default)(`${e}/**/${u.join(`|`)}`,{ignore:d.map(t=>`${e}/**/${t}`),unique:!0,onlyFiles:!0});r.t.info(`Scanning: ${t.length} files`);let n=t.map(e=>c(e)).flat().map(e=>e.softwareDevelopmentKits||[]).flat(),i=[...new Set(n.map(e=>e.name))];return r.t.info(`Found: ${i.length} unique dependencies`),i.map(t=>({name:t,resourceId:`${e}/**/${t}`,useStrictClassifier:!0}))}catch(e){throw Error(`Error scanning globs ${l} with error: ${e}`)}}async function u({scanPath:e,dataSiloId:s,auth:u,fileGlobs:d,ignoreDirs:f,transcendUrl:p}){i.t(this.process.exit);let m=n.ti(p,u),h=await n.Xr(m,s),g=a.n[h.dataSilo.type];g||(r.t.error(o.default.red(`This plugin "${h.dataSilo.type}" is not supported for offline silo discovery.`)),this.process.exit(1));let _=await l({scanPath:e,fileGlobs:d,ignoreDirs:f,config:g});await n.ut(m,h.id,_);let v=new URL(t.t);v.pathname=`/data-map/data-inventory/silo-discovery/triage`,v.search=(0,c.stringify)({filters:JSON.stringify({pluginIds:[h.id]})}),r.t.info(o.default.green(`Scan found ${_.length} potential data silos at ${e}! View at '${v.href}'
+ const e=require(`./enums-CBXlBJii.cjs`),t=require(`./constants-Ck15_dOP.cjs`),n=require(`./syncConfigurationToTranscend-f6E4-H5W.cjs`),r=require(`./logger-BaHHbWVd.cjs`);require(`./buildAIIntegrationType-n_Qlv8wG.cjs`),require(`./codecs-JSDJgtyL.cjs`),require(`./api-keys-DQKR5mr6.cjs`);const i=require(`./done-input-validation-Cgk5kNBs.cjs`),a=require(`./code-scanning-Dw_M6bvD.cjs`);let o=require(`colors`);o=e.s(o);let s=require(`fast-glob`);s=e.s(s);let c=require(`query-string`);async function l({scanPath:e,fileGlobs:t,ignoreDirs:n,config:i}){let{ignoreDirs:a,supportedFiles:o,scanFunction:c}=i,u=t===``?o:o.concat(t.split(`,`)),d=[...n.split(`,`),...a].filter(e=>e.length>0);try{let t=await(0,s.default)(`${e}/**/${u.join(`|`)}`,{ignore:d.map(t=>`${e}/**/${t}`),unique:!0,onlyFiles:!0});r.t.info(`Scanning: ${t.length} files`);let n=t.map(e=>c(e)).flat().map(e=>e.softwareDevelopmentKits||[]).flat(),i=[...new Set(n.map(e=>e.name))];return r.t.info(`Found: ${i.length} unique dependencies`),i.map(t=>({name:t,resourceId:`${e}/**/${t}`,useStrictClassifier:!0}))}catch(e){throw Error(`Error scanning globs ${l} with error: ${e}`)}}async function u({scanPath:e,dataSiloId:s,auth:u,fileGlobs:d,ignoreDirs:f,transcendUrl:p}){i.t(this.process.exit);let m=n.ti(p,u),h=await n.Xr(m,s),g=a.n[h.dataSilo.type];g||(r.t.error(o.default.red(`This plugin "${h.dataSilo.type}" is not supported for offline silo discovery.`)),this.process.exit(1));let _=await l({scanPath:e,fileGlobs:d,ignoreDirs:f,config:g});await n.ut(m,h.id,_);let v=new URL(t.t);v.pathname=`/data-map/data-inventory/silo-discovery/triage`,v.search=(0,c.stringify)({filters:JSON.stringify({pluginIds:[h.id]})}),r.t.info(o.default.green(`Scan found ${_.length} potential data silos at ${e}! View at '${v.href}'
 
  NOTE: it may take 2-3 minutes for scan results to appear in the UI.`))}exports.discoverSilos=u;
- //# sourceMappingURL=impl-E1vzeNmp.cjs.map
+ //# sourceMappingURL=impl-Gy2MhlbR.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-E1vzeNmp.cjs","names":["buildTranscendGraphQLClient","fetchActiveSiloDiscoPlugin","SILO_DISCOVERY_CONFIGS","uploadSiloDiscoveryResults","ADMIN_DASH"],"sources":["../src/lib/code-scanning/findFilesToScan.ts","../src/commands/inventory/discover-silos/impl.ts"],"sourcesContent":["import fastGlob from 'fast-glob';\nimport { logger } from '../../logger';\nimport { CodeScanningConfig } from './types';\n\nexport interface SiloDiscoveryRawResults {\n /** The name of the potential data silo entry */\n name: string;\n /** A unique UUID (represents the same resource across different silo discovery runs) */\n resourceId: string;\n /** Any hosts associated with the entry */\n host?: string;\n /** Type of data silo */\n type?: string | undefined;\n}\n\n/**\n * Helper to scan for data silos in all package.json files that it can find in a directory\n *\n * @deprecated TODO: https://transcend.height.app/T-32325 - use code scanning instead\n * @param options - Options\n * @returns the list of integrations\n */\nexport async function findFilesToScan({\n scanPath,\n fileGlobs,\n ignoreDirs,\n config,\n}: {\n /** Where to look for package.json files */\n scanPath: string;\n /** Globs to look for */\n fileGlobs: string;\n /** The directories to ignore (excludes node_modules and serverless-build) */\n ignoreDirs: string;\n /** Silo Discovery configuration */\n config: CodeScanningConfig;\n}): Promise<SiloDiscoveryRawResults[]> {\n const { ignoreDirs: IGNORE_DIRS, supportedFiles, scanFunction } = config;\n const globsToSupport =\n fileGlobs === ''\n ? supportedFiles\n : supportedFiles.concat(fileGlobs.split(','));\n const dirsToIgnore = [...ignoreDirs.split(','), ...IGNORE_DIRS].filter(\n (dir) => dir.length > 0,\n );\n try {\n const filesToScan: string[] = await fastGlob(\n `${scanPath}/**/${globsToSupport.join('|')}`,\n {\n ignore: dirsToIgnore.map((dir: string) => `${scanPath}/**/${dir}`),\n unique: true,\n onlyFiles: true,\n },\n );\n logger.info(`Scanning: ${filesToScan.length} files`);\n const allPackages = filesToScan\n .map((filePath: string) => scanFunction(filePath))\n .flat();\n const allSdks = allPackages\n .map((appPackage) => appPackage.softwareDevelopmentKits || [])\n .flat();\n const uniqueDeps = new Set(allSdks.map((sdk) => sdk.name));\n const deps = [...uniqueDeps];\n logger.info(`Found: ${deps.length} unique dependencies`);\n return deps.map((dep) => ({\n name: dep,\n resourceId: `${scanPath}/**/${dep}`,\n useStrictClassifier: true,\n }));\n } catch (error) {\n throw new Error(\n `Error scanning globs ${findFilesToScan} with error: ${error}`,\n );\n }\n}\n","import type { LocalContext } from '../../../context';\nimport { stringify } from 'query-string';\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { ADMIN_DASH } from '../../../constants';\nimport {\n fetchActiveSiloDiscoPlugin,\n buildTranscendGraphQLClient,\n uploadSiloDiscoveryResults,\n} from '../../../lib/graphql';\nimport { findFilesToScan } from '../../../lib/code-scanning/findFilesToScan';\nimport { SILO_DISCOVERY_CONFIGS } from '../../../lib/code-scanning';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface DiscoverSilosCommandFlags {\n scanPath: string;\n dataSiloId: string;\n auth: string;\n fileGlobs: string;\n ignoreDirs: string;\n transcendUrl: string;\n}\n\nexport async function discoverSilos(\n this: LocalContext,\n {\n scanPath,\n dataSiloId,\n auth,\n fileGlobs,\n ignoreDirs,\n transcendUrl,\n }: 
DiscoverSilosCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const plugin = await fetchActiveSiloDiscoPlugin(client, dataSiloId);\n\n const config = SILO_DISCOVERY_CONFIGS[plugin.dataSilo.type];\n if (!config) {\n logger.error(\n colors.red(\n `This plugin \"${plugin.dataSilo.type}\" is not supported for offline silo discovery.`,\n ),\n );\n this.process.exit(1);\n }\n\n const results = await findFilesToScan({\n scanPath,\n fileGlobs,\n ignoreDirs,\n config,\n });\n\n await uploadSiloDiscoveryResults(client, plugin.id, results);\n\n const newUrl = new URL(ADMIN_DASH);\n newUrl.pathname = '/data-map/data-inventory/silo-discovery/triage';\n newUrl.search = stringify({\n filters: JSON.stringify({ pluginIds: [plugin.id] }),\n });\n\n // Indicate success\n logger.info(\n colors.green(\n `Scan found ${results.length} potential data silos at ${scanPath}! ` +\n `View at '${newUrl.href}' ` +\n '\\n\\n NOTE: it may take 2-3 minutes for scan results to appear in the UI.',\n ),\n );\n}\n"],"mappings":"oeAsBA,eAAsB,EAAgB,CACpC,WACA,YACA,aACA,UAUqC,CACrC,GAAM,CAAE,WAAY,EAAa,iBAAgB,gBAAiB,EAC5D,EACJ,IAAc,GACV,EACA,EAAe,OAAO,EAAU,MAAM,IAAI,CAAC,CAC3C,EAAe,CAAC,GAAG,EAAW,MAAM,IAAI,CAAE,GAAG,EAAY,CAAC,OAC7D,GAAQ,EAAI,OAAS,EACvB,CACD,GAAI,CACF,IAAM,EAAwB,MAAA,EAAA,EAAA,SAC5B,GAAG,EAAS,MAAM,EAAe,KAAK,IAAI,GAC1C,CACE,OAAQ,EAAa,IAAK,GAAgB,GAAG,EAAS,MAAM,IAAM,CAClE,OAAQ,GACR,UAAW,GACZ,CACF,CACD,EAAA,EAAO,KAAK,aAAa,EAAY,OAAO,QAAQ,CAIpD,IAAM,EAHc,EACjB,IAAK,GAAqB,EAAa,EAAS,CAAC,CACjD,MAAM,CAEN,IAAK,GAAe,EAAW,yBAA2B,EAAE,CAAC,CAC7D,MAAM,CAEH,EAAO,CAAC,GADK,IAAI,IAAI,EAAQ,IAAK,GAAQ,EAAI,KAAK,CAAC,CAC9B,CAE5B,OADA,EAAA,EAAO,KAAK,UAAU,EAAK,OAAO,sBAAsB,CACjD,EAAK,IAAK,IAAS,CACxB,KAAM,EACN,WAAY,GAAG,EAAS,MAAM,IAC9B,oBAAqB,GACtB,EAAE,OACI,EAAO,CACd,MAAU,MACR,wBAAwB,EAAgB,eAAe,IACxD,ECjDL,eAAsB,EAEpB,CACE,WACA,aACA,OACA,YACA,aACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAASA,EAAAA,GAA4B,EAAc,EAAK,CAExD,EAAS,MAAMC,EAAAA,GAA2B,EAAQ,EAAW,CAE7D,EAASC,EAAAA,EAAuB,EAAO,SAAS,MACjD,IACH,EAAA,EAAO,MACL,EAAA,QAAO,IACL,gBAAgB,EAAO,SAAS,KAAK,gDACtC,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,IAAM,EAAU,MAAM,EAAgB,CACpC,WACA,YACA,aACA,SACD,CAAC,CAEF,MAAMC,EAAAA,GAA2B,EAAQ,EAAO,GAAI,EAAQ,CAE5D,IAAM,EAAS,IAAI,IAAIC,EAAAA,EAAW,CAClC,EAAO,SAAW,iDAClB,EAAO,QAAA,EAAA,EAAA,WAAmB,CACxB,QAAS,KAAK,UAAU,CAAE,UAAW,CAAC,EAAO,GAAG,CAAE,CAAC,CACpD,CAAC,CAGF,EAAA,EAAO,KACL,EAAA,QAAO,MACL,cAAc,EAAQ,OAAO,2BAA2B,EAAS,aACnD,EAAO,KAAK;;sEAE3B,CACF"}
+ {"version":3,"file":"impl-Gy2MhlbR.cjs","names":["buildTranscendGraphQLClient","fetchActiveSiloDiscoPlugin","SILO_DISCOVERY_CONFIGS","uploadSiloDiscoveryResults","ADMIN_DASH"],"sources":["../src/lib/code-scanning/findFilesToScan.ts","../src/commands/inventory/discover-silos/impl.ts"],"sourcesContent":["import fastGlob from 'fast-glob';\nimport { logger } from '../../logger';\nimport { CodeScanningConfig } from './types';\n\nexport interface SiloDiscoveryRawResults {\n /** The name of the potential data silo entry */\n name: string;\n /** A unique UUID (represents the same resource across different silo discovery runs) */\n resourceId: string;\n /** Any hosts associated with the entry */\n host?: string;\n /** Type of data silo */\n type?: string | undefined;\n}\n\n/**\n * Helper to scan for data silos in all package.json files that it can find in a directory\n *\n * @deprecated TODO: https://transcend.height.app/T-32325 - use code scanning instead\n * @param options - Options\n * @returns the list of integrations\n */\nexport async function findFilesToScan({\n scanPath,\n fileGlobs,\n ignoreDirs,\n config,\n}: {\n /** Where to look for package.json files */\n scanPath: string;\n /** Globs to look for */\n fileGlobs: string;\n /** The directories to ignore (excludes node_modules and serverless-build) */\n ignoreDirs: string;\n /** Silo Discovery configuration */\n config: CodeScanningConfig;\n}): Promise<SiloDiscoveryRawResults[]> {\n const { ignoreDirs: IGNORE_DIRS, supportedFiles, scanFunction } = config;\n const globsToSupport =\n fileGlobs === ''\n ? supportedFiles\n : supportedFiles.concat(fileGlobs.split(','));\n const dirsToIgnore = [...ignoreDirs.split(','), ...IGNORE_DIRS].filter(\n (dir) => dir.length > 0,\n );\n try {\n const filesToScan: string[] = await fastGlob(\n `${scanPath}/**/${globsToSupport.join('|')}`,\n {\n ignore: dirsToIgnore.map((dir: string) => `${scanPath}/**/${dir}`),\n unique: true,\n onlyFiles: true,\n },\n );\n logger.info(`Scanning: ${filesToScan.length} files`);\n const allPackages = filesToScan\n .map((filePath: string) => scanFunction(filePath))\n .flat();\n const allSdks = allPackages\n .map((appPackage) => appPackage.softwareDevelopmentKits || [])\n .flat();\n const uniqueDeps = new Set(allSdks.map((sdk) => sdk.name));\n const deps = [...uniqueDeps];\n logger.info(`Found: ${deps.length} unique dependencies`);\n return deps.map((dep) => ({\n name: dep,\n resourceId: `${scanPath}/**/${dep}`,\n useStrictClassifier: true,\n }));\n } catch (error) {\n throw new Error(\n `Error scanning globs ${findFilesToScan} with error: ${error}`,\n );\n }\n}\n","import type { LocalContext } from '../../../context';\nimport { stringify } from 'query-string';\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { ADMIN_DASH } from '../../../constants';\nimport {\n fetchActiveSiloDiscoPlugin,\n buildTranscendGraphQLClient,\n uploadSiloDiscoveryResults,\n} from '../../../lib/graphql';\nimport { findFilesToScan } from '../../../lib/code-scanning/findFilesToScan';\nimport { SILO_DISCOVERY_CONFIGS } from '../../../lib/code-scanning';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface DiscoverSilosCommandFlags {\n scanPath: string;\n dataSiloId: string;\n auth: string;\n fileGlobs: string;\n ignoreDirs: string;\n transcendUrl: string;\n}\n\nexport async function discoverSilos(\n this: LocalContext,\n {\n scanPath,\n dataSiloId,\n auth,\n fileGlobs,\n ignoreDirs,\n transcendUrl,\n }: 
DiscoverSilosCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const plugin = await fetchActiveSiloDiscoPlugin(client, dataSiloId);\n\n const config = SILO_DISCOVERY_CONFIGS[plugin.dataSilo.type];\n if (!config) {\n logger.error(\n colors.red(\n `This plugin \"${plugin.dataSilo.type}\" is not supported for offline silo discovery.`,\n ),\n );\n this.process.exit(1);\n }\n\n const results = await findFilesToScan({\n scanPath,\n fileGlobs,\n ignoreDirs,\n config,\n });\n\n await uploadSiloDiscoveryResults(client, plugin.id, results);\n\n const newUrl = new URL(ADMIN_DASH);\n newUrl.pathname = '/data-map/data-inventory/silo-discovery/triage';\n newUrl.search = stringify({\n filters: JSON.stringify({ pluginIds: [plugin.id] }),\n });\n\n // Indicate success\n logger.info(\n colors.green(\n `Scan found ${results.length} potential data silos at ${scanPath}! ` +\n `View at '${newUrl.href}' ` +\n '\\n\\n NOTE: it may take 2-3 minutes for scan results to appear in the UI.',\n ),\n );\n}\n"],"mappings":"oeAsBA,eAAsB,EAAgB,CACpC,WACA,YACA,aACA,UAUqC,CACrC,GAAM,CAAE,WAAY,EAAa,iBAAgB,gBAAiB,EAC5D,EACJ,IAAc,GACV,EACA,EAAe,OAAO,EAAU,MAAM,IAAI,CAAC,CAC3C,EAAe,CAAC,GAAG,EAAW,MAAM,IAAI,CAAE,GAAG,EAAY,CAAC,OAC7D,GAAQ,EAAI,OAAS,EACvB,CACD,GAAI,CACF,IAAM,EAAwB,MAAA,EAAA,EAAA,SAC5B,GAAG,EAAS,MAAM,EAAe,KAAK,IAAI,GAC1C,CACE,OAAQ,EAAa,IAAK,GAAgB,GAAG,EAAS,MAAM,IAAM,CAClE,OAAQ,GACR,UAAW,GACZ,CACF,CACD,EAAA,EAAO,KAAK,aAAa,EAAY,OAAO,QAAQ,CAIpD,IAAM,EAHc,EACjB,IAAK,GAAqB,EAAa,EAAS,CAAC,CACjD,MAAM,CAEN,IAAK,GAAe,EAAW,yBAA2B,EAAE,CAAC,CAC7D,MAAM,CAEH,EAAO,CAAC,GADK,IAAI,IAAI,EAAQ,IAAK,GAAQ,EAAI,KAAK,CAAC,CAC9B,CAE5B,OADA,EAAA,EAAO,KAAK,UAAU,EAAK,OAAO,sBAAsB,CACjD,EAAK,IAAK,IAAS,CACxB,KAAM,EACN,WAAY,GAAG,EAAS,MAAM,IAC9B,oBAAqB,GACtB,EAAE,OACI,EAAO,CACd,MAAU,MACR,wBAAwB,EAAgB,eAAe,IACxD,ECjDL,eAAsB,EAEpB,CACE,WACA,aACA,OACA,YACA,aACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAASA,EAAAA,GAA4B,EAAc,EAAK,CAExD,EAAS,MAAMC,EAAAA,GAA2B,EAAQ,EAAW,CAE7D,EAASC,EAAAA,EAAuB,EAAO,SAAS,MACjD,IACH,EAAA,EAAO,MACL,EAAA,QAAO,IACL,gBAAgB,EAAO,SAAS,KAAK,gDACtC,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,IAAM,EAAU,MAAM,EAAgB,CACpC,WACA,YACA,aACA,SACD,CAAC,CAEF,MAAMC,EAAAA,GAA2B,EAAQ,EAAO,GAAI,EAAQ,CAE5D,IAAM,EAAS,IAAI,IAAIC,EAAAA,EAAW,CAClC,EAAO,SAAW,iDAClB,EAAO,QAAA,EAAA,EAAA,WAAmB,CACxB,QAAS,KAAK,UAAU,CAAE,UAAW,CAAC,EAAO,GAAG,CAAE,CAAC,CACpD,CAAC,CAGF,EAAA,EAAO,KACL,EAAA,QAAO,MACL,cAAc,EAAQ,OAAO,2BAA2B,EAAS,aACnD,EAAO,KAAK;;sEAE3B,CACF"}
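The sourcemap hunk above is pure hash churn: apart from the regenerated `file` name (impl-E1vzeNmp.cjs to impl-Gy2MhlbR.cjs), the two JSON blobs, including the TypeScript embedded in `sourcesContent`, are identical. The interesting logic lives in that embedded source: `findFilesToScan` merges the config's `supportedFiles` with any user-supplied comma-separated globs and drops empty ignore entries. A minimal standalone sketch of that merge follows; the function name and the default file/ignore lists in the comments are illustrative assumptions, not values shipped by this package:

```ts
// Sketch of the glob-merging step in findFilesToScan, mirroring the
// embedded sourcesContent above. buildGlobLists is a hypothetical name.
function buildGlobLists(
  scanPath: string,
  fileGlobs: string, // comma-separated; '' means "defaults only"
  ignoreDirs: string, // comma-separated user-supplied dirs
  supportedFiles: string[], // assumed default, e.g. ['package.json']
  configIgnoreDirs: string[], // assumed default, e.g. ['node_modules']
): { pattern: string; ignore: string[] } {
  const globsToSupport =
    fileGlobs === ''
      ? supportedFiles
      : supportedFiles.concat(fileGlobs.split(','));
  // ''.split(',') yields [''], so empty entries are filtered out here
  const dirsToIgnore = [...ignoreDirs.split(','), ...configIgnoreDirs].filter(
    (dir) => dir.length > 0,
  );
  return {
    pattern: `${scanPath}/**/${globsToSupport.join('|')}`,
    ignore: dirsToIgnore.map((dir) => `${scanPath}/**/${dir}`),
  };
}

// buildGlobLists('.', '', 'dist', ['package.json'], ['node_modules'])
// => { pattern: './**/package.json', ignore: ['./**/dist', './**/node_modules'] }
```

Note the `@deprecated` tag in the source: this scanner is slated to be replaced by code scanning (T-32325).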
@@ -1,4 +1,4 @@
- const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-B-TmLA0w.cjs`);const t=require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`),n=require(`./logger-BaHHbWVd.cjs`);require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const r=require(`./done-input-validation-Cgk5kNBs.cjs`),i=require(`./preference-management-5uJDKuMK.cjs`);let a=require(`node:fs`),o=require(`node:path`),s=require(`colors`);s=e.s(s);let c=require(`cli-progress`);c=e.s(c);async function l({auth:e,partition:l,sombraAuth:u,file:d=``,directory:f,transcendUrl:p,timestamp:m,maxConcurrency:h,maxItemsInChunk:g,receiptDirectory:_,fileConcurrency:v}){f&&d&&(n.t.error(s.default.red(`Cannot provide both a directory and a file. Please provide only one.`)),this.process.exit(1)),!d&&!f&&(n.t.error(s.default.red(`A file or directory must be provided. Please provide one using --file=./preferences.csv or --directory=./preferences`)),this.process.exit(1)),r.t(this.process.exit);let y=[];if(f)try{let e=(0,a.readdirSync)(f).filter(e=>e.endsWith(`.csv`));e.length===0&&(n.t.error(s.default.red(`No CSV files found in directory: ${f}`)),this.process.exit(1)),y.push(...e.map(e=>(0,o.join)(f,e)))}catch(e){n.t.error(s.default.red(`Failed to read directory: ${f}`)),n.t.error(s.default.red(e.message)),this.process.exit(1)}else try{d.endsWith(`.csv`)||(n.t.error(s.default.red(`File must be a CSV file`)),this.process.exit(1)),y.push(d)}catch(e){n.t.error(s.default.red(`Failed to access file: ${d}`)),n.t.error(s.default.red(e.message)),this.process.exit(1)}n.t.debug(s.default.green(`Processing ${y.length} consent preferences files for partition: ${l}`)),n.t.debug(`\nFiles to process: ${y.join(`, `)}\n`);let b=await t.ei(p,e,u),x=new c.default.SingleBar({format:`Deletion Progress |${s.default.cyan(`[{bar}]`)}| Duration: ${s.default.red(`{duration_formatted}`)} | {value}/{total} Files Processed `},c.default.Presets.shades_classic);x.start(y.length,0);let S=await t.Ts(y,async e=>{let t=await i.t(b,{partition:l,filePath:e,timestamp:m,maxItemsInChunk:g,maxConcurrency:h});return x.increment(),t},{concurrency:v});x.stop();let C=S.flat(),w=``;C.length>0&&(w=(0,o.join)(_,`deletion-failures-${Date.now()}.csv`),t.l(w,C,!0)),n.t.info(s.default.green(`
+ const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-Ck15_dOP.cjs`);const t=require(`./syncConfigurationToTranscend-f6E4-H5W.cjs`),n=require(`./logger-BaHHbWVd.cjs`);require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const r=require(`./done-input-validation-Cgk5kNBs.cjs`),i=require(`./preference-management-Dsrpuzbl.cjs`);let a=require(`node:fs`),o=require(`node:path`),s=require(`colors`);s=e.s(s);let c=require(`cli-progress`);c=e.s(c);async function l({auth:e,partition:l,sombraAuth:u,file:d=``,directory:f,transcendUrl:p,timestamp:m,maxConcurrency:h,maxItemsInChunk:g,receiptDirectory:_,fileConcurrency:v}){f&&d&&(n.t.error(s.default.red(`Cannot provide both a directory and a file. Please provide only one.`)),this.process.exit(1)),!d&&!f&&(n.t.error(s.default.red(`A file or directory must be provided. Please provide one using --file=./preferences.csv or --directory=./preferences`)),this.process.exit(1)),r.t(this.process.exit);let y=[];if(f)try{let e=(0,a.readdirSync)(f).filter(e=>e.endsWith(`.csv`));e.length===0&&(n.t.error(s.default.red(`No CSV files found in directory: ${f}`)),this.process.exit(1)),y.push(...e.map(e=>(0,o.join)(f,e)))}catch(e){n.t.error(s.default.red(`Failed to read directory: ${f}`)),n.t.error(s.default.red(e.message)),this.process.exit(1)}else try{d.endsWith(`.csv`)||(n.t.error(s.default.red(`File must be a CSV file`)),this.process.exit(1)),y.push(d)}catch(e){n.t.error(s.default.red(`Failed to access file: ${d}`)),n.t.error(s.default.red(e.message)),this.process.exit(1)}n.t.debug(s.default.green(`Processing ${y.length} consent preferences files for partition: ${l}`)),n.t.debug(`\nFiles to process: ${y.join(`, `)}\n`);let b=await t.ei(p,e,u),x=new c.default.SingleBar({format:`Deletion Progress |${s.default.cyan(`[{bar}]`)}| Duration: ${s.default.red(`{duration_formatted}`)} | {value}/{total} Files Processed `},c.default.Presets.shades_classic);x.start(y.length,0);let S=await t.Ts(y,async e=>{let t=await i.t(b,{partition:l,filePath:e,timestamp:m,maxItemsInChunk:g,maxConcurrency:h});return x.increment(),t},{concurrency:v});x.stop();let C=S.flat(),w=``;C.length>0&&(w=(0,o.join)(_,`deletion-failures-${Date.now()}.csv`),t.l(w,C,!0)),n.t.info(s.default.green(`
 
  ==================================
 
@@ -9,4 +9,4 @@ const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-B-TmLA0w.cjs`);cons
  ==================================
 
  `))}exports.deletePreferenceRecords=l;
- //# sourceMappingURL=impl-B4iI3rcF.cjs.map
+ //# sourceMappingURL=impl-KtaRdVpP.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-B4iI3rcF.cjs","names":["file","createSombraGotInstance","cliProgress","map","bulkDeletePreferenceRecords"],"sources":["../src/commands/consent/delete-preference-records/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport colors from 'colors';\n\nimport { createSombraGotInstance } from '../../../lib/graphql';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { logger } from '../../../logger';\nimport { readdirSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { map } from '../../../lib/bluebird';\nimport { bulkDeletePreferenceRecords } from '../../../lib/preference-management';\nimport cliProgress from 'cli-progress';\nimport { writeCsv } from '../../../lib/helpers';\n\nexport interface DeletePreferenceRecordsCommandFlags {\n /** Transcend API key for authentication */\n auth: string;\n /** Partition ID to delete preference records from */\n partition: string;\n /** Optional Sombra internal key for self-hosted instances */\n sombraAuth?: string;\n /** Path to the CSV file used to identify preference records to delete */\n file?: string;\n /** Path to the directory of CSV files to load preferences from */\n directory?: string;\n /** Base URL for the Transcend API */\n transcendUrl: string;\n /** The timestamp when the deletion operation is made. Used for logging purposes. */\n timestamp: Date;\n /** Maximum items to include in each deletion chunk */\n maxItemsInChunk: number;\n /** Maximum concurrency for deletion requests */\n maxConcurrency: number;\n /** Directory to write receipts of failed deletions to */\n receiptDirectory: string;\n /** Number of files to process concurrently when deleting preference records from multiple files */\n fileConcurrency: number;\n}\n\nexport async function deletePreferenceRecords(\n this: LocalContext,\n {\n auth,\n partition,\n sombraAuth,\n file = '',\n directory,\n transcendUrl,\n timestamp,\n maxConcurrency,\n maxItemsInChunk,\n receiptDirectory,\n fileConcurrency,\n }: DeletePreferenceRecordsCommandFlags,\n): Promise<void> {\n if (!!directory && !!file) {\n logger.error(\n colors.red(\n 'Cannot provide both a directory and a file. Please provide only one.',\n ),\n );\n this.process.exit(1);\n }\n\n if (!file && !directory) {\n logger.error(\n colors.red(\n 'A file or directory must be provided. 
Please provide one using --file=./preferences.csv or --directory=./preferences',\n ),\n );\n this.process.exit(1);\n }\n doneInputValidation(this.process.exit);\n\n const files: string[] = [];\n\n if (directory) {\n try {\n const filesInDirectory = readdirSync(directory);\n const csvFiles = filesInDirectory.filter((file) => file.endsWith('.csv'));\n\n if (csvFiles.length === 0) {\n logger.error(\n colors.red(`No CSV files found in directory: ${directory}`),\n );\n this.process.exit(1);\n }\n\n // Add full paths for each CSV file\n files.push(...csvFiles.map((file) => join(directory, file)));\n } catch (err) {\n logger.error(colors.red(`Failed to read directory: ${directory}`));\n logger.error(colors.red((err as Error).message));\n this.process.exit(1);\n }\n } else {\n try {\n // Verify file exists and is a CSV\n if (!file.endsWith('.csv')) {\n logger.error(colors.red('File must be a CSV file'));\n this.process.exit(1);\n }\n files.push(file);\n } catch (err) {\n logger.error(colors.red(`Failed to access file: ${file}`));\n logger.error(colors.red((err as Error).message));\n this.process.exit(1);\n }\n }\n\n logger.debug(\n colors.green(\n `Processing ${files.length} consent preferences files for partition: ${partition}`,\n ),\n );\n logger.debug(`\\nFiles to process: ${files.join(', ')}\\n`);\n\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n const globalProgressBar = new cliProgress.SingleBar(\n {\n format: `Deletion Progress |${colors.cyan(\n '[{bar}]',\n )}| Duration: ${colors.red(\n '{duration_formatted}',\n )} | {value}/{total} Files Processed `,\n },\n cliProgress.Presets.shades_classic,\n );\n globalProgressBar.start(files.length, 0);\n\n // Process batch of files with concurrency\n const failedResultsArrays = await map(\n files,\n async (filePath) => {\n const result = await bulkDeletePreferenceRecords(sombra, {\n partition,\n filePath,\n timestamp,\n maxItemsInChunk,\n maxConcurrency,\n });\n globalProgressBar.increment();\n return result;\n },\n { concurrency: fileConcurrency },\n );\n globalProgressBar.stop();\n const failedResults = failedResultsArrays.flat();\n\n // Check for failed results and write receipt if any\n let receiptPath = '';\n if (failedResults.length > 0) {\n receiptPath = join(receiptDirectory, `deletion-failures-${Date.now()}.csv`);\n writeCsv(receiptPath, failedResults, true);\n }\n\n logger.info(colors.green('\\n\\n ================================== \\n\\n'));\n logger.info(colors.green('\\n#### Deletion Summary Report #####\\n'));\n logger.info(\n colors.green(\n `📁 Total Files Processed: ${files.length} \\n` +\n `❌ Errors: ${failedResults.length} \\n` +\n `📝 Receipt Path: ${receiptPath || 'N/A'}`,\n ),\n );\n 
logger.info(colors.green('\\n\\n==================================\\n\\n'));\n}\n"],"mappings":"6bAsCA,eAAsB,EAEpB,CACE,OACA,YACA,aACA,OAAO,GACP,YACA,eACA,YACA,iBACA,kBACA,mBACA,mBAEa,CACT,GAAe,IACnB,EAAA,EAAO,MACL,EAAA,QAAO,IACL,uEACD,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGlB,CAAC,GAAQ,CAAC,IACZ,EAAA,EAAO,MACL,EAAA,QAAO,IACL,uHACD,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAEtB,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,IAAM,EAAkB,EAAE,CAE1B,GAAI,EACF,GAAI,CAEF,IAAM,GAAA,EAAA,EAAA,aAD+B,EAAU,CACb,OAAQ,GAASA,EAAK,SAAS,OAAO,CAAC,CAErE,EAAS,SAAW,IACtB,EAAA,EAAO,MACL,EAAA,QAAO,IAAI,oCAAoC,IAAY,CAC5D,CACD,KAAK,QAAQ,KAAK,EAAE,EAItB,EAAM,KAAK,GAAG,EAAS,IAAK,IAAA,EAAA,EAAA,MAAc,EAAWA,EAAK,CAAC,CAAC,OACrD,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,6BAA6B,IAAY,CAAC,CAClE,EAAA,EAAO,MAAM,EAAA,QAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,KAAK,QAAQ,KAAK,EAAE,MAGtB,GAAI,CAEG,EAAK,SAAS,OAAO,GACxB,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,0BAA0B,CAAC,CACnD,KAAK,QAAQ,KAAK,EAAE,EAEtB,EAAM,KAAK,EAAK,OACT,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,0BAA0B,IAAO,CAAC,CAC1D,EAAA,EAAO,MAAM,EAAA,QAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,KAAK,QAAQ,KAAK,EAAE,CAIxB,EAAA,EAAO,MACL,EAAA,QAAO,MACL,cAAc,EAAM,OAAO,4CAA4C,IACxE,CACF,CACD,EAAA,EAAO,MAAM,uBAAuB,EAAM,KAAK,KAAK,CAAC,IAAI,CAGzD,IAAM,EAAS,MAAMC,EAAAA,GAAwB,EAAc,EAAM,EAAW,CACtE,EAAoB,IAAIC,EAAAA,QAAY,UACxC,CACE,OAAQ,sBAAsB,EAAA,QAAO,KACnC,UACD,CAAC,cAAc,EAAA,QAAO,IACrB,uBACD,CAAC,qCACH,CACDA,EAAAA,QAAY,QAAQ,eACrB,CACD,EAAkB,MAAM,EAAM,OAAQ,EAAE,CAGxC,IAAM,EAAsB,MAAMC,EAAAA,GAChC,EACA,KAAO,IAAa,CAClB,IAAM,EAAS,MAAMC,EAAAA,EAA4B,EAAQ,CACvD,YACA,WACA,YACA,kBACA,iBACD,CAAC,CAEF,OADA,EAAkB,WAAW,CACtB,GAET,CAAE,YAAa,EAAiB,CACjC,CACD,EAAkB,MAAM,CACxB,IAAM,EAAgB,EAAoB,MAAM,CAG5C,EAAc,GACd,EAAc,OAAS,IACzB,GAAA,EAAA,EAAA,MAAmB,EAAkB,qBAAqB,KAAK,KAAK,CAAC,MAAM,CAC3E,EAAA,EAAS,EAAa,EAAe,GAAK,EAG5C,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM;;;;EAA+C,CAAC,CACzE,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM;;EAAyC,CAAC,CACnE,EAAA,EAAO,KACL,EAAA,QAAO,MACL,6BAA6B,EAAM,OAAO,eAC3B,EAAc,OAAO,sBACd,GAAe,QACtC,CACF,CACD,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM;;;;EAA6C,CAAC"}
+ {"version":3,"file":"impl-KtaRdVpP.cjs","names":["file","createSombraGotInstance","cliProgress","map","bulkDeletePreferenceRecords"],"sources":["../src/commands/consent/delete-preference-records/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport colors from 'colors';\n\nimport { createSombraGotInstance } from '../../../lib/graphql';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { logger } from '../../../logger';\nimport { readdirSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { map } from '../../../lib/bluebird';\nimport { bulkDeletePreferenceRecords } from '../../../lib/preference-management';\nimport cliProgress from 'cli-progress';\nimport { writeCsv } from '../../../lib/helpers';\n\nexport interface DeletePreferenceRecordsCommandFlags {\n /** Transcend API key for authentication */\n auth: string;\n /** Partition ID to delete preference records from */\n partition: string;\n /** Optional Sombra internal key for self-hosted instances */\n sombraAuth?: string;\n /** Path to the CSV file used to identify preference records to delete */\n file?: string;\n /** Path to the directory of CSV files to load preferences from */\n directory?: string;\n /** Base URL for the Transcend API */\n transcendUrl: string;\n /** The timestamp when the deletion operation is made. Used for logging purposes. */\n timestamp: Date;\n /** Maximum items to include in each deletion chunk */\n maxItemsInChunk: number;\n /** Maximum concurrency for deletion requests */\n maxConcurrency: number;\n /** Directory to write receipts of failed deletions to */\n receiptDirectory: string;\n /** Number of files to process concurrently when deleting preference records from multiple files */\n fileConcurrency: number;\n}\n\nexport async function deletePreferenceRecords(\n this: LocalContext,\n {\n auth,\n partition,\n sombraAuth,\n file = '',\n directory,\n transcendUrl,\n timestamp,\n maxConcurrency,\n maxItemsInChunk,\n receiptDirectory,\n fileConcurrency,\n }: DeletePreferenceRecordsCommandFlags,\n): Promise<void> {\n if (!!directory && !!file) {\n logger.error(\n colors.red(\n 'Cannot provide both a directory and a file. Please provide only one.',\n ),\n );\n this.process.exit(1);\n }\n\n if (!file && !directory) {\n logger.error(\n colors.red(\n 'A file or directory must be provided. 
Please provide one using --file=./preferences.csv or --directory=./preferences',\n ),\n );\n this.process.exit(1);\n }\n doneInputValidation(this.process.exit);\n\n const files: string[] = [];\n\n if (directory) {\n try {\n const filesInDirectory = readdirSync(directory);\n const csvFiles = filesInDirectory.filter((file) => file.endsWith('.csv'));\n\n if (csvFiles.length === 0) {\n logger.error(\n colors.red(`No CSV files found in directory: ${directory}`),\n );\n this.process.exit(1);\n }\n\n // Add full paths for each CSV file\n files.push(...csvFiles.map((file) => join(directory, file)));\n } catch (err) {\n logger.error(colors.red(`Failed to read directory: ${directory}`));\n logger.error(colors.red((err as Error).message));\n this.process.exit(1);\n }\n } else {\n try {\n // Verify file exists and is a CSV\n if (!file.endsWith('.csv')) {\n logger.error(colors.red('File must be a CSV file'));\n this.process.exit(1);\n }\n files.push(file);\n } catch (err) {\n logger.error(colors.red(`Failed to access file: ${file}`));\n logger.error(colors.red((err as Error).message));\n this.process.exit(1);\n }\n }\n\n logger.debug(\n colors.green(\n `Processing ${files.length} consent preferences files for partition: ${partition}`,\n ),\n );\n logger.debug(`\\nFiles to process: ${files.join(', ')}\\n`);\n\n // Create sombra instance to communicate with\n const sombra = await createSombraGotInstance(transcendUrl, auth, sombraAuth);\n const globalProgressBar = new cliProgress.SingleBar(\n {\n format: `Deletion Progress |${colors.cyan(\n '[{bar}]',\n )}| Duration: ${colors.red(\n '{duration_formatted}',\n )} | {value}/{total} Files Processed `,\n },\n cliProgress.Presets.shades_classic,\n );\n globalProgressBar.start(files.length, 0);\n\n // Process batch of files with concurrency\n const failedResultsArrays = await map(\n files,\n async (filePath) => {\n const result = await bulkDeletePreferenceRecords(sombra, {\n partition,\n filePath,\n timestamp,\n maxItemsInChunk,\n maxConcurrency,\n });\n globalProgressBar.increment();\n return result;\n },\n { concurrency: fileConcurrency },\n );\n globalProgressBar.stop();\n const failedResults = failedResultsArrays.flat();\n\n // Check for failed results and write receipt if any\n let receiptPath = '';\n if (failedResults.length > 0) {\n receiptPath = join(receiptDirectory, `deletion-failures-${Date.now()}.csv`);\n writeCsv(receiptPath, failedResults, true);\n }\n\n logger.info(colors.green('\\n\\n ================================== \\n\\n'));\n logger.info(colors.green('\\n#### Deletion Summary Report #####\\n'));\n logger.info(\n colors.green(\n `📁 Total Files Processed: ${files.length} \\n` +\n `❌ Errors: ${failedResults.length} \\n` +\n `📝 Receipt Path: ${receiptPath || 'N/A'}`,\n ),\n );\n 
logger.info(colors.green('\\n\\n==================================\\n\\n'));\n}\n"],"mappings":"6bAsCA,eAAsB,EAEpB,CACE,OACA,YACA,aACA,OAAO,GACP,YACA,eACA,YACA,iBACA,kBACA,mBACA,mBAEa,CACT,GAAe,IACnB,EAAA,EAAO,MACL,EAAA,QAAO,IACL,uEACD,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGlB,CAAC,GAAQ,CAAC,IACZ,EAAA,EAAO,MACL,EAAA,QAAO,IACL,uHACD,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAEtB,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,IAAM,EAAkB,EAAE,CAE1B,GAAI,EACF,GAAI,CAEF,IAAM,GAAA,EAAA,EAAA,aAD+B,EAAU,CACb,OAAQ,GAASA,EAAK,SAAS,OAAO,CAAC,CAErE,EAAS,SAAW,IACtB,EAAA,EAAO,MACL,EAAA,QAAO,IAAI,oCAAoC,IAAY,CAC5D,CACD,KAAK,QAAQ,KAAK,EAAE,EAItB,EAAM,KAAK,GAAG,EAAS,IAAK,IAAA,EAAA,EAAA,MAAc,EAAWA,EAAK,CAAC,CAAC,OACrD,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,6BAA6B,IAAY,CAAC,CAClE,EAAA,EAAO,MAAM,EAAA,QAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,KAAK,QAAQ,KAAK,EAAE,MAGtB,GAAI,CAEG,EAAK,SAAS,OAAO,GACxB,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,0BAA0B,CAAC,CACnD,KAAK,QAAQ,KAAK,EAAE,EAEtB,EAAM,KAAK,EAAK,OACT,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,0BAA0B,IAAO,CAAC,CAC1D,EAAA,EAAO,MAAM,EAAA,QAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,KAAK,QAAQ,KAAK,EAAE,CAIxB,EAAA,EAAO,MACL,EAAA,QAAO,MACL,cAAc,EAAM,OAAO,4CAA4C,IACxE,CACF,CACD,EAAA,EAAO,MAAM,uBAAuB,EAAM,KAAK,KAAK,CAAC,IAAI,CAGzD,IAAM,EAAS,MAAMC,EAAAA,GAAwB,EAAc,EAAM,EAAW,CACtE,EAAoB,IAAIC,EAAAA,QAAY,UACxC,CACE,OAAQ,sBAAsB,EAAA,QAAO,KACnC,UACD,CAAC,cAAc,EAAA,QAAO,IACrB,uBACD,CAAC,qCACH,CACDA,EAAAA,QAAY,QAAQ,eACrB,CACD,EAAkB,MAAM,EAAM,OAAQ,EAAE,CAGxC,IAAM,EAAsB,MAAMC,EAAAA,GAChC,EACA,KAAO,IAAa,CAClB,IAAM,EAAS,MAAMC,EAAAA,EAA4B,EAAQ,CACvD,YACA,WACA,YACA,kBACA,iBACD,CAAC,CAEF,OADA,EAAkB,WAAW,CACtB,GAET,CAAE,YAAa,EAAiB,CACjC,CACD,EAAkB,MAAM,CACxB,IAAM,EAAgB,EAAoB,MAAM,CAG5C,EAAc,GACd,EAAc,OAAS,IACzB,GAAA,EAAA,EAAA,MAAmB,EAAkB,qBAAqB,KAAK,KAAK,CAAC,MAAM,CAC3E,EAAA,EAAS,EAAa,EAAe,GAAK,EAG5C,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM;;;;EAA+C,CAAC,CACzE,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM;;EAAyC,CAAC,CACnE,EAAA,EAAO,KACL,EAAA,QAAO,MACL,6BAA6B,EAAM,OAAO,eAC3B,EAAc,OAAO,sBACd,GAAe,QACtC,CACF,CACD,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM;;;;EAA6C,CAAC"}
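Same story here: `constants-B-TmLA0w` becomes `constants-Ck15_dOP`, `preference-management-5uJDKuMK` becomes `preference-management-Dsrpuzbl`, and the sourcemap is renamed to match, while the embedded `deletePreferenceRecords` source is unchanged. Its mutually exclusive `--file`/`--directory` handling is easier to read outside the bundle; below is a sketch that mirrors the embedded source but throws instead of calling `this.process.exit(1)`:

```ts
import { readdirSync } from 'node:fs';
import { join } from 'node:path';

// Sketch of the input-collection step in deletePreferenceRecords, mirroring
// the embedded sourcesContent above (throws instead of this.process.exit(1)).
function collectCsvFiles(file = '', directory?: string): string[] {
  if (directory && file) {
    throw new Error(
      'Cannot provide both a directory and a file. Please provide only one.',
    );
  }
  if (!file && !directory) {
    throw new Error('A file or directory must be provided.');
  }
  if (directory) {
    const csvFiles = readdirSync(directory).filter((f) => f.endsWith('.csv'));
    if (csvFiles.length === 0) {
      throw new Error(`No CSV files found in directory: ${directory}`);
    }
    // Full paths for each CSV file in the directory
    return csvFiles.map((f) => join(directory, f));
  }
  if (!file.endsWith('.csv')) {
    throw new Error('File must be a CSV file');
  }
  return [file];
}
```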
@@ -1,2 +1,2 @@
- require(`./enums-CBXlBJii.cjs`),require(`./constants-B-TmLA0w.cjs`),require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`),require(`./logger-BaHHbWVd.cjs`),require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const e=require(`./manual-enrichment-Y_BQaSZQ.cjs`),t=require(`./done-input-validation-Cgk5kNBs.cjs`);async function n({auth:n,transcendUrl:r,file:i,concurrency:a,actions:o,sombraAuth:s}){t.t(this.process.exit),await e.i({file:i,transcendUrl:r,concurrency:a,requestActions:o,auth:n,sombraAuth:s})}exports.pullIdentifiers=n;
- //# sourceMappingURL=impl-t_fZSUcj.cjs.map
+ require(`./enums-CBXlBJii.cjs`),require(`./constants-Ck15_dOP.cjs`),require(`./syncConfigurationToTranscend-f6E4-H5W.cjs`),require(`./logger-BaHHbWVd.cjs`),require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const e=require(`./manual-enrichment-CkdYjftx.cjs`),t=require(`./done-input-validation-Cgk5kNBs.cjs`);async function n({auth:n,transcendUrl:r,file:i,concurrency:a,actions:o,sombraAuth:s}){t.t(this.process.exit),await e.i({file:i,transcendUrl:r,concurrency:a,requestActions:o,auth:n,sombraAuth:s})}exports.pullIdentifiers=n;
+ //# sourceMappingURL=impl-M-kD-8Qh.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-t_fZSUcj.cjs","names":["pullManualEnrichmentIdentifiersToCsv"],"sources":["../src/commands/request/preflight/pull-identifiers/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../../context';\nimport { pullManualEnrichmentIdentifiersToCsv } from '../../../../lib/manual-enrichment';\nimport type { RequestAction } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\n\nexport interface PullIdentifiersCommandFlags {\n auth: string;\n sombraAuth?: string;\n transcendUrl: string;\n file: string;\n actions?: RequestAction[];\n concurrency: number;\n}\n\nexport async function pullIdentifiers(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n file,\n concurrency,\n actions,\n sombraAuth,\n }: PullIdentifiersCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await pullManualEnrichmentIdentifiersToCsv({\n file,\n transcendUrl,\n concurrency,\n requestActions: actions,\n auth,\n sombraAuth,\n });\n}\n"],"mappings":"mTAcA,eAAsB,EAEpB,CACE,OACA,eACA,OACA,cACA,UACA,cAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAqC,CACzC,OACA,eACA,cACA,eAAgB,EAChB,OACA,aACD,CAAC"}
+ {"version":3,"file":"impl-M-kD-8Qh.cjs","names":["pullManualEnrichmentIdentifiersToCsv"],"sources":["../src/commands/request/preflight/pull-identifiers/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../../context';\nimport { pullManualEnrichmentIdentifiersToCsv } from '../../../../lib/manual-enrichment';\nimport type { RequestAction } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\n\nexport interface PullIdentifiersCommandFlags {\n auth: string;\n sombraAuth?: string;\n transcendUrl: string;\n file: string;\n actions?: RequestAction[];\n concurrency: number;\n}\n\nexport async function pullIdentifiers(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n file,\n concurrency,\n actions,\n sombraAuth,\n }: PullIdentifiersCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await pullManualEnrichmentIdentifiersToCsv({\n file,\n transcendUrl,\n concurrency,\n requestActions: actions,\n auth,\n sombraAuth,\n });\n}\n"],"mappings":"mTAcA,eAAsB,EAEpB,CACE,OACA,eACA,OACA,cACA,UACA,cAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAqC,CACzC,OACA,eACA,cACA,eAAgB,EAChB,OACA,aACD,CAAC"}
@@ -1,2 +1,2 @@
- const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-B-TmLA0w.cjs`);const t=require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`);require(`./logger-BaHHbWVd.cjs`),require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const n=require(`./done-input-validation-Cgk5kNBs.cjs`);let r=require(`@transcend-io/privacy-types`);async function i({auth:e,transcendUrl:i,folderPath:a,requestIds:o,statuses:s=[r.RequestStatus.Approving,r.RequestStatus.Downloadable],concurrency:c,createdAtBefore:l,createdAtAfter:u,approveAfterDownload:d}){n.t(this.process.exit),await t.Z({transcendUrl:i,auth:e,folderPath:a,requestIds:o,statuses:s,concurrency:c,createdAtBefore:l,createdAtAfter:u,approveAfterDownload:d})}exports.downloadFiles=i;
- //# sourceMappingURL=impl-CwPRkBc0.cjs.map
+ const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-Ck15_dOP.cjs`);const t=require(`./syncConfigurationToTranscend-f6E4-H5W.cjs`);require(`./logger-BaHHbWVd.cjs`),require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const n=require(`./done-input-validation-Cgk5kNBs.cjs`);let r=require(`@transcend-io/privacy-types`);async function i({auth:e,transcendUrl:i,folderPath:a,requestIds:o,statuses:s=[r.RequestStatus.Approving,r.RequestStatus.Downloadable],concurrency:c,createdAtBefore:l,createdAtAfter:u,approveAfterDownload:d}){n.t(this.process.exit),await t.Z({transcendUrl:i,auth:e,folderPath:a,requestIds:o,statuses:s,concurrency:c,createdAtBefore:l,createdAtAfter:u,approveAfterDownload:d})}exports.downloadFiles=i;
+ //# sourceMappingURL=impl-NwB7y3aW.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-CwPRkBc0.cjs","names":["RequestStatus","downloadPrivacyRequestFiles"],"sources":["../src/commands/request/download-files/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { downloadPrivacyRequestFiles } from '../../../lib/requests';\nimport { RequestStatus } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface DownloadFilesCommandFlags {\n auth: string;\n sombraAuth?: string;\n concurrency: number;\n requestIds?: string[];\n statuses?: RequestStatus[];\n folderPath: string;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n approveAfterDownload: boolean;\n transcendUrl: string;\n}\n\nexport async function downloadFiles(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n folderPath,\n requestIds,\n statuses = [RequestStatus.Approving, RequestStatus.Downloadable],\n concurrency,\n createdAtBefore,\n createdAtAfter,\n approveAfterDownload,\n }: DownloadFilesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await downloadPrivacyRequestFiles({\n transcendUrl,\n auth,\n folderPath,\n requestIds,\n statuses,\n concurrency,\n createdAtBefore,\n createdAtAfter,\n approveAfterDownload,\n });\n}\n"],"mappings":"kUAkBA,eAAsB,EAEpB,CACE,OACA,eACA,aACA,aACA,WAAW,CAACA,EAAAA,cAAc,UAAWA,EAAAA,cAAc,aAAa,CAChE,cACA,kBACA,iBACA,wBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMC,EAAAA,EAA4B,CAChC,eACA,OACA,aACA,aACA,WACA,cACA,kBACA,iBACA,uBACD,CAAC"}
+ {"version":3,"file":"impl-NwB7y3aW.cjs","names":["RequestStatus","downloadPrivacyRequestFiles"],"sources":["../src/commands/request/download-files/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { downloadPrivacyRequestFiles } from '../../../lib/requests';\nimport { RequestStatus } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface DownloadFilesCommandFlags {\n auth: string;\n sombraAuth?: string;\n concurrency: number;\n requestIds?: string[];\n statuses?: RequestStatus[];\n folderPath: string;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n approveAfterDownload: boolean;\n transcendUrl: string;\n}\n\nexport async function downloadFiles(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n folderPath,\n requestIds,\n statuses = [RequestStatus.Approving, RequestStatus.Downloadable],\n concurrency,\n createdAtBefore,\n createdAtAfter,\n approveAfterDownload,\n }: DownloadFilesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await downloadPrivacyRequestFiles({\n transcendUrl,\n auth,\n folderPath,\n requestIds,\n statuses,\n concurrency,\n createdAtBefore,\n createdAtAfter,\n approveAfterDownload,\n });\n}\n"],"mappings":"kUAkBA,eAAsB,EAEpB,CACE,OACA,eACA,aACA,aACA,WAAW,CAACA,EAAAA,cAAc,UAAWA,EAAAA,cAAc,aAAa,CAChE,cACA,kBACA,iBACA,wBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMC,EAAAA,EAA4B,CAChC,eACA,OACA,aACA,aACA,WACA,cACA,kBACA,iBACA,uBACD,CAAC"}
@@ -1,2 +1,2 @@
- const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-B-TmLA0w.cjs`);const t=require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`),n=require(`./logger-BaHHbWVd.cjs`);require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const r=require(`./cron-DQHN57v7.cjs`),i=require(`./done-input-validation-Cgk5kNBs.cjs`);let a=require(`colors`);a=e.s(a);async function o({file:e,fileTarget:o,transcendUrl:s,auth:c,sombraAuth:l,cronDataSiloId:u,targetDataSiloId:d,actions:f,skipRequestCount:p,pageLimit:m,chunkSize:h}){p&&n.t.info(a.default.yellow(`Skipping request count as requested. This may help speed up the call.`)),(Number.isNaN(h)||h<=0||h%m!==0)&&(n.t.error(a.default.red(`Invalid chunk size: "${h}". Must be a positive integer that is a multiple of ${m}.`)),this.process.exit(1)),i.t(this.process.exit);let g=t.ti(s,c),{baseName:_,extension:v}=t.c(e),{baseName:y,extension:b}=t.c(o),x=0,S=0,C=0;await r.t({dataSiloId:u,auth:c,sombraAuth:l,actions:f,apiPageSize:m,savePageSize:h,onSave:async r=>{x+=r.length;let i=await t.Ts(t.Ns(t.Ds(r.map(e=>e.requestId)),m),async e=>(n.t.info(a.default.magenta(`Fetching target identifiers for ${e.length} requests`)),(await t.Un(g,m*2,{requestIds:e,dataSiloIds:[d]})).map(({fileName:e,remoteId:t})=>{if(!t)throw Error(`Failed to find remoteId for ${e}`);return{RecordId:t,Object:e.replace(`.json`,``).split(`/`).pop()?.replace(` Information`,``),Comment:`Customer data deletion request submitted via transcend.io`}})),{concurrency:1});S+=i.flat().length;let s=t.Ds(r.map(e=>Object.keys(e)).flat()),c=`${_}-${C}${v}`,l=`${y}-${C}${b}`;await t.d(c,r,s),n.t.info(a.default.green(`Successfully wrote ${r.length} identifiers to file "${e}"`));let u=i.flat();await t.d(l,u,t.Ds(u.map(e=>Object.keys(e)).flat())),n.t.info(a.default.green(`Successfully wrote ${u.length} identifiers to file "${o}"`)),n.t.info(a.default.blue(`Processed chunk of ${t.Ns.length} identifiers, found ${u.length} target identifiers`)),C+=1},transcendUrl:s,skipRequestCount:p}),n.t.info(a.default.green(`Successfully wrote ${x} identifiers to file "${e}"`)),n.t.info(a.default.green(`Successfully wrote ${S} identifiers to file "${o}"`))}exports.pullProfiles=o;
- //# sourceMappingURL=impl-80GXtjmz.cjs.map
+ const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-Ck15_dOP.cjs`);const t=require(`./syncConfigurationToTranscend-f6E4-H5W.cjs`),n=require(`./logger-BaHHbWVd.cjs`);require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const r=require(`./cron-DjzQakpu.cjs`),i=require(`./done-input-validation-Cgk5kNBs.cjs`);let a=require(`colors`);a=e.s(a);async function o({file:e,fileTarget:o,transcendUrl:s,auth:c,sombraAuth:l,cronDataSiloId:u,targetDataSiloId:d,actions:f,skipRequestCount:p,pageLimit:m,chunkSize:h}){p&&n.t.info(a.default.yellow(`Skipping request count as requested. This may help speed up the call.`)),(Number.isNaN(h)||h<=0||h%m!==0)&&(n.t.error(a.default.red(`Invalid chunk size: "${h}". Must be a positive integer that is a multiple of ${m}.`)),this.process.exit(1)),i.t(this.process.exit);let g=t.ti(s,c),{baseName:_,extension:v}=t.c(e),{baseName:y,extension:b}=t.c(o),x=0,S=0,C=0;await r.t({dataSiloId:u,auth:c,sombraAuth:l,actions:f,apiPageSize:m,savePageSize:h,onSave:async r=>{x+=r.length;let i=await t.Ts(t.Ns(t.Ds(r.map(e=>e.requestId)),m),async e=>(n.t.info(a.default.magenta(`Fetching target identifiers for ${e.length} requests`)),(await t.Un(g,m*2,{requestIds:e,dataSiloIds:[d]})).map(({fileName:e,remoteId:t})=>{if(!t)throw Error(`Failed to find remoteId for ${e}`);return{RecordId:t,Object:e.replace(`.json`,``).split(`/`).pop()?.replace(` Information`,``),Comment:`Customer data deletion request submitted via transcend.io`}})),{concurrency:1});S+=i.flat().length;let s=t.Ds(r.map(e=>Object.keys(e)).flat()),c=`${_}-${C}${v}`,l=`${y}-${C}${b}`;await t.d(c,r,s),n.t.info(a.default.green(`Successfully wrote ${r.length} identifiers to file "${e}"`));let u=i.flat();await t.d(l,u,t.Ds(u.map(e=>Object.keys(e)).flat())),n.t.info(a.default.green(`Successfully wrote ${u.length} identifiers to file "${o}"`)),n.t.info(a.default.blue(`Processed chunk of ${t.Ns.length} identifiers, found ${u.length} target identifiers`)),C+=1},transcendUrl:s,skipRequestCount:p}),n.t.info(a.default.green(`Successfully wrote ${x} identifiers to file "${e}"`)),n.t.info(a.default.green(`Successfully wrote ${S} identifiers to file "${o}"`))}exports.pullProfiles=o;
+ //# sourceMappingURL=impl-PU77PoPR.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-80GXtjmz.cjs","names":["buildTranscendGraphQLClient","parseFilePath","map","chunk","uniq","fetchRequestFilesForRequest","writeLargeCsv","pullChunkedCustomSiloOutstandingIdentifiers"],"sources":["../src/commands/request/cron/pull-profiles/impl.ts"],"sourcesContent":["import type { RequestAction } from '@transcend-io/privacy-types';\nimport { logger } from '../../../../logger';\nimport colors from 'colors';\nimport { uniq, chunk } from 'lodash-es';\nimport { map } from '../../../../lib/bluebird';\nimport {\n buildTranscendGraphQLClient,\n fetchRequestFilesForRequest,\n} from '../../../../lib/graphql';\nimport type { LocalContext } from '../../../../context';\nimport {\n pullChunkedCustomSiloOutstandingIdentifiers,\n type CsvFormattedIdentifier,\n} from '../../../../lib/cron';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\nimport { parseFilePath, writeLargeCsv } from '../../../../lib/helpers';\n\nexport interface PullProfilesCommandFlags {\n file: string;\n fileTarget: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n cronDataSiloId: string;\n targetDataSiloId: string;\n actions: RequestAction[];\n skipRequestCount: boolean;\n pageLimit: number;\n chunkSize: number;\n}\n\nexport async function pullProfiles(\n this: LocalContext,\n {\n file,\n fileTarget,\n transcendUrl,\n auth,\n sombraAuth,\n cronDataSiloId,\n targetDataSiloId,\n actions,\n skipRequestCount,\n pageLimit,\n chunkSize,\n }: PullProfilesCommandFlags,\n): Promise<void> {\n if (skipRequestCount) {\n logger.info(\n colors.yellow(\n 'Skipping request count as requested. This may help speed up the call.',\n ),\n );\n }\n\n if (\n Number.isNaN(chunkSize) ||\n chunkSize <= 0 ||\n chunkSize % pageLimit !== 0\n ) {\n logger.error(\n colors.red(\n `Invalid chunk size: \"${chunkSize}\". 
Must be a positive integer that is a multiple of ${pageLimit}.`,\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n // Create GraphQL client to connect to Transcend backend\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const { baseName, extension } = parseFilePath(file);\n const { baseName: baseNameTarget, extension: extensionTarget } =\n parseFilePath(fileTarget);\n\n let allIdentifiersCount = 0;\n let allTargetIdentifiersCount = 0;\n let fileCount = 0;\n // Create onSave callback to handle chunked processing\n const onSave = async (\n chunkToSave: CsvFormattedIdentifier[],\n ): Promise<void> => {\n // Add to all identifiers\n allIdentifiersCount += chunkToSave.length;\n\n // Get unique request IDs from this chunk\n const requestIds = chunkToSave.map((d) => d.requestId as string);\n const uniqueRequestIds = uniq(requestIds);\n\n // Pull down target identifiers for this chunk\n const chunkedRequestIds = chunk(uniqueRequestIds, pageLimit);\n const results = await map(\n chunkedRequestIds,\n async (requestIds) => {\n logger.info(\n colors.magenta(\n `Fetching target identifiers for ${requestIds.length} requests`,\n ),\n );\n const results = await fetchRequestFilesForRequest(\n client,\n pageLimit * 2,\n {\n requestIds,\n dataSiloIds: [targetDataSiloId],\n },\n );\n return results.map(({ fileName, remoteId }) => {\n if (!remoteId) {\n throw new Error(`Failed to find remoteId for ${fileName}`);\n }\n return {\n RecordId: remoteId,\n Object: fileName\n .replace('.json', '')\n .split('/')\n .pop()\n ?.replace(' Information', ''),\n Comment:\n 'Customer data deletion request submitted via transcend.io',\n };\n });\n },\n // We are grabbing all the request files for the 'pageLimit' # of requests at a time\n {\n concurrency: 1,\n },\n );\n\n allTargetIdentifiersCount += results.flat().length;\n\n // Write the identifiers and target identifiers to CSV\n const headers = uniq(chunkToSave.map((d) => Object.keys(d)).flat());\n const numberedFileName = `${baseName}-${fileCount}${extension}`;\n const numberedFileNameTarget = `${baseNameTarget}-${fileCount}${extensionTarget}`;\n await writeLargeCsv(numberedFileName, chunkToSave, headers);\n logger.info(\n colors.green(\n `Successfully wrote ${chunkToSave.length} identifiers to file \"${file}\"`,\n ),\n );\n\n const targetIdentifiers = results.flat();\n const headers2 = uniq(targetIdentifiers.map((d) => Object.keys(d)).flat());\n await writeLargeCsv(numberedFileNameTarget, targetIdentifiers, headers2);\n logger.info(\n colors.green(\n `Successfully wrote ${targetIdentifiers.length} identifiers to file \"${fileTarget}\"`,\n ),\n );\n\n logger.info(\n colors.blue(\n `Processed chunk of ${chunk.length} identifiers, found ${targetIdentifiers.length} target identifiers`,\n ),\n );\n fileCount += 1;\n };\n\n // Pull down outstanding identifiers using the new chunked function\n await pullChunkedCustomSiloOutstandingIdentifiers({\n dataSiloId: cronDataSiloId,\n auth,\n sombraAuth,\n actions,\n apiPageSize: pageLimit,\n savePageSize: chunkSize,\n onSave,\n transcendUrl,\n skipRequestCount,\n });\n\n logger.info(\n colors.green(\n `Successfully wrote ${allIdentifiersCount} identifiers to file \"${file}\"`,\n ),\n );\n logger.info(\n colors.green(\n `Successfully wrote ${allTargetIdentifiersCount} identifiers to file \"${fileTarget}\"`,\n ),\n 
);\n}\n"],"mappings":"yVA+BA,eAAsB,EAEpB,CACE,OACA,aACA,eACA,OACA,aACA,iBACA,mBACA,UACA,mBACA,YACA,aAEa,CACX,GACF,EAAA,EAAO,KACL,EAAA,QAAO,OACL,wEACD,CACF,EAID,OAAO,MAAM,EAAU,EACvB,GAAa,GACb,EAAY,IAAc,KAE1B,EAAA,EAAO,MACL,EAAA,QAAO,IACL,wBAAwB,EAAU,sDAAsD,EAAU,GACnG,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAASA,EAAAA,GAA4B,EAAc,EAAK,CACxD,CAAE,WAAU,aAAcC,EAAAA,EAAc,EAAK,CAC7C,CAAE,SAAU,EAAgB,UAAW,GAC3CA,EAAAA,EAAc,EAAW,CAEvB,EAAsB,EACtB,EAA4B,EAC5B,EAAY,EAmFhB,MAAMM,EAAAA,EAA4C,CAChD,WAAY,EACZ,OACA,aACA,UACA,YAAa,EACb,aAAc,EACd,OAxFa,KACb,IACkB,CAElB,GAAuB,EAAY,OAQnC,IAAM,EAAU,MAAML,EAAAA,GADIC,EAAAA,GAHDC,EAAAA,GADN,EAAY,IAAK,GAAM,EAAE,UAAoB,CACvB,CAGS,EAAU,CAG1D,KAAO,KACL,EAAA,EAAO,KACL,EAAA,QAAO,QACL,mCAAmC,EAAW,OAAO,WACtD,CACF,EACe,MAAMC,EAAAA,GACpB,EACA,EAAY,EACZ,CACE,aACA,YAAa,CAAC,EAAiB,CAChC,CACF,EACc,KAAK,CAAE,WAAU,cAAe,CAC7C,GAAI,CAAC,EACH,MAAU,MAAM,+BAA+B,IAAW,CAE5D,MAAO,CACL,SAAU,EACV,OAAQ,EACL,QAAQ,QAAS,GAAG,CACpB,MAAM,IAAI,CACV,KAAK,EACJ,QAAQ,eAAgB,GAAG,CAC/B,QACE,4DACH,EACD,EAGJ,CACE,YAAa,EACd,CACF,CAED,GAA6B,EAAQ,MAAM,CAAC,OAG5C,IAAM,EAAUD,EAAAA,GAAK,EAAY,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CAC7D,EAAmB,GAAG,EAAS,GAAG,IAAY,IAC9C,EAAyB,GAAG,EAAe,GAAG,IAAY,IAChE,MAAME,EAAAA,EAAc,EAAkB,EAAa,EAAQ,CAC3D,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAAY,OAAO,wBAAwB,EAAK,GACvE,CACF,CAED,IAAM,EAAoB,EAAQ,MAAM,CAExC,MAAMA,EAAAA,EAAc,EAAwB,EAD3BF,EAAAA,GAAK,EAAkB,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CACF,CACxE,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAAkB,OAAO,wBAAwB,EAAW,GACnF,CACF,CAED,EAAA,EAAO,KACL,EAAA,QAAO,KACL,sBAAsBD,EAAAA,GAAM,OAAO,sBAAsB,EAAkB,OAAO,qBACnF,CACF,CACD,GAAa,GAYb,eACA,mBACD,CAAC,CAEF,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAAoB,wBAAwB,EAAK,GACxE,CACF,CACD,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAA0B,wBAAwB,EAAW,GACpF,CACF"}
+ {"version":3,"file":"impl-PU77PoPR.cjs","names":["buildTranscendGraphQLClient","parseFilePath","map","chunk","uniq","fetchRequestFilesForRequest","writeLargeCsv","pullChunkedCustomSiloOutstandingIdentifiers"],"sources":["../src/commands/request/cron/pull-profiles/impl.ts"],"sourcesContent":["import type { RequestAction } from '@transcend-io/privacy-types';\nimport { logger } from '../../../../logger';\nimport colors from 'colors';\nimport { uniq, chunk } from 'lodash-es';\nimport { map } from '../../../../lib/bluebird';\nimport {\n buildTranscendGraphQLClient,\n fetchRequestFilesForRequest,\n} from '../../../../lib/graphql';\nimport type { LocalContext } from '../../../../context';\nimport {\n pullChunkedCustomSiloOutstandingIdentifiers,\n type CsvFormattedIdentifier,\n} from '../../../../lib/cron';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\nimport { parseFilePath, writeLargeCsv } from '../../../../lib/helpers';\n\nexport interface PullProfilesCommandFlags {\n file: string;\n fileTarget: string;\n transcendUrl: string;\n auth: string;\n sombraAuth?: string;\n cronDataSiloId: string;\n targetDataSiloId: string;\n actions: RequestAction[];\n skipRequestCount: boolean;\n pageLimit: number;\n chunkSize: number;\n}\n\nexport async function pullProfiles(\n this: LocalContext,\n {\n file,\n fileTarget,\n transcendUrl,\n auth,\n sombraAuth,\n cronDataSiloId,\n targetDataSiloId,\n actions,\n skipRequestCount,\n pageLimit,\n chunkSize,\n }: PullProfilesCommandFlags,\n): Promise<void> {\n if (skipRequestCount) {\n logger.info(\n colors.yellow(\n 'Skipping request count as requested. This may help speed up the call.',\n ),\n );\n }\n\n if (\n Number.isNaN(chunkSize) ||\n chunkSize <= 0 ||\n chunkSize % pageLimit !== 0\n ) {\n logger.error(\n colors.red(\n `Invalid chunk size: \"${chunkSize}\". 
Must be a positive integer that is a multiple of ${pageLimit}.`,\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n // Create GraphQL client to connect to Transcend backend\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n const { baseName, extension } = parseFilePath(file);\n const { baseName: baseNameTarget, extension: extensionTarget } =\n parseFilePath(fileTarget);\n\n let allIdentifiersCount = 0;\n let allTargetIdentifiersCount = 0;\n let fileCount = 0;\n // Create onSave callback to handle chunked processing\n const onSave = async (\n chunkToSave: CsvFormattedIdentifier[],\n ): Promise<void> => {\n // Add to all identifiers\n allIdentifiersCount += chunkToSave.length;\n\n // Get unique request IDs from this chunk\n const requestIds = chunkToSave.map((d) => d.requestId as string);\n const uniqueRequestIds = uniq(requestIds);\n\n // Pull down target identifiers for this chunk\n const chunkedRequestIds = chunk(uniqueRequestIds, pageLimit);\n const results = await map(\n chunkedRequestIds,\n async (requestIds) => {\n logger.info(\n colors.magenta(\n `Fetching target identifiers for ${requestIds.length} requests`,\n ),\n );\n const results = await fetchRequestFilesForRequest(\n client,\n pageLimit * 2,\n {\n requestIds,\n dataSiloIds: [targetDataSiloId],\n },\n );\n return results.map(({ fileName, remoteId }) => {\n if (!remoteId) {\n throw new Error(`Failed to find remoteId for ${fileName}`);\n }\n return {\n RecordId: remoteId,\n Object: fileName\n .replace('.json', '')\n .split('/')\n .pop()\n ?.replace(' Information', ''),\n Comment:\n 'Customer data deletion request submitted via transcend.io',\n };\n });\n },\n // We are grabbing all the request files for the 'pageLimit' # of requests at a time\n {\n concurrency: 1,\n },\n );\n\n allTargetIdentifiersCount += results.flat().length;\n\n // Write the identifiers and target identifiers to CSV\n const headers = uniq(chunkToSave.map((d) => Object.keys(d)).flat());\n const numberedFileName = `${baseName}-${fileCount}${extension}`;\n const numberedFileNameTarget = `${baseNameTarget}-${fileCount}${extensionTarget}`;\n await writeLargeCsv(numberedFileName, chunkToSave, headers);\n logger.info(\n colors.green(\n `Successfully wrote ${chunkToSave.length} identifiers to file \"${file}\"`,\n ),\n );\n\n const targetIdentifiers = results.flat();\n const headers2 = uniq(targetIdentifiers.map((d) => Object.keys(d)).flat());\n await writeLargeCsv(numberedFileNameTarget, targetIdentifiers, headers2);\n logger.info(\n colors.green(\n `Successfully wrote ${targetIdentifiers.length} identifiers to file \"${fileTarget}\"`,\n ),\n );\n\n logger.info(\n colors.blue(\n `Processed chunk of ${chunk.length} identifiers, found ${targetIdentifiers.length} target identifiers`,\n ),\n );\n fileCount += 1;\n };\n\n // Pull down outstanding identifiers using the new chunked function\n await pullChunkedCustomSiloOutstandingIdentifiers({\n dataSiloId: cronDataSiloId,\n auth,\n sombraAuth,\n actions,\n apiPageSize: pageLimit,\n savePageSize: chunkSize,\n onSave,\n transcendUrl,\n skipRequestCount,\n });\n\n logger.info(\n colors.green(\n `Successfully wrote ${allIdentifiersCount} identifiers to file \"${file}\"`,\n ),\n );\n logger.info(\n colors.green(\n `Successfully wrote ${allTargetIdentifiersCount} identifiers to file \"${fileTarget}\"`,\n ),\n 
);\n}\n"],"mappings":"yVA+BA,eAAsB,EAEpB,CACE,OACA,aACA,eACA,OACA,aACA,iBACA,mBACA,UACA,mBACA,YACA,aAEa,CACX,GACF,EAAA,EAAO,KACL,EAAA,QAAO,OACL,wEACD,CACF,EAID,OAAO,MAAM,EAAU,EACvB,GAAa,GACb,EAAY,IAAc,KAE1B,EAAA,EAAO,MACL,EAAA,QAAO,IACL,wBAAwB,EAAU,sDAAsD,EAAU,GACnG,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAASA,EAAAA,GAA4B,EAAc,EAAK,CACxD,CAAE,WAAU,aAAcC,EAAAA,EAAc,EAAK,CAC7C,CAAE,SAAU,EAAgB,UAAW,GAC3CA,EAAAA,EAAc,EAAW,CAEvB,EAAsB,EACtB,EAA4B,EAC5B,EAAY,EAmFhB,MAAMM,EAAAA,EAA4C,CAChD,WAAY,EACZ,OACA,aACA,UACA,YAAa,EACb,aAAc,EACd,OAxFa,KACb,IACkB,CAElB,GAAuB,EAAY,OAQnC,IAAM,EAAU,MAAML,EAAAA,GADIC,EAAAA,GAHDC,EAAAA,GADN,EAAY,IAAK,GAAM,EAAE,UAAoB,CACvB,CAGS,EAAU,CAG1D,KAAO,KACL,EAAA,EAAO,KACL,EAAA,QAAO,QACL,mCAAmC,EAAW,OAAO,WACtD,CACF,EACe,MAAMC,EAAAA,GACpB,EACA,EAAY,EACZ,CACE,aACA,YAAa,CAAC,EAAiB,CAChC,CACF,EACc,KAAK,CAAE,WAAU,cAAe,CAC7C,GAAI,CAAC,EACH,MAAU,MAAM,+BAA+B,IAAW,CAE5D,MAAO,CACL,SAAU,EACV,OAAQ,EACL,QAAQ,QAAS,GAAG,CACpB,MAAM,IAAI,CACV,KAAK,EACJ,QAAQ,eAAgB,GAAG,CAC/B,QACE,4DACH,EACD,EAGJ,CACE,YAAa,EACd,CACF,CAED,GAA6B,EAAQ,MAAM,CAAC,OAG5C,IAAM,EAAUD,EAAAA,GAAK,EAAY,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CAC7D,EAAmB,GAAG,EAAS,GAAG,IAAY,IAC9C,EAAyB,GAAG,EAAe,GAAG,IAAY,IAChE,MAAME,EAAAA,EAAc,EAAkB,EAAa,EAAQ,CAC3D,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAAY,OAAO,wBAAwB,EAAK,GACvE,CACF,CAED,IAAM,EAAoB,EAAQ,MAAM,CAExC,MAAMA,EAAAA,EAAc,EAAwB,EAD3BF,EAAAA,GAAK,EAAkB,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CACF,CACxE,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAAkB,OAAO,wBAAwB,EAAW,GACnF,CACF,CAED,EAAA,EAAO,KACL,EAAA,QAAO,KACL,sBAAsBD,EAAAA,GAAM,OAAO,sBAAsB,EAAkB,OAAO,qBACnF,CACF,CACD,GAAa,GAYb,eACA,mBACD,CAAC,CAEF,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAAoB,wBAAwB,EAAK,GACxE,CACF,CACD,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAA0B,wBAAwB,EAAW,GACpF,CACF"}
@@ -1,2 +1,2 @@
- require(`./enums-CBXlBJii.cjs`),require(`./constants-B-TmLA0w.cjs`);const e=require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`);require(`./logger-BaHHbWVd.cjs`),require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const t=require(`./done-input-validation-Cgk5kNBs.cjs`);async function n({auth:n,actions:r,origins:i,silentModeBefore:a,createdAtBefore:o,createdAtAfter:s,transcendUrl:c,concurrency:l}){t.t(this.process.exit),await e.J({transcendUrl:c,requestActions:r,auth:n,requestOrigins:i,concurrency:l,silentModeBefore:a,createdAtBefore:o,createdAtAfter:s})}exports.approve=n;
- //# sourceMappingURL=impl-CtMVi5m1.cjs.map
+ require(`./enums-CBXlBJii.cjs`),require(`./constants-Ck15_dOP.cjs`);const e=require(`./syncConfigurationToTranscend-f6E4-H5W.cjs`);require(`./logger-BaHHbWVd.cjs`),require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const t=require(`./done-input-validation-Cgk5kNBs.cjs`);async function n({auth:n,actions:r,origins:i,silentModeBefore:a,createdAtBefore:o,createdAtAfter:s,transcendUrl:c,concurrency:l}){t.t(this.process.exit),await e.J({transcendUrl:c,requestActions:r,auth:n,requestOrigins:i,concurrency:l,silentModeBefore:a,createdAtBefore:o,createdAtAfter:s})}exports.approve=n;
+ //# sourceMappingURL=impl-keqxvDu-.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-CtMVi5m1.cjs","names":["approvePrivacyRequests"],"sources":["../src/commands/request/approve/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\n\nimport { RequestAction, RequestOrigin } from '@transcend-io/privacy-types';\nimport { approvePrivacyRequests } from '../../../lib/requests';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface ApproveCommandFlags {\n auth: string;\n actions: RequestAction[];\n origins?: RequestOrigin[];\n silentModeBefore?: Date;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n transcendUrl: string;\n concurrency: number;\n}\n\nexport async function approve(\n this: LocalContext,\n {\n auth,\n actions,\n origins,\n silentModeBefore,\n createdAtBefore,\n createdAtAfter,\n transcendUrl,\n concurrency,\n }: ApproveCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await approvePrivacyRequests({\n transcendUrl,\n requestActions: actions,\n auth,\n requestOrigins: origins,\n concurrency,\n silentModeBefore,\n createdAtBefore,\n createdAtAfter,\n });\n}\n"],"mappings":"6QAiBA,eAAsB,EAEpB,CACE,OACA,UACA,UACA,mBACA,kBACA,iBACA,eACA,eAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAuB,CAC3B,eACA,eAAgB,EAChB,OACA,eAAgB,EAChB,cACA,mBACA,kBACA,iBACD,CAAC"}
+ {"version":3,"file":"impl-keqxvDu-.cjs","names":["approvePrivacyRequests"],"sources":["../src/commands/request/approve/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\n\nimport { RequestAction, RequestOrigin } from '@transcend-io/privacy-types';\nimport { approvePrivacyRequests } from '../../../lib/requests';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface ApproveCommandFlags {\n auth: string;\n actions: RequestAction[];\n origins?: RequestOrigin[];\n silentModeBefore?: Date;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n transcendUrl: string;\n concurrency: number;\n}\n\nexport async function approve(\n this: LocalContext,\n {\n auth,\n actions,\n origins,\n silentModeBefore,\n createdAtBefore,\n createdAtAfter,\n transcendUrl,\n concurrency,\n }: ApproveCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await approvePrivacyRequests({\n transcendUrl,\n requestActions: actions,\n auth,\n requestOrigins: origins,\n concurrency,\n silentModeBefore,\n createdAtBefore,\n createdAtAfter,\n });\n}\n"],"mappings":"6QAiBA,eAAsB,EAEpB,CACE,OACA,UACA,UACA,mBACA,kBACA,iBACA,eACA,eAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAuB,CAC3B,eACA,eAAgB,EAChB,OACA,eAAgB,EAChB,cACA,mBACA,kBACA,iBACD,CAAC"}
@@ -1,2 +1,2 @@
- const e=require(`./enums-CBXlBJii.cjs`),t=require(`./constants-B-TmLA0w.cjs`);require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`),require(`./logger-BaHHbWVd.cjs`),require(`./buildAIIntegrationType-n_Qlv8wG.cjs`),require(`./codecs-JSDJgtyL.cjs`);const n=require(`./api-keys-CDp8NUhN.cjs`),r=require(`./done-input-validation-Cgk5kNBs.cjs`);let i=require(`node:fs`);async function a({email:e,password:a,apiKeyTitle:o,file:s,scopes:c,deleteExistingApiKey:l,createNewApiKey:u,parentOrganizationId:d,transcendUrl:f}){r.t(this.process.exit);let{errors:p,apiKeys:m}=await n.i({transcendUrl:f,password:a,email:e,parentOrganizationId:d,deleteExistingApiKey:l,createNewApiKey:u,apiKeyTitle:o,scopes:c.map(e=>t.c[e].name)});(0,i.writeFileSync)(s,`${JSON.stringify(m,null,2)}\n`),p.length>0&&this.process.exit(1)}exports.generateApiKeys=a;
- //# sourceMappingURL=impl-CCeEUy6z.cjs.map
+ const e=require(`./enums-CBXlBJii.cjs`),t=require(`./constants-Ck15_dOP.cjs`);require(`./syncConfigurationToTranscend-f6E4-H5W.cjs`),require(`./logger-BaHHbWVd.cjs`),require(`./buildAIIntegrationType-n_Qlv8wG.cjs`),require(`./codecs-JSDJgtyL.cjs`);const n=require(`./api-keys-DQKR5mr6.cjs`),r=require(`./done-input-validation-Cgk5kNBs.cjs`);let i=require(`node:fs`);async function a({email:e,password:a,apiKeyTitle:o,file:s,scopes:c,deleteExistingApiKey:l,createNewApiKey:u,parentOrganizationId:d,transcendUrl:f}){r.t(this.process.exit);let{errors:p,apiKeys:m}=await n.i({transcendUrl:f,password:a,email:e,parentOrganizationId:d,deleteExistingApiKey:l,createNewApiKey:u,apiKeyTitle:o,scopes:c.map(e=>t.c[e].name)});(0,i.writeFileSync)(s,`${JSON.stringify(m,null,2)}\n`),p.length>0&&this.process.exit(1)}exports.generateApiKeys=a;
+ //# sourceMappingURL=impl-miCFpR_U.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"impl-CCeEUy6z.cjs","names":["generateCrossAccountApiKeys","SCOPES_BY_TITLE"],"sources":["../src/commands/admin/generate-api-keys/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { writeFileSync } from 'node:fs';\n\nimport { ScopeName } from '@transcend-io/privacy-types';\n\nimport { generateCrossAccountApiKeys } from '../../../lib/api-keys';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { SCOPES_BY_TITLE } from '../../../constants';\n\n// Command flag interface\nexport interface GenerateApiKeysCommandFlags {\n email: string;\n password: string;\n apiKeyTitle: string;\n file: string;\n scopes: string[];\n deleteExistingApiKey: boolean;\n createNewApiKey: boolean;\n parentOrganizationId?: string;\n transcendUrl: string;\n}\n\n// Command implementation\nexport async function generateApiKeys(\n this: LocalContext,\n {\n email,\n password,\n apiKeyTitle,\n file,\n scopes,\n deleteExistingApiKey,\n createNewApiKey,\n parentOrganizationId,\n transcendUrl,\n }: GenerateApiKeysCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const scopeNames = scopes.map(\n (scopeTitle) => SCOPES_BY_TITLE[scopeTitle].name as ScopeName,\n );\n\n // Upload privacy requests\n const { errors, apiKeys } = await generateCrossAccountApiKeys({\n transcendUrl,\n password,\n email,\n parentOrganizationId,\n deleteExistingApiKey,\n createNewApiKey,\n apiKeyTitle,\n scopes: scopeNames,\n });\n\n // Write to disk\n writeFileSync(file, `${JSON.stringify(apiKeys, null, 2)}\\n`);\n if (errors.length > 0) {\n this.process.exit(1);\n }\n}\n"],"mappings":"8WAuBA,eAAsB,EAEpB,CACE,QACA,WACA,cACA,OACA,SACA,uBACA,kBACA,uBACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAOtC,GAAM,CAAE,SAAQ,WAAY,MAAMA,EAAAA,EAA4B,CAC5D,eACA,WACA,QACA,uBACA,uBACA,kBACA,cACA,OAbiB,EAAO,IACvB,GAAeC,EAAAA,EAAgB,GAAY,KAC7C,CAYA,CAAC,EAGF,EAAA,EAAA,eAAc,EAAM,GAAG,KAAK,UAAU,EAAS,KAAM,EAAE,CAAC,IAAI,CACxD,EAAO,OAAS,GAClB,KAAK,QAAQ,KAAK,EAAE"}
+ {"version":3,"file":"impl-miCFpR_U.cjs","names":["generateCrossAccountApiKeys","SCOPES_BY_TITLE"],"sources":["../src/commands/admin/generate-api-keys/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { writeFileSync } from 'node:fs';\n\nimport { ScopeName } from '@transcend-io/privacy-types';\n\nimport { generateCrossAccountApiKeys } from '../../../lib/api-keys';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { SCOPES_BY_TITLE } from '../../../constants';\n\n// Command flag interface\nexport interface GenerateApiKeysCommandFlags {\n email: string;\n password: string;\n apiKeyTitle: string;\n file: string;\n scopes: string[];\n deleteExistingApiKey: boolean;\n createNewApiKey: boolean;\n parentOrganizationId?: string;\n transcendUrl: string;\n}\n\n// Command implementation\nexport async function generateApiKeys(\n this: LocalContext,\n {\n email,\n password,\n apiKeyTitle,\n file,\n scopes,\n deleteExistingApiKey,\n createNewApiKey,\n parentOrganizationId,\n transcendUrl,\n }: GenerateApiKeysCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const scopeNames = scopes.map(\n (scopeTitle) => SCOPES_BY_TITLE[scopeTitle].name as ScopeName,\n );\n\n // Upload privacy requests\n const { errors, apiKeys } = await generateCrossAccountApiKeys({\n transcendUrl,\n password,\n email,\n parentOrganizationId,\n deleteExistingApiKey,\n createNewApiKey,\n apiKeyTitle,\n scopes: scopeNames,\n });\n\n // Write to disk\n writeFileSync(file, `${JSON.stringify(apiKeys, null, 2)}\\n`);\n if (errors.length > 0) {\n this.process.exit(1);\n }\n}\n"],"mappings":"8WAuBA,eAAsB,EAEpB,CACE,QACA,WACA,cACA,OACA,SACA,uBACA,kBACA,uBACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAOtC,GAAM,CAAE,SAAQ,WAAY,MAAMA,EAAAA,EAA4B,CAC5D,eACA,WACA,QACA,uBACA,uBACA,kBACA,cACA,OAbiB,EAAO,IACvB,GAAeC,EAAAA,EAAgB,GAAY,KAC7C,CAYA,CAAC,EAGF,EAAA,EAAA,eAAc,EAAM,GAAG,KAAK,UAAU,EAAS,KAAM,EAAE,CAAC,IAAI,CACxD,EAAO,OAAS,GAClB,KAAK,QAAQ,KAAK,EAAE"}