@transcend-io/cli 8.33.1 → 8.34.0
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/README.md +18 -17
- package/dist/{api-keys-BY5LoEwO.cjs → api-keys-CDp8NUhN.cjs} +2 -2
- package/dist/{api-keys-BY5LoEwO.cjs.map → api-keys-CDp8NUhN.cjs.map} +1 -1
- package/dist/{app-BlzWBy2v.cjs → app-rVGy2Ks-.cjs} +18 -18
- package/dist/app-rVGy2Ks-.cjs.map +1 -0
- package/dist/bin/bash-complete.cjs +1 -1
- package/dist/bin/cli.cjs +1 -1
- package/dist/bin/deprecated-command.cjs +1 -1
- package/dist/{code-scanning-Bvs5QHcn.cjs → code-scanning-BwfVNIHr.cjs} +2 -2
- package/dist/{code-scanning-Bvs5QHcn.cjs.map → code-scanning-BwfVNIHr.cjs.map} +1 -1
- package/dist/{command-d4zv2BeF.cjs → command-3fhEz5PC.cjs} +2 -2
- package/dist/{command-d4zv2BeF.cjs.map → command-3fhEz5PC.cjs.map} +1 -1
- package/dist/{consent-manager-BjsU0NzZ.cjs → consent-manager-DXWjvCtI.cjs} +2 -2
- package/dist/{consent-manager-BjsU0NzZ.cjs.map → consent-manager-DXWjvCtI.cjs.map} +1 -1
- package/dist/{constants-KUUI8R0B.cjs → constants-B-TmLA0w.cjs} +2 -2
- package/dist/{constants-KUUI8R0B.cjs.map → constants-B-TmLA0w.cjs.map} +1 -1
- package/dist/{cron-trHmh4Ul.cjs → cron-DQHN57v7.cjs} +2 -2
- package/dist/{cron-trHmh4Ul.cjs.map → cron-DQHN57v7.cjs.map} +1 -1
- package/dist/{data-inventory-CTgo5IAK.cjs → data-inventory-flXV6qPl.cjs} +2 -2
- package/dist/{data-inventory-CTgo5IAK.cjs.map → data-inventory-flXV6qPl.cjs.map} +1 -1
- package/dist/{dataFlowsToDataSilos-CKr7XUmU.cjs → dataFlowsToDataSilos-Lk7WQ39V.cjs} +2 -2
- package/dist/{dataFlowsToDataSilos-CKr7XUmU.cjs.map → dataFlowsToDataSilos-Lk7WQ39V.cjs.map} +1 -1
- package/dist/{impl-BQuhNgNA.cjs → impl-2u3q0rji.cjs} +2 -2
- package/dist/{impl-BQuhNgNA.cjs.map → impl-2u3q0rji.cjs.map} +1 -1
- package/dist/{impl-BkoQZksU.cjs → impl-3ih-x09b.cjs} +2 -2
- package/dist/{impl-BkoQZksU.cjs.map → impl-3ih-x09b.cjs.map} +1 -1
- package/dist/{impl-oSvrDwzY.cjs → impl-80GXtjmz.cjs} +2 -2
- package/dist/{impl-oSvrDwzY.cjs.map → impl-80GXtjmz.cjs.map} +1 -1
- package/dist/{impl-D2Rar4JL.cjs → impl-8PlQ3Cvy.cjs} +2 -2
- package/dist/{impl-D2Rar4JL.cjs.map → impl-8PlQ3Cvy.cjs.map} +1 -1
- package/dist/{impl-BVXrPQ1G.cjs → impl-B1p9GNrM.cjs} +2 -2
- package/dist/{impl-BVXrPQ1G.cjs.map → impl-B1p9GNrM.cjs.map} +1 -1
- package/dist/{impl-wBfm0THj.cjs → impl-B4iI3rcF.cjs} +2 -2
- package/dist/{impl-wBfm0THj.cjs.map → impl-B4iI3rcF.cjs.map} +1 -1
- package/dist/{impl-CSfCo7EY.cjs → impl-B6qG10UZ.cjs} +2 -2
- package/dist/{impl-CSfCo7EY.cjs.map → impl-B6qG10UZ.cjs.map} +1 -1
- package/dist/{impl-DuaKD2ga.cjs → impl-BJ8i_gqQ.cjs} +2 -2
- package/dist/{impl-DuaKD2ga.cjs.map → impl-BJ8i_gqQ.cjs.map} +1 -1
- package/dist/{impl-_pb_69kE.cjs → impl-BZc5cmdE.cjs} +2 -2
- package/dist/{impl-_pb_69kE.cjs.map → impl-BZc5cmdE.cjs.map} +1 -1
- package/dist/{impl-BTXtq2nD.cjs → impl-Bc9DMV-V.cjs} +2 -2
- package/dist/{impl-BTXtq2nD.cjs.map → impl-Bc9DMV-V.cjs.map} +1 -1
- package/dist/{impl-D5zl_Mu5.cjs → impl-BjsBvvGF.cjs} +2 -2
- package/dist/{impl-D5zl_Mu5.cjs.map → impl-BjsBvvGF.cjs.map} +1 -1
- package/dist/{impl-3m5kiYs0.cjs → impl-BqyO4vYa.cjs} +2 -2
- package/dist/{impl-3m5kiYs0.cjs.map → impl-BqyO4vYa.cjs.map} +1 -1
- package/dist/{impl-DBAX5Mjz.cjs → impl-Bsqlw8_g.cjs} +2 -2
- package/dist/{impl-DBAX5Mjz.cjs.map → impl-Bsqlw8_g.cjs.map} +1 -1
- package/dist/{impl-DPzcKPpu.cjs → impl-BuJNWbOW.cjs} +2 -2
- package/dist/{impl-DPzcKPpu.cjs.map → impl-BuJNWbOW.cjs.map} +1 -1
- package/dist/{impl-l01EW7nc.cjs → impl-C2_oQebA.cjs} +2 -2
- package/dist/{impl-l01EW7nc.cjs.map → impl-C2_oQebA.cjs.map} +1 -1
- package/dist/{impl-k_N4BSHb.cjs → impl-CCeEUy6z.cjs} +2 -2
- package/dist/{impl-k_N4BSHb.cjs.map → impl-CCeEUy6z.cjs.map} +1 -1
- package/dist/impl-CUkxcZrf.cjs +2 -0
- package/dist/impl-CUkxcZrf.cjs.map +1 -0
- package/dist/{impl-CL4UPQTF.cjs → impl-CZmlwib3.cjs} +2 -2
- package/dist/{impl-CL4UPQTF.cjs.map → impl-CZmlwib3.cjs.map} +1 -1
- package/dist/{impl-DlHm8Nk9.cjs → impl-CaSO2LPb.cjs} +2 -2
- package/dist/{impl-DlHm8Nk9.cjs.map → impl-CaSO2LPb.cjs.map} +1 -1
- package/dist/{impl-BATmiMYN.cjs → impl-CaUSDPuW.cjs} +2 -2
- package/dist/{impl-BATmiMYN.cjs.map → impl-CaUSDPuW.cjs.map} +1 -1
- package/dist/{impl-BJCXAnkF.cjs → impl-Cc-Lfiig.cjs} +2 -2
- package/dist/{impl-BJCXAnkF.cjs.map → impl-Cc-Lfiig.cjs.map} +1 -1
- package/dist/{impl-Bthcogve.cjs → impl-CpoSlP1o.cjs} +2 -2
- package/dist/{impl-Bthcogve.cjs.map → impl-CpoSlP1o.cjs.map} +1 -1
- package/dist/{impl-CIkV6OPL.cjs → impl-CrsHy3BZ.cjs} +2 -2
- package/dist/{impl-CIkV6OPL.cjs.map → impl-CrsHy3BZ.cjs.map} +1 -1
- package/dist/{impl-BMnyINiQ.cjs → impl-CtMVi5m1.cjs} +2 -2
- package/dist/{impl-BMnyINiQ.cjs.map → impl-CtMVi5m1.cjs.map} +1 -1
- package/dist/{impl-BdqGR4Db.cjs → impl-Cw7Jxx0V.cjs} +2 -2
- package/dist/{impl-BdqGR4Db.cjs.map → impl-Cw7Jxx0V.cjs.map} +1 -1
- package/dist/{impl-D9degodL.cjs → impl-CwPRkBc0.cjs} +2 -2
- package/dist/{impl-D9degodL.cjs.map → impl-CwPRkBc0.cjs.map} +1 -1
- package/dist/{impl-DLlHf0f5.cjs → impl-CzO7dqsL.cjs} +2 -2
- package/dist/{impl-DLlHf0f5.cjs.map → impl-CzO7dqsL.cjs.map} +1 -1
- package/dist/{impl-Bks9LY66.cjs → impl-D0r4dSxM.cjs} +2 -2
- package/dist/{impl-Bks9LY66.cjs.map → impl-D0r4dSxM.cjs.map} +1 -1
- package/dist/{impl-BicjL2jn.cjs → impl-DBnRvkUi.cjs} +2 -2
- package/dist/{impl-BicjL2jn.cjs.map → impl-DBnRvkUi.cjs.map} +1 -1
- package/dist/{impl-DZWafiRw.cjs → impl-DOmKR8yz.cjs} +2 -2
- package/dist/{impl-DZWafiRw.cjs.map → impl-DOmKR8yz.cjs.map} +1 -1
- package/dist/{impl-wC9NKk7V.cjs → impl-DX7gLoTo.cjs} +2 -2
- package/dist/{impl-wC9NKk7V.cjs.map → impl-DX7gLoTo.cjs.map} +1 -1
- package/dist/{impl-NxZOP7L7.cjs → impl-DcQ_HfDZ.cjs} +2 -2
- package/dist/{impl-NxZOP7L7.cjs.map → impl-DcQ_HfDZ.cjs.map} +1 -1
- package/dist/{impl-BrjAmeGI.cjs → impl-DjTjLgew.cjs} +2 -2
- package/dist/{impl-BrjAmeGI.cjs.map → impl-DjTjLgew.cjs.map} +1 -1
- package/dist/{impl-CQ1Eg1Gs.cjs → impl-DoP4FUJI.cjs} +2 -2
- package/dist/{impl-CQ1Eg1Gs.cjs.map → impl-DoP4FUJI.cjs.map} +1 -1
- package/dist/{impl-GBzXG1_z.cjs → impl-DrNWIvMG.cjs} +2 -2
- package/dist/{impl-GBzXG1_z.cjs.map → impl-DrNWIvMG.cjs.map} +1 -1
- package/dist/{impl-ECqWmX0a.cjs → impl-DvlAq8xf.cjs} +2 -2
- package/dist/{impl-ECqWmX0a.cjs.map → impl-DvlAq8xf.cjs.map} +1 -1
- package/dist/{impl-DGql4qi2.cjs → impl-E1vzeNmp.cjs} +2 -2
- package/dist/{impl-DGql4qi2.cjs.map → impl-E1vzeNmp.cjs.map} +1 -1
- package/dist/{impl-DQ67TBSS.cjs → impl-EEKe6HmF.cjs} +2 -2
- package/dist/{impl-DQ67TBSS.cjs.map → impl-EEKe6HmF.cjs.map} +1 -1
- package/dist/{impl-BdrUgBNn.cjs → impl-NdV_MRsm.cjs} +2 -2
- package/dist/{impl-BdrUgBNn.cjs.map → impl-NdV_MRsm.cjs.map} +1 -1
- package/dist/{impl-d01YGIfB.cjs → impl-TQVXJemY.cjs} +2 -2
- package/dist/{impl-d01YGIfB.cjs.map → impl-TQVXJemY.cjs.map} +1 -1
- package/dist/{impl-DB2kHDaA.cjs → impl-b6KwZ74o.cjs} +2 -2
- package/dist/{impl-DB2kHDaA.cjs.map → impl-b6KwZ74o.cjs.map} +1 -1
- package/dist/{impl-DrBeb50_.cjs → impl-t_fZSUcj.cjs} +2 -2
- package/dist/{impl-DrBeb50_.cjs.map → impl-t_fZSUcj.cjs.map} +1 -1
- package/dist/index.cjs +1 -1
- package/dist/{manual-enrichment-D0Bhyycl.cjs → manual-enrichment-Y_BQaSZQ.cjs} +2 -2
- package/dist/{manual-enrichment-D0Bhyycl.cjs.map → manual-enrichment-Y_BQaSZQ.cjs.map} +1 -1
- package/dist/{pooling-CWVm4ilp.cjs → pooling-Ct83vfEh.cjs} +2 -2
- package/dist/{pooling-CWVm4ilp.cjs.map → pooling-Ct83vfEh.cjs.map} +1 -1
- package/dist/{preference-management-Dhmm0sgq.cjs → preference-management-5uJDKuMK.cjs} +3 -3
- package/dist/{preference-management-Dhmm0sgq.cjs.map → preference-management-5uJDKuMK.cjs.map} +1 -1
- package/dist/{syncConfigurationToTranscend-DWM5Kkl4.cjs → syncConfigurationToTranscend-Bpge5AcC.cjs} +2 -2
- package/dist/{syncConfigurationToTranscend-DWM5Kkl4.cjs.map → syncConfigurationToTranscend-Bpge5AcC.cjs.map} +1 -1
- package/dist/{uploadConsents-BhsOWa3P.cjs → uploadConsents-CJc_6Qwd.cjs} +2 -2
- package/dist/{uploadConsents-BhsOWa3P.cjs.map → uploadConsents-CJc_6Qwd.cjs.map} +1 -1
- package/package.json +1 -1
- package/dist/app-BlzWBy2v.cjs.map +0 -1
- package/dist/impl-hWCNtvcN.cjs +0 -2
- package/dist/impl-hWCNtvcN.cjs.map +0 -1

@@ -1 +1 @@
-{"version":3,"file":"impl-NxZOP7L7.cjs","names":["fastcsv","Parser","Transform","extractErrorMessage","makeHeader","makeWorkerRows","computePoolSize","runPool","CHILD_FLAG","dashboardPlugin","createExtraKeyHandler"],"sources":["../src/lib/helpers/collectCsvFilesOrExit.ts","../src/lib/helpers/chunkOneCsvFile.ts","../src/commands/admin/chunk-csv/worker.ts","../src/commands/admin/chunk-csv/ui/plugin.ts","../src/commands/admin/chunk-csv/impl.ts"],"sourcesContent":["import { join } from 'node:path';\nimport { readdirSync, statSync } from 'node:fs';\nimport colors from 'colors';\nimport { logger } from '../../logger';\nimport type { LocalContext } from '../../context';\n\n/**\n * Validate flags and collect CSV file paths from a directory.\n * On validation error, the provided `exit` function is called.\n *\n * @param directory - the directory containing CSV files\n * @param localContext - the context of the command, used for logging and exit\n * @returns an array of valid CSV file paths\n */\nexport function collectCsvFilesOrExit(\n directory: string | undefined,\n localContext: LocalContext,\n): string[] {\n if (!directory) {\n logger.error(colors.red('A --directory must be provided.'));\n localContext.process.exit(1);\n }\n\n let files: string[] = [];\n try {\n const entries = readdirSync(directory);\n files = entries\n .filter((f) => f.endsWith('.csv'))\n .map((f) => join(directory, f))\n .filter((p) => {\n try {\n return statSync(p).isFile();\n } catch {\n return false;\n }\n });\n } catch (err) {\n logger.error(colors.red(`Failed to read directory: ${directory}`));\n logger.error(colors.red((err as Error).message));\n localContext.process.exit(1);\n }\n\n if (files.length === 0) {\n logger.error(colors.red(`No CSV files found in directory: ${directory}`));\n localContext.process.exit(1);\n }\n logger.info(colors.green(`Found: ${files.join(', ')} CSV files`));\n return files;\n}\n","import { createReadStream, createWriteStream } from 'node:fs';\nimport { mkdir, readdir, unlink, stat } from 'node:fs/promises';\nimport { pipeline } from 'node:stream/promises';\nimport { Transform } from 'node:stream';\nimport { once } from 'node:events';\nimport { Parser } from 'csv-parse';\nimport { basename, dirname, join } from 'node:path';\nimport colors from 'colors';\nimport * as fastcsv from 'fast-csv';\nimport { logger } from '../../logger';\n\n/**\n * Options for chunking a single CSV file\n */\nexport type ChunkOpts = {\n /** Path to the CSV file to chunk */\n filePath: string;\n /** Output directory for chunk files; defaults to the same directory as the input file */\n outputDir?: string;\n /** Clear output directory before starting */\n clearOutputDir: boolean;\n /** Chunk size in MB */\n chunkSizeMB: number;\n /** Optional report interval in milliseconds for progress updates */\n reportEveryMs?: number;\n /** Callback for progress updates */\n onProgress: (processed: number, total?: number) => void;\n};\n\n/**\n * Create a CSV writer (fast-csv formatter piped to a write stream) that writes\n * a header line first, and then accepts object rows. 
Returns a tiny API to\n * write rows with backpressure handling and to close the file cleanly.\n *\n * @param filePath - The path to the output CSV file\n * @param headers - The headers for the CSV file\n * @returns An object with `write` and `end` methods\n */\nfunction createCsvChunkWriter(\n filePath: string,\n headers: string[],\n): {\n /** Write a row object to the CSV file */\n write: (row: Record<string, unknown>) => Promise<void>;\n /** Close the CSV file, ensuring all data is flushed */\n end: () => Promise<void>;\n} {\n const ws = createWriteStream(filePath);\n const csv = fastcsv.format({ headers, writeHeaders: true, objectMode: true });\n // Pipe csv → file stream\n csv.pipe(ws);\n\n return {\n /**\n * Write a row object to the CSV file.\n *\n * @param row - The row data as an object\n */\n async write(row) {\n // Respect backpressure from fast-csv formatter\n const ok = csv.write(row);\n if (!ok) {\n await once(csv, 'drain');\n }\n },\n /**\n * Close the CSV file, ensuring all data is flushed.\n */\n async end() {\n // End formatter; wait for underlying file stream to finish flush/close\n const finished = Promise.all([once(ws, 'finish')]);\n csv.end();\n await finished;\n },\n };\n}\n\n/**\n * Zero-pad chunk numbers to four digits (e.g., 1 → \"0001\").\n *\n * @param n - The chunk number to pad\n * @returns The padded chunk number as a string\n */\nfunction pad4(n: number): string {\n return String(n).padStart(4, '0');\n}\n\n/**\n * Approximate row size in bytes using comma-joined field values.\n *\n * @param obj - The row object to estimate size for\n * @returns Approximate byte size of the row when serialized as CSV\n */\nfunction approxRowBytes(obj: Record<string, unknown>): number {\n // naive but fast; adequate for chunk rollover thresholding\n return Buffer.byteLength(\n Object.values(obj)\n .map((v) => (v == null ? '' : String(v)))\n .join(','),\n 'utf8',\n );\n}\n\n/**\n * Stream a single CSV file and write chunk files of roughly chunkSizeMB.\n * - Writes header to each chunk.\n * - Logs periodic progress via onProgress.\n *\n * @param opts - Options for chunking the file\n * @returns Promise that resolves when done\n */\nexport async function chunkOneCsvFile(opts: ChunkOpts): Promise<void> {\n const {\n filePath,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n onProgress,\n reportEveryMs = 500,\n } = opts;\n const { size: fileBytes } = await stat(filePath); // total bytes on disk\n let lastTick = 0;\n\n logger.info(\n colors.magenta(`Chunking ${filePath} into ~${chunkSizeMB}MB files...`),\n );\n\n const chunkSizeBytes = Math.floor(chunkSizeMB * 1024 * 1024);\n const baseName = basename(filePath, '.csv');\n const outDir = outputDir || dirname(filePath);\n logger.info(colors.magenta(`Output directory: ${outDir}`));\n await mkdir(outDir, { recursive: true });\n\n // Clear previous chunk files for this base\n if (clearOutputDir) {\n logger.warn(colors.yellow(`Clearing output directory: ${outDir}`));\n const files = await readdir(outDir);\n await Promise.all(\n files\n .filter((f) => f.startsWith(`${baseName}_chunk_`) && f.endsWith('.csv'))\n .map((f) => unlink(join(outDir, f))),\n );\n }\n\n let headerRow: string[] | null = null;\n let expectedCols: number | null = null;\n let totalLines = 0;\n let currentChunk = 1;\n let currentSize = 0;\n\n const parser = new Parser({\n columns: false,\n skip_empty_lines: true,\n });\n\n // running sample to estimate avg row bytes\n let sampleBytes = 0;\n let sampleRows = 0;\n\n const emit = (): void => {\n const avg = sampleRows > 0 ? 
sampleBytes / sampleRows : 0;\n const estTotal =\n avg > 0 ? Math.max(totalLines, Math.ceil(fileBytes / avg)) : undefined;\n onProgress(totalLines, estTotal); // <-- now has total\n lastTick = Date.now();\n };\n\n // seed an initial 0/N as soon as we start\n emit();\n\n // Current active chunk writer; created after we know headers\n let writer: {\n /** Write a row object to the current chunk file */\n write: (row: Record<string, unknown>) => Promise<void>;\n /** Close the current chunk file */\n end: () => Promise<void>;\n } | null = null;\n\n // Returns current chunk file path — chunk number is always 4-digit padded\n const currentChunkPath = (): string =>\n join(outDir, `${baseName}_chunk_${pad4(currentChunk)}.csv`);\n\n const t = new Transform({\n objectMode: true,\n /**\n * Transform each row of the CSV file into a chunk.\n *\n * @param row - The current row being processed\n * @param _enc - Encoding (not used)\n * @param cb - Callback to signal completion or error\n */\n async transform(row: string[], _enc, cb) {\n try {\n // First row is the header\n if (!headerRow) {\n headerRow = row.slice(0);\n expectedCols = headerRow.length;\n\n // Open first chunk with header asynchronously\n writer = createCsvChunkWriter(currentChunkPath(), headerRow);\n cb();\n return;\n }\n\n // sanity check rows (non-fatal)\n if (expectedCols !== null && row.length !== expectedCols) {\n // optionally log a warning or collect metrics\n logger.warn(\n colors.yellow(\n `Row has ${row.length} cols; expected ${expectedCols}`,\n ),\n );\n }\n\n totalLines += 1;\n if (totalLines % 250_000 === 0) {\n onProgress(totalLines);\n }\n\n // Build row object using the original header\n const obj = Object.fromEntries(headerRow!.map((h, i) => [h, row[i]]));\n\n // Determine the row size up-front\n const rowBytes = approxRowBytes(obj);\n sampleBytes += rowBytes;\n sampleRows += 1;\n\n // time-based throttle for UI updates\n if (Date.now() - lastTick >= reportEveryMs) emit();\n\n // If adding this row would exceed the threshold, roll first,\n // so this row becomes the first row in the next chunk.\n if (\n writer &&\n currentSize > 0 &&\n currentSize + rowBytes > chunkSizeBytes\n ) {\n await writer.end();\n currentChunk += 1;\n currentSize = 0;\n logger.info(\n colors.green(\n `Rolling to chunk ${currentChunk} after ${totalLines.toLocaleString()} rows.`,\n ),\n );\n writer = createCsvChunkWriter(currentChunkPath(), headerRow!);\n }\n\n // Ensure writer exists (should after header)\n if (!writer) {\n writer = createCsvChunkWriter(currentChunkPath(), headerRow!);\n }\n\n // Write row and update approximate size\n await writer.write(obj);\n currentSize += rowBytes;\n\n cb();\n } catch (e) {\n cb(e as Error);\n }\n },\n\n // Ensure final file is closed\n /**\n * Flush is called when the readable has ended; we close any open writer.\n *\n * @param cb - Callback to signal completion or error\n */\n async flush(cb) {\n try {\n if (writer) {\n await writer.end();\n writer = null;\n }\n emit(); // Final progress tick\n cb();\n } catch (e) {\n cb(e as Error);\n }\n },\n });\n\n const rs = createReadStream(filePath);\n await pipeline(rs, parser, t);\n\n // Final progress tick\n onProgress(totalLines);\n logger.info(\n colors.green(\n `Chunked ${filePath} into ${currentChunk} file(s); processed ${totalLines.toLocaleString()} rows.`,\n ),\n );\n}\n","import { extractErrorMessage } from '../../../lib/helpers';\nimport { chunkOneCsvFile } from '../../../lib/helpers/chunkOneCsvFile';\nimport type { ToWorker } from 
'../../../lib/pooling';\nimport { logger } from '../../../logger';\n\n/**\n * A unit of work: instructs a worker to chunk a single CSV file.\n */\nexport type ChunkTask = {\n /** Absolute path of the CSV file to chunk. */\n filePath: string;\n /** Options controlling output and chunk size. */\n options: {\n /** Optional directory where chunked output files should be written. */\n outputDir?: string;\n /** Whether to clear any pre-existing output chunks before writing new ones. */\n clearOutputDir: boolean;\n /** Approximate target chunk size in MB (well under Node’s string size limits). */\n chunkSizeMB: number;\n };\n};\n\n/**\n * Per-worker progress snapshot for the chunk-csv command.\n */\nexport type ChunkProgress = {\n /** File being processed by the worker. */\n filePath: string;\n /** Number of rows processed so far. */\n processed: number;\n /** Optional total rows in the file (not always known). */\n total?: number;\n};\n\n/**\n * Worker result message once a file has finished processing.\n */\nexport type ChunkResult = {\n /** Whether the file completed successfully. */\n ok: boolean;\n /** File path for which this result applies. */\n filePath: string;\n /** Optional error message if the file failed to chunk. */\n error?: string;\n};\n\n/**\n * Worker entrypoint.\n *\n * Lifecycle:\n * 1) Announce readiness to the parent via `{ type: 'ready' }`.\n * 2) Wait for `{ type: 'task' }` messages; for each, call `chunkOneCsvFile(...)`.\n * - While chunking, forward progress to the parent via `{ type: 'progress' }`.\n * - On completion, send `{ type: 'result', ok: true }`.\n * - On error, send `{ type: 'result', ok: false, error }` and exit(1).\n * 3) On `{ type: 'shutdown' }`, exit(0) gracefully.\n *\n * Notes:\n * - This process is typically spawned by a pool manager that assigns file paths to workers.\n * - The long-lived promise at the end keeps the worker alive between tasks until the parent\n * sends an explicit shutdown.\n */\nexport async function runChild(): Promise<void> {\n const workerId = Number(process.env.WORKER_ID || '0');\n logger.info(`[w${workerId}] ready pid=${process.pid}`);\n\n // Notify the parent that the worker is ready to receive tasks.\n process.send?.({ type: 'ready' });\n\n // Main message loop: receive tasks and shutdown requests from the parent.\n process.on('message', async (msg: ToWorker<ChunkTask>) => {\n if (!msg || typeof msg !== 'object') return;\n\n // Graceful shutdown: let the parent control lifecycle.\n if (msg.type === 'shutdown') {\n process.exit(0);\n }\n\n // Only handle task messages here.\n if (msg.type !== 'task') return;\n\n const { filePath, options } = msg.payload;\n const { outputDir, clearOutputDir, chunkSizeMB } = options;\n\n try {\n // Stream the input CSV and write chunk files asynchronously.\n await chunkOneCsvFile({\n filePath,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n // Propagate incremental progress to the parent.\n onProgress: (processed, total) =>\n process.send?.({\n type: 'progress',\n payload: { filePath, processed, total },\n }),\n });\n\n // Report success to the parent.\n process.send?.({\n type: 'result',\n payload: { ok: true, filePath },\n });\n } catch (err) {\n // Log locally and report failure upstream; exit the worker with error code.\n const message = extractErrorMessage(err);\n logger.error(`[w${workerId}] ERROR ${filePath}: ${message}`);\n process.send?.({\n type: 'result',\n payload: { ok: false, filePath, error: message },\n });\n }\n });\n\n // keep alive\n await new Promise<never>(() => {\n // This 
promise never resolves, keeping the worker alive indefinitely\n // until the parent process instructs shutdown.\n });\n}\n","import {\n makeHeader,\n makeWorkerRows,\n type ChunkSlotProgress,\n type CommonCtx,\n type DashboardPlugin,\n} from '../../../../lib/pooling';\n\n/**\n * Header for chunk-csv (no extra totals block).\n *\n * @param ctx - Dashboard context.\n * @returns Header lines.\n */\nfunction renderHeader<TTotals>(\n ctx: CommonCtx<TTotals, ChunkSlotProgress>,\n): string[] {\n // no extra lines — reuse the shared header as-is\n return makeHeader(ctx);\n}\n\n/**\n * Worker rows for chunk-csv — share the generic row renderer.\n *\n * @param ctx - Dashboard context.\n * @returns Array of strings, each representing one worker row.\n */\nfunction renderWorkers<TTotals>(\n ctx: CommonCtx<TTotals, ChunkSlotProgress>,\n): string[] {\n return makeWorkerRows(ctx);\n}\n\nexport const chunkCsvPlugin: DashboardPlugin<unknown, ChunkSlotProgress> = {\n renderHeader,\n renderWorkers,\n // no extras\n};\n","import type { LocalContext } from '../../../context';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { collectCsvFilesOrExit } from '../../../lib/helpers/collectCsvFilesOrExit';\nimport {\n computePoolSize,\n createExtraKeyHandler,\n CHILD_FLAG,\n type PoolHooks,\n runPool,\n dashboardPlugin,\n} from '../../../lib/pooling';\nimport {\n runChild,\n type ChunkProgress,\n type ChunkResult,\n type ChunkTask,\n} from './worker';\nimport { chunkCsvPlugin } from './ui';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n/**\n * Returns the current module's path so the worker pool knows what file to re-exec.\n * In Node ESM, __filename is undefined, so we fall back to argv[1].\n *\n * @returns The current module's path as a string\n */\nfunction getCurrentModulePath(): string {\n if (typeof __filename !== 'undefined') {\n return __filename as unknown as string;\n }\n return process.argv[1];\n}\n\n/**\n * Totals aggregate for this command.\n * We don’t need custom counters since the runner already tracks\n * completed/failed counts in its header — so we just use an empty record.\n */\ntype Totals = Record<string, never>;\n\n/**\n * CLI flags accepted by the `chunk-csv` command.\n *\n * These are passed down from the CLI parser into the parent process.\n */\nexport type ChunkCsvCommandFlags = {\n directory: string;\n outputDir?: string;\n clearOutputDir: boolean;\n chunkSizeMB: number;\n concurrency?: number;\n viewerMode: boolean;\n};\n\n/**\n * Parent entrypoint for chunking many CSVs in parallel using the worker pool runner.\n *\n * Lifecycle:\n * 1) Discover CSV inputs (exit if none).\n * 2) Compute pool size (CPU-count heuristic or --concurrency).\n * 3) Build a FIFO queue of `ChunkTask`s.\n * 4) Define pool hooks to drive task assignment, progress, and result handling.\n * 5) Launch the pool with `runPool`, rendering via the `chunkCsvPlugin`.\n *\n * @param this - Bound CLI context (provides process exit + logging).\n * @param flags - CLI options for the run.\n */\nexport async function chunkCsv(\n this: LocalContext,\n flags: ChunkCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const {\n directory,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n concurrency,\n viewerMode,\n } = flags;\n\n /* 1) Discover CSV inputs */\n const files = collectCsvFilesOrExit(directory, this);\n\n /* 2) Size the pool */\n const { poolSize, cpuCount } = computePoolSize(concurrency, files.length);\n\n logger.info(\n 
colors.green(\n `Chunking ${files.length} CSV file(s) with pool size ${poolSize} (CPU=${cpuCount})`,\n ),\n );\n\n /* 3) Prepare a simple FIFO queue of tasks (one per file). */\n const queue = files.map<ChunkTask>((filePath) => ({\n filePath,\n options: { outputDir, clearOutputDir, chunkSizeMB },\n }));\n\n /* 4) Define pool hooks to adapt runner to this command. */\n const hooks: PoolHooks<ChunkTask, ChunkProgress, ChunkResult, Totals> = {\n nextTask: () => queue.shift(),\n taskLabel: (t) => t.filePath,\n initTotals: () => ({} as Totals),\n initSlotProgress: () => undefined,\n onProgress: (totals) => totals,\n onResult: (totals, res) => ({ totals, ok: !!res.ok }),\n // postProcess receives log context when viewerMode=true — we don’t need it here.\n postProcess: async () => {\n // nothing extra for chunk-csv\n },\n };\n\n /* 5) Launch the pool runner with our hooks and custom dashboard plugin. */\n await runPool({\n title: `Chunk CSV - ${directory}`,\n baseDir: directory || outputDir || process.cwd(),\n childFlag: CHILD_FLAG,\n childModulePath: getCurrentModulePath(),\n poolSize,\n cpuCount,\n filesTotal: files.length,\n hooks,\n viewerMode,\n render: (input) => dashboardPlugin(input, chunkCsvPlugin, viewerMode),\n extraKeyHandler: ({ logsBySlot, repaint, setPaused }) =>\n createExtraKeyHandler({\n logsBySlot,\n repaint,\n setPaused,\n }),\n });\n}\n\n/* -------------------------------------------------------------------------------------------------\n * If invoked directly as a child process, enter worker loop\n * ------------------------------------------------------------------------------------------------- */\nif (process.argv.includes(CHILD_FLAG)) {\n runChild().catch((err) => {\n logger.error(err);\n process.exit(1);\n });\n}\n"],"mappings":"wjBAcA,SAAgB,EACd,EACA,EACU,CACL,IACH,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,kCAAkC,CAAC,CAC3D,EAAa,QAAQ,KAAK,EAAE,EAG9B,IAAI,EAAkB,EAAE,CACxB,GAAI,CAEF,GAAA,EAAA,EAAA,aAD4B,EAAU,CAEnC,OAAQ,GAAM,EAAE,SAAS,OAAO,CAAC,CACjC,IAAK,IAAA,EAAA,EAAA,MAAW,EAAW,EAAE,CAAC,CAC9B,OAAQ,GAAM,CACb,GAAI,CACF,OAAA,EAAA,EAAA,UAAgB,EAAE,CAAC,QAAQ,MACrB,CACN,MAAO,KAET,OACG,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,6BAA6B,IAAY,CAAC,CAClE,EAAA,EAAO,MAAM,EAAA,QAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,EAAa,QAAQ,KAAK,EAAE,CAQ9B,OALI,EAAM,SAAW,IACnB,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,oCAAoC,IAAY,CAAC,CACzE,EAAa,QAAQ,KAAK,EAAE,EAE9B,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,UAAU,EAAM,KAAK,KAAK,CAAC,YAAY,CAAC,CAC1D,ECTT,SAAS,EACP,EACA,EAMA,CACA,IAAM,GAAA,EAAA,EAAA,mBAAuB,EAAS,CAChC,EAAMA,EAAQ,OAAO,CAAE,UAAS,aAAc,GAAM,WAAY,GAAM,CAAC,CAI7E,OAFA,EAAI,KAAK,EAAG,CAEL,CAML,MAAM,MAAM,EAAK,CAEJ,EAAI,MAAM,EAAI,EAEvB,MAAA,EAAA,EAAA,MAAW,EAAK,QAAQ,EAM5B,MAAM,KAAM,CAEV,IAAM,EAAW,QAAQ,IAAI,EAAA,EAAA,EAAA,MAAM,EAAI,SAAS,CAAC,CAAC,CAClD,EAAI,KAAK,CACT,MAAM,GAET,CASH,SAAS,EAAK,EAAmB,CAC/B,OAAO,OAAO,EAAE,CAAC,SAAS,EAAG,IAAI,CASnC,SAAS,EAAe,EAAsC,CAE5D,OAAO,OAAO,WACZ,OAAO,OAAO,EAAI,CACf,IAAK,GAAO,GAAK,KAAO,GAAK,OAAO,EAAE,CAAE,CACxC,KAAK,IAAI,CACZ,OACD,CAWH,eAAsB,EAAgB,EAAgC,CACpE,GAAM,CACJ,WACA,YACA,iBACA,cACA,aACA,gBAAgB,KACd,EACE,CAAE,KAAM,GAAc,MAAA,EAAA,EAAA,MAAW,EAAS,CAC5C,EAAW,EAEf,EAAA,EAAO,KACL,EAAA,QAAO,QAAQ,YAAY,EAAS,SAAS,EAAY,aAAa,CACvE,CAED,IAAM,EAAiB,KAAK,MAAM,EAAc,KAAO,KAAK,CACtD,GAAA,EAAA,EAAA,UAAoB,EAAU,OAAO,CACrC,EAAS,IAAA,EAAA,EAAA,SAAqB,EAAS,CAK7C,GAJA,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,qBAAqB,IAAS,CAAC,CAC1D,MAAA,EAAA,EAAA,OAAY,EAAQ,CAAE,UAAW,GAAM,CAAC,CAGpC,EAAgB,CAClB,EAAA,EAAO,KAAK,EAAA,QAAO,OAAO,8BAA8B,IAAS,CAAC,CAClE,IAAM,EAAQ,MAAA,EAAA,EAAA,SAAc,EAAO,CACnC,MAAM,QAAQ,IACZ,EACG,OAAQ,GAAM,EAAE,WAAW,GAAG,EAAS,SAAS,E
AAI,EAAE,SAAS,OAAO,CAAC,CACvE,IAAK,IAAA,EAAA,EAAA,SAAA,EAAA,EAAA,MAAkB,EAAQ,EAAE,CAAC,CAAC,CACvC,CAGH,IAAI,EAA6B,KAC7B,EAA8B,KAC9B,EAAa,EACb,EAAe,EACf,EAAc,EAEZ,EAAS,IAAIC,EAAAA,OAAO,CACxB,QAAS,GACT,iBAAkB,GACnB,CAAC,CAGE,EAAc,EACd,EAAa,EAEX,MAAmB,CACvB,IAAM,EAAM,EAAa,EAAI,EAAc,EAAa,EAClD,EACJ,EAAM,EAAI,KAAK,IAAI,EAAY,KAAK,KAAK,EAAY,EAAI,CAAC,CAAG,IAAA,GAC/D,EAAW,EAAY,EAAS,CAChC,EAAW,KAAK,KAAK,EAIvB,GAAM,CAGN,IAAI,EAKO,KAGL,OAAA,EAAA,EAAA,MACC,EAAQ,GAAG,EAAS,SAAS,EAAK,EAAa,CAAC,MAAM,CAEvD,EAAI,IAAIC,EAAAA,UAAU,CACtB,WAAY,GAQZ,MAAM,UAAU,EAAe,EAAM,EAAI,CACvC,GAAI,CAEF,GAAI,CAAC,EAAW,CACd,EAAY,EAAI,MAAM,EAAE,CACxB,EAAe,EAAU,OAGzB,EAAS,EAAqB,GAAkB,CAAE,EAAU,CAC5D,GAAI,CACJ,OAIE,IAAiB,MAAQ,EAAI,SAAW,GAE1C,EAAA,EAAO,KACL,EAAA,QAAO,OACL,WAAW,EAAI,OAAO,kBAAkB,IACzC,CACF,CAGH,GAAc,EACV,EAAa,MAAY,GAC3B,EAAW,EAAW,CAIxB,IAAM,EAAM,OAAO,YAAY,EAAW,KAAK,EAAG,IAAM,CAAC,EAAG,EAAI,GAAG,CAAC,CAAC,CAG/D,EAAW,EAAe,EAAI,CACpC,GAAe,EACf,GAAc,EAGV,KAAK,KAAK,CAAG,GAAY,GAAe,GAAM,CAKhD,GACA,EAAc,GACd,EAAc,EAAW,IAEzB,MAAM,EAAO,KAAK,CAClB,GAAgB,EAChB,EAAc,EACd,EAAA,EAAO,KACL,EAAA,QAAO,MACL,oBAAoB,EAAa,SAAS,EAAW,gBAAgB,CAAC,QACvE,CACF,CACD,EAAS,EAAqB,GAAkB,CAAE,EAAW,EAI/D,AACE,IAAS,EAAqB,GAAkB,CAAE,EAAW,CAI/D,MAAM,EAAO,MAAM,EAAI,CACvB,GAAe,EAEf,GAAI,OACG,EAAG,CACV,EAAG,EAAW,GAUlB,MAAM,MAAM,EAAI,CACd,GAAI,CACF,AAEE,KADA,MAAM,EAAO,KAAK,CACT,MAEX,GAAM,CACN,GAAI,OACG,EAAG,CACV,EAAG,EAAW,GAGnB,CAAC,CAGF,MAAA,EAAA,EAAA,WAAA,EAAA,EAAA,kBAD4B,EAAS,CAClB,EAAQ,EAAE,CAG7B,EAAW,EAAW,CACtB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,WAAW,EAAS,QAAQ,EAAa,sBAAsB,EAAW,gBAAgB,CAAC,QAC5F,CACF,CCtOH,eAAsB,GAA0B,CAC9C,IAAM,EAAW,OAAO,QAAQ,IAAI,WAAa,IAAI,CACrD,EAAA,EAAO,KAAK,KAAK,EAAS,cAAc,QAAQ,MAAM,CAGtD,QAAQ,OAAO,CAAE,KAAM,QAAS,CAAC,CAGjC,QAAQ,GAAG,UAAW,KAAO,IAA6B,CASxD,GARI,CAAC,GAAO,OAAO,GAAQ,WAGvB,EAAI,OAAS,YACf,QAAQ,KAAK,EAAE,CAIb,EAAI,OAAS,QAAQ,OAEzB,GAAM,CAAE,WAAU,WAAY,EAAI,QAC5B,CAAE,YAAW,iBAAgB,eAAgB,EAEnD,GAAI,CAEF,MAAM,EAAgB,CACpB,WACA,YACA,iBACA,cAEA,YAAa,EAAW,IACtB,QAAQ,OAAO,CACb,KAAM,WACN,QAAS,CAAE,WAAU,YAAW,QAAO,CACxC,CAAC,CACL,CAAC,CAGF,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAM,WAAU,CAChC,CAAC,OACK,EAAK,CAEZ,IAAM,EAAUC,EAAAA,EAAoB,EAAI,CACxC,EAAA,EAAO,MAAM,KAAK,EAAS,UAAU,EAAS,IAAI,IAAU,CAC5D,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAO,WAAU,MAAO,EAAS,CACjD,CAAC,GAEJ,CAGF,MAAM,IAAI,YAAqB,GAG7B,CCzGJ,SAAS,EACP,EACU,CAEV,OAAOC,EAAAA,EAAW,EAAI,CASxB,SAAS,EACP,EACU,CACV,OAAOC,EAAAA,EAAe,EAAI,CAG5B,MAAa,EAA8D,CACzE,eACA,gBAED,CCVD,SAAS,GAA+B,CAItC,OAHI,OAAO,WAAe,IACjB,WAEF,QAAQ,KAAK,GAqCtB,eAAsB,EAEpB,EACe,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CACJ,YACA,YACA,iBACA,cACA,cACA,cACE,EAGE,EAAQ,EAAsB,EAAW,KAAK,CAG9C,CAAE,WAAU,YAAaC,EAAAA,EAAgB,EAAa,EAAM,OAAO,CAEzE,EAAA,EAAO,KACL,EAAA,QAAO,MACL,YAAY,EAAM,OAAO,8BAA8B,EAAS,QAAQ,EAAS,GAClF,CACF,CAGD,IAAM,EAAQ,EAAM,IAAgB,IAAc,CAChD,WACA,QAAS,CAAE,YAAW,iBAAgB,cAAa,CACpD,EAAE,CAiBH,MAAMC,EAAAA,EAAQ,CACZ,MAAO,eAAe,IACtB,QAAS,GAAa,GAAa,QAAQ,KAAK,CAChD,UAAWC,EAAAA,EACX,gBAAiB,GAAsB,CACvC,WACA,WACA,WAAY,EAAM,OAClB,MAtBsE,CACtE,aAAgB,EAAM,OAAO,CAC7B,UAAY,GAAM,EAAE,SACpB,gBAAmB,EAAE,EACrB,qBAAwB,IAAA,GACxB,WAAa,GAAW,EACxB,UAAW,EAAQ,KAAS,CAAE,SAAQ,GAAI,CAAC,CAAC,EAAI,GAAI,EAEpD,YAAa,SAAY,GAG1B,CAYC,aACA,OAAS,GAAUC,EAAAA,EAAgB,EAAO,EAAgB,EAAW,CACrE,iBAAkB,CAAE,aAAY,UAAS,eACvCC,EAAAA,EAAsB,CACpB,aACA,UACA,YACD,CAAC,CACL,CAAC,CAMA,QAAQ,KAAK,SAASF,EAAAA,EAAW,EACnC,GAAU,CAAC,MAAO,GAAQ,CACxB,EAAA,EAAO,MAAM,EAAI,CACjB,QAAQ,KAAK,EAAE,EACf"}
+{"version":3,"file":"impl-DcQ_HfDZ.cjs","names":["fastcsv","Parser","Transform","extractErrorMessage","makeHeader","makeWorkerRows","computePoolSize","runPool","CHILD_FLAG","dashboardPlugin","createExtraKeyHandler"],"sources":["../src/lib/helpers/collectCsvFilesOrExit.ts","../src/lib/helpers/chunkOneCsvFile.ts","../src/commands/admin/chunk-csv/worker.ts","../src/commands/admin/chunk-csv/ui/plugin.ts","../src/commands/admin/chunk-csv/impl.ts"],"sourcesContent":["import { join } from 'node:path';\nimport { readdirSync, statSync } from 'node:fs';\nimport colors from 'colors';\nimport { logger } from '../../logger';\nimport type { LocalContext } from '../../context';\n\n/**\n * Validate flags and collect CSV file paths from a directory.\n * On validation error, the provided `exit` function is called.\n *\n * @param directory - the directory containing CSV files\n * @param localContext - the context of the command, used for logging and exit\n * @returns an array of valid CSV file paths\n */\nexport function collectCsvFilesOrExit(\n directory: string | undefined,\n localContext: LocalContext,\n): string[] {\n if (!directory) {\n logger.error(colors.red('A --directory must be provided.'));\n localContext.process.exit(1);\n }\n\n let files: string[] = [];\n try {\n const entries = readdirSync(directory);\n files = entries\n .filter((f) => f.endsWith('.csv'))\n .map((f) => join(directory, f))\n .filter((p) => {\n try {\n return statSync(p).isFile();\n } catch {\n return false;\n }\n });\n } catch (err) {\n logger.error(colors.red(`Failed to read directory: ${directory}`));\n logger.error(colors.red((err as Error).message));\n localContext.process.exit(1);\n }\n\n if (files.length === 0) {\n logger.error(colors.red(`No CSV files found in directory: ${directory}`));\n localContext.process.exit(1);\n }\n logger.info(colors.green(`Found: ${files.join(', ')} CSV files`));\n return files;\n}\n","import { createReadStream, createWriteStream } from 'node:fs';\nimport { mkdir, readdir, unlink, stat } from 'node:fs/promises';\nimport { pipeline } from 'node:stream/promises';\nimport { Transform } from 'node:stream';\nimport { once } from 'node:events';\nimport { Parser } from 'csv-parse';\nimport { basename, dirname, join } from 'node:path';\nimport colors from 'colors';\nimport * as fastcsv from 'fast-csv';\nimport { logger } from '../../logger';\n\n/**\n * Options for chunking a single CSV file\n */\nexport type ChunkOpts = {\n /** Path to the CSV file to chunk */\n filePath: string;\n /** Output directory for chunk files; defaults to the same directory as the input file */\n outputDir?: string;\n /** Clear output directory before starting */\n clearOutputDir: boolean;\n /** Chunk size in MB */\n chunkSizeMB: number;\n /** Optional report interval in milliseconds for progress updates */\n reportEveryMs?: number;\n /** Callback for progress updates */\n onProgress: (processed: number, total?: number) => void;\n};\n\n/**\n * Create a CSV writer (fast-csv formatter piped to a write stream) that writes\n * a header line first, and then accepts object rows. 
Returns a tiny API to\n * write rows with backpressure handling and to close the file cleanly.\n *\n * @param filePath - The path to the output CSV file\n * @param headers - The headers for the CSV file\n * @returns An object with `write` and `end` methods\n */\nfunction createCsvChunkWriter(\n filePath: string,\n headers: string[],\n): {\n /** Write a row object to the CSV file */\n write: (row: Record<string, unknown>) => Promise<void>;\n /** Close the CSV file, ensuring all data is flushed */\n end: () => Promise<void>;\n} {\n const ws = createWriteStream(filePath);\n const csv = fastcsv.format({ headers, writeHeaders: true, objectMode: true });\n // Pipe csv → file stream\n csv.pipe(ws);\n\n return {\n /**\n * Write a row object to the CSV file.\n *\n * @param row - The row data as an object\n */\n async write(row) {\n // Respect backpressure from fast-csv formatter\n const ok = csv.write(row);\n if (!ok) {\n await once(csv, 'drain');\n }\n },\n /**\n * Close the CSV file, ensuring all data is flushed.\n */\n async end() {\n // End formatter; wait for underlying file stream to finish flush/close\n const finished = Promise.all([once(ws, 'finish')]);\n csv.end();\n await finished;\n },\n };\n}\n\n/**\n * Zero-pad chunk numbers to four digits (e.g., 1 → \"0001\").\n *\n * @param n - The chunk number to pad\n * @returns The padded chunk number as a string\n */\nfunction pad4(n: number): string {\n return String(n).padStart(4, '0');\n}\n\n/**\n * Approximate row size in bytes using comma-joined field values.\n *\n * @param obj - The row object to estimate size for\n * @returns Approximate byte size of the row when serialized as CSV\n */\nfunction approxRowBytes(obj: Record<string, unknown>): number {\n // naive but fast; adequate for chunk rollover thresholding\n return Buffer.byteLength(\n Object.values(obj)\n .map((v) => (v == null ? '' : String(v)))\n .join(','),\n 'utf8',\n );\n}\n\n/**\n * Stream a single CSV file and write chunk files of roughly chunkSizeMB.\n * - Writes header to each chunk.\n * - Logs periodic progress via onProgress.\n *\n * @param opts - Options for chunking the file\n * @returns Promise that resolves when done\n */\nexport async function chunkOneCsvFile(opts: ChunkOpts): Promise<void> {\n const {\n filePath,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n onProgress,\n reportEveryMs = 500,\n } = opts;\n const { size: fileBytes } = await stat(filePath); // total bytes on disk\n let lastTick = 0;\n\n logger.info(\n colors.magenta(`Chunking ${filePath} into ~${chunkSizeMB}MB files...`),\n );\n\n const chunkSizeBytes = Math.floor(chunkSizeMB * 1024 * 1024);\n const baseName = basename(filePath, '.csv');\n const outDir = outputDir || dirname(filePath);\n logger.info(colors.magenta(`Output directory: ${outDir}`));\n await mkdir(outDir, { recursive: true });\n\n // Clear previous chunk files for this base\n if (clearOutputDir) {\n logger.warn(colors.yellow(`Clearing output directory: ${outDir}`));\n const files = await readdir(outDir);\n await Promise.all(\n files\n .filter((f) => f.startsWith(`${baseName}_chunk_`) && f.endsWith('.csv'))\n .map((f) => unlink(join(outDir, f))),\n );\n }\n\n let headerRow: string[] | null = null;\n let expectedCols: number | null = null;\n let totalLines = 0;\n let currentChunk = 1;\n let currentSize = 0;\n\n const parser = new Parser({\n columns: false,\n skip_empty_lines: true,\n });\n\n // running sample to estimate avg row bytes\n let sampleBytes = 0;\n let sampleRows = 0;\n\n const emit = (): void => {\n const avg = sampleRows > 0 ? 
sampleBytes / sampleRows : 0;\n const estTotal =\n avg > 0 ? Math.max(totalLines, Math.ceil(fileBytes / avg)) : undefined;\n onProgress(totalLines, estTotal); // <-- now has total\n lastTick = Date.now();\n };\n\n // seed an initial 0/N as soon as we start\n emit();\n\n // Current active chunk writer; created after we know headers\n let writer: {\n /** Write a row object to the current chunk file */\n write: (row: Record<string, unknown>) => Promise<void>;\n /** Close the current chunk file */\n end: () => Promise<void>;\n } | null = null;\n\n // Returns current chunk file path — chunk number is always 4-digit padded\n const currentChunkPath = (): string =>\n join(outDir, `${baseName}_chunk_${pad4(currentChunk)}.csv`);\n\n const t = new Transform({\n objectMode: true,\n /**\n * Transform each row of the CSV file into a chunk.\n *\n * @param row - The current row being processed\n * @param _enc - Encoding (not used)\n * @param cb - Callback to signal completion or error\n */\n async transform(row: string[], _enc, cb) {\n try {\n // First row is the header\n if (!headerRow) {\n headerRow = row.slice(0);\n expectedCols = headerRow.length;\n\n // Open first chunk with header asynchronously\n writer = createCsvChunkWriter(currentChunkPath(), headerRow);\n cb();\n return;\n }\n\n // sanity check rows (non-fatal)\n if (expectedCols !== null && row.length !== expectedCols) {\n // optionally log a warning or collect metrics\n logger.warn(\n colors.yellow(\n `Row has ${row.length} cols; expected ${expectedCols}`,\n ),\n );\n }\n\n totalLines += 1;\n if (totalLines % 250_000 === 0) {\n onProgress(totalLines);\n }\n\n // Build row object using the original header\n const obj = Object.fromEntries(headerRow!.map((h, i) => [h, row[i]]));\n\n // Determine the row size up-front\n const rowBytes = approxRowBytes(obj);\n sampleBytes += rowBytes;\n sampleRows += 1;\n\n // time-based throttle for UI updates\n if (Date.now() - lastTick >= reportEveryMs) emit();\n\n // If adding this row would exceed the threshold, roll first,\n // so this row becomes the first row in the next chunk.\n if (\n writer &&\n currentSize > 0 &&\n currentSize + rowBytes > chunkSizeBytes\n ) {\n await writer.end();\n currentChunk += 1;\n currentSize = 0;\n logger.info(\n colors.green(\n `Rolling to chunk ${currentChunk} after ${totalLines.toLocaleString()} rows.`,\n ),\n );\n writer = createCsvChunkWriter(currentChunkPath(), headerRow!);\n }\n\n // Ensure writer exists (should after header)\n if (!writer) {\n writer = createCsvChunkWriter(currentChunkPath(), headerRow!);\n }\n\n // Write row and update approximate size\n await writer.write(obj);\n currentSize += rowBytes;\n\n cb();\n } catch (e) {\n cb(e as Error);\n }\n },\n\n // Ensure final file is closed\n /**\n * Flush is called when the readable has ended; we close any open writer.\n *\n * @param cb - Callback to signal completion or error\n */\n async flush(cb) {\n try {\n if (writer) {\n await writer.end();\n writer = null;\n }\n emit(); // Final progress tick\n cb();\n } catch (e) {\n cb(e as Error);\n }\n },\n });\n\n const rs = createReadStream(filePath);\n await pipeline(rs, parser, t);\n\n // Final progress tick\n onProgress(totalLines);\n logger.info(\n colors.green(\n `Chunked ${filePath} into ${currentChunk} file(s); processed ${totalLines.toLocaleString()} rows.`,\n ),\n );\n}\n","import { extractErrorMessage } from '../../../lib/helpers';\nimport { chunkOneCsvFile } from '../../../lib/helpers/chunkOneCsvFile';\nimport type { ToWorker } from 
'../../../lib/pooling';\nimport { logger } from '../../../logger';\n\n/**\n * A unit of work: instructs a worker to chunk a single CSV file.\n */\nexport type ChunkTask = {\n /** Absolute path of the CSV file to chunk. */\n filePath: string;\n /** Options controlling output and chunk size. */\n options: {\n /** Optional directory where chunked output files should be written. */\n outputDir?: string;\n /** Whether to clear any pre-existing output chunks before writing new ones. */\n clearOutputDir: boolean;\n /** Approximate target chunk size in MB (well under Node’s string size limits). */\n chunkSizeMB: number;\n };\n};\n\n/**\n * Per-worker progress snapshot for the chunk-csv command.\n */\nexport type ChunkProgress = {\n /** File being processed by the worker. */\n filePath: string;\n /** Number of rows processed so far. */\n processed: number;\n /** Optional total rows in the file (not always known). */\n total?: number;\n};\n\n/**\n * Worker result message once a file has finished processing.\n */\nexport type ChunkResult = {\n /** Whether the file completed successfully. */\n ok: boolean;\n /** File path for which this result applies. */\n filePath: string;\n /** Optional error message if the file failed to chunk. */\n error?: string;\n};\n\n/**\n * Worker entrypoint.\n *\n * Lifecycle:\n * 1) Announce readiness to the parent via `{ type: 'ready' }`.\n * 2) Wait for `{ type: 'task' }` messages; for each, call `chunkOneCsvFile(...)`.\n * - While chunking, forward progress to the parent via `{ type: 'progress' }`.\n * - On completion, send `{ type: 'result', ok: true }`.\n * - On error, send `{ type: 'result', ok: false, error }` and exit(1).\n * 3) On `{ type: 'shutdown' }`, exit(0) gracefully.\n *\n * Notes:\n * - This process is typically spawned by a pool manager that assigns file paths to workers.\n * - The long-lived promise at the end keeps the worker alive between tasks until the parent\n * sends an explicit shutdown.\n */\nexport async function runChild(): Promise<void> {\n const workerId = Number(process.env.WORKER_ID || '0');\n logger.info(`[w${workerId}] ready pid=${process.pid}`);\n\n // Notify the parent that the worker is ready to receive tasks.\n process.send?.({ type: 'ready' });\n\n // Main message loop: receive tasks and shutdown requests from the parent.\n process.on('message', async (msg: ToWorker<ChunkTask>) => {\n if (!msg || typeof msg !== 'object') return;\n\n // Graceful shutdown: let the parent control lifecycle.\n if (msg.type === 'shutdown') {\n process.exit(0);\n }\n\n // Only handle task messages here.\n if (msg.type !== 'task') return;\n\n const { filePath, options } = msg.payload;\n const { outputDir, clearOutputDir, chunkSizeMB } = options;\n\n try {\n // Stream the input CSV and write chunk files asynchronously.\n await chunkOneCsvFile({\n filePath,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n // Propagate incremental progress to the parent.\n onProgress: (processed, total) =>\n process.send?.({\n type: 'progress',\n payload: { filePath, processed, total },\n }),\n });\n\n // Report success to the parent.\n process.send?.({\n type: 'result',\n payload: { ok: true, filePath },\n });\n } catch (err) {\n // Log locally and report failure upstream; exit the worker with error code.\n const message = extractErrorMessage(err);\n logger.error(`[w${workerId}] ERROR ${filePath}: ${message}`);\n process.send?.({\n type: 'result',\n payload: { ok: false, filePath, error: message },\n });\n }\n });\n\n // keep alive\n await new Promise<never>(() => {\n // This 
promise never resolves, keeping the worker alive indefinitely\n // until the parent process instructs shutdown.\n });\n}\n","import {\n makeHeader,\n makeWorkerRows,\n type ChunkSlotProgress,\n type CommonCtx,\n type DashboardPlugin,\n} from '../../../../lib/pooling';\n\n/**\n * Header for chunk-csv (no extra totals block).\n *\n * @param ctx - Dashboard context.\n * @returns Header lines.\n */\nfunction renderHeader<TTotals>(\n ctx: CommonCtx<TTotals, ChunkSlotProgress>,\n): string[] {\n // no extra lines — reuse the shared header as-is\n return makeHeader(ctx);\n}\n\n/**\n * Worker rows for chunk-csv — share the generic row renderer.\n *\n * @param ctx - Dashboard context.\n * @returns Array of strings, each representing one worker row.\n */\nfunction renderWorkers<TTotals>(\n ctx: CommonCtx<TTotals, ChunkSlotProgress>,\n): string[] {\n return makeWorkerRows(ctx);\n}\n\nexport const chunkCsvPlugin: DashboardPlugin<unknown, ChunkSlotProgress> = {\n renderHeader,\n renderWorkers,\n // no extras\n};\n","import type { LocalContext } from '../../../context';\nimport colors from 'colors';\nimport { logger } from '../../../logger';\nimport { collectCsvFilesOrExit } from '../../../lib/helpers/collectCsvFilesOrExit';\nimport {\n computePoolSize,\n createExtraKeyHandler,\n CHILD_FLAG,\n type PoolHooks,\n runPool,\n dashboardPlugin,\n} from '../../../lib/pooling';\nimport {\n runChild,\n type ChunkProgress,\n type ChunkResult,\n type ChunkTask,\n} from './worker';\nimport { chunkCsvPlugin } from './ui';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n/**\n * Returns the current module's path so the worker pool knows what file to re-exec.\n * In Node ESM, __filename is undefined, so we fall back to argv[1].\n *\n * @returns The current module's path as a string\n */\nfunction getCurrentModulePath(): string {\n if (typeof __filename !== 'undefined') {\n return __filename as unknown as string;\n }\n return process.argv[1];\n}\n\n/**\n * Totals aggregate for this command.\n * We don’t need custom counters since the runner already tracks\n * completed/failed counts in its header — so we just use an empty record.\n */\ntype Totals = Record<string, never>;\n\n/**\n * CLI flags accepted by the `chunk-csv` command.\n *\n * These are passed down from the CLI parser into the parent process.\n */\nexport type ChunkCsvCommandFlags = {\n directory: string;\n outputDir?: string;\n clearOutputDir: boolean;\n chunkSizeMB: number;\n concurrency?: number;\n viewerMode: boolean;\n};\n\n/**\n * Parent entrypoint for chunking many CSVs in parallel using the worker pool runner.\n *\n * Lifecycle:\n * 1) Discover CSV inputs (exit if none).\n * 2) Compute pool size (CPU-count heuristic or --concurrency).\n * 3) Build a FIFO queue of `ChunkTask`s.\n * 4) Define pool hooks to drive task assignment, progress, and result handling.\n * 5) Launch the pool with `runPool`, rendering via the `chunkCsvPlugin`.\n *\n * @param this - Bound CLI context (provides process exit + logging).\n * @param flags - CLI options for the run.\n */\nexport async function chunkCsv(\n this: LocalContext,\n flags: ChunkCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const {\n directory,\n outputDir,\n clearOutputDir,\n chunkSizeMB,\n concurrency,\n viewerMode,\n } = flags;\n\n /* 1) Discover CSV inputs */\n const files = collectCsvFilesOrExit(directory, this);\n\n /* 2) Size the pool */\n const { poolSize, cpuCount } = computePoolSize(concurrency, files.length);\n\n logger.info(\n 
colors.green(\n `Chunking ${files.length} CSV file(s) with pool size ${poolSize} (CPU=${cpuCount})`,\n ),\n );\n\n /* 3) Prepare a simple FIFO queue of tasks (one per file). */\n const queue = files.map<ChunkTask>((filePath) => ({\n filePath,\n options: { outputDir, clearOutputDir, chunkSizeMB },\n }));\n\n /* 4) Define pool hooks to adapt runner to this command. */\n const hooks: PoolHooks<ChunkTask, ChunkProgress, ChunkResult, Totals> = {\n nextTask: () => queue.shift(),\n taskLabel: (t) => t.filePath,\n initTotals: () => ({} as Totals),\n initSlotProgress: () => undefined,\n onProgress: (totals) => totals,\n onResult: (totals, res) => ({ totals, ok: !!res.ok }),\n // postProcess receives log context when viewerMode=true — we don’t need it here.\n postProcess: async () => {\n // nothing extra for chunk-csv\n },\n };\n\n /* 5) Launch the pool runner with our hooks and custom dashboard plugin. */\n await runPool({\n title: `Chunk CSV - ${directory}`,\n baseDir: directory || outputDir || process.cwd(),\n childFlag: CHILD_FLAG,\n childModulePath: getCurrentModulePath(),\n poolSize,\n cpuCount,\n filesTotal: files.length,\n hooks,\n viewerMode,\n render: (input) => dashboardPlugin(input, chunkCsvPlugin, viewerMode),\n extraKeyHandler: ({ logsBySlot, repaint, setPaused }) =>\n createExtraKeyHandler({\n logsBySlot,\n repaint,\n setPaused,\n }),\n });\n}\n\n/* -------------------------------------------------------------------------------------------------\n * If invoked directly as a child process, enter worker loop\n * ------------------------------------------------------------------------------------------------- */\nif (process.argv.includes(CHILD_FLAG)) {\n runChild().catch((err) => {\n logger.error(err);\n process.exit(1);\n });\n}\n"],"mappings":"wjBAcA,SAAgB,EACd,EACA,EACU,CACL,IACH,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,kCAAkC,CAAC,CAC3D,EAAa,QAAQ,KAAK,EAAE,EAG9B,IAAI,EAAkB,EAAE,CACxB,GAAI,CAEF,GAAA,EAAA,EAAA,aAD4B,EAAU,CAEnC,OAAQ,GAAM,EAAE,SAAS,OAAO,CAAC,CACjC,IAAK,IAAA,EAAA,EAAA,MAAW,EAAW,EAAE,CAAC,CAC9B,OAAQ,GAAM,CACb,GAAI,CACF,OAAA,EAAA,EAAA,UAAgB,EAAE,CAAC,QAAQ,MACrB,CACN,MAAO,KAET,OACG,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,6BAA6B,IAAY,CAAC,CAClE,EAAA,EAAO,MAAM,EAAA,QAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,EAAa,QAAQ,KAAK,EAAE,CAQ9B,OALI,EAAM,SAAW,IACnB,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,oCAAoC,IAAY,CAAC,CACzE,EAAa,QAAQ,KAAK,EAAE,EAE9B,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,UAAU,EAAM,KAAK,KAAK,CAAC,YAAY,CAAC,CAC1D,ECTT,SAAS,EACP,EACA,EAMA,CACA,IAAM,GAAA,EAAA,EAAA,mBAAuB,EAAS,CAChC,EAAMA,EAAQ,OAAO,CAAE,UAAS,aAAc,GAAM,WAAY,GAAM,CAAC,CAI7E,OAFA,EAAI,KAAK,EAAG,CAEL,CAML,MAAM,MAAM,EAAK,CAEJ,EAAI,MAAM,EAAI,EAEvB,MAAA,EAAA,EAAA,MAAW,EAAK,QAAQ,EAM5B,MAAM,KAAM,CAEV,IAAM,EAAW,QAAQ,IAAI,EAAA,EAAA,EAAA,MAAM,EAAI,SAAS,CAAC,CAAC,CAClD,EAAI,KAAK,CACT,MAAM,GAET,CASH,SAAS,EAAK,EAAmB,CAC/B,OAAO,OAAO,EAAE,CAAC,SAAS,EAAG,IAAI,CASnC,SAAS,EAAe,EAAsC,CAE5D,OAAO,OAAO,WACZ,OAAO,OAAO,EAAI,CACf,IAAK,GAAO,GAAK,KAAO,GAAK,OAAO,EAAE,CAAE,CACxC,KAAK,IAAI,CACZ,OACD,CAWH,eAAsB,EAAgB,EAAgC,CACpE,GAAM,CACJ,WACA,YACA,iBACA,cACA,aACA,gBAAgB,KACd,EACE,CAAE,KAAM,GAAc,MAAA,EAAA,EAAA,MAAW,EAAS,CAC5C,EAAW,EAEf,EAAA,EAAO,KACL,EAAA,QAAO,QAAQ,YAAY,EAAS,SAAS,EAAY,aAAa,CACvE,CAED,IAAM,EAAiB,KAAK,MAAM,EAAc,KAAO,KAAK,CACtD,GAAA,EAAA,EAAA,UAAoB,EAAU,OAAO,CACrC,EAAS,IAAA,EAAA,EAAA,SAAqB,EAAS,CAK7C,GAJA,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,qBAAqB,IAAS,CAAC,CAC1D,MAAA,EAAA,EAAA,OAAY,EAAQ,CAAE,UAAW,GAAM,CAAC,CAGpC,EAAgB,CAClB,EAAA,EAAO,KAAK,EAAA,QAAO,OAAO,8BAA8B,IAAS,CAAC,CAClE,IAAM,EAAQ,MAAA,EAAA,EAAA,SAAc,EAAO,CACnC,MAAM,QAAQ,IACZ,EACG,OAAQ,GAAM,EAAE,WAAW,GAAG,EAAS,SAAS,E
AAI,EAAE,SAAS,OAAO,CAAC,CACvE,IAAK,IAAA,EAAA,EAAA,SAAA,EAAA,EAAA,MAAkB,EAAQ,EAAE,CAAC,CAAC,CACvC,CAGH,IAAI,EAA6B,KAC7B,EAA8B,KAC9B,EAAa,EACb,EAAe,EACf,EAAc,EAEZ,EAAS,IAAIC,EAAAA,OAAO,CACxB,QAAS,GACT,iBAAkB,GACnB,CAAC,CAGE,EAAc,EACd,EAAa,EAEX,MAAmB,CACvB,IAAM,EAAM,EAAa,EAAI,EAAc,EAAa,EAClD,EACJ,EAAM,EAAI,KAAK,IAAI,EAAY,KAAK,KAAK,EAAY,EAAI,CAAC,CAAG,IAAA,GAC/D,EAAW,EAAY,EAAS,CAChC,EAAW,KAAK,KAAK,EAIvB,GAAM,CAGN,IAAI,EAKO,KAGL,OAAA,EAAA,EAAA,MACC,EAAQ,GAAG,EAAS,SAAS,EAAK,EAAa,CAAC,MAAM,CAEvD,EAAI,IAAIC,EAAAA,UAAU,CACtB,WAAY,GAQZ,MAAM,UAAU,EAAe,EAAM,EAAI,CACvC,GAAI,CAEF,GAAI,CAAC,EAAW,CACd,EAAY,EAAI,MAAM,EAAE,CACxB,EAAe,EAAU,OAGzB,EAAS,EAAqB,GAAkB,CAAE,EAAU,CAC5D,GAAI,CACJ,OAIE,IAAiB,MAAQ,EAAI,SAAW,GAE1C,EAAA,EAAO,KACL,EAAA,QAAO,OACL,WAAW,EAAI,OAAO,kBAAkB,IACzC,CACF,CAGH,GAAc,EACV,EAAa,MAAY,GAC3B,EAAW,EAAW,CAIxB,IAAM,EAAM,OAAO,YAAY,EAAW,KAAK,EAAG,IAAM,CAAC,EAAG,EAAI,GAAG,CAAC,CAAC,CAG/D,EAAW,EAAe,EAAI,CACpC,GAAe,EACf,GAAc,EAGV,KAAK,KAAK,CAAG,GAAY,GAAe,GAAM,CAKhD,GACA,EAAc,GACd,EAAc,EAAW,IAEzB,MAAM,EAAO,KAAK,CAClB,GAAgB,EAChB,EAAc,EACd,EAAA,EAAO,KACL,EAAA,QAAO,MACL,oBAAoB,EAAa,SAAS,EAAW,gBAAgB,CAAC,QACvE,CACF,CACD,EAAS,EAAqB,GAAkB,CAAE,EAAW,EAI/D,AACE,IAAS,EAAqB,GAAkB,CAAE,EAAW,CAI/D,MAAM,EAAO,MAAM,EAAI,CACvB,GAAe,EAEf,GAAI,OACG,EAAG,CACV,EAAG,EAAW,GAUlB,MAAM,MAAM,EAAI,CACd,GAAI,CACF,AAEE,KADA,MAAM,EAAO,KAAK,CACT,MAEX,GAAM,CACN,GAAI,OACG,EAAG,CACV,EAAG,EAAW,GAGnB,CAAC,CAGF,MAAA,EAAA,EAAA,WAAA,EAAA,EAAA,kBAD4B,EAAS,CAClB,EAAQ,EAAE,CAG7B,EAAW,EAAW,CACtB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,WAAW,EAAS,QAAQ,EAAa,sBAAsB,EAAW,gBAAgB,CAAC,QAC5F,CACF,CCtOH,eAAsB,GAA0B,CAC9C,IAAM,EAAW,OAAO,QAAQ,IAAI,WAAa,IAAI,CACrD,EAAA,EAAO,KAAK,KAAK,EAAS,cAAc,QAAQ,MAAM,CAGtD,QAAQ,OAAO,CAAE,KAAM,QAAS,CAAC,CAGjC,QAAQ,GAAG,UAAW,KAAO,IAA6B,CASxD,GARI,CAAC,GAAO,OAAO,GAAQ,WAGvB,EAAI,OAAS,YACf,QAAQ,KAAK,EAAE,CAIb,EAAI,OAAS,QAAQ,OAEzB,GAAM,CAAE,WAAU,WAAY,EAAI,QAC5B,CAAE,YAAW,iBAAgB,eAAgB,EAEnD,GAAI,CAEF,MAAM,EAAgB,CACpB,WACA,YACA,iBACA,cAEA,YAAa,EAAW,IACtB,QAAQ,OAAO,CACb,KAAM,WACN,QAAS,CAAE,WAAU,YAAW,QAAO,CACxC,CAAC,CACL,CAAC,CAGF,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAM,WAAU,CAChC,CAAC,OACK,EAAK,CAEZ,IAAM,EAAUC,EAAAA,EAAoB,EAAI,CACxC,EAAA,EAAO,MAAM,KAAK,EAAS,UAAU,EAAS,IAAI,IAAU,CAC5D,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAO,WAAU,MAAO,EAAS,CACjD,CAAC,GAEJ,CAGF,MAAM,IAAI,YAAqB,GAG7B,CCzGJ,SAAS,EACP,EACU,CAEV,OAAOC,EAAAA,EAAW,EAAI,CASxB,SAAS,EACP,EACU,CACV,OAAOC,EAAAA,EAAe,EAAI,CAG5B,MAAa,EAA8D,CACzE,eACA,gBAED,CCVD,SAAS,GAA+B,CAItC,OAHI,OAAO,WAAe,IACjB,WAEF,QAAQ,KAAK,GAqCtB,eAAsB,EAEpB,EACe,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CACJ,YACA,YACA,iBACA,cACA,cACA,cACE,EAGE,EAAQ,EAAsB,EAAW,KAAK,CAG9C,CAAE,WAAU,YAAaC,EAAAA,EAAgB,EAAa,EAAM,OAAO,CAEzE,EAAA,EAAO,KACL,EAAA,QAAO,MACL,YAAY,EAAM,OAAO,8BAA8B,EAAS,QAAQ,EAAS,GAClF,CACF,CAGD,IAAM,EAAQ,EAAM,IAAgB,IAAc,CAChD,WACA,QAAS,CAAE,YAAW,iBAAgB,cAAa,CACpD,EAAE,CAiBH,MAAMC,EAAAA,EAAQ,CACZ,MAAO,eAAe,IACtB,QAAS,GAAa,GAAa,QAAQ,KAAK,CAChD,UAAWC,EAAAA,EACX,gBAAiB,GAAsB,CACvC,WACA,WACA,WAAY,EAAM,OAClB,MAtBsE,CACtE,aAAgB,EAAM,OAAO,CAC7B,UAAY,GAAM,EAAE,SACpB,gBAAmB,EAAE,EACrB,qBAAwB,IAAA,GACxB,WAAa,GAAW,EACxB,UAAW,EAAQ,KAAS,CAAE,SAAQ,GAAI,CAAC,CAAC,EAAI,GAAI,EAEpD,YAAa,SAAY,GAG1B,CAYC,aACA,OAAS,GAAUC,EAAAA,EAAgB,EAAO,EAAgB,EAAW,CACrE,iBAAkB,CAAE,aAAY,UAAS,eACvCC,EAAAA,EAAsB,CACpB,aACA,UACA,YACD,CAAC,CACL,CAAC,CAMA,QAAQ,KAAK,SAASF,EAAAA,EAAW,EACnC,GAAU,CAAC,MAAO,GAAQ,CACxB,EAAA,EAAO,MAAM,EAAI,CACjB,QAAQ,KAAK,EAAE,EACf"}

@@ -1,2 +1,2 @@
-const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-
-//# sourceMappingURL=impl-
|
|
1
|
+
const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-B-TmLA0w.cjs`),require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`);const t=require(`./logger-BaHHbWVd.cjs`);require(`./buildAIIntegrationType-n_Qlv8wG.cjs`),require(`./codecs-JSDJgtyL.cjs`);const n=require(`./consent-manager-DXWjvCtI.cjs`);require(`./uploadConsents-CJc_6Qwd.cjs`);const r=require(`./api-keys-CDp8NUhN.cjs`),i=require(`./done-input-validation-Cgk5kNBs.cjs`);let a=require(`node:fs`),o=require(`colors`);o=e.s(o);async function s({auth:e,xdiLocation:s,file:c,removeIpAddresses:l,domainBlockList:u,xdiAllowedCommands:d,transcendUrl:f}){i.t(this.process.exit);let{syncGroups:p,html:m}=await n.r(await r.r(e),{xdiLocation:s,transcendUrl:f,removeIpAddresses:l,domainBlockList:u.length>0?u:void 0,xdiAllowedCommands:d});t.t.info(o.default.green(`Successfully constructed sync endpoint for sync groups: ${JSON.stringify(p,null,2)}`)),(0,a.writeFileSync)(c,m),t.t.info(o.default.green(`Wrote configuration to file "${c}"!`))}exports.buildXdiSyncEndpoint=s;
+//# sourceMappingURL=impl-DjTjLgew.cjs.map
@@ -1 +1 @@
-{"version":3,"file":"impl-
+
{"version":3,"file":"impl-DjTjLgew.cjs","names":["buildXdiSyncEndpointHelper","validateTranscendAuth"],"sources":["../src/commands/consent/build-xdi-sync-endpoint/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { writeFileSync } from 'node:fs';\nimport { validateTranscendAuth } from '../../../lib/api-keys';\nimport { buildXdiSyncEndpoint as buildXdiSyncEndpointHelper } from '../../../lib/consent-manager';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface BuildXdiSyncEndpointCommandFlags {\n auth: string;\n xdiLocation: string;\n file: string;\n removeIpAddresses: boolean;\n domainBlockList: string[];\n xdiAllowedCommands: string;\n transcendUrl: string;\n}\n\nexport async function buildXdiSyncEndpoint(\n this: LocalContext,\n {\n auth,\n xdiLocation,\n file,\n removeIpAddresses,\n domainBlockList,\n xdiAllowedCommands,\n transcendUrl,\n }: BuildXdiSyncEndpointCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Parse authentication as API key or path to list of API keys\n const apiKeyOrList = await validateTranscendAuth(auth);\n\n // Build the sync endpoint\n const { syncGroups, html } = await buildXdiSyncEndpointHelper(apiKeyOrList, {\n xdiLocation,\n transcendUrl,\n removeIpAddresses,\n domainBlockList: domainBlockList.length > 0 ? domainBlockList : undefined,\n xdiAllowedCommands,\n });\n\n // Log success\n logger.info(\n colors.green(\n `Successfully constructed sync endpoint for sync groups: ${JSON.stringify(\n syncGroups,\n null,\n 2,\n )}`,\n ),\n );\n\n // Write to disk\n writeFileSync(file, html);\n logger.info(colors.green(`Wrote configuration to file \"${file}\"!`));\n}\n"],"mappings":"4eAkBA,eAAsB,EAEpB,CACE,OACA,cACA,OACA,oBACA,kBACA,qBACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAMtC,GAAM,CAAE,aAAY,QAAS,MAAMA,EAAAA,EAHd,MAAMC,EAAAA,EAAsB,EAAK,CAGsB,CAC1E,cACA,eACA,oBACA,gBAAiB,EAAgB,OAAS,EAAI,EAAkB,IAAA,GAChE,qBACD,CAAC,CAGF,EAAA,EAAO,KACL,EAAA,QAAO,MACL,2DAA2D,KAAK,UAC9D,EACA,KACA,EACD,GACF,CACF,EAGD,EAAA,EAAA,eAAc,EAAM,EAAK,CACzB,EAAA,EAAO,KAAK,EAAA,QAAO,MAAM,gCAAgC,EAAK,IAAI,CAAC"}
@@ -1,2 +1,2 @@
-const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-
-//# sourceMappingURL=impl-
+
const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-B-TmLA0w.cjs`);const t=require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`),n=require(`./logger-BaHHbWVd.cjs`);require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const r=require(`./data-inventory-flXV6qPl.cjs`),i=require(`./done-input-validation-Cgk5kNBs.cjs`);let a=require(`colors`);a=e.s(a);async function o({auth:e,file:o,transcendUrl:s,dataSiloIds:c,subCategories:l,status:u,includeEncryptedSnippets:d}){i.t(this.process.exit);try{let i=await r.t(t.ti(s,e),{dataSiloIds:c,subCategories:l,status:u,includeEncryptedSnippets:d});n.t.info(a.default.magenta(`Writing unstructured discovery files to file "${o}"...`));let f=[];await t.d(o,i.map(e=>{let n={"Entry ID":e.id,"Data Silo ID":e.dataSiloId,"Object Path ID":e.scannedObjectPathId,"Object ID":e.scannedObjectId,...d?{Entry:e.name,"Context Snippet":e.contextSnippet}:{},"Data Category":`${e.dataSubCategory.category}:${e.dataSubCategory.name}`,"Classification Status":e.status,"Confidence Score":e.confidence,"Classification Method":e.classificationMethod,"Classifier Version":e.classifierVersion};return f=t.Ds([...f,...Object.keys(n)]),n}),f)}catch(e){n.t.error(a.default.red(`An error occurred syncing the unstructured discovery files: ${e.message}`)),this.process.exit(1)}n.t.info(a.default.green(`Successfully synced unstructured discovery files to disk at ${o}!`))}exports.pullUnstructuredDiscoveryFiles=o;
+//# sourceMappingURL=impl-DoP4FUJI.cjs.map
@@ -1 +1 @@
-{"version":3,"file":"impl-
+
{"version":3,"file":"impl-DoP4FUJI.cjs","names":["pullUnstructuredSubDataPointRecommendations","buildTranscendGraphQLClient","writeLargeCsv","uniq"],"sources":["../src/commands/inventory/pull-unstructured-discovery-files/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport type { UnstructuredSubDataPointRecommendationStatus } from '@transcend-io/privacy-types';\nimport colors from 'colors';\nimport { uniq } from 'lodash-es';\nimport { pullUnstructuredSubDataPointRecommendations } from '../../../lib/data-inventory';\nimport { buildTranscendGraphQLClient } from '../../../lib/graphql';\nimport { logger } from '../../../logger';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { writeLargeCsv } from '../../../lib/helpers';\n\nexport interface PullUnstructuredDiscoveryFilesCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n subCategories?: string[];\n status?: UnstructuredSubDataPointRecommendationStatus[];\n includeEncryptedSnippets: boolean;\n}\n\nexport async function pullUnstructuredDiscoveryFiles(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n dataSiloIds,\n subCategories,\n status,\n includeEncryptedSnippets,\n }: PullUnstructuredDiscoveryFilesCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const entries = await pullUnstructuredSubDataPointRecommendations(client, {\n dataSiloIds,\n subCategories, // TODO: https://transcend.height.app/T-40482 - do by name not ID\n status,\n includeEncryptedSnippets,\n });\n\n logger.info(\n colors.magenta(\n `Writing unstructured discovery files to file \"${file}\"...`,\n ),\n );\n let headers: string[] = [];\n const inputs = entries.map((entry) => {\n const result = {\n 'Entry ID': entry.id,\n 'Data Silo ID': entry.dataSiloId,\n 'Object Path ID': entry.scannedObjectPathId,\n 'Object ID': entry.scannedObjectId,\n ...(includeEncryptedSnippets\n ? 
{ Entry: entry.name, 'Context Snippet': entry.contextSnippet }\n : {}),\n 'Data Category': `${entry.dataSubCategory.category}:${entry.dataSubCategory.name}`,\n 'Classification Status': entry.status,\n 'Confidence Score': entry.confidence,\n 'Classification Method': entry.classificationMethod,\n 'Classifier Version': entry.classifierVersion,\n };\n headers = uniq([...headers, ...Object.keys(result)]);\n return result;\n });\n await writeLargeCsv(file, inputs, headers);\n } catch (err) {\n logger.error(\n colors.red(\n `An error occurred syncing the unstructured discovery files: ${err.message}`,\n ),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced unstructured discovery files to disk at ${file}!`,\n ),\n );\n}\n"],"mappings":"mWAoBA,eAAsB,EAEpB,CACE,OACA,OACA,eACA,cACA,gBACA,SACA,4BAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAI,CAIF,IAAM,EAAU,MAAMA,EAAAA,EAFPC,EAAAA,GAA4B,EAAc,EAAK,CAEY,CACxE,cACA,gBACA,SACA,2BACD,CAAC,CAEF,EAAA,EAAO,KACL,EAAA,QAAO,QACL,iDAAiD,EAAK,MACvD,CACF,CACD,IAAI,EAAoB,EAAE,CAmB1B,MAAMC,EAAAA,EAAc,EAlBL,EAAQ,IAAK,GAAU,CACpC,IAAM,EAAS,CACb,WAAY,EAAM,GAClB,eAAgB,EAAM,WACtB,iBAAkB,EAAM,oBACxB,YAAa,EAAM,gBACnB,GAAI,EACA,CAAE,MAAO,EAAM,KAAM,kBAAmB,EAAM,eAAgB,CAC9D,EAAE,CACN,gBAAiB,GAAG,EAAM,gBAAgB,SAAS,GAAG,EAAM,gBAAgB,OAC5E,wBAAyB,EAAM,OAC/B,mBAAoB,EAAM,WAC1B,wBAAyB,EAAM,qBAC/B,qBAAsB,EAAM,kBAC7B,CAED,MADA,GAAUC,EAAAA,GAAK,CAAC,GAAG,EAAS,GAAG,OAAO,KAAK,EAAO,CAAC,CAAC,CAC7C,GACP,CACgC,EAAQ,OACnC,EAAK,CACZ,EAAA,EAAO,MACL,EAAA,QAAO,IACL,+DAA+D,EAAI,UACpE,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,CAItB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,+DAA+D,EAAK,GACrE,CACF"}
@@ -1,2 +1,2 @@
-const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-
-//# sourceMappingURL=impl-
+
const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-B-TmLA0w.cjs`);const t=require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`),n=require(`./logger-BaHHbWVd.cjs`);require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const r=require(`./done-input-validation-Cgk5kNBs.cjs`),i=require(`./preference-management-5uJDKuMK.cjs`);let a=require(`node:fs`),o=require(`node:path`),s=require(`colors`);s=e.s(s);async function c({auth:e,partition:c,sombraAuth:l,transcendUrl:u,file:d=``,directory:f,dryRun:p,skipExistingRecordCheck:m,receiptFileDir:h,skipWorkflowTriggers:g,forceTriggerWorkflows:_,skipConflictUpdates:v,isSilent:y,attributes:b,concurrency:x}){f&&d&&(n.t.error(s.default.red(`Cannot provide both a directory and a file. Please provide only one.`)),this.process.exit(1)),!d&&!f&&(n.t.error(s.default.red(`A file or directory must be provided. Please provide one using --file=./preferences.csv or --directory=./preferences`)),this.process.exit(1)),r.t(this.process.exit);let S=[];if(f)try{let e=(0,a.readdirSync)(f).filter(e=>e.endsWith(`.csv`));e.length===0&&(n.t.error(s.default.red(`No CSV files found in directory: ${f}`)),this.process.exit(1)),S.push(...e.map(e=>(0,o.join)(f,e)))}catch(e){n.t.error(s.default.red(`Failed to read directory: ${f}`)),n.t.error(s.default.red(e.message)),this.process.exit(1)}else try{d.endsWith(`.csv`)||(n.t.error(s.default.red(`File must be a CSV file`)),this.process.exit(1)),S.push(d)}catch(e){n.t.error(s.default.red(`Failed to access file: ${d}`)),n.t.error(s.default.red(e.message)),this.process.exit(1)}n.t.info(s.default.green(`Processing ${S.length} consent preferences files for partition: ${c}`)),n.t.debug(`Files to process: ${S.join(`, `)}`),m&&n.t.info(s.default.bgYellow(`Skipping existing record check: ${m}`)),await t.Ts(S,async n=>{await i.a({receiptFilepath:(0,o.join)(h,`${(0,o.basename)(n).replace(`.csv`,``)}-receipts.json`),auth:e,sombraAuth:l,file:n,partition:c,transcendUrl:u,skipConflictUpdates:v,skipWorkflowTriggers:g,skipExistingRecordCheck:m,isSilent:y,dryRun:p,attributes:t.li(b),forceTriggerWorkflows:_})},{concurrency:x})}exports.uploadPreferences=c;
+//# sourceMappingURL=impl-DrNWIvMG.cjs.map
@@ -1 +1 @@
-{"version":3,"file":"impl-
+
{"version":3,"file":"impl-DrNWIvMG.cjs","names":["file","map","uploadPreferenceManagementPreferencesInteractive","splitCsvToList"],"sources":["../src/commands/consent/upload-preferences/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport colors from 'colors';\n\nimport { logger } from '../../../logger';\nimport { uploadPreferenceManagementPreferencesInteractive } from '../../../lib/preference-management';\nimport { splitCsvToList } from '../../../lib/requests';\nimport { readdirSync } from 'node:fs';\nimport { map } from '../../../lib/bluebird';\nimport { basename, join } from 'node:path';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface UploadPreferencesCommandFlags {\n auth: string;\n partition: string;\n sombraAuth?: string;\n transcendUrl: string;\n file?: string;\n directory?: string;\n dryRun: boolean;\n skipExistingRecordCheck: boolean;\n receiptFileDir: string;\n skipWorkflowTriggers: boolean;\n forceTriggerWorkflows: boolean;\n skipConflictUpdates: boolean;\n isSilent: boolean;\n attributes: string;\n receiptFilepath: string;\n concurrency: number;\n}\n\nexport async function uploadPreferences(\n this: LocalContext,\n {\n auth,\n partition,\n sombraAuth,\n transcendUrl,\n file = '',\n directory,\n dryRun,\n skipExistingRecordCheck,\n receiptFileDir,\n skipWorkflowTriggers,\n forceTriggerWorkflows,\n skipConflictUpdates,\n isSilent,\n attributes,\n concurrency,\n }: UploadPreferencesCommandFlags,\n): Promise<void> {\n if (!!directory && !!file) {\n logger.error(\n colors.red(\n 'Cannot provide both a directory and a file. Please provide only one.',\n ),\n );\n this.process.exit(1);\n }\n\n if (!file && !directory) {\n logger.error(\n colors.red(\n 'A file or directory must be provided. 
Please provide one using --file=./preferences.csv or --directory=./preferences',\n ),\n );\n this.process.exit(1);\n }\n\n doneInputValidation(this.process.exit);\n\n const files: string[] = [];\n\n if (directory) {\n try {\n const filesInDirectory = readdirSync(directory);\n const csvFiles = filesInDirectory.filter((file) => file.endsWith('.csv'));\n\n if (csvFiles.length === 0) {\n logger.error(\n colors.red(`No CSV files found in directory: ${directory}`),\n );\n this.process.exit(1);\n }\n\n // Add full paths for each CSV file\n files.push(...csvFiles.map((file) => join(directory, file)));\n } catch (err) {\n logger.error(colors.red(`Failed to read directory: ${directory}`));\n logger.error(colors.red((err as Error).message));\n this.process.exit(1);\n }\n } else {\n try {\n // Verify file exists and is a CSV\n if (!file.endsWith('.csv')) {\n logger.error(colors.red('File must be a CSV file'));\n this.process.exit(1);\n }\n files.push(file);\n } catch (err) {\n logger.error(colors.red(`Failed to access file: ${file}`));\n logger.error(colors.red((err as Error).message));\n this.process.exit(1);\n }\n }\n\n logger.info(\n colors.green(\n `Processing ${files.length} consent preferences files for partition: ${partition}`,\n ),\n );\n logger.debug(`Files to process: ${files.join(', ')}`);\n\n if (skipExistingRecordCheck) {\n logger.info(\n colors.bgYellow(\n `Skipping existing record check: ${skipExistingRecordCheck}`,\n ),\n );\n }\n\n await map(\n files,\n async (filePath) => {\n const fileName = basename(filePath).replace('.csv', '');\n await uploadPreferenceManagementPreferencesInteractive({\n receiptFilepath: join(receiptFileDir, `${fileName}-receipts.json`),\n auth,\n sombraAuth,\n file: filePath,\n partition,\n transcendUrl,\n skipConflictUpdates,\n skipWorkflowTriggers,\n skipExistingRecordCheck,\n isSilent,\n dryRun,\n attributes: splitCsvToList(attributes),\n forceTriggerWorkflows,\n });\n },\n { concurrency },\n );\n}\n"],"mappings":"sZA8BA,eAAsB,EAEpB,CACE,OACA,YACA,aACA,eACA,OAAO,GACP,YACA,SACA,0BACA,iBACA,uBACA,wBACA,sBACA,WACA,aACA,eAEa,CACT,GAAe,IACnB,EAAA,EAAO,MACL,EAAA,QAAO,IACL,uEACD,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGlB,CAAC,GAAQ,CAAC,IACZ,EAAA,EAAO,MACL,EAAA,QAAO,IACL,uHACD,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,IAAM,EAAkB,EAAE,CAE1B,GAAI,EACF,GAAI,CAEF,IAAM,GAAA,EAAA,EAAA,aAD+B,EAAU,CACb,OAAQ,GAASA,EAAK,SAAS,OAAO,CAAC,CAErE,EAAS,SAAW,IACtB,EAAA,EAAO,MACL,EAAA,QAAO,IAAI,oCAAoC,IAAY,CAC5D,CACD,KAAK,QAAQ,KAAK,EAAE,EAItB,EAAM,KAAK,GAAG,EAAS,IAAK,IAAA,EAAA,EAAA,MAAc,EAAWA,EAAK,CAAC,CAAC,OACrD,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,6BAA6B,IAAY,CAAC,CAClE,EAAA,EAAO,MAAM,EAAA,QAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,KAAK,QAAQ,KAAK,EAAE,MAGtB,GAAI,CAEG,EAAK,SAAS,OAAO,GACxB,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,0BAA0B,CAAC,CACnD,KAAK,QAAQ,KAAK,EAAE,EAEtB,EAAM,KAAK,EAAK,OACT,EAAK,CACZ,EAAA,EAAO,MAAM,EAAA,QAAO,IAAI,0BAA0B,IAAO,CAAC,CAC1D,EAAA,EAAO,MAAM,EAAA,QAAO,IAAK,EAAc,QAAQ,CAAC,CAChD,KAAK,QAAQ,KAAK,EAAE,CAIxB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,cAAc,EAAM,OAAO,4CAA4C,IACxE,CACF,CACD,EAAA,EAAO,MAAM,qBAAqB,EAAM,KAAK,KAAK,GAAG,CAEjD,GACF,EAAA,EAAO,KACL,EAAA,QAAO,SACL,mCAAmC,IACpC,CACF,CAGH,MAAMC,EAAAA,GACJ,EACA,KAAO,IAAa,CAElB,MAAMC,EAAAA,EAAiD,CACrD,iBAAA,EAAA,EAAA,MAAsB,EAAgB,IAAA,EAAA,EAAA,UAFd,EAAS,CAAC,QAAQ,OAAQ,GAAG,CAEH,gBAAgB,CAClE,OACA,aACA,KAAM,EACN,YACA,eACA,sBACA,uBACA,0BACA,WACA,SACA,WAAYC,EAAAA,GAAe,EAAW,CACtC,wBACD,CAAC,EAEJ,CAAE,cAAa,CAChB"}
@@ -1,2 +1,2 @@
-const e=require(`./enums-CBXlBJii.cjs`),t=require(`./constants-
-//# sourceMappingURL=impl-
+
const e=require(`./enums-CBXlBJii.cjs`),t=require(`./constants-B-TmLA0w.cjs`),n=require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`),r=require(`./logger-BaHHbWVd.cjs`);require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const i=require(`./data-inventory-flXV6qPl.cjs`),a=require(`./done-input-validation-Cgk5kNBs.cjs`);let o=require(`colors`);o=e.s(o);async function s({auth:e,file:s,transcendUrl:c,dataSiloIds:l,includeAttributes:u,includeGuessedCategories:d,parentCategories:f,subCategories:p=[]}){a.t(this.process.exit);try{let t=await i.n(n.ti(c,e),{dataSiloIds:l,includeGuessedCategories:d,parentCategories:f,includeAttributes:u,subCategories:p});r.t.info(o.default.magenta(`Writing datapoints to file "${s}"...`));let a=[];await n.d(s,t.map(e=>{let t={"Property ID":e.id,"Data Silo":e.dataSilo.title,Object:e.dataPoint.name,"Object Path":e.dataPoint.path.join(`.`),Property:e.name,"Property Description":e.description,"Data Categories":e.categories.map(e=>`${e.category}:${e.name}`).join(`, `),"Guessed Category":e.pendingCategoryGuesses?.[0]?`${e.pendingCategoryGuesses[0].category.category}:${e.pendingCategoryGuesses[0].category.name}`:``,"Processing Purposes":e.purposes.map(e=>`${e.purpose}:${e.name}`).join(`, `),...Object.entries(n.As(e.attributeValues||[],({attributeKey:e})=>e.name)).reduce((e,[t,n])=>(e[t]=n.map(e=>e.name).join(`,`),e),{})};return a=n.Ds([...a,...Object.keys(t)]),t}),a)}catch(e){r.t.error(o.default.red(`An error occurred syncing the datapoints: ${e.message}`)),this.process.exit(1)}r.t.info(o.default.green(`Successfully synced datapoints to disk at ${s}! View at ${t.n}`))}exports.pullDatapoints=s;
+//# sourceMappingURL=impl-DvlAq8xf.cjs.map
@@ -1 +1 @@
-{"version":3,"file":"impl-
+
{"version":3,"file":"impl-DvlAq8xf.cjs","names":["pullAllDatapoints","buildTranscendGraphQLClient","writeLargeCsv","groupBy","uniq","ADMIN_DASH_DATAPOINTS"],"sources":["../src/commands/inventory/pull-datapoints/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { uniq, groupBy } from 'lodash-es';\n\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { buildTranscendGraphQLClient } from '../../../lib/graphql';\nimport { ADMIN_DASH_DATAPOINTS } from '../../../constants';\nimport { pullAllDatapoints } from '../../../lib/data-inventory';\nimport { DataCategoryType } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\nimport { writeLargeCsv } from '../../../lib/helpers';\n\nexport interface PullDatapointsCommandFlags {\n auth: string;\n file: string;\n transcendUrl: string;\n dataSiloIds?: string[];\n includeAttributes: boolean;\n includeGuessedCategories: boolean;\n parentCategories?: DataCategoryType[];\n subCategories?: string[];\n}\n\nexport async function pullDatapoints(\n this: LocalContext,\n {\n auth,\n file,\n transcendUrl,\n dataSiloIds,\n includeAttributes,\n includeGuessedCategories,\n parentCategories,\n subCategories = [],\n }: PullDatapointsCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n try {\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const dataPoints = await pullAllDatapoints(client, {\n dataSiloIds,\n includeGuessedCategories,\n parentCategories,\n includeAttributes,\n subCategories, // TODO: https://transcend.height.app/T-40482 - do by name not ID\n });\n\n logger.info(colors.magenta(`Writing datapoints to file \"${file}\"...`));\n let headers: string[] = [];\n const inputs = dataPoints.map((point) => {\n const result = {\n 'Property ID': point.id,\n 'Data Silo': point.dataSilo.title,\n Object: point.dataPoint.name,\n 'Object Path': point.dataPoint.path.join('.'),\n Property: point.name,\n 'Property Description': point.description,\n 'Data Categories': point.categories\n .map((category) => `${category.category}:${category.name}`)\n .join(', '),\n 'Guessed Category': point.pendingCategoryGuesses?.[0]\n ? `${point.pendingCategoryGuesses![0]!.category.category}:${\n point.pendingCategoryGuesses![0]!.category.name\n }`\n : '',\n 'Processing Purposes': point.purposes\n .map((purpose) => `${purpose.purpose}:${purpose.name}`)\n .join(', '),\n ...Object.entries(\n groupBy(\n point.attributeValues || [],\n ({ attributeKey }) => attributeKey.name,\n ),\n ).reduce((acc, [key, values]) => {\n acc[key] = values.map((value) => value.name).join(',');\n return acc;\n }, {} as Record<string, string>),\n };\n headers = uniq([...headers, ...Object.keys(result)]);\n return result;\n });\n await writeLargeCsv(file, inputs, headers);\n } catch (err) {\n logger.error(\n colors.red(`An error occurred syncing the datapoints: ${err.message}`),\n );\n this.process.exit(1);\n }\n\n // Indicate success\n logger.info(\n colors.green(\n `Successfully synced datapoints to disk at ${file}! 
View at ${ADMIN_DASH_DATAPOINTS}`,\n ),\n );\n}\n"],"mappings":"+VAuBA,eAAsB,EAEpB,CACE,OACA,OACA,eACA,cACA,oBACA,2BACA,mBACA,gBAAgB,EAAE,EAEL,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAI,CAIF,IAAM,EAAa,MAAMA,EAAAA,EAFVC,EAAAA,GAA4B,EAAc,EAAK,CAEX,CACjD,cACA,2BACA,mBACA,oBACA,gBACD,CAAC,CAEF,EAAA,EAAO,KAAK,EAAA,QAAO,QAAQ,+BAA+B,EAAK,MAAM,CAAC,CACtE,IAAI,EAAoB,EAAE,CAiC1B,MAAMC,EAAAA,EAAc,EAhCL,EAAW,IAAK,GAAU,CACvC,IAAM,EAAS,CACb,cAAe,EAAM,GACrB,YAAa,EAAM,SAAS,MAC5B,OAAQ,EAAM,UAAU,KACxB,cAAe,EAAM,UAAU,KAAK,KAAK,IAAI,CAC7C,SAAU,EAAM,KAChB,uBAAwB,EAAM,YAC9B,kBAAmB,EAAM,WACtB,IAAK,GAAa,GAAG,EAAS,SAAS,GAAG,EAAS,OAAO,CAC1D,KAAK,KAAK,CACb,mBAAoB,EAAM,yBAAyB,GAC/C,GAAG,EAAM,uBAAwB,GAAI,SAAS,SAAS,GACrD,EAAM,uBAAwB,GAAI,SAAS,OAE7C,GACJ,sBAAuB,EAAM,SAC1B,IAAK,GAAY,GAAG,EAAQ,QAAQ,GAAG,EAAQ,OAAO,CACtD,KAAK,KAAK,CACb,GAAG,OAAO,QACRC,EAAAA,GACE,EAAM,iBAAmB,EAAE,EAC1B,CAAE,kBAAmB,EAAa,KACpC,CACF,CAAC,QAAQ,EAAK,CAAC,EAAK,MACnB,EAAI,GAAO,EAAO,IAAK,GAAU,EAAM,KAAK,CAAC,KAAK,IAAI,CAC/C,GACN,EAAE,CAA2B,CACjC,CAED,MADA,GAAUC,EAAAA,GAAK,CAAC,GAAG,EAAS,GAAG,OAAO,KAAK,EAAO,CAAC,CAAC,CAC7C,GACP,CACgC,EAAQ,OACnC,EAAK,CACZ,EAAA,EAAO,MACL,EAAA,QAAO,IAAI,6CAA6C,EAAI,UAAU,CACvE,CACD,KAAK,QAAQ,KAAK,EAAE,CAItB,EAAA,EAAO,KACL,EAAA,QAAO,MACL,6CAA6C,EAAK,YAAYC,EAAAA,IAC/D,CACF"}
@@ -1,4 +1,4 @@
-const e=require(`./enums-CBXlBJii.cjs`),t=require(`./constants-
+
const e=require(`./enums-CBXlBJii.cjs`),t=require(`./constants-B-TmLA0w.cjs`),n=require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`),r=require(`./logger-BaHHbWVd.cjs`);require(`./buildAIIntegrationType-n_Qlv8wG.cjs`),require(`./codecs-JSDJgtyL.cjs`),require(`./api-keys-CDp8NUhN.cjs`);const i=require(`./done-input-validation-Cgk5kNBs.cjs`),a=require(`./code-scanning-BwfVNIHr.cjs`);let o=require(`colors`);o=e.s(o);let s=require(`fast-glob`);s=e.s(s);let c=require(`query-string`);async function l({scanPath:e,fileGlobs:t,ignoreDirs:n,config:i}){let{ignoreDirs:a,supportedFiles:o,scanFunction:c}=i,u=t===``?o:o.concat(t.split(`,`)),d=[...n.split(`,`),...a].filter(e=>e.length>0);try{let t=await(0,s.default)(`${e}/**/${u.join(`|`)}`,{ignore:d.map(t=>`${e}/**/${t}`),unique:!0,onlyFiles:!0});r.t.info(`Scanning: ${t.length} files`);let n=t.map(e=>c(e)).flat().map(e=>e.softwareDevelopmentKits||[]).flat(),i=[...new Set(n.map(e=>e.name))];return r.t.info(`Found: ${i.length} unique dependencies`),i.map(t=>({name:t,resourceId:`${e}/**/${t}`,useStrictClassifier:!0}))}catch(e){throw Error(`Error scanning globs ${l} with error: ${e}`)}}async function u({scanPath:e,dataSiloId:s,auth:u,fileGlobs:d,ignoreDirs:f,transcendUrl:p}){i.t(this.process.exit);let m=n.ti(p,u),h=await n.Xr(m,s),g=a.n[h.dataSilo.type];g||(r.t.error(o.default.red(`This plugin "${h.dataSilo.type}" is not supported for offline silo discovery.`)),this.process.exit(1));let _=await l({scanPath:e,fileGlobs:d,ignoreDirs:f,config:g});await n.ut(m,h.id,_);let v=new URL(t.t);v.pathname=`/data-map/data-inventory/silo-discovery/triage`,v.search=(0,c.stringify)({filters:JSON.stringify({pluginIds:[h.id]})}),r.t.info(o.default.green(`Scan found ${_.length} potential data silos at ${e}! View at '${v.href}'
 
 NOTE: it may take 2-3 minutes for scan results to appear in the UI.`))}exports.discoverSilos=u;
-//# sourceMappingURL=impl-
+//# sourceMappingURL=impl-E1vzeNmp.cjs.map
@@ -1 +1 @@
-{"version":3,"file":"impl-
+
{"version":3,"file":"impl-E1vzeNmp.cjs","names":["buildTranscendGraphQLClient","fetchActiveSiloDiscoPlugin","SILO_DISCOVERY_CONFIGS","uploadSiloDiscoveryResults","ADMIN_DASH"],"sources":["../src/lib/code-scanning/findFilesToScan.ts","../src/commands/inventory/discover-silos/impl.ts"],"sourcesContent":["import fastGlob from 'fast-glob';\nimport { logger } from '../../logger';\nimport { CodeScanningConfig } from './types';\n\nexport interface SiloDiscoveryRawResults {\n /** The name of the potential data silo entry */\n name: string;\n /** A unique UUID (represents the same resource across different silo discovery runs) */\n resourceId: string;\n /** Any hosts associated with the entry */\n host?: string;\n /** Type of data silo */\n type?: string | undefined;\n}\n\n/**\n * Helper to scan for data silos in all package.json files that it can find in a directory\n *\n * @deprecated TODO: https://transcend.height.app/T-32325 - use code scanning instead\n * @param options - Options\n * @returns the list of integrations\n */\nexport async function findFilesToScan({\n scanPath,\n fileGlobs,\n ignoreDirs,\n config,\n}: {\n /** Where to look for package.json files */\n scanPath: string;\n /** Globs to look for */\n fileGlobs: string;\n /** The directories to ignore (excludes node_modules and serverless-build) */\n ignoreDirs: string;\n /** Silo Discovery configuration */\n config: CodeScanningConfig;\n}): Promise<SiloDiscoveryRawResults[]> {\n const { ignoreDirs: IGNORE_DIRS, supportedFiles, scanFunction } = config;\n const globsToSupport =\n fileGlobs === ''\n ? supportedFiles\n : supportedFiles.concat(fileGlobs.split(','));\n const dirsToIgnore = [...ignoreDirs.split(','), ...IGNORE_DIRS].filter(\n (dir) => dir.length > 0,\n );\n try {\n const filesToScan: string[] = await fastGlob(\n `${scanPath}/**/${globsToSupport.join('|')}`,\n {\n ignore: dirsToIgnore.map((dir: string) => `${scanPath}/**/${dir}`),\n unique: true,\n onlyFiles: true,\n },\n );\n logger.info(`Scanning: ${filesToScan.length} files`);\n const allPackages = filesToScan\n .map((filePath: string) => scanFunction(filePath))\n .flat();\n const allSdks = allPackages\n .map((appPackage) => appPackage.softwareDevelopmentKits || [])\n .flat();\n const uniqueDeps = new Set(allSdks.map((sdk) => sdk.name));\n const deps = [...uniqueDeps];\n logger.info(`Found: ${deps.length} unique dependencies`);\n return deps.map((dep) => ({\n name: dep,\n resourceId: `${scanPath}/**/${dep}`,\n useStrictClassifier: true,\n }));\n } catch (error) {\n throw new Error(\n `Error scanning globs ${findFilesToScan} with error: ${error}`,\n );\n }\n}\n","import type { LocalContext } from '../../../context';\nimport { stringify } from 'query-string';\nimport { logger } from '../../../logger';\nimport colors from 'colors';\nimport { ADMIN_DASH } from '../../../constants';\nimport {\n fetchActiveSiloDiscoPlugin,\n buildTranscendGraphQLClient,\n uploadSiloDiscoveryResults,\n} from '../../../lib/graphql';\nimport { findFilesToScan } from '../../../lib/code-scanning/findFilesToScan';\nimport { SILO_DISCOVERY_CONFIGS } from '../../../lib/code-scanning';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface DiscoverSilosCommandFlags {\n scanPath: string;\n dataSiloId: string;\n auth: string;\n fileGlobs: string;\n ignoreDirs: string;\n transcendUrl: string;\n}\n\nexport async function discoverSilos(\n this: LocalContext,\n {\n scanPath,\n dataSiloId,\n auth,\n fileGlobs,\n ignoreDirs,\n transcendUrl,\n }: 
DiscoverSilosCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n // Create a GraphQL client\n const client = buildTranscendGraphQLClient(transcendUrl, auth);\n\n const plugin = await fetchActiveSiloDiscoPlugin(client, dataSiloId);\n\n const config = SILO_DISCOVERY_CONFIGS[plugin.dataSilo.type];\n if (!config) {\n logger.error(\n colors.red(\n `This plugin \"${plugin.dataSilo.type}\" is not supported for offline silo discovery.`,\n ),\n );\n this.process.exit(1);\n }\n\n const results = await findFilesToScan({\n scanPath,\n fileGlobs,\n ignoreDirs,\n config,\n });\n\n await uploadSiloDiscoveryResults(client, plugin.id, results);\n\n const newUrl = new URL(ADMIN_DASH);\n newUrl.pathname = '/data-map/data-inventory/silo-discovery/triage';\n newUrl.search = stringify({\n filters: JSON.stringify({ pluginIds: [plugin.id] }),\n });\n\n // Indicate success\n logger.info(\n colors.green(\n `Scan found ${results.length} potential data silos at ${scanPath}! ` +\n `View at '${newUrl.href}' ` +\n '\\n\\n NOTE: it may take 2-3 minutes for scan results to appear in the UI.',\n ),\n );\n}\n"],"mappings":"oeAsBA,eAAsB,EAAgB,CACpC,WACA,YACA,aACA,UAUqC,CACrC,GAAM,CAAE,WAAY,EAAa,iBAAgB,gBAAiB,EAC5D,EACJ,IAAc,GACV,EACA,EAAe,OAAO,EAAU,MAAM,IAAI,CAAC,CAC3C,EAAe,CAAC,GAAG,EAAW,MAAM,IAAI,CAAE,GAAG,EAAY,CAAC,OAC7D,GAAQ,EAAI,OAAS,EACvB,CACD,GAAI,CACF,IAAM,EAAwB,MAAA,EAAA,EAAA,SAC5B,GAAG,EAAS,MAAM,EAAe,KAAK,IAAI,GAC1C,CACE,OAAQ,EAAa,IAAK,GAAgB,GAAG,EAAS,MAAM,IAAM,CAClE,OAAQ,GACR,UAAW,GACZ,CACF,CACD,EAAA,EAAO,KAAK,aAAa,EAAY,OAAO,QAAQ,CAIpD,IAAM,EAHc,EACjB,IAAK,GAAqB,EAAa,EAAS,CAAC,CACjD,MAAM,CAEN,IAAK,GAAe,EAAW,yBAA2B,EAAE,CAAC,CAC7D,MAAM,CAEH,EAAO,CAAC,GADK,IAAI,IAAI,EAAQ,IAAK,GAAQ,EAAI,KAAK,CAAC,CAC9B,CAE5B,OADA,EAAA,EAAO,KAAK,UAAU,EAAK,OAAO,sBAAsB,CACjD,EAAK,IAAK,IAAS,CACxB,KAAM,EACN,WAAY,GAAG,EAAS,MAAM,IAC9B,oBAAqB,GACtB,EAAE,OACI,EAAO,CACd,MAAU,MACR,wBAAwB,EAAgB,eAAe,IACxD,ECjDL,eAAsB,EAEpB,CACE,WACA,aACA,OACA,YACA,aACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAGtC,IAAM,EAASA,EAAAA,GAA4B,EAAc,EAAK,CAExD,EAAS,MAAMC,EAAAA,GAA2B,EAAQ,EAAW,CAE7D,EAASC,EAAAA,EAAuB,EAAO,SAAS,MACjD,IACH,EAAA,EAAO,MACL,EAAA,QAAO,IACL,gBAAgB,EAAO,SAAS,KAAK,gDACtC,CACF,CACD,KAAK,QAAQ,KAAK,EAAE,EAGtB,IAAM,EAAU,MAAM,EAAgB,CACpC,WACA,YACA,aACA,SACD,CAAC,CAEF,MAAMC,EAAAA,GAA2B,EAAQ,EAAO,GAAI,EAAQ,CAE5D,IAAM,EAAS,IAAI,IAAIC,EAAAA,EAAW,CAClC,EAAO,SAAW,iDAClB,EAAO,QAAA,EAAA,EAAA,WAAmB,CACxB,QAAS,KAAK,UAAU,CAAE,UAAW,CAAC,EAAO,GAAG,CAAE,CAAC,CACpD,CAAC,CAGF,EAAA,EAAO,KACL,EAAA,QAAO,MACL,cAAc,EAAQ,OAAO,2BAA2B,EAAS,aACnD,EAAO,KAAK;;sEAE3B,CACF"}
@@ -1,2 +1,2 @@
-const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-
-//# sourceMappingURL=impl-
+
const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-B-TmLA0w.cjs`);const t=require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`),n=require(`./logger-BaHHbWVd.cjs`);require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const r=require(`./done-input-validation-Cgk5kNBs.cjs`);let i=require(`colors`);i=e.s(i);async function a({auth:e,transcendUrl:a,file:o,pageLimit:s,actions:c,sombraAuth:l,skipRequestIdentifiers:u,statuses:d,createdAtBefore:f,createdAtAfter:p,showTests:m}){r.t(this.process.exit);let{requestsFormattedForCsv:h}=await t.I({transcendUrl:a,pageLimit:s,actions:c,skipRequestIdentifiers:u,statuses:d,auth:e,sombraAuth:l,createdAtBefore:f,createdAtAfter:p,isTest:m});await t.d(o,h,t.Ds(h.map(e=>Object.keys(e)).flat())),n.t.info(i.default.green(`Successfully wrote ${h.length} requests to file "${o}"`))}exports._export=a;
+//# sourceMappingURL=impl-EEKe6HmF.cjs.map
@@ -1 +1 @@
-{"version":3,"file":"impl-
+
{"version":3,"file":"impl-EEKe6HmF.cjs","names":["pullPrivacyRequests","writeLargeCsv","uniq"],"sources":["../src/commands/request/export/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport colors from 'colors';\n\nimport { logger } from '../../../logger';\nimport { uniq } from 'lodash-es';\nimport { pullPrivacyRequests } from '../../../lib/requests';\nimport { writeLargeCsv } from '../../../lib/helpers';\nimport type { RequestAction, RequestStatus } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface ExportCommandFlags {\n auth: string;\n sombraAuth?: string;\n actions?: RequestAction[];\n statuses?: RequestStatus[];\n transcendUrl: string;\n file: string;\n concurrency: number;\n createdAtBefore?: Date;\n createdAtAfter?: Date;\n showTests?: boolean;\n skipRequestIdentifiers?: boolean;\n pageLimit: number;\n}\n\n// `export` is a reserved keyword, so we need to prefix it with an underscore\n// eslint-disable-next-line no-underscore-dangle\nexport async function _export(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n file,\n pageLimit,\n actions,\n sombraAuth,\n skipRequestIdentifiers,\n statuses,\n createdAtBefore,\n createdAtAfter,\n showTests,\n }: ExportCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const { requestsFormattedForCsv } = await pullPrivacyRequests({\n transcendUrl,\n pageLimit,\n actions,\n skipRequestIdentifiers,\n statuses,\n auth,\n sombraAuth,\n createdAtBefore,\n createdAtAfter,\n isTest: showTests,\n });\n\n // Write to CSV\n const headers = uniq(\n requestsFormattedForCsv.map((d) => Object.keys(d)).flat(),\n );\n await writeLargeCsv(file, requestsFormattedForCsv, headers);\n logger.info(\n colors.green(\n `Successfully wrote ${requestsFormattedForCsv.length} requests to file \"${file}\"`,\n ),\n );\n}\n"],"mappings":"wTA2BA,eAAsB,EAEpB,CACE,OACA,eACA,OACA,YACA,UACA,aACA,yBACA,WACA,kBACA,iBACA,aAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CAAE,2BAA4B,MAAMA,EAAAA,EAAoB,CAC5D,eACA,YACA,UACA,yBACA,WACA,OACA,aACA,kBACA,iBACA,OAAQ,EACT,CAAC,CAMF,MAAMC,EAAAA,EAAc,EAAM,EAHVC,EAAAA,GACd,EAAwB,IAAK,GAAM,OAAO,KAAK,EAAE,CAAC,CAAC,MAAM,CAC1D,CAC0D,CAC3D,EAAA,EAAO,KACL,EAAA,QAAO,MACL,sBAAsB,EAAwB,OAAO,qBAAqB,EAAK,GAChF,CACF"}
@@ -1,2 +1,2 @@
-const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-
-//# sourceMappingURL=impl-
+
const e=require(`./enums-CBXlBJii.cjs`);require(`./constants-B-TmLA0w.cjs`);const t=require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`),n=require(`./logger-BaHHbWVd.cjs`);require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const r=require(`./pooling-Ct83vfEh.cjs`),i=require(`./done-input-validation-Cgk5kNBs.cjs`);let a=require(`colors`);a=e.s(a);async function o(){let e=Number(process.env.WORKER_ID||`0`);n.t.info(`[w${e}] ready pid=${process.pid}`),process.send?.({type:`ready`}),process.on(`message`,async r=>{if(!r||typeof r!=`object`||(r.type===`shutdown`&&process.exit(0),r.type!==`task`))return;let{filePath:i,options:a}=r.payload,{outputDir:o,clearOutputDir:s}=a;try{n.t.info(`[w${e}] processing ${i}`),await t.b({filePath:i,outputDir:o,clearOutputDir:s,onProgress:(e,t)=>process.send?.({type:`progress`,payload:{filePath:i,processed:e,total:t}})}),process.send?.({type:`result`,payload:{ok:!0,filePath:i}})}catch(r){let a=t.O(r);n.t.error(`[w${e}] ERROR ${i}: ${r.stack||a}`),process.send?.({type:`result`,payload:{ok:!1,filePath:i,error:a}})}}),await new Promise(()=>{})}function s(e){return r.r(e)}function c(e){return r.i(e)}const l={renderHeader:s,renderWorkers:c};function u(){return typeof __filename<`u`?__filename:process.argv[1]}async function d(e){i.t(this.process.exit);let{directory:o,outputDir:s,clearOutputDir:c,concurrency:d,viewerMode:f}=e,p=t.x(o,this),{poolSize:m,cpuCount:h}=r.s(d,p.length);n.t.info(a.default.green(`Converting ${p.length} Parquet file(s) → CSV with pool size ${m} (CPU=${h})`));let g=p.map(e=>({filePath:e,options:{outputDir:s,clearOutputDir:c}}));await r.n({title:`Parquet → CSV - ${o}`,baseDir:o||s||process.cwd(),childFlag:r.o,childModulePath:u(),poolSize:m,cpuCount:h,filesTotal:p.length,hooks:{nextTask:()=>g.shift(),taskLabel:e=>e.filePath,initTotals:()=>({}),initSlotProgress:()=>void 0,onProgress:e=>e,onResult:(e,t)=>({totals:e,ok:!!t.ok}),postProcess:async()=>{}},viewerMode:f,render:e=>r.a(e,l,f),extraKeyHandler:({logsBySlot:e,repaint:t,setPaused:n})=>r.t({logsBySlot:e,repaint:t,setPaused:n})})}process.argv.includes(r.o)&&o().catch(e=>{n.t.error(e),process.exit(1)}),exports.parquetToCsv=d;
+//# sourceMappingURL=impl-NdV_MRsm.cjs.map
@@ -1 +1 @@
-{"version":3,"file":"impl-
+
{"version":3,"file":"impl-NdV_MRsm.cjs","names":["parquetToCsvOneFile","extractErrorMessage","makeHeader","makeWorkerRows","collectParquetFilesOrExit","computePoolSize","runPool","CHILD_FLAG","dashboardPlugin","createExtraKeyHandler"],"sources":["../src/commands/admin/parquet-to-csv/worker.ts","../src/commands/admin/parquet-to-csv/ui/plugin.ts","../src/commands/admin/parquet-to-csv/impl.ts"],"sourcesContent":["import { parquetToCsvOneFile, extractErrorMessage } from '../../../lib/helpers';\nimport type { ToWorker } from '../../../lib/pooling';\nimport { logger } from '../../../logger';\n\nexport type ParquetTask = {\n /** Absolute path of the Parquet file to convert. */\n filePath: string;\n options: {\n /** Optional directory where CSV output files should be written. */\n outputDir?: string;\n /** Whether to clear any pre-existing output before writing new ones. */\n clearOutputDir: boolean;\n };\n};\n\nexport type ParquetProgress = {\n /** File being processed by the worker. */\n filePath: string;\n /** Rows processed so far. */\n processed: number;\n /** Optional known total rows (not always available). */\n total?: number;\n};\n\nexport type ParquetResult = {\n ok: boolean;\n filePath: string;\n error?: string;\n};\n\n/**\n * Worker loop: convert a single Parquet file to one or more CSV files.\n */\nexport async function runChild(): Promise<void> {\n const workerId = Number(process.env.WORKER_ID || '0');\n logger.info(`[w${workerId}] ready pid=${process.pid}`);\n process.send?.({ type: 'ready' });\n\n process.on('message', async (msg: ToWorker<ParquetTask>) => {\n if (!msg || typeof msg !== 'object') return;\n\n if (msg.type === 'shutdown') {\n process.exit(0);\n }\n if (msg.type !== 'task') return;\n\n const { filePath, options } = msg.payload;\n const { outputDir, clearOutputDir } = options;\n\n try {\n logger.info(`[w${workerId}] processing ${filePath}`);\n await parquetToCsvOneFile({\n filePath,\n outputDir,\n clearOutputDir,\n onProgress: (processed, total) =>\n process.send?.({\n type: 'progress',\n payload: { filePath, processed, total },\n }),\n });\n\n process.send?.({\n type: 'result',\n payload: { ok: true, filePath },\n });\n } catch (err) {\n const message = extractErrorMessage(err);\n logger.error(`[w${workerId}] ERROR ${filePath}: ${err.stack || message}`);\n process.send?.({\n type: 'result',\n payload: { ok: false, filePath, error: message },\n });\n }\n });\n\n // keep alive until shutdown\n await new Promise<never>(() => {\n // Do nothing\n });\n}\n","import {\n makeHeader,\n makeWorkerRows,\n type ChunkSlotProgress,\n type CommonCtx,\n type DashboardPlugin,\n} from '../../../../lib/pooling';\n\n/**\n * Header for parquet-to-csv (no extra totals block).\n *\n * @param ctx - Dashboard context.\n * @returns Header lines.\n */\nfunction renderHeader<TTotals>(\n ctx: CommonCtx<TTotals, ChunkSlotProgress>,\n): string[] {\n // no extra lines — reuse the shared header as-is\n return makeHeader(ctx);\n}\n\n/**\n * Worker rows for parquet-to-csv — share the generic row renderer.\n *\n * @param ctx - Dashboard context.\n * @returns Array of strings, each representing one worker row.\n */\nfunction renderWorkers<TTotals>(\n ctx: CommonCtx<TTotals, ChunkSlotProgress>,\n): string[] {\n return makeWorkerRows(ctx);\n}\n\nexport const parquetToCsvPlugin: DashboardPlugin<unknown, ChunkSlotProgress> = {\n renderHeader,\n renderWorkers,\n // no extras\n};\n","import type { LocalContext } from '../../../context';\nimport colors from 'colors';\nimport { logger } from 
'../../../logger';\nimport { collectParquetFilesOrExit } from '../../../lib/helpers';\nimport {\n computePoolSize,\n createExtraKeyHandler,\n CHILD_FLAG,\n type PoolHooks,\n runPool,\n dashboardPlugin,\n} from '../../../lib/pooling';\nimport {\n runChild,\n type ParquetProgress,\n type ParquetResult,\n type ParquetTask,\n} from './worker';\nimport { parquetToCsvPlugin } from './ui';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\n/**\n * Returns the current module's path so the worker pool knows what file to re-exec.\n * In Node ESM, __filename is undefined, so we fall back to argv[1].\n *\n * @returns The current module's path.\n */\nfunction getCurrentModulePath(): string {\n if (typeof __filename !== 'undefined') {\n return __filename as unknown as string;\n }\n return process.argv[1];\n}\n\n/** No custom totals for the header; the runner’s built-ins suffice. */\ntype Totals = Record<string, never>;\n\nexport type ParquetToCsvCommandFlags = {\n directory: string;\n outputDir?: string;\n clearOutputDir: boolean;\n concurrency?: number;\n viewerMode: boolean;\n};\n\n/**\n * Convert all Parquet files in a directory to CSV, in parallel.\n *\n * @param flags - The command flags.\n */\nexport async function parquetToCsv(\n this: LocalContext,\n flags: ParquetToCsvCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n const { directory, outputDir, clearOutputDir, concurrency, viewerMode } =\n flags;\n\n /* 1) Discover .parquet inputs */\n const files = collectParquetFilesOrExit(directory, this);\n\n /* 2) Size the pool */\n const { poolSize, cpuCount } = computePoolSize(concurrency, files.length);\n\n logger.info(\n colors.green(\n `Converting ${files.length} Parquet file(s) → CSV with pool size ${poolSize} (CPU=${cpuCount})`,\n ),\n );\n\n /* 3) Build FIFO queue of tasks (one per file) */\n const queue = files.map<ParquetTask>((filePath) => ({\n filePath,\n options: { outputDir, clearOutputDir },\n }));\n\n /* 4) Pool hooks */\n const hooks: PoolHooks<ParquetTask, ParquetProgress, ParquetResult, Totals> =\n {\n nextTask: () => queue.shift(),\n taskLabel: (t) => t.filePath,\n initTotals: () => ({} as Totals),\n initSlotProgress: () => undefined,\n onProgress: (totals) => totals,\n onResult: (totals, res) => ({ totals, ok: !!res.ok }),\n postProcess: async () => {\n // nothing special post-run\n },\n };\n\n /* 5) Launch the pool runner with custom dashboard plugin */\n await runPool({\n title: `Parquet → CSV - ${directory}`,\n baseDir: directory || outputDir || process.cwd(),\n childFlag: CHILD_FLAG,\n childModulePath: getCurrentModulePath(),\n poolSize,\n cpuCount,\n filesTotal: files.length,\n hooks,\n viewerMode,\n render: (input) => dashboardPlugin(input, parquetToCsvPlugin, viewerMode),\n extraKeyHandler: ({ logsBySlot, repaint, setPaused }) =>\n createExtraKeyHandler({ logsBySlot, repaint, setPaused }),\n });\n}\n\n/* -------------------------------------------------------------------------------------------------\n * If invoked directly as a child process, enter worker loop\n * ------------------------------------------------------------------------------------------------- */\nif (process.argv.includes(CHILD_FLAG)) {\n runChild().catch((err) => {\n logger.error(err);\n process.exit(1);\n 
});\n}\n"],"mappings":"4VAiCA,eAAsB,GAA0B,CAC9C,IAAM,EAAW,OAAO,QAAQ,IAAI,WAAa,IAAI,CACrD,EAAA,EAAO,KAAK,KAAK,EAAS,cAAc,QAAQ,MAAM,CACtD,QAAQ,OAAO,CAAE,KAAM,QAAS,CAAC,CAEjC,QAAQ,GAAG,UAAW,KAAO,IAA+B,CAM1D,GALI,CAAC,GAAO,OAAO,GAAQ,WAEvB,EAAI,OAAS,YACf,QAAQ,KAAK,EAAE,CAEb,EAAI,OAAS,QAAQ,OAEzB,GAAM,CAAE,WAAU,WAAY,EAAI,QAC5B,CAAE,YAAW,kBAAmB,EAEtC,GAAI,CACF,EAAA,EAAO,KAAK,KAAK,EAAS,eAAe,IAAW,CACpD,MAAMA,EAAAA,EAAoB,CACxB,WACA,YACA,iBACA,YAAa,EAAW,IACtB,QAAQ,OAAO,CACb,KAAM,WACN,QAAS,CAAE,WAAU,YAAW,QAAO,CACxC,CAAC,CACL,CAAC,CAEF,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAM,WAAU,CAChC,CAAC,OACK,EAAK,CACZ,IAAM,EAAUC,EAAAA,EAAoB,EAAI,CACxC,EAAA,EAAO,MAAM,KAAK,EAAS,UAAU,EAAS,IAAI,EAAI,OAAS,IAAU,CACzE,QAAQ,OAAO,CACb,KAAM,SACN,QAAS,CAAE,GAAI,GAAO,WAAU,MAAO,EAAS,CACjD,CAAC,GAEJ,CAGF,MAAM,IAAI,YAAqB,GAE7B,CCjEJ,SAAS,EACP,EACU,CAEV,OAAOC,EAAAA,EAAW,EAAI,CASxB,SAAS,EACP,EACU,CACV,OAAOC,EAAAA,EAAe,EAAI,CAG5B,MAAa,EAAkE,CAC7E,eACA,gBAED,CCVD,SAAS,GAA+B,CAItC,OAHI,OAAO,WAAe,IACjB,WAEF,QAAQ,KAAK,GAmBtB,eAAsB,EAEpB,EACe,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,GAAM,CAAE,YAAW,YAAW,iBAAgB,cAAa,cACzD,EAGI,EAAQC,EAAAA,EAA0B,EAAW,KAAK,CAGlD,CAAE,WAAU,YAAaC,EAAAA,EAAgB,EAAa,EAAM,OAAO,CAEzE,EAAA,EAAO,KACL,EAAA,QAAO,MACL,cAAc,EAAM,OAAO,wCAAwC,EAAS,QAAQ,EAAS,GAC9F,CACF,CAGD,IAAM,EAAQ,EAAM,IAAkB,IAAc,CAClD,WACA,QAAS,CAAE,YAAW,iBAAgB,CACvC,EAAE,CAiBH,MAAMC,EAAAA,EAAQ,CACZ,MAAO,mBAAmB,IAC1B,QAAS,GAAa,GAAa,QAAQ,KAAK,CAChD,UAAWC,EAAAA,EACX,gBAAiB,GAAsB,CACvC,WACA,WACA,WAAY,EAAM,OAClB,MArBA,CACE,aAAgB,EAAM,OAAO,CAC7B,UAAY,GAAM,EAAE,SACpB,gBAAmB,EAAE,EACrB,qBAAwB,IAAA,GACxB,WAAa,GAAW,EACxB,UAAW,EAAQ,KAAS,CAAE,SAAQ,GAAI,CAAC,CAAC,EAAI,GAAI,EACpD,YAAa,SAAY,GAG1B,CAYD,aACA,OAAS,GAAUC,EAAAA,EAAgB,EAAO,EAAoB,EAAW,CACzE,iBAAkB,CAAE,aAAY,UAAS,eACvCC,EAAAA,EAAsB,CAAE,aAAY,UAAS,YAAW,CAAC,CAC5D,CAAC,CAMA,QAAQ,KAAK,SAASF,EAAAA,EAAW,EACnC,GAAU,CAAC,MAAO,GAAQ,CACxB,EAAA,EAAO,MAAM,EAAI,CACjB,QAAQ,KAAK,EAAE,EACf"}
@@ -1,2 +1,2 @@
-require(`./enums-CBXlBJii.cjs`),require(`./constants-
-//# sourceMappingURL=impl-
+
require(`./enums-CBXlBJii.cjs`),require(`./constants-B-TmLA0w.cjs`);const e=require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`);require(`./logger-BaHHbWVd.cjs`),require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const t=require(`./done-input-validation-Cgk5kNBs.cjs`);async function n({auth:n,transcendUrl:r,createdAtBefore:i,createdAtAfter:a,actions:o,daysLeft:s,days:c,requestIds:l,emailTemplate:u,concurrency:d}){t.t(this.process.exit),await e.q({transcendUrl:r,requestActions:o,auth:n,emailTemplate:u,days:c,daysLeft:s,requestIds:l,concurrency:d,createdAtBefore:i,createdAtAfter:a})}exports.notifyAdditionalTime=n;
+//# sourceMappingURL=impl-TQVXJemY.cjs.map
@@ -1 +1 @@
-{"version":3,"file":"impl-
+
{"version":3,"file":"impl-TQVXJemY.cjs","names":["notifyPrivacyRequestsAdditionalTime"],"sources":["../src/commands/request/notify-additional-time/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../context';\nimport { notifyPrivacyRequestsAdditionalTime } from '../../../lib/requests';\nimport type { RequestAction } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../lib/cli/done-input-validation';\n\nexport interface NotifyAdditionalTimeCommandFlags {\n auth: string;\n createdAtBefore: Date;\n createdAtAfter?: Date;\n actions?: RequestAction[];\n daysLeft: number;\n days: number;\n requestIds?: string[];\n emailTemplate: string;\n transcendUrl: string;\n concurrency: number;\n}\n\nexport async function notifyAdditionalTime(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n createdAtBefore,\n createdAtAfter,\n actions,\n daysLeft,\n days,\n requestIds,\n emailTemplate,\n concurrency,\n }: NotifyAdditionalTimeCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await notifyPrivacyRequestsAdditionalTime({\n transcendUrl,\n requestActions: actions,\n auth,\n emailTemplate,\n days,\n daysLeft,\n requestIds,\n concurrency,\n createdAtBefore,\n createdAtAfter,\n });\n}\n"],"mappings":"6QAkBA,eAAsB,EAEpB,CACE,OACA,eACA,kBACA,iBACA,UACA,WACA,OACA,aACA,gBACA,eAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAoC,CACxC,eACA,eAAgB,EAChB,OACA,gBACA,OACA,WACA,aACA,cACA,kBACA,iBACD,CAAC"}
@@ -1,2 +1,2 @@
-require(`./enums-CBXlBJii.cjs`),require(`./constants-
-//# sourceMappingURL=impl-
+
require(`./enums-CBXlBJii.cjs`),require(`./constants-B-TmLA0w.cjs`);const e=require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`);require(`./logger-BaHHbWVd.cjs`),require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const t=require(`./done-input-validation-Cgk5kNBs.cjs`);async function n({auth:n,dataSiloId:r,status:i,statuses:a,transcendUrl:o}){t.t(this.process.exit),await e.F({transcendUrl:o,auth:n,status:i,dataSiloId:r,requestStatuses:a})}exports.skipRequestDataSilos=n;
+//# sourceMappingURL=impl-b6KwZ74o.cjs.map
@@ -1 +1 @@
-{"version":3,"file":"impl-
+
{"version":3,"file":"impl-b6KwZ74o.cjs","names":["skipRequestDataSilosHelper"],"sources":["../src/commands/request/system/skip-request-data-silos/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../../context';\nimport type {\n RequestDataSiloStatus,\n RequestStatus,\n} from '@transcend-io/privacy-types';\nimport { skipRequestDataSilos as skipRequestDataSilosHelper } from '../../../../lib/requests';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\n\nexport interface SkipRequestDataSilosCommandFlags {\n auth: string;\n dataSiloId: string;\n transcendUrl: string;\n statuses: RequestStatus[];\n status:\n | (typeof RequestDataSiloStatus)['Skipped']\n | (typeof RequestDataSiloStatus)['Resolved'];\n}\n\nexport async function skipRequestDataSilos(\n this: LocalContext,\n {\n auth,\n dataSiloId,\n status,\n statuses,\n transcendUrl,\n }: SkipRequestDataSilosCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await skipRequestDataSilosHelper({\n transcendUrl,\n auth,\n status,\n dataSiloId,\n requestStatuses: statuses,\n });\n}\n"],"mappings":"6QAkBA,eAAsB,EAEpB,CACE,OACA,aACA,SACA,WACA,gBAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAA2B,CAC/B,eACA,OACA,SACA,aACA,gBAAiB,EAClB,CAAC"}
@@ -1,2 +1,2 @@
-require(`./enums-CBXlBJii.cjs`),require(`./constants-
-//# sourceMappingURL=impl-
+
require(`./enums-CBXlBJii.cjs`),require(`./constants-B-TmLA0w.cjs`),require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`),require(`./logger-BaHHbWVd.cjs`),require(`./buildAIIntegrationType-n_Qlv8wG.cjs`);const e=require(`./manual-enrichment-Y_BQaSZQ.cjs`),t=require(`./done-input-validation-Cgk5kNBs.cjs`);async function n({auth:n,transcendUrl:r,file:i,concurrency:a,actions:o,sombraAuth:s}){t.t(this.process.exit),await e.i({file:i,transcendUrl:r,concurrency:a,requestActions:o,auth:n,sombraAuth:s})}exports.pullIdentifiers=n;
+//# sourceMappingURL=impl-t_fZSUcj.cjs.map
@@ -1 +1 @@
-{"version":3,"file":"impl-
+
{"version":3,"file":"impl-t_fZSUcj.cjs","names":["pullManualEnrichmentIdentifiersToCsv"],"sources":["../src/commands/request/preflight/pull-identifiers/impl.ts"],"sourcesContent":["import type { LocalContext } from '../../../../context';\nimport { pullManualEnrichmentIdentifiersToCsv } from '../../../../lib/manual-enrichment';\nimport type { RequestAction } from '@transcend-io/privacy-types';\nimport { doneInputValidation } from '../../../../lib/cli/done-input-validation';\n\nexport interface PullIdentifiersCommandFlags {\n auth: string;\n sombraAuth?: string;\n transcendUrl: string;\n file: string;\n actions?: RequestAction[];\n concurrency: number;\n}\n\nexport async function pullIdentifiers(\n this: LocalContext,\n {\n auth,\n transcendUrl,\n file,\n concurrency,\n actions,\n sombraAuth,\n }: PullIdentifiersCommandFlags,\n): Promise<void> {\n doneInputValidation(this.process.exit);\n\n await pullManualEnrichmentIdentifiersToCsv({\n file,\n transcendUrl,\n concurrency,\n requestActions: actions,\n auth,\n sombraAuth,\n });\n}\n"],"mappings":"mTAcA,eAAsB,EAEpB,CACE,OACA,eACA,OACA,cACA,UACA,cAEa,CACf,EAAA,EAAoB,KAAK,QAAQ,KAAK,CAEtC,MAAMA,EAAAA,EAAqC,CACzC,OACA,eACA,cACA,eAAgB,EAChB,OACA,aACD,CAAC"}
package/dist/index.cjs
CHANGED
@@ -1,4 +1,4 @@
-const e=require(`./enums-CBXlBJii.cjs`),t=require(`./constants-
+
const e=require(`./enums-CBXlBJii.cjs`),t=require(`./constants-B-TmLA0w.cjs`),n=require(`./syncConfigurationToTranscend-Bpge5AcC.cjs`);require(`./logger-BaHHbWVd.cjs`);const r=require(`./buildAIIntegrationType-n_Qlv8wG.cjs`),i=require(`./codecs-JSDJgtyL.cjs`),a=require(`./readTranscendYaml-CL9nujUr.cjs`),o=require(`./mergeTranscendInputs-PPpGbYgG.cjs`),s=require(`./consent-manager-DXWjvCtI.cjs`),c=require(`./uploadConsents-CJc_6Qwd.cjs`),l=require(`./cron-DQHN57v7.cjs`),u=require(`./api-keys-CDp8NUhN.cjs`),d=require(`./data-inventory-flXV6qPl.cjs`),f=require(`./manual-enrichment-Y_BQaSZQ.cjs`);let p=require(`@transcend-io/privacy-types`),m=require(`@transcend-io/type-utils`),h=require(`@transcend-io/handlebars-utils`),g=require(`fast-glob`);g=e.s(g);let _=require(`child_process`);const v=20;async function y(e){let t=[],r=0,i=!1;do{let{largeLanguageModels:{nodes:a}}=await n.i(e,n.Ki,{first:20,offset:r});t.push(...a),r+=20,i=a.length===20}while(i);return t.sort((e,t)=>e.name.localeCompare(t.name))}function b(e){return RegExp(`<${e}>([\\s\\S]+?)<\\/${e}>`)}function x(e){return e}const S=e=>{try{return JSON.parse(e)}catch{return e}};var C=class{prompts;handlebarsOptions;promptContentMap;largeLanguageModels=[];agentsByName={};agentsByAgentId={};graphQLClient;defaultVariables;variables;handlebars;transcendApiKey;transcendUrl;requireApproval;cacheDuration;lastUpdatedAt;constructor({prompts:e,handlebarsOptions:r={},transcendUrl:i=t.a,transcendApiKey:a,requireApproval:o=!0,cacheDuration:s,defaultVariables:c={}}){this.prompts=e,this.transcendUrl=i,this.transcendApiKey=a,this.variables=c,this.defaultVariables=c,this.graphQLClient=n.ti(i,typeof a==`object`?a.release():a),this.requireApproval=o,this.cacheDuration=s,this.handlebarsOptions=r,this.handlebars=(0,h.createHandlebarsWithHelpers)(r)}async fetchPromptsAndMetadata(){let e=(0,m.getValues)(this.prompts),r=e.map(({id:e})=>e).filter(e=>!!e),i=e.map(({title:e})=>e).filter(e=>!!e),a=n.Ds(e.map(({agentNames:e})=>e||[]).flat()),[o,s,c]=await Promise.all([n.Xn(this.graphQLClient,{promptIds:r,promptTitles:i}),y(this.graphQLClient),n.Gr(this.graphQLClient,{names:a})]);this.agentsByName=t.g(c,`name`),this.agentsByAgentId=t.g(c,`agentId`),this.largeLanguageModels=s.filter(e=>e.isTranscendHosted===!1);let l=t.g(o.prompts,`title`),u=t.g(o.prompts,`id`);return this.variables={...o.calculatedVariables.reduce((e,t)=>Object.assign(e,{[t.name]:t.data?JSON.parse(t.data):t.data}),{}),...this.defaultVariables},this.handlebars=(0,h.createHandlebarsWithHelpers)({...this.handlebarsOptions,templates:[...this.handlebarsOptions.templates||[],...o.promptPartials.map(e=>({name:e.slug,content:e.content}))]}),this.promptContentMap=(0,m.apply)(this.prompts,({id:e,title:t})=>{let n=e?u[e]:t?l[t]:void 0;if(!n)throw Error(`Failed to find prompt with title: "${t}" and id: "${e}"`);return n}),this.lastUpdatedAt=new Date,o}async getAgentByName(e){let t=this.agentsByName[e];if(t)return t;let[r]=await n.Gr(this.graphQLClient,{names:[e]});if(r)return this.agentsByName[r.name]=r,this.agentsByAgentId[r.agentId]=r,r}async getPromptThreadBySlackTs(e){let[t]=await n.Jn(this.graphQLClient,{slackMessageTs:[e]});return t}async getAgentsByName(e){if(e.length<1)throw Error(`Expected at least one name to be provided`);let{hasCache:t=[],missingCache:r=[]}=n.As(e,e=>this.agentsByName[e]?`hasCache`:`missingCache`),i=t.map(e=>this.agentsByName[e]);if(r.length===0)return i;let a=n.Ns(r,50),o=[];return await n.Es(a,async e=>{let t=await 
n.Gr(this.graphQLClient,{names:e});t.forEach(e=>{this.agentsByName[e.name]=e,this.agentsByAgentId[e.agentId]=e}),o.push(...t)}),[...i,...o]}getAgentFiles(e){return n.qr(this.graphQLClient,e)}getLargeLanguageModel(e){let t=this.largeLanguageModels.find(t=>typeof e==`string`?t.id===e:t.name===e.name&&t.client===e.client);if(!t)throw Error(`Failed to find model matching: ${typeof e==`string`?e:JSON.stringify(e)}`);return t}async getPromptDefinition(e){(!this.lastUpdatedAt||this.cacheDuration===0||this.cacheDuration&&Date.now()-this.lastUpdatedAt.getTime()>this.cacheDuration)&&await this.fetchPromptsAndMetadata();let{promptContentMap:t}=this;if(!t)throw Error(`Expected this.promptContentMap to be defined`);let n=t[e];if(!n)throw Error(`Expected this.promptContentMap[${e}] to be defined`);return n}async compilePrompt(e,t){let n=await this.getPromptDefinition(e),r=this.prompts[e];if(!r)throw Error(`Expected this.prompts[${e}] to be defined`);if(this.requireApproval&&n.status!==p.PromptStatus.Approved)throw Error(`Assessment "${n.title}" cannot be used because its in status: "${n.status}"`);if(n.status===p.PromptStatus.Rejected)throw Error(`Assessment "${n.title}" cannot be used because it's in status: "${n.status}"`);return(0,m.decodeCodec)(r.paramCodec,t),this.handlebars.compile(n.content)({currentDate:new Date().toISOString(),...this.variables,...t})}parseAiResponse(e,t){let n=this.prompts[e];if(!n)throw Error(`Expected this.prompts[${e}] to be defined`);let r=n.extractFromTag&&(b(n.extractFromTag).exec(t)||[])[1]||t;return(0,m.decodeCodec)(n.outputCodec,S(r),!1)}async reportAndParsePromptRun(e,{largeLanguageModel:t,...r}){(!this.lastUpdatedAt||this.cacheDuration===0||this.cacheDuration&&Date.now()-this.lastUpdatedAt.getTime()>this.cacheDuration)&&await this.fetchPromptsAndMetadata();let i=r.name||`@transcend-io/cli-prompt-run-${new Date().toISOString()}`;if(!this.promptContentMap)throw Error(`Expected this.promptContentMap to be defined`);let a=this.promptContentMap[e];if(!a)throw Error(`Expected this.prompts[${e}] to be defined`);if(r.promptRunMessages.length===0)throw Error(`promptRunMessages is expected to have length > 0`);if(r.promptRunMessages[0].role!==p.ChatCompletionRole.System)throw Error(`promptRunMessages[0].role is expected to be = ${p.ChatCompletionRole.System}`);if(r.promptRunMessages[r.promptRunMessages.length-1].role!==p.ChatCompletionRole.Assistant)throw Error(`promptRunMessages[${r.promptRunMessages.length-1}].role is expected to be = ${p.ChatCompletionRole.Assistant}`);let o=r.promptRunMessages[r.promptRunMessages.length-1].content,s;try{s=this.parseAiResponse(e,o)}catch(e){throw await n.hn(this.graphQLClient,{productArea:p.PromptRunProductArea.PromptManager,...r,name:i,error:e.message,status:p.QueueStatus.Error,...typeof t==`string`?{largeLanguageModelId:t}:{largeLanguageModelName:t.name,largeLanguageModelClient:t.client},promptId:a.id,promptRunMessages:r.promptRunMessages.map((e,t)=>({...e,...t===0?{template:a.content}:{}}))}),e}let c=await n.hn(this.graphQLClient,{productArea:p.PromptRunProductArea.PromptManager,...r,name:i,status:p.QueueStatus.Resolved,...typeof t==`string`?{largeLanguageModelId:t}:{largeLanguageModelName:t.name,largeLanguageModelClient:t.client},promptId:a.id,promptRunMessages:r.promptRunMessages.map((e,t)=>({...e,...t===0?{template:a.content}:{}}))});return{result:s,promptRunId:c,promptRunUrl:`https://app.transcend.io/prompts/runs/${c}`}}async 
reportPromptRunError(e,{largeLanguageModel:t,...r}){(!this.lastUpdatedAt||this.cacheDuration===0||this.cacheDuration&&Date.now()-this.lastUpdatedAt.getTime()>this.cacheDuration)&&await this.fetchPromptsAndMetadata();let i=r.name||`@transcend-io/cli-prompt-run-${new Date().toISOString()}`;if(!this.promptContentMap)throw Error(`Expected this.promptContentMap to be defined`);let a=this.promptContentMap[e];if(!a)throw Error(`Expected this.prompts[${e}] to be defined`);if(r.promptRunMessages.length===0)throw Error(`promptRunMessages is expected to have length > 0`);if(r.promptRunMessages[0].role!==p.ChatCompletionRole.System)throw Error(`promptRunMessages[0].role is expected to be = ${p.ChatCompletionRole.System}`);let o=await n.hn(this.graphQLClient,{productArea:p.PromptRunProductArea.PromptManager,...r,name:i,status:p.QueueStatus.Error,...typeof t==`string`?{largeLanguageModelId:t}:{largeLanguageModelName:t.name,largeLanguageModelClient:t.client},promptId:a.id,promptRunMessages:r.promptRunMessages.map((e,t)=>({...e,...t===0?{template:a.content}:{}}))});return{promptRunId:o,promptRunUrl:`https://app.transcend.io/prompts/runs/${o}`}}};function w(e){return e.replace(/(https?:\/\/[^\s]+)/g,`<link-omitted>`)}function T(e){return Object.entries(e).reduce((e,[t,n])=>n!=null&&n!==``&&!(Array.isArray(n)&&n.length===0)&&!(typeof n==`object`&&Object.keys(n).length===0)?Object.assign(e,{[t]:n}):e,{})}function E({baseBranch:e,rootDirectory:t,githubRepo:r,excludedGlob:i=[],fileBlockList:a=[]}){(0,_.execSync)(`git fetch origin ${e}`);let o=(0,_.execSync)(`git ls-remote ${r} "refs/heads/${e}" | cut -f 1`,{encoding:`utf-8`}).split(`
`)[0],s=(0,_.execSync)(`git rev-parse HEAD`,{encoding:`utf-8`}).split(`
`)[0];if(!o||!s)throw Error(`FAILED TO FIND COMMIT RANGE`);let c=n.js((0,_.execSync)(`git fetch && git diff --name-only "${e||o}...${s}" -- ${t}`,{encoding:`utf-8`}).split(`
`).filter(e=>e),a),l=i.length>0?g.default.sync(c,{ignore:i}):c,u={};return l.forEach(e=>{u[e]=(0,_.execSync)(`git show ${s}:${e}`,{encoding:`utf-8`})}),{changedFiles:c,fileDiffs:u,repoName:r.split(`/`).pop().split(`.`)[0],commit:s}}exports.ACTIONS=n.ra,exports.ADD_MESSAGES_TO_PROMPT_RUN=n.Ga,exports.ADD_SILO_DISCOVERY_RESULTS=n.Wo,exports.ADMIN_DASH=t.t,exports.ADMIN_DASH_DATAPOINTS=t.n,exports.ADMIN_DASH_INTEGRATIONS=t.r,exports.AGENTS=n.Ui,exports.AGENT_FILES=n.Bi,exports.AGENT_FUNCTIONS=n.Li,exports.API_KEYS=n.qo,exports.APPROVE_PRIVACY_REQUEST=n.Eo,exports.ASSESSMENTS=n.vo,exports.ASSESSMENT_SECTION_FIELDS=n.yo,exports.ASSESSMENT_TEMPLATES=n.ho,exports.ASSUME_ROLE=n.qi,exports.ATTRIBUTES=n.Pa,exports.ATTRIBUTE_KEYS_REQUESTS=n.Bo,exports.ATTRIBUTE_VALUES=n.Fa,exports.ActionInput=i.t,exports.ActionItemCollectionInput=i.n,exports.ActionItemInput=i.r,exports.AgentFileInput=i.i,exports.AgentFunctionInput=i.a,exports.AgentInput=i.o,exports.ApiKeyInput=i.s,exports.AssessmentAction=n.Nn,exports.AssessmentAnswerOptionInput=i.c,exports.AssessmentDisplayLogicInput=i.l,exports.AssessmentInput=i.u,exports.AssessmentNestedRule=n.Pn,exports.AssessmentNestedRuleInput=i.d,exports.AssessmentResourceInput=i.f,exports.AssessmentRetentionScheduleInput=i.p,exports.AssessmentRiskLogic=n.jn,exports.AssessmentRule=n.Fn,exports.AssessmentRuleInput=i.m,exports.AssessmentRuleWithOperands=n.In,exports.AssessmentRuleWithoutOperands=n.Ln,exports.AssessmentSectionInput=i.h,exports.AssessmentSectionQuestionInput=i.g,exports.AssessmentTemplateInput=i._,exports.AttestedExtraIdentifiers=n.rt,exports.AttributeInput=i.v,exports.AttributePreview=i.y,exports.AttributeValueInput=i.b,exports.BLANK=n.pi,exports.BULK_APPLY=n.mi,exports.BULK_REQUEST_FILES=n.qa,exports.BUSINESS_ENTITIES=n.aa,exports.BusinessEntityInput=i.x,exports.CANCEL_PRIVACY_REQUEST=n.Do,exports.CAN_APPLY_IN_BULK=n.hi,exports.CATALOGS=n.ss,exports.CHANGE_REQUEST_DATA_SILO_STATUS=n.to,exports.CODE_PACKAGES=n.Po,exports.CONSENT_MANAGER_ANALYTICS_DATA=n.ca,exports.CONSENT_PARTITIONS=n.la,exports.COOKIES=n.ua,exports.CREATE_ACTION_ITEMS=n.ea,exports.CREATE_ACTION_ITEM_COLLECTION=n.Ha,exports.CREATE_AGENT=n.Wi,exports.CREATE_AGENT_FILE=n.Vi,exports.CREATE_AGENT_FUNCTION=n.Ri,exports.CREATE_API_KEY=n.Jo,exports.CREATE_ATTRIBUTE=n.Ia,exports.CREATE_ATTRIBUTE_VALUES=n.La,exports.CREATE_BUSINESS_ENTITY=n.oa,exports.CREATE_CODE_PACKAGE=n.Fo,exports.CREATE_CONSENT_EXPERIENCE=n.da,exports.CREATE_CONSENT_MANAGER=n.fa,exports.CREATE_CONSENT_PARTITION=n.pa,exports.CREATE_DATA_FLOWS=n.ma,exports.CREATE_DATA_SILOS=n.fs,exports.CREATE_DATA_SUBJECT=n.$o,exports.CREATE_DATA_SUB_CATEGORY=n.ji,exports.CREATE_ENRICHER=n.cs,exports.CREATE_IDENTIFIER=n.rs,exports.CREATE_PREFERENCE_ACCESS_TOKENS=n.Ko,exports.CREATE_PROCESSING_ACTIVITY=n.Ti,exports.CREATE_PROCESSING_PURPOSE_SUB_CATEGORY=n.Oi,exports.CREATE_PROMPT=n.ao,exports.CREATE_PROMPT_GROUP=n.oo,exports.CREATE_PROMPT_PARTIAL=n.so,exports.CREATE_REPOSITORY=n.Xo,exports.CREATE_SOFTWARE_DEVELOPMENT_KIT=n.Lo,exports.CREATE_TEAM=n.Qa,exports.CREATE_TEMPLATE=n.Ho,exports.CREATE_VENDOR=n.Pi,exports.CachedFileState=n.gi,exports.CachedRequestState=n._i,exports.CodePackageInput=i.S,exports.CodePackageSdk=i.C,exports.ColumnName=n.vi,exports.ConsentManageExperienceInput=i.w,exports.ConsentManagerInput=i.T,exports.ConsentManagerMetricBin=n.jr,exports.ConsentManagerServiceMetadata=i.E,exports.ConsentPreferenceTopic=i.D,exports.ConsentPreferenceTopicOptionValue=i.O,exports.ConsentPurpose=i.k,exports.CookieCsvInput=i.A,exports.CookieInput=i.j,expor
ts.CronIdentifier=l.o,exports.CronIdentifierPush=l.i,exports.DATAPOINT_EXPORT=n.vs,exports.DATA_FLOWS=n.ha,exports.DATA_POINTS=n.ys,exports.DATA_POINT_COUNT=n.bs,exports.DATA_SILOS=n.ps,exports.DATA_SILOS_ENRICHED=n.ms,exports.DATA_SILO_EXPORT=n.hs,exports.DATA_SUBJECTS=n.es,exports.DATA_SUB_CATEGORIES=n.Mi,exports.DAY_MS=n.f,exports.DEBUG=t.i,exports.DEFAULT_TRANSCEND_API=t.a,exports.DEFAULT_TRANSCEND_CONSENT_API=t.o,exports.DEFAULT_TRANSCEND_PULL_RESOURCES=n.gn,exports.DELETE_API_KEY=n.Yo,exports.DELETE_ATTRIBUTE_VALUE=n.Ra,exports.DEPLOYED_PRIVACY_CENTER_URL=n.Xi,exports.DEPLOY_CONSENT_MANAGER=n.ga,exports.DETERMINE_LOGIN_METHOD=n.Ji,exports.DataCategoryGuessInput=i.M,exports.DataCategoryInput=i.N,exports.DataCategoryPreviewInput=i.P,exports.DataCategoryRecommendationInput=i.F,exports.DataFlowCsvInput=i.I,exports.DataFlowInput=i.L,exports.DataSiloInput=i.R,exports.DataSubjectInput=i.z,exports.DatapointInput=i.B,exports.ENABLED_PLUGINS=n.Go,exports.ENRICHERS=n.ls,exports.ENTRY_COUNT=n._s,exports.EXPERIENCES=n._a,exports.EnrichPrivacyRequest=f.n,exports.EnricherInput=i.V,exports.FETCH_CONSENT_MANAGER=n.va,exports.FETCH_CONSENT_MANAGER_ID=n.ya,exports.FETCH_CONSENT_MANAGER_THEME=n.ba,exports.FETCH_PRIVACY_CENTER_ID=n.Zi,exports.FIVE_MIN_MS=n.p,exports.FieldInput=i.H,exports.GLOBAL_ACTION_ITEMS=n.ta,exports.GLOBAL_ACTION_ITEM_COLLECTIONS=n.Ua,exports.HOUR_MS=n.m,exports.IDENTIFIERS=n.is,exports.IDENTIFIER_BLOCK_LIST=n.yi,exports.IMPORT_ONE_TRUST_ASSESSMENT_FORMS=n.bo,exports.INITIALIZER=n.us,exports.IP_ADDRESS_REGEX=s.n,exports.IS_REQUIRED=n.bi,exports.IdentifierInput=i.U,exports.ImportOnetrustAssessmentsInput=i.W,exports.IntlMessage=n.$,exports.IntlMessageInput=i.G,exports.LARGE_LANGUAGE_MODELS=n.Ki,exports.LOGIN=n.Yi,exports.MESSAGES=n.wo,exports.NEW_IDENTIFIER_TYPES=n.as,exports.NONE=n.xi,exports.NOTIFY_ADDITIONAL_TIME=n.Oo,exports.ORGANIZATION=n.Vo,exports.OneTrustFileFormat=e.t,exports.OneTrustPullResource=e.n,exports.OneTrustPullSource=e.r,exports.OpenAIEnabledRoute=i.K,exports.OpenAIEnabledRoutes=i.q,exports.OpenAIIntegration=i.J,exports.OpenAIRouteName=e.i,exports.POLICIES=n.jo,exports.PREFERENCE_TOPICS=n.go,exports.PRIVACY_CENTER=n.Qi,exports.PROCESSING_ACTIVITIES=n.Ei,exports.PROCESSING_PURPOSE_SUB_CATEGORIES=n.ki,exports.PROMPTS=n.co,exports.PROMPTS_WITH_VARIABLES=n.lo,exports.PROMPT_GROUPS=n.uo,exports.PROMPT_PARTIALS=n.do,exports.PROMPT_THREADS=n.No,exports.PURPOSES=n._o,exports.ParsedAttributeInput=n.si,exports.PartitionInput=i.Y,exports.PathfinderPolicy=i.X,exports.PathfinderPolicyName=e.a,exports.PathfinderPolicyNameC=i.Z,exports.PathfinderPromptRunMetadata=i.Q,exports.PolicyInput=i.$,exports.PrivacyCenterInput=i.et,exports.PrivacyRequest=n.ur,exports.PrivacyRequestInput=n.it,exports.PrivacyRequestResponse=n.Y,exports.ProcessingActivityInput=i.tt,exports.ProcessingPurposeInput=i.nt,exports.ProcessingPurposePreviewInput=i.rt,exports.PromptAVendorEmailSettings=i.it,exports.PromptGroupInput=i.at,exports.PromptInput=i.ot,exports.PromptPartialInput=i.st,exports.PurposeMap=c.t,exports.REDUCED_REQUESTS_FOR_DATA_SILO_COUNT=n.no,exports.REMOVE_REQUEST_IDENTIFIERS=n.Xa,exports.REPORT_PROMPT_RUN=n.Ka,exports.REPOSITORIES=n.Zo,exports.REQUESTS=n.ko,exports.REQUEST_DATA_SILOS=n.ro,exports.REQUEST_ENRICHERS=n.xo,exports.REQUEST_FILES=n.Ja,exports.REQUEST_IDENTIFIERS=n.Za,exports.RETRYABLE_BATCH_STATUSES=t.s,exports.RETRY_REQUEST_DATA_SILO=n.io,exports.RETRY_REQUEST_ENRICHER=n.So,exports.RateCounter=n.C,exports.RegionInput=i.ct,exports.RepositoryInput=i.lt,exports.RequestFileMetadata=n.et,e
xports.RequestFileMetadataResponse=n.tt,exports.RequestIdentifiersResponse=n.pr,exports.RequestPurposeTrigger=n.dr,exports.RiskAssignmentInput=i.ut,exports.RiskLogicInput=i.dt,exports.SCOPES_BY_TITLE=t.c,exports.SCOPE_TITLES=t.l,exports.SET_RESOURCE_ATTRIBUTES=n.za,exports.SILO_DISCOVERY_RESULTS=n.Ci,exports.SKIP_REQUEST_ENRICHER=n.Co,exports.SOFTWARE_DEVELOPMENT_KITS=n.Ro,exports.SOMBRA_VERSION=n.wi,exports.SUB_DATA_POINTS=n.xs,exports.SUB_DATA_POINTS_COUNT=n.Ss,exports.SUB_DATA_POINTS_WITH_GUESSES=n.Cs,exports.SYNC_ATTRIBUTE_TYPES=n.Rr,exports.SiloDiscoveryResultInput=i.ft,exports.SoftwareDevelopmentKitInput=i.pt,exports.StoredApiKey=i.mt,exports.SuccessfulRequest=n.Si,exports.TEAMS=n.$a,exports.TEMPLATES=n.Uo,exports.TOGGLE_CONSENT_PRECEDENCE=n.xa,exports.TOGGLE_DATA_SUBJECT=n.ts,exports.TOGGLE_TELEMETRY_PARTITION_STRATEGY=n.Sa,exports.TOGGLE_UNKNOWN_COOKIE_POLICY=n.Ca,exports.TOGGLE_UNKNOWN_REQUEST_POLICY=n.wa,exports.TR_PULL_RESOURCE_SCOPE_MAP=t.u,exports.TR_PUSH_RESOURCE_SCOPE_MAP=t.d,exports.TR_YML_RESOURCE_TO_FIELD_NAME=t.f,exports.TeamInput=i.ht,exports.TemplateInput=i.gt,exports.TranscendInput=i._t,exports.TranscendPromptManager=C,exports.TranscendPullResource=e.o,exports.UPDATE_ACTION=n.ia,exports.UPDATE_ACTION_ITEMS=n.na,exports.UPDATE_ACTION_ITEM_COLLECTION=n.Wa,exports.UPDATE_AGENTS=n.Gi,exports.UPDATE_AGENT_FILES=n.Hi,exports.UPDATE_AGENT_FUNCTIONS=n.zi,exports.UPDATE_ATTRIBUTE=n.Ba,exports.UPDATE_ATTRIBUTE_VALUES=n.Va,exports.UPDATE_BUSINESS_ENTITIES=n.sa,exports.UPDATE_CODE_PACKAGES=n.Io,exports.UPDATE_CONSENT_EXPERIENCE=n.Ta,exports.UPDATE_CONSENT_MANAGER_DOMAINS=n.Ea,exports.UPDATE_CONSENT_MANAGER_PARTITION=n.Da,exports.UPDATE_CONSENT_MANAGER_THEME=n.Oa,exports.UPDATE_CONSENT_MANAGER_TO_LATEST=n.ka,exports.UPDATE_CONSENT_MANAGER_VERSION=n.Aa,exports.UPDATE_DATA_FLOWS=n.ja,exports.UPDATE_DATA_SILOS=n.gs,exports.UPDATE_DATA_SUBJECT=n.ns,exports.UPDATE_DATA_SUB_CATEGORIES=n.Ni,exports.UPDATE_ENRICHER=n.ds,exports.UPDATE_IDENTIFIER=n.os,exports.UPDATE_INTL_MESSAGES=n.To,exports.UPDATE_LOAD_OPTIONS=n.Ma,exports.UPDATE_OR_CREATE_COOKIES=n.Na,exports.UPDATE_OR_CREATE_DATA_POINT=n.ws,exports.UPDATE_POLICIES=n.Mo,exports.UPDATE_PRIVACY_CENTER=n.$i,exports.UPDATE_PRIVACY_REQUEST=n.Ao,exports.UPDATE_PROCESSING_ACTIVITIES=n.Di,exports.UPDATE_PROCESSING_PURPOSE_SUB_CATEGORIES=n.Ai,exports.UPDATE_PROMPTS=n.fo,exports.UPDATE_PROMPT_GROUPS=n.po,exports.UPDATE_PROMPT_PARTIALS=n.mo,exports.UPDATE_REPOSITORIES=n.Qo,exports.UPDATE_SOFTWARE_DEVELOPMENT_KITS=n.zo,exports.UPDATE_TEAM=n.eo,exports.UPDATE_VENDORS=n.Fi,exports.USERS=n.Ya,exports.USP_STRING_REGEX=c.n,exports.VARIABLE_PARAMETERS_NAME=a.t,exports.VARIABLE_PARAMETERS_REGEXP=a.n,exports.VENDORS=n.Ii,exports.VendorInput=i.vt,exports.WebhookHeader=i.yt,exports.addDaysUtc=n.h,exports.addMessagesToPromptRun=n.ri,exports.addMs=n.g,exports.appendCsvRowsOrdered=n.a,exports.appendCsvSync=n.o,exports.approvePrivacyRequests=n.J,exports.assumeRole=n.Vn,exports.buildAIIntegrationType=r.t,exports.buildEnabledRouteType=r.n,exports.buildTranscendGraphQLClient=n.ti,exports.buildTranscendGraphQLClientGeneric=n.ni,exports.buildXdiSyncEndpoint=s.r,exports.bulkRestartRequests=n.B,exports.bulkRetryEnrichers=n.R,exports.cancelPrivacyRequests=n.G,exports.clampPageSize=n._,exports.collectParquetFilesOrExit=n.x,exports.consentManagersToBusinessEntities=s.t,exports.convertToDataSubjectAllowlist=n.$n,exports.convertToDataSubjectBlockList=n.er,exports.createActionItemCollection=n.ln,exports.createActionItems=n.on,exports.createAgent=n.Zt,exports.createAgentFile=n
.rn,exports.createAgentFunction=n.en,exports.createApiKey=n.zn,exports.createBusinessEntity=n.qt,exports.createCodePackage=n.Rt,exports.createConsentToken=c.i,exports.createDataCategory=n.jt,exports.createDataFlows=n.Ot,exports.createPreferenceAccessTokens=n.Hr,exports.createProcessingPurpose=n._t,exports.createPrompt=n.mt,exports.createRegexForTag=b,exports.createRepository=n.Vt,exports.createSoftwareDevelopmentKit=n.Wt,exports.createSombraGotInstance=n.ei,exports.createTranscendConsentGotInstance=n.$r,exports.createVendor=n.dt,exports.defineTranscendPrompts=x,exports.deleteApiKey=n.Bn,exports.deployConsentManager=n.Zr,Object.defineProperty(exports,`description`,{enumerable:!0,get:function(){return t.m}}),exports.domainToHost=s.i,exports.downloadPrivacyRequestFiles=n.Z,exports.enrichPrivacyRequest=f.r,exports.ensureAllDataSubjectsExist=n.tr,exports.extractClientError=n.U,exports.extractErrorMessage=n.O,exports.fetchActiveSiloDiscoPlugin=n.Xr,exports.fetchAllActionItems=n.Yr,exports.fetchAllActions=n.Jr,exports.fetchAllAgentFiles=n.qr,exports.fetchAllAgentFunctions=n.Kr,exports.fetchAllAgents=n.Gr,exports.fetchAllApiKeys=n.ar,exports.fetchAllAssessmentTemplates=n.Ur,exports.fetchAllAssessments=n.Wr,exports.fetchAllAttributeValues=n.zr,exports.fetchAllAttributes=n.Br,exports.fetchAllBusinessEntities=n.Lr,exports.fetchAllCatalogs=n.rr,exports.fetchAllCookies=n.Ar,exports.fetchAllDataCategories=n.kr,exports.fetchAllDataFlows=n.Or,exports.fetchAllDataPoints=n.Tn,exports.fetchAllDataSilos=n.En,exports.fetchAllDataSubjects=n.nr,exports.fetchAllEnrichers=n.Cn,exports.fetchAllIdentifiers=n.n,exports.fetchAllMessages=n.Dr,exports.fetchAllPolicies=n.wr,exports.fetchAllPreferenceTopics=n.Sr,exports.fetchAllPrivacyCenters=n.xr,exports.fetchAllProcessingActivities=n.br,exports.fetchAllProcessingPurposes=n.yr,exports.fetchAllPromptGroups=n.Qn,exports.fetchAllPromptPartials=n.Zn,exports.fetchAllPromptThreads=n.Jn,exports.fetchAllPrompts=n.Yn,exports.fetchAllPurposes=n.vr,exports.fetchAllPurposesAndPreferences=n._r,exports.fetchAllRequestAttributeKeys=n.Vr,exports.fetchAllRequestEnrichers=n.gr,exports.fetchAllRequestIdentifierMetadata=n.hr,exports.fetchAllRequestIdentifiers=n.mr,exports.fetchAllRequests=n.fr,exports.fetchAllSiloDiscoveryResults=n.vn,exports.fetchAllSubDataPoints=n.Dn,exports.fetchAllTeams=n.lr,exports.fetchAllTemplates=n.xn,exports.fetchAllUsers=n.cr,exports.fetchAllVendors=n.sr,exports.fetchAndIndexCatalogs=n.ir,exports.fetchApiKeys=n.or,exports.fetchConsentManager=n.Mr,exports.fetchConsentManagerAnalyticsData=n.Nr,exports.fetchConsentManagerExperiences=n.Pr,exports.fetchConsentManagerId=n.Fr,exports.fetchConsentManagerTheme=n.Ir,exports.fetchEnrichedDataSilos=n.On,exports.fetchIdentifiersAndCreateMissing=n.r,exports.fetchPartitions=n.yn,exports.fetchPrivacyCenterId=n.Tr,exports.fetchPrivacyCenterUrl=n.Er,exports.fetchPromptsWithVariables=n.Xn,exports.fetchRequestDataSilo=n.Gn,exports.fetchRequestDataSiloActiveCount=n.Wn,exports.fetchRequestDataSilos=n.Kn,exports.fetchRequestDataSilosCount=n.qn,exports.fetchRequestFilesForRequest=n.Un,exports.filterNullishValuesFromObject=T,exports.filterRows=n.ii,exports.formatAttributeValues=n.Cr,exports.fuzzyMatchColumns=n.di,exports.fuzzySearch=n.fi,exports.generateCrossAccountApiKeys=u.i,exports.getErrorStatus=n.k,exports.getFileMetadataForPrivacyRequests=n.nt,exports.getGitFilesThatChanged=E,exports.getUniqueValuesForColumn=n.ai,exports.initCsvFile=n.s,exports.inquirerAutoComplete=n.j,exports.inquirerConfirmBoolean=n.M,exports.inquirerConfirmText=n.
N,exports.limitRecords=n.w,exports.listDirectories=u.t,exports.listFiles=u.n,exports.loginUser=n.Hn,exports.makeGraphQLRequest=n.i,exports.mapColumnsToAttributes=n.W,exports.mapColumnsToIdentifiers=n.K,exports.mapCsvColumnsToApi=n.ct,exports.mapCsvRowsToRequestInputs=n.at,exports.mapEnumValues=n.ui,exports.mapRequestEnumValues=n.st,exports.markCronIdentifierCompleted=l.a,exports.markRequestDataSiloIdsCompleted=l.n,exports.markSilentPrivacyRequests=n.lt,exports.mergeTranscendInputs=o.t,exports.name=t.p,exports.normalizeIdentifierValue=n.ot,exports.notifyPrivacyRequestsAdditionalTime=n.q,exports.parquetToCsvOneFile=n.b,exports.parseAssessmentDisplayLogic=n.Rn,exports.parseAssessmentRiskLogic=n.Mn,exports.parseAttributesFromString=n.ci,exports.parseFilePath=n.c,exports.parseVariablesFromString=n.A,exports.pullAllDatapoints=d.n,exports.pullChunkedCustomSiloOutstandingIdentifiers=l.t,exports.pullConsentManagerMetrics=s.a,exports.pullCronPageOfIdentifiers=l.s,exports.pullManualEnrichmentIdentifiersToCsv=f.i,exports.pullPrivacyRequests=n.I,exports.pullTranscendConfiguration=n._n,exports.pullUnstructuredSubDataPointRecommendations=d.t,exports.pushCronIdentifiersFromCsv=l.r,exports.pushManualEnrichmentIdentifiersFromCsv=f.t,exports.readCsv=n.oi,exports.readSafe=n.S,exports.readTranscendYaml=a.r,exports.removeLinks=w,exports.removeUnverifiedRequestIdentifiers=n.P,exports.replaceVariablesInYaml=a.i,exports.reportPromptRun=n.hn,exports.restartPrivacyRequest=n.V,exports.retryRequestDataSilos=n.L,exports.retryRequestEnricher=n.mn,exports.retrySamePromise=n.T,exports.setResourceAttributes=n.pn,exports.skipPreflightJobs=n.z,exports.skipRequestDataSilos=n.F,exports.sleepPromise=n.D,exports.splitCsvToList=n.li,exports.splitInHalf=n.E,exports.startOfHour=n.v,exports.startOfUtcDay=n.y,exports.streamPrivacyRequestFiles=n.Q,exports.submitPrivacyRequest=n.X,exports.syncAction=n.fn,exports.syncActionItemCollections=n.un,exports.syncActionItems=n.sn,exports.syncAgentFiles=n.in,exports.syncAgentFunctions=n.tn,exports.syncAgents=n.Qt,exports.syncAttribute=n.Xt,exports.syncBusinessEntities=n.Jt,exports.syncCodePackages=n.zt,exports.syncConfigurationToTranscend=n.t,exports.syncConsentManager=n.It,exports.syncConsentManagerExperiences=n.Lt,exports.syncCookies=n.Pt,exports.syncDataCategories=n.Mt,exports.syncDataFlows=n.kt,exports.syncDataSiloDependencies=n.kn,exports.syncDataSilos=n.An,exports.syncDataSubject=n.Dt,exports.syncEnricher=n.wn,exports.syncIdentifier=n.Et,exports.syncIntlMessages=n.wt,exports.syncPartitions=n.bn,exports.syncPolicies=n.St,exports.syncPrivacyCenter=n.xt,exports.syncProcessingActivities=n.bt,exports.syncProcessingPurposes=n.vt,exports.syncPrompts=n.ht,exports.syncRepositories=n.Ht,exports.syncSoftwareDevelopmentKits=n.Gt,exports.syncTemplate=n.Sn,exports.syncVendors=n.ft,exports.updateActionItem=n.cn,exports.updateActionItemCollection=n.dn,exports.updateAgentFiles=n.an,exports.updateAgentFunctions=n.nn,exports.updateAgents=n.$t,exports.updateBusinessEntities=n.Yt,exports.updateCodePackages=n.Bt,exports.updateConsentManagerToLatest=n.Qr,exports.updateConsentManagerVersionToLatest=s.c,exports.updateDataCategories=n.Nt,exports.updateDataFlows=n.At,exports.updateIntlMessages=n.Tt,exports.updateOrCreateCookies=n.Ft,exports.updatePolicies=n.Ct,exports.updateProcessingPurposes=n.yt,exports.updatePrompts=n.gt,exports.updateRepositories=n.Ut,exports.updateSoftwareDevelopmentKits=n.Kt,exports.updateVendors=n.pt,exports.uploadConsents=c.r,exports.uploadCookiesFromCsv=s.o,exports.uploadDataFlowsFromCsv=s.s
,exports.uploadPrivacyRequestsFromCsv=n.H,exports.uploadSiloDiscoveryResults=n.ut,exports.validateTranscendAuth=u.r,Object.defineProperty(exports,`version`,{enumerable:!0,get:function(){return t.h}}),exports.writeCsv=n.l,exports.writeCsvSync=n.u,exports.writeLargeCsv=n.d,exports.writeTranscendYaml=a.a;
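// ---------------------------------------------------------------------------
// Editor's note: a minimal, hypothetical sketch of consuming the
// `TranscendPromptManager` class exported above (the minified `C`), together
// with the identity helper `defineTranscendPrompts` (the minified `x`). The
// prompt title, codecs, and tag below are illustrative assumptions; `io-ts`
// is assumed only because `decodeCodec` from @transcend-io/type-utils is
// applied to `paramCodec` and `outputCodec`. A sketch, not package docs.
const t = require('io-ts');
const { TranscendPromptManager, defineTranscendPrompts } = require('@transcend-io/cli');

const prompts = defineTranscendPrompts({
  // Keyed by a local name; resolved in Transcend by `title` (or `id`).
  classify: {
    title: 'Classify Data Silo', // hypothetical prompt title
    paramCodec: t.type({ siloName: t.string }),
    outputCodec: t.type({ category: t.string }),
    extractFromTag: 'json', // optional: read output from <json>...</json>
  },
});

async function main() {
  const manager = new TranscendPromptManager({
    prompts,
    transcendApiKey: process.env.TRANSCEND_API_KEY,
    requireApproval: true, // prompts not in Approved status throw on compile
    cacheDuration: 60_000, // ms before prompt metadata is re-fetched
  });
  // Validates params against `paramCodec`, then compiles the Handlebars
  // template with { currentDate, ...variables, ...params }.
  const content = await manager.compilePrompt('classify', { siloName: 'Stripe' });
  // Extracts the <json> tag if configured, JSON.parses when possible, and
  // validates the result against `outputCodec`.
  const parsed = manager.parseAiResponse('classify', '<json>{"category":"billing"}</json>');
  console.log(content, parsed);
}
main().catch(console.error);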
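// ---------------------------------------------------------------------------
// Editor's note: a short sketch of three small utilities exported above,
// mirroring their minified definitions (`b`, `w`, and `T`); the sample values
// are illustrative only.
const {
  createRegexForTag,
  removeLinks,
  filterNullishValuesFromObject,
} = require('@transcend-io/cli');

// Non-greedy regex over the inner content of an XML-style tag.
const [, inner] = createRegexForTag('json').exec('<json>{"a":1}</json>') || [];
// inner === '{"a":1}'

// URLs are replaced with the literal placeholder `<link-omitted>`.
removeLinks('see https://example.com/docs for details');
// => 'see <link-omitted> for details'

// Entries that are null/undefined, '', [], or {} are dropped.
filterNullishValuesFromObject({ a: 1, b: null, c: '', d: [], e: {} });
// => { a: 1 }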
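// ---------------------------------------------------------------------------
// Editor's note: `getGitFilesThatChanged` (the minified `E`) shells out to
// git, so it only works inside a clone of the target repository. The remote
// URL and glob below are hypothetical.
const { getGitFilesThatChanged } = require('@transcend-io/cli');

const { changedFiles, fileDiffs, repoName, commit } = getGitFilesThatChanged({
  baseBranch: 'main',
  rootDirectory: '.',
  githubRepo: 'git@github.com:acme/example.git', // hypothetical remote
  excludedGlob: ['**/*.test.ts'], // passed to fast-glob as `ignore`
});
// `changedFiles` lists paths changed between the base branch and HEAD;
// `fileDiffs` maps the non-excluded paths to their full contents at HEAD
// (via `git show <commit>:<path>`).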