@parqui/core 1.1.1 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +69 -2
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +21 -1
- package/dist/index.d.ts +21 -1
- package/dist/index.js +62 -1
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
|
@@ -20,20 +20,26 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
|
|
|
20
20
|
// src/index.ts
|
|
21
21
|
var index_exports = {};
|
|
22
22
|
__export(index_exports, {
|
|
23
|
+
asyncSort: () => asyncSort,
|
|
23
24
|
buildFilterIndex: () => buildFilterIndex,
|
|
24
25
|
buildGroups: () => buildGroups,
|
|
25
26
|
buildSortIndex: () => buildSortIndex,
|
|
26
27
|
collectUniqueValues: () => collectUniqueValues,
|
|
28
|
+
compareValues: () => compareValues,
|
|
27
29
|
createEmptyPipeline: () => createEmptyPipeline,
|
|
28
30
|
fetchParquetFromUrl: () => fetchParquetFromUrl,
|
|
29
31
|
fileToArrayBuffer: () => fileToArrayBuffer,
|
|
32
|
+
formatGroupKey: () => formatGroupKey,
|
|
33
|
+
matchesFilter: () => matchesFilter,
|
|
30
34
|
readColumnValues: () => readColumnValues,
|
|
31
35
|
readParquetData: () => readParquetData,
|
|
32
36
|
readParquetMetadata: () => readParquetMetadata,
|
|
33
37
|
readRowsByIndices: () => readRowsByIndices,
|
|
38
|
+
runWithConcurrency: () => runWithConcurrency,
|
|
34
39
|
sourceFromBuffer: () => sourceFromBuffer,
|
|
35
40
|
sourceFromFile: () => sourceFromFile,
|
|
36
|
-
sourceFromUrl: () => sourceFromUrl
|
|
41
|
+
sourceFromUrl: () => sourceFromUrl,
|
|
42
|
+
yieldToUI: () => yieldToUI
|
|
37
43
|
});
|
|
38
44
|
module.exports = __toCommonJS(index_exports);
|
|
39
45
|
|
|
@@ -438,21 +444,82 @@ function formatGroupKey(value) {
|
|
|
438
444
|
if (value instanceof Date) return value.toISOString();
|
|
439
445
|
return String(value);
|
|
440
446
|
}
|
|
447
|
+
|
|
448
|
+
// src/utils.ts
|
|
449
|
+
// Defer continuation to a macrotask (setTimeout 0) so the event loop —
// and therefore the UI — gets a chance to run between work chunks.
function yieldToUI() {
  return new Promise(function (resolve) {
    setTimeout(resolve, 0);
  });
}
|
|
452
|
+
/**
 * Cooperatively sorts `arr`: sorts 50k-element runs first, then merges
 * them pairwise, yielding to the event loop (via yieldToUI) between
 * steps so the UI stays responsive during large sorts.
 *
 * If `isCancelled()` reports true at any checkpoint, the original
 * `arr` is returned as-is (possibly unsorted). The input array itself
 * is never mutated — runs are copies made with slice().
 */
async function asyncSort(arr, compareFn, isCancelled) {
  const RUN_SIZE = 5e4;
  let runs = [];
  let offset = 0;
  while (offset < arr.length) {
    if (isCancelled()) return arr;
    const run = arr.slice(offset, offset + RUN_SIZE);
    run.sort(compareFn);
    runs.push(run);
    offset += RUN_SIZE;
    await yieldToUI();
  }
  // Pairwise merge rounds: each round halves the number of runs.
  while (runs.length > 1) {
    if (isCancelled()) return arr;
    const nextRound = [];
    for (let p = 0; p + 1 < runs.length; p += 2) {
      nextRound.push(mergeSorted(runs[p], runs[p + 1], compareFn));
    }
    // An odd run count leaves an unpaired tail; carry it forward.
    if (runs.length % 2 === 1) nextRound.push(runs[runs.length - 1]);
    runs = nextRound;
    await yieldToUI();
  }
  return runs[0] ?? [];
}
|
|
478
|
+
/**
 * Stable two-way merge of the pre-sorted arrays `a` and `b` under
 * `compareFn`. On ties (compareFn === 0) the element from `a` wins,
 * which preserves stability across merge rounds.
 */
function mergeSorted(a, b, compareFn) {
  const out = new Array(a.length + b.length);
  let ai = 0;
  let bi = 0;
  let w = 0;
  while (ai < a.length && bi < b.length) {
    out[w++] = compareFn(a[ai], b[bi]) <= 0 ? a[ai++] : b[bi++];
  }
  // Drain whichever side still has elements left.
  while (ai < a.length) out[w++] = a[ai++];
  while (bi < b.length) out[w++] = b[bi++];
  return out;
}
|
|
489
|
+
/**
 * Runs async `fn` over every element of `items`, keeping at most
 * `maxConcurrency` invocations in flight at once. Resolves once all
 * items have been processed; rejects with the first error thrown by
 * `fn` (via Promise.all fail-fast semantics).
 *
 * Fix: the original computed Math.min(maxConcurrency, items.length)
 * directly, so a caller passing 0, a negative number, or NaN spawned
 * zero workers and the call resolved without processing any item.
 * The worker count is now clamped to at least 1, so such inputs
 * degrade to sequential processing instead of a silent no-op.
 * Behavior for valid positive integer concurrency is unchanged.
 */
async function runWithConcurrency(items, maxConcurrency, fn) {
  if (items.length === 0) return;
  // Clamp to a sane worker count: >= 1, never more workers than items.
  const limit = Number.isFinite(maxConcurrency)
    ? Math.max(1, Math.floor(maxConcurrency))
    : 1;
  const workerCount = Math.min(limit, items.length);
  let index = 0;
  const workers = Array.from({ length: workerCount }, async () => {
    // Reading and incrementing `index` happens synchronously between
    // awaits, so workers never claim the same item (single-threaded JS).
    while (index < items.length) {
      const i = index++;
      await fn(items[i]);
    }
  });
  await Promise.all(workers);
}
|
|
441
502
|
// Annotate the CommonJS export names for ESM import in node:
|
|
442
503
|
0 && (module.exports = {
|
|
504
|
+
asyncSort,
|
|
443
505
|
buildFilterIndex,
|
|
444
506
|
buildGroups,
|
|
445
507
|
buildSortIndex,
|
|
446
508
|
collectUniqueValues,
|
|
509
|
+
compareValues,
|
|
447
510
|
createEmptyPipeline,
|
|
448
511
|
fetchParquetFromUrl,
|
|
449
512
|
fileToArrayBuffer,
|
|
513
|
+
formatGroupKey,
|
|
514
|
+
matchesFilter,
|
|
450
515
|
readColumnValues,
|
|
451
516
|
readParquetData,
|
|
452
517
|
readParquetMetadata,
|
|
453
518
|
readRowsByIndices,
|
|
519
|
+
runWithConcurrency,
|
|
454
520
|
sourceFromBuffer,
|
|
455
521
|
sourceFromFile,
|
|
456
|
-
sourceFromUrl
|
|
522
|
+
sourceFromUrl,
|
|
523
|
+
yieldToUI
|
|
457
524
|
});
|
|
458
525
|
//# sourceMappingURL=index.cjs.map
|
package/dist/index.cjs.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/index.ts","../src/reader.ts","../src/pipeline.ts"],"sourcesContent":["export {\n readParquetMetadata,\n readParquetData,\n readColumnValues,\n readRowsByIndices,\n sourceFromFile,\n sourceFromBuffer,\n sourceFromUrl,\n // Legacy\n fileToArrayBuffer,\n fetchParquetFromUrl,\n} from \"./reader.js\";\n\nexport {\n buildSortIndex,\n buildFilterIndex,\n buildGroups,\n collectUniqueValues,\n createEmptyPipeline,\n} from \"./pipeline.js\";\n\nexport type {\n ParquetColumn,\n ParquetMetadata,\n ParquetRow,\n ParquetData,\n ReadOptions,\n ParquetSource,\n} from \"./types.js\";\n\nexport type {\n SortDef,\n SortDirection,\n FilterDef,\n FilterOperator,\n GroupDef,\n GroupNode,\n PipelineState,\n} from \"./pipeline.js\";\n","import {\n parquetMetadataAsync,\n parquetRead,\n asyncBufferFromUrl,\n} from \"hyparquet\";\nimport type { AsyncBuffer, FileMetaData } from \"hyparquet\";\nimport type {\n ParquetColumn,\n ParquetData,\n ParquetMetadata,\n ParquetRow,\n ParquetSource,\n ReadOptions,\n} from \"./types.js\";\n\nconst PARQUI_DEBUG_NS = \"[parqui/core]\";\n\nfunction coreDebugEnabled(): boolean {\n const g = globalThis as { __PARQUI_DEBUG?: boolean } | undefined;\n return g?.__PARQUI_DEBUG ?? 
false;\n}\n\nfunction coreLog(message: string, details?: unknown) {\n if (!coreDebugEnabled()) return;\n if (details !== undefined) {\n console.log(`${PARQUI_DEBUG_NS} ${message}`, details);\n } else {\n console.log(`${PARQUI_DEBUG_NS} ${message}`);\n }\n}\n\nfunction coreError(message: string, error: unknown) {\n if (!coreDebugEnabled()) return;\n console.error(`${PARQUI_DEBUG_NS} ${message}`, error);\n}\n\ntype SliceablePromise = Promise<ArrayBuffer> & {\n slice: (start: number, end?: number) => Promise<ArrayBuffer>;\n};\n\nfunction makeSliceablePromise(input: Promise<ArrayBuffer>): SliceablePromise {\n const promise = Promise.resolve(input) as SliceablePromise;\n promise.slice = (start: number, end?: number) =>\n promise.then((buffer) => buffer.slice(start, end));\n return promise;\n}\n\nconst normalizedSourceCache = new WeakMap<ParquetSource, ParquetSource>();\n\nfunction normalizeSource(source: ParquetSource): ParquetSource {\n const cached = normalizedSourceCache.get(source);\n if (cached) return cached;\n\n const wrapped: ParquetSource = {\n byteLength: source.byteLength,\n slice(start: number, end?: number): Promise<ArrayBuffer> {\n const p = makeSliceablePromise(\n Promise.resolve(source.slice(start, end)),\n );\n coreLog(\"normalizeSource:slice\", {\n start,\n end: end ?? 
source.byteLength,\n hasSliceMethod: typeof (p as { slice?: unknown }).slice === \"function\",\n });\n return p;\n },\n };\n\n normalizedSourceCache.set(source, wrapped);\n return wrapped;\n}\n\n// ── Source creation ──\n\n/**\n * Create a ParquetSource from a browser File object.\n * Does NOT load the file into memory — reads slices on demand.\n * Works with files of any size (including multi-GB).\n */\nexport function sourceFromFile(file: File): ParquetSource {\n coreLog(\"sourceFromFile:create\", { size: file.size });\n return {\n byteLength: file.size,\n slice(start: number, end?: number): Promise<ArrayBuffer> {\n const blob = file.slice(start, end);\n // hyparquet may branch on `instanceof Promise` and then call `.slice()` on non-promises.\n // This object works in both branches: it's awaitable AND has a `.slice()` method.\n const p = makeSliceablePromise(blob.arrayBuffer());\n coreLog(\"sourceFromFile:slice\", {\n start,\n end: end ?? file.size,\n hasSliceMethod: typeof (p as { slice?: unknown }).slice === \"function\",\n });\n return p;\n },\n };\n}\n\n/**\n * Create a ParquetSource from an ArrayBuffer (file already in memory).\n */\nexport function sourceFromBuffer(buffer: ArrayBuffer): ParquetSource {\n coreLog(\"sourceFromBuffer:create\", { byteLength: buffer.byteLength });\n return {\n byteLength: buffer.byteLength,\n slice(start: number, end?: number): Promise<ArrayBuffer> {\n const p = makeSliceablePromise(\n Promise.resolve(buffer.slice(start, end)),\n );\n coreLog(\"sourceFromBuffer:slice\", {\n start,\n end: end ?? 
buffer.byteLength,\n hasSliceMethod: typeof (p as { slice?: unknown }).slice === \"function\",\n });\n return p;\n },\n };\n}\n\n/**\n * Create a ParquetSource from a URL using HTTP Range requests.\n */\nexport async function sourceFromUrl(url: string): Promise<ParquetSource> {\n coreLog(\"sourceFromUrl:start\", { url });\n const asyncBuf = await asyncBufferFromUrl({ url });\n const wrapped: ParquetSource = {\n byteLength: asyncBuf.byteLength,\n slice(start: number, end?: number): Promise<ArrayBuffer> {\n const p = makeSliceablePromise(\n Promise.resolve(asyncBuf.slice(start, end)),\n );\n coreLog(\"sourceFromUrl:slice\", {\n start,\n end: end ?? asyncBuf.byteLength,\n hasSliceMethod: typeof (p as { slice?: unknown }).slice === \"function\",\n });\n return p;\n },\n };\n coreLog(\"sourceFromUrl:ready\", { byteLength: wrapped.byteLength });\n return wrapped;\n}\n\n// ── Internal caching ──\n\nconst asyncBufferCache = new WeakMap<ParquetSource, AsyncBuffer>();\nconst rawMetadataCache = new WeakMap<ParquetSource, Promise<FileMetaData>>();\n\nfunction getAsyncBuffer(source: ParquetSource): AsyncBuffer {\n const safeSource = normalizeSource(source);\n let buf = asyncBufferCache.get(safeSource);\n if (!buf) {\n // Use the direct source object. cachedAsyncBuffer can normalize slice()\n // into plain Promises, which breaks in some zone.js runtimes.\n buf = safeSource as AsyncBuffer;\n asyncBufferCache.set(safeSource, buf);\n }\n return buf;\n}\n\nfunction getRawMetadata(source: ParquetSource): Promise<FileMetaData> {\n const safeSource = normalizeSource(source);\n let promise = rawMetadataCache.get(safeSource);\n if (!promise) {\n const asyncBuf = getAsyncBuffer(safeSource);\n promise = parquetMetadataAsync(asyncBuf);\n rawMetadataCache.set(safeSource, promise);\n }\n return promise;\n}\n\nfunction toParquetMetadata(raw: FileMetaData): ParquetMetadata {\n const columns: ParquetColumn[] = raw.schema.slice(1).map((col) => ({\n name: col.name,\n type: col.type ?? 
\"UNKNOWN\",\n nullable: col.repetition_type !== \"REQUIRED\",\n }));\n\n // Compute row group boundaries: [0, rg0_rows, rg0+rg1_rows, ..., totalRows]\n const rowGroupOffsets: number[] = [0];\n for (const rg of raw.row_groups) {\n rowGroupOffsets.push(rowGroupOffsets[rowGroupOffsets.length - 1] + Number(rg.num_rows));\n }\n\n return {\n rowCount: Number(raw.num_rows),\n columns,\n rowGroups: raw.row_groups.length,\n rowGroupOffsets,\n createdBy: raw.created_by ?? undefined,\n };\n}\n\n// ── Reading ──\n\n/**\n * Read only the metadata from a parquet source.\n * Reads a small amount from the end of the file (footer).\n */\nexport async function readParquetMetadata(\n source: ParquetSource,\n): Promise<ParquetMetadata> {\n const raw = await getRawMetadata(source);\n return toParquetMetadata(raw);\n}\n\n/**\n * Read data (multiple columns, row range) from a parquet source.\n * Only reads the requested row range — does NOT load the entire file.\n */\nexport async function readParquetData(\n source: ParquetSource,\n options: ReadOptions = {},\n): Promise<ParquetData> {\n coreLog(\"readParquetData:start\", {\n offset: options.offset ?? 0,\n limit: options.limit ?? null,\n columns: options.columns?.length ?? \"all\",\n });\n const asyncBuf = getAsyncBuffer(source);\n const rawMetadata = await getRawMetadata(source);\n const metadata = toParquetMetadata(rawMetadata);\n\n const rows: ParquetRow[] = [];\n\n try {\n await parquetRead({\n file: asyncBuf,\n metadata: rawMetadata,\n columns: options.columns,\n rowStart: options.offset ?? 0,\n rowEnd:\n options.limit !== undefined\n ? (options.offset ?? 0) + options.limit\n : undefined,\n onComplete: (data: unknown[][]) => {\n const columnNames =\n options.columns ?? 
metadata.columns.map((c) => c.name);\n for (const row of data) {\n const obj: ParquetRow = {};\n columnNames.forEach((name, i) => {\n obj[name] = row[i];\n });\n rows.push(obj);\n }\n },\n });\n } catch (error) {\n coreError(\"readParquetData:error\", error);\n throw error;\n }\n\n return { metadata, rows };\n}\n\n/**\n * Read ALL values of specific columns from the parquet source.\n * Used for building sort indices, group keys, and filter unique values.\n * Only reads the requested columns — not all data.\n *\n * Reads row-group by row-group with yields between each to avoid\n * blocking the main thread. This keeps the UI responsive (scroll, etc.)\n * during long reads on large files.\n *\n * @param isCancelled — optional callback checked between row groups.\n * If it returns true, reading stops early and partial results are returned.\n * This allows quick cancellation when the user changes sort/filter mid-read.\n *\n * Returns a Map of column name → array of values (one per row).\n */\nexport async function readColumnValues(\n source: ParquetSource,\n columnNames: string[],\n isCancelled?: () => boolean,\n): Promise<Map<string, unknown[]>> {\n const asyncBuf = getAsyncBuffer(source);\n const rawMetadata = await getRawMetadata(source);\n\n const result = new Map<string, unknown[]>();\n for (const name of columnNames) {\n result.set(name, []);\n }\n\n // Read row-group by row-group to keep the main thread responsive.\n // Check isCancelled between each row group for early abort.\n let rowOffset = 0;\n for (const rg of rawMetadata.row_groups) {\n // Early exit if this compute was superseded\n if (isCancelled?.()) return result;\n\n const rgRows = Number(rg.num_rows);\n\n await parquetRead({\n file: asyncBuf,\n metadata: rawMetadata,\n columns: columnNames,\n rowStart: rowOffset,\n rowEnd: rowOffset + rgRows,\n onComplete: (data: unknown[][]) => {\n for (let rowIdx = 0; rowIdx < data.length; rowIdx++) {\n const row = data[rowIdx];\n for (let colIdx = 0; colIdx < 
columnNames.length; colIdx++) {\n result.get(columnNames[colIdx])!.push(row[colIdx]);\n }\n }\n },\n });\n\n rowOffset += rgRows;\n\n // Yield to browser between row groups so scroll events can fire\n await new Promise<void>((resolve) => setTimeout(resolve, 0));\n }\n\n return result;\n}\n\n/**\n * Read values of specific columns for specific row indices.\n * Useful for fetching display data for sorted/filtered views.\n */\nexport async function readRowsByIndices(\n source: ParquetSource,\n rowIndices: number[],\n columns?: string[],\n): Promise<ParquetRow[]> {\n if (rowIndices.length === 0) return [];\n\n const asyncBuf = getAsyncBuffer(source);\n const rawMetadata = await getRawMetadata(source);\n const metadata = toParquetMetadata(rawMetadata);\n const columnNames = columns ?? metadata.columns.map((c) => c.name);\n\n // Sort indices to read sequentially, then re-order\n const sorted = rowIndices.map((idx, pos) => ({ idx, pos }));\n sorted.sort((a, b) => a.idx - b.idx);\n\n // Find contiguous ranges to batch reads\n const ranges: { start: number; end: number; positions: number[] }[] = [];\n let rangeStart = sorted[0].idx;\n let rangeEnd = sorted[0].idx + 1;\n let positions = [sorted[0].pos];\n\n for (let i = 1; i < sorted.length; i++) {\n if (sorted[i].idx <= rangeEnd + 50) {\n // Allow small gaps to merge ranges\n rangeEnd = sorted[i].idx + 1;\n positions.push(sorted[i].pos);\n } else {\n ranges.push({ start: rangeStart, end: rangeEnd, positions });\n rangeStart = sorted[i].idx;\n rangeEnd = sorted[i].idx + 1;\n positions = [sorted[i].pos];\n }\n }\n ranges.push({ start: rangeStart, end: rangeEnd, positions });\n\n const result: ParquetRow[] = new Array(rowIndices.length);\n\n for (const range of ranges) {\n const rangeRows: ParquetRow[] = [];\n\n await parquetRead({\n file: asyncBuf,\n metadata: rawMetadata,\n columns: columnNames,\n rowStart: range.start,\n rowEnd: range.end,\n onComplete: (data: unknown[][]) => {\n for (const row of data) {\n const obj: 
ParquetRow = {};\n columnNames.forEach((name, i) => {\n obj[name] = row[i];\n });\n rangeRows.push(obj);\n }\n },\n });\n\n // Map range rows back to the correct positions\n let sortedPosIdx = 0;\n for (const s of sorted) {\n if (s.idx >= range.start && s.idx < range.end) {\n const localIdx = s.idx - range.start;\n if (localIdx < rangeRows.length) {\n result[s.pos] = rangeRows[localIdx];\n }\n sortedPosIdx++;\n }\n }\n }\n\n return result;\n}\n\n// ── Legacy helpers (kept for backwards compatibility) ──\n\n/** @deprecated Use sourceFromFile() instead */\nexport async function fileToArrayBuffer(file: File): Promise<ArrayBuffer> {\n return file.arrayBuffer();\n}\n\n/** @deprecated Use sourceFromUrl() instead */\nexport async function fetchParquetFromUrl(url: string): Promise<ArrayBuffer> {\n const response = await fetch(url);\n if (!response.ok) {\n throw new Error(\n `Failed to fetch parquet file: ${response.status} ${response.statusText}`,\n );\n }\n return response.arrayBuffer();\n}\n","/**\n * Data pipeline: sorting, filtering, grouping.\n * Operates on column-level indices — never loads all columns at once.\n */\n\n// ── Sort ──\n\nexport type SortDirection = \"asc\" | \"desc\";\n\nexport interface SortDef {\n column: string;\n direction: SortDirection;\n}\n\n// ── Filter ──\n\nexport type FilterOperator =\n | \"eq\"\n | \"neq\"\n | \"gt\"\n | \"gte\"\n | \"lt\"\n | \"lte\"\n | \"contains\"\n | \"not_contains\"\n | \"is_null\"\n | \"is_not_null\"\n | \"in\";\n\nexport interface FilterDef {\n column: string;\n operator: FilterOperator;\n /** The value(s) to compare against. For \"in\" — an array; for \"is_null\"/\"is_not_null\" — ignored. 
*/\n value?: unknown;\n}\n\n// ── Group ──\n\nexport interface GroupDef {\n column: string;\n}\n\nexport interface GroupNode {\n /** The value of the group key */\n key: unknown;\n /** Display label for this group */\n label: string;\n /** Number of rows in this group */\n count: number;\n /** Row indices belonging to this group (in the filtered/sorted order) */\n rowIndices: number[];\n /** Whether this group is expanded in the UI */\n expanded: boolean;\n}\n\n// ── Pipeline state ──\n\nexport interface PipelineState {\n sorts: SortDef[];\n filters: FilterDef[];\n groups: GroupDef[];\n}\n\nexport function createEmptyPipeline(): PipelineState {\n return { sorts: [], filters: [], groups: [] };\n}\n\n// ── Index building ──\n\n/**\n * Build a sorted index from column values.\n * Returns an array of original row indices in the sorted order.\n */\nexport function buildSortIndex(\n values: unknown[],\n sorts: SortDef[],\n columnValues: Map<string, unknown[]>,\n): number[] {\n const indices = Array.from({ length: values.length }, (_, i) => i);\n\n indices.sort((a, b) => {\n for (const sort of sorts) {\n const col = columnValues.get(sort.column);\n if (!col) continue;\n const va = col[a];\n const vb = col[b];\n const cmp = compareValues(va, vb);\n if (cmp !== 0) return sort.direction === \"asc\" ? 
cmp : -cmp;\n }\n return a - b; // stable: preserve original order for ties\n });\n\n return indices;\n}\n\n/**\n * Apply filters to produce a set of passing row indices.\n */\nexport function buildFilterIndex(\n totalRows: number,\n filters: FilterDef[],\n columnValues: Map<string, unknown[]>,\n): number[] | null {\n if (filters.length === 0) return null; // null = no filtering\n\n const passing: number[] = [];\n for (let i = 0; i < totalRows; i++) {\n let pass = true;\n for (const filter of filters) {\n const col = columnValues.get(filter.column);\n if (!col) continue;\n if (!matchesFilter(col[i], filter)) {\n pass = false;\n break;\n }\n }\n if (pass) passing.push(i);\n }\n return passing;\n}\n\n/**\n * Build group nodes from column values.\n * Supports multi-level grouping (first group column → second → etc.)\n */\nexport function buildGroups(\n rowIndices: number[],\n groups: GroupDef[],\n columnValues: Map<string, unknown[]>,\n): GroupNode[] {\n if (groups.length === 0) return [];\n\n const firstGroup = groups[0];\n const col = columnValues.get(firstGroup.column);\n if (!col) return [];\n\n // Bucket rows by group key — store DISPLAY indices (position in mapping),\n // not source indices, so getRow(displayIndex) works directly.\n const buckets = new Map<string, number[]>();\n const keyLabels = new Map<string, unknown>();\n\n for (let displayIdx = 0; displayIdx < rowIndices.length; displayIdx++) {\n const sourceIdx = rowIndices[displayIdx];\n const value = col[sourceIdx];\n const key = formatGroupKey(value);\n if (!buckets.has(key)) {\n buckets.set(key, []);\n keyLabels.set(key, value);\n }\n buckets.get(key)!.push(displayIdx);\n }\n\n // Sort group keys\n const sortedKeys = [...buckets.keys()].sort((a, b) => {\n const va = keyLabels.get(a);\n const vb = keyLabels.get(b);\n return compareValues(va, vb);\n });\n\n return sortedKeys.map((key) => ({\n key: keyLabels.get(key),\n label: key,\n count: buckets.get(key)!.length,\n rowIndices: buckets.get(key)!,\n 
expanded: false,\n }));\n}\n\n/**\n * Collect unique values from a column (up to a limit).\n * Returns sorted unique values.\n */\nexport function collectUniqueValues(\n values: unknown[],\n limit: number = 500,\n): unknown[] {\n const seen = new Set<string>();\n const unique: unknown[] = [];\n\n for (const v of values) {\n const key = formatGroupKey(v);\n if (!seen.has(key)) {\n seen.add(key);\n unique.push(v);\n if (unique.length >= limit) break;\n }\n }\n\n unique.sort((a, b) => compareValues(a, b));\n return unique;\n}\n\n// ── Helpers ──\n\nfunction compareValues(a: unknown, b: unknown): number {\n // nulls last\n if (a === null || a === undefined) return b === null || b === undefined ? 0 : 1;\n if (b === null || b === undefined) return -1;\n\n if (typeof a === \"number\" && typeof b === \"number\") return a - b;\n if (typeof a === \"bigint\" && typeof b === \"bigint\") return a < b ? -1 : a > b ? 1 : 0;\n if (typeof a === \"boolean\" && typeof b === \"boolean\") return Number(a) - Number(b);\n\n // Dates\n if (a instanceof Date && b instanceof Date) return a.getTime() - b.getTime();\n\n // Default: string comparison\n return String(a).localeCompare(String(b));\n}\n\nfunction matchesFilter(value: unknown, filter: FilterDef): boolean {\n switch (filter.operator) {\n case \"is_null\":\n return value === null || value === undefined;\n case \"is_not_null\":\n return value !== null && value !== undefined;\n case \"eq\":\n return value === filter.value;\n case \"neq\":\n return value !== filter.value;\n case \"gt\":\n return compareValues(value, filter.value) > 0;\n case \"gte\":\n return compareValues(value, filter.value) >= 0;\n case \"lt\":\n return compareValues(value, filter.value) < 0;\n case \"lte\":\n return compareValues(value, filter.value) <= 0;\n case \"contains\":\n return String(value).toLowerCase().includes(String(filter.value).toLowerCase());\n case \"not_contains\":\n return 
!String(value).toLowerCase().includes(String(filter.value).toLowerCase());\n case \"in\":\n if (Array.isArray(filter.value)) {\n const set = new Set(filter.value.map(String));\n return set.has(String(value));\n }\n return false;\n default:\n return true;\n }\n}\n\nfunction formatGroupKey(value: unknown): string {\n if (value === null || value === undefined) return \"(null)\";\n if (value instanceof Date) return value.toISOString();\n return String(value);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,uBAIO;AAWP,IAAM,kBAAkB;AAExB,SAAS,mBAA4B;AACnC,QAAM,IAAI;AACV,SAAO,GAAG,kBAAkB;AAC9B;AAEA,SAAS,QAAQ,SAAiB,SAAmB;AACnD,MAAI,CAAC,iBAAiB,EAAG;AACzB,MAAI,YAAY,QAAW;AACzB,YAAQ,IAAI,GAAG,eAAe,IAAI,OAAO,IAAI,OAAO;AAAA,EACtD,OAAO;AACL,YAAQ,IAAI,GAAG,eAAe,IAAI,OAAO,EAAE;AAAA,EAC7C;AACF;AAEA,SAAS,UAAU,SAAiB,OAAgB;AAClD,MAAI,CAAC,iBAAiB,EAAG;AACzB,UAAQ,MAAM,GAAG,eAAe,IAAI,OAAO,IAAI,KAAK;AACtD;AAMA,SAAS,qBAAqB,OAA+C;AAC3E,QAAM,UAAU,QAAQ,QAAQ,KAAK;AACrC,UAAQ,QAAQ,CAAC,OAAe,QAC9B,QAAQ,KAAK,CAAC,WAAW,OAAO,MAAM,OAAO,GAAG,CAAC;AACnD,SAAO;AACT;AAEA,IAAM,wBAAwB,oBAAI,QAAsC;AAExE,SAAS,gBAAgB,QAAsC;AAC7D,QAAM,SAAS,sBAAsB,IAAI,MAAM;AAC/C,MAAI,OAAQ,QAAO;AAEnB,QAAM,UAAyB;AAAA,IAC7B,YAAY,OAAO;AAAA,IACnB,MAAM,OAAe,KAAoC;AACvD,YAAM,IAAI;AAAA,QACR,QAAQ,QAAQ,OAAO,MAAM,OAAO,GAAG,CAAC;AAAA,MAC1C;AACA,cAAQ,yBAAyB;AAAA,QAC/B;AAAA,QACA,KAAK,OAAO,OAAO;AAAA,QACnB,gBAAgB,OAAQ,EAA0B,UAAU;AAAA,MAC9D,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AAEA,wBAAsB,IAAI,QAAQ,OAAO;AACzC,SAAO;AACT;AASO,SAAS,eAAe,MAA2B;AACxD,UAAQ,yBAAyB,EAAE,MAAM,KAAK,KAAK,CAAC;AACpD,SAAO;AAAA,IACL,YAAY,KAAK;AAAA,IACjB,MAAM,OAAe,KAAoC;AACvD,YAAM,OAAO,KAAK,MAAM,OAAO,GAAG;AAGlC,YAAM,IAAI,qBAAqB,KAAK,YAAY,CAAC;AACjD,cAAQ,wBAAwB;AAAA,QAC9B;AAAA,QACA,KAAK,OAAO,KAAK;AAAA,QACjB,gBAAgB,OAAQ,EAA0B,UAAU;AAAA,MAC9D,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAKO,SAAS,iBAAiB,QAAoC;AACnE,UAAQ,2BAA2B,EAAE,YAAY,OAAO,WAAW,CAAC;AACpE,SAAO;AAAA,IACL,YAAY,OAAO;AAAA,IACnB,MAAM,OAA
e,KAAoC;AACvD,YAAM,IAAI;AAAA,QACR,QAAQ,QAAQ,OAAO,MAAM,OAAO,GAAG,CAAC;AAAA,MAC1C;AACA,cAAQ,0BAA0B;AAAA,QAChC;AAAA,QACA,KAAK,OAAO,OAAO;AAAA,QACnB,gBAAgB,OAAQ,EAA0B,UAAU;AAAA,MAC9D,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAKA,eAAsB,cAAc,KAAqC;AACvE,UAAQ,uBAAuB,EAAE,IAAI,CAAC;AACtC,QAAM,WAAW,UAAM,qCAAmB,EAAE,IAAI,CAAC;AACjD,QAAM,UAAyB;AAAA,IAC7B,YAAY,SAAS;AAAA,IACrB,MAAM,OAAe,KAAoC;AACvD,YAAM,IAAI;AAAA,QACR,QAAQ,QAAQ,SAAS,MAAM,OAAO,GAAG,CAAC;AAAA,MAC5C;AACA,cAAQ,uBAAuB;AAAA,QAC7B;AAAA,QACA,KAAK,OAAO,SAAS;AAAA,QACrB,gBAAgB,OAAQ,EAA0B,UAAU;AAAA,MAC9D,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AACA,UAAQ,uBAAuB,EAAE,YAAY,QAAQ,WAAW,CAAC;AACjE,SAAO;AACT;AAIA,IAAM,mBAAmB,oBAAI,QAAoC;AACjE,IAAM,mBAAmB,oBAAI,QAA8C;AAE3E,SAAS,eAAe,QAAoC;AAC1D,QAAM,aAAa,gBAAgB,MAAM;AACzC,MAAI,MAAM,iBAAiB,IAAI,UAAU;AACzC,MAAI,CAAC,KAAK;AAGR,UAAM;AACN,qBAAiB,IAAI,YAAY,GAAG;AAAA,EACtC;AACA,SAAO;AACT;AAEA,SAAS,eAAe,QAA8C;AACpE,QAAM,aAAa,gBAAgB,MAAM;AACzC,MAAI,UAAU,iBAAiB,IAAI,UAAU;AAC7C,MAAI,CAAC,SAAS;AACZ,UAAM,WAAW,eAAe,UAAU;AAC1C,kBAAU,uCAAqB,QAAQ;AACvC,qBAAiB,IAAI,YAAY,OAAO;AAAA,EAC1C;AACA,SAAO;AACT;AAEA,SAAS,kBAAkB,KAAoC;AAC7D,QAAM,UAA2B,IAAI,OAAO,MAAM,CAAC,EAAE,IAAI,CAAC,SAAS;AAAA,IACjE,MAAM,IAAI;AAAA,IACV,MAAM,IAAI,QAAQ;AAAA,IAClB,UAAU,IAAI,oBAAoB;AAAA,EACpC,EAAE;AAGF,QAAM,kBAA4B,CAAC,CAAC;AACpC,aAAW,MAAM,IAAI,YAAY;AAC/B,oBAAgB,KAAK,gBAAgB,gBAAgB,SAAS,CAAC,IAAI,OAAO,GAAG,QAAQ,CAAC;AAAA,EACxF;AAEA,SAAO;AAAA,IACL,UAAU,OAAO,IAAI,QAAQ;AAAA,IAC7B;AAAA,IACA,WAAW,IAAI,WAAW;AAAA,IAC1B;AAAA,IACA,WAAW,IAAI,cAAc;AAAA,EAC/B;AACF;AAQA,eAAsB,oBACpB,QAC0B;AAC1B,QAAM,MAAM,MAAM,eAAe,MAAM;AACvC,SAAO,kBAAkB,GAAG;AAC9B;AAMA,eAAsB,gBACpB,QACA,UAAuB,CAAC,GACF;AACtB,UAAQ,yBAAyB;AAAA,IAC/B,QAAQ,QAAQ,UAAU;AAAA,IAC1B,OAAO,QAAQ,SAAS;AAAA,IACxB,SAAS,QAAQ,SAAS,UAAU;AAAA,EACtC,CAAC;AACD,QAAM,WAAW,eAAe,MAAM;AACtC,QAAM,cAAc,MAAM,eAAe,MAAM;AAC/C,QAAM,WAAW,kBAAkB,WAAW;AAE9C,QAAM,OAAqB,CAAC;AAE5B,MAAI;AACF,cAAM,8BAAY;AAAA,MAChB,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS,QAAQ;AAAA,MACjB,UAAU,QAAQ,UAAU;AAAA,MAC5B,QACE,QAAQ,UAAU,UACb,QAAQ,UAAU,KAAK
,QAAQ,QAChC;AAAA,MACN,YAAY,CAAC,SAAsB;AACjC,cAAM,cACJ,QAAQ,WAAW,SAAS,QAAQ,IAAI,CAAC,MAAM,EAAE,IAAI;AACvD,mBAAW,OAAO,MAAM;AACtB,gBAAM,MAAkB,CAAC;AACzB,sBAAY,QAAQ,CAAC,MAAM,MAAM;AAC/B,gBAAI,IAAI,IAAI,IAAI,CAAC;AAAA,UACnB,CAAC;AACD,eAAK,KAAK,GAAG;AAAA,QACf;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH,SAAS,OAAO;AACd,cAAU,yBAAyB,KAAK;AACxC,UAAM;AAAA,EACR;AAEA,SAAO,EAAE,UAAU,KAAK;AAC1B;AAiBA,eAAsB,iBACpB,QACA,aACA,aACiC;AACjC,QAAM,WAAW,eAAe,MAAM;AACtC,QAAM,cAAc,MAAM,eAAe,MAAM;AAE/C,QAAM,SAAS,oBAAI,IAAuB;AAC1C,aAAW,QAAQ,aAAa;AAC9B,WAAO,IAAI,MAAM,CAAC,CAAC;AAAA,EACrB;AAIA,MAAI,YAAY;AAChB,aAAW,MAAM,YAAY,YAAY;AAEvC,QAAI,cAAc,EAAG,QAAO;AAE5B,UAAM,SAAS,OAAO,GAAG,QAAQ;AAEjC,cAAM,8BAAY;AAAA,MAChB,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS;AAAA,MACT,UAAU;AAAA,MACV,QAAQ,YAAY;AAAA,MACpB,YAAY,CAAC,SAAsB;AACjC,iBAAS,SAAS,GAAG,SAAS,KAAK,QAAQ,UAAU;AACnD,gBAAM,MAAM,KAAK,MAAM;AACvB,mBAAS,SAAS,GAAG,SAAS,YAAY,QAAQ,UAAU;AAC1D,mBAAO,IAAI,YAAY,MAAM,CAAC,EAAG,KAAK,IAAI,MAAM,CAAC;AAAA,UACnD;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,iBAAa;AAGb,UAAM,IAAI,QAAc,CAAC,YAAY,WAAW,SAAS,CAAC,CAAC;AAAA,EAC7D;AAEA,SAAO;AACT;AAMA,eAAsB,kBACpB,QACA,YACA,SACuB;AACvB,MAAI,WAAW,WAAW,EAAG,QAAO,CAAC;AAErC,QAAM,WAAW,eAAe,MAAM;AACtC,QAAM,cAAc,MAAM,eAAe,MAAM;AAC/C,QAAM,WAAW,kBAAkB,WAAW;AAC9C,QAAM,cAAc,WAAW,SAAS,QAAQ,IAAI,CAAC,MAAM,EAAE,IAAI;AAGjE,QAAM,SAAS,WAAW,IAAI,CAAC,KAAK,SAAS,EAAE,KAAK,IAAI,EAAE;AAC1D,SAAO,KAAK,CAAC,GAAG,MAAM,EAAE,MAAM,EAAE,GAAG;AAGnC,QAAM,SAAgE,CAAC;AACvE,MAAI,aAAa,OAAO,CAAC,EAAE;AAC3B,MAAI,WAAW,OAAO,CAAC,EAAE,MAAM;AAC/B,MAAI,YAAY,CAAC,OAAO,CAAC,EAAE,GAAG;AAE9B,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,QAAI,OAAO,CAAC,EAAE,OAAO,WAAW,IAAI;AAElC,iBAAW,OAAO,CAAC,EAAE,MAAM;AAC3B,gBAAU,KAAK,OAAO,CAAC,EAAE,GAAG;AAAA,IAC9B,OAAO;AACL,aAAO,KAAK,EAAE,OAAO,YAAY,KAAK,UAAU,UAAU,CAAC;AAC3D,mBAAa,OAAO,CAAC,EAAE;AACvB,iBAAW,OAAO,CAAC,EAAE,MAAM;AAC3B,kBAAY,CAAC,OAAO,CAAC,EAAE,GAAG;AAAA,IAC5B;AAAA,EACF;AACA,SAAO,KAAK,EAAE,OAAO,YAAY,KAAK,UAAU,UAAU,CAAC;AAE3D,QAAM,SAAuB,IAAI,MAAM,WAAW,MAAM;AAExD,aAAW,SAAS,QAAQ;AAC1B,UAAM,YAA0B,CAAC;AAE
jC,cAAM,8BAAY;AAAA,MAChB,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS;AAAA,MACT,UAAU,MAAM;AAAA,MAChB,QAAQ,MAAM;AAAA,MACd,YAAY,CAAC,SAAsB;AACjC,mBAAW,OAAO,MAAM;AACtB,gBAAM,MAAkB,CAAC;AACzB,sBAAY,QAAQ,CAAC,MAAM,MAAM;AAC/B,gBAAI,IAAI,IAAI,IAAI,CAAC;AAAA,UACnB,CAAC;AACD,oBAAU,KAAK,GAAG;AAAA,QACpB;AAAA,MACF;AAAA,IACF,CAAC;AAGD,QAAI,eAAe;AACnB,eAAW,KAAK,QAAQ;AACtB,UAAI,EAAE,OAAO,MAAM,SAAS,EAAE,MAAM,MAAM,KAAK;AAC7C,cAAM,WAAW,EAAE,MAAM,MAAM;AAC/B,YAAI,WAAW,UAAU,QAAQ;AAC/B,iBAAO,EAAE,GAAG,IAAI,UAAU,QAAQ;AAAA,QACpC;AACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAsB,kBAAkB,MAAkC;AACxE,SAAO,KAAK,YAAY;AAC1B;AAGA,eAAsB,oBAAoB,KAAmC;AAC3E,QAAM,WAAW,MAAM,MAAM,GAAG;AAChC,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,IAAI;AAAA,MACR,iCAAiC,SAAS,MAAM,IAAI,SAAS,UAAU;AAAA,IACzE;AAAA,EACF;AACA,SAAO,SAAS,YAAY;AAC9B;;;AC5VO,SAAS,sBAAqC;AACnD,SAAO,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,GAAG,QAAQ,CAAC,EAAE;AAC9C;AAQO,SAAS,eACd,QACA,OACA,cACU;AACV,QAAM,UAAU,MAAM,KAAK,EAAE,QAAQ,OAAO,OAAO,GAAG,CAAC,GAAG,MAAM,CAAC;AAEjE,UAAQ,KAAK,CAAC,GAAG,MAAM;AACrB,eAAW,QAAQ,OAAO;AACxB,YAAM,MAAM,aAAa,IAAI,KAAK,MAAM;AACxC,UAAI,CAAC,IAAK;AACV,YAAM,KAAK,IAAI,CAAC;AAChB,YAAM,KAAK,IAAI,CAAC;AAChB,YAAM,MAAM,cAAc,IAAI,EAAE;AAChC,UAAI,QAAQ,EAAG,QAAO,KAAK,cAAc,QAAQ,MAAM,CAAC;AAAA,IAC1D;AACA,WAAO,IAAI;AAAA,EACb,CAAC;AAED,SAAO;AACT;AAKO,SAAS,iBACd,WACA,SACA,cACiB;AACjB,MAAI,QAAQ,WAAW,EAAG,QAAO;AAEjC,QAAM,UAAoB,CAAC;AAC3B,WAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,QAAI,OAAO;AACX,eAAW,UAAU,SAAS;AAC5B,YAAM,MAAM,aAAa,IAAI,OAAO,MAAM;AAC1C,UAAI,CAAC,IAAK;AACV,UAAI,CAAC,cAAc,IAAI,CAAC,GAAG,MAAM,GAAG;AAClC,eAAO;AACP;AAAA,MACF;AAAA,IACF;AACA,QAAI,KAAM,SAAQ,KAAK,CAAC;AAAA,EAC1B;AACA,SAAO;AACT;AAMO,SAAS,YACd,YACA,QACA,cACa;AACb,MAAI,OAAO,WAAW,EAAG,QAAO,CAAC;AAEjC,QAAM,aAAa,OAAO,CAAC;AAC3B,QAAM,MAAM,aAAa,IAAI,WAAW,MAAM;AAC9C,MAAI,CAAC,IAAK,QAAO,CAAC;AAIlB,QAAM,UAAU,oBAAI,IAAsB;AAC1C,QAAM,YAAY,oBAAI,IAAqB;AAE3C,WAAS,aAAa,GAAG,aAAa,WAAW,QAAQ,cAAc;AACrE,UAAM,YAAY,WAAW,UAAU;AACvC,UAAM,QAAQ,IAAI,SAAS;AAC3B,UAAM,MAAM,eAAe,KAAK;AAChC,QAAI,CAAC,QAAQ,IAAI,GAAG,GAAG;AACrB,c
AAQ,IAAI,KAAK,CAAC,CAAC;AACnB,gBAAU,IAAI,KAAK,KAAK;AAAA,IAC1B;AACA,YAAQ,IAAI,GAAG,EAAG,KAAK,UAAU;AAAA,EACnC;AAGA,QAAM,aAAa,CAAC,GAAG,QAAQ,KAAK,CAAC,EAAE,KAAK,CAAC,GAAG,MAAM;AACpD,UAAM,KAAK,UAAU,IAAI,CAAC;AAC1B,UAAM,KAAK,UAAU,IAAI,CAAC;AAC1B,WAAO,cAAc,IAAI,EAAE;AAAA,EAC7B,CAAC;AAED,SAAO,WAAW,IAAI,CAAC,SAAS;AAAA,IAC9B,KAAK,UAAU,IAAI,GAAG;AAAA,IACtB,OAAO;AAAA,IACP,OAAO,QAAQ,IAAI,GAAG,EAAG;AAAA,IACzB,YAAY,QAAQ,IAAI,GAAG;AAAA,IAC3B,UAAU;AAAA,EACZ,EAAE;AACJ;AAMO,SAAS,oBACd,QACA,QAAgB,KACL;AACX,QAAM,OAAO,oBAAI,IAAY;AAC7B,QAAM,SAAoB,CAAC;AAE3B,aAAW,KAAK,QAAQ;AACtB,UAAM,MAAM,eAAe,CAAC;AAC5B,QAAI,CAAC,KAAK,IAAI,GAAG,GAAG;AAClB,WAAK,IAAI,GAAG;AACZ,aAAO,KAAK,CAAC;AACb,UAAI,OAAO,UAAU,MAAO;AAAA,IAC9B;AAAA,EACF;AAEA,SAAO,KAAK,CAAC,GAAG,MAAM,cAAc,GAAG,CAAC,CAAC;AACzC,SAAO;AACT;AAIA,SAAS,cAAc,GAAY,GAAoB;AAErD,MAAI,MAAM,QAAQ,MAAM,OAAW,QAAO,MAAM,QAAQ,MAAM,SAAY,IAAI;AAC9E,MAAI,MAAM,QAAQ,MAAM,OAAW,QAAO;AAE1C,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,SAAU,QAAO,IAAI;AAC/D,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,SAAU,QAAO,IAAI,IAAI,KAAK,IAAI,IAAI,IAAI;AACpF,MAAI,OAAO,MAAM,aAAa,OAAO,MAAM,UAAW,QAAO,OAAO,CAAC,IAAI,OAAO,CAAC;AAGjF,MAAI,aAAa,QAAQ,aAAa,KAAM,QAAO,EAAE,QAAQ,IAAI,EAAE,QAAQ;AAG3E,SAAO,OAAO,CAAC,EAAE,cAAc,OAAO,CAAC,CAAC;AAC1C;AAEA,SAAS,cAAc,OAAgB,QAA4B;AACjE,UAAQ,OAAO,UAAU;AAAA,IACvB,KAAK;AACH,aAAO,UAAU,QAAQ,UAAU;AAAA,IACrC,KAAK;AACH,aAAO,UAAU,QAAQ,UAAU;AAAA,IACrC,KAAK;AACH,aAAO,UAAU,OAAO;AAAA,IAC1B,KAAK;AACH,aAAO,UAAU,OAAO;AAAA,IAC1B,KAAK;AACH,aAAO,cAAc,OAAO,OAAO,KAAK,IAAI;AAAA,IAC9C,KAAK;AACH,aAAO,cAAc,OAAO,OAAO,KAAK,KAAK;AAAA,IAC/C,KAAK;AACH,aAAO,cAAc,OAAO,OAAO,KAAK,IAAI;AAAA,IAC9C,KAAK;AACH,aAAO,cAAc,OAAO,OAAO,KAAK,KAAK;AAAA,IAC/C,KAAK;AACH,aAAO,OAAO,KAAK,EAAE,YAAY,EAAE,SAAS,OAAO,OAAO,KAAK,EAAE,YAAY,CAAC;AAAA,IAChF,KAAK;AACH,aAAO,CAAC,OAAO,KAAK,EAAE,YAAY,EAAE,SAAS,OAAO,OAAO,KAAK,EAAE,YAAY,CAAC;AAAA,IACjF,KAAK;AACH,UAAI,MAAM,QAAQ,OAAO,KAAK,GAAG;AAC/B,cAAM,MAAM,IAAI,IAAI,OAAO,MAAM,IAAI,MAAM,CAAC;AAC5C,eAAO,IAAI,IAAI,OAAO,KAAK,CAAC;AAAA,MAC9B;AACA,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;AAEA,SAAS,eAAe,OA
AwB;AAC9C,MAAI,UAAU,QAAQ,UAAU,OAAW,QAAO;AAClD,MAAI,iBAAiB,KAAM,QAAO,MAAM,YAAY;AACpD,SAAO,OAAO,KAAK;AACrB;","names":[]}
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/reader.ts","../src/pipeline.ts","../src/utils.ts"],"sourcesContent":["export {\n readParquetMetadata,\n readParquetData,\n readColumnValues,\n readRowsByIndices,\n sourceFromFile,\n sourceFromBuffer,\n sourceFromUrl,\n // Legacy\n fileToArrayBuffer,\n fetchParquetFromUrl,\n} from \"./reader.js\";\n\nexport {\n buildSortIndex,\n buildFilterIndex,\n buildGroups,\n collectUniqueValues,\n createEmptyPipeline,\n compareValues,\n matchesFilter,\n formatGroupKey,\n} from \"./pipeline.js\";\n\nexport {\n yieldToUI,\n asyncSort,\n runWithConcurrency,\n} from \"./utils.js\";\n\nexport type {\n ParquetColumn,\n ParquetMetadata,\n ParquetRow,\n ParquetData,\n ReadOptions,\n ParquetSource,\n} from \"./types.js\";\n\nexport type {\n SortDef,\n SortDirection,\n FilterDef,\n FilterOperator,\n GroupDef,\n GroupNode,\n PipelineState,\n} from \"./pipeline.js\";\n","import {\n parquetMetadataAsync,\n parquetRead,\n asyncBufferFromUrl,\n} from \"hyparquet\";\nimport type { AsyncBuffer, FileMetaData } from \"hyparquet\";\nimport type {\n ParquetColumn,\n ParquetData,\n ParquetMetadata,\n ParquetRow,\n ParquetSource,\n ReadOptions,\n} from \"./types.js\";\n\nconst PARQUI_DEBUG_NS = \"[parqui/core]\";\n\nfunction coreDebugEnabled(): boolean {\n const g = globalThis as { __PARQUI_DEBUG?: boolean } | undefined;\n return g?.__PARQUI_DEBUG ?? 
false;\n}\n\nfunction coreLog(message: string, details?: unknown) {\n if (!coreDebugEnabled()) return;\n if (details !== undefined) {\n console.log(`${PARQUI_DEBUG_NS} ${message}`, details);\n } else {\n console.log(`${PARQUI_DEBUG_NS} ${message}`);\n }\n}\n\nfunction coreError(message: string, error: unknown) {\n if (!coreDebugEnabled()) return;\n console.error(`${PARQUI_DEBUG_NS} ${message}`, error);\n}\n\ntype SliceablePromise = Promise<ArrayBuffer> & {\n slice: (start: number, end?: number) => Promise<ArrayBuffer>;\n};\n\nfunction makeSliceablePromise(input: Promise<ArrayBuffer>): SliceablePromise {\n const promise = Promise.resolve(input) as SliceablePromise;\n promise.slice = (start: number, end?: number) =>\n promise.then((buffer) => buffer.slice(start, end));\n return promise;\n}\n\nconst normalizedSourceCache = new WeakMap<ParquetSource, ParquetSource>();\n\nfunction normalizeSource(source: ParquetSource): ParquetSource {\n const cached = normalizedSourceCache.get(source);\n if (cached) return cached;\n\n const wrapped: ParquetSource = {\n byteLength: source.byteLength,\n slice(start: number, end?: number): Promise<ArrayBuffer> {\n const p = makeSliceablePromise(\n Promise.resolve(source.slice(start, end)),\n );\n coreLog(\"normalizeSource:slice\", {\n start,\n end: end ?? 
source.byteLength,\n hasSliceMethod: typeof (p as { slice?: unknown }).slice === \"function\",\n });\n return p;\n },\n };\n\n normalizedSourceCache.set(source, wrapped);\n return wrapped;\n}\n\n// ── Source creation ──\n\n/**\n * Create a ParquetSource from a browser File object.\n * Does NOT load the file into memory — reads slices on demand.\n * Works with files of any size (including multi-GB).\n */\nexport function sourceFromFile(file: File): ParquetSource {\n coreLog(\"sourceFromFile:create\", { size: file.size });\n return {\n byteLength: file.size,\n slice(start: number, end?: number): Promise<ArrayBuffer> {\n const blob = file.slice(start, end);\n // hyparquet may branch on `instanceof Promise` and then call `.slice()` on non-promises.\n // This object works in both branches: it's awaitable AND has a `.slice()` method.\n const p = makeSliceablePromise(blob.arrayBuffer());\n coreLog(\"sourceFromFile:slice\", {\n start,\n end: end ?? file.size,\n hasSliceMethod: typeof (p as { slice?: unknown }).slice === \"function\",\n });\n return p;\n },\n };\n}\n\n/**\n * Create a ParquetSource from an ArrayBuffer (file already in memory).\n */\nexport function sourceFromBuffer(buffer: ArrayBuffer): ParquetSource {\n coreLog(\"sourceFromBuffer:create\", { byteLength: buffer.byteLength });\n return {\n byteLength: buffer.byteLength,\n slice(start: number, end?: number): Promise<ArrayBuffer> {\n const p = makeSliceablePromise(\n Promise.resolve(buffer.slice(start, end)),\n );\n coreLog(\"sourceFromBuffer:slice\", {\n start,\n end: end ?? 
buffer.byteLength,\n hasSliceMethod: typeof (p as { slice?: unknown }).slice === \"function\",\n });\n return p;\n },\n };\n}\n\n/**\n * Create a ParquetSource from a URL using HTTP Range requests.\n */\nexport async function sourceFromUrl(url: string): Promise<ParquetSource> {\n coreLog(\"sourceFromUrl:start\", { url });\n const asyncBuf = await asyncBufferFromUrl({ url });\n const wrapped: ParquetSource = {\n byteLength: asyncBuf.byteLength,\n slice(start: number, end?: number): Promise<ArrayBuffer> {\n const p = makeSliceablePromise(\n Promise.resolve(asyncBuf.slice(start, end)),\n );\n coreLog(\"sourceFromUrl:slice\", {\n start,\n end: end ?? asyncBuf.byteLength,\n hasSliceMethod: typeof (p as { slice?: unknown }).slice === \"function\",\n });\n return p;\n },\n };\n coreLog(\"sourceFromUrl:ready\", { byteLength: wrapped.byteLength });\n return wrapped;\n}\n\n// ── Internal caching ──\n\nconst asyncBufferCache = new WeakMap<ParquetSource, AsyncBuffer>();\nconst rawMetadataCache = new WeakMap<ParquetSource, Promise<FileMetaData>>();\n\nfunction getAsyncBuffer(source: ParquetSource): AsyncBuffer {\n const safeSource = normalizeSource(source);\n let buf = asyncBufferCache.get(safeSource);\n if (!buf) {\n // Use the direct source object. cachedAsyncBuffer can normalize slice()\n // into plain Promises, which breaks in some zone.js runtimes.\n buf = safeSource as AsyncBuffer;\n asyncBufferCache.set(safeSource, buf);\n }\n return buf;\n}\n\nfunction getRawMetadata(source: ParquetSource): Promise<FileMetaData> {\n const safeSource = normalizeSource(source);\n let promise = rawMetadataCache.get(safeSource);\n if (!promise) {\n const asyncBuf = getAsyncBuffer(safeSource);\n promise = parquetMetadataAsync(asyncBuf);\n rawMetadataCache.set(safeSource, promise);\n }\n return promise;\n}\n\nfunction toParquetMetadata(raw: FileMetaData): ParquetMetadata {\n const columns: ParquetColumn[] = raw.schema.slice(1).map((col) => ({\n name: col.name,\n type: col.type ?? 
\"UNKNOWN\",\n nullable: col.repetition_type !== \"REQUIRED\",\n }));\n\n // Compute row group boundaries: [0, rg0_rows, rg0+rg1_rows, ..., totalRows]\n const rowGroupOffsets: number[] = [0];\n for (const rg of raw.row_groups) {\n rowGroupOffsets.push(rowGroupOffsets[rowGroupOffsets.length - 1] + Number(rg.num_rows));\n }\n\n return {\n rowCount: Number(raw.num_rows),\n columns,\n rowGroups: raw.row_groups.length,\n rowGroupOffsets,\n createdBy: raw.created_by ?? undefined,\n };\n}\n\n// ── Reading ──\n\n/**\n * Read only the metadata from a parquet source.\n * Reads a small amount from the end of the file (footer).\n */\nexport async function readParquetMetadata(\n source: ParquetSource,\n): Promise<ParquetMetadata> {\n const raw = await getRawMetadata(source);\n return toParquetMetadata(raw);\n}\n\n/**\n * Read data (multiple columns, row range) from a parquet source.\n * Only reads the requested row range — does NOT load the entire file.\n */\nexport async function readParquetData(\n source: ParquetSource,\n options: ReadOptions = {},\n): Promise<ParquetData> {\n coreLog(\"readParquetData:start\", {\n offset: options.offset ?? 0,\n limit: options.limit ?? null,\n columns: options.columns?.length ?? \"all\",\n });\n const asyncBuf = getAsyncBuffer(source);\n const rawMetadata = await getRawMetadata(source);\n const metadata = toParquetMetadata(rawMetadata);\n\n const rows: ParquetRow[] = [];\n\n try {\n await parquetRead({\n file: asyncBuf,\n metadata: rawMetadata,\n columns: options.columns,\n rowStart: options.offset ?? 0,\n rowEnd:\n options.limit !== undefined\n ? (options.offset ?? 0) + options.limit\n : undefined,\n onComplete: (data: unknown[][]) => {\n const columnNames =\n options.columns ?? 
metadata.columns.map((c) => c.name);\n for (const row of data) {\n const obj: ParquetRow = {};\n columnNames.forEach((name, i) => {\n obj[name] = row[i];\n });\n rows.push(obj);\n }\n },\n });\n } catch (error) {\n coreError(\"readParquetData:error\", error);\n throw error;\n }\n\n return { metadata, rows };\n}\n\n/**\n * Read ALL values of specific columns from the parquet source.\n * Used for building sort indices, group keys, and filter unique values.\n * Only reads the requested columns — not all data.\n *\n * Reads row-group by row-group with yields between each to avoid\n * blocking the main thread. This keeps the UI responsive (scroll, etc.)\n * during long reads on large files.\n *\n * @param isCancelled — optional callback checked between row groups.\n * If it returns true, reading stops early and partial results are returned.\n * This allows quick cancellation when the user changes sort/filter mid-read.\n *\n * Returns a Map of column name → array of values (one per row).\n */\nexport async function readColumnValues(\n source: ParquetSource,\n columnNames: string[],\n isCancelled?: () => boolean,\n): Promise<Map<string, unknown[]>> {\n const asyncBuf = getAsyncBuffer(source);\n const rawMetadata = await getRawMetadata(source);\n\n const result = new Map<string, unknown[]>();\n for (const name of columnNames) {\n result.set(name, []);\n }\n\n // Read row-group by row-group to keep the main thread responsive.\n // Check isCancelled between each row group for early abort.\n let rowOffset = 0;\n for (const rg of rawMetadata.row_groups) {\n // Early exit if this compute was superseded\n if (isCancelled?.()) return result;\n\n const rgRows = Number(rg.num_rows);\n\n await parquetRead({\n file: asyncBuf,\n metadata: rawMetadata,\n columns: columnNames,\n rowStart: rowOffset,\n rowEnd: rowOffset + rgRows,\n onComplete: (data: unknown[][]) => {\n for (let rowIdx = 0; rowIdx < data.length; rowIdx++) {\n const row = data[rowIdx];\n for (let colIdx = 0; colIdx < 
columnNames.length; colIdx++) {\n result.get(columnNames[colIdx])!.push(row[colIdx]);\n }\n }\n },\n });\n\n rowOffset += rgRows;\n\n // Yield to browser between row groups so scroll events can fire\n await new Promise<void>((resolve) => setTimeout(resolve, 0));\n }\n\n return result;\n}\n\n/**\n * Read values of specific columns for specific row indices.\n * Useful for fetching display data for sorted/filtered views.\n */\nexport async function readRowsByIndices(\n source: ParquetSource,\n rowIndices: number[],\n columns?: string[],\n): Promise<ParquetRow[]> {\n if (rowIndices.length === 0) return [];\n\n const asyncBuf = getAsyncBuffer(source);\n const rawMetadata = await getRawMetadata(source);\n const metadata = toParquetMetadata(rawMetadata);\n const columnNames = columns ?? metadata.columns.map((c) => c.name);\n\n // Sort indices to read sequentially, then re-order\n const sorted = rowIndices.map((idx, pos) => ({ idx, pos }));\n sorted.sort((a, b) => a.idx - b.idx);\n\n // Find contiguous ranges to batch reads\n const ranges: { start: number; end: number; positions: number[] }[] = [];\n let rangeStart = sorted[0].idx;\n let rangeEnd = sorted[0].idx + 1;\n let positions = [sorted[0].pos];\n\n for (let i = 1; i < sorted.length; i++) {\n if (sorted[i].idx <= rangeEnd + 50) {\n // Allow small gaps to merge ranges\n rangeEnd = sorted[i].idx + 1;\n positions.push(sorted[i].pos);\n } else {\n ranges.push({ start: rangeStart, end: rangeEnd, positions });\n rangeStart = sorted[i].idx;\n rangeEnd = sorted[i].idx + 1;\n positions = [sorted[i].pos];\n }\n }\n ranges.push({ start: rangeStart, end: rangeEnd, positions });\n\n const result: ParquetRow[] = new Array(rowIndices.length);\n\n for (const range of ranges) {\n const rangeRows: ParquetRow[] = [];\n\n await parquetRead({\n file: asyncBuf,\n metadata: rawMetadata,\n columns: columnNames,\n rowStart: range.start,\n rowEnd: range.end,\n onComplete: (data: unknown[][]) => {\n for (const row of data) {\n const obj: 
ParquetRow = {};\n columnNames.forEach((name, i) => {\n obj[name] = row[i];\n });\n rangeRows.push(obj);\n }\n },\n });\n\n // Map range rows back to the correct positions\n let sortedPosIdx = 0;\n for (const s of sorted) {\n if (s.idx >= range.start && s.idx < range.end) {\n const localIdx = s.idx - range.start;\n if (localIdx < rangeRows.length) {\n result[s.pos] = rangeRows[localIdx];\n }\n sortedPosIdx++;\n }\n }\n }\n\n return result;\n}\n\n// ── Legacy helpers (kept for backwards compatibility) ──\n\n/** @deprecated Use sourceFromFile() instead */\nexport async function fileToArrayBuffer(file: File): Promise<ArrayBuffer> {\n return file.arrayBuffer();\n}\n\n/** @deprecated Use sourceFromUrl() instead */\nexport async function fetchParquetFromUrl(url: string): Promise<ArrayBuffer> {\n const response = await fetch(url);\n if (!response.ok) {\n throw new Error(\n `Failed to fetch parquet file: ${response.status} ${response.statusText}`,\n );\n }\n return response.arrayBuffer();\n}\n","/**\n * Data pipeline: sorting, filtering, grouping.\n * Operates on column-level indices — never loads all columns at once.\n */\n\n// ── Sort ──\n\nexport type SortDirection = \"asc\" | \"desc\";\n\nexport interface SortDef {\n column: string;\n direction: SortDirection;\n}\n\n// ── Filter ──\n\nexport type FilterOperator =\n | \"eq\"\n | \"neq\"\n | \"gt\"\n | \"gte\"\n | \"lt\"\n | \"lte\"\n | \"contains\"\n | \"not_contains\"\n | \"is_null\"\n | \"is_not_null\"\n | \"in\";\n\nexport interface FilterDef {\n column: string;\n operator: FilterOperator;\n /** The value(s) to compare against. For \"in\" — an array; for \"is_null\"/\"is_not_null\" — ignored. 
*/\n value?: unknown;\n}\n\n// ── Group ──\n\nexport interface GroupDef {\n column: string;\n}\n\nexport interface GroupNode {\n /** The value of the group key */\n key: unknown;\n /** Display label for this group */\n label: string;\n /** Number of rows in this group */\n count: number;\n /** Row indices belonging to this group (in the filtered/sorted order) */\n rowIndices: number[];\n /** Whether this group is expanded in the UI */\n expanded: boolean;\n}\n\n// ── Pipeline state ──\n\nexport interface PipelineState {\n sorts: SortDef[];\n filters: FilterDef[];\n groups: GroupDef[];\n}\n\nexport function createEmptyPipeline(): PipelineState {\n return { sorts: [], filters: [], groups: [] };\n}\n\n// ── Index building ──\n\n/**\n * Build a sorted index from column values.\n * Returns an array of original row indices in the sorted order.\n */\nexport function buildSortIndex(\n values: unknown[],\n sorts: SortDef[],\n columnValues: Map<string, unknown[]>,\n): number[] {\n const indices = Array.from({ length: values.length }, (_, i) => i);\n\n indices.sort((a, b) => {\n for (const sort of sorts) {\n const col = columnValues.get(sort.column);\n if (!col) continue;\n const va = col[a];\n const vb = col[b];\n const cmp = compareValues(va, vb);\n if (cmp !== 0) return sort.direction === \"asc\" ? 
cmp : -cmp;\n }\n return a - b; // stable: preserve original order for ties\n });\n\n return indices;\n}\n\n/**\n * Apply filters to produce a set of passing row indices.\n */\nexport function buildFilterIndex(\n totalRows: number,\n filters: FilterDef[],\n columnValues: Map<string, unknown[]>,\n): number[] | null {\n if (filters.length === 0) return null; // null = no filtering\n\n const passing: number[] = [];\n for (let i = 0; i < totalRows; i++) {\n let pass = true;\n for (const filter of filters) {\n const col = columnValues.get(filter.column);\n if (!col) continue;\n if (!matchesFilter(col[i], filter)) {\n pass = false;\n break;\n }\n }\n if (pass) passing.push(i);\n }\n return passing;\n}\n\n/**\n * Build group nodes from column values.\n * Supports multi-level grouping (first group column → second → etc.)\n */\nexport function buildGroups(\n rowIndices: number[],\n groups: GroupDef[],\n columnValues: Map<string, unknown[]>,\n): GroupNode[] {\n if (groups.length === 0) return [];\n\n const firstGroup = groups[0];\n const col = columnValues.get(firstGroup.column);\n if (!col) return [];\n\n // Bucket rows by group key — store DISPLAY indices (position in mapping),\n // not source indices, so getRow(displayIndex) works directly.\n const buckets = new Map<string, number[]>();\n const keyLabels = new Map<string, unknown>();\n\n for (let displayIdx = 0; displayIdx < rowIndices.length; displayIdx++) {\n const sourceIdx = rowIndices[displayIdx];\n const value = col[sourceIdx];\n const key = formatGroupKey(value);\n if (!buckets.has(key)) {\n buckets.set(key, []);\n keyLabels.set(key, value);\n }\n buckets.get(key)!.push(displayIdx);\n }\n\n // Sort group keys\n const sortedKeys = [...buckets.keys()].sort((a, b) => {\n const va = keyLabels.get(a);\n const vb = keyLabels.get(b);\n return compareValues(va, vb);\n });\n\n return sortedKeys.map((key) => ({\n key: keyLabels.get(key),\n label: key,\n count: buckets.get(key)!.length,\n rowIndices: buckets.get(key)!,\n 
expanded: false,\n }));\n}\n\n/**\n * Collect unique values from a column (up to a limit).\n * Returns sorted unique values.\n */\nexport function collectUniqueValues(\n values: unknown[],\n limit: number = 500,\n): unknown[] {\n const seen = new Set<string>();\n const unique: unknown[] = [];\n\n for (const v of values) {\n const key = formatGroupKey(v);\n if (!seen.has(key)) {\n seen.add(key);\n unique.push(v);\n if (unique.length >= limit) break;\n }\n }\n\n unique.sort((a, b) => compareValues(a, b));\n return unique;\n}\n\n// ── Helpers ──\n\nexport function compareValues(a: unknown, b: unknown): number {\n // nulls last\n if (a === null || a === undefined) return b === null || b === undefined ? 0 : 1;\n if (b === null || b === undefined) return -1;\n\n if (typeof a === \"number\" && typeof b === \"number\") return a - b;\n if (typeof a === \"bigint\" && typeof b === \"bigint\") return a < b ? -1 : a > b ? 1 : 0;\n if (typeof a === \"boolean\" && typeof b === \"boolean\") return Number(a) - Number(b);\n\n // Dates\n if (a instanceof Date && b instanceof Date) return a.getTime() - b.getTime();\n\n // Default: string comparison\n return String(a).localeCompare(String(b));\n}\n\nexport function matchesFilter(value: unknown, filter: FilterDef): boolean {\n switch (filter.operator) {\n case \"is_null\":\n return value === null || value === undefined;\n case \"is_not_null\":\n return value !== null && value !== undefined;\n case \"eq\":\n return value === filter.value;\n case \"neq\":\n return value !== filter.value;\n case \"gt\":\n return compareValues(value, filter.value) > 0;\n case \"gte\":\n return compareValues(value, filter.value) >= 0;\n case \"lt\":\n return compareValues(value, filter.value) < 0;\n case \"lte\":\n return compareValues(value, filter.value) <= 0;\n case \"contains\":\n return String(value).toLowerCase().includes(String(filter.value).toLowerCase());\n case \"not_contains\":\n return 
!String(value).toLowerCase().includes(String(filter.value).toLowerCase());\n case \"in\":\n if (Array.isArray(filter.value)) {\n const set = new Set(filter.value.map(String));\n return set.has(String(value));\n }\n return false;\n default:\n return true;\n }\n}\n\nexport function formatGroupKey(value: unknown): string {\n if (value === null || value === undefined) return \"(null)\";\n if (value instanceof Date) return value.toISOString();\n return String(value);\n}\n","/**\n * Shared async utilities for pipeline computation.\n * Used by framework wrappers (React, Vue, Angular) for non-blocking sort/read operations.\n */\n\n/** Yield control to the browser to prevent UI freezes. */\nexport function yieldToUI(): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, 0));\n}\n\n/**\n * Async merge sort that yields to the UI periodically.\n * Prevents browser freeze when sorting large arrays (100K+ elements).\n */\nexport async function asyncSort(\n arr: number[],\n compareFn: (a: number, b: number) => number,\n isCancelled: () => boolean,\n): Promise<number[]> {\n const CHUNK = 50_000;\n const chunks: number[][] = [];\n\n for (let i = 0; i < arr.length; i += CHUNK) {\n if (isCancelled()) return arr;\n const chunk = arr.slice(i, i + CHUNK);\n chunk.sort(compareFn);\n chunks.push(chunk);\n await yieldToUI();\n }\n\n while (chunks.length > 1) {\n if (isCancelled()) return arr;\n const merged: number[][] = [];\n for (let i = 0; i < chunks.length; i += 2) {\n if (i + 1 < chunks.length) {\n merged.push(mergeSorted(chunks[i], chunks[i + 1], compareFn));\n } else {\n merged.push(chunks[i]);\n }\n }\n chunks.length = 0;\n chunks.push(...merged);\n await yieldToUI();\n }\n\n return chunks[0] ?? [];\n}\n\n/** Merge two sorted arrays into one. 
*/\nfunction mergeSorted(\n a: number[],\n b: number[],\n compareFn: (a: number, b: number) => number,\n): number[] {\n const result = new Array(a.length + b.length);\n let i = 0, j = 0, k = 0;\n while (i < a.length && j < b.length) {\n if (compareFn(a[i], b[j]) <= 0) result[k++] = a[i++];\n else result[k++] = b[j++];\n }\n while (i < a.length) result[k++] = a[i++];\n while (j < b.length) result[k++] = b[j++];\n return result;\n}\n\n/**\n * Run async tasks with limited concurrency.\n * Prevents browser freeze from too many concurrent reads.\n */\nexport async function runWithConcurrency<T>(\n items: T[],\n maxConcurrency: number,\n fn: (item: T) => Promise<void>,\n): Promise<void> {\n let index = 0;\n const workers = Array.from(\n { length: Math.min(maxConcurrency, items.length) },\n async () => {\n while (index < items.length) {\n const i = index++;\n await fn(items[i]);\n }\n },\n );\n await Promise.all(workers);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,uBAIO;AAWP,IAAM,kBAAkB;AAExB,SAAS,mBAA4B;AACnC,QAAM,IAAI;AACV,SAAO,GAAG,kBAAkB;AAC9B;AAEA,SAAS,QAAQ,SAAiB,SAAmB;AACnD,MAAI,CAAC,iBAAiB,EAAG;AACzB,MAAI,YAAY,QAAW;AACzB,YAAQ,IAAI,GAAG,eAAe,IAAI,OAAO,IAAI,OAAO;AAAA,EACtD,OAAO;AACL,YAAQ,IAAI,GAAG,eAAe,IAAI,OAAO,EAAE;AAAA,EAC7C;AACF;AAEA,SAAS,UAAU,SAAiB,OAAgB;AAClD,MAAI,CAAC,iBAAiB,EAAG;AACzB,UAAQ,MAAM,GAAG,eAAe,IAAI,OAAO,IAAI,KAAK;AACtD;AAMA,SAAS,qBAAqB,OAA+C;AAC3E,QAAM,UAAU,QAAQ,QAAQ,KAAK;AACrC,UAAQ,QAAQ,CAAC,OAAe,QAC9B,QAAQ,KAAK,CAAC,WAAW,OAAO,MAAM,OAAO,GAAG,CAAC;AACnD,SAAO;AACT;AAEA,IAAM,wBAAwB,oBAAI,QAAsC;AAExE,SAAS,gBAAgB,QAAsC;AAC7D,QAAM,SAAS,sBAAsB,IAAI,MAAM;AAC/C,MAAI,OAAQ,QAAO;AAEnB,QAAM,UAAyB;AAAA,IAC7B,YAAY,OAAO;AAAA,IACnB,MAAM,OAAe,KAAoC;AACvD,YAAM,IAAI;AAAA,QACR,QAAQ,QAAQ,OAAO,MAAM,OAAO,GAAG,CAAC;AAAA,MAC1C;AACA,cAAQ,yBAAyB;AAAA,QAC/B;AAAA,QACA,KAAK,OAAO,OAAO;AAAA,QACnB,gBAAgB,OAAQ,EAA0B,UAAU;AAAA,MAC9D,CAAC;AACD,aAAO;AAAA,IACT;AAAA,
EACF;AAEA,wBAAsB,IAAI,QAAQ,OAAO;AACzC,SAAO;AACT;AASO,SAAS,eAAe,MAA2B;AACxD,UAAQ,yBAAyB,EAAE,MAAM,KAAK,KAAK,CAAC;AACpD,SAAO;AAAA,IACL,YAAY,KAAK;AAAA,IACjB,MAAM,OAAe,KAAoC;AACvD,YAAM,OAAO,KAAK,MAAM,OAAO,GAAG;AAGlC,YAAM,IAAI,qBAAqB,KAAK,YAAY,CAAC;AACjD,cAAQ,wBAAwB;AAAA,QAC9B;AAAA,QACA,KAAK,OAAO,KAAK;AAAA,QACjB,gBAAgB,OAAQ,EAA0B,UAAU;AAAA,MAC9D,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAKO,SAAS,iBAAiB,QAAoC;AACnE,UAAQ,2BAA2B,EAAE,YAAY,OAAO,WAAW,CAAC;AACpE,SAAO;AAAA,IACL,YAAY,OAAO;AAAA,IACnB,MAAM,OAAe,KAAoC;AACvD,YAAM,IAAI;AAAA,QACR,QAAQ,QAAQ,OAAO,MAAM,OAAO,GAAG,CAAC;AAAA,MAC1C;AACA,cAAQ,0BAA0B;AAAA,QAChC;AAAA,QACA,KAAK,OAAO,OAAO;AAAA,QACnB,gBAAgB,OAAQ,EAA0B,UAAU;AAAA,MAC9D,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAKA,eAAsB,cAAc,KAAqC;AACvE,UAAQ,uBAAuB,EAAE,IAAI,CAAC;AACtC,QAAM,WAAW,UAAM,qCAAmB,EAAE,IAAI,CAAC;AACjD,QAAM,UAAyB;AAAA,IAC7B,YAAY,SAAS;AAAA,IACrB,MAAM,OAAe,KAAoC;AACvD,YAAM,IAAI;AAAA,QACR,QAAQ,QAAQ,SAAS,MAAM,OAAO,GAAG,CAAC;AAAA,MAC5C;AACA,cAAQ,uBAAuB;AAAA,QAC7B;AAAA,QACA,KAAK,OAAO,SAAS;AAAA,QACrB,gBAAgB,OAAQ,EAA0B,UAAU;AAAA,MAC9D,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AACA,UAAQ,uBAAuB,EAAE,YAAY,QAAQ,WAAW,CAAC;AACjE,SAAO;AACT;AAIA,IAAM,mBAAmB,oBAAI,QAAoC;AACjE,IAAM,mBAAmB,oBAAI,QAA8C;AAE3E,SAAS,eAAe,QAAoC;AAC1D,QAAM,aAAa,gBAAgB,MAAM;AACzC,MAAI,MAAM,iBAAiB,IAAI,UAAU;AACzC,MAAI,CAAC,KAAK;AAGR,UAAM;AACN,qBAAiB,IAAI,YAAY,GAAG;AAAA,EACtC;AACA,SAAO;AACT;AAEA,SAAS,eAAe,QAA8C;AACpE,QAAM,aAAa,gBAAgB,MAAM;AACzC,MAAI,UAAU,iBAAiB,IAAI,UAAU;AAC7C,MAAI,CAAC,SAAS;AACZ,UAAM,WAAW,eAAe,UAAU;AAC1C,kBAAU,uCAAqB,QAAQ;AACvC,qBAAiB,IAAI,YAAY,OAAO;AAAA,EAC1C;AACA,SAAO;AACT;AAEA,SAAS,kBAAkB,KAAoC;AAC7D,QAAM,UAA2B,IAAI,OAAO,MAAM,CAAC,EAAE,IAAI,CAAC,SAAS;AAAA,IACjE,MAAM,IAAI;AAAA,IACV,MAAM,IAAI,QAAQ;AAAA,IAClB,UAAU,IAAI,oBAAoB;AAAA,EACpC,EAAE;AAGF,QAAM,kBAA4B,CAAC,CAAC;AACpC,aAAW,MAAM,IAAI,YAAY;AAC/B,oBAAgB,KAAK,gBAAgB,gBAAgB,SAAS,CAAC,IAAI,OAAO,GAAG,QAAQ,CAAC;AAAA,EACxF;AAEA,SAAO;AAAA,IACL,UAAU,OAAO,IAAI,QAAQ;AAAA,IAC7B;AAAA,IACA,WAAW,IAAI,WAAW;AAAA,IAC1B;AAAA,IACA,WAAW,IAAI,cAAc;AAAA,EAC/B;A
ACF;AAQA,eAAsB,oBACpB,QAC0B;AAC1B,QAAM,MAAM,MAAM,eAAe,MAAM;AACvC,SAAO,kBAAkB,GAAG;AAC9B;AAMA,eAAsB,gBACpB,QACA,UAAuB,CAAC,GACF;AACtB,UAAQ,yBAAyB;AAAA,IAC/B,QAAQ,QAAQ,UAAU;AAAA,IAC1B,OAAO,QAAQ,SAAS;AAAA,IACxB,SAAS,QAAQ,SAAS,UAAU;AAAA,EACtC,CAAC;AACD,QAAM,WAAW,eAAe,MAAM;AACtC,QAAM,cAAc,MAAM,eAAe,MAAM;AAC/C,QAAM,WAAW,kBAAkB,WAAW;AAE9C,QAAM,OAAqB,CAAC;AAE5B,MAAI;AACF,cAAM,8BAAY;AAAA,MAChB,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS,QAAQ;AAAA,MACjB,UAAU,QAAQ,UAAU;AAAA,MAC5B,QACE,QAAQ,UAAU,UACb,QAAQ,UAAU,KAAK,QAAQ,QAChC;AAAA,MACN,YAAY,CAAC,SAAsB;AACjC,cAAM,cACJ,QAAQ,WAAW,SAAS,QAAQ,IAAI,CAAC,MAAM,EAAE,IAAI;AACvD,mBAAW,OAAO,MAAM;AACtB,gBAAM,MAAkB,CAAC;AACzB,sBAAY,QAAQ,CAAC,MAAM,MAAM;AAC/B,gBAAI,IAAI,IAAI,IAAI,CAAC;AAAA,UACnB,CAAC;AACD,eAAK,KAAK,GAAG;AAAA,QACf;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH,SAAS,OAAO;AACd,cAAU,yBAAyB,KAAK;AACxC,UAAM;AAAA,EACR;AAEA,SAAO,EAAE,UAAU,KAAK;AAC1B;AAiBA,eAAsB,iBACpB,QACA,aACA,aACiC;AACjC,QAAM,WAAW,eAAe,MAAM;AACtC,QAAM,cAAc,MAAM,eAAe,MAAM;AAE/C,QAAM,SAAS,oBAAI,IAAuB;AAC1C,aAAW,QAAQ,aAAa;AAC9B,WAAO,IAAI,MAAM,CAAC,CAAC;AAAA,EACrB;AAIA,MAAI,YAAY;AAChB,aAAW,MAAM,YAAY,YAAY;AAEvC,QAAI,cAAc,EAAG,QAAO;AAE5B,UAAM,SAAS,OAAO,GAAG,QAAQ;AAEjC,cAAM,8BAAY;AAAA,MAChB,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS;AAAA,MACT,UAAU;AAAA,MACV,QAAQ,YAAY;AAAA,MACpB,YAAY,CAAC,SAAsB;AACjC,iBAAS,SAAS,GAAG,SAAS,KAAK,QAAQ,UAAU;AACnD,gBAAM,MAAM,KAAK,MAAM;AACvB,mBAAS,SAAS,GAAG,SAAS,YAAY,QAAQ,UAAU;AAC1D,mBAAO,IAAI,YAAY,MAAM,CAAC,EAAG,KAAK,IAAI,MAAM,CAAC;AAAA,UACnD;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,iBAAa;AAGb,UAAM,IAAI,QAAc,CAAC,YAAY,WAAW,SAAS,CAAC,CAAC;AAAA,EAC7D;AAEA,SAAO;AACT;AAMA,eAAsB,kBACpB,QACA,YACA,SACuB;AACvB,MAAI,WAAW,WAAW,EAAG,QAAO,CAAC;AAErC,QAAM,WAAW,eAAe,MAAM;AACtC,QAAM,cAAc,MAAM,eAAe,MAAM;AAC/C,QAAM,WAAW,kBAAkB,WAAW;AAC9C,QAAM,cAAc,WAAW,SAAS,QAAQ,IAAI,CAAC,MAAM,EAAE,IAAI;AAGjE,QAAM,SAAS,WAAW,IAAI,CAAC,KAAK,SAAS,EAAE,KAAK,IAAI,EAAE;AAC1D,SAAO,KAAK,CAAC,GAAG,MAAM,EAAE,MAAM,EAAE,GAAG;AAGnC,QAAM,SAAgE,CAAC;AACvE,MAAI,aAAa,OAAO,CAAC,EAAE;AAC3B,MAAI,WAAW,OAAO,CAAC,EAAE,MAAM;AAC/B,MAA
I,YAAY,CAAC,OAAO,CAAC,EAAE,GAAG;AAE9B,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,QAAI,OAAO,CAAC,EAAE,OAAO,WAAW,IAAI;AAElC,iBAAW,OAAO,CAAC,EAAE,MAAM;AAC3B,gBAAU,KAAK,OAAO,CAAC,EAAE,GAAG;AAAA,IAC9B,OAAO;AACL,aAAO,KAAK,EAAE,OAAO,YAAY,KAAK,UAAU,UAAU,CAAC;AAC3D,mBAAa,OAAO,CAAC,EAAE;AACvB,iBAAW,OAAO,CAAC,EAAE,MAAM;AAC3B,kBAAY,CAAC,OAAO,CAAC,EAAE,GAAG;AAAA,IAC5B;AAAA,EACF;AACA,SAAO,KAAK,EAAE,OAAO,YAAY,KAAK,UAAU,UAAU,CAAC;AAE3D,QAAM,SAAuB,IAAI,MAAM,WAAW,MAAM;AAExD,aAAW,SAAS,QAAQ;AAC1B,UAAM,YAA0B,CAAC;AAEjC,cAAM,8BAAY;AAAA,MAChB,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS;AAAA,MACT,UAAU,MAAM;AAAA,MAChB,QAAQ,MAAM;AAAA,MACd,YAAY,CAAC,SAAsB;AACjC,mBAAW,OAAO,MAAM;AACtB,gBAAM,MAAkB,CAAC;AACzB,sBAAY,QAAQ,CAAC,MAAM,MAAM;AAC/B,gBAAI,IAAI,IAAI,IAAI,CAAC;AAAA,UACnB,CAAC;AACD,oBAAU,KAAK,GAAG;AAAA,QACpB;AAAA,MACF;AAAA,IACF,CAAC;AAGD,QAAI,eAAe;AACnB,eAAW,KAAK,QAAQ;AACtB,UAAI,EAAE,OAAO,MAAM,SAAS,EAAE,MAAM,MAAM,KAAK;AAC7C,cAAM,WAAW,EAAE,MAAM,MAAM;AAC/B,YAAI,WAAW,UAAU,QAAQ;AAC/B,iBAAO,EAAE,GAAG,IAAI,UAAU,QAAQ;AAAA,QACpC;AACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAsB,kBAAkB,MAAkC;AACxE,SAAO,KAAK,YAAY;AAC1B;AAGA,eAAsB,oBAAoB,KAAmC;AAC3E,QAAM,WAAW,MAAM,MAAM,GAAG;AAChC,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,IAAI;AAAA,MACR,iCAAiC,SAAS,MAAM,IAAI,SAAS,UAAU;AAAA,IACzE;AAAA,EACF;AACA,SAAO,SAAS,YAAY;AAC9B;;;AC5VO,SAAS,sBAAqC;AACnD,SAAO,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,GAAG,QAAQ,CAAC,EAAE;AAC9C;AAQO,SAAS,eACd,QACA,OACA,cACU;AACV,QAAM,UAAU,MAAM,KAAK,EAAE,QAAQ,OAAO,OAAO,GAAG,CAAC,GAAG,MAAM,CAAC;AAEjE,UAAQ,KAAK,CAAC,GAAG,MAAM;AACrB,eAAW,QAAQ,OAAO;AACxB,YAAM,MAAM,aAAa,IAAI,KAAK,MAAM;AACxC,UAAI,CAAC,IAAK;AACV,YAAM,KAAK,IAAI,CAAC;AAChB,YAAM,KAAK,IAAI,CAAC;AAChB,YAAM,MAAM,cAAc,IAAI,EAAE;AAChC,UAAI,QAAQ,EAAG,QAAO,KAAK,cAAc,QAAQ,MAAM,CAAC;AAAA,IAC1D;AACA,WAAO,IAAI;AAAA,EACb,CAAC;AAED,SAAO;AACT;AAKO,SAAS,iBACd,WACA,SACA,cACiB;AACjB,MAAI,QAAQ,WAAW,EAAG,QAAO;AAEjC,QAAM,UAAoB,CAAC;AAC3B,WAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,QAAI,OAAO;AACX,eAAW,UAAU,SAAS;AAC5B,YAAM,MAAM,aAAa,IAAI,OAAO,MAAM;AAC1C,UAAI,CAAC,IAAK;AACV,UAAI,CAAC
,cAAc,IAAI,CAAC,GAAG,MAAM,GAAG;AAClC,eAAO;AACP;AAAA,MACF;AAAA,IACF;AACA,QAAI,KAAM,SAAQ,KAAK,CAAC;AAAA,EAC1B;AACA,SAAO;AACT;AAMO,SAAS,YACd,YACA,QACA,cACa;AACb,MAAI,OAAO,WAAW,EAAG,QAAO,CAAC;AAEjC,QAAM,aAAa,OAAO,CAAC;AAC3B,QAAM,MAAM,aAAa,IAAI,WAAW,MAAM;AAC9C,MAAI,CAAC,IAAK,QAAO,CAAC;AAIlB,QAAM,UAAU,oBAAI,IAAsB;AAC1C,QAAM,YAAY,oBAAI,IAAqB;AAE3C,WAAS,aAAa,GAAG,aAAa,WAAW,QAAQ,cAAc;AACrE,UAAM,YAAY,WAAW,UAAU;AACvC,UAAM,QAAQ,IAAI,SAAS;AAC3B,UAAM,MAAM,eAAe,KAAK;AAChC,QAAI,CAAC,QAAQ,IAAI,GAAG,GAAG;AACrB,cAAQ,IAAI,KAAK,CAAC,CAAC;AACnB,gBAAU,IAAI,KAAK,KAAK;AAAA,IAC1B;AACA,YAAQ,IAAI,GAAG,EAAG,KAAK,UAAU;AAAA,EACnC;AAGA,QAAM,aAAa,CAAC,GAAG,QAAQ,KAAK,CAAC,EAAE,KAAK,CAAC,GAAG,MAAM;AACpD,UAAM,KAAK,UAAU,IAAI,CAAC;AAC1B,UAAM,KAAK,UAAU,IAAI,CAAC;AAC1B,WAAO,cAAc,IAAI,EAAE;AAAA,EAC7B,CAAC;AAED,SAAO,WAAW,IAAI,CAAC,SAAS;AAAA,IAC9B,KAAK,UAAU,IAAI,GAAG;AAAA,IACtB,OAAO;AAAA,IACP,OAAO,QAAQ,IAAI,GAAG,EAAG;AAAA,IACzB,YAAY,QAAQ,IAAI,GAAG;AAAA,IAC3B,UAAU;AAAA,EACZ,EAAE;AACJ;AAMO,SAAS,oBACd,QACA,QAAgB,KACL;AACX,QAAM,OAAO,oBAAI,IAAY;AAC7B,QAAM,SAAoB,CAAC;AAE3B,aAAW,KAAK,QAAQ;AACtB,UAAM,MAAM,eAAe,CAAC;AAC5B,QAAI,CAAC,KAAK,IAAI,GAAG,GAAG;AAClB,WAAK,IAAI,GAAG;AACZ,aAAO,KAAK,CAAC;AACb,UAAI,OAAO,UAAU,MAAO;AAAA,IAC9B;AAAA,EACF;AAEA,SAAO,KAAK,CAAC,GAAG,MAAM,cAAc,GAAG,CAAC,CAAC;AACzC,SAAO;AACT;AAIO,SAAS,cAAc,GAAY,GAAoB;AAE5D,MAAI,MAAM,QAAQ,MAAM,OAAW,QAAO,MAAM,QAAQ,MAAM,SAAY,IAAI;AAC9E,MAAI,MAAM,QAAQ,MAAM,OAAW,QAAO;AAE1C,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,SAAU,QAAO,IAAI;AAC/D,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,SAAU,QAAO,IAAI,IAAI,KAAK,IAAI,IAAI,IAAI;AACpF,MAAI,OAAO,MAAM,aAAa,OAAO,MAAM,UAAW,QAAO,OAAO,CAAC,IAAI,OAAO,CAAC;AAGjF,MAAI,aAAa,QAAQ,aAAa,KAAM,QAAO,EAAE,QAAQ,IAAI,EAAE,QAAQ;AAG3E,SAAO,OAAO,CAAC,EAAE,cAAc,OAAO,CAAC,CAAC;AAC1C;AAEO,SAAS,cAAc,OAAgB,QAA4B;AACxE,UAAQ,OAAO,UAAU;AAAA,IACvB,KAAK;AACH,aAAO,UAAU,QAAQ,UAAU;AAAA,IACrC,KAAK;AACH,aAAO,UAAU,QAAQ,UAAU;AAAA,IACrC,KAAK;AACH,aAAO,UAAU,OAAO;AAAA,IAC1B,KAAK;AACH,aAAO,UAAU,OAAO;AAAA,IAC1B,KAAK;AACH,aAAO,cAAc,OAAO,OAAO,KAAK,IAAI;AAAA,IAC9C,KAAK;AACH,aAAO,cAAc,OAAO,OAAO,K
AAK,KAAK;AAAA,IAC/C,KAAK;AACH,aAAO,cAAc,OAAO,OAAO,KAAK,IAAI;AAAA,IAC9C,KAAK;AACH,aAAO,cAAc,OAAO,OAAO,KAAK,KAAK;AAAA,IAC/C,KAAK;AACH,aAAO,OAAO,KAAK,EAAE,YAAY,EAAE,SAAS,OAAO,OAAO,KAAK,EAAE,YAAY,CAAC;AAAA,IAChF,KAAK;AACH,aAAO,CAAC,OAAO,KAAK,EAAE,YAAY,EAAE,SAAS,OAAO,OAAO,KAAK,EAAE,YAAY,CAAC;AAAA,IACjF,KAAK;AACH,UAAI,MAAM,QAAQ,OAAO,KAAK,GAAG;AAC/B,cAAM,MAAM,IAAI,IAAI,OAAO,MAAM,IAAI,MAAM,CAAC;AAC5C,eAAO,IAAI,IAAI,OAAO,KAAK,CAAC;AAAA,MAC9B;AACA,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;AAEO,SAAS,eAAe,OAAwB;AACrD,MAAI,UAAU,QAAQ,UAAU,OAAW,QAAO;AAClD,MAAI,iBAAiB,KAAM,QAAO,MAAM,YAAY;AACpD,SAAO,OAAO,KAAK;AACrB;;;ACjPO,SAAS,YAA2B;AACzC,SAAO,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,CAAC,CAAC;AACxD;AAMA,eAAsB,UACpB,KACA,WACA,aACmB;AACnB,QAAM,QAAQ;AACd,QAAM,SAAqB,CAAC;AAE5B,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK,OAAO;AAC1C,QAAI,YAAY,EAAG,QAAO;AAC1B,UAAM,QAAQ,IAAI,MAAM,GAAG,IAAI,KAAK;AACpC,UAAM,KAAK,SAAS;AACpB,WAAO,KAAK,KAAK;AACjB,UAAM,UAAU;AAAA,EAClB;AAEA,SAAO,OAAO,SAAS,GAAG;AACxB,QAAI,YAAY,EAAG,QAAO;AAC1B,UAAM,SAAqB,CAAC;AAC5B,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK,GAAG;AACzC,UAAI,IAAI,IAAI,OAAO,QAAQ;AACzB,eAAO,KAAK,YAAY,OAAO,CAAC,GAAG,OAAO,IAAI,CAAC,GAAG,SAAS,CAAC;AAAA,MAC9D,OAAO;AACL,eAAO,KAAK,OAAO,CAAC,CAAC;AAAA,MACvB;AAAA,IACF;AACA,WAAO,SAAS;AAChB,WAAO,KAAK,GAAG,MAAM;AACrB,UAAM,UAAU;AAAA,EAClB;AAEA,SAAO,OAAO,CAAC,KAAK,CAAC;AACvB;AAGA,SAAS,YACP,GACA,GACA,WACU;AACV,QAAM,SAAS,IAAI,MAAM,EAAE,SAAS,EAAE,MAAM;AAC5C,MAAI,IAAI,GAAG,IAAI,GAAG,IAAI;AACtB,SAAO,IAAI,EAAE,UAAU,IAAI,EAAE,QAAQ;AACnC,QAAI,UAAU,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,KAAK,EAAG,QAAO,GAAG,IAAI,EAAE,GAAG;AAAA,QAC9C,QAAO,GAAG,IAAI,EAAE,GAAG;AAAA,EAC1B;AACA,SAAO,IAAI,EAAE,OAAQ,QAAO,GAAG,IAAI,EAAE,GAAG;AACxC,SAAO,IAAI,EAAE,OAAQ,QAAO,GAAG,IAAI,EAAE,GAAG;AACxC,SAAO;AACT;AAMA,eAAsB,mBACpB,OACA,gBACA,IACe;AACf,MAAI,QAAQ;AACZ,QAAM,UAAU,MAAM;AAAA,IACpB,EAAE,QAAQ,KAAK,IAAI,gBAAgB,MAAM,MAAM,EAAE;AAAA,IACjD,YAAY;AACV,aAAO,QAAQ,MAAM,QAAQ;AAC3B,cAAM,IAAI;AACV,cAAM,GAAG,MAAM,CAAC,CAAC;AAAA,MACnB;AAAA,IACF;AAAA,EACF;AACA,QAAM,QAAQ,IAAI,OAAO;AAC3B;","names":[]}
|
package/dist/index.d.cts
CHANGED
|
@@ -145,5 +145,25 @@ declare function buildGroups(rowIndices: number[], groups: GroupDef[], columnVal
|
|
|
145
145
|
* Returns sorted unique values.
|
|
146
146
|
*/
|
|
147
147
|
declare function collectUniqueValues(values: unknown[], limit?: number): unknown[];
|
|
148
|
+
declare function compareValues(a: unknown, b: unknown): number;
|
|
149
|
+
declare function matchesFilter(value: unknown, filter: FilterDef): boolean;
|
|
150
|
+
declare function formatGroupKey(value: unknown): string;
|
|
148
151
|
|
|
149
|
-
|
|
152
|
+
/**
|
|
153
|
+
* Shared async utilities for pipeline computation.
|
|
154
|
+
* Used by framework wrappers (React, Vue, Angular) for non-blocking sort/read operations.
|
|
155
|
+
*/
|
|
156
|
+
/** Yield control to the browser to prevent UI freezes. */
|
|
157
|
+
declare function yieldToUI(): Promise<void>;
|
|
158
|
+
/**
|
|
159
|
+
* Async merge sort that yields to the UI periodically.
|
|
160
|
+
* Prevents browser freeze when sorting large arrays (100K+ elements).
|
|
161
|
+
*/
|
|
162
|
+
declare function asyncSort(arr: number[], compareFn: (a: number, b: number) => number, isCancelled: () => boolean): Promise<number[]>;
|
|
163
|
+
/**
|
|
164
|
+
* Run async tasks with limited concurrency.
|
|
165
|
+
* Prevents browser freeze from too many concurrent reads.
|
|
166
|
+
*/
|
|
167
|
+
declare function runWithConcurrency<T>(items: T[], maxConcurrency: number, fn: (item: T) => Promise<void>): Promise<void>;
|
|
168
|
+
|
|
169
|
+
export { type FilterDef, type FilterOperator, type GroupDef, type GroupNode, type ParquetColumn, type ParquetData, type ParquetMetadata, type ParquetRow, type ParquetSource, type PipelineState, type ReadOptions, type SortDef, type SortDirection, asyncSort, buildFilterIndex, buildGroups, buildSortIndex, collectUniqueValues, compareValues, createEmptyPipeline, fetchParquetFromUrl, fileToArrayBuffer, formatGroupKey, matchesFilter, readColumnValues, readParquetData, readParquetMetadata, readRowsByIndices, runWithConcurrency, sourceFromBuffer, sourceFromFile, sourceFromUrl, yieldToUI };
|
package/dist/index.d.ts
CHANGED
|
@@ -145,5 +145,25 @@ declare function buildGroups(rowIndices: number[], groups: GroupDef[], columnVal
|
|
|
145
145
|
* Returns sorted unique values.
|
|
146
146
|
*/
|
|
147
147
|
declare function collectUniqueValues(values: unknown[], limit?: number): unknown[];
|
|
148
|
+
declare function compareValues(a: unknown, b: unknown): number;
|
|
149
|
+
declare function matchesFilter(value: unknown, filter: FilterDef): boolean;
|
|
150
|
+
declare function formatGroupKey(value: unknown): string;
|
|
148
151
|
|
|
149
|
-
|
|
152
|
+
/**
|
|
153
|
+
* Shared async utilities for pipeline computation.
|
|
154
|
+
* Used by framework wrappers (React, Vue, Angular) for non-blocking sort/read operations.
|
|
155
|
+
*/
|
|
156
|
+
/** Yield control to the browser to prevent UI freezes. */
|
|
157
|
+
declare function yieldToUI(): Promise<void>;
|
|
158
|
+
/**
|
|
159
|
+
* Async merge sort that yields to the UI periodically.
|
|
160
|
+
* Prevents browser freeze when sorting large arrays (100K+ elements).
|
|
161
|
+
*/
|
|
162
|
+
declare function asyncSort(arr: number[], compareFn: (a: number, b: number) => number, isCancelled: () => boolean): Promise<number[]>;
|
|
163
|
+
/**
|
|
164
|
+
* Run async tasks with limited concurrency.
|
|
165
|
+
* Prevents browser freeze from too many concurrent reads.
|
|
166
|
+
*/
|
|
167
|
+
declare function runWithConcurrency<T>(items: T[], maxConcurrency: number, fn: (item: T) => Promise<void>): Promise<void>;
|
|
168
|
+
|
|
169
|
+
export { type FilterDef, type FilterOperator, type GroupDef, type GroupNode, type ParquetColumn, type ParquetData, type ParquetMetadata, type ParquetRow, type ParquetSource, type PipelineState, type ReadOptions, type SortDef, type SortDirection, asyncSort, buildFilterIndex, buildGroups, buildSortIndex, collectUniqueValues, compareValues, createEmptyPipeline, fetchParquetFromUrl, fileToArrayBuffer, formatGroupKey, matchesFilter, readColumnValues, readParquetData, readParquetMetadata, readRowsByIndices, runWithConcurrency, sourceFromBuffer, sourceFromFile, sourceFromUrl, yieldToUI };
|
package/dist/index.js
CHANGED
|
@@ -403,20 +403,81 @@ function formatGroupKey(value) {
|
|
|
403
403
|
if (value instanceof Date) return value.toISOString();
|
|
404
404
|
return String(value);
|
|
405
405
|
}
|
|
406
|
+
|
|
407
|
+
// src/utils.ts
|
|
408
|
+
/**
 * Yield control back to the event loop so the browser can paint and
 * process input between chunks of heavy work.
 *
 * Uses a zero-delay macrotask (setTimeout) rather than a microtask so
 * rendering actually gets a turn before the caller resumes.
 *
 * @returns {Promise<void>} resolves on the next macrotask tick.
 */
function yieldToUI() {
  return new Promise((resolve) => {
    setTimeout(resolve, 0);
  });
}
|
|
411
|
+
/**
 * Sort `arr` without freezing the UI: carve the input into fixed-size
 * runs, sort each run, then merge runs pairwise — yielding to the event
 * loop (via yieldToUI) after every run sort and every merge pass.
 *
 * @param {number[]} arr - input array; never mutated (runs are slices).
 * @param {(a: number, b: number) => number} compareFn - standard comparator.
 * @param {() => boolean} isCancelled - polled between passes; when it
 *   returns true the original `arr` is returned unchanged.
 * @returns {Promise<number[]>} a new sorted array (or `arr` on cancel,
 *   or `[]` for empty input).
 */
async function asyncSort(arr, compareFn, isCancelled) {
  const RUN_SIZE = 5e4;
  // Phase 1: slice the input into RUN_SIZE runs and sort each one.
  let runs = [];
  for (let start = 0; start < arr.length; start += RUN_SIZE) {
    if (isCancelled()) return arr;
    const run = arr.slice(start, start + RUN_SIZE);
    run.sort(compareFn);
    runs.push(run);
    await yieldToUI();
  }
  // Phase 2: merge adjacent run pairs until a single run remains.
  while (runs.length > 1) {
    if (isCancelled()) return arr;
    const nextPass = [];
    for (let i = 0; i + 1 < runs.length; i += 2) {
      nextPass.push(mergeSorted(runs[i], runs[i + 1], compareFn));
    }
    // An odd trailing run carries over to the next pass untouched.
    if (runs.length % 2 === 1) {
      nextPass.push(runs[runs.length - 1]);
    }
    runs = nextPass;
    await yieldToUI();
  }
  return runs.length === 1 ? runs[0] : [];
}
|
|
437
|
+
/**
 * Stable merge of two already-sorted arrays into a new array.
 * Ties (compareFn === 0) take from `a` first, preserving stability.
 *
 * @param {number[]} a - sorted left input.
 * @param {number[]} b - sorted right input.
 * @param {(x: number, y: number) => number} compareFn - comparator.
 * @returns {number[]} new array of length a.length + b.length.
 */
function mergeSorted(a, b, compareFn) {
  const out = new Array(a.length + b.length);
  let ai = 0;
  let bi = 0;
  // Fill every output slot; pick from `a` when `b` is drained or when
  // a's head sorts at-or-before b's head (<= keeps the merge stable).
  for (let k = 0; k < out.length; k++) {
    const takeFromA =
      bi >= b.length || (ai < a.length && compareFn(a[ai], b[bi]) <= 0);
    out[k] = takeFromA ? a[ai++] : b[bi++];
  }
  return out;
}
|
|
448
|
+
/**
 * Run an async task for every item with at most `maxConcurrency` tasks
 * in flight at once (worker-pool pattern).
 *
 * Fix: the previous version sized the pool with
 * `Math.min(maxConcurrency, items.length)`, so a non-positive
 * `maxConcurrency` spawned zero workers and resolved WITHOUT processing
 * any item. The pool size is now clamped to at least 1, so every item
 * is always processed; valid inputs behave exactly as before.
 *
 * @template T
 * @param {T[]} items - items to process.
 * @param {number} maxConcurrency - max tasks in flight (clamped to >= 1).
 * @param {(item: T) => Promise<void>} fn - task run once per item.
 * @returns {Promise<void>} resolves when all tasks finish; rejects on
 *   the first task error (Promise.all semantics).
 */
async function runWithConcurrency(items, maxConcurrency, fn) {
  let index = 0;
  const workerCount = Math.max(1, Math.min(maxConcurrency, items.length));
  const workers = Array.from({ length: workerCount }, async () => {
    // `index++` is a single synchronous read-modify-write, and JS only
    // switches between workers at `await`, so no two workers claim the
    // same item.
    while (index < items.length) {
      const i = index++;
      await fn(items[i]);
    }
  });
  await Promise.all(workers);
}
|
|
406
461
|
export {
|
|
462
|
+
asyncSort,
|
|
407
463
|
buildFilterIndex,
|
|
408
464
|
buildGroups,
|
|
409
465
|
buildSortIndex,
|
|
410
466
|
collectUniqueValues,
|
|
467
|
+
compareValues,
|
|
411
468
|
createEmptyPipeline,
|
|
412
469
|
fetchParquetFromUrl,
|
|
413
470
|
fileToArrayBuffer,
|
|
471
|
+
formatGroupKey,
|
|
472
|
+
matchesFilter,
|
|
414
473
|
readColumnValues,
|
|
415
474
|
readParquetData,
|
|
416
475
|
readParquetMetadata,
|
|
417
476
|
readRowsByIndices,
|
|
477
|
+
runWithConcurrency,
|
|
418
478
|
sourceFromBuffer,
|
|
419
479
|
sourceFromFile,
|
|
420
|
-
sourceFromUrl
|
|
480
|
+
sourceFromUrl,
|
|
481
|
+
yieldToUI
|
|
421
482
|
};
|
|
422
483
|
//# sourceMappingURL=index.js.map
|
package/dist/index.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/reader.ts","../src/pipeline.ts"],"sourcesContent":["import {\n parquetMetadataAsync,\n parquetRead,\n asyncBufferFromUrl,\n} from \"hyparquet\";\nimport type { AsyncBuffer, FileMetaData } from \"hyparquet\";\nimport type {\n ParquetColumn,\n ParquetData,\n ParquetMetadata,\n ParquetRow,\n ParquetSource,\n ReadOptions,\n} from \"./types.js\";\n\nconst PARQUI_DEBUG_NS = \"[parqui/core]\";\n\nfunction coreDebugEnabled(): boolean {\n const g = globalThis as { __PARQUI_DEBUG?: boolean } | undefined;\n return g?.__PARQUI_DEBUG ?? false;\n}\n\nfunction coreLog(message: string, details?: unknown) {\n if (!coreDebugEnabled()) return;\n if (details !== undefined) {\n console.log(`${PARQUI_DEBUG_NS} ${message}`, details);\n } else {\n console.log(`${PARQUI_DEBUG_NS} ${message}`);\n }\n}\n\nfunction coreError(message: string, error: unknown) {\n if (!coreDebugEnabled()) return;\n console.error(`${PARQUI_DEBUG_NS} ${message}`, error);\n}\n\ntype SliceablePromise = Promise<ArrayBuffer> & {\n slice: (start: number, end?: number) => Promise<ArrayBuffer>;\n};\n\nfunction makeSliceablePromise(input: Promise<ArrayBuffer>): SliceablePromise {\n const promise = Promise.resolve(input) as SliceablePromise;\n promise.slice = (start: number, end?: number) =>\n promise.then((buffer) => buffer.slice(start, end));\n return promise;\n}\n\nconst normalizedSourceCache = new WeakMap<ParquetSource, ParquetSource>();\n\nfunction normalizeSource(source: ParquetSource): ParquetSource {\n const cached = normalizedSourceCache.get(source);\n if (cached) return cached;\n\n const wrapped: ParquetSource = {\n byteLength: source.byteLength,\n slice(start: number, end?: number): Promise<ArrayBuffer> {\n const p = makeSliceablePromise(\n Promise.resolve(source.slice(start, end)),\n );\n coreLog(\"normalizeSource:slice\", {\n start,\n end: end ?? 
source.byteLength,\n hasSliceMethod: typeof (p as { slice?: unknown }).slice === \"function\",\n });\n return p;\n },\n };\n\n normalizedSourceCache.set(source, wrapped);\n return wrapped;\n}\n\n// ── Source creation ──\n\n/**\n * Create a ParquetSource from a browser File object.\n * Does NOT load the file into memory — reads slices on demand.\n * Works with files of any size (including multi-GB).\n */\nexport function sourceFromFile(file: File): ParquetSource {\n coreLog(\"sourceFromFile:create\", { size: file.size });\n return {\n byteLength: file.size,\n slice(start: number, end?: number): Promise<ArrayBuffer> {\n const blob = file.slice(start, end);\n // hyparquet may branch on `instanceof Promise` and then call `.slice()` on non-promises.\n // This object works in both branches: it's awaitable AND has a `.slice()` method.\n const p = makeSliceablePromise(blob.arrayBuffer());\n coreLog(\"sourceFromFile:slice\", {\n start,\n end: end ?? file.size,\n hasSliceMethod: typeof (p as { slice?: unknown }).slice === \"function\",\n });\n return p;\n },\n };\n}\n\n/**\n * Create a ParquetSource from an ArrayBuffer (file already in memory).\n */\nexport function sourceFromBuffer(buffer: ArrayBuffer): ParquetSource {\n coreLog(\"sourceFromBuffer:create\", { byteLength: buffer.byteLength });\n return {\n byteLength: buffer.byteLength,\n slice(start: number, end?: number): Promise<ArrayBuffer> {\n const p = makeSliceablePromise(\n Promise.resolve(buffer.slice(start, end)),\n );\n coreLog(\"sourceFromBuffer:slice\", {\n start,\n end: end ?? 
buffer.byteLength,\n hasSliceMethod: typeof (p as { slice?: unknown }).slice === \"function\",\n });\n return p;\n },\n };\n}\n\n/**\n * Create a ParquetSource from a URL using HTTP Range requests.\n */\nexport async function sourceFromUrl(url: string): Promise<ParquetSource> {\n coreLog(\"sourceFromUrl:start\", { url });\n const asyncBuf = await asyncBufferFromUrl({ url });\n const wrapped: ParquetSource = {\n byteLength: asyncBuf.byteLength,\n slice(start: number, end?: number): Promise<ArrayBuffer> {\n const p = makeSliceablePromise(\n Promise.resolve(asyncBuf.slice(start, end)),\n );\n coreLog(\"sourceFromUrl:slice\", {\n start,\n end: end ?? asyncBuf.byteLength,\n hasSliceMethod: typeof (p as { slice?: unknown }).slice === \"function\",\n });\n return p;\n },\n };\n coreLog(\"sourceFromUrl:ready\", { byteLength: wrapped.byteLength });\n return wrapped;\n}\n\n// ── Internal caching ──\n\nconst asyncBufferCache = new WeakMap<ParquetSource, AsyncBuffer>();\nconst rawMetadataCache = new WeakMap<ParquetSource, Promise<FileMetaData>>();\n\nfunction getAsyncBuffer(source: ParquetSource): AsyncBuffer {\n const safeSource = normalizeSource(source);\n let buf = asyncBufferCache.get(safeSource);\n if (!buf) {\n // Use the direct source object. cachedAsyncBuffer can normalize slice()\n // into plain Promises, which breaks in some zone.js runtimes.\n buf = safeSource as AsyncBuffer;\n asyncBufferCache.set(safeSource, buf);\n }\n return buf;\n}\n\nfunction getRawMetadata(source: ParquetSource): Promise<FileMetaData> {\n const safeSource = normalizeSource(source);\n let promise = rawMetadataCache.get(safeSource);\n if (!promise) {\n const asyncBuf = getAsyncBuffer(safeSource);\n promise = parquetMetadataAsync(asyncBuf);\n rawMetadataCache.set(safeSource, promise);\n }\n return promise;\n}\n\nfunction toParquetMetadata(raw: FileMetaData): ParquetMetadata {\n const columns: ParquetColumn[] = raw.schema.slice(1).map((col) => ({\n name: col.name,\n type: col.type ?? 
\"UNKNOWN\",\n nullable: col.repetition_type !== \"REQUIRED\",\n }));\n\n // Compute row group boundaries: [0, rg0_rows, rg0+rg1_rows, ..., totalRows]\n const rowGroupOffsets: number[] = [0];\n for (const rg of raw.row_groups) {\n rowGroupOffsets.push(rowGroupOffsets[rowGroupOffsets.length - 1] + Number(rg.num_rows));\n }\n\n return {\n rowCount: Number(raw.num_rows),\n columns,\n rowGroups: raw.row_groups.length,\n rowGroupOffsets,\n createdBy: raw.created_by ?? undefined,\n };\n}\n\n// ── Reading ──\n\n/**\n * Read only the metadata from a parquet source.\n * Reads a small amount from the end of the file (footer).\n */\nexport async function readParquetMetadata(\n source: ParquetSource,\n): Promise<ParquetMetadata> {\n const raw = await getRawMetadata(source);\n return toParquetMetadata(raw);\n}\n\n/**\n * Read data (multiple columns, row range) from a parquet source.\n * Only reads the requested row range — does NOT load the entire file.\n */\nexport async function readParquetData(\n source: ParquetSource,\n options: ReadOptions = {},\n): Promise<ParquetData> {\n coreLog(\"readParquetData:start\", {\n offset: options.offset ?? 0,\n limit: options.limit ?? null,\n columns: options.columns?.length ?? \"all\",\n });\n const asyncBuf = getAsyncBuffer(source);\n const rawMetadata = await getRawMetadata(source);\n const metadata = toParquetMetadata(rawMetadata);\n\n const rows: ParquetRow[] = [];\n\n try {\n await parquetRead({\n file: asyncBuf,\n metadata: rawMetadata,\n columns: options.columns,\n rowStart: options.offset ?? 0,\n rowEnd:\n options.limit !== undefined\n ? (options.offset ?? 0) + options.limit\n : undefined,\n onComplete: (data: unknown[][]) => {\n const columnNames =\n options.columns ?? 
metadata.columns.map((c) => c.name);\n for (const row of data) {\n const obj: ParquetRow = {};\n columnNames.forEach((name, i) => {\n obj[name] = row[i];\n });\n rows.push(obj);\n }\n },\n });\n } catch (error) {\n coreError(\"readParquetData:error\", error);\n throw error;\n }\n\n return { metadata, rows };\n}\n\n/**\n * Read ALL values of specific columns from the parquet source.\n * Used for building sort indices, group keys, and filter unique values.\n * Only reads the requested columns — not all data.\n *\n * Reads row-group by row-group with yields between each to avoid\n * blocking the main thread. This keeps the UI responsive (scroll, etc.)\n * during long reads on large files.\n *\n * @param isCancelled — optional callback checked between row groups.\n * If it returns true, reading stops early and partial results are returned.\n * This allows quick cancellation when the user changes sort/filter mid-read.\n *\n * Returns a Map of column name → array of values (one per row).\n */\nexport async function readColumnValues(\n source: ParquetSource,\n columnNames: string[],\n isCancelled?: () => boolean,\n): Promise<Map<string, unknown[]>> {\n const asyncBuf = getAsyncBuffer(source);\n const rawMetadata = await getRawMetadata(source);\n\n const result = new Map<string, unknown[]>();\n for (const name of columnNames) {\n result.set(name, []);\n }\n\n // Read row-group by row-group to keep the main thread responsive.\n // Check isCancelled between each row group for early abort.\n let rowOffset = 0;\n for (const rg of rawMetadata.row_groups) {\n // Early exit if this compute was superseded\n if (isCancelled?.()) return result;\n\n const rgRows = Number(rg.num_rows);\n\n await parquetRead({\n file: asyncBuf,\n metadata: rawMetadata,\n columns: columnNames,\n rowStart: rowOffset,\n rowEnd: rowOffset + rgRows,\n onComplete: (data: unknown[][]) => {\n for (let rowIdx = 0; rowIdx < data.length; rowIdx++) {\n const row = data[rowIdx];\n for (let colIdx = 0; colIdx < 
columnNames.length; colIdx++) {\n result.get(columnNames[colIdx])!.push(row[colIdx]);\n }\n }\n },\n });\n\n rowOffset += rgRows;\n\n // Yield to browser between row groups so scroll events can fire\n await new Promise<void>((resolve) => setTimeout(resolve, 0));\n }\n\n return result;\n}\n\n/**\n * Read values of specific columns for specific row indices.\n * Useful for fetching display data for sorted/filtered views.\n */\nexport async function readRowsByIndices(\n source: ParquetSource,\n rowIndices: number[],\n columns?: string[],\n): Promise<ParquetRow[]> {\n if (rowIndices.length === 0) return [];\n\n const asyncBuf = getAsyncBuffer(source);\n const rawMetadata = await getRawMetadata(source);\n const metadata = toParquetMetadata(rawMetadata);\n const columnNames = columns ?? metadata.columns.map((c) => c.name);\n\n // Sort indices to read sequentially, then re-order\n const sorted = rowIndices.map((idx, pos) => ({ idx, pos }));\n sorted.sort((a, b) => a.idx - b.idx);\n\n // Find contiguous ranges to batch reads\n const ranges: { start: number; end: number; positions: number[] }[] = [];\n let rangeStart = sorted[0].idx;\n let rangeEnd = sorted[0].idx + 1;\n let positions = [sorted[0].pos];\n\n for (let i = 1; i < sorted.length; i++) {\n if (sorted[i].idx <= rangeEnd + 50) {\n // Allow small gaps to merge ranges\n rangeEnd = sorted[i].idx + 1;\n positions.push(sorted[i].pos);\n } else {\n ranges.push({ start: rangeStart, end: rangeEnd, positions });\n rangeStart = sorted[i].idx;\n rangeEnd = sorted[i].idx + 1;\n positions = [sorted[i].pos];\n }\n }\n ranges.push({ start: rangeStart, end: rangeEnd, positions });\n\n const result: ParquetRow[] = new Array(rowIndices.length);\n\n for (const range of ranges) {\n const rangeRows: ParquetRow[] = [];\n\n await parquetRead({\n file: asyncBuf,\n metadata: rawMetadata,\n columns: columnNames,\n rowStart: range.start,\n rowEnd: range.end,\n onComplete: (data: unknown[][]) => {\n for (const row of data) {\n const obj: 
ParquetRow = {};\n columnNames.forEach((name, i) => {\n obj[name] = row[i];\n });\n rangeRows.push(obj);\n }\n },\n });\n\n // Map range rows back to the correct positions\n let sortedPosIdx = 0;\n for (const s of sorted) {\n if (s.idx >= range.start && s.idx < range.end) {\n const localIdx = s.idx - range.start;\n if (localIdx < rangeRows.length) {\n result[s.pos] = rangeRows[localIdx];\n }\n sortedPosIdx++;\n }\n }\n }\n\n return result;\n}\n\n// ── Legacy helpers (kept for backwards compatibility) ──\n\n/** @deprecated Use sourceFromFile() instead */\nexport async function fileToArrayBuffer(file: File): Promise<ArrayBuffer> {\n return file.arrayBuffer();\n}\n\n/** @deprecated Use sourceFromUrl() instead */\nexport async function fetchParquetFromUrl(url: string): Promise<ArrayBuffer> {\n const response = await fetch(url);\n if (!response.ok) {\n throw new Error(\n `Failed to fetch parquet file: ${response.status} ${response.statusText}`,\n );\n }\n return response.arrayBuffer();\n}\n","/**\n * Data pipeline: sorting, filtering, grouping.\n * Operates on column-level indices — never loads all columns at once.\n */\n\n// ── Sort ──\n\nexport type SortDirection = \"asc\" | \"desc\";\n\nexport interface SortDef {\n column: string;\n direction: SortDirection;\n}\n\n// ── Filter ──\n\nexport type FilterOperator =\n | \"eq\"\n | \"neq\"\n | \"gt\"\n | \"gte\"\n | \"lt\"\n | \"lte\"\n | \"contains\"\n | \"not_contains\"\n | \"is_null\"\n | \"is_not_null\"\n | \"in\";\n\nexport interface FilterDef {\n column: string;\n operator: FilterOperator;\n /** The value(s) to compare against. For \"in\" — an array; for \"is_null\"/\"is_not_null\" — ignored. 
*/\n value?: unknown;\n}\n\n// ── Group ──\n\nexport interface GroupDef {\n column: string;\n}\n\nexport interface GroupNode {\n /** The value of the group key */\n key: unknown;\n /** Display label for this group */\n label: string;\n /** Number of rows in this group */\n count: number;\n /** Row indices belonging to this group (in the filtered/sorted order) */\n rowIndices: number[];\n /** Whether this group is expanded in the UI */\n expanded: boolean;\n}\n\n// ── Pipeline state ──\n\nexport interface PipelineState {\n sorts: SortDef[];\n filters: FilterDef[];\n groups: GroupDef[];\n}\n\nexport function createEmptyPipeline(): PipelineState {\n return { sorts: [], filters: [], groups: [] };\n}\n\n// ── Index building ──\n\n/**\n * Build a sorted index from column values.\n * Returns an array of original row indices in the sorted order.\n */\nexport function buildSortIndex(\n values: unknown[],\n sorts: SortDef[],\n columnValues: Map<string, unknown[]>,\n): number[] {\n const indices = Array.from({ length: values.length }, (_, i) => i);\n\n indices.sort((a, b) => {\n for (const sort of sorts) {\n const col = columnValues.get(sort.column);\n if (!col) continue;\n const va = col[a];\n const vb = col[b];\n const cmp = compareValues(va, vb);\n if (cmp !== 0) return sort.direction === \"asc\" ? 
cmp : -cmp;\n }\n return a - b; // stable: preserve original order for ties\n });\n\n return indices;\n}\n\n/**\n * Apply filters to produce a set of passing row indices.\n */\nexport function buildFilterIndex(\n totalRows: number,\n filters: FilterDef[],\n columnValues: Map<string, unknown[]>,\n): number[] | null {\n if (filters.length === 0) return null; // null = no filtering\n\n const passing: number[] = [];\n for (let i = 0; i < totalRows; i++) {\n let pass = true;\n for (const filter of filters) {\n const col = columnValues.get(filter.column);\n if (!col) continue;\n if (!matchesFilter(col[i], filter)) {\n pass = false;\n break;\n }\n }\n if (pass) passing.push(i);\n }\n return passing;\n}\n\n/**\n * Build group nodes from column values.\n * Supports multi-level grouping (first group column → second → etc.)\n */\nexport function buildGroups(\n rowIndices: number[],\n groups: GroupDef[],\n columnValues: Map<string, unknown[]>,\n): GroupNode[] {\n if (groups.length === 0) return [];\n\n const firstGroup = groups[0];\n const col = columnValues.get(firstGroup.column);\n if (!col) return [];\n\n // Bucket rows by group key — store DISPLAY indices (position in mapping),\n // not source indices, so getRow(displayIndex) works directly.\n const buckets = new Map<string, number[]>();\n const keyLabels = new Map<string, unknown>();\n\n for (let displayIdx = 0; displayIdx < rowIndices.length; displayIdx++) {\n const sourceIdx = rowIndices[displayIdx];\n const value = col[sourceIdx];\n const key = formatGroupKey(value);\n if (!buckets.has(key)) {\n buckets.set(key, []);\n keyLabels.set(key, value);\n }\n buckets.get(key)!.push(displayIdx);\n }\n\n // Sort group keys\n const sortedKeys = [...buckets.keys()].sort((a, b) => {\n const va = keyLabels.get(a);\n const vb = keyLabels.get(b);\n return compareValues(va, vb);\n });\n\n return sortedKeys.map((key) => ({\n key: keyLabels.get(key),\n label: key,\n count: buckets.get(key)!.length,\n rowIndices: buckets.get(key)!,\n 
expanded: false,\n }));\n}\n\n/**\n * Collect unique values from a column (up to a limit).\n * Returns sorted unique values.\n */\nexport function collectUniqueValues(\n values: unknown[],\n limit: number = 500,\n): unknown[] {\n const seen = new Set<string>();\n const unique: unknown[] = [];\n\n for (const v of values) {\n const key = formatGroupKey(v);\n if (!seen.has(key)) {\n seen.add(key);\n unique.push(v);\n if (unique.length >= limit) break;\n }\n }\n\n unique.sort((a, b) => compareValues(a, b));\n return unique;\n}\n\n// ── Helpers ──\n\nfunction compareValues(a: unknown, b: unknown): number {\n // nulls last\n if (a === null || a === undefined) return b === null || b === undefined ? 0 : 1;\n if (b === null || b === undefined) return -1;\n\n if (typeof a === \"number\" && typeof b === \"number\") return a - b;\n if (typeof a === \"bigint\" && typeof b === \"bigint\") return a < b ? -1 : a > b ? 1 : 0;\n if (typeof a === \"boolean\" && typeof b === \"boolean\") return Number(a) - Number(b);\n\n // Dates\n if (a instanceof Date && b instanceof Date) return a.getTime() - b.getTime();\n\n // Default: string comparison\n return String(a).localeCompare(String(b));\n}\n\nfunction matchesFilter(value: unknown, filter: FilterDef): boolean {\n switch (filter.operator) {\n case \"is_null\":\n return value === null || value === undefined;\n case \"is_not_null\":\n return value !== null && value !== undefined;\n case \"eq\":\n return value === filter.value;\n case \"neq\":\n return value !== filter.value;\n case \"gt\":\n return compareValues(value, filter.value) > 0;\n case \"gte\":\n return compareValues(value, filter.value) >= 0;\n case \"lt\":\n return compareValues(value, filter.value) < 0;\n case \"lte\":\n return compareValues(value, filter.value) <= 0;\n case \"contains\":\n return String(value).toLowerCase().includes(String(filter.value).toLowerCase());\n case \"not_contains\":\n return 
!String(value).toLowerCase().includes(String(filter.value).toLowerCase());\n case \"in\":\n if (Array.isArray(filter.value)) {\n const set = new Set(filter.value.map(String));\n return set.has(String(value));\n }\n return false;\n default:\n return true;\n }\n}\n\nfunction formatGroupKey(value: unknown): string {\n if (value === null || value === undefined) return \"(null)\";\n if (value instanceof Date) return value.toISOString();\n return String(value);\n}\n"],"mappings":";AAAA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAWP,IAAM,kBAAkB;AAExB,SAAS,mBAA4B;AACnC,QAAM,IAAI;AACV,SAAO,GAAG,kBAAkB;AAC9B;AAEA,SAAS,QAAQ,SAAiB,SAAmB;AACnD,MAAI,CAAC,iBAAiB,EAAG;AACzB,MAAI,YAAY,QAAW;AACzB,YAAQ,IAAI,GAAG,eAAe,IAAI,OAAO,IAAI,OAAO;AAAA,EACtD,OAAO;AACL,YAAQ,IAAI,GAAG,eAAe,IAAI,OAAO,EAAE;AAAA,EAC7C;AACF;AAEA,SAAS,UAAU,SAAiB,OAAgB;AAClD,MAAI,CAAC,iBAAiB,EAAG;AACzB,UAAQ,MAAM,GAAG,eAAe,IAAI,OAAO,IAAI,KAAK;AACtD;AAMA,SAAS,qBAAqB,OAA+C;AAC3E,QAAM,UAAU,QAAQ,QAAQ,KAAK;AACrC,UAAQ,QAAQ,CAAC,OAAe,QAC9B,QAAQ,KAAK,CAAC,WAAW,OAAO,MAAM,OAAO,GAAG,CAAC;AACnD,SAAO;AACT;AAEA,IAAM,wBAAwB,oBAAI,QAAsC;AAExE,SAAS,gBAAgB,QAAsC;AAC7D,QAAM,SAAS,sBAAsB,IAAI,MAAM;AAC/C,MAAI,OAAQ,QAAO;AAEnB,QAAM,UAAyB;AAAA,IAC7B,YAAY,OAAO;AAAA,IACnB,MAAM,OAAe,KAAoC;AACvD,YAAM,IAAI;AAAA,QACR,QAAQ,QAAQ,OAAO,MAAM,OAAO,GAAG,CAAC;AAAA,MAC1C;AACA,cAAQ,yBAAyB;AAAA,QAC/B;AAAA,QACA,KAAK,OAAO,OAAO;AAAA,QACnB,gBAAgB,OAAQ,EAA0B,UAAU;AAAA,MAC9D,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AAEA,wBAAsB,IAAI,QAAQ,OAAO;AACzC,SAAO;AACT;AASO,SAAS,eAAe,MAA2B;AACxD,UAAQ,yBAAyB,EAAE,MAAM,KAAK,KAAK,CAAC;AACpD,SAAO;AAAA,IACL,YAAY,KAAK;AAAA,IACjB,MAAM,OAAe,KAAoC;AACvD,YAAM,OAAO,KAAK,MAAM,OAAO,GAAG;AAGlC,YAAM,IAAI,qBAAqB,KAAK,YAAY,CAAC;AACjD,cAAQ,wBAAwB;AAAA,QAC9B;AAAA,QACA,KAAK,OAAO,KAAK;AAAA,QACjB,gBAAgB,OAAQ,EAA0B,UAAU;AAAA,MAC9D,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAKO,SAAS,iBAAiB,QAAoC;AACnE,UAAQ,2BAA2B,EAAE,YAAY,OAAO,WAAW,CAAC;AACpE,SAAO;AAAA,IACL,YAAY,OAAO;AAAA,IACnB,MAAM,OAAe,KAAoC;AACvD,YAAM,IAAI;AAAA,QACR,QAAQ,QAAQ,OAAO,MAAM,OAAO,GAAG,CAAC;AAAA,MAC
1C;AACA,cAAQ,0BAA0B;AAAA,QAChC;AAAA,QACA,KAAK,OAAO,OAAO;AAAA,QACnB,gBAAgB,OAAQ,EAA0B,UAAU;AAAA,MAC9D,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAKA,eAAsB,cAAc,KAAqC;AACvE,UAAQ,uBAAuB,EAAE,IAAI,CAAC;AACtC,QAAM,WAAW,MAAM,mBAAmB,EAAE,IAAI,CAAC;AACjD,QAAM,UAAyB;AAAA,IAC7B,YAAY,SAAS;AAAA,IACrB,MAAM,OAAe,KAAoC;AACvD,YAAM,IAAI;AAAA,QACR,QAAQ,QAAQ,SAAS,MAAM,OAAO,GAAG,CAAC;AAAA,MAC5C;AACA,cAAQ,uBAAuB;AAAA,QAC7B;AAAA,QACA,KAAK,OAAO,SAAS;AAAA,QACrB,gBAAgB,OAAQ,EAA0B,UAAU;AAAA,MAC9D,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AACA,UAAQ,uBAAuB,EAAE,YAAY,QAAQ,WAAW,CAAC;AACjE,SAAO;AACT;AAIA,IAAM,mBAAmB,oBAAI,QAAoC;AACjE,IAAM,mBAAmB,oBAAI,QAA8C;AAE3E,SAAS,eAAe,QAAoC;AAC1D,QAAM,aAAa,gBAAgB,MAAM;AACzC,MAAI,MAAM,iBAAiB,IAAI,UAAU;AACzC,MAAI,CAAC,KAAK;AAGR,UAAM;AACN,qBAAiB,IAAI,YAAY,GAAG;AAAA,EACtC;AACA,SAAO;AACT;AAEA,SAAS,eAAe,QAA8C;AACpE,QAAM,aAAa,gBAAgB,MAAM;AACzC,MAAI,UAAU,iBAAiB,IAAI,UAAU;AAC7C,MAAI,CAAC,SAAS;AACZ,UAAM,WAAW,eAAe,UAAU;AAC1C,cAAU,qBAAqB,QAAQ;AACvC,qBAAiB,IAAI,YAAY,OAAO;AAAA,EAC1C;AACA,SAAO;AACT;AAEA,SAAS,kBAAkB,KAAoC;AAC7D,QAAM,UAA2B,IAAI,OAAO,MAAM,CAAC,EAAE,IAAI,CAAC,SAAS;AAAA,IACjE,MAAM,IAAI;AAAA,IACV,MAAM,IAAI,QAAQ;AAAA,IAClB,UAAU,IAAI,oBAAoB;AAAA,EACpC,EAAE;AAGF,QAAM,kBAA4B,CAAC,CAAC;AACpC,aAAW,MAAM,IAAI,YAAY;AAC/B,oBAAgB,KAAK,gBAAgB,gBAAgB,SAAS,CAAC,IAAI,OAAO,GAAG,QAAQ,CAAC;AAAA,EACxF;AAEA,SAAO;AAAA,IACL,UAAU,OAAO,IAAI,QAAQ;AAAA,IAC7B;AAAA,IACA,WAAW,IAAI,WAAW;AAAA,IAC1B;AAAA,IACA,WAAW,IAAI,cAAc;AAAA,EAC/B;AACF;AAQA,eAAsB,oBACpB,QAC0B;AAC1B,QAAM,MAAM,MAAM,eAAe,MAAM;AACvC,SAAO,kBAAkB,GAAG;AAC9B;AAMA,eAAsB,gBACpB,QACA,UAAuB,CAAC,GACF;AACtB,UAAQ,yBAAyB;AAAA,IAC/B,QAAQ,QAAQ,UAAU;AAAA,IAC1B,OAAO,QAAQ,SAAS;AAAA,IACxB,SAAS,QAAQ,SAAS,UAAU;AAAA,EACtC,CAAC;AACD,QAAM,WAAW,eAAe,MAAM;AACtC,QAAM,cAAc,MAAM,eAAe,MAAM;AAC/C,QAAM,WAAW,kBAAkB,WAAW;AAE9C,QAAM,OAAqB,CAAC;AAE5B,MAAI;AACF,UAAM,YAAY;AAAA,MAChB,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS,QAAQ;AAAA,MACjB,UAAU,QAAQ,UAAU;AAAA,MAC5B,QACE,QAAQ,UAAU,UACb,QAAQ,UAAU,KAAK,QAAQ,QAChC;AAAA,MACN,YAAY,CAAC,SAAsB;AACjC,cAAM,cACJ,QAAQ,WAAW,SAAS,QAAQ,IAAI,
CAAC,MAAM,EAAE,IAAI;AACvD,mBAAW,OAAO,MAAM;AACtB,gBAAM,MAAkB,CAAC;AACzB,sBAAY,QAAQ,CAAC,MAAM,MAAM;AAC/B,gBAAI,IAAI,IAAI,IAAI,CAAC;AAAA,UACnB,CAAC;AACD,eAAK,KAAK,GAAG;AAAA,QACf;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH,SAAS,OAAO;AACd,cAAU,yBAAyB,KAAK;AACxC,UAAM;AAAA,EACR;AAEA,SAAO,EAAE,UAAU,KAAK;AAC1B;AAiBA,eAAsB,iBACpB,QACA,aACA,aACiC;AACjC,QAAM,WAAW,eAAe,MAAM;AACtC,QAAM,cAAc,MAAM,eAAe,MAAM;AAE/C,QAAM,SAAS,oBAAI,IAAuB;AAC1C,aAAW,QAAQ,aAAa;AAC9B,WAAO,IAAI,MAAM,CAAC,CAAC;AAAA,EACrB;AAIA,MAAI,YAAY;AAChB,aAAW,MAAM,YAAY,YAAY;AAEvC,QAAI,cAAc,EAAG,QAAO;AAE5B,UAAM,SAAS,OAAO,GAAG,QAAQ;AAEjC,UAAM,YAAY;AAAA,MAChB,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS;AAAA,MACT,UAAU;AAAA,MACV,QAAQ,YAAY;AAAA,MACpB,YAAY,CAAC,SAAsB;AACjC,iBAAS,SAAS,GAAG,SAAS,KAAK,QAAQ,UAAU;AACnD,gBAAM,MAAM,KAAK,MAAM;AACvB,mBAAS,SAAS,GAAG,SAAS,YAAY,QAAQ,UAAU;AAC1D,mBAAO,IAAI,YAAY,MAAM,CAAC,EAAG,KAAK,IAAI,MAAM,CAAC;AAAA,UACnD;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,iBAAa;AAGb,UAAM,IAAI,QAAc,CAAC,YAAY,WAAW,SAAS,CAAC,CAAC;AAAA,EAC7D;AAEA,SAAO;AACT;AAMA,eAAsB,kBACpB,QACA,YACA,SACuB;AACvB,MAAI,WAAW,WAAW,EAAG,QAAO,CAAC;AAErC,QAAM,WAAW,eAAe,MAAM;AACtC,QAAM,cAAc,MAAM,eAAe,MAAM;AAC/C,QAAM,WAAW,kBAAkB,WAAW;AAC9C,QAAM,cAAc,WAAW,SAAS,QAAQ,IAAI,CAAC,MAAM,EAAE,IAAI;AAGjE,QAAM,SAAS,WAAW,IAAI,CAAC,KAAK,SAAS,EAAE,KAAK,IAAI,EAAE;AAC1D,SAAO,KAAK,CAAC,GAAG,MAAM,EAAE,MAAM,EAAE,GAAG;AAGnC,QAAM,SAAgE,CAAC;AACvE,MAAI,aAAa,OAAO,CAAC,EAAE;AAC3B,MAAI,WAAW,OAAO,CAAC,EAAE,MAAM;AAC/B,MAAI,YAAY,CAAC,OAAO,CAAC,EAAE,GAAG;AAE9B,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,QAAI,OAAO,CAAC,EAAE,OAAO,WAAW,IAAI;AAElC,iBAAW,OAAO,CAAC,EAAE,MAAM;AAC3B,gBAAU,KAAK,OAAO,CAAC,EAAE,GAAG;AAAA,IAC9B,OAAO;AACL,aAAO,KAAK,EAAE,OAAO,YAAY,KAAK,UAAU,UAAU,CAAC;AAC3D,mBAAa,OAAO,CAAC,EAAE;AACvB,iBAAW,OAAO,CAAC,EAAE,MAAM;AAC3B,kBAAY,CAAC,OAAO,CAAC,EAAE,GAAG;AAAA,IAC5B;AAAA,EACF;AACA,SAAO,KAAK,EAAE,OAAO,YAAY,KAAK,UAAU,UAAU,CAAC;AAE3D,QAAM,SAAuB,IAAI,MAAM,WAAW,MAAM;AAExD,aAAW,SAAS,QAAQ;AAC1B,UAAM,YAA0B,CAAC;AAEjC,UAAM,YAAY;AAAA,MAChB,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS;AAAA,MACT,UAAU,MAAM;A
AAA,MAChB,QAAQ,MAAM;AAAA,MACd,YAAY,CAAC,SAAsB;AACjC,mBAAW,OAAO,MAAM;AACtB,gBAAM,MAAkB,CAAC;AACzB,sBAAY,QAAQ,CAAC,MAAM,MAAM;AAC/B,gBAAI,IAAI,IAAI,IAAI,CAAC;AAAA,UACnB,CAAC;AACD,oBAAU,KAAK,GAAG;AAAA,QACpB;AAAA,MACF;AAAA,IACF,CAAC;AAGD,QAAI,eAAe;AACnB,eAAW,KAAK,QAAQ;AACtB,UAAI,EAAE,OAAO,MAAM,SAAS,EAAE,MAAM,MAAM,KAAK;AAC7C,cAAM,WAAW,EAAE,MAAM,MAAM;AAC/B,YAAI,WAAW,UAAU,QAAQ;AAC/B,iBAAO,EAAE,GAAG,IAAI,UAAU,QAAQ;AAAA,QACpC;AACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAsB,kBAAkB,MAAkC;AACxE,SAAO,KAAK,YAAY;AAC1B;AAGA,eAAsB,oBAAoB,KAAmC;AAC3E,QAAM,WAAW,MAAM,MAAM,GAAG;AAChC,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,IAAI;AAAA,MACR,iCAAiC,SAAS,MAAM,IAAI,SAAS,UAAU;AAAA,IACzE;AAAA,EACF;AACA,SAAO,SAAS,YAAY;AAC9B;;;AC5VO,SAAS,sBAAqC;AACnD,SAAO,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,GAAG,QAAQ,CAAC,EAAE;AAC9C;AAQO,SAAS,eACd,QACA,OACA,cACU;AACV,QAAM,UAAU,MAAM,KAAK,EAAE,QAAQ,OAAO,OAAO,GAAG,CAAC,GAAG,MAAM,CAAC;AAEjE,UAAQ,KAAK,CAAC,GAAG,MAAM;AACrB,eAAW,QAAQ,OAAO;AACxB,YAAM,MAAM,aAAa,IAAI,KAAK,MAAM;AACxC,UAAI,CAAC,IAAK;AACV,YAAM,KAAK,IAAI,CAAC;AAChB,YAAM,KAAK,IAAI,CAAC;AAChB,YAAM,MAAM,cAAc,IAAI,EAAE;AAChC,UAAI,QAAQ,EAAG,QAAO,KAAK,cAAc,QAAQ,MAAM,CAAC;AAAA,IAC1D;AACA,WAAO,IAAI;AAAA,EACb,CAAC;AAED,SAAO;AACT;AAKO,SAAS,iBACd,WACA,SACA,cACiB;AACjB,MAAI,QAAQ,WAAW,EAAG,QAAO;AAEjC,QAAM,UAAoB,CAAC;AAC3B,WAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,QAAI,OAAO;AACX,eAAW,UAAU,SAAS;AAC5B,YAAM,MAAM,aAAa,IAAI,OAAO,MAAM;AAC1C,UAAI,CAAC,IAAK;AACV,UAAI,CAAC,cAAc,IAAI,CAAC,GAAG,MAAM,GAAG;AAClC,eAAO;AACP;AAAA,MACF;AAAA,IACF;AACA,QAAI,KAAM,SAAQ,KAAK,CAAC;AAAA,EAC1B;AACA,SAAO;AACT;AAMO,SAAS,YACd,YACA,QACA,cACa;AACb,MAAI,OAAO,WAAW,EAAG,QAAO,CAAC;AAEjC,QAAM,aAAa,OAAO,CAAC;AAC3B,QAAM,MAAM,aAAa,IAAI,WAAW,MAAM;AAC9C,MAAI,CAAC,IAAK,QAAO,CAAC;AAIlB,QAAM,UAAU,oBAAI,IAAsB;AAC1C,QAAM,YAAY,oBAAI,IAAqB;AAE3C,WAAS,aAAa,GAAG,aAAa,WAAW,QAAQ,cAAc;AACrE,UAAM,YAAY,WAAW,UAAU;AACvC,UAAM,QAAQ,IAAI,SAAS;AAC3B,UAAM,MAAM,eAAe,KAAK;AAChC,QAAI,CAAC,QAAQ,IAAI,GAAG,GAAG;AACrB,cAAQ,IAAI,KAAK,CAAC,CAAC;AACnB,gBAAU,IAAI,KAAK,KAAK;AAAA,IAC1B;AACA,YAAQ,IAAI,GAAG
,EAAG,KAAK,UAAU;AAAA,EACnC;AAGA,QAAM,aAAa,CAAC,GAAG,QAAQ,KAAK,CAAC,EAAE,KAAK,CAAC,GAAG,MAAM;AACpD,UAAM,KAAK,UAAU,IAAI,CAAC;AAC1B,UAAM,KAAK,UAAU,IAAI,CAAC;AAC1B,WAAO,cAAc,IAAI,EAAE;AAAA,EAC7B,CAAC;AAED,SAAO,WAAW,IAAI,CAAC,SAAS;AAAA,IAC9B,KAAK,UAAU,IAAI,GAAG;AAAA,IACtB,OAAO;AAAA,IACP,OAAO,QAAQ,IAAI,GAAG,EAAG;AAAA,IACzB,YAAY,QAAQ,IAAI,GAAG;AAAA,IAC3B,UAAU;AAAA,EACZ,EAAE;AACJ;AAMO,SAAS,oBACd,QACA,QAAgB,KACL;AACX,QAAM,OAAO,oBAAI,IAAY;AAC7B,QAAM,SAAoB,CAAC;AAE3B,aAAW,KAAK,QAAQ;AACtB,UAAM,MAAM,eAAe,CAAC;AAC5B,QAAI,CAAC,KAAK,IAAI,GAAG,GAAG;AAClB,WAAK,IAAI,GAAG;AACZ,aAAO,KAAK,CAAC;AACb,UAAI,OAAO,UAAU,MAAO;AAAA,IAC9B;AAAA,EACF;AAEA,SAAO,KAAK,CAAC,GAAG,MAAM,cAAc,GAAG,CAAC,CAAC;AACzC,SAAO;AACT;AAIA,SAAS,cAAc,GAAY,GAAoB;AAErD,MAAI,MAAM,QAAQ,MAAM,OAAW,QAAO,MAAM,QAAQ,MAAM,SAAY,IAAI;AAC9E,MAAI,MAAM,QAAQ,MAAM,OAAW,QAAO;AAE1C,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,SAAU,QAAO,IAAI;AAC/D,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,SAAU,QAAO,IAAI,IAAI,KAAK,IAAI,IAAI,IAAI;AACpF,MAAI,OAAO,MAAM,aAAa,OAAO,MAAM,UAAW,QAAO,OAAO,CAAC,IAAI,OAAO,CAAC;AAGjF,MAAI,aAAa,QAAQ,aAAa,KAAM,QAAO,EAAE,QAAQ,IAAI,EAAE,QAAQ;AAG3E,SAAO,OAAO,CAAC,EAAE,cAAc,OAAO,CAAC,CAAC;AAC1C;AAEA,SAAS,cAAc,OAAgB,QAA4B;AACjE,UAAQ,OAAO,UAAU;AAAA,IACvB,KAAK;AACH,aAAO,UAAU,QAAQ,UAAU;AAAA,IACrC,KAAK;AACH,aAAO,UAAU,QAAQ,UAAU;AAAA,IACrC,KAAK;AACH,aAAO,UAAU,OAAO;AAAA,IAC1B,KAAK;AACH,aAAO,UAAU,OAAO;AAAA,IAC1B,KAAK;AACH,aAAO,cAAc,OAAO,OAAO,KAAK,IAAI;AAAA,IAC9C,KAAK;AACH,aAAO,cAAc,OAAO,OAAO,KAAK,KAAK;AAAA,IAC/C,KAAK;AACH,aAAO,cAAc,OAAO,OAAO,KAAK,IAAI;AAAA,IAC9C,KAAK;AACH,aAAO,cAAc,OAAO,OAAO,KAAK,KAAK;AAAA,IAC/C,KAAK;AACH,aAAO,OAAO,KAAK,EAAE,YAAY,EAAE,SAAS,OAAO,OAAO,KAAK,EAAE,YAAY,CAAC;AAAA,IAChF,KAAK;AACH,aAAO,CAAC,OAAO,KAAK,EAAE,YAAY,EAAE,SAAS,OAAO,OAAO,KAAK,EAAE,YAAY,CAAC;AAAA,IACjF,KAAK;AACH,UAAI,MAAM,QAAQ,OAAO,KAAK,GAAG;AAC/B,cAAM,MAAM,IAAI,IAAI,OAAO,MAAM,IAAI,MAAM,CAAC;AAC5C,eAAO,IAAI,IAAI,OAAO,KAAK,CAAC;AAAA,MAC9B;AACA,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;AAEA,SAAS,eAAe,OAAwB;AAC9C,MAAI,UAAU,QAAQ,UAAU,OAAW,QAAO;AAClD,MAAI,iBAAiB,KAAM,QAAO,MAAM,YAAY;AAC
pD,SAAO,OAAO,KAAK;AACrB;","names":[]}
|
|
1
|
+
{"version":3,"sources":["../src/reader.ts","../src/pipeline.ts","../src/utils.ts"],"sourcesContent":["import {\n parquetMetadataAsync,\n parquetRead,\n asyncBufferFromUrl,\n} from \"hyparquet\";\nimport type { AsyncBuffer, FileMetaData } from \"hyparquet\";\nimport type {\n ParquetColumn,\n ParquetData,\n ParquetMetadata,\n ParquetRow,\n ParquetSource,\n ReadOptions,\n} from \"./types.js\";\n\nconst PARQUI_DEBUG_NS = \"[parqui/core]\";\n\nfunction coreDebugEnabled(): boolean {\n const g = globalThis as { __PARQUI_DEBUG?: boolean } | undefined;\n return g?.__PARQUI_DEBUG ?? false;\n}\n\nfunction coreLog(message: string, details?: unknown) {\n if (!coreDebugEnabled()) return;\n if (details !== undefined) {\n console.log(`${PARQUI_DEBUG_NS} ${message}`, details);\n } else {\n console.log(`${PARQUI_DEBUG_NS} ${message}`);\n }\n}\n\nfunction coreError(message: string, error: unknown) {\n if (!coreDebugEnabled()) return;\n console.error(`${PARQUI_DEBUG_NS} ${message}`, error);\n}\n\ntype SliceablePromise = Promise<ArrayBuffer> & {\n slice: (start: number, end?: number) => Promise<ArrayBuffer>;\n};\n\nfunction makeSliceablePromise(input: Promise<ArrayBuffer>): SliceablePromise {\n const promise = Promise.resolve(input) as SliceablePromise;\n promise.slice = (start: number, end?: number) =>\n promise.then((buffer) => buffer.slice(start, end));\n return promise;\n}\n\nconst normalizedSourceCache = new WeakMap<ParquetSource, ParquetSource>();\n\nfunction normalizeSource(source: ParquetSource): ParquetSource {\n const cached = normalizedSourceCache.get(source);\n if (cached) return cached;\n\n const wrapped: ParquetSource = {\n byteLength: source.byteLength,\n slice(start: number, end?: number): Promise<ArrayBuffer> {\n const p = makeSliceablePromise(\n Promise.resolve(source.slice(start, end)),\n );\n coreLog(\"normalizeSource:slice\", {\n start,\n end: end ?? 
source.byteLength,\n hasSliceMethod: typeof (p as { slice?: unknown }).slice === \"function\",\n });\n return p;\n },\n };\n\n normalizedSourceCache.set(source, wrapped);\n return wrapped;\n}\n\n// ── Source creation ──\n\n/**\n * Create a ParquetSource from a browser File object.\n * Does NOT load the file into memory — reads slices on demand.\n * Works with files of any size (including multi-GB).\n */\nexport function sourceFromFile(file: File): ParquetSource {\n coreLog(\"sourceFromFile:create\", { size: file.size });\n return {\n byteLength: file.size,\n slice(start: number, end?: number): Promise<ArrayBuffer> {\n const blob = file.slice(start, end);\n // hyparquet may branch on `instanceof Promise` and then call `.slice()` on non-promises.\n // This object works in both branches: it's awaitable AND has a `.slice()` method.\n const p = makeSliceablePromise(blob.arrayBuffer());\n coreLog(\"sourceFromFile:slice\", {\n start,\n end: end ?? file.size,\n hasSliceMethod: typeof (p as { slice?: unknown }).slice === \"function\",\n });\n return p;\n },\n };\n}\n\n/**\n * Create a ParquetSource from an ArrayBuffer (file already in memory).\n */\nexport function sourceFromBuffer(buffer: ArrayBuffer): ParquetSource {\n coreLog(\"sourceFromBuffer:create\", { byteLength: buffer.byteLength });\n return {\n byteLength: buffer.byteLength,\n slice(start: number, end?: number): Promise<ArrayBuffer> {\n const p = makeSliceablePromise(\n Promise.resolve(buffer.slice(start, end)),\n );\n coreLog(\"sourceFromBuffer:slice\", {\n start,\n end: end ?? 
buffer.byteLength,\n hasSliceMethod: typeof (p as { slice?: unknown }).slice === \"function\",\n });\n return p;\n },\n };\n}\n\n/**\n * Create a ParquetSource from a URL using HTTP Range requests.\n */\nexport async function sourceFromUrl(url: string): Promise<ParquetSource> {\n coreLog(\"sourceFromUrl:start\", { url });\n const asyncBuf = await asyncBufferFromUrl({ url });\n const wrapped: ParquetSource = {\n byteLength: asyncBuf.byteLength,\n slice(start: number, end?: number): Promise<ArrayBuffer> {\n const p = makeSliceablePromise(\n Promise.resolve(asyncBuf.slice(start, end)),\n );\n coreLog(\"sourceFromUrl:slice\", {\n start,\n end: end ?? asyncBuf.byteLength,\n hasSliceMethod: typeof (p as { slice?: unknown }).slice === \"function\",\n });\n return p;\n },\n };\n coreLog(\"sourceFromUrl:ready\", { byteLength: wrapped.byteLength });\n return wrapped;\n}\n\n// ── Internal caching ──\n\nconst asyncBufferCache = new WeakMap<ParquetSource, AsyncBuffer>();\nconst rawMetadataCache = new WeakMap<ParquetSource, Promise<FileMetaData>>();\n\nfunction getAsyncBuffer(source: ParquetSource): AsyncBuffer {\n const safeSource = normalizeSource(source);\n let buf = asyncBufferCache.get(safeSource);\n if (!buf) {\n // Use the direct source object. cachedAsyncBuffer can normalize slice()\n // into plain Promises, which breaks in some zone.js runtimes.\n buf = safeSource as AsyncBuffer;\n asyncBufferCache.set(safeSource, buf);\n }\n return buf;\n}\n\nfunction getRawMetadata(source: ParquetSource): Promise<FileMetaData> {\n const safeSource = normalizeSource(source);\n let promise = rawMetadataCache.get(safeSource);\n if (!promise) {\n const asyncBuf = getAsyncBuffer(safeSource);\n promise = parquetMetadataAsync(asyncBuf);\n rawMetadataCache.set(safeSource, promise);\n }\n return promise;\n}\n\nfunction toParquetMetadata(raw: FileMetaData): ParquetMetadata {\n const columns: ParquetColumn[] = raw.schema.slice(1).map((col) => ({\n name: col.name,\n type: col.type ?? 
\"UNKNOWN\",\n nullable: col.repetition_type !== \"REQUIRED\",\n }));\n\n // Compute row group boundaries: [0, rg0_rows, rg0+rg1_rows, ..., totalRows]\n const rowGroupOffsets: number[] = [0];\n for (const rg of raw.row_groups) {\n rowGroupOffsets.push(rowGroupOffsets[rowGroupOffsets.length - 1] + Number(rg.num_rows));\n }\n\n return {\n rowCount: Number(raw.num_rows),\n columns,\n rowGroups: raw.row_groups.length,\n rowGroupOffsets,\n createdBy: raw.created_by ?? undefined,\n };\n}\n\n// ── Reading ──\n\n/**\n * Read only the metadata from a parquet source.\n * Reads a small amount from the end of the file (footer).\n */\nexport async function readParquetMetadata(\n source: ParquetSource,\n): Promise<ParquetMetadata> {\n const raw = await getRawMetadata(source);\n return toParquetMetadata(raw);\n}\n\n/**\n * Read data (multiple columns, row range) from a parquet source.\n * Only reads the requested row range — does NOT load the entire file.\n */\nexport async function readParquetData(\n source: ParquetSource,\n options: ReadOptions = {},\n): Promise<ParquetData> {\n coreLog(\"readParquetData:start\", {\n offset: options.offset ?? 0,\n limit: options.limit ?? null,\n columns: options.columns?.length ?? \"all\",\n });\n const asyncBuf = getAsyncBuffer(source);\n const rawMetadata = await getRawMetadata(source);\n const metadata = toParquetMetadata(rawMetadata);\n\n const rows: ParquetRow[] = [];\n\n try {\n await parquetRead({\n file: asyncBuf,\n metadata: rawMetadata,\n columns: options.columns,\n rowStart: options.offset ?? 0,\n rowEnd:\n options.limit !== undefined\n ? (options.offset ?? 0) + options.limit\n : undefined,\n onComplete: (data: unknown[][]) => {\n const columnNames =\n options.columns ?? 
metadata.columns.map((c) => c.name);\n for (const row of data) {\n const obj: ParquetRow = {};\n columnNames.forEach((name, i) => {\n obj[name] = row[i];\n });\n rows.push(obj);\n }\n },\n });\n } catch (error) {\n coreError(\"readParquetData:error\", error);\n throw error;\n }\n\n return { metadata, rows };\n}\n\n/**\n * Read ALL values of specific columns from the parquet source.\n * Used for building sort indices, group keys, and filter unique values.\n * Only reads the requested columns — not all data.\n *\n * Reads row-group by row-group with yields between each to avoid\n * blocking the main thread. This keeps the UI responsive (scroll, etc.)\n * during long reads on large files.\n *\n * @param isCancelled — optional callback checked between row groups.\n * If it returns true, reading stops early and partial results are returned.\n * This allows quick cancellation when the user changes sort/filter mid-read.\n *\n * Returns a Map of column name → array of values (one per row).\n */\nexport async function readColumnValues(\n source: ParquetSource,\n columnNames: string[],\n isCancelled?: () => boolean,\n): Promise<Map<string, unknown[]>> {\n const asyncBuf = getAsyncBuffer(source);\n const rawMetadata = await getRawMetadata(source);\n\n const result = new Map<string, unknown[]>();\n for (const name of columnNames) {\n result.set(name, []);\n }\n\n // Read row-group by row-group to keep the main thread responsive.\n // Check isCancelled between each row group for early abort.\n let rowOffset = 0;\n for (const rg of rawMetadata.row_groups) {\n // Early exit if this compute was superseded\n if (isCancelled?.()) return result;\n\n const rgRows = Number(rg.num_rows);\n\n await parquetRead({\n file: asyncBuf,\n metadata: rawMetadata,\n columns: columnNames,\n rowStart: rowOffset,\n rowEnd: rowOffset + rgRows,\n onComplete: (data: unknown[][]) => {\n for (let rowIdx = 0; rowIdx < data.length; rowIdx++) {\n const row = data[rowIdx];\n for (let colIdx = 0; colIdx < 
columnNames.length; colIdx++) {\n result.get(columnNames[colIdx])!.push(row[colIdx]);\n }\n }\n },\n });\n\n rowOffset += rgRows;\n\n // Yield to browser between row groups so scroll events can fire\n await new Promise<void>((resolve) => setTimeout(resolve, 0));\n }\n\n return result;\n}\n\n/**\n * Read values of specific columns for specific row indices.\n * Useful for fetching display data for sorted/filtered views.\n */\nexport async function readRowsByIndices(\n source: ParquetSource,\n rowIndices: number[],\n columns?: string[],\n): Promise<ParquetRow[]> {\n if (rowIndices.length === 0) return [];\n\n const asyncBuf = getAsyncBuffer(source);\n const rawMetadata = await getRawMetadata(source);\n const metadata = toParquetMetadata(rawMetadata);\n const columnNames = columns ?? metadata.columns.map((c) => c.name);\n\n // Sort indices to read sequentially, then re-order\n const sorted = rowIndices.map((idx, pos) => ({ idx, pos }));\n sorted.sort((a, b) => a.idx - b.idx);\n\n // Find contiguous ranges to batch reads\n const ranges: { start: number; end: number; positions: number[] }[] = [];\n let rangeStart = sorted[0].idx;\n let rangeEnd = sorted[0].idx + 1;\n let positions = [sorted[0].pos];\n\n for (let i = 1; i < sorted.length; i++) {\n if (sorted[i].idx <= rangeEnd + 50) {\n // Allow small gaps to merge ranges\n rangeEnd = sorted[i].idx + 1;\n positions.push(sorted[i].pos);\n } else {\n ranges.push({ start: rangeStart, end: rangeEnd, positions });\n rangeStart = sorted[i].idx;\n rangeEnd = sorted[i].idx + 1;\n positions = [sorted[i].pos];\n }\n }\n ranges.push({ start: rangeStart, end: rangeEnd, positions });\n\n const result: ParquetRow[] = new Array(rowIndices.length);\n\n for (const range of ranges) {\n const rangeRows: ParquetRow[] = [];\n\n await parquetRead({\n file: asyncBuf,\n metadata: rawMetadata,\n columns: columnNames,\n rowStart: range.start,\n rowEnd: range.end,\n onComplete: (data: unknown[][]) => {\n for (const row of data) {\n const obj: 
ParquetRow = {};\n columnNames.forEach((name, i) => {\n obj[name] = row[i];\n });\n rangeRows.push(obj);\n }\n },\n });\n\n // Map range rows back to the correct positions\n let sortedPosIdx = 0;\n for (const s of sorted) {\n if (s.idx >= range.start && s.idx < range.end) {\n const localIdx = s.idx - range.start;\n if (localIdx < rangeRows.length) {\n result[s.pos] = rangeRows[localIdx];\n }\n sortedPosIdx++;\n }\n }\n }\n\n return result;\n}\n\n// ── Legacy helpers (kept for backwards compatibility) ──\n\n/** @deprecated Use sourceFromFile() instead */\nexport async function fileToArrayBuffer(file: File): Promise<ArrayBuffer> {\n return file.arrayBuffer();\n}\n\n/** @deprecated Use sourceFromUrl() instead */\nexport async function fetchParquetFromUrl(url: string): Promise<ArrayBuffer> {\n const response = await fetch(url);\n if (!response.ok) {\n throw new Error(\n `Failed to fetch parquet file: ${response.status} ${response.statusText}`,\n );\n }\n return response.arrayBuffer();\n}\n","/**\n * Data pipeline: sorting, filtering, grouping.\n * Operates on column-level indices — never loads all columns at once.\n */\n\n// ── Sort ──\n\nexport type SortDirection = \"asc\" | \"desc\";\n\nexport interface SortDef {\n column: string;\n direction: SortDirection;\n}\n\n// ── Filter ──\n\nexport type FilterOperator =\n | \"eq\"\n | \"neq\"\n | \"gt\"\n | \"gte\"\n | \"lt\"\n | \"lte\"\n | \"contains\"\n | \"not_contains\"\n | \"is_null\"\n | \"is_not_null\"\n | \"in\";\n\nexport interface FilterDef {\n column: string;\n operator: FilterOperator;\n /** The value(s) to compare against. For \"in\" — an array; for \"is_null\"/\"is_not_null\" — ignored. 
*/\n value?: unknown;\n}\n\n// ── Group ──\n\nexport interface GroupDef {\n column: string;\n}\n\nexport interface GroupNode {\n /** The value of the group key */\n key: unknown;\n /** Display label for this group */\n label: string;\n /** Number of rows in this group */\n count: number;\n /** Row indices belonging to this group (in the filtered/sorted order) */\n rowIndices: number[];\n /** Whether this group is expanded in the UI */\n expanded: boolean;\n}\n\n// ── Pipeline state ──\n\nexport interface PipelineState {\n sorts: SortDef[];\n filters: FilterDef[];\n groups: GroupDef[];\n}\n\nexport function createEmptyPipeline(): PipelineState {\n return { sorts: [], filters: [], groups: [] };\n}\n\n// ── Index building ──\n\n/**\n * Build a sorted index from column values.\n * Returns an array of original row indices in the sorted order.\n */\nexport function buildSortIndex(\n values: unknown[],\n sorts: SortDef[],\n columnValues: Map<string, unknown[]>,\n): number[] {\n const indices = Array.from({ length: values.length }, (_, i) => i);\n\n indices.sort((a, b) => {\n for (const sort of sorts) {\n const col = columnValues.get(sort.column);\n if (!col) continue;\n const va = col[a];\n const vb = col[b];\n const cmp = compareValues(va, vb);\n if (cmp !== 0) return sort.direction === \"asc\" ? 
cmp : -cmp;\n }\n return a - b; // stable: preserve original order for ties\n });\n\n return indices;\n}\n\n/**\n * Apply filters to produce a set of passing row indices.\n */\nexport function buildFilterIndex(\n totalRows: number,\n filters: FilterDef[],\n columnValues: Map<string, unknown[]>,\n): number[] | null {\n if (filters.length === 0) return null; // null = no filtering\n\n const passing: number[] = [];\n for (let i = 0; i < totalRows; i++) {\n let pass = true;\n for (const filter of filters) {\n const col = columnValues.get(filter.column);\n if (!col) continue;\n if (!matchesFilter(col[i], filter)) {\n pass = false;\n break;\n }\n }\n if (pass) passing.push(i);\n }\n return passing;\n}\n\n/**\n * Build group nodes from column values.\n * Supports multi-level grouping (first group column → second → etc.)\n */\nexport function buildGroups(\n rowIndices: number[],\n groups: GroupDef[],\n columnValues: Map<string, unknown[]>,\n): GroupNode[] {\n if (groups.length === 0) return [];\n\n const firstGroup = groups[0];\n const col = columnValues.get(firstGroup.column);\n if (!col) return [];\n\n // Bucket rows by group key — store DISPLAY indices (position in mapping),\n // not source indices, so getRow(displayIndex) works directly.\n const buckets = new Map<string, number[]>();\n const keyLabels = new Map<string, unknown>();\n\n for (let displayIdx = 0; displayIdx < rowIndices.length; displayIdx++) {\n const sourceIdx = rowIndices[displayIdx];\n const value = col[sourceIdx];\n const key = formatGroupKey(value);\n if (!buckets.has(key)) {\n buckets.set(key, []);\n keyLabels.set(key, value);\n }\n buckets.get(key)!.push(displayIdx);\n }\n\n // Sort group keys\n const sortedKeys = [...buckets.keys()].sort((a, b) => {\n const va = keyLabels.get(a);\n const vb = keyLabels.get(b);\n return compareValues(va, vb);\n });\n\n return sortedKeys.map((key) => ({\n key: keyLabels.get(key),\n label: key,\n count: buckets.get(key)!.length,\n rowIndices: buckets.get(key)!,\n 
expanded: false,\n }));\n}\n\n/**\n * Collect unique values from a column (up to a limit).\n * Returns sorted unique values.\n */\nexport function collectUniqueValues(\n values: unknown[],\n limit: number = 500,\n): unknown[] {\n const seen = new Set<string>();\n const unique: unknown[] = [];\n\n for (const v of values) {\n const key = formatGroupKey(v);\n if (!seen.has(key)) {\n seen.add(key);\n unique.push(v);\n if (unique.length >= limit) break;\n }\n }\n\n unique.sort((a, b) => compareValues(a, b));\n return unique;\n}\n\n// ── Helpers ──\n\nexport function compareValues(a: unknown, b: unknown): number {\n // nulls last\n if (a === null || a === undefined) return b === null || b === undefined ? 0 : 1;\n if (b === null || b === undefined) return -1;\n\n if (typeof a === \"number\" && typeof b === \"number\") return a - b;\n if (typeof a === \"bigint\" && typeof b === \"bigint\") return a < b ? -1 : a > b ? 1 : 0;\n if (typeof a === \"boolean\" && typeof b === \"boolean\") return Number(a) - Number(b);\n\n // Dates\n if (a instanceof Date && b instanceof Date) return a.getTime() - b.getTime();\n\n // Default: string comparison\n return String(a).localeCompare(String(b));\n}\n\nexport function matchesFilter(value: unknown, filter: FilterDef): boolean {\n switch (filter.operator) {\n case \"is_null\":\n return value === null || value === undefined;\n case \"is_not_null\":\n return value !== null && value !== undefined;\n case \"eq\":\n return value === filter.value;\n case \"neq\":\n return value !== filter.value;\n case \"gt\":\n return compareValues(value, filter.value) > 0;\n case \"gte\":\n return compareValues(value, filter.value) >= 0;\n case \"lt\":\n return compareValues(value, filter.value) < 0;\n case \"lte\":\n return compareValues(value, filter.value) <= 0;\n case \"contains\":\n return String(value).toLowerCase().includes(String(filter.value).toLowerCase());\n case \"not_contains\":\n return 
!String(value).toLowerCase().includes(String(filter.value).toLowerCase());\n case \"in\":\n if (Array.isArray(filter.value)) {\n const set = new Set(filter.value.map(String));\n return set.has(String(value));\n }\n return false;\n default:\n return true;\n }\n}\n\nexport function formatGroupKey(value: unknown): string {\n if (value === null || value === undefined) return \"(null)\";\n if (value instanceof Date) return value.toISOString();\n return String(value);\n}\n","/**\n * Shared async utilities for pipeline computation.\n * Used by framework wrappers (React, Vue, Angular) for non-blocking sort/read operations.\n */\n\n/** Yield control to the browser to prevent UI freezes. */\nexport function yieldToUI(): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, 0));\n}\n\n/**\n * Async merge sort that yields to the UI periodically.\n * Prevents browser freeze when sorting large arrays (100K+ elements).\n */\nexport async function asyncSort(\n arr: number[],\n compareFn: (a: number, b: number) => number,\n isCancelled: () => boolean,\n): Promise<number[]> {\n const CHUNK = 50_000;\n const chunks: number[][] = [];\n\n for (let i = 0; i < arr.length; i += CHUNK) {\n if (isCancelled()) return arr;\n const chunk = arr.slice(i, i + CHUNK);\n chunk.sort(compareFn);\n chunks.push(chunk);\n await yieldToUI();\n }\n\n while (chunks.length > 1) {\n if (isCancelled()) return arr;\n const merged: number[][] = [];\n for (let i = 0; i < chunks.length; i += 2) {\n if (i + 1 < chunks.length) {\n merged.push(mergeSorted(chunks[i], chunks[i + 1], compareFn));\n } else {\n merged.push(chunks[i]);\n }\n }\n chunks.length = 0;\n chunks.push(...merged);\n await yieldToUI();\n }\n\n return chunks[0] ?? [];\n}\n\n/** Merge two sorted arrays into one. 
*/\nfunction mergeSorted(\n a: number[],\n b: number[],\n compareFn: (a: number, b: number) => number,\n): number[] {\n const result = new Array(a.length + b.length);\n let i = 0, j = 0, k = 0;\n while (i < a.length && j < b.length) {\n if (compareFn(a[i], b[j]) <= 0) result[k++] = a[i++];\n else result[k++] = b[j++];\n }\n while (i < a.length) result[k++] = a[i++];\n while (j < b.length) result[k++] = b[j++];\n return result;\n}\n\n/**\n * Run async tasks with limited concurrency.\n * Prevents browser freeze from too many concurrent reads.\n */\nexport async function runWithConcurrency<T>(\n items: T[],\n maxConcurrency: number,\n fn: (item: T) => Promise<void>,\n): Promise<void> {\n let index = 0;\n const workers = Array.from(\n { length: Math.min(maxConcurrency, items.length) },\n async () => {\n while (index < items.length) {\n const i = index++;\n await fn(items[i]);\n }\n },\n );\n await Promise.all(workers);\n}\n"],"mappings":";AAAA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAWP,IAAM,kBAAkB;AAExB,SAAS,mBAA4B;AACnC,QAAM,IAAI;AACV,SAAO,GAAG,kBAAkB;AAC9B;AAEA,SAAS,QAAQ,SAAiB,SAAmB;AACnD,MAAI,CAAC,iBAAiB,EAAG;AACzB,MAAI,YAAY,QAAW;AACzB,YAAQ,IAAI,GAAG,eAAe,IAAI,OAAO,IAAI,OAAO;AAAA,EACtD,OAAO;AACL,YAAQ,IAAI,GAAG,eAAe,IAAI,OAAO,EAAE;AAAA,EAC7C;AACF;AAEA,SAAS,UAAU,SAAiB,OAAgB;AAClD,MAAI,CAAC,iBAAiB,EAAG;AACzB,UAAQ,MAAM,GAAG,eAAe,IAAI,OAAO,IAAI,KAAK;AACtD;AAMA,SAAS,qBAAqB,OAA+C;AAC3E,QAAM,UAAU,QAAQ,QAAQ,KAAK;AACrC,UAAQ,QAAQ,CAAC,OAAe,QAC9B,QAAQ,KAAK,CAAC,WAAW,OAAO,MAAM,OAAO,GAAG,CAAC;AACnD,SAAO;AACT;AAEA,IAAM,wBAAwB,oBAAI,QAAsC;AAExE,SAAS,gBAAgB,QAAsC;AAC7D,QAAM,SAAS,sBAAsB,IAAI,MAAM;AAC/C,MAAI,OAAQ,QAAO;AAEnB,QAAM,UAAyB;AAAA,IAC7B,YAAY,OAAO;AAAA,IACnB,MAAM,OAAe,KAAoC;AACvD,YAAM,IAAI;AAAA,QACR,QAAQ,QAAQ,OAAO,MAAM,OAAO,GAAG,CAAC;AAAA,MAC1C;AACA,cAAQ,yBAAyB;AAAA,QAC/B;AAAA,QACA,KAAK,OAAO,OAAO;AAAA,QACnB,gBAAgB,OAAQ,EAA0B,UAAU;AAAA,MAC9D,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AAEA,wBAAsB,IAAI,QAAQ,OAAO;AACzC,SAAO;AACT;AASO,SAAS,eAAe,MAA2B;AACxD,UAAQ,yBAAyB,EAAE,MAAM,KAAK,KAAK,
CAAC;AACpD,SAAO;AAAA,IACL,YAAY,KAAK;AAAA,IACjB,MAAM,OAAe,KAAoC;AACvD,YAAM,OAAO,KAAK,MAAM,OAAO,GAAG;AAGlC,YAAM,IAAI,qBAAqB,KAAK,YAAY,CAAC;AACjD,cAAQ,wBAAwB;AAAA,QAC9B;AAAA,QACA,KAAK,OAAO,KAAK;AAAA,QACjB,gBAAgB,OAAQ,EAA0B,UAAU;AAAA,MAC9D,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAKO,SAAS,iBAAiB,QAAoC;AACnE,UAAQ,2BAA2B,EAAE,YAAY,OAAO,WAAW,CAAC;AACpE,SAAO;AAAA,IACL,YAAY,OAAO;AAAA,IACnB,MAAM,OAAe,KAAoC;AACvD,YAAM,IAAI;AAAA,QACR,QAAQ,QAAQ,OAAO,MAAM,OAAO,GAAG,CAAC;AAAA,MAC1C;AACA,cAAQ,0BAA0B;AAAA,QAChC;AAAA,QACA,KAAK,OAAO,OAAO;AAAA,QACnB,gBAAgB,OAAQ,EAA0B,UAAU;AAAA,MAC9D,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAKA,eAAsB,cAAc,KAAqC;AACvE,UAAQ,uBAAuB,EAAE,IAAI,CAAC;AACtC,QAAM,WAAW,MAAM,mBAAmB,EAAE,IAAI,CAAC;AACjD,QAAM,UAAyB;AAAA,IAC7B,YAAY,SAAS;AAAA,IACrB,MAAM,OAAe,KAAoC;AACvD,YAAM,IAAI;AAAA,QACR,QAAQ,QAAQ,SAAS,MAAM,OAAO,GAAG,CAAC;AAAA,MAC5C;AACA,cAAQ,uBAAuB;AAAA,QAC7B;AAAA,QACA,KAAK,OAAO,SAAS;AAAA,QACrB,gBAAgB,OAAQ,EAA0B,UAAU;AAAA,MAC9D,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AACA,UAAQ,uBAAuB,EAAE,YAAY,QAAQ,WAAW,CAAC;AACjE,SAAO;AACT;AAIA,IAAM,mBAAmB,oBAAI,QAAoC;AACjE,IAAM,mBAAmB,oBAAI,QAA8C;AAE3E,SAAS,eAAe,QAAoC;AAC1D,QAAM,aAAa,gBAAgB,MAAM;AACzC,MAAI,MAAM,iBAAiB,IAAI,UAAU;AACzC,MAAI,CAAC,KAAK;AAGR,UAAM;AACN,qBAAiB,IAAI,YAAY,GAAG;AAAA,EACtC;AACA,SAAO;AACT;AAEA,SAAS,eAAe,QAA8C;AACpE,QAAM,aAAa,gBAAgB,MAAM;AACzC,MAAI,UAAU,iBAAiB,IAAI,UAAU;AAC7C,MAAI,CAAC,SAAS;AACZ,UAAM,WAAW,eAAe,UAAU;AAC1C,cAAU,qBAAqB,QAAQ;AACvC,qBAAiB,IAAI,YAAY,OAAO;AAAA,EAC1C;AACA,SAAO;AACT;AAEA,SAAS,kBAAkB,KAAoC;AAC7D,QAAM,UAA2B,IAAI,OAAO,MAAM,CAAC,EAAE,IAAI,CAAC,SAAS;AAAA,IACjE,MAAM,IAAI;AAAA,IACV,MAAM,IAAI,QAAQ;AAAA,IAClB,UAAU,IAAI,oBAAoB;AAAA,EACpC,EAAE;AAGF,QAAM,kBAA4B,CAAC,CAAC;AACpC,aAAW,MAAM,IAAI,YAAY;AAC/B,oBAAgB,KAAK,gBAAgB,gBAAgB,SAAS,CAAC,IAAI,OAAO,GAAG,QAAQ,CAAC;AAAA,EACxF;AAEA,SAAO;AAAA,IACL,UAAU,OAAO,IAAI,QAAQ;AAAA,IAC7B;AAAA,IACA,WAAW,IAAI,WAAW;AAAA,IAC1B;AAAA,IACA,WAAW,IAAI,cAAc;AAAA,EAC/B;AACF;AAQA,eAAsB,oBACpB,QAC0B;AAC1B,QAAM,MAAM,MAAM,eAAe,MAAM;AACvC,SAAO,kBAAkB,GAAG;AAC9B;AAMA,eAAsB,gBACpB,QA
CA,UAAuB,CAAC,GACF;AACtB,UAAQ,yBAAyB;AAAA,IAC/B,QAAQ,QAAQ,UAAU;AAAA,IAC1B,OAAO,QAAQ,SAAS;AAAA,IACxB,SAAS,QAAQ,SAAS,UAAU;AAAA,EACtC,CAAC;AACD,QAAM,WAAW,eAAe,MAAM;AACtC,QAAM,cAAc,MAAM,eAAe,MAAM;AAC/C,QAAM,WAAW,kBAAkB,WAAW;AAE9C,QAAM,OAAqB,CAAC;AAE5B,MAAI;AACF,UAAM,YAAY;AAAA,MAChB,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS,QAAQ;AAAA,MACjB,UAAU,QAAQ,UAAU;AAAA,MAC5B,QACE,QAAQ,UAAU,UACb,QAAQ,UAAU,KAAK,QAAQ,QAChC;AAAA,MACN,YAAY,CAAC,SAAsB;AACjC,cAAM,cACJ,QAAQ,WAAW,SAAS,QAAQ,IAAI,CAAC,MAAM,EAAE,IAAI;AACvD,mBAAW,OAAO,MAAM;AACtB,gBAAM,MAAkB,CAAC;AACzB,sBAAY,QAAQ,CAAC,MAAM,MAAM;AAC/B,gBAAI,IAAI,IAAI,IAAI,CAAC;AAAA,UACnB,CAAC;AACD,eAAK,KAAK,GAAG;AAAA,QACf;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH,SAAS,OAAO;AACd,cAAU,yBAAyB,KAAK;AACxC,UAAM;AAAA,EACR;AAEA,SAAO,EAAE,UAAU,KAAK;AAC1B;AAiBA,eAAsB,iBACpB,QACA,aACA,aACiC;AACjC,QAAM,WAAW,eAAe,MAAM;AACtC,QAAM,cAAc,MAAM,eAAe,MAAM;AAE/C,QAAM,SAAS,oBAAI,IAAuB;AAC1C,aAAW,QAAQ,aAAa;AAC9B,WAAO,IAAI,MAAM,CAAC,CAAC;AAAA,EACrB;AAIA,MAAI,YAAY;AAChB,aAAW,MAAM,YAAY,YAAY;AAEvC,QAAI,cAAc,EAAG,QAAO;AAE5B,UAAM,SAAS,OAAO,GAAG,QAAQ;AAEjC,UAAM,YAAY;AAAA,MAChB,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS;AAAA,MACT,UAAU;AAAA,MACV,QAAQ,YAAY;AAAA,MACpB,YAAY,CAAC,SAAsB;AACjC,iBAAS,SAAS,GAAG,SAAS,KAAK,QAAQ,UAAU;AACnD,gBAAM,MAAM,KAAK,MAAM;AACvB,mBAAS,SAAS,GAAG,SAAS,YAAY,QAAQ,UAAU;AAC1D,mBAAO,IAAI,YAAY,MAAM,CAAC,EAAG,KAAK,IAAI,MAAM,CAAC;AAAA,UACnD;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,iBAAa;AAGb,UAAM,IAAI,QAAc,CAAC,YAAY,WAAW,SAAS,CAAC,CAAC;AAAA,EAC7D;AAEA,SAAO;AACT;AAMA,eAAsB,kBACpB,QACA,YACA,SACuB;AACvB,MAAI,WAAW,WAAW,EAAG,QAAO,CAAC;AAErC,QAAM,WAAW,eAAe,MAAM;AACtC,QAAM,cAAc,MAAM,eAAe,MAAM;AAC/C,QAAM,WAAW,kBAAkB,WAAW;AAC9C,QAAM,cAAc,WAAW,SAAS,QAAQ,IAAI,CAAC,MAAM,EAAE,IAAI;AAGjE,QAAM,SAAS,WAAW,IAAI,CAAC,KAAK,SAAS,EAAE,KAAK,IAAI,EAAE;AAC1D,SAAO,KAAK,CAAC,GAAG,MAAM,EAAE,MAAM,EAAE,GAAG;AAGnC,QAAM,SAAgE,CAAC;AACvE,MAAI,aAAa,OAAO,CAAC,EAAE;AAC3B,MAAI,WAAW,OAAO,CAAC,EAAE,MAAM;AAC/B,MAAI,YAAY,CAAC,OAAO,CAAC,EAAE,GAAG;AAE9B,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,QAAI,OAAO,CAAC,EAAE,OAAO,WAAW,I
AAI;AAElC,iBAAW,OAAO,CAAC,EAAE,MAAM;AAC3B,gBAAU,KAAK,OAAO,CAAC,EAAE,GAAG;AAAA,IAC9B,OAAO;AACL,aAAO,KAAK,EAAE,OAAO,YAAY,KAAK,UAAU,UAAU,CAAC;AAC3D,mBAAa,OAAO,CAAC,EAAE;AACvB,iBAAW,OAAO,CAAC,EAAE,MAAM;AAC3B,kBAAY,CAAC,OAAO,CAAC,EAAE,GAAG;AAAA,IAC5B;AAAA,EACF;AACA,SAAO,KAAK,EAAE,OAAO,YAAY,KAAK,UAAU,UAAU,CAAC;AAE3D,QAAM,SAAuB,IAAI,MAAM,WAAW,MAAM;AAExD,aAAW,SAAS,QAAQ;AAC1B,UAAM,YAA0B,CAAC;AAEjC,UAAM,YAAY;AAAA,MAChB,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS;AAAA,MACT,UAAU,MAAM;AAAA,MAChB,QAAQ,MAAM;AAAA,MACd,YAAY,CAAC,SAAsB;AACjC,mBAAW,OAAO,MAAM;AACtB,gBAAM,MAAkB,CAAC;AACzB,sBAAY,QAAQ,CAAC,MAAM,MAAM;AAC/B,gBAAI,IAAI,IAAI,IAAI,CAAC;AAAA,UACnB,CAAC;AACD,oBAAU,KAAK,GAAG;AAAA,QACpB;AAAA,MACF;AAAA,IACF,CAAC;AAGD,QAAI,eAAe;AACnB,eAAW,KAAK,QAAQ;AACtB,UAAI,EAAE,OAAO,MAAM,SAAS,EAAE,MAAM,MAAM,KAAK;AAC7C,cAAM,WAAW,EAAE,MAAM,MAAM;AAC/B,YAAI,WAAW,UAAU,QAAQ;AAC/B,iBAAO,EAAE,GAAG,IAAI,UAAU,QAAQ;AAAA,QACpC;AACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAsB,kBAAkB,MAAkC;AACxE,SAAO,KAAK,YAAY;AAC1B;AAGA,eAAsB,oBAAoB,KAAmC;AAC3E,QAAM,WAAW,MAAM,MAAM,GAAG;AAChC,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,IAAI;AAAA,MACR,iCAAiC,SAAS,MAAM,IAAI,SAAS,UAAU;AAAA,IACzE;AAAA,EACF;AACA,SAAO,SAAS,YAAY;AAC9B;;;AC5VO,SAAS,sBAAqC;AACnD,SAAO,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,GAAG,QAAQ,CAAC,EAAE;AAC9C;AAQO,SAAS,eACd,QACA,OACA,cACU;AACV,QAAM,UAAU,MAAM,KAAK,EAAE,QAAQ,OAAO,OAAO,GAAG,CAAC,GAAG,MAAM,CAAC;AAEjE,UAAQ,KAAK,CAAC,GAAG,MAAM;AACrB,eAAW,QAAQ,OAAO;AACxB,YAAM,MAAM,aAAa,IAAI,KAAK,MAAM;AACxC,UAAI,CAAC,IAAK;AACV,YAAM,KAAK,IAAI,CAAC;AAChB,YAAM,KAAK,IAAI,CAAC;AAChB,YAAM,MAAM,cAAc,IAAI,EAAE;AAChC,UAAI,QAAQ,EAAG,QAAO,KAAK,cAAc,QAAQ,MAAM,CAAC;AAAA,IAC1D;AACA,WAAO,IAAI;AAAA,EACb,CAAC;AAED,SAAO;AACT;AAKO,SAAS,iBACd,WACA,SACA,cACiB;AACjB,MAAI,QAAQ,WAAW,EAAG,QAAO;AAEjC,QAAM,UAAoB,CAAC;AAC3B,WAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,QAAI,OAAO;AACX,eAAW,UAAU,SAAS;AAC5B,YAAM,MAAM,aAAa,IAAI,OAAO,MAAM;AAC1C,UAAI,CAAC,IAAK;AACV,UAAI,CAAC,cAAc,IAAI,CAAC,GAAG,MAAM,GAAG;AAClC,eAAO;AACP;AAAA,MACF;AAAA,IACF;AACA,QAAI,KAAM,SAAQ,KAAK,CAAC;AAAA,EAC1B;AAC
A,SAAO;AACT;AAMO,SAAS,YACd,YACA,QACA,cACa;AACb,MAAI,OAAO,WAAW,EAAG,QAAO,CAAC;AAEjC,QAAM,aAAa,OAAO,CAAC;AAC3B,QAAM,MAAM,aAAa,IAAI,WAAW,MAAM;AAC9C,MAAI,CAAC,IAAK,QAAO,CAAC;AAIlB,QAAM,UAAU,oBAAI,IAAsB;AAC1C,QAAM,YAAY,oBAAI,IAAqB;AAE3C,WAAS,aAAa,GAAG,aAAa,WAAW,QAAQ,cAAc;AACrE,UAAM,YAAY,WAAW,UAAU;AACvC,UAAM,QAAQ,IAAI,SAAS;AAC3B,UAAM,MAAM,eAAe,KAAK;AAChC,QAAI,CAAC,QAAQ,IAAI,GAAG,GAAG;AACrB,cAAQ,IAAI,KAAK,CAAC,CAAC;AACnB,gBAAU,IAAI,KAAK,KAAK;AAAA,IAC1B;AACA,YAAQ,IAAI,GAAG,EAAG,KAAK,UAAU;AAAA,EACnC;AAGA,QAAM,aAAa,CAAC,GAAG,QAAQ,KAAK,CAAC,EAAE,KAAK,CAAC,GAAG,MAAM;AACpD,UAAM,KAAK,UAAU,IAAI,CAAC;AAC1B,UAAM,KAAK,UAAU,IAAI,CAAC;AAC1B,WAAO,cAAc,IAAI,EAAE;AAAA,EAC7B,CAAC;AAED,SAAO,WAAW,IAAI,CAAC,SAAS;AAAA,IAC9B,KAAK,UAAU,IAAI,GAAG;AAAA,IACtB,OAAO;AAAA,IACP,OAAO,QAAQ,IAAI,GAAG,EAAG;AAAA,IACzB,YAAY,QAAQ,IAAI,GAAG;AAAA,IAC3B,UAAU;AAAA,EACZ,EAAE;AACJ;AAMO,SAAS,oBACd,QACA,QAAgB,KACL;AACX,QAAM,OAAO,oBAAI,IAAY;AAC7B,QAAM,SAAoB,CAAC;AAE3B,aAAW,KAAK,QAAQ;AACtB,UAAM,MAAM,eAAe,CAAC;AAC5B,QAAI,CAAC,KAAK,IAAI,GAAG,GAAG;AAClB,WAAK,IAAI,GAAG;AACZ,aAAO,KAAK,CAAC;AACb,UAAI,OAAO,UAAU,MAAO;AAAA,IAC9B;AAAA,EACF;AAEA,SAAO,KAAK,CAAC,GAAG,MAAM,cAAc,GAAG,CAAC,CAAC;AACzC,SAAO;AACT;AAIO,SAAS,cAAc,GAAY,GAAoB;AAE5D,MAAI,MAAM,QAAQ,MAAM,OAAW,QAAO,MAAM,QAAQ,MAAM,SAAY,IAAI;AAC9E,MAAI,MAAM,QAAQ,MAAM,OAAW,QAAO;AAE1C,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,SAAU,QAAO,IAAI;AAC/D,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,SAAU,QAAO,IAAI,IAAI,KAAK,IAAI,IAAI,IAAI;AACpF,MAAI,OAAO,MAAM,aAAa,OAAO,MAAM,UAAW,QAAO,OAAO,CAAC,IAAI,OAAO,CAAC;AAGjF,MAAI,aAAa,QAAQ,aAAa,KAAM,QAAO,EAAE,QAAQ,IAAI,EAAE,QAAQ;AAG3E,SAAO,OAAO,CAAC,EAAE,cAAc,OAAO,CAAC,CAAC;AAC1C;AAEO,SAAS,cAAc,OAAgB,QAA4B;AACxE,UAAQ,OAAO,UAAU;AAAA,IACvB,KAAK;AACH,aAAO,UAAU,QAAQ,UAAU;AAAA,IACrC,KAAK;AACH,aAAO,UAAU,QAAQ,UAAU;AAAA,IACrC,KAAK;AACH,aAAO,UAAU,OAAO;AAAA,IAC1B,KAAK;AACH,aAAO,UAAU,OAAO;AAAA,IAC1B,KAAK;AACH,aAAO,cAAc,OAAO,OAAO,KAAK,IAAI;AAAA,IAC9C,KAAK;AACH,aAAO,cAAc,OAAO,OAAO,KAAK,KAAK;AAAA,IAC/C,KAAK;AACH,aAAO,cAAc,OAAO,OAAO,KAAK,IAAI;AAAA,IAC9C,KAAK;AACH,aAAO,cAAc,OAAO,OAAO,KAAK,KAAK;
AAAA,IAC/C,KAAK;AACH,aAAO,OAAO,KAAK,EAAE,YAAY,EAAE,SAAS,OAAO,OAAO,KAAK,EAAE,YAAY,CAAC;AAAA,IAChF,KAAK;AACH,aAAO,CAAC,OAAO,KAAK,EAAE,YAAY,EAAE,SAAS,OAAO,OAAO,KAAK,EAAE,YAAY,CAAC;AAAA,IACjF,KAAK;AACH,UAAI,MAAM,QAAQ,OAAO,KAAK,GAAG;AAC/B,cAAM,MAAM,IAAI,IAAI,OAAO,MAAM,IAAI,MAAM,CAAC;AAC5C,eAAO,IAAI,IAAI,OAAO,KAAK,CAAC;AAAA,MAC9B;AACA,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;AAEO,SAAS,eAAe,OAAwB;AACrD,MAAI,UAAU,QAAQ,UAAU,OAAW,QAAO;AAClD,MAAI,iBAAiB,KAAM,QAAO,MAAM,YAAY;AACpD,SAAO,OAAO,KAAK;AACrB;;;ACjPO,SAAS,YAA2B;AACzC,SAAO,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,CAAC,CAAC;AACxD;AAMA,eAAsB,UACpB,KACA,WACA,aACmB;AACnB,QAAM,QAAQ;AACd,QAAM,SAAqB,CAAC;AAE5B,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK,OAAO;AAC1C,QAAI,YAAY,EAAG,QAAO;AAC1B,UAAM,QAAQ,IAAI,MAAM,GAAG,IAAI,KAAK;AACpC,UAAM,KAAK,SAAS;AACpB,WAAO,KAAK,KAAK;AACjB,UAAM,UAAU;AAAA,EAClB;AAEA,SAAO,OAAO,SAAS,GAAG;AACxB,QAAI,YAAY,EAAG,QAAO;AAC1B,UAAM,SAAqB,CAAC;AAC5B,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK,GAAG;AACzC,UAAI,IAAI,IAAI,OAAO,QAAQ;AACzB,eAAO,KAAK,YAAY,OAAO,CAAC,GAAG,OAAO,IAAI,CAAC,GAAG,SAAS,CAAC;AAAA,MAC9D,OAAO;AACL,eAAO,KAAK,OAAO,CAAC,CAAC;AAAA,MACvB;AAAA,IACF;AACA,WAAO,SAAS;AAChB,WAAO,KAAK,GAAG,MAAM;AACrB,UAAM,UAAU;AAAA,EAClB;AAEA,SAAO,OAAO,CAAC,KAAK,CAAC;AACvB;AAGA,SAAS,YACP,GACA,GACA,WACU;AACV,QAAM,SAAS,IAAI,MAAM,EAAE,SAAS,EAAE,MAAM;AAC5C,MAAI,IAAI,GAAG,IAAI,GAAG,IAAI;AACtB,SAAO,IAAI,EAAE,UAAU,IAAI,EAAE,QAAQ;AACnC,QAAI,UAAU,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,KAAK,EAAG,QAAO,GAAG,IAAI,EAAE,GAAG;AAAA,QAC9C,QAAO,GAAG,IAAI,EAAE,GAAG;AAAA,EAC1B;AACA,SAAO,IAAI,EAAE,OAAQ,QAAO,GAAG,IAAI,EAAE,GAAG;AACxC,SAAO,IAAI,EAAE,OAAQ,QAAO,GAAG,IAAI,EAAE,GAAG;AACxC,SAAO;AACT;AAMA,eAAsB,mBACpB,OACA,gBACA,IACe;AACf,MAAI,QAAQ;AACZ,QAAM,UAAU,MAAM;AAAA,IACpB,EAAE,QAAQ,KAAK,IAAI,gBAAgB,MAAM,MAAM,EAAE;AAAA,IACjD,YAAY;AACV,aAAO,QAAQ,MAAM,QAAQ;AAC3B,cAAM,IAAI;AACV,cAAM,GAAG,MAAM,CAAC,CAAC;AAAA,MACnB;AAAA,IACF;AAAA,EACF;AACA,QAAM,QAAQ,IAAI,OAAO;AAC3B;","names":[]}
|