lakesync 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +74 -0
- package/dist/adapter.d.ts +369 -0
- package/dist/adapter.js +39 -0
- package/dist/adapter.js.map +1 -0
- package/dist/analyst.d.ts +268 -0
- package/dist/analyst.js +495 -0
- package/dist/analyst.js.map +1 -0
- package/dist/auth-CAVutXzx.d.ts +30 -0
- package/dist/base-poller-Qo_SmCZs.d.ts +82 -0
- package/dist/catalogue.d.ts +65 -0
- package/dist/catalogue.js +17 -0
- package/dist/catalogue.js.map +1 -0
- package/dist/chunk-4ARO6KTJ.js +257 -0
- package/dist/chunk-4ARO6KTJ.js.map +1 -0
- package/dist/chunk-5YOFCJQ7.js +1115 -0
- package/dist/chunk-5YOFCJQ7.js.map +1 -0
- package/dist/chunk-7D4SUZUM.js +38 -0
- package/dist/chunk-7D4SUZUM.js.map +1 -0
- package/dist/chunk-BNJOGBYK.js +335 -0
- package/dist/chunk-BNJOGBYK.js.map +1 -0
- package/dist/chunk-ICNT7I3K.js +1180 -0
- package/dist/chunk-ICNT7I3K.js.map +1 -0
- package/dist/chunk-P5DRFKIT.js +413 -0
- package/dist/chunk-P5DRFKIT.js.map +1 -0
- package/dist/chunk-X3RO5SYJ.js +880 -0
- package/dist/chunk-X3RO5SYJ.js.map +1 -0
- package/dist/client.d.ts +428 -0
- package/dist/client.js +2048 -0
- package/dist/client.js.map +1 -0
- package/dist/compactor.d.ts +342 -0
- package/dist/compactor.js +793 -0
- package/dist/compactor.js.map +1 -0
- package/dist/coordinator-CxckTzYW.d.ts +396 -0
- package/dist/db-types-BR6Kt4uf.d.ts +29 -0
- package/dist/gateway-D5SaaMvT.d.ts +337 -0
- package/dist/gateway-server.d.ts +306 -0
- package/dist/gateway-server.js +4663 -0
- package/dist/gateway-server.js.map +1 -0
- package/dist/gateway.d.ts +196 -0
- package/dist/gateway.js +79 -0
- package/dist/gateway.js.map +1 -0
- package/dist/hlc-DiD8QNG3.d.ts +70 -0
- package/dist/index.d.ts +245 -0
- package/dist/index.js +102 -0
- package/dist/index.js.map +1 -0
- package/dist/json-dYtqiL0F.d.ts +18 -0
- package/dist/nessie-client-DrNikVXy.d.ts +160 -0
- package/dist/parquet.d.ts +78 -0
- package/dist/parquet.js +15 -0
- package/dist/parquet.js.map +1 -0
- package/dist/proto.d.ts +434 -0
- package/dist/proto.js +67 -0
- package/dist/proto.js.map +1 -0
- package/dist/react.d.ts +147 -0
- package/dist/react.js +224 -0
- package/dist/react.js.map +1 -0
- package/dist/resolver-C3Wphi6O.d.ts +10 -0
- package/dist/result-CojzlFE2.d.ts +64 -0
- package/dist/src-QU2YLPZY.js +383 -0
- package/dist/src-QU2YLPZY.js.map +1 -0
- package/dist/src-WYBF5LOI.js +102 -0
- package/dist/src-WYBF5LOI.js.map +1 -0
- package/dist/src-WZNPHANQ.js +426 -0
- package/dist/src-WZNPHANQ.js.map +1 -0
- package/dist/types-Bs-QyOe-.d.ts +143 -0
- package/dist/types-DAQL_vU_.d.ts +118 -0
- package/dist/types-DSC_EiwR.d.ts +45 -0
- package/dist/types-V_jVu2sA.d.ts +73 -0
- package/package.json +119 -0
package/dist/catalogue.d.ts

@@ -0,0 +1,65 @@
import { I as IcebergSchema, P as PartitionSpec } from './nessie-client-DrNikVXy.js';
export { C as CatalogueConfig, a as CatalogueError, D as DataFile, b as IcebergField, N as NessieCatalogueClient, S as Snapshot, T as TableMetadata } from './nessie-client-DrNikVXy.js';
import { T as TableSchema } from './types-V_jVu2sA.js';
import './result-CojzlFE2.js';

/**
 * Converts a LakeSync `TableSchema` to an Iceberg `IcebergSchema`.
 *
 * The resulting schema always includes six system columns (required) followed
 * by user-defined columns (not required). Column order and types are aligned
 * with the Apache Arrow schema produced by `buildArrowSchema` from
 * `@lakesync/parquet`.
 *
 * System columns (always present, in this order):
 * 1. `op` — `"string"` (the delta operation type)
 * 2. `table` — `"string"` (the table name)
 * 3. `rowId` — `"string"` (the row identifier)
 * 4. `clientId` — `"string"` (the client identifier)
 * 5. `hlc` — `"long"` (HLC timestamp as Int64)
 * 6. `deltaId` — `"string"` (the deterministic delta identifier)
 *
 * User columns are mapped according to their declared LakeSync type:
 * - `"string"` → `"string"`
 * - `"number"` → `"double"`
 * - `"boolean"` → `"boolean"`
 * - `"json"` → `"string"` (JSON-serialised text)
 * - `"null"` → `"string"`
 *
 * @param schema - The LakeSync `TableSchema` to convert
 * @returns An `IcebergSchema` with system and user columns, `schema-id` 0
 */
declare function tableSchemaToIceberg(schema: TableSchema): IcebergSchema;
/**
 * Builds an Iceberg `PartitionSpec` from an `IcebergSchema`.
 *
 * The partition strategy extracts the day from the `hlc` column using the
 * Iceberg `day` transform, which partitions data by the wall-clock date
 * encoded in the HLC timestamp. This ensures efficient time-range queries.
 *
 * The resulting spec has a single partition field:
 * - `source-id`: the field ID of the `hlc` column
 * - `field-id`: 1000 (Iceberg convention — partition field IDs start at 1000)
 * - `name`: `"hlc_day"`
 * - `transform`: `"day"`
 *
 * @param schema - The Iceberg schema containing an `hlc` field
 * @returns A `PartitionSpec` with `spec-id` 0 and a single day-partitioned field
 * @throws If the schema does not contain an `hlc` field
 */
declare function buildPartitionSpec(schema: IcebergSchema): PartitionSpec;
/**
 * Maps a LakeSync table name to an Iceberg namespace and table name.
 *
 * All LakeSync tables reside under the `["lakesync"]` namespace. The table
 * name is passed through as-is, preserving the original casing and format.
 *
 * @param table - The LakeSync table name (e.g. `"todos"`)
 * @returns An object with `namespace` (`["lakesync"]`) and `name` (the table name)
 */
declare function lakeSyncTableName(table: string): {
    namespace: string[];
    name: string;
};

export { IcebergSchema, PartitionSpec, buildPartitionSpec, lakeSyncTableName, tableSchemaToIceberg };
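Taken together, these declarations describe how a LakeSync table becomes an Iceberg table: the schema gains six required system columns, the partition spec is always `day(hlc)`, and every table lives under the `lakesync` namespace. The sketch below composes the three functions; it is illustrative only: the `lakesync/catalogue` import path and the shape of the schema literal are assumptions, not verified package API.

```ts
// Sketch, not package code: the "lakesync/catalogue" subpath and the schema
// literal are assumptions; in real code the literal would be typed with the
// package's TableSchema.
import { buildPartitionSpec, lakeSyncTableName, tableSchemaToIceberg } from "lakesync/catalogue";

// Hypothetical LakeSync table schema; only `columns` is read by the mapping.
const todosSchema = {
  columns: [
    { name: "title", type: "string" },
    { name: "done", type: "boolean" },
    { name: "meta", type: "json" },
  ],
};

// Six required system columns (op, table, rowId, clientId, hlc, deltaId)
// followed by the three optional user columns declared above.
const icebergSchema = tableSchemaToIceberg(todosSchema);

// A single partition field: the Iceberg `day` transform over `hlc`,
// named "hlc_day", with field-id 1000.
const partitionSpec = buildPartitionSpec(icebergSchema);

// { namespace: ["lakesync"], name: "todos" }
const { namespace, name } = lakeSyncTableName("todos");
```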
package/dist/catalogue.js

@@ -0,0 +1,17 @@
import {
  CatalogueError,
  NessieCatalogueClient,
  buildPartitionSpec,
  lakeSyncTableName,
  tableSchemaToIceberg
} from "./chunk-P5DRFKIT.js";
import "./chunk-ICNT7I3K.js";
import "./chunk-7D4SUZUM.js";
export {
  CatalogueError,
  NessieCatalogueClient,
  buildPartitionSpec,
  lakeSyncTableName,
  tableSchemaToIceberg
};
//# sourceMappingURL=catalogue.js.map
package/dist/catalogue.js.map

@@ -0,0 +1 @@
{"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
package/dist/chunk-4ARO6KTJ.js

@@ -0,0 +1,257 @@
import {
  Err,
  FlushError,
  Ok
} from "./chunk-ICNT7I3K.js";

// ../parquet/src/arrow-schema.ts
import * as arrow from "apache-arrow";
var ARROW_TYPE_MAP = {
  string: () => new arrow.Utf8(),
  number: () => new arrow.Float64(),
  boolean: () => new arrow.Bool(),
  json: () => new arrow.Utf8(),
  null: () => new arrow.Utf8()
};
function lakeSyncTypeToArrow(colType) {
  return ARROW_TYPE_MAP[colType]();
}
function buildArrowSchema(schema) {
  const systemFields = [
    new arrow.Field("op", new arrow.Utf8(), false),
    new arrow.Field("table", new arrow.Utf8(), false),
    new arrow.Field("rowId", new arrow.Utf8(), false),
    new arrow.Field("clientId", new arrow.Utf8(), false),
    new arrow.Field("hlc", new arrow.Int64(), false),
    new arrow.Field("deltaId", new arrow.Utf8(), false)
  ];
  const userFields = schema.columns.map((col) => {
    const nullable = col.type === "null" || col.type === "json" || col.type === "boolean" || col.type === "number" || col.type === "string";
    return new arrow.Field(col.name, lakeSyncTypeToArrow(col.type), nullable);
  });
  return new arrow.Schema([...systemFields, ...userFields]);
}
function deltasToArrowTable(deltas, schema) {
  const arrowSchema = buildArrowSchema(schema);
  if (deltas.length === 0) {
    return new arrow.Table(arrowSchema);
  }
  const columnTypeMap = /* @__PURE__ */ new Map();
  for (const col of schema.columns) {
    columnTypeMap.set(col.name, col.type);
  }
  const ops = [];
  const tables = [];
  const rowIds = [];
  const clientIds = [];
  const hlcs = [];
  const deltaIds = [];
  const userColumns = /* @__PURE__ */ new Map();
  for (const col of schema.columns) {
    userColumns.set(col.name, []);
  }
  for (const delta of deltas) {
    ops.push(delta.op);
    tables.push(delta.table);
    rowIds.push(delta.rowId);
    clientIds.push(delta.clientId);
    hlcs.push(delta.hlc);
    deltaIds.push(delta.deltaId);
    const deltaColMap = /* @__PURE__ */ new Map();
    for (const colDelta of delta.columns) {
      deltaColMap.set(colDelta.column, colDelta.value);
    }
    for (const col of schema.columns) {
      const arr = userColumns.get(col.name);
      if (!arr) continue;
      if (deltaColMap.has(col.name)) {
        const value = deltaColMap.get(col.name);
        if (col.type === "json") {
          arr.push(value != null ? JSON.stringify(value) : null);
        } else {
          arr.push(value ?? null);
        }
      } else {
        arr.push(null);
      }
    }
  }
  const columnData = {};
  columnData.op = arrow.vectorFromArray(ops, new arrow.Utf8());
  columnData.table = arrow.vectorFromArray(tables, new arrow.Utf8());
  columnData.rowId = arrow.vectorFromArray(rowIds, new arrow.Utf8());
  columnData.clientId = arrow.vectorFromArray(clientIds, new arrow.Utf8());
  columnData.hlc = arrow.vectorFromArray(hlcs, new arrow.Int64());
  columnData.deltaId = arrow.vectorFromArray(deltaIds, new arrow.Utf8());
  for (const col of schema.columns) {
    const values = userColumns.get(col.name);
    if (!values) continue;
    const arrowType = lakeSyncTypeToArrow(col.type);
    columnData[col.name] = arrow.vectorFromArray(values, arrowType);
  }
  return new arrow.Table(columnData);
}

// ../parquet/src/reader.ts
import { tableFromIPC } from "apache-arrow";
import { readParquet } from "parquet-wasm/esm";

// ../parquet/src/wasm.ts
import { readFileSync } from "fs";
import { createRequire } from "module";
import { initSync } from "parquet-wasm/esm";
var initialised = false;
function ensureWasmInitialised() {
  if (initialised) return;
  const require2 = createRequire(import.meta.url);
  const esmEntryPath = require2.resolve("parquet-wasm/esm");
  const wasmPath = esmEntryPath.replace("parquet_wasm.js", "parquet_wasm_bg.wasm");
  const wasmBytes = readFileSync(wasmPath);
  initSync({ module: wasmBytes });
  initialised = true;
}

// ../parquet/src/reader.ts
var SYSTEM_COLUMNS = /* @__PURE__ */ new Set(["op", "table", "rowId", "clientId", "hlc", "deltaId"]);
var BOOL_COLUMNS_METADATA_KEY = "lakesync:bool_columns";
async function readParquetToDeltas(data) {
  try {
    ensureWasmInitialised();
    const wasmTable = readParquet(data);
    const ipcBytes = wasmTable.intoIPCStream();
    const arrowTable = tableFromIPC(ipcBytes);
    const numRows = arrowTable.numRows;
    const deltas = [];
    const opCol = arrowTable.getChild("op");
    const tableCol = arrowTable.getChild("table");
    const rowIdCol = arrowTable.getChild("rowId");
    const clientIdCol = arrowTable.getChild("clientId");
    const hlcCol = arrowTable.getChild("hlc");
    const deltaIdCol = arrowTable.getChild("deltaId");
    if (!opCol || !tableCol || !rowIdCol || !clientIdCol || !hlcCol || !deltaIdCol) {
      return Err(new FlushError("Parquet data is missing required system columns"));
    }
    const userColumnNames = [];
    for (const field of arrowTable.schema.fields) {
      if (!SYSTEM_COLUMNS.has(field.name)) {
        userColumnNames.push(field.name);
      }
    }
    const boolColumnsRaw = arrowTable.schema.metadata.get(BOOL_COLUMNS_METADATA_KEY);
    const boolColumnSet = new Set(
      boolColumnsRaw ? JSON.parse(boolColumnsRaw) : []
    );
    const userColumnVectors = /* @__PURE__ */ new Map();
    for (const colName of userColumnNames) {
      const vec = arrowTable.getChild(colName);
      if (vec) {
        userColumnVectors.set(colName, vec);
      }
    }
    for (let i = 0; i < numRows; i++) {
      const op = opCol.get(i);
      const tableName = tableCol.get(i);
      const rowId = rowIdCol.get(i);
      const clientId = clientIdCol.get(i);
      const hlc = hlcCol.get(i);
      const deltaId = deltaIdCol.get(i);
      const columns = [];
      for (const [colName, col] of userColumnVectors) {
        const rawValue = col.get(i);
        if (rawValue === null || rawValue === void 0) {
          continue;
        }
        let value = rawValue;
        if (boolColumnSet.has(colName) && typeof rawValue === "number") {
          value = rawValue !== 0;
        } else if (typeof rawValue === "string") {
          const trimmed = rawValue.trim();
          if (trimmed.startsWith("{") && trimmed.endsWith("}") || trimmed.startsWith("[") && trimmed.endsWith("]")) {
            try {
              value = JSON.parse(rawValue);
            } catch {
            }
          }
        }
        columns.push({ column: colName, value });
      }
      deltas.push({
        op,
        table: tableName,
        rowId,
        clientId,
        columns,
        hlc,
        deltaId
      });
    }
    return Ok(deltas);
  } catch (err) {
    const cause = err instanceof Error ? err : new Error(String(err));
    return Err(new FlushError(`Failed to read deltas from Parquet: ${cause.message}`, cause));
  }
}

// ../parquet/src/writer.ts
import * as arrow2 from "apache-arrow";
import {
  Compression,
  Table as WasmTable,
  WriterPropertiesBuilder,
  writeParquet
} from "parquet-wasm/esm";
var BOOL_COLUMNS_METADATA_KEY2 = "lakesync:bool_columns";
function convertBoolColumnsToInt8(table) {
  const boolColumnNames = [];
  const columns = {};
  for (const field of table.schema.fields) {
    const col = table.getChild(field.name);
    if (!col) continue;
    if (field.type instanceof arrow2.Bool) {
      boolColumnNames.push(field.name);
      const int8Values = [];
      for (let i = 0; i < col.length; i++) {
        const val = col.get(i);
        if (val === null || val === void 0) {
          int8Values.push(null);
        } else {
          int8Values.push(val ? 1 : 0);
        }
      }
      columns[field.name] = arrow2.vectorFromArray(int8Values, new arrow2.Int8());
    } else {
      columns[field.name] = col;
    }
  }
  return [new arrow2.Table(columns), boolColumnNames];
}
async function writeDeltasToParquet(deltas, schema) {
  try {
    ensureWasmInitialised();
    const arrowTable = deltasToArrowTable(deltas, schema);
    const [patchedTable, boolColumnNames] = convertBoolColumnsToInt8(arrowTable);
    const ipcBytes = arrow2.tableToIPC(patchedTable, "stream");
    const wasmTable = WasmTable.fromIPCStream(ipcBytes);
    let builder = new WriterPropertiesBuilder();
    builder = builder.setCompression(Compression.SNAPPY);
    if (boolColumnNames.length > 0) {
      const metadata = /* @__PURE__ */ new Map();
      metadata.set(BOOL_COLUMNS_METADATA_KEY2, JSON.stringify(boolColumnNames));
      builder = builder.setKeyValueMetadata(metadata);
    }
    const writerProperties = builder.build();
    const parquetBytes = writeParquet(wasmTable, writerProperties);
    return Ok(parquetBytes);
  } catch (err) {
    const cause = err instanceof Error ? err : new Error(String(err));
    return Err(new FlushError(`Failed to write deltas to Parquet: ${cause.message}`, cause));
  }
}

export {
  buildArrowSchema,
  deltasToArrowTable,
  readParquetToDeltas,
  writeDeltasToParquet
};
//# sourceMappingURL=chunk-4ARO6KTJ.js.map
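The chunk above is the bundled `@lakesync/parquet` read/write path: deltas are packed into an Arrow table (booleans re-encoded as Int8, with the original column names recorded under the `lakesync:bool_columns` metadata key), written out through parquet-wasm with Snappy compression, and read back the same way. A round trip might look like the sketch below; the `lakesync/parquet` import path, the example schema and delta, and the `.value` access on the returned `Result` are assumptions for illustration, not the package's documented API.

```ts
// Sketch, not package code: the "lakesync/parquet" subpath, the schema/delta
// literals and the `.value` shape of an Ok result are assumptions.
import { readParquetToDeltas, writeDeltasToParquet } from "lakesync/parquet";

const schema = {
  columns: [
    { name: "title", type: "string" },
    { name: "done", type: "boolean" },
  ],
};

// `hlc` is a bigint because it is written into an Int64 vector; the boolean
// column is stored as Int8 and restored to true/false on read.
const deltas = [
  {
    op: "INSERT",
    table: "todos",
    rowId: "row-1",
    clientId: "client-1",
    hlc: 1718000000000n,
    deltaId: "delta-1",
    columns: [
      { column: "title", value: "buy milk" },
      { column: "done", value: false },
    ],
  },
];

// Result<Uint8Array, FlushError>: Snappy-compressed Parquet bytes on success.
const written = await writeDeltasToParquet(deltas, schema);

if ("value" in written) {
  // Result<RowDelta[], FlushError>: Int8-encoded booleans come back as
  // booleans and JSON-serialised columns are parsed back into objects.
  const restored = await readParquetToDeltas(written.value);
  console.log(restored);
}
```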
package/dist/chunk-4ARO6KTJ.js.map

@@ -0,0 +1 @@
{"version":3,"sources":["../../parquet/src/arrow-schema.ts","../../parquet/src/reader.ts","../../parquet/src/wasm.ts","../../parquet/src/writer.ts"],"sourcesContent":["import type { RowDelta, TableSchema } from \"@lakesync/core\";\nimport * as arrow from \"apache-arrow\";\n\n/**\n * Column type from a TableSchema definition.\n */\ntype ColumnType = TableSchema[\"columns\"][number][\"type\"];\n\n/**\n * Maps a LakeSync column type to an Apache Arrow data type.\n *\n * @param colType - The LakeSync column type to convert\n * @returns The corresponding Apache Arrow data type\n */\nconst ARROW_TYPE_MAP: Record<ColumnType, () => arrow.DataType> = {\n\tstring: () => new arrow.Utf8(),\n\tnumber: () => new arrow.Float64(),\n\tboolean: () => new arrow.Bool(),\n\tjson: () => new arrow.Utf8(),\n\tnull: () => new arrow.Utf8(),\n};\n\nfunction lakeSyncTypeToArrow(colType: ColumnType): arrow.DataType {\n\treturn ARROW_TYPE_MAP[colType]();\n}\n\n/**\n * Builds an Apache Arrow Schema from a LakeSync TableSchema.\n *\n * The resulting schema always includes the following system columns:\n * - `op` (Utf8) — the delta operation type (INSERT, UPDATE, DELETE)\n * - `table` (Utf8) — the table name\n * - `rowId` (Utf8) — the row identifier\n * - `clientId` (Utf8) — the client identifier\n * - `hlc` (Int64) — the HLC timestamp as a 64-bit integer\n * - `deltaId` (Utf8) — the deterministic delta identifier\n *\n * User-defined columns from the TableSchema are appended after system columns,\n * with types mapped as follows:\n * - `string` → Utf8\n * - `number` → Float64\n * - `boolean` → Bool\n * - `json` → Utf8 (values are JSON-serialised)\n * - `null` → Utf8, nullable\n *\n * @param schema - The LakeSync TableSchema to convert\n * @returns An Apache Arrow Schema with system and user columns\n */\nexport function buildArrowSchema(schema: TableSchema): arrow.Schema {\n\tconst systemFields: arrow.Field[] = [\n\t\tnew arrow.Field(\"op\", new arrow.Utf8(), false),\n\t\tnew arrow.Field(\"table\", new arrow.Utf8(), false),\n\t\tnew arrow.Field(\"rowId\", new arrow.Utf8(), false),\n\t\tnew arrow.Field(\"clientId\", new arrow.Utf8(), false),\n\t\tnew arrow.Field(\"hlc\", new arrow.Int64(), false),\n\t\tnew arrow.Field(\"deltaId\", new arrow.Utf8(), false),\n\t];\n\n\tconst userFields: arrow.Field[] = schema.columns.map((col) => {\n\t\tconst nullable =\n\t\t\tcol.type === \"null\" ||\n\t\t\tcol.type === \"json\" ||\n\t\t\tcol.type === \"boolean\" ||\n\t\t\tcol.type === \"number\" ||\n\t\t\tcol.type === \"string\";\n\t\treturn new arrow.Field(col.name, lakeSyncTypeToArrow(col.type), nullable);\n\t});\n\n\treturn new arrow.Schema([...systemFields, ...userFields]);\n}\n\n/**\n * Converts an array of RowDelta objects into an Apache Arrow Table.\n *\n * System columns (op, table, rowId, clientId, hlc, deltaId) are extracted\n * directly from each delta. 
User columns are looked up from each delta's\n * `columns` array; missing columns produce `null` for that row.\n *\n * Type conversions:\n * - `json` columns are serialised via `JSON.stringify`\n * - `hlc` is passed as a bigint directly to Int64 vectors\n * - All other types are passed through as-is\n *\n * @param deltas - Array of RowDelta objects to convert\n * @param schema - The LakeSync TableSchema describing user columns\n * @returns An Apache Arrow Table containing all deltas\n */\nexport function deltasToArrowTable(deltas: RowDelta[], schema: TableSchema): arrow.Table {\n\tconst arrowSchema = buildArrowSchema(schema);\n\n\tif (deltas.length === 0) {\n\t\treturn new arrow.Table(arrowSchema);\n\t}\n\n\t// Build column index: map column name → column type from schema\n\tconst columnTypeMap = new Map<string, ColumnType>();\n\tfor (const col of schema.columns) {\n\t\tcolumnTypeMap.set(col.name, col.type);\n\t}\n\n\t// System column arrays\n\tconst ops: string[] = [];\n\tconst tables: string[] = [];\n\tconst rowIds: string[] = [];\n\tconst clientIds: string[] = [];\n\tconst hlcs: bigint[] = [];\n\tconst deltaIds: string[] = [];\n\n\t// User column arrays — initialise as arrays of null\n\tconst userColumns = new Map<string, (unknown | null)[]>();\n\tfor (const col of schema.columns) {\n\t\tuserColumns.set(col.name, []);\n\t}\n\n\t// Populate arrays from deltas\n\tfor (const delta of deltas) {\n\t\tops.push(delta.op);\n\t\ttables.push(delta.table);\n\t\trowIds.push(delta.rowId);\n\t\tclientIds.push(delta.clientId);\n\t\thlcs.push(delta.hlc as bigint);\n\t\tdeltaIds.push(delta.deltaId);\n\n\t\t// Build a lookup for this delta's columns\n\t\tconst deltaColMap = new Map<string, unknown>();\n\t\tfor (const colDelta of delta.columns) {\n\t\t\tdeltaColMap.set(colDelta.column, colDelta.value);\n\t\t}\n\n\t\t// Fill user columns — missing columns get null\n\t\tfor (const col of schema.columns) {\n\t\t\tconst arr = userColumns.get(col.name);\n\t\t\tif (!arr) continue;\n\n\t\t\tif (deltaColMap.has(col.name)) {\n\t\t\t\tconst value = deltaColMap.get(col.name);\n\t\t\t\tif (col.type === \"json\") {\n\t\t\t\t\tarr.push(value != null ? JSON.stringify(value) : null);\n\t\t\t\t} else {\n\t\t\t\t\tarr.push(value ?? 
null);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tarr.push(null);\n\t\t\t}\n\t\t}\n\t}\n\n\t// Build the data map for arrow.tableFromArrays-style construction\n\t// We need to build vectors for each column and construct the table\n\tconst columnData: Record<string, arrow.Vector> = {};\n\n\t// System vectors\n\tcolumnData.op = arrow.vectorFromArray(ops, new arrow.Utf8());\n\tcolumnData.table = arrow.vectorFromArray(tables, new arrow.Utf8());\n\tcolumnData.rowId = arrow.vectorFromArray(rowIds, new arrow.Utf8());\n\tcolumnData.clientId = arrow.vectorFromArray(clientIds, new arrow.Utf8());\n\tcolumnData.hlc = arrow.vectorFromArray(hlcs, new arrow.Int64());\n\tcolumnData.deltaId = arrow.vectorFromArray(deltaIds, new arrow.Utf8());\n\n\t// User vectors\n\tfor (const col of schema.columns) {\n\t\tconst values = userColumns.get(col.name);\n\t\tif (!values) continue;\n\n\t\tconst arrowType = lakeSyncTypeToArrow(col.type);\n\t\tcolumnData[col.name] = arrow.vectorFromArray(values, arrowType);\n\t}\n\n\t// Build the table from the schema and vectors\n\treturn new arrow.Table(columnData);\n}\n","import {\n\ttype ColumnDelta,\n\ttype DeltaOp,\n\tErr,\n\tFlushError,\n\ttype HLCTimestamp,\n\tOk,\n\ttype Result,\n\ttype RowDelta,\n} from \"@lakesync/core\";\nimport { tableFromIPC, type Vector } from \"apache-arrow\";\nimport { readParquet } from \"parquet-wasm/esm\";\nimport { ensureWasmInitialised } from \"./wasm\";\n\n/** Set of system column names that are not user-defined columns */\nconst SYSTEM_COLUMNS = new Set([\"op\", \"table\", \"rowId\", \"clientId\", \"hlc\", \"deltaId\"]);\n\n/**\n * Metadata key used to identify columns that were originally boolean\n * but stored as Int8 for Parquet compatibility.\n */\nconst BOOL_COLUMNS_METADATA_KEY = \"lakesync:bool_columns\";\n\n/**\n * Deserialises Parquet bytes back into an array of RowDelta objects.\n *\n * Reads the Parquet data using parquet-wasm, converts to an Apache Arrow Table\n * via IPC stream, then iterates over rows to reconstruct RowDelta objects.\n * Int64 `hlc` values are cast back to branded HLCTimestamp bigints.\n * Columns stored as JSON-serialised Utf8 strings (objects and arrays) are\n * automatically parsed back to their original values.\n * Int8 columns marked as booleans in metadata are converted back to true/false.\n *\n * @param data - The Parquet file bytes to deserialise\n * @returns A Result containing the reconstructed RowDelta array, or a FlushError on failure\n */\nexport async function readParquetToDeltas(\n\tdata: Uint8Array,\n): Promise<Result<RowDelta[], FlushError>> {\n\ttry {\n\t\tensureWasmInitialised();\n\n\t\t// Read Parquet data into a WASM table\n\t\tconst wasmTable = readParquet(data);\n\n\t\t// Convert to IPC stream, then to Arrow JS Table\n\t\tconst ipcBytes = wasmTable.intoIPCStream();\n\t\tconst arrowTable = tableFromIPC(ipcBytes);\n\n\t\tconst numRows = arrowTable.numRows;\n\t\tconst deltas: RowDelta[] = [];\n\n\t\t// Get system column vectors\n\t\tconst opCol = arrowTable.getChild(\"op\");\n\t\tconst tableCol = arrowTable.getChild(\"table\");\n\t\tconst rowIdCol = arrowTable.getChild(\"rowId\");\n\t\tconst clientIdCol = arrowTable.getChild(\"clientId\");\n\t\tconst hlcCol = arrowTable.getChild(\"hlc\");\n\t\tconst deltaIdCol = arrowTable.getChild(\"deltaId\");\n\n\t\tif (!opCol || !tableCol || !rowIdCol || !clientIdCol || !hlcCol || !deltaIdCol) {\n\t\t\treturn Err(new FlushError(\"Parquet data is missing required system columns\"));\n\t\t}\n\n\t\t// Identify user columns (everything that is not a system 
column)\n\t\tconst userColumnNames: string[] = [];\n\t\tfor (const field of arrowTable.schema.fields) {\n\t\t\tif (!SYSTEM_COLUMNS.has(field.name)) {\n\t\t\t\tuserColumnNames.push(field.name);\n\t\t\t}\n\t\t}\n\n\t\t// Check metadata for boolean columns that were stored as Int8\n\t\tconst boolColumnsRaw = arrowTable.schema.metadata.get(BOOL_COLUMNS_METADATA_KEY);\n\t\tconst boolColumnSet = new Set<string>(\n\t\t\tboolColumnsRaw ? (JSON.parse(boolColumnsRaw) as string[]) : [],\n\t\t);\n\n\t\tconst userColumnVectors = new Map<string, Vector>();\n\t\tfor (const colName of userColumnNames) {\n\t\t\tconst vec = arrowTable.getChild(colName);\n\t\t\tif (vec) {\n\t\t\t\tuserColumnVectors.set(colName, vec);\n\t\t\t}\n\t\t}\n\n\t\t// Reconstruct RowDelta objects from each row\n\t\tfor (let i = 0; i < numRows; i++) {\n\t\t\tconst op = opCol.get(i) as DeltaOp;\n\t\t\tconst tableName = tableCol.get(i) as string;\n\t\t\tconst rowId = rowIdCol.get(i) as string;\n\t\t\tconst clientId = clientIdCol.get(i) as string;\n\t\t\tconst hlc = hlcCol.get(i) as bigint as HLCTimestamp;\n\t\t\tconst deltaId = deltaIdCol.get(i) as string;\n\n\t\t\t// Build column deltas from user columns\n\t\t\tconst columns: ColumnDelta[] = [];\n\t\t\tfor (const [colName, col] of userColumnVectors) {\n\t\t\t\tconst rawValue: unknown = col.get(i);\n\n\t\t\t\t// Skip null values — they represent missing columns for this delta\n\t\t\t\tif (rawValue === null || rawValue === undefined) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\n\t\t\t\tlet value: unknown = rawValue;\n\n\t\t\t\t// Convert Int8 back to boolean if this column was originally boolean\n\t\t\t\tif (boolColumnSet.has(colName) && typeof rawValue === \"number\") {\n\t\t\t\t\tvalue = rawValue !== 0;\n\t\t\t\t}\n\t\t\t\t// Attempt to parse JSON-serialised strings back to objects/arrays.\n\t\t\t\t// JSON columns are stored as Utf8 strings via JSON.stringify during write.\n\t\t\t\t// We detect these by checking if the string starts with { or [.\n\t\t\t\telse if (typeof rawValue === \"string\") {\n\t\t\t\t\tconst trimmed = rawValue.trim();\n\t\t\t\t\tif (\n\t\t\t\t\t\t(trimmed.startsWith(\"{\") && trimmed.endsWith(\"}\")) ||\n\t\t\t\t\t\t(trimmed.startsWith(\"[\") && trimmed.endsWith(\"]\"))\n\t\t\t\t\t) {\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tvalue = JSON.parse(rawValue);\n\t\t\t\t\t\t} catch {\n\t\t\t\t\t\t\t// Not valid JSON — keep as plain string\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tcolumns.push({ column: colName, value });\n\t\t\t}\n\n\t\t\tdeltas.push({\n\t\t\t\top,\n\t\t\t\ttable: tableName,\n\t\t\t\trowId,\n\t\t\t\tclientId,\n\t\t\t\tcolumns,\n\t\t\t\thlc,\n\t\t\t\tdeltaId,\n\t\t\t});\n\t\t}\n\n\t\treturn Ok(deltas);\n\t} catch (err) {\n\t\tconst cause = err instanceof Error ? 
err : new Error(String(err));\n\t\treturn Err(new FlushError(`Failed to read deltas from Parquet: ${cause.message}`, cause));\n\t}\n}\n","import { readFileSync } from \"node:fs\";\nimport { createRequire } from \"node:module\";\nimport { initSync } from \"parquet-wasm/esm\";\n\nlet initialised = false;\n\n/**\n * Ensures the parquet-wasm WASM module is initialised.\n *\n * Uses `initSync` with the WASM binary loaded from disk.\n * Safe to call multiple times — subsequent calls are no-ops.\n */\nexport function ensureWasmInitialised(): void {\n\tif (initialised) return;\n\n\t// Resolve the path to the WASM binary relative to the parquet-wasm/esm entry\n\tconst require = createRequire(import.meta.url);\n\tconst esmEntryPath = require.resolve(\"parquet-wasm/esm\");\n\tconst wasmPath = esmEntryPath.replace(\"parquet_wasm.js\", \"parquet_wasm_bg.wasm\");\n\n\tconst wasmBytes = readFileSync(wasmPath);\n\tinitSync({ module: wasmBytes });\n\tinitialised = true;\n}\n","import { Err, FlushError, Ok, type Result, type RowDelta, type TableSchema } from \"@lakesync/core\";\nimport * as arrow from \"apache-arrow\";\nimport {\n\tCompression,\n\tTable as WasmTable,\n\tWriterPropertiesBuilder,\n\twriteParquet,\n} from \"parquet-wasm/esm\";\nimport { deltasToArrowTable } from \"./arrow-schema\";\nimport { ensureWasmInitialised } from \"./wasm\";\n\n/**\n * Metadata key used to store the names of columns that were originally\n * boolean but have been encoded as Int8 for Parquet compatibility.\n *\n * This works around an Arrow JS IPC serialisation issue where all-null\n * boolean columns produce a 0-byte data buffer that parquet-wasm rejects.\n */\nconst BOOL_COLUMNS_METADATA_KEY = \"lakesync:bool_columns\";\n\n/**\n * Converts Bool columns in an Arrow Table to Int8 (true=1, false=0, null=null)\n * to work around an Arrow JS IPC serialisation bug where all-null boolean\n * columns produce invalid IPC bytes for parquet-wasm.\n *\n * @param table - The Arrow Table to patch\n * @returns A tuple of [patched table, list of boolean column names]\n */\nfunction convertBoolColumnsToInt8(table: arrow.Table): [arrow.Table, string[]] {\n\tconst boolColumnNames: string[] = [];\n\tconst columns: Record<string, arrow.Vector> = {};\n\n\tfor (const field of table.schema.fields) {\n\t\tconst col = table.getChild(field.name);\n\t\tif (!col) continue;\n\n\t\tif (field.type instanceof arrow.Bool) {\n\t\t\tboolColumnNames.push(field.name);\n\t\t\t// Convert Bool -> Int8: true=1, false=0, null=null\n\t\t\tconst int8Values: (number | null)[] = [];\n\t\t\tfor (let i = 0; i < col.length; i++) {\n\t\t\t\tconst val: unknown = col.get(i);\n\t\t\t\tif (val === null || val === undefined) {\n\t\t\t\t\tint8Values.push(null);\n\t\t\t\t} else {\n\t\t\t\t\tint8Values.push(val ? 1 : 0);\n\t\t\t\t}\n\t\t\t}\n\t\t\tcolumns[field.name] = arrow.vectorFromArray(int8Values, new arrow.Int8());\n\t\t} else {\n\t\t\tcolumns[field.name] = col;\n\t\t}\n\t}\n\n\treturn [new arrow.Table(columns), boolColumnNames];\n}\n\n/**\n * Serialises an array of RowDelta objects into Parquet bytes.\n *\n * Converts deltas to an Apache Arrow Table via `deltasToArrowTable`,\n * then encodes the Arrow data as IPC stream bytes and writes them\n * to Parquet format using Snappy compression via parquet-wasm.\n *\n * Boolean columns are stored as Int8 (1/0/null) to work around an\n * Arrow JS IPC serialisation limitation. 
The original column types\n * are preserved in Parquet metadata for the reader to restore.\n *\n * @param deltas - The row deltas to serialise\n * @param schema - The table schema describing user-defined columns\n * @returns A Result containing the Parquet file as a Uint8Array, or a FlushError on failure\n */\nexport async function writeDeltasToParquet(\n\tdeltas: RowDelta[],\n\tschema: TableSchema,\n): Promise<Result<Uint8Array, FlushError>> {\n\ttry {\n\t\tensureWasmInitialised();\n\n\t\t// Convert deltas to Arrow Table\n\t\tconst arrowTable = deltasToArrowTable(deltas, schema);\n\n\t\t// Convert Bool columns to Int8 to avoid Arrow JS IPC boolean buffer bug\n\t\tconst [patchedTable, boolColumnNames] = convertBoolColumnsToInt8(arrowTable);\n\n\t\t// Serialise Arrow Table to IPC stream bytes\n\t\tconst ipcBytes = arrow.tableToIPC(patchedTable, \"stream\");\n\n\t\t// Create a WASM table from the IPC stream\n\t\tconst wasmTable = WasmTable.fromIPCStream(ipcBytes);\n\n\t\t// Configure writer properties with Snappy compression\n\t\t// and metadata to track original boolean columns.\n\t\t// Note: each builder method consumes the previous instance,\n\t\t// so the result must be reassigned at every step.\n\t\tlet builder = new WriterPropertiesBuilder();\n\t\tbuilder = builder.setCompression(Compression.SNAPPY);\n\n\t\tif (boolColumnNames.length > 0) {\n\t\t\tconst metadata = new Map<string, string>();\n\t\t\tmetadata.set(BOOL_COLUMNS_METADATA_KEY, JSON.stringify(boolColumnNames));\n\t\t\tbuilder = builder.setKeyValueMetadata(metadata);\n\t\t}\n\n\t\tconst writerProperties = builder.build();\n\n\t\t// Write to Parquet format\n\t\tconst parquetBytes = writeParquet(wasmTable, writerProperties);\n\n\t\treturn Ok(parquetBytes);\n\t} catch (err) {\n\t\tconst cause = err instanceof Error ? 
err : new Error(String(err));\n\t\treturn Err(new FlushError(`Failed to write deltas to Parquet: ${cause.message}`, cause));\n\t}\n}\n"],"mappings":";;;;;;;AACA,YAAY,WAAW;AAavB,IAAM,iBAA2D;AAAA,EAChE,QAAQ,MAAM,IAAU,WAAK;AAAA,EAC7B,QAAQ,MAAM,IAAU,cAAQ;AAAA,EAChC,SAAS,MAAM,IAAU,WAAK;AAAA,EAC9B,MAAM,MAAM,IAAU,WAAK;AAAA,EAC3B,MAAM,MAAM,IAAU,WAAK;AAC5B;AAEA,SAAS,oBAAoB,SAAqC;AACjE,SAAO,eAAe,OAAO,EAAE;AAChC;AAwBO,SAAS,iBAAiB,QAAmC;AACnE,QAAM,eAA8B;AAAA,IACnC,IAAU,YAAM,MAAM,IAAU,WAAK,GAAG,KAAK;AAAA,IAC7C,IAAU,YAAM,SAAS,IAAU,WAAK,GAAG,KAAK;AAAA,IAChD,IAAU,YAAM,SAAS,IAAU,WAAK,GAAG,KAAK;AAAA,IAChD,IAAU,YAAM,YAAY,IAAU,WAAK,GAAG,KAAK;AAAA,IACnD,IAAU,YAAM,OAAO,IAAU,YAAM,GAAG,KAAK;AAAA,IAC/C,IAAU,YAAM,WAAW,IAAU,WAAK,GAAG,KAAK;AAAA,EACnD;AAEA,QAAM,aAA4B,OAAO,QAAQ,IAAI,CAAC,QAAQ;AAC7D,UAAM,WACL,IAAI,SAAS,UACb,IAAI,SAAS,UACb,IAAI,SAAS,aACb,IAAI,SAAS,YACb,IAAI,SAAS;AACd,WAAO,IAAU,YAAM,IAAI,MAAM,oBAAoB,IAAI,IAAI,GAAG,QAAQ;AAAA,EACzE,CAAC;AAED,SAAO,IAAU,aAAO,CAAC,GAAG,cAAc,GAAG,UAAU,CAAC;AACzD;AAkBO,SAAS,mBAAmB,QAAoB,QAAkC;AACxF,QAAM,cAAc,iBAAiB,MAAM;AAE3C,MAAI,OAAO,WAAW,GAAG;AACxB,WAAO,IAAU,YAAM,WAAW;AAAA,EACnC;AAGA,QAAM,gBAAgB,oBAAI,IAAwB;AAClD,aAAW,OAAO,OAAO,SAAS;AACjC,kBAAc,IAAI,IAAI,MAAM,IAAI,IAAI;AAAA,EACrC;AAGA,QAAM,MAAgB,CAAC;AACvB,QAAM,SAAmB,CAAC;AAC1B,QAAM,SAAmB,CAAC;AAC1B,QAAM,YAAsB,CAAC;AAC7B,QAAM,OAAiB,CAAC;AACxB,QAAM,WAAqB,CAAC;AAG5B,QAAM,cAAc,oBAAI,IAAgC;AACxD,aAAW,OAAO,OAAO,SAAS;AACjC,gBAAY,IAAI,IAAI,MAAM,CAAC,CAAC;AAAA,EAC7B;AAGA,aAAW,SAAS,QAAQ;AAC3B,QAAI,KAAK,MAAM,EAAE;AACjB,WAAO,KAAK,MAAM,KAAK;AACvB,WAAO,KAAK,MAAM,KAAK;AACvB,cAAU,KAAK,MAAM,QAAQ;AAC7B,SAAK,KAAK,MAAM,GAAa;AAC7B,aAAS,KAAK,MAAM,OAAO;AAG3B,UAAM,cAAc,oBAAI,IAAqB;AAC7C,eAAW,YAAY,MAAM,SAAS;AACrC,kBAAY,IAAI,SAAS,QAAQ,SAAS,KAAK;AAAA,IAChD;AAGA,eAAW,OAAO,OAAO,SAAS;AACjC,YAAM,MAAM,YAAY,IAAI,IAAI,IAAI;AACpC,UAAI,CAAC,IAAK;AAEV,UAAI,YAAY,IAAI,IAAI,IAAI,GAAG;AAC9B,cAAM,QAAQ,YAAY,IAAI,IAAI,IAAI;AACtC,YAAI,IAAI,SAAS,QAAQ;AACxB,cAAI,KAAK,SAAS,OAAO,KAAK,UAAU,KAAK,IAAI,IAAI;AAAA,QACtD,OAAO;AACN,cAAI,KAAK,SAAS,IAAI;AAAA,QACvB;AAAA,MACD,OAAO;AACN,YAAI,KAAK,IAAI;AAAA,MACd;AAAA,IACD;AAAA,EACD;AAIA,QAAM,aAA2C,CAAC;AAGlD,aAAW,KAAW,sBAAgB,KAAK,IAAU,WAAK,CAAC;AAC3D,aAAW,QAAc,sBAAgB,QAAQ,IAAU,WAAK,CAAC;AACjE,aAAW,QAAc,sBAAgB,QAAQ,IAAU,WAAK,CAAC;AACjE,aAAW,WAAiB,sBAAgB,WAAW,IAAU,WAAK,CAAC;AACvE,aAAW,MAAY,sBAAgB,MAAM,IAAU,YAAM,CAAC;AAC9D,aAAW,UAAgB,sBAAgB,UAAU,IAAU,WAAK,CAAC;AAGrE,aAAW,OAAO,OAAO,SAAS;AACjC,UAAM,SAAS,YAAY,IAAI,IAAI,IAAI;AACvC,QAAI,CAAC,OAAQ;AAEb,UAAM,YAAY,oBAAoB,IAAI,IAAI;AAC9C,eAAW,IAAI,IAAI,IAAU,sBAAgB,QAAQ,SAAS;AAAA,EAC/D;AAGA,SAAO,IAAU,YAAM,UAAU;AAClC;;;AChKA,SAAS,oBAAiC;AAC1C,SAAS,mBAAmB;;;ACX5B,SAAS,oBAAoB;AAC7B,SAAS,qBAAqB;AAC9B,SAAS,gBAAgB;AAEzB,IAAI,cAAc;AAQX,SAAS,wBAA8B;AAC7C,MAAI,YAAa;AAGjB,QAAMA,WAAU,cAAc,YAAY,GAAG;AAC7C,QAAM,eAAeA,SAAQ,QAAQ,kBAAkB;AACvD,QAAM,WAAW,aAAa,QAAQ,mBAAmB,sBAAsB;AAE/E,QAAM,YAAY,aAAa,QAAQ;AACvC,WAAS,EAAE,QAAQ,UAAU,CAAC;AAC9B,gBAAc;AACf;;;ADRA,IAAM,iBAAiB,oBAAI,IAAI,CAAC,MAAM,SAAS,SAAS,YAAY,OAAO,SAAS,CAAC;AAMrF,IAAM,4BAA4B;AAelC,eAAsB,oBACrB,MAC0C;AAC1C,MAAI;AACH,0BAAsB;AAGtB,UAAM,YAAY,YAAY,IAAI;AAGlC,UAAM,WAAW,UAAU,cAAc;AACzC,UAAM,aAAa,aAAa,QAAQ;AAExC,UAAM,UAAU,WAAW;AAC3B,UAAM,SAAqB,CAAC;AAG5B,UAAM,QAAQ,WAAW,SAAS,IAAI;AACtC,UAAM,WAAW,WAAW,SAAS,OAAO;AAC5C,UAAM,WAAW,WAAW,SAAS,OAAO;AAC5C,UAAM,cAAc,WAAW,SAAS,UAAU;AAClD,UAAM,SAAS,WAAW,SAAS,KAAK;AACxC,UAAM,aAAa,WAAW,SAAS,SAAS;AAEhD,QAAI,CAAC,SAAS,CAAC,YAAY,CAAC,YAAY,CAAC,eAAe,CAAC,UAAU,CAAC,YAAY;AAC/E,aAAO,IAAI,IAAI,WAAW,iDAAiD,CAAC;AAAA,IAC7E;AAGA,UAAM,kBAA4B,CAAC;AACnC,eAAW,SAAS,WAAW,OAAO,QAAQ;AAC7C,UAAI,CAAC,eAAe,IAAI,MAAM,IAAI,GAAG;AACpC,wBAAgB,KAAK,MAAM,
IAAI;AAAA,MAChC;AAAA,IACD;AAGA,UAAM,iBAAiB,WAAW,OAAO,SAAS,IAAI,yBAAyB;AAC/E,UAAM,gBAAgB,IAAI;AAAA,MACzB,iBAAkB,KAAK,MAAM,cAAc,IAAiB,CAAC;AAAA,IAC9D;AAEA,UAAM,oBAAoB,oBAAI,IAAoB;AAClD,eAAW,WAAW,iBAAiB;AACtC,YAAM,MAAM,WAAW,SAAS,OAAO;AACvC,UAAI,KAAK;AACR,0BAAkB,IAAI,SAAS,GAAG;AAAA,MACnC;AAAA,IACD;AAGA,aAAS,IAAI,GAAG,IAAI,SAAS,KAAK;AACjC,YAAM,KAAK,MAAM,IAAI,CAAC;AACtB,YAAM,YAAY,SAAS,IAAI,CAAC;AAChC,YAAM,QAAQ,SAAS,IAAI,CAAC;AAC5B,YAAM,WAAW,YAAY,IAAI,CAAC;AAClC,YAAM,MAAM,OAAO,IAAI,CAAC;AACxB,YAAM,UAAU,WAAW,IAAI,CAAC;AAGhC,YAAM,UAAyB,CAAC;AAChC,iBAAW,CAAC,SAAS,GAAG,KAAK,mBAAmB;AAC/C,cAAM,WAAoB,IAAI,IAAI,CAAC;AAGnC,YAAI,aAAa,QAAQ,aAAa,QAAW;AAChD;AAAA,QACD;AAEA,YAAI,QAAiB;AAGrB,YAAI,cAAc,IAAI,OAAO,KAAK,OAAO,aAAa,UAAU;AAC/D,kBAAQ,aAAa;AAAA,QACtB,WAIS,OAAO,aAAa,UAAU;AACtC,gBAAM,UAAU,SAAS,KAAK;AAC9B,cACE,QAAQ,WAAW,GAAG,KAAK,QAAQ,SAAS,GAAG,KAC/C,QAAQ,WAAW,GAAG,KAAK,QAAQ,SAAS,GAAG,GAC/C;AACD,gBAAI;AACH,sBAAQ,KAAK,MAAM,QAAQ;AAAA,YAC5B,QAAQ;AAAA,YAER;AAAA,UACD;AAAA,QACD;AAEA,gBAAQ,KAAK,EAAE,QAAQ,SAAS,MAAM,CAAC;AAAA,MACxC;AAEA,aAAO,KAAK;AAAA,QACX;AAAA,QACA,OAAO;AAAA,QACP;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACD,CAAC;AAAA,IACF;AAEA,WAAO,GAAG,MAAM;AAAA,EACjB,SAAS,KAAK;AACb,UAAM,QAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AAChE,WAAO,IAAI,IAAI,WAAW,uCAAuC,MAAM,OAAO,IAAI,KAAK,CAAC;AAAA,EACzF;AACD;;;AElJA,YAAYC,YAAW;AACvB;AAAA,EACC;AAAA,EACA,SAAS;AAAA,EACT;AAAA,EACA;AAAA,OACM;AAWP,IAAMC,6BAA4B;AAUlC,SAAS,yBAAyB,OAA6C;AAC9E,QAAM,kBAA4B,CAAC;AACnC,QAAM,UAAwC,CAAC;AAE/C,aAAW,SAAS,MAAM,OAAO,QAAQ;AACxC,UAAM,MAAM,MAAM,SAAS,MAAM,IAAI;AACrC,QAAI,CAAC,IAAK;AAEV,QAAI,MAAM,gBAAsB,aAAM;AACrC,sBAAgB,KAAK,MAAM,IAAI;AAE/B,YAAM,aAAgC,CAAC;AACvC,eAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK;AACpC,cAAM,MAAe,IAAI,IAAI,CAAC;AAC9B,YAAI,QAAQ,QAAQ,QAAQ,QAAW;AACtC,qBAAW,KAAK,IAAI;AAAA,QACrB,OAAO;AACN,qBAAW,KAAK,MAAM,IAAI,CAAC;AAAA,QAC5B;AAAA,MACD;AACA,cAAQ,MAAM,IAAI,IAAU,uBAAgB,YAAY,IAAU,YAAK,CAAC;AAAA,IACzE,OAAO;AACN,cAAQ,MAAM,IAAI,IAAI;AAAA,IACvB;AAAA,EACD;AAEA,SAAO,CAAC,IAAU,aAAM,OAAO,GAAG,eAAe;AAClD;AAiBA,eAAsB,qBACrB,QACA,QAC0C;AAC1C,MAAI;AACH,0BAAsB;AAGtB,UAAM,aAAa,mBAAmB,QAAQ,MAAM;AAGpD,UAAM,CAAC,cAAc,eAAe,IAAI,yBAAyB,UAAU;AAG3E,UAAM,WAAiB,kBAAW,cAAc,QAAQ;AAGxD,UAAM,YAAY,UAAU,cAAc,QAAQ;AAMlD,QAAI,UAAU,IAAI,wBAAwB;AAC1C,cAAU,QAAQ,eAAe,YAAY,MAAM;AAEnD,QAAI,gBAAgB,SAAS,GAAG;AAC/B,YAAM,WAAW,oBAAI,IAAoB;AACzC,eAAS,IAAIA,4BAA2B,KAAK,UAAU,eAAe,CAAC;AACvE,gBAAU,QAAQ,oBAAoB,QAAQ;AAAA,IAC/C;AAEA,UAAM,mBAAmB,QAAQ,MAAM;AAGvC,UAAM,eAAe,aAAa,WAAW,gBAAgB;AAE7D,WAAO,GAAG,YAAY;AAAA,EACvB,SAAS,KAAK;AACb,UAAM,QAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AAChE,WAAO,IAAI,IAAI,WAAW,sCAAsC,MAAM,OAAO,IAAI,KAAK,CAAC;AAAA,EACxF;AACD;","names":["require","arrow","BOOL_COLUMNS_METADATA_KEY"]}