@housekit/orm 0.1.47 → 0.1.49

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. package/README.md +120 -5
  2. package/dist/builders/delete.js +112 -0
  3. package/dist/builders/insert.d.ts +0 -91
  4. package/dist/builders/insert.js +393 -0
  5. package/dist/builders/prepared.d.ts +1 -2
  6. package/dist/builders/prepared.js +30 -0
  7. package/dist/builders/select.d.ts +0 -161
  8. package/dist/builders/select.js +562 -0
  9. package/dist/builders/select.types.js +1 -0
  10. package/dist/builders/update.js +136 -0
  11. package/dist/client.d.ts +0 -6
  12. package/dist/client.js +140 -0
  13. package/dist/codegen/zod.js +107 -0
  14. package/dist/column.d.ts +1 -25
  15. package/dist/column.js +133 -0
  16. package/dist/compiler.d.ts +0 -7
  17. package/dist/compiler.js +513 -0
  18. package/dist/core.js +6 -0
  19. package/dist/data-types.d.ts +0 -61
  20. package/dist/data-types.js +127 -0
  21. package/dist/dictionary.d.ts +0 -149
  22. package/dist/dictionary.js +158 -0
  23. package/dist/engines.d.ts +0 -385
  24. package/dist/engines.js +292 -0
  25. package/dist/expressions.d.ts +0 -10
  26. package/dist/expressions.js +268 -0
  27. package/dist/external.d.ts +0 -112
  28. package/dist/external.js +224 -0
  29. package/dist/index.d.ts +0 -51
  30. package/dist/index.js +139 -6853
  31. package/dist/logger.js +36 -0
  32. package/dist/materialized-views.d.ts +0 -188
  33. package/dist/materialized-views.js +380 -0
  34. package/dist/metadata.js +59 -0
  35. package/dist/modules/aggregates.d.ts +0 -164
  36. package/dist/modules/aggregates.js +121 -0
  37. package/dist/modules/array.d.ts +0 -98
  38. package/dist/modules/array.js +71 -0
  39. package/dist/modules/conditional.d.ts +0 -84
  40. package/dist/modules/conditional.js +138 -0
  41. package/dist/modules/conversion.d.ts +0 -147
  42. package/dist/modules/conversion.js +109 -0
  43. package/dist/modules/geo.d.ts +0 -164
  44. package/dist/modules/geo.js +112 -0
  45. package/dist/modules/hash.js +4 -0
  46. package/dist/modules/index.js +12 -0
  47. package/dist/modules/json.d.ts +0 -106
  48. package/dist/modules/json.js +76 -0
  49. package/dist/modules/math.d.ts +0 -16
  50. package/dist/modules/math.js +16 -0
  51. package/dist/modules/string.d.ts +0 -136
  52. package/dist/modules/string.js +89 -0
  53. package/dist/modules/time.d.ts +0 -123
  54. package/dist/modules/time.js +91 -0
  55. package/dist/modules/types.d.ts +0 -133
  56. package/dist/modules/types.js +114 -0
  57. package/dist/modules/window.js +140 -0
  58. package/dist/relational.d.ts +0 -82
  59. package/dist/relational.js +290 -0
  60. package/dist/relations.js +21 -0
  61. package/dist/schema-builder.d.ts +0 -90
  62. package/dist/schema-builder.js +140 -0
  63. package/dist/table.d.ts +0 -42
  64. package/dist/table.js +406 -0
  65. package/dist/utils/background-batcher.js +75 -0
  66. package/dist/utils/batch-transform.js +51 -0
  67. package/dist/utils/binary-reader.d.ts +0 -6
  68. package/dist/utils/binary-reader.js +334 -0
  69. package/dist/utils/binary-serializer.d.ts +0 -125
  70. package/dist/utils/binary-serializer.js +637 -0
  71. package/dist/utils/binary-worker-code.js +1 -0
  72. package/dist/utils/binary-worker-pool.d.ts +0 -34
  73. package/dist/utils/binary-worker-pool.js +206 -0
  74. package/dist/utils/binary-worker.d.ts +0 -11
  75. package/dist/utils/binary-worker.js +63 -0
  76. package/dist/utils/insert-processing.d.ts +0 -2
  77. package/dist/utils/insert-processing.js +163 -0
  78. package/dist/utils/lru-cache.js +30 -0
  79. package/package.json +68 -3
@@ -0,0 +1,75 @@
1
+ class BackgroundBatcher {
2
+ client;
3
+ queues = new Map();
4
+ timers = new Map();
5
+ tables = new Map();
6
+ constructor(client) {
7
+ this.client = client;
8
+ }
9
+ async add(table, row, config) {
10
+ const tableName = table.$table;
11
+ if (!this.queues.has(tableName)) {
12
+ this.queues.set(tableName, []);
13
+ this.tables.set(tableName, table);
14
+ }
15
+ const queue = this.queues.get(tableName);
16
+ queue.push(row);
17
+ if (queue.length >= config.maxRows) {
18
+ await this.flush(tableName);
19
+ return;
20
+ }
21
+ if (queue.length === 1) {
22
+ const timer = setTimeout(() => {
23
+ this.flush(tableName);
24
+ }, config.flushIntervalMs);
25
+ if (typeof timer.unref === 'function') {
26
+ timer.unref();
27
+ }
28
+ this.timers.set(tableName, timer);
29
+ }
30
+ }
31
+ async flush(tableName) {
32
+ const queue = this.queues.get(tableName);
33
+ const table = this.tables.get(tableName);
34
+ if (!queue || queue.length === 0 || !table)
35
+ return;
36
+ const timer = this.timers.get(tableName);
37
+ if (timer)
38
+ clearTimeout(timer);
39
+ this.timers.delete(tableName);
40
+ const dataToInsert = [...queue];
41
+ this.queues.set(tableName, []);
42
+ try {
43
+ const { Readable } = await import('stream');
44
+ const stream = Readable.from(dataToInsert, { objectMode: true });
45
+ await this.client.insert({
46
+ table: tableName,
47
+ values: stream,
48
+ format: 'JSONEachRow',
49
+ clickhouse_settings: {
50
+ async_insert: 1,
51
+ wait_for_async_insert: 0,
52
+ }
53
+ });
54
+ }
55
+ catch (err) {
56
+ console.error(`[housekit] Background flush failed for ${tableName}:`, err);
57
+ }
58
+ }
59
+ async flushAll() {
60
+ const promises = [];
61
+ for (const tableName of this.queues.keys()) {
62
+ promises.push(this.flush(tableName));
63
+ }
64
+ await Promise.all(promises);
65
+ }
66
+ }
67
+ const batchers = new WeakMap();
68
+ export const globalBatcher = (client) => {
69
+ let batcher = batchers.get(client);
70
+ if (!batcher) {
71
+ batcher = new BackgroundBatcher(client);
72
+ batchers.set(client, batcher);
73
+ }
74
+ return batcher;
75
+ };
@@ -0,0 +1,51 @@
1
+ import { Transform } from 'stream';
2
+ import { processRowWithPlan } from './insert-processing';
3
+ export class BatchTransformStream extends Transform {
4
+ plan;
5
+ mode;
6
+ batch = [];
7
+ batchSize;
8
+ constructor(plan, mode, options = {}) {
9
+ super({ objectMode: true });
10
+ this.plan = plan;
11
+ this.mode = mode;
12
+ this.batchSize = options.batchSize || 100;
13
+ }
14
+ _transform(chunk, _encoding, callback) {
15
+ this.batch.push(chunk);
16
+ if (this.batch.length >= this.batchSize) {
17
+ this.processBatch(callback);
18
+ }
19
+ else {
20
+ this.scheduleProcessing(callback);
21
+ }
22
+ }
23
+ _flush(callback) {
24
+ if (this.batch.length > 0) {
25
+ this.processBatch(callback);
26
+ }
27
+ else {
28
+ callback();
29
+ }
30
+ }
31
+ scheduleProcessing(callback) {
32
+ callback();
33
+ }
34
+ processBatch(callback) {
35
+ const batchToProcess = this.batch;
36
+ this.batch = [];
37
+ try {
38
+ for (const row of batchToProcess) {
39
+ const processed = processRowWithPlan(row, this.plan, this.mode);
40
+ this.push(processed);
41
+ }
42
+ callback();
43
+ }
44
+ catch (error) {
45
+ callback(error);
46
+ }
47
+ }
48
+ }
49
+ export function createBatchTransformStream(plan, mode, options) {
50
+ return new BatchTransformStream(plan, mode, options);
51
+ }
@@ -1,9 +1,3 @@
1
- /**
2
- * HouseKit Binary Reader - Ultra-Fast RowBinary Decoding
3
- *
4
- * optimized for reading ClickHouse RowBinary format directly from buffers.
5
- * This is 10-20x faster than JSON.parse() for large datasets.
6
- */
7
1
  export declare class BinaryReader {
8
2
  private buffer;
9
3
  private offset;
@@ -0,0 +1,334 @@
1
+ export class BinaryReader {
2
+ buffer;
3
+ offset = 0;
4
+ view;
5
+ constructor(buffer) {
6
+ this.buffer = buffer;
7
+ this.view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength);
8
+ }
9
+ reset(buffer) {
10
+ this.buffer = buffer;
11
+ this.offset = 0;
12
+ this.view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength);
13
+ }
14
+ getOffset() {
15
+ return this.offset;
16
+ }
17
+ isEOF() {
18
+ return this.offset >= this.buffer.length;
19
+ }
20
+ readInt8() {
21
+ const val = this.view.getInt8(this.offset);
22
+ this.offset += 1;
23
+ return val;
24
+ }
25
+ readUInt8() {
26
+ const val = this.view.getUint8(this.offset);
27
+ this.offset += 1;
28
+ return val;
29
+ }
30
+ readInt16() {
31
+ const val = this.view.getInt16(this.offset, true);
32
+ this.offset += 2;
33
+ return val;
34
+ }
35
+ readUInt16() {
36
+ const val = this.view.getUint16(this.offset, true);
37
+ this.offset += 2;
38
+ return val;
39
+ }
40
+ readInt32() {
41
+ const val = this.view.getInt32(this.offset, true);
42
+ this.offset += 4;
43
+ return val;
44
+ }
45
+ readUInt32() {
46
+ const val = this.view.getUint32(this.offset, true);
47
+ this.offset += 4;
48
+ return val;
49
+ }
50
+ readInt64() {
51
+ const val = this.view.getBigInt64(this.offset, true);
52
+ this.offset += 8;
53
+ return val;
54
+ }
55
+ readUInt64() {
56
+ const val = this.view.getBigUint64(this.offset, true);
57
+ this.offset += 8;
58
+ return val;
59
+ }
60
+ readInt128() {
61
+ const low = this.view.getBigUint64(this.offset, true);
62
+ const high = this.view.getBigInt64(this.offset + 8, true);
63
+ this.offset += 16;
64
+ return (high << 64n) | low;
65
+ }
66
+ readUInt128() {
67
+ const low = this.view.getBigUint64(this.offset, true);
68
+ const high = this.view.getBigUint64(this.offset + 8, true);
69
+ this.offset += 16;
70
+ return (high << 64n) | low;
71
+ }
72
+ readInt256() {
73
+ let val = 0n;
74
+ for (let i = 0; i < 4; i++) {
75
+ const word = this.view.getBigUint64(this.offset + i * 8, true);
76
+ val |= word << (BigInt(i) * 64n);
77
+ }
78
+ this.offset += 32;
79
+ return val;
80
+ }
81
+ readUInt256() {
82
+ return this.readInt256();
83
+ }
84
+ readFloat32() {
85
+ const val = this.view.getFloat32(this.offset, true);
86
+ this.offset += 4;
87
+ return val;
88
+ }
89
+ readFloat64() {
90
+ const val = this.view.getFloat64(this.offset, true);
91
+ this.offset += 8;
92
+ return val;
93
+ }
94
+ readVarInt() {
95
+ let result = 0;
96
+ let shift = 0;
97
+ while (true) {
98
+ const byte = this.buffer[this.offset++];
99
+ result |= (byte & 0x7f) << shift;
100
+ if ((byte & 0x80) === 0)
101
+ break;
102
+ shift += 7;
103
+ }
104
+ return result;
105
+ }
106
+ readString() {
107
+ const len = this.readVarInt();
108
+ if (len === 0)
109
+ return '';
110
+ const str = this.buffer.toString('utf-8', this.offset, this.offset + len);
111
+ this.offset += len;
112
+ return str;
113
+ }
114
+ readFixedString(length) {
115
+ const str = this.buffer.toString('utf-8', this.offset, this.offset + length);
116
+ this.offset += length;
117
+ return str.replace(/\u0000+$/, '');
118
+ }
119
+ readUUID() {
120
+ const bytes = this.buffer.subarray(this.offset, this.offset + 16);
121
+ this.offset += 16;
122
+ const hex = Buffer.allocUnsafe(16);
123
+ for (let i = 0; i < 8; i++) {
124
+ hex[i] = bytes[7 - i];
125
+ }
126
+ for (let i = 0; i < 8; i++) {
127
+ hex[8 + i] = bytes[15 - (i)];
128
+ }
129
+ const s = hex.toString('hex');
130
+ return `${s.slice(0, 8)}-${s.slice(8, 12)}-${s.slice(12, 16)}-${s.slice(16, 20)}-${s.slice(20)}`;
131
+ }
132
+ readDate() {
133
+ const days = this.readUInt16();
134
+ const date = new Date(days * 24 * 60 * 60 * 1000);
135
+ return date.toISOString().split('T')[0];
136
+ }
137
+ readDate32() {
138
+ const days = this.readInt32();
139
+ const date = new Date(days * 24 * 60 * 60 * 1000);
140
+ return date.toISOString().split('T')[0];
141
+ }
142
+ readDateTime() {
143
+ const seconds = this.readUInt32();
144
+ return new Date(seconds * 1000);
145
+ }
146
+ readDateTime64(precision = 3) {
147
+ const ticks = this.readInt64();
148
+ const divisor = BigInt(Math.pow(10, precision));
149
+ let ms;
150
+ if (precision === 3) {
151
+ ms = ticks;
152
+ }
153
+ else if (precision > 3) {
154
+ ms = ticks / BigInt(Math.pow(10, precision - 3));
155
+ }
156
+ else {
157
+ ms = ticks * BigInt(Math.pow(10, 3 - precision));
158
+ }
159
+ return new Date(Number(ms));
160
+ }
161
+ readNullable(reader) {
162
+ const isNull = this.readUInt8();
163
+ if (isNull === 1)
164
+ return null;
165
+ return reader();
166
+ }
167
+ readArray(itemReader) {
168
+ const lenBig = this.readUInt64();
169
+ const length = Number(lenBig);
170
+ const res = new Array(length);
171
+ for (let i = 0; i < length; i++) {
172
+ res[i] = itemReader();
173
+ }
174
+ return res;
175
+ }
176
+ readMap(keyReader, valueReader) {
177
+ const lenBig = this.readUInt64();
178
+ const length = Number(lenBig);
179
+ const res = {};
180
+ for (let i = 0; i < length; i++) {
181
+ const key = keyReader();
182
+ const value = valueReader();
183
+ res[key] = value;
184
+ }
185
+ return res;
186
+ }
187
+ readDecimal32(scale) {
188
+ const val = this.readInt32();
189
+ return val / Math.pow(10, scale);
190
+ }
191
+ readDecimal64(scale) {
192
+ const val = this.readInt64();
193
+ return Number(val) / Math.pow(10, scale);
194
+ }
195
+ readDecimal128(scale) {
196
+ const val = this.readInt128();
197
+ return Number(val) / Math.pow(10, scale);
198
+ }
199
+ readBool() {
200
+ return this.readUInt8() === 1;
201
+ }
202
+ readIPv4() {
203
+ const val = this.readUInt32();
204
+ return [
205
+ (val) & 0xFF,
206
+ (val >> 8) & 0xFF,
207
+ (val >> 16) & 0xFF,
208
+ (val >> 24) & 0xFF
209
+ ].join('.');
210
+ }
211
+ readIPv6() {
212
+ const buffer = this.buffer.subarray(this.offset, this.offset + 16);
213
+ this.offset += 16;
214
+ const parts = [];
215
+ for (let i = 0; i < 16; i += 2) {
216
+ const group = buffer.readUInt16BE(i).toString(16);
217
+ parts.push(group);
218
+ }
219
+ return parts.join(':').replace(/(^|:)0(:0)*:0(:|$)/, '::');
220
+ }
221
+ }
222
+ export function createBinaryDecoder(type) {
223
+ const t = type.toLowerCase().trim();
224
+ if (t.startsWith('nullable(')) {
225
+ const inner = t.match(/^nullable\((.+)\)$/)[1];
226
+ const innerDecoder = createBinaryDecoder(inner);
227
+ return (r) => r.readNullable(() => innerDecoder(r));
228
+ }
229
+ if (t.startsWith('array(')) {
230
+ const inner = t.match(/^array\((.+)\)$/)[1];
231
+ const innerDecoder = createBinaryDecoder(inner);
232
+ return (r) => r.readArray(() => innerDecoder(r));
233
+ }
234
+ if (t.startsWith('map(')) {
235
+ const inner = t.match(/^map\((.+)\)$/)[1];
236
+ const [keyType, valueType] = parseGenericTypes(inner);
237
+ const keyDecoder = createBinaryDecoder(keyType);
238
+ const valueDecoder = createBinaryDecoder(valueType);
239
+ return (r) => r.readMap(() => keyDecoder(r), () => valueDecoder(r));
240
+ }
241
+ if (t.startsWith('tuple(')) {
242
+ const inner = t.match(/^tuple\((.+)\)$/)[1];
243
+ const types = parseGenericTypes(inner);
244
+ const decoders = types.map(type => createBinaryDecoder(type));
245
+ return (r) => decoders.map(d => d(r));
246
+ }
247
+ if (t.startsWith('nested(')) {
248
+ const inner = t.match(/^nested\((.+)\)$/)[1];
249
+ const fields = parseGenericTypes(inner);
250
+ const types = fields.map(f => {
251
+ const parts = f.trim().split(/\s+/);
252
+ return parts.length < 2 ? 'String' : parts.slice(1).join(' ');
253
+ });
254
+ const tupleDecoders = types.map(type => createBinaryDecoder(type));
255
+ return (r) => r.readArray(() => tupleDecoders.map(d => d(r)));
256
+ }
257
+ if (t.startsWith('lowcardinality(')) {
258
+ const inner = t.match(/^lowcardinality\((.+)\)$/)[1];
259
+ return createBinaryDecoder(inner);
260
+ }
261
+ if (t.startsWith('fixedstring(')) {
262
+ const len = parseInt(t.match(/\d+/)[0], 10);
263
+ return (r) => r.readFixedString(len);
264
+ }
265
+ if (t.startsWith('decimal')) {
266
+ const match = t.match(/^decimal(?:32|64|128)?\((\d+),\s*(\d+)\)$/);
267
+ if (match) {
268
+ const scale = parseInt(match[2], 10);
269
+ if (t.includes('decimal128'))
270
+ return (r) => r.readDecimal128(scale);
271
+ if (t.includes('decimal64'))
272
+ return (r) => r.readDecimal64(scale);
273
+ return (r) => r.readDecimal32(scale);
274
+ }
275
+ }
276
+ if (t.startsWith('datetime64')) {
277
+ const match = t.match(/^datetime64\((\d+)/);
278
+ const precision = match ? parseInt(match[1], 10) : 3;
279
+ return (r) => r.readDateTime64(precision);
280
+ }
281
+ if (t.startsWith('enum')) {
282
+ if (t.startsWith('enum8'))
283
+ return (r) => r.readInt8();
284
+ return (r) => r.readInt16();
285
+ }
286
+ switch (t) {
287
+ case 'uint8': return (r) => r.readUInt8();
288
+ case 'int8': return (r) => r.readInt8();
289
+ case 'uint16': return (r) => r.readUInt16();
290
+ case 'int16': return (r) => r.readInt16();
291
+ case 'uint32': return (r) => r.readUInt32();
292
+ case 'int32': return (r) => r.readInt32();
293
+ case 'uint64': return (r) => r.readUInt64();
294
+ case 'int64': return (r) => r.readInt64();
295
+ case 'uint128': return (r) => r.readUInt128();
296
+ case 'int128': return (r) => r.readInt128();
297
+ case 'uint256': return (r) => r.readUInt256();
298
+ case 'int256': return (r) => r.readInt256();
299
+ case 'float32': return (r) => r.readFloat32();
300
+ case 'float64': return (r) => r.readFloat64();
301
+ case 'string': return (r) => r.readString();
302
+ case 'uuid': return (r) => r.readUUID();
303
+ case 'bool': return (r) => r.readBool();
304
+ case 'date': return (r) => r.readDate();
305
+ case 'date32': return (r) => r.readDate32();
306
+ case 'datetime': return (r) => r.readDateTime();
307
+ case 'ipv4': return (r) => r.readIPv4();
308
+ case 'ipv6': return (r) => r.readIPv6();
309
+ default: return (r) => r.readString();
310
+ }
311
+ }
312
+ function parseGenericTypes(typeString) {
313
+ const args = [];
314
+ let current = '';
315
+ let parenDepth = 0;
316
+ for (let i = 0; i < typeString.length; i++) {
317
+ const char = typeString[i];
318
+ if (char === ',' && parenDepth === 0) {
319
+ args.push(current.trim());
320
+ current = '';
321
+ }
322
+ else {
323
+ if (char === '(')
324
+ parenDepth++;
325
+ if (char === ')')
326
+ parenDepth--;
327
+ current += char;
328
+ }
329
+ }
330
+ if (current.trim()) {
331
+ args.push(current.trim());
332
+ }
333
+ return args;
334
+ }
@@ -1,47 +1,12 @@
1
- /**
2
- * Acquire a BinaryWriter from the pool (or create new if pool empty)
3
- */
4
1
  export declare function acquireWriter(): BinaryWriter;
5
- /**
6
- * Release a BinaryWriter back to the pool
7
- */
8
2
  export declare function releaseWriter(writer: BinaryWriter): void;
9
- /**
10
- * HouseKit Binary Serializer - Ultra-Fast RowBinary Encoding
11
- *
12
- * ClickHouse's RowBinary format sends data directly as bytes with no parsing overhead.
13
- * This is the fastest possible way to insert data into ClickHouse.
14
- *
15
- * Benefits over JSONEachRow:
16
- * - No JSON.stringify() overhead
17
- * - No string escaping
18
- * - No parsing on ClickHouse side
19
- * - Smaller payload (Int64 = 8 bytes vs up to 20 bytes as string)
20
- * - Lower GC pressure (no intermediate strings)
21
- */
22
- /**
23
- * Efficient binary buffer writer that minimizes allocations.
24
- * Uses a pre-allocated buffer that grows as needed.
25
- */
26
3
  export declare class BinaryWriter {
27
4
  private buffer;
28
5
  private offset;
29
6
  constructor(initialSize?: number);
30
- /**
31
- * Ensure buffer has enough space for n more bytes
32
- */
33
7
  private ensureCapacity;
34
- /**
35
- * Reset the writer for reuse (avoids allocating new buffer)
36
- */
37
8
  reset(): void;
38
- /**
39
- * Get the final buffer with only written bytes
40
- */
41
9
  getBuffer(): Buffer;
42
- /**
43
- * Get a copy of the buffer (safe to use after reset)
44
- */
45
10
  toBuffer(): Buffer;
46
11
  writeInt8(value: number): void;
47
12
  writeUInt8(value: number): void;
@@ -57,124 +22,48 @@ export declare class BinaryWriter {
57
22
  writeUInt256(value: bigint): void;
58
23
  writeFloat32(value: number): void;
59
24
  writeFloat64(value: number): void;
60
- /**
61
- * Write a variable-length integer (LEB128).
62
- * Used for string lengths in RowBinary format.
63
- */
64
25
  writeVarInt(value: number): void;
65
- /**
66
- * Write a string in RowBinary format: [VarInt length][UTF-8 bytes]
67
- */
68
26
  writeString(value: string): void;
69
- /**
70
- * Write a FixedString(N) - padded with null bytes if shorter
71
- */
72
27
  writeFixedString(value: string, length: number): void;
73
- /**
74
- * Write raw bytes directly
75
- */
76
28
  writeBytes(data: Buffer): void;
77
- /**
78
- * Write a UUID (16 bytes).
79
- * ClickHouse stores UUIDs as two UInt64 in big-endian order!
80
- * This is different from the usual little-endian storage.
81
- */
82
29
  writeUUID(value: string): void;
83
- /**
84
- * Write a Date (days since epoch as UInt16)
85
- */
86
30
  writeDate(value: Date | number): void;
87
- /**
88
- * Write a Date32 (days since epoch as Int32)
89
- */
90
31
  writeDate32(value: Date | number): void;
91
- /**
92
- * Write a DateTime (seconds since epoch as UInt32)
93
- */
94
32
  writeDateTime(value: Date | number): void;
95
- /**
96
- * Write a DateTime64 with specified precision
97
- */
98
33
  writeDateTime64(value: Date | number, precision?: number): void;
99
- /**
100
- * Write a nullable prefix (0 = not null, 1 = null)
101
- */
102
34
  writeNullable(isNull: boolean): void;
103
- /**
104
- * Write array length prefix
105
- */
106
35
  writeArrayLength(length: number): void;
107
- /**
108
- * Write Decimal32 (stored as Int32)
109
- */
110
36
  writeDecimal32(value: number, scale: number): void;
111
- /**
112
- * Write Decimal64 (stored as Int64)
113
- */
114
37
  writeDecimal64(value: number, scale: number): void;
115
- /**
116
- * Write Decimal128 (stored as Int128)
117
- */
118
38
  writeDecimal128(value: number | bigint, scale: number): void;
119
39
  writeBool(value: boolean): void;
120
- /**
121
- * Write IPv4 address (UInt32 in network byte order)
122
- */
123
40
  writeIPv4(value: string | number): void;
124
- /**
125
- * Write IPv6 address (16 bytes)
126
- */
127
41
  writeIPv6(value: string | Buffer): void;
128
42
  private expandIPv6;
129
43
  writeEnum8(value: number): void;
130
44
  writeEnum16(value: number): void;
131
45
  }
132
46
  export type BinaryEncoder = (writer: BinaryWriter, value: any) => void;
133
- /**
134
- * Create a binary encoder for a ClickHouse column type
135
- */
136
47
  export declare function createBinaryEncoder(clickhouseType: string, isNullable?: boolean): BinaryEncoder;
137
- /**
138
- * Configuration for binary serialization
139
- */
140
48
  export interface BinarySerializationConfig {
141
- /** Column names in order */
142
49
  columns: Array<{
143
50
  name: string;
144
51
  type: string;
145
52
  isNullable: boolean;
146
53
  }>;
147
- /** Property key mapping (propKey -> column index) */
148
54
  keyMapping: Map<string, number>;
149
- /** Pre-compiled encoders for each column */
150
55
  encoders: BinaryEncoder[];
151
56
  }
152
- /**
153
- * Build a binary serialization configuration from a table definition
154
- */
155
57
  export declare function buildBinaryConfig(columns: Array<{
156
58
  name: string;
157
59
  type: string;
158
60
  isNull: boolean;
159
61
  propKey: string;
160
62
  }>): BinarySerializationConfig;
161
- /**
162
- * Serialize a single row to RowBinary format
163
- */
164
63
  export declare function serializeRowBinary(row: Record<string, any>, config: BinarySerializationConfig, writer?: BinaryWriter): Buffer;
165
- /**
166
- * Serialize multiple rows to a single RowBinary buffer
167
- */
168
64
  export declare function serializeRowsBinary(rows: Array<Record<string, any>>, config: BinarySerializationConfig): Buffer;
169
65
  export type RowAccessor = (row: any) => any;
170
- /**
171
- * Create an optimized accessor function for a column.
172
- * Uses direct property access instead of dynamic lookup.
173
- */
174
66
  export declare function createAccessor(propKey: string, columnName: string): RowAccessor;
175
- /**
176
- * Optimized serialization config with pre-compiled accessors
177
- */
178
67
  export interface OptimizedBinaryConfig {
179
68
  columns: Array<{
180
69
  name: string;
@@ -183,12 +72,8 @@ export interface OptimizedBinaryConfig {
183
72
  }>;
184
73
  encoders: BinaryEncoder[];
185
74
  accessors: RowAccessor[];
186
- /** Skip validation for maximum performance (Optimization #4) */
187
75
  skipValidation?: boolean;
188
76
  }
189
- /**
190
- * Build an optimized binary config with pre-compiled accessors
191
- */
192
77
  export declare function buildOptimizedBinaryConfig(columns: Array<{
193
78
  name: string;
194
79
  type: string;
@@ -197,20 +82,10 @@ export declare function buildOptimizedBinaryConfig(columns: Array<{
197
82
  }>, options?: {
198
83
  skipValidation?: boolean;
199
84
  }): OptimizedBinaryConfig;
200
- /**
201
- * Ultra-fast serialization using pre-compiled accessors and pooled writer
202
- */
203
85
  export declare function serializeRowsOptimized(rows: Array<Record<string, any>>, config: OptimizedBinaryConfig): Buffer;
204
- /**
205
- * Check if a schema is numeric-heavy (>50% numeric columns)
206
- */
207
86
  export declare function isNumericHeavySchema(columns: Array<{
208
87
  type: string;
209
88
  }>): boolean;
210
- /**
211
- * Batch serialize numeric columns using TypedArrays for better performance.
212
- * Only use for schemas with mostly numeric columns.
213
- */
214
89
  export declare function serializeNumericBatch(rows: Array<Record<string, any>>, config: OptimizedBinaryConfig, numericIndices: number[]): {
215
90
  numericBuffer: ArrayBuffer;
216
91
  otherData: any[][];