@housekit/orm 0.1.47 → 0.1.49

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79) hide show
  1. package/README.md +120 -5
  2. package/dist/builders/delete.js +112 -0
  3. package/dist/builders/insert.d.ts +0 -91
  4. package/dist/builders/insert.js +393 -0
  5. package/dist/builders/prepared.d.ts +1 -2
  6. package/dist/builders/prepared.js +30 -0
  7. package/dist/builders/select.d.ts +0 -161
  8. package/dist/builders/select.js +562 -0
  9. package/dist/builders/select.types.js +1 -0
  10. package/dist/builders/update.js +136 -0
  11. package/dist/client.d.ts +0 -6
  12. package/dist/client.js +140 -0
  13. package/dist/codegen/zod.js +107 -0
  14. package/dist/column.d.ts +1 -25
  15. package/dist/column.js +133 -0
  16. package/dist/compiler.d.ts +0 -7
  17. package/dist/compiler.js +513 -0
  18. package/dist/core.js +6 -0
  19. package/dist/data-types.d.ts +0 -61
  20. package/dist/data-types.js +127 -0
  21. package/dist/dictionary.d.ts +0 -149
  22. package/dist/dictionary.js +158 -0
  23. package/dist/engines.d.ts +0 -385
  24. package/dist/engines.js +292 -0
  25. package/dist/expressions.d.ts +0 -10
  26. package/dist/expressions.js +268 -0
  27. package/dist/external.d.ts +0 -112
  28. package/dist/external.js +224 -0
  29. package/dist/index.d.ts +0 -51
  30. package/dist/index.js +139 -6853
  31. package/dist/logger.js +36 -0
  32. package/dist/materialized-views.d.ts +0 -188
  33. package/dist/materialized-views.js +380 -0
  34. package/dist/metadata.js +59 -0
  35. package/dist/modules/aggregates.d.ts +0 -164
  36. package/dist/modules/aggregates.js +121 -0
  37. package/dist/modules/array.d.ts +0 -98
  38. package/dist/modules/array.js +71 -0
  39. package/dist/modules/conditional.d.ts +0 -84
  40. package/dist/modules/conditional.js +138 -0
  41. package/dist/modules/conversion.d.ts +0 -147
  42. package/dist/modules/conversion.js +109 -0
  43. package/dist/modules/geo.d.ts +0 -164
  44. package/dist/modules/geo.js +112 -0
  45. package/dist/modules/hash.js +4 -0
  46. package/dist/modules/index.js +12 -0
  47. package/dist/modules/json.d.ts +0 -106
  48. package/dist/modules/json.js +76 -0
  49. package/dist/modules/math.d.ts +0 -16
  50. package/dist/modules/math.js +16 -0
  51. package/dist/modules/string.d.ts +0 -136
  52. package/dist/modules/string.js +89 -0
  53. package/dist/modules/time.d.ts +0 -123
  54. package/dist/modules/time.js +91 -0
  55. package/dist/modules/types.d.ts +0 -133
  56. package/dist/modules/types.js +114 -0
  57. package/dist/modules/window.js +140 -0
  58. package/dist/relational.d.ts +0 -82
  59. package/dist/relational.js +290 -0
  60. package/dist/relations.js +21 -0
  61. package/dist/schema-builder.d.ts +0 -90
  62. package/dist/schema-builder.js +140 -0
  63. package/dist/table.d.ts +0 -42
  64. package/dist/table.js +406 -0
  65. package/dist/utils/background-batcher.js +75 -0
  66. package/dist/utils/batch-transform.js +51 -0
  67. package/dist/utils/binary-reader.d.ts +0 -6
  68. package/dist/utils/binary-reader.js +334 -0
  69. package/dist/utils/binary-serializer.d.ts +0 -125
  70. package/dist/utils/binary-serializer.js +637 -0
  71. package/dist/utils/binary-worker-code.js +1 -0
  72. package/dist/utils/binary-worker-pool.d.ts +0 -34
  73. package/dist/utils/binary-worker-pool.js +206 -0
  74. package/dist/utils/binary-worker.d.ts +0 -11
  75. package/dist/utils/binary-worker.js +63 -0
  76. package/dist/utils/insert-processing.d.ts +0 -2
  77. package/dist/utils/insert-processing.js +163 -0
  78. package/dist/utils/lru-cache.js +30 -0
  79. package/package.json +68 -3
@@ -1,9 +1,3 @@
1
- /**
2
- * HouseKit Binary Worker Pool - High-Performance Parallel Serialization
3
- *
4
- * Manages a pool of worker threads for parallel RowBinary serialization.
5
- * Automatically distributes work across available CPU cores.
6
- */
7
1
  import { EventEmitter } from 'events';
8
2
  interface ColumnConfig {
9
3
  name: string;
@@ -11,11 +5,8 @@ interface ColumnConfig {
11
5
  isNullable: boolean;
12
6
  }
13
7
  export interface BinaryWorkerPoolOptions {
14
- /** Number of worker threads (default: CPU cores - 1) */
15
8
  poolSize?: number;
16
- /** Maximum rows per batch sent to a worker */
17
9
  batchSize?: number;
18
- /** Enable high-water mark backpressure */
19
10
  highWaterMark?: number;
20
11
  }
21
12
  export declare class BinaryWorkerPool extends EventEmitter {
@@ -26,46 +17,21 @@ export declare class BinaryWorkerPool extends EventEmitter {
26
17
  private isShutdown;
27
18
  private readonly options;
28
19
  constructor(options?: BinaryWorkerPoolOptions);
29
- /**
30
- * Initialize the worker pool with column configuration
31
- */
32
20
  initialize(columns: ColumnConfig[]): Promise<void>;
33
21
  private spawnWorker;
34
22
  private handleWorkerMessage;
35
- /**
36
- * Serialize rows to binary format using worker pool
37
- */
38
23
  serialize(rows: Array<Record<string, any>>): Promise<Buffer>;
39
- /**
40
- * Serialize rows in batches, returning an async iterator of buffers
41
- */
42
24
  serializeStream(rows: AsyncIterable<Record<string, any>> | Iterable<Record<string, any>>): AsyncGenerator<Buffer>;
43
25
  private processQueue;
44
- /**
45
- * Get pool statistics
46
- */
47
26
  getStats(): {
48
27
  workers: number;
49
28
  busy: number;
50
29
  queueSize: number;
51
30
  };
52
- /**
53
- * Shutdown all workers
54
- */
55
31
  shutdown(): Promise<void>;
56
32
  }
57
- /**
58
- * Get or create the default worker pool
59
- */
60
33
  export declare function getDefaultBinaryPool(columns?: ColumnConfig[]): Promise<BinaryWorkerPool>;
61
- /**
62
- * Shutdown the default pool
63
- */
64
34
  export declare function shutdownDefaultBinaryPool(): Promise<void>;
65
- /**
66
- * Synchronous fallback serializer for environments without Worker support.
67
- * Uses object pooling for BinaryWriter instances.
68
- */
69
35
  export declare class SyncBinarySerializer {
70
36
  private encoders;
71
37
  private columns;
@@ -0,0 +1,206 @@
1
import { Worker } from 'worker_threads';
import { EventEmitter } from 'events';
import * as os from 'os';
import { binaryWorkerCode } from './binary-worker-code';
/**
 * Pool of worker threads that serialize row batches to RowBinary buffers.
 *
 * Work items are queued via serialize() and dispatched to idle, configured
 * workers; results come back keyed by a monotonically increasing batchId.
 * Crashed workers are respawned automatically until shutdown() is called.
 */
export class BinaryWorkerPool extends EventEmitter {
    workers = [];
    columns = [];
    batchIdCounter = 0;
    queue = [];
    isShutdown = false;
    options;
    constructor(options = {}) {
        super();
        this.options = {
            // Leave one core for the main thread's I/O work.
            poolSize: options.poolSize ?? Math.max(1, os.cpus().length - 1),
            batchSize: options.batchSize ?? 10000,
            highWaterMark: options.highWaterMark ?? 100,
        };
    }
    /**
     * Store the column configuration and spawn the full worker pool.
     * Resolves once every worker has reported 'ready'.
     */
    async initialize(columns) {
        this.columns = columns;
        await Promise.all(Array(this.options.poolSize).fill(0).map(() => this.spawnWorker()));
    }
    /** Spawn one worker; resolves on its first 'ready', rejects if it errors first. */
    async spawnWorker() {
        return new Promise((resolve, reject) => {
            const worker = new Worker(binaryWorkerCode, {
                eval: true,
                workerData: { columns: this.columns },
            });
            const workerInfo = {
                worker,
                busy: false,
                configured: false,
                pendingBatches: new Map(),
            };
            // Local flag so this promise settles exactly once.
            let settled = false;
            worker.on('message', (message) => {
                // BUGFIX: capture readiness BEFORE handleWorkerMessage runs.
                // handleWorkerMessage sets workerInfo.configured = true for
                // 'ready' messages, so the previous post-hoc check
                // `!workerInfo.configured` was always false and resolve() was
                // never called — initialize() hung forever.
                const firstReady = message.type === 'ready' && !workerInfo.configured;
                this.handleWorkerMessage(workerInfo, message);
                if (firstReady && !settled) {
                    settled = true;
                    resolve();
                }
            });
            worker.on('error', (error) => {
                // BUGFIX: reject the spawn promise when the worker dies before
                // becoming ready, otherwise initialize() would never settle.
                if (!settled) {
                    settled = true;
                    reject(error);
                }
                this.emit('error', error);
                // Fail all in-flight batches owned by this worker.
                for (const [, pending] of workerInfo.pendingBatches) {
                    pending.reject(error);
                }
                workerInfo.pendingBatches.clear();
                const index = this.workers.indexOf(workerInfo);
                if (index !== -1) {
                    this.workers.splice(index, 1);
                }
                // Keep the pool at full strength unless we are shutting down.
                if (!this.isShutdown) {
                    this.spawnWorker().catch(e => this.emit('error', e));
                }
            });
            worker.on('exit', (code) => {
                if (code !== 0 && !this.isShutdown) {
                    this.emit('error', new Error(`Worker exited with code ${code}`));
                }
            });
            this.workers.push(workerInfo);
        });
    }
    /** Route a worker message: settle the matching batch or mark the worker ready. */
    handleWorkerMessage(workerInfo, message) {
        switch (message.type) {
            case 'result': {
                const pending = workerInfo.pendingBatches.get(message.batchId);
                if (pending) {
                    workerInfo.pendingBatches.delete(message.batchId);
                    pending.resolve(message.buffer);
                }
                workerInfo.busy = workerInfo.pendingBatches.size > 0;
                this.processQueue();
                break;
            }
            case 'error': {
                const pending = workerInfo.pendingBatches.get(message.batchId);
                if (pending) {
                    workerInfo.pendingBatches.delete(message.batchId);
                    pending.reject(new Error(message.error));
                }
                workerInfo.busy = workerInfo.pendingBatches.size > 0;
                this.processQueue();
                break;
            }
            case 'ready': {
                workerInfo.configured = true;
                this.processQueue();
                break;
            }
        }
    }
    /** Queue one batch of rows for serialization; resolves with the binary buffer. */
    async serialize(rows) {
        if (this.isShutdown) {
            throw new Error('Worker pool is shut down');
        }
        return new Promise((resolve, reject) => {
            this.queue.push({ rows, resolve, reject });
            this.processQueue();
        });
    }
    /** Chunk an (async) iterable of rows into batches and yield serialized buffers. */
    async *serializeStream(rows) {
        let batch = [];
        const batchSize = this.options.batchSize;
        for await (const row of rows) {
            batch.push(row);
            if (batch.length >= batchSize) {
                yield await this.serialize(batch);
                batch = [];
            }
        }
        if (batch.length > 0) {
            yield await this.serialize(batch);
        }
    }
    /** Dispatch queued batches to idle workers. */
    processQueue() {
        // IMPROVED: loop so multiple idle workers are fed in one pass; the old
        // single-dispatch version left ready workers idle while work was queued.
        while (this.queue.length > 0) {
            const available = this.workers.find(w => w.configured && !w.busy);
            if (!available) {
                return;
            }
            const { rows, resolve, reject } = this.queue.shift();
            const batchId = ++this.batchIdCounter;
            available.busy = true;
            available.pendingBatches.set(batchId, { batchId, resolve, reject });
            available.worker.postMessage({
                type: 'serialize',
                rows,
                batchId,
            });
        }
    }
    /** Snapshot of pool occupancy for monitoring. */
    getStats() {
        return {
            workers: this.workers.length,
            busy: this.workers.filter(w => w.busy).length,
            queueSize: this.queue.length,
        };
    }
    /** Reject all queued work and wait for every worker to exit. */
    async shutdown() {
        this.isShutdown = true;
        for (const item of this.queue) {
            item.reject(new Error('Worker pool shutting down'));
        }
        this.queue = [];
        await Promise.all(this.workers.map(w => new Promise(resolve => {
            w.worker.on('exit', () => resolve());
            w.worker.postMessage({ type: 'shutdown' });
        })));
        this.workers = [];
    }
}
153
// Lazily created process-wide pool shared by callers that don't manage their own.
let defaultPool = null;
// Tracks whether the shared pool has had initialize() run on it.
let defaultPoolInitialized = false;
/**
 * Get (creating on first use) the shared BinaryWorkerPool.
 *
 * BUGFIX: previously, if the pool was first requested WITHOUT columns, a later
 * call that DID pass columns returned the still-uninitialized pool and never
 * spawned workers. Initialization now happens the first time columns are
 * supplied, regardless of which call created the pool.
 */
export async function getDefaultBinaryPool(columns) {
    if (!defaultPool) {
        defaultPool = new BinaryWorkerPool();
    }
    if (columns && !defaultPoolInitialized) {
        defaultPoolInitialized = true;
        await defaultPool.initialize(columns);
    }
    return defaultPool;
}
/** Shut down and discard the shared pool (next get recreates it). */
export async function shutdownDefaultBinaryPool() {
    if (defaultPool) {
        await defaultPool.shutdown();
        defaultPool = null;
        defaultPoolInitialized = false;
    }
}
169
import { createBinaryEncoder, acquireWriter, releaseWriter } from './binary-serializer';
/**
 * Synchronous, single-threaded fallback serializer for environments where
 * worker threads are unavailable. Uses the shared BinaryWriter pool
 * (acquireWriter/releaseWriter) to avoid per-call buffer allocation.
 */
export class SyncBinarySerializer {
    encoders = [];
    columns = [];
    constructor(columns) {
        this.columns = columns;
        this.encoders = columns.map(c => createBinaryEncoder(c.type, c.isNullable));
    }
    /** Serialize all rows into a single RowBinary buffer. */
    serialize(rows) {
        const w = acquireWriter();
        try {
            const { columns, encoders } = this;
            const width = columns.length;
            for (const record of rows) {
                for (let idx = 0; idx < width; idx++) {
                    encoders[idx](w, record[columns[idx].name]);
                }
            }
            return w.toBuffer();
        }
        finally {
            // Always return the writer to the pool, even on encoder failure.
            releaseWriter(w);
        }
    }
    /** Chunk an (async) iterable of rows and yield one buffer per batch. */
    async *serializeStream(rows, batchSize = 10000) {
        let pending = [];
        for await (const record of rows) {
            pending.push(record);
            if (pending.length >= batchSize) {
                yield this.serialize(pending);
                pending = [];
            }
        }
        if (pending.length > 0) {
            yield this.serialize(pending);
        }
    }
}
@@ -1,12 +1 @@
1
- /**
2
- * HouseKit Binary Worker - Parallel RowBinary Serialization
3
- *
4
- * This worker thread handles binary serialization off the main thread,
5
- * allowing the main thread to focus on I/O while serialization happens in parallel.
6
- *
7
- * Benefits:
8
- * - Main thread stays responsive for network I/O
9
- * - Serialization uses separate CPU core
10
- * - Can saturate 10Gbps links without blocking Node.js event loop
11
- */
12
1
  export {};
@@ -0,0 +1,63 @@
1
import { parentPort, workerData } from 'worker_threads';
import { BinaryWriter, createBinaryEncoder } from './binary-serializer';
// Worker-local state, populated by the 'configure' message.
let config = null;
let writer = null;
/**
 * Serialize one batch of rows with the configured per-column encoders,
 * reusing the worker's single pooled BinaryWriter.
 */
function serializeRows(rows, batchId) {
    if (!config || !writer) {
        throw new Error('Worker not configured');
    }
    writer.reset();
    const { columns, encoders } = config;
    const width = columns.length;
    for (const row of rows) {
        for (let i = 0; i < width; i++) {
            encoders[i](writer, row[columns[i].name]);
        }
    }
    return writer.toBuffer();
}
/** Dispatch one message from the parent; failures are reported as 'error' messages. */
function handleMessage(message) {
    try {
        if (message.type === 'configure') {
            config = {
                columns: message.columns,
                keyMapping: new Map(),
                encoders: message.columns.map(c => createBinaryEncoder(c.type, c.isNullable)),
            };
            writer = new BinaryWriter(1024 * 1024);
            parentPort?.postMessage({ type: 'ready' });
        }
        else if (message.type === 'serialize') {
            const buffer = serializeRows(message.rows, message.batchId);
            // Copy into a fresh ArrayBuffer so it can be transferred to the
            // parent (zero-copy handoff); the writer's buffer stays pooled here.
            const transferable = new ArrayBuffer(buffer.length);
            new Uint8Array(transferable).set(buffer);
            parentPort?.postMessage({
                type: 'result',
                batchId: message.batchId,
                buffer: Buffer.from(transferable),
                rowCount: message.rows.length,
            }, [transferable]);
        }
        else if (message.type === 'shutdown') {
            process.exit(0);
        }
    }
    catch (error) {
        parentPort?.postMessage({
            type: 'error',
            batchId: message.batchId,
            error: error instanceof Error ? error.message : String(error),
        });
    }
}
if (parentPort) {
    parentPort.on('message', handleMessage);
    // Columns passed via workerData allow configuration without a round-trip.
    if (workerData?.columns) {
        handleMessage({ type: 'configure', columns: workerData.columns });
    }
}
@@ -16,11 +16,9 @@ export type InsertPlan = {
16
16
  keyToColumn: Map<string, PreparedInsertColumn>;
17
17
  columnNames: string[];
18
18
  useCompact: boolean;
19
- /** Skip enum validation for maximum performance */
20
19
  skipValidation?: boolean;
21
20
  };
22
21
  export interface InsertPlanOptions {
23
- /** Skip enum validation in production for better performance */
24
22
  skipValidation?: boolean;
25
23
  }
26
24
  export declare function buildInsertPlan(table: TableRuntime<any, any>, options?: InsertPlanOptions): InsertPlan;
@@ -0,0 +1,163 @@
1
import { createRequire } from 'module';
// BUGFIX: this file is emitted as ESM ("type": "module" in package.json and
// plain `tsc` output), where the CommonJS global `require` does not exist —
// the old bare require('uuid') threw ReferenceError at runtime. createRequire
// gives us a working require anchored at this module for the lazy load below.
const requireModule = createRequire(import.meta.url);
// True when the runtime exposes crypto.randomUUID (Node 14.17+ / modern browsers).
const hasNativeUUID = typeof crypto !== 'undefined' && typeof crypto.randomUUID === 'function';
// Cache of version → generator, filled on first use.
let uuidFns = null;
/**
 * Lazily load UUID generators from the optional `uuid` package.
 * v4 prefers the native crypto.randomUUID when available.
 */
function loadUUIDFns() {
    if (uuidFns)
        return uuidFns;
    const uuid = requireModule('uuid');
    uuidFns = {
        1: uuid.v1,
        4: hasNativeUUID ? () => crypto.randomUUID() : uuid.v4,
        6: uuid.v6,
        7: uuid.v7,
    };
    return uuidFns;
}
15
/**
 * Precompute per-column insert metadata for a table: value transforms,
 * defaults, auto-UUID settings, and a lookup map keyed by both the property
 * name and the physical column name. The plan is reused across every row of
 * an insert, keeping per-row work minimal.
 */
export function buildInsertPlan(table, options) {
    const skipValidation = options?.skipValidation;
    const columns = [];
    const keyToColumn = new Map();
    for (const [propKey, rawColumn] of Object.entries(table.$columns)) {
        const column = rawColumn;
        const meta = column.meta;
        const autoUUIDVersion = meta?.autoGenerate?.type === 'uuid'
            ? normalizeUUIDVersion(meta.autoGenerate.version)
            : null;
        // Server-side generation only when the column's default expression
        // actually calls generateUUID* on the ClickHouse side.
        const useServerUUID = Boolean(autoUUIDVersion && meta?.defaultExpr && /generateuuid/i.test(meta.defaultExpr));
        const prepared = {
            propKey,
            column,
            columnName: column.name,
            hasDefault: meta?.default !== undefined,
            defaultValue: meta?.default,
            defaultFn: meta?.defaultFn || null,
            autoUUIDVersion,
            useServerUUID,
            transform: createTransform(column, skipValidation),
        };
        columns.push(prepared);
        // Rows may address a column by either its property key or DB name.
        keyToColumn.set(propKey, prepared);
        keyToColumn.set(prepared.columnName, prepared);
    }
    return {
        columns,
        keyToColumn,
        columnNames: columns.map(c => c.columnName),
        // Compact (positional) format can't represent "let the server fill it in".
        useCompact: columns.every(c => !c.useServerUUID),
        skipValidation,
    };
}
44
/**
 * Apply an insert plan to one row, producing either a positional array
 * ('compact') or a { columnName: value } object ('json'). The default mode
 * follows the plan's useCompact flag.
 */
export function processRowWithPlan(row, plan, mode = plan.useCompact ? 'compact' : 'json') {
    return mode === 'compact' ? processRowCompact(row, plan) : processRowJson(row, plan);
}
/**
 * Build a { columnName: value } object for one row. Missing values fall back
 * to defaultFn → client-side auto UUID → static default; columns that remain
 * undefined are omitted so the server applies its own defaults.
 */
function processRowJson(row, plan) {
    const result = {};
    for (const col of plan.columns) {
        const byProp = row[col.propKey];
        let value = byProp !== undefined ? byProp : row[col.columnName];
        if (value === undefined) {
            if (col.defaultFn) {
                value = col.defaultFn(row);
            }
            else if (col.autoUUIDVersion !== null) {
                // Server-generated UUIDs stay undefined (omitted) on purpose.
                if (!col.useServerUUID) {
                    value = generateUUID(col.autoUUIDVersion);
                }
            }
            else if (col.hasDefault) {
                value = col.defaultValue;
            }
        }
        if (value !== undefined) {
            result[col.columnName] = col.transform(value);
        }
    }
    return result;
}
/** Lazily process an (async) iterable of rows through the plan, one at a time. */
export async function* processRowsStream(rows, plan, mode = plan.useCompact ? 'compact' : 'json') {
    for await (const row of rows) {
        yield processRowWithPlan(row, plan, mode);
    }
}
78
/**
 * Build a per-column value transform applied on insert:
 * - Date instances → 'YYYY-MM-DD HH:MM:SS' text
 * - JSON columns: non-null objects are JSON.stringify'd
 * - enum columns: values validated against the allowed set (unless skipValidation)
 * Columns needing neither JSON nor enum handling get a minimal fast-path closure.
 */
function createTransform(col, skipValidation) {
    const isJsonColumn = Boolean(col.meta?.isJson);
    const enumValues = col.meta?.enumValues;
    const enumSet = (!skipValidation && enumValues) ? new Set(enumValues) : null;
    if (!isJsonColumn && !enumSet) {
        // Fast path: only Date formatting, everything else passes through.
        return (value) => (value instanceof Date ? formatDate(value) : value);
    }
    return (value) => {
        let out = value instanceof Date ? formatDate(value) : value;
        if (isJsonColumn && typeof out === 'object' && out !== null) {
            out = JSON.stringify(out);
        }
        if (enumSet && !enumSet.has(out)) {
            throw new Error(`❌ Invalid value '${out}' for enum column '${col.name}'. Allowed: ${enumValues.join(', ')}`);
        }
        return out;
    };
}
106
/**
 * Format a Date as ClickHouse DateTime text: 'YYYY-MM-DD HH:MM:SS'
 * (UTC; sub-second precision is dropped).
 */
function formatDate(date) {
    // toISOString yields 'YYYY-MM-DDTHH:MM:SS.mmmZ' — keep the first 19
    // characters and swap the 'T' separator for a space.
    return date.toISOString().slice(0, 19).replace('T', ' ');
}
112
/**
 * Clamp a configured UUID version to one this ORM recognizes (1, 3, 4, 5, 6, 7);
 * anything else — including undefined — falls back to v4.
 */
function normalizeUUIDVersion(version) {
    const supported = [1, 3, 4, 5, 6, 7];
    return supported.includes(version) ? version : 4;
}
118
/**
 * Generate a UUID string for the given version. v4 prefers the native
 * crypto.randomUUID; other versions come from the lazily loaded `uuid`
 * package. v3/v5 are name-based and cannot be auto-generated, so they throw;
 * any other unmapped version silently falls back to v4.
 */
function generateUUID(version) {
    if (version === 4 && hasNativeUUID) {
        return crypto.randomUUID();
    }
    const fns = loadUUIDFns();
    const generator = fns[version];
    if (generator) {
        return generator();
    }
    if (version === 3 || version === 5) {
        throw new Error(`UUID v${version} requires a name and namespace. Use v4, v6, or v7 for auto-generation.`);
    }
    return fns[4]();
}
132
/**
 * Apply an insert plan to one row, producing a positional value array for
 * JSONCompactEachRow. Resolution order per column: explicit value (by propKey
 * or column name) → defaultFn → client-side auto UUID → static default.
 *
 * @throws when a column has no value and relies on a server-side default
 *         (compact format cannot omit a position), or has no value/default at all.
 */
function processRowCompact(row, plan) {
    const out = new Array(plan.columns.length);
    for (let i = 0; i < plan.columns.length; i++) {
        const col = plan.columns[i];
        const provided = row[col.propKey];
        const providedByName = row[col.columnName];
        if (provided !== undefined || providedByName !== undefined) {
            out[i] = col.transform(provided !== undefined ? provided : providedByName);
            continue;
        }
        if (col.defaultFn) {
            out[i] = col.transform(col.defaultFn(row));
            continue;
        }
        // BUGFIX: check useServerUUID BEFORE the autoUUIDVersion branch.
        // useServerUUID implies autoUUIDVersion !== null, so the old order made
        // this guard unreachable and silently emitted `undefined` (serialized
        // as null) instead of the intended error for server-generated UUIDs.
        if (col.useServerUUID) {
            throw new Error(`❌ Cannot use JSONCompactEachRow: column '${col.columnName}' relies on server-side default`);
        }
        if (col.autoUUIDVersion !== null) {
            out[i] = generateUUID(col.autoUUIDVersion);
            continue;
        }
        if (col.hasDefault) {
            out[i] = col.defaultValue;
            continue;
        }
        throw new Error(`❌ Missing value for column '${col.columnName}'`);
    }
    return out;
}
@@ -0,0 +1,30 @@
1
/**
 * Minimal LRU cache built on a Map's insertion order: reads re-insert the key
 * to mark it most-recently-used, and writes evict the Map's first key (the
 * least-recently-used entry) once `max` entries are reached.
 */
export class LRUCache {
    map = new Map();
    max;
    constructor(options) {
        this.max = options.max;
    }
    /** Return the cached value (refreshing its recency), or undefined on a miss. */
    get(key) {
        const value = this.map.get(key);
        if (value === undefined) {
            return undefined;
        }
        // Delete + re-set moves the key to the back of the iteration order.
        this.map.delete(key);
        this.map.set(key, value);
        return value;
    }
    /** Insert or overwrite a value, evicting the LRU entry if the cache is full. */
    set(key, value) {
        if (this.map.has(key)) {
            // Overwrite: remove first so the re-insert refreshes recency.
            this.map.delete(key);
        }
        else if (this.map.size >= this.max) {
            const oldest = this.map.keys().next().value;
            if (oldest !== undefined) {
                this.map.delete(oldest);
            }
        }
        this.map.set(key, value);
    }
    /** Drop every entry. */
    clear() {
        this.map.clear();
    }
}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@housekit/orm",
3
- "version": "0.1.47",
3
+ "version": "0.1.49",
4
4
  "description": "Type-safe ClickHouse ORM with modern DX and ClickHouse-specific optimizations. Features optimized JSONCompact streaming, full engine support, and advanced query capabilities.",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",
@@ -10,6 +10,70 @@
10
10
  ".": {
11
11
  "types": "./dist/index.d.ts",
12
12
  "import": "./dist/index.js"
13
+ },
14
+ "./client": {
15
+ "types": "./dist/client.d.ts",
16
+ "import": "./dist/client.js"
17
+ },
18
+ "./table": {
19
+ "types": "./dist/table.d.ts",
20
+ "import": "./dist/table.js"
21
+ },
22
+ "./column": {
23
+ "types": "./dist/column.d.ts",
24
+ "import": "./dist/column.js"
25
+ },
26
+ "./expressions": {
27
+ "types": "./dist/expressions.d.ts",
28
+ "import": "./dist/expressions.js"
29
+ },
30
+ "./schema-builder": {
31
+ "types": "./dist/schema-builder.d.ts",
32
+ "import": "./dist/schema-builder.js"
33
+ },
34
+ "./relational": {
35
+ "types": "./dist/relational.d.ts",
36
+ "import": "./dist/relational.js"
37
+ },
38
+ "./engines": {
39
+ "types": "./dist/engines.d.ts",
40
+ "import": "./dist/engines.js"
41
+ },
42
+ "./data-types": {
43
+ "types": "./dist/data-types.d.ts",
44
+ "import": "./dist/data-types.js"
45
+ },
46
+ "./materialized-views": {
47
+ "types": "./dist/materialized-views.d.ts",
48
+ "import": "./dist/materialized-views.js"
49
+ },
50
+ "./compiler": {
51
+ "types": "./dist/compiler.d.ts",
52
+ "import": "./dist/compiler.js"
53
+ },
54
+ "./external": {
55
+ "types": "./dist/external.d.ts",
56
+ "import": "./dist/external.js"
57
+ },
58
+ "./dictionary": {
59
+ "types": "./dist/dictionary.d.ts",
60
+ "import": "./dist/dictionary.js"
61
+ },
62
+ "./codegen": {
63
+ "types": "./dist/codegen/index.d.ts",
64
+ "import": "./dist/codegen/index.js"
65
+ },
66
+ "./utils": {
67
+ "types": "./dist/utils/index.d.ts",
68
+ "import": "./dist/utils/index.js"
69
+ },
70
+ "./modules": {
71
+ "types": "./dist/modules/index.d.ts",
72
+ "import": "./dist/modules/index.js"
73
+ },
74
+ "./builders": {
75
+ "types": "./dist/builders/index.d.ts",
76
+ "import": "./dist/builders/index.js"
13
77
  }
14
78
  },
15
79
  "license": "MIT",
@@ -43,7 +107,7 @@
43
107
  "README.md"
44
108
  ],
45
109
  "scripts": {
46
- "build": "bun run build-worker.ts && bun build ./src/index.ts --outdir ./dist --target node --external @clickhouse/client --external uuid --external zod && tsc --project tsconfig.build.json",
110
+ "build": "bun run build-worker.ts && tsc --project tsconfig.build.json",
47
111
  "prepublishOnly": "bun run build",
48
112
  "clean": "rm -rf dist"
49
113
  },
@@ -57,7 +121,8 @@
57
121
  "devDependencies": {
58
122
  "@clickhouse/client": "^1.15.0",
59
123
  "@types/uuid": "^11.0.0",
60
- "typescript": "^5.0.0"
124
+ "typescript": "^5.0.0",
125
+ "esbuild": "^0.27.2"
61
126
  },
62
127
  "publishConfig": {
63
128
  "access": "public"