@hypequery/clickhouse 0.2.1 → 0.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/bin.js +128 -36
- package/dist/cli/generate-types.js +101 -12
- package/dist/core/connection.d.ts +136 -0
- package/dist/core/connection.d.ts.map +1 -1
- package/dist/core/connection.js +58 -0
- package/dist/core/cross-filter.d.ts +85 -0
- package/dist/core/features/aggregations.d.ts +102 -0
- package/dist/core/features/analytics.d.ts +66 -0
- package/dist/core/features/cross-filtering.d.ts +31 -0
- package/dist/core/features/cross-filtering.d.ts.map +1 -0
- package/dist/core/features/cross-filtering.js +123 -0
- package/dist/core/features/executor.d.ts +19 -0
- package/dist/core/features/filtering.d.ts +95 -0
- package/dist/core/features/filtering.d.ts.map +1 -1
- package/dist/core/features/filtering.js +59 -1
- package/dist/core/features/joins.d.ts +29 -0
- package/dist/core/features/pagination.d.ts +23 -0
- package/dist/core/features/query-modifiers.d.ts +119 -0
- package/dist/core/formatters/sql-formatter.d.ts +9 -0
- package/dist/core/formatters/sql-formatter.d.ts.map +1 -1
- package/dist/core/formatters/sql-formatter.js +24 -5
- package/dist/core/join-relationships.d.ts +50 -0
- package/dist/core/query-builder.d.ts +222 -0
- package/dist/core/query-builder.d.ts.map +1 -1
- package/dist/core/query-builder.js +38 -6
- package/dist/core/tests/index.d.ts +2 -0
- package/dist/core/tests/integration/pagination-test-tbc.d.ts +2 -0
- package/dist/core/tests/integration/pagination-test-tbc.d.ts.map +1 -0
- package/dist/core/tests/integration/pagination-test-tbc.js +189 -0
- package/dist/core/tests/integration/setup.d.ts +40 -0
- package/dist/core/tests/integration/setup.d.ts.map +1 -1
- package/dist/core/tests/integration/setup.js +278 -237
- package/dist/core/tests/integration/test-config.d.ts +15 -0
- package/dist/core/tests/integration/test-config.d.ts.map +1 -0
- package/dist/core/tests/integration/test-config.js +15 -0
- package/dist/core/tests/test-utils.d.ts +30 -0
- package/dist/core/utils/logger.d.ts +37 -0
- package/dist/core/utils/logger.js +2 -2
- package/dist/core/utils/sql-expressions.d.ts +59 -0
- package/dist/core/utils.d.ts +3 -0
- package/dist/core/validators/filter-validator.d.ts +8 -0
- package/dist/core/validators/value-validator.d.ts +6 -0
- package/dist/formatters/index.d.ts +1 -0
- package/dist/index.d.ts +10 -27
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +14 -2
- package/dist/types/base.d.ts +77 -0
- package/dist/types/base.d.ts.map +1 -1
- package/dist/types/clickhouse-types.d.ts +13 -0
- package/dist/types/filters.d.ts +37 -0
- package/dist/types/index.d.ts +3 -0
- package/package.json +15 -8
package/dist/core/tests/integration/setup.js
@@ -1,274 +1,315 @@
-import
-import {
+import path from 'path';
+import { fileURLToPath } from 'url';
 import { ClickHouseConnection } from '../../connection';
-
-
-
-
-
-
-
-
-
-
-
-}
-
-
+import { exec } from 'child_process';
+import { promisify } from 'util';
+import { logger as hypeQueryLogger } from '../../utils/logger';
+// Disable the HypeQuery logger to prevent "logs after tests" errors
+// This must be done early in the setup, before any queries run
+hypeQueryLogger.configure({ enabled: false });
+// Setup a logger that respects test environment
+const logger = {
+info: (message, ...args) => {
+if (process.env.DEBUG === 'true') {
+}
+},
+error: (message, ...args) => {
+if (process.env.DEBUG === 'true' || process.env.SUPPRESS_ERRORS !== 'true') {
+console.error(`[ERROR] ${message}`, ...args);
+}
+},
+warn: (message, ...args) => {
+if (process.env.DEBUG === 'true') {
+console.warn(`[WARN] ${message}`, ...args);
+}
+}
+};
+const execAsync = promisify(exec);
+// Create a path to the project root
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+const projectRoot = path.resolve(__dirname, '../../../../../');
+// Connection configuration (with defaults that can be overridden by env variables)
+const config = {
+host: process.env.CLICKHOUSE_TEST_HOST || 'http://localhost:8123',
+user: process.env.CLICKHOUSE_TEST_USER || 'default',
+password: process.env.CLICKHOUSE_TEST_PASSWORD || 'hypequery_test',
+database: process.env.CLICKHOUSE_TEST_DB || 'test_db',
+};
+// Initialize the ClickHouse connection
+export const initializeTestConnection = async () => {
+logger.info('Initializing ClickHouse connection with config:', config);
 try {
-
-
-
-password: CLICKHOUSE_PASSWORD,
-database: CLICKHOUSE_DB
-});
-// Test the connection
+// Make sure ClickHouse is initialized
+ensureConnectionInitialized();
+// Test connection by getting client and pinging
 const client = ClickHouseConnection.getClient();
 await client.ping();
-
+logger.info('ClickHouse connection successful');
+// Return the query builder from the index file
+const { createQueryBuilder } = await import('../../../index.js');
 return createQueryBuilder({
-host:
-username:
-password:
-database:
+host: config.host,
+username: config.user,
+password: config.password,
+database: config.database,
 });
 }
 catch (error) {
-
+logger.error('Failed to connect to ClickHouse:', error);
 throw error;
 }
-}
-// SQL to create test tables
-const CREATE_TEST_TABLE = `
-CREATE TABLE IF NOT EXISTS test_table (
-id Int32,
-name String,
-price Float64,
-created_at DateTime,
-category String,
-active UInt8
-) ENGINE = MergeTree()
-ORDER BY id
-`;
-const CREATE_USERS_TABLE = `
-CREATE TABLE IF NOT EXISTS users (
-id Int32,
-user_name String,
-email String,
-created_at DateTime,
-status String
-) ENGINE = MergeTree()
-ORDER BY id
-`;
-const CREATE_ORDERS_TABLE = `
-CREATE TABLE IF NOT EXISTS orders (
-id Int32,
-user_id Int32,
-product_id Int32,
-quantity Int32,
-total Float64,
-status String,
-created_at DateTime
-) ENGINE = MergeTree()
-ORDER BY id
-`;
-const CREATE_PRODUCTS_TABLE = `
-CREATE TABLE IF NOT EXISTS products (
-id Int32,
-name String,
-price Float64,
-category String,
-description String
-) ENGINE = MergeTree()
-ORDER BY id
-`;
-// Sample data for tests
-export const TEST_DATA = {
-test_table: [
-{ id: 1, name: 'Product 1', price: 10.99, created_at: '2023-01-01 00:00:00', category: 'A', active: 1 },
-{ id: 2, name: 'Product 2', price: 20.50, created_at: '2023-01-02 00:00:00', category: 'B', active: 1 },
-{ id: 3, name: 'Product 3', price: 15.75, created_at: '2023-01-03 00:00:00', category: 'A', active: 0 },
-{ id: 4, name: 'Product 4', price: 25.00, created_at: '2023-01-04 00:00:00', category: 'C', active: 1 },
-{ id: 5, name: 'Product 5', price: 30.25, created_at: '2023-01-05 00:00:00', category: 'B', active: 0 },
-{ id: 6, name: 'Product 6', price: 12.99, created_at: '2023-01-06 00:00:00', category: 'A', active: 1 },
-{ id: 7, name: 'Product 7', price: 22.50, created_at: '2023-01-07 00:00:00', category: 'B', active: 1 },
-{ id: 8, name: 'Product 8', price: 18.75, created_at: '2023-01-08 00:00:00', category: 'C', active: 0 }
-],
-users: [
-{ id: 1, user_name: 'user1', email: 'user1@example.com', created_at: '2023-01-01 00:00:00', status: 'active' },
-{ id: 2, user_name: 'user2', email: 'user2@example.com', created_at: '2023-01-02 00:00:00', status: 'active' },
-{ id: 3, user_name: 'user3', email: 'user3@example.com', created_at: '2023-01-03 00:00:00', status: 'inactive' },
-{ id: 4, user_name: 'user4', email: 'user4@example.com', created_at: '2023-01-04 00:00:00', status: 'active' },
-{ id: 5, user_name: 'user5', email: 'user5@example.com', created_at: '2023-01-05 00:00:00', status: 'pending' }
-],
-orders: [
-{ id: 1, user_id: 1, product_id: 1, quantity: 2, total: 21.98, status: 'completed', created_at: '2023-01-10 10:00:00' },
-{ id: 2, user_id: 1, product_id: 3, quantity: 1, total: 15.75, status: 'completed', created_at: '2023-01-11 11:00:00' },
-{ id: 3, user_id: 2, product_id: 2, quantity: 3, total: 61.50, status: 'completed', created_at: '2023-01-12 12:00:00' },
-{ id: 4, user_id: 3, product_id: 5, quantity: 1, total: 30.25, status: 'pending', created_at: '2023-01-13 13:00:00' },
-{ id: 5, user_id: 4, product_id: 4, quantity: 2, total: 50.00, status: 'completed', created_at: '2023-01-14 14:00:00' },
-{ id: 6, user_id: 2, product_id: 6, quantity: 1, total: 12.99, status: 'cancelled', created_at: '2023-01-15 15:00:00' },
-{ id: 7, user_id: 5, product_id: 7, quantity: 4, total: 90.00, status: 'pending', created_at: '2023-01-16 16:00:00' },
-{ id: 8, user_id: 1, product_id: 8, quantity: 1, total: 18.75, status: 'completed', created_at: '2023-01-17 17:00:00' }
-],
-products: [
-{ id: 1, name: 'Product A', price: 10.99, category: 'Electronics', description: 'A great electronic device' },
-{ id: 2, name: 'Product B', price: 20.50, category: 'Clothing', description: 'Comfortable clothing item' },
-{ id: 3, name: 'Product C', price: 15.75, category: 'Electronics', description: 'Another electronic gadget' },
-{ id: 4, name: 'Product D', price: 25.00, category: 'Home', description: 'Home decoration item' },
-{ id: 5, name: 'Product E', price: 30.25, category: 'Kitchen', description: 'Useful kitchen tool' },
-{ id: 6, name: 'Product F', price: 12.99, category: 'Office', description: 'Office supplies' },
-{ id: 7, name: 'Product G', price: 22.50, category: 'Electronics', description: 'Premium electronic device' },
-{ id: 8, name: 'Product H', price: 18.75, category: 'Clothing', description: 'Stylish clothing piece' }
-]
 };
-// Helper to
-export
-
-
-
-query: `CREATE DATABASE IF NOT EXISTS ${CLICKHOUSE_DB}`
-});
-// Use the test database
-await client.command({
-query: `USE ${CLICKHOUSE_DB}`
-});
-// Create test tables
-await client.command({
-query: CREATE_TEST_TABLE
-});
-await client.command({
-query: CREATE_USERS_TABLE
-});
-await client.command({
-query: CREATE_ORDERS_TABLE
-});
-await client.command({
-query: CREATE_PRODUCTS_TABLE
-});
-// Truncate tables if they exist
-await client.command({
-query: `TRUNCATE TABLE IF EXISTS test_table`
-});
-await client.command({
-query: `TRUNCATE TABLE IF EXISTS users`
-});
-await client.command({
-query: `TRUNCATE TABLE IF EXISTS orders`
-});
-await client.command({
-query: `TRUNCATE TABLE IF EXISTS products`
-});
-// Insert test data
-// For test_table
-for (const item of TEST_DATA.test_table) {
-await client.command({
-query: `
-INSERT INTO test_table (id, name, price, created_at, category, active)
-VALUES (${item.id}, '${item.name}', ${item.price}, '${item.created_at}', '${item.category}', ${item.active})
-`
-});
+// Helper function to ensure connection is initialized
+export const ensureConnectionInitialized = () => {
+// If connection hasn't been initialized yet, initialize it
+try {
+ClickHouseConnection.getClient();
 }
-
-
-
-
-
-
-
+catch (error) {
+// If we get "not initialized" error, initialize the connection
+logger.info('Initializing ClickHouse connection...');
+ClickHouseConnection.initialize({
+host: config.host,
+username: config.user,
+password: config.password,
+database: config.database,
 });
 }
-
-
-
-
-
-
-
-});
+return ClickHouseConnection.getClient();
+};
+// Check if Docker is installed
+export const isDockerAvailable = async () => {
+try {
+await execAsync('docker --version');
+return true;
 }
-
-
-await client.command({
-query: `
-INSERT INTO products (id, name, price, category, description)
-VALUES (${product.id}, '${product.name}', ${product.price}, '${product.category}', '${product.description}')
-`
-});
+catch (error) {
+return false;
 }
-}
-//
-export
+};
+// Check if Docker Compose is installed
+export const isDockerComposeAvailable = async () => {
 try {
-
+await execAsync('docker compose version');
 return true;
 }
 catch (error) {
-
+try {
+// Try the hyphenated version for older installations
+await execAsync('docker-compose --version');
+return true;
+}
+catch {
+return false;
+}
+}
+};
+// Check if a docker container is running
+export const isContainerRunning = async (containerName) => {
+try {
+const { stdout } = await execAsync(`docker ps --filter "name=${containerName}" --format "{{.Names}}"`);
+return stdout.trim() === containerName;
 }
-
-
-export function startClickHouseContainer() {
-if (!isDockerAvailable()) {
-console.warn('Docker is not available. Integration tests will use the configured ClickHouse instance.');
-return;
+catch (error) {
+return false;
 }
+};
+// Check if ClickHouse is ready
+export const isClickHouseReady = async () => {
 try {
-
-
-
-
-
+const client = ClickHouseConnection.getClient();
+await client.ping();
+return true;
+}
+catch (error) {
+return false;
+}
+};
+// Start the ClickHouse container
+export const startClickHouseContainer = async () => {
+const dockerAvailable = await isDockerAvailable();
+if (!dockerAvailable) {
+throw new Error('Docker is not available. Please install Docker to run integration tests.');
+}
+const composeAvailable = await isDockerComposeAvailable();
+// Use Docker Compose if available
+if (composeAvailable) {
+logger.info('Starting ClickHouse container with Docker Compose...');
+try {
+// Fix the path to the docker-compose.test.yml file
+const composePath = path.resolve(projectRoot, 'packages/clickhouse/docker-compose.test.yml');
+logger.info(`Using Docker Compose file at: ${composePath}`);
+// Make sure we're executing the command from the correct directory
+await execAsync(`docker compose -f "${composePath}" up -d`);
 }
-
-
-
-// Wait for ClickHouse to be ready
-let attempts = 0;
-const maxAttempts = 30;
-while (attempts < maxAttempts) {
-try {
-execSync('curl -s http://localhost:8123/ping', { stdio: 'ignore' });
-console.log('ClickHouse is ready.');
-break;
-}
-catch (error) {
-attempts++;
-if (attempts >= maxAttempts) {
-throw new Error('ClickHouse failed to start in time.');
-}
-console.log(`Waiting for ClickHouse to be ready... (${attempts}/${maxAttempts})`);
-execSync('sleep 1');
-}
+catch (error) {
+logger.error('Failed to start ClickHouse container with Docker Compose:', error);
+throw error;
 }
-
+}
+else {
+// Fallback to Docker run
+logger.info('Starting ClickHouse container with Docker...');
 try {
-
-docker
-
-
+await execAsync(`
+docker run -d --name hypequery-test-clickhouse
+-p 8123:8123 -p 9000:9000
+-e CLICKHOUSE_USER=${config.user}
+-e CLICKHOUSE_PASSWORD=${config.password}
+-e CLICKHOUSE_DB=${config.database}
+--ulimit nofile=262144:262144
+clickhouse/clickhouse-server:latest
+`);
 }
 catch (error) {
-
+logger.error('Failed to start ClickHouse container with Docker:', error);
 throw error;
 }
 }
-
-
-
+};
+// Wait for ClickHouse to be ready
+export const waitForClickHouse = async (maxAttempts = 30, retryInterval = 1000) => {
+logger.info('Waiting for ClickHouse to be ready...');
+for (let attempt = 1; attempt <= maxAttempts; attempt++) {
+if (await isClickHouseReady()) {
+logger.info('ClickHouse is ready!');
+return;
+}
+logger.info(`Waiting for ClickHouse... Attempt ${attempt}/${maxAttempts}`);
+await new Promise(resolve => setTimeout(resolve, retryInterval));
+}
+throw new Error(`ClickHouse failed to start after ${maxAttempts} attempts`);
+};
+// Stop the ClickHouse container
+export const stopClickHouseContainer = async () => {
+const composeAvailable = await isDockerComposeAvailable();
+if (composeAvailable) {
+logger.info('Stopping ClickHouse container with Docker Compose...');
+try {
+// Fix the path to the docker-compose.test.yml file
+const composePath = path.resolve(projectRoot, 'packages/clickhouse/docker-compose.test.yml');
+logger.info(`Using Docker Compose file at: ${composePath}`);
+// Make sure we're executing the command from the correct directory
+await execAsync(`docker compose -f "${composePath}" down -v`);
+}
+catch (error) {
+logger.error('Failed to stop ClickHouse container with Docker Compose:', error);
+// Log the error but don't throw, so the tests can complete
+// This allows for manual cleanup if needed
+}
 }
-
-
-
-
-
+else {
+logger.info('Stopping ClickHouse container with Docker...');
+try {
+await execAsync('docker stop hypequery-test-clickhouse && docker rm hypequery-test-clickhouse');
+}
+catch (error) {
+logger.error('Failed to stop ClickHouse container with Docker:', error);
+// Log the error but don't throw, so the tests can complete
+}
 }
+};
+// Test data
+export const TEST_DATA = {
+test_table: [
+{ id: 1, name: 'Product A', category: 'A', price: 10.5, created_at: '2023-01-01', is_active: true },
+{ id: 2, name: 'Product B', category: 'B', price: 20.75, created_at: '2023-01-02', is_active: true },
+{ id: 3, name: 'Product C', category: 'A', price: 15.0, created_at: '2023-01-03', is_active: false },
+{ id: 4, name: 'Product D', category: 'C', price: 8.25, created_at: '2023-01-04', is_active: true },
+{ id: 5, name: 'Product E', category: 'B', price: 30.0, created_at: '2023-01-05', is_active: true },
+],
+users: [
+{ id: 1, user_name: 'john_doe', email: 'john@example.com', status: 'active', created_at: '2023-01-01' },
+{ id: 2, user_name: 'jane_smith', email: 'jane@example.com', status: 'active', created_at: '2023-01-02' },
+{ id: 3, user_name: 'bob_jones', email: 'bob@example.com', status: 'inactive', created_at: '2023-01-03' },
+],
+orders: [
+{ id: 1, user_id: 1, product_id: 1, quantity: 2, total: 21.0, status: 'completed', created_at: '2023-01-10' },
+{ id: 2, user_id: 1, product_id: 3, quantity: 1, total: 15.0, status: 'completed', created_at: '2023-01-11' },
+{ id: 3, user_id: 2, product_id: 2, quantity: 3, total: 62.25, status: 'pending', created_at: '2023-01-12' },
+{ id: 4, user_id: 2, product_id: 5, quantity: 1, total: 30.0, status: 'completed', created_at: '2023-01-13' },
+{ id: 5, user_id: 3, product_id: 4, quantity: 2, total: 16.5, status: 'cancelled', created_at: '2023-01-14' },
+],
+};
+// Setup the test database
+export const setupTestDatabase = async () => {
+// Make sure connection is initialized before getting client
+const client = ensureConnectionInitialized();
 try {
-
-
-
+// Create and use database if it doesn't exist
+await client.exec({ query: `CREATE DATABASE IF NOT EXISTS ${config.database}` });
+await client.exec({ query: `USE ${config.database}` });
+// Drop tables if they exist
+await client.exec({ query: 'DROP TABLE IF EXISTS test_table' });
+await client.exec({ query: 'DROP TABLE IF EXISTS users' });
+await client.exec({ query: 'DROP TABLE IF EXISTS orders' });
+// Create tables
+await client.exec({
+query: `
+CREATE TABLE test_table (
+id UInt32,
+name String,
+category String,
+price Float64,
+created_at Date,
+is_active Boolean
+) ENGINE = MergeTree()
+ORDER BY id
+`
+});
+await client.exec({
+query: `
+CREATE TABLE users (
+id UInt32,
+user_name String,
+email String,
+status String,
+created_at Date
+) ENGINE = MergeTree()
+ORDER BY id
+`
+});
+await client.exec({
+query: `
+CREATE TABLE orders (
+id UInt32,
+user_id UInt32,
+product_id UInt32,
+quantity UInt32,
+total Float64,
+status String,
+created_at Date
+) ENGINE = MergeTree()
+ORDER BY id
+`
+});
+// Insert test data
+for (const row of TEST_DATA.test_table) {
+await client.insert({
+table: 'test_table',
+values: [row],
+format: 'JSONEachRow'
+});
+}
+for (const row of TEST_DATA.users) {
+await client.insert({
+table: 'users',
+values: [row],
+format: 'JSONEachRow'
+});
+}
+for (const row of TEST_DATA.orders) {
+await client.insert({
+table: 'orders',
+values: [row],
+format: 'JSONEachRow'
+});
+}
+logger.info('Test database setup complete');
 }
 catch (error) {
-
+logger.error('Failed to set up test database:', error);
+throw error;
 }
-}
+};
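The rewritten setup.js exposes the container and database lifecycle as discrete exported helpers (startClickHouseContainer, waitForClickHouse, setupTestDatabase, initializeTestConnection, stopClickHouseContainer, isClickHouseReady). A minimal sketch of how a test file might wire them together, assuming Vitest-style hooks; the hook API and the relative import path are assumptions, only the helper names come from the diff above:

// Hypothetical integration-test harness; the Vitest hooks and the relative
// import path are assumptions — the helper names come from setup.js above.
import { beforeAll, afterAll, describe, it, expect } from 'vitest';
import {
  startClickHouseContainer,
  waitForClickHouse,
  setupTestDatabase,
  initializeTestConnection,
  stopClickHouseContainer,
  isClickHouseReady,
} from './setup';

beforeAll(async () => {
  await startClickHouseContainer(); // throws if Docker is unavailable
  await waitForClickHouse();        // polls isClickHouseReady, 30 attempts by default
  await setupTestDatabase();        // drops, recreates and seeds test_table, users, orders
  await initializeTestConnection(); // pings ClickHouse and returns the query builder
});

afterAll(async () => {
  await stopClickHouseContainer();  // cleanup errors are logged, not thrown
});

describe('ClickHouse test environment', () => {
  it('is reachable after setup', async () => {
    expect(await isClickHouseReady()).toBe(true);
  });
});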
package/dist/core/tests/integration/test-config.d.ts
@@ -0,0 +1,15 @@
+/**
+* Centralized configuration for integration tests
+* Import this in all integration test files to ensure consistent behavior
+*/
+/**
+* Whether to skip integration tests:
+* - Skip if SKIP_INTEGRATION_TESTS is explicitly set to 'true'
+* - In CI environments, skip unless ENABLE_CI_INTEGRATION_TESTS is set to 'true'
+*/
+export declare const SKIP_INTEGRATION_TESTS: boolean;
+/**
+* Default timeout for test setup (in milliseconds)
+*/
+export declare const SETUP_TIMEOUT = 30000;
+//# sourceMappingURL=test-config.d.ts.map
package/dist/core/tests/integration/test-config.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"test-config.d.ts","sourceRoot":"","sources":["../../../../src/core/tests/integration/test-config.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH;;;;GAIG;AACH,eAAO,MAAM,sBAAsB,SAEgD,CAAC;AAEpF;;GAEG;AACH,eAAO,MAAM,aAAa,QAAQ,CAAC"}
package/dist/core/tests/integration/test-config.js
@@ -0,0 +1,15 @@
+/**
+* Centralized configuration for integration tests
+* Import this in all integration test files to ensure consistent behavior
+*/
+/**
+* Whether to skip integration tests:
+* - Skip if SKIP_INTEGRATION_TESTS is explicitly set to 'true'
+* - In CI environments, skip unless ENABLE_CI_INTEGRATION_TESTS is set to 'true'
+*/
+export const SKIP_INTEGRATION_TESTS = process.env.SKIP_INTEGRATION_TESTS === 'true' ||
+(process.env.CI === 'true' && process.env.ENABLE_CI_INTEGRATION_TESTS !== 'true');
+/**
+* Default timeout for test setup (in milliseconds)
+*/
+export const SETUP_TIMEOUT = 30000;
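test-config.js centralizes the skip flag and timeout that the integration suites share. A sketch of the intended consumption, assuming Vitest's describe.skipIf and a timeout argument on beforeAll; only SKIP_INTEGRATION_TESTS and SETUP_TIMEOUT come from the file above:

// Hypothetical test file consuming the shared config; the Vitest APIs and
// import paths are assumptions.
import { describe, beforeAll, it } from 'vitest';
import { SKIP_INTEGRATION_TESTS, SETUP_TIMEOUT } from './test-config';
import { setupTestDatabase } from './setup';

// Skips locally when SKIP_INTEGRATION_TESTS=true, and in CI unless
// ENABLE_CI_INTEGRATION_TESTS=true (the logic lives in test-config.js).
describe.skipIf(SKIP_INTEGRATION_TESTS)('orders integration', () => {
  beforeAll(async () => {
    await setupTestDatabase();
  }, SETUP_TIMEOUT); // give container + schema setup the full 30s budget

  it.todo('add assertions against the seeded orders table');
});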
package/dist/core/tests/test-utils.d.ts
@@ -0,0 +1,30 @@
+import { QueryBuilder } from '../query-builder';
+type ColumnType = 'Int32' | 'String' | 'Float64' | 'Date' | 'UInt8';
+export type TestTableSchema = {
+id: 'Int32';
+name: 'String';
+price: 'Float64';
+created_at: 'Date';
+category: 'String';
+active: 'UInt8';
+created_by: 'Int32';
+updated_by: 'Int32';
+};
+export type UsersSchema = {
+id: 'Int32';
+user_name: 'String';
+email: 'String';
+created_at: 'Date';
+};
+export interface TestSchema {
+test_table: TestTableSchema;
+users: UsersSchema;
+[tableName: string]: {
+[columnName: string]: ColumnType;
+};
+}
+export declare const TEST_SCHEMAS: TestSchema;
+export declare function setupUsersBuilder(): QueryBuilder<TestSchema, TestSchema['users'], false, {}, TestSchema['users']>;
+export declare function setupTestBuilder(): QueryBuilder<TestSchema, TestSchema['test_table'], false, {}, TestSchema['test_table']>;
+export {};
+//# sourceMappingURL=test-utils.d.ts.map
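test-utils.d.ts gives unit tests a fixed schema plus two factories that return a QueryBuilder already bound to one table. A purely illustrative, type-level sketch of what the declarations imply for callers; the import path and variable names are assumptions, and builder method calls are omitted because only the factory signatures appear in the diff:

// Hypothetical usage of the declared test utilities; only the exported names
// and their signatures come from the .d.ts above.
import { setupTestBuilder, setupUsersBuilder, TEST_SCHEMAS, TestSchema } from './test-utils';

// Each factory returns a builder pre-bound to one table of TestSchema, so
// column names are type-checked against TestTableSchema / UsersSchema.
const testTableBuilder = setupTestBuilder();
const usersBuilder = setupUsersBuilder();

// TEST_SCHEMAS maps table names to ClickHouse column-type literals ('Int32', 'String', ...).
const usersColumns: TestSchema['users'] = TEST_SCHEMAS.users;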
package/dist/core/utils/logger.d.ts
@@ -0,0 +1,37 @@
+export type LogLevel = 'debug' | 'info' | 'warn' | 'error';
+export interface QueryLog {
+query: string;
+parameters?: any[];
+startTime: number;
+endTime?: number;
+duration?: number;
+status: 'started' | 'completed' | 'error';
+error?: Error;
+rowCount?: number;
+queryId?: string;
+}
+export interface LoggerOptions {
+level?: LogLevel;
+enabled?: boolean;
+onQueryLog?: (log: QueryLog) => void;
+}
+declare class Logger {
+private static instance;
+private level;
+private enabled;
+private onQueryLog?;
+private querySubscribers;
+private constructor();
+static getInstance(): Logger;
+configure(options: LoggerOptions): void;
+subscribeToQuery(queryId: string, callback: (log: QueryLog) => void): () => void;
+private shouldLog;
+debug(message: string, ...args: any[]): void;
+info(message: string, ...args: any[]): void;
+warn(message: string, ...args: any[]): void;
+error(message: string, ...args: any[]): void;
+logQuery(log: QueryLog): void;
+}
+export declare const logger: Logger;
+export {};
+//# sourceMappingURL=logger.d.ts.map
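logger.d.ts documents the query-logging surface: a singleton logger with level/enabled switches, a per-query subscription, and a structured QueryLog record. A sketch of how an application might hook into it, based only on the declarations above; the import path and option values are illustrative:

// Hypothetical logging hook-up; the API names and shapes come from logger.d.ts above,
// the import path and configuration values are examples only.
import { logger, QueryLog } from './logger';

// Raise the minimum level and route completed-query logs to a custom sink.
logger.configure({
  level: 'warn',
  enabled: true,
  onQueryLog: (log: QueryLog) => {
    if (log.status === 'completed') {
      console.log(`query ${log.queryId ?? 'n/a'}: ${log.duration}ms, ${log.rowCount} rows`);
    }
  },
});

// Watch a single query's lifecycle; the returned function unsubscribes.
const unsubscribe = logger.subscribeToQuery('revenue-dashboard', (log) => {
  console.log(`[${log.status}] ${log.query}`);
});
unsubscribe();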