@trafficbyintent/kysely-bigquery 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +22 -0
- package/README.md +504 -0
- package/dist/BigQueryAdapter.d.ts +10 -0
- package/dist/BigQueryAdapter.d.ts.map +1 -0
- package/dist/BigQueryAdapter.js +16 -0
- package/dist/BigQueryAdapter.js.map +1 -0
- package/dist/BigQueryCompiler.d.ts +24 -0
- package/dist/BigQueryCompiler.d.ts.map +1 -0
- package/dist/BigQueryCompiler.js +244 -0
- package/dist/BigQueryCompiler.js.map +1 -0
- package/dist/BigQueryConnection.d.ts +41 -0
- package/dist/BigQueryConnection.d.ts.map +1 -0
- package/dist/BigQueryConnection.js +222 -0
- package/dist/BigQueryConnection.js.map +1 -0
- package/dist/BigQueryDialect.d.ts +80 -0
- package/dist/BigQueryDialect.d.ts.map +1 -0
- package/dist/BigQueryDialect.js +77 -0
- package/dist/BigQueryDialect.js.map +1 -0
- package/dist/BigQueryDriver.d.ts +20 -0
- package/dist/BigQueryDriver.d.ts.map +1 -0
- package/dist/BigQueryDriver.js +47 -0
- package/dist/BigQueryDriver.js.map +1 -0
- package/dist/BigQueryIntrospector.d.ts +15 -0
- package/dist/BigQueryIntrospector.d.ts.map +1 -0
- package/dist/BigQueryIntrospector.js +90 -0
- package/dist/BigQueryIntrospector.js.map +1 -0
- package/dist/index.d.ts +8 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +11 -0
- package/dist/index.js.map +1 -0
- package/dist/jsonColumnDetector.d.ts +53 -0
- package/dist/jsonColumnDetector.d.ts.map +1 -0
- package/dist/jsonColumnDetector.js +187 -0
- package/dist/jsonColumnDetector.js.map +1 -0
- package/package.json +60 -0
package/LICENSE
ADDED
@@ -0,0 +1,22 @@
MIT License

Copyright (c) 2024 Makara Sok
Copyright (c) 2025 Traffic by Intent

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

package/README.md
ADDED
@@ -0,0 +1,504 @@
# @trafficbyintent/kysely-bigquery

[Kysely](https://github.com/koskimas/kysely) adapter for [BigQuery](https://cloud.google.com/bigquery?hl=en).

## Installation

```bash
npm install @google-cloud/bigquery @trafficbyintent/kysely-bigquery
```

Or with yarn:

```bash
yarn add @google-cloud/bigquery @trafficbyintent/kysely-bigquery
```

This project was largely adapted from [kysely-planetscale](https://github.com/depot/kysely-planetscale) and forked from [@maktouch/kysely-bigquery](https://github.com/maktouch/kysely-bigquery).

## Requirements

- Node.js 18+ (tested with 18.x, 20.x, 22.x)
- BigQuery project with appropriate permissions

## Usage

To configure the Kysely client, pass the dialect either your BigQuery connection options or an existing BigQuery, Dataset, or Table instance. Follow [these docs](https://www.npmjs.com/package/@google-cloud/bigquery) for instructions on creating them.

```typescript
import { Kysely } from 'kysely';
import { BigQueryDialect } from '@trafficbyintent/kysely-bigquery';

interface SomeTable {
  key: string;
  value: string;
}

interface Database {
  'some_dataset.some_table': SomeTable;
}

// Let BigQueryDialect create the BigQuery instance:
const options: BigQueryOptions = ...;
const db = new Kysely<Database>({
  dialect: new BigQueryDialect({
    options,
    // Optional: prepend project ID to all table references
    defaultProject: 'my-gcp-project',
    // Optional: configure JSON columns for automatic serialization
    jsonColumns: {
      'some_dataset.some_table': ['metadata', 'settings']
    }
  })
});

// Or pass in an existing instance
const bigquery: BigQuery | Dataset | Table = ...;
const db = new Kysely<Database>({ dialect: new BigQueryDialect({ bigquery }) });
```

The dialect accepts either BigQuery connection options or an existing BigQuery/Dataset/Table instance. Authentication is handled by the `@google-cloud/bigquery` library itself; see its [documentation](https://www.npmjs.com/package/@google-cloud/bigquery) for authentication options.

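For example, a minimal sketch of constructing an authenticated client yourself and handing it to the dialect (the project ID and key file path are placeholders, and `Database` is the interface from the snippet above):

```typescript
import { BigQuery } from '@google-cloud/bigquery';
import { Kysely } from 'kysely';
import { BigQueryDialect } from '@trafficbyintent/kysely-bigquery';

// Placeholder credentials; any auth mechanism supported by
// @google-cloud/bigquery (ADC, a key file, etc.) works here.
const bigquery = new BigQuery({
  projectId: 'my-gcp-project',
  keyFilename: '/path/to/service-account.json',
});

const db = new Kysely<Database>({ dialect: new BigQueryDialect({ bigquery }) });
```
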
For test environment setup, see [tests/README.md](tests/README.md).

### Key Features

- **Automatic null parameter handling** - The dialect automatically provides type hints for null parameters (see the sketch below)
- **JSON serialization/deserialization** - Registered JSON columns are automatically stringified on write and parsed on read
- **BigQuery SQL compatibility** - Automatic translation of MySQL-style queries to BigQuery syntax
- **Constraint support** - Handles BigQuery's unenforced constraints with proper `NOT ENFORCED` syntax

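To illustrate the null handling, a minimal sketch (the `dataset.users` table and its `deleted_at` column are hypothetical): BigQuery's parameterized queries normally require an explicit type for a `null` value, and the dialect supplies that hint for you.

```typescript
await db
  .insertInto('dataset.users')
  .values({
    id: '123',
    name: 'John',
    // A bare null would otherwise need a manual type hint in the
    // BigQuery API; the dialect provides it automatically.
    deleted_at: null,
  })
  .execute();
```
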
## Data Type Mapping

BigQuery data types are mapped to TypeScript types as follows:

| BigQuery Type | TypeScript Type  | Notes                             |
| ------------- | ---------------- | --------------------------------- |
| INT64         | number or string | Large values returned as strings  |
| FLOAT64       | number           |                                   |
| NUMERIC       | string           | Preserved precision               |
| BIGNUMERIC    | string           | Preserved precision               |
| STRING        | string           |                                   |
| BYTES         | Buffer           | Use `FROM_BASE64()` for insertion |
| BOOL          | boolean          |                                   |
| DATE          | string           | Format: 'YYYY-MM-DD'              |
| DATETIME      | string           | Format: 'YYYY-MM-DD HH:MM:SS'     |
| TIMESTAMP     | Date             | JavaScript Date object            |
| TIME          | string           | Format: 'HH:MM:SS'                |
| JSON          | any              | Use JSON literals for insertion   |
| ARRAY<T>      | T[]              |                                   |
| STRUCT<...>   | object           | Nested object structure           |

### Special Type Handling Examples

```typescript
// INT64
await db
  .insertInto("users")
  .values({
    id: 12345,
    big_id: "9223372036854775807",
  })
  .execute();

// BYTES
await sql`
  INSERT INTO files (content)
  VALUES (FROM_BASE64(${Buffer.from("Hello").toString("base64")}))
`.execute(db);

// JSON
await sql`
  INSERT INTO logs (data)
  VALUES (JSON '{"level": "info", "message": "test"}')
`.execute(db);

// ARRAY
await db
  .insertInto("products")
  .values({
    tags: ["electronics", "laptop", "computer"],
  })
  .execute();
```

## JSON Data Handling

The dialect provides automatic JSON serialization for a better developer experience when working with JSON data in BigQuery.

### Automatic JSON Serialization

When using STRING columns to store JSON data (the most common pattern), the dialect can automatically stringify JavaScript objects:

```typescript
// Configure JSON columns for automatic serialization
const db = new Kysely<Database>({
  dialect: new BigQueryDialect({
    bigquery: bigquery,
    jsonColumns: {
      'dataset.users': ['metadata', 'settings'],
      'dataset.products': ['specifications']
    }
  })
});

// Objects are automatically stringified for registered columns on write
await db
  .insertInto('dataset.users')
  .values({
    id: '123',
    name: 'John',
    metadata: { role: 'admin', permissions: ['read', 'write'] }, // Auto-stringified
    settings: { theme: 'dark', notifications: true } // Auto-stringified
  })
  .execute();

// Registered columns are automatically parsed back to objects on read
const user = await db
  .selectFrom('dataset.users')
  .selectAll()
  .where('id', '=', '123')
  .executeTakeFirst();

console.log(user.metadata.role); // 'admin' - automatically parsed
```

### Manual JSON Handling

Without explicit configuration, you need to stringify JSON yourself:

```typescript
await db
  .insertInto('dataset.users')
  .values({
    id: '123',
    metadata: JSON.stringify({ role: 'admin' }) // Manual stringify required
  })
  .execute();
```

### Native JSON Columns

For BigQuery's native JSON column type, you need to use `PARSE_JSON()`:

```typescript
// Native JSON columns require PARSE_JSON
await sql`
  INSERT INTO dataset.orders (id, data)
  VALUES (${orderId}, PARSE_JSON(${JSON.stringify(orderData)}))
`.execute(db);
```

### Querying JSON Data

Use BigQuery's JSON functions to query JSON data:

```typescript
const results = await sql`
  SELECT
    JSON_VALUE(metadata, '$.role') as role,
    JSON_QUERY(settings, '$.features') as features
  FROM dataset.users
  WHERE JSON_VALUE(metadata, '$.role') = 'admin'
`.execute(db);
```

## Project-Qualified Table Names

BigQuery supports three-level table names: `project.dataset.table`. Since Kysely's parser only handles two-level names (`schema.table`), use the `defaultProject` config to automatically prepend your project ID:

```typescript
const db = new Kysely<Database>({
  dialect: new BigQueryDialect({
    bigquery: client,
    defaultProject: 'my-gcp-project',
  })
});

// Write queries with dataset.table — project is prepended automatically
db.selectFrom('analytics.events').selectAll();
// Generates: select * from `my-gcp-project`.`analytics`.`events`
```

Without `defaultProject`, two-level names work as expected:

```typescript
db.selectFrom('analytics.events').selectAll();
// Generates: select * from `analytics`.`events`
```

## BigQuery SQL Compatibility

The `BigQueryCompiler` extends Kysely's MySQL query compiler to handle BigQuery-specific SQL syntax differences. It automatically translates common MySQL patterns to their BigQuery equivalents, allowing you to write more portable code.

### Automatic SQL Translations

#### Set Operations

- `UNION` → `UNION DISTINCT` (BigQuery requires explicit DISTINCT)

#### Function Translations

- `NOW()` → `CURRENT_TIMESTAMP()`
- `LENGTH()` → `CHAR_LENGTH()` (for character count instead of byte count)
- `DATE_FORMAT(date, format)` → `FORMAT_TIMESTAMP(format, date)` (parameter order is swapped)

#### DML Requirements

- **UPDATE without WHERE**: Automatically adds `WHERE TRUE` (BigQuery requires a WHERE clause)
- **DELETE without WHERE**: Automatically adds `WHERE TRUE` (BigQuery requires a WHERE clause)

#### Table Naming

- Supports BigQuery's two-level `dataset.table` naming by default
- Use the `defaultProject` config to enable three-level `project.dataset.table` references (the project is prepended automatically)

### Example Translations

```typescript
await db
  .selectFrom("users")
  .select(sql`NOW()`.as("current_time"))
  .where("name", "like", sql`CONCAT('%', ${search}, '%')`)
  .union(db.selectFrom("archived_users").select(sql`NOW()`.as("current_time")))
  .execute();

await db.updateTable("users").set({ status: "active" }).execute();
```

### Raw SQL Support

The compiler also translates functions within raw SQL strings:

```typescript
await sql`SELECT DATE_FORMAT(created_at, '%Y-%m-%d') as date FROM users`.execute(
  db
);
// Generates: SELECT FORMAT_TIMESTAMP('%Y-%m-%d', created_at) as date FROM users
```

## BigQuery Constraints

BigQuery supports constraint syntax (PRIMARY KEY, FOREIGN KEY, UNIQUE), but these constraints are **not enforced** at runtime. They serve as metadata for query optimization and documentation purposes.

When using Kysely with the BigQuery dialect, all constraints automatically include the `NOT ENFORCED` qualifier as required by BigQuery.

### Constraint Examples

#### Primary Key

```typescript
await db.schema
  .createTable("users")
  .addColumn("id", "integer", (col) => col.primaryKey())
  .execute();

await db.schema
  .createTable("order_items")
  .addColumn("order_id", "integer")
  .addColumn("product_id", "integer")
  .addPrimaryKeyConstraint("pk_order_items", ["order_id", "product_id"])
  .execute();
```

#### Unique Constraint

```typescript
await db.schema
  .createTable("users")
  .addColumn("email", "varchar", (col) => col.unique())
  .execute();

await db.schema
  .createTable("products")
  .addColumn("category", "varchar")
  .addColumn("name", "varchar")
  .addUniqueConstraint("unique_category_name", ["category", "name"])
  .execute();
```

#### Foreign Key

```typescript
await db.schema
  .createTable("orders")
  .addColumn("customer_id", "integer", (col) =>
    col.references("customers.id").onDelete("cascade")
  )
  .execute();

await db.schema
  .createTable("orders")
  .addColumn("customer_id", "integer")
  .addForeignKeyConstraint("fk_customer", ["customer_id"], "customers", ["id"])
  .execute();
```

### Important Notes

- **No Enforcement**: BigQuery constraints are metadata only and not enforced at runtime
- **Query Optimization**: Constraints help BigQuery's query optimizer improve performance
- **Foreign Key Restrictions**: Foreign keys can only reference tables within the same dataset

### Complete Table Example

```typescript
await db.schema
  .createTable("orders")
  .addColumn("id", "integer", (col) => col.primaryKey())
  .addColumn("order_number", "varchar", (col) => col.unique().notNull())
  .addColumn("customer_id", "integer", (col) =>
    col.references("customers.id").notNull()
  )
  .addColumn("total_amount", "decimal", (col) => col.notNull())
  .addColumn("created_at", "timestamp", (col) =>
    col.defaultTo(sql`CURRENT_TIMESTAMP`).notNull()
  )
  .execute();
```

This generates:

```sql
CREATE TABLE `orders` (
  `id` integer PRIMARY KEY NOT ENFORCED,
  `order_number` varchar UNIQUE NOT ENFORCED NOT NULL,
  `customer_id` integer REFERENCES `customers` (`id`) NOT ENFORCED NOT NULL,
  `total_amount` decimal NOT NULL,
  `created_at` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL
)
```

### Raw SQL Constraints

When using raw SQL, you must add the `NOT ENFORCED` qualifier yourself:

```typescript
await sql`
  CREATE TABLE users (
    id INT64 NOT NULL,
    email STRING,
    CONSTRAINT pk_users PRIMARY KEY (id) NOT ENFORCED,
    CONSTRAINT unique_email UNIQUE (email) NOT ENFORCED
  )
`.execute(db);
```

Without `NOT ENFORCED`, BigQuery will reject the constraint definition.

## Limitations

### Core BigQuery Limitations

1. **No Transaction Support** - All operations are auto-committed

   ```typescript
   await db.transaction().execute(async (trx) => {
     // No-op - BigQuery doesn't support transactions.
     // Operations execute but are not wrapped in a transaction.
   });
   ```

2. **No Indexes** - BigQuery uses automatic optimization instead

3. **Case Sensitivity** - Table and column names are case-sensitive

4. **Query Size Limits** - Maximum result size is 10GB (use streaming for larger results)

### SQL Restrictions

- **UPDATE/DELETE** require a WHERE clause (the library automatically adds `WHERE TRUE` if missing)
- **INSERT** operations don't support `ON DUPLICATE KEY UPDATE`; the usual substitute is a raw `MERGE` statement (see the sketch after this list)
- **Correlated subqueries** have limited support in DML statements
- **Constraints** are metadata only and not enforced (see the [BigQuery Constraints](#bigquery-constraints) section)

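As a sketch of that `MERGE` workaround (the table, columns, and the `userId`/`userName` variables are hypothetical; this is plain BigQuery SQL rather than an API of this package), an upsert can be expressed via raw SQL:

```typescript
// Upsert: update the row if `id` already exists, insert it otherwise.
await sql`
  MERGE dataset.users AS target
  USING (SELECT ${userId} AS id, ${userName} AS name) AS source
  ON target.id = source.id
  WHEN MATCHED THEN
    UPDATE SET name = source.name
  WHEN NOT MATCHED THEN
    INSERT (id, name) VALUES (source.id, source.name)
`.execute(db);
```
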
### Platform-Specific Limitations

These are BigQuery platform limitations that cannot be addressed by the dialect:

1. **Streaming Buffer Conflicts**
   - BigQuery doesn't allow UPDATE/DELETE operations on recently streamed data
   - Error: `UPDATE or DELETE statement over table would affect rows in the streaming buffer`
   - **Workaround**: Add delays between insert and update/delete operations, or use load jobs instead of streaming

   ```typescript
   // In tests or operations, add delays
   await db.insertInto('users').values(userData).execute();
   await new Promise(resolve => setTimeout(resolve, 2000)); // Wait for streaming buffer
   await db.updateTable('users').set(updates).where('id', '=', userId).execute();
   ```

2. **Eventual Consistency**
   - Table metadata and schema changes may not be immediately visible
   - INFORMATION_SCHEMA queries might not reflect recent changes
   - **Workaround**: Add delays or implement retry logic for metadata operations (see the backoff sketch after this list)

3. **Complex Query Limitations**
   - Very complex WHERE conditions or joins may exceed BigQuery's query complexity limits
   - Some advanced SQL features may not be supported
   - **Workaround**: Simplify queries or use raw SQL for complex operations

4. **Rate Limits and Quotas**
   - BigQuery has various quotas for queries, DML statements, and API calls
   - Error: `Quota exceeded` or rate limit errors
   - **Workaround**: Implement exponential backoff and respect quota limits (see the backoff sketch after this list)

5. **Data Type Restrictions**
   - ARRAY types cannot contain NULL values
   - STRUCT fields have naming restrictions
   - Native JSON columns require specific syntax that differs from standard JSON operations

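For the retry workarounds in items 2 and 4 above, a minimal sketch of a generic backoff helper (not part of this package; the names and defaults are illustrative):

```typescript
// Re-run an operation with exponential backoff before giving up.
async function withBackoff<T>(
  operation: () => Promise<T>,
  maxAttempts = 5,
  baseDelayMs = 500,
): Promise<T> {
  for (let attempt = 0; ; attempt++) {
    try {
      return await operation();
    } catch (error) {
      if (attempt + 1 >= maxAttempts) throw error;
      // Waits 500ms, 1s, 2s, 4s, ... between attempts.
      await new Promise<void>((resolve) =>
        setTimeout(resolve, baseDelayMs * 2 ** attempt),
      );
    }
  }
}

// Usage: wrap a metadata query that may lag behind recent schema
// changes or hit a quota error.
const tables = await withBackoff(() =>
  sql`SELECT table_name FROM dataset.INFORMATION_SCHEMA.TABLES`.execute(db),
);
```
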
## Testing

### Running Tests Locally

```bash
# Run unit tests (no credentials required)
npm test

# Run unit tests with coverage
npm run test:coverage
```

### Integration Tests (Local Only)

Integration tests hit a real BigQuery instance and are **not** run in CI. They require a `.secrets` file with BigQuery credentials (see `.secrets.example`):

```bash
# Export credentials then run integration tests
export $(grep -v '^#' .secrets | xargs)
npm run test:integration

# Run all tests (unit + integration)
npm run test:all
```

### Testing GitHub Actions Locally

This project includes comprehensive GitHub Actions testing using [act](https://github.com/nektos/act):

```bash
# Install act (macOS)
brew install act

# Test all workflows
npm run test:github-actions

# Test specific workflow
./.github/test-actions.sh ci
```

#### Dry-Run Mode for Release Workflows

When testing release workflows locally, operations that would affect external services automatically run in safe mode:

- **NPM Publishing**: Uses the `--dry-run` flag to simulate publishing without actually uploading
- **Git Operations**: Skipped, with informative messages showing what would be pushed
- **GitHub Releases**: Mocked, with detailed console output

This allows you to fully test release workflows without accidental deployments.

#### Apple Silicon Support

The test scripts automatically detect Apple Silicon (M1/M2) Macs and configure the appropriate container architecture.

package/dist/BigQueryAdapter.d.ts
ADDED
@@ -0,0 +1,10 @@
import { MysqlAdapter } from 'kysely';
/**
 * BigQuery adapter that extends MysqlAdapter.
 *
 * Disables the RETURNING clause, which BigQuery does not support.
 */
export declare class BigQueryAdapter extends MysqlAdapter {
    get supportsReturning(): boolean;
}
//# sourceMappingURL=BigQueryAdapter.d.ts.map

package/dist/BigQueryAdapter.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"BigQueryAdapter.d.ts","sourceRoot":"","sources":["../src/BigQueryAdapter.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAC;AAEtC;;;;GAIG;AACH,qBAAa,eAAgB,SAAQ,YAAY;IAC/C,IAAW,iBAAiB,IAAI,OAAO,CAEtC;CACF"}

package/dist/BigQueryAdapter.js
ADDED
@@ -0,0 +1,16 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BigQueryAdapter = void 0;
const kysely_1 = require("kysely");
/**
 * BigQuery adapter that extends MysqlAdapter.
 *
 * Disables the RETURNING clause, which BigQuery does not support.
 */
class BigQueryAdapter extends kysely_1.MysqlAdapter {
    get supportsReturning() {
        return false;
    }
}
exports.BigQueryAdapter = BigQueryAdapter;
//# sourceMappingURL=BigQueryAdapter.js.map

package/dist/BigQueryAdapter.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"BigQueryAdapter.js","sourceRoot":"","sources":["../src/BigQueryAdapter.ts"],"names":[],"mappings":";;;AAAA,mCAAsC;AAEtC;;;;GAIG;AACH,MAAa,eAAgB,SAAQ,qBAAY;IAC/C,IAAW,iBAAiB;QAC1B,OAAO,KAAK,CAAC;IACf,CAAC;CACF;AAJD,0CAIC"}

package/dist/BigQueryCompiler.d.ts
ADDED
@@ -0,0 +1,24 @@
import { type ColumnDefinitionNode, type DeleteQueryNode, type ForeignKeyConstraintNode, type FunctionNode, MysqlQueryCompiler, type OperationNode, type PrimaryKeyConstraintNode, type RawNode, type SetOperationNode, type TableNode, type UniqueConstraintNode, type UpdateQueryNode } from 'kysely';
/**
 * Query compiler for BigQuery dialect.
 *
 * Extends MysqlQueryCompiler and overrides methods to generate
 * BigQuery-compatible SQL.
 */
export declare class BigQueryCompiler extends MysqlQueryCompiler {
    #private;
    constructor(defaultProject?: string);
    protected visitSetOperation(node: SetOperationNode): void;
    protected visitFunction(node: FunctionNode): void;
    protected visitUpdateQuery(node: UpdateQueryNode): void;
    protected visitDeleteQuery(node: DeleteQueryNode): void;
    protected visitTable(node: TableNode): void;
    protected visitRaw(node: RawNode): void;
    private appendFragmentsWithParams;
    protected visitFunctionArgumentList(args: ReadonlyArray<OperationNode>): void;
    protected visitColumnDefinition(node: ColumnDefinitionNode): void;
    protected visitPrimaryKeyConstraint(node: PrimaryKeyConstraintNode): void;
    protected visitUniqueConstraint(node: UniqueConstraintNode): void;
    protected visitForeignKeyConstraint(node: ForeignKeyConstraintNode): void;
}
//# sourceMappingURL=BigQueryCompiler.d.ts.map

package/dist/BigQueryCompiler.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"BigQueryCompiler.d.ts","sourceRoot":"","sources":["../src/BigQueryCompiler.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,KAAK,oBAAoB,EACzB,KAAK,eAAe,EACpB,KAAK,wBAAwB,EAC7B,KAAK,YAAY,EAEjB,kBAAkB,EAClB,KAAK,aAAa,EAClB,KAAK,wBAAwB,EAC7B,KAAK,OAAO,EACZ,KAAK,gBAAgB,EACrB,KAAK,SAAS,EACd,KAAK,oBAAoB,EACzB,KAAK,eAAe,EACrB,MAAM,QAAQ,CAAC;AAEhB;;;;;GAKG;AACH,qBAAa,gBAAiB,SAAQ,kBAAkB;;gBAG1C,cAAc,CAAC,EAAE,MAAM;cAKhB,iBAAiB,CAAC,IAAI,EAAE,gBAAgB,GAAG,IAAI;cAS/C,aAAa,CAAC,IAAI,EAAE,YAAY,GAAG,IAAI;cAgDvC,gBAAgB,CAAC,IAAI,EAAE,eAAe,GAAG,IAAI;cAa7C,gBAAgB,CAAC,IAAI,EAAE,eAAe,GAAG,IAAI;cAa7C,UAAU,CAAC,IAAI,EAAE,SAAS,GAAG,IAAI;cAoBjC,QAAQ,CAAC,IAAI,EAAE,OAAO,GAAG,IAAI;IAwDhD,OAAO,CAAC,yBAAyB;IAkBjC,SAAS,CAAC,yBAAyB,CAAC,IAAI,EAAE,aAAa,CAAC,aAAa,CAAC,GAAG,IAAI;cAgB1D,qBAAqB,CAAC,IAAI,EAAE,oBAAoB,GAAG,IAAI;cAUvD,yBAAyB,CAAC,IAAI,EAAE,wBAAwB,GAAG,IAAI;cAgB/D,qBAAqB,CAAC,IAAI,EAAE,oBAAoB,GAAG,IAAI;cAgBvD,yBAAyB,CAAC,IAAI,EAAE,wBAAwB,GAAG,IAAI;CAqBnF"}