@synode/adapter-bigquery 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +35 -0
- package/README.md +89 -0
- package/dist/index.cjs +134 -0
- package/dist/index.d.cts +122 -0
- package/dist/index.d.mts +122 -0
- package/dist/index.mjs +133 -0
- package/package.json +48 -0
package/LICENSE
ADDED
@@ -0,0 +1,35 @@
+Synode Proprietary License
+
+Copyright (c) 2026 Digitl Cloud GmbH. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person or organization
+obtaining a copy of this software and associated documentation files (the
+"Software"), to use the Software for personal, internal, and commercial
+purposes, subject to the following conditions:
+
+1. PERMITTED USE. You may use, copy, and modify the Software for your own
+   personal, internal, or commercial purposes.
+
+2. NO REDISTRIBUTION. You may not distribute, publish, sublicense, or
+   otherwise make the Software or any derivative works available to third
+   parties, whether in source code or compiled form, free of charge or for
+   a fee.
+
+3. NO RESALE. You may not sell, rent, lease, or otherwise commercially
+   exploit the Software itself as a standalone product or as part of a
+   software distribution.
+
+4. NO HOSTING AS A SERVICE. You may not offer the Software to third parties
+   as a hosted, managed, or software-as-a-service product where the primary
+   value derives from the Software.
+
+5. ATTRIBUTION. You must retain this license notice and copyright notice in
+   all copies or substantial portions of the Software.
+
+6. NO WARRANTY. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+   KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+   MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+   IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES
+   OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+   ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+   OTHER DEALINGS IN THE SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,89 @@
+# @synode/adapter-bigquery
+
+BigQuery adapter for Synode. Import datasets from BigQuery tables and export generated events with automatic batching.
+
+## Install
+
+```bash
+npm install @synode/adapter-bigquery @google-cloud/bigquery
+```
+
+Requires `@synode/core` and `@google-cloud/bigquery` as peer dependencies.
+
+## Export (Writing Events)
+
+```ts
+import { generate } from '@synode/core';
+import { BigQueryAdapter } from '@synode/adapter-bigquery';
+
+const adapter = new BigQueryAdapter({
+  projectId: 'my-project',
+  datasetId: 'analytics',
+  tableId: 'events',
+  batchSize: 200,
+});
+
+await generate(journey, { users: 100, adapter });
+await adapter.close();
+```
+
+### Export Options
+
+| Option            | Type                                                           | Default | Description                            |
+|-------------------|----------------------------------------------------------------|---------|----------------------------------------|
+| `projectId`       | `string`                                                       | --      | GCP project ID                         |
+| `datasetId`       | `string`                                                       | --      | BigQuery dataset ID                    |
+| `tableId`         | `string`                                                       | --      | BigQuery table ID                      |
+| `batchSize`       | `number`                                                       | `100`   | Events to buffer before inserting      |
+| `flushInterval`   | `number`                                                       | `5000`  | Interval (ms) to flush partial batches |
+| `autoCreateTable` | `boolean`                                                      | `false` | Create the table if it doesn't exist   |
+| `transform`       | `(event: Record<string, unknown>) => Record<string, unknown>`  | --      | Transform each event row before insert |
+
+Events are serialized as flat rows with `id`, `user_id`, `session_id`, `name`, `timestamp` (ISO 8601), and `payload` (JSON string). Use `transform` to reshape rows for your schema.
+
+## Import (Loading Datasets)
+
+```ts
+import { generate } from '@synode/core';
+import { BigQueryAdapter, importFromBigQuery } from '@synode/adapter-bigquery';
+
+const products = await importFromBigQuery({
+  projectId: 'my-project',
+  datasetId: 'ecommerce',
+  tableId: 'products',
+  id: 'products',
+  name: 'Products',
+  where: 'active = true',
+  limit: 1000,
+});
+
+await generate(journey, {
+  users: 100,
+  preloadedDatasets: [products],
+  adapter: new BigQueryAdapter({ ... }),
+});
+```
+
+### Import Options
+
+| Option      | Type     | Default   | Description                                 |
+|-------------|----------|-----------|---------------------------------------------|
+| `projectId` | `string` | --        | GCP project ID                              |
+| `datasetId` | `string` | --        | BigQuery dataset ID                         |
+| `tableId`   | `string` | --        | BigQuery table ID to read from              |
+| `where`     | `string` | --        | SQL WHERE clause to filter rows             |
+| `limit`     | `number` | unlimited | Maximum number of rows to import            |
+| `id`        | `string` | --        | Synode dataset ID for the imported dataset  |
+| `name`      | `string` | --        | Synode dataset name                         |
+
+## Documentation
+
+- [Output Adapters Guide](https://synode-docs-1013393206713.europe-west1.run.app/guides/output-adapters)
+- [API Reference](https://synode-docs-1013393206713.europe-west1.run.app/api/types)
+- [GitHub](https://github.com/digitl-cloud/synode/tree/main/packages/adapter-bigquery)
+
+## License
+
+Proprietary -- see [LICENSE](https://github.com/digitl-cloud/synode/blob/main/LICENSE) for details.
+
+Copyright © 2026 [Digitl Cloud GmbH](https://digitlcloud.com)
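As a quick illustration of the `transform` option documented in the README above, here is a minimal sketch that reshapes the documented flat row before insert. The target column names (`event_name`, `occurred_at`, `properties`) are hypothetical, not part of the package:

```ts
import { BigQueryAdapter } from '@synode/adapter-bigquery';

// Reshape the documented flat row (id, user_id, session_id, name, timestamp, payload)
// into a different column layout before each insert.
const adapter = new BigQueryAdapter({
  projectId: 'my-project',
  datasetId: 'analytics',
  tableId: 'events',
  transform: (row) => ({
    id: row.id,
    user_id: row.user_id,
    event_name: row.name,       // rename the event name column (hypothetical target schema)
    occurred_at: row.timestamp, // ISO 8601 string, per the README
    properties: row.payload,    // JSON-stringified payload, passed through unchanged
  }),
});
```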
package/dist/index.cjs
ADDED
@@ -0,0 +1,134 @@
+let __google_cloud_bigquery = require("@google-cloud/bigquery");
+
+//#region src/adapter.ts
+/**
+ * BigQuery output adapter for Synode.
+ *
+ * Buffers generated events and inserts them into a BigQuery table in batches.
+ * Events are serialized to flat rows with ISO timestamps and JSON-stringified payloads.
+ *
+ * @example
+ * ```ts
+ * const adapter = new BigQueryAdapter({
+ *   projectId: 'my-project',
+ *   datasetId: 'analytics',
+ *   tableId: 'events',
+ *   batchSize: 200,
+ * });
+ * await generate(journey, { users: 100, adapter });
+ * await adapter.close();
+ * ```
+ */
+var BigQueryAdapter = class {
+  client;
+  options;
+  buffer = [];
+  flushTimer = null;
+  constructor(options) {
+    this.client = new __google_cloud_bigquery.BigQuery({ projectId: options.projectId });
+    this.options = {
+      batchSize: 100,
+      flushInterval: 5e3,
+      ...options
+    };
+  }
+  /**
+   * Buffers a single event for batch insertion.
+   * Triggers a flush when the buffer reaches {@link BigQueryAdapterOptions.batchSize}.
+   *
+   * @param event - The generated event to write
+   */
+  async write(event) {
+    const row = this.serializeEvent(event);
+    const transformed = this.options.transform ? this.options.transform(row) : row;
+    this.buffer.push(transformed);
+    if (this.buffer.length >= this.options.batchSize) await this.flush();
+    else this.flushTimer ??= setTimeout(() => {
+      this.flush();
+    }, this.options.flushInterval);
+  }
+  /**
+   * Flushes any remaining buffered events and cleans up timers.
+   */
+  async close() {
+    if (this.flushTimer) {
+      clearTimeout(this.flushTimer);
+      this.flushTimer = null;
+    }
+    if (this.buffer.length > 0) await this.flush();
+  }
+  /**
+   * Inserts all buffered rows into BigQuery and clears the buffer.
+   */
+  async flush() {
+    if (this.buffer.length === 0) return;
+    if (this.flushTimer) {
+      clearTimeout(this.flushTimer);
+      this.flushTimer = null;
+    }
+    const rows = this.buffer.splice(0);
+    await this.client.dataset(this.options.datasetId).table(this.options.tableId).insert(rows);
+  }
+  /**
+   * Converts a synode Event into a flat BigQuery row.
+   *
+   * @param event - The event to serialize
+   * @returns A flat record with id, user_id, session_id, name, timestamp (ISO), and payload (JSON)
+   */
+  serializeEvent(event) {
+    return {
+      id: event.id,
+      user_id: event.userId,
+      session_id: event.sessionId,
+      name: event.name,
+      timestamp: event.timestamp.toISOString(),
+      payload: JSON.stringify(event.payload)
+    };
+  }
+};
+
+//#endregion
+//#region src/import.ts
+/**
+ * Import a BigQuery table as a synode Dataset.
+ *
+ * Queries the specified table and returns a Dataset object that can be
+ * passed to `generate()` via the `preloadedDatasets` option.
+ *
+ * @param options - Import configuration including project, table, and synode dataset metadata
+ * @returns A Dataset containing all matching rows from the BigQuery table
+ *
+ * @example
+ * ```ts
+ * const products = await importFromBigQuery({
+ *   projectId: 'my-project',
+ *   datasetId: 'ecommerce',
+ *   tableId: 'products',
+ *   id: 'products',
+ *   name: 'Products',
+ *   limit: 1000,
+ * });
+ *
+ * await generate(journey, {
+ *   users: 100,
+ *   preloadedDatasets: [products],
+ *   adapter,
+ * });
+ * ```
+ */
+async function importFromBigQuery(options) {
+  const client = new __google_cloud_bigquery.BigQuery({ projectId: options.projectId });
+  let query = `SELECT * FROM \`${options.projectId}.${options.datasetId}.${options.tableId}\``;
+  if (options.where) query += ` WHERE ${options.where}`;
+  if (options.limit) query += ` LIMIT ${String(options.limit)}`;
+  const [rows] = await client.query({ query });
+  return {
+    id: options.id,
+    name: options.name,
+    rows
+  };
+}
+
+//#endregion
+exports.BigQueryAdapter = BigQueryAdapter;
+exports.importFromBigQuery = importFromBigQuery;
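To make the query assembly in `importFromBigQuery` concrete, here is the statement it would build for the README's import example, as a sketch derived from the template literal above. Note that `where` is interpolated verbatim into the SQL string, so it should only come from trusted input:

```ts
// Values taken from the README import example:
//   projectId: 'my-project', datasetId: 'ecommerce', tableId: 'products',
//   where: 'active = true', limit: 1000
// importFromBigQuery assembles and runs this single statement:
const query =
  'SELECT * FROM `my-project.ecommerce.products` WHERE active = true LIMIT 1000';
```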
package/dist/index.d.cts
ADDED
@@ -0,0 +1,122 @@
+import { Dataset, Event, OutputAdapter } from "@synode/core";
+
+//#region src/types.d.ts
+/**
+ * Configuration for BigQuery event export.
+ */
+interface BigQueryAdapterOptions {
+  /** GCP project ID. */
+  projectId: string;
+  /** BigQuery dataset ID (not synode dataset -- BQ dataset). */
+  datasetId: string;
+  /** BigQuery table ID to write events to. */
+  tableId: string;
+  /** Number of events to batch before inserting. @default 100 */
+  batchSize?: number;
+  /** Maximum time (ms) to wait before flushing a partial batch. @default 5000 */
+  flushInterval?: number;
+  /** If true, create the table if it doesn't exist. @default false */
+  autoCreateTable?: boolean;
+  /** Optional transform function applied to each event before insert. */
+  transform?: (event: Record<string, unknown>) => Record<string, unknown>;
+}
+/**
+ * Configuration for BigQuery dataset import.
+ */
+interface BigQueryImportOptions {
+  /** GCP project ID. */
+  projectId: string;
+  /** BigQuery dataset ID. */
+  datasetId: string;
+  /** BigQuery table ID to read from. */
+  tableId: string;
+  /** Optional SQL WHERE clause to filter rows. */
+  where?: string;
+  /** Maximum number of rows to import. @default unlimited */
+  limit?: number;
+  /** Synode dataset ID to assign to the imported dataset. */
+  id: string;
+  /** Synode dataset name. */
+  name: string;
+}
+//#endregion
+//#region src/adapter.d.ts
+/**
+ * BigQuery output adapter for Synode.
+ *
+ * Buffers generated events and inserts them into a BigQuery table in batches.
+ * Events are serialized to flat rows with ISO timestamps and JSON-stringified payloads.
+ *
+ * @example
+ * ```ts
+ * const adapter = new BigQueryAdapter({
+ *   projectId: 'my-project',
+ *   datasetId: 'analytics',
+ *   tableId: 'events',
+ *   batchSize: 200,
+ * });
+ * await generate(journey, { users: 100, adapter });
+ * await adapter.close();
+ * ```
+ */
+declare class BigQueryAdapter implements OutputAdapter {
+  private client;
+  private options;
+  private buffer;
+  private flushTimer;
+  constructor(options: BigQueryAdapterOptions);
+  /**
+   * Buffers a single event for batch insertion.
+   * Triggers a flush when the buffer reaches {@link BigQueryAdapterOptions.batchSize}.
+   *
+   * @param event - The generated event to write
+   */
+  write(event: Event): Promise<void>;
+  /**
+   * Flushes any remaining buffered events and cleans up timers.
+   */
+  close(): Promise<void>;
+  /**
+   * Inserts all buffered rows into BigQuery and clears the buffer.
+   */
+  private flush;
+  /**
+   * Converts a synode Event into a flat BigQuery row.
+   *
+   * @param event - The event to serialize
+   * @returns A flat record with id, user_id, session_id, name, timestamp (ISO), and payload (JSON)
+   */
+  private serializeEvent;
+}
+//#endregion
+//#region src/import.d.ts
+/**
+ * Import a BigQuery table as a synode Dataset.
+ *
+ * Queries the specified table and returns a Dataset object that can be
+ * passed to `generate()` via the `preloadedDatasets` option.
+ *
+ * @param options - Import configuration including project, table, and synode dataset metadata
+ * @returns A Dataset containing all matching rows from the BigQuery table
+ *
+ * @example
+ * ```ts
+ * const products = await importFromBigQuery({
+ *   projectId: 'my-project',
+ *   datasetId: 'ecommerce',
+ *   tableId: 'products',
+ *   id: 'products',
+ *   name: 'Products',
+ *   limit: 1000,
+ * });
+ *
+ * await generate(journey, {
+ *   users: 100,
+ *   preloadedDatasets: [products],
+ *   adapter,
+ * });
+ * ```
+ */
+declare function importFromBigQuery(options: BigQueryImportOptions): Promise<Dataset>;
+//#endregion
+export { BigQueryAdapter, type BigQueryAdapterOptions, type BigQueryImportOptions, importFromBigQuery };
package/dist/index.d.mts
ADDED
@@ -0,0 +1,122 @@
+import { Dataset, Event, OutputAdapter } from "@synode/core";
+
+//#region src/types.d.ts
+/**
+ * Configuration for BigQuery event export.
+ */
+interface BigQueryAdapterOptions {
+  /** GCP project ID. */
+  projectId: string;
+  /** BigQuery dataset ID (not synode dataset -- BQ dataset). */
+  datasetId: string;
+  /** BigQuery table ID to write events to. */
+  tableId: string;
+  /** Number of events to batch before inserting. @default 100 */
+  batchSize?: number;
+  /** Maximum time (ms) to wait before flushing a partial batch. @default 5000 */
+  flushInterval?: number;
+  /** If true, create the table if it doesn't exist. @default false */
+  autoCreateTable?: boolean;
+  /** Optional transform function applied to each event before insert. */
+  transform?: (event: Record<string, unknown>) => Record<string, unknown>;
+}
+/**
+ * Configuration for BigQuery dataset import.
+ */
+interface BigQueryImportOptions {
+  /** GCP project ID. */
+  projectId: string;
+  /** BigQuery dataset ID. */
+  datasetId: string;
+  /** BigQuery table ID to read from. */
+  tableId: string;
+  /** Optional SQL WHERE clause to filter rows. */
+  where?: string;
+  /** Maximum number of rows to import. @default unlimited */
+  limit?: number;
+  /** Synode dataset ID to assign to the imported dataset. */
+  id: string;
+  /** Synode dataset name. */
+  name: string;
+}
+//#endregion
+//#region src/adapter.d.ts
+/**
+ * BigQuery output adapter for Synode.
+ *
+ * Buffers generated events and inserts them into a BigQuery table in batches.
+ * Events are serialized to flat rows with ISO timestamps and JSON-stringified payloads.
+ *
+ * @example
+ * ```ts
+ * const adapter = new BigQueryAdapter({
+ *   projectId: 'my-project',
+ *   datasetId: 'analytics',
+ *   tableId: 'events',
+ *   batchSize: 200,
+ * });
+ * await generate(journey, { users: 100, adapter });
+ * await adapter.close();
+ * ```
+ */
+declare class BigQueryAdapter implements OutputAdapter {
+  private client;
+  private options;
+  private buffer;
+  private flushTimer;
+  constructor(options: BigQueryAdapterOptions);
+  /**
+   * Buffers a single event for batch insertion.
+   * Triggers a flush when the buffer reaches {@link BigQueryAdapterOptions.batchSize}.
+   *
+   * @param event - The generated event to write
+   */
+  write(event: Event): Promise<void>;
+  /**
+   * Flushes any remaining buffered events and cleans up timers.
+   */
+  close(): Promise<void>;
+  /**
+   * Inserts all buffered rows into BigQuery and clears the buffer.
+   */
+  private flush;
+  /**
+   * Converts a synode Event into a flat BigQuery row.
+   *
+   * @param event - The event to serialize
+   * @returns A flat record with id, user_id, session_id, name, timestamp (ISO), and payload (JSON)
+   */
+  private serializeEvent;
+}
+//#endregion
+//#region src/import.d.ts
+/**
+ * Import a BigQuery table as a synode Dataset.
+ *
+ * Queries the specified table and returns a Dataset object that can be
+ * passed to `generate()` via the `preloadedDatasets` option.
+ *
+ * @param options - Import configuration including project, table, and synode dataset metadata
+ * @returns A Dataset containing all matching rows from the BigQuery table
+ *
+ * @example
+ * ```ts
+ * const products = await importFromBigQuery({
+ *   projectId: 'my-project',
+ *   datasetId: 'ecommerce',
+ *   tableId: 'products',
+ *   id: 'products',
+ *   name: 'Products',
+ *   limit: 1000,
+ * });
+ *
+ * await generate(journey, {
+ *   users: 100,
+ *   preloadedDatasets: [products],
+ *   adapter,
+ * });
+ * ```
+ */
+declare function importFromBigQuery(options: BigQueryImportOptions): Promise<Dataset>;
+//#endregion
+export { BigQueryAdapter, type BigQueryAdapterOptions, type BigQueryImportOptions, importFromBigQuery };
package/dist/index.mjs
ADDED
@@ -0,0 +1,133 @@
+import { BigQuery } from "@google-cloud/bigquery";
+
+//#region src/adapter.ts
+/**
+ * BigQuery output adapter for Synode.
+ *
+ * Buffers generated events and inserts them into a BigQuery table in batches.
+ * Events are serialized to flat rows with ISO timestamps and JSON-stringified payloads.
+ *
+ * @example
+ * ```ts
+ * const adapter = new BigQueryAdapter({
+ *   projectId: 'my-project',
+ *   datasetId: 'analytics',
+ *   tableId: 'events',
+ *   batchSize: 200,
+ * });
+ * await generate(journey, { users: 100, adapter });
+ * await adapter.close();
+ * ```
+ */
+var BigQueryAdapter = class {
+  client;
+  options;
+  buffer = [];
+  flushTimer = null;
+  constructor(options) {
+    this.client = new BigQuery({ projectId: options.projectId });
+    this.options = {
+      batchSize: 100,
+      flushInterval: 5e3,
+      ...options
+    };
+  }
+  /**
+   * Buffers a single event for batch insertion.
+   * Triggers a flush when the buffer reaches {@link BigQueryAdapterOptions.batchSize}.
+   *
+   * @param event - The generated event to write
+   */
+  async write(event) {
+    const row = this.serializeEvent(event);
+    const transformed = this.options.transform ? this.options.transform(row) : row;
+    this.buffer.push(transformed);
+    if (this.buffer.length >= this.options.batchSize) await this.flush();
+    else this.flushTimer ??= setTimeout(() => {
+      this.flush();
+    }, this.options.flushInterval);
+  }
+  /**
+   * Flushes any remaining buffered events and cleans up timers.
+   */
+  async close() {
+    if (this.flushTimer) {
+      clearTimeout(this.flushTimer);
+      this.flushTimer = null;
+    }
+    if (this.buffer.length > 0) await this.flush();
+  }
+  /**
+   * Inserts all buffered rows into BigQuery and clears the buffer.
+   */
+  async flush() {
+    if (this.buffer.length === 0) return;
+    if (this.flushTimer) {
+      clearTimeout(this.flushTimer);
+      this.flushTimer = null;
+    }
+    const rows = this.buffer.splice(0);
+    await this.client.dataset(this.options.datasetId).table(this.options.tableId).insert(rows);
+  }
+  /**
+   * Converts a synode Event into a flat BigQuery row.
+   *
+   * @param event - The event to serialize
+   * @returns A flat record with id, user_id, session_id, name, timestamp (ISO), and payload (JSON)
+   */
+  serializeEvent(event) {
+    return {
+      id: event.id,
+      user_id: event.userId,
+      session_id: event.sessionId,
+      name: event.name,
+      timestamp: event.timestamp.toISOString(),
+      payload: JSON.stringify(event.payload)
+    };
+  }
+};
+
+//#endregion
+//#region src/import.ts
+/**
+ * Import a BigQuery table as a synode Dataset.
+ *
+ * Queries the specified table and returns a Dataset object that can be
+ * passed to `generate()` via the `preloadedDatasets` option.
+ *
+ * @param options - Import configuration including project, table, and synode dataset metadata
+ * @returns A Dataset containing all matching rows from the BigQuery table
+ *
+ * @example
+ * ```ts
+ * const products = await importFromBigQuery({
+ *   projectId: 'my-project',
+ *   datasetId: 'ecommerce',
+ *   tableId: 'products',
+ *   id: 'products',
+ *   name: 'Products',
+ *   limit: 1000,
+ * });
+ *
+ * await generate(journey, {
+ *   users: 100,
+ *   preloadedDatasets: [products],
+ *   adapter,
+ * });
+ * ```
+ */
+async function importFromBigQuery(options) {
+  const client = new BigQuery({ projectId: options.projectId });
+  let query = `SELECT * FROM \`${options.projectId}.${options.datasetId}.${options.tableId}\``;
+  if (options.where) query += ` WHERE ${options.where}`;
+  if (options.limit) query += ` LIMIT ${String(options.limit)}`;
+  const [rows] = await client.query({ query });
+  return {
+    id: options.id,
+    name: options.name,
+    rows
+  };
+}
+
+//#endregion
+export { BigQueryAdapter, importFromBigQuery };
package/package.json
ADDED
@@ -0,0 +1,48 @@
+{
+  "name": "@synode/adapter-bigquery",
+  "version": "1.0.0",
+  "description": "BigQuery adapter for Synode — import datasets and export events",
+  "type": "module",
+  "main": "dist/index.cjs",
+  "module": "dist/index.mjs",
+  "types": "dist/index.d.mts",
+  "exports": {
+    ".": {
+      "types": "./dist/index.d.mts",
+      "import": "./dist/index.mjs",
+      "require": "./dist/index.cjs"
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "author": "Digitl Cloud GmbH",
+  "license": "SEE LICENSE IN LICENSE",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/digitl-cloud/synode",
+    "directory": "packages/adapter-bigquery"
+  },
+  "peerDependencies": {
+    "@synode/core": "^1.0.0",
+    "@google-cloud/bigquery": "^7.0.0 || ^8.0.0"
+  },
+  "devDependencies": {
+    "@eslint/js": "^9.39.4",
+    "@google-cloud/bigquery": "^8.1.1",
+    "@types/node": "^24.12.0",
+    "eslint": "^9.39.4",
+    "eslint-config-prettier": "^10.1.8",
+    "eslint-plugin-prettier": "^5.5.5",
+    "tsdown": "^0.16.8",
+    "typescript": "^5.9.3",
+    "typescript-eslint": "^8.58.0",
+    "vitest": "^4.1.2",
+    "@synode/core": "1.0.0"
+  },
+  "scripts": {
+    "build": "tsdown",
+    "test": "vitest run",
+    "lint": "eslint src tests"
+  }
+}
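The `exports` map in this manifest routes the `import` condition to the ESM bundle and the `require` condition to the CJS bundle, with declarations served from `dist/index.d.mts`. A minimal sketch of the two consumption styles this declares, assuming a Node.js project with the package installed:

```ts
// ESM entry point: the "import" condition resolves to ./dist/index.mjs.
import { BigQueryAdapter, importFromBigQuery } from '@synode/adapter-bigquery';

// CommonJS consumers instead hit the "require" condition (./dist/index.cjs):
// const { BigQueryAdapter } = require('@synode/adapter-bigquery');
```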