@tinybirdco/sdk 0.0.44 → 0.0.45

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. package/README.md +53 -0
  2. package/dist/cli/commands/migrate.d.ts.map +1 -1
  3. package/dist/cli/commands/migrate.js +4 -3
  4. package/dist/cli/commands/migrate.js.map +1 -1
  5. package/dist/cli/commands/migrate.test.js +42 -4
  6. package/dist/cli/commands/migrate.test.js.map +1 -1
  7. package/dist/generator/connection.d.ts +1 -1
  8. package/dist/generator/connection.d.ts.map +1 -1
  9. package/dist/generator/connection.js +25 -2
  10. package/dist/generator/connection.js.map +1 -1
  11. package/dist/generator/connection.test.js +37 -14
  12. package/dist/generator/connection.test.js.map +1 -1
  13. package/dist/generator/datasource.d.ts.map +1 -1
  14. package/dist/generator/datasource.js +23 -0
  15. package/dist/generator/datasource.js.map +1 -1
  16. package/dist/generator/datasource.test.js +49 -5
  17. package/dist/generator/datasource.test.js.map +1 -1
  18. package/dist/index.d.ts +3 -3
  19. package/dist/index.d.ts.map +1 -1
  20. package/dist/index.js +1 -1
  21. package/dist/index.js.map +1 -1
  22. package/dist/migrate/emit-ts.d.ts.map +1 -1
  23. package/dist/migrate/emit-ts.js +69 -13
  24. package/dist/migrate/emit-ts.js.map +1 -1
  25. package/dist/migrate/parse-connection.d.ts +2 -2
  26. package/dist/migrate/parse-connection.d.ts.map +1 -1
  27. package/dist/migrate/parse-connection.js +61 -18
  28. package/dist/migrate/parse-connection.js.map +1 -1
  29. package/dist/migrate/parse-datasource.d.ts.map +1 -1
  30. package/dist/migrate/parse-datasource.js +31 -0
  31. package/dist/migrate/parse-datasource.js.map +1 -1
  32. package/dist/migrate/types.d.ts +18 -1
  33. package/dist/migrate/types.d.ts.map +1 -1
  34. package/dist/schema/connection.d.ts +49 -6
  35. package/dist/schema/connection.d.ts.map +1 -1
  36. package/dist/schema/connection.js +44 -9
  37. package/dist/schema/connection.js.map +1 -1
  38. package/dist/schema/connection.test.js +72 -17
  39. package/dist/schema/connection.test.js.map +1 -1
  40. package/dist/schema/datasource.d.ts +16 -1
  41. package/dist/schema/datasource.d.ts.map +1 -1
  42. package/dist/schema/datasource.js +3 -0
  43. package/dist/schema/datasource.js.map +1 -1
  44. package/dist/schema/datasource.test.js +21 -0
  45. package/dist/schema/datasource.test.js.map +1 -1
  46. package/package.json +1 -1
  47. package/src/cli/commands/migrate.test.ts +58 -4
  48. package/src/cli/commands/migrate.ts +6 -4
  49. package/src/generator/connection.test.ts +45 -14
  50. package/src/generator/connection.ts +30 -2
  51. package/src/generator/datasource.test.ts +57 -5
  52. package/src/generator/datasource.ts +38 -1
  53. package/src/index.ts +12 -1
  54. package/src/migrate/emit-ts.ts +80 -16
  55. package/src/migrate/parse-connection.ts +108 -30
  56. package/src/migrate/parse-datasource.ts +46 -1
  57. package/src/migrate/types.ts +24 -2
  58. package/src/schema/connection.test.ts +92 -17
  59. package/src/schema/connection.ts +86 -10
  60. package/src/schema/datasource.test.ts +25 -0
  61. package/src/schema/datasource.ts +21 -1
@@ -15,11 +15,11 @@ const EXPECTED_COMPLEX_OUTPUT = `/**
15
15
  * Review endpoint output schemas and any defaults before production use.
16
16
  */
17
17
 
18
- import { createKafkaConnection, defineDatasource, definePipe, defineMaterializedView, defineCopyPipe, node, t, engine, column, p } from "@tinybirdco/sdk";
18
+ import { defineKafkaConnection, defineDatasource, definePipe, defineMaterializedView, defineCopyPipe, node, t, engine, column, p } from "@tinybirdco/sdk";
19
19
 
20
20
  // Connections
21
21
 
22
- export const stream = createKafkaConnection("stream", {
22
+ export const stream = defineKafkaConnection("stream", {
23
23
  bootstrapServers: "localhost:9092",
24
24
  securityProtocol: "SASL_SSL",
25
25
  saslMechanism: "PLAIN",
@@ -183,11 +183,11 @@ const EXPECTED_PARTIAL_OUTPUT = `/**
183
183
  * Review endpoint output schemas and any defaults before production use.
184
184
  */
185
185
 
186
- import { createKafkaConnection, defineDatasource, definePipe, defineMaterializedView, defineCopyPipe, node, t, engine, p } from "@tinybirdco/sdk";
186
+ import { defineKafkaConnection, defineDatasource, definePipe, defineMaterializedView, defineCopyPipe, node, t, engine, p } from "@tinybirdco/sdk";
187
187
 
188
188
  // Connections
189
189
 
190
- export const stream = createKafkaConnection("stream", {
190
+ export const stream = defineKafkaConnection("stream", {
191
191
  bootstrapServers: "localhost:9092",
192
192
  });
193
193
 
@@ -561,4 +561,58 @@ TOKEN endpoint_token READ
561
561
  expect(result.outputContent).toBe(EXPECTED_PARTIAL_OUTPUT);
562
562
  expect(fs.existsSync(result.outputPath)).toBe(false);
563
563
  });
564
+
565
+ it("migrates s3 connection and import datasource directives", async () => {
566
+ const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "tinybird-migrate-"));
567
+ tempDirs.push(tempDir);
568
+
569
+ writeFile(
570
+ tempDir,
571
+ "s3sample.connection",
572
+ `TYPE s3
573
+ S3_REGION "us-east-1"
574
+ S3_ARN "arn:aws:iam::123456789012:role/tinybird-s3-access"
575
+ `
576
+ );
577
+
578
+ writeFile(
579
+ tempDir,
580
+ "events_landing.datasource",
581
+ `SCHEMA >
582
+ timestamp DateTime,
583
+ session_id String
584
+
585
+ ENGINE "MergeTree"
586
+ ENGINE_SORTING_KEY "timestamp"
587
+ IMPORT_CONNECTION_NAME s3sample
588
+ IMPORT_BUCKET_URI s3://my-bucket/events/*.csv
589
+ IMPORT_SCHEDULE @auto
590
+ IMPORT_FROM_TIMESTAMP 2024-01-01T00:00:00Z
591
+ `
592
+ );
593
+
594
+ const result = await runMigrate({
595
+ cwd: tempDir,
596
+ patterns: ["."],
597
+ strict: true,
598
+ });
599
+
600
+ expect(result.success).toBe(true);
601
+ expect(result.errors).toHaveLength(0);
602
+ expect(result.migrated.filter((resource) => resource.kind === "connection")).toHaveLength(1);
603
+ expect(result.migrated.filter((resource) => resource.kind === "datasource")).toHaveLength(1);
604
+
605
+ const output = fs.readFileSync(result.outputPath, "utf-8");
606
+ expect(output).toContain("defineS3Connection");
607
+ expect(output).toContain('export const s3sample = defineS3Connection("s3sample", {');
608
+ expect(output).toContain('region: "us-east-1"');
609
+ expect(output).toContain(
610
+ 'arn: "arn:aws:iam::123456789012:role/tinybird-s3-access"'
611
+ );
612
+ expect(output).toContain("s3: {");
613
+ expect(output).toContain("connection: s3sample");
614
+ expect(output).toContain('bucketUri: "s3://my-bucket/events/*.csv"');
615
+ expect(output).toContain('schedule: "@auto"');
616
+ expect(output).toContain('fromTimestamp: "2024-01-01T00:00:00Z"');
617
+ });
564
618
  });
@@ -129,15 +129,18 @@ export async function runMigrate(
129
129
  }
130
130
 
131
131
  for (const datasource of parsedDatasources) {
132
+ const referencedConnectionName =
133
+ datasource.kafka?.connectionName ?? datasource.s3?.connectionName;
134
+
132
135
  if (
133
- datasource.kafka &&
134
- !migratedConnectionNames.has(datasource.kafka.connectionName)
136
+ referencedConnectionName &&
137
+ !migratedConnectionNames.has(referencedConnectionName)
135
138
  ) {
136
139
  errors.push({
137
140
  filePath: datasource.filePath,
138
141
  resourceName: datasource.name,
139
142
  resourceKind: datasource.kind,
140
- message: `Datasource references missing/unmigrated connection "${datasource.kafka.connectionName}".`,
143
+ message: `Datasource references missing/unmigrated connection "${referencedConnectionName}".`,
141
144
  });
142
145
  continue;
143
146
  }
@@ -237,4 +240,3 @@ export async function runMigrate(
237
240
  outputContent,
238
241
  };
239
242
  }
240
-
@@ -1,11 +1,11 @@
1
1
  import { describe, it, expect } from "vitest";
2
2
  import { generateConnection, generateAllConnections } from "./connection.js";
3
- import { createKafkaConnection } from "../schema/connection.js";
3
+ import { defineKafkaConnection, defineS3Connection } from "../schema/connection.js";
4
4
 
5
5
  describe("Connection Generator", () => {
6
6
  describe("generateConnection", () => {
7
7
  it("generates basic Kafka connection with required fields", () => {
8
- const conn = createKafkaConnection("my_kafka", {
8
+ const conn = defineKafkaConnection("my_kafka", {
9
9
  bootstrapServers: "kafka.example.com:9092",
10
10
  });
11
11
 
@@ -17,7 +17,7 @@ describe("Connection Generator", () => {
17
17
  });
18
18
 
19
19
  it("includes security protocol when provided", () => {
20
- const conn = createKafkaConnection("my_kafka", {
20
+ const conn = defineKafkaConnection("my_kafka", {
21
21
  bootstrapServers: "kafka.example.com:9092",
22
22
  securityProtocol: "SASL_SSL",
23
23
  });
@@ -28,7 +28,7 @@ describe("Connection Generator", () => {
28
28
  });
29
29
 
30
30
  it("includes SASL mechanism when provided", () => {
31
- const conn = createKafkaConnection("my_kafka", {
31
+ const conn = defineKafkaConnection("my_kafka", {
32
32
  bootstrapServers: "kafka.example.com:9092",
33
33
  saslMechanism: "PLAIN",
34
34
  });
@@ -39,7 +39,7 @@ describe("Connection Generator", () => {
39
39
  });
40
40
 
41
41
  it("includes key and secret when provided", () => {
42
- const conn = createKafkaConnection("my_kafka", {
42
+ const conn = defineKafkaConnection("my_kafka", {
43
43
  bootstrapServers: "kafka.example.com:9092",
44
44
  key: '{{ tb_secret("KAFKA_KEY") }}',
45
45
  secret: '{{ tb_secret("KAFKA_SECRET") }}',
@@ -52,7 +52,7 @@ describe("Connection Generator", () => {
52
52
  });
53
53
 
54
54
  it("includes SSL CA PEM when provided", () => {
55
- const conn = createKafkaConnection("my_kafka", {
55
+ const conn = defineKafkaConnection("my_kafka", {
56
56
  bootstrapServers: "kafka.example.com:9092",
57
57
  sslCaPem: '{{ tb_secret("KAFKA_CA_CERT") }}',
58
58
  });
@@ -63,7 +63,7 @@ describe("Connection Generator", () => {
63
63
  });
64
64
 
65
65
  it("generates full Kafka connection with all options", () => {
66
- const conn = createKafkaConnection("my_kafka", {
66
+ const conn = defineKafkaConnection("my_kafka", {
67
67
  bootstrapServers: "kafka.example.com:9092",
68
68
  securityProtocol: "SASL_SSL",
69
69
  saslMechanism: "SCRAM-SHA-256",
@@ -85,7 +85,7 @@ describe("Connection Generator", () => {
85
85
  });
86
86
 
87
87
  it("supports PLAINTEXT security protocol", () => {
88
- const conn = createKafkaConnection("local_kafka", {
88
+ const conn = defineKafkaConnection("local_kafka", {
89
89
  bootstrapServers: "localhost:9092",
90
90
  securityProtocol: "PLAINTEXT",
91
91
  });
@@ -99,7 +99,7 @@ describe("Connection Generator", () => {
99
99
  const mechanisms = ["PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512", "OAUTHBEARER"] as const;
100
100
 
101
101
  mechanisms.forEach((mechanism) => {
102
- const conn = createKafkaConnection("my_kafka", {
102
+ const conn = defineKafkaConnection("my_kafka", {
103
103
  bootstrapServers: "kafka.example.com:9092",
104
104
  saslMechanism: mechanism,
105
105
  });
@@ -109,21 +109,52 @@ describe("Connection Generator", () => {
109
109
  expect(result.content).toContain(`KAFKA_SASL_MECHANISM ${mechanism}`);
110
110
  });
111
111
  });
112
+
113
+ it("generates basic S3 connection with IAM role auth", () => {
114
+ const conn = defineS3Connection("my_s3", {
115
+ region: "us-east-1",
116
+ arn: "arn:aws:iam::123456789012:role/tinybird-s3-access",
117
+ });
118
+
119
+ const result = generateConnection(conn);
120
+
121
+ expect(result.name).toBe("my_s3");
122
+ expect(result.content).toContain("TYPE s3");
123
+ expect(result.content).toContain("S3_REGION us-east-1");
124
+ expect(result.content).toContain(
125
+ "S3_ARN arn:aws:iam::123456789012:role/tinybird-s3-access"
126
+ );
127
+ });
128
+
129
+ it("generates S3 connection with access key auth", () => {
130
+ const conn = defineS3Connection("my_s3", {
131
+ region: "us-east-1",
132
+ accessKey: '{{ tb_secret("S3_ACCESS_KEY") }}',
133
+ secret: '{{ tb_secret("S3_SECRET") }}',
134
+ });
135
+
136
+ const result = generateConnection(conn);
137
+
138
+ expect(result.content).toContain("TYPE s3");
139
+ expect(result.content).toContain('S3_ACCESS_KEY {{ tb_secret("S3_ACCESS_KEY") }}');
140
+ expect(result.content).toContain('S3_SECRET {{ tb_secret("S3_SECRET") }}');
141
+ });
112
142
  });
113
143
 
114
144
  describe("generateAllConnections", () => {
115
145
  it("generates all connections", () => {
116
- const conn1 = createKafkaConnection("kafka1", {
146
+ const conn1 = defineKafkaConnection("kafka1", {
117
147
  bootstrapServers: "kafka1.example.com:9092",
118
148
  });
119
- const conn2 = createKafkaConnection("kafka2", {
120
- bootstrapServers: "kafka2.example.com:9092",
149
+ const conn2 = defineS3Connection("s3_logs", {
150
+ region: "us-east-1",
151
+ arn: "arn:aws:iam::123456789012:role/tinybird-s3-access",
121
152
  });
122
153
 
123
- const results = generateAllConnections({ kafka1: conn1, kafka2: conn2 });
154
+ const results = generateAllConnections({ kafka1: conn1, s3_logs: conn2 });
124
155
 
125
156
  expect(results).toHaveLength(2);
126
- expect(results.map((r) => r.name).sort()).toEqual(["kafka1", "kafka2"]);
157
+ expect(results.map((r) => r.name).sort()).toEqual(["kafka1", "s3_logs"]);
127
158
  });
128
159
 
129
160
  it("returns empty array for empty connections", () => {
@@ -4,6 +4,7 @@
4
4
  */
5
5
 
6
6
  import type { ConnectionDefinition, KafkaConnectionDefinition } from "../schema/connection.js";
7
+ import { isS3ConnectionDefinition, type S3ConnectionDefinition } from "../schema/connection.js";
7
8
 
8
9
  /**
9
10
  * Generated connection content
@@ -48,6 +49,31 @@ function generateKafkaConnection(connection: KafkaConnectionDefinition): string
48
49
  return parts.join("\n");
49
50
  }
50
51
 
52
+ /**
53
+ * Generate an S3 connection content
54
+ */
55
+ function generateS3Connection(connection: S3ConnectionDefinition): string {
56
+ const parts: string[] = [];
57
+ const options = connection.options;
58
+
59
+ parts.push("TYPE s3");
60
+ parts.push(`S3_REGION ${options.region}`);
61
+
62
+ if (options.arn) {
63
+ parts.push(`S3_ARN ${options.arn}`);
64
+ }
65
+
66
+ if (options.accessKey) {
67
+ parts.push(`S3_ACCESS_KEY ${options.accessKey}`);
68
+ }
69
+
70
+ if (options.secret) {
71
+ parts.push(`S3_SECRET ${options.secret}`);
72
+ }
73
+
74
+ return parts.join("\n");
75
+ }
76
+
51
77
  /**
52
78
  * Generate a .connection file content from a ConnectionDefinition
53
79
  *
@@ -56,7 +82,7 @@ function generateKafkaConnection(connection: KafkaConnectionDefinition): string
56
82
  *
57
83
  * @example
58
84
  * ```ts
59
- * const myKafka = createKafkaConnection('my_kafka', {
85
+ * const myKafka = defineKafkaConnection('my_kafka', {
60
86
  * bootstrapServers: 'kafka.example.com:9092',
61
87
  * securityProtocol: 'SASL_SSL',
62
88
  * saslMechanism: 'PLAIN',
@@ -81,8 +107,10 @@ export function generateConnection(
81
107
 
82
108
  if (connection._connectionType === "kafka") {
83
109
  content = generateKafkaConnection(connection as KafkaConnectionDefinition);
110
+ } else if (isS3ConnectionDefinition(connection)) {
111
+ content = generateS3Connection(connection);
84
112
  } else {
85
- throw new Error(`Unsupported connection type: ${connection._connectionType}`);
113
+ throw new Error("Unsupported connection type.");
86
114
  }
87
115
 
88
116
  return {
@@ -1,7 +1,7 @@
1
1
  import { describe, it, expect } from 'vitest';
2
2
  import { generateDatasource, generateAllDatasources } from './datasource.js';
3
3
  import { defineDatasource } from '../schema/datasource.js';
4
- import { createKafkaConnection } from '../schema/connection.js';
4
+ import { defineKafkaConnection, defineS3Connection } from '../schema/connection.js';
5
5
  import { defineToken } from '../schema/token.js';
6
6
  import { t } from '../schema/types.js';
7
7
  import { engine } from '../schema/engines.js';
@@ -312,7 +312,7 @@ describe('Datasource Generator', () => {
312
312
 
313
313
  describe('Kafka configuration', () => {
314
314
  it('includes Kafka connection name and topic', () => {
315
- const kafkaConn = createKafkaConnection('my_kafka', {
315
+ const kafkaConn = defineKafkaConnection('my_kafka', {
316
316
  bootstrapServers: 'kafka.example.com:9092',
317
317
  });
318
318
 
@@ -335,7 +335,7 @@ describe('Datasource Generator', () => {
335
335
  });
336
336
 
337
337
  it('includes Kafka group ID when provided', () => {
338
- const kafkaConn = createKafkaConnection('my_kafka', {
338
+ const kafkaConn = defineKafkaConnection('my_kafka', {
339
339
  bootstrapServers: 'kafka.example.com:9092',
340
340
  });
341
341
 
@@ -358,7 +358,7 @@ describe('Datasource Generator', () => {
358
358
  });
359
359
 
360
360
  it('includes auto offset reset when provided', () => {
361
- const kafkaConn = createKafkaConnection('my_kafka', {
361
+ const kafkaConn = defineKafkaConnection('my_kafka', {
362
362
  bootstrapServers: 'kafka.example.com:9092',
363
363
  });
364
364
 
@@ -381,7 +381,7 @@ describe('Datasource Generator', () => {
381
381
  });
382
382
 
383
383
  it('generates complete Kafka datasource with all options', () => {
384
- const kafkaConn = createKafkaConnection('my_kafka', {
384
+ const kafkaConn = defineKafkaConnection('my_kafka', {
385
385
  bootstrapServers: 'kafka.example.com:9092',
386
386
  securityProtocol: 'SASL_SSL',
387
387
  saslMechanism: 'PLAIN',
@@ -417,6 +417,58 @@ describe('Datasource Generator', () => {
417
417
  });
418
418
  });
419
419
 
420
+ describe('S3 configuration', () => {
421
+ it('includes S3 connection name and bucket uri', () => {
422
+ const s3Conn = defineS3Connection('my_s3', {
423
+ region: 'us-east-1',
424
+ arn: 'arn:aws:iam::123456789012:role/tinybird-s3-access',
425
+ });
426
+
427
+ const ds = defineDatasource('s3_events', {
428
+ schema: {
429
+ timestamp: t.dateTime(),
430
+ event: t.string(),
431
+ },
432
+ engine: engine.mergeTree({ sortingKey: ['timestamp'] }),
433
+ s3: {
434
+ connection: s3Conn,
435
+ bucketUri: 's3://my-bucket/events/*.csv',
436
+ },
437
+ });
438
+
439
+ const result = generateDatasource(ds);
440
+
441
+ expect(result.content).toContain('IMPORT_CONNECTION_NAME my_s3');
442
+ expect(result.content).toContain('IMPORT_BUCKET_URI s3://my-bucket/events/*.csv');
443
+ });
444
+
445
+ it('includes optional S3 schedule and from timestamp', () => {
446
+ const s3Conn = defineS3Connection('my_s3', {
447
+ region: 'us-east-1',
448
+ arn: 'arn:aws:iam::123456789012:role/tinybird-s3-access',
449
+ });
450
+
451
+ const ds = defineDatasource('s3_events', {
452
+ schema: {
453
+ timestamp: t.dateTime(),
454
+ event: t.string(),
455
+ },
456
+ engine: engine.mergeTree({ sortingKey: ['timestamp'] }),
457
+ s3: {
458
+ connection: s3Conn,
459
+ bucketUri: 's3://my-bucket/events/*.csv',
460
+ schedule: '@auto',
461
+ fromTimestamp: '2024-01-01T00:00:00Z',
462
+ },
463
+ });
464
+
465
+ const result = generateDatasource(ds);
466
+
467
+ expect(result.content).toContain('IMPORT_SCHEDULE @auto');
468
+ expect(result.content).toContain('IMPORT_FROM_TIMESTAMP 2024-01-01T00:00:00Z');
469
+ });
470
+ });
471
+
420
472
  describe('Token generation', () => {
421
473
  it('generates TOKEN lines with inline config', () => {
422
474
  const ds = defineDatasource('test_ds', {
@@ -3,7 +3,14 @@
3
3
  * Converts DatasourceDefinition to native .datasource file format
4
4
  */
5
5
 
6
- import type { DatasourceDefinition, SchemaDefinition, ColumnDefinition, KafkaConfig, TokenConfig } from "../schema/datasource.js";
6
+ import type {
7
+ DatasourceDefinition,
8
+ SchemaDefinition,
9
+ ColumnDefinition,
10
+ KafkaConfig,
11
+ S3Config,
12
+ TokenConfig,
13
+ } from "../schema/datasource.js";
7
14
  import type { AnyTypeValidator, TypeModifiers } from "../schema/types.js";
8
15
  import { getColumnType, getColumnJsonPath } from "../schema/datasource.js";
9
16
  import { getEngineClause, type EngineConfig } from "../schema/engines.js";
@@ -163,6 +170,26 @@ function generateKafkaConfig(kafka: KafkaConfig): string {
163
170
  return parts.join("\n");
164
171
  }
165
172
 
173
+ /**
174
+ * Generate S3 import configuration lines
175
+ */
176
+ function generateS3Config(s3: S3Config): string {
177
+ const parts: string[] = [];
178
+
179
+ parts.push(`IMPORT_CONNECTION_NAME ${s3.connection._name}`);
180
+ parts.push(`IMPORT_BUCKET_URI ${s3.bucketUri}`);
181
+
182
+ if (s3.schedule) {
183
+ parts.push(`IMPORT_SCHEDULE ${s3.schedule}`);
184
+ }
185
+
186
+ if (s3.fromTimestamp) {
187
+ parts.push(`IMPORT_FROM_TIMESTAMP ${s3.fromTimestamp}`);
188
+ }
189
+
190
+ return parts.join("\n");
191
+ }
192
+
166
193
  /**
167
194
  * Generate forward query section
168
195
  */
@@ -261,6 +288,10 @@ export function generateDatasource(
261
288
  ): GeneratedDatasource {
262
289
  const parts: string[] = [];
263
290
 
291
+ if (datasource.options.kafka && datasource.options.s3) {
292
+ throw new Error("Datasource cannot define both `kafka` and `s3` ingestion options.");
293
+ }
294
+
264
295
  // Add description if present
265
296
  if (datasource.options.description) {
266
297
  parts.push(`DESCRIPTION >\n ${datasource.options.description}`);
@@ -283,6 +314,12 @@ export function generateDatasource(
283
314
  parts.push(generateKafkaConfig(datasource.options.kafka));
284
315
  }
285
316
 
317
+ // Add S3 configuration if present
318
+ if (datasource.options.s3) {
319
+ parts.push("");
320
+ parts.push(generateS3Config(datasource.options.s3));
321
+ }
322
+
286
323
  // Add forward query if present
287
324
  const forwardQuery = generateForwardQuery(datasource.options.forwardQuery);
288
325
  if (forwardQuery) {
package/src/index.ts CHANGED
@@ -109,16 +109,27 @@ export type {
109
109
  DatasourceTokenReference,
110
110
  ExtractSchema,
111
111
  KafkaConfig,
112
+ S3Config,
112
113
  } from "./schema/datasource.js";
113
114
 
114
115
  // ============ Connection ============
115
- export { createKafkaConnection, isConnectionDefinition, isKafkaConnectionDefinition, getConnectionType } from "./schema/connection.js";
116
+ export {
117
+ defineKafkaConnection,
118
+ createKafkaConnection,
119
+ defineS3Connection,
120
+ isConnectionDefinition,
121
+ isKafkaConnectionDefinition,
122
+ isS3ConnectionDefinition,
123
+ getConnectionType,
124
+ } from "./schema/connection.js";
116
125
  export type {
117
126
  ConnectionDefinition,
118
127
  KafkaConnectionDefinition,
119
128
  KafkaConnectionOptions,
120
129
  KafkaSecurityProtocol,
121
130
  KafkaSaslMechanism,
131
+ S3ConnectionDefinition,
132
+ S3ConnectionOptions,
122
133
  } from "./schema/connection.js";
123
134
 
124
135
  // ============ Token ============
@@ -6,6 +6,7 @@ import type {
6
6
  KafkaConnectionModel,
7
7
  ParsedResource,
8
8
  PipeModel,
9
+ S3ConnectionModel,
9
10
  } from "./types.js";
10
11
 
11
12
  function escapeString(value: string): string {
@@ -243,6 +244,20 @@ function emitDatasource(ds: DatasourceModel): string {
243
244
  lines.push(" },");
244
245
  }
245
246
 
247
+ if (ds.s3) {
248
+ const connectionVar = toCamelCase(ds.s3.connectionName);
249
+ lines.push(" s3: {");
250
+ lines.push(` connection: ${connectionVar},`);
251
+ lines.push(` bucketUri: ${escapeString(ds.s3.bucketUri)},`);
252
+ if (ds.s3.schedule) {
253
+ lines.push(` schedule: ${escapeString(ds.s3.schedule)},`);
254
+ }
255
+ if (ds.s3.fromTimestamp) {
256
+ lines.push(` fromTimestamp: ${escapeString(ds.s3.fromTimestamp)},`);
257
+ }
258
+ lines.push(" },");
259
+ }
260
+
246
261
  if (ds.forwardQuery) {
247
262
  lines.push(" forwardQuery: `");
248
263
  lines.push(ds.forwardQuery.replace(/`/g, "\\`").replace(/\${/g, "\\${"));
@@ -270,28 +285,48 @@ function emitDatasource(ds: DatasourceModel): string {
270
285
  return lines.join("\n");
271
286
  }
272
287
 
273
- function emitConnection(connection: KafkaConnectionModel): string {
288
+ function emitConnection(connection: KafkaConnectionModel | S3ConnectionModel): string {
274
289
  const variableName = toCamelCase(connection.name);
275
290
  const lines: string[] = [];
291
+
292
+ if (connection.connectionType === "kafka") {
293
+ lines.push(
294
+ `export const ${variableName} = defineKafkaConnection(${escapeString(connection.name)}, {`
295
+ );
296
+ lines.push(` bootstrapServers: ${escapeString(connection.bootstrapServers)},`);
297
+ if (connection.securityProtocol) {
298
+ lines.push(` securityProtocol: ${escapeString(connection.securityProtocol)},`);
299
+ }
300
+ if (connection.saslMechanism) {
301
+ lines.push(` saslMechanism: ${escapeString(connection.saslMechanism)},`);
302
+ }
303
+ if (connection.key) {
304
+ lines.push(` key: ${escapeString(connection.key)},`);
305
+ }
306
+ if (connection.secret) {
307
+ lines.push(` secret: ${escapeString(connection.secret)},`);
308
+ }
309
+ if (connection.sslCaPem) {
310
+ lines.push(` sslCaPem: ${escapeString(connection.sslCaPem)},`);
311
+ }
312
+ lines.push("});");
313
+ lines.push("");
314
+ return lines.join("\n");
315
+ }
316
+
276
317
  lines.push(
277
- `export const ${variableName} = createKafkaConnection(${escapeString(connection.name)}, {`
318
+ `export const ${variableName} = defineS3Connection(${escapeString(connection.name)}, {`
278
319
  );
279
- lines.push(` bootstrapServers: ${escapeString(connection.bootstrapServers)},`);
280
- if (connection.securityProtocol) {
281
- lines.push(` securityProtocol: ${escapeString(connection.securityProtocol)},`);
282
- }
283
- if (connection.saslMechanism) {
284
- lines.push(` saslMechanism: ${escapeString(connection.saslMechanism)},`);
320
+ lines.push(` region: ${escapeString(connection.region)},`);
321
+ if (connection.arn) {
322
+ lines.push(` arn: ${escapeString(connection.arn)},`);
285
323
  }
286
- if (connection.key) {
287
- lines.push(` key: ${escapeString(connection.key)},`);
324
+ if (connection.accessKey) {
325
+ lines.push(` accessKey: ${escapeString(connection.accessKey)},`);
288
326
  }
289
327
  if (connection.secret) {
290
328
  lines.push(` secret: ${escapeString(connection.secret)},`);
291
329
  }
292
- if (connection.sslCaPem) {
293
- lines.push(` sslCaPem: ${escapeString(connection.sslCaPem)},`);
294
- }
295
330
  lines.push("});");
296
331
  lines.push("");
297
332
  return lines.join("\n");
@@ -398,7 +433,8 @@ function emitPipe(pipe: PipeModel): string {
398
433
 
399
434
  export function emitMigrationFileContent(resources: ParsedResource[]): string {
400
435
  const connections = resources.filter(
401
- (resource): resource is KafkaConnectionModel => resource.kind === "connection"
436
+ (resource): resource is KafkaConnectionModel | S3ConnectionModel =>
437
+ resource.kind === "connection"
402
438
  );
403
439
  const datasources = resources.filter(
404
440
  (resource): resource is DatasourceModel => resource.kind === "datasource"
@@ -412,7 +448,21 @@ export function emitMigrationFileContent(resources: ParsedResource[]): string {
412
448
  );
413
449
  const needsParams = pipes.some((pipe) => pipe.params.length > 0);
414
450
 
415
- const imports = new Set<string>(["createKafkaConnection", "defineDatasource", "definePipe", "defineMaterializedView", "defineCopyPipe", "node", "t", "engine"]);
451
+ const imports = new Set<string>([
452
+ "defineDatasource",
453
+ "definePipe",
454
+ "defineMaterializedView",
455
+ "defineCopyPipe",
456
+ "node",
457
+ "t",
458
+ "engine",
459
+ ]);
460
+ if (connections.some((connection) => connection.connectionType === "kafka")) {
461
+ imports.add("defineKafkaConnection");
462
+ }
463
+ if (connections.some((connection) => connection.connectionType === "s3")) {
464
+ imports.add("defineS3Connection");
465
+ }
416
466
  if (needsColumn) {
417
467
  imports.add("column");
418
468
  }
@@ -420,13 +470,27 @@ export function emitMigrationFileContent(resources: ParsedResource[]): string {
420
470
  imports.add("p");
421
471
  }
422
472
 
473
+ const orderedImports = [
474
+ "defineKafkaConnection",
475
+ "defineS3Connection",
476
+ "defineDatasource",
477
+ "definePipe",
478
+ "defineMaterializedView",
479
+ "defineCopyPipe",
480
+ "node",
481
+ "t",
482
+ "engine",
483
+ "column",
484
+ "p",
485
+ ].filter((name) => imports.has(name));
486
+
423
487
  const lines: string[] = [];
424
488
  lines.push("/**");
425
489
  lines.push(" * Generated by tinybird migrate.");
426
490
  lines.push(" * Review endpoint output schemas and any defaults before production use.");
427
491
  lines.push(" */");
428
492
  lines.push("");
429
- lines.push(`import { ${Array.from(imports).join(", ")} } from "@tinybirdco/sdk";`);
493
+ lines.push(`import { ${orderedImports.join(", ")} } from "@tinybirdco/sdk";`);
430
494
  lines.push("");
431
495
 
432
496
  if (connections.length > 0) {