@tinybirdco/sdk 0.0.47 → 0.0.49

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. package/README.md +53 -3
  2. package/dist/cli/commands/migrate.d.ts.map +1 -1
  3. package/dist/cli/commands/migrate.js +32 -0
  4. package/dist/cli/commands/migrate.js.map +1 -1
  5. package/dist/cli/commands/migrate.test.js +585 -8
  6. package/dist/cli/commands/migrate.test.js.map +1 -1
  7. package/dist/generator/connection.d.ts.map +1 -1
  8. package/dist/generator/connection.js +3 -0
  9. package/dist/generator/connection.js.map +1 -1
  10. package/dist/generator/connection.test.js +8 -0
  11. package/dist/generator/connection.test.js.map +1 -1
  12. package/dist/generator/datasource.d.ts.map +1 -1
  13. package/dist/generator/datasource.js +3 -0
  14. package/dist/generator/datasource.js.map +1 -1
  15. package/dist/generator/datasource.test.js +50 -0
  16. package/dist/generator/datasource.test.js.map +1 -1
  17. package/dist/generator/pipe.d.ts.map +1 -1
  18. package/dist/generator/pipe.js +31 -1
  19. package/dist/generator/pipe.js.map +1 -1
  20. package/dist/generator/pipe.test.js +50 -1
  21. package/dist/generator/pipe.test.js.map +1 -1
  22. package/dist/index.d.ts +3 -2
  23. package/dist/index.d.ts.map +1 -1
  24. package/dist/index.js +3 -1
  25. package/dist/index.js.map +1 -1
  26. package/dist/index.test.js +3 -0
  27. package/dist/index.test.js.map +1 -1
  28. package/dist/migrate/emit-ts.d.ts.map +1 -1
  29. package/dist/migrate/emit-ts.js +159 -41
  30. package/dist/migrate/emit-ts.js.map +1 -1
  31. package/dist/migrate/parse-connection.d.ts.map +1 -1
  32. package/dist/migrate/parse-connection.js +13 -2
  33. package/dist/migrate/parse-connection.js.map +1 -1
  34. package/dist/migrate/parse-datasource.d.ts.map +1 -1
  35. package/dist/migrate/parse-datasource.js +115 -52
  36. package/dist/migrate/parse-datasource.js.map +1 -1
  37. package/dist/migrate/parse-pipe.d.ts.map +1 -1
  38. package/dist/migrate/parse-pipe.js +257 -46
  39. package/dist/migrate/parse-pipe.js.map +1 -1
  40. package/dist/migrate/parser-utils.d.ts +5 -0
  41. package/dist/migrate/parser-utils.d.ts.map +1 -1
  42. package/dist/migrate/parser-utils.js +22 -0
  43. package/dist/migrate/parser-utils.js.map +1 -1
  44. package/dist/migrate/types.d.ts +25 -3
  45. package/dist/migrate/types.d.ts.map +1 -1
  46. package/dist/schema/connection.d.ts +2 -0
  47. package/dist/schema/connection.d.ts.map +1 -1
  48. package/dist/schema/connection.js.map +1 -1
  49. package/dist/schema/datasource.d.ts +3 -1
  50. package/dist/schema/datasource.d.ts.map +1 -1
  51. package/dist/schema/datasource.js +8 -1
  52. package/dist/schema/datasource.js.map +1 -1
  53. package/dist/schema/datasource.test.js +13 -0
  54. package/dist/schema/datasource.test.js.map +1 -1
  55. package/dist/schema/engines.d.ts.map +1 -1
  56. package/dist/schema/engines.js +3 -0
  57. package/dist/schema/engines.js.map +1 -1
  58. package/dist/schema/engines.test.js +16 -0
  59. package/dist/schema/engines.test.js.map +1 -1
  60. package/dist/schema/pipe.d.ts +90 -3
  61. package/dist/schema/pipe.d.ts.map +1 -1
  62. package/dist/schema/pipe.js +84 -0
  63. package/dist/schema/pipe.js.map +1 -1
  64. package/dist/schema/pipe.test.js +70 -1
  65. package/dist/schema/pipe.test.js.map +1 -1
  66. package/dist/schema/secret.d.ts +6 -0
  67. package/dist/schema/secret.d.ts.map +1 -0
  68. package/dist/schema/secret.js +14 -0
  69. package/dist/schema/secret.js.map +1 -0
  70. package/dist/schema/secret.test.d.ts +2 -0
  71. package/dist/schema/secret.test.d.ts.map +1 -0
  72. package/dist/schema/secret.test.js +14 -0
  73. package/dist/schema/secret.test.js.map +1 -0
  74. package/dist/schema/types.d.ts +5 -0
  75. package/dist/schema/types.d.ts.map +1 -1
  76. package/dist/schema/types.js +6 -0
  77. package/dist/schema/types.js.map +1 -1
  78. package/dist/schema/types.test.js +12 -0
  79. package/dist/schema/types.test.js.map +1 -1
  80. package/package.json +1 -1
  81. package/src/cli/commands/migrate.test.ts +859 -8
  82. package/src/cli/commands/migrate.ts +35 -0
  83. package/src/generator/connection.test.ts +13 -0
  84. package/src/generator/connection.ts +4 -0
  85. package/src/generator/datasource.test.ts +60 -0
  86. package/src/generator/datasource.ts +3 -0
  87. package/src/generator/pipe.test.ts +56 -1
  88. package/src/generator/pipe.ts +41 -1
  89. package/src/index.test.ts +4 -0
  90. package/src/index.ts +12 -0
  91. package/src/migrate/emit-ts.ts +161 -48
  92. package/src/migrate/parse-connection.ts +15 -2
  93. package/src/migrate/parse-datasource.ts +134 -71
  94. package/src/migrate/parse-pipe.ts +364 -69
  95. package/src/migrate/parser-utils.ts +36 -1
  96. package/src/migrate/types.ts +28 -3
  97. package/src/schema/connection.ts +2 -0
  98. package/src/schema/datasource.test.ts +17 -0
  99. package/src/schema/datasource.ts +13 -2
  100. package/src/schema/engines.test.ts +18 -0
  101. package/src/schema/engines.ts +3 -0
  102. package/src/schema/pipe.test.ts +89 -0
  103. package/src/schema/pipe.ts +188 -4
  104. package/src/schema/secret.test.ts +19 -0
  105. package/src/schema/secret.ts +16 -0
  106. package/src/schema/types.test.ts +14 -0
  107. package/src/schema/types.ts +10 -0
@@ -112,6 +112,9 @@ export async function runMigrate(
112
112
  const migrated: ParsedResource[] = [];
113
113
  const migratedConnectionNames = new Set<string>();
114
114
  const migratedDatasourceNames = new Set<string>();
115
+ const parsedConnectionTypeByName = new Map(
116
+ parsedConnections.map((connection) => [connection.name, connection.connectionType] as const)
117
+ );
115
118
 
116
119
  for (const connection of parsedConnections) {
117
120
  try {
@@ -160,6 +163,38 @@ export async function runMigrate(
160
163
  }
161
164
 
162
165
  for (const pipe of parsedPipes) {
166
+ if (pipe.type === "sink") {
167
+ const sinkConnectionName = pipe.sink?.connectionName;
168
+ if (!sinkConnectionName || !migratedConnectionNames.has(sinkConnectionName)) {
169
+ errors.push({
170
+ filePath: pipe.filePath,
171
+ resourceName: pipe.name,
172
+ resourceKind: pipe.kind,
173
+ message: `Sink pipe references missing/unmigrated connection "${sinkConnectionName ?? "(none)"}".`,
174
+ });
175
+ continue;
176
+ }
177
+ const sinkConnectionType = parsedConnectionTypeByName.get(sinkConnectionName);
178
+ if (!sinkConnectionType) {
179
+ errors.push({
180
+ filePath: pipe.filePath,
181
+ resourceName: pipe.name,
182
+ resourceKind: pipe.kind,
183
+ message: `Sink pipe connection "${sinkConnectionName}" could not be resolved.`,
184
+ });
185
+ continue;
186
+ }
187
+ if (sinkConnectionType !== pipe.sink?.service) {
188
+ errors.push({
189
+ filePath: pipe.filePath,
190
+ resourceName: pipe.name,
191
+ resourceKind: pipe.kind,
192
+ message: `Sink pipe service "${pipe.sink?.service}" is incompatible with connection "${sinkConnectionName}" type "${sinkConnectionType}".`,
193
+ });
194
+ continue;
195
+ }
196
+ }
197
+
163
198
  if (
164
199
  pipe.type === "materialized" &&
165
200
  (!pipe.materializedDatasource ||
@@ -51,6 +51,19 @@ describe("Connection Generator", () => {
51
51
  expect(result.content).toContain('KAFKA_SECRET {{ tb_secret("KAFKA_SECRET") }}');
52
52
  });
53
53
 
54
+ it("includes schema registry URL when provided", () => {
55
+ const conn = defineKafkaConnection("my_kafka", {
56
+ bootstrapServers: "kafka.example.com:9092",
57
+ schemaRegistryUrl: "https://registry-user:registry-pass@registry.example.com",
58
+ });
59
+
60
+ const result = generateConnection(conn);
61
+
62
+ expect(result.content).toContain(
63
+ "KAFKA_SCHEMA_REGISTRY_URL https://registry-user:registry-pass@registry.example.com"
64
+ );
65
+ });
66
+
54
67
  it("includes SSL CA PEM when provided", () => {
55
68
  const conn = defineKafkaConnection("my_kafka", {
56
69
  bootstrapServers: "kafka.example.com:9092",
@@ -42,6 +42,10 @@ function generateKafkaConnection(connection: KafkaConnectionDefinition): string
42
42
  parts.push(`KAFKA_SECRET ${options.secret}`);
43
43
  }
44
44
 
45
+ if (options.schemaRegistryUrl) {
46
+ parts.push(`KAFKA_SCHEMA_REGISTRY_URL ${options.schemaRegistryUrl}`);
47
+ }
48
+
45
49
  if (options.sslCaPem) {
46
50
  parts.push(`KAFKA_SSL_CA_PEM ${options.sslCaPem}`);
47
51
  }
@@ -263,6 +263,43 @@ describe('Datasource Generator', () => {
263
263
  expect(schemaLines[1]).toContain(',');
264
264
  expect(schemaLines[2]).not.toContain(',');
265
265
  });
266
+
267
+ it('autogenerates jsonPath when jsonPaths is enabled and no explicit path is set', () => {
268
+ const ds = defineDatasource('test_ds', {
269
+ schema: {
270
+ event_id: t.string().nullable(),
271
+ },
272
+ });
273
+
274
+ const result = generateDatasource(ds);
275
+ expect(result.content).toContain('event_id Nullable(String) `json:$.event_id`');
276
+ });
277
+
278
+ it('uses explicit jsonPath from validator modifier when jsonPaths is enabled', () => {
279
+ const ds = defineDatasource('test_ds', {
280
+ schema: {
281
+ event_id: t.string().nullable().jsonPath('$.explicit_path'),
282
+ },
283
+ });
284
+
285
+ const result = generateDatasource(ds);
286
+ expect(result.content).toContain('event_id Nullable(String) `json:$.explicit_path`');
287
+ expect(result.content).not.toContain('`json:$.event_id`');
288
+ });
289
+
290
+ it('omits json paths when jsonPaths is false even if column has explicit jsonPath modifier', () => {
291
+ const ds = defineDatasource('test_ds', {
292
+ jsonPaths: false,
293
+ schema: {
294
+ event_id: t.string().nullable().jsonPath('$.explicit_path'),
295
+ },
296
+ });
297
+
298
+ const result = generateDatasource(ds);
299
+ expect(result.content).toContain('event_id Nullable(String)');
300
+ expect(result.content).not.toContain('`json:$.explicit_path`');
301
+ expect(result.content).not.toContain('`json:$.event_id`');
302
+ });
266
303
  });
267
304
 
268
305
  describe('generateAllDatasources', () => {
@@ -380,6 +417,29 @@ describe('Datasource Generator', () => {
380
417
  expect(result.content).toContain('KAFKA_AUTO_OFFSET_RESET earliest');
381
418
  });
382
419
 
420
+ it('includes store raw value when provided', () => {
421
+ const kafkaConn = defineKafkaConnection('my_kafka', {
422
+ bootstrapServers: 'kafka.example.com:9092',
423
+ });
424
+
425
+ const ds = defineDatasource('kafka_events', {
426
+ schema: {
427
+ timestamp: t.dateTime(),
428
+ event: t.string(),
429
+ },
430
+ engine: engine.mergeTree({ sortingKey: ['timestamp'] }),
431
+ kafka: {
432
+ connection: kafkaConn,
433
+ topic: 'events',
434
+ storeRawValue: true,
435
+ },
436
+ });
437
+
438
+ const result = generateDatasource(ds);
439
+
440
+ expect(result.content).toContain('KAFKA_STORE_RAW_VALUE True');
441
+ });
442
+
383
443
  it('generates complete Kafka datasource with all options', () => {
384
444
  const kafkaConn = defineKafkaConnection('my_kafka', {
385
445
  bootstrapServers: 'kafka.example.com:9092',
@@ -166,6 +166,9 @@ function generateKafkaConfig(kafka: KafkaConfig): string {
166
166
  if (kafka.autoOffsetReset) {
167
167
  parts.push(`KAFKA_AUTO_OFFSET_RESET ${kafka.autoOffsetReset}`);
168
168
  }
169
+ if (kafka.storeRawValue !== undefined) {
170
+ parts.push(`KAFKA_STORE_RAW_VALUE ${kafka.storeRawValue ? "True" : "False"}`);
171
+ }
169
172
 
170
173
  return parts.join("\n");
171
174
  }
@@ -1,7 +1,8 @@
1
1
  import { describe, it, expect } from 'vitest';
2
2
  import { generatePipe, generateAllPipes } from './pipe.js';
3
- import { definePipe, defineMaterializedView, node } from '../schema/pipe.js';
3
+ import { definePipe, defineMaterializedView, defineSinkPipe, node } from '../schema/pipe.js';
4
4
  import { defineDatasource } from '../schema/datasource.js';
5
+ import { defineKafkaConnection, defineS3Connection } from '../schema/connection.js';
5
6
  import { defineToken } from '../schema/token.js';
6
7
  import { t } from '../schema/types.js';
7
8
  import { p } from '../schema/params.js';
@@ -472,6 +473,60 @@ GROUP BY day, country
472
473
  });
473
474
  });
474
475
 
476
+ describe('Sink configuration', () => {
477
+ it('generates Kafka sink directives', () => {
478
+ const kafka = defineKafkaConnection('events_kafka', {
479
+ bootstrapServers: 'localhost:9092',
480
+ });
481
+
482
+ const pipe = defineSinkPipe('events_sink', {
483
+ nodes: [node({ name: 'publish', sql: 'SELECT * FROM events' })],
484
+ sink: {
485
+ connection: kafka,
486
+ topic: 'events_out',
487
+ schedule: '@on-demand',
488
+ },
489
+ });
490
+
491
+ const result = generatePipe(pipe);
492
+ expect(result.content).toContain('TYPE sink');
493
+ expect(result.content).toContain('EXPORT_CONNECTION_NAME events_kafka');
494
+ expect(result.content).toContain('EXPORT_KAFKA_TOPIC events_out');
495
+ expect(result.content).toContain('EXPORT_SCHEDULE @on-demand');
496
+ expect(result.content).not.toContain('EXPORT_STRATEGY');
497
+ });
498
+
499
+ it('generates S3 sink directives', () => {
500
+ const s3 = defineS3Connection('exports_s3', {
501
+ region: 'us-east-1',
502
+ arn: 'arn:aws:iam::123456789012:role/tinybird-s3-access',
503
+ });
504
+
505
+ const pipe = defineSinkPipe('events_s3_sink', {
506
+ nodes: [node({ name: 'export', sql: 'SELECT * FROM events' })],
507
+ sink: {
508
+ connection: s3,
509
+ bucketUri: 's3://bucket/events/',
510
+ fileTemplate: 'events_{date}',
511
+ format: 'csv',
512
+ schedule: '@once',
513
+ compression: 'gzip',
514
+ strategy: 'replace',
515
+ },
516
+ });
517
+
518
+ const result = generatePipe(pipe);
519
+ expect(result.content).toContain('TYPE sink');
520
+ expect(result.content).toContain('EXPORT_CONNECTION_NAME exports_s3');
521
+ expect(result.content).toContain('EXPORT_BUCKET_URI s3://bucket/events/');
522
+ expect(result.content).toContain('EXPORT_FILE_TEMPLATE events_{date}');
523
+ expect(result.content).toContain('EXPORT_FORMAT csv');
524
+ expect(result.content).toContain('EXPORT_SCHEDULE @once');
525
+ expect(result.content).toContain('EXPORT_STRATEGY replace');
526
+ expect(result.content).toContain('EXPORT_COMPRESSION gzip');
527
+ });
528
+ });
529
+
475
530
  describe('Token generation', () => {
476
531
  it('generates TOKEN lines with inline config', () => {
477
532
  const pipe = definePipe('test_pipe', {
@@ -9,9 +9,15 @@ import type {
9
9
  EndpointConfig,
10
10
  MaterializedConfig,
11
11
  CopyConfig,
12
+ SinkConfig,
12
13
  PipeTokenConfig,
13
14
  } from "../schema/pipe.js";
14
- import { getEndpointConfig, getMaterializedConfig, getCopyConfig } from "../schema/pipe.js";
15
+ import {
16
+ getEndpointConfig,
17
+ getMaterializedConfig,
18
+ getCopyConfig,
19
+ getSinkConfig,
20
+ } from "../schema/pipe.js";
15
21
 
16
22
  /**
17
23
  * Generated pipe content
@@ -114,6 +120,33 @@ function generateCopy(config: CopyConfig): string {
114
120
  return parts.join("\n");
115
121
  }
116
122
 
123
+ /**
124
+ * Generate the TYPE sink section
125
+ */
126
+ function generateSink(config: SinkConfig): string {
127
+ const parts: string[] = ["TYPE sink"];
128
+
129
+ parts.push(`EXPORT_CONNECTION_NAME ${config.connection._name}`);
130
+
131
+ if ("topic" in config) {
132
+ parts.push(`EXPORT_KAFKA_TOPIC ${config.topic}`);
133
+ parts.push(`EXPORT_SCHEDULE ${config.schedule}`);
134
+ } else {
135
+ parts.push(`EXPORT_BUCKET_URI ${config.bucketUri}`);
136
+ parts.push(`EXPORT_FILE_TEMPLATE ${config.fileTemplate}`);
137
+ parts.push(`EXPORT_SCHEDULE ${config.schedule}`);
138
+ parts.push(`EXPORT_FORMAT ${config.format}`);
139
+ if (config.strategy) {
140
+ parts.push(`EXPORT_STRATEGY ${config.strategy}`);
141
+ }
142
+ if (config.compression) {
143
+ parts.push(`EXPORT_COMPRESSION ${config.compression}`);
144
+ }
145
+ }
146
+
147
+ return parts.join("\n");
148
+ }
149
+
117
150
  /**
118
151
  * Generate TOKEN lines for a pipe
119
152
  */
@@ -220,6 +253,13 @@ export function generatePipe(pipe: PipeDefinition): GeneratedPipe {
220
253
  parts.push(generateCopy(copyConfig));
221
254
  }
222
255
 
256
+ // Add sink configuration if this is a sink pipe
257
+ const sinkConfig = getSinkConfig(pipe);
258
+ if (sinkConfig) {
259
+ parts.push("");
260
+ parts.push(generateSink(sinkConfig));
261
+ }
262
+
223
263
  // Add tokens if present
224
264
  const tokenLines = generateTokens(pipe.options.tokens);
225
265
  if (tokenLines.length > 0) {
package/src/index.test.ts CHANGED
@@ -10,4 +10,8 @@ describe("root public exports", () => {
10
10
  expect(typeof sdk.defineProject).toBe("function");
11
11
  expect(typeof sdk.Tinybird).toBe("function");
12
12
  });
13
+
14
+ it("exports secret utility", () => {
15
+ expect(typeof sdk.secret).toBe("function");
16
+ });
13
17
  });
package/src/index.ts CHANGED
@@ -97,6 +97,9 @@ export type {
97
97
  VersionedCollapsingMergeTreeConfig,
98
98
  } from "./schema/engines.js";
99
99
 
100
+ // ============ Utilities ============
101
+ export { secret } from "./schema/secret.js";
102
+
100
103
  // ============ Datasource ============
101
104
  export { defineDatasource, isDatasourceDefinition, column, getColumnType, getColumnJsonPath, getColumnNames } from "./schema/datasource.js";
102
105
  export type {
@@ -146,14 +149,17 @@ export {
146
149
  defineEndpoint,
147
150
  defineMaterializedView,
148
151
  defineCopyPipe,
152
+ defineSinkPipe,
149
153
  node,
150
154
  isPipeDefinition,
151
155
  isNodeDefinition,
152
156
  getEndpointConfig,
153
157
  getMaterializedConfig,
154
158
  getCopyConfig,
159
+ getSinkConfig,
155
160
  isMaterializedView,
156
161
  isCopyPipe,
162
+ isSinkPipe,
157
163
  getNodeNames,
158
164
  getNode,
159
165
  sql,
@@ -163,7 +169,13 @@ export type {
163
169
  PipeOptions,
164
170
  EndpointOptions,
165
171
  CopyPipeOptions,
172
+ SinkPipeOptions,
166
173
  CopyConfig,
174
+ SinkConfig,
175
+ SinkStrategy,
176
+ SinkCompression,
177
+ KafkaSinkConfig,
178
+ S3SinkConfig,
167
179
  NodeDefinition,
168
180
  NodeOptions,
169
181
  ParamsDefinition,