@tinybirdco/sdk 0.0.47 → 0.0.49

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. package/README.md +53 -3
  2. package/dist/cli/commands/migrate.d.ts.map +1 -1
  3. package/dist/cli/commands/migrate.js +32 -0
  4. package/dist/cli/commands/migrate.js.map +1 -1
  5. package/dist/cli/commands/migrate.test.js +585 -8
  6. package/dist/cli/commands/migrate.test.js.map +1 -1
  7. package/dist/generator/connection.d.ts.map +1 -1
  8. package/dist/generator/connection.js +3 -0
  9. package/dist/generator/connection.js.map +1 -1
  10. package/dist/generator/connection.test.js +8 -0
  11. package/dist/generator/connection.test.js.map +1 -1
  12. package/dist/generator/datasource.d.ts.map +1 -1
  13. package/dist/generator/datasource.js +3 -0
  14. package/dist/generator/datasource.js.map +1 -1
  15. package/dist/generator/datasource.test.js +50 -0
  16. package/dist/generator/datasource.test.js.map +1 -1
  17. package/dist/generator/pipe.d.ts.map +1 -1
  18. package/dist/generator/pipe.js +31 -1
  19. package/dist/generator/pipe.js.map +1 -1
  20. package/dist/generator/pipe.test.js +50 -1
  21. package/dist/generator/pipe.test.js.map +1 -1
  22. package/dist/index.d.ts +3 -2
  23. package/dist/index.d.ts.map +1 -1
  24. package/dist/index.js +3 -1
  25. package/dist/index.js.map +1 -1
  26. package/dist/index.test.js +3 -0
  27. package/dist/index.test.js.map +1 -1
  28. package/dist/migrate/emit-ts.d.ts.map +1 -1
  29. package/dist/migrate/emit-ts.js +159 -41
  30. package/dist/migrate/emit-ts.js.map +1 -1
  31. package/dist/migrate/parse-connection.d.ts.map +1 -1
  32. package/dist/migrate/parse-connection.js +13 -2
  33. package/dist/migrate/parse-connection.js.map +1 -1
  34. package/dist/migrate/parse-datasource.d.ts.map +1 -1
  35. package/dist/migrate/parse-datasource.js +115 -52
  36. package/dist/migrate/parse-datasource.js.map +1 -1
  37. package/dist/migrate/parse-pipe.d.ts.map +1 -1
  38. package/dist/migrate/parse-pipe.js +257 -46
  39. package/dist/migrate/parse-pipe.js.map +1 -1
  40. package/dist/migrate/parser-utils.d.ts +5 -0
  41. package/dist/migrate/parser-utils.d.ts.map +1 -1
  42. package/dist/migrate/parser-utils.js +22 -0
  43. package/dist/migrate/parser-utils.js.map +1 -1
  44. package/dist/migrate/types.d.ts +25 -3
  45. package/dist/migrate/types.d.ts.map +1 -1
  46. package/dist/schema/connection.d.ts +2 -0
  47. package/dist/schema/connection.d.ts.map +1 -1
  48. package/dist/schema/connection.js.map +1 -1
  49. package/dist/schema/datasource.d.ts +3 -1
  50. package/dist/schema/datasource.d.ts.map +1 -1
  51. package/dist/schema/datasource.js +8 -1
  52. package/dist/schema/datasource.js.map +1 -1
  53. package/dist/schema/datasource.test.js +13 -0
  54. package/dist/schema/datasource.test.js.map +1 -1
  55. package/dist/schema/engines.d.ts.map +1 -1
  56. package/dist/schema/engines.js +3 -0
  57. package/dist/schema/engines.js.map +1 -1
  58. package/dist/schema/engines.test.js +16 -0
  59. package/dist/schema/engines.test.js.map +1 -1
  60. package/dist/schema/pipe.d.ts +90 -3
  61. package/dist/schema/pipe.d.ts.map +1 -1
  62. package/dist/schema/pipe.js +84 -0
  63. package/dist/schema/pipe.js.map +1 -1
  64. package/dist/schema/pipe.test.js +70 -1
  65. package/dist/schema/pipe.test.js.map +1 -1
  66. package/dist/schema/secret.d.ts +6 -0
  67. package/dist/schema/secret.d.ts.map +1 -0
  68. package/dist/schema/secret.js +14 -0
  69. package/dist/schema/secret.js.map +1 -0
  70. package/dist/schema/secret.test.d.ts +2 -0
  71. package/dist/schema/secret.test.d.ts.map +1 -0
  72. package/dist/schema/secret.test.js +14 -0
  73. package/dist/schema/secret.test.js.map +1 -0
  74. package/dist/schema/types.d.ts +5 -0
  75. package/dist/schema/types.d.ts.map +1 -1
  76. package/dist/schema/types.js +6 -0
  77. package/dist/schema/types.js.map +1 -1
  78. package/dist/schema/types.test.js +12 -0
  79. package/dist/schema/types.test.js.map +1 -1
  80. package/package.json +1 -1
  81. package/src/cli/commands/migrate.test.ts +859 -8
  82. package/src/cli/commands/migrate.ts +35 -0
  83. package/src/generator/connection.test.ts +13 -0
  84. package/src/generator/connection.ts +4 -0
  85. package/src/generator/datasource.test.ts +60 -0
  86. package/src/generator/datasource.ts +3 -0
  87. package/src/generator/pipe.test.ts +56 -1
  88. package/src/generator/pipe.ts +41 -1
  89. package/src/index.test.ts +4 -0
  90. package/src/index.ts +12 -0
  91. package/src/migrate/emit-ts.ts +161 -48
  92. package/src/migrate/parse-connection.ts +15 -2
  93. package/src/migrate/parse-datasource.ts +134 -71
  94. package/src/migrate/parse-pipe.ts +364 -69
  95. package/src/migrate/parser-utils.ts +36 -1
  96. package/src/migrate/types.ts +28 -3
  97. package/src/schema/connection.ts +2 -0
  98. package/src/schema/datasource.test.ts +17 -0
  99. package/src/schema/datasource.ts +13 -2
  100. package/src/schema/engines.test.ts +18 -0
  101. package/src/schema/engines.ts +3 -0
  102. package/src/schema/pipe.test.ts +89 -0
  103. package/src/schema/pipe.ts +188 -4
  104. package/src/schema/secret.test.ts +19 -0
  105. package/src/schema/secret.ts +16 -0
  106. package/src/schema/types.test.ts +14 -0
  107. package/src/schema/types.ts +10 -0
@@ -24,6 +24,7 @@ describe("Datasource Schema", () => {
24
24
  expect(ds._name).toBe("events");
25
25
  expect(ds._type).toBe("datasource");
26
26
  expect(ds.options.schema).toBeDefined();
27
+ expect(ds.options.engine).toBeUndefined();
27
28
  });
28
29
 
29
30
  it("creates a datasource with description", () => {
@@ -169,6 +170,22 @@ describe("Datasource Schema", () => {
169
170
 
170
171
  expect(result).toBeUndefined();
171
172
  });
173
+
174
+ it("returns jsonPath from validator modifier", () => {
175
+ const validator = t.string().jsonPath("$.user.id");
176
+ const result = getColumnJsonPath(validator);
177
+
178
+ expect(result).toBe("$.user.id");
179
+ });
180
+
181
+ it("prefers column definition jsonPath over validator modifier", () => {
182
+ const col = column(t.string().jsonPath("$.from_validator"), {
183
+ jsonPath: "$.from_column",
184
+ });
185
+ const result = getColumnJsonPath(col);
186
+
187
+ expect(result).toBe("$.from_column");
188
+ });
172
189
  });
173
190
 
174
191
  describe("getColumnNames", () => {
@@ -3,7 +3,7 @@
3
3
  * Define table schemas as TypeScript with full type safety
4
4
  */
5
5
 
6
- import type { AnyTypeValidator } from "./types.js";
6
+ import { getModifiers, isTypeValidator, type AnyTypeValidator } from "./types.js";
7
7
  import type { EngineConfig } from "./engines.js";
8
8
  import type { KafkaConnectionDefinition, S3ConnectionDefinition } from "./connection.js";
9
9
  import type { TokenDefinition, DatasourceTokenScope } from "./token.js";
@@ -66,6 +66,8 @@ export interface KafkaConfig {
66
66
  groupId?: string;
67
67
  /** Where to start reading: 'earliest' or 'latest' (default: 'latest') */
68
68
  autoOffsetReset?: "earliest" | "latest";
69
+ /** Whether to store the raw Kafka value payload */
70
+ storeRawValue?: boolean;
69
71
  }
70
72
 
71
73
  /**
@@ -207,9 +209,18 @@ export function getColumnType(column: AnyTypeValidator | ColumnDefinition): AnyT
207
209
  * Get the JSON path for a column if defined
208
210
  */
209
211
  export function getColumnJsonPath(column: AnyTypeValidator | ColumnDefinition): string | undefined {
210
- if ("jsonPath" in column) {
212
+ if (isTypeValidator(column)) {
213
+ return getModifiers(column).jsonPath;
214
+ }
215
+
216
+ if (column.jsonPath !== undefined) {
211
217
  return column.jsonPath;
212
218
  }
219
+
220
+ if (isTypeValidator(column.type)) {
221
+ return getModifiers(column.type).jsonPath;
222
+ }
223
+
213
224
  return undefined;
214
225
  }
215
226
 
@@ -44,6 +44,14 @@ describe('Engine Configurations', () => {
44
44
  });
45
45
  expect(config.ver).toBe('updated_at');
46
46
  });
47
+
48
+ it('supports isDeleted column', () => {
49
+ const config = engine.replacingMergeTree({
50
+ sortingKey: ['id'],
51
+ isDeleted: '_is_deleted',
52
+ });
53
+ expect(config.isDeleted).toBe('_is_deleted');
54
+ });
47
55
  });
48
56
 
49
57
  describe('SummingMergeTree', () => {
@@ -138,6 +146,16 @@ describe('Engine Configurations', () => {
138
146
  expect(clause).toContain('ENGINE_VER "updated_at"');
139
147
  });
140
148
 
149
+ it('includes ReplacingMergeTree isDeleted column', () => {
150
+ const config = engine.replacingMergeTree({
151
+ sortingKey: ['id'],
152
+ isDeleted: '_is_deleted',
153
+ });
154
+ const clause = getEngineClause(config);
155
+ expect(clause).toContain('ENGINE "ReplacingMergeTree"');
156
+ expect(clause).toContain('ENGINE_IS_DELETED "_is_deleted"');
157
+ });
158
+
141
159
  it('includes SummingMergeTree columns', () => {
142
160
  const config = engine.summingMergeTree({
143
161
  sortingKey: ['date'],
@@ -241,6 +241,9 @@ export function getEngineClause(config: EngineConfig): string {
241
241
  if (config.type === "ReplacingMergeTree" && config.ver) {
242
242
  parts.push(`ENGINE_VER "${config.ver}"`);
243
243
  }
244
+ if (config.type === "ReplacingMergeTree" && config.isDeleted) {
245
+ parts.push(`ENGINE_IS_DELETED "${config.isDeleted}"`);
246
+ }
244
247
 
245
248
  if (config.type === "CollapsingMergeTree" || config.type === "VersionedCollapsingMergeTree") {
246
249
  parts.push(`ENGINE_SIGN "${config.sign}"`);
@@ -1,17 +1,21 @@
1
1
  import { describe, it, expect } from "vitest";
2
2
  import {
3
3
  definePipe,
4
+ defineSinkPipe,
4
5
  defineMaterializedView,
5
6
  node,
6
7
  isPipeDefinition,
7
8
  getEndpointConfig,
8
9
  getMaterializedConfig,
10
+ getSinkConfig,
9
11
  isMaterializedView,
12
+ isSinkPipe,
10
13
  getNodeNames,
11
14
  getNode,
12
15
  sql,
13
16
  } from "./pipe.js";
14
17
  import { defineDatasource } from "./datasource.js";
18
+ import { defineKafkaConnection, defineS3Connection } from "./connection.js";
15
19
  import { t } from "./types.js";
16
20
  import { p } from "./params.js";
17
21
  import { engine } from "./engines.js";
@@ -154,6 +158,91 @@ describe("Pipe Schema", () => {
154
158
  });
155
159
  });
156
160
 
161
+ describe("Sink pipes", () => {
162
+ it("creates a Kafka sink pipe", () => {
163
+ const kafka = defineKafkaConnection("events_kafka", {
164
+ bootstrapServers: "localhost:9092",
165
+ });
166
+
167
+ const pipe = defineSinkPipe("events_sink", {
168
+ nodes: [node({ name: "publish", sql: "SELECT * FROM events" })],
169
+ sink: {
170
+ connection: kafka,
171
+ topic: "events_out",
172
+ schedule: "@on-demand",
173
+ },
174
+ });
175
+
176
+ const sink = getSinkConfig(pipe);
177
+ expect(sink).toBeTruthy();
178
+ expect(sink && "topic" in sink ? sink.topic : undefined).toBe("events_out");
179
+ expect(isSinkPipe(pipe)).toBe(true);
180
+ });
181
+
182
+ it("creates an S3 sink pipe", () => {
183
+ const s3 = defineS3Connection("exports_s3", {
184
+ region: "us-east-1",
185
+ arn: "arn:aws:iam::123456789012:role/tinybird-s3-access",
186
+ });
187
+
188
+ const pipe = defineSinkPipe("exports_sink", {
189
+ nodes: [node({ name: "export", sql: "SELECT * FROM events" })],
190
+ sink: {
191
+ connection: s3,
192
+ bucketUri: "s3://exports/events/",
193
+ fileTemplate: "events_{date}",
194
+ schedule: "@once",
195
+ format: "csv",
196
+ strategy: "create_new",
197
+ compression: "gzip",
198
+ },
199
+ });
200
+
201
+ const sink = getSinkConfig(pipe);
202
+ expect(sink).toBeTruthy();
203
+ expect(sink && "bucketUri" in sink ? sink.bucketUri : undefined).toBe("s3://exports/events/");
204
+ expect(isSinkPipe(pipe)).toBe(true);
205
+ });
206
+
207
+ it("throws when Kafka sink connection type is invalid", () => {
208
+ const s3 = defineS3Connection("exports_s3", {
209
+ region: "us-east-1",
210
+ arn: "arn:aws:iam::123456789012:role/tinybird-s3-access",
211
+ });
212
+
213
+ expect(() =>
214
+ defineSinkPipe("bad_sink", {
215
+ nodes: [node({ name: "export", sql: "SELECT * FROM events" })],
216
+ sink: {
217
+ // Runtime validation rejects mismatched connection/type
218
+ connection: s3 as unknown as ReturnType<typeof defineKafkaConnection>,
219
+ topic: "events_out",
220
+ schedule: "@on-demand",
221
+ },
222
+ })
223
+ ).toThrow("requires a Kafka connection");
224
+ });
225
+
226
+ it("throws when sink configuration is passed to definePipe", () => {
227
+ const kafka = defineKafkaConnection("events_kafka", {
228
+ bootstrapServers: "localhost:9092",
229
+ });
230
+
231
+ expect(() =>
232
+ definePipe(
233
+ "bad_via_define_pipe",
234
+ {
235
+ nodes: [node({ name: "export", sql: "SELECT * FROM events" })],
236
+ sink: {
237
+ connection: kafka,
238
+ topic: "events_out",
239
+ },
240
+ } as unknown as Parameters<typeof definePipe>[1]
241
+ )
242
+ ).toThrow("must be created with defineSinkPipe");
243
+ });
244
+ });
245
+
157
246
  describe("getEndpointConfig", () => {
158
247
  it("returns null when endpoint is false", () => {
159
248
  const pipe = definePipe("my_pipe", {
@@ -9,6 +9,8 @@ import type { DatasourceDefinition, SchemaDefinition, ColumnDefinition } from ".
9
9
  import { getColumnType } from "./datasource.js";
10
10
  import { getTinybirdType } from "./types.js";
11
11
  import type { TokenDefinition, PipeTokenScope } from "./token.js";
12
+ import type { KafkaConnectionDefinition, S3ConnectionDefinition } from "./connection.js";
13
+ import { isKafkaConnectionDefinition, isS3ConnectionDefinition } from "./connection.js";
12
14
 
13
15
  /** Symbol for brand typing pipes - use Symbol.for() for global registry */
14
16
  export const PIPE_BRAND = Symbol.for("tinybird.pipe");
@@ -155,6 +157,55 @@ export interface CopyConfig<
155
157
  copy_schedule?: string;
156
158
  }
157
159
 
160
+ /**
161
+ * Sink export strategy.
162
+ * - 'create_new': write new files on each run
163
+ * - 'replace': replace destination data on each run
164
+ */
165
+ export type SinkStrategy = "create_new" | "replace";
166
+
167
+ /**
168
+ * S3 sink compression codec.
169
+ */
170
+ export type SinkCompression = "none" | "gzip" | "snappy";
171
+
172
+ /**
173
+ * Kafka sink configuration
174
+ */
175
+ export interface KafkaSinkConfig {
176
+ /** Kafka connection used to publish records */
177
+ connection: KafkaConnectionDefinition;
178
+ /** Destination Kafka topic */
179
+ topic: string;
180
+ /** Sink schedule (for example: @on-demand, @once, cron expression) */
181
+ schedule: string;
182
+ }
183
+
184
+ /**
185
+ * S3 sink configuration
186
+ */
187
+ export interface S3SinkConfig {
188
+ /** S3 connection used to write exported files */
189
+ connection: S3ConnectionDefinition;
190
+ /** Destination bucket URI (for example: s3://bucket/prefix/) */
191
+ bucketUri: string;
192
+ /** Output filename template (supports Tinybird placeholders) */
193
+ fileTemplate: string;
194
+ /** Output format (for example: csv, ndjson) */
195
+ format: string;
196
+ /** Sink schedule (for example: @on-demand, @once, cron expression) */
197
+ schedule: string;
198
+ /** Export strategy */
199
+ strategy?: SinkStrategy;
200
+ /** Compression codec */
201
+ compression?: SinkCompression;
202
+ }
203
+
204
+ /**
205
+ * Sink pipe configuration (Kafka or S3 only)
206
+ */
207
+ export type SinkConfig = KafkaSinkConfig | S3SinkConfig;
208
+
158
209
  /**
159
210
  * Inline token configuration for pipe access
160
211
  */
@@ -194,9 +245,9 @@ export interface PipeOptions<
194
245
  nodes: readonly NodeDefinition[];
195
246
  /** Output schema (optional for reusable pipes, required for endpoints) */
196
247
  output?: TOutput;
197
- /** Whether this pipe is an API endpoint (shorthand for { enabled: true }). Mutually exclusive with materialized and copy. */
248
+ /** Whether this pipe is an API endpoint (shorthand for { enabled: true }). Mutually exclusive with materialized, copy, and sink. */
198
249
  endpoint?: boolean | EndpointConfig;
199
- /** Materialized view configuration. Mutually exclusive with endpoint and copy. */
250
+ /** Materialized view configuration. Mutually exclusive with endpoint, copy, and sink. */
200
251
  materialized?: MaterializedConfig;
201
252
  /** Copy pipe configuration. Mutually exclusive with endpoint and materialized. */
202
253
  copy?: CopyConfig;
@@ -204,6 +255,33 @@ export interface PipeOptions<
204
255
  tokens?: readonly PipeTokenConfig[];
205
256
  }
206
257
 
258
+ /**
259
+ * Options for defining a sink pipe
260
+ */
261
+ export interface SinkPipeOptions<TParams extends ParamsDefinition> {
262
+ /** Human-readable description of the sink pipe */
263
+ description?: string;
264
+ /** Parameter definitions for query inputs */
265
+ params?: TParams;
266
+ /** Nodes in the transformation pipeline */
267
+ nodes: readonly NodeDefinition[];
268
+ /** Sink export configuration */
269
+ sink: SinkConfig;
270
+ /** Sink pipes are not endpoints */
271
+ endpoint?: never;
272
+ /** Sink pipes are not materialized views */
273
+ materialized?: never;
274
+ /** Sink pipes are not copy pipes */
275
+ copy?: never;
276
+ /** Access tokens for this sink pipe */
277
+ tokens?: readonly PipeTokenConfig[];
278
+ }
279
+
280
+ type PipeRuntimeOptions<
281
+ TParams extends ParamsDefinition,
282
+ TOutput extends OutputDefinition
283
+ > = (PipeOptions<TParams, TOutput> & { sink?: never }) | SinkPipeOptions<TParams>;
284
+
207
285
  /**
208
286
  * Options for defining an endpoint (API-exposed pipe)
209
287
  */
@@ -277,7 +355,7 @@ export interface PipeDefinition<
277
355
  /** Output schema (optional for reusable pipes) */
278
356
  readonly _output?: TOutput;
279
357
  /** Full options */
280
- readonly options: PipeOptions<TParams, TOutput>;
358
+ readonly options: PipeRuntimeOptions<TParams, TOutput>;
281
359
  }
282
360
 
283
361
  /**
@@ -428,6 +506,52 @@ function validateMaterializedSchema(
428
506
  }
429
507
  }
430
508
 
509
+ function validateSinkConfig(pipeName: string, sink: SinkConfig): void {
510
+ if ("topic" in sink) {
511
+ if (!isKafkaConnectionDefinition(sink.connection)) {
512
+ throw new Error(
513
+ `Pipe "${pipeName}" sink with topic requires a Kafka connection.`
514
+ );
515
+ }
516
+ if (typeof sink.topic !== "string" || !sink.topic.trim()) {
517
+ throw new Error(`Pipe "${pipeName}" sink topic cannot be empty.`);
518
+ }
519
+ if (typeof sink.schedule !== "string" || !sink.schedule.trim()) {
520
+ throw new Error(`Pipe "${pipeName}" sink schedule cannot be empty.`);
521
+ }
522
+ return;
523
+ }
524
+
525
+ if (!isS3ConnectionDefinition(sink.connection)) {
526
+ throw new Error(
527
+ `Pipe "${pipeName}" S3 sink requires an S3 connection.`
528
+ );
529
+ }
530
+ if (typeof sink.bucketUri !== "string" || !sink.bucketUri.trim()) {
531
+ throw new Error(`Pipe "${pipeName}" sink bucketUri cannot be empty.`);
532
+ }
533
+ if (typeof sink.fileTemplate !== "string" || !sink.fileTemplate.trim()) {
534
+ throw new Error(`Pipe "${pipeName}" sink fileTemplate cannot be empty.`);
535
+ }
536
+ if (typeof sink.format !== "string" || !sink.format.trim()) {
537
+ throw new Error(`Pipe "${pipeName}" sink format cannot be empty.`);
538
+ }
539
+ if (typeof sink.schedule !== "string" || !sink.schedule.trim()) {
540
+ throw new Error(`Pipe "${pipeName}" sink schedule cannot be empty.`);
541
+ }
542
+ if (sink.strategy && sink.strategy !== "create_new" && sink.strategy !== "replace") {
543
+ throw new Error(`Pipe "${pipeName}" sink strategy must be "create_new" or "replace".`);
544
+ }
545
+ if (
546
+ sink.compression &&
547
+ sink.compression !== "none" &&
548
+ sink.compression !== "gzip" &&
549
+ sink.compression !== "snappy"
550
+ ) {
551
+ throw new Error(`Pipe "${pipeName}" sink compression must be "none", "gzip", or "snappy".`);
552
+ }
553
+ }
554
+
431
555
  export function definePipe<
432
556
  TParams extends ParamsDefinition,
433
557
  TOutput extends OutputDefinition
@@ -447,6 +571,12 @@ export function definePipe<
447
571
  throw new Error(`Pipe "${name}" must have at least one node.`);
448
572
  }
449
573
 
574
+ if ("sink" in (options as unknown as object)) {
575
+ throw new Error(
576
+ `Pipe "${name}" sink configuration must be created with defineSinkPipe().`
577
+ );
578
+ }
579
+
450
580
  // Validate output is provided for endpoints and materialized views
451
581
  if ((options.endpoint || options.materialized) && (!options.output || Object.keys(options.output).length === 0)) {
452
582
  throw new Error(
@@ -480,7 +610,47 @@ export function definePipe<
480
610
  options: {
481
611
  ...options,
482
612
  params,
483
- },
613
+ } as PipeRuntimeOptions<TParams, TOutput>,
614
+ };
615
+ }
616
+
617
+ /**
618
+ * Define a Tinybird sink pipe
619
+ *
620
+ * Sink pipes export query results to external systems via Kafka or S3.
621
+ *
622
+ * @param name - The sink pipe name (must be valid identifier)
623
+ * @param options - Sink pipe configuration
624
+ * @returns A pipe definition configured as a sink pipe
625
+ */
626
+ export function defineSinkPipe<TParams extends ParamsDefinition>(
627
+ name: string,
628
+ options: SinkPipeOptions<TParams>
629
+ ): PipeDefinition<TParams, Record<string, never>> {
630
+ if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
631
+ throw new Error(
632
+ `Invalid pipe name: "${name}". Must start with a letter or underscore and contain only alphanumeric characters and underscores.`
633
+ );
634
+ }
635
+
636
+ if (!options.nodes || options.nodes.length === 0) {
637
+ throw new Error(`Pipe "${name}" must have at least one node.`);
638
+ }
639
+
640
+ validateSinkConfig(name, options.sink);
641
+
642
+ const params = (options.params ?? {}) as TParams;
643
+
644
+ return {
645
+ [PIPE_BRAND]: true,
646
+ _name: name,
647
+ _type: "pipe",
648
+ _params: params,
649
+ _output: undefined,
650
+ options: {
651
+ ...options,
652
+ params,
653
+ } as PipeRuntimeOptions<TParams, Record<string, never>>,
484
654
  };
485
655
  }
486
656
 
@@ -790,6 +960,20 @@ export function isCopyPipe(pipe: PipeDefinition): boolean {
790
960
  return pipe.options.copy !== undefined;
791
961
  }
792
962
 
963
+ /**
964
+ * Get the sink configuration from a pipe
965
+ */
966
+ export function getSinkConfig(pipe: PipeDefinition): SinkConfig | null {
967
+ return "sink" in pipe.options ? (pipe.options.sink ?? null) : null;
968
+ }
969
+
970
+ /**
971
+ * Check if a pipe is a sink pipe
972
+ */
973
+ export function isSinkPipe(pipe: PipeDefinition): boolean {
974
+ return pipe.options.sink !== undefined;
975
+ }
976
+
793
977
  /**
794
978
  * Get all node names from a pipe
795
979
  */
@@ -0,0 +1,19 @@
1
+ import { describe, expect, it } from "vitest";
2
+ import { secret } from "./secret.js";
3
+
4
+ describe("secret helper", () => {
5
+ it("creates a secret template without default", () => {
6
+ expect(secret("KAFKA_KEY")).toBe('{{ tb_secret("KAFKA_KEY") }}');
7
+ });
8
+
9
+ it("creates a secret template with default", () => {
10
+ expect(secret("KAFKA_GROUP_ID", "events_group")).toBe(
11
+ '{{ tb_secret("KAFKA_GROUP_ID", "events_group") }}'
12
+ );
13
+ });
14
+
15
+ it("throws on empty secret name", () => {
16
+ expect(() => secret("")).toThrow("Secret name must be a non-empty string.");
17
+ });
18
+ });
19
+
@@ -0,0 +1,16 @@
1
+ /**
2
+ * Secret template helper.
3
+ * Produces Tinybird-compatible `tb_secret(...)` template strings.
4
+ */
5
+ export function secret(name: string, defaultValue?: string): string {
6
+ if (!name || name.trim().length === 0) {
7
+ throw new Error("Secret name must be a non-empty string.");
8
+ }
9
+
10
+ if (defaultValue === undefined) {
11
+ return `{{ tb_secret("${name}") }}`;
12
+ }
13
+
14
+ return `{{ tb_secret("${name}", "${defaultValue}") }}`;
15
+ }
16
+
@@ -115,6 +115,20 @@ describe('Type Validators (t.*)', () => {
115
115
  });
116
116
  });
117
117
 
118
+ describe('jsonPath modifier', () => {
119
+ it('sets jsonPath in modifiers', () => {
120
+ const type = t.string().jsonPath('$.payload.id');
121
+ expect(type._modifiers.jsonPath).toBe('$.payload.id');
122
+ });
123
+
124
+ it('supports chaining with other modifiers', () => {
125
+ const type = t.string().nullable().jsonPath('$.user.name');
126
+ expect(type._tinybirdType).toBe('Nullable(String)');
127
+ expect(type._modifiers.nullable).toBe(true);
128
+ expect(type._modifiers.jsonPath).toBe('$.user.name');
129
+ });
130
+ });
131
+
118
132
  describe('Complex types', () => {
119
133
  it('generates Array type', () => {
120
134
  const type = t.array(t.string());
@@ -32,6 +32,8 @@ export interface TypeValidator<
32
32
  default(value: TType): TypeValidator<TType, TTinybirdType, TModifiers & { hasDefault: true; defaultValue: TType }>;
33
33
  /** Set a codec for compression */
34
34
  codec(codec: string): TypeValidator<TType, TTinybirdType, TModifiers & { codec: string }>;
35
+ /** Set an explicit JSON path for extraction (overrides autogenerated path) */
36
+ jsonPath(path: string): TypeValidator<TType, TTinybirdType, TModifiers & { jsonPath: string }>;
35
37
  }
36
38
 
37
39
  export interface TypeModifiers {
@@ -40,6 +42,7 @@ export interface TypeModifiers {
40
42
  hasDefault?: boolean;
41
43
  defaultValue?: unknown;
42
44
  codec?: string;
45
+ jsonPath?: string;
43
46
  }
44
47
 
45
48
  // Internal implementation
@@ -110,6 +113,13 @@ function createValidator<TType, TTinybirdType extends string>(
110
113
  codec,
111
114
  }) as TypeValidator<TType, TTinybirdType, TypeModifiers & { codec: string }>;
112
115
  },
116
+
117
+ jsonPath(path: string) {
118
+ return createValidator<TType, TTinybirdType>(tinybirdType, {
119
+ ...modifiers,
120
+ jsonPath: path,
121
+ }) as TypeValidator<TType, TTinybirdType, TypeModifiers & { jsonPath: string }>;
122
+ },
113
123
  };
114
124
 
115
125
  return validator;