@tinybirdco/sdk 0.0.48 → 0.0.50

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. package/README.md +71 -4
  2. package/dist/cli/commands/migrate.d.ts.map +1 -1
  3. package/dist/cli/commands/migrate.js +68 -1
  4. package/dist/cli/commands/migrate.js.map +1 -1
  5. package/dist/cli/commands/migrate.test.js +458 -1
  6. package/dist/cli/commands/migrate.test.js.map +1 -1
  7. package/dist/generator/connection.d.ts.map +1 -1
  8. package/dist/generator/connection.js +14 -1
  9. package/dist/generator/connection.js.map +1 -1
  10. package/dist/generator/connection.test.js +20 -4
  11. package/dist/generator/connection.test.js.map +1 -1
  12. package/dist/generator/datasource.d.ts.map +1 -1
  13. package/dist/generator/datasource.js +20 -10
  14. package/dist/generator/datasource.js.map +1 -1
  15. package/dist/generator/datasource.test.js +26 -1
  16. package/dist/generator/datasource.test.js.map +1 -1
  17. package/dist/generator/pipe.d.ts.map +1 -1
  18. package/dist/generator/pipe.js +31 -1
  19. package/dist/generator/pipe.js.map +1 -1
  20. package/dist/generator/pipe.test.js +50 -1
  21. package/dist/generator/pipe.test.js.map +1 -1
  22. package/dist/index.d.ts +5 -5
  23. package/dist/index.d.ts.map +1 -1
  24. package/dist/index.js +2 -2
  25. package/dist/index.js.map +1 -1
  26. package/dist/migrate/emit-ts.d.ts.map +1 -1
  27. package/dist/migrate/emit-ts.js +95 -20
  28. package/dist/migrate/emit-ts.js.map +1 -1
  29. package/dist/migrate/parse-connection.d.ts +2 -2
  30. package/dist/migrate/parse-connection.d.ts.map +1 -1
  31. package/dist/migrate/parse-connection.js +34 -4
  32. package/dist/migrate/parse-connection.js.map +1 -1
  33. package/dist/migrate/parse-datasource.d.ts.map +1 -1
  34. package/dist/migrate/parse-datasource.js +79 -51
  35. package/dist/migrate/parse-datasource.js.map +1 -1
  36. package/dist/migrate/parse-pipe.d.ts.map +1 -1
  37. package/dist/migrate/parse-pipe.js +254 -44
  38. package/dist/migrate/parse-pipe.js.map +1 -1
  39. package/dist/migrate/parser-utils.d.ts +5 -0
  40. package/dist/migrate/parser-utils.d.ts.map +1 -1
  41. package/dist/migrate/parser-utils.js +22 -0
  42. package/dist/migrate/parser-utils.js.map +1 -1
  43. package/dist/migrate/types.d.ts +37 -4
  44. package/dist/migrate/types.d.ts.map +1 -1
  45. package/dist/schema/connection.d.ts +34 -1
  46. package/dist/schema/connection.d.ts.map +1 -1
  47. package/dist/schema/connection.js +26 -0
  48. package/dist/schema/connection.js.map +1 -1
  49. package/dist/schema/connection.test.js +35 -1
  50. package/dist/schema/connection.test.js.map +1 -1
  51. package/dist/schema/datasource.d.ts +16 -1
  52. package/dist/schema/datasource.d.ts.map +1 -1
  53. package/dist/schema/datasource.js +3 -2
  54. package/dist/schema/datasource.js.map +1 -1
  55. package/dist/schema/datasource.test.js +33 -3
  56. package/dist/schema/datasource.test.js.map +1 -1
  57. package/dist/schema/pipe.d.ts +90 -3
  58. package/dist/schema/pipe.d.ts.map +1 -1
  59. package/dist/schema/pipe.js +84 -0
  60. package/dist/schema/pipe.js.map +1 -1
  61. package/dist/schema/pipe.test.js +70 -1
  62. package/dist/schema/pipe.test.js.map +1 -1
  63. package/package.json +1 -1
  64. package/src/cli/commands/migrate.test.ts +671 -1
  65. package/src/cli/commands/migrate.ts +74 -1
  66. package/src/generator/connection.test.ts +29 -4
  67. package/src/generator/connection.ts +25 -2
  68. package/src/generator/datasource.test.ts +30 -1
  69. package/src/generator/datasource.ts +22 -10
  70. package/src/generator/pipe.test.ts +56 -1
  71. package/src/generator/pipe.ts +41 -1
  72. package/src/index.ts +14 -0
  73. package/src/migrate/emit-ts.ts +106 -24
  74. package/src/migrate/parse-connection.ts +56 -6
  75. package/src/migrate/parse-datasource.ts +84 -70
  76. package/src/migrate/parse-pipe.ts +359 -66
  77. package/src/migrate/parser-utils.ts +36 -1
  78. package/src/migrate/types.ts +43 -4
  79. package/src/schema/connection.test.ts +48 -0
  80. package/src/schema/connection.ts +60 -1
  81. package/src/schema/datasource.test.ts +39 -3
  82. package/src/schema/datasource.ts +24 -3
  83. package/src/schema/pipe.test.ts +89 -0
  84. package/src/schema/pipe.ts +188 -4
@@ -3,44 +3,32 @@ import {
3
3
  MigrationParseError,
4
4
  isBlank,
5
5
  parseDirectiveLine,
6
+ parseQuotedValue,
7
+ readDirectiveBlock,
6
8
  splitLines,
7
9
  splitTopLevelComma,
8
- stripIndent,
9
10
  } from "./parser-utils.js";
10
11
 
11
- interface BlockReadResult {
12
- lines: string[];
13
- nextIndex: number;
14
- }
15
-
16
- function readIndentedBlock(lines: string[], startIndex: number): BlockReadResult {
17
- const collected: string[] = [];
18
- let i = startIndex;
19
-
20
- while (i < lines.length) {
21
- const line = lines[i] ?? "";
22
- if (line.startsWith(" ")) {
23
- collected.push(stripIndent(line));
24
- i += 1;
25
- continue;
26
- }
27
-
28
- if (isBlank(line)) {
29
- let j = i + 1;
30
- while (j < lines.length && isBlank(lines[j] ?? "")) {
31
- j += 1;
32
- }
33
- if (j < lines.length && (lines[j] ?? "").startsWith(" ")) {
34
- collected.push("");
35
- i += 1;
36
- continue;
37
- }
38
- }
39
-
40
- break;
12
+ const PIPE_DIRECTIVES = new Set([
13
+ "DESCRIPTION",
14
+ "NODE",
15
+ "SQL",
16
+ "TYPE",
17
+ "CACHE",
18
+ "DATASOURCE",
19
+ "DEPLOYMENT_METHOD",
20
+ "TARGET_DATASOURCE",
21
+ "COPY_SCHEDULE",
22
+ "COPY_MODE",
23
+ "TOKEN",
24
+ ]);
25
+
26
+ function isPipeDirectiveLine(line: string): boolean {
27
+ if (!line) {
28
+ return false;
41
29
  }
42
-
43
- return { lines: collected, nextIndex: i };
30
+ const { key } = parseDirectiveLine(line);
31
+ return PIPE_DIRECTIVES.has(key);
44
32
  }
45
33
 
46
34
  function nextNonBlank(lines: string[], startIndex: number): number {
@@ -117,11 +105,14 @@ function mapTemplateFunctionToParamType(func: string): string | null {
117
105
  return null;
118
106
  }
119
107
 
120
- function parseParamDefault(rawValue: string): string | number {
108
+ function parseParamDefault(rawValue: string): string | number | boolean {
121
109
  const trimmed = rawValue.trim();
122
110
  if (/^-?\d+(\.\d+)?$/.test(trimmed)) {
123
111
  return Number(trimmed);
124
112
  }
113
+ if (/^(true|false)$/i.test(trimmed)) {
114
+ return trimmed.toLowerCase() === "true";
115
+ }
125
116
  if (
126
117
  (trimmed.startsWith("'") && trimmed.endsWith("'")) ||
127
118
  (trimmed.startsWith('"') && trimmed.endsWith('"'))
@@ -131,6 +122,92 @@ function parseParamDefault(rawValue: string): string | number {
131
122
  throw new Error(`Unsupported parameter default value: "${rawValue}"`);
132
123
  }
133
124
 
125
+ function parseKeywordArgument(rawArg: string): { key: string; value: string } | null {
126
+ const equalsIndex = rawArg.indexOf("=");
127
+ if (equalsIndex <= 0) {
128
+ return null;
129
+ }
130
+
131
+ const key = rawArg.slice(0, equalsIndex).trim();
132
+ if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(key)) {
133
+ return null;
134
+ }
135
+
136
+ const value = rawArg.slice(equalsIndex + 1).trim();
137
+ if (!value) {
138
+ return null;
139
+ }
140
+
141
+ return { key, value };
142
+ }
143
+
144
+ function parseRequiredFlag(rawValue: string): boolean {
145
+ const normalized = rawValue.trim().toLowerCase();
146
+ if (normalized === "true" || normalized === "1") {
147
+ return true;
148
+ }
149
+ if (normalized === "false" || normalized === "0") {
150
+ return false;
151
+ }
152
+ throw new Error(`Unsupported required value: "${rawValue}"`);
153
+ }
154
+
155
+ function parseParamOptions(rawArgs: string[]): {
156
+ defaultValue?: string | number | boolean;
157
+ required?: boolean;
158
+ description?: string;
159
+ } {
160
+ let positionalDefault: string | number | boolean | undefined;
161
+ let keywordDefault: string | number | boolean | undefined;
162
+ let required: boolean | undefined;
163
+ let description: string | undefined;
164
+
165
+ for (const rawArg of rawArgs) {
166
+ const trimmed = rawArg.trim();
167
+ if (!trimmed) {
168
+ continue;
169
+ }
170
+
171
+ const keyword = parseKeywordArgument(trimmed);
172
+ if (!keyword) {
173
+ if (positionalDefault === undefined) {
174
+ positionalDefault = parseParamDefault(trimmed);
175
+ }
176
+ continue;
177
+ }
178
+
179
+ const keyLower = keyword.key.toLowerCase();
180
+ if (keyLower === "default") {
181
+ keywordDefault = parseParamDefault(keyword.value);
182
+ continue;
183
+ }
184
+ if (keyLower === "required") {
185
+ required = parseRequiredFlag(keyword.value);
186
+ continue;
187
+ }
188
+ if (keyLower === "description") {
189
+ const parsedDescription = parseParamDefault(keyword.value);
190
+ if (typeof parsedDescription !== "string") {
191
+ throw new Error(`Unsupported description value: "${keyword.value}"`);
192
+ }
193
+ description = parsedDescription;
194
+ continue;
195
+ }
196
+ }
197
+
198
+ let defaultValue = keywordDefault ?? positionalDefault;
199
+ if (keywordDefault !== undefined && positionalDefault !== undefined) {
200
+ if (keywordDefault !== positionalDefault) {
201
+ throw new Error(
202
+ `Parameter has conflicting positional and keyword defaults: "${positionalDefault}" and "${keywordDefault}".`
203
+ );
204
+ }
205
+ defaultValue = positionalDefault;
206
+ }
207
+
208
+ return { defaultValue, required, description };
209
+ }
210
+
134
211
  function inferParamsFromSql(
135
212
  sql: string,
136
213
  filePath: string,
@@ -173,10 +250,15 @@ function inferParamsFromSql(
173
250
  );
174
251
  }
175
252
 
176
- let defaultValue: string | number | undefined;
253
+ let defaultValue: string | number | boolean | undefined;
254
+ let required: boolean | undefined;
255
+ let description: string | undefined;
177
256
  if (args.length > 1) {
178
257
  try {
179
- defaultValue = parseParamDefault(args[1] ?? "");
258
+ const parsedOptions = parseParamOptions(args.slice(1));
259
+ defaultValue = parsedOptions.defaultValue;
260
+ required = parsedOptions.required;
261
+ description = parsedOptions.description;
180
262
  } catch (error) {
181
263
  throw new MigrationParseError(
182
264
  filePath,
@@ -209,14 +291,21 @@ function inferParamsFromSql(
209
291
  }
210
292
  if (existing.defaultValue === undefined && defaultValue !== undefined) {
211
293
  existing.defaultValue = defaultValue;
212
- existing.required = false;
213
294
  }
295
+ if (existing.description === undefined && description !== undefined) {
296
+ existing.description = description;
297
+ }
298
+ const optionalInAnyUsage =
299
+ existing.required === false || required === false || defaultValue !== undefined;
300
+ existing.required = !optionalInAnyUsage;
214
301
  } else {
302
+ const isRequired = required ?? defaultValue === undefined;
215
303
  params.set(paramName, {
216
304
  name: paramName,
217
305
  type: mappedType,
218
- required: defaultValue === undefined,
306
+ required: isRequired,
219
307
  defaultValue,
308
+ description,
220
309
  });
221
310
  }
222
311
 
@@ -227,7 +316,13 @@ function inferParamsFromSql(
227
316
  }
228
317
 
229
318
  function parseToken(filePath: string, resourceName: string, value: string): PipeTokenModel {
230
- const parts = value.split(/\s+/).filter(Boolean);
319
+ const trimmed = value.trim();
320
+ const quotedMatch = trimmed.match(/^"([^"]+)"(?:\s+(READ))?$/);
321
+ if (quotedMatch) {
322
+ return { name: quotedMatch[1] ?? "", scope: "READ" };
323
+ }
324
+
325
+ const parts = trimmed.split(/\s+/).filter(Boolean);
231
326
  if (parts.length === 0) {
232
327
  throw new MigrationParseError(filePath, "pipe", resourceName, "Invalid TOKEN line.");
233
328
  }
@@ -240,7 +335,13 @@ function parseToken(filePath: string, resourceName: string, value: string): Pipe
240
335
  );
241
336
  }
242
337
 
243
- const tokenName = parts[0];
338
+ const rawTokenName = parts[0] ?? "";
339
+ const tokenName =
340
+ rawTokenName.startsWith('"') &&
341
+ rawTokenName.endsWith('"') &&
342
+ rawTokenName.length >= 2
343
+ ? rawTokenName.slice(1, -1)
344
+ : rawTokenName;
244
345
  const scope = parts[1] ?? "READ";
245
346
  if (scope !== "READ") {
246
347
  throw new MigrationParseError(
@@ -266,6 +367,15 @@ export function parsePipeFile(resource: ResourceFile): PipeModel {
266
367
  let copyTargetDatasource: string | undefined;
267
368
  let copySchedule: string | undefined;
268
369
  let copyMode: "append" | "replace" | undefined;
370
+ let exportService: "kafka" | "s3" | undefined;
371
+ let exportConnectionName: string | undefined;
372
+ let exportTopic: string | undefined;
373
+ let exportBucketUri: string | undefined;
374
+ let exportFileTemplate: string | undefined;
375
+ let exportFormat: string | undefined;
376
+ let exportSchedule: string | undefined;
377
+ let exportStrategy: "create_new" | "replace" | undefined;
378
+ let exportCompression: "none" | "gzip" | "snappy" | undefined;
269
379
 
270
380
  let i = 0;
271
381
  while (i < lines.length) {
@@ -276,17 +386,8 @@ export function parsePipeFile(resource: ResourceFile): PipeModel {
276
386
  }
277
387
 
278
388
  if (line === "DESCRIPTION >") {
279
- const block = readIndentedBlock(lines, i + 1);
280
- if (block.lines.length === 0) {
281
- throw new MigrationParseError(
282
- resource.filePath,
283
- "pipe",
284
- resource.name,
285
- "DESCRIPTION block is empty."
286
- );
287
- }
288
-
289
- if (!description) {
389
+ const block = readDirectiveBlock(lines, i + 1, isPipeDirectiveLine);
390
+ if (description === undefined) {
290
391
  description = block.lines.join("\n");
291
392
  } else if (nodes.length > 0) {
292
393
  nodes[nodes.length - 1] = {
@@ -321,15 +422,7 @@ export function parsePipeFile(resource: ResourceFile): PipeModel {
321
422
 
322
423
  let nodeDescription: string | undefined;
323
424
  if ((lines[i] ?? "").trim() === "DESCRIPTION >") {
324
- const descriptionBlock = readIndentedBlock(lines, i + 1);
325
- if (descriptionBlock.lines.length === 0) {
326
- throw new MigrationParseError(
327
- resource.filePath,
328
- "pipe",
329
- resource.name,
330
- `Node "${nodeName}" has an empty DESCRIPTION block.`
331
- );
332
- }
425
+ const descriptionBlock = readDirectiveBlock(lines, i + 1, isPipeDirectiveLine);
333
426
  nodeDescription = descriptionBlock.lines.join("\n");
334
427
  i = descriptionBlock.nextIndex;
335
428
  i = nextNonBlank(lines, i);
@@ -343,7 +436,7 @@ export function parsePipeFile(resource: ResourceFile): PipeModel {
343
436
  `Node "${nodeName}" is missing SQL > block.`
344
437
  );
345
438
  }
346
- const sqlBlock = readIndentedBlock(lines, i + 1);
439
+ const sqlBlock = readDirectiveBlock(lines, i + 1, isPipeDirectiveLine);
347
440
  if (sqlBlock.lines.length === 0) {
348
441
  throw new MigrationParseError(
349
442
  resource.filePath,
@@ -377,22 +470,26 @@ export function parsePipeFile(resource: ResourceFile): PipeModel {
377
470
 
378
471
  const { key, value } = parseDirectiveLine(line);
379
472
  switch (key) {
380
- case "TYPE":
381
- if (value === "endpoint") {
473
+ case "TYPE": {
474
+ const normalizedType = parseQuotedValue(value).toLowerCase();
475
+ if (normalizedType === "endpoint") {
382
476
  pipeType = "endpoint";
383
- } else if (value === "MATERIALIZED") {
477
+ } else if (normalizedType === "materialized") {
384
478
  pipeType = "materialized";
385
- } else if (value === "COPY") {
479
+ } else if (normalizedType === "copy") {
386
480
  pipeType = "copy";
481
+ } else if (normalizedType === "sink") {
482
+ pipeType = "sink";
387
483
  } else {
388
484
  throw new MigrationParseError(
389
485
  resource.filePath,
390
486
  "pipe",
391
487
  resource.name,
392
- `Unsupported TYPE value in strict mode: "${value}"`
488
+ `Unsupported TYPE value in strict mode: "${parseQuotedValue(value)}"`
393
489
  );
394
490
  }
395
491
  break;
492
+ }
396
493
  case "CACHE": {
397
494
  const ttl = Number(value);
398
495
  if (!Number.isFinite(ttl) || ttl < 0) {
@@ -437,6 +534,63 @@ export function parsePipeFile(resource: ResourceFile): PipeModel {
437
534
  }
438
535
  copyMode = value;
439
536
  break;
537
+ case "EXPORT_SERVICE": {
538
+ const normalized = parseQuotedValue(value).toLowerCase();
539
+ if (normalized !== "kafka" && normalized !== "s3") {
540
+ throw new MigrationParseError(
541
+ resource.filePath,
542
+ "pipe",
543
+ resource.name,
544
+ `Unsupported EXPORT_SERVICE in strict mode: "${value}"`
545
+ );
546
+ }
547
+ exportService = normalized;
548
+ break;
549
+ }
550
+ case "EXPORT_CONNECTION_NAME":
551
+ exportConnectionName = parseQuotedValue(value);
552
+ break;
553
+ case "EXPORT_KAFKA_TOPIC":
554
+ exportTopic = parseQuotedValue(value);
555
+ break;
556
+ case "EXPORT_BUCKET_URI":
557
+ exportBucketUri = parseQuotedValue(value);
558
+ break;
559
+ case "EXPORT_FILE_TEMPLATE":
560
+ exportFileTemplate = parseQuotedValue(value);
561
+ break;
562
+ case "EXPORT_FORMAT":
563
+ exportFormat = parseQuotedValue(value);
564
+ break;
565
+ case "EXPORT_SCHEDULE":
566
+ exportSchedule = parseQuotedValue(value);
567
+ break;
568
+ case "EXPORT_STRATEGY": {
569
+ const normalized = parseQuotedValue(value).toLowerCase();
570
+ if (normalized !== "create_new" && normalized !== "replace") {
571
+ throw new MigrationParseError(
572
+ resource.filePath,
573
+ "pipe",
574
+ resource.name,
575
+ `Unsupported EXPORT_STRATEGY in strict mode: "${value}"`
576
+ );
577
+ }
578
+ exportStrategy = normalized;
579
+ break;
580
+ }
581
+ case "EXPORT_COMPRESSION": {
582
+ const normalized = parseQuotedValue(value).toLowerCase();
583
+ if (normalized !== "none" && normalized !== "gzip" && normalized !== "snappy") {
584
+ throw new MigrationParseError(
585
+ resource.filePath,
586
+ "pipe",
587
+ resource.name,
588
+ `Unsupported EXPORT_COMPRESSION in strict mode: "${value}"`
589
+ );
590
+ }
591
+ exportCompression = normalized;
592
+ break;
593
+ }
440
594
  case "TOKEN":
441
595
  tokens.push(parseToken(resource.filePath, resource.name, value));
442
596
  break;
@@ -488,6 +642,144 @@ export function parsePipeFile(resource: ResourceFile): PipeModel {
488
642
  );
489
643
  }
490
644
 
645
+ const hasSinkDirectives =
646
+ exportService !== undefined ||
647
+ exportConnectionName !== undefined ||
648
+ exportTopic !== undefined ||
649
+ exportBucketUri !== undefined ||
650
+ exportFileTemplate !== undefined ||
651
+ exportFormat !== undefined ||
652
+ exportSchedule !== undefined ||
653
+ exportStrategy !== undefined ||
654
+ exportCompression !== undefined;
655
+
656
+ if (pipeType !== "sink" && hasSinkDirectives) {
657
+ throw new MigrationParseError(
658
+ resource.filePath,
659
+ "pipe",
660
+ resource.name,
661
+ "EXPORT_* directives are only supported for TYPE sink."
662
+ );
663
+ }
664
+
665
+ let sink: PipeModel["sink"];
666
+ if (pipeType === "sink") {
667
+ if (!exportConnectionName) {
668
+ throw new MigrationParseError(
669
+ resource.filePath,
670
+ "pipe",
671
+ resource.name,
672
+ "EXPORT_CONNECTION_NAME is required for TYPE sink."
673
+ );
674
+ }
675
+
676
+ const hasKafkaDirectives = exportTopic !== undefined;
677
+ const hasS3Directives =
678
+ exportBucketUri !== undefined ||
679
+ exportFileTemplate !== undefined ||
680
+ exportFormat !== undefined ||
681
+ exportCompression !== undefined;
682
+
683
+ if (hasKafkaDirectives && hasS3Directives) {
684
+ throw new MigrationParseError(
685
+ resource.filePath,
686
+ "pipe",
687
+ resource.name,
688
+ "Sink pipe cannot mix Kafka and S3 export directives."
689
+ );
690
+ }
691
+
692
+ const inferredService =
693
+ exportService ?? (hasKafkaDirectives ? "kafka" : hasS3Directives ? "s3" : undefined);
694
+
695
+ if (!inferredService) {
696
+ throw new MigrationParseError(
697
+ resource.filePath,
698
+ "pipe",
699
+ resource.name,
700
+ "Sink pipe must define EXPORT_SERVICE or include service-specific export directives."
701
+ );
702
+ }
703
+
704
+ if (inferredService === "kafka") {
705
+ if (hasS3Directives) {
706
+ throw new MigrationParseError(
707
+ resource.filePath,
708
+ "pipe",
709
+ resource.name,
710
+ "S3 export directives are not valid for Kafka sinks."
711
+ );
712
+ }
713
+ if (!exportTopic) {
714
+ throw new MigrationParseError(
715
+ resource.filePath,
716
+ "pipe",
717
+ resource.name,
718
+ "EXPORT_KAFKA_TOPIC is required for Kafka sinks."
719
+ );
720
+ }
721
+ if (!exportSchedule) {
722
+ throw new MigrationParseError(
723
+ resource.filePath,
724
+ "pipe",
725
+ resource.name,
726
+ "EXPORT_SCHEDULE is required for Kafka sinks."
727
+ );
728
+ }
729
+ if (exportStrategy !== undefined) {
730
+ throw new MigrationParseError(
731
+ resource.filePath,
732
+ "pipe",
733
+ resource.name,
734
+ "EXPORT_STRATEGY is only valid for S3 sinks."
735
+ );
736
+ }
737
+ if (exportCompression !== undefined) {
738
+ throw new MigrationParseError(
739
+ resource.filePath,
740
+ "pipe",
741
+ resource.name,
742
+ "EXPORT_COMPRESSION is only valid for S3 sinks."
743
+ );
744
+ }
745
+
746
+ sink = {
747
+ service: "kafka",
748
+ connectionName: exportConnectionName,
749
+ topic: exportTopic,
750
+ schedule: exportSchedule,
751
+ };
752
+ } else {
753
+ if (hasKafkaDirectives) {
754
+ throw new MigrationParseError(
755
+ resource.filePath,
756
+ "pipe",
757
+ resource.name,
758
+ "Kafka export directives are not valid for S3 sinks."
759
+ );
760
+ }
761
+ if (!exportBucketUri || !exportFileTemplate || !exportFormat || !exportSchedule) {
762
+ throw new MigrationParseError(
763
+ resource.filePath,
764
+ "pipe",
765
+ resource.name,
766
+ "S3 sinks require EXPORT_BUCKET_URI, EXPORT_FILE_TEMPLATE, EXPORT_FORMAT, and EXPORT_SCHEDULE."
767
+ );
768
+ }
769
+
770
+ sink = {
771
+ service: "s3",
772
+ connectionName: exportConnectionName,
773
+ bucketUri: exportBucketUri,
774
+ fileTemplate: exportFileTemplate,
775
+ format: exportFormat,
776
+ schedule: exportSchedule,
777
+ strategy: exportStrategy,
778
+ compression: exportCompression,
779
+ };
780
+ }
781
+ }
782
+
491
783
  const params =
492
784
  pipeType === "materialized" || pipeType === "copy"
493
785
  ? []
@@ -513,6 +805,7 @@ export function parsePipeFile(resource: ResourceFile): PipeModel {
513
805
  copyTargetDatasource,
514
806
  copySchedule,
515
807
  copyMode,
808
+ sink,
516
809
  tokens,
517
810
  params,
518
811
  inferredOutputColumns,
@@ -27,6 +27,42 @@ export function stripIndent(line: string): string {
27
27
  return line.trimStart();
28
28
  }
29
29
 
30
+ export interface BlockReadResult {
31
+ lines: string[];
32
+ nextIndex: number;
33
+ }
34
+
35
+ export function readDirectiveBlock(
36
+ lines: string[],
37
+ startIndex: number,
38
+ isDirectiveLine: (line: string) => boolean
39
+ ): BlockReadResult {
40
+ const collected: string[] = [];
41
+ let i = startIndex;
42
+
43
+ while (i < lines.length) {
44
+ const line = (lines[i] ?? "").trim();
45
+ if (isDirectiveLine(line)) {
46
+ break;
47
+ }
48
+ collected.push(line);
49
+ i += 1;
50
+ }
51
+
52
+ let first = 0;
53
+ while (first < collected.length && collected[first] === "") {
54
+ first += 1;
55
+ }
56
+
57
+ let last = collected.length - 1;
58
+ while (last >= first && collected[last] === "") {
59
+ last -= 1;
60
+ }
61
+
62
+ const normalized = first <= last ? collected.slice(first, last + 1) : [];
63
+ return { lines: normalized, nextIndex: i };
64
+ }
65
+
30
66
  export function splitCommaSeparated(input: string): string[] {
31
67
  return input
32
68
  .split(",")
@@ -157,4 +193,3 @@ export function splitTopLevelComma(input: string): string[] {
157
193
 
158
194
  return parts;
159
195
  }
160
-
@@ -52,6 +52,13 @@ export interface DatasourceS3Model {
52
52
  fromTimestamp?: string;
53
53
  }
54
54
 
55
+ export interface DatasourceGCSModel {
56
+ connectionName: string;
57
+ bucketUri: string;
58
+ schedule?: string;
59
+ fromTimestamp?: string;
60
+ }
61
+
55
62
  export interface DatasourceTokenModel {
56
63
  name: string;
57
64
  scope: "READ" | "APPEND";
@@ -63,9 +70,10 @@ export interface DatasourceModel {
63
70
  filePath: string;
64
71
  description?: string;
65
72
  columns: DatasourceColumnModel[];
66
- engine: DatasourceEngineModel;
73
+ engine?: DatasourceEngineModel;
67
74
  kafka?: DatasourceKafkaModel;
68
75
  s3?: DatasourceS3Model;
76
+ gcs?: DatasourceGCSModel;
69
77
  forwardQuery?: string;
70
78
  tokens: DatasourceTokenModel[];
71
79
  sharedWith: string[];
@@ -82,13 +90,34 @@ export interface PipeTokenModel {
82
90
  scope: "READ";
83
91
  }
84
92
 
85
- export type PipeTypeModel = "pipe" | "endpoint" | "materialized" | "copy";
93
+ export type PipeTypeModel = "pipe" | "endpoint" | "materialized" | "copy" | "sink";
94
+
95
+ export interface PipeKafkaSinkModel {
96
+ service: "kafka";
97
+ connectionName: string;
98
+ topic: string;
99
+ schedule: string;
100
+ }
101
+
102
+ export interface PipeS3SinkModel {
103
+ service: "s3";
104
+ connectionName: string;
105
+ bucketUri: string;
106
+ fileTemplate: string;
107
+ format: string;
108
+ schedule: string;
109
+ strategy?: "create_new" | "replace";
110
+ compression?: "none" | "gzip" | "snappy";
111
+ }
112
+
113
+ export type PipeSinkModel = PipeKafkaSinkModel | PipeS3SinkModel;
86
114
 
87
115
  export interface PipeParamModel {
88
116
  name: string;
89
117
  type: string;
90
118
  required: boolean;
91
- defaultValue?: string | number;
119
+ defaultValue?: string | number | boolean;
120
+ description?: string;
92
121
  }
93
122
 
94
123
  export interface PipeModel {
@@ -104,6 +133,7 @@ export interface PipeModel {
104
133
  copyTargetDatasource?: string;
105
134
  copySchedule?: string;
106
135
  copyMode?: "append" | "replace";
136
+ sink?: PipeSinkModel;
107
137
  tokens: PipeTokenModel[];
108
138
  params: PipeParamModel[];
109
139
  inferredOutputColumns: string[];
@@ -134,11 +164,20 @@ export interface S3ConnectionModel {
134
164
  secret?: string;
135
165
  }
136
166
 
167
+ export interface GCSConnectionModel {
168
+ kind: "connection";
169
+ name: string;
170
+ filePath: string;
171
+ connectionType: "gcs";
172
+ serviceAccountCredentialsJson: string;
173
+ }
174
+
137
175
  export type ParsedResource =
138
176
  | DatasourceModel
139
177
  | PipeModel
140
178
  | KafkaConnectionModel
141
- | S3ConnectionModel;
179
+ | S3ConnectionModel
180
+ | GCSConnectionModel;
142
181
 
143
182
  export interface MigrationResult {
144
183
  success: boolean;