@tinybirdco/sdk 0.0.47 → 0.0.49

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. package/README.md +53 -3
  2. package/dist/cli/commands/migrate.d.ts.map +1 -1
  3. package/dist/cli/commands/migrate.js +32 -0
  4. package/dist/cli/commands/migrate.js.map +1 -1
  5. package/dist/cli/commands/migrate.test.js +585 -8
  6. package/dist/cli/commands/migrate.test.js.map +1 -1
  7. package/dist/generator/connection.d.ts.map +1 -1
  8. package/dist/generator/connection.js +3 -0
  9. package/dist/generator/connection.js.map +1 -1
  10. package/dist/generator/connection.test.js +8 -0
  11. package/dist/generator/connection.test.js.map +1 -1
  12. package/dist/generator/datasource.d.ts.map +1 -1
  13. package/dist/generator/datasource.js +3 -0
  14. package/dist/generator/datasource.js.map +1 -1
  15. package/dist/generator/datasource.test.js +50 -0
  16. package/dist/generator/datasource.test.js.map +1 -1
  17. package/dist/generator/pipe.d.ts.map +1 -1
  18. package/dist/generator/pipe.js +31 -1
  19. package/dist/generator/pipe.js.map +1 -1
  20. package/dist/generator/pipe.test.js +50 -1
  21. package/dist/generator/pipe.test.js.map +1 -1
  22. package/dist/index.d.ts +3 -2
  23. package/dist/index.d.ts.map +1 -1
  24. package/dist/index.js +3 -1
  25. package/dist/index.js.map +1 -1
  26. package/dist/index.test.js +3 -0
  27. package/dist/index.test.js.map +1 -1
  28. package/dist/migrate/emit-ts.d.ts.map +1 -1
  29. package/dist/migrate/emit-ts.js +159 -41
  30. package/dist/migrate/emit-ts.js.map +1 -1
  31. package/dist/migrate/parse-connection.d.ts.map +1 -1
  32. package/dist/migrate/parse-connection.js +13 -2
  33. package/dist/migrate/parse-connection.js.map +1 -1
  34. package/dist/migrate/parse-datasource.d.ts.map +1 -1
  35. package/dist/migrate/parse-datasource.js +115 -52
  36. package/dist/migrate/parse-datasource.js.map +1 -1
  37. package/dist/migrate/parse-pipe.d.ts.map +1 -1
  38. package/dist/migrate/parse-pipe.js +257 -46
  39. package/dist/migrate/parse-pipe.js.map +1 -1
  40. package/dist/migrate/parser-utils.d.ts +5 -0
  41. package/dist/migrate/parser-utils.d.ts.map +1 -1
  42. package/dist/migrate/parser-utils.js +22 -0
  43. package/dist/migrate/parser-utils.js.map +1 -1
  44. package/dist/migrate/types.d.ts +25 -3
  45. package/dist/migrate/types.d.ts.map +1 -1
  46. package/dist/schema/connection.d.ts +2 -0
  47. package/dist/schema/connection.d.ts.map +1 -1
  48. package/dist/schema/connection.js.map +1 -1
  49. package/dist/schema/datasource.d.ts +3 -1
  50. package/dist/schema/datasource.d.ts.map +1 -1
  51. package/dist/schema/datasource.js +8 -1
  52. package/dist/schema/datasource.js.map +1 -1
  53. package/dist/schema/datasource.test.js +13 -0
  54. package/dist/schema/datasource.test.js.map +1 -1
  55. package/dist/schema/engines.d.ts.map +1 -1
  56. package/dist/schema/engines.js +3 -0
  57. package/dist/schema/engines.js.map +1 -1
  58. package/dist/schema/engines.test.js +16 -0
  59. package/dist/schema/engines.test.js.map +1 -1
  60. package/dist/schema/pipe.d.ts +90 -3
  61. package/dist/schema/pipe.d.ts.map +1 -1
  62. package/dist/schema/pipe.js +84 -0
  63. package/dist/schema/pipe.js.map +1 -1
  64. package/dist/schema/pipe.test.js +70 -1
  65. package/dist/schema/pipe.test.js.map +1 -1
  66. package/dist/schema/secret.d.ts +6 -0
  67. package/dist/schema/secret.d.ts.map +1 -0
  68. package/dist/schema/secret.js +14 -0
  69. package/dist/schema/secret.js.map +1 -0
  70. package/dist/schema/secret.test.d.ts +2 -0
  71. package/dist/schema/secret.test.d.ts.map +1 -0
  72. package/dist/schema/secret.test.js +14 -0
  73. package/dist/schema/secret.test.js.map +1 -0
  74. package/dist/schema/types.d.ts +5 -0
  75. package/dist/schema/types.d.ts.map +1 -1
  76. package/dist/schema/types.js +6 -0
  77. package/dist/schema/types.js.map +1 -1
  78. package/dist/schema/types.test.js +12 -0
  79. package/dist/schema/types.test.js.map +1 -1
  80. package/package.json +1 -1
  81. package/src/cli/commands/migrate.test.ts +859 -8
  82. package/src/cli/commands/migrate.ts +35 -0
  83. package/src/generator/connection.test.ts +13 -0
  84. package/src/generator/connection.ts +4 -0
  85. package/src/generator/datasource.test.ts +60 -0
  86. package/src/generator/datasource.ts +3 -0
  87. package/src/generator/pipe.test.ts +56 -1
  88. package/src/generator/pipe.ts +41 -1
  89. package/src/index.test.ts +4 -0
  90. package/src/index.ts +12 -0
  91. package/src/migrate/emit-ts.ts +161 -48
  92. package/src/migrate/parse-connection.ts +15 -2
  93. package/src/migrate/parse-datasource.ts +134 -71
  94. package/src/migrate/parse-pipe.ts +364 -69
  95. package/src/migrate/parser-utils.ts +36 -1
  96. package/src/migrate/types.ts +28 -3
  97. package/src/schema/connection.ts +2 -0
  98. package/src/schema/datasource.test.ts +17 -0
  99. package/src/schema/datasource.ts +13 -2
  100. package/src/schema/engines.test.ts +18 -0
  101. package/src/schema/engines.ts +3 -0
  102. package/src/schema/pipe.test.ts +89 -0
  103. package/src/schema/pipe.ts +188 -4
  104. package/src/schema/secret.test.ts +19 -0
  105. package/src/schema/secret.ts +16 -0
  106. package/src/schema/types.test.ts +14 -0
  107. package/src/schema/types.ts +10 -0
@@ -4,45 +4,44 @@ import {
4
4
  isBlank,
5
5
  parseDirectiveLine,
6
6
  parseQuotedValue,
7
+ readDirectiveBlock,
7
8
  splitCommaSeparated,
8
9
  splitLines,
9
10
  splitTopLevelComma,
10
- stripIndent,
11
11
  } from "./parser-utils.js";
12
12
 
13
- interface BlockReadResult {
14
- lines: string[];
15
- nextIndex: number;
16
- }
17
-
18
- function readIndentedBlock(lines: string[], startIndex: number): BlockReadResult {
19
- const collected: string[] = [];
20
- let i = startIndex;
21
-
22
- while (i < lines.length) {
23
- const line = lines[i] ?? "";
24
- if (line.startsWith(" ")) {
25
- collected.push(stripIndent(line));
26
- i += 1;
27
- continue;
28
- }
29
-
30
- if (isBlank(line)) {
31
- let j = i + 1;
32
- while (j < lines.length && isBlank(lines[j] ?? "")) {
33
- j += 1;
34
- }
35
- if (j < lines.length && (lines[j] ?? "").startsWith(" ")) {
36
- collected.push("");
37
- i += 1;
38
- continue;
39
- }
40
- }
41
-
42
- break;
13
+ const DATASOURCE_DIRECTIVES = new Set([
14
+ "DESCRIPTION",
15
+ "SCHEMA",
16
+ "FORWARD_QUERY",
17
+ "SHARED_WITH",
18
+ "ENGINE",
19
+ "ENGINE_SORTING_KEY",
20
+ "ENGINE_PARTITION_KEY",
21
+ "ENGINE_PRIMARY_KEY",
22
+ "ENGINE_TTL",
23
+ "ENGINE_VER",
24
+ "ENGINE_SIGN",
25
+ "ENGINE_VERSION",
26
+ "ENGINE_SUMMING_COLUMNS",
27
+ "ENGINE_SETTINGS",
28
+ "KAFKA_CONNECTION_NAME",
29
+ "KAFKA_TOPIC",
30
+ "KAFKA_GROUP_ID",
31
+ "KAFKA_AUTO_OFFSET_RESET",
32
+ "IMPORT_CONNECTION_NAME",
33
+ "IMPORT_BUCKET_URI",
34
+ "IMPORT_SCHEDULE",
35
+ "IMPORT_FROM_TIMESTAMP",
36
+ "TOKEN",
37
+ ]);
38
+
39
+ function isDatasourceDirectiveLine(line: string): boolean {
40
+ if (!line) {
41
+ return false;
43
42
  }
44
-
45
- return { lines: collected, nextIndex: i };
43
+ const { key } = parseDirectiveLine(line);
44
+ return DATASOURCE_DIRECTIVES.has(key);
46
45
  }
47
46
 
48
47
  function findTokenOutsideContexts(input: string, token: string): number {
@@ -95,9 +94,19 @@ function parseColumnLine(filePath: string, resourceName: string, rawLine: string
95
94
  );
96
95
  }
97
96
 
98
- const columnName = line.slice(0, firstSpace).trim();
97
+ const rawColumnName = line.slice(0, firstSpace).trim();
98
+ const columnName = normalizeColumnName(rawColumnName);
99
99
  let rest = line.slice(firstSpace + 1).trim();
100
100
 
101
+ if (!columnName) {
102
+ throw new MigrationParseError(
103
+ filePath,
104
+ "datasource",
105
+ resourceName,
106
+ `Invalid schema column name: "${rawLine}"`
107
+ );
108
+ }
109
+
101
110
  const codecMatch = rest.match(/\s+CODEC\((.+)\)\s*$/);
102
111
  const codec = codecMatch ? codecMatch[1].trim() : undefined;
103
112
  if (codecMatch?.index !== undefined) {
@@ -136,6 +145,17 @@ function parseColumnLine(filePath: string, resourceName: string, rawLine: string
136
145
  };
137
146
  }
138
147
 
148
+ function normalizeColumnName(value: string): string {
149
+ const trimmed = value.trim();
150
+ if (
151
+ (trimmed.startsWith("`") && trimmed.endsWith("`")) ||
152
+ (trimmed.startsWith('"') && trimmed.endsWith('"'))
153
+ ) {
154
+ return trimmed.slice(1, -1);
155
+ }
156
+ return trimmed;
157
+ }
158
+
139
159
  function parseEngineSettings(value: string): Record<string, string | number | boolean> {
140
160
  const raw = parseQuotedValue(value);
141
161
  const parts = splitTopLevelComma(raw);
@@ -176,7 +196,15 @@ function parseEngineSettings(value: string): Record<string, string | number | bo
176
196
  }
177
197
 
178
198
  function parseToken(filePath: string, resourceName: string, value: string): DatasourceTokenModel {
179
- const parts = value.split(/\s+/).filter(Boolean);
199
+ const trimmed = value.trim();
200
+ const quotedMatch = trimmed.match(/^"([^"]+)"\s+(READ|APPEND)$/);
201
+ if (quotedMatch) {
202
+ const name = quotedMatch[1];
203
+ const scope = quotedMatch[2] as "READ" | "APPEND";
204
+ return { name, scope };
205
+ }
206
+
207
+ const parts = trimmed.split(/\s+/).filter(Boolean);
180
208
  if (parts.length < 2) {
181
209
  throw new MigrationParseError(
182
210
  filePath,
@@ -195,7 +223,11 @@ function parseToken(filePath: string, resourceName: string, value: string): Data
195
223
  );
196
224
  }
197
225
 
198
- const name = parts[0];
226
+ const rawName = parts[0] ?? "";
227
+ const name =
228
+ rawName.startsWith('"') && rawName.endsWith('"') && rawName.length >= 2
229
+ ? rawName.slice(1, -1)
230
+ : rawName;
199
231
  const scope = parts[1];
200
232
  if (scope !== "READ" && scope !== "APPEND") {
201
233
  throw new MigrationParseError(
@@ -223,6 +255,7 @@ export function parseDatasourceFile(resource: ResourceFile): DatasourceModel {
223
255
  let primaryKey: string[] | undefined;
224
256
  let ttl: string | undefined;
225
257
  let ver: string | undefined;
258
+ let isDeleted: string | undefined;
226
259
  let sign: string | undefined;
227
260
  let version: string | undefined;
228
261
  let summingColumns: string[] | undefined;
@@ -232,6 +265,7 @@ export function parseDatasourceFile(resource: ResourceFile): DatasourceModel {
232
265
  let kafkaTopic: string | undefined;
233
266
  let kafkaGroupId: string | undefined;
234
267
  let kafkaAutoOffsetReset: "earliest" | "latest" | undefined;
268
+ let kafkaStoreRawValue: boolean | undefined;
235
269
 
236
270
  let importConnectionName: string | undefined;
237
271
  let importBucketUri: string | undefined;
@@ -242,28 +276,20 @@ export function parseDatasourceFile(resource: ResourceFile): DatasourceModel {
242
276
  while (i < lines.length) {
243
277
  const rawLine = lines[i] ?? "";
244
278
  const line = rawLine.trim();
245
- if (!line) {
279
+ if (!line || line.startsWith("#")) {
246
280
  i += 1;
247
281
  continue;
248
282
  }
249
283
 
250
284
  if (line === "DESCRIPTION >") {
251
- const block = readIndentedBlock(lines, i + 1);
252
- if (block.lines.length === 0) {
253
- throw new MigrationParseError(
254
- resource.filePath,
255
- "datasource",
256
- resource.name,
257
- "DESCRIPTION block is empty."
258
- );
259
- }
285
+ const block = readDirectiveBlock(lines, i + 1, isDatasourceDirectiveLine);
260
286
  description = block.lines.join("\n");
261
287
  i = block.nextIndex;
262
288
  continue;
263
289
  }
264
290
 
265
291
  if (line === "SCHEMA >") {
266
- const block = readIndentedBlock(lines, i + 1);
292
+ const block = readDirectiveBlock(lines, i + 1, isDatasourceDirectiveLine);
267
293
  if (block.lines.length === 0) {
268
294
  throw new MigrationParseError(
269
295
  resource.filePath,
@@ -273,7 +299,7 @@ export function parseDatasourceFile(resource: ResourceFile): DatasourceModel {
273
299
  );
274
300
  }
275
301
  for (const schemaLine of block.lines) {
276
- if (isBlank(schemaLine)) {
302
+ if (isBlank(schemaLine) || schemaLine.trim().startsWith("#")) {
277
303
  continue;
278
304
  }
279
305
  columns.push(parseColumnLine(resource.filePath, resource.name, schemaLine));
@@ -283,7 +309,7 @@ export function parseDatasourceFile(resource: ResourceFile): DatasourceModel {
283
309
  }
284
310
 
285
311
  if (line === "FORWARD_QUERY >") {
286
- const block = readIndentedBlock(lines, i + 1);
312
+ const block = readDirectiveBlock(lines, i + 1, isDatasourceDirectiveLine);
287
313
  if (block.lines.length === 0) {
288
314
  throw new MigrationParseError(
289
315
  resource.filePath,
@@ -298,7 +324,7 @@ export function parseDatasourceFile(resource: ResourceFile): DatasourceModel {
298
324
  }
299
325
 
300
326
  if (line === "SHARED_WITH >") {
301
- const block = readIndentedBlock(lines, i + 1);
327
+ const block = readDirectiveBlock(lines, i + 1, isDatasourceDirectiveLine);
302
328
  for (const sharedLine of block.lines) {
303
329
  const normalized = sharedLine.trim().replace(/,$/, "");
304
330
  if (normalized) {
@@ -329,6 +355,9 @@ export function parseDatasourceFile(resource: ResourceFile): DatasourceModel {
329
355
  case "ENGINE_VER":
330
356
  ver = parseQuotedValue(value);
331
357
  break;
358
+ case "ENGINE_IS_DELETED":
359
+ isDeleted = parseQuotedValue(value);
360
+ break;
332
361
  case "ENGINE_SIGN":
333
362
  sign = parseQuotedValue(value);
334
363
  break;
@@ -370,6 +399,23 @@ export function parseDatasourceFile(resource: ResourceFile): DatasourceModel {
370
399
  }
371
400
  kafkaAutoOffsetReset = value;
372
401
  break;
402
+ case "KAFKA_STORE_RAW_VALUE": {
403
+ const normalized = value.toLowerCase();
404
+ if (normalized === "true" || normalized === "1") {
405
+ kafkaStoreRawValue = true;
406
+ break;
407
+ }
408
+ if (normalized === "false" || normalized === "0") {
409
+ kafkaStoreRawValue = false;
410
+ break;
411
+ }
412
+ throw new MigrationParseError(
413
+ resource.filePath,
414
+ "datasource",
415
+ resource.name,
416
+ `Invalid KAFKA_STORE_RAW_VALUE value: "${value}"`
417
+ );
418
+ }
373
419
  case "IMPORT_CONNECTION_NAME":
374
420
  importConnectionName = parseQuotedValue(value);
375
421
  break;
@@ -406,16 +452,25 @@ export function parseDatasourceFile(resource: ResourceFile): DatasourceModel {
406
452
  );
407
453
  }
408
454
 
409
- if (!engineType) {
410
- throw new MigrationParseError(
411
- resource.filePath,
412
- "datasource",
413
- resource.name,
414
- "ENGINE directive is required."
415
- );
455
+ const hasEngineDirectives =
456
+ sortingKey.length > 0 ||
457
+ partitionKey !== undefined ||
458
+ (primaryKey !== undefined && primaryKey.length > 0) ||
459
+ ttl !== undefined ||
460
+ ver !== undefined ||
461
+ isDeleted !== undefined ||
462
+ sign !== undefined ||
463
+ version !== undefined ||
464
+ (summingColumns !== undefined && summingColumns.length > 0) ||
465
+ settings !== undefined;
466
+
467
+ if (!engineType && hasEngineDirectives) {
468
+ // Tinybird defaults to MergeTree when ENGINE is omitted.
469
+ // If engine-specific options are present, preserve them by inferring MergeTree.
470
+ engineType = "MergeTree";
416
471
  }
417
472
 
418
- if (sortingKey.length === 0) {
473
+ if (engineType && sortingKey.length === 0) {
419
474
  throw new MigrationParseError(
420
475
  resource.filePath,
421
476
  "datasource",
@@ -425,12 +480,17 @@ export function parseDatasourceFile(resource: ResourceFile): DatasourceModel {
425
480
  }
426
481
 
427
482
  const kafka =
428
- kafkaConnectionName || kafkaTopic || kafkaGroupId || kafkaAutoOffsetReset
483
+ kafkaConnectionName ||
484
+ kafkaTopic ||
485
+ kafkaGroupId ||
486
+ kafkaAutoOffsetReset ||
487
+ kafkaStoreRawValue !== undefined
429
488
  ? {
430
489
  connectionName: kafkaConnectionName ?? "",
431
490
  topic: kafkaTopic ?? "",
432
491
  groupId: kafkaGroupId,
433
492
  autoOffsetReset: kafkaAutoOffsetReset,
493
+ storeRawValue: kafkaStoreRawValue,
434
494
  }
435
495
  : undefined;
436
496
 
@@ -477,18 +537,21 @@ export function parseDatasourceFile(resource: ResourceFile): DatasourceModel {
477
537
  filePath: resource.filePath,
478
538
  description,
479
539
  columns,
480
- engine: {
481
- type: engineType,
482
- sortingKey,
483
- partitionKey,
484
- primaryKey,
485
- ttl,
486
- ver,
487
- sign,
488
- version,
489
- summingColumns,
490
- settings,
491
- },
540
+ engine: engineType
541
+ ? {
542
+ type: engineType,
543
+ sortingKey,
544
+ partitionKey,
545
+ primaryKey,
546
+ ttl,
547
+ ver,
548
+ isDeleted,
549
+ sign,
550
+ version,
551
+ summingColumns,
552
+ settings,
553
+ }
554
+ : undefined,
492
555
  kafka,
493
556
  s3,
494
557
  forwardQuery,