@tinybirdco/sdk 0.0.50 → 0.0.52

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. package/README.md +5 -0
  2. package/dist/api/deploy.d.ts.map +1 -1
  3. package/dist/api/deploy.js +13 -3
  4. package/dist/api/deploy.js.map +1 -1
  5. package/dist/api/deploy.test.js +32 -0
  6. package/dist/api/deploy.test.js.map +1 -1
  7. package/dist/cli/commands/deploy.d.ts +2 -0
  8. package/dist/cli/commands/deploy.d.ts.map +1 -1
  9. package/dist/cli/commands/deploy.js +1 -0
  10. package/dist/cli/commands/deploy.js.map +1 -1
  11. package/dist/cli/commands/deploy.test.d.ts +2 -0
  12. package/dist/cli/commands/deploy.test.d.ts.map +1 -0
  13. package/dist/cli/commands/deploy.test.js +68 -0
  14. package/dist/cli/commands/deploy.test.js.map +1 -0
  15. package/dist/cli/commands/migrate.test.js +247 -2
  16. package/dist/cli/commands/migrate.test.js.map +1 -1
  17. package/dist/cli/index.js +2 -0
  18. package/dist/cli/index.js.map +1 -1
  19. package/dist/codegen/type-mapper.d.ts.map +1 -1
  20. package/dist/codegen/type-mapper.js +70 -7
  21. package/dist/codegen/type-mapper.js.map +1 -1
  22. package/dist/codegen/type-mapper.test.js +9 -0
  23. package/dist/codegen/type-mapper.test.js.map +1 -1
  24. package/dist/generator/datasource.d.ts.map +1 -1
  25. package/dist/generator/datasource.js +19 -0
  26. package/dist/generator/datasource.js.map +1 -1
  27. package/dist/generator/datasource.test.js +16 -0
  28. package/dist/generator/datasource.test.js.map +1 -1
  29. package/dist/generator/pipe.d.ts.map +1 -1
  30. package/dist/generator/pipe.js +92 -3
  31. package/dist/generator/pipe.js.map +1 -1
  32. package/dist/generator/pipe.test.js +19 -0
  33. package/dist/generator/pipe.test.js.map +1 -1
  34. package/dist/index.d.ts +1 -1
  35. package/dist/index.d.ts.map +1 -1
  36. package/dist/index.js.map +1 -1
  37. package/dist/migrate/emit-ts.d.ts.map +1 -1
  38. package/dist/migrate/emit-ts.js +11 -0
  39. package/dist/migrate/emit-ts.js.map +1 -1
  40. package/dist/migrate/parse-datasource.d.ts.map +1 -1
  41. package/dist/migrate/parse-datasource.js +37 -0
  42. package/dist/migrate/parse-datasource.js.map +1 -1
  43. package/dist/migrate/parse-pipe.d.ts.map +1 -1
  44. package/dist/migrate/parse-pipe.js +212 -93
  45. package/dist/migrate/parse-pipe.js.map +1 -1
  46. package/dist/migrate/parser-utils.d.ts.map +1 -1
  47. package/dist/migrate/parser-utils.js +3 -1
  48. package/dist/migrate/parser-utils.js.map +1 -1
  49. package/dist/migrate/types.d.ts +7 -0
  50. package/dist/migrate/types.d.ts.map +1 -1
  51. package/dist/schema/datasource.d.ts +16 -0
  52. package/dist/schema/datasource.d.ts.map +1 -1
  53. package/dist/schema/datasource.js +16 -0
  54. package/dist/schema/datasource.js.map +1 -1
  55. package/dist/schema/datasource.test.js +39 -0
  56. package/dist/schema/datasource.test.js.map +1 -1
  57. package/package.json +1 -1
  58. package/src/api/deploy.test.ts +55 -0
  59. package/src/api/deploy.ts +19 -3
  60. package/src/cli/commands/deploy.test.ts +82 -0
  61. package/src/cli/commands/deploy.ts +3 -0
  62. package/src/cli/commands/migrate.test.ts +357 -2
  63. package/src/cli/index.ts +5 -0
  64. package/src/codegen/type-mapper.test.ts +18 -0
  65. package/src/codegen/type-mapper.ts +79 -7
  66. package/src/generator/datasource.test.ts +22 -0
  67. package/src/generator/datasource.ts +25 -0
  68. package/src/generator/pipe.test.ts +21 -0
  69. package/src/generator/pipe.ts +119 -3
  70. package/src/index.ts +1 -0
  71. package/src/migrate/emit-ts.ts +13 -0
  72. package/src/migrate/parse-datasource.ts +72 -1
  73. package/src/migrate/parse-pipe.ts +250 -111
  74. package/src/migrate/parser-utils.ts +5 -1
  75. package/src/migrate/types.ts +8 -0
  76. package/src/schema/datasource.test.ts +53 -0
  77. package/src/schema/datasource.ts +38 -0
@@ -70,35 +70,41 @@ function inferOutputColumnsFromSql(sql: string): string[] {
70
70
  }
71
71
 
72
72
  function mapTemplateFunctionToParamType(func: string): string | null {
73
- const known = new Set([
74
- "String",
75
- "UUID",
76
- "Int8",
77
- "Int16",
78
- "Int32",
79
- "Int64",
80
- "UInt8",
81
- "UInt16",
82
- "UInt32",
83
- "UInt64",
84
- "Float32",
85
- "Float64",
86
- "Boolean",
87
- "Bool",
88
- "Date",
89
- "DateTime",
90
- "DateTime64",
91
- "Array",
92
- ]);
93
-
94
- if (known.has(func)) {
95
- return func;
73
+ const lower = func.toLowerCase();
74
+ const aliases: Record<string, string> = {
75
+ string: "String",
76
+ uuid: "UUID",
77
+ int: "Int32",
78
+ integer: "Int32",
79
+ int8: "Int8",
80
+ int16: "Int16",
81
+ int32: "Int32",
82
+ int64: "Int64",
83
+ uint8: "UInt8",
84
+ uint16: "UInt16",
85
+ uint32: "UInt32",
86
+ uint64: "UInt64",
87
+ float32: "Float32",
88
+ float64: "Float64",
89
+ boolean: "Boolean",
90
+ bool: "Boolean",
91
+ date: "Date",
92
+ datetime: "DateTime",
93
+ datetime64: "DateTime64",
94
+ array: "Array",
95
+ column: "column",
96
+ json: "JSON",
97
+ };
98
+
99
+ const mapped = aliases[lower];
100
+ if (mapped) {
101
+ return mapped;
96
102
  }
97
103
 
98
- if (func.startsWith("DateTime64")) {
104
+ if (lower.startsWith("datetime64")) {
99
105
  return "DateTime64";
100
106
  }
101
- if (func.startsWith("DateTime")) {
107
+ if (lower.startsWith("datetime")) {
102
108
  return "DateTime";
103
109
  }
104
110
 
@@ -157,8 +163,7 @@ function parseParamOptions(rawArgs: string[]): {
157
163
  required?: boolean;
158
164
  description?: string;
159
165
  } {
160
- let positionalDefault: string | number | boolean | undefined;
161
- let keywordDefault: string | number | boolean | undefined;
166
+ let defaultValue: string | number | boolean | undefined;
162
167
  let required: boolean | undefined;
163
168
  let description: string | undefined;
164
169
 
@@ -170,15 +175,13 @@ function parseParamOptions(rawArgs: string[]): {
170
175
 
171
176
  const keyword = parseKeywordArgument(trimmed);
172
177
  if (!keyword) {
173
- if (positionalDefault === undefined) {
174
- positionalDefault = parseParamDefault(trimmed);
175
- }
178
+ defaultValue = parseParamDefault(trimmed);
176
179
  continue;
177
180
  }
178
181
 
179
182
  const keyLower = keyword.key.toLowerCase();
180
183
  if (keyLower === "default") {
181
- keywordDefault = parseParamDefault(keyword.value);
184
+ defaultValue = parseParamDefault(keyword.value);
182
185
  continue;
183
186
  }
184
187
  if (keyLower === "required") {
@@ -195,17 +198,122 @@ function parseParamOptions(rawArgs: string[]): {
195
198
  }
196
199
  }
197
200
 
198
- let defaultValue = keywordDefault ?? positionalDefault;
199
- if (keywordDefault !== undefined && positionalDefault !== undefined) {
200
- if (keywordDefault !== positionalDefault) {
201
- throw new Error(
202
- `Parameter has conflicting positional and keyword defaults: "${positionalDefault}" and "${keywordDefault}".`
203
- );
201
+ return { defaultValue, required, description };
202
+ }
203
+
204
+ function extractTemplateFunctionCalls(expression: string): Array<{
205
+ functionName: string;
206
+ argsRaw: string;
207
+ fullCall: string;
208
+ start: number;
209
+ end: number;
210
+ }> {
211
+ const maskParenthesesInsideQuotes = (value: string): string => {
212
+ let output = "";
213
+ let inSingleQuote = false;
214
+ let inDoubleQuote = false;
215
+
216
+ for (let i = 0; i < value.length; i += 1) {
217
+ const char = value[i] ?? "";
218
+ const prev = i > 0 ? value[i - 1] ?? "" : "";
219
+
220
+ if (char === "'" && !inDoubleQuote && prev !== "\\") {
221
+ inSingleQuote = !inSingleQuote;
222
+ output += char;
223
+ continue;
224
+ }
225
+ if (char === '"' && !inSingleQuote && prev !== "\\") {
226
+ inDoubleQuote = !inDoubleQuote;
227
+ output += char;
228
+ continue;
229
+ }
230
+
231
+ if ((inSingleQuote || inDoubleQuote) && (char === "(" || char === ")")) {
232
+ output += " ";
233
+ continue;
234
+ }
235
+
236
+ output += char;
204
237
  }
205
- defaultValue = positionalDefault;
238
+
239
+ return output;
240
+ };
241
+
242
+ const maskedExpression = maskParenthesesInsideQuotes(expression);
243
+ const callRegex = /([a-zA-Z_][a-zA-Z0-9_]*)\s*\(([^()]*)\)/g;
244
+ const calls: Array<{
245
+ functionName: string;
246
+ argsRaw: string;
247
+ fullCall: string;
248
+ start: number;
249
+ end: number;
250
+ }> = [];
251
+ let match: RegExpExecArray | null = callRegex.exec(maskedExpression);
252
+ while (match) {
253
+ const start = match.index;
254
+ const fullCall = expression.slice(start, start + (match[0]?.length ?? 0));
255
+ const openParen = fullCall.indexOf("(");
256
+ const closeParen = fullCall.lastIndexOf(")");
257
+
258
+ calls.push({
259
+ functionName: match[1] ?? "",
260
+ argsRaw: openParen >= 0 && closeParen > openParen ? fullCall.slice(openParen + 1, closeParen) : "",
261
+ fullCall,
262
+ start,
263
+ end: start + fullCall.length,
264
+ });
265
+ match = callRegex.exec(maskedExpression);
206
266
  }
267
+ return calls;
268
+ }
207
269
 
208
- return { defaultValue, required, description };
270
+ function shouldParseTemplateFunctionAsParam(mappedType: string): boolean {
271
+ return mappedType !== "Array";
272
+ }
273
+
274
+ function normalizeSqlPlaceholders(sql: string): string {
275
+ const placeholderRegex = /\{\{\s*([^{}]+?)\s*\}\}/g;
276
+ return sql.replace(placeholderRegex, (fullMatch, rawExpression) => {
277
+ const expression = String(rawExpression);
278
+ const calls = extractTemplateFunctionCalls(expression);
279
+ if (calls.length === 0) {
280
+ return fullMatch;
281
+ }
282
+
283
+ let rewritten = "";
284
+ let cursor = 0;
285
+ let changed = false;
286
+ for (const call of calls) {
287
+ rewritten += expression.slice(cursor, call.start);
288
+
289
+ let replacement = call.fullCall;
290
+ const normalizedFunction = String(call.functionName).toLowerCase();
291
+ if (normalizedFunction !== "error" && normalizedFunction !== "custom_error") {
292
+ const mappedType = mapTemplateFunctionToParamType(String(call.functionName));
293
+ if (mappedType && shouldParseTemplateFunctionAsParam(mappedType)) {
294
+ const args = splitTopLevelComma(String(call.argsRaw));
295
+ if (args.length > 0) {
296
+ const paramName = args[0]?.trim() ?? "";
297
+ if (/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(paramName)) {
298
+ replacement = `${String(call.functionName)}(${paramName})`;
299
+ }
300
+ }
301
+ }
302
+ }
303
+
304
+ if (replacement !== call.fullCall) {
305
+ changed = true;
306
+ }
307
+ rewritten += replacement;
308
+ cursor = call.end;
309
+ }
310
+ rewritten += expression.slice(cursor);
311
+
312
+ if (!changed) {
313
+ return fullMatch;
314
+ }
315
+ return `{{ ${rewritten.trim()} }}`;
316
+ });
209
317
  }
210
318
 
211
319
  function inferParamsFromSql(
@@ -213,100 +321,105 @@ function inferParamsFromSql(
213
321
  filePath: string,
214
322
  resourceName: string
215
323
  ): PipeParamModel[] {
216
- const regex = /\{\{\s*([a-zA-Z_][a-zA-Z0-9_]*)\(([^{}]*)\)\s*\}\}/g;
324
+ const regex = /\{\{\s*([^{}]+?)\s*\}\}/g;
217
325
  const params = new Map<string, PipeParamModel>();
218
326
  let match: RegExpExecArray | null = regex.exec(sql);
219
327
 
220
328
  while (match) {
221
- const templateFunction = match[1] ?? "";
222
- const argsRaw = match[2] ?? "";
223
- const args = splitTopLevelComma(argsRaw);
224
- if (args.length === 0) {
225
- throw new MigrationParseError(
226
- filePath,
227
- "pipe",
228
- resourceName,
229
- `Invalid template placeholder: "${match[0]}"`
230
- );
231
- }
232
-
233
- const paramName = args[0]?.trim();
234
- if (!paramName || !/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(paramName)) {
235
- throw new MigrationParseError(
236
- filePath,
237
- "pipe",
238
- resourceName,
239
- `Unsupported parameter name in placeholder: "${match[0]}"`
240
- );
241
- }
329
+ const expression = match[1] ?? "";
330
+ const calls = extractTemplateFunctionCalls(expression);
331
+
332
+ for (const call of calls) {
333
+ const templateFunction = call.functionName;
334
+ const normalizedTemplateFunction = templateFunction.toLowerCase();
335
+ if (normalizedTemplateFunction === "error" || normalizedTemplateFunction === "custom_error") {
336
+ continue;
337
+ }
242
338
 
243
- const mappedType = mapTemplateFunctionToParamType(templateFunction);
244
- if (!mappedType) {
245
- throw new MigrationParseError(
246
- filePath,
247
- "pipe",
248
- resourceName,
249
- `Unsupported placeholder function in strict mode: "${templateFunction}"`
250
- );
251
- }
339
+ const mappedType = mapTemplateFunctionToParamType(templateFunction);
340
+ if (!mappedType) {
341
+ throw new MigrationParseError(
342
+ filePath,
343
+ "pipe",
344
+ resourceName,
345
+ `Unsupported placeholder function in strict mode: "${templateFunction}"`
346
+ );
347
+ }
252
348
 
253
- let defaultValue: string | number | boolean | undefined;
254
- let required: boolean | undefined;
255
- let description: string | undefined;
256
- if (args.length > 1) {
257
- try {
258
- const parsedOptions = parseParamOptions(args.slice(1));
259
- defaultValue = parsedOptions.defaultValue;
260
- required = parsedOptions.required;
261
- description = parsedOptions.description;
262
- } catch (error) {
349
+ const args = splitTopLevelComma(call.argsRaw);
350
+ if (args.length === 0) {
263
351
  throw new MigrationParseError(
264
352
  filePath,
265
353
  "pipe",
266
354
  resourceName,
267
- (error as Error).message
355
+ `Invalid template placeholder: "${call.fullCall}"`
268
356
  );
269
357
  }
270
- }
271
358
 
272
- const existing = params.get(paramName);
273
- if (existing) {
274
- if (existing.type !== mappedType) {
359
+ const paramName = args[0]?.trim() ?? "";
360
+ const isIdentifier = /^[a-zA-Z_][a-zA-Z0-9_]*$/.test(paramName);
361
+ if (!isIdentifier) {
362
+ if (mappedType === "column") {
363
+ continue;
364
+ }
275
365
  throw new MigrationParseError(
276
366
  filePath,
277
367
  "pipe",
278
368
  resourceName,
279
- `Parameter "${paramName}" is used with multiple types: "${existing.type}" and "${mappedType}".`
369
+ `Unsupported parameter name in placeholder: "{{ ${call.fullCall} }}"`
280
370
  );
281
371
  }
282
- if (existing.defaultValue !== undefined && defaultValue !== undefined) {
283
- if (existing.defaultValue !== defaultValue) {
372
+
373
+ let defaultValue: string | number | boolean | undefined;
374
+ let required: boolean | undefined;
375
+ let description: string | undefined;
376
+ if (args.length > 1 && shouldParseTemplateFunctionAsParam(mappedType)) {
377
+ try {
378
+ const parsedOptions = parseParamOptions(args.slice(1));
379
+ defaultValue = parsedOptions.defaultValue;
380
+ required = parsedOptions.required;
381
+ description = parsedOptions.description;
382
+ } catch (error) {
284
383
  throw new MigrationParseError(
285
384
  filePath,
286
385
  "pipe",
287
386
  resourceName,
288
- `Parameter "${paramName}" uses multiple defaults: "${existing.defaultValue}" and "${defaultValue}".`
387
+ (error as Error).message
289
388
  );
290
389
  }
291
390
  }
292
- if (existing.defaultValue === undefined && defaultValue !== undefined) {
293
- existing.defaultValue = defaultValue;
294
- }
295
- if (existing.description === undefined && description !== undefined) {
296
- existing.description = description;
391
+
392
+ const existing = params.get(paramName);
393
+ if (existing) {
394
+ if (existing.type !== mappedType) {
395
+ // Keep the last explicit type seen in SQL.
396
+ existing.type = mappedType;
397
+ }
398
+
399
+ // Match backend merge semantics: prefer the latest truthy value.
400
+ if (defaultValue !== undefined || existing.defaultValue !== undefined) {
401
+ existing.defaultValue =
402
+ (defaultValue as string | number | boolean | undefined) || existing.defaultValue;
403
+ }
404
+ if (description !== undefined || existing.description !== undefined) {
405
+ existing.description = description || existing.description;
406
+ }
407
+ const optionalInAnyUsage =
408
+ existing.required === false ||
409
+ required === false ||
410
+ existing.defaultValue !== undefined ||
411
+ defaultValue !== undefined;
412
+ existing.required = !optionalInAnyUsage;
413
+ } else {
414
+ const isRequired = required ?? defaultValue === undefined;
415
+ params.set(paramName, {
416
+ name: paramName,
417
+ type: mappedType,
418
+ required: isRequired,
419
+ defaultValue,
420
+ description,
421
+ });
297
422
  }
298
- const optionalInAnyUsage =
299
- existing.required === false || required === false || defaultValue !== undefined;
300
- existing.required = !optionalInAnyUsage;
301
- } else {
302
- const isRequired = required ?? defaultValue === undefined;
303
- params.set(paramName, {
304
- name: paramName,
305
- type: mappedType,
306
- required: isRequired,
307
- defaultValue,
308
- description,
309
- });
310
423
  }
311
424
 
312
425
  match = regex.exec(sql);
@@ -355,9 +468,21 @@ function parseToken(filePath: string, resourceName: string, value: string): Pipe
355
468
  return { name: tokenName, scope: "READ" };
356
469
  }
357
470
 
471
+ function normalizeExportStrategy(rawValue: string): "create_new" | "replace" {
472
+ const normalized = parseQuotedValue(rawValue).toLowerCase();
473
+ if (normalized === "create_new") {
474
+ return "create_new";
475
+ }
476
+ if (normalized === "replace" || normalized === "truncate") {
477
+ return "replace";
478
+ }
479
+ throw new Error(`Unsupported sink strategy in strict mode: "${rawValue}"`);
480
+ }
481
+
358
482
  export function parsePipeFile(resource: ResourceFile): PipeModel {
359
483
  const lines = splitLines(resource.content);
360
484
  const nodes: PipeModel["nodes"] = [];
485
+ const rawNodeSqls: string[] = [];
361
486
  const tokens: PipeTokenModel[] = [];
362
487
  let description: string | undefined;
363
488
  let pipeType: PipeModel["type"] = "pipe";
@@ -458,10 +583,11 @@ export function parsePipeFile(resource: ResourceFile): PipeModel {
458
583
  );
459
584
  }
460
585
 
586
+ rawNodeSqls.push(sql);
461
587
  nodes.push({
462
588
  name: nodeName,
463
589
  description: nodeDescription,
464
- sql,
590
+ sql: normalizeSqlPlaceholders(sql),
465
591
  });
466
592
 
467
593
  i = sqlBlock.nextIndex;
@@ -566,8 +692,9 @@ export function parsePipeFile(resource: ResourceFile): PipeModel {
566
692
  exportSchedule = parseQuotedValue(value);
567
693
  break;
568
694
  case "EXPORT_STRATEGY": {
569
- const normalized = parseQuotedValue(value).toLowerCase();
570
- if (normalized !== "create_new" && normalized !== "replace") {
695
+ try {
696
+ exportStrategy = normalizeExportStrategy(value);
697
+ } catch {
571
698
  throw new MigrationParseError(
572
699
  resource.filePath,
573
700
  "pipe",
@@ -575,7 +702,19 @@ export function parsePipeFile(resource: ResourceFile): PipeModel {
575
702
  `Unsupported EXPORT_STRATEGY in strict mode: "${value}"`
576
703
  );
577
704
  }
578
- exportStrategy = normalized;
705
+ break;
706
+ }
707
+ case "EXPORT_WRITE_STRATEGY": {
708
+ try {
709
+ exportStrategy = normalizeExportStrategy(value);
710
+ } catch {
711
+ throw new MigrationParseError(
712
+ resource.filePath,
713
+ "pipe",
714
+ resource.name,
715
+ `Unsupported EXPORT_WRITE_STRATEGY in strict mode: "${value}"`
716
+ );
717
+ }
579
718
  break;
580
719
  }
581
720
  case "EXPORT_COMPRESSION": {
@@ -784,7 +923,7 @@ export function parsePipeFile(resource: ResourceFile): PipeModel {
784
923
  pipeType === "materialized" || pipeType === "copy"
785
924
  ? []
786
925
  : inferParamsFromSql(
787
- nodes.map((node) => node.sql).join("\n"),
926
+ rawNodeSqls.join("\n"),
788
927
  resource.filePath,
789
928
  resource.name
790
929
  );
@@ -72,7 +72,11 @@ export function splitCommaSeparated(input: string): string[] {
72
72
 
73
73
  export function parseQuotedValue(input: string): string {
74
74
  const trimmed = input.trim();
75
- if (trimmed.startsWith('"') && trimmed.endsWith('"') && trimmed.length >= 2) {
75
+ if (
76
+ trimmed.length >= 2 &&
77
+ ((trimmed.startsWith('"') && trimmed.endsWith('"')) ||
78
+ (trimmed.startsWith("'") && trimmed.endsWith("'")))
79
+ ) {
76
80
  return trimmed.slice(1, -1);
77
81
  }
78
82
  return trimmed;
@@ -64,6 +64,13 @@ export interface DatasourceTokenModel {
64
64
  scope: "READ" | "APPEND";
65
65
  }
66
66
 
67
+ export interface DatasourceIndexModel {
68
+ name: string;
69
+ expr: string;
70
+ type: string;
71
+ granularity: number;
72
+ }
73
+
67
74
  export interface DatasourceModel {
68
75
  kind: "datasource";
69
76
  name: string;
@@ -71,6 +78,7 @@ export interface DatasourceModel {
71
78
  description?: string;
72
79
  columns: DatasourceColumnModel[];
73
80
  engine?: DatasourceEngineModel;
81
+ indexes: DatasourceIndexModel[];
74
82
  kafka?: DatasourceKafkaModel;
75
83
  s3?: DatasourceS3Model;
76
84
  gcs?: DatasourceGCSModel;
@@ -146,6 +146,59 @@ describe("Datasource Schema", () => {
146
146
  expect(ds.options.gcs?.connection._name).toBe("my_gcs");
147
147
  expect(ds.options.gcs?.bucketUri).toBe("gs://my-bucket/events/*.csv");
148
148
  });
149
+
150
+ it("accepts datasource indexes", () => {
151
+ const ds = defineDatasource("events", {
152
+ schema: { id: t.string() },
153
+ indexes: [
154
+ {
155
+ name: "id_set",
156
+ expr: "id",
157
+ type: "set(100)",
158
+ granularity: 1,
159
+ },
160
+ ],
161
+ });
162
+
163
+ expect(ds.options.indexes).toEqual([
164
+ {
165
+ name: "id_set",
166
+ expr: "id",
167
+ type: "set(100)",
168
+ granularity: 1,
169
+ },
170
+ ]);
171
+ });
172
+
173
+ it("validates datasource index fields", () => {
174
+ expect(() =>
175
+ defineDatasource("events", {
176
+ schema: { id: t.string() },
177
+ indexes: [{ name: "invalid name", expr: "id", type: "set(100)", granularity: 1 }],
178
+ })
179
+ ).toThrow("Invalid datasource index name");
180
+
181
+ expect(() =>
182
+ defineDatasource("events", {
183
+ schema: { id: t.string() },
184
+ indexes: [{ name: "id_set", expr: "", type: "set(100)", granularity: 1 }],
185
+ })
186
+ ).toThrow('Invalid datasource index "id_set": expr is required.');
187
+
188
+ expect(() =>
189
+ defineDatasource("events", {
190
+ schema: { id: t.string() },
191
+ indexes: [{ name: "id_set", expr: "id", type: "", granularity: 1 }],
192
+ })
193
+ ).toThrow('Invalid datasource index "id_set": type is required.');
194
+
195
+ expect(() =>
196
+ defineDatasource("events", {
197
+ schema: { id: t.string() },
198
+ indexes: [{ name: "id_set", expr: "id", type: "set(100)", granularity: 0 }],
199
+ })
200
+ ).toThrow('Invalid datasource index "id_set": granularity must be a positive integer.');
201
+ });
149
202
  });
150
203
 
151
204
  describe("isDatasourceDefinition", () => {
@@ -102,6 +102,21 @@ export interface GCSConfig {
102
102
  fromTimestamp?: string;
103
103
  }
104
104
 
105
+ /**
106
+ * Datasource index configuration.
107
+ * Emits as: `<name> <expr> TYPE <type> GRANULARITY <n>`
108
+ */
109
+ export interface DatasourceIndex {
110
+ /** Index name */
111
+ name: string;
112
+ /** Index expression */
113
+ expr: string;
114
+ /** Index type and parameters (for example: `set(100)`) */
115
+ type: string;
116
+ /** Index granularity */
117
+ granularity: number;
118
+ }
119
+
105
120
  /**
106
121
  * Options for defining a datasource
107
122
  */
@@ -127,6 +142,8 @@ export interface DatasourceOptions<TSchema extends SchemaDefinition> {
127
142
  * This should be the SELECT clause only (no FROM/WHERE).
128
143
  */
129
144
  forwardQuery?: string;
145
+ /** Secondary indexes for MergeTree-family engines */
146
+ indexes?: readonly DatasourceIndex[];
130
147
  /** Kafka ingestion configuration */
131
148
  kafka?: KafkaConfig;
132
149
  /** S3 ingestion configuration */
@@ -195,6 +212,27 @@ export function defineDatasource<TSchema extends SchemaDefinition>(
195
212
  throw new Error("Datasource can only define one ingestion option: `kafka`, `s3`, or `gcs`.");
196
213
  }
197
214
 
215
+ if (options.indexes) {
216
+ for (const index of options.indexes) {
217
+ if (!index.name || /\s/.test(index.name)) {
218
+ throw new Error(
219
+ `Invalid datasource index name: "${index.name}". Index names must be non-empty and cannot contain whitespace.`
220
+ );
221
+ }
222
+ if (!index.expr?.trim()) {
223
+ throw new Error(`Invalid datasource index "${index.name}": expr is required.`);
224
+ }
225
+ if (!index.type?.trim()) {
226
+ throw new Error(`Invalid datasource index "${index.name}": type is required.`);
227
+ }
228
+ if (!Number.isInteger(index.granularity) || index.granularity <= 0) {
229
+ throw new Error(
230
+ `Invalid datasource index "${index.name}": granularity must be a positive integer.`
231
+ );
232
+ }
233
+ }
234
+ }
235
+
198
236
  return {
199
237
  [DATASOURCE_BRAND]: true,
200
238
  _name: name,