@tinybirdco/sdk 0.0.49 → 0.0.51

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. package/README.md +19 -2
  2. package/dist/cli/commands/migrate.d.ts.map +1 -1
  3. package/dist/cli/commands/migrate.js +36 -1
  4. package/dist/cli/commands/migrate.js.map +1 -1
  5. package/dist/cli/commands/migrate.test.js +307 -2
  6. package/dist/cli/commands/migrate.test.js.map +1 -1
  7. package/dist/codegen/type-mapper.d.ts.map +1 -1
  8. package/dist/codegen/type-mapper.js +70 -7
  9. package/dist/codegen/type-mapper.js.map +1 -1
  10. package/dist/codegen/type-mapper.test.js +9 -0
  11. package/dist/codegen/type-mapper.test.js.map +1 -1
  12. package/dist/generator/connection.d.ts.map +1 -1
  13. package/dist/generator/connection.js +14 -1
  14. package/dist/generator/connection.js.map +1 -1
  15. package/dist/generator/connection.test.js +20 -4
  16. package/dist/generator/connection.test.js.map +1 -1
  17. package/dist/generator/datasource.d.ts.map +1 -1
  18. package/dist/generator/datasource.js +39 -10
  19. package/dist/generator/datasource.js.map +1 -1
  20. package/dist/generator/datasource.test.js +42 -1
  21. package/dist/generator/datasource.test.js.map +1 -1
  22. package/dist/generator/pipe.d.ts.map +1 -1
  23. package/dist/generator/pipe.js +92 -3
  24. package/dist/generator/pipe.js.map +1 -1
  25. package/dist/generator/pipe.test.js +19 -0
  26. package/dist/generator/pipe.test.js.map +1 -1
  27. package/dist/index.d.ts +3 -3
  28. package/dist/index.d.ts.map +1 -1
  29. package/dist/index.js +1 -1
  30. package/dist/index.js.map +1 -1
  31. package/dist/migrate/emit-ts.d.ts.map +1 -1
  32. package/dist/migrate/emit-ts.js +56 -11
  33. package/dist/migrate/emit-ts.js.map +1 -1
  34. package/dist/migrate/parse-connection.d.ts +2 -2
  35. package/dist/migrate/parse-connection.d.ts.map +1 -1
  36. package/dist/migrate/parse-connection.js +34 -4
  37. package/dist/migrate/parse-connection.js.map +1 -1
  38. package/dist/migrate/parse-datasource.d.ts.map +1 -1
  39. package/dist/migrate/parse-datasource.js +39 -2
  40. package/dist/migrate/parse-datasource.js.map +1 -1
  41. package/dist/migrate/parse-pipe.d.ts.map +1 -1
  42. package/dist/migrate/parse-pipe.js +212 -93
  43. package/dist/migrate/parse-pipe.js.map +1 -1
  44. package/dist/migrate/parser-utils.d.ts.map +1 -1
  45. package/dist/migrate/parser-utils.js +3 -1
  46. package/dist/migrate/parser-utils.js.map +1 -1
  47. package/dist/migrate/types.d.ts +22 -1
  48. package/dist/migrate/types.d.ts.map +1 -1
  49. package/dist/schema/connection.d.ts +34 -1
  50. package/dist/schema/connection.d.ts.map +1 -1
  51. package/dist/schema/connection.js +26 -0
  52. package/dist/schema/connection.js.map +1 -1
  53. package/dist/schema/connection.test.js +35 -1
  54. package/dist/schema/connection.test.js.map +1 -1
  55. package/dist/schema/datasource.d.ts +32 -1
  56. package/dist/schema/datasource.d.ts.map +1 -1
  57. package/dist/schema/datasource.js +19 -2
  58. package/dist/schema/datasource.js.map +1 -1
  59. package/dist/schema/datasource.test.js +71 -3
  60. package/dist/schema/datasource.test.js.map +1 -1
  61. package/package.json +1 -1
  62. package/src/cli/commands/migrate.test.ts +448 -2
  63. package/src/cli/commands/migrate.ts +39 -1
  64. package/src/codegen/type-mapper.test.ts +18 -0
  65. package/src/codegen/type-mapper.ts +79 -7
  66. package/src/generator/connection.test.ts +29 -4
  67. package/src/generator/connection.ts +25 -2
  68. package/src/generator/datasource.test.ts +52 -1
  69. package/src/generator/datasource.ts +47 -10
  70. package/src/generator/pipe.test.ts +21 -0
  71. package/src/generator/pipe.ts +119 -3
  72. package/src/index.ts +6 -0
  73. package/src/migrate/emit-ts.ts +67 -14
  74. package/src/migrate/parse-connection.ts +56 -6
  75. package/src/migrate/parse-datasource.ts +74 -3
  76. package/src/migrate/parse-pipe.ts +250 -111
  77. package/src/migrate/parser-utils.ts +5 -1
  78. package/src/migrate/types.ts +26 -1
  79. package/src/schema/connection.test.ts +48 -0
  80. package/src/schema/connection.ts +60 -1
  81. package/src/schema/datasource.test.ts +91 -3
  82. package/src/schema/datasource.ts +62 -3
@@ -81,10 +81,36 @@ export interface S3ConnectionDefinition {
81
81
  readonly options: S3ConnectionOptions;
82
82
  }
83
83
 
84
+ /**
85
+ * Options for defining a GCS connection
86
+ */
87
+ export interface GCSConnectionOptions {
88
+ /** Service account credentials JSON */
89
+ serviceAccountCredentialsJson: string;
90
+ }
91
+
92
+ /**
93
+ * GCS-specific connection definition
94
+ */
95
+ export interface GCSConnectionDefinition {
96
+ readonly [CONNECTION_BRAND]: true;
97
+ /** Connection name */
98
+ readonly _name: string;
99
+ /** Type marker for inference */
100
+ readonly _type: "connection";
101
+ /** Connection type */
102
+ readonly _connectionType: "gcs";
103
+ /** GCS options */
104
+ readonly options: GCSConnectionOptions;
105
+ }
106
+
84
107
  /**
85
108
  * A connection definition - union of all connection types
86
109
  */
87
- export type ConnectionDefinition = KafkaConnectionDefinition | S3ConnectionDefinition;
110
+ export type ConnectionDefinition =
111
+ | KafkaConnectionDefinition
112
+ | S3ConnectionDefinition
113
+ | GCSConnectionDefinition;
88
114
 
89
115
  function validateConnectionName(name: string): void {
90
116
  if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
@@ -166,6 +192,32 @@ export function defineS3Connection(
166
192
  };
167
193
  }
168
194
 
195
+ /**
196
+ * Define a GCS connection
197
+ *
198
+ * @param name - The connection name (must be valid identifier)
199
+ * @param options - GCS connection configuration
200
+ * @returns A connection definition that can be used in a project
201
+ */
202
+ export function defineGCSConnection(
203
+ name: string,
204
+ options: GCSConnectionOptions
205
+ ): GCSConnectionDefinition {
206
+ validateConnectionName(name);
207
+
208
+ if (!options.serviceAccountCredentialsJson.trim()) {
209
+ throw new Error("GCS connection `serviceAccountCredentialsJson` is required.");
210
+ }
211
+
212
+ return {
213
+ [CONNECTION_BRAND]: true,
214
+ _name: name,
215
+ _type: "connection",
216
+ _connectionType: "gcs",
217
+ options,
218
+ };
219
+ }
220
+
169
221
  /**
170
222
  * Check if a value is a connection definition
171
223
  */
@@ -192,6 +244,13 @@ export function isS3ConnectionDefinition(value: unknown): value is S3ConnectionD
192
244
  return isConnectionDefinition(value) && value._connectionType === "s3";
193
245
  }
194
246
 
247
+ /**
248
+ * Check if a value is a GCS connection definition
249
+ */
250
+ export function isGCSConnectionDefinition(value: unknown): value is GCSConnectionDefinition {
251
+ return isConnectionDefinition(value) && value._connectionType === "gcs";
252
+ }
253
+
195
254
  /**
196
255
  * Get the connection type from a connection definition
197
256
  */
@@ -9,7 +9,7 @@ import {
9
9
  } from "./datasource.js";
10
10
  import { t } from "./types.js";
11
11
  import { engine } from "./engines.js";
12
- import { defineKafkaConnection, defineS3Connection } from "./connection.js";
12
+ import { defineKafkaConnection, defineS3Connection, defineGCSConnection } from "./connection.js";
13
13
 
14
14
  describe("Datasource Schema", () => {
15
15
  describe("defineDatasource", () => {
@@ -88,7 +88,7 @@ describe("Datasource Schema", () => {
88
88
  expect(ds2._name).toBe("events_v2");
89
89
  });
90
90
 
91
- it("throws when both kafka and s3 ingestion are configured", () => {
91
+ it("throws when multiple ingestion configs are configured", () => {
92
92
  const kafkaConn = defineKafkaConnection("my_kafka", {
93
93
  bootstrapServers: "kafka.example.com:9092",
94
94
  });
@@ -96,6 +96,9 @@ describe("Datasource Schema", () => {
96
96
  region: "us-east-1",
97
97
  arn: "arn:aws:iam::123456789012:role/tinybird-s3-access",
98
98
  });
99
+ const gcsConn = defineGCSConnection("my_gcs", {
100
+ serviceAccountCredentialsJson: '{{ tb_secret("GCS_SERVICE_ACCOUNT_CREDENTIALS_JSON") }}',
101
+ });
99
102
 
100
103
  expect(() =>
101
104
  defineDatasource("events", {
@@ -109,7 +112,92 @@ describe("Datasource Schema", () => {
109
112
  bucketUri: "s3://my-bucket/events/*.csv",
110
113
  },
111
114
  })
112
- ).toThrow("Datasource cannot define both `kafka` and `s3` ingestion options.");
115
+ ).toThrow("Datasource can only define one ingestion option: `kafka`, `s3`, or `gcs`.");
116
+
117
+ expect(() =>
118
+ defineDatasource("events_gcs", {
119
+ schema: { id: t.string() },
120
+ kafka: {
121
+ connection: kafkaConn,
122
+ topic: "events",
123
+ },
124
+ gcs: {
125
+ connection: gcsConn,
126
+ bucketUri: "gs://my-bucket/events/*.csv",
127
+ },
128
+ })
129
+ ).toThrow("Datasource can only define one ingestion option: `kafka`, `s3`, or `gcs`.");
130
+ });
131
+
132
+ it("accepts gcs ingestion configuration", () => {
133
+ const gcsConn = defineGCSConnection("my_gcs", {
134
+ serviceAccountCredentialsJson: '{{ tb_secret("GCS_SERVICE_ACCOUNT_CREDENTIALS_JSON") }}',
135
+ });
136
+
137
+ const ds = defineDatasource("events_gcs", {
138
+ schema: { id: t.string() },
139
+ gcs: {
140
+ connection: gcsConn,
141
+ bucketUri: "gs://my-bucket/events/*.csv",
142
+ schedule: "@auto",
143
+ },
144
+ });
145
+
146
+ expect(ds.options.gcs?.connection._name).toBe("my_gcs");
147
+ expect(ds.options.gcs?.bucketUri).toBe("gs://my-bucket/events/*.csv");
148
+ });
149
+
150
+ it("accepts datasource indexes", () => {
151
+ const ds = defineDatasource("events", {
152
+ schema: { id: t.string() },
153
+ indexes: [
154
+ {
155
+ name: "id_set",
156
+ expr: "id",
157
+ type: "set(100)",
158
+ granularity: 1,
159
+ },
160
+ ],
161
+ });
162
+
163
+ expect(ds.options.indexes).toEqual([
164
+ {
165
+ name: "id_set",
166
+ expr: "id",
167
+ type: "set(100)",
168
+ granularity: 1,
169
+ },
170
+ ]);
171
+ });
172
+
173
+ it("validates datasource index fields", () => {
174
+ expect(() =>
175
+ defineDatasource("events", {
176
+ schema: { id: t.string() },
177
+ indexes: [{ name: "invalid name", expr: "id", type: "set(100)", granularity: 1 }],
178
+ })
179
+ ).toThrow("Invalid datasource index name");
180
+
181
+ expect(() =>
182
+ defineDatasource("events", {
183
+ schema: { id: t.string() },
184
+ indexes: [{ name: "id_set", expr: "", type: "set(100)", granularity: 1 }],
185
+ })
186
+ ).toThrow('Invalid datasource index "id_set": expr is required.');
187
+
188
+ expect(() =>
189
+ defineDatasource("events", {
190
+ schema: { id: t.string() },
191
+ indexes: [{ name: "id_set", expr: "id", type: "", granularity: 1 }],
192
+ })
193
+ ).toThrow('Invalid datasource index "id_set": type is required.');
194
+
195
+ expect(() =>
196
+ defineDatasource("events", {
197
+ schema: { id: t.string() },
198
+ indexes: [{ name: "id_set", expr: "id", type: "set(100)", granularity: 0 }],
199
+ })
200
+ ).toThrow('Invalid datasource index "id_set": granularity must be a positive integer.');
113
201
  });
114
202
  });
115
203
 
@@ -5,7 +5,11 @@
5
5
 
6
6
  import { getModifiers, isTypeValidator, type AnyTypeValidator } from "./types.js";
7
7
  import type { EngineConfig } from "./engines.js";
8
- import type { KafkaConnectionDefinition, S3ConnectionDefinition } from "./connection.js";
8
+ import type {
9
+ KafkaConnectionDefinition,
10
+ S3ConnectionDefinition,
11
+ GCSConnectionDefinition,
12
+ } from "./connection.js";
9
13
  import type { TokenDefinition, DatasourceTokenScope } from "./token.js";
10
14
 
11
15
  // Symbol for brand typing - use Symbol.for() for global registry
@@ -84,6 +88,35 @@ export interface S3Config {
84
88
  fromTimestamp?: string;
85
89
  }
86
90
 
91
+ /**
92
+ * GCS import configuration for a datasource
93
+ */
94
+ export interface GCSConfig {
95
+ /** GCS connection to use */
96
+ connection: GCSConnectionDefinition;
97
+ /** GCS bucket URI, for example: gs://my-bucket/path/*.csv */
98
+ bucketUri: string;
99
+ /** Import schedule, for example: @auto or @once */
100
+ schedule?: string;
101
+ /** Incremental import lower bound timestamp expression */
102
+ fromTimestamp?: string;
103
+ }
104
+
105
+ /**
106
+ * Datasource index configuration.
107
+ * Emits as: `<name> <expr> TYPE <type> GRANULARITY <n>`
108
+ */
109
+ export interface DatasourceIndex {
110
+ /** Index name */
111
+ name: string;
112
+ /** Index expression */
113
+ expr: string;
114
+ /** Index type and parameters (for example: `set(100)`) */
115
+ type: string;
116
+ /** Index granularity */
117
+ granularity: number;
118
+ }
119
+
87
120
  /**
88
121
  * Options for defining a datasource
89
122
  */
@@ -109,10 +142,14 @@ export interface DatasourceOptions<TSchema extends SchemaDefinition> {
109
142
  * This should be the SELECT clause only (no FROM/WHERE).
110
143
  */
111
144
  forwardQuery?: string;
145
+ /** Secondary indexes for MergeTree-family engines */
146
+ indexes?: readonly DatasourceIndex[];
112
147
  /** Kafka ingestion configuration */
113
148
  kafka?: KafkaConfig;
114
149
  /** S3 ingestion configuration */
115
150
  s3?: S3Config;
151
+ /** GCS ingestion configuration */
152
+ gcs?: GCSConfig;
116
153
  }
117
154
 
118
155
  /**
@@ -170,8 +207,30 @@ export function defineDatasource<TSchema extends SchemaDefinition>(
170
207
  );
171
208
  }
172
209
 
173
- if (options.kafka && options.s3) {
174
- throw new Error("Datasource cannot define both `kafka` and `s3` ingestion options.");
210
+ const ingestionConfigCount = [options.kafka, options.s3, options.gcs].filter(Boolean).length;
211
+ if (ingestionConfigCount > 1) {
212
+ throw new Error("Datasource can only define one ingestion option: `kafka`, `s3`, or `gcs`.");
213
+ }
214
+
215
+ if (options.indexes) {
216
+ for (const index of options.indexes) {
217
+ if (!index.name || /\s/.test(index.name)) {
218
+ throw new Error(
219
+ `Invalid datasource index name: "${index.name}". Index names must be non-empty and cannot contain whitespace.`
220
+ );
221
+ }
222
+ if (!index.expr?.trim()) {
223
+ throw new Error(`Invalid datasource index "${index.name}": expr is required.`);
224
+ }
225
+ if (!index.type?.trim()) {
226
+ throw new Error(`Invalid datasource index "${index.name}": type is required.`);
227
+ }
228
+ if (!Number.isInteger(index.granularity) || index.granularity <= 0) {
229
+ throw new Error(
230
+ `Invalid datasource index "${index.name}": granularity must be a positive integer.`
231
+ );
232
+ }
233
+ }
175
234
  }
176
235
 
177
236
  return {