tinybird 0.0.1.dev189__py3-none-any.whl → 0.0.1.dev191__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of tinybird might be problematic.

tinybird/prompts.py CHANGED
@@ -1010,3 +1010,22 @@ Current README.md file:
  <readme>[readme content here]</readme>
  </readme_instructions>
  """
+
+
+ def quarantine_prompt(datasource_definition: str) -> str:
+ return f"""
+ - You are an expert in Tinybird.
+ - You are given a list of rows that went to quarantine during ingestion because of data quality issues.
+ - Return the errors in a human readable format so the user can understand what is the problem and fix it.
+ - Be concise and to the point.
+ - Do not mention clickhouse tables. Refer to data sources instead.
+ - The possible fixes recommended fixes are:
+ - Changing a column type in the datasource definition: tell the user what to change in the datasource file, then build again before appending the data.
+ - Changing the data because of the wrong data type: tell the user what to change in the data file, then append the data again.
+ - The format of the response always inside the tag <quarantine_errors>[response]</quarantine_errors>
+ - Do not use markdown format in the response, because it is a CLI output.
+ - The datasource definition is the following:
+ <datasource_definition>
+ {datasource_definition}
+ </datasource_definition>
+ """
tinybird/tb/__cli__.py CHANGED
@@ -4,5 +4,5 @@ __description__ = 'Tinybird Command Line Tool'
  __url__ = 'https://www.tinybird.co/docs/forward/commands'
  __author__ = 'Tinybird'
  __author_email__ = 'support@tinybird.co'
- __version__ = '0.0.1.dev189'
- __revision__ = 'd55a4dc'
+ __version__ = '0.0.1.dev191'
+ __revision__ = 'c8535c7'
tinybird/tb/client.py CHANGED
@@ -1024,11 +1024,7 @@ class TinyB:
  )

  async def kafka_list_topics(self, connection_id: str, timeout=5):
- resp = await self._req(
- f"/v0/connectors/{connection_id}/preview?preview_activity=false",
- connect_timeout=timeout,
- request_timeout=timeout,
- )
+ resp = await self._req(f"/v0/connectors/{connection_id}/preview?preview_activity=false", timeout=timeout)
  return [x["topic"] for x in resp["preview"]]

  async def get_gcp_service_account_details(self) -> Dict[str, Any]:
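
A minimal sketch of calling the updated kafka_list_topics method, assuming local CLI credentials are already configured; the connection id is a placeholder:

    import asyncio
    from tinybird.tb.modules.config import CLIConfig

    async def list_topics(connection_id: str) -> None:
        # get_client() reuses the stored project credentials, as the CLI does elsewhere in this diff
        client = CLIConfig.get_project_config().get_client()
        topics = await client.kafka_list_topics(connection_id, timeout=5)
        print(topics)  # e.g. ["topic_0", "topic_1"]

    asyncio.run(list_topics("<connection_id>"))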
@@ -28,7 +28,9 @@ from tinybird.tb.modules.common import (
  CLIException,
  _get_tb_client,
  coro,
+ echo_json,
  echo_safe_format_table,
+ force_echo,
  getenv_bool,
  try_update_config_with_remote,
  )
@@ -72,7 +74,7 @@ VERSION = f"{__cli__.__version__} (rev {__cli__.__revision__})"
  @click.option("--cloud/--local", is_flag=True, default=False, help="Run against cloud or local.")
  @click.option("--staging", is_flag=True, default=False, help="Run against a staging deployment.")
  @click.option(
- "--output", type=click.Choice(["json", "human"], case_sensitive=False), default="human", help="Output format"
+ "--output", type=click.Choice(["human", "json", "csv"], case_sensitive=False), default="human", help="Output format"
  )
  @click.option("--max-depth", type=int, default=3, help="Maximum depth of the project files.")
  @click.version_option(version=VERSION)
@@ -95,7 +97,7 @@ async def cli(
  Use `OBFUSCATE_REGEX_PATTERN` and `OBFUSCATE_PATTERN_SEPARATOR` environment variables to define a regex pattern and a separator (in case of a single string with multiple regex) to obfuscate secrets in the CLI output.
  """
  # We need to unpatch for our tests not to break
- if output == "json":
+ if output != "human":
  __hide_click_output()
  else:
  if show_tokens or not cloud or ctx.invoked_subcommand == "build":
@@ -195,17 +197,10 @@ async def pull(ctx: Context, force: bool, fmt: bool) -> None:

  @cli.command()
  @click.argument("query", required=False)
- @click.option("--rows_limit", default=100, help="Max number of rows retrieved")
+ @click.option("--rows-limit", default=100, help="Max number of rows retrieved")
  @click.option("--pipeline", default=None, help="The name of the pipe to run the SQL Query")
  @click.option("--pipe", default=None, help="The path to the .pipe file to run the SQL Query of a specific NODE")
  @click.option("--node", default=None, help="The NODE name")
- @click.option(
- "--format",
- "format_",
- type=click.Choice(["json", "csv", "human"], case_sensitive=False),
- default="human",
- help="Output format",
- )
  @click.option("--stats/--no-stats", default=False, help="Show query stats")
  @click.pass_context
  @coro
@@ -216,13 +211,13 @@ async def sql(
  pipeline: Optional[str],
  pipe: Optional[str],
  node: Optional[str],
- format_: str,
  stats: bool,
  ) -> None:
  """Run SQL query over data sources and pipes."""
-
  client = ctx.ensure_object(dict)["client"]
- req_format = "CSVWithNames" if format_ == "csv" else "JSON"
+ output = ctx.ensure_object(dict)["output"]
+
+ req_format = "CSVWithNames" if output == "csv" else "JSON"
  res = None
  try:
  if query:
@@ -275,11 +270,11 @@ async def sql(
  bytes_read = humanfriendly.format_size(stats_dict["bytes_read"])
  click.echo(FeedbackManager.info_query_stats(seconds=seconds, rows=rows_read, bytes=bytes_read))

- if format_ == "csv":
- click.echo(res)
+ if output == "csv":
+ force_echo(str(res))
  elif isinstance(res, dict) and "data" in res and res["data"]:
- if format_ == "json":
- click.echo(json.dumps(res, indent=8))
+ if output == "json":
+ echo_json(res, indent=8)
  else:
  dd = []
  for d in res["data"]:
@@ -424,6 +424,14 @@ async def _analyze(filename: str, client: TinyB, format: str, connector: Optiona
  return meta, data


+ async def analyze_file(filename: str, client: TinyB, format: str):
+ meta, data = await _analyze(filename, client, format)
+ schema = meta["analysis"]["schema"]
+ schema = schema.replace(", ", ",\n ")
+ content = f"""DESCRIPTION >\n Generated from {filename}\n\nSCHEMA >\n {schema}"""
+ return content
+
+
  async def _generate_datafile(
  filename: str,
  client: TinyB,
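
A hedged sketch of the new analyze_file helper (the file path and format are placeholders); it returns the text of a .datasource file inferred from a data sample, and needs a working client since _analyze calls the API:

    import asyncio
    from tinybird.tb.modules.common import analyze_file
    from tinybird.tb.modules.config import CLIConfig

    async def main() -> None:
        client = CLIConfig.get_project_config().get_client()
        content = await analyze_file("fixtures/events.ndjson", client, format="ndjson")
        print(content)  # DESCRIPTION > ... followed by the inferred SCHEMA > block

    asyncio.run(main())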
@@ -971,7 +979,7 @@ async def push_data(
  cb.prev_done = 0 # type: ignore[attr-defined]

  if not silent:
- click.echo(FeedbackManager.gray(message=f"\nImporting data to {datasource_name}..."))
+ click.echo(FeedbackManager.highlight(message=f"\ Appending data to {datasource_name}..."))

  if isinstance(url, list):
  urls = url
@@ -1882,15 +1890,19 @@ def get_gcs_connection_name(project_folder) -> str:
  return get_connection_name(project_folder=project_folder, connection_type="GCS")


- def get_connection_name(project_folder: str, connection_type: str) -> str:
- connection_name = None
+ def get_kafka_connection_name(project_folder: str, connection_name: Optional[str] = None) -> str:
+ return get_connection_name(project_folder=project_folder, connection_type="KAFKA", connection_name=connection_name)
+
+
+ def get_connection_name(project_folder: str, connection_type: str, connection_name: Optional[str] = None) -> str:
  valid_pattern = r"^[a-zA-Z][a-zA-Z0-9_]*$"

  while not connection_name:
+ short_id = str(uuid.uuid4())[:4]
  connection_name = click.prompt(
- f"🔗 Enter a name for your new Tinybird {connection_type} connection (use alphanumeric characters, and underscores)",
- prompt_suffix="\n> ",
+ "Enter a name (only alphanumeric characters and underscores)",
  show_default=True,
+ default=f"{connection_type.lower()}_{short_id}",
  )
  assert isinstance(connection_name, str)

@@ -2238,5 +2250,9 @@ def get_error_event(error: str) -> Tuple[str, str]:
  return error_event, silent_error_msg


- def echo_json(data: Dict[str, Any]) -> None:
- click.echo(json.dumps(data), force_output=True) # type: ignore
+ def force_echo(string: str) -> None:
+ click.echo(string, force_output=True) # type: ignore
+
+
+ def echo_json(data: Dict[str, Any], indent: Union[None, int, str] = None) -> None:
+ force_echo(json.dumps(data, indent=indent))
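
Note that force_output is only understood by the CLI's patched click.echo (see __hide_click_output in cli.py), so these helpers are not meant to be imported into arbitrary scripts. A hedged, standalone sketch of the intended behavior, with a plain print standing in for the patched echo:

    import json
    from typing import Any, Dict, Union

    def force_echo(string: str) -> None:
        # Stand-in for the patched click.echo(..., force_output=True): always reaches stdout,
        # even when --output json/csv silences regular CLI feedback.
        print(string)

    def echo_json(data: Dict[str, Any], indent: Union[None, int, str] = None) -> None:
        force_echo(json.dumps(data, indent=indent))

    echo_json({"name": "events", "rows": 42}, indent=8)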
@@ -18,6 +18,7 @@ from tinybird.tb.modules.common import (
  echo_safe_humanfriendly_tables_format_smart_table,
  get_gcs_connection_name,
  get_gcs_svc_account_creds,
+ get_kafka_connection_name,
  get_s3_connection_name,
  production_aws_iamrole_only,
  run_aws_iamrole_connection_flow,
@@ -26,6 +27,7 @@ from tinybird.tb.modules.common import (
  from tinybird.tb.modules.create import (
  generate_aws_iamrole_connection_file_with_secret,
  generate_gcs_connection_file_with_secrets,
+ generate_kafka_connection_with_secrets,
  )
  from tinybird.tb.modules.feedback_manager import FeedbackManager
  from tinybird.tb.modules.project import Project
@@ -275,3 +277,21 @@ async def connection_create_gcs(ctx: Context) -> None:
  connection_path=connection_path,
  )
  )
+
+
+ @connection_create.command(name="kafka", short_help="Creates a Kafka connection.")
+ @click.option("--name", help="The name of the connection")
+ @click.pass_context
+ @coro
+ async def connection_create_kafka(ctx: Context, name: Optional[str] = None) -> None:
+ """
+ Creates a Kafka connection.
+
+ \b
+ $ tb connection create kafka
+ """
+ click.echo(FeedbackManager.highlight(message="» Creating Kafka connection..."))
+ project: Project = ctx.ensure_object(dict)["project"]
+ name = get_kafka_connection_name(project.folder, name)
+ await generate_kafka_connection_with_secrets(name=name, folder=project.folder)
+ click.echo(FeedbackManager.success(message="✓ Done!"))
@@ -409,10 +409,14 @@ def generate_connection_file(name: str, content: str, folder: str, skip_feedback


  async def generate_aws_iamrole_connection_file_with_secret(
- name: str, service: str, role_arn_secret_name: str, region: str, folder: str
+ name: str, service: str, role_arn_secret_name: str, region: str, folder: str, with_default_secret: bool = False
  ) -> Path:
+ if with_default_secret:
+ default_secret = ', "arn:aws:iam::123456789012:role/my-role"'
+ else:
+ default_secret = ""
  content = f"""TYPE {service}
- S3_ARN {{{{ tb_secret("{role_arn_secret_name}") }}}}
+ S3_ARN {{{{ tb_secret("{role_arn_secret_name}"{default_secret}) }}}}
  S3_REGION {region}
  """
  file_path = generate_connection_file(name, content, folder, skip_feedback=True)
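
A hedged sketch of the new with_default_secret path (connection name, folder and exact output location are assumptions, the function writes through generate_connection_file):

    import asyncio
    from tinybird.tb.modules.create import generate_aws_iamrole_connection_file_with_secret

    async def main() -> None:
        path = await generate_aws_iamrole_connection_file_with_secret(
            "s3_sample",
            service="s3",
            role_arn_secret_name="S3_ARN",
            region="eu-west-1",
            folder=".",
            with_default_secret=True,
        )
        # The rendered file should contain a tb_secret with a placeholder role ARN fallback:
        #   TYPE s3
        #   S3_ARN {{ tb_secret("S3_ARN", "arn:aws:iam::123456789012:role/my-role") }}
        #   S3_REGION eu-west-1
        print(path.read_text())

    asyncio.run(main())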
@@ -483,6 +487,7 @@ async def create_resources_from_data(
  data: str,
  project: Project,
  config: Dict[str, Any],
+ skip_pipes: bool = False,
  ) -> List[Path]:
  local_client = await get_tinybird_local_client(config)
  folder_path = project.path
@@ -495,7 +500,7 @@
  result.append(ds_file)
  name = ds_file.stem
  no_pipes = len(project.get_pipe_files()) == 0
- if no_pipes:
+ if not skip_pipes and no_pipes:
  pipe_file = generate_pipe_file(
  f"{name}_endpoint",
  f"""
@@ -510,7 +515,9 @@ TYPE ENDPOINT
  return result


- async def create_resources_from_url(url: str, project: Project, config: Dict[str, Any]) -> List[Path]:
+ async def create_resources_from_url(
+ url: str, project: Project, config: Dict[str, Any], skip_pipes: bool = False
+ ) -> List[Path]:
  result: List[Path] = []
  local_client = await get_tinybird_local_client(config)
  format = url.split(".")[-1]
@@ -518,7 +525,7 @@ async def create_resources_from_url(url: str, project: Project, config: Dict[str
  result.append(ds_file)
  name = ds_file.stem
  no_pipes = len(project.get_pipe_files()) == 0
- if no_pipes:
+ if not skip_pipes and no_pipes:
  pipe_file = generate_pipe_file(
  f"{name}_endpoint",
  f"""
@@ -531,3 +538,14 @@ TYPE ENDPOINT
  )
  result.append(pipe_file)
  return result
+
+
+ async def generate_kafka_connection_with_secrets(name: str, folder: str) -> Path:
+ content = """TYPE kafka
+ KAFKA_BOOTSTRAP_SERVERS {{ tb_secret("KAFKA_SERVERS", "localhost:9092") }}
+ KAFKA_SECURITY_PROTOCOL SASL_SSL
+ KAFKA_SASL_MECHANISM PLAIN
+ KAFKA_KEY {{ tb_secret("KAFKA_USERNAME", "") }}
+ KAFKA_SECRET {{ tb_secret("KAFKA_PASSWORD", "") }}
+ """
+ return generate_connection_file(name, content, folder, skip_feedback=True)
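
This is the same helper the new `tb connection create kafka` command calls above. A minimal sketch of invoking it directly (the connection name and folder are placeholders); the written file references the KAFKA_SERVERS, KAFKA_USERNAME and KAFKA_PASSWORD secrets with local-friendly defaults:

    import asyncio
    from tinybird.tb.modules.create import generate_kafka_connection_with_secrets

    path = asyncio.run(generate_kafka_connection_with_secrets(name="kafka_sample", folder="."))
    print(path)  # path of the generated .connection file, as returned by generate_connection_file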
@@ -7,27 +7,46 @@ import asyncio
  import json
  import os
  import re
+ import uuid
+ from datetime import datetime
+ from pathlib import Path
  from typing import Optional
+ from urllib.parse import urlparse

  import click
  import humanfriendly
+ import requests
  from click import Context

+ from tinybird.prompts import quarantine_prompt
+ from tinybird.syncasync import sync_to_async
  from tinybird.tb.client import AuthNoTokenException, DoesNotExistException, TinyB
  from tinybird.tb.modules.cli import cli
  from tinybird.tb.modules.common import (
  _analyze,
+ analyze_file,
  coro,
  echo_safe_humanfriendly_tables_format_smart_table,
  get_format_from_filename_or_url,
  load_connector_config,
+ normalize_datasource_name,
  push_data,
  )
+ from tinybird.tb.modules.config import CLIConfig
+ from tinybird.tb.modules.create import (
+ create_resources_from_prompt,
+ generate_aws_iamrole_connection_file_with_secret,
+ generate_gcs_connection_file_with_secrets,
+ generate_kafka_connection_with_secrets,
+ )
  from tinybird.tb.modules.datafile.common import get_name_version
  from tinybird.tb.modules.datafile.fixture import persist_fixture
  from tinybird.tb.modules.exceptions import CLIDatasourceException
  from tinybird.tb.modules.feedback_manager import FeedbackManager
+ from tinybird.tb.modules.llm import LLM
+ from tinybird.tb.modules.llm_utils import extract_xml
  from tinybird.tb.modules.project import Project
+ from tinybird.tb.modules.telemetry import add_telemetry_event


  @cli.group()
@@ -107,33 +126,176 @@ async def datasource_ls(ctx: Context, match: Optional[str], format_: str):


  @datasource.command(name="append")
- @click.argument("datasource_name", required=True)
- @click.argument("url", nargs=-1, required=True)
+ @click.argument("datasource_name", required=False)
+ @click.argument("data", nargs=-1, required=False)
+ @click.option("--url", type=str, help="URL to append data from")
+ @click.option("--file", type=str, help="Local file to append data from")
+ @click.option("--events", type=str, help="Events to append data from")
  @click.option("--concurrency", help="How many files to submit concurrently", default=1, hidden=True)
  @click.pass_context
  @coro
  async def datasource_append(
  ctx: Context,
  datasource_name: str,
- url,
+ data: Optional[str],
+ url: str,
+ file: str,
+ events: str,
  concurrency: int,
  ):
  """
  Appends data to an existing data source from URL, local file or a connector

- - Load from URL `tb datasource append [datasource_name] https://url_to_csv`
+ - Events API: `tb datasource append [datasource_name] --events '{"a":"b, "c":"d"}'`\n
+ - Local File: `tb datasource append [datasource_name] --file /path/to/local/file`\n
+ - Remote URL: `tb datasource append [datasource_name] --url https://url_to_csv`\n
+ - Kafka, S3 and GCS: https://www.tinybird.co/docs/forward/get-data-in/connectors\n

- - Load from local file `tb datasource append [datasource_name] /path/to/local/file`
+ More info: https://www.tinybird.co/docs/forward/get-data-in
  """
-
+ env: str = ctx.ensure_object(dict)["env"]
  client: TinyB = ctx.obj["client"]
- await push_data(
- client,
- datasource_name,
- url,
- mode="append",
- concurrency=concurrency,
- )
+ project: Project = ctx.ensure_object(dict)["project"]
+
+ # If data is passed as argument, we detect if it's a JSON object, a URL or a file
+ if data:
+ try:
+ json.loads(data)
+ events = data
+ except Exception:
+ pass
+ if urlparse(data).scheme in ("http", "https"):
+ url = data
+ if not events and not url:
+ file = data
+
+ # If data is not passed as argument, we use the data from the options
+ if not data:
+ data = file or url or events
+
+ if env == "local":
+ tip = "Did you build your project? Run `tb build` first."
+ else:
+ tip = "Did you deploy your project? Run `tb --cloud deploy` first."
+
+ datasources = await client.datasources()
+ if not datasources:
+ raise CLIDatasourceException(FeedbackManager.error(message=f"No data sources found. {tip}"))
+
+ if datasource_name and datasource_name not in [ds["name"] for ds in datasources]:
+ raise CLIDatasourceException(FeedbackManager.error(message=f"Datasource {datasource_name} not found. {tip}"))
+
+ if not datasource_name:
+ datasource_index = -1
+
+ click.echo(FeedbackManager.info(message="\n? Which data source do you want to ingest data into?"))
+ while datasource_index == -1:
+ for index, datasource in enumerate(datasources):
+ click.echo(f" [{index + 1}] {datasource['name']}")
+ click.echo(
+ FeedbackManager.gray(message="Tip: Run tb datasource append [datasource_name] to skip this step.")
+ )
+
+ datasource_index = click.prompt("\nSelect option", default=1)
+
+ if datasource_index == 0:
+ click.echo(FeedbackManager.warning(message="Datasource type selection cancelled by user"))
+ return None
+
+ try:
+ datasource_name = datasources[int(datasource_index) - 1]["name"]
+ except Exception:
+ datasource_index = -1
+
+ if not datasource_name:
+ raise CLIDatasourceException(FeedbackManager.error_datasource_name())
+
+ if not data:
+ data_index = -1
+ options = (
+ "Events API",
+ "Local File",
+ "Remote URL",
+ )
+ click.echo(FeedbackManager.info(message="\n? How do you want to ingest data?"))
+ while data_index == -1:
+ for index, option in enumerate(options):
+ click.echo(f" [{index + 1}] {option}")
+ click.echo(
+ FeedbackManager.gray(
+ message="Tip: Run tb datasource append [datasource_name] --events | --file | --url to skip this step"
+ )
+ )
+
+ data_index = click.prompt("\nSelect option", default=1)
+
+ if data_index == 0:
+ click.echo(FeedbackManager.warning(message="Data selection cancelled by user"))
+ return None
+
+ try:
+ data_index = int(data_index)
+ except Exception:
+ data_index = -1
+
+ if data_index == 1:
+ events = click.prompt("Events data")
+ elif data_index == 2:
+ data = click.prompt("Path to local file")
+ elif data_index == 3:
+ data = click.prompt("URL to remote file")
+ else:
+ raise CLIDatasourceException(FeedbackManager.error(message="Invalid ingestion option"))
+
+ if events:
+ click.echo(FeedbackManager.highlight(message=f"\n» Sending events to {datasource_name}"))
+ response = await sync_to_async(requests.post)(
+ f"{client.host}/v0/events?name={datasource_name}",
+ headers={"Authorization": f"Bearer {client.token}"},
+ data=events,
+ )
+
+ try:
+ res = response.json()
+ except Exception:
+ raise CLIDatasourceException(FeedbackManager.error(message=response.text))
+
+ successful_rows = res["successful_rows"]
+ quarantined_rows = res["quarantined_rows"]
+ if successful_rows > 0:
+ click.echo(
+ FeedbackManager.success(
+ message=f"✓ {successful_rows} row{'' if successful_rows == 1 else 's'} appended!"
+ )
+ )
+ if quarantined_rows > 0:
+ click.echo(
+ FeedbackManager.error(
+ message=f"✗ {quarantined_rows} row{'' if quarantined_rows == 1 else 's'} went to quarantine"
+ )
+ )
+ await analyze_quarantine(datasource_name, project, client)
+ return
+ else:
+ click.echo(FeedbackManager.highlight(message=f"\n» Appending data to {datasource_name}"))
+ try:
+ await push_data(
+ client,
+ datasource_name,
+ data,
+ mode="append",
+ concurrency=concurrency,
+ silent=True,
+ )
+ except Exception as e:
+ is_quarantined = "quarantine" in str(e)
+ click.echo(FeedbackManager.error(message="✗ " + str(e)))
+ if is_quarantined:
+ await analyze_quarantine(datasource_name, project, client)
+ return
+ else:
+ raise e
+ click.echo(FeedbackManager.success(message="✓ Rows appended!"))


  @datasource.command(name="replace")
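
The new --events path is a thin wrapper around the Events API. A hedged sketch of the equivalent raw request, without the CLI's sync_to_async wrapper; the host, token and data source name are placeholders:

    import requests

    response = requests.post(
        "https://api.tinybird.co/v0/events?name=my_datasource",
        headers={"Authorization": "Bearer <admin_token>"},
        data='{"timestamp": "2024-01-01 00:00:00", "session_id": "abc"}',
    )
    res = response.json()
    # The CLI reports these two counters and runs quarantine analysis when quarantined_rows > 0
    print(res["successful_rows"], res["quarantined_rows"])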
@@ -473,3 +635,311 @@ async def datasource_sync(ctx: Context, datasource_name: str, yes: bool):
  raise
  except Exception as e:
  raise CLIDatasourceException(FeedbackManager.error_syncing_datasource(datasource=datasource_name, error=str(e)))
+
+
+ @datasource.command(name="create")
+ @click.option("--name", type=str, help="Name of the data source")
+ @click.option("--blank", is_flag=True, default=False, help="Create a blank data source")
+ @click.option("--file", type=str, help="Create a data source from a local file")
+ @click.option("--url", type=str, help="Create a data source from a remote URL")
+ @click.option("--connection", type=str, help="Create a data source from a connection")
+ @click.option("--prompt", type=str, help="Create a data source from a prompt")
+ @click.option("--s3", is_flag=True, default=False, help="Create a data source from a S3 connection")
+ @click.option("--gcs", is_flag=True, default=False, help="Create a data source from a GCS connection")
+ @click.option("--kafka", is_flag=True, default=False, help="Create a data source from a Kafka connection")
+ @click.pass_context
+ @coro
+ async def datasource_create(
+ ctx: Context,
+ name: str,
+ blank: bool,
+ file: str,
+ url: str,
+ connection: str,
+ prompt: str,
+ s3: bool,
+ gcs: bool,
+ kafka: bool,
+ ):
+ project: Project = ctx.ensure_object(dict)["project"]
+ client: TinyB = ctx.ensure_object(dict)["client"]
+ config = ctx.ensure_object(dict)["config"]
+ env: str = ctx.ensure_object(dict)["env"]
+
+ if env == "cloud":
+ raise CLIDatasourceException(
+ FeedbackManager.error(message="`tb datasource create` is not available against Tinybird Cloud.")
+ )
+
+ datasource_types = (
+ "Blank",
+ "Local file",
+ "Remote URL",
+ "Kafka",
+ "S3",
+ "GCS",
+ )
+ datasource_type: Optional[str] = None
+ connection_file: Optional[str] = None
+ ds_content = ""
+
+ if file:
+ datasource_type = "Local file"
+ elif url:
+ datasource_type = "Remote URL"
+ elif blank:
+ datasource_type = "Blank"
+ elif connection:
+ connection_files = project.get_connection_files()
+ connection_file = next((f for f in connection_files if f.endswith(f"{connection}.connection")), None)
+ if connection_file:
+ connection_content = Path(connection_file).read_text()
+ if project.is_kafka_connection(connection_content):
+ datasource_type = "Kafka"
+ elif project.is_s3_connection(connection_content):
+ datasource_type = "S3"
+ elif project.is_gcs_connection(connection_content):
+ datasource_type = "GCS"
+ elif s3:
+ datasource_type = "S3"
+ elif gcs:
+ datasource_type = "GCS"
+ elif kafka:
+ datasource_type = "Kafka"
+ elif prompt:
+ click.echo(FeedbackManager.gray(message="\n» Creating .datasource file..."))
+ user_token = config.get("user_token")
+ if not user_token:
+ raise Exception("This action requires authentication. Run 'tb login' first.")
+ project_config = CLIConfig.get_project_config()
+ tb_client: TinyB = project_config.get_client(token=config.get("token"), host=config.get("host"))
+ await create_resources_from_prompt(tb_client, user_token, prompt, project)
+ click.echo(FeedbackManager.success(message="✓ .datasource created!"))
+ return
+
+ if datasource_type is None:
+ click.echo(FeedbackManager.highlight(message="? Where do you want to create your .datasource from?"))
+ datasource_type_index = -1
+
+ while datasource_type_index == -1:
+ for index, datasource_type in enumerate(datasource_types):
+ click.echo(f" [{index + 1}] {datasource_type}")
+ click.echo(
+ FeedbackManager.gray(
+ message="Tip: Run `tb datasource create --file | --url | --connection` to skip this step."
+ )
+ )
+ datasource_type_index = click.prompt("\nSelect option", default=1)
+
+ if datasource_type_index == 0:
+ click.echo(FeedbackManager.warning(message="Datasource type selection cancelled by user"))
+ return None
+
+ try:
+ datasource_type = datasource_types[int(datasource_type_index) - 1]
+ except Exception:
+ datasource_type_index = -1
+
+ if not datasource_type:
+ click.echo(
+ FeedbackManager.error(
+ message=f"Invalid option: {datasource_type_index}. Please select a valid option from the list above."
+ )
+ )
+ return
+
+ connection_required = datasource_type in ("Kafka", "S3", "GCS")
+
+ if connection_required:
+ click.echo(FeedbackManager.gray(message="\n» Creating .datasource file..."))
+ connection_type = datasource_type.lower()
+
+ def get_connection_files():
+ connection_files = []
+ if connection_type == "kafka":
+ connection_files = project.get_kafka_connection_files()
+ elif connection_type == "s3":
+ connection_files = project.get_s3_connection_files()
+ elif connection_type == "gcs":
+ connection_files = project.get_gcs_connection_files()
+ return connection_files
+
+ connection_files = get_connection_files()
+ if len(connection_files) == 0:
+ click.echo(FeedbackManager.error(message=f"x No {datasource_type} connections found."))
+ if click.confirm(
+ FeedbackManager.highlight(message=f"\n? Do you want to create a {datasource_type} connection? [Y/n]"),
+ show_default=False,
+ default=True,
+ ):
+ click.echo(FeedbackManager.gray(message="\n» Creating .connection file..."))
+ default_connection_name = f"{datasource_type.lower()}_{generate_short_id()}"
+ connection_name = click.prompt(
+ FeedbackManager.highlight(message=f"? Connection name [{default_connection_name}]"),
+ show_default=False,
+ default=default_connection_name,
+ )
+ if datasource_type == "Kafka":
+ await generate_kafka_connection_with_secrets(connection_name, folder=project.folder)
+ elif datasource_type == "S3":
+ await generate_aws_iamrole_connection_file_with_secret(
+ connection_name,
+ service="s3",
+ role_arn_secret_name="S3_ARN",
+ region="eu-west-1",
+ folder=project.folder,
+ with_default_secret=True,
+ )
+ elif datasource_type == "GCS":
+ await generate_gcs_connection_file_with_secrets(
+ connection_name,
+ service="gcs",
+ svc_account_creds="GCS_SERVICE_ACCOUNT_CREDENTIALS_JSON",
+ folder=project.folder,
+ )
+ click.echo(FeedbackManager.info(message=f"/connections/{connection_name}.connection"))
+ click.echo(FeedbackManager.success(message="✓ .connection created!"))
+ connection_files = get_connection_files()
+ else:
+ click.echo(
+ FeedbackManager.info(message=f"→ Run `tb connection create {datasource_type.lower()}` to add one.")
+ )
+ return
+
+ if not connection_file:
+ connection_file = connection_files[0]
+ connection_path = Path(connection_file)
+ connection = connection_path.stem
+
+ ds_content = """SCHEMA >
+ `timestamp` DateTime `json:$.timestamp`,
+ `session_id` String `json:$.session_id`
+ """
+
+ if datasource_type == "Local file":
+ click.echo(FeedbackManager.gray(message="\n» Creating .datasource file..."))
+ if not file:
+ file = click.prompt(FeedbackManager.highlight(message="? Path"))
+ if file.startswith("~"):
+ file = os.path.expanduser(file)
+
+ folder_path = project.path
+ path = folder_path / file
+ if not path.exists():
+ path = Path(file)
+
+ data_format = path.suffix.lstrip(".")
+ ds_content = await analyze_file(str(path), client, format=data_format)
+ default_name = normalize_datasource_name(path.stem)
+ name = name or click.prompt(
+ FeedbackManager.highlight(message=f"? Data source name [{default_name}]"),
+ default=default_name,
+ show_default=False,
+ )
+
+ if datasource_type == "Remote URL":
+ click.echo(FeedbackManager.gray(message="\n» Creating .datasource file..."))
+ if not url:
+ url = click.prompt(FeedbackManager.highlight(message="? URL"))
+ format = url.split(".")[-1]
+ ds_content = await analyze_file(url, client, format)
+ default_name = normalize_datasource_name(Path(url).stem)
+ name = name or click.prompt(
+ FeedbackManager.highlight(message=f"? Data source name [{default_name}]"),
+ default=default_name,
+ show_default=False,
+ )
+
+ if datasource_type == "Blank":
+ click.echo(FeedbackManager.gray(message="\n» Creating .datasource file..."))
+
+ if datasource_type not in ("Remote URL", "Local file"):
+ default_name = f"ds_{generate_short_id()}"
+ name = name or click.prompt(
+ FeedbackManager.highlight(message=f"? Data source name [{default_name}]"),
+ default=default_name,
+ show_default=False,
+ )
+
+ if datasource_type == "Kafka":
+ connections = await client.connections("kafka")
+ connection_id = next((c["id"] for c in connections if c["name"] == connection), connection)
+ try:
+ topics = await client.kafka_list_topics(connection_id) if connection_id else []
+ except Exception:
+ topics = []
+ topic = topics[0] if len(topics) > 0 else "topic_0"
+ group_id = generate_kafka_group_id(topic)
+ ds_content += f"""
+ KAFKA_CONNECTION_NAME {connection}
+ KAFKA_TOPIC {{{{ tb_secret("KAFKA_TOPIC", "{topic}") }}}}
+ KAFKA_GROUP_ID {{{{ tb_secret("KAFKA_GROUP_ID", "{group_id}") }}}}
+ """
+
+ if datasource_type == "S3":
+ if not connection:
+ connections = await client.connections("s3")
+ connection = next((c["name"] for c in connections if c["name"] == connection), connection)
+ ds_content += f"""
+ IMPORT_CONNECTION_NAME "{connection}"
+ IMPORT_BUCKET_URI "s3://my-bucket/*.csv"
+ IMPORT_SCHEDULE "@auto"
+ """
+
+ if datasource_type == "GCS":
+ if not connection:
+ connections = await client.connections("gcs")
+ connection = next((c["name"] for c in connections if c["name"] == connection), connection)
+ ds_content += f"""
+ IMPORT_CONNECTION_NAME "{connection}"
+ IMPORT_BUCKET_URI "gs://my-bucket/*.csv"
+ IMPORT_SCHEDULE "@auto"
+ """
+
+ click.echo(FeedbackManager.info(message=f"/datasources/{name}.datasource"))
+ datasources_path = project.path / "datasources"
+ if not datasources_path.exists():
+ datasources_path.mkdir()
+ ds_file = datasources_path / f"{name}.datasource"
+ if not ds_file.exists():
+ ds_file.touch()
+ ds_file.write_text(ds_content)
+ click.echo(FeedbackManager.success(message="✓ .datasource created!"))
+
+
+ def generate_short_id():
+ return str(uuid.uuid4())[:4]
+
+
+ def generate_kafka_group_id(topic: str):
+ return f"{topic}_{int(datetime.timestamp(datetime.now()))}"
+
+
+ async def analyze_quarantine(datasource_name: str, project: Project, client: TinyB):
+ config = CLIConfig.get_project_config()
+ res = await client.query(
+ f"SELECT * FROM {datasource_name}_quarantine ORDER BY insertion_date DESC LIMIT 1 FORMAT JSON"
+ )
+ quarantine_data = res["data"]
+ error_message = json.dumps(res["data"])
+ user_token = config.get_user_token()
+ click.echo(FeedbackManager.gray(message=f"\n» Analyzing errors in {datasource_name}_quarantine..."))
+ if user_token:
+ llm = LLM(user_token=user_token, host=config.get_client().host)
+ ds_filenames = project.get_datasource_files()
+ datasource_definition = next(
+ (Path(f).read_text() for f in ds_filenames if f.endswith(f"{datasource_name}.datasource")), ""
+ )
+ response_llm = llm.ask(
+ system_prompt=quarantine_prompt(datasource_definition),
+ prompt=f"The quarantine errors are:\n{json.dumps(quarantine_data)}",
+ )
+ response = extract_xml(response_llm, "quarantine_errors")
+ error_message += "\n" + response
+ click.echo(response)
+ else:
+ echo_safe_humanfriendly_tables_format_smart_table(
+ data=[d.values() for d in res["data"]], column_names=res["data"][0].keys()
+ )
+
+ add_telemetry_event("datasource_error", error=f"quarantine_error: {error_message}")
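
For reference, quarantined rows can also be inspected without the LLM step by running the same query analyze_quarantine issues above. A hedged sketch, with the data source name as a placeholder and local CLI credentials assumed:

    import asyncio
    import json
    from tinybird.tb.modules.config import CLIConfig

    async def show_quarantine(datasource_name: str) -> None:
        client = CLIConfig.get_project_config().get_client()
        res = await client.query(
            f"SELECT * FROM {datasource_name}_quarantine ORDER BY insertion_date DESC LIMIT 1 FORMAT JSON"
        )
        print(json.dumps(res["data"], indent=2))

    asyncio.run(show_quarantine("my_datasource"))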
@@ -437,6 +437,9 @@ class FeedbackManager:
  error_tag_not_found = error_message("Tag {tag_name} not found.")
  error_build_failed = error_message("Build failed")
  error_request_failed = error_message("Request failed with status code {status_code}, please try again later.")
+ error_invalid_output_format = error_message(
+ "Invalid output format for this command. Supported formats are: {formats}"
+ )

  info_incl_relative_path = info_message("** Relative path {path} does not exist, skipping.")
  info_ignoring_incl_file = info_message(
@@ -6,7 +6,7 @@ import click
  from tinybird.tb.client import TinyB
  from tinybird.tb.config import get_display_cloud_host
  from tinybird.tb.modules.cli import CLIConfig, cli
- from tinybird.tb.modules.common import coro, echo_json, format_robust_table
+ from tinybird.tb.modules.common import coro, echo_json, force_echo, format_robust_table
  from tinybird.tb.modules.feedback_manager import FeedbackManager
  from tinybird.tb.modules.local_common import TB_LOCAL_ADDRESS, get_tinybird_local_config
  from tinybird.tb.modules.project import Project
@@ -19,14 +19,19 @@ async def info(ctx: click.Context) -> None:
  """Get information about the project that is currently being used"""
  ctx_config = ctx.ensure_object(dict)["config"]
  project: Project = ctx.ensure_object(dict)["project"]
- is_json = ctx.ensure_object(dict)["output"] == "json"
+ output = ctx.ensure_object(dict)["output"]
+
+ if output not in {"human", "json"}:
+ force_echo(FeedbackManager.error_invalid_output_format(formats=", ".join(["human", "json"])))
+ return
+
  click.echo(FeedbackManager.highlight(message="» Tinybird Cloud:"))
  cloud_table, cloud_columns = await get_cloud_info(ctx_config)
  click.echo(FeedbackManager.highlight(message="\n» Tinybird Local:"))
  local_table, local_columns = await get_local_info(ctx_config)
  click.echo(FeedbackManager.highlight(message="\n» Project:"))
  project_table, project_columns = await get_project_info(project.folder)
- if is_json:
+ if output == "json":
  cloud_data = {}
  if cloud_columns and cloud_table and isinstance(cloud_table, list) and len(cloud_table) > 0:
  cloud_data = {column: cloud_table[0][i] for i, column in enumerate(cloud_columns)}
@@ -77,6 +77,15 @@ class Project:
  def get_connection_files(self) -> List[str]:
  return self.get_files("connection")

+ def get_kafka_connection_files(self) -> List[str]:
+ return [f for f in self.get_connection_files() if self.is_kafka_connection(Path(f).read_text())]
+
+ def get_s3_connection_files(self) -> List[str]:
+ return [f for f in self.get_connection_files() if self.is_s3_connection(Path(f).read_text())]
+
+ def get_gcs_connection_files(self) -> List[str]:
+ return [f for f in self.get_connection_files() if self.is_gcs_connection(Path(f).read_text())]
+
  def get_pipe_datafile(self, filename: str) -> Optional[Datafile]:
  try:
  return parse_pipe(filename).datafile
@@ -107,3 +116,27 @@ class Project:
  @staticmethod
  def is_endpoint(content: str) -> bool:
  return re.search(r"TYPE endpoint", content, re.IGNORECASE) is not None
+
+ @staticmethod
+ def is_kafka_connection(content: str) -> bool:
+ return re.search(r"TYPE kafka", content, re.IGNORECASE) is not None
+
+ @staticmethod
+ def is_s3_connection(content: str) -> bool:
+ return re.search(r"TYPE s3", content, re.IGNORECASE) is not None
+
+ @staticmethod
+ def is_gcs_connection(content: str) -> bool:
+ return re.search(r"TYPE gcs", content, re.IGNORECASE) is not None
+
+ @staticmethod
+ def is_kafka_datasource(content: str) -> bool:
+ return re.search(r"KAFKA_CONNECTION_NAME", content, re.IGNORECASE) is not None
+
+ @staticmethod
+ def is_s3_datasource(content: str) -> bool:
+ return re.search(r"IMPORT_CONNECTION_NAME", content, re.IGNORECASE) is not None
+
+ @staticmethod
+ def is_gcs_datasource(content: str) -> bool:
+ return re.search(r"IMPORT_CONNECTION_NAME", content, re.IGNORECASE) is not None
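
A small sketch of the new detection helpers; the file contents here are inline strings rather than real project files:

    from tinybird.tb.modules.project import Project

    kafka_connection = 'TYPE kafka\nKAFKA_BOOTSTRAP_SERVERS {{ tb_secret("KAFKA_SERVERS", "localhost:9092") }}'
    s3_datasource = 'SCHEMA >\n    `id` String\n\nIMPORT_CONNECTION_NAME "s3_sample"'

    print(Project.is_kafka_connection(kafka_connection))  # True
    print(Project.is_s3_datasource(s3_datasource))        # True
    # Note: is_s3_datasource and is_gcs_datasource both key off IMPORT_CONNECTION_NAME,
    # so a GCS-backed .datasource matches both checks as written in this release.
    print(Project.is_gcs_datasource(s3_datasource))       # also True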
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: tinybird
- Version: 0.0.1.dev189
+ Version: 0.0.1.dev191
  Summary: Tinybird Command Line Tool
  Home-page: https://www.tinybird.co/docs/forward/commands
  Author: Tinybird
@@ -3,7 +3,7 @@ tinybird/context.py,sha256=FfqYfrGX_I7PKGTQo93utaKPDNVYWelg4Hsp3evX5wM,1291
  tinybird/datatypes.py,sha256=r4WCvspmrXTJHiPjjyOTiZyZl31FO3Ynkwq4LQsYm6E,11059
  tinybird/feedback_manager.py,sha256=1INQFfRfuMCb9lfB8KNf4r6qC2khW568hoHjtk-wshI,69305
  tinybird/git_settings.py,sha256=Sw_8rGmribEFJ4Z_6idrVytxpFYk7ez8ei0qHULzs3E,3934
- tinybird/prompts.py,sha256=uFzlVsGgWFwEOBhXkaLL4zY-wz8jCQgTFiMEfoFcgro,36550
+ tinybird/prompts.py,sha256=rpgvMpR103niDnoDMSz8hRAYBdgfrorfD5-7g23BMQQ,37596
  tinybird/sql.py,sha256=C_B81wwv3BsqyXGhF5oTk9DcTUkrp7NwIFqSzd3Dmjc,47854
  tinybird/sql_template.py,sha256=hWW8JawSWLl9GeWPYkC_Yrxj7P0MHEVMJ0Px9bedEgM,99817
  tinybird/sql_template_fmt.py,sha256=KUHdj5rYCYm_rKKdXYSJAE9vIyXUQLB0YSZnUXHeBlY,10196
@@ -12,27 +12,27 @@ tinybird/syncasync.py,sha256=IPnOx6lMbf9SNddN1eBtssg8vCLHMt76SuZ6YNYm-Yk,27761
  tinybird/tornado_template.py,sha256=jjNVDMnkYFWXflmT8KU_Ssbo5vR8KQq3EJMk5vYgXRw,41959
  tinybird/ch_utils/constants.py,sha256=aYvg2C_WxYWsnqPdZB1ZFoIr8ZY-XjUXYyHKE9Ansj0,3890
  tinybird/ch_utils/engine.py,sha256=X4tE9OrfaUy6kO9cqVEzyI9cDcmOF3IAssRRzsTsfEQ,40781
- tinybird/tb/__cli__.py,sha256=Jw96cYl5Uk5poOJYvRJJfYGQbYLgCXUaCwjiBxNu3Zg,247
+ tinybird/tb/__cli__.py,sha256=IuO2njWcdIXnuscWmXWEMTbCM9GnrTyYky59OtsEi3I,247
  tinybird/tb/check_pypi.py,sha256=rW4QmDRbtgKdUUwJCnBkVjmTjZSZGN-XgZhx7vMkC0w,1009
  tinybird/tb/cli.py,sha256=u3eGOhX0MHkuT6tiwaZ0_3twqLmqKXDAOxF7yV_Nn9Q,1075
- tinybird/tb/client.py,sha256=59GH0IoYSV_KUG0eEbDDYHSWH4OlkVnSRFXE3mYAM0s,56571
+ tinybird/tb/client.py,sha256=CO-dQw8h28X6T6IO-Z79yPBKaJQT1Rwya5b6gexvw58,56491
  tinybird/tb/config.py,sha256=jT9xndpeCY_g0HdB5qE2EquC0TFRRnkPnQFWZWd04jo,3998
  tinybird/tb/modules/build.py,sha256=KvF0s8hGgY_rZs7jSqYiauCk3MAlCmW_gQtnsJDJWBk,19411
  tinybird/tb/modules/cicd.py,sha256=Njb6eZOHHbUkoJJx6KoixO9PsfA_T-3Ybkya9-50Ca8,7328
- tinybird/tb/modules/cli.py,sha256=dXZs-MuqYPvxStVj7aLg36LwXtEB8NzTobDmHV9nzZI,15508
- tinybird/tb/modules/common.py,sha256=DYCjpj0iBaCDZ8BJ0MNG_6m6NyFMCrpQShIajHKLIfM,83373
+ tinybird/tb/modules/cli.py,sha256=LW77oBqvZ6QB5X96158Kp13Tl24vKdxlyWvsSawpmCo,15399
+ tinybird/tb/modules/common.py,sha256=2NRDRll0czmYjwLh3qv3DYL9aP8XgRkRAv5S3meCGfM,84062
  tinybird/tb/modules/config.py,sha256=ziqW_t_mRVvWOd85VoB4vKyvgMkEfpXDf9H4v38p2xc,11422
- tinybird/tb/modules/connection.py,sha256=7oOR7x4PhBcm1ETFFCH2YJ_3oeGXjAbmx1cnZX9_L70,9014
+ tinybird/tb/modules/connection.py,sha256=z1xWP2gtjKEbjc4ZF1aD7QUgl8V--wf2IRNy-4sRFm8,9779
  tinybird/tb/modules/copy.py,sha256=2Mm4FWKehOG7CoOhiF1m9UZJgJn0W1_cMolqju8ONYg,5805
- tinybird/tb/modules/create.py,sha256=sfIOcN3tujt7O1r9RNWqhhI-gQTDnO6zEgMwZHH2D8s,20201
- tinybird/tb/modules/datasource.py,sha256=0_6Cn07p5GoNBBGdu88pSeLvTWojln1-k23FsS8jTDs,17801
+ tinybird/tb/modules/create.py,sha256=2uW-4t7c7e4xkZ-GpK_8XaA-nuXwklq7rTks4k6qrtI,20917
+ tinybird/tb/modules/datasource.py,sha256=Uomj3bpVL5m4A0_ezWFXvQAjVER4EfmDz5gFkfGR7eo,37027
  tinybird/tb/modules/deployment.py,sha256=OGJdriqywhCtsG7rKLFtVSLjoEbbva1Nb29-jQBk3wM,27432
  tinybird/tb/modules/deprecations.py,sha256=rrszC1f_JJeJ8mUxGoCxckQTJFBCR8wREf4XXXN-PRc,4507
  tinybird/tb/modules/dev_server.py,sha256=57FCKuWpErwYUYgHspYDkLWEm9F4pbvVOtMrFXX1fVU,10129
  tinybird/tb/modules/endpoint.py,sha256=XySDt3pk66vxOZ0egUfz4bY8bEk3BjOXkv-L0OIJ3sc,12083
  tinybird/tb/modules/exceptions.py,sha256=5jK91w1LPmtqIUfDpHe_Op5OxGz8-p1BPgtLREMIni0,5217
- tinybird/tb/modules/feedback_manager.py,sha256=sWJYBIZDPYLFWMKPv7FoVLbQxE7xx85bn5SbK9yYXoE,76782
- tinybird/tb/modules/info.py,sha256=iKeFbFkos7vYaBU7Vr5SI-fa1x7AbuUHB748jsGsaA4,5944
+ tinybird/tb/modules/feedback_manager.py,sha256=cnMGTCXa04Zmwq0tPHuAeq_hxqTz3F8c7mXqTJ-MZsw,76920
+ tinybird/tb/modules/info.py,sha256=qsXr2g45jRv2xD4Kt2sgsT0AR8sjwrAZf2s7mI-zxL4,6113
  tinybird/tb/modules/infra.py,sha256=fve30Gj3mG9zbquGxS2e4ipcOYOxviWQCpNFfEzJN_Q,33195
  tinybird/tb/modules/job.py,sha256=AsUCRNzy7HG5oJ4fyk9NpIm5NtNJgBZSy8MtJdXBe5A,3167
  tinybird/tb/modules/llm.py,sha256=KfsCYmKeW1VQz0iDZhGKCRkQv_Y3kTHh6JuxvofOguE,1076
@@ -45,7 +45,7 @@ tinybird/tb/modules/materialization.py,sha256=QJX5kCPhhm6IXBO1JsalVfbQdypCe_eOUD
  tinybird/tb/modules/mock.py,sha256=IyHweMUM6bUH8IhyiX2tTMpdVpTFUeAJ41lZ5P42-HQ,5303
  tinybird/tb/modules/open.py,sha256=OuctINN77oexpSjth9uoIZPCelKO4Li-yyVxeSnk1io,1371
  tinybird/tb/modules/pipe.py,sha256=AQKEDagO6e3psPVjJkS_MDbn8aK-apAiLp26k7jgAV0,2432
- tinybird/tb/modules/project.py,sha256=wOL0vmkbM6Jhxbv0zYwt3JjAoRQDvkbbus01bpzfhA0,3846
+ tinybird/tb/modules/project.py,sha256=iIEaBQsdLXyzJ_45Paf1jwbSrtTwWv131VCuPsTQttA,5215
  tinybird/tb/modules/regions.py,sha256=QjsL5H6Kg-qr0aYVLrvb1STeJ5Sx_sjvbOYO0LrEGMk,166
  tinybird/tb/modules/secret.py,sha256=WsqzxxLh9W_jkuHL2JofMXdIJy0lT5WEI-7bQSIDgAc,2921
  tinybird/tb/modules/shell.py,sha256=Zd_4Ak_5tKVX-cw6B4ag36xZeEGHeh-jZpAsIXkoMoE,14116
@@ -80,8 +80,8 @@ tinybird/tb_cli_modules/config.py,sha256=IsgdtFRnUrkY8-Zo32lmk6O7u3bHie1QCxLwgp4
  tinybird/tb_cli_modules/exceptions.py,sha256=pmucP4kTF4irIt7dXiG-FcnI-o3mvDusPmch1L8RCWk,3367
  tinybird/tb_cli_modules/regions.py,sha256=QjsL5H6Kg-qr0aYVLrvb1STeJ5Sx_sjvbOYO0LrEGMk,166
  tinybird/tb_cli_modules/telemetry.py,sha256=Hh2Io8ZPROSunbOLuMvuIFU4TqwWPmQTqal4WS09K1A,10449
- tinybird-0.0.1.dev189.dist-info/METADATA,sha256=fz7av4cjpbqq8H_bP6CsG5NpL3Dg9iBRFHEG1U7luSs,1608
- tinybird-0.0.1.dev189.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- tinybird-0.0.1.dev189.dist-info/entry_points.txt,sha256=LwdHU6TfKx4Qs7BqqtaczEZbImgU7Abe9Lp920zb_fo,43
- tinybird-0.0.1.dev189.dist-info/top_level.txt,sha256=VqqqEmkAy7UNaD8-V51FCoMMWXjLUlR0IstvK7tJYVY,54
- tinybird-0.0.1.dev189.dist-info/RECORD,,
+ tinybird-0.0.1.dev191.dist-info/METADATA,sha256=6DYh0XzR-JKDXhw_nDCp0ZXErl2PTGkPEBMDqKLTLB8,1608
+ tinybird-0.0.1.dev191.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ tinybird-0.0.1.dev191.dist-info/entry_points.txt,sha256=LwdHU6TfKx4Qs7BqqtaczEZbImgU7Abe9Lp920zb_fo,43
+ tinybird-0.0.1.dev191.dist-info/top_level.txt,sha256=VqqqEmkAy7UNaD8-V51FCoMMWXjLUlR0IstvK7tJYVY,54
+ tinybird-0.0.1.dev191.dist-info/RECORD,,