tinybird-cli 5.21.2.dev0__tar.gz → 5.22.1.dev0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48) hide show
  1. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/PKG-INFO +7 -2
  2. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/__cli__.py +2 -2
  3. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/ch_utils/constants.py +6 -0
  4. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/config.py +0 -6
  5. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/connectors.py +1 -7
  6. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/context.py +3 -3
  7. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/datafile_common.py +137 -114
  8. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/sql.py +31 -23
  9. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/sql_template.py +39 -10
  10. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/sql_template_fmt.py +10 -2
  11. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/sql_toolset.py +2 -2
  12. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/cli.py +8 -8
  13. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/common.py +2 -2
  14. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/pipe.py +1 -1
  15. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/tinyunit/tinyunit.py +0 -14
  16. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/tinyunit/tinyunit_lib.py +0 -6
  17. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tornado_template.py +6 -7
  18. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird_cli.egg-info/PKG-INFO +7 -2
  19. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird_cli.egg-info/requires.txt +1 -1
  20. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/setup.cfg +0 -0
  21. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/ch_utils/engine.py +0 -0
  22. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/check_pypi.py +0 -0
  23. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/client.py +0 -0
  24. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/datatypes.py +0 -0
  25. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/feedback_manager.py +0 -0
  26. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/git_settings.py +0 -0
  27. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/syncasync.py +0 -0
  28. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli.py +0 -0
  29. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/auth.py +0 -0
  30. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/branch.py +0 -0
  31. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/cicd.py +0 -0
  32. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/config.py +0 -0
  33. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/connection.py +0 -0
  34. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/datasource.py +0 -0
  35. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/exceptions.py +0 -0
  36. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/fmt.py +0 -0
  37. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/job.py +0 -0
  38. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/regions.py +0 -0
  39. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/tag.py +0 -0
  40. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/telemetry.py +0 -0
  41. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/test.py +0 -0
  42. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/token.py +0 -0
  43. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/workspace.py +0 -0
  44. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird/tb_cli_modules/workspace_members.py +0 -0
  45. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird_cli.egg-info/SOURCES.txt +0 -0
  46. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird_cli.egg-info/dependency_links.txt +0 -0
  47. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird_cli.egg-info/entry_points.txt +0 -0
  48. {tinybird_cli-5.21.2.dev0 → tinybird_cli-5.22.1.dev0}/tinybird_cli.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: tinybird_cli
3
- Version: 5.21.2.dev0
3
+ Version: 5.22.1.dev0
4
4
  Summary: Tinybird Command Line Tool
5
5
  Home-page: https://www.tinybird.co/docs/cli
6
6
  Author: Tinybird
@@ -16,7 +16,7 @@ Requires-Dist: croniter==1.3.15
16
16
  Requires-Dist: GitPython~=3.1.32
17
17
  Requires-Dist: humanfriendly~=8.2
18
18
  Requires-Dist: pydantic~=2.8.0
19
- Requires-Dist: pyperclip==1.8.2
19
+ Requires-Dist: pyperclip==1.9.0
20
20
  Requires-Dist: pyyaml<6.1,>=6.0
21
21
  Requires-Dist: requests<3,>=2.28.1
22
22
  Requires-Dist: shandy-sqlfmt==0.11.1
@@ -61,6 +61,11 @@ The Tinybird command-line tool allows you to use all the Tinybird functionality
61
61
  Changelog
62
62
  ----------
63
63
 
64
+ 5.21.2
65
+ ***********
66
+
67
+ - `Improved` Error message when trying to format a file using `tb fmt` or when we are running `tb init --git` to include the resource name and source (local/remote)
68
+
64
69
  5.21.1
65
70
  ***********
66
71
 
@@ -4,5 +4,5 @@ __description__ = 'Tinybird Command Line Tool'
4
4
  __url__ = 'https://www.tinybird.co/docs/cli'
5
5
  __author__ = 'Tinybird'
6
6
  __author_email__ = 'support@tinybird.co'
7
- __version__ = '5.21.2.dev0'
8
- __revision__ = '5949c64'
7
+ __version__ = '5.22.1.dev0'
8
+ __revision__ = '1515157'
@@ -257,4 +257,10 @@ VALID_QUERY_FORMATS = (
257
257
  "Native",
258
258
  "RowBinaryWithNamesAndTypes",
259
259
  "TabSeparated",
260
+ "JSONCompactEachRowWithNamesAndTypes",
261
+ "TabSeparatedWithNamesAndTypes",
262
+ "JSONCompactEachRow",
263
+ "JSONCompact",
264
+ "JSONStringsEachRowWithProgress",
265
+ "ODBCDriver2",
260
266
  )
@@ -38,13 +38,10 @@ LEGACY_HOSTS = {
38
38
  "https://api.wadus3.gcp.tinybird.co": "https://app.wadus.tinybird.co/gcp/wadus3",
39
39
  "https://api.wadus4.gcp.tinybird.co": "https://app.wadus.tinybird.co/gcp/wadus4",
40
40
  "https://api.wadus5.gcp.tinybird.co": "https://app.wadus.tinybird.co/gcp/wadus5",
41
- "https://api.wadus6.gcp.tinybird.co": "https://app.wadus.tinybird.co/gcp/wadus6",
42
41
  "https://api.wadus1.aws.tinybird.co": "https://app.wadus.tinybird.co/aws/wadus1",
43
42
  "https://api.wadus2.aws.tinybird.co": "https://app.wadus.tinybird.co/aws/wadus2",
44
43
  "https://api.wadus3.aws.tinybird.co": "https://app.wadus.tinybird.co/aws/wadus3",
45
44
  "https://api.wadus4.aws.tinybird.co": "https://app.wadus.tinybird.co/aws/wadus4",
46
- "https://api.wadus5.aws.tinybird.co": "https://app.wadus.tinybird.co/aws/wadus5",
47
- "https://api.wadus6.aws.tinybird.co": "https://app.wadus.tinybird.co/aws/wadus6",
48
45
  "https://ui.tinybird.co": "https://app.tinybird.co/gcp/europe-west3",
49
46
  "https://ui.us-east.tinybird.co": "https://app.tinybird.co/gcp/us-east4",
50
47
  "https://ui.us-east.aws.tinybird.co": "https://app.tinybird.co/aws/us-east-1",
@@ -62,13 +59,10 @@ LEGACY_HOSTS = {
62
59
  "https://ui.wadus3.gcp.tinybird.co": "https://app.wadus.tinybird.co/gcp/wadus3",
63
60
  "https://ui.wadus4.gcp.tinybird.co": "https://app.wadus.tinybird.co/gcp/wadus4",
64
61
  "https://ui.wadus5.gcp.tinybird.co": "https://app.wadus.tinybird.co/gcp/wadus5",
65
- "https://ui.wadus6.gcp.tinybird.co": "https://app.wadus.tinybird.co/gcp/wadus6",
66
62
  "https://ui.wadus1.aws.tinybird.co": "https://app.wadus.tinybird.co/aws/wadus1",
67
63
  "https://ui.wadus2.aws.tinybird.co": "https://app.wadus.tinybird.co/aws/wadus2",
68
64
  "https://ui.wadus3.aws.tinybird.co": "https://app.wadus.tinybird.co/aws/wadus3",
69
65
  "https://ui.wadus4.aws.tinybird.co": "https://app.wadus.tinybird.co/aws/wadus4",
70
- "https://ui.wadus5.aws.tinybird.co": "https://app.wadus.tinybird.co/aws/wadus5",
71
- "https://ui.wadus6.aws.tinybird.co": "https://app.wadus.tinybird.co/aws/wadus6",
72
66
  }
73
67
 
74
68
 
@@ -369,13 +369,7 @@ class Snowflake(Connector):
369
369
  the_type = "String"
370
370
  if t.startswith("NUMBER"):
371
371
  the_type = "Int32"
372
- if (
373
- t.startswith("FLOAT")
374
- or t.startswith("DOUBLE")
375
- or t.startswith("REAL")
376
- or t.startswith("NUMERIC")
377
- or t.startswith("DECIMAL")
378
- ):
372
+ if t.startswith(("FLOAT", "DOUBLE", "REAL", "NUMERIC", "DECIMAL")):
379
373
  the_type = "Float32"
380
374
  if t == "DATE":
381
375
  the_type = "Date"
@@ -3,15 +3,15 @@ from typing import TYPE_CHECKING
3
3
 
4
4
  # Avoid circular import error
5
5
  if TYPE_CHECKING:
6
- from tinybird.user import User
6
+ from hfi.hfi_workspace_data import HfiWorkspaceData
7
+
7
8
 
8
9
  workspace_id: ContextVar[str] = ContextVar("workspace_id")
9
- workspace: ContextVar["User"] = ContextVar("workspace")
10
+ hfi_workspace_data: ContextVar["HfiWorkspaceData"] = ContextVar("hfi_workspace_data")
10
11
  table_id: ContextVar[str] = ContextVar("table_id")
11
12
  hfi_frequency: ContextVar[float] = ContextVar("hfi_frequency")
12
13
  hfi_frequency_gatherer: ContextVar[float] = ContextVar("hfi_frequency_gatherer")
13
14
  use_gatherer: ContextVar[bool] = ContextVar("use_gatherer")
14
- allow_gatherer_fallback: ContextVar[bool] = ContextVar("allow_gatherer_fallback")
15
15
  gatherer_allow_s3_backup_on_user_errors: ContextVar[bool] = ContextVar("gatherer_allow_s3_backup_on_user_errors")
16
16
  disable_template_security_validation: ContextVar[bool] = ContextVar("disable_template_security_validation")
17
17
  origin: ContextVar[str] = ContextVar("origin")
@@ -147,6 +147,9 @@ ON_DEMAND = "@on-demand"
147
147
  DEFAULT_CRON_PERIOD: int = 60
148
148
 
149
149
 
150
+ # TODO: This class is duplicated in tinybird/datafile/common.py with a slightly different
151
+ # _REPLACEMENTS tuple. The duplication happened during the CLI/server code split (commit
152
+ # f86d02cdd7). Consider extracting shared code into a common module that both files can import.
150
153
  class ImportReplacements:
151
154
  _REPLACEMENTS: Tuple[Tuple[str, str, Optional[str]], ...] = (
152
155
  ("import_service", "service", None),
@@ -167,7 +170,7 @@ class ImportReplacements:
167
170
  return [x[0] for x in ImportReplacements._REPLACEMENTS]
168
171
 
169
172
  @staticmethod
170
- def get_api_param_for_datafile_param(connector_service: str, key: str) -> Tuple[Optional[str], Optional[str]]:
173
+ def get_api_param_for_datafile_param(key: str) -> Tuple[Optional[str], Optional[str]]:
171
174
  """Returns the API parameter name and default value for a given
172
175
  datafile parameter.
173
176
  """
@@ -552,9 +555,7 @@ class Deployment:
552
555
  and self.cli_git_release.ChangeType(type) == self.cli_git_release.ChangeType.MODIFIED
553
556
  ):
554
557
  filenames_from_changed += [
555
- filename
556
- for filename in filenames
557
- if filename.endswith(f"{change}.pipe") or filename.endswith(f"{change}.datasource")
558
+ filename for filename in filenames if filename.endswith((f"{change}.pipe", f"{change}.datasource"))
558
559
  ]
559
560
  return filenames_from_changed
560
561
 
@@ -1280,7 +1281,7 @@ def parse(
1280
1281
  if (
1281
1282
  parser_state.multiline
1282
1283
  and cmd.lower() in cmds
1283
- and not (line.startswith(" ") or line.startswith("\t") or line.lower().startswith("from"))
1284
+ and not (line.startswith((" ", "\t")) or line.lower().startswith("from"))
1284
1285
  ):
1285
1286
  parser_state.multiline = False
1286
1287
  cmds[parser_state.command](
@@ -1458,11 +1459,8 @@ async def process_file(
1458
1459
  raise click.ClickException(FeedbackManager.error_missing_table_arn(datasource=datasource["name"]))
1459
1460
  if not params.get("import_export_bucket", None):
1460
1461
  raise click.ClickException(FeedbackManager.error_missing_export_bucket(datasource=datasource["name"]))
1461
- else:
1462
- if not params.get("import_external_datasource", None):
1463
- raise click.ClickException(
1464
- FeedbackManager.error_missing_external_datasource(datasource=datasource["name"])
1465
- )
1462
+ elif not params.get("import_external_datasource", None):
1463
+ raise click.ClickException(FeedbackManager.error_missing_external_datasource(datasource=datasource["name"]))
1466
1464
 
1467
1465
  return params
1468
1466
 
@@ -1585,7 +1583,7 @@ async def process_file(
1585
1583
  #
1586
1584
  # Note: any unknown import_ parameter is leaved as is.
1587
1585
  for key in ImportReplacements.get_datafile_parameter_keys():
1588
- replacement, default_value = ImportReplacements.get_api_param_for_datafile_param(service, key)
1586
+ replacement, default_value = ImportReplacements.get_api_param_for_datafile_param(key)
1589
1587
  if not replacement:
1590
1588
  continue # We should not reach this never, but just in case...
1591
1589
 
@@ -3057,26 +3055,23 @@ async def new_pipe(
3057
3055
  if wait_populate:
3058
3056
  result = await wait_job(tb_client, job_id, job_url, "Populating")
3059
3057
  click.echo(FeedbackManager.info_populate_job_result(result=result))
3060
- else:
3061
- if data.get("type") == "default" and not skip_tokens and not as_standard and not copy_node and not sink_node:
3062
- # FIXME: set option to add last node as endpoint in the API
3063
- endpoint_node = next(
3064
- (node for node in data.get("nodes", []) if node.get("type") == "endpoint"), data.get("nodes", [])[-1]
3058
+ elif data.get("type") == "default" and not skip_tokens and not as_standard and not copy_node and not sink_node:
3059
+ # FIXME: set option to add last node as endpoint in the API
3060
+ endpoint_node = next(
3061
+ (node for node in data.get("nodes", []) if node.get("type") == "endpoint"), data.get("nodes", [])[-1]
3062
+ )
3063
+ try:
3064
+ data = await tb_client._req(
3065
+ f"/v0/pipes/{p['name']}/nodes/{endpoint_node.get('id')}/endpoint?{urlencode(cli_params)}",
3066
+ method="POST",
3067
+ headers=headers,
3068
+ )
3069
+ except Exception as e:
3070
+ raise Exception(
3071
+ FeedbackManager.error_creating_endpoint(node=endpoint_node.get("name"), pipe=p["name"], error=str(e))
3065
3072
  )
3066
- try:
3067
- data = await tb_client._req(
3068
- f"/v0/pipes/{p['name']}/nodes/{endpoint_node.get('id')}/endpoint?{urlencode(cli_params)}",
3069
- method="POST",
3070
- headers=headers,
3071
- )
3072
- except Exception as e:
3073
- raise Exception(
3074
- FeedbackManager.error_creating_endpoint(
3075
- node=endpoint_node.get("name"), pipe=p["name"], error=str(e)
3076
- )
3077
- )
3078
3073
 
3079
- click.echo(FeedbackManager.success_test_endpoint_no_token(host=host, pipe=p["name"]))
3074
+ click.echo(FeedbackManager.success_test_endpoint_no_token(host=host, pipe=p["name"]))
3080
3075
 
3081
3076
  if copy_node:
3082
3077
  pipe_id = data["id"]
@@ -3523,32 +3518,28 @@ async def new_ds(
3523
3518
  if alter_response and make_changes:
3524
3519
  # alter operation finished
3525
3520
  pass
3521
+ elif (
3522
+ os.getenv("TB_I_KNOW_WHAT_I_AM_DOING")
3523
+ and click.prompt(FeedbackManager.info_ask_for_datasource_confirmation()) == ds_name
3524
+ ): # TODO move to CLI
3525
+ try:
3526
+ await client.datasource_delete(ds_name)
3527
+ click.echo(FeedbackManager.success_delete_datasource(datasource=ds_name))
3528
+ except Exception:
3529
+ raise click.ClickException(FeedbackManager.error_removing_datasource(datasource=ds_name))
3530
+ return
3531
+ elif alter_error_message:
3532
+ raise click.ClickException(
3533
+ FeedbackManager.error_datasource_already_exists_and_alter_failed(
3534
+ datasource=ds_name, alter_error_message=alter_error_message
3535
+ )
3536
+ )
3537
+ elif promote_error_message:
3538
+ raise click.ClickException(
3539
+ FeedbackManager.error_promoting_datasource(datasource=ds_name, error=promote_error_message)
3540
+ )
3526
3541
  else:
3527
- # removed replacing by default. When a datasource is removed data is
3528
- # removed and all the references needs to be updated
3529
- if (
3530
- os.getenv("TB_I_KNOW_WHAT_I_AM_DOING")
3531
- and click.prompt(FeedbackManager.info_ask_for_datasource_confirmation()) == ds_name
3532
- ): # TODO move to CLI
3533
- try:
3534
- await client.datasource_delete(ds_name)
3535
- click.echo(FeedbackManager.success_delete_datasource(datasource=ds_name))
3536
- except Exception:
3537
- raise click.ClickException(FeedbackManager.error_removing_datasource(datasource=ds_name))
3538
- return
3539
- else:
3540
- if alter_error_message:
3541
- raise click.ClickException(
3542
- FeedbackManager.error_datasource_already_exists_and_alter_failed(
3543
- datasource=ds_name, alter_error_message=alter_error_message
3544
- )
3545
- )
3546
- if promote_error_message:
3547
- raise click.ClickException(
3548
- FeedbackManager.error_promoting_datasource(datasource=ds_name, error=promote_error_message)
3549
- )
3550
- else:
3551
- click.echo(FeedbackManager.warning_datasource_already_exists(datasource=ds_name))
3542
+ click.echo(FeedbackManager.warning_datasource_already_exists(datasource=ds_name))
3552
3543
 
3553
3544
 
3554
3545
  async def new_token(token: Dict[str, Any], client: TinyB, force: bool = False):
@@ -4450,52 +4441,44 @@ async def folder_push(
4450
4441
  error=e,
4451
4442
  )
4452
4443
  raise click.ClickException(exception)
4453
- else:
4454
- if raise_on_exists:
4455
- raise AlreadyExistsException(
4456
- FeedbackManager.warning_name_already_exists(
4457
- name=name if to_run[name]["version"] is None else f"{name}__v{to_run[name]['version']}"
4458
- )
4444
+ elif raise_on_exists:
4445
+ raise AlreadyExistsException(
4446
+ FeedbackManager.warning_name_already_exists(
4447
+ name=name if to_run[name]["version"] is None else f"{name}__v{to_run[name]['version']}"
4459
4448
  )
4449
+ )
4450
+ elif await name_matches_existing_resource(resource, name, tb_client):
4451
+ if resource == "pipes":
4452
+ click.echo(FeedbackManager.error_pipe_cannot_be_pushed(name=name))
4460
4453
  else:
4461
- if await name_matches_existing_resource(resource, name, tb_client):
4462
- if resource == "pipes":
4463
- click.echo(FeedbackManager.error_pipe_cannot_be_pushed(name=name))
4464
- else:
4465
- click.echo(FeedbackManager.error_datasource_cannot_be_pushed(name=name))
4466
- else:
4467
- click.echo(
4468
- FeedbackManager.warning_name_already_exists(
4469
- name=(
4470
- name
4471
- if to_run[name]["version"] is None
4472
- else f"{name}__v{to_run[name]['version']}"
4473
- )
4474
- )
4475
- )
4476
- else:
4477
- if should_push_file(name, remote_resource_names, latest_datasource_versions, force, run_tests):
4478
- if name not in resource_versions:
4479
- version = ""
4480
- if name in latest_datasource_versions:
4481
- version = f"(v{latest_datasource_versions[name]})"
4482
- click.echo(FeedbackManager.info_dry_processing_new_resource(name=name, version=version))
4483
- else:
4484
- click.echo(
4485
- FeedbackManager.info_dry_processing_resource(
4486
- name=name,
4487
- version=latest_datasource_versions[name],
4488
- latest_version=resource_versions.get(name),
4489
- )
4454
+ click.echo(FeedbackManager.error_datasource_cannot_be_pushed(name=name))
4455
+ else:
4456
+ click.echo(
4457
+ FeedbackManager.warning_name_already_exists(
4458
+ name=(name if to_run[name]["version"] is None else f"{name}__v{to_run[name]['version']}")
4490
4459
  )
4460
+ )
4461
+ elif should_push_file(name, remote_resource_names, latest_datasource_versions, force, run_tests):
4462
+ if name not in resource_versions:
4463
+ version = ""
4464
+ if name in latest_datasource_versions:
4465
+ version = f"(v{latest_datasource_versions[name]})"
4466
+ click.echo(FeedbackManager.info_dry_processing_new_resource(name=name, version=version))
4491
4467
  else:
4492
- if await name_matches_existing_resource(resource, name, tb_client):
4493
- if resource == "pipes":
4494
- click.echo(FeedbackManager.warning_pipe_cannot_be_pushed(name=name))
4495
- else:
4496
- click.echo(FeedbackManager.warning_datasource_cannot_be_pushed(name=name))
4497
- else:
4498
- click.echo(FeedbackManager.warning_dry_name_already_exists(name=name))
4468
+ click.echo(
4469
+ FeedbackManager.info_dry_processing_resource(
4470
+ name=name,
4471
+ version=latest_datasource_versions[name],
4472
+ latest_version=resource_versions.get(name),
4473
+ )
4474
+ )
4475
+ elif await name_matches_existing_resource(resource, name, tb_client):
4476
+ if resource == "pipes":
4477
+ click.echo(FeedbackManager.warning_pipe_cannot_be_pushed(name=name))
4478
+ else:
4479
+ click.echo(FeedbackManager.warning_datasource_cannot_be_pushed(name=name))
4480
+ else:
4481
+ click.echo(FeedbackManager.warning_dry_name_already_exists(name=name))
4499
4482
 
4500
4483
  async def push_files(
4501
4484
  dependency_graph: GraphDependencies,
@@ -4745,9 +4728,8 @@ async def folder_push(
4745
4728
  force,
4746
4729
  mode="append" if is_branch else "replace",
4747
4730
  )
4748
- else:
4749
- if verbose:
4750
- click.echo(FeedbackManager.info_not_pushing_fixtures())
4731
+ elif verbose:
4732
+ click.echo(FeedbackManager.info_not_pushing_fixtures())
4751
4733
 
4752
4734
  await deployment.update_release(has_semver=has_semver, release_created=release_created)
4753
4735
 
@@ -4873,6 +4855,7 @@ async def format_datasource(
4873
4855
  for_deploy_diff: bool = False,
4874
4856
  skip_eval: bool = False,
4875
4857
  content: Optional[str] = None,
4858
+ resource_source: Optional[str] = None, # Not used for datasources, but kept for API consistency with format_pipe
4876
4859
  ) -> str:
4877
4860
  if datafile:
4878
4861
  doc = datafile
@@ -4976,11 +4959,25 @@ def format_tokens(file_parts: List[str], doc: Datafile) -> List[str]:
4976
4959
 
4977
4960
 
4978
4961
  def format_node_sql(
4979
- file_parts: List[str], node: Dict[str, Any], line_length: Optional[int] = None, lower_keywords: bool = False
4962
+ file_parts: List[str],
4963
+ node: Dict[str, Any],
4964
+ line_length: Optional[int] = None,
4965
+ lower_keywords: bool = False,
4966
+ resource_name: Optional[str] = None,
4967
+ resource_source: Optional[str] = None,
4980
4968
  ) -> List[str]:
4981
4969
  file_parts.append("SQL >")
4982
4970
  file_parts.append(DATAFILE_NEW_LINE)
4983
- file_parts.append(format_sql(node["sql"], DATAFILE_INDENT, line_length=line_length, lower_keywords=lower_keywords))
4971
+ file_parts.append(
4972
+ format_sql(
4973
+ node["sql"],
4974
+ DATAFILE_INDENT,
4975
+ line_length=line_length,
4976
+ lower_keywords=lower_keywords,
4977
+ resource_name=resource_name,
4978
+ resource_source=resource_source,
4979
+ )
4980
+ )
4984
4981
  file_parts.append(DATAFILE_NEW_LINE)
4985
4982
  file_parts.append(DATAFILE_NEW_LINE)
4986
4983
  return file_parts
@@ -5100,6 +5097,8 @@ async def format_node(
5100
5097
  line_length: Optional[int] = None,
5101
5098
  unroll_includes: bool = False,
5102
5099
  lower_keywords: bool = False,
5100
+ resource_name: Optional[str] = None,
5101
+ resource_source: Optional[str] = None,
5103
5102
  ) -> None:
5104
5103
  if not unroll_includes:
5105
5104
  format_pipe_include(file_parts, node, includes)
@@ -5114,7 +5113,14 @@ async def format_node(
5114
5113
 
5115
5114
  Doc = namedtuple("Doc", ["description"])
5116
5115
  format_description(file_parts, Doc(node.get("description", "")))
5117
- format_node_sql(file_parts, node, line_length=line_length, lower_keywords=lower_keywords)
5116
+ format_node_sql(
5117
+ file_parts,
5118
+ node,
5119
+ line_length=line_length,
5120
+ lower_keywords=lower_keywords,
5121
+ resource_name=resource_name,
5122
+ resource_source=resource_source,
5123
+ )
5118
5124
  await format_node_type(file_parts, node)
5119
5125
 
5120
5126
 
@@ -5127,6 +5133,7 @@ async def format_pipe(
5127
5133
  for_deploy_diff: bool = False,
5128
5134
  skip_eval: bool = False,
5129
5135
  content: Optional[str] = None,
5136
+ resource_source: Optional[str] = None,
5130
5137
  ) -> str:
5131
5138
  if datafile:
5132
5139
  doc = datafile
@@ -5159,7 +5166,7 @@ async def format_pipe(
5159
5166
  if "." in include_file
5160
5167
  else eval_var(include_file)
5161
5168
  )
5162
- included_pipe = parse_pipe(include_file, skip_eval=skip_eval)
5169
+ included_pipe = parse_pipe(str(include_file), skip_eval=skip_eval)
5163
5170
  pipe_nodes = doc.nodes.copy()
5164
5171
  for included_node in included_pipe.nodes.copy():
5165
5172
  unrolled_included_node = next(
@@ -5175,10 +5182,12 @@ async def format_pipe(
5175
5182
  line_length=line_length,
5176
5183
  unroll_includes=unroll_includes,
5177
5184
  lower_keywords=bool(for_deploy_diff),
5185
+ resource_name=filename,
5186
+ resource_source=resource_source,
5178
5187
  )
5179
5188
 
5180
5189
  if not unroll_includes:
5181
- for k, _ in doc.includes.items():
5190
+ for k in doc.includes.keys():
5182
5191
  if ".incl" not in k:
5183
5192
  continue
5184
5193
  file_parts.append(f"INCLUDE {k}")
@@ -5190,8 +5199,21 @@ async def format_pipe(
5190
5199
  return result
5191
5200
 
5192
5201
 
5193
- def format_sql(sql: str, DATAFILE_INDENT: str, line_length: Optional[int] = None, lower_keywords: bool = False) -> str:
5194
- sql = format_sql_template(sql.strip(), line_length=line_length, lower_keywords=lower_keywords)
5202
+ def format_sql(
5203
+ sql: str,
5204
+ DATAFILE_INDENT: str,
5205
+ line_length: Optional[int] = None,
5206
+ lower_keywords: bool = False,
5207
+ resource_name: Optional[str] = None,
5208
+ resource_source: Optional[str] = None,
5209
+ ) -> str:
5210
+ sql = format_sql_template(
5211
+ sql.strip(),
5212
+ line_length=line_length,
5213
+ lower_keywords=lower_keywords,
5214
+ resource_name=resource_name,
5215
+ resource_source=resource_source,
5216
+ )
5195
5217
  return "\n".join([f"{DATAFILE_INDENT}{part}" for part in sql.split("\n") if len(part.strip())])
5196
5218
 
5197
5219
 
@@ -5299,9 +5321,8 @@ async def folder_pull(
5299
5321
  if m in resources or m in resource_names:
5300
5322
  resource_to_write = resource_to_write.replace(match, m)
5301
5323
  await fd.write(resource_to_write)
5302
- else:
5303
- if verbose:
5304
- click.echo(FeedbackManager.info_skip_already_exists())
5324
+ elif verbose:
5325
+ click.echo(FeedbackManager.info_skip_already_exists())
5305
5326
  except Exception as e:
5306
5327
  raise click.ClickException(FeedbackManager.error_exception(error=e))
5307
5328
 
@@ -5429,7 +5450,7 @@ async def diff_command(
5429
5450
  sys.stdout.writelines(diff_lines)
5430
5451
  click.echo("")
5431
5452
 
5432
- for rfilename, _ in local_resources.items():
5453
+ for rfilename in local_resources.keys():
5433
5454
  if rfilename not in changed:
5434
5455
  for resource in remote_datasources + remote_pipes:
5435
5456
  properties = get_name_version(resource["name"])
@@ -5460,7 +5481,7 @@ async def diff_files(
5460
5481
  with open(filename) as file:
5461
5482
  return file.readlines()
5462
5483
 
5463
- async def parse(filename, with_format=True, unroll_includes=False):
5484
+ async def parse(filename, with_format=True, unroll_includes=False, resource_source=None):
5464
5485
  extensions = Path(filename).suffixes
5465
5486
  lines = None
5466
5487
  if is_file_a_datasource(filename):
@@ -5472,6 +5493,7 @@ async def diff_files(
5472
5493
  client=client,
5473
5494
  replace_includes=True,
5474
5495
  for_deploy_diff=for_deploy,
5496
+ resource_source=resource_source,
5475
5497
  )
5476
5498
  if with_format
5477
5499
  else file_lines(filename)
@@ -5484,6 +5506,7 @@ async def diff_files(
5484
5506
  unroll_includes=unroll_includes,
5485
5507
  replace_includes=True,
5486
5508
  for_deploy_diff=for_deploy,
5509
+ resource_source=resource_source,
5487
5510
  )
5488
5511
  if with_format
5489
5512
  else file_lines(filename)
@@ -5494,8 +5517,8 @@ async def diff_files(
5494
5517
  return [f"{l}\n" for l in lines.split("\n")] if with_format else lines # noqa: E741
5495
5518
 
5496
5519
  try:
5497
- lines1 = await parse(from_file, with_format)
5498
- lines2 = await parse(to_file, with_format, unroll_includes=True)
5520
+ lines1 = await parse(from_file, with_format, resource_source="remote")
5521
+ lines2 = await parse(to_file, with_format, unroll_includes=True, resource_source="local")
5499
5522
  except FileNotFoundError as e:
5500
5523
  filename = os.path.basename(str(e)).strip("'")
5501
5524
  raise click.ClickException(FeedbackManager.error_diff_file(filename=filename))
@@ -5616,7 +5639,7 @@ def is_file_a_datasource(filename: str) -> bool:
5616
5639
 
5617
5640
  for line in lines:
5618
5641
  trimmed_line = line.strip().lower()
5619
- if trimmed_line.startswith("schema") or trimmed_line.startswith("engine"):
5642
+ if trimmed_line.startswith(("schema", "engine")):
5620
5643
  return True
5621
5644
 
5622
5645
  return False
@@ -7,6 +7,9 @@ from typing import Any, Dict, Iterable, List, Optional
7
7
 
8
8
  valid_chars_name: str = string.ascii_letters + string.digits + "._`*<>+-'"
9
9
  valid_chars_fn: str = valid_chars_name + "[](),=!?:/ \n\t\r"
10
+ # Use sets for O(1) membership checks in hot loops
11
+ _VALID_CHARS_NAME_SET = set(valid_chars_name)
12
+ _VALID_CHARS_FN_SET = set(valid_chars_fn)
10
13
 
11
14
  INDEX_WHITELIST = ["minmax", "set", "bloom_filter", "ngrambf_v1", "tokenbf_v1"]
12
15
  INDEX_SUPPORTED_TYPES = {
@@ -28,6 +31,15 @@ INDEX_SUPPORTED_TYPES = {
28
31
  "ngrambf_v1": ["String", "FixedString", "Map"],
29
32
  }
30
33
 
34
+ # Precompiled regexes used across functions to avoid recompilation overhead
35
+ _RE_FORMAT = re.compile(r"\s+format\s+(\w+)\s*$", re.IGNORECASE)
36
+ _RE_REMOVE_FORMAT = re.compile(r"\s+(format)\s+(\w+)\s*$", re.IGNORECASE)
37
+ _RE_TRY_FIX_NULLABLE_SAF = re.compile(r"SimpleAggregateFunction\((\w+),\s*(?!(?:Nullable))([\w,.()]+)\)")
38
+ _RE_INDEX_ENTRY = re.compile(
39
+ r"(\w+)\s+([\w\s*\[\]\*\(\),\'\"-><.]+)\s+TYPE\s+(\w+)(?:\(([\w\s*.,]+)\))?(?:\s+GRANULARITY\s+(\d+))?"
40
+ )
41
+ _RE_REPLICATED_MT = re.compile(r"Replicated(.*)MergeTree\(([^\)]*)\)(.*)")
42
+
31
43
 
32
44
  @dataclass
33
45
  class TableIndex:
@@ -137,10 +149,9 @@ def get_format(sql: str) -> Optional[str]:
137
149
  >>> get_format('select * from test formAt JSON')
138
150
  'JSON'
139
151
  """
140
- FORMAT_RE = r"\s+format\s+(\w+)\s*$"
141
152
  sql = sql.strip()
142
- format = re.findall(FORMAT_RE, sql, re.IGNORECASE)
143
- return format[0] if format else None
153
+ match = _RE_FORMAT.findall(sql)
154
+ return match[0] if match else None
144
155
 
145
156
 
146
157
  def get_format_group(sql: str) -> str:
@@ -151,10 +162,9 @@ def get_format_group(sql: str) -> str:
151
162
  >>> get_format_group('select * from test formAt JSON')
152
163
  ' formAt JSON'
153
164
  """
154
- FORMAT_RE = r"\s+format\s+(\w+)\s*$"
155
165
  sql = sql.strip()
156
- format = re.search(FORMAT_RE, sql, re.IGNORECASE)
157
- return format.group() if format else ""
166
+ match = _RE_FORMAT.search(sql)
167
+ return match.group() if match else ""
158
168
 
159
169
 
160
170
  def wrap_finalize_aggregation(sql: str, describe_result: Dict[str, Any], fm_group: Optional[str] = None) -> str:
@@ -186,9 +196,8 @@ def remove_format(sql: str) -> str:
186
196
  >>> remove_format('select * from test formAt JSON')
187
197
  'select * from test'
188
198
  """
189
- FORMAT_RE = r"\s+(format)\s+(\w+)\s*$"
190
199
  sql = sql.strip()
191
- return re.sub(FORMAT_RE, "", sql, flags=re.IGNORECASE)
200
+ return _RE_REMOVE_FORMAT.sub("", sql)
192
201
 
193
202
 
194
203
  def col_name(name: str, backquotes: bool = True) -> str:
@@ -220,7 +229,7 @@ def try_to_fix_nullable_in_simple_aggregating_function(t: str) -> Optional[str]:
220
229
  # as it is done with other aggregate functions.
221
230
  # If not, the aggregation could return incorrect results.
222
231
  result = None
223
- if match := re.search(r"SimpleAggregateFunction\((\w+),\s*(?!(?:Nullable))([\w,.()]+)\)", t):
232
+ if match := _RE_TRY_FIX_NULLABLE_SAF.search(t):
224
233
  fn = match.group(1)
225
234
  inner_type = match.group(2)
226
235
  result = f"SimpleAggregateFunction({fn}, Nullable({inner_type}))"
@@ -342,10 +351,7 @@ def parse_indexes_structure(indexes: Optional[List[str]]) -> List[TableIndex]:
342
351
  if index.count("TYPE") != 1:
343
352
  raise ValueError("invalid INDEX format. Usage: `name expr TYPE type_full GRANULARITY granularity`")
344
353
 
345
- match = re.match(
346
- r"(\w+)\s+([\w\s*\[\]\*\(\),\'\"-><.]+)\s+TYPE\s+(\w+)(?:\(([\w\s*.,]+)\))?(?:\s+GRANULARITY\s+(\d+))?",
347
- index,
348
- )
354
+ match = _RE_INDEX_ENTRY.match(index)
349
355
  if match:
350
356
  index_name, a, index_type, value, granularity = match.groups()
351
357
  index_expr = f"{index_type}({value})" if value else index_type
@@ -552,7 +558,7 @@ def clean_comments(schema_to_clean: str) -> str:
552
558
  if i + 1 < len(line) and line[i] == "-" and line[i + 1] == "-" and not inside_json_path:
553
559
  return line[:i].strip()
554
560
 
555
- if not inside_json_path and line[i:].startswith("`json:"):
561
+ if not inside_json_path and line.startswith("`json:", i):
556
562
  inside_json_path = True
557
563
  elif inside_json_path and line[i] == "`":
558
564
  inside_json_path = False
@@ -562,12 +568,12 @@ def clean_comments(schema_to_clean: str) -> str:
562
568
  if schema_to_clean is None:
563
569
  return schema_to_clean
564
570
 
565
- cleaned_schema = ""
571
+ out_lines: List[str] = []
566
572
  for line in schema_to_clean.splitlines():
567
573
  cleaned_line = clean_line_comments(line)
568
574
  if cleaned_line:
569
- cleaned_schema += cleaned_line + "\n"
570
- return cleaned_schema.strip()
575
+ out_lines.append(cleaned_line)
576
+ return "\n".join(out_lines).strip()
571
577
 
572
578
 
573
579
  SyntaxExpr = namedtuple("SyntaxExpr", ["name", "regex"])
@@ -608,9 +614,9 @@ def _parse_table_structure(schema: str) -> List[Dict[str, Any]]:
608
614
 
609
615
  # Find the first SyntaxExpr in lookup that matches the schema at the current offset
610
616
  def lookahead_matches(lookup: Iterable) -> Optional[SyntaxExpr]:
617
+ # Use substring to preserve semantics of patterns anchored to end ($)
611
618
  s = schema[i:]
612
- match = next((x for x in lookup if x.regex.match(s)), None)
613
- return match
619
+ return next((x for x in lookup if x.regex.match(s)), None)
614
620
 
615
621
  def advance_single_char() -> None:
616
622
  nonlocal i, line, pos
@@ -647,7 +653,7 @@ def _parse_table_structure(schema: str) -> List[Dict[str, Any]]:
647
653
  c = schema[i]
648
654
  if c in " \t\r\n":
649
655
  return schema[begin:i]
650
- if c not in valid_chars_name:
656
+ if c not in _VALID_CHARS_NAME_SET:
651
657
  raise ValueError(
652
658
  format_parse_error(schema, i, pos, "wrong value, please check the schema syntax", line=line)
653
659
  )
@@ -677,7 +683,9 @@ def _parse_table_structure(schema: str) -> List[Dict[str, Any]]:
677
683
  context_stack.append("(")
678
684
  elif context is None and lookahead_matches(lookup):
679
685
  return schema[begin:i].strip(" \t\r\n")
680
- elif (context is None and c not in valid_chars_fn) or (context == "(" and c not in valid_chars_fn):
686
+ elif (context is None and c not in _VALID_CHARS_FN_SET) or (
687
+ context == "(" and c not in _VALID_CHARS_FN_SET
688
+ ):
681
689
  raise ValueError(
682
690
  format_parse_error(schema, i, pos, "wrong value, please check the schema syntax", line=line)
683
691
  )
@@ -884,7 +892,7 @@ def engine_replicated_to_local(engine: str) -> str:
884
892
  if "Replicated" not in engine:
885
893
  return engine
886
894
 
887
- return re.sub(r"Replicated(.*)MergeTree\(([^\)]*)\)(.*)", _replace, engine.strip())
895
+ return _RE_REPLICATED_MT.sub(_replace, engine.strip())
888
896
 
889
897
 
890
898
  def engine_patch_replicated_engine(engine: str, engine_full: Optional[str], new_table_name: str) -> Optional[str]:
@@ -907,7 +915,7 @@ def engine_patch_replicated_engine(engine: str, engine_full: Optional[str], new_
907
915
  paths = parts[2].split("/")
908
916
  paths[-1] = new_table_name
909
917
  zoo_path = "/".join(paths)
910
- return "".join(parts[:2] + [zoo_path] + parts[3:])
918
+ return "".join([*parts[:2], zoo_path, *parts[3:]])
911
919
  return engine_full
912
920
 
913
921
 
@@ -384,14 +384,13 @@ def array_type(types):
384
384
  if isinstance(x, Placeholder):
385
385
  if default:
386
386
  x = default
387
- else:
388
- if _type and _type in types:
389
- if _type == "String":
390
- x = ""
391
- else:
392
- x = ",".join(map(str, [types[_type](x) for _ in range(2)]))
393
- else:
387
+ elif _type and _type in types:
388
+ if _type == "String":
394
389
  x = ""
390
+ else:
391
+ x = ",".join(map(str, [types[_type](x) for _ in range(2)]))
392
+ else:
393
+ x = ""
395
394
  elif x is None:
396
395
  x = default
397
396
  if x is None:
@@ -1405,23 +1404,33 @@ def generate(self, **kwargs) -> Tuple[str, TemplateExecutionResults]:
1405
1404
  namespace = {}
1406
1405
  template_execution_results = TemplateExecutionResults()
1407
1406
  for key in kwargs.get("tb_secrets", []):
1407
+ # Avoid double-prefixing if the key already has the tb_secret_ prefix
1408
1408
  if is_secret_template_key(key):
1409
1409
  template_execution_results.add_template_param(key)
1410
+ else:
1411
+ template_execution_results.add_template_param(secret_template_key(key))
1410
1412
 
1411
1413
  if TB_SECRET_IN_TEST_MODE in kwargs:
1412
1414
  template_execution_results[TB_SECRET_IN_TEST_MODE] = None
1413
1415
 
1414
- def set_tb_secret(x):
1416
+ def set_tb_secret(x, default=None):
1415
1417
  try:
1416
1418
  key = secret_template_key(x)
1417
1419
  if key in template_execution_results.template_params:
1420
+ # secret available: Always use workspace secret regardless of test mode
1418
1421
  template_execution_results.add_ch_param(x)
1419
1422
  return Symbol("{" + sqlescape(x) + ": String}")
1420
1423
  else:
1424
+ # secret not available: Check test mode and defaults
1421
1425
  is_test_mode = TB_SECRET_IN_TEST_MODE in template_execution_results
1422
- if is_test_mode:
1426
+ if default is not None:
1427
+ # Use provided default value
1428
+ return default
1429
+ elif is_test_mode:
1430
+ # In test mode without default - return placeholder
1423
1431
  return Symbol("{" + sqlescape(x) + ": String}")
1424
1432
  else:
1433
+ # Not in test mode, no secret, no default - raise error
1425
1434
  raise SQLTemplateException(
1426
1435
  f"Cannot access secret '{x}'. Check the secret exists in the Workspace and the token has the required scope."
1427
1436
  )
@@ -1832,7 +1841,7 @@ def get_var_names_and_types(t, node_id=None):
1832
1841
  raise SQLTemplateException(e)
1833
1842
 
1834
1843
 
1835
- @lru_cache(maxsize=256)
1844
+ @lru_cache(maxsize=512)
1836
1845
  def get_var_names_and_types_cached(t: Template):
1837
1846
  return get_var_names_and_types(t)
1838
1847
 
@@ -2266,6 +2275,18 @@ def render_sql_template(
2266
2275
  Traceback (most recent call last):
2267
2276
  ...
2268
2277
  tinybird.sql_template.SQLTemplateException: Template Syntax Error: Cannot access secret 'test'. Check the secret exists in the Workspace and the token has the required scope.
2278
+ >>> render_sql_template("select * from table where str = {{tb_secret('test', 'default_value')}}", secrets = [])
2279
+ ("select * from table where str = 'default_value'", {}, [])
2280
+ >>> render_sql_template("select * from table where str = {{tb_secret('test', 'default_value')}}", secrets = [ 'tb_secret_test' ])
2281
+ ('select * from table where str = {test: String}', {}, [])
2282
+ >>> render_sql_template("select * from table where str = {{tb_secret('test', '')}}")
2283
+ ("select * from table where str = ''", {}, [])
2284
+ >>> render_sql_template("select * from table where str = {{tb_secret('test', 'default_value')}}", test_mode=True)
2285
+ ("select * from table where str = 'default_value'", {}, [])
2286
+ >>> render_sql_template("select * from table where str = {{tb_secret('test', '')}}", test_mode=True)
2287
+ ("select * from table where str = ''", {}, [])
2288
+ >>> render_sql_template("select * from table where str = {{tb_secret('test', 'default_value')}}", secrets = [ 'tb_secret_test' ], test_mode=True)
2289
+ ('select * from table where str = {test: String}', {}, [])
2269
2290
  >>> render_sql_template("select * from table where str = {{String(test)}} and category = {{String(category, 'shirts')}} and color = {{ Int32(color)}}", test_mode=False)
2270
2291
  Traceback (most recent call last):
2271
2292
  ...
@@ -2364,6 +2385,14 @@ def render_sql_template(
2364
2385
  documentation="/cli/advanced-templates.html",
2365
2386
  )
2366
2387
  raise SQLTemplateException(str(e), documentation="/cli/advanced-templates.html")
2388
+ except IndexError as e:
2389
+ # This happens when trying to access string indices on empty strings
2390
+ if "string index out of range" in str(e):
2391
+ raise SQLTemplateException(
2392
+ "String index out of range. Check that string parameters have values before accessing specific characters (e.g., param[0]). Provide default values or add length checks in your template.",
2393
+ documentation="/cli/advanced-templates.html",
2394
+ )
2395
+ raise SQLTemplateException(str(e), documentation="/cli/advanced-templates.html")
2367
2396
  except Exception as e:
2368
2397
  # errors might vary here, we need to support as much as possible
2369
2398
  # https://gitlab.com/tinybird/analytics/-/issues/943
@@ -261,7 +261,13 @@ def _calc_str(self) -> str:
261
261
  Comment._calc_str = property(_calc_str)
262
262
 
263
263
 
264
- def format_sql_template(sql: str, line_length: Optional[int] = None, lower_keywords: bool = False) -> str:
264
+ def format_sql_template(
265
+ sql: str,
266
+ line_length: Optional[int] = None,
267
+ lower_keywords: bool = False,
268
+ resource_name: Optional[str] = None,
269
+ resource_source: Optional[str] = None,
270
+ ) -> str:
265
271
  try:
266
272
  # https://github.com/tconbeer/sqlfmt/blob/c11775b92d8a45f0e91d871b81a88a894d620bec/src/sqlfmt/mode.py#L16-L29
267
273
  config: Dict[str, Any] = {
@@ -277,5 +283,7 @@ def format_sql_template(sql: str, line_length: Optional[int] = None, lower_keywo
277
283
  else api.format_string(sql, mode=mode).strip()
278
284
  )
279
285
  except Exception as e:
280
- logging.warning(f"sqlfmt error: {str(e)}")
286
+ resource_info = f" in '{resource_name}'" if resource_name else ""
287
+ source_info = f" ({resource_source})" if resource_source else ""
288
+ logging.warning(f"sqlfmt error{resource_info}{source_info}: {str(e)}")
281
289
  return sql
@@ -144,7 +144,7 @@ def format_where_for_mutation_command(where_clause: str) -> str:
144
144
  return f"DELETE WHERE {quoted_condition[1:-1]}"
145
145
 
146
146
 
147
- @lru_cache(maxsize=2**13)
147
+ @lru_cache(maxsize=2**15)
148
148
  def sql_get_used_tables_cached(
149
149
  sql: str,
150
150
  raising: bool = False,
@@ -301,7 +301,7 @@ def replacements_to_tuples(replacements: dict) -> dict:
301
301
  return parsed_replacements
302
302
 
303
303
 
304
- @lru_cache(maxsize=2**13)
304
+ @lru_cache(maxsize=2**15)
305
305
  def replace_tables_chquery_cached(
306
306
  sql: str,
307
307
  sorted_replacements: Optional[tuple] = None,
@@ -19,7 +19,7 @@ import humanfriendly
19
19
  from click import Context
20
20
  from packaging import version
21
21
 
22
- import tinybird.context as context
22
+ from tinybird import context
23
23
  from tinybird.client import (
24
24
  AuthException,
25
25
  AuthNoTokenException,
@@ -904,7 +904,7 @@ async def sql(
904
904
  verbose=False,
905
905
  )
906
906
  query = ""
907
- for _, elem in dependencies_graph.to_run.items():
907
+ for elem in dependencies_graph.to_run.values():
908
908
  for _node in elem["nodes"]:
909
909
  if _node["params"]["name"].lower() == node.lower():
910
910
  query = "".join(_node["sql"])
@@ -983,8 +983,8 @@ async def materialize(
983
983
  ctx: Context,
984
984
  filename: str,
985
985
  push_deps: bool,
986
- workspace_map: List[str],
987
- workspace: List[str],
986
+ workspace_map: Tuple[Tuple[str, str], ...],
987
+ workspace: Tuple[Tuple[str, str], ...],
988
988
  no_versions: bool,
989
989
  verbose: bool,
990
990
  force_populate: Optional[str],
@@ -999,7 +999,7 @@ async def materialize(
999
999
  click.echo(deprecation_notice)
1000
1000
  cl = create_tb_client(ctx)
1001
1001
 
1002
- async def _try_push_pipe_to_analyze(pipe_name):
1002
+ async def _try_push_pipe_to_analyze(pipe_name: str) -> Optional[Any]:
1003
1003
  try:
1004
1004
  to_run = await folder_push(
1005
1005
  cl,
@@ -1020,7 +1020,7 @@ async def materialize(
1020
1020
  except AlreadyExistsException as e:
1021
1021
  if "Datasource" in str(e):
1022
1022
  click.echo(str(e))
1023
- return
1023
+ return None
1024
1024
  if override_pipe or click.confirm(FeedbackManager.info_pipe_exists(name=pipe_name)):
1025
1025
  to_run = await folder_push(
1026
1026
  cl,
@@ -1038,7 +1038,7 @@ async def materialize(
1038
1038
  verbose=verbose,
1039
1039
  )
1040
1040
  else:
1041
- return
1041
+ return None
1042
1042
  except click.ClickException as ex:
1043
1043
  # HACK: By now, datafile raises click.ClickException instead of
1044
1044
  # CLIException to avoid circular imports. Thats we need to trace
@@ -1229,7 +1229,7 @@ async def materialize(
1229
1229
  raise CLIException(FeedbackManager.error_exception(error=str(e)))
1230
1230
 
1231
1231
 
1232
- def __patch_click_output():
1232
+ def __patch_click_output() -> None:
1233
1233
  import re
1234
1234
 
1235
1235
  CUSTOM_PATTERNS: List[str] = []
@@ -1042,7 +1042,7 @@ def get_format_from_filename_or_url(filename_or_url: str) -> str:
1042
1042
  'csv'
1043
1043
  """
1044
1044
  filename_or_url = filename_or_url.lower()
1045
- if filename_or_url.endswith("json") or filename_or_url.endswith("ndjson"):
1045
+ if filename_or_url.endswith(("json", "ndjson")):
1046
1046
  return "ndjson"
1047
1047
  if filename_or_url.endswith("parquet"):
1048
1048
  return "parquet"
@@ -1050,7 +1050,7 @@ def get_format_from_filename_or_url(filename_or_url: str) -> str:
1050
1050
  return "csv"
1051
1051
  try:
1052
1052
  parsed = urlparse(filename_or_url)
1053
- if parsed.path.endswith("json") or parsed.path.endswith("ndjson"):
1053
+ if parsed.path.endswith(("json", "ndjson")):
1054
1054
  return "ndjson"
1055
1055
  if parsed.path.endswith("parquet"):
1056
1056
  return "parquet"
@@ -13,7 +13,7 @@ import click
13
13
  import humanfriendly
14
14
  from click import Context
15
15
 
16
- import tinybird.context as context
16
+ from tinybird import context
17
17
  from tinybird.client import AuthNoTokenException, DoesNotExistException, TinyB
18
18
  from tinybird.config import DEFAULT_API_HOST, FeatureFlags
19
19
  from tinybird.datafile_common import PipeNodeTypes, PipeTypes, folder_push, get_name_version, process_file, wait_job
@@ -5,7 +5,6 @@ from typing import Any, Dict, Iterable, List, Optional
5
5
  import click
6
6
  import yaml
7
7
  from humanfriendly.tables import format_smart_table
8
- from typing_extensions import override
9
8
 
10
9
  from tinybird.client import TinyB
11
10
  from tinybird.feedback_manager import FeedbackManager
@@ -118,19 +117,6 @@ class TestResult:
118
117
  return PASS_OVER_TIME
119
118
  return PASS
120
119
 
121
- @override
122
- def __dict__(self):
123
- return {
124
- "name": self.name,
125
- "data": self.data,
126
- "elapsed_time": self.elapsed_time,
127
- "read_bytes": self.read_bytes,
128
- "max_elapsed_time": self.max_elapsed_time,
129
- "max_bytes_read": self.max_bytes_read,
130
- "error": self.error,
131
- "status": self.status.name,
132
- }
133
-
134
120
 
135
121
  @dataclass()
136
122
  class TestSummaryResults:
@@ -3,8 +3,6 @@ from collections import namedtuple
3
3
  from json import JSONEncoder
4
4
  from typing import Optional
5
5
 
6
- from typing_extensions import override
7
-
8
6
 
9
7
  class MyJSONEncoder(JSONEncoder):
10
8
  # def default(self, in_obj):
@@ -56,10 +54,6 @@ class DataUnitTest:
56
54
  def __str__(self):
57
55
  return json.dumps(dict(self), ensure_ascii=False)
58
56
 
59
- @override
60
- def __dict__(self):
61
- return dict(self)
62
-
63
57
  def __repr__(self):
64
58
  return self.__str__()
65
59
 
@@ -289,12 +289,11 @@ class Template:
289
289
  if whitespace is None:
290
290
  if loader and loader.whitespace:
291
291
  whitespace = loader.whitespace
292
- else:
292
+ elif name.endswith((".html", ".js")):
293
293
  # Whitespace defaults by filename.
294
- if name.endswith(".html") or name.endswith(".js"):
295
- whitespace = "single"
296
- else:
297
- whitespace = "all"
294
+ whitespace = "single"
295
+ else:
296
+ whitespace = "all"
298
297
  # Validate the whitespace setting.
299
298
  filter_whitespace(whitespace, "")
300
299
 
@@ -1169,7 +1168,7 @@ def check_valid_expr(expr):
1169
1168
  check_valid_expr(expr.slice.lower)
1170
1169
  if expr.slice.upper is not None:
1171
1170
  check_valid_expr(expr.slice.upper)
1172
- elif isinstance(expr.slice, ast.Constant) or isinstance(expr.slice, ast.Subscript):
1171
+ elif isinstance(expr.slice, (ast.Constant, ast.Subscript)):
1173
1172
  check_valid_expr(expr.slice)
1174
1173
  else:
1175
1174
  raise SecurityException(f"Invalid Slice expression: {ast.dump(expr.slice)}")
@@ -1178,7 +1177,7 @@ def check_valid_expr(expr):
1178
1177
  check_valid_expr(key)
1179
1178
  for value in expr.values:
1180
1179
  check_valid_expr(value)
1181
- elif isinstance(expr, ast.Tuple) or isinstance(expr, ast.List) or isinstance(expr, ast.Set):
1180
+ elif isinstance(expr, (ast.Tuple, ast.List, ast.Set)):
1182
1181
  for x in expr.elts:
1183
1182
  check_valid_expr(x)
1184
1183
  elif isinstance(expr, ast.JoinedStr):
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: tinybird_cli
3
- Version: 5.21.2.dev0
3
+ Version: 5.22.1.dev0
4
4
  Summary: Tinybird Command Line Tool
5
5
  Home-page: https://www.tinybird.co/docs/cli
6
6
  Author: Tinybird
@@ -16,7 +16,7 @@ Requires-Dist: croniter==1.3.15
16
16
  Requires-Dist: GitPython~=3.1.32
17
17
  Requires-Dist: humanfriendly~=8.2
18
18
  Requires-Dist: pydantic~=2.8.0
19
- Requires-Dist: pyperclip==1.8.2
19
+ Requires-Dist: pyperclip==1.9.0
20
20
  Requires-Dist: pyyaml<6.1,>=6.0
21
21
  Requires-Dist: requests<3,>=2.28.1
22
22
  Requires-Dist: shandy-sqlfmt==0.11.1
@@ -61,6 +61,11 @@ The Tinybird command-line tool allows you to use all the Tinybird functionality
61
61
  Changelog
62
62
  ----------
63
63
 
64
+ 5.21.2
65
+ ***********
66
+
67
+ - `Improved` the error message shown when formatting a file with `tb fmt` or when running `tb init --git`; it now includes the resource name and its source (local/remote)
68
+
64
69
  5.21.1
65
70
  ***********
66
71
 
@@ -7,7 +7,7 @@ croniter==1.3.15
7
7
  GitPython~=3.1.32
8
8
  humanfriendly~=8.2
9
9
  pydantic~=2.8.0
10
- pyperclip==1.8.2
10
+ pyperclip==1.9.0
11
11
  pyyaml<6.1,>=6.0
12
12
  requests<3,>=2.28.1
13
13
  shandy-sqlfmt==0.11.1