tinybird 0.0.1.dev33.tar.gz → 0.0.1.dev35.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of tinybird might be problematic.

Files changed (102)
  1. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/PKG-INFO +1 -1
  2. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/context.py +1 -1
  3. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/datafile.py +55 -1
  4. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/feedback_manager.py +6 -0
  5. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/prompts.py +4 -0
  6. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/sql_toolset.py +9 -2
  7. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/__cli__.py +2 -2
  8. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/cli.py +2 -2
  9. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/build.py +21 -1
  10. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/cli.py +1 -1
  11. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/datafile/common.py +17 -1
  12. tinybird-0.0.1.dev35/tinybird/tb/modules/datasource.py +360 -0
  13. tinybird-0.0.1.dev33/tinybird/tb/modules/deploy.py → tinybird-0.0.1.dev35/tinybird/tb/modules/deployment.py +22 -12
  14. tinybird-0.0.1.dev35/tinybird/tb/modules/endpoint.py +187 -0
  15. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/local.py +19 -4
  16. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/pipe.py +1 -254
  17. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/shell.py +8 -1
  18. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/watch.py +4 -4
  19. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/workspace.py +0 -96
  20. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/common.py +19 -17
  21. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird.egg-info/PKG-INFO +1 -1
  22. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird.egg-info/SOURCES.txt +2 -2
  23. tinybird-0.0.1.dev33/tinybird/tb/modules/connection.py +0 -803
  24. tinybird-0.0.1.dev33/tinybird/tb/modules/datasource.py +0 -828
  25. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/setup.cfg +0 -0
  26. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/__cli__.py +0 -0
  27. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/ch_utils/constants.py +0 -0
  28. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/ch_utils/engine.py +0 -0
  29. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/check_pypi.py +0 -0
  30. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/client.py +0 -0
  31. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/config.py +0 -0
  32. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/connectors.py +0 -0
  33. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/datatypes.py +0 -0
  34. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/git_settings.py +0 -0
  35. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/sql.py +0 -0
  36. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/sql_template.py +0 -0
  37. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/sql_template_fmt.py +0 -0
  38. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/syncasync.py +0 -0
  39. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/auth.py +0 -0
  40. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/build_client.py +0 -0
  41. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/cicd.py +0 -0
  42. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/common.py +0 -0
  43. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/config.py +0 -0
  44. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/create.py +0 -0
  45. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/datafile/build.py +0 -0
  46. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/datafile/build_common.py +0 -0
  47. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/datafile/build_datasource.py +0 -0
  48. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/datafile/build_pipe.py +0 -0
  49. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/datafile/diff.py +0 -0
  50. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/datafile/exceptions.py +0 -0
  51. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/datafile/fixture.py +0 -0
  52. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/datafile/format_common.py +0 -0
  53. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/datafile/format_datasource.py +0 -0
  54. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/datafile/format_pipe.py +0 -0
  55. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/datafile/parse_datasource.py +0 -0
  56. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/datafile/parse_pipe.py +0 -0
  57. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/datafile/pipe_checker.py +0 -0
  58. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/datafile/pull.py +0 -0
  59. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/exceptions.py +0 -0
  60. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/feedback_manager.py +0 -0
  61. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/fmt.py +0 -0
  62. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/job.py +0 -0
  63. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/llm.py +0 -0
  64. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/llm_utils.py +0 -0
  65. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/local_common.py +0 -0
  66. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/login.py +0 -0
  67. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/mock.py +0 -0
  68. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/regions.py +0 -0
  69. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/table.py +0 -0
  70. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/tag.py +0 -0
  71. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/telemetry.py +0 -0
  72. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/test.py +0 -0
  73. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/tinyunit/tinyunit.py +0 -0
  74. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/tinyunit/tinyunit_lib.py +0 -0
  75. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/token.py +0 -0
  76. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/update.py +0 -0
  77. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb/modules/workspace_members.py +0 -0
  78. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli.py +0 -0
  79. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/auth.py +0 -0
  80. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/branch.py +0 -0
  81. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/cicd.py +0 -0
  82. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/cli.py +0 -0
  83. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/config.py +0 -0
  84. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/connection.py +0 -0
  85. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/datasource.py +0 -0
  86. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/exceptions.py +0 -0
  87. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/fmt.py +0 -0
  88. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/job.py +0 -0
  89. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/pipe.py +0 -0
  90. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/regions.py +0 -0
  91. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/tag.py +0 -0
  92. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/telemetry.py +0 -0
  93. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/test.py +0 -0
  94. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/tinyunit/tinyunit.py +0 -0
  95. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/tinyunit/tinyunit_lib.py +0 -0
  96. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/workspace.py +0 -0
  97. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tb_cli_modules/workspace_members.py +0 -0
  98. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird/tornado_template.py +0 -0
  99. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird.egg-info/dependency_links.txt +0 -0
  100. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird.egg-info/entry_points.txt +0 -0
  101. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird.egg-info/requires.txt +0 -0
  102. {tinybird-0.0.1.dev33 → tinybird-0.0.1.dev35}/tinybird.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: tinybird
- Version: 0.0.1.dev33
+ Version: 0.0.1.dev35
  Summary: Tinybird Command Line Tool
  Home-page: https://www.tinybird.co/docs/cli/introduction.html
  Author: Tinybird
@@ -7,7 +7,7 @@ if TYPE_CHECKING:

  workspace_id: ContextVar[str] = ContextVar("workspace_id")
  workspace: ContextVar["User"] = ContextVar("workspace")
- table_id: ContextVar[str] = ContextVar("table_id")
+ datasource_id: ContextVar[str] = ContextVar("datasource_id")
  hfi_frequency: ContextVar[float] = ContextVar("hfi_frequency")
  hfi_frequency_gatherer: ContextVar[float] = ContextVar("hfi_frequency_gatherer")
  use_gatherer: ContextVar[bool] = ContextVar("use_gatherer")
@@ -3434,14 +3434,33 @@ async def new_ds(
  DATASOURCE_VALID_SERVICES_TO_UPDATE = ["bigquery", "snowflake"]
  if datasource_exists and service and service in [*DATASOURCE_VALID_SERVICES_TO_UPDATE, *PREVIEW_CONNECTOR_SERVICES]:
  connector_required_params = {
- "bigquery": ["service", "cron", "external_data_source"],
+ "bigquery": ["service", "cron"],
  "snowflake": ["connector", "service", "cron", "external_data_source"],
  "s3": ["connector", "service", "cron", "bucket_uri"],
  "s3_iamrole": ["connector", "service", "cron", "bucket_uri"],
  "gcs": ["connector", "service", "cron", "bucket_uri"],
  }.get(service, [])

+ connector_at_least_one_required_param = {
+ "bigquery": ["external_data_source", "query"],
+ }.get(service, [])
+
+ if connector_at_least_one_required_param and not any(
+ key in ds_params for key in connector_at_least_one_required_param
+ ):
+ params = [
+ (ImportReplacements.get_datafile_param_for_linker_param(service, param) or param).upper()
+ for param in connector_at_least_one_required_param
+ ]
+ click.echo(FeedbackManager.error_updating_connector_missing_at_least_one_param(param=" or ".join(params)))
+ return
+
  if not all(key in ds_params for key in connector_required_params):
+ params = [
+ (ImportReplacements.get_datafile_param_for_linker_param(service, param) or param).upper()
+ for param in connector_required_params
+ ]
+ click.echo(FeedbackManager.error_updating_connector_missing_params(param=", ".join(params)))
  return

  connector = ds_params.get("connector", None)
@@ -5589,3 +5608,38 @@ def is_file_a_datasource(filename: str) -> bool:
  return True

  return False
+
+
+ def update_connector_params(service: str, ds_params: Dict[str, Any], connector_required_params: List[str]) -> None:
+ """
+ Update connector parameters for a given service, ensuring required parameters exist.
+
+ :param service: The name of the service (e.g., 'bigquery').
+ :param ds_params: The data source parameters to be checked.
+ :param connector_required_params: The list of required parameters for the connector.
+ :return: None
+ """
+
+ connector_at_least_one_required_param: List[str] = {
+ "bigquery": ["external_data_source", "query"],
+ }.get(service, [])
+
+ # Handle the "at least one param" requirement
+ if connector_at_least_one_required_param and not any(
+ key in ds_params for key in connector_at_least_one_required_param
+ ):
+ params = [
+ (ImportReplacements.get_datafile_param_for_linker_param(service, param) or param).upper()
+ for param in connector_at_least_one_required_param
+ ]
+ click.echo(FeedbackManager.error_updating_connector_missing_at_least_one_param(param=" or ".join(params)))
+ return
+
+ # Handle the mandatory params requirement
+ if not all(key in ds_params for key in connector_required_params):
+ params = [
+ (ImportReplacements.get_datafile_param_for_linker_param(service, param) or param).upper()
+ for param in connector_required_params
+ ]
+ click.echo(FeedbackManager.error_updating_connector_missing_params(param=", ".join(params)))
+ return
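
Note: with the looser required-params list above, a BigQuery connection update now needs SERVICE and CRON plus at least one of the "external_data_source" / "query" settings. A minimal sketch of how the extracted helper behaves (the calls below are hypothetical illustrations; the actual call site is not part of this diff):

    # Hypothetical usage of update_connector_params(), for illustration only.
    ds_params = {"service": "bigquery", "cron": "0 * * * *"}  # neither external_data_source nor query
    update_connector_params("bigquery", ds_params, ["service", "cron"])
    # echoes FeedbackManager.error_updating_connector_missing_at_least_one_param(...) and returns early

    ds_params["query"] = "SELECT * FROM my_dataset.my_table"
    update_connector_params("bigquery", ds_params, ["service", "cron"])
    # both checks pass; the helper falls through without printing anything
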
@@ -130,6 +130,12 @@ class FeedbackManager:
  error_remove_no_endpoint = error_message("Pipe does not have any endpoint")
  error_updating_pipe = error_message("Failed updating pipe {error}")
  error_updating_connector_not_supported = error_message("Changing {param} is not currently supported")
+ error_updating_connector_missing_at_least_one_param = error_message(
+ "Connection settings not updated. Connection info should have at least one of {param} settings"
+ )
+ error_updating_connector_missing_params = error_message(
+ "Connection settings not updated. Connection info should have {param} settings"
+ )
  error_removing_node = error_message("Failed removing node from pipe {pipe}: {error}")
  error_pushing_pipe = error_message("Failed pushing pipe {pipe}: {error}")
  error_creating_endpoint = error_message("Failed creating endpoint in node {node} on pipe {pipe}: {error}")
@@ -643,6 +643,8 @@ datasource_instructions = """
  <datasource_file_instructions>
  - The datasource names must be unique.
  - No indentation is allowed for property names: DESCRIPTION, SCHEMA, ENGINE, ENGINE_PARTITION_KEY, ENGINE_SORTING_KEY, etc.
+ - Use MergeTree engine by default.
+ - Use AggregatingMergeTree engine when the datasource is the target of a materialized pipe.
  </datasource_file_instructions>
  """

@@ -732,6 +734,8 @@ sql_instructions = """
  - When aliasing a column, use first the column name and then the alias.
  - General functions and aggregate functions are case sensitive.
  - Character insensitive functions are case insensitive.
+ - When you use defined function with a paremeter inside, do NOT add quotes around the parameter.
+ - Parameters are never quoted in any case.
  </sql_instructions>
  """.format(
  general_functions=general_functions,
@@ -3,7 +3,7 @@ import logging
  from collections import defaultdict
  from datetime import datetime
  from functools import lru_cache
- from typing import Any, FrozenSet, List, Optional, Set, Tuple
+ from typing import FrozenSet, List, Optional, Set, Tuple

  from chtoolset import query as chquery
  from toposort import toposort
@@ -172,7 +172,7 @@ def tables_or_sql(replacement: dict, table_functions=False) -> set:
  return {replacement}


- def _separate_as_tuple_if_contains_database_and_table(definition: str) -> Any:
+ def _separate_as_tuple_if_contains_database_and_table(definition: str) -> str | Tuple[str, str]:
  if "." in definition:
  database_and_table_separated = definition.split(".")
  return database_and_table_separated[0], database_and_table_separated[1]
@@ -219,6 +219,7 @@ def replace_tables(
  output_one_line: bool = False,
  timestamp: Optional[datetime] = None,
  function_allow_list: Optional[FrozenSet[str]] = None,
+ original_replacements: Optional[dict] = None,
  ) -> str:
  """
  Given a query and a list of table replacements, returns the query after applying the table replacements.
@@ -239,6 +240,12 @@ def replace_tables(
  _replacements[rk] = r if isinstance(r, tuple) else (default_database, r)
  _replaced_with.add(r)

+ if original_replacements:
+ # Some replacements have been expanded by filters and turned to a query str, but we need to send the original
+ # ones to is_invalid_resource()
+ for r in original_replacements.values():
+ _replaced_with.add(r)
+
  deps: defaultdict = defaultdict(set)
  _tables = sql_get_used_tables(
  sql,
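
The original_replacements parameter exists because callers may expand a replacement into a filtered subquery string before calling replace_tables, while the raw mapping still has to reach is_invalid_resource(), as the comment above explains. A rough sketch of the intended call, assuming the first two positional arguments are the SQL string and the replacements mapping as the docstring suggests (the calling code is not shown in this diff):

    # Illustrative only — argument order assumed from the docstring above.
    original = {"events": "t_1234_events"}
    expanded = {"events": "(SELECT * FROM t_1234_events WHERE tenant_id = 42)"}
    rewritten = replace_tables(
        "SELECT count() FROM events",
        expanded,                        # the replacement actually applied to the query
        original_replacements=original,  # raw names fed into _replaced_with for validation
    )
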
@@ -4,5 +4,5 @@ __description__ = 'Tinybird Command Line Tool'
  __url__ = 'https://www.tinybird.co/docs/cli/introduction.html'
  __author__ = 'Tinybird'
  __author_email__ = 'support@tinybird.co'
- __version__ = '0.0.1.dev33'
- __revision__ = '0cbe672'
+ __version__ = '0.0.1.dev35'
+ __revision__ = '5046116'
@@ -9,10 +9,10 @@ import tinybird.tb.modules.build
  import tinybird.tb.modules.build_client
  import tinybird.tb.modules.cli
  import tinybird.tb.modules.common
- import tinybird.tb.modules.connection
  import tinybird.tb.modules.create
  import tinybird.tb.modules.datasource
- import tinybird.tb.modules.deploy
+ import tinybird.tb.modules.deployment
+ import tinybird.tb.modules.endpoint
  import tinybird.tb.modules.fmt
  import tinybird.tb.modules.job
  import tinybird.tb.modules.local
@@ -3,6 +3,7 @@ import glob
  import json
  import logging
  import threading
+ import time
  from pathlib import Path
  from typing import List

@@ -27,11 +28,16 @@ def build(folder: str, watch: bool) -> None:
  """

  tb_client = asyncio.run(get_tinybird_local_client(folder))
+ click.echo(FeedbackManager.highlight(message="\n» Building project..."))

  def process() -> None:
  build_project(folder, tb_client)

+ time_start = time.time()
  process()
+ time_end = time.time()
+ elapsed_time = time_end - time_start
+ click.echo(FeedbackManager.success(message=f"\n✓ Build completed in {elapsed_time:.1f}s"))

  if watch:
  shell = Shell(folder=folder, client=tb_client)
@@ -71,6 +77,7 @@ def build_project(folder: str, tb_client: TinyB) -> None:
  fds = []
  project_path = Path(folder)
  project_files = get_project_files(project_path)
+
  for file_path in project_files:
  relative_path = str(Path(file_path).relative_to(project_path))
  fd = open(file_path, "rb")
@@ -91,7 +98,20 @@ def build_project(folder: str, tb_client: TinyB) -> None:

  build_result = result.get("result")
  if build_result == "success":
- click.echo(FeedbackManager.success(message="Build completed successfully"))
+ datasources = result.get("datasources", [])
+ pipes = result.get("pipes", [])
+ for ds in datasources:
+ ds_path = next((p for p in project_files if p.endswith(ds.get("name") + ".datasource")), None)
+ if ds_path:
+ ds_path = ds_path.replace(f"{folder}/", "")
+ click.echo(FeedbackManager.info(message=f"✓ {ds_path} created"))
+ for pipe in pipes:
+ pipe_name = pipe.get("name")
+ pipe_path = next((p for p in project_files if p.endswith(pipe_name + ".pipe")), None)
+ if pipe_path:
+ pipe_path = pipe_path.replace(f"{folder}/", "")
+ click.echo(FeedbackManager.info(message=f"✓ {pipe_path} created"))
+
  elif build_result == "failed":
  click.echo(FeedbackManager.error(message="Build failed"))
  build_errors = result.get("errors")
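
From the keys read above, the build endpoint's successful response is assumed to look roughly like this (a sketch inferred from the code, not from API documentation):

    result = {
        "result": "success",                   # or "failed", accompanied by an "errors" list
        "datasources": [{"name": "events"}],   # matched against *.datasource project files
        "pipes": [{"name": "events_by_day"}],  # matched against *.pipe project files
    }
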
@@ -210,7 +210,7 @@ async def cli(

  logging.debug("debug enabled")

- skip_client = ctx.invoked_subcommand in ["login", "workspace", "local"]
+ skip_client = ctx.invoked_subcommand in ["login", "workspace", "local", "build"]
  client = await create_ctx_client(config, prod, skip_client)

  if client:
@@ -204,6 +204,10 @@ class Datafile:
  if self.kind == DatafileKind.pipe:
  # TODO(eclbg):
  # [x] node names are unique
+ # [x] SQL in all nodes
+ # [x] Materialized nodes have target datasource
+ # [x] Only one materialized node
+ # [ ] Only one node of any specific type
  # [ ] ...
  repeated_node_names = [
  name for name, count in filter(lambda x: x[1] > 1, Counter(n["name"] for n in self.nodes).items())
@@ -212,7 +216,19 @@ class Datafile:
  raise DatafileValidationError(
  f"Pipe node names must be unique. These names are repeated: {repeated_node_names}"
  )
- pass
+ for node in self.nodes:
+ if "sql" not in node:
+ raise DatafileValidationError(f"SQL missing for node {repr(node['name'])}")
+ materialized_nodes_count = 0
+ for node in self.nodes:
+ if node.get("type", "").lower() == "materialized":
+ materialized_nodes_count += 1
+ if materialized_nodes_count > 1:
+ raise DatafileValidationError("Multiple materialized nodes in pipe. There can only be one")
+ if "datasource" not in node:
+ raise DatafileValidationError(
+ f"Materialized node {repr(node['name'])} missing target datasource"
+ )
  elif self.kind == DatafileKind.datasource:
  # TODO(eclbg):
  # [x] Just one node
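
In terms of the parsed node dictionaries, the new pipe checks require SQL on every node, allow at most one node of type "materialized", and require that node to carry a target datasource. A small sketch of what now validates and what raises DatafileValidationError (key names taken from the checks above; the node values are made up for illustration):

    # Passes: every node has "sql", one materialized node with a "datasource" target.
    nodes_ok = [
        {"name": "filtered", "sql": "SELECT * FROM events WHERE status = 'ok'"},
        {"name": "mv", "sql": "SELECT day, count() AS c FROM filtered GROUP BY day",
         "type": "materialized", "datasource": "events_by_day_mv"},
    ]

    # Raises: a materialized node without "datasource", a node missing "sql",
    # or more than one materialized node in the same pipe.
    nodes_bad = [
        {"name": "mv", "sql": "SELECT 1", "type": "materialized"},
    ]
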
@@ -0,0 +1,360 @@
+ # This is a command file for our CLI. Please keep it clean.
+ #
+ # - If it makes sense and only when strictly necessary, you can create utility functions in this file.
+ # - But please, **do not** interleave utility functions and command definitions.
+
+ import asyncio
+ import json
+ import re
+ from typing import Optional
+
+ import click
+ import humanfriendly
+ from click import Context
+
+ from tinybird.client import AuthNoTokenException, DoesNotExistException, TinyB
+ from tinybird.tb.modules.cli import cli
+ from tinybird.tb.modules.common import (
+ _analyze,
+ coro,
+ echo_safe_humanfriendly_tables_format_smart_table,
+ get_format_from_filename_or_url,
+ load_connector_config,
+ push_data,
+ )
+ from tinybird.tb.modules.datafile.common import get_name_version
+ from tinybird.tb.modules.exceptions import CLIDatasourceException
+ from tinybird.tb.modules.feedback_manager import FeedbackManager
+
+
+ @cli.group()
+ @click.pass_context
+ def datasource(ctx):
+ """Data Sources commands"""
+
+
+ @datasource.command(name="ls")
+ @click.option("--match", default=None, help="Retrieve any resources matching the pattern. eg --match _test")
+ @click.option(
+ "--format",
+ "format_",
+ type=click.Choice(["json"], case_sensitive=False),
+ default=None,
+ help="Force a type of the output",
+ )
+ @click.pass_context
+ @coro
+ async def datasource_ls(ctx: Context, match: Optional[str], format_: str):
+ """List data sources"""
+
+ client: TinyB = ctx.ensure_object(dict)["client"]
+ ds = await client.datasources()
+ columns = ["shared from", "name", "row_count", "size", "created at", "updated at", "connection"]
+ table_human_readable = []
+ table_machine_readable = []
+ pattern = re.compile(match) if match else None
+
+ for t in ds:
+ stats = t.get("stats", None)
+ if not stats:
+ stats = t.get("statistics", {"bytes": ""})
+ if not stats:
+ stats = {"bytes": ""}
+
+ tk = get_name_version(t["name"])
+ if pattern and not pattern.search(tk["name"]):
+ continue
+
+ if "." in tk["name"]:
+ shared_from, name = tk["name"].split(".")
+ else:
+ shared_from, name = "", tk["name"]
+
+ table_human_readable.append(
+ (
+ shared_from,
+ name,
+ humanfriendly.format_number(stats.get("row_count")) if stats.get("row_count", None) else "-",
+ humanfriendly.format_size(int(stats.get("bytes"))) if stats.get("bytes", None) else "-",
+ t["created_at"][:-7],
+ t["updated_at"][:-7],
+ t.get("service", ""),
+ )
+ )
+ table_machine_readable.append(
+ {
+ "shared from": shared_from,
+ "name": name,
+ "row_count": stats.get("row_count", None) or "-",
+ "size": stats.get("bytes", None) or "-",
+ "created at": t["created_at"][:-7],
+ "updated at": t["updated_at"][:-7],
+ "connection": t.get("service", ""),
+ }
+ )
+
+ if not format_:
+ click.echo(FeedbackManager.info_datasources())
+ echo_safe_humanfriendly_tables_format_smart_table(table_human_readable, column_names=columns)
+ click.echo("\n")
+ elif format_ == "json":
+ click.echo(json.dumps({"datasources": table_machine_readable}, indent=2))
+ else:
+ raise CLIDatasourceException(FeedbackManager.error_datasource_ls_type())
+
+
+ @datasource.command(name="append")
+ @click.argument("datasource_name")
+ @click.argument("url", nargs=-1)
+ @click.option("--concurrency", help="How many files to submit concurrently", default=1, hidden=True)
+ @click.pass_context
+ @coro
+ async def datasource_append(
+ ctx: Context,
+ datasource_name: str,
+ url,
+ concurrency: int,
+ ):
+ """
+ Appends data to an existing Data Source from URL, local file or a connector
+
+ - Load from URL `tb datasource append [datasource_name] https://url_to_csv`
+
+ - Load from local file `tb datasource append [datasource_name] /path/to/local/file`
+ """
+
+ client: TinyB = ctx.obj["client"]
+ await push_data(
+ client,
+ datasource_name,
+ url,
+ mode="append",
+ concurrency=concurrency,
+ )
+
+
+ @datasource.command(name="replace")
+ @click.argument("datasource_name")
+ @click.argument("url", nargs=-1)
+ @click.option("--sql-condition", default=None, help="SQL WHERE condition to replace data", hidden=True)
+ @click.option("--skip-incompatible-partition-key", is_flag=True, default=False, hidden=True)
+ @click.pass_context
+ @coro
+ async def datasource_replace(
+ ctx: Context,
+ datasource_name,
+ url,
+ sql_condition,
+ skip_incompatible_partition_key,
+ ):
+ """
+ Replaces the data in a data source from a URL, local file or a connector
+
+ - Replace from URL `tb datasource replace [datasource_name] https://url_to_csv --sql-condition "country='ES'"`
+
+ - Replace from local file `tb datasource replace [datasource_name] /path/to/local/file --sql-condition "country='ES'"`
+ """
+
+ replace_options = set()
+ if skip_incompatible_partition_key:
+ replace_options.add("skip_incompatible_partition_key")
+ client: TinyB = ctx.obj["client"]
+ await push_data(
+ client,
+ datasource_name,
+ url,
+ mode="replace",
+ sql_condition=sql_condition,
+ replace_options=replace_options,
+ )
+
+
+ @datasource.command(name="analyze")
+ @click.argument("url_or_file")
+ @click.option(
+ "--connector",
+ type=click.Choice(["bigquery", "snowflake"], case_sensitive=True),
+ help="Use from one of the selected connectors. In this case pass a table name as a parameter instead of a file name or an URL",
+ hidden=True,
+ )
+ @click.pass_context
+ @coro
+ async def datasource_analyze(ctx, url_or_file, connector):
+ """Analyze a URL or a file before creating a new data source"""
+ client = ctx.obj["client"]
+
+ _connector = None
+ if connector:
+ load_connector_config(ctx, connector, False, check_uninstalled=False)
+ if connector not in ctx.obj:
+ raise CLIDatasourceException(FeedbackManager.error_connector_not_configured(connector=connector))
+ else:
+ _connector = ctx.obj[connector]
+
+ def _table(title, columns, data):
+ row_format = "{:<25}" * len(columns)
+ click.echo(FeedbackManager.info_datasource_title(title=title))
+ click.echo(FeedbackManager.info_datasource_row(row=row_format.format(*columns)))
+ for t in data:
+ click.echo(FeedbackManager.info_datasource_row(row=row_format.format(*[str(element) for element in t])))
+
+ analysis, _ = await _analyze(
+ url_or_file, client, format=get_format_from_filename_or_url(url_or_file), connector=_connector
+ )
+
+ columns = ("name", "type", "nullable")
+ if "columns" in analysis["analysis"]:
+ _table(
+ "columns",
+ columns,
+ [
+ (t["name"], t["recommended_type"], "false" if t["present_pct"] == 1 else "true")
+ for t in analysis["analysis"]["columns"]
+ ],
+ )
+
+ click.echo(FeedbackManager.info_datasource_title(title="SQL Schema"))
+ click.echo(analysis["analysis"]["schema"])
+
+ values = []
+
+ if "dialect" in analysis:
+ for x in analysis["dialect"].items():
+ if x[1] == " ":
+ values.append((x[0], '" "'))
+ elif type(x[1]) == str and ("\n" in x[1] or "\r" in x[1]): # noqa: E721
+ values.append((x[0], x[1].replace("\n", "\\n").replace("\r", "\\r")))
+ else:
+ values.append(x)
+
+ _table("dialect", ("name", "value"), values)
+
+
+ @datasource.command(name="truncate")
+ @click.argument("datasource_name", required=True)
+ @click.option("--yes", is_flag=True, default=False, help="Do not ask for confirmation")
+ @click.option(
+ "--cascade", is_flag=True, default=False, help="Truncate dependent DS attached in cascade to the given DS"
+ )
+ @click.pass_context
+ @coro
+ async def datasource_truncate(ctx, datasource_name, yes, cascade):
+ """Truncate a data source"""
+
+ client = ctx.obj["client"]
+ if yes or click.confirm(FeedbackManager.warning_confirm_truncate_datasource(datasource=datasource_name)):
+ try:
+ await client.datasource_truncate(datasource_name)
+ except AuthNoTokenException:
+ raise
+ except DoesNotExistException:
+ raise CLIDatasourceException(FeedbackManager.error_datasource_does_not_exist(datasource=datasource_name))
+ except Exception as e:
+ raise CLIDatasourceException(FeedbackManager.error_exception(error=e))
+
+ click.echo(FeedbackManager.success_truncate_datasource(datasource=datasource_name))
+
+ if cascade:
+ try:
+ ds_cascade_dependencies = await client.datasource_dependencies(
+ no_deps=False,
+ match=None,
+ pipe=None,
+ datasource=datasource_name,
+ check_for_partial_replace=True,
+ recursive=False,
+ )
+ except Exception as e:
+ raise CLIDatasourceException(FeedbackManager.error_exception(error=e))
+
+ cascade_dependent_ds = list(ds_cascade_dependencies.get("dependencies", {}).keys()) + list(
+ ds_cascade_dependencies.get("incompatible_datasources", {}).keys()
+ )
+ for cascade_ds in cascade_dependent_ds:
+ if yes or click.confirm(FeedbackManager.warning_confirm_truncate_datasource(datasource=cascade_ds)):
+ try:
+ await client.datasource_truncate(cascade_ds)
+ except DoesNotExistException:
+ raise CLIDatasourceException(
+ FeedbackManager.error_datasource_does_not_exist(datasource=datasource_name)
+ )
+ except Exception as e:
+ raise CLIDatasourceException(FeedbackManager.error_exception(error=e))
+ click.echo(FeedbackManager.success_truncate_datasource(datasource=cascade_ds))
+
+
+ @datasource.command(name="delete")
+ @click.argument("datasource_name")
+ @click.option("--sql-condition", default=None, help="SQL WHERE condition to remove rows", hidden=True, required=True)
+ @click.option("--yes", is_flag=True, default=False, help="Do not ask for confirmation")
+ @click.option("--wait", is_flag=True, default=False, help="Wait for delete job to finish, disabled by default")
+ @click.option("--dry-run", is_flag=True, default=False, help="Run the command without deleting anything")
+ @click.pass_context
+ @coro
+ async def datasource_delete_rows(ctx, datasource_name, sql_condition, yes, wait, dry_run):
+ """
+ Delete rows from a datasource
+
+ - Delete rows with SQL condition: `tb datasource delete [datasource_name] --sql-condition "country='ES'"`
+
+ - Delete rows with SQL condition and wait for the job to finish: `tb datasource delete [datasource_name] --sql-condition "country='ES'" --wait`
+ """
+
+ client: TinyB = ctx.ensure_object(dict)["client"]
+ if (
+ dry_run
+ or yes
+ or click.confirm(
+ FeedbackManager.warning_confirm_delete_rows_datasource(
+ datasource=datasource_name, delete_condition=sql_condition
+ )
+ )
+ ):
+ try:
+ res = await client.datasource_delete_rows(datasource_name, sql_condition, dry_run)
+ if dry_run:
+ click.echo(
+ FeedbackManager.success_dry_run_delete_rows_datasource(
+ rows=res["rows_to_be_deleted"], datasource=datasource_name, delete_condition=sql_condition
+ )
+ )
+ return
+ job_id = res["job_id"]
+ job_url = res["job_url"]
+ click.echo(FeedbackManager.info_datasource_delete_rows_job_url(url=job_url))
+ if wait:
+ progress_symbols = ["-", "\\", "|", "/"]
+ progress_str = "Waiting for the job to finish"
+ # TODO: Use click.echo instead of print and see if the behavior is the same
+ print(f"\n{progress_str}", end="") # noqa: T201
+
+ def progress_line(n):
+ print(f"\r{progress_str} {progress_symbols[n % len(progress_symbols)]}", end="") # noqa: T201
+
+ i = 0
+ while True:
+ try:
+ res = await client._req(f"v0/jobs/{job_id}")
+ except Exception:
+ raise CLIDatasourceException(FeedbackManager.error_job_status(url=job_url))
+ if res["status"] == "done":
+ print("\n") # noqa: T201
+ click.echo(
+ FeedbackManager.success_delete_rows_datasource(
+ datasource=datasource_name, delete_condition=sql_condition
+ )
+ )
+ break
+ elif res["status"] == "error":
+ print("\n") # noqa: T201
+ raise CLIDatasourceException(FeedbackManager.error_exception(error=res["error"]))
+ await asyncio.sleep(1)
+ i += 1
+ progress_line(i)
+
+ except AuthNoTokenException:
+ raise
+ except DoesNotExistException:
+ raise CLIDatasourceException(FeedbackManager.error_datasource_does_not_exist(datasource=datasource_name))
+ except Exception as e:
+ raise CLIDatasourceException(FeedbackManager.error_exception(error=e))
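
For reference, the delete command above relies on a handful of response fields; the shapes below are inferred only from the keys the code reads (the exact payloads belong to the Tinybird Data Sources and Jobs APIs and are not part of this diff):

    dry_run_response = {"rows_to_be_deleted": 1234}
    delete_response = {"job_id": "abcd-1234", "job_url": "https://api.tinybird.co/v0/jobs/abcd-1234"}
    job_status = {"status": "done"}  # or {"status": "error", "error": "..."}, polled once per second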