tinybird 0.0.1.dev234__py3-none-any.whl → 0.0.1.dev236__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of tinybird might be problematic. Click here for more details.

Files changed (49)
  1. tinybird/tb/__cli__.py +2 -2
  2. tinybird/tb/check_pypi.py +3 -8
  3. tinybird/tb/cli.py +0 -6
  4. tinybird/tb/client.py +314 -340
  5. tinybird/tb/config.py +4 -5
  6. tinybird/tb/modules/build.py +21 -24
  7. tinybird/tb/modules/cicd.py +2 -2
  8. tinybird/tb/modules/cli.py +18 -28
  9. tinybird/tb/modules/common.py +123 -138
  10. tinybird/tb/modules/config.py +2 -4
  11. tinybird/tb/modules/connection.py +21 -26
  12. tinybird/tb/modules/copy.py +7 -9
  13. tinybird/tb/modules/create.py +18 -21
  14. tinybird/tb/modules/datafile/build.py +39 -39
  15. tinybird/tb/modules/datafile/build_common.py +9 -9
  16. tinybird/tb/modules/datafile/build_datasource.py +24 -24
  17. tinybird/tb/modules/datafile/build_pipe.py +11 -13
  18. tinybird/tb/modules/datafile/diff.py +12 -12
  19. tinybird/tb/modules/datafile/format_datasource.py +5 -5
  20. tinybird/tb/modules/datafile/format_pipe.py +6 -6
  21. tinybird/tb/modules/datafile/playground.py +42 -42
  22. tinybird/tb/modules/datafile/pull.py +24 -26
  23. tinybird/tb/modules/datasource.py +42 -56
  24. tinybird/tb/modules/endpoint.py +14 -19
  25. tinybird/tb/modules/info.py +14 -15
  26. tinybird/tb/modules/infra.py +43 -48
  27. tinybird/tb/modules/job.py +7 -10
  28. tinybird/tb/modules/local.py +22 -18
  29. tinybird/tb/modules/local_common.py +13 -4
  30. tinybird/tb/modules/login.py +9 -10
  31. tinybird/tb/modules/materialization.py +7 -10
  32. tinybird/tb/modules/mock.py +8 -9
  33. tinybird/tb/modules/open.py +1 -3
  34. tinybird/tb/modules/pipe.py +2 -4
  35. tinybird/tb/modules/secret.py +12 -16
  36. tinybird/tb/modules/shell.py +7 -20
  37. tinybird/tb/modules/sink.py +6 -8
  38. tinybird/tb/modules/test.py +9 -14
  39. tinybird/tb/modules/tinyunit/tinyunit.py +3 -3
  40. tinybird/tb/modules/token.py +16 -24
  41. tinybird/tb/modules/watch.py +3 -7
  42. tinybird/tb/modules/workspace.py +26 -37
  43. tinybird/tb/modules/workspace_members.py +16 -23
  44. {tinybird-0.0.1.dev234.dist-info → tinybird-0.0.1.dev236.dist-info}/METADATA +1 -1
  45. tinybird-0.0.1.dev236.dist-info/RECORD +89 -0
  46. tinybird-0.0.1.dev234.dist-info/RECORD +0 -89
  47. {tinybird-0.0.1.dev234.dist-info → tinybird-0.0.1.dev236.dist-info}/WHEEL +0 -0
  48. {tinybird-0.0.1.dev234.dist-info → tinybird-0.0.1.dev236.dist-info}/entry_points.txt +0 -0
  49. {tinybird-0.0.1.dev234.dist-info → tinybird-0.0.1.dev236.dist-info}/top_level.txt +0 -0
@@ -45,7 +45,7 @@ from tinybird.tb.modules.feedback_manager import FeedbackManager
45
45
  from tinybird.tb.modules.project import Project
46
46
 
47
47
 
48
- async def folder_build(
48
+ def folder_build(
49
49
  project: Project,
50
50
  tb_client: TinyB,
51
51
  filenames: Optional[List[str]] = None,
@@ -67,8 +67,8 @@ async def folder_build(
67
67
  fork = False
68
68
  release_created = False
69
69
  folder = str(project.path)
70
- datasources: List[Dict[str, Any]] = await tb_client.datasources()
71
- pipes: List[Dict[str, Any]] = await tb_client.pipes(dependencies=True)
70
+ datasources: List[Dict[str, Any]] = tb_client.datasources()
71
+ pipes: List[Dict[str, Any]] = tb_client.pipes(dependencies=True)
72
72
 
73
73
  existing_resources: List[str] = [x["name"] for x in datasources] + [x["name"] for x in pipes]
74
74
  remote_resource_names = [get_remote_resource_name_without_version(x) for x in existing_resources]
@@ -78,7 +78,7 @@ async def folder_build(
78
78
 
79
79
  # build graph to get new versions for all the files involved in the query
80
80
  # dependencies need to be processed always to get the versions
81
- dependencies_graph = await build_graph(
81
+ dependencies_graph = build_graph(
82
82
  filenames,
83
83
  tb_client,
84
84
  dir_path=folder,
@@ -114,7 +114,7 @@ async def folder_build(
114
114
  return True
115
115
  return False
116
116
 
117
- async def push(
117
+ def push(
118
118
  name: str,
119
119
  to_run: Dict[str, Dict[str, Any]],
120
120
  dry_run: bool,
@@ -136,7 +136,7 @@ async def folder_build(
136
136
  )
137
137
  )
138
138
  else:
139
- if await name_matches_existing_resource(resource, name, tb_client):
139
+ if name_matches_existing_resource(resource, name, tb_client):
140
140
  if resource == "pipes":
141
141
  click.echo(FeedbackManager.error_pipe_cannot_be_pushed(name=name))
142
142
  else:
@@ -156,7 +156,7 @@ async def folder_build(
156
156
  extension = "pipe" if resource == "pipes" else "datasource"
157
157
  click.echo(FeedbackManager.info_building_resource(name=f"{name}.{extension}", version=""))
158
158
  else:
159
- if await name_matches_existing_resource(resource, name, tb_client):
159
+ if name_matches_existing_resource(resource, name, tb_client):
160
160
  if resource == "pipes":
161
161
  click.echo(FeedbackManager.warning_pipe_cannot_be_pushed(name=name))
162
162
  else:
@@ -164,7 +164,7 @@ async def folder_build(
164
164
  else:
165
165
  click.echo(FeedbackManager.warning_dry_name_already_exists(name=name))
166
166
 
167
- async def push_files(
167
+ def push_files(
168
168
  dependency_graph: GraphDependencies,
169
169
  dry_run: bool = False,
170
170
  ):
@@ -205,7 +205,7 @@ async def folder_build(
205
205
  error_msg = FeedbackManager.error_connector_require_post_release(connector=connector)
206
206
  raise click.ClickException(error_msg)
207
207
 
208
- await push(
208
+ push(
209
209
  name,
210
210
  resources_to_run_fork_downstream,
211
211
  dry_run,
@@ -240,7 +240,7 @@ async def folder_build(
240
240
  if name in processed or not is_endpoint(resources_to_run_fork_downstream[name]):
241
241
  continue
242
242
 
243
- await push(
243
+ push(
244
244
  name,
245
245
  resources_to_run_fork_downstream,
246
246
  dry_run,
@@ -258,7 +258,7 @@ async def folder_build(
258
258
  if name in processed or is_materialized(resources_to_run_fork_downstream.get(name)):
259
259
  continue
260
260
 
261
- await push(
261
+ push(
262
262
  name,
263
263
  resources_to_run_fork_downstream,
264
264
  dry_run,
@@ -276,7 +276,7 @@ async def folder_build(
276
276
  if name in processed or not is_materialized(resources_to_run_fork_downstream.get(name)):
277
277
  continue
278
278
 
279
- await push(
279
+ push(
280
280
  name,
281
281
  resources_to_run_fork_downstream,
282
282
  dry_run,
@@ -285,7 +285,7 @@ async def folder_build(
285
285
  )
286
286
  processed.add(name)
287
287
 
288
- await push_files(dependencies_graph, dry_run)
288
+ push_files(dependencies_graph, dry_run)
289
289
 
290
290
  if not dry_run and not run_tests and verbose:
291
291
  click.echo(FeedbackManager.info_not_pushing_fixtures())
@@ -293,13 +293,13 @@ async def folder_build(
293
293
  return dependencies_graph.to_run
294
294
 
295
295
 
296
- async def name_matches_existing_resource(resource: str, name: str, tb_client: TinyB):
296
+ def name_matches_existing_resource(resource: str, name: str, tb_client: TinyB):
297
297
  if resource == "datasources":
298
- current_pipes: List[Dict[str, Any]] = await tb_client.pipes()
298
+ current_pipes: List[Dict[str, Any]] = tb_client.pipes()
299
299
  if name in [x["name"] for x in current_pipes]:
300
300
  return True
301
301
  else:
302
- current_datasources: List[Dict[str, Any]] = await tb_client.datasources()
302
+ current_datasources: List[Dict[str, Any]] = tb_client.datasources()
303
303
  if name in [x["name"] for x in current_datasources]:
304
304
  return True
305
305
  return False
@@ -479,7 +479,7 @@ class GraphDependencies:
479
479
  all_resources: Dict[str, Dict[str, Any]]
480
480
 
481
481
 
482
- async def process(
482
+ def process(
483
483
  filename: str,
484
484
  tb_client: TinyB,
485
485
  deps: List[str],
@@ -500,7 +500,7 @@ async def process(
500
500
  embedded_datasources = {} if embedded_datasources is None else embedded_datasources
501
501
 
502
502
  try:
503
- res = await process_file(
503
+ res = process_file(
504
504
  filename,
505
505
  tb_client,
506
506
  skip_connectors=skip_connectors,
@@ -588,7 +588,7 @@ async def process(
588
588
  return os.path.basename(name), warnings
589
589
 
590
590
 
591
- async def get_processed(
591
+ def get_processed(
592
592
  filenames: Iterable[str],
593
593
  changed: Optional[Dict[str, Any]] = None,
594
594
  verbose: bool = False,
@@ -619,7 +619,7 @@ async def get_processed(
619
619
  if resource in changed and (not changed[resource] or changed[resource] in ["shared", "remote"]):
620
620
  continue
621
621
  if os.path.isdir(filename):
622
- await get_processed(
622
+ get_processed(
623
623
  filenames=get_project_filenames(filename),
624
624
  changed=changed,
625
625
  verbose=verbose,
@@ -646,7 +646,7 @@ async def get_processed(
646
646
  if tb_client is None:
647
647
  raise ValueError("tb_client cannot be None")
648
648
 
649
- name, warnings = await process(
649
+ name, warnings = process(
650
650
  filename=filename,
651
651
  tb_client=tb_client,
652
652
  deps=deps_list,
@@ -676,7 +676,7 @@ async def get_processed(
676
676
  )
677
677
 
678
678
 
679
- async def build_graph(
679
+ def build_graph(
680
680
  filenames: Iterable[str],
681
681
  tb_client: TinyB,
682
682
  dir_path: Optional[str] = None,
@@ -711,7 +711,7 @@ async def build_graph(
711
711
  # When using fork-downstream or --only-changes, we need to generate all the graph of all the resources and their dependencies
712
712
  # This way we can add more resources into the to_run dictionary if needed.
713
713
  if process_dependencies and only_changes:
714
- all_dependencies_graph = await build_graph(
714
+ all_dependencies_graph = build_graph(
715
715
  get_project_filenames(dir_path),
716
716
  tb_client,
717
717
  dir_path=dir_path,
@@ -729,7 +729,7 @@ async def build_graph(
729
729
 
730
730
  processed: Set[str] = set()
731
731
 
732
- await get_processed(
732
+ get_processed(
733
733
  filenames=filenames,
734
734
  changed=changed,
735
735
  verbose=verbose,
@@ -781,7 +781,7 @@ async def build_graph(
781
781
  click.echo(FeedbackManager.info_skipping_resource(resource=processed_filename))
782
782
  continue
783
783
  click.echo(FeedbackManager.info_processing_file(filename=processed_filename))
784
- await process(
784
+ process(
785
785
  filename=str(f),
786
786
  tb_client=tb_client,
787
787
  deps=deps,
@@ -800,7 +800,7 @@ async def build_graph(
800
800
  return GraphDependencies(dep_map, to_run, all_dep_map, all_resources)
801
801
 
802
802
 
803
- async def process_file(
803
+ def process_file(
804
804
  filename: str,
805
805
  tb_client: TinyB,
806
806
  skip_connectors: bool = False,
@@ -821,7 +821,7 @@ async def process_file(
821
821
  params[f"engine_{k}"] = v
822
822
  return params
823
823
 
824
- async def get_kafka_params(node: Dict[str, Any]):
824
+ def get_kafka_params(node: Dict[str, Any]):
825
825
  params = {key: value for key, value in node.items() if key.startswith("kafka")}
826
826
 
827
827
  if not skip_connectors:
@@ -837,7 +837,7 @@ async def process_file(
837
837
  "kafka_sasl_mechanism": params.get("kafka_sasl_mechanism", None),
838
838
  }
839
839
 
840
- connector = await tb_client.get_connection(**connector_params)
840
+ connector = tb_client.get_connection(**connector_params)
841
841
  if not connector:
842
842
  click.echo(
843
843
  FeedbackManager.info_creating_kafka_connection(connection_name=params["kafka_connection_name"])
@@ -851,7 +851,7 @@ async def process_file(
851
851
  if not all(required_params):
852
852
  raise click.ClickException(FeedbackManager.error_unknown_kafka_connection(datasource=name))
853
853
 
854
- connector = await tb_client.connection_create_kafka(**connector_params)
854
+ connector = tb_client.connection_create_kafka(**connector_params)
855
855
  except Exception as e:
856
856
  raise click.ClickException(
857
857
  FeedbackManager.error_connection_create(
@@ -870,7 +870,7 @@ async def process_file(
870
870
 
871
871
  return params
872
872
 
873
- async def get_import_params(datasource: Dict[str, Any], node: Dict[str, Any]) -> Dict[str, Any]:
873
+ def get_import_params(datasource: Dict[str, Any], node: Dict[str, Any]) -> Dict[str, Any]:
874
874
  params: Dict[str, Any] = {key: value for key, value in node.items() if key.startswith("import_")}
875
875
 
876
876
  if len(params) == 0 or skip_connectors:
@@ -879,7 +879,7 @@ async def process_file(
879
879
  service: Optional[str] = node.get("import_service", None)
880
880
 
881
881
  if service and service.lower() == "bigquery":
882
- if not await tb_client.check_gcp_read_permissions():
882
+ if not tb_client.check_gcp_read_permissions():
883
883
  raise click.ClickException(FeedbackManager.error_unknown_bq_connection(datasource=datasource["name"]))
884
884
 
885
885
  # Bigquery doesn't have a datalink, so we can stop here
@@ -895,7 +895,7 @@ async def process_file(
895
895
  if not connector_id:
896
896
  assert isinstance(connector_name, str)
897
897
 
898
- connector: Optional[Dict[str, Any]] = await tb_client.get_connector(connector_name, service)
898
+ connector: Optional[Dict[str, Any]] = tb_client.get_connector(connector_name, service)
899
899
 
900
900
  if not connector:
901
901
  raise Exception(
@@ -952,13 +952,13 @@ async def process_file(
952
952
  if x["default_value"] and x["default_value"].lower().startswith("materialized"):
953
953
  # turn expression to a select query to sql_get_used_tables can get the used tables
954
954
  q = "select " + x["default_value"][len("materialized") :]
955
- tables = await tb_client.sql_get_used_tables(q)
955
+ tables = tb_client.sql_get_used_tables(q)
956
956
  # materialized columns expressions could have joins so we need to add them as a dep
957
957
  deps += tables
958
958
  # generate replacements and replace the query
959
959
  replacements = {t: t for t in tables}
960
960
 
961
- replaced_results = await tb_client.replace_tables(q, replacements)
961
+ replaced_results = tb_client.replace_tables(q, replacements)
962
962
  x["default_value"] = replaced_results.replace("SELECT", "materialized", 1)
963
963
  if x.get("jsonpath", None):
964
964
  _format = "ndjson"
@@ -996,7 +996,7 @@ async def process_file(
996
996
  if "import_service" in node or "import_connection_name" in node:
997
997
  VALID_SERVICES: Tuple[str, ...] = ("bigquery", "snowflake", "s3", "s3_iamrole", "gcs", "dynamodb")
998
998
 
999
- import_params = await get_import_params(params, node)
999
+ import_params = get_import_params(params, node)
1000
1000
 
1001
1001
  service = import_params.get("import_service", None)
1002
1002
  if service and service not in VALID_SERVICES:
@@ -1022,7 +1022,7 @@ async def process_file(
1022
1022
  period: int = DEFAULT_CRON_PERIOD
1023
1023
 
1024
1024
  if current_ws is not None:
1025
- workspaces = (await tb_client.user_workspaces(version="v1")).get("workspaces", [])
1025
+ workspaces = (tb_client.user_workspaces(version="v1")).get("workspaces", [])
1026
1026
  workspace_rate_limits: Dict[str, Dict[str, int]] = next(
1027
1027
  (w.get("rate_limits", {}) for w in workspaces if w["id"] == current_ws["id"]), {}
1028
1028
  )
@@ -1074,7 +1074,7 @@ async def process_file(
1074
1074
  params[replacement] = value
1075
1075
 
1076
1076
  if "kafka_connection_name" in node:
1077
- kafka_params = await get_kafka_params(node)
1077
+ kafka_params = get_kafka_params(node)
1078
1078
  params.update(kafka_params)
1079
1079
  del params["format"]
1080
1080
 
@@ -1139,7 +1139,7 @@ async def process_file(
1139
1139
  sql_rendered = sql
1140
1140
 
1141
1141
  try:
1142
- dependencies = await tb_client.sql_get_used_tables(sql_rendered, raising=True, is_copy=is_copy)
1142
+ dependencies = tb_client.sql_get_used_tables(sql_rendered, raising=True, is_copy=is_copy)
1143
1143
  deps += [t for t in dependencies if t not in [n["name"] for n in doc.nodes]]
1144
1144
 
1145
1145
  except Exception as e:
@@ -1166,7 +1166,7 @@ async def process_file(
1166
1166
 
1167
1167
  replacements = {x: x for x in deps if x not in [n["name"] for n in doc.nodes]}
1168
1168
 
1169
- # FIXME: Ideally we should use await tb_client.replace_tables(sql, replacements)
1169
+ # FIXME: Ideally we should use tb_client.replace_tables(sql, replacements)
1170
1170
  for old, new in replacements.items():
1171
1171
  sql = re.sub("([\t \\n']+|^)" + old + "([\t \\n'\\)]+|$)", "\\1" + new + "\\2", sql)
1172
1172
 
@@ -6,7 +6,7 @@ from tinybird.tb.client import DoesNotExistException, TinyB
6
6
  from tinybird.tb.modules.feedback_manager import FeedbackManager
7
7
 
8
8
 
9
- async def update_tags(resource_id: str, resource_name: str, resource_type: str, tags: List[str], tb_client: TinyB):
9
+ def update_tags(resource_id: str, resource_name: str, resource_type: str, tags: List[str], tb_client: TinyB):
10
10
  def get_tags_for_resource(all_tags: dict, resource_id: str, resource_name: str) -> List[str]:
11
11
  tag_names = []
12
12
 
@@ -30,7 +30,7 @@ async def update_tags(resource_id: str, resource_name: str, resource_type: str,
30
30
  return tags_to_add, tags_to_remove
31
31
 
32
32
  try:
33
- all_tags = await tb_client.get_all_tags()
33
+ all_tags = tb_client.get_all_tags()
34
34
  except Exception as e:
35
35
  raise Exception(FeedbackManager.error_getting_tags(error=str(e)))
36
36
 
@@ -47,7 +47,7 @@ async def update_tags(resource_id: str, resource_name: str, resource_type: str,
47
47
  if not tag:
48
48
  # Create new tag
49
49
  try:
50
- await tb_client.create_tag_with_resource(
50
+ tb_client.create_tag_with_resource(
51
51
  name=tag_name,
52
52
  resource_id=resource_id,
53
53
  resource_name=resource_name,
@@ -60,7 +60,7 @@ async def update_tags(resource_id: str, resource_name: str, resource_type: str,
60
60
  resources = tag.get("resources", [])
61
61
  resources.append({"id": resource_id, "name": resource_name, "type": resource_type})
62
62
  try:
63
- await tb_client.update_tag(tag.get("name", tag_name), resources)
63
+ tb_client.update_tag(tag.get("name", tag_name), resources)
64
64
  except Exception as e:
65
65
  raise Exception(FeedbackManager.error_updating_tag(error=str(e)))
66
66
 
@@ -72,12 +72,12 @@ async def update_tags(resource_id: str, resource_name: str, resource_type: str,
72
72
  resources = tag.get("resources", [])
73
73
  resources = [resource for resource in resources if resource.get("name") != resource_name]
74
74
  try:
75
- await tb_client.update_tag(tag.get("name", tag_name), resources)
75
+ tb_client.update_tag(tag.get("name", tag_name), resources)
76
76
  except Exception as e:
77
77
  raise Exception(FeedbackManager.error_updating_tag(error=str(e)))
78
78
 
79
79
 
80
- async def update_tags_in_resource(rs: Dict[str, Any], resource_type: str, client: TinyB):
80
+ def update_tags_in_resource(rs: Dict[str, Any], resource_type: str, client: TinyB):
81
81
  filtering_tags = rs.get("filtering_tags", [])
82
82
 
83
83
  if not filtering_tags:
@@ -89,7 +89,7 @@ async def update_tags_in_resource(rs: Dict[str, Any], resource_type: str, client
89
89
  if resource_type == "datasource":
90
90
  ds_name = rs["params"]["name"]
91
91
  try:
92
- persisted_ds = await client.get_datasource(ds_name)
92
+ persisted_ds = client.get_datasource(ds_name)
93
93
  resource_id = persisted_ds.get("id", "")
94
94
  resource_name = persisted_ds.get("name", "")
95
95
  except DoesNotExistException:
@@ -99,7 +99,7 @@ async def update_tags_in_resource(rs: Dict[str, Any], resource_type: str, client
99
99
  elif resource_type == "pipe":
100
100
  pipe_name = rs["name"]
101
101
  try:
102
- persisted_pipe = await client.pipe(pipe_name)
102
+ persisted_pipe = client.pipe(pipe_name)
103
103
  resource_id = persisted_pipe.get("id", "")
104
104
  resource_name = persisted_pipe.get("name", "")
105
105
  except DoesNotExistException:
@@ -107,7 +107,7 @@ async def update_tags_in_resource(rs: Dict[str, Any], resource_type: str, client
107
107
 
108
108
  if resource_id and resource_name:
109
109
  try:
110
- await update_tags(
110
+ update_tags(
111
111
  resource_id=resource_id,
112
112
  resource_name=resource_name,
113
113
  resource_type=resource_type,
@@ -10,7 +10,7 @@ from tinybird.tb.client import DoesNotExistException, TinyB
10
10
  from tinybird.tb.modules.feedback_manager import FeedbackManager
11
11
 
12
12
 
13
- async def new_ds(
13
+ def new_ds(
14
14
  ds: Dict[str, Any],
15
15
  client: TinyB,
16
16
  user_token: Optional[str],
@@ -25,26 +25,26 @@ async def new_ds(
25
25
  ):
26
26
  ds_name = ds["params"]["name"]
27
27
 
28
- async def manage_tokens():
28
+ def manage_tokens():
29
29
  # search for token with specified name and adds it if not found or adds permissions to it
30
30
  t = None
31
31
  for tk in ds["tokens"]:
32
32
  token_name = tk["token_name"]
33
- t = await client.get_token_by_name(token_name)
33
+ t = client.get_token_by_name(token_name)
34
34
  if not t:
35
35
  token_name = tk["token_name"]
36
36
  # DS == token_origin.Origins.DATASOURCE
37
- await client.create_token(token_name, [f"DATASOURCES:{tk['permissions']}:{ds_name}"], "DS", ds_name)
37
+ client.create_token(token_name, [f"DATASOURCES:{tk['permissions']}:{ds_name}"], "DS", ds_name)
38
38
  else:
39
39
  scopes = [f"DATASOURCES:{tk['permissions']}:{ds_name}"]
40
40
  for x in t["scopes"]:
41
41
  sc = x["type"] if "resource" not in x else f"{x['type']}:{x['resource']}"
42
42
  scopes.append(sc)
43
- await client.alter_tokens(token_name, scopes)
43
+ client.alter_tokens(token_name, scopes)
44
44
 
45
45
  datasource_exists = False
46
46
  try:
47
- existing_ds = await client.get_datasource(ds_name)
47
+ existing_ds = client.get_datasource(ds_name)
48
48
  datasource_exists = True
49
49
  except DoesNotExistException:
50
50
  datasource_exists = False
@@ -81,25 +81,25 @@ async def new_ds(
81
81
  if extension not in valid_formats:
82
82
  raise Exception(FeedbackManager.error_format(extension=extension, valid_formats=valid_formats))
83
83
  params["format"] = extension
84
- datasource_response = await client.datasource_create_from_definition(params)
84
+ datasource_response = client.datasource_create_from_definition(params)
85
85
  datasource = datasource_response.get("datasource", {})
86
86
 
87
87
  if datasource.get("service") == "dynamodb":
88
88
  job_id = datasource_response.get("import_id", None)
89
89
  if job_id:
90
- jobs = await client.jobs(status=("waiting", "working"))
90
+ jobs = client.jobs(status=("waiting", "working"))
91
91
  job_url = next((job["job_url"] for job in jobs if job["id"] == job_id), None)
92
92
  if job_url:
93
93
  click.echo(FeedbackManager.success_dynamodb_initial_load(job_url=job_url))
94
94
 
95
95
  if ds.get("tokens"):
96
- await manage_tokens()
96
+ manage_tokens()
97
97
 
98
98
  if ds.get("shared_with") and not build:
99
99
  if not user_token:
100
100
  click.echo(FeedbackManager.info_skipping_shared_with_entry())
101
101
  else:
102
- await share_and_unshare_datasource(
102
+ share_and_unshare_datasource(
103
103
  client,
104
104
  datasource,
105
105
  user_token,
@@ -110,7 +110,7 @@ async def new_ds(
110
110
  if is_vendor and user_token and local_ws and current_ws:
111
111
  user_client: TinyB = deepcopy(client)
112
112
  user_client.token = user_token
113
- await user_client.datasource_share(
113
+ user_client.datasource_share(
114
114
  datasource_id=datasource.get("id", ""),
115
115
  current_workspace_id=current_ws.get("id", ""),
116
116
  destination_workspace_id=local_ws.get("id", ""),
@@ -127,7 +127,7 @@ async def new_ds(
127
127
  if not user_token:
128
128
  click.echo(FeedbackManager.info_skipping_shared_with_entry())
129
129
  else:
130
- await share_and_unshare_datasource(
130
+ share_and_unshare_datasource(
131
131
  client,
132
132
  existing_ds,
133
133
  user_token,
@@ -169,7 +169,7 @@ async def new_ds(
169
169
  or new_ttl
170
170
  or ((new_indices is not None) and (not fork_downstream or not fork))
171
171
  ):
172
- alter_response = await client.alter_datasource(
172
+ alter_response = client.alter_datasource(
173
173
  ds_name,
174
174
  new_schema=new_schema,
175
175
  description=new_description,
@@ -198,7 +198,7 @@ async def new_ds(
198
198
  make_changes = click.prompt(FeedbackManager.info_ask_for_alter_confirmation()).lower() == "y"
199
199
 
200
200
  if make_changes:
201
- await client.alter_datasource(
201
+ client.alter_datasource(
202
202
  ds_name,
203
203
  new_schema=new_schema,
204
204
  description=new_description,
@@ -224,7 +224,7 @@ async def new_ds(
224
224
 
225
225
  try:
226
226
  click.echo(FeedbackManager.info_update_datasource(datasource=ds_name, params=params))
227
- await client.update_datasource(ds_name, params)
227
+ client.update_datasource(ds_name, params)
228
228
  click.echo(FeedbackManager.success_update_datasource(datasource=ds_name, params=params))
229
229
  make_changes = True
230
230
  alter_response = True
@@ -256,7 +256,7 @@ async def new_ds(
256
256
  if not connector_id:
257
257
  return
258
258
 
259
- current_connector = await client.get_connector_by_id(existing_ds.get("connector", ""))
259
+ current_connector = client.get_connector_by_id(existing_ds.get("connector", ""))
260
260
  if not current_connector:
261
261
  return
262
262
 
@@ -292,7 +292,7 @@ async def new_ds(
292
292
  }
293
293
 
294
294
  try:
295
- await client.update_datasource(ds_name, connector_data)
295
+ client.update_datasource(ds_name, connector_data)
296
296
  click.echo(FeedbackManager.success_promoting_datasource(datasource=ds_name))
297
297
  return
298
298
  except Exception as e:
@@ -309,7 +309,7 @@ async def new_ds(
309
309
  and click.prompt(FeedbackManager.info_ask_for_datasource_confirmation()) == ds_name
310
310
  ): # TODO move to CLI
311
311
  try:
312
- await client.datasource_delete(ds_name)
312
+ client.datasource_delete(ds_name)
313
313
  click.echo(FeedbackManager.success_delete_datasource(datasource=ds_name))
314
314
  except Exception:
315
315
  raise click.ClickException(FeedbackManager.error_removing_datasource(datasource=ds_name))
@@ -329,7 +329,7 @@ async def new_ds(
329
329
  click.echo(FeedbackManager.warning_datasource_already_exists(datasource=ds_name))
330
330
 
331
331
 
332
- async def share_and_unshare_datasource(
332
+ def share_and_unshare_datasource(
333
333
  client: TinyB,
334
334
  datasource: Dict[str, Any],
335
335
  user_token: str,
@@ -346,9 +346,9 @@ async def share_and_unshare_datasource(
346
346
  if current_ws:
347
347
  # Force to get all the workspaces the user can access
348
348
  workspace = current_ws
349
- workspaces = (await client.user_workspaces(version="v1")).get("workspaces", [])
349
+ workspaces = (client.user_workspaces(version="v1")).get("workspaces", [])
350
350
  else:
351
- workspace = await client.user_workspace_branches(version="v1")
351
+ workspace = client.user_workspace_branches(version="v1")
352
352
  workspaces = workspace.get("workspaces", [])
353
353
 
354
354
  if workspace.get("is_branch", False):
@@ -366,7 +366,7 @@ async def share_and_unshare_datasource(
366
366
  f"Unable to share datasource with the workspace {workspace_to_share}. Review that you have the admin permissions on this workspace"
367
367
  )
368
368
 
369
- await user_client.datasource_share(
369
+ user_client.datasource_share(
370
370
  datasource_id=datasource_id,
371
371
  current_workspace_id=workspace.get("id", ""),
372
372
  destination_workspace_id=w.get("id", ""),
@@ -387,7 +387,7 @@ async def share_and_unshare_datasource(
387
387
  workspaces_need_to_unshare = [w for w in shared_with if w not in defined_to_share_with]
388
388
 
389
389
  for w in workspaces_need_to_share:
390
- await user_client.datasource_share(
390
+ user_client.datasource_share(
391
391
  datasource_id=datasource_id,
392
392
  current_workspace_id=workspace.get("id", ""),
393
393
  destination_workspace_id=w.get("id", ""),
@@ -397,7 +397,7 @@ async def share_and_unshare_datasource(
397
397
  )
398
398
 
399
399
  for w in workspaces_need_to_unshare:
400
- await user_client.datasource_unshare(
400
+ user_client.datasource_unshare(
401
401
  datasource_id=datasource_id,
402
402
  current_workspace_id=workspace.get("id", ""),
403
403
  destination_workspace_id=w.get("id", ""),
@@ -13,7 +13,7 @@ from tinybird.tb.modules.config import CLIConfig
13
13
  from tinybird.tb.modules.feedback_manager import FeedbackManager
14
14
 
15
15
 
16
- async def new_pipe(
16
+ def new_pipe(
17
17
  p,
18
18
  config: CLIConfig,
19
19
  tb_client: TinyB,
@@ -53,7 +53,7 @@ async def new_pipe(
53
53
  cli_params["ignore_sql_errors"] = "true" if ignore_sql_errors else "false"
54
54
  cli_params["workspace_id"] = config.get("id", None)
55
55
 
56
- r: requests.Response = await requests_get(f"{host}/v0/playgrounds?{urlencode(cli_params)}", headers=headers)
56
+ r: requests.Response = requests_get(f"{host}/v0/playgrounds?{urlencode(cli_params)}", headers=headers)
57
57
  current_pipe = None
58
58
  pipe_exists = False
59
59
  playgrounds_response = r.json() if r.status_code == 200 else None
@@ -104,7 +104,7 @@ async def new_pipe(
104
104
  body["name"] = p["name"] + "__tb__playground"
105
105
 
106
106
  if pipe_exists and current_pipe:
107
- data = await user_client._req(
107
+ data = user_client._req(
108
108
  f"/v0/playgrounds/{current_pipe['id']}?{urlencode(params)}",
109
109
  method="PUT",
110
110
  headers=post_headers,
@@ -112,7 +112,7 @@ async def new_pipe(
112
112
  )
113
113
 
114
114
  else:
115
- data = await user_client._req(
115
+ data = user_client._req(
116
116
  f"/v0/playgrounds?{urlencode(params)}",
117
117
  method="POST",
118
118
  headers=post_headers,
@@ -126,23 +126,21 @@ async def new_pipe(
126
126
  t = None
127
127
  for tk in p["tokens"]:
128
128
  token_name = tk["token_name"]
129
- t = await tb_client.get_token_by_name(token_name)
129
+ t = tb_client.get_token_by_name(token_name)
130
130
  if t:
131
131
  scopes = [f"PIPES:{tk['permissions']}:{p['name']}"]
132
132
  for x in t["scopes"]:
133
133
  sc = x["type"] if "resource" not in x else f"{x['type']}:{x['resource']}"
134
134
  scopes.append(sc)
135
135
  try:
136
- r = await tb_client.alter_tokens(token_name, scopes)
136
+ r = tb_client.alter_tokens(token_name, scopes)
137
137
  token = r["token"] # type: ignore
138
138
  except Exception as e:
139
139
  raise click.ClickException(FeedbackManager.error_creating_pipe(error=e))
140
140
  else:
141
141
  token_name = tk["token_name"]
142
142
  try:
143
- r = await tb_client.create_token(
144
- token_name, [f"PIPES:{tk['permissions']}:{p['name']}"], "P", p["name"]
145
- )
143
+ r = tb_client.create_token(token_name, [f"PIPES:{tk['permissions']}:{p['name']}"], "P", p["name"])
146
144
  token = r["token"] # type: ignore
147
145
  except Exception as e:
148
146
  raise click.ClickException(FeedbackManager.error_creating_pipe(error=e))
@@ -156,7 +154,7 @@ async def new_pipe(
156
154
  "q": "",
157
155
  "token": token,
158
156
  }
159
- endpoint_url = await tb_client._req(f"/examples/query.http?{urlencode(example_params)}")
157
+ endpoint_url = tb_client._req(f"/examples/query.http?{urlencode(example_params)}")
160
158
  if endpoint_url:
161
159
  endpoint_url = endpoint_url.replace("http://localhost:8001", host)
162
160
  click.echo(f"""** => Test endpoint with:\n** $ curl {endpoint_url}""")
@@ -164,12 +162,12 @@ async def new_pipe(
164
162
  pass
165
163
 
166
164
 
167
- async def get_token_from_main_branch(branch_tb_client: TinyB) -> Optional[str]:
165
+ def get_token_from_main_branch(branch_tb_client: TinyB) -> Optional[str]:
168
166
  token_from_main_branch = None
169
- current_workspace = await branch_tb_client.workspace_info(version="v1")
167
+ current_workspace = branch_tb_client.workspace_info(version="v1")
170
168
  # current workspace is a branch
171
169
  if current_workspace.get("main"):
172
- response = await branch_tb_client.user_workspaces(version="v1")
170
+ response = branch_tb_client.user_workspaces(version="v1")
173
171
  workspaces = response["workspaces"]
174
172
  prod_workspace = next(
175
173
  (workspace for workspace in workspaces if workspace["id"] == current_workspace["main"]), None