tinybird 0.0.1.dev234__py3-none-any.whl → 0.0.1.dev235__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of tinybird might be problematic; see the registry's advisory page for more details.

Files changed (49)
  1. tinybird/tb/__cli__.py +2 -2
  2. tinybird/tb/check_pypi.py +3 -8
  3. tinybird/tb/cli.py +0 -6
  4. tinybird/tb/client.py +314 -340
  5. tinybird/tb/config.py +4 -5
  6. tinybird/tb/modules/build.py +21 -24
  7. tinybird/tb/modules/cicd.py +2 -2
  8. tinybird/tb/modules/cli.py +18 -28
  9. tinybird/tb/modules/common.py +123 -138
  10. tinybird/tb/modules/config.py +2 -4
  11. tinybird/tb/modules/connection.py +21 -26
  12. tinybird/tb/modules/copy.py +7 -9
  13. tinybird/tb/modules/create.py +18 -21
  14. tinybird/tb/modules/datafile/build.py +39 -39
  15. tinybird/tb/modules/datafile/build_common.py +9 -9
  16. tinybird/tb/modules/datafile/build_datasource.py +24 -24
  17. tinybird/tb/modules/datafile/build_pipe.py +11 -13
  18. tinybird/tb/modules/datafile/diff.py +12 -12
  19. tinybird/tb/modules/datafile/format_datasource.py +5 -5
  20. tinybird/tb/modules/datafile/format_pipe.py +6 -6
  21. tinybird/tb/modules/datafile/playground.py +42 -42
  22. tinybird/tb/modules/datafile/pull.py +24 -26
  23. tinybird/tb/modules/datasource.py +42 -56
  24. tinybird/tb/modules/endpoint.py +14 -19
  25. tinybird/tb/modules/info.py +14 -15
  26. tinybird/tb/modules/infra.py +43 -48
  27. tinybird/tb/modules/job.py +7 -10
  28. tinybird/tb/modules/local.py +6 -12
  29. tinybird/tb/modules/local_common.py +4 -4
  30. tinybird/tb/modules/login.py +9 -10
  31. tinybird/tb/modules/materialization.py +7 -10
  32. tinybird/tb/modules/mock.py +8 -9
  33. tinybird/tb/modules/open.py +1 -3
  34. tinybird/tb/modules/pipe.py +2 -4
  35. tinybird/tb/modules/secret.py +12 -16
  36. tinybird/tb/modules/shell.py +7 -20
  37. tinybird/tb/modules/sink.py +6 -8
  38. tinybird/tb/modules/test.py +9 -14
  39. tinybird/tb/modules/tinyunit/tinyunit.py +3 -3
  40. tinybird/tb/modules/token.py +16 -24
  41. tinybird/tb/modules/watch.py +3 -7
  42. tinybird/tb/modules/workspace.py +26 -37
  43. tinybird/tb/modules/workspace_members.py +16 -23
  44. {tinybird-0.0.1.dev234.dist-info → tinybird-0.0.1.dev235.dist-info}/METADATA +1 -1
  45. tinybird-0.0.1.dev235.dist-info/RECORD +89 -0
  46. tinybird-0.0.1.dev234.dist-info/RECORD +0 -89
  47. {tinybird-0.0.1.dev234.dist-info → tinybird-0.0.1.dev235.dist-info}/WHEEL +0 -0
  48. {tinybird-0.0.1.dev234.dist-info → tinybird-0.0.1.dev235.dist-info}/entry_points.txt +0 -0
  49. {tinybird-0.0.1.dev234.dist-info → tinybird-0.0.1.dev235.dist-info}/top_level.txt +0 -0
@@ -31,7 +31,7 @@ from tinybird.tb.modules.datafile.pull import folder_pull
31
31
  from tinybird.tb.modules.feedback_manager import FeedbackManager
32
32
 
33
33
 
34
- async def diff_files(
34
+ def diff_files(
35
35
  from_file: str,
36
36
  to_file: str,
37
37
  from_file_suffix: str = "[remote]",
@@ -45,12 +45,12 @@ async def diff_files(
45
45
  with open(filename) as file:
46
46
  return file.readlines()
47
47
 
48
- async def parse(filename, with_format=True, unroll_includes=False):
48
+ def parse(filename, with_format=True, unroll_includes=False):
49
49
  extensions = Path(filename).suffixes
50
50
  lines = None
51
51
  if is_file_a_datasource(filename):
52
52
  lines = (
53
- await format_datasource(
53
+ format_datasource(
54
54
  filename,
55
55
  unroll_includes=unroll_includes,
56
56
  for_diff=True,
@@ -63,7 +63,7 @@ async def diff_files(
63
63
  )
64
64
  elif (".pipe" in extensions) or (".incl" in extensions):
65
65
  lines = (
66
- await format_pipe(
66
+ format_pipe(
67
67
  filename,
68
68
  DEFAULT_FMT_LINE_LENGTH,
69
69
  unroll_includes=unroll_includes,
@@ -79,8 +79,8 @@ async def diff_files(
79
79
  return [f"{l}\n" for l in lines.split("\n")] if with_format else lines # noqa: E741
80
80
 
81
81
  try:
82
- lines1 = await parse(from_file, with_format)
83
- lines2 = await parse(to_file, with_format, unroll_includes=True)
82
+ lines1 = parse(from_file, with_format)
83
+ lines2 = parse(to_file, with_format, unroll_includes=True)
84
84
  except FileNotFoundError as e:
85
85
  filename = os.path.basename(str(e)).strip("'")
86
86
  raise click.ClickException(FeedbackManager.error_diff_file(filename=filename))
@@ -98,7 +98,7 @@ async def diff_files(
98
98
  return diff
99
99
 
100
100
 
101
- async def diff_command(
101
+ def diff_command(
102
102
  filenames: Optional[List[str]],
103
103
  fmt: bool,
104
104
  client: TinyB,
@@ -121,15 +121,15 @@ async def diff_command(
121
121
  if filenames:
122
122
  if len(filenames) == 1:
123
123
  filenames = [filenames[0], *get_project_filenames(filenames[0])]
124
- await folder_pull(client, target_dir, True, verbose=False)
124
+ folder_pull(client, target_dir, True, verbose=False)
125
125
  else:
126
126
  filenames = get_project_filenames(".")
127
127
  if verbose:
128
128
  click.echo("Saving remote resources in .diff_tmp folder.\n")
129
- await folder_pull(client, target_dir, True, verbose=verbose, progress_bar=progress_bar)
129
+ folder_pull(client, target_dir, True, verbose=verbose, progress_bar=progress_bar)
130
130
 
131
- remote_datasources: List[Dict[str, Any]] = await client.datasources()
132
- remote_pipes: List[Dict[str, Any]] = await client.pipes()
131
+ remote_datasources: List[Dict[str, Any]] = client.datasources()
132
+ remote_pipes: List[Dict[str, Any]] = client.pipes()
133
133
 
134
134
  local_resources = {
135
135
  Path(file).resolve().stem: file
@@ -159,7 +159,7 @@ async def diff_command(
159
159
  suffix = ".datasource" if ".datasource" in file else ".pipe"
160
160
  target = target_dir + os.path.sep + rfilename + suffix
161
161
 
162
- diff_lines = await diff_files(
162
+ diff_lines = diff_files(
163
163
  target, file, with_format=fmt, with_color=(not no_color), client=client, for_deploy=for_deploy
164
164
  )
165
165
  not_empty, diff_lines = peek(diff_lines)
@@ -15,7 +15,7 @@ from tinybird.tb.modules.datafile.format_common import (
15
15
  )
16
16
 
17
17
 
18
- async def format_datasource(
18
+ def format_datasource(
19
19
  filename: str,
20
20
  unroll_includes: bool = False,
21
21
  for_diff: bool = False,
@@ -57,7 +57,7 @@ async def format_datasource(
57
57
  format_tags(file_parts, doc)
58
58
  format_schema(file_parts, doc.nodes[0])
59
59
  format_indices(file_parts, doc.nodes[0])
60
- await format_engine(file_parts, doc.nodes[0], only_ttl=bool(not for_deploy_diff), client=client)
60
+ format_engine(file_parts, doc.nodes[0], only_ttl=bool(not for_deploy_diff), client=client)
61
61
  if for_deploy_diff:
62
62
  format_import_settings(file_parts, doc.nodes[0])
63
63
  format_shared_with(file_parts, doc)
@@ -68,7 +68,7 @@ async def format_datasource(
68
68
  format_tags(file_parts, doc)
69
69
  format_schema(file_parts, doc.nodes[0])
70
70
  format_indices(file_parts, doc.nodes[0])
71
- await format_engine(file_parts, doc.nodes[0])
71
+ format_engine(file_parts, doc.nodes[0])
72
72
  format_include(file_parts, doc, unroll_includes=unroll_includes)
73
73
  format_data_connector(file_parts, doc.nodes[0])
74
74
  format_import_settings(file_parts, doc.nodes[0])
@@ -127,7 +127,7 @@ def format_shared_with(file_parts: List[str], doc: Datafile) -> List[str]:
127
127
  return file_parts
128
128
 
129
129
 
130
- async def format_engine(
130
+ def format_engine(
131
131
  file_parts: List[str], node: Dict[str, Any], only_ttl: bool = False, client: Optional[TinyB] = None
132
132
  ) -> List[str]:
133
133
  if only_ttl:
@@ -137,7 +137,7 @@ async def format_engine(
137
137
  elem = ", ".join([x.strip() for x in arg[1].split(",")])
138
138
  try:
139
139
  if client:
140
- ttl_sql = await client.sql_get_format(f"select {elem}", with_clickhouse_format=True)
140
+ ttl_sql = client.sql_get_format(f"select {elem}", with_clickhouse_format=True)
141
141
  formatted_ttl = ttl_sql[7:]
142
142
  else:
143
143
  formatted_ttl = elem
@@ -35,7 +35,7 @@ def format_node_sql(
35
35
  return file_parts
36
36
 
37
37
 
38
- async def format_node_type(file_parts: List[str], node: Dict[str, Any]) -> List[str]:
38
+ def format_node_type(file_parts: List[str], node: Dict[str, Any]) -> List[str]:
39
39
  node_type = node.get("type", "").lower()
40
40
  node_type_upper = f"TYPE {node_type.upper()}"
41
41
  # Materialized pipe
@@ -44,7 +44,7 @@ async def format_node_type(file_parts: List[str], node: Dict[str, Any]) -> List[
44
44
  file_parts.append(DATAFILE_NEW_LINE)
45
45
  file_parts.append(f"DATASOURCE {node['datasource']}")
46
46
  file_parts.append(DATAFILE_NEW_LINE)
47
- await format_engine(file_parts, node)
47
+ format_engine(file_parts, node)
48
48
 
49
49
  # Copy pipe
50
50
  if node_type == PipeNodeTypes.COPY:
@@ -98,7 +98,7 @@ def format_sql(sql: str, DATAFILE_INDENT: str, line_length: Optional[int] = None
98
98
  return "\n".join([f"{DATAFILE_INDENT}{part}" for part in sql.split("\n") if len(part.strip())])
99
99
 
100
100
 
101
- async def format_node(
101
+ def format_node(
102
102
  file_parts: List[str],
103
103
  node: Dict[str, Any],
104
104
  includes: Dict[str, Any],
@@ -120,10 +120,10 @@ async def format_node(
120
120
  Doc = namedtuple("Doc", ["description"])
121
121
  format_description(file_parts, Doc(node.get("description", "")))
122
122
  format_node_sql(file_parts, node, line_length=line_length, lower_keywords=lower_keywords)
123
- await format_node_type(file_parts, node)
123
+ format_node_type(file_parts, node)
124
124
 
125
125
 
126
- async def format_pipe(
126
+ def format_pipe(
127
127
  filename: str,
128
128
  line_length: Optional[int] = DEFAULT_FMT_LINE_LENGTH,
129
129
  unroll_includes: bool = False,
@@ -171,7 +171,7 @@ async def format_pipe(
171
171
  if unrolled_included_node:
172
172
  doc.nodes.remove(unrolled_included_node)
173
173
  for node in doc.nodes:
174
- await format_node(
174
+ format_node(
175
175
  file_parts,
176
176
  node,
177
177
  doc.includes,
@@ -47,7 +47,7 @@ from tinybird.tb.modules.feedback_manager import FeedbackManager
47
47
  from tinybird.tb.modules.project import Project
48
48
 
49
49
 
50
- async def folder_playground(
50
+ def folder_playground(
51
51
  project: Project,
52
52
  config: CLIConfig,
53
53
  tb_client: TinyB,
@@ -68,8 +68,8 @@ async def folder_playground(
68
68
  fork = False
69
69
  release_created = False
70
70
  folder = str(project.path)
71
- datasources: List[Dict[str, Any]] = await tb_client.datasources()
72
- pipes: List[Dict[str, Any]] = await tb_client.pipes(dependencies=True)
71
+ datasources: List[Dict[str, Any]] = tb_client.datasources()
72
+ pipes: List[Dict[str, Any]] = tb_client.pipes(dependencies=True)
73
73
  build = True
74
74
  dry_run = False
75
75
  force = True
@@ -112,7 +112,7 @@ async def folder_playground(
112
112
 
113
113
  # build graph to get new versions for all the files involved in the query
114
114
  # dependencies need to be processed always to get the versions
115
- dependencies_graph = await build_graph(
115
+ dependencies_graph = build_graph(
116
116
  filenames,
117
117
  tb_client,
118
118
  dir_path=folder,
@@ -148,7 +148,7 @@ async def folder_playground(
148
148
  return True
149
149
  return False
150
150
 
151
- async def push(
151
+ def push(
152
152
  name: str,
153
153
  to_run: Dict[str, Dict[str, Any]],
154
154
  dry_run: bool,
@@ -163,7 +163,7 @@ async def folder_playground(
163
163
  if should_push_file(name, remote_resource_names, force, run_tests):
164
164
  click.echo(FeedbackManager.info_processing_new_resource(name=name, version=""))
165
165
  try:
166
- await exec_file(
166
+ exec_file(
167
167
  to_run[name],
168
168
  config,
169
169
  tb_client,
@@ -222,7 +222,7 @@ async def folder_playground(
222
222
  )
223
223
  )
224
224
  else:
225
- if await name_matches_existing_resource(resource, name, tb_client):
225
+ if name_matches_existing_resource(resource, name, tb_client):
226
226
  if resource == "pipes":
227
227
  click.echo(FeedbackManager.error_pipe_cannot_be_pushed(name=name))
228
228
  else:
@@ -242,7 +242,7 @@ async def folder_playground(
242
242
  extension = "pipe" if resource == "pipes" else "datasource"
243
243
  click.echo(FeedbackManager.info_building_resource(name=f"{name}.{extension}", version=""))
244
244
  else:
245
- if await name_matches_existing_resource(resource, name, tb_client):
245
+ if name_matches_existing_resource(resource, name, tb_client):
246
246
  if resource == "pipes":
247
247
  click.echo(FeedbackManager.warning_pipe_cannot_be_pushed(name=name))
248
248
  else:
@@ -250,7 +250,7 @@ async def folder_playground(
250
250
  else:
251
251
  click.echo(FeedbackManager.warning_dry_name_already_exists(name=name))
252
252
 
253
- async def push_files(
253
+ def push_files(
254
254
  dependency_graph: GraphDependencies,
255
255
  dry_run: bool = False,
256
256
  ):
@@ -291,7 +291,7 @@ async def folder_playground(
291
291
  error_msg = FeedbackManager.error_connector_require_post_release(connector=connector)
292
292
  raise click.ClickException(error_msg)
293
293
 
294
- await push(
294
+ push(
295
295
  name,
296
296
  resources_to_run_fork_downstream,
297
297
  dry_run,
@@ -326,7 +326,7 @@ async def folder_playground(
326
326
  if name in processed or not is_endpoint(resources_to_run_fork_downstream[name]):
327
327
  continue
328
328
 
329
- await push(
329
+ push(
330
330
  name,
331
331
  resources_to_run_fork_downstream,
332
332
  dry_run,
@@ -344,7 +344,7 @@ async def folder_playground(
344
344
  if name in processed or is_materialized(resources_to_run_fork_downstream.get(name)):
345
345
  continue
346
346
 
347
- await push(
347
+ push(
348
348
  name,
349
349
  resources_to_run_fork_downstream,
350
350
  dry_run,
@@ -362,7 +362,7 @@ async def folder_playground(
362
362
  if name in processed or not is_materialized(resources_to_run_fork_downstream.get(name)):
363
363
  continue
364
364
 
365
- await push(
365
+ push(
366
366
  name,
367
367
  resources_to_run_fork_downstream,
368
368
  dry_run,
@@ -371,7 +371,7 @@ async def folder_playground(
371
371
  )
372
372
  processed.add(name)
373
373
 
374
- await push_files(dependencies_graph, dry_run)
374
+ push_files(dependencies_graph, dry_run)
375
375
 
376
376
  if not dry_run and not run_tests and verbose:
377
377
  click.echo(FeedbackManager.info_not_pushing_fixtures())
@@ -379,19 +379,19 @@ async def folder_playground(
379
379
  return dependencies_graph.to_run
380
380
 
381
381
 
382
- async def name_matches_existing_resource(resource: str, name: str, tb_client: TinyB):
382
+ def name_matches_existing_resource(resource: str, name: str, tb_client: TinyB):
383
383
  if resource == "datasources":
384
- current_pipes: List[Dict[str, Any]] = await tb_client.pipes()
384
+ current_pipes: List[Dict[str, Any]] = tb_client.pipes()
385
385
  if name in [x["name"] for x in current_pipes]:
386
386
  return True
387
387
  else:
388
- current_datasources: List[Dict[str, Any]] = await tb_client.datasources()
388
+ current_datasources: List[Dict[str, Any]] = tb_client.datasources()
389
389
  if name in [x["name"] for x in current_datasources]:
390
390
  return True
391
391
  return False
392
392
 
393
393
 
394
- async def exec_file(
394
+ def exec_file(
395
395
  r: Dict[str, Any],
396
396
  config: CLIConfig,
397
397
  tb_client: TinyB,
@@ -428,7 +428,7 @@ async def exec_file(
428
428
  if debug:
429
429
  click.echo(FeedbackManager.debug_running_file(file=pp.pformat(r)))
430
430
  if r["resource"] == "pipes":
431
- await new_pipe(
431
+ new_pipe(
432
432
  r,
433
433
  config,
434
434
  tb_client,
@@ -636,7 +636,7 @@ class GraphDependencies:
636
636
  all_resources: Dict[str, Dict[str, Any]]
637
637
 
638
638
 
639
- async def process(
639
+ def process(
640
640
  filename: str,
641
641
  tb_client: TinyB,
642
642
  deps: List[str],
@@ -657,7 +657,7 @@ async def process(
657
657
  embedded_datasources = {} if embedded_datasources is None else embedded_datasources
658
658
 
659
659
  try:
660
- res = await process_file(
660
+ res = process_file(
661
661
  filename,
662
662
  tb_client,
663
663
  skip_connectors=skip_connectors,
@@ -745,7 +745,7 @@ async def process(
745
745
  return os.path.basename(name), warnings
746
746
 
747
747
 
748
- async def get_processed(
748
+ def get_processed(
749
749
  filenames: Iterable[str],
750
750
  changed: Optional[Dict[str, Any]] = None,
751
751
  verbose: bool = False,
@@ -776,7 +776,7 @@ async def get_processed(
776
776
  if resource in changed and (not changed[resource] or changed[resource] in ["shared", "remote"]):
777
777
  continue
778
778
  if os.path.isdir(filename):
779
- await get_processed(
779
+ get_processed(
780
780
  filenames=get_project_filenames(filename),
781
781
  changed=changed,
782
782
  verbose=verbose,
@@ -803,7 +803,7 @@ async def get_processed(
803
803
  if tb_client is None:
804
804
  raise ValueError("tb_client cannot be None")
805
805
 
806
- name, warnings = await process(
806
+ name, warnings = process(
807
807
  filename=filename,
808
808
  tb_client=tb_client,
809
809
  deps=deps_list,
@@ -833,7 +833,7 @@ async def get_processed(
833
833
  )
834
834
 
835
835
 
836
- async def build_graph(
836
+ def build_graph(
837
837
  filenames: Iterable[str],
838
838
  tb_client: TinyB,
839
839
  dir_path: Optional[str] = None,
@@ -868,7 +868,7 @@ async def build_graph(
868
868
  # When using fork-downstream or --only-changes, we need to generate all the graph of all the resources and their dependencies
869
869
  # This way we can add more resources into the to_run dictionary if needed.
870
870
  if process_dependencies and only_changes:
871
- all_dependencies_graph = await build_graph(
871
+ all_dependencies_graph = build_graph(
872
872
  get_project_filenames(dir_path),
873
873
  tb_client,
874
874
  dir_path=dir_path,
@@ -886,7 +886,7 @@ async def build_graph(
886
886
 
887
887
  processed: Set[str] = set()
888
888
 
889
- await get_processed(
889
+ get_processed(
890
890
  filenames=filenames,
891
891
  changed=changed,
892
892
  verbose=verbose,
@@ -938,7 +938,7 @@ async def build_graph(
938
938
  click.echo(FeedbackManager.info_skipping_resource(resource=processed_filename))
939
939
  continue
940
940
  click.echo(FeedbackManager.info_processing_file(filename=processed_filename))
941
- await process(
941
+ process(
942
942
  filename=str(f),
943
943
  tb_client=tb_client,
944
944
  deps=deps,
@@ -957,7 +957,7 @@ async def build_graph(
957
957
  return GraphDependencies(dep_map, to_run, all_dep_map, all_resources)
958
958
 
959
959
 
960
- async def process_file(
960
+ def process_file(
961
961
  filename: str,
962
962
  tb_client: TinyB,
963
963
  skip_connectors: bool = False,
@@ -978,7 +978,7 @@ async def process_file(
978
978
  params[f"engine_{k}"] = v
979
979
  return params
980
980
 
981
- async def get_kafka_params(node: Dict[str, Any]):
981
+ def get_kafka_params(node: Dict[str, Any]):
982
982
  params = {key: value for key, value in node.items() if key.startswith("kafka")}
983
983
 
984
984
  if not skip_connectors:
@@ -994,7 +994,7 @@ async def process_file(
994
994
  "kafka_sasl_mechanism": params.get("kafka_sasl_mechanism", None),
995
995
  }
996
996
 
997
- connector = await tb_client.get_connection(**connector_params)
997
+ connector = tb_client.get_connection(**connector_params)
998
998
  if not connector:
999
999
  click.echo(
1000
1000
  FeedbackManager.info_creating_kafka_connection(connection_name=params["kafka_connection_name"])
@@ -1008,7 +1008,7 @@ async def process_file(
1008
1008
  if not all(required_params):
1009
1009
  raise click.ClickException(FeedbackManager.error_unknown_kafka_connection(datasource=name))
1010
1010
 
1011
- connector = await tb_client.connection_create_kafka(**connector_params)
1011
+ connector = tb_client.connection_create_kafka(**connector_params)
1012
1012
  except Exception as e:
1013
1013
  raise click.ClickException(
1014
1014
  FeedbackManager.error_connection_create(
@@ -1027,7 +1027,7 @@ async def process_file(
1027
1027
 
1028
1028
  return params
1029
1029
 
1030
- async def get_import_params(datasource: Dict[str, Any], node: Dict[str, Any]) -> Dict[str, Any]:
1030
+ def get_import_params(datasource: Dict[str, Any], node: Dict[str, Any]) -> Dict[str, Any]:
1031
1031
  params: Dict[str, Any] = {key: value for key, value in node.items() if key.startswith("import_")}
1032
1032
 
1033
1033
  if len(params) == 0 or skip_connectors:
@@ -1036,7 +1036,7 @@ async def process_file(
1036
1036
  service: Optional[str] = node.get("import_service", None)
1037
1037
 
1038
1038
  if service and service.lower() == "bigquery":
1039
- if not await tb_client.check_gcp_read_permissions():
1039
+ if not tb_client.check_gcp_read_permissions():
1040
1040
  raise click.ClickException(FeedbackManager.error_unknown_bq_connection(datasource=datasource["name"]))
1041
1041
 
1042
1042
  # Bigquery doesn't have a datalink, so we can stop here
@@ -1052,7 +1052,7 @@ async def process_file(
1052
1052
  if not connector_id:
1053
1053
  assert isinstance(connector_name, str)
1054
1054
 
1055
- connector: Optional[Dict[str, Any]] = await tb_client.get_connector(connector_name, service)
1055
+ connector: Optional[Dict[str, Any]] = tb_client.get_connector(connector_name, service)
1056
1056
 
1057
1057
  if not connector:
1058
1058
  raise Exception(
@@ -1102,13 +1102,13 @@ async def process_file(
1102
1102
  if x["default_value"] and x["default_value"].lower().startswith("materialized"):
1103
1103
  # turn expression to a select query to sql_get_used_tables can get the used tables
1104
1104
  q = "select " + x["default_value"][len("materialized") :]
1105
- tables = await tb_client.sql_get_used_tables(q)
1105
+ tables = tb_client.sql_get_used_tables(q)
1106
1106
  # materialized columns expressions could have joins so we need to add them as a dep
1107
1107
  deps += tables
1108
1108
  # generate replacements and replace the query
1109
1109
  replacements = {t: t for t in tables}
1110
1110
 
1111
- replaced_results = await tb_client.replace_tables(q, replacements)
1111
+ replaced_results = tb_client.replace_tables(q, replacements)
1112
1112
  x["default_value"] = replaced_results.replace("SELECT", "materialized", 1)
1113
1113
  if x.get("jsonpath", None):
1114
1114
  _format = "ndjson"
@@ -1146,7 +1146,7 @@ async def process_file(
1146
1146
  if "import_service" in node or "import_connection_name" in node:
1147
1147
  VALID_SERVICES: Tuple[str, ...] = ("bigquery", "snowflake", "s3", "s3_iamrole", "gcs", "dynamodb")
1148
1148
 
1149
- import_params = await get_import_params(params, node)
1149
+ import_params = get_import_params(params, node)
1150
1150
 
1151
1151
  service = import_params.get("import_service", None)
1152
1152
  if service and service not in VALID_SERVICES:
@@ -1172,7 +1172,7 @@ async def process_file(
1172
1172
  period: int = DEFAULT_CRON_PERIOD
1173
1173
 
1174
1174
  if current_ws is not None:
1175
- workspaces = (await tb_client.user_workspaces(version="v1")).get("workspaces", [])
1175
+ workspaces = (tb_client.user_workspaces(version="v1")).get("workspaces", [])
1176
1176
  workspace_rate_limits: Dict[str, Dict[str, int]] = next(
1177
1177
  (w.get("rate_limits", {}) for w in workspaces if w["id"] == current_ws["id"]), {}
1178
1178
  )
@@ -1224,7 +1224,7 @@ async def process_file(
1224
1224
  params[replacement] = value
1225
1225
 
1226
1226
  if "kafka_connection_name" in node:
1227
- kafka_params = await get_kafka_params(node)
1227
+ kafka_params = get_kafka_params(node)
1228
1228
  params.update(kafka_params)
1229
1229
  del params["format"]
1230
1230
 
@@ -1289,7 +1289,7 @@ async def process_file(
1289
1289
  sql_rendered = sql
1290
1290
 
1291
1291
  try:
1292
- dependencies = await tb_client.sql_get_used_tables(sql_rendered, raising=True, is_copy=is_copy)
1292
+ dependencies = tb_client.sql_get_used_tables(sql_rendered, raising=True, is_copy=is_copy)
1293
1293
  deps += [t for t in dependencies if t not in [n["name"] for n in doc.nodes]]
1294
1294
 
1295
1295
  except Exception as e:
@@ -1316,7 +1316,7 @@ async def process_file(
1316
1316
 
1317
1317
  replacements = {x: x for x in deps if x not in [n["name"] for n in doc.nodes]}
1318
1318
 
1319
- # FIXME: Ideally we should use await tb_client.replace_tables(sql, replacements)
1319
+ # FIXME: Ideally we should use tb_client.replace_tables(sql, replacements)
1320
1320
  for old, new in replacements.items():
1321
1321
  sql = re.sub("([\t \\n']+|^)" + old + "([\t \\n'\\)]+|$)", "\\1" + new + "\\2", sql)
1322
1322
 
@@ -1,8 +1,6 @@
1
- from asyncio import Semaphore, gather
2
1
  from pathlib import Path
3
2
  from typing import Any, Optional
4
3
 
5
- import aiofiles
6
4
  import click
7
5
 
8
6
  from tinybird.tb.client import AuthNoTokenException, TinyB
@@ -11,7 +9,7 @@ from tinybird.tb.modules.datafile.format_pipe import format_pipe
11
9
  from tinybird.tb.modules.feedback_manager import FeedbackManager
12
10
 
13
11
 
14
- async def folder_pull(
12
+ def folder_pull(
15
13
  client: TinyB,
16
14
  folder: str,
17
15
  force: bool,
@@ -36,24 +34,24 @@ async def folder_pull(
36
34
  return "pipes"
37
35
  return None
38
36
 
39
- async def write_files(
37
+ def write_files(
40
38
  resources: list[dict[str, Any]],
41
39
  extension: str,
42
40
  get_resource_function: str,
43
41
  progress_bar: bool = False,
44
42
  fmt: bool = False,
45
43
  ):
46
- async def write_resource(k: dict[str, Any]):
44
+ def write_resource(k: dict[str, Any]):
47
45
  name = f"{k['name']}.{extension}"
48
46
  try:
49
- resource = await getattr(client, get_resource_function)(k["name"])
47
+ resource = getattr(client, get_resource_function)(k["name"])
50
48
  resource_to_write = resource
51
49
 
52
50
  if fmt:
53
51
  if extension == "datasource":
54
- resource_to_write = await format_datasource(name, content=resource)
52
+ resource_to_write = format_datasource(name, content=resource)
55
53
  elif extension == "pipe":
56
- resource_to_write = await format_pipe(name, content=resource)
54
+ resource_to_write = format_pipe(name, content=resource)
57
55
 
58
56
  dest_folder = folder
59
57
  if "." in k["name"]:
@@ -71,9 +69,9 @@ async def folder_pull(
71
69
  if verbose:
72
70
  click.echo(FeedbackManager.info_writing_resource(resource=f))
73
71
  if not f.exists() or force:
74
- async with aiofiles.open(f, "w") as fd:
72
+ with open(f, "w") as fd:
75
73
  if resource_to_write:
76
- await fd.write(resource_to_write)
74
+ fd.write(resource_to_write)
77
75
  else:
78
76
  if verbose:
79
77
  click.echo(FeedbackManager.info_skip_already_exists())
@@ -83,31 +81,31 @@ async def folder_pull(
83
81
  if progress_bar:
84
82
  with click.progressbar(resources, label=f"Pulling {extension}s") as resources: # type: ignore
85
83
  for k in resources:
86
- await write_resource(k)
84
+ write_resource(k)
87
85
  else:
88
86
  tasks = [write_resource(k) for k in resources]
89
- await _gather_with_concurrency(5, *tasks)
87
+ _gather_with_concurrency(5, *tasks)
90
88
 
91
89
  try:
92
- datasources = await client.datasources()
93
- pipes = await client.pipes()
94
- connections = await client.connections(skip_bigquery=True)
90
+ datasources = client.datasources()
91
+ pipes = client.pipes()
92
+ connections = client.connections(skip_bigquery=True)
95
93
 
96
- await write_files(
94
+ write_files(
97
95
  resources=datasources,
98
96
  extension="datasource",
99
97
  get_resource_function="datasource_file",
100
98
  progress_bar=progress_bar,
101
99
  fmt=fmt,
102
100
  )
103
- await write_files(
101
+ write_files(
104
102
  resources=pipes,
105
103
  extension="pipe",
106
104
  get_resource_function="pipe_file",
107
105
  progress_bar=progress_bar,
108
106
  fmt=fmt,
109
107
  )
110
- await write_files(
108
+ write_files(
111
109
  resources=connections,
112
110
  extension="connection",
113
111
  get_resource_function="connection_file",
@@ -122,11 +120,11 @@ async def folder_pull(
122
120
  raise click.ClickException(FeedbackManager.error_pull(error=str(e)))
123
121
 
124
122
 
125
- async def _gather_with_concurrency(n, *tasks):
126
- semaphore = Semaphore(n)
127
-
128
- async def sem_task(task):
129
- async with semaphore:
130
- return await task
131
-
132
- return await gather(*(sem_task(task) for task in tasks))
123
+ def _gather_with_concurrency(n, *tasks):
124
+ results = []
125
+ for task in tasks:
126
+ if callable(task):
127
+ results.append(task())
128
+ else:
129
+ results.append(task)
130
+ return results