tinybird 0.0.1.dev43__py3-none-any.whl → 0.0.1.dev44__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of tinybird might be problematic.

@@ -15,14 +15,12 @@ from tinybird.client import TinyB
 from tinybird.sql import parse_table_structure, schema_to_sql_columns
 from tinybird.sql_template import get_used_tables_in_template, render_sql_template
 from tinybird.tb.modules.common import get_ca_pem_content
-from tinybird.tb.modules.datafile.build_common import update_tags_in_resource
-from tinybird.tb.modules.datafile.build_datasource import is_datasource, new_ds
+from tinybird.tb.modules.datafile.build_datasource import is_datasource
 from tinybird.tb.modules.datafile.build_pipe import (
     get_target_materialized_data_source_name,
     is_endpoint,
     is_endpoint_with_no_dependencies,
     is_materialized,
-    new_pipe,
 )
 from tinybird.tb.modules.datafile.common import (
     DEFAULT_CRON_PERIOD,
@@ -44,95 +42,32 @@ from tinybird.tb.modules.datafile.exceptions import AlreadyExistsException, Incl
 from tinybird.tb.modules.datafile.parse_datasource import parse_datasource
 from tinybird.tb.modules.datafile.parse_pipe import parse_pipe
 from tinybird.tb.modules.feedback_manager import FeedbackManager
-from tinybird.tb.modules.local_common import get_tinybird_local_config
+from tinybird.tb.modules.local_common import get_tinybird_local_client
+from tinybird.tb.modules.project import Project
 
 
 async def folder_build(
-    tb_client: TinyB,
+    project: Project,
     filenames: Optional[List[str]] = None,
-    folder: str = ".",
-    ignore_sql_errors: bool = False,
     is_internal: bool = False,
     is_vendor: bool = False,
     current_ws: Optional[Dict[str, Any]] = None,
     local_ws: Optional[Dict[str, Any]] = None,
     watch: bool = False,
 ):
-    config = await get_tinybird_local_config(folder)
     build = True
     dry_run = False
     force = True
     only_changes = True
     debug = False
-    check = True
-    populate = False
-    populate_subset = None
-    populate_condition = None
-    tests_to_run = 0
-    override_datasource = False
-    skip_confirmation = True
-    wait = False
-    unlink_on_populate_error = False
-    only_response_times = False
     run_tests = False
     verbose = False
-    as_standard = False
     raise_on_exists = False
     fork_downstream = True
     fork = False
     release_created = False
-    tests_relative_change = 0.01
-    tests_sample_by_params = 0
-    tests_filter_by = None
-    tests_failfast = False
-    tests_ignore_order = False
-    tests_validate_processed_bytes = False
-    tests_check_requests_from_branch = False
-    vendor_paths = []
-
-    vendor_path = Path("vendor")
-    user_token = config.get_user_token()
-    user_client = deepcopy(tb_client)
-
-    if user_token:
-        user_client.token = user_token
-
-    vendor_workspaces = []
-
-    if vendor_path.exists() and not is_vendor and not watch:
-        user_workspaces = await user_client.user_workspaces()
-        for x in vendor_path.iterdir():
-            if x.is_dir() and x.name:
-                if user_token:
-                    try:
-                        ws_to_delete = next((ws for ws in user_workspaces["workspaces"] if ws["name"] == x.name), None)
-                        if ws_to_delete:
-                            await user_client.delete_workspace(ws_to_delete["id"], hard_delete_confirmation=x.name)
-                    except Exception:
-                        pass
-                vendor_ws = await user_client.create_workspace(x.name, template=None)
-                vendor_workspaces.append(vendor_ws)
-                vendor_paths.append((x.name, str(x)))
-
-    workspaces: List[Dict[str, Any]] = (await user_client.user_workspaces()).get("workspaces", [])
-
-    if not is_vendor:
-        local_workspace = await tb_client.workspace_info()
-        local_ws_id = local_workspace.get("id")
-        local_ws = next((ws for ws in workspaces if ws["id"] == local_ws_id), {})
-
-    current_ws = current_ws or local_ws
-
-    for vendor_ws in [ws for ws in workspaces if ws["name"] in [ws["name"] for ws in vendor_workspaces]]:
-        ws_client = deepcopy(tb_client)
-        ws_client.token = vendor_ws["token"]
-        shared_ws_path = Path(folder) / "vendor" / vendor_ws["name"]
-
-        if shared_ws_path.exists() and not is_vendor and not watch:
-            await folder_build(
-                ws_client, folder=shared_ws_path.as_posix(), is_vendor=True, current_ws=vendor_ws, local_ws=local_ws
-            )
-
+    folder = str(project.path)
+    tb_client = await get_tinybird_local_client(folder)
     datasources: List[Dict[str, Any]] = await tb_client.datasources()
     pipes: List[Dict[str, Any]] = await tb_client.pipes(dependencies=True)
 
@@ -150,7 +85,7 @@ async def folder_build(
         dir_path=folder,
         process_dependencies=True,
         skip_connectors=True,
-        vendor_paths=vendor_paths,
+        vendor_paths=[],
         current_ws=current_ws,
         only_changes=only_changes,
         fork_downstream=fork_downstream,
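Note on the two hunks above: folder_build no longer receives a TinyB client, a folder path, or vendor paths; it derives the folder and a local client from the Project it is given (only project.path is used in this diff). A minimal sketch of an adapted call site, assuming Project can be constructed from a folder path and that folder_build is importable from its module (not shown in this diff):

    import asyncio
    from pathlib import Path

    from tinybird.tb.modules.project import Project
    # from ... import folder_build  # module path not shown in this diff

    async def main() -> None:
        # Before: await folder_build(tb_client, folder=".", ignore_sql_errors=False)
        # After: the local client is resolved internally via get_tinybird_local_client(str(project.path)).
        project = Project(path=Path("."))  # assumed constructor; only project.path appears in the diff
        await folder_build(project, filenames=None, watch=False)

    asyncio.run(main())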
@@ -191,63 +126,14 @@ async def folder_build(
             resource = to_run[name]["resource"]
             if not dry_run:
                 if should_push_file(name, remote_resource_names, force, run_tests):
-                    click.echo(FeedbackManager.info_processing_new_resource(name=name, version=""))
-                    try:
-                        await exec_file(
-                            to_run[name],
-                            tb_client,
-                            force,
-                            check,
-                            debug and verbose,
-                            populate,
-                            populate_subset,
-                            populate_condition,
-                            unlink_on_populate_error,
-                            wait,
-                            user_token,
-                            override_datasource,
-                            ignore_sql_errors,
-                            skip_confirmation,
-                            only_response_times,
-                            run_tests,
-                            as_standard,
-                            tests_to_run,
-                            tests_relative_change,
-                            tests_sample_by_params,
-                            tests_filter_by,
-                            tests_failfast,
-                            tests_ignore_order,
-                            tests_validate_processed_bytes,
-                            tests_check_requests_from_branch,
-                            current_ws,
-                            local_ws,
-                            fork_downstream,
-                            fork,
-                            build,
-                            is_vendor,
-                        )
-                        if not run_tests:
-                            click.echo(
-                                FeedbackManager.success_create(
-                                    name=(
-                                        name
-                                        if to_run[name]["version"] is None
-                                        else f'{name}__v{to_run[name]["version"]}'
-                                    )
-                                )
-                            )
-                    except Exception as e:
-                        filename = to_run[name]["filename"]
-                        exception = FeedbackManager.error_push_file_exception(
-                            filename=filename,
-                            error=e,
-                        )
-                        raise click.ClickException(exception)
+                    filename = to_run[name]["filename"]
+                    filename = filename.replace(f"{folder}/", "")
+                    click.echo(FeedbackManager.info(message=f"✓ {filename}"))
                 else:
                     if raise_on_exists:
                         raise AlreadyExistsException(
                             FeedbackManager.warning_name_already_exists(
-                                name=name if to_run[name]["version"] is None else f'{name}__v{to_run[name]["version"]}'
+                                name=name if to_run[name]["version"] is None else f"{name}__v{to_run[name]['version']}"
                             )
                         )
                     else:
@@ -262,7 +148,7 @@ async def folder_build(
                             name=(
                                 name
                                 if to_run[name]["version"] is None
-                                else f'{name}__v{to_run[name]["version"]}'
+                                else f"{name}__v{to_run[name]['version']}"
                             )
                         )
                     )
@@ -304,19 +190,6 @@ async def folder_build(
     # We need to deploy the datasources from left to right as some datasources might have MV that depend on the column types of previous datasources. Ex: `test_change_column_type_landing_datasource` test
     groups = [group for group in toposort(dependencies_graph_fork_downstream)]
 
-    for group in groups:
-        for name in group:
-            is_vendor = resources_to_run_fork_downstream.get(name, {}).get("filename", "").startswith("vendor/")
-            if not is_vendor:
-                try:
-                    await tb_client.datasource_delete(name, force=True)
-                except Exception:
-                    pass
-                try:
-                    await tb_client.pipe_delete(name)
-                except Exception:
-                    pass
-
     groups.reverse()
     for group in groups:
         for name in group:
@@ -433,88 +306,6 @@ async def name_matches_existing_resource(resource: str, name: str, tb_client: Ti
     return False
 
 
-async def exec_file(
-    r: Dict[str, Any],
-    tb_client: TinyB,
-    force: bool,
-    check: bool,
-    debug: bool,
-    populate: bool,
-    populate_subset,
-    populate_condition,
-    unlink_on_populate_error,
-    wait_populate,
-    user_token: Optional[str],
-    override_datasource: bool = False,
-    ignore_sql_errors: bool = False,
-    skip_confirmation: bool = False,
-    only_response_times: bool = False,
-    run_tests=False,
-    as_standard=False,
-    tests_to_run: int = 0,
-    tests_relative_change: float = 0.01,
-    tests_to_sample_by_params: int = 0,
-    tests_filter_by: Optional[List[str]] = None,
-    tests_failfast: bool = False,
-    tests_ignore_order: bool = False,
-    tests_validate_processed_bytes: bool = False,
-    tests_check_requests_from_branch: bool = False,
-    current_ws: Optional[Dict[str, Any]] = None,
-    local_ws: Optional[Dict[str, Any]] = None,
-    fork_downstream: Optional[bool] = False,
-    fork: Optional[bool] = False,
-    build: Optional[bool] = False,
-    is_vendor: Optional[bool] = False,
-):
-    if debug:
-        click.echo(FeedbackManager.debug_running_file(file=pp.pformat(r)))
-    if r["resource"] == "pipes":
-        await new_pipe(
-            r,
-            tb_client,
-            force,
-            check,
-            populate,
-            populate_subset,
-            populate_condition,
-            unlink_on_populate_error,
-            wait_populate,
-            ignore_sql_errors=ignore_sql_errors,
-            only_response_times=only_response_times,
-            run_tests=run_tests,
-            as_standard=as_standard,
-            tests_to_run=tests_to_run,
-            tests_relative_change=tests_relative_change,
-            tests_to_sample_by_params=tests_to_sample_by_params,
-            tests_filter_by=tests_filter_by,
-            tests_failfast=tests_failfast,
-            tests_ignore_order=tests_ignore_order,
-            tests_validate_processed_bytes=tests_validate_processed_bytes,
-            override_datasource=override_datasource,
-            tests_check_requests_from_branch=tests_check_requests_from_branch,
-            fork_downstream=fork_downstream,
-            fork=fork,
-        )
-        await update_tags_in_resource(r, "pipe", tb_client)
-    elif r["resource"] == "datasources":
-        await new_ds(
-            r,
-            tb_client,
-            user_token,
-            force,
-            skip_confirmation=skip_confirmation,
-            current_ws=current_ws,
-            local_ws=local_ws,
-            fork_downstream=fork_downstream,
-            fork=fork,
-            build=build,
-            is_vendor=is_vendor,
-        )
-        await update_tags_in_resource(r, "datasource", tb_client)
-    else:
-        raise click.ClickException(FeedbackManager.error_unknown_resource(resource=r["resource"]))
-
-
 def get_remote_resource_name_without_version(remote_resource_name: str) -> str:
     """
     >>> get_remote_resource_name_without_version("r__datasource")
@@ -30,7 +30,7 @@ from tinybird.tb.modules.feedback_manager import FeedbackManager
 @cli.group()
 @click.pass_context
 def datasource(ctx):
-    """Data Sources commands"""
+    """Data Source commands"""
 
 
 @datasource.command(name="ls")
@@ -191,7 +191,7 @@ def deployment_rollback(ctx: click.Context) -> None:
     rollback_deployment(client.host, HEADERS)
 
 
-@cli.command(name="deploy")
+@cli.command(name="deploy", hidden=True)
 @click.option(
     "--wait/--no-wait",
     is_flag=True,
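Note: click's `hidden=True` only removes a command from its group's `--help` listing; the command can still be invoked by name. A minimal standalone sketch (not Tinybird code) illustrating the behavior:

    import click

    @click.group()
    def cli():
        """Toy CLI to show click's hidden flag."""

    @cli.command(name="deploy", hidden=True)
    def deploy():
        """Not listed in `cli --help`, but still runnable as `cli deploy`."""
        click.echo("deploying")

    if __name__ == "__main__":
        cli()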
@@ -5,7 +5,7 @@
 
 import json
 import re
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, List, Optional, Tuple
 from urllib.parse import urlencode
 
 import click
@@ -21,7 +21,7 @@ from tinybird.tb.modules.exceptions import CLIPipeException
 from tinybird.tb.modules.feedback_manager import FeedbackManager
 
 
-@cli.group(hidden=True)
+@cli.group()
 @click.pass_context
 def endpoint(ctx):
     """Endpoint commands"""
@@ -185,3 +185,90 @@ def get_endpoint_token(tokens: List[Dict[str, Any]], pipe_name: str) -> Optional
         break
 
     return token
+
+
+@endpoint.command(name="stats")
+@click.argument("pipes", nargs=-1)
+@click.option(
+    "--format",
+    "format_",
+    type=click.Choice(["json"], case_sensitive=False),
+    default=None,
+    help="Force a type of the output. To parse the output, keep in mind to use `tb --no-version-warning endpoint stats` option.",
+)
+@click.pass_context
+@coro
+async def endpoint_stats(ctx: click.Context, pipes: Tuple[str, ...], format_: str):
+    """
+    Print endpoint stats for the last 7 days
+    """
+    client: TinyB = ctx.ensure_object(dict)["client"]
+    all_pipes = await client.pipes()
+    pipes_to_get_stats = []
+    pipes_ids: Dict = {}
+
+    if pipes:
+        # We filter by the pipes we want to look for
+        all_pipes = [pipe for pipe in all_pipes if pipe["name"] in pipes]
+
+    for pipe in all_pipes:
+        name_version = get_name_version(pipe["name"])
+        if name_version["name"] in pipe["name"]:
+            pipes_to_get_stats.append(f"'{pipe['id']}'")
+            pipes_ids[pipe["id"]] = name_version
+
+    if not pipes_to_get_stats:
+        if format_ == "json":
+            click.echo(json.dumps({"pipes": []}, indent=2))
+        else:
+            click.echo(FeedbackManager.info_no_pipes_stats())
+        return
+
+    sql = f"""
+        SELECT
+            pipe_id id,
+            sumIf(view_count, date > now() - interval 7 day) requests,
+            sumIf(error_count, date > now() - interval 7 day) errors,
+            avgMergeIf(avg_duration_state, date > now() - interval 7 day) latency
+        FROM tinybird.pipe_stats
+        WHERE pipe_id in ({",".join(pipes_to_get_stats)})
+        GROUP BY pipe_id
+        ORDER BY requests DESC
+        FORMAT JSON
+    """
+
+    res = await client.query(sql)
+
+    if res and "error" in res:
+        raise CLIPipeException(FeedbackManager.error_exception(error=str(res["error"])))
+
+    columns = ["name", "request count", "error count", "avg latency"]
+    table_human_readable: List[Tuple] = []
+    table_machine_readable: List[Dict] = []
+    if res and "data" in res:
+        for x in res["data"]:
+            tk = pipes_ids[x["id"]]
+            table_human_readable.append(
+                (
+                    tk["name"],
+                    x["requests"],
+                    x["errors"],
+                    x["latency"],
+                )
+            )
+            table_machine_readable.append(
+                {
+                    "name": tk["name"],
+                    "requests": x["requests"],
+                    "errors": x["errors"],
+                    "latency": x["latency"],
+                }
+            )
+
+        table_human_readable.sort(key=lambda x: (x[1], x[0]))
+        table_machine_readable.sort(key=lambda x: x["name"])
+
+    if format_ == "json":
+        click.echo(json.dumps({"pipes": table_machine_readable}, indent=2))
+    else:
+        echo_safe_humanfriendly_tables_format_smart_table(table_human_readable, column_names=columns)
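Note: the new `tb endpoint stats` subcommand aggregates the last 7 days of `tinybird.pipe_stats` per endpoint. With `--format json` it prints a `{"pipes": [...]}` document whose entries carry `name`, `requests`, `errors` and `latency`. A small sketch of a consumer, assuming the `tb` CLI is installed and authenticated (the command's own help text suggests `--no-version-warning` when parsing the output):

    import json
    import subprocess

    # Run the new subcommand and capture its JSON output.
    result = subprocess.run(
        ["tb", "--no-version-warning", "endpoint", "stats", "--format", "json"],
        capture_output=True,
        text=True,
        check=True,
    )
    stats = json.loads(result.stdout)
    for pipe in stats["pipes"]:
        print(f"{pipe['name']}: {pipe['requests']} requests, {pipe['errors']} errors, avg latency {pipe['latency']}")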
@@ -0,0 +1,146 @@
+import json
+import re
+
+import click
+
+from tinybird.client import TinyB
+from tinybird.tb.modules.cli import cli
+from tinybird.tb.modules.common import (
+    coro,
+    create_tb_client,
+    echo_safe_humanfriendly_tables_format_smart_table,
+    wait_job,
+)
+from tinybird.tb.modules.datafile.common import PipeTypes, get_name_version
+from tinybird.tb.modules.exceptions import CLIPipeException
+from tinybird.tb.modules.feedback_manager import FeedbackManager
+
+
+@cli.group()
+@click.pass_context
+def materialization(ctx):
+    """Materialization commands"""
+
+
+@materialization.command(name="ls")
+@click.option("--match", default=None, help="Retrieve any resourcing matching the pattern. eg --match _test")
+@click.option(
+    "--format",
+    "format_",
+    type=click.Choice(["json"], case_sensitive=False),
+    default=None,
+    help="Force a type of the output",
+)
+@click.pass_context
+@coro
+async def materialization_ls(ctx: click.Context, match: str, format_: str):
+    """List materializations"""
+
+    client: TinyB = ctx.ensure_object(dict)["client"]
+    pipes = await client.pipes(dependencies=True, node_attrs="name,materialized", attrs="name,updated_at,endpoint,type")
+    materializations = [p for p in pipes if p.get("type") == PipeTypes.MATERIALIZED]
+    materializations = sorted(materializations, key=lambda p: p["updated_at"])
+    datasources = await client.datasources()
+    columns = ["name", "updated at", "nodes", "target datasource"]
+    table_human_readable = []
+    table_machine_readable = []
+    pattern = re.compile(match) if match else None
+    for t in materializations:
+        tk = get_name_version(t["name"])
+        if pattern and not pattern.search(tk["name"]):
+            continue
+        target_datasource_id = next((n["materialized"] for n in t["nodes"] if n.get("materialized")), None)
+        target_datasource = next((d for d in datasources if d["id"] == target_datasource_id), None)
+        target_datasource_name = target_datasource.get("name", "") if target_datasource else ""
+        table_human_readable.append((tk["name"], t["updated_at"][:-7], len(t["nodes"]), target_datasource_name))
+        table_machine_readable.append(
+            {
+                "name": tk["name"],
+                "updated at": t["updated_at"][:-7],
+                "nodes": len(t["nodes"]),
+                "target datasource": target_datasource_name,
+            }
+        )
+
+    if not format_:
+        click.echo(FeedbackManager.info_pipes())
+        echo_safe_humanfriendly_tables_format_smart_table(table_human_readable, column_names=columns)
+        click.echo("\n")
+    elif format_ == "json":
+        click.echo(json.dumps({"pipes": table_machine_readable}, indent=2))
+    else:
+        raise CLIPipeException(FeedbackManager.error_pipe_ls_type())
+
+
+@materialization.command(name="populate")
+@click.argument("pipe_name")
+@click.option("--node", type=str, help="Name of the materialized node.", default=None, required=False)
+@click.option(
+    "--sql-condition",
+    type=str,
+    default=None,
+    help="Populate with a SQL condition to be applied to the trigger Data Source of the Materialized View. For instance, `--sql-condition='date == toYYYYMM(now())'` it'll populate taking all the rows from the trigger Data Source which `date` is the current month. Use it together with --populate. --sql-condition is not taken into account if the --subset param is present. Including in the ``sql_condition`` any column present in the Data Source ``engine_sorting_key`` will make the populate job process less data.",
+)
+@click.option(
+    "--truncate", is_flag=True, default=False, help="Truncates the materialized Data Source before populating it."
+)
+@click.option(
+    "--unlink-on-populate-error",
+    is_flag=True,
+    default=False,
+    help="If the populate job fails the Materialized View is unlinked and new data won't be ingested in the Materialized View. First time a populate job fails, the Materialized View is always unlinked.",
+)
+@click.option(
+    "--wait",
+    is_flag=True,
+    default=False,
+    help="Waits for populate jobs to finish, showing a progress bar. Disabled by default.",
+)
+@click.pass_context
+@coro
+async def pipe_populate(
+    ctx: click.Context,
+    pipe_name: str,
+    node: str,
+    sql_condition: str,
+    truncate: bool,
+    unlink_on_populate_error: bool,
+    wait: bool,
+):
+    """Populate the result of a Materialized Node into the target Materialized View"""
+    cl = create_tb_client(ctx)
+
+    pipe = await cl.pipe(pipe_name)
+
+    if pipe["type"] != PipeTypes.MATERIALIZED:
+        raise CLIPipeException(FeedbackManager.error_pipe_not_materialized(pipe=pipe_name))
+
+    if not node:
+        materialized_ids = [pipe_node["id"] for pipe_node in pipe["nodes"] if pipe_node.get("materialized") is not None]
+
+        if not materialized_ids:
+            raise CLIPipeException(FeedbackManager.error_populate_no_materialized_in_pipe(pipe=pipe_name))
+
+        elif len(materialized_ids) > 1:
+            raise CLIPipeException(FeedbackManager.error_populate_several_materialized_in_pipe(pipe=pipe_name))
+
+        node = materialized_ids[0]
+
+    response = await cl.populate_node(
+        pipe_name,
+        node,
+        populate_condition=sql_condition,
+        truncate=truncate,
+        unlink_on_populate_error=unlink_on_populate_error,
+    )
+    if "job" not in response:
+        raise CLIPipeException(response)
+
+    job_id = response["job"]["id"]
+    job_url = response["job"]["job_url"]
+    if sql_condition:
+        click.echo(FeedbackManager.info_populate_condition_job_url(url=job_url, populate_condition=sql_condition))
+    else:
+        click.echo(FeedbackManager.info_populate_job_url(url=job_url))
+    if wait:
+        await wait_job(cl, job_id, job_url, "Populating")
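Note: this new module adds a `materialization` command group with `tb materialization ls` and `tb materialization populate`. The populate command resolves the pipe's single materialized node (when `--node` is omitted) and submits a populate job through the client. A minimal programmatic sketch of the same flow, assuming an already configured TinyB client (construction and authentication omitted); the pipe name below is hypothetical:

    import asyncio

    from tinybird.client import TinyB
    from tinybird.tb.modules.common import wait_job
    from tinybird.tb.modules.datafile.common import PipeTypes

    async def populate(client: TinyB, pipe_name: str) -> None:
        pipe = await client.pipe(pipe_name)
        if pipe["type"] != PipeTypes.MATERIALIZED:
            raise RuntimeError(f"{pipe_name} is not a materialized pipe")
        # Mirror the command's default: pick the single materialized node when --node is omitted.
        node = next(n["id"] for n in pipe["nodes"] if n.get("materialized") is not None)
        response = await client.populate_node(
            pipe_name, node, populate_condition=None, truncate=False, unlink_on_populate_error=False
        )
        await wait_job(client, response["job"]["id"], response["job"]["job_url"], "Populating")

    # Example (client setup and pipe name are illustrative):
    # asyncio.run(populate(my_client, "sales_by_day_mv"))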