tinybird 0.0.1.dev42__py3-none-any.whl → 0.0.1.dev44__py3-none-any.whl
- tinybird/client.py +1 -1
- tinybird/connectors.py +3 -3
- tinybird/feedback_manager.py +1 -1
- tinybird/prompts.py +31 -3
- tinybird/sql.py +1 -1
- tinybird/sql_template_fmt.py +1 -1
- tinybird/tb/__cli__.py +2 -2
- tinybird/tb/cli.py +1 -1
- tinybird/tb/modules/build.py +39 -21
- tinybird/tb/modules/cicd.py +2 -2
- tinybird/tb/modules/cli.py +8 -61
- tinybird/tb/modules/common.py +2 -1
- tinybird/tb/modules/copy.py +96 -5
- tinybird/tb/modules/create.py +105 -46
- tinybird/tb/modules/datafile/build.py +64 -247
- tinybird/tb/modules/datasource.py +1 -1
- tinybird/tb/modules/deployment.py +86 -61
- tinybird/tb/modules/endpoint.py +90 -3
- tinybird/tb/modules/llm_utils.py +2 -2
- tinybird/tb/modules/materialization.py +146 -0
- tinybird/tb/modules/mock.py +56 -16
- tinybird/tb/modules/pipe.py +2 -411
- tinybird/tb/modules/project.py +31 -1
- tinybird/tb/modules/test.py +72 -37
- tinybird/tb/modules/update.py +1 -1
- tinybird/tb/modules/watch.py +54 -5
- tinybird/tb_cli_modules/common.py +1 -1
- tinybird/tornado_template.py +2 -2
- {tinybird-0.0.1.dev42.dist-info → tinybird-0.0.1.dev44.dist-info}/METADATA +1 -1
- {tinybird-0.0.1.dev42.dist-info → tinybird-0.0.1.dev44.dist-info}/RECORD +33 -33
- tinybird/tb/modules/build_client.py +0 -199
- {tinybird-0.0.1.dev42.dist-info → tinybird-0.0.1.dev44.dist-info}/WHEEL +0 -0
- {tinybird-0.0.1.dev42.dist-info → tinybird-0.0.1.dev44.dist-info}/entry_points.txt +0 -0
- {tinybird-0.0.1.dev42.dist-info → tinybird-0.0.1.dev44.dist-info}/top_level.txt +0 -0
tinybird/tb/modules/datafile/build.py

@@ -15,14 +15,12 @@ from tinybird.client import TinyB
 from tinybird.sql import parse_table_structure, schema_to_sql_columns
 from tinybird.sql_template import get_used_tables_in_template, render_sql_template
 from tinybird.tb.modules.common import get_ca_pem_content
-from tinybird.tb.modules.datafile.
-from tinybird.tb.modules.datafile.build_datasource import is_datasource, new_ds
+from tinybird.tb.modules.datafile.build_datasource import is_datasource
 from tinybird.tb.modules.datafile.build_pipe import (
     get_target_materialized_data_source_name,
     is_endpoint,
     is_endpoint_with_no_dependencies,
     is_materialized,
-    new_pipe,
 )
 from tinybird.tb.modules.datafile.common import (
     DEFAULT_CRON_PERIOD,
@@ -44,95 +42,32 @@ from tinybird.tb.modules.datafile.exceptions import AlreadyExistsException, Incl
 from tinybird.tb.modules.datafile.parse_datasource import parse_datasource
 from tinybird.tb.modules.datafile.parse_pipe import parse_pipe
 from tinybird.tb.modules.feedback_manager import FeedbackManager
-from tinybird.tb.modules.local_common import get_tinybird_local_config
+from tinybird.tb.modules.local_common import get_tinybird_local_client
+from tinybird.tb.modules.project import Project


 async def folder_build(
-    tb_client: TinyB,
+    project: Project,
     filenames: Optional[List[str]] = None,
-    folder: str = ".",
-    ignore_sql_errors: bool = False,
     is_internal: bool = False,
     is_vendor: bool = False,
     current_ws: Optional[Dict[str, Any]] = None,
     local_ws: Optional[Dict[str, Any]] = None,
     watch: bool = False,
 ):
-    config = await get_tinybird_local_config(folder)
     build = True
     dry_run = False
     force = True
     only_changes = True
     debug = False
-    check = True
-    populate = False
-    populate_subset = None
-    populate_condition = None
-    tests_to_run = 0
-    override_datasource = False
-    skip_confirmation = True
-    wait = False
-    unlink_on_populate_error = False
-    only_response_times = False
     run_tests = False
     verbose = False
-    as_standard = False
     raise_on_exists = False
     fork_downstream = True
     fork = False
     release_created = False
-    tests_relative_change = 0.01
-    tests_sample_by_params = 0
-    tests_filter_by = None
-    tests_failfast = False
-    tests_ignore_order = False
-    tests_validate_processed_bytes = False
-    tests_check_requests_from_branch = False
-    vendor_paths = []
-
-    vendor_path = Path("vendor")
-    user_token = config.get_user_token()
-    user_client = deepcopy(tb_client)
-
-    if user_token:
-        user_client.token = user_token
-
-    vendor_workspaces = []
-
-    if vendor_path.exists() and not is_vendor and not watch:
-        user_workspaces = await user_client.user_workspaces()
-        for x in vendor_path.iterdir():
-            if x.is_dir() and x.name:
-                if user_token:
-                    try:
-                        ws_to_delete = next((ws for ws in user_workspaces["workspaces"] if ws["name"] == x.name), None)
-                        if ws_to_delete:
-                            await user_client.delete_workspace(ws_to_delete["id"], hard_delete_confirmation=x.name)
-                    except Exception:
-                        pass
-                vendor_ws = await user_client.create_workspace(x.name, template=None)
-                vendor_workspaces.append(vendor_ws)
-                vendor_paths.append((x.name, str(x)))
-
-    workspaces: List[Dict[str, Any]] = (await user_client.user_workspaces()).get("workspaces", [])
-
-    if not is_vendor:
-        local_workspace = await tb_client.workspace_info()
-        local_ws_id = local_workspace.get("id")
-        local_ws = next((ws for ws in workspaces if ws["id"] == local_ws_id), {})
-
-    current_ws = current_ws or local_ws
-
-    for vendor_ws in [ws for ws in workspaces if ws["name"] in [ws["name"] for ws in vendor_workspaces]]:
-        ws_client = deepcopy(tb_client)
-        ws_client.token = vendor_ws["token"]
-        shared_ws_path = Path(folder) / "vendor" / vendor_ws["name"]
-
-        if shared_ws_path.exists() and not is_vendor and not watch:
-            await folder_build(
-                ws_client, folder=shared_ws_path.as_posix(), is_vendor=True, current_ws=vendor_ws, local_ws=local_ws
-            )
-
+    folder = str(project.path)
+    tb_client = await get_tinybird_local_client(folder)
     datasources: List[Dict[str, Any]] = await tb_client.datasources()
     pipes: List[Dict[str, Any]] = await tb_client.pipes(dependencies=True)

@@ -150,7 +85,7 @@ async def folder_build(
         dir_path=folder,
         process_dependencies=True,
         skip_connectors=True,
-        vendor_paths=vendor_paths,
+        vendor_paths=[],
         current_ws=current_ws,
         only_changes=only_changes,
         fork_downstream=fork_downstream,
@@ -191,63 +126,14 @@ async def folder_build(
             resource = to_run[name]["resource"]
             if not dry_run:
                 if should_push_file(name, remote_resource_names, force, run_tests):
-
-                    try:
-                        await exec_file(
-                            to_run[name],
-                            tb_client,
-                            force,
-                            check,
-                            debug and verbose,
-                            populate,
-                            populate_subset,
-                            populate_condition,
-                            unlink_on_populate_error,
-                            wait,
-                            user_token,
-                            override_datasource,
-                            ignore_sql_errors,
-                            skip_confirmation,
-                            only_response_times,
-                            run_tests,
-                            as_standard,
-                            tests_to_run,
-                            tests_relative_change,
-                            tests_sample_by_params,
-                            tests_filter_by,
-                            tests_failfast,
-                            tests_ignore_order,
-                            tests_validate_processed_bytes,
-                            tests_check_requests_from_branch,
-                            current_ws,
-                            local_ws,
-                            fork_downstream,
-                            fork,
-                            build,
-                            is_vendor,
-                        )
-                        if not run_tests:
-                            click.echo(
-                                FeedbackManager.success_create(
-                                    name=(
-                                        name
-                                        if to_run[name]["version"] is None
-                                        else f'{name}__v{to_run[name]["version"]}'
-                                    )
-                                )
-                            )
-                    except Exception as e:
-                        filename = to_run[name]["filename"]
-                        exception = FeedbackManager.error_push_file_exception(
-                            filename=filename,
-                            error=e,
-                        )
-                        raise click.ClickException(exception)
+                    filename = to_run[name]["filename"]
+                    filename = filename.replace(f"{folder}/", "")
+                    click.echo(FeedbackManager.info(message=f"✓ {filename}"))
                 else:
                     if raise_on_exists:
                         raise AlreadyExistsException(
                             FeedbackManager.warning_name_already_exists(
-                                name=name if to_run[name]["version"] is None else f'{name}__v{to_run[name]["version"]}'
+                                name=name if to_run[name]["version"] is None else f"{name}__v{to_run[name]['version']}"
                             )
                         )
                     else:
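
The success path above now just prints each built datafile, trimming the project folder prefix with a plain string replace. A minimal standalone sketch of that behavior (the paths are made up for illustration; `os.path.relpath` is shown only as the usual alternative, not what the diff uses):

```python
import os

folder = "/work/project"
filename = "/work/project/datasources/events.datasource"

# str.replace drops every occurrence of the "<folder>/" prefix...
assert filename.replace(f"{folder}/", "") == "datasources/events.datasource"
# ...while os.path.relpath computes a relative path even when the file
# does not sit directly under folder.
assert os.path.relpath(filename, folder) == "datasources/events.datasource"
```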
@@ -262,7 +148,7 @@ async def folder_build(
                                name=(
                                    name
                                    if to_run[name]["version"] is None
-                                    else f'{name}__v{to_run[name]["version"]}'
+                                    else f"{name}__v{to_run[name]['version']}"
                                )
                            )
                        )
@@ -304,19 +190,6 @@ async def folder_build(
     # We need to deploy the datasources from left to right as some datasources might have MV that depend on the column types of previous datasources. Ex: `test_change_column_type_landing_datasource` test
     groups = [group for group in toposort(dependencies_graph_fork_downstream)]

-    for group in groups:
-        for name in group:
-            is_vendor = resources_to_run_fork_downstream.get(name, {}).get("filename", "").startswith("vendor/")
-            if not is_vendor:
-                try:
-                    await tb_client.datasource_delete(name, force=True)
-                except Exception:
-                    pass
-                try:
-                    await tb_client.pipe_delete(name)
-                except Exception:
-                    pass
-
     groups.reverse()
     for group in groups:
         for name in group:
@@ -433,88 +306,6 @@ async def name_matches_existing_resource(resource: str, name: str, tb_client: Ti
     return False


-async def exec_file(
-    r: Dict[str, Any],
-    tb_client: TinyB,
-    force: bool,
-    check: bool,
-    debug: bool,
-    populate: bool,
-    populate_subset,
-    populate_condition,
-    unlink_on_populate_error,
-    wait_populate,
-    user_token: Optional[str],
-    override_datasource: bool = False,
-    ignore_sql_errors: bool = False,
-    skip_confirmation: bool = False,
-    only_response_times: bool = False,
-    run_tests=False,
-    as_standard=False,
-    tests_to_run: int = 0,
-    tests_relative_change: float = 0.01,
-    tests_to_sample_by_params: int = 0,
-    tests_filter_by: Optional[List[str]] = None,
-    tests_failfast: bool = False,
-    tests_ignore_order: bool = False,
-    tests_validate_processed_bytes: bool = False,
-    tests_check_requests_from_branch: bool = False,
-    current_ws: Optional[Dict[str, Any]] = None,
-    local_ws: Optional[Dict[str, Any]] = None,
-    fork_downstream: Optional[bool] = False,
-    fork: Optional[bool] = False,
-    build: Optional[bool] = False,
-    is_vendor: Optional[bool] = False,
-):
-    if debug:
-        click.echo(FeedbackManager.debug_running_file(file=pp.pformat(r)))
-    if r["resource"] == "pipes":
-        await new_pipe(
-            r,
-            tb_client,
-            force,
-            check,
-            populate,
-            populate_subset,
-            populate_condition,
-            unlink_on_populate_error,
-            wait_populate,
-            ignore_sql_errors=ignore_sql_errors,
-            only_response_times=only_response_times,
-            run_tests=run_tests,
-            as_standard=as_standard,
-            tests_to_run=tests_to_run,
-            tests_relative_change=tests_relative_change,
-            tests_to_sample_by_params=tests_to_sample_by_params,
-            tests_filter_by=tests_filter_by,
-            tests_failfast=tests_failfast,
-            tests_ignore_order=tests_ignore_order,
-            tests_validate_processed_bytes=tests_validate_processed_bytes,
-            override_datasource=override_datasource,
-            tests_check_requests_from_branch=tests_check_requests_from_branch,
-            fork_downstream=fork_downstream,
-            fork=fork,
-        )
-        await update_tags_in_resource(r, "pipe", tb_client)
-    elif r["resource"] == "datasources":
-        await new_ds(
-            r,
-            tb_client,
-            user_token,
-            force,
-            skip_confirmation=skip_confirmation,
-            current_ws=current_ws,
-            local_ws=local_ws,
-            fork_downstream=fork_downstream,
-            fork=fork,
-            build=build,
-            is_vendor=is_vendor,
-        )
-        await update_tags_in_resource(r, "datasource", tb_client)
-    else:
-        raise click.ClickException(FeedbackManager.error_unknown_resource(resource=r["resource"]))
-
-
 def get_remote_resource_name_without_version(remote_resource_name: str) -> str:
     """
     >>> get_remote_resource_name_without_version("r__datasource")
@@ -707,6 +498,7 @@ async def process(
 ):
     name, kind = filename.rsplit(".", 1)
     warnings = []
+    embedded_datasources = {} if embedded_datasources is None else embedded_datasources

     try:
         res = await process_file(
@@ -806,31 +598,45 @@ async def get_processed(
     to_run: Optional[Dict[str, Any]] = None,
     vendor_paths: Optional[List[Tuple[str, str]]] = None,
     processed: Optional[Set[str]] = None,
-    tb_client: TinyB = None,
+    tb_client: Optional[TinyB] = None,
     skip_connectors: bool = False,
     current_ws: Optional[Dict[str, Any]] = None,
     fork_downstream: Optional[bool] = False,
     is_internal: Optional[bool] = False,
     dir_path: Optional[str] = None,
-    embedded_datasources: Optional[Dict[str, Any]] = None,
+    embedded_datasources: Optional[Dict[str, Dict[str, Any]]] = None,
 ):
-    if deps is None:
-        deps = []
-    if dep_map is None:
-        dep_map = {}
-    if to_run is None:
-        to_run = {}
-    if processed is None:
-        processed = set()
+    # Initialize with proper type annotations
+    deps_list: List[str] = [] if deps is None else deps
+    dep_map_dict: Dict[str, Any] = {} if dep_map is None else dep_map
+    to_run_dict: Dict[str, Any] = {} if to_run is None else to_run
+    processed_set: Set[str] = set() if processed is None else processed
+    embedded_ds: Dict[str, Dict[str, Any]] = {} if embedded_datasources is None else embedded_datasources

     for filename in filenames:
         # just process changed filenames (tb deploy and --only-changes)
-        if changed:
+        if changed is not None:
             resource = Path(filename).resolve().stem
             if resource in changed and (not changed[resource] or changed[resource] in ["shared", "remote"]):
                 continue
         if os.path.isdir(filename):
-            await get_processed(
+            await get_processed(
+                filenames=get_project_filenames(filename),
+                changed=changed,
+                verbose=verbose,
+                deps=deps_list,
+                dep_map=dep_map_dict,
+                to_run=to_run_dict,
+                vendor_paths=vendor_paths,
+                processed=processed_set,
+                tb_client=tb_client,
+                skip_connectors=skip_connectors,
+                current_ws=current_ws,
+                fork_downstream=fork_downstream,
+                is_internal=is_internal,
+                dir_path=dir_path,
+                embedded_datasources=embedded_ds,
+            )
         else:
             if verbose:
                 click.echo(FeedbackManager.info_processing_file(filename=filename))
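
The rewritten `get_processed` above swaps truthiness checks for explicit `is not None` tests and uses the `[] if deps is None else deps` idiom for its accumulators. A minimal sketch of why `Optional` parameters defaulting to `None` are preferred over mutable default arguments (function names here are illustrative, not from the package):

```python
from typing import List, Optional

def broken(items: List[str] = []) -> List[str]:
    # The default list is created once at definition time and shared
    # across every call that relies on it.
    items.append("x")
    return items

def fixed(items: Optional[List[str]] = None) -> List[str]:
    # The None default is replaced with a fresh list per call, while
    # callers can still pass their own accumulator to share state.
    items = [] if items is None else items
    items.append("x")
    return items

assert broken() == ["x"]
assert broken() == ["x", "x"]  # state leaked between calls
assert fixed() == ["x"]
assert fixed() == ["x"]  # fresh list each call
```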
@@ -838,12 +644,15 @@ async def get_processed(
             if ".incl" in filename:
                 click.echo(FeedbackManager.warning_skipping_include_file(file=filename))

+            if tb_client is None:
+                raise ValueError("tb_client cannot be None")
+
             name, warnings = await process(
                 filename=filename,
                 tb_client=tb_client,
-                deps=deps,
-                dep_map=dep_map,
-                to_run=to_run,
+                deps=deps_list,
+                dep_map=dep_map_dict,
+                to_run=to_run_dict,
                 vendor_paths=vendor_paths,
                 skip_connectors=skip_connectors,
                 current_ws=current_ws,
@@ -852,9 +661,9 @@ async def get_processed(
                 is_internal=is_internal,
                 dir_path=dir_path,
                 verbose=verbose,
-                embedded_datasources=embedded_datasources,
+                embedded_datasources=embedded_ds,
             )
-            processed.add(name)
+            processed_set.add(name)

             if verbose:
                 if len(warnings) == 1:
@@ -890,7 +699,7 @@ async def build_graph(
     to_run: Dict[str, Any] = {}
     deps: List[str] = []
     dep_map: Dict[str, Any] = {}
-    embedded_datasources = {}
+    embedded_datasources: Dict[str, Dict[str, Any]] = {}

     # These dictionaries are used to store all the resources and there dependencies for the whole project
     # This is used for the downstream dependency graph
@@ -919,17 +728,18 @@ async def build_graph(
     all_dep_map = all_dependencies_graph.dep_map
     all_resources = all_dependencies_graph.to_run

-    processed = set()
+    processed: Set[str] = set()

     await get_processed(
         filenames=filenames,
-        tb_client=tb_client,
         changed=changed,
+        verbose=verbose,
         deps=deps,
         dep_map=dep_map,
         to_run=to_run,
         vendor_paths=vendor_paths,
         processed=processed,
+        tb_client=tb_client,
         skip_connectors=skip_connectors,
         current_ws=current_ws,
         fork_downstream=fork_downstream,
@@ -1198,18 +1008,22 @@ async def process_file(
         raise Exception(f"Invalid import schedule: '{cron}'. Valid values are: {valid_values}")

     if cron == ON_DEMAND_CRON:
+        if import_params is None:
+            import_params = {}
         import_params["import_schedule"] = ON_DEMAND_CRON_EXPECTED_BY_THE_API
+
     if cron == AUTO_CRON:
         period: int = DEFAULT_CRON_PERIOD

-        if current_ws:
+        if current_ws is not None:
             workspaces = (await tb_client.user_workspaces()).get("workspaces", [])
             workspace_rate_limits: Dict[str, Dict[str, int]] = next(
                 (w.get("rate_limits", {}) for w in workspaces if w["id"] == current_ws["id"]), {}
             )
-            period = workspace_rate_limits.get("api_datasources_create_append_replace", {}).get(
-                "period", DEFAULT_CRON_PERIOD
-            )
+            if workspace_rate_limits:
+                rate_limit_config = workspace_rate_limits.get("api_datasources_create_append_replace", {})
+                if rate_limit_config:
+                    period = rate_limit_config.get("period", DEFAULT_CRON_PERIOD)

     def seconds_to_cron_expression(seconds: int) -> str:
         minutes = seconds // 60
1028
|
def seconds_to_cron_expression(seconds: int) -> str:
|
|
1215
1029
|
minutes = seconds // 60
|
|
@@ -1223,10 +1037,13 @@ async def process_file(
|
|
|
1223
1037
|
return f"*/{minutes} * * * *"
|
|
1224
1038
|
return f"*/{seconds} * * * *"
|
|
1225
1039
|
|
|
1040
|
+
if import_params is None:
|
|
1041
|
+
import_params = {}
|
|
1226
1042
|
import_params["import_schedule"] = seconds_to_cron_expression(period)
|
|
1227
1043
|
|
|
1228
|
-
|
|
1229
|
-
|
|
1044
|
+
# Include all import_ parameters in the datasource params
|
|
1045
|
+
if import_params is not None:
|
|
1046
|
+
params.update(import_params)
|
|
1230
1047
|
|
|
1231
1048
|
# Substitute the import parameters with the ones used by the
|
|
1232
1049
|
# import API:
|
|
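
The cron handling above shows only fragments of `seconds_to_cron_expression`. A sketch of the conversion it implies, assuming a whole-minute guard between the two visible `return` statements (only `minutes = seconds // 60` and the two returns appear in the diff):

```python
def seconds_to_cron_expression(seconds: int) -> str:
    # Whole-minute periods become a */N-minutes cron expression; anything
    # else falls back to a */N-seconds expression. The guard below is an
    # assumption; the diff shows only the two return statements.
    minutes = seconds // 60
    if minutes and seconds % 60 == 0:
        return f"*/{minutes} * * * *"
    return f"*/{seconds} * * * *"

assert seconds_to_cron_expression(600) == "*/10 * * * *"
assert seconds_to_cron_expression(90) == "*/90 * * * *"
```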
tinybird/tb/modules/deployment.py

@@ -124,10 +124,95 @@ def deployment_group() -> None:
     help="Auto-promote the deployment. Only works if --wait is enabled. Disabled by default.",
 )
 @click.pass_context
-def create(ctx: click.Context, wait: bool, auto: bool) -> None:
+def deployment_create(ctx: click.Context, wait: bool, auto: bool) -> None:
     """
     Validate and deploy the project server side.
     """
+    create_deployment(ctx, wait, auto)
+
+
+@deployment_group.command(name="ls")
+@click.pass_context
+def deployment_ls(ctx: click.Context) -> None:
+    """
+    List all the deployments you have in the project.
+    """
+    client = ctx.ensure_object(dict)["client"]
+
+    TINYBIRD_API_KEY = client.token
+    HEADERS = {"Authorization": f"Bearer {TINYBIRD_API_KEY}"}
+    TINYBIRD_API_URL = f"{client.host}/v1/deployments"
+
+    r = requests.get(TINYBIRD_API_URL, headers=HEADERS)
+    result = r.json()
+    logging.debug(json.dumps(result, indent=2))
+
+    status_map = {"data_ready": "Ready", "failed": "Failed"}
+    columns = ["ID", "Status", "Created at", "Live"]
+    table = []
+    for deployment in result.get("deployments"):
+        table.append(
+            [
+                deployment.get("id"),
+                status_map.get(deployment.get("status"), "In progress"),
+                datetime.fromisoformat(deployment.get("created_at")).strftime("%Y-%m-%d %H:%M:%S"),
+                deployment.get("live"),
+            ]
+        )
+
+    echo_safe_humanfriendly_tables_format_smart_table(table, column_names=columns)
+
+
+@deployment_group.command(name="promote")
+@click.pass_context
+def deployment_promote(ctx: click.Context) -> None:
+    """
+    Promote last deploy to ready and remove old one.
+    """
+    client = ctx.ensure_object(dict)["client"]
+
+    TINYBIRD_API_KEY = client.token
+    HEADERS = {"Authorization": f"Bearer {TINYBIRD_API_KEY}"}
+
+    promote_deployment(client.host, HEADERS)
+
+
+@deployment_group.command(name="rollback")
+@click.pass_context
+def deployment_rollback(ctx: click.Context) -> None:
+    """
+    Rollback to the previous deployment.
+    """
+    client = ctx.ensure_object(dict)["client"]
+
+    TINYBIRD_API_KEY = client.token
+    HEADERS = {"Authorization": f"Bearer {TINYBIRD_API_KEY}"}
+
+    rollback_deployment(client.host, HEADERS)
+
+
+@cli.command(name="deploy", hidden=True)
+@click.option(
+    "--wait/--no-wait",
+    is_flag=True,
+    default=False,
+    help="Wait for deploy to finish. Disabled by default.",
+)
+@click.option(
+    "--auto/--no-auto",
+    is_flag=True,
+    default=False,
+    help="Auto-promote the deployment. Only works if --wait is enabled. Disabled by default.",
+)
+@click.pass_context
+def deploy(ctx: click.Context, wait: bool, auto: bool) -> None:
+    """
+    Deploy the project.
+    """
+    create_deployment(ctx, wait, auto)
+
+
+def create_deployment(ctx: click.Context, wait: bool, auto: bool) -> None:
     # TODO: This code is duplicated in build_server.py
     # Should be refactored to be shared
     MULTIPART_BOUNDARY_DATA_PROJECT = "data_project://"
@@ -203,63 +288,3 @@ def create(ctx: click.Context, wait: bool, auto: bool) -> None:

     if auto:
         promote_deployment(client.host, HEADERS)
-
-
-@deployment_group.command(name="ls")
-@click.pass_context
-def deployment_ls(ctx: click.Context) -> None:
-    """
-    List all the deployments you have in the project.
-    """
-    client = ctx.ensure_object(dict)["client"]
-
-    TINYBIRD_API_KEY = client.token
-    HEADERS = {"Authorization": f"Bearer {TINYBIRD_API_KEY}"}
-    TINYBIRD_API_URL = f"{client.host}/v1/deployments"
-
-    r = requests.get(TINYBIRD_API_URL, headers=HEADERS)
-    result = r.json()
-    logging.debug(json.dumps(result, indent=2))
-
-    status_map = {"data_ready": "Ready", "failed": "Failed"}
-    columns = ["ID", "Status", "Created at", "Live"]
-    table = []
-    for deployment in result.get("deployments"):
-        table.append(
-            [
-                deployment.get("id"),
-                status_map.get(deployment.get("status"), "In progress"),
-                datetime.fromisoformat(deployment.get("created_at")).strftime("%Y-%m-%d %H:%M:%S"),
-                deployment.get("live"),
-            ]
-        )
-
-    echo_safe_humanfriendly_tables_format_smart_table(table, column_names=columns)
-
-
-@deployment_group.command(name="promote")
-@click.pass_context
-def deployment_promote(ctx: click.Context) -> None:
-    """
-    Promote last deploy to ready and remove old one.
-    """
-    client = ctx.ensure_object(dict)["client"]
-
-    TINYBIRD_API_KEY = client.token
-    HEADERS = {"Authorization": f"Bearer {TINYBIRD_API_KEY}"}
-
-    promote_deployment(client.host, HEADERS)
-
-
-@deployment_group.command(name="rollback")
-@click.pass_context
-def deployment_rollback(ctx: click.Context) -> None:
-    """
-    Rollback to the previous deployment.
-    """
-    client = ctx.ensure_object(dict)["client"]
-
-    TINYBIRD_API_KEY = client.token
-    HEADERS = {"Authorization": f"Bearer {TINYBIRD_API_KEY}"}
-
-    rollback_deployment(client.host, HEADERS)