tinybird-0.0.1.dev0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- tinybird/__cli__.py +8 -0
- tinybird/ch_utils/constants.py +244 -0
- tinybird/ch_utils/engine.py +855 -0
- tinybird/check_pypi.py +25 -0
- tinybird/client.py +1281 -0
- tinybird/config.py +117 -0
- tinybird/connectors.py +428 -0
- tinybird/context.py +23 -0
- tinybird/datafile.py +5589 -0
- tinybird/datatypes.py +434 -0
- tinybird/feedback_manager.py +1022 -0
- tinybird/git_settings.py +145 -0
- tinybird/sql.py +865 -0
- tinybird/sql_template.py +2343 -0
- tinybird/sql_template_fmt.py +281 -0
- tinybird/sql_toolset.py +350 -0
- tinybird/syncasync.py +682 -0
- tinybird/tb_cli.py +25 -0
- tinybird/tb_cli_modules/auth.py +252 -0
- tinybird/tb_cli_modules/branch.py +1043 -0
- tinybird/tb_cli_modules/cicd.py +434 -0
- tinybird/tb_cli_modules/cli.py +1571 -0
- tinybird/tb_cli_modules/common.py +2082 -0
- tinybird/tb_cli_modules/config.py +344 -0
- tinybird/tb_cli_modules/connection.py +803 -0
- tinybird/tb_cli_modules/datasource.py +900 -0
- tinybird/tb_cli_modules/exceptions.py +91 -0
- tinybird/tb_cli_modules/fmt.py +91 -0
- tinybird/tb_cli_modules/job.py +85 -0
- tinybird/tb_cli_modules/pipe.py +858 -0
- tinybird/tb_cli_modules/regions.py +9 -0
- tinybird/tb_cli_modules/tag.py +100 -0
- tinybird/tb_cli_modules/telemetry.py +310 -0
- tinybird/tb_cli_modules/test.py +107 -0
- tinybird/tb_cli_modules/tinyunit/tinyunit.py +340 -0
- tinybird/tb_cli_modules/tinyunit/tinyunit_lib.py +71 -0
- tinybird/tb_cli_modules/token.py +349 -0
- tinybird/tb_cli_modules/workspace.py +269 -0
- tinybird/tb_cli_modules/workspace_members.py +212 -0
- tinybird/tornado_template.py +1194 -0
- tinybird-0.0.1.dev0.dist-info/METADATA +2815 -0
- tinybird-0.0.1.dev0.dist-info/RECORD +45 -0
- tinybird-0.0.1.dev0.dist-info/WHEEL +5 -0
- tinybird-0.0.1.dev0.dist-info/entry_points.txt +2 -0
- tinybird-0.0.1.dev0.dist-info/top_level.txt +4 -0
tinybird/tb_cli_modules/pipe.py
@@ -0,0 +1,858 @@
# This is a command file for our CLI. Please keep it clean.
#
# - If it makes sense and only when strictly necessary, you can create utility functions in this file.
# - But please, **do not** interleave utility functions and command definitions.

import json
import os
import re
from pathlib import Path
from typing import Dict, List, Optional, Tuple

import click
import humanfriendly
from click import Context

import tinybird.context as context
from tinybird.client import AuthNoTokenException, DoesNotExistException, TinyB
from tinybird.config import DEFAULT_API_HOST, FeatureFlags
from tinybird.datafile import PipeNodeTypes, PipeTypes, folder_push, get_name_version, process_file, wait_job
from tinybird.feedback_manager import FeedbackManager
from tinybird.tb_cli_modules.branch import warn_if_in_live
from tinybird.tb_cli_modules.cli import cli
from tinybird.tb_cli_modules.common import coro, create_tb_client, echo_safe_humanfriendly_tables_format_smart_table
from tinybird.tb_cli_modules.exceptions import CLIPipeException


@cli.group()
@click.pass_context
def pipe(ctx):
    """Pipes commands"""


@pipe.group(name="copy")
@click.pass_context
def pipe_copy(ctx: Context) -> None:
    """Copy Pipe commands"""


@pipe.group(name="sink")
@click.pass_context
def pipe_sink(ctx: Context) -> None:
    """Sink Pipe commands"""


@pipe.command(
    name="generate",
    short_help="Generates a pipe file based on a sql query. Example: tb pipe generate my_pipe 'select * from existing_datasource'",
)
@click.argument("name")
@click.argument("query")
@click.option("--force", is_flag=True, default=False, help="Override existing files")
@click.pass_context
def generate_pipe(ctx: click.Context, name: str, query: str, force: bool):
    pipefile = f"""
NODE endpoint
DESCRIPTION >
    Generated from the command line
SQL >
    {query}

"""
    base = Path("endpoints")
    if not base.exists():
        base = Path()
    f = base / (f"{name}.pipe")
    if not f.exists() or force:
        with open(f"{f}", "w") as file:
            file.write(pipefile)
        click.echo(FeedbackManager.success_generated_pipe(file=f))
    else:
        raise CLIPipeException(
            FeedbackManager.error_exception(error=f"File {f} already exists, use --force to override")
        )


@pipe.command(name="stats")
@click.argument("pipes", nargs=-1)
@click.option(
    "--format",
    "format_",
    type=click.Choice(["json"], case_sensitive=False),
    default=None,
    help="Force a type of the output. To parse the output, keep in mind to use `tb --no-version-warning pipe stats` option.",
)
@click.pass_context
@coro
async def pipe_stats(ctx: click.Context, pipes: Tuple[str, ...], format_: str):
    """
    Print pipe stats for the last 7 days
    """
    client: TinyB = ctx.ensure_object(dict)["client"]
    all_pipes = await client.pipes()
    pipes_to_get_stats = []
    pipes_ids: Dict = {}

    if pipes:
        # We filter by the pipes we want to look for
        all_pipes = [pipe for pipe in all_pipes if pipe["name"] in pipes]

    for pipe in all_pipes:
        name_version = get_name_version(pipe["name"])
        if name_version["name"] in pipe["name"]:
            pipes_to_get_stats.append(f"'{pipe['id']}'")
            pipes_ids[pipe["id"]] = name_version

    if not pipes_to_get_stats:
        if format_ == "json":
            click.echo(json.dumps({"pipes": []}, indent=2))
        else:
            click.echo(FeedbackManager.info_no_pipes_stats())
        return

    sql = f"""
        SELECT
            pipe_id id,
            sumIf(view_count, date > now() - interval 7 day) requests,
            sumIf(error_count, date > now() - interval 7 day) errors,
            avgMergeIf(avg_duration_state, date > now() - interval 7 day) latency
        FROM tinybird.pipe_stats
        WHERE pipe_id in ({','.join(pipes_to_get_stats)})
        GROUP BY pipe_id
        ORDER BY requests DESC
        FORMAT JSON
    """

    res = await client.query(sql)

    if res and "error" in res:
        raise CLIPipeException(FeedbackManager.error_exception(error=str(res["error"])))

    columns = ["version", "name", "request count", "error count", "avg latency"]
    table_human_readable: List[Tuple] = []
    table_machine_readable: List[Dict] = []
    if res and "data" in res:
        for x in res["data"]:
            tk = pipes_ids[x["id"]]
            table_human_readable.append(
                (
                    tk["version"] if tk["version"] is not None else "",
                    tk["name"],
                    x["requests"],
                    x["errors"],
                    x["latency"],
                )
            )
            table_machine_readable.append(
                {
                    "version": tk["version"] if tk["version"] is not None else "",
                    "name": tk["name"],
                    "requests": x["requests"],
                    "errors": x["errors"],
                    "latency": x["latency"],
                }
            )

        table_human_readable.sort(key=lambda x: (x[1], x[0]))
        table_machine_readable.sort(key=lambda x: x["name"])

    if format_ == "json":
        click.echo(json.dumps({"pipes": table_machine_readable}, indent=2))
    else:
        echo_safe_humanfriendly_tables_format_smart_table(table_human_readable, column_names=columns)


@pipe.command(name="ls")
@click.option("--match", default=None, help="Retrieve any resourcing matching the pattern. eg --match _test")
@click.option(
    "--format",
    "format_",
    type=click.Choice(["json"], case_sensitive=False),
    default=None,
    help="Force a type of the output",
)
@click.pass_context
@coro
async def pipe_ls(ctx: Context, match: str, format_: str):
    """List pipes"""

    client: TinyB = ctx.ensure_object(dict)["client"]
    pipes = await client.pipes(dependencies=False, node_attrs="name", attrs="name,updated_at")
    pipes = sorted(pipes, key=lambda p: p["updated_at"])

    columns = ["version", "name", "published date", "nodes"]
    table_human_readable = []
    table_machine_readable = []
    pattern = re.compile(match) if match else None
    for t in pipes:
        tk = get_name_version(t["name"])
        if pattern and not pattern.search(tk["name"]):
            continue
        table_human_readable.append(
            (tk["version"] if tk["version"] is not None else "", tk["name"], t["updated_at"][:-7], len(t["nodes"]))
        )
        table_machine_readable.append(
            {
                "version": tk["version"] if tk["version"] is not None else "",
                "name": tk["name"],
                "published date": t["updated_at"][:-7],
                "nodes": len(t["nodes"]),
            }
        )

    if not format_:
        click.echo(FeedbackManager.info_pipes())
        echo_safe_humanfriendly_tables_format_smart_table(table_human_readable, column_names=columns)
        click.echo("\n")
    elif format_ == "json":
        click.echo(json.dumps({"pipes": table_machine_readable}, indent=2))
    else:
        raise CLIPipeException(FeedbackManager.error_pipe_ls_type())


@pipe.command(name="populate")
@click.argument("pipe_name")
@click.option("--node", type=str, help="Name of the materialized node.", default=None, required=False)
@click.option(
    "--sql-condition",
    type=str,
    default=None,
    help="Populate with a SQL condition to be applied to the trigger Data Source of the Materialized View. For instance, `--sql-condition='date == toYYYYMM(now())'` it'll populate taking all the rows from the trigger Data Source which `date` is the current month. Use it together with --populate. --sql-condition is not taken into account if the --subset param is present. Including in the ``sql_condition`` any column present in the Data Source ``engine_sorting_key`` will make the populate job process less data.",
)
@click.option(
    "--truncate", is_flag=True, default=False, help="Truncates the materialized Data Source before populating it."
)
@click.option(
    "--unlink-on-populate-error",
    is_flag=True,
    default=False,
    help="If the populate job fails the Materialized View is unlinked and new data won't be ingested in the Materialized View. First time a populate job fails, the Materialized View is always unlinked.",
)
@click.option(
    "--wait",
    is_flag=True,
    default=False,
    help="Waits for populate jobs to finish, showing a progress bar. Disabled by default.",
)
@click.pass_context
@coro
async def pipe_populate(
    ctx: click.Context,
    pipe_name: str,
    node: str,
    sql_condition: str,
    truncate: bool,
    unlink_on_populate_error: bool,
    wait: bool,
):
    """Populate the result of a Materialized Node into the target Materialized View"""
    cl = create_tb_client(ctx)

    pipe = await cl.pipe(pipe_name)

    if pipe["type"] != PipeTypes.MATERIALIZED:
        raise CLIPipeException(FeedbackManager.error_pipe_not_materialized(pipe=pipe_name))

    if not node:
        materialized_ids = [pipe_node["id"] for pipe_node in pipe["nodes"] if pipe_node.get("materialized") is not None]

        if not materialized_ids:
            raise CLIPipeException(FeedbackManager.error_populate_no_materialized_in_pipe(pipe=pipe_name))

        elif len(materialized_ids) > 1:
            raise CLIPipeException(FeedbackManager.error_populate_several_materialized_in_pipe(pipe=pipe_name))

        node = materialized_ids[0]

    response = await cl.populate_node(
        pipe_name,
        node,
        populate_condition=sql_condition,
        truncate=truncate,
        unlink_on_populate_error=unlink_on_populate_error,
    )
    if "job" not in response:
        raise CLIPipeException(response)

    job_id = response["job"]["id"]
    job_url = response["job"]["job_url"]
    if sql_condition:
        click.echo(FeedbackManager.info_populate_condition_job_url(url=job_url, populate_condition=sql_condition))
    else:
        click.echo(FeedbackManager.info_populate_job_url(url=job_url))
    if wait:
        await wait_job(cl, job_id, job_url, "Populating")


@pipe.command(name="unlink")
@click.argument("pipe_name_or_id")
@click.argument("node_uid", default=None, required=False)
@click.pass_context
@coro
async def pipe_unlink_output_node(
    ctx: click.Context,
    pipe_name_or_id: str,
    node_uid: Optional[str] = None,
):
    """Unlink the output of a pipe. Works for Materialized Views, Copy Pipes, and Sinks."""
    client: TinyB = ctx.ensure_object(dict)["client"]

    try:
        pipe = await client.pipe(pipe_name_or_id)

        if pipe["type"] not in [PipeTypes.MATERIALIZED, PipeTypes.COPY, PipeTypes.DATA_SINK]:
            raise CLIPipeException(FeedbackManager.error_unlinking_pipe_not_linked(pipe=pipe_name_or_id))

        if pipe["type"] == PipeTypes.MATERIALIZED:
            click.echo(FeedbackManager.info_unlinking_materialized_pipe(pipe=pipe["name"]))

            if not node_uid:
                for node in pipe["nodes"]:
                    if "materialized" in node and node["materialized"] is not None:
                        node_uid = node["id"]
                        break

            if not node_uid:
                raise CLIPipeException(FeedbackManager.error_unlinking_pipe_not_linked(pipe=pipe_name_or_id))
            else:
                await client.pipe_unlink_materialized(pipe["name"], node_uid)
                click.echo(FeedbackManager.success_pipe_unlinked(pipe=pipe["name"]))

        if pipe["type"] == PipeTypes.COPY:
            click.echo(FeedbackManager.info_unlinking_copy_pipe(pipe=pipe["name"]))

            if not node_uid:
                for node in pipe["nodes"]:
                    if node["node_type"] == "copy":
                        node_uid = node["id"]
                        break

            if not node_uid:
                raise CLIPipeException(FeedbackManager.error_unlinking_pipe_not_linked(pipe=pipe_name_or_id))
            else:
                await client.pipe_remove_copy(pipe["name"], node_uid)
                click.echo(FeedbackManager.success_pipe_unlinked(pipe=pipe["name"]))

        if pipe["type"] == PipeTypes.DATA_SINK:
            click.echo(FeedbackManager.info_unlinking_sink_pipe(pipe=pipe["name"]))

            if not node_uid:
                for node in pipe["nodes"]:
                    if node["node_type"] == "sink":
                        node_uid = node["id"]
                        break

            if not node_uid:
                raise CLIPipeException(FeedbackManager.error_unlinking_pipe_not_linked(pipe=pipe_name_or_id))
            else:
                await client.pipe_remove_sink(pipe["name"], node_uid)
                click.echo(FeedbackManager.success_pipe_unlinked(pipe=pipe["name"]))

        if pipe["type"] == PipeTypes.STREAM:
            click.echo(FeedbackManager.info_unlinking_stream_pipe(pipe=pipe["name"]))
            node_uid = next((node["id"] for node in pipe["nodes"] if node["node_type"] == PipeNodeTypes.STREAM), None)

            if not node_uid:
                raise CLIPipeException(FeedbackManager.error_unlinking_pipe_not_linked(pipe=pipe_name_or_id))
            else:
                await client.pipe_remove_stream(pipe["name"], node_uid)
                click.echo(FeedbackManager.success_pipe_unlinked(pipe=pipe["name"]))

    except AuthNoTokenException:
        raise
    except Exception as e:
        raise CLIPipeException(FeedbackManager.error_exception(error=e))


@pipe.command(name="append")
@click.argument("pipe_name_or_uid")
@click.argument("sql")
@click.pass_context
@coro
async def pipe_append_node(
    ctx: click.Context,
    pipe_name_or_uid: str,
    sql: str,
):
    """Append a node to a pipe"""

    client = ctx.ensure_object(dict)["client"]
    try:
        res = await client.pipe_append_node(pipe_name_or_uid, sql)
        click.echo(
            FeedbackManager.success_node_changed(
                pipe_name_or_uid=pipe_name_or_uid, node_name=res["name"], node_id=res["id"]
            )
        )
    except DoesNotExistException:
        raise CLIPipeException(FeedbackManager.error_pipe_does_not_exist(pipe=pipe_name_or_uid))


async def common_pipe_publish_node(ctx: click.Context, pipe_name_or_id: str, node_uid: Optional[str] = None):
    """Change the published node of a pipe"""
    client: TinyB = ctx.ensure_object(dict)["client"]
    host = ctx.ensure_object(dict)["config"].get("host", DEFAULT_API_HOST)

    try:
        pipe = await client.pipe(pipe_name_or_id)
        if not node_uid:
            node = pipe["nodes"][-1]["name"]
            click.echo(FeedbackManager.info_using_node(node=node))
        else:
            node = node_uid

        await client.pipe_set_endpoint(pipe_name_or_id, node)
        click.echo(FeedbackManager.success_node_published(pipe=pipe_name_or_id, host=host))
    except AuthNoTokenException:
        raise
    except DoesNotExistException:
        raise CLIPipeException(FeedbackManager.error_pipe_does_not_exist(pipe=pipe_name_or_id))
    except Exception as e:
        raise CLIPipeException(FeedbackManager.error_exception(error=e))


@pipe.command(name="publish")
@click.argument("pipe_name_or_id")
@click.argument("node_uid", default=None, required=False)
@click.pass_context
@coro
async def pipe_publish_node(
    ctx: click.Context,
    pipe_name_or_id: str,
    node_uid: Optional[str] = None,
):
    """Change the published node of a pipe"""

    await common_pipe_publish_node(ctx, pipe_name_or_id, node_uid)


@pipe.command(name="unpublish")
@click.argument("pipe_name_or_id")
@click.argument("node_uid", default=None, required=False)
@click.pass_context
@coro
async def pipe_unpublish_node(
    ctx: click.Context,
    pipe_name_or_id: str,
    node_uid: Optional[str] = None,
):
    """Unpublish the endpoint of a pipe"""
    client: TinyB = ctx.ensure_object(dict)["client"]
    host = ctx.ensure_object(dict)["config"].get("host", DEFAULT_API_HOST)

    try:
        pipe = await client.pipe(pipe_name_or_id)

        if not pipe["endpoint"]:
            raise CLIPipeException(FeedbackManager.error_remove_no_endpoint())

        if not node_uid:
            node = pipe["endpoint"]
            click.echo(FeedbackManager.info_using_node(node=node))
        else:
            node = node_uid

        await client.pipe_remove_endpoint(pipe_name_or_id, node)
        click.echo(FeedbackManager.success_node_unpublished(pipe=pipe_name_or_id, host=host))
    except AuthNoTokenException:
        raise
    except DoesNotExistException:
        raise CLIPipeException(FeedbackManager.error_pipe_does_not_exist(pipe=pipe_name_or_id))
    except Exception as e:
        raise CLIPipeException(FeedbackManager.error_exception(error=e))


@pipe.command(name="set_endpoint")
@click.argument("pipe_name_or_id")
@click.argument("node_uid", default=None, required=False)
@click.pass_context
@coro
async def pipe_published_node(
    ctx: click.Context,
    pipe_name_or_id: str,
    node_uid: Optional[str] = None,
    no_live_warning: bool = False,
):
    """Same as 'publish', change the published node of a pipe"""

    await common_pipe_publish_node(ctx, pipe_name_or_id, node_uid)


@pipe.command(name="rm")
@click.argument("pipe_name_or_id")
@click.option("--yes", is_flag=True, default=False, help="Do not ask for confirmation")
@click.pass_context
@coro
async def pipe_delete(ctx: click.Context, pipe_name_or_id: str, yes: bool):
    """Delete a pipe. pipe_name_or_id can be either a Pipe name or id in the Workspace or a local path to a .pipe file"""

    client: TinyB = ctx.ensure_object(dict)["client"]

    file_path = pipe_name_or_id
    if os.path.exists(file_path):
        result = await process_file(file_path, client)
        pipe_name_or_id = result[0]["name"]

    semver: str = ctx.ensure_object(dict)["config"]["semver"]
    await warn_if_in_live(semver)

    if yes or click.confirm(FeedbackManager.warning_confirm_delete_pipe(pipe=pipe_name_or_id)):
        try:
            await client.pipe_delete(pipe_name_or_id)
        except DoesNotExistException:
            raise CLIPipeException(FeedbackManager.error_pipe_does_not_exist(pipe=pipe_name_or_id))

        click.echo(FeedbackManager.success_delete_pipe(pipe=pipe_name_or_id))


@pipe.command(name="token_read")
@click.argument("pipe_name")
@click.pass_context
@coro
async def pipe_token_read(ctx: click.Context, pipe_name: str):
    """Retrieve a token to read a pipe"""
    client: TinyB = ctx.ensure_object(dict)["client"]

    try:
        await client.pipe_file(pipe_name)
    except DoesNotExistException:
        raise CLIPipeException(FeedbackManager.error_pipe_does_not_exist(pipe=pipe_name))

    tokens = await client.tokens()
    token = None

    for t in tokens:
        for scope in t["scopes"]:
            if scope["type"] == "PIPES:READ" and scope["resource"] == pipe_name:
                token = t["token"]
    if token:
        click.echo(token)
    else:
        click.echo(FeedbackManager.warning_token_pipe(pipe=pipe_name))


@pipe.command(
    name="data",
    context_settings=dict(
        allow_extra_args=True,
        ignore_unknown_options=True,
    ),
)
@click.argument("pipe")
@click.option("--query", default=None, help="Run SQL over pipe results")
@click.option(
    "--format", "format_", type=click.Choice(["json", "csv"], case_sensitive=False), help="Return format (CSV, JSON)"
)
@click.pass_context
@coro
async def print_pipe(ctx: Context, pipe: str, query: str, format_: str):
    """Print data returned by a pipe

    Syntax: tb pipe data <pipe_name> --param_name value --param2_name value2 ...
    """

    client: TinyB = ctx.ensure_object(dict)["client"]
    params = {ctx.args[i][2:]: ctx.args[i + 1] for i in range(0, len(ctx.args), 2)}
    req_format = "json" if not format_ else format_.lower()
    try:
        res = await client.pipe_data(pipe, format=req_format, sql=query, params=params)
    except AuthNoTokenException:
        raise
    except Exception as e:
        raise CLIPipeException(FeedbackManager.error_exception(error=str(e)))

    if not format_:
        stats = res["statistics"]
        seconds = stats["elapsed"]
        rows_read = humanfriendly.format_number(stats["rows_read"])
        bytes_read = humanfriendly.format_size(stats["bytes_read"])

        click.echo(FeedbackManager.success_print_pipe(pipe=pipe))
        click.echo(FeedbackManager.info_query_stats(seconds=seconds, rows=rows_read, bytes=bytes_read))

        if not res["data"]:
            click.echo(FeedbackManager.info_no_rows())
        else:
            echo_safe_humanfriendly_tables_format_smart_table(
                data=[d.values() for d in res["data"]], column_names=res["data"][0].keys()
            )
        click.echo("\n")
    elif req_format == "json":
        click.echo(json.dumps(res))
    else:
        click.echo(res)


@pipe.command(name="regression-test", short_help="Run regression tests using last requests")
@click.option(
    "--debug",
    is_flag=True,
    default=False,
    help="Prints internal representation, can be combined with any command to get more information.",
)
@click.option("--only-response-times", is_flag=True, default=False, help="Checks only response times")
@click.argument("filenames", type=click.Path(exists=True), nargs=-1, default=None)
@click.option("--workspace_map", nargs=2, type=str, multiple=True)
@click.option(
    "--workspace",
    nargs=2,
    type=str,
    multiple=True,
    help="add a workspace path to the list of external workspaces, usage: --workspace name path/to/folder",
)
@click.option(
    "--no-versions",
    is_flag=True,
    default=False,
    help="when set, resource dependency versions are not used, it pushes the dependencies as-is",
)
@click.option(
    "-l", "--limit", type=click.IntRange(0, 100), default=0, required=False, help="Number of requests to validate"
)
@click.option(
    "--sample-by-params",
    type=click.IntRange(1, 100),
    default=1,
    required=False,
    help="When set, we will aggregate the pipe_stats_rt requests by extractURLParameterNames(assumeNotNull(url)) and for each combination we will take a sample of N requests",
)
@click.option(
    "-m",
    "--match",
    multiple=True,
    required=False,
    help="Filter the checker requests by specific parameter. You can pass multiple parameters -m foo -m bar",
)
@click.option(
    "-ff", "--failfast", is_flag=True, default=False, help="When set, the checker will exit as soon one test fails"
)
@click.option(
    "--ignore-order", is_flag=True, default=False, help="When set, the checker will ignore the order of list properties"
)
@click.option(
    "--validate-processed-bytes",
    is_flag=True,
    default=False,
    help="When set, the checker will validate that the new version doesn't process more than 25% than the current version",
)
@click.option(
    "--check-requests-from-main",
    is_flag=True,
    default=False,
    help="When set, the checker will get Main Workspace requests",
    hidden=True,
)
@click.option(
    "--relative-change",
    type=float,
    default=0.01,
    help="When set, the checker will validate the new version has less than this distance with the current version",
)
@click.pass_context
@coro
async def regression_test(
    ctx: click.Context,
    filenames: Optional[List[str]],
    debug: bool,
    only_response_times: bool,
    workspace_map,
    workspace: str,
    no_versions: bool,
    limit: int,
    sample_by_params: int,
    match: List[str],
    failfast: bool,
    ignore_order: bool,
    validate_processed_bytes: bool,
    check_requests_from_main: bool,
    relative_change: float,
):
    """
    Run regression tests on Tinybird
    """

    ignore_sql_errors = FeatureFlags.ignore_sql_errors()

    context.disable_template_security_validation.set(True)
    await folder_push(
        create_tb_client(ctx),
        filenames,
        dry_run=False,
        check=True,
        push_deps=False,
        debug=debug,
        force=False,
        populate=False,
        upload_fixtures=False,
        wait=False,
        ignore_sql_errors=ignore_sql_errors,
        skip_confirmation=False,
        only_response_times=only_response_times,
        workspace_map=dict(workspace_map),
        workspace_lib_paths=workspace,
        no_versions=no_versions,
        run_tests=True,
        tests_to_run=limit,
        tests_relative_change=relative_change,
        tests_sample_by_params=sample_by_params,
        tests_filter_by=match,
        tests_failfast=failfast,
        tests_ignore_order=ignore_order,
        tests_validate_processed_bytes=validate_processed_bytes,
        tests_check_requests_from_branch=check_requests_from_main,
    )
    return


@pipe_copy.command(name="run", short_help="Run an on-demand copy job")
@click.argument("pipe_name_or_id")
@click.option("--wait", is_flag=True, default=False, help="Wait for the copy job to finish")
@click.option(
    "--mode", type=click.Choice(["append", "replace"], case_sensitive=True), default=None, help="Copy strategy"
)
@click.option("--yes", is_flag=True, default=False, help="Do not ask for confirmation")
@click.option(
    "--param",
    nargs=1,
    type=str,
    multiple=True,
    default=None,
    help="Key and value of the params you want the Copy pipe to be called with. For example: tb pipe copy run <my_copy_pipe> --param foo=bar",
)
@click.pass_context
@coro
async def pipe_copy_run(
    ctx: click.Context, pipe_name_or_id: str, wait: bool, mode: str, yes: bool, param: Optional[Tuple[str]]
):
    """Run an on-demand copy job"""

    params = dict(key_value.split("=") for key_value in param) if param else {}

    if yes or click.confirm(FeedbackManager.warning_confirm_copy_pipe(pipe=pipe_name_or_id)):
        click.echo(FeedbackManager.info_copy_job_running(pipe=pipe_name_or_id))
        client: TinyB = ctx.ensure_object(dict)["client"]

        try:
            response = await client.pipe_run_copy(pipe_name_or_id, params, mode)

            job_id = response["job"]["job_id"]
            job_url = response["job"]["job_url"]
            target_datasource_id = response["tags"]["copy_target_datasource"]
            target_datasource = await client.get_datasource(target_datasource_id)
            target_datasource_name = target_datasource["name"]
            click.echo(
                FeedbackManager.success_copy_job_created(target_datasource=target_datasource_name, job_url=job_url)
            )

            if wait:
                await wait_job(client, job_id, job_url, "** Copying data")
                click.echo(FeedbackManager.success_data_copied_to_ds(target_datasource=target_datasource_name))

        except AuthNoTokenException:
            raise
        except Exception as e:
            raise CLIPipeException(FeedbackManager.error_creating_copy_job(error=e))


@pipe_copy.command(name="resume", short_help="Resume a paused copy pipe")
@click.argument("pipe_name_or_id")
@click.pass_context
@coro
async def pipe_copy_resume(ctx: click.Context, pipe_name_or_id: str):
    """Resume a paused copy pipe"""

    click.echo(FeedbackManager.info_copy_pipe_resuming(pipe=pipe_name_or_id))
    client: TinyB = ctx.ensure_object(dict)["client"]

    try:
        await client.pipe_resume_copy(pipe_name_or_id)
        click.echo(FeedbackManager.success_copy_pipe_resumed(pipe=pipe_name_or_id))

    except AuthNoTokenException:
        raise
    except Exception as e:
        raise CLIPipeException(FeedbackManager.error_resuming_copy_pipe(error=e))


@pipe_copy.command(name="pause", short_help="Pause a running copy pipe")
@click.argument("pipe_name_or_id")
@click.pass_context
@coro
async def pipe_copy_pause(ctx: click.Context, pipe_name_or_id: str):
    """Pause a running copy pipe"""

    click.echo(FeedbackManager.info_copy_pipe_pausing(pipe=pipe_name_or_id))
    client: TinyB = ctx.ensure_object(dict)["client"]

    try:
        await client.pipe_pause_copy(pipe_name_or_id)
        click.echo(FeedbackManager.success_copy_pipe_paused(pipe=pipe_name_or_id))

    except AuthNoTokenException:
        raise
    except Exception as e:
        raise CLIPipeException(FeedbackManager.error_pausing_copy_pipe(error=e))


@pipe_sink.command(name="run", short_help="Run an on-demand sink job")
@click.argument("pipe_name_or_id")
@click.option("--wait", is_flag=True, default=False, help="Wait for the sink job to finish")
@click.option("--yes", is_flag=True, default=False, help="Do not ask for confirmation")
@click.option("--dry-run", is_flag=True, default=False, help="Run the command without executing the sink job")
@click.option(
    "--param",
    nargs=1,
    type=str,
    multiple=True,
    default=None,
    help="Key and value of the params you want the Sink pipe to be called with. For example: tb pipe sink run <my_sink_pipe> --param foo=bar",
)
@click.pass_context
@coro
async def pipe_sink_run(
    ctx: click.Context, pipe_name_or_id: str, wait: bool, yes: bool, dry_run: bool, param: Optional[Tuple[str]]
):
    """Run an on-demand sink job"""

    params = dict(key_value.split("=") for key_value in param) if param else {}

    if dry_run or yes or click.confirm(FeedbackManager.warning_confirm_sink_job(pipe=pipe_name_or_id)):
        click.echo(FeedbackManager.info_sink_job_running(pipe=pipe_name_or_id))
        client: TinyB = ctx.ensure_object(dict)["client"]

        try:
            pipe = await client.pipe(pipe_name_or_id)
            connections = await client.get_connections()

            if (pipe.get("type", None) != "sink") or (not pipe.get("sink_node", None)):
                error_message = f"Pipe {pipe_name_or_id} is not published as a Sink pipe"
                raise Exception(FeedbackManager.error_running_on_demand_sink_job(error=error_message))

            current_sink = None
            for connection in connections:
                for sink in connection.get("sinks", []):
                    if sink.get("resource_id") == pipe["id"]:
                        current_sink = sink
                        break

            if not current_sink:
                click.echo(FeedbackManager.warning_sink_no_connection(pipe_name=pipe.get("name", "")))

            if dry_run:
                click.echo(FeedbackManager.info_dry_sink_run())
                return

            bucket_path = (current_sink or {}).get("settings", {}).get("bucket_path", "")
            response = await client.pipe_run_sink(pipe_name_or_id, params)
            job_id = response["job"]["id"]
            job_url = response["job"]["job_url"]
            click.echo(FeedbackManager.success_sink_job_created(bucket_path=bucket_path, job_url=job_url))

            if wait:
                await wait_job(client, job_id, job_url, "** Sinking data")
                click.echo(FeedbackManager.success_sink_job_finished(bucket_path=bucket_path))

        except AuthNoTokenException:
            raise
        except Exception as e:
            raise CLIPipeException(FeedbackManager.error_creating_sink_job(error=str(e)))
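For orientation, a few example invocations of the commands defined in this file, assembled from its own short_help texts, option help strings, and docstrings; the pipe names (my_pipe, my_copy_pipe, my_sink_pipe) are placeholders, not resources shipped with the package:

    tb pipe generate my_pipe 'select * from existing_datasource'
    tb pipe ls --format json
    tb pipe populate my_pipe --sql-condition "date == toYYYYMM(now())" --wait
    tb pipe data my_pipe --param_name value --format json
    tb pipe copy run my_copy_pipe --mode append --param foo=bar --wait
    tb pipe sink run my_sink_pipe --param foo=bar --dry-run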