tinybird 0.0.1.dev245__py3-none-any.whl → 0.0.1.dev247__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Note: this version of tinybird has been flagged as potentially problematic.
- tinybird/ch_utils/constants.py +2 -0
- tinybird/tb/__cli__.py +2 -2
- tinybird/tb/modules/agent/agent.py +136 -27
- tinybird/tb/modules/agent/models.py +6 -0
- tinybird/tb/modules/agent/prompts.py +71 -43
- tinybird/tb/modules/agent/tools/append.py +55 -0
- tinybird/tb/modules/agent/tools/build.py +20 -0
- tinybird/tb/modules/agent/tools/create_datafile.py +84 -5
- tinybird/tb/modules/agent/tools/deploy.py +45 -0
- tinybird/tb/modules/agent/tools/deploy_check.py +19 -0
- tinybird/tb/modules/agent/tools/mock.py +59 -0
- tinybird/tb/modules/agent/tools/plan.py +1 -1
- tinybird/tb/modules/agent/tools/read_fixture_data.py +28 -0
- tinybird/tb/modules/agent/utils.py +9 -2
- tinybird/tb/modules/build.py +4 -1
- tinybird/tb/modules/build_common.py +2 -3
- tinybird/tb/modules/cli.py +9 -1
- tinybird/tb/modules/create.py +1 -1
- tinybird/tb/modules/deployment.py +9 -381
- tinybird/tb/modules/deployment_common.py +413 -0
- tinybird/tb/modules/feedback_manager.py +8 -6
- tinybird/tb/modules/llm.py +1 -1
- tinybird/tb/modules/mock.py +3 -69
- tinybird/tb/modules/mock_common.py +71 -0
- tinybird/tb/modules/project.py +9 -0
- {tinybird-0.0.1.dev245.dist-info → tinybird-0.0.1.dev247.dist-info}/METADATA +1 -1
- {tinybird-0.0.1.dev245.dist-info → tinybird-0.0.1.dev247.dist-info}/RECORD +30 -22
- {tinybird-0.0.1.dev245.dist-info → tinybird-0.0.1.dev247.dist-info}/WHEEL +0 -0
- {tinybird-0.0.1.dev245.dist-info → tinybird-0.0.1.dev247.dist-info}/entry_points.txt +0 -0
- {tinybird-0.0.1.dev245.dist-info → tinybird-0.0.1.dev247.dist-info}/top_level.txt +0 -0
tinybird/tb/modules/deployment_common.py
ADDED

@@ -0,0 +1,413 @@
+import json
+import logging
+import sys
+import time
+from pathlib import Path
+from typing import Any, Dict, Optional, Tuple, Union
+
+import click
+import requests
+
+from tinybird.tb.client import TinyB
+from tinybird.tb.modules.common import (
+    echo_safe_humanfriendly_tables_format_smart_table,
+    get_display_cloud_host,
+    sys_exit,
+)
+from tinybird.tb.modules.feedback_manager import FeedbackManager, bcolors
+from tinybird.tb.modules.project import Project
+
+
+# TODO(eclbg): This should eventually end up in client.py, but we're not using it here yet.
+def api_fetch(url: str, headers: dict) -> dict:
+    r = requests.get(url, headers=headers)
+    if r.status_code == 200:
+        logging.debug(json.dumps(r.json(), indent=2))
+        return r.json()
+    # Try to parse and print the error from the response
+    try:
+        result = r.json()
+        error = result.get("error")
+        logging.debug(json.dumps(result, indent=2))
+        click.echo(FeedbackManager.error(message=f"Error: {error}"))
+        sys_exit("deployment_error", error)
+    except Exception:
+        message = "Error parsing response from API"
+        click.echo(FeedbackManager.error(message=message))
+        sys_exit("deployment_error", message)
+    return {}
+
+
+def api_post(
+    url: str,
+    headers: dict,
+    files: Optional[list] = None,
+    params: Optional[dict] = None,
+) -> dict:
+    r = requests.post(url, headers=headers, files=files, params=params)
+    if r.status_code < 300:
+        logging.debug(json.dumps(r.json(), indent=2))
+        return r.json()
+    # Try to parse and print the error from the response
+    try:
+        result = r.json()
+        logging.debug(json.dumps(result, indent=2))
+        error = result.get("error")
+        if error:
+            click.echo(FeedbackManager.error(message=f"Error: {error}"))
+            sys_exit("deployment_error", error)
+        return result
+    except Exception:
+        message = "Error parsing response from API"
+        click.echo(FeedbackManager.error(message=message))
+        sys_exit("deployment_error", message)
+    return {}
+
+
+# TODO(eclbg): This logic should be in the server, and there should be a dedicated endpoint for promoting a deployment
+# potato
+def promote_deployment(host: Optional[str], headers: dict, wait: bool) -> None:
+    TINYBIRD_API_URL = f"{host}/v1/deployments"
+    result = api_fetch(TINYBIRD_API_URL, headers)
+
+    deployments = result.get("deployments")
+    if not deployments:
+        message = "No deployments found"
+        click.echo(FeedbackManager.error(message=message))
+        sys_exit("deployment_error", message)
+        return
+
+    if len(deployments) < 2:
+        message = "Only one deployment found"
+        click.echo(FeedbackManager.error(message=message))
+        sys_exit("deployment_error", message)
+        return
+
+    last_deployment, candidate_deployment = deployments[0], deployments[1]
+
+    if candidate_deployment.get("status") != "data_ready":
+        click.echo(FeedbackManager.error(message="Current deployment is not ready"))
+        deploy_errors = candidate_deployment.get("errors", [])
+        for deploy_error in deploy_errors:
+            click.echo(FeedbackManager.error(message=f"* {deploy_error}"))
+        sys_exit("deployment_error", "Current deployment is not ready: " + str(deploy_errors))
+        return
+
+    if candidate_deployment.get("live"):
+        click.echo(FeedbackManager.error(message="Candidate deployment is already live"))
+    else:
+        TINYBIRD_API_URL = f"{host}/v1/deployments/{candidate_deployment.get('id')}/set-live"
+        result = api_post(TINYBIRD_API_URL, headers=headers)
+
+    click.echo(FeedbackManager.highlight(message="» Removing old deployment"))
+
+    TINYBIRD_API_URL = f"{host}/v1/deployments/{last_deployment.get('id')}"
+    r = requests.delete(TINYBIRD_API_URL, headers=headers)
+    result = r.json()
+    logging.debug(json.dumps(result, indent=2))
+    if result.get("error"):
+        click.echo(FeedbackManager.error(message=result.get("error")))
+        sys_exit("deployment_error", result.get("error", "Unknown error"))
+    click.echo(FeedbackManager.info(message="✓ Old deployment removed"))
+
+    click.echo(FeedbackManager.highlight(message="» Waiting for deployment to be promoted..."))
+
+    if wait:
+        while True:
+            TINYBIRD_API_URL = f"{host}/v1/deployments/{last_deployment.get('id')}"
+            result = api_fetch(TINYBIRD_API_URL, headers=headers)
+
+            last_deployment = result.get("deployment")
+            if last_deployment.get("status") == "deleted":
+                click.echo(FeedbackManager.success(message=f"✓ Deployment #{candidate_deployment.get('id')} is live!"))
+                break
+
+            time.sleep(5)
+        if last_deployment.get("id") == "0":
+            # This is the first deployment, so we prompt the user to ingest data
+            click.echo(
+                FeedbackManager.info(
+                    message="A deployment with no data is useless. Learn how to ingest at https://www.tinybird.co/docs/forward/get-data-in"
+                )
+            )
+
+
+# TODO(eclbg): This logic should be in the server, and there should be a dedicated endpoint for discarding a
+# deployment
+def discard_deployment(host: Optional[str], headers: dict, wait: bool) -> None:
+    TINYBIRD_API_URL = f"{host}/v1/deployments"
+    result = api_fetch(TINYBIRD_API_URL, headers=headers)
+
+    deployments = result.get("deployments")
+    if not deployments:
+        click.echo(FeedbackManager.error(message="No deployments found"))
+        return
+
+    if len(deployments) < 2:
+        click.echo(FeedbackManager.error(message="Only one deployment found"))
+        return
+
+    previous_deployment, current_deployment = deployments[0], deployments[1]
+
+    if previous_deployment.get("status") != "data_ready":
+        click.echo(FeedbackManager.error(message="Previous deployment is not ready"))
+        deploy_errors = previous_deployment.get("errors", [])
+        for deploy_error in deploy_errors:
+            click.echo(FeedbackManager.error(message=f"* {deploy_error}"))
+        return
+
+    if previous_deployment.get("live"):
+        click.echo(FeedbackManager.error(message="Previous deployment is already live"))
+    else:
+        click.echo(FeedbackManager.success(message="Promoting previous deployment"))
+
+        TINYBIRD_API_URL = f"{host}/v1/deployments/{previous_deployment.get('id')}/set-live"
+        result = api_post(TINYBIRD_API_URL, headers=headers)
+
+    click.echo(FeedbackManager.success(message="Removing current deployment"))
+
+    TINYBIRD_API_URL = f"{host}/v1/deployments/{current_deployment.get('id')}"
+    r = requests.delete(TINYBIRD_API_URL, headers=headers)
+    result = r.json()
+    logging.debug(json.dumps(result, indent=2))
+    if result.get("error"):
+        click.echo(FeedbackManager.error(message=result.get("error")))
+        sys_exit("deployment_error", result.get("error", "Unknown error"))
+
+    click.echo(FeedbackManager.success(message="Discard process successfully started"))
+
+    if wait:
+        while True:
+            TINYBIRD_API_URL = f"{host}/v1/deployments/{current_deployment.get('id')}"
+            result = api_fetch(TINYBIRD_API_URL, headers)
+
+            current_deployment = result.get("deployment")
+            if current_deployment.get("status") == "deleted":
+                click.echo(FeedbackManager.success(message="Discard process successfully completed"))
+                break
+            time.sleep(5)
+
+
+def create_deployment(
+    project: Project,
+    client: TinyB,
+    config: Dict[str, Any],
+    wait: bool,
+    auto: bool,
+    check: Optional[bool] = None,
+    allow_destructive_operations: Optional[bool] = None,
+) -> None:
+    # TODO: This code is duplicated in build_server.py
+    # Should be refactored to be shared
+    MULTIPART_BOUNDARY_DATA_PROJECT = "data_project://"
+    DATAFILE_TYPE_TO_CONTENT_TYPE = {
+        ".datasource": "text/plain",
+        ".pipe": "text/plain",
+        ".connection": "text/plain",
+    }
+
+    TINYBIRD_API_URL = f"{client.host}/v1/deploy"
+    TINYBIRD_API_KEY = client.token
+
+    if project.has_deeper_level():
+        click.echo(
+            FeedbackManager.warning(
+                message="\nYour project contains directories nested deeper than the default scan depth (max_depth=3). "
+                "Files in these deeper directories will not be processed. "
+                "To include all nested directories, run `tb --max-depth <depth> <cmd>` with a higher depth value."
+            )
+        )
+
+    files = [
+        ("context://", ("cli-version", "1.0.0", "text/plain")),
+    ]
+    for file_path in project.get_project_files():
+        relative_path = Path(file_path).relative_to(project.path).as_posix()
+        with open(file_path, "rb") as fd:
+            content_type = DATAFILE_TYPE_TO_CONTENT_TYPE.get(Path(file_path).suffix, "application/unknown")
+            files.append((MULTIPART_BOUNDARY_DATA_PROJECT, (relative_path, fd.read().decode("utf-8"), content_type)))
+
+    deployment = None
+    try:
+        HEADERS = {"Authorization": f"Bearer {TINYBIRD_API_KEY}"}
+        params = {}
+        if check:
+            click.echo(FeedbackManager.highlight(message="\n» Validating deployment...\n"))
+            params["check"] = "true"
+        if allow_destructive_operations:
+            params["allow_destructive_operations"] = "true"
+
+        result = api_post(TINYBIRD_API_URL, headers=HEADERS, files=files, params=params)
+
+        print_changes(result, project)
+
+        deployment = result.get("deployment", {})
+        feedback = deployment.get("feedback", [])
+        for f in feedback:
+            if f.get("level", "").upper() == "ERROR":
+                feedback_func = FeedbackManager.error
+                feedback_icon = ""
+            else:
+                feedback_func = FeedbackManager.warning
+                feedback_icon = "△ "
+            resource = f.get("resource")
+            resource_bit = f"{resource}: " if resource else ""
+            click.echo(feedback_func(message=f"{feedback_icon}{f.get('level')}: {resource_bit}{f.get('message')}"))
+
+        deploy_errors = deployment.get("errors")
+        for deploy_error in deploy_errors:
+            if deploy_error.get("filename", None):
+                click.echo(
+                    FeedbackManager.error(message=f"{deploy_error.get('filename')}\n\n{deploy_error.get('error')}")
+                )
+            else:
+                click.echo(FeedbackManager.error(message=f"{deploy_error.get('error')}"))
+            click.echo("")  # For spacing
+
+        status = result.get("result")
+        if check:
+            if status == "success":
+                click.echo(FeedbackManager.success(message="\n✓ Deployment is valid"))
+                sys.exit(0)
+            elif status == "no_changes":
+                sys.exit(0)
+
+            click.echo(FeedbackManager.error(message="\n✗ Deployment is not valid"))
+            sys_exit(
+                "deployment_error",
+                f"Deployment is not valid: {str(deployment.get('errors') + deployment.get('feedback', []))}",
+            )
+
+        status = result.get("result")
+        if status == "success":
+            host = get_display_cloud_host(client.host)
+            click.echo(
+                FeedbackManager.info(message="Deployment URL: ")
+                + f"{bcolors.UNDERLINE}{host}/{config.get('name')}/deployments/{deployment.get('id')}{bcolors.ENDC}"
+            )
+
+            if wait:
+                click.echo(FeedbackManager.info(message="\n* Deployment submitted"))
+            else:
+                click.echo(FeedbackManager.success(message="\n✓ Deployment submitted successfully"))
+        elif status == "no_changes":
+            click.echo(FeedbackManager.warning(message="△ Not deploying. No changes."))
+            sys.exit(0)
+        elif status == "failed":
+            click.echo(FeedbackManager.error(message="Deployment failed"))
+            sys_exit(
+                "deployment_error",
+                f"Deployment failed. Errors: {str(deployment.get('errors') + deployment.get('feedback', []))}",
+            )
+        else:
+            click.echo(FeedbackManager.error(message=f"Unknown deployment result {status}"))
+    except Exception as e:
+        click.echo(FeedbackManager.error_exception(error=e))
+
+    if not deployment and not check:
+        sys_exit("deployment_error", "Deployment failed")
+
+    if deployment and wait and not check:
+        click.echo(FeedbackManager.highlight(message="» Waiting for deployment to be ready..."))
+        while True:
+            url = f"{client.host}/v1/deployments/{deployment.get('id')}"
+            res = api_fetch(url, HEADERS)
+            deployment = res.get("deployment")
+            if not deployment:
+                click.echo(FeedbackManager.error(message="Error parsing deployment from response"))
+                sys_exit("deployment_error", "Error parsing deployment from response")
+            if deployment.get("status") == "failed":
+                click.echo(FeedbackManager.error(message="Deployment failed"))
+                deploy_errors = deployment.get("errors")
+                for deploy_error in deploy_errors:
+                    click.echo(FeedbackManager.error(message=f"* {deploy_error}"))
+
+                if auto:
+                    click.echo(FeedbackManager.error(message="Rolling back deployment"))
+                    discard_deployment(client.host, HEADERS, wait=wait)
+                sys_exit(
+                    "deployment_error",
+                    f"Deployment failed. Errors: {str(deployment.get('errors') + deployment.get('feedback', []))}",
+                )
+
+            if deployment.get("status") == "data_ready":
+                break
+
+            if deployment.get("status") in ["deleting", "deleted"]:
+                click.echo(FeedbackManager.error(message="Deployment was deleted by another process"))
+                sys_exit("deployment_error", "Deployment was deleted by another process")
+
+            time.sleep(5)
+
+        click.echo(FeedbackManager.info(message="✓ Deployment is ready"))
+
+        if auto:
+            promote_deployment(client.host, HEADERS, wait=wait)
+
+
+def print_changes(result: dict, project: Project) -> None:
+    deployment = result.get("deployment", {})
+    resources_columns = ["status", "name", "type", "path"]
+    resources: list[list[Union[str, None]]] = []
+    tokens_columns = ["Change", "Token name", "Added permissions", "Removed permissions"]
+    tokens: list[Tuple[str, str, str, str]] = []
+
+    for ds in deployment.get("new_datasource_names", []):
+        resources.append(["new", ds, "datasource", project.get_resource_path(ds, "datasource")])
+
+    for p in deployment.get("new_pipe_names", []):
+        path = project.get_resource_path(p, "pipe")
+        pipe_type = project.get_pipe_type(path)
+        resources.append(["new", p, pipe_type, path])
+
+    for dc in deployment.get("new_data_connector_names", []):
+        resources.append(["new", dc, "connection", project.get_resource_path(dc, "connection")])
+
+    for ds in deployment.get("changed_datasource_names", []):
+        resources.append(["modified", ds, "datasource", project.get_resource_path(ds, "datasource")])
+
+    for p in deployment.get("changed_pipe_names", []):
+        path = project.get_resource_path(p, "pipe")
+        pipe_type = project.get_pipe_type(path)
+        resources.append(["modified", p, pipe_type, path])
+
+    for dc in deployment.get("changed_data_connector_names", []):
+        resources.append(["modified", dc, "connection", project.get_resource_path(dc, "connection")])
+
+    for ds in deployment.get("disconnected_data_source_names", []):
+        resources.append(["modified", ds, "datasource", project.get_resource_path(ds, "datasource")])
+
+    for ds in deployment.get("deleted_datasource_names", []):
+        resources.append(["deleted", ds, "datasource", project.get_resource_path(ds, "datasource")])
+
+    for p in deployment.get("deleted_pipe_names", []):
+        path = project.get_resource_path(p, "pipe")
+        pipe_type = project.get_pipe_type(path)
+        resources.append(["deleted", p, pipe_type, path])
+
+    for dc in deployment.get("deleted_data_connector_names", []):
+        resources.append(["deleted", dc, "connection", project.get_resource_path(dc, "connection")])
+
+    for token_change in deployment.get("token_changes", []):
+        token_name = token_change.get("token_name")
+        change_type = token_change.get("change_type")
+        added_perms = []
+        removed_perms = []
+        permission_changes = token_change.get("permission_changes", {})
+        for perm in permission_changes.get("added_permissions", []):
+            added_perms.append(f"{perm['resource_name']}.{perm['resource_type']}:{perm['permission']}")
+        for perm in permission_changes.get("removed_permissions", []):
+            removed_perms.append(f"{perm['resource_name']}.{perm['resource_type']}:{perm['permission']}")
+
+        tokens.append((change_type, token_name, "\n".join(added_perms), "\n".join(removed_perms)))
+
+    if resources:
+        click.echo(FeedbackManager.info(message="\n* Changes to be deployed:"))
+        echo_safe_humanfriendly_tables_format_smart_table(resources, column_names=resources_columns)
+    else:
+        click.echo(FeedbackManager.gray(message="\n* No changes to be deployed"))
+    if tokens:
+        click.echo(FeedbackManager.info(message="\n* Changes in tokens to be deployed:"))
+        echo_safe_humanfriendly_tables_format_smart_table(tokens, column_names=tokens_columns)
+    else:
+        click.echo(FeedbackManager.gray(message="* No changes in tokens to be deployed"))
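For context, a minimal sketch of how the new create_deployment helper could be driven by calling code. The wrapper function below, and the way the Project, TinyB client, and config dictionary are obtained, are assumptions for illustration, not the actual tb deploy implementation.

from typing import Any, Dict

from tinybird.tb.client import TinyB
from tinybird.tb.modules.deployment_common import create_deployment
from tinybird.tb.modules.project import Project


def deploy_and_promote(project: Project, client: TinyB, config: Dict[str, Any]) -> None:
    # check=True would only validate the deployment server-side and exit early;
    # auto=True waits for the deployment to reach data_ready and then promotes it,
    # removing the previous live deployment (see promote_deployment above).
    create_deployment(
        project=project,
        client=client,
        config=config,  # only config.get("name") is read, to print the deployment URL
        wait=True,
        auto=True,
        check=False,
        allow_destructive_operations=False,
    )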
tinybird/tb/modules/feedback_manager.py
CHANGED

@@ -444,6 +444,7 @@ class FeedbackManager:
     error_invalid_output_format = error_message(
         "Invalid output format for this command. Supported formats are: {formats}"
     )
+    error_build_only_supported_in_local = error_message("Builds are only supported in Tinybird Local")
 
     info_incl_relative_path = info_message("** Relative path {path} does not exist, skipping.")
     info_ignoring_incl_file = info_message(

@@ -631,12 +632,13 @@ STEP 2: CREATE GCP SERVICE ACCOUNT
 1. Go to IAM & Admin > Service Accounts > + Create Service Account: https://console.cloud.google.com/iam-admin/serviceaccounts/create
 2. Provide a service account name. Name the service account something meaningful (e.g., TinybirdGCS-{environment}-svc-account)
 3. Click "Create and continue"
-4. Click the "Select a role" drop down menu
-
-
-
-
-
+4. Click the "Select a role" drop down menu:
+    - For Source (reading from GCS) select this role:
+        • "Storage Object Viewer" - Grants access to view objects and their metadata
+    - For Sink (writing to GCS) select all three roles:
+        • "Storage Object Creator" - Allows users to create objects
+        • "Storage Object Viewer" - Grants access to view objects and their metadata
+        • "Storage Bucket Viewer" - Grants access to view buckets and their metadata
 (You can add IAM condition to provide access to selected buckets. More info in IAM Conditions: https://cloud.google.com/iam/docs/conditions-overview)
 5. Click "Done"
 """
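A small usage sketch for the new message constant. It follows the calling pattern already visible in this diff (for example FeedbackManager.error_exception(error=e) in deployment_common.py), where attributes built with error_message() are called to render the formatted string; the guard function itself is hypothetical.

import click

from tinybird.tb.modules.feedback_manager import FeedbackManager


def require_local_for_build(is_local_env: bool) -> None:
    # Hypothetical guard: refuse to build against anything other than Tinybird Local.
    if not is_local_env:
        click.echo(FeedbackManager.error_build_only_supported_in_local())
        raise SystemExit(1)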
tinybird/tb/modules/llm.py
CHANGED
tinybird/tb/modules/mock.py
CHANGED
@@ -1,19 +1,15 @@
 import glob
 from pathlib import Path
-from typing import Any, Dict, List
 
 import click
 
-from tinybird.prompts import mock_prompt
 from tinybird.tb.client import TinyB
 from tinybird.tb.modules.cli import cli
-from tinybird.tb.modules.common import push_data
 from tinybird.tb.modules.config import CLIConfig
-from tinybird.tb.modules.datafile.fixture import persist_fixture
+from tinybird.tb.modules.datafile.fixture import persist_fixture
 from tinybird.tb.modules.exceptions import CLIMockException
 from tinybird.tb.modules.feedback_manager import FeedbackManager
-from tinybird.tb.modules.
-from tinybird.tb.modules.llm_utils import extract_xml
+from tinybird.tb.modules.mock_common import append_mock_data, create_mock_data
 from tinybird.tb.modules.project import Project
 
 

@@ -86,71 +82,9 @@ def mock(ctx: click.Context, datasource: str, rows: int, prompt: str, format_: s
         fixture_path = persist_fixture(datasource_name, data, folder, format=format_)
         click.echo(FeedbackManager.info(message=f"✓ /fixtures/{datasource_name}.{format_} created"))
         if env == "cloud":
-
+            append_mock_data(tb_client, datasource_name, str(fixture_path))
 
         click.echo(FeedbackManager.success(message=f"✓ Sample data for {datasource_name} created with {rows} rows"))
 
     except Exception as e:
         raise CLIMockException(FeedbackManager.error(message=str(e)))
-
-
-def append_fixture(
-    tb_client: TinyB,
-    datasource_name: str,
-    url: str,
-):
-    push_data(
-        tb_client,
-        datasource_name,
-        url,
-        mode="append",
-        concurrency=1,
-        silent=True,
-    )
-
-
-def create_mock_data(
-    datasource_name: str,
-    datasource_content: str,
-    rows: int,
-    prompt: str,
-    config: CLIConfig,
-    ctx_config: Dict[str, Any],
-    user_token: str,
-    tb_client: TinyB,
-    format_: str,
-    folder: str,
-) -> List[Dict[str, Any]]:
-    user_client = config.get_client(token=ctx_config.get("token"), host=ctx_config.get("host"))
-    llm = LLM(user_token=user_token, host=user_client.host)
-    prompt = f"<datasource_schema>{datasource_content}</datasource_schema>\n<user_input>{prompt}</user_input>"
-    sql = ""
-    attempts = 0
-    data = []
-    error = ""
-    sql_path = None
-    while True:
-        try:
-            response = llm.ask(system_prompt=mock_prompt(rows, error), prompt=prompt, feature="tb_mock")
-            sql = extract_xml(response, "sql")
-            sql_path = persist_fixture_sql(datasource_name, sql, folder)
-            sql_format = "JSON" if format_ == "ndjson" else "CSV"
-            result = tb_client.query(f"SELECT * FROM ({sql}) LIMIT {rows} FORMAT {sql_format}")
-            if sql_format == "JSON":
-                data = result.get("data", [])[:rows]
-                error_response = result.get("error", None)
-                if error_response:
-                    raise Exception(error_response)
-            else:
-                data = result
-            break
-        except Exception as e:
-            error = str(e)
-            attempts += 1
-            if attempts > 5:
-                raise Exception(
-                    f"Failed to generate a valid solution. Check {str(sql_path or '.sql path')} and try again."
-                )
-            else:
-                continue
-    return data
tinybird/tb/modules/mock_common.py
ADDED

@@ -0,0 +1,71 @@
+from typing import Any, Dict, List
+
+from tinybird.prompts import mock_prompt
+from tinybird.tb.client import TinyB
+from tinybird.tb.modules.common import push_data
+from tinybird.tb.modules.config import CLIConfig
+from tinybird.tb.modules.datafile.fixture import persist_fixture_sql
+from tinybird.tb.modules.llm import LLM
+from tinybird.tb.modules.llm_utils import extract_xml
+
+
+def append_mock_data(
+    tb_client: TinyB,
+    datasource_name: str,
+    url: str,
+):
+    push_data(
+        tb_client,
+        datasource_name,
+        url,
+        mode="append",
+        concurrency=1,
+        silent=True,
+    )
+
+
+def create_mock_data(
+    datasource_name: str,
+    datasource_content: str,
+    rows: int,
+    prompt: str,
+    config: CLIConfig,
+    ctx_config: Dict[str, Any],
+    user_token: str,
+    tb_client: TinyB,
+    format_: str,
+    folder: str,
+) -> List[Dict[str, Any]]:
+    user_client = config.get_client(token=ctx_config.get("token"), host=ctx_config.get("host"))
+    llm = LLM(user_token=user_token, host=user_client.host)
+    prompt = f"<datasource_schema>{datasource_content}</datasource_schema>\n<user_input>{prompt}</user_input>"
+    sql = ""
+    attempts = 0
+    data = []
+    error = ""
+    sql_path = None
+    while True:
+        try:
+            response = llm.ask(system_prompt=mock_prompt(rows, error), prompt=prompt, feature="tb_mock")
+            sql = extract_xml(response, "sql")
+            sql_path = persist_fixture_sql(datasource_name, sql, folder)
+            sql_format = "JSON" if format_ == "ndjson" else "CSV"
+            result = tb_client.query(f"SELECT * FROM ({sql}) LIMIT {rows} FORMAT {sql_format}")
+            if sql_format == "JSON":
+                data = result.get("data", [])[:rows]
+                error_response = result.get("error", None)
+                if error_response:
+                    raise Exception(error_response)
+            else:
+                data = result
+            break
+        except Exception as e:
+            error = str(e)
+            attempts += 1
+            if attempts > 5:
+                raise Exception(
+                    f"Failed to generate a valid solution. Check {str(sql_path or '.sql path')} and try again."
+                )
+            else:
+                continue
+    return data
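A hedged sketch of how the two helpers extracted into mock_common.py fit together, mirroring the flow of the tb mock command above: create_mock_data asks the LLM for a SQL generator and materializes rows, the caller persists them as a fixture, and in cloud environments the fixture is appended to the datasource. The wrapper function and the way config, tokens, and the client are resolved are assumptions.

from typing import Any, Dict

from tinybird.tb.client import TinyB
from tinybird.tb.modules.config import CLIConfig
from tinybird.tb.modules.datafile.fixture import persist_fixture
from tinybird.tb.modules.mock_common import append_mock_data, create_mock_data


def generate_fixture(
    tb_client: TinyB,
    config: CLIConfig,
    ctx_config: Dict[str, Any],
    user_token: str,
    datasource_name: str,
    datasource_content: str,
    folder: str,
    env: str,
) -> None:
    rows = 10
    data = create_mock_data(
        datasource_name,
        datasource_content,
        rows,
        "realistic looking events",  # free-form prompt forwarded to the LLM
        config,
        ctx_config,
        user_token,
        tb_client,
        "ndjson",
        folder,
    )
    fixture_path = persist_fixture(datasource_name, data, folder, format="ndjson")
    if env == "cloud":
        # In the cloud flow the generated fixture is appended straight into the datasource.
        append_mock_data(tb_client, datasource_name, str(fixture_path))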
tinybird/tb/modules/project.py
CHANGED
@@ -50,6 +50,15 @@ class Project:
             project_files.append(project_file)
         return project_files
 
+    def get_fixture_files(self) -> List[str]:
+        fixture_files: List[str] = []
+        for extension in ["csv", "ndjson", "parquet"]:
+            for fixture_file in self.get_files(extension):
+                if self.vendor_path in fixture_file:
+                    continue
+                fixture_files.append(fixture_file)
+        return fixture_files
+
     def get_resource_path(self, resource_name: str, resource_type: str) -> str:
         full_path = next(
             (p for p in self.get_project_files() if p.endswith("/" + resource_name + f".{resource_type}")), ""