tinybird 0.0.1.dev0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of tinybird might be problematic.
- tinybird/__cli__.py +8 -0
- tinybird/ch_utils/constants.py +244 -0
- tinybird/ch_utils/engine.py +855 -0
- tinybird/check_pypi.py +25 -0
- tinybird/client.py +1281 -0
- tinybird/config.py +117 -0
- tinybird/connectors.py +428 -0
- tinybird/context.py +23 -0
- tinybird/datafile.py +5589 -0
- tinybird/datatypes.py +434 -0
- tinybird/feedback_manager.py +1022 -0
- tinybird/git_settings.py +145 -0
- tinybird/sql.py +865 -0
- tinybird/sql_template.py +2343 -0
- tinybird/sql_template_fmt.py +281 -0
- tinybird/sql_toolset.py +350 -0
- tinybird/syncasync.py +682 -0
- tinybird/tb_cli.py +25 -0
- tinybird/tb_cli_modules/auth.py +252 -0
- tinybird/tb_cli_modules/branch.py +1043 -0
- tinybird/tb_cli_modules/cicd.py +434 -0
- tinybird/tb_cli_modules/cli.py +1571 -0
- tinybird/tb_cli_modules/common.py +2082 -0
- tinybird/tb_cli_modules/config.py +344 -0
- tinybird/tb_cli_modules/connection.py +803 -0
- tinybird/tb_cli_modules/datasource.py +900 -0
- tinybird/tb_cli_modules/exceptions.py +91 -0
- tinybird/tb_cli_modules/fmt.py +91 -0
- tinybird/tb_cli_modules/job.py +85 -0
- tinybird/tb_cli_modules/pipe.py +858 -0
- tinybird/tb_cli_modules/regions.py +9 -0
- tinybird/tb_cli_modules/tag.py +100 -0
- tinybird/tb_cli_modules/telemetry.py +310 -0
- tinybird/tb_cli_modules/test.py +107 -0
- tinybird/tb_cli_modules/tinyunit/tinyunit.py +340 -0
- tinybird/tb_cli_modules/tinyunit/tinyunit_lib.py +71 -0
- tinybird/tb_cli_modules/token.py +349 -0
- tinybird/tb_cli_modules/workspace.py +269 -0
- tinybird/tb_cli_modules/workspace_members.py +212 -0
- tinybird/tornado_template.py +1194 -0
- tinybird-0.0.1.dev0.dist-info/METADATA +2815 -0
- tinybird-0.0.1.dev0.dist-info/RECORD +45 -0
- tinybird-0.0.1.dev0.dist-info/WHEEL +5 -0
- tinybird-0.0.1.dev0.dist-info/entry_points.txt +2 -0
- tinybird-0.0.1.dev0.dist-info/top_level.txt +4 -0
tinybird/client.py
ADDED
@@ -0,0 +1,1281 @@
import asyncio
import json
import logging
import ssl
from pathlib import Path
from typing import Any, Callable, Dict, List, Mapping, Optional, Set, Union
from urllib.parse import quote, urlencode

import aiofiles
import requests
import requests.adapters
from requests import Response
from urllib3 import Retry

from tinybird.ch_utils.constants import COPY_ENABLED_TABLE_FUNCTIONS
from tinybird.syncasync import sync_to_async
from tinybird.tb_cli_modules.telemetry import add_telemetry_event

HOST = "https://api.tinybird.co"
LIMIT_RETRIES = 10
LAST_PARTITION = "last_partition"
ALL_PARTITIONS = "all_partitions"


class AuthException(Exception):
    pass


class AuthNoTokenException(AuthException):
    pass


class DoesNotExistException(Exception):
    pass


class CanNotBeDeletedException(Exception):
    pass


class OperationCanNotBePerformed(Exception):
    pass


class TimeoutException(Exception):
    pass


class ReachRetryLimit(Exception):
    pass


class ConnectorNothingToLoad(Exception):
    pass


class JobException(Exception):
    pass


def connector_equals(connector, datafile_params):
    if not connector:
        return False
    if connector["name"] == datafile_params["kafka_connection_name"]:
        return True
    return False


def parse_error_response(response: Response) -> str:
    try:
        content: Dict = response.json()
        if content.get("error", None):
            error = content["error"]
            if content.get("errors", None):
                error += f' -> errors: {content.get("errors")}'
        else:
            error = json.dumps(response, indent=4)
        return error
    except json.decoder.JSONDecodeError:
        return f"Server error, cannot parse response. {response.content.decode('utf-8')}"


class TinyB:
    MAX_GET_LENGTH = 4096

    def __init__(
        self,
        token: str,
        host: str = HOST,
        version: Optional[str] = None,
        disable_ssl_checks: bool = False,
        send_telemetry: bool = False,
        semver: Optional[str] = None,
    ):
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE

        self.token = token
        self.host = host
        self.version = version
        self.disable_ssl_checks = disable_ssl_checks
        self.send_telemetry = send_telemetry
        self.semver = semver

    async def _req(
        self,
        endpoint: str,
        data=None,
        files=None,
        method: str = "GET",
        retries: int = LIMIT_RETRIES,
        use_token: Optional[str] = None,
        **kwargs,
    ):
        url = f"{self.host.strip('/')}/{endpoint.strip('/')}"

        token_to_use = use_token if use_token else self.token
        if token_to_use:
            url += ("&" if "?" in endpoint else "?") + "token=" + token_to_use
        if self.version:
            url += ("&" if "?" in url else "?") + "cli_version=" + quote(self.version)
        if self.semver:
            url += ("&" if "?" in url else "?") + "__tb__semver=" + self.semver

        verify_ssl = not self.disable_ssl_checks
        try:
            with requests.Session() as session:
                if retries > 0:
                    retry = Retry(
                        total=retries,
                        status_forcelist=[429] if method in ("POST", "PUT", "DELETE") else [504, 502, 598, 599, 429],
                        allowed_methods=frozenset({method}),
                    )
                    session.mount("https://", requests.adapters.HTTPAdapter(max_retries=retry))
                    session.mount("http://", requests.adapters.HTTPAdapter(max_retries=retry))
                if method == "POST":
                    if files:
                        response = await sync_to_async(session.post, thread_sensitive=False)(
                            url, files=files, verify=verify_ssl, **kwargs
                        )
                    else:
                        response = await sync_to_async(session.post, thread_sensitive=False)(
                            url, data=data, verify=verify_ssl, **kwargs
                        )
                elif method == "PUT":
                    response = await sync_to_async(session.put, thread_sensitive=False)(
                        url, data=data, verify=verify_ssl, **kwargs
                    )
                elif method == "DELETE":
                    response = await sync_to_async(session.delete, thread_sensitive=False)(
                        url, data=data, verify=verify_ssl, **kwargs
                    )
                else:
                    response = await sync_to_async(session.get, thread_sensitive=False)(
                        url, verify=verify_ssl, **kwargs
                    )

        except Exception as e:
            raise e

        logging.debug("== server response ==")
        logging.debug(response.content)
        logging.debug("== end ==")

        if self.send_telemetry:
            try:
                add_telemetry_event("api_request", endpoint=url, token=self.token, status_code=response.status_code)
            except Exception as ex:
                logging.exception(f"Can't send telemetry: {ex}")

        if response.status_code == 403:
            error = parse_error_response(response)
            if not token_to_use:
                raise AuthNoTokenException(f"Forbidden: {error}")
            raise AuthException(f"Forbidden: {error}")
        if response.status_code == 204 or response.status_code == 205:
            return None
        if response.status_code == 404:
            error = parse_error_response(response)
            raise DoesNotExistException(error)
        if response.status_code == 400:
            error = parse_error_response(response)
            raise OperationCanNotBePerformed(error)
        if response.status_code == 409:
            error = parse_error_response(response)
            raise CanNotBeDeletedException(error)
        if response.status_code == 599:
            raise TimeoutException("timeout")
        if "Content-Type" in response.headers and (
            response.headers["Content-Type"] == "text/plain" or "text/csv" in response.headers["Content-Type"]
        ):
            return response.content.decode("utf-8")
        if response.status_code >= 400 and response.status_code not in [400, 403, 404, 409, 429]:
            error = parse_error_response(response)
            raise Exception(error)
        if response.content:
            try:
                return response.json()
            except json.decoder.JSONDecodeError:
                raise Exception(f"Server error, cannot parse response. {response.content.decode('utf-8')}")

        return response
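
Note: _req centralizes auth, retries, and error handling for every method in this class: the token is appended as a query parameter, the requests.Session is mounted with a urllib3 Retry policy, and HTTP status codes are mapped onto the exception hierarchy defined at the top of the file. A minimal usage sketch (the token value is a placeholder, not a real credential):

import asyncio

from tinybird.client import AuthException, DoesNotExistException, TinyB


async def main():
    client = TinyB(token="<ADMIN_TOKEN>", host="https://api.tinybird.co")
    try:
        tokens = await client.tokens()  # GET /v0/tokens, parsed from the JSON body
        print([t["name"] for t in tokens])
    except (AuthException, DoesNotExistException) as e:
        print(f"API error: {e}")


asyncio.run(main())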

    async def tokens(self):
        response = await self._req("/v0/tokens")
        return response["tokens"]

    async def starterkits(self) -> List[Dict[str, Any]]:
        data = await self._req("/v0/templates")
        return data.get("templates", [])

    async def get_token_by_name(self, name: str):
        tokens = await self.tokens()
        for tk in tokens:
            if tk["name"] == name:
                return tk
        return None

    async def create_token(
        self, name: str, scope: List[str], origin_code: Optional[str], origin_resource_name_or_id: Optional[str] = None
    ):
        origin = origin_code or "C"  # == Origins.CUSTOM if none specified
        params = {
            "name": name,
            "origin": origin,
        }
        if origin_resource_name_or_id:
            params["resource_id"] = origin_resource_name_or_id

        # TODO: We should support sending multiple scopes in the body of the request
        url = f"/v0/tokens?{urlencode(params)}"
        url = url + "&" + "&".join([f"scope={scope}" for scope in scope])
        return await self._req(
            url,
            method="POST",
            data="",
        )

    async def alter_tokens(self, name: str, scopes: List[str]):
        if not scopes:
            return
        scopes_url: str = "&".join([f"scope={scope}" for scope in scopes])
        url = f"/v0/tokens/{name}"
        if len(url + "?" + scopes_url) > TinyB.MAX_GET_LENGTH:
            return await self._req(url, method="PUT", data=scopes_url)
        else:
            url = url + "?" + scopes_url
            return await self._req(url, method="PUT", data="")

    async def datasources(self, branch: Optional[str] = None, used_by: bool = False):
        params = {
            "attrs": "used_by" if used_by else "",
        }
        response = await self._req(f"/v0/datasources?{urlencode(params)}")
        ds = response["datasources"]

        if branch:
            ds = [x for x in ds if x["name"].startswith(branch)]
        return ds

    async def get_connections(self, service: Optional[str] = None):
        params = {}

        if service:
            params["service"] = service

        response = await self._req(f"/v0/connectors?{urlencode(params)}")
        return response["connectors"]

    async def connections(self, connector: Optional[str] = None, skip_bigquery: Optional[bool] = False):
        response = await self._req("/v0/connectors")
        connectors = response["connectors"]
        bigquery_connection = None
        if not skip_bigquery:
            bigquery_connection = (
                await self.bigquery_connection() if connector == "bigquery" or connector is None else None
            )
        connectors = [*connectors, bigquery_connection] if bigquery_connection else connectors
        if connector:
            return [
                {
                    "id": c["id"],
                    "service": c["service"],
                    "name": c["name"],
                    "connected_datasources": len(c["linkers"]),
                    **c["settings"],
                }
                for c in connectors
                if c["service"] == connector
            ]
        return [
            {
                "id": c["id"],
                "service": c["service"],
                "name": c["name"],
                "connected_datasources": len(c["linkers"]),
                **c["settings"],
            }
            for c in connectors
        ]

    async def bigquery_connection(self):
        bigquery_resources = await self.list_gcp_resources()
        if len(bigquery_resources) == 0:
            return None

        gcp_account_details: Dict[str, Any] = await self.get_gcp_service_account_details()
        datasources = await self.datasources()
        bigquery_datasources = [ds["name"] for ds in datasources if ds["type"] == "bigquery"]
        return {
            "id": gcp_account_details["account"].split("@")[0],
            "service": "bigquery",
            "name": "bigquery",
            "linkers": bigquery_datasources,
            "settings": gcp_account_details,
        }

    async def get_datasource(self, ds_name: str, used_by: bool = False) -> Dict[str, Any]:
        params = {
            "attrs": "used_by" if used_by else "",
        }
        return await self._req(f"/v0/datasources/{ds_name}?{urlencode(params)}")

    async def alter_datasource(
        self,
        ds_name: str,
        new_schema: Optional[str] = None,
        description: Optional[str] = None,
        ttl: Optional[str] = None,
        dry_run: bool = False,
        indexes: Optional[str] = None,
    ):
        params = {"dry": "true" if dry_run else "false"}
        if new_schema:
            params.update({"schema": new_schema})
        if description:
            params.update({"description": description})
        if ttl:
            params.update({"ttl": ttl})
        if indexes:
            params.update({"indexes": indexes})
        res = await self._req(f"/v0/datasources/{ds_name}/alter", method="POST", data=params)

        if "Error" in res:
            raise Exception(res["error"])

        return res

    async def update_datasource(self, ds_name: str, data: Dict[str, Any]):
        res = await self._req(f"/v0/datasources/{ds_name}", method="PUT", data=data)

        if "Error" in res:
            raise Exception(res["error"])

        return res

    async def pipe_file(self, pipe: str):
        return await self._req(f"/v0/pipes/{pipe}.pipe")

    async def datasource_file(self, datasource: str):
        try:
            return await self._req(f"/v0/datasources/{datasource}.datasource")
        except DoesNotExistException:
            raise Exception(f"Data Source {datasource} not found.")

    async def datasource_analyze(self, url):
        params = {"url": url}
        return await self._req(f"/v0/analyze?{urlencode(params)}", method="POST", data="")

    async def datasource_analyze_file(self, data):
        return await self._req("/v0/analyze", method="POST", data=data)

    async def datasource_create_from_definition(self, parameter_definition: Dict[str, str]):
        return await self._req("/v0/datasources", method="POST", data=parameter_definition)

    async def datasource_create_from_url(
        self,
        table_name: str,
        url: str,
        mode: str = "create",
        status_callback=None,
        sql_condition: Optional[str] = None,
        format: str = "csv",
        replace_options: Optional[Set[str]] = None,
    ):
        params = {"name": table_name, "url": url, "mode": mode, "debug": "blocks_block_log", "format": format}

        if sql_condition:
            params["replace_condition"] = sql_condition
        if replace_options:
            for option in list(replace_options):
                params[option] = "true"

        req_url = f"/v0/datasources?{urlencode(params, safe='')}"
        res = await self._req(req_url, method="POST", data=b"")

        if "error" in res:
            raise Exception(res["error"])

        return await self.wait_for_job(res["id"], status_callback, backoff_multiplier=1.5, maximum_backoff_seconds=20)

    async def datasource_delete(self, datasource_name: str, force: bool = False, dry_run: bool = False):
        params = {"force": "true" if force else "false", "dry_run": "true" if dry_run else "false"}
        return await self._req(f"/v0/datasources/{datasource_name}?{urlencode(params)}", method="DELETE")

    async def datasource_append_data(
        self,
        datasource_name: str,
        file: Union[str, Path],
        mode: str = "append",
        status_callback=None,
        sql_condition: Optional[str] = None,
        format: str = "csv",
        replace_options: Optional[Set[str]] = None,
    ):
        params = {"name": datasource_name, "mode": mode, "format": format, "debug": "blocks_block_log"}

        if sql_condition:
            params["replace_condition"] = sql_condition
        if replace_options:
            for option in list(replace_options):
                params[option] = "true"

        async with aiofiles.open(file, "rb") as content:
            file_content = await content.read()
            if format == "csv":
                files = {"csv": ("csv", file_content)}
            else:
                files = {"ndjson": ("ndjson", file_content)}

        res = await self._req(
            f"v0/datasources?{urlencode(params, safe='')}",
            files=files,
            method="POST",
        )
        if status_callback:
            status_callback(res)

        return res
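
Note: combining mode="replace" with sql_condition performs a conditional replace: the condition is forwarded as the replace_condition parameter, and any names in replace_options are added as flags set to "true". A sketch with hypothetical values:

# Replaces only the rows matching the condition with the file contents.
await client.datasource_append_data(
    datasource_name="events",
    file="events_2024_01.csv",
    mode="replace",
    sql_condition="toDate(timestamp) = '2024-01-01'",  # sent as replace_condition
)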

    async def datasource_truncate(self, datasource_name: str):
        return await self._req(f"/v0/datasources/{datasource_name}/truncate", method="POST", data="")

    async def datasource_delete_rows(self, datasource_name: str, delete_condition: str, dry_run: bool = False):
        params = {"delete_condition": delete_condition}
        if dry_run:
            params.update({"dry_run": "true"})
        return await self._req(f"/v0/datasources/{datasource_name}/delete", method="POST", data=params)

    async def datasource_dependencies(
        self, no_deps: bool, match: str, pipe: str, datasource: str, check_for_partial_replace: bool, recursive: bool
    ):
        params = {
            "no_deps": "true" if no_deps else "false",
            "check_for_partial_replace": "true" if check_for_partial_replace else "false",
            "recursive": "true" if recursive else "false",
        }
        if match:
            params["match"] = match
        if pipe:
            params["pipe"] = pipe
        if datasource:
            params["datasource"] = datasource

        return await self._req(f"/v0/dependencies?{urlencode(params)}", timeout=60)

    async def datasource_share(self, datasource_id: str, current_workspace_id: str, destination_workspace_id: str):
        params = {"origin_workspace_id": current_workspace_id, "destination_workspace_id": destination_workspace_id}
        return await self._req(f"/v0/datasources/{datasource_id}/share", method="POST", data=params)

    async def datasource_unshare(self, datasource_id: str, current_workspace_id: str, destination_workspace_id: str):
        params = {"origin_workspace_id": current_workspace_id, "destination_workspace_id": destination_workspace_id}
        return await self._req(f"/v0/datasources/{datasource_id}/share", method="DELETE", data=params)

    async def datasource_sync(self, datasource_id: str):
        return await self._req(f"/v0/datasources/{datasource_id}/scheduling/runs", method="POST", data="")

    async def datasource_scheduling_state(self, datasource_id: str):
        response = await self._req(f"/v0/datasources/{datasource_id}/scheduling/state", method="GET")
        return response["state"]

    async def datasource_scheduling_pause(self, datasource_id: str):
        return await self._req(
            f"/v0/datasources/{datasource_id}/scheduling/state",
            method="PUT",
            data='{"state": "paused"}',
        )

    async def datasource_scheduling_resume(self, datasource_id: str):
        return await self._req(
            f"/v0/datasources/{datasource_id}/scheduling/state",
            method="PUT",
            data='{"state": "running"}',
        )

    async def datasource_exchange(self, datasource_a: str, datasource_b: str):
        payload = {"datasource_a": datasource_a, "datasource_b": datasource_b}
        return await self._req("/v0/datasources/exchange", method="POST", data=payload)

    async def analyze_pipe_node(
        self, pipe_name: str, node: Dict[str, Any], dry_run: str = "false", datasource_name: Optional[str] = None
    ):
        params = {**{"include_datafile": "true", "dry_run": dry_run}, **node.get("params", node)}
        if "mode" in params:
            params.pop("mode")
        node_name = node["params"]["name"] if node.get("params", None) else node["name"]
        if datasource_name:
            params["datasource"] = datasource_name
        response = await self._req(f"/v0/pipes/{pipe_name}/nodes/{node_name}/analysis?{urlencode(params)}")
        return response

    async def populate_node(
        self,
        pipe_name: str,
        node_name: str,
        populate_subset: bool = False,
        populate_condition: Optional[str] = None,
        truncate: bool = True,
        unlink_on_populate_error: bool = False,
    ):
        params: Dict[str, Any] = {
            "truncate": "true" if truncate else "false",
            "unlink_on_populate_error": "true" if unlink_on_populate_error else "false",
        }
        if populate_subset:
            params.update({"populate_subset": populate_subset})
        if populate_condition:
            params.update({"populate_condition": populate_condition})
        response = await self._req(
            f"/v0/pipes/{pipe_name}/nodes/{node_name}/population?{urlencode(params)}", method="POST"
        )
        return response

    async def pipes(self, branch=None, dependencies: bool = False, node_attrs=None, attrs=None):
        params = {
            "dependencies": "true" if dependencies else "false",
            "attrs": attrs if attrs else "",
            "node_attrs": node_attrs if node_attrs else "",
        }
        response = await self._req(f"/v0/pipes?{urlencode(params)}")
        pipes = response["pipes"]
        if branch:
            pipes = [x for x in pipes if x["name"].startswith(branch)]
        return pipes

    async def pipe(self, pipe: str):
        return await self._req(f"/v0/pipes/{pipe}")

    async def pipe_data(
        self,
        pipe_name_or_uid: str,
        sql: Optional[str] = None,
        format: Optional[str] = "json",
        params: Optional[Mapping[str, Any]] = None,
    ):
        params = {**params} if params else {}
        if sql:
            params["q"] = sql

        url = f"/v0/pipes/{pipe_name_or_uid}.{format}"
        query_string = urlencode(params)
        if len(url + "?" + query_string) > TinyB.MAX_GET_LENGTH:
            return await self._req(f"/v0/pipes/{pipe_name_or_uid}.{format}", method="POST", data=params)
        else:
            url = url + "?" + query_string
            return await self._req(url)
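
Note: pipe_data switches transparently between GET and POST: when the URL plus the encoded parameters would exceed MAX_GET_LENGTH (4096 characters), the same parameters are sent as a POST body instead. Querying a published endpoint might look like this (pipe and parameter names are hypothetical):

# Hypothetical pipe and parameters; format defaults to "json".
data = await client.pipe_data(
    "top_products",
    params={"date_from": "2024-01-01", "limit": 10},
)
print(data["data"])  # rows returned by the endpoint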

    async def pipe_create(self, pipe_name: str, sql: str):
        return await self._req(
            f"/v0/pipes?name={pipe_name}&sql={quote(sql, safe='')}", method="POST", data=sql.encode()
        )

    async def pipe_delete(self, pipe_name: str):
        return await self._req(f"/v0/pipes/{pipe_name}", method="DELETE")

    async def pipe_append_node(self, pipe_name_or_uid: str, sql: str):
        return await self._req(f"/v0/pipes/{pipe_name_or_uid}/nodes", method="POST", data=sql.encode())

    async def pipe_set_endpoint(self, pipe_name_or_uid: str, published_node_uid: str):
        return await self._req(f"/v0/pipes/{pipe_name_or_uid}/nodes/{published_node_uid}/endpoint", method="POST")

    async def pipe_remove_endpoint(self, pipe_name_or_uid: str, published_node_uid: str):
        return await self._req(f"/v0/pipes/{pipe_name_or_uid}/nodes/{published_node_uid}/endpoint", method="DELETE")

    async def pipe_update_copy(
        self,
        pipe_name_or_id: str,
        node_id: str,
        target_datasource: Optional[str] = None,
        schedule_cron: Optional[str] = None,
        mode: Optional[str] = None,
    ):
        data = {"schedule_cron": schedule_cron}

        if target_datasource:
            data["target_datasource"] = target_datasource

        if mode:
            data["mode"] = mode

        return await self._req(f"/v0/pipes/{pipe_name_or_id}/nodes/{node_id}/copy", method="PUT", data=data)

    async def pipe_remove_copy(self, pipe_name_or_id: str, node_id: str):
        return await self._req(f"/v0/pipes/{pipe_name_or_id}/nodes/{node_id}/copy", method="DELETE")

    async def pipe_run_copy(
        self, pipe_name_or_id: str, params: Optional[Dict[str, str]] = None, mode: Optional[str] = None
    ):
        params = {**params} if params else {}
        if mode:
            params["_mode"] = mode
        return await self._req(f"/v0/pipes/{pipe_name_or_id}/copy?{urlencode(params)}", method="POST")

    async def pipe_resume_copy(self, pipe_name_or_id: str):
        return await self._req(f"/v0/pipes/{pipe_name_or_id}/copy/resume", method="POST")

    async def pipe_pause_copy(self, pipe_name_or_id: str):
        return await self._req(f"/v0/pipes/{pipe_name_or_id}/copy/pause", method="POST")

    async def pipe_create_sink(self, pipe_name_or_id: str, node_id: str, params: Optional[Dict[str, str]] = None):
        params = {**params} if params else {}
        return await self._req(
            f"/v0/pipes/{pipe_name_or_id}/nodes/{node_id}/sink?{urlencode(params)}", method="POST", data=""
        )

    async def pipe_remove_sink(self, pipe_name_or_id: str, node_id: str):
        return await self._req(f"/v0/pipes/{pipe_name_or_id}/nodes/{node_id}/sink", method="DELETE")

    async def pipe_remove_stream(self, pipe_name_or_id: str, node_id: str):
        return await self._req(f"/v0/pipes/{pipe_name_or_id}/nodes/{node_id}/stream", method="DELETE")

    async def pipe_run_sink(self, pipe_name_or_id: str, params: Optional[Dict[str, str]] = None):
        params = {**params} if params else {}
        return await self._req(f"/v0/pipes/{pipe_name_or_id}/sink?{urlencode(params)}", method="POST")

    async def pipe_unlink_materialized(self, pipe_name: str, node_id: str):
        return await self._req(f"/v0/pipes/{pipe_name}/nodes/{node_id}/materialization", method="DELETE")

    async def query(self, sql: str, pipeline: Optional[str] = None):
        params = {}
        if pipeline:
            params = {"pipeline": pipeline}
        params.update({"release_replacements": "true"})

        if len(sql) > TinyB.MAX_GET_LENGTH:
            return await self._req(f"/v0/sql?{urlencode(params)}", data=sql, method="POST")
        else:
            return await self._req(f"/v0/sql?q={quote(sql, safe='')}&{urlencode(params)}")

    async def jobs(self, status=None):
        jobs = (await self._req("/v0/jobs"))["jobs"]
        if status:
            status = [status] if isinstance(status, str) else status
            jobs = [j for j in jobs if j["status"] in status]
        return jobs

    async def job(self, job_id: str):
        return await self._req(f"/v0/jobs/{job_id}")

    async def job_cancel(self, job_id: str):
        return await self._req(f"/v0/jobs/{job_id}/cancel", method="POST", data=b"")

    async def user_workspaces(self):
        return await self._req("/v0/user/workspaces/?with_environments=false")

    async def user_workspaces_and_branches(self):
        return await self._req("/v0/user/workspaces/?with_environments=true")

    async def user_workspace_branches(self):
        return await self._req("/v0/user/workspaces/?with_environments=true&only_environments=true")

    async def branches(self):
        return await self._req("/v0/environments")

    async def releases(self, workspace_id):
        return await self._req(f"/v0/workspaces/{workspace_id}/releases")

    async def create_workspace(self, name: str, template: Optional[str]):
        url = f"/v0/workspaces?name={name}"
        if template:
            url += f"&starter_kit={template}"
        return await self._req(url, method="POST", data=b"")

    async def create_workspace_branch(
        self,
        branch_name: str,
        last_partition: Optional[bool],
        all: Optional[bool],
        ignore_datasources: Optional[List[str]],
    ):
        params = {
            "name": branch_name,
            "data": LAST_PARTITION if last_partition else (ALL_PARTITIONS if all else ""),
        }
        if ignore_datasources:
            params["ignore_datasources"] = ",".join(ignore_datasources)
        return await self._req(f"/v0/environments?{urlencode(params)}", method="POST", data=b"")

    async def branch_workspace_data(
        self,
        workspace_id: str,
        last_partition: bool,
        all: bool,
        ignore_datasources: Optional[List[str]] = None,
    ):
        params = {}
        if last_partition:
            params["mode"] = LAST_PARTITION

        if all:
            params["mode"] = ALL_PARTITIONS
        if ignore_datasources:
            params["ignore_datasources"] = ",".join(ignore_datasources)
        url = f"/v0/environments/{workspace_id}/data?{urlencode(params)}"
        return await self._req(url, method="POST", data=b"")

    async def branch_regression_tests(
        self,
        branch_id: str,
        pipe_name: Optional[str],
        test_type: str,
        failfast: Optional[bool] = False,
        limit: Optional[int] = None,
        sample_by_params: Optional[int] = None,
        match: Optional[List[str]] = None,
        params: Optional[List[Dict[str, Any]]] = None,
        assert_result: Optional[bool] = True,
        assert_result_no_error: Optional[bool] = True,
        assert_result_rows_count: Optional[bool] = True,
        assert_result_ignore_order: Optional[bool] = False,
        assert_time_increase_percentage: Optional[float] = None,
        assert_bytes_read_increase_percentage: Optional[float] = None,
        assert_max_time: Optional[float] = None,
        run_in_main: Optional[bool] = False,
    ):
        test: Dict[str, Any] = {
            test_type: {
                "config": {
                    "assert_result_ignore_order": assert_result_ignore_order,
                    "assert_result": assert_result,
                    "assert_result_no_error": assert_result_no_error,
                    "assert_result_rows_count": assert_result_rows_count,
                    "failfast": failfast,
                    "assert_time_increase_percentage": assert_time_increase_percentage,
                    "assert_bytes_read_increase_percentage": assert_bytes_read_increase_percentage,
                    "assert_max_time": assert_max_time,
                }
            }
        }
        if limit is not None:
            test[test_type].update({"limit": limit})
        if sample_by_params is not None:
            test[test_type].update({"samples_by_params": sample_by_params})
        if match is not None:
            test[test_type].update({"matches": match})
        if params is not None:
            test[test_type].update({"params": params})

        regression_commands: List[Dict[str, Any]] = [
            {"pipe": ".*" if pipe_name is None else pipe_name, "tests": [test]}
        ]

        data = json.dumps(regression_commands)
        if run_in_main:
            url = f"/v0/environments/{branch_id}/regression/main"
        else:
            url = f"/v0/environments/{branch_id}/regression"
        return await self._req(url, method="POST", data=data, headers={"Content-Type": "application/json"})

    async def branch_regression_tests_file(
        self, branch_id: str, regression_commands: List[Dict[str, Any]], run_in_main: Optional[bool] = False
    ):
        data = json.dumps(regression_commands)
        if run_in_main:
            url = f"/v0/environments/{branch_id}/regression/main"
        else:
            url = f"/v0/environments/{branch_id}/regression"
        return await self._req(url, method="POST", data=data, headers={"Content-Type": "application/json"})

    async def delete_workspace(self, id: str, hard_delete_confirmation: Optional[str]):
        data = {"confirmation": hard_delete_confirmation}
        return await self._req(f"/v0/workspaces/{id}", data, method="DELETE")

    async def delete_branch(self, id: str):
        return await self._req(f"/v0/environments/{id}", method="DELETE")

    async def add_users_to_workspace(self, workspace: Dict[str, Any], users_emails: List[str], role: Optional[str]):
        users = ",".join(users_emails)
        return await self._req(
            f"/v0/workspaces/{workspace['id']}/users/",
            method="PUT",
            data={"operation": "add", "users": users, "role": role},
        )

    async def remove_users_from_workspace(self, workspace: Dict[str, Any], users_emails: List[str]):
        users = ",".join(users_emails)
        return await self._req(
            f"/v0/workspaces/{workspace['id']}/users/", method="PUT", data={"operation": "remove", "users": users}
        )

    async def set_role_for_users_in_workspace(self, workspace: Dict[str, Any], users_emails: List[str], role: str):
        users = ",".join(users_emails)
        return await self._req(
            f"/v0/workspaces/{workspace['id']}/users/",
            method="PUT",
            data={"operation": "change_role", "users": users, "new_role": role},
        )

    async def workspace(self, workspace_id: str, with_token: bool = False):
        params = {"with_token": "true" if with_token else "false"}
        return await self._req(f"/v0/workspaces/{workspace_id}?{urlencode(params)}")

    async def workspace_info(self):
        return await self._req("/v0/workspace")

    async def wait_for_job(
        self,
        job_id: str,
        status_callback: Optional[Callable[[Dict[str, Any]], None]] = None,
        backoff_seconds: float = 2.0,
        backoff_multiplier: float = 1,
        maximum_backoff_seconds: float = 2.0,
    ) -> Dict[str, Any]:
        res: Dict[str, Any] = {}
        done: bool = False
        while not done:
            params = {"debug": "blocks,block_log"}
            res = await self._req(f"/v0/jobs/{job_id}?{urlencode(params)}")

            if res["status"] == "error":
                error_message = "There has been an error"
                if not isinstance(res.get("error", True), bool):
                    error_message = str(res["error"])
                if "errors" in res:
                    error_message += f": {res['errors']}"
                raise JobException(error_message)

            if res["status"] == "cancelled":
                raise JobException("Job has been cancelled")

            done = res["status"] == "done"

            if status_callback:
                status_callback(res)

            if not done:
                backoff_seconds = min(backoff_seconds * backoff_multiplier, maximum_backoff_seconds)
                await asyncio.sleep(backoff_seconds)

        return res
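
Note: wait_for_job polls /v0/jobs/{job_id} until the status is done, error, or cancelled, sleeping between polls with a capped geometric backoff: each wait is the previous one times backoff_multiplier, clamped to maximum_backoff_seconds. With the arguments datasource_create_from_url passes (2.0 s start, 1.5x multiplier, 20 s cap) the schedule works out as follows:

# Reproducing the backoff schedule (pure arithmetic, no API calls).
backoff, multiplier, cap = 2.0, 1.5, 20.0
waits = []
for _ in range(7):
    backoff = min(backoff * multiplier, cap)
    waits.append(round(backoff, 3))
print(waits)  # [3.0, 4.5, 6.75, 10.125, 15.188, 20.0, 20.0]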

    async def datasource_kafka_connect(self, connection_id, datasource_name, topic, group, auto_offset_reset):
        return await self._req(
            f"/v0/datasources?connector={connection_id}&name={datasource_name}&"
            f"kafka_topic={topic}&kafka_group_id={group}&kafka_auto_offset_reset={auto_offset_reset}",
            method="POST",
            data=b"",
        )

    async def connection_create_kafka(
        self,
        kafka_bootstrap_servers,
        kafka_key,
        kafka_secret,
        kafka_connection_name,
        kafka_auto_offset_reset=None,
        kafka_schema_registry_url=None,
        kafka_sasl_mechanism="PLAIN",
        kafka_ssl_ca_pem=None,
    ):
        params = {
            "service": "kafka",
            "kafka_security_protocol": "SASL_SSL",
            "kafka_sasl_mechanism": kafka_sasl_mechanism,
            "kafka_bootstrap_servers": kafka_bootstrap_servers,
            "kafka_sasl_plain_username": kafka_key,
            "kafka_sasl_plain_password": kafka_secret,
            "name": kafka_connection_name,
        }

        if kafka_schema_registry_url:
            params["kafka_schema_registry_url"] = kafka_schema_registry_url
        if kafka_auto_offset_reset:
            params["kafka_auto_offset_reset"] = kafka_auto_offset_reset
        if kafka_ssl_ca_pem:
            params["kafka_ssl_ca_pem"] = kafka_ssl_ca_pem
        connection_params = {key: value for key, value in params.items() if value is not None}

        return await self._req(
            "/v0/connectors",
            method="POST",
            headers={"Content-Type": "application/json"},
            data=json.dumps(connection_params),
        )

    async def kafka_list_topics(self, connection_id: str, timeout=5):
        resp = await self._req(
            f"/v0/connectors/{connection_id}/preview?preview_activity=false",
            connect_timeout=timeout,
            request_timeout=timeout,
        )
        return [x["topic"] for x in resp["preview"]]

    async def get_gcp_service_account_details(self) -> Dict[str, Any]:
        return await self._req("/v0/datasources-bigquery-credentials")

    async def list_connectors(self, service: Optional[str] = None) -> List[Dict[str, Any]]:
        try:
            params: str = f"?service={service}" if service else ""
            result = await self._req(f"/v0/connections/{params}")
            if not result:
                return []

            return result.get("connectors", [])
        except Exception:
            return []

    async def get_connector(
        self,
        name_or_id: str,
        service: Optional[str] = None,
        key: Optional[str] = "name",
        skip_bigquery: Optional[bool] = False,
    ) -> Optional[Dict[str, Any]]:
        return next(
            (c for c in await self.connections(connector=service, skip_bigquery=skip_bigquery) if c[key] == name_or_id),
            None,
        )

    async def get_connector_by_id(self, connector_id: Optional[str] = None):
        return await self._req(f"/v0/connectors/{connector_id}")

    async def get_snowflake_integration_query(
        self, role: str, stage: Optional[str], integration: Optional[str]
    ) -> Optional[Dict[str, Any]]:
        try:
            params = {
                "role": role,
            }
            if stage:
                params["stage"] = stage
            if integration:
                params["integration"] = integration

            return await self._req(f"/v0/connectors/snowflake/instructions?{urlencode(params)}")
        except Exception:
            return None

    async def list_gcp_resources(self) -> List[Dict[str, Any]]:
        try:
            resources = await self._req("/v0/connections/bigquery")
            if not resources:
                return []

            return resources.get("items", [])
        except Exception:
            return []

    async def check_gcp_read_permissions(self) -> bool:
        """Returns `True` if our service account (see `TinyB::get_gcp_service_account_details()`)
        has the proper permissions in GCP.

        Here we assume that we have permissions if we can list resources but currently this
        logic is wrong under some circumstances.

        See https://gitlab.com/tinybird/analytics/-/issues/6485.
        """
        try:
            items = await self.list_gcp_resources()
            if not items:
                return False
            return len(items) > 0
        except Exception:
            return False

    async def connector_delete(self, connection_id):
        return await self._req(f"/v0/connectors/{connection_id}", method="DELETE")

    async def connection_create_snowflake(
        self,
        account_identifier: str,
        user: str,
        password: str,
        warehouse: str,
        role: str,
        connection_name: str,
        integration: Optional[str],
        stage: Optional[str],
    ) -> Dict[str, Any]:
        params = {
            "service": "snowflake",
            "name": connection_name,
            "account": account_identifier,
            "username": user,
            "password": password,
            "role": role,
            "warehouse": warehouse,
        }

        if integration:
            params["integration"] = integration
        if stage:
            params["stage"] = stage

        return await self._req(f"/v0/connectors?{urlencode(params)}", method="POST", data="")

    async def validate_snowflake_connection(self, account_identifier: str, user: str, password: str) -> bool:
        try:
            roles = await self.get_snowflake_roles(account_identifier, user, password)
            if not roles:
                return False
            return len(roles) > 0
        except Exception:
            return False

    async def validate_preview_connection(self, service: str, params: Dict[str, Any]) -> bool:
        params = {"service": service, "dry_run": "true", **params}
        bucket_list = None
        try:
            bucket_list = await self._req(f"/v0/connectors?{urlencode(params)}", method="POST", data="")
            if not bucket_list:
                return False
            return len(bucket_list) > 0
        except Exception:
            return False

    async def preview_bucket(self, connector: str, bucket_uri: str):
        params = {"bucket_uri": bucket_uri, "service": "s3", "summary": "true"}
        return await self._req(f"/v0/connectors/{connector}/preview?{urlencode(params)}", method="GET")

    async def connection_create(self, params: Dict[str, Any]) -> Dict[str, Any]:
        return await self._req(f"/v0/connectors?{urlencode(params)}", method="POST", data="")

    async def get_snowflake_roles(self, account_identifier: str, user: str, password: str) -> Optional[List[str]]:
        params = {"account": account_identifier, "username": user, "password": password}

        response = await self._req(f"/v0/connectors/snowflake/roles?{urlencode(params)}", method="POST", data="")
        return response["roles"]

    async def get_snowflake_warehouses(
        self, account_identifier: str, user: str, password: str, role: str
    ) -> Optional[List[Dict[str, Any]]]:
        params = {
            "account": account_identifier,
            "username": user,
            "password": password,
            "role": role,
        }

        response = await self._req(f"/v0/connectors/snowflake/warehouses?{urlencode(params)}", method="POST", data="")
        return response["warehouses"]

    async def get_trust_policy(self, service: str) -> Dict[str, Any]:
        return await self._req(f"/v0/integrations/{service}/policies/trust-policy")

    async def get_access_write_policy(self, service: str) -> Dict[str, Any]:
        return await self._req(f"/v0/integrations/{service}/policies/write-access-policy")

    async def get_access_read_policy(self, service: str) -> Dict[str, Any]:
        return await self._req(f"/v0/integrations/{service}/policies/read-access-policy")

    async def sql_get_format(self, sql: str, with_clickhouse_format: bool = False) -> str:
        try:
            if with_clickhouse_format:
                from tinybird.sql_toolset import format_sql

                return format_sql(sql)
            else:
                return await self._sql_get_format_remote(sql, with_clickhouse_format)
        except ModuleNotFoundError:
            return await self._sql_get_format_remote(sql, with_clickhouse_format)

    async def _sql_get_format_remote(self, sql: str, with_clickhouse_format: bool = False) -> str:
        params = {"with_clickhouse_format": "true" if with_clickhouse_format else "false"}
        result = await self._req(f"/v0/sql_format?q={quote(sql, safe='')}&{urlencode(params)}")
        return result["q"]

    @staticmethod
    def _sql_get_used_tables_local(sql: str, raising: bool = False, is_copy: Optional[bool] = False) -> List[str]:
        from tinybird.sql_toolset import sql_get_used_tables

        tables = sql_get_used_tables(
            sql, raising, table_functions=False, function_allow_list=COPY_ENABLED_TABLE_FUNCTIONS if is_copy else None
        )
        return [t[1] if t[0] == "" else f"{t[0]}.{t[1]}" for t in tables]

    async def _sql_get_used_tables_remote(
        self, sql: str, raising: bool = False, is_copy: Optional[bool] = False
    ) -> List[str]:
        params = {
            "q": sql,
            "raising": "true" if raising else "false",
            "table_functions": "false",
            "is_copy": "true" if is_copy else "false",
        }
        result = await self._req("/v0/sql_tables", data=params, method="POST")
        return [t[1] if t[0] == "" else f"{t[0]}.{t[1]}" for t in result["tables"]]

    # Get used tables from a query. Does not include table functions
    async def sql_get_used_tables(self, sql: str, raising: bool = False, is_copy: Optional[bool] = False) -> List[str]:
        try:
            return self._sql_get_used_tables_local(sql, raising, is_copy)
        except ModuleNotFoundError:
            return await self._sql_get_used_tables_remote(sql, raising, is_copy)

    @staticmethod
    def _replace_tables_local(q: str, replacements):
        from tinybird.sql_toolset import replace_tables, replacements_to_tuples

        return replace_tables(q, replacements_to_tuples(replacements))

    async def _replace_tables_remote(self, q: str, replacements):
        params = {
            "q": q,
            "replacements": json.dumps({k[1] if isinstance(k, tuple) else k: v for k, v in replacements.items()}),
        }
        result = await self._req("/v0/sql_replace", data=params, method="POST")
        return result["query"]

    async def replace_tables(self, q: str, replacements):
        try:
            return self._replace_tables_local(q, replacements)
        except ModuleNotFoundError:
            return await self._replace_tables_remote(q, replacements)

    async def get_connection(self, **kwargs):
        result = await self._req("/v0/connectors")
        return next((connector for connector in result["connectors"] if connector_equals(connector, kwargs)), None)

    async def regions(self):
        regions = await self._req("/v0/regions")
        return regions

    async def datasource_query_copy(self, datasource_name: str, sql_query: str):
        params = {"copy_to": datasource_name}
        return await self._req(f"/v0/sql_copy?{urlencode(params)}", data=sql_query, method="POST")

    async def workspace_commit_update(self, workspace_id: str, commit: str):
        return await self._req(
            f"/v0/workspaces/{workspace_id}/releases/?commit={commit}&force=true", method="POST", data=""
        )

    async def update_release_semver(self, workspace_id: str, semver: str, new_semver: str):
        return await self._req(f"/v0/workspaces/{workspace_id}/releases/{semver}?new_semver={new_semver}", method="PUT")

    async def release_new(self, workspace_id: str, semver: str, commit: str):
        params = {
            "commit": commit,
            "semver": semver,
        }
        return await self._req(f"/v0/workspaces/{workspace_id}/releases/?{urlencode(params)}", method="POST", data="")

    async def release_failed(self, workspace_id: str, semver: str):
        return await self._req(f"/v0/workspaces/{workspace_id}/releases/{semver}?status=failed", method="PUT")

    async def release_preview(self, workspace_id: str, semver: str):
        return await self._req(f"/v0/workspaces/{workspace_id}/releases/{semver}?status=preview", method="PUT")

    async def release_promote(self, workspace_id: str, semver: str):
        return await self._req(f"/v0/workspaces/{workspace_id}/releases/{semver}?status=live", method="PUT")

    async def release_rollback(self, workspace_id: str, semver: str):
        return await self._req(f"/v0/workspaces/{workspace_id}/releases/{semver}?status=rollback", method="PUT")

    async def release_rm(
        self,
        workspace_id: str,
        semver: str,
        confirmation: Optional[str] = None,
        dry_run: bool = False,
        force: bool = False,
    ):
        params = {"force": "true" if force else "false", "dry_run": "true" if dry_run else "false"}
        if confirmation:
            params["confirmation"] = confirmation
        return await self._req(f"/v0/workspaces/{workspace_id}/releases/{semver}?{urlencode(params)}", method="DELETE")

    async def release_oldest_rollback(
        self,
        workspace_id: str,
    ):
        return await self._req(f"/v0/workspaces/{workspace_id}/releases/oldest-rollback", method="GET")

    async def token_list(self, match: Optional[str] = None):
        tokens = await self.tokens()
        return [token for token in tokens if (not match or token["name"].find(match) != -1) and "token" in token]

    async def token_delete(self, token_id: str):
        return await self._req(f"/v0/tokens/{token_id}", method="DELETE")

    async def token_refresh(self, token_id: str):
        return await self._req(f"/v0/tokens/{token_id}/refresh", method="POST", data="")

    async def token_get(self, token_id: str):
        return await self._req(f"/v0/tokens/{token_id}", method="GET")

    async def token_scopes(self, token_id: str):
        token = await self.token_get(token_id)
        return token["scopes"]

    def _token_to_params(self, token: Dict[str, Any]) -> str:
        params = urlencode(
            {
                "name": token["name"],
                "description": token.get("description", ""),
                "origin": token.get("origin", "C"),
            }
        )

        if "scopes" in token:
            for scope_dict in token["scopes"]:
                scope_types = scope_dict["name"].split(",")
                for scope_type in scope_types:
                    scope = scope_type.strip()
                    if "resource" in scope_dict:
                        resource = scope_dict["resource"]
                        scope += f":{resource}"
                    if "filter" in scope_dict:
                        scope += f":{scope_dict['filter']}"
                    params += f"&scope={scope}"
        return params
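
Note: _token_to_params flattens a token dict into the query-string form the Tokens API expects: name, description, and origin are urlencoded, then each scope becomes an extra scope= parameter of the form TYPE[:resource[:filter]], appended without further encoding. For a hypothetical token dict:

# Hypothetical token dict and the query string it produces.
token = {
    "name": "read_events",
    "scopes": [{"name": "DATASOURCES:READ", "resource": "events"}],
}
# _token_to_params(token) ->
#   "name=read_events&description=&origin=C&scope=DATASOURCES:READ:events"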

    async def token_create(self, token: Dict[str, Any]):
        params = self._token_to_params(token)
        return await self._req(f"/v0/tokens?{params}", method="POST", data="")

    async def create_jwt_token(self, name: str, expiration_time: int, scopes: List[Dict[str, Any]]):
        url_params = {"name": name, "expiration_time": expiration_time}
        body = json.dumps({"scopes": scopes})
        return await self._req(f"/v0/tokens?{urlencode(url_params)}", method="POST", data=body)

    async def token_update(self, token: Dict[str, Any]):
        name = token["name"]
        params = self._token_to_params(token)
        return await self._req(f"/v0/tokens/{name}?{params}", method="PUT", data="")

    async def token_file(self, token_id: str):
        return await self._req(f"/v0/tokens/{token_id}.token")

    async def check_auth_login(self) -> Dict[str, Any]:
        return await self._req("/v0/auth")

    async def get_all_tags(self) -> Dict[str, Any]:
        return await self._req("/v0/tags")

    async def create_tag_with_resource(self, name: str, resource_id: str, resource_name: str, resource_type: str):
        return await self._req(
            "/v0/tags",
            method="POST",
            headers={"Content-Type": "application/json"},
            data=json.dumps(
                {
                    "name": name,
                    "resources": [{"id": resource_id, "name": resource_name, "type": resource_type}],
                }
            ),
        )

    async def create_tag(self, name: str):
        return await self._req(
            "/v0/tags",
            method="POST",
            headers={"Content-Type": "application/json"},
            data=json.dumps({"name": name}),
        )

    async def update_tag(self, name: str, resources: List[Dict[str, Any]]):
        await self._req(
            f"/v0/tags/{name}",
            method="PUT",
            headers={"Content-Type": "application/json"},
            data=json.dumps(
                {
                    "resources": resources,
                }
            ),
        )

    async def delete_tag(self, name: str):
        await self._req(f"/v0/tags/{name}", method="DELETE")