tinybird-cli 5.22.3.dev0__tar.gz → 6.0.2.dev0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/PKG-INFO +13 -20
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/__cli__.py +2 -2
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/ch_utils/engine.py +2 -77
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/client.py +3 -141
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/config.py +0 -5
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/datafile_common.py +18 -38
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/datatypes.py +13 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/feedback_manager.py +1 -41
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/sql.py +6 -5
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/sql_template.py +527 -36
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/sql_toolset.py +6 -32
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/auth.py +1 -11
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/cli.py +1 -83
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/common.py +27 -153
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/config.py +1 -3
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/connection.py +1 -225
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/datasource.py +11 -95
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tornado_template.py +8 -3
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird_cli.egg-info/PKG-INFO +13 -20
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird_cli.egg-info/SOURCES.txt +0 -1
- tinybird_cli-6.0.2.dev0/tinybird_cli.egg-info/requires.txt +19 -0
- tinybird_cli-5.22.3.dev0/tinybird/connectors.py +0 -422
- tinybird_cli-5.22.3.dev0/tinybird_cli.egg-info/requires.txt +0 -38
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/setup.cfg +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/ch_utils/constants.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/check_pypi.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/context.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/git_settings.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/sql_template_fmt.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/syncasync.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/branch.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/cicd.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/exceptions.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/fmt.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/job.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/pipe.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/regions.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/tag.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/telemetry.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/test.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/tinyunit/tinyunit.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/tinyunit/tinyunit_lib.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/token.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/workspace.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird/tb_cli_modules/workspace_members.py +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird_cli.egg-info/dependency_links.txt +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird_cli.egg-info/entry_points.txt +0 -0
- {tinybird_cli-5.22.3.dev0 → tinybird_cli-6.0.2.dev0}/tinybird_cli.egg-info/top_level.txt +0 -0
@@ -1,11 +1,11 @@
 Metadata-Version: 2.2
 Name: tinybird_cli
-Version: 5.22.3.dev0
+Version: 6.0.2.dev0
 Summary: Tinybird Command Line Tool
 Home-page: https://www.tinybird.co/docs/cli
 Author: Tinybird
 Author-email: support@tinybird.co
-Requires-Python: >=3.
+Requires-Python: >=3.10, <3.14
 Description-Content-Type: text/x-rst
 Requires-Dist: aiofiles==24.1.0
 Requires-Dist: clickhouse-toolset==0.34.dev0
@@ -26,29 +26,11 @@ Requires-Dist: tornado~=6.0.0
 Requires-Dist: urllib3<2,>=1.26.14
 Requires-Dist: wheel
 Requires-Dist: packaging<24,>=23.1
-Provides-Extra: bigquery
-Requires-Dist: gsutil==4.58; extra == "bigquery"
-Requires-Dist: google-api-python-client==2.0.2; extra == "bigquery"
-Requires-Dist: google-auth==1.27.1; extra == "bigquery"
-Requires-Dist: google-auth-httplib2==0.1.0; extra == "bigquery"
-Requires-Dist: google-cloud-storage==2.4.0; extra == "bigquery"
-Requires-Dist: google-cloud-bigquery==2.11.0; extra == "bigquery"
-Provides-Extra: snowflake
-Requires-Dist: snowflake-connector-python~=3.12.3; extra == "snowflake"
-Requires-Dist: gsutil==4.58; extra == "snowflake"
-Requires-Dist: google-api-python-client==2.0.2; extra == "snowflake"
-Requires-Dist: google-auth==1.27.1; extra == "snowflake"
-Requires-Dist: google-auth-httplib2==0.1.0; extra == "snowflake"
-Requires-Dist: google-cloud-storage==2.4.0; extra == "snowflake"
-Requires-Dist: oauth2client==3.0.0; extra == "snowflake"
-Requires-Dist: chardet<4,>=3.0.2; extra == "snowflake"
-Requires-Dist: pyOpenSSL<20.0.0,>=16.2.0; extra == "snowflake"
 Dynamic: author
 Dynamic: author-email
 Dynamic: description
 Dynamic: description-content-type
 Dynamic: home-page
-Dynamic: provides-extra
 Dynamic: requires-dist
 Dynamic: requires-python
 Dynamic: summary
@@ -61,6 +43,17 @@ The Tinybird command-line tool allows you to use all the Tinybird functionality
 Changelog
 ----------
 
+6.0.1
+***********
+
+- `Fixed` false circular dependency detection when a shared datasource has the same name as a local pipe
+
+6.0.0
+***********
+
+- `Removed` support for Python 3.9. The minimum supported Python version is now 3.10.
+- `Removed` BigQuery and Snowflake CDK connector commands from `tb connection create`
+
 5.22.2
 ***********
 
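Given the new `Requires-Python: >=3.10, <3.14` bound above, a consuming script can guard against older interpreters before importing the CLI. A minimal sketch; the guard itself is an assumption, not part of the package:

import sys

# tinybird-cli 6.x declares Requires-Python >=3.10, <3.14 (see the PKG-INFO hunk above)
if sys.version_info < (3, 10):
    raise RuntimeError("tinybird-cli 6.x requires Python 3.10 or newer")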
@@ -4,5 +4,5 @@ __description__ = 'Tinybird Command Line Tool'
 __url__ = 'https://www.tinybird.co/docs/cli'
 __author__ = 'Tinybird'
 __author_email__ = 'support@tinybird.co'
-__version__ = '5.22.3.dev0'
-__revision__ = '
+__version__ = '6.0.2.dev0'
+__revision__ = '8bacae6'
@@ -14,7 +14,6 @@ from ..sql import (
 )
 
 DEFAULT_EMPTY_PARAMETERS = ["ttl", "partition_key", "sorting_key"]
-DEFAULT_JOIN_EMPTY_PARAMETERS = ["join_strictness", "join_type", "key_columns"]
 
 # Currently we only support the simplest TTLs
 # f(X) + toIntervalZ(N)
@@ -62,25 +61,6 @@ class TableDetails:
     >>> ed.to_datafile()
     'ENGINE "MergeTree"\\nENGINE_PARTITION_KEY "toYear(timestamp)"\\nENGINE_SORTING_KEY "timestamp, cityHash64(location)"\\nENGINE_SAMPLING_KEY "cityHash64(location)"\\nENGINE_SETTINGS "index_granularity = 32, index_granularity_bytes = 2048"\\nENGINE_TTL "toDate(timestamp) + INTERVAL 1 DAY"'
 
-    >>> ed = TableDetails({"engine_full": "Join(ANY, LEFT, id)", "engine": "Join", "partition_key": "", "sorting_key": "", "primary_key": "", "sampling_key": ""})
-    >>> ed.engine_full
-    'Join(ANY, LEFT, id)'
-    >>> ed.engine
-    'Join'
-    >>> ed.to_json()
-    {'engine_full': 'Join(ANY, LEFT, id)', 'engine': 'Join', 'join_strictness': 'ANY', 'join_type': 'LEFT', 'key_columns': 'id'}
-    >>> ed.to_datafile()
-    'ENGINE "Join"\\nENGINE_JOIN_STRICTNESS "ANY"\\nENGINE_JOIN_TYPE "LEFT"\\nENGINE_KEY_COLUMNS "id"'
-
-    >>> ed = TableDetails({"database": "d_01", "name": "t_01", "engine": "Join", "join_strictness": "ANY", "join_type": "LEFT", "key_columns": "id"})
-    >>> ed.engine_full == None
-    True
-    >>> ed.engine
-    'Join'
-    >>> ed.to_json()
-    {'engine_full': None, 'engine': 'Join', 'join_strictness': 'ANY', 'join_type': 'LEFT', 'key_columns': 'id'}
-    >>> ed.to_datafile()
-    'ENGINE "Join"\\nENGINE_JOIN_STRICTNESS "ANY"\\nENGINE_JOIN_TYPE "LEFT"\\nENGINE_KEY_COLUMNS "id"'
     >>> ed = TableDetails({ "engine_full": "MergeTree() PARTITION BY toYear(timestamp) ORDER BY (timestamp, cityHash64(location)) SAMPLE BY cityHash64(location) SETTINGS index_granularity = 8192, ttl_only_drop_parts = 1, merge_with_ttl_timeout = 1800 TTL toDate(timestamp) + INTERVAL 1 DAY"})
     >>> ed.engine_full
     'MergeTree() PARTITION BY toYear(timestamp) ORDER BY (timestamp, cityHash64(location)) SAMPLE BY cityHash64(location) SETTINGS index_granularity = 8192, ttl_only_drop_parts = 1, merge_with_ttl_timeout = 1800 TTL toDate(timestamp) + INTERVAL 1 DAY'
@@ -222,21 +202,6 @@ class TableDetails:
         _sign = self.details.get("sign", None)
         return _sign
 
-    @property
-    def join_strictness(self):
-        _join_strictness = self.details.get("join_strictness", None)
-        return _join_strictness
-
-    @property
-    def join_type(self):
-        _join_type = self.details.get("join_type", None)
-        return _join_type
-
-    @property
-    def key_columns(self):
-        _key_columns = self.details.get("key_columns", None)
-        return _key_columns
-
     @property
     def statistics(self) -> Dict[str, Any]:
         return {
@@ -268,12 +233,6 @@ class TableDetails:
             d["sampling_key"] = self.sampling_key
         if self.settings:
             d["settings"] = self.settings
-        if self.join_strictness:
-            d["join_strictness"] = self.join_strictness
-        if self.join_type:
-            d["join_type"] = self.join_type
-        if self.key_columns:
-            d["key_columns"] = self.key_columns
         if self.ver:
             d["ver"] = self.ver
         if self.is_deleted:
@@ -292,10 +251,7 @@ class TableDetails:
         d = {**d, **engine_params}
 
         if include_empty_details:
-
-                d = set_empty_details(d, DEFAULT_JOIN_EMPTY_PARAMETERS)
-            else:
-                d = set_empty_details(d, DEFAULT_EMPTY_PARAMETERS)
+            d = set_empty_details(d, DEFAULT_EMPTY_PARAMETERS)
 
         if exclude:
             for attr in exclude:
@@ -480,20 +436,6 @@ ENABLED_ENGINES = [
         ],
         MERGETREE_OPTIONS,
     ),
-    # Join(join_strictness, join_type, k1[, k2, ...])
-    engine_config(
-        "Join",
-        [
-            # https://github.com/ClickHouse/ClickHouse/blob/fa8e4e4735b932f08b6beffcb2d069b72de34401/src/Storages/StorageJoin.cpp
-            EngineParam(
-                name="join_strictness", required=True, is_valid=case_insensitive_check(["ANY", "ALL", "SEMI", "ANTI"])
-            ),
-            EngineParam(
-                name="join_type", required=True, is_valid=case_insensitive_check(["LEFT", "INNER", "RIGHT", "FULL"])
-            ),
-            EngineParam(name="key_columns", required=True, is_valid=columns_are_valid),
-        ],
-    ),
     # Null()
     engine_config("Null"),
 ]
@@ -599,7 +541,7 @@ def engine_full_from_dict(
     >>> engine_full_from_dict('wadus', {}, schema=schema)
     Traceback (most recent call last):
     ...
-    ValueError: Engine wadus is not supported, supported engines include: MergeTree, ReplacingMergeTree, SummingMergeTree, AggregatingMergeTree, CollapsingMergeTree, VersionedCollapsingMergeTree, Join, Null
+    ValueError: Engine wadus is not supported, supported engines include: MergeTree, ReplacingMergeTree, SummingMergeTree, AggregatingMergeTree, CollapsingMergeTree, VersionedCollapsingMergeTree, Null
     >>> schema = ''
     >>> engine_full_from_dict('null', {}, schema=schema)
     'Null()'
@@ -607,19 +549,6 @@ def engine_full_from_dict(
     >>> engine_full_from_dict('null', {}, columns=[])
     'Null()'
 
-    >>> schema = 'cid Int32'
-    >>> engine_full_from_dict('Join', {'join_strictness': 'ANY', 'join_type': 'LEFT', 'key_columns': 'cid'}, schema=schema)
-    'Join(ANY, LEFT, cid)'
-    >>> engine_full_from_dict('Join', {'join_strictness': 'ANY', 'join_type': 'LEFT', 'key_columns': 'cid'}, columns=[{'name': 'cid', 'type': 'Int32', 'codec': None, 'default_value': None, 'nullable': False, 'normalized_name': 'cid'}])
-    'Join(ANY, LEFT, cid)'
-    >>> schema = 'cid1 Int32, cid2 Int8'
-    >>> engine_full_from_dict('Join', {'join_strictness': 'ANY', 'join_type': 'LEFT', 'key_columns': 'cid1, cid2'}, schema=schema)
-    'Join(ANY, LEFT, cid1, cid2)'
-    >>> engine_full_from_dict('Join', {'join_strictness': 'ANY', 'join_type': 'OUTER', 'key_columns': 'cid'}, schema=schema)
-    Traceback (most recent call last):
-    ...
-    ValueError: Invalid value 'OUTER' for parameter 'engine_join_type', reason: valid values are LEFT, INNER, RIGHT, FULL
-
     >>> schema = ''
     >>> engine_full_from_dict('MergeTree', {}, schema=schema)
     'MergeTree() ORDER BY (tuple())'
@@ -715,10 +644,6 @@ def engine_params_from_engine_full(engine_full: str) -> Dict[str, Any]:
     """
     >>> engine_params_from_engine_full("Null()")
     {}
-    >>> engine_params_from_engine_full("Join(ANY, LEFT, id)")
-    {'join_strictness': 'ANY', 'join_type': 'LEFT', 'key_columns': 'id'}
-    >>> engine_params_from_engine_full("Join(ANY, LEFT, k1, k2)")
-    {'join_strictness': 'ANY', 'join_type': 'LEFT', 'key_columns': 'k1, k2'}
     >>> engine_params_from_engine_full("AggregatingMergeTree('/clickhouse/tables/{layer}-{shard}/d_f837aa.sales_by_country_rt__v0_staging_t_00c3091e7530472caebda05e97288a1d', '{replica}') PARTITION BY toYYYYMM(date) ORDER BY (purchase_location, cod_device, date) SETTINGS index_granularity = 8192")
     {}
     >>> engine_params_from_engine_full("ReplicatedSummingMergeTree('/clickhouse/tables/{layer}-{shard}/d_abcf3e.t_69f9da31f4524995b8911e1b24c80ab4', '{replica}') PARTITION BY toYYYYMM(date) ORDER BY (date, purchase_location, sku_rank_lc) SETTINGS index_granularity = 8192")
@@ -62,9 +62,7 @@ class JobException(Exception):
 def connector_equals(connector, datafile_params):
     if not connector:
         return False
-    if connector["name"] == datafile_params["kafka_connection_name"]:
-        return True
-    return False
+    return connector["name"] == datafile_params["kafka_connection_name"]
 
 
 def parse_error_response(response: Response) -> str:
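`connector_equals` now returns the name comparison directly instead of branching to `return True` / `return False`. A self-contained sketch with illustrative inputs; the dict shapes are assumptions for the example, not an API contract:

from typing import Any, Dict, Optional


def connector_equals(connector: Optional[Dict[str, Any]], datafile_params: Dict[str, Any]) -> bool:
    # A connector matches only when its name equals the datafile's Kafka connection name.
    if not connector:
        return False
    return connector["name"] == datafile_params["kafka_connection_name"]


assert connector_equals({"name": "kafka_prod"}, {"kafka_connection_name": "kafka_prod"})
assert not connector_equals({"name": "kafka_dev"}, {"kafka_connection_name": "kafka_prod"})
assert not connector_equals(None, {"kafka_connection_name": "kafka_prod"})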
@@ -316,15 +314,9 @@ class TinyB:
         response = await self._req(f"/v0/connectors?{urlencode(params)}")
         return response["connectors"]
 
-    async def connections(self, connector: Optional[str] = None
+    async def connections(self, connector: Optional[str] = None):
         response = await self._req("/v0/connectors")
         connectors = response["connectors"]
-        bigquery_connection = None
-        if not skip_bigquery:
-            bigquery_connection = (
-                await self.bigquery_connection() if connector == "bigquery" or connector is None else None
-            )
-        connectors = [*connectors, bigquery_connection] if bigquery_connection else connectors
         if connector:
             return [
                 {
@@ -348,22 +340,6 @@ class TinyB:
             for c in connectors
         ]
 
-    async def bigquery_connection(self):
-        bigquery_resources = await self.list_gcp_resources()
-        if len(bigquery_resources) == 0:
-            return None
-
-        gcp_account_details: Dict[str, Any] = await self.get_gcp_service_account_details()
-        datasources = await self.datasources()
-        bigquery_datasources = [ds["name"] for ds in datasources if ds["type"] == "bigquery"]
-        return {
-            "id": gcp_account_details["account"].split("@")[0],
-            "service": "bigquery",
-            "name": "bigquery",
-            "linkers": bigquery_datasources,
-            "settings": gcp_account_details,
-        }
-
     async def get_datasource(self, ds_name: str, used_by: bool = False) -> Dict[str, Any]:
         params = {
             "attrs": "used_by" if used_by else "",
@@ -1024,118 +1000,23 @@ class TinyB:
         )
         return [x["topic"] for x in resp["preview"]]
 
-    async def get_gcp_service_account_details(self) -> Dict[str, Any]:
-        return await self._req("/v0/datasources-bigquery-credentials")
-
-    async def list_connectors(self, service: Optional[str] = None) -> List[Dict[str, Any]]:
-        try:
-            params: str = f"?service={service}" if service else ""
-            result = await self._req(f"/v0/connections/{params}")
-            if not result:
-                return []
-
-            return result.get("connectors", [])
-        except Exception:
-            return []
-
     async def get_connector(
         self,
         name_or_id: str,
         service: Optional[str] = None,
         key: Optional[str] = "name",
-        skip_bigquery: Optional[bool] = False,
     ) -> Optional[Dict[str, Any]]:
         return next(
-            (c for c in await self.connections(connector=service
+            (c for c in await self.connections(connector=service) if c[key] == name_or_id),
             None,
         )
 
     async def get_connector_by_id(self, connector_id: Optional[str] = None):
         return await self._req(f"/v0/connectors/{connector_id}")
 
-    async def get_snowflake_integration_query(
-        self, role: str, stage: Optional[str], integration: Optional[str]
-    ) -> Optional[Dict[str, Any]]:
-        try:
-            params = {
-                "role": role,
-            }
-            if stage:
-                params["stage"] = stage
-            if integration:
-                params["integration"] = integration
-
-            return await self._req(f"/v0/connectors/snowflake/instructions?{urlencode(params)}")
-        except Exception:
-            return None
-
-    async def list_gcp_resources(self) -> List[Dict[str, Any]]:
-        try:
-            resources = await self._req("/v0/connections/bigquery")
-            if not resources:
-                return []
-
-            return resources.get("items", [])
-        except Exception:
-            return []
-
-    async def check_gcp_read_permissions(self) -> bool:
-        """Returns `True` if our service account (see `TinyB::get_gcp_service_account_details()`)
-        has the proper permissions in GCP.
-
-        Here we assume that we have permissions if we can list resources but currently this
-        logic is wrong under some circumstances.
-
-        See https://gitlab.com/tinybird/analytics/-/issues/6485.
-        """
-        try:
-            items = await self.list_gcp_resources()
-            if not items:
-                return False
-            return len(items) > 0
-        except Exception:
-            return False
-
     async def connector_delete(self, connection_id):
         return await self._req(f"/v0/connectors/{connection_id}", method="DELETE")
 
-    async def connection_create_snowflake(
-        self,
-        account_identifier: str,
-        user: str,
-        password: str,
-        warehouse: str,
-        role: str,
-        connection_name: str,
-        integration: Optional[str],
-        stage: Optional[str],
-    ) -> Dict[str, Any]:
-        params = {
-            "service": "snowflake",
-            "name": connection_name,
-            "account": account_identifier,
-            "username": user,
-            "password": password,
-            "role": role,
-            "warehouse": warehouse,
-        }
-
-        if integration:
-            params["integration"] = integration
-        if stage:
-            params["stage"] = stage
-
-        return await self._req(f"/v0/connectors?{urlencode(params)}", method="POST", data="")
-
-    async def validate_snowflake_connection(self, account_identifier: str, user: str, password: str) -> bool:
-        try:
-            roles = await self.get_snowflake_roles(account_identifier, user, password)
-            if not roles:
-                return False
-            return len(roles) > 0
-        except Exception:
-            return False
-
     async def validate_preview_connection(self, service: str, params: Dict[str, Any]) -> bool:
         params = {"service": service, "dry_run": "true", **params}
         bucket_list = None
@@ -1154,25 +1035,6 @@ class TinyB:
     async def connection_create(self, params: Dict[str, Any]) -> Dict[str, Any]:
         return await self._req(f"/v0/connectors?{urlencode(params)}", method="POST", data="")
 
-    async def get_snowflake_roles(self, account_identifier: str, user: str, password: str) -> Optional[List[str]]:
-        params = {"account": account_identifier, "username": user, "password": password}
-
-        response = await self._req(f"/v0/connectors/snowflake/roles?{urlencode(params)}", method="POST", data="")
-        return response["roles"]
-
-    async def get_snowflake_warehouses(
-        self, account_identifier: str, user: str, password: str, role: str
-    ) -> Optional[List[Dict[str, Any]]]:
-        params = {
-            "account": account_identifier,
-            "username": user,
-            "password": password,
-            "role": role,
-        }
-
-        response = await self._req(f"/v0/connectors/snowflake/warehouses?{urlencode(params)}", method="POST", data="")
-        return response["warehouses"]
-
     async def get_trust_policy(self, service: str) -> Dict[str, Any]:
         return await self._req(f"/v0/integrations/{service}/policies/trust-policy")
 
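With the Snowflake role/warehouse helpers removed, `connection_create` (kept above) is the remaining generic path: it URL-encodes whatever params it is given and POSTs them to `/v0/connectors`. A sketch of a call site, with illustrative parameter values; which keys a given service expects is not defined here:

from typing import Any, Dict


async def create_connection(client: "TinyB", service: str, name: str, **settings: Any) -> Dict[str, Any]:
    # Sketch only: the client URL-encodes these params into POST /v0/connectors;
    # validation of service-specific keys happens server-side.
    params: Dict[str, Any] = {"service": service, "name": name, **settings}
    return await client.connection_create(params)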
@@ -19,7 +19,6 @@ DEFAULT_LOCALHOST = "http://localhost:8001"
 CURRENT_VERSION = f"{__cli__.__version__}"
 VERSION = f"{__cli__.__version__} (rev {__revision__})"
 DEFAULT_UI_HOST = "https://app.tinybird.co"
-SUPPORTED_CONNECTORS = ["bigquery", "snowflake"]
 PROJECT_PATHS = ["datasources", "datasources/fixtures", "endpoints", "pipes", "tests", "scripts", "deploy"]
 DEPRECATED_PROJECT_PATHS = ["endpoints"]
 MIN_WORKSPACE_ID_LENGTH = 36
@@ -138,7 +137,3 @@ class FeatureFlags:
     @classmethod
     def is_localhost(cls) -> bool:
         return "SET_LOCALHOST" in environ
-
-    @classmethod
-    def enable_snowflake_connector_command(cls) -> bool:
-        return "ENABLE_SNOWFLAKE_CONNECTOR_COMMAND" in environ
@@ -81,6 +81,10 @@ from .tornado_template import UnClosedIfError
 os.environ["GIT_PYTHON_REFRESH"] = "quiet"
 from git import HEAD, Diff, GitCommandError, InvalidGitRepositoryError, Repo
 
+# Pre-compiled regex patterns
+_PATTERN_VERSION_NUMBER = re.compile(r"[0-9]+$")
+_PATTERN_VERSIONED_RESOURCE = re.compile(r"([^\s\.]*__v\d+)")
+
 INTERNAL_TABLES: Tuple[str, ...] = (
     "datasources_ops_log",
     "pipe_stats",
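Hoisting the two `re.compile` calls to import time lets hot paths such as `get_name_version` and `folder_pull` (both updated below) reuse compiled pattern objects instead of going back through the `re` module on every call. A quick sketch of what each pattern accepts, with illustrative inputs:

import re

_PATTERN_VERSION_NUMBER = re.compile(r"[0-9]+$")
_PATTERN_VERSIONED_RESOURCE = re.compile(r"([^\s\.]*__v\d+)")

assert _PATTERN_VERSION_NUMBER.match("12")      # digits only
assert not _PATTERN_VERSION_NUMBER.match("1a")  # trailing non-digit rejected
assert _PATTERN_VERSIONED_RESOURCE.findall("FROM sales__v2 JOIN events__v10") == [
    "sales__v2",
    "events__v10",
]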
@@ -1412,15 +1416,6 @@ async def process_file(
 
     service: Optional[str] = node.get("import_service", None)
 
-    if service and service.lower() == "bigquery":
-        if not await tb_client.check_gcp_read_permissions():
-            raise click.ClickException(FeedbackManager.error_unknown_bq_connection(datasource=datasource["name"]))
-
-        # Bigquery doesn't have a datalink, so we can stop here
-        return params
-
-    # Rest of connectors
-
     connector_id: Optional[str] = node.get("import_connector", None)
     connector_name: Optional[str] = node.get("import_connection_name", None)
     if not connector_name and not connector_id:
@@ -1525,7 +1520,7 @@ async def process_file(
     params.update(get_engine_params(node))
 
     if "import_service" in node or "import_connection_name" in node:
-        VALID_SERVICES: Tuple[str, ...] = ("
+        VALID_SERVICES: Tuple[str, ...] = ("s3", "s3_iamrole", "gcs", "dynamodb")
 
         import_params = await get_import_params(params, node)
 
@@ -1823,7 +1818,10 @@ def find_file_by_name(
             wk_path, name.replace(f"{wk_name}.", ""), verbose, is_raw, resource=resource
         )
         if file:
-            return file, _resource
+            # Preserve workspace prefix in the returned filename to avoid conflicts
+            # with local files of the same name (e.g., vendored datasource "workspace.ds"
+            # vs local pipe with same base name)
+            return f"{wk_name}.{file}", _resource
 
     if not is_raw:
         f, raw = find_file_by_name(
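This hunk is the change behind the 6.0.1 entry in the changelog above: by returning the filename with its `{wk_name}.` prefix, a datasource shared from another workspace keeps a key distinct from a local pipe with the same base name, so dependency resolution no longer reports a false circular dependency. A toy sketch of the naming scheme, with illustrative names:

def qualified_name(wk_name: str, file: str) -> str:
    # Shared (vendored) resources stay namespaced by their workspace.
    return f"{wk_name}.{file}"


# A shared datasource and a local pipe that share the base name "events"
# now resolve to different keys:
assert qualified_name("analytics", "events.datasource") == "analytics.events.datasource"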
@@ -3259,12 +3257,6 @@ async def new_ds(
     ):
         raise click.ClickException(FeedbackManager.error_dynamodb_engine_not_supported(engine=engine_param))
 
-    if engine_param.lower() == "join":
-        deprecation_notice = FeedbackManager.warning_deprecated(
-            warning="Data Sources with Join engine are deprecated and will be removed in the next major release of tinybird-cli. Use MergeTree instead."
-        )
-        click.echo(deprecation_notice)
-
     if not datasource_exists or fork_downstream or fork:
         params = ds["params"]
         params["branch_mode"] = "fork" if fork_downstream or fork else "None"
@@ -3434,19 +3426,15 @@ async def new_ds(
 
     ds_params = ds["params"]
     service = ds_params.get("service")
-    DATASOURCE_VALID_SERVICES_TO_UPDATE = [
+    DATASOURCE_VALID_SERVICES_TO_UPDATE: List[str] = []
     if datasource_exists and service and service in [*DATASOURCE_VALID_SERVICES_TO_UPDATE, *PREVIEW_CONNECTOR_SERVICES]:
         connector_required_params = {
-            "bigquery": ["service", "cron"],
-            "snowflake": ["connector", "service", "cron", "external_data_source"],
             "s3": ["connector", "service", "cron", "bucket_uri"],
             "s3_iamrole": ["connector", "service", "cron", "bucket_uri"],
            "gcs": ["connector", "service", "cron", "bucket_uri"],
         }.get(service, [])
 
-        connector_at_least_one_required_param = {
-            "bigquery": ["external_data_source", "query"],
-        }.get(service, [])
+        connector_at_least_one_required_param: List[str] = []
 
         if connector_at_least_one_required_param and not any(
             key in ds_params for key in connector_at_least_one_required_param
@@ -3705,7 +3693,7 @@ def get_name_version(ds: str) -> Dict[str, Any]:
         return {"name": tk[0], "version": None}
     elif len(tk) == 2:
         if len(tk[1]):
-            if tk[1][0] == "v" and
+            if tk[1][0] == "v" and _PATTERN_VERSION_NUMBER.match(tk[1][1:]):
                 return {"name": tk[0], "version": int(tk[1][1:])}
             else:
                 return {"name": tk[0] + "__" + tk[1], "version": None}
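For reference, the branch above only treats a trailing `v<digits>` token as a version; anything else folds back into the name. A reduced sketch of that branch; the `rsplit` framing is an assumption about the surrounding code, which also handles other token counts:

import re
from typing import Any, Dict

_PATTERN_VERSION_NUMBER = re.compile(r"[0-9]+$")


def get_name_version(ds: str) -> Dict[str, Any]:
    # Reduced sketch of the two-token branch shown in the hunk above.
    tk = ds.rsplit("__", 1)
    if len(tk) == 2 and tk[1] and tk[1][0] == "v" and _PATTERN_VERSION_NUMBER.match(tk[1][1:]):
        return {"name": tk[0], "version": int(tk[1][1:])}
    return {"name": ds, "version": None}


assert get_name_version("sales__v2") == {"name": "sales", "version": 2}
assert get_name_version("sales__vNext") == {"name": "sales__vNext", "version": None}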
@@ -4363,9 +4351,7 @@ async def folder_push(
         )
         if resource_full_name not in existing_resources:
             return True
-        if force or run_tests:
-            return True
-        return False
+        return force or run_tests
 
     async def push(
         name: str,
@@ -5327,7 +5313,7 @@ async def folder_pull(
 
         resource_to_write = f"VERSION {k['version']}\n" + resource_to_write
         if resource_to_write:
-            matches =
+            matches = _PATTERN_VERSIONED_RESOURCE.findall(resource_to_write)
             for match in set(matches):
                 m = match.split("__v")[0]
                 if m in resources or m in resource_names:
@@ -5549,9 +5535,7 @@ async def diff_files(
 
 
 def is_endpoint(resource: Optional[Dict[str, Any]]) -> bool:
-    if resource and len(resource.get("tokens", [])) != 0 and resource.get("resource") == "pipes":
-        return True
-    return False
+    return bool(resource and len(resource.get("tokens", [])) != 0 and resource.get("resource") == "pipes")
 
 
 def is_materialized(resource: Optional[Dict[str, Any]]) -> bool:
@@ -5612,9 +5596,7 @@ def get_target_materialized_data_source_name(resource: Optional[Dict[str, Any]])
 
 
 def is_datasource(resource: Optional[Dict[str, Any]]) -> bool:
-    if resource and resource.get("resource") == "datasources":
-        return True
-    return False
+    return bool(resource and resource.get("resource") == "datasources")
 
 
 async def create_release(
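Both predicates collapse the old `if ...: return True / return False` ladder into one expression. The `bool()` wrapper is what keeps the `-> bool` annotation honest: without it, `resource and ...` returns `None` when `resource` is `None`. A sketch:

from typing import Any, Dict, Optional


def is_datasource(resource: Optional[Dict[str, Any]]) -> bool:
    # bool() normalizes the short-circuit result: without it, a None resource
    # would make this return None rather than False.
    return bool(resource and resource.get("resource") == "datasources")


assert is_datasource({"resource": "datasources"})
assert not is_datasource({"resource": "pipes"})
assert not is_datasource(None)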
@@ -5665,15 +5647,13 @@ def update_connector_params(service: str, ds_params: Dict[str, Any], connector_r
     """
     Update connector parameters for a given service, ensuring required parameters exist.
 
-    :param service: The name of the service (e.g., '
+    :param service: The name of the service (e.g., 's3').
     :param ds_params: The data source parameters to be checked.
     :param connector_required_params: The list of required parameters for the connector.
     :return: None
     """
 
-    connector_at_least_one_required_param: List[str] = {
-        "bigquery": ["external_data_source", "query"],
-    }.get(service, [])
+    connector_at_least_one_required_param: List[str] = []
 
     # Handle the "at least one param" requirement
     if connector_at_least_one_required_param and not any(
@@ -35,6 +35,7 @@ date_pattern = re.compile(r"\d\d\d\d-\d\d-\d\d$")
 
 datetime64_type_pattern = re.compile(r"^DateTime64(\([1-9](, ?'.+')?\))?$")
 datetime_type_pattern = re.compile(r"^DateTime(\(('.+')?)?\)?$")
+date_type_pattern = re.compile(r"^Date(\(('.+')?)?\)?$")
 
 # List from https://github.com/tinybirdco/ClickHousePrivate/blob/153473d9c1c871974688a1d72dcff7a13fc2076c/src/DataTypes/Serializations/SerializationBool.cpp#L216
 bool_allowed_values = {
@@ -95,6 +96,18 @@ def is_type_datetime(type_to_check: str) -> bool:
     return datetime_type_pattern.match(type_to_check) is not None
 
 
+def is_type_date(type_to_check: str) -> bool:
+    """
+    >>> is_type_date('Date')
+    True
+    >>> is_type_date('Date()')
+    True
+    >>> is_type_date("date")
+    False
+    """
+    return date_type_pattern.match(type_to_check) is not None
+
+
 def string_test(x: str) -> bool:
     return True
 
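The new `date_type_pattern` mirrors the existing DateTime patterns: `Date` and `Date()` match, while lowercase `date` or distinct types such as `Date32` do not. A quick sketch exercising the regex; the `Date32` case is an extra illustration beyond the doctests above:

import re

date_type_pattern = re.compile(r"^Date(\(('.+')?)?\)?$")


def is_type_date(type_to_check: str) -> bool:
    return date_type_pattern.match(type_to_check) is not None


assert is_type_date("Date")
assert is_type_date("Date()")
assert not is_type_date("date")
assert not is_type_date("Date32")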