tinybird 0.0.1.dev140__py3-none-any.whl → 0.0.1.dev141__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of tinybird might be problematic; see the registry's advisory for more details.

tinybird/tb/__cli__.py CHANGED
@@ -4,5 +4,5 @@ __description__ = 'Tinybird Command Line Tool'
  __url__ = 'https://www.tinybird.co/docs/cli/introduction.html'
  __author__ = 'Tinybird'
  __author_email__ = 'support@tinybird.co'
- __version__ = '0.0.1.dev140'
- __revision__ = 'aa2a66a'
+ __version__ = '0.0.1.dev141'
+ __revision__ = '2f7ef0f'
@@ -197,8 +197,12 @@ REQUIRED_BLOB_STORAGE_PARAMS = {
  "import_schedule",
  "import_bucket_uri",
  }
-
  BLOB_STORAGE_PARAMS = REQUIRED_BLOB_STORAGE_PARAMS.union({"import_from_timestamp"})
+ VALID_BLOB_STORAGE_CRON_VALUES = {
+ "@once",
+ "@on-demand",
+ "@auto",
+ }


  class Datafile:
@@ -294,13 +298,17 @@ class Datafile:
  f"Some Kafka params have been provided, but the following required ones are missing: {missing}"
  )
  # Validate S3 params
- if any(param in node for param in BLOB_STORAGE_PARAMS) and not all(
- param in node for param in REQUIRED_BLOB_STORAGE_PARAMS
- ):
- missing = [param for param in REQUIRED_BLOB_STORAGE_PARAMS if param not in node]
- raise DatafileValidationError(
- f"Some connection params have been provided, but the following required ones are missing: {missing}"
- )
+ if any(param in node for param in BLOB_STORAGE_PARAMS):
+ if not all(param in node for param in REQUIRED_BLOB_STORAGE_PARAMS):
+ missing = [param for param in REQUIRED_BLOB_STORAGE_PARAMS if param not in node]
+ raise DatafileValidationError(
+ f"Some connection params have been provided, but the following required ones are missing: {missing}"
+ )
+ if node["import_schedule"] not in VALID_BLOB_STORAGE_CRON_VALUES:
+ raise DatafileValidationError(
+ f"Invalid import schedule '{node['import_schedule']}'. Only {sorted(VALID_BLOB_STORAGE_CRON_VALUES)} values are allowed"
+ )
+
  else:
  # We cannot validate a datafile whose kind is unknown
  pass
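
For reference, a minimal standalone sketch of the validation order this hunk introduces; the node dict, the error class, and the set contents below are simplified stand-ins, not the package's full definitions.

# A self-contained sketch (assumption: a plain dict stands in for the parsed node).
REQUIRED_BLOB_STORAGE_PARAMS = {
    "import_schedule",
    "import_bucket_uri",
    # the package's set may include additional required params not visible in this hunk
}
BLOB_STORAGE_PARAMS = REQUIRED_BLOB_STORAGE_PARAMS.union({"import_from_timestamp"})
VALID_BLOB_STORAGE_CRON_VALUES = {"@once", "@on-demand", "@auto"}


class DatafileValidationError(Exception):
    pass


def validate_blob_storage_params(node: dict) -> None:
    # Skip nodes that do not mention any blob-storage param at all.
    if not any(param in node for param in BLOB_STORAGE_PARAMS):
        return
    # First check: all required params must be present.
    missing = [param for param in REQUIRED_BLOB_STORAGE_PARAMS if param not in node]
    if missing:
        raise DatafileValidationError(
            f"Some connection params have been provided, but the following required ones are missing: {missing}"
        )
    # Second check (new in this hunk): the schedule must be one of the allowed keywords.
    if node["import_schedule"] not in VALID_BLOB_STORAGE_CRON_VALUES:
        raise DatafileValidationError(
            f"Invalid import schedule '{node['import_schedule']}'. "
            f"Only {sorted(VALID_BLOB_STORAGE_CRON_VALUES)} values are allowed"
        )


# Passes; replacing "@auto" with e.g. "0 * * * *" would raise DatafileValidationError.
validate_blob_storage_params({"import_schedule": "@auto", "import_bucket_uri": "s3://my-bucket/*.csv"})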
@@ -1325,11 +1333,52 @@ def parse(

  def kafka_key_avro_deserialization(*args: str, **kwargs: Any):
  raise DatafileSyntaxError(
- f'{kwargs["cmd"].upper()} has been deprecated. Use "KAFKA_KEY_FORMAT avro" instead',
+ f'{kwargs["cmd"].upper()} has been deprecated. Use "KAFKA_KEY_FORMAT avro" in the corresponding .datasource file instead',
+ lineno=kwargs["lineno"],
+ pos=1,
+ )
+
+ def kafka_target_partitions(*args: str, **kwargs: Any):
+ warnings.append(
+ DatafileParseWarning(
+ message=f"{kwargs['cmd'].upper()} has been deprecated and will be ignored.",
+ )
+ )
+
+ def import_service(*args: str, **kwargs: Any):
+ warnings.append(
+ DatafileParseWarning(
+ message=(
+ f"{kwargs['cmd'].upper()} has been deprecated and will be ignored. If you're using an S3 or GCS "
+ "connection, you don't need this setting anymore."
+ )
+ )
+ )
+
+ def import_strategy(*args: str, **kwargs: Any):
+ raise DatafileSyntaxError(
+ f"{kwargs['cmd'].upper()} has been deprecated. It is now fixed to 'append'",
+ lineno=kwargs["lineno"],
+ pos=1,
+ )
+
+ def kafka_store_binary_headers(*args: str, **kwargs: Any):
+ raise DatafileSyntaxError(
+ f"{kwargs['cmd'].upper()} has been deprecated. When KAFKA_STORE_HEADERS is True, __headers is always of type Map(String, String)",
  lineno=kwargs["lineno"],
  pos=1,
  )

+ def export_service(*args: str, **kwargs: Any):
+ warnings.append(
+ DatafileParseWarning(
+ message=(
+ f"{kwargs['cmd'].upper()} has been deprecated and will be ignored. If you're using an S3 or GCS "
+ "connection, you don't need this setting anymore."
+ )
+ )
+ )
+
  @deprecated(severity="error")
  def sources(x: str, **kwargs: Any) -> None:
  pass # Deprecated
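
The new handlers above fall into two patterns: soft deprecations append a parse warning and continue, while hard deprecations raise a syntax error at the offending directive. A simplified sketch of the distinction, using stand-in warning and error types rather than the package's own:

from typing import Any

# The package collects DatafileParseWarning objects; a plain list of strings is used
# here to keep the sketch self-contained.
warnings: list[str] = []


class DatafileSyntaxError(Exception):
    """Simplified stand-in for the package's error type."""

    def __init__(self, message: str, lineno: int, pos: int):
        super().__init__(f"{message} (line {lineno}, pos {pos})")


def kafka_target_partitions(*args: str, **kwargs: Any) -> None:
    # Soft deprecation: record a warning and keep parsing.
    warnings.append(f"{kwargs['cmd'].upper()} has been deprecated and will be ignored.")


def import_strategy(*args: str, **kwargs: Any) -> None:
    # Hard deprecation: stop parsing and point at the offending line.
    raise DatafileSyntaxError(
        f"{kwargs['cmd'].upper()} has been deprecated. It is now fixed to 'append'",
        lineno=kwargs["lineno"],
        pos=1,
    )


kafka_target_partitions("4", cmd="KAFKA_TARGET_PARTITIONS", lineno=10)  # appends a warning
# import_strategy("replace", cmd="IMPORT_STRATEGY", lineno=11)          # would raise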
@@ -1469,6 +1518,7 @@ def parse(
  def version(*args: str, **kwargs: Any) -> None:
  pass # whatever, it's deprecated

+ @not_supported_yet()
  def shared_with(*args: str, **kwargs: Any) -> None:
  for entries in args:
  # In case they specify multiple workspaces
@@ -1508,79 +1558,90 @@ def parse(
  else:
  doc.filtering_tags += filtering_tags

- cmds = {
- "source": sources,
- "schema": schema,
- "indexes": indexes,
- "engine": set_engine,
- "partition_key": assign_var("partition_key"),
- "sorting_key": assign_var("sorting_key"),
- "primary_key": assign_var("primary_key"),
- "sampling_key": assign_var("sampling_key"),
- "ttl": assign_var("ttl"),
- "settings": assign_var("settings"),
- "node": node,
- "scope": scope,
- "description": description,
- "type": assign_node_var("type"),
- "datasource": assign_node_var("datasource"),
- "tags": tags,
- "target_datasource": assign_node_var("target_datasource"),
- "copy_schedule": assign_node_var(CopyParameters.COPY_SCHEDULE),
- "copy_mode": assign_node_var("mode"),
- "mode": assign_node_var("mode"),
- "resource": assign_node_var("resource"),
- "filter": assign_node_var("filter"),
- "token": add_token,
- "include": include,
- "sql": sql("sql"),
- "version": version,
- # TODO(eclbg): We should decide on a single place to define the kafka params. Definitely not here.
- "kafka_connection_name": assign_var("kafka_connection_name"),
- "kafka_topic": assign_var("kafka_topic"),
- "kafka_group_id": assign_var("kafka_group_id"),
- "kafka_bootstrap_servers": assign_var("kafka_bootstrap_servers"),
- "kafka_key": assign_var("kafka_key"),
- "kafka_secret": assign_var("kafka_secret"),
- "kafka_schema_registry_url": assign_var("kafka_schema_registry_url"),
- "kafka_target_partitions": assign_var("kafka_target_partitions"),
- "kafka_auto_offset_reset": assign_var("kafka_auto_offset_reset"),
- "kafka_store_raw_value": assign_var("kafka_store_raw_value"),
- "kafka_store_headers": assign_var("kafka_store_headers"),
- "kafka_store_binary_headers": assign_var("kafka_store_binary_headers"),
- "kafka_key_format": assign_var("kafka_key_format"),
- "kafka_value_format": assign_var("kafka_value_format"),
- "kafka_key_avro_deserialization": kafka_key_avro_deserialization,
- "kafka_ssl_ca_pem": assign_var("kafka_ssl_ca_pem"),
- "kafka_security_protocol": assign_var("kafka_security_protocol"),
- "kafka_sasl_mechanism": assign_var("kafka_sasl_mechanism"),
- "import_service": assign_var("import_service"),
- "s3_region": assign_var("s3_region"),
- "s3_arn": assign_var("s3_arn"),
- "s3_access_key": assign_var("s3_access_key"),
- "s3_secret": assign_var("s3_secret"),
- "gcs_service_account_credentials_json": assign_var_json("gcs_service_account_credentials_json"),
- "import_connection_name": assign_var("import_connection_name"),
- "import_schedule": assign_var("import_schedule"),
- "import_strategy": assign_var("import_strategy"),
- "import_external_datasource": assign_var("import_external_datasource"),
- "import_bucket_uri": assign_var("import_bucket_uri"),
- "import_from_timestamp": assign_var("import_from_timestamp"),
- "import_query": assign_var("import_query"),
- "import_table_arn": assign_var("import_table_arn"),
- "import_export_bucket": assign_var("import_export_bucket"),
- "shared_with": shared_with,
- "export_service": assign_var("export_service"),
- "export_connection_name": assign_var("export_connection_name"),
- "export_schedule": assign_var("export_schedule"),
- "export_bucket_uri": assign_var("export_bucket_uri"),
- "export_file_template": assign_var("export_file_template"),
- "export_format": assign_var("export_format"),
- "export_strategy": assign_var("export_strategy"),
- "export_compression": assign_var("export_compression"),
- "export_write_strategy": assign_var("export_write_strategy"),
- "export_kafka_topic": assign_var("export_kafka_topic"),
- "forward_query": sql("forward_query"),
+ cmds_per_datafile_kind: dict[DatafileKind, dict[str, Callable]] = {
+ DatafileKind.datasource: {
+ "description": description,
+ "token": add_token,
+ "source": sources,
+ "schema": schema,
+ "indexes": indexes,
+ "engine": set_engine,
+ "partition_key": assign_var("partition_key"),
+ "sorting_key": assign_var("sorting_key"),
+ "primary_key": assign_var("primary_key"),
+ "sampling_key": assign_var("sampling_key"),
+ "ttl": assign_var("ttl"),
+ "tags": tags,
+ "include": include,
+ "version": version,
+ "kafka_connection_name": assign_var("kafka_connection_name"),
+ "kafka_topic": assign_var("kafka_topic"),
+ "kafka_group_id": assign_var("kafka_group_id"),
+ "kafka_auto_offset_reset": assign_var("kafka_auto_offset_reset"),
+ "kafka_store_raw_value": assign_var("kafka_store_raw_value"),
+ "kafka_store_headers": assign_var("kafka_store_headers"),
+ "kafka_store_binary_headers": kafka_store_binary_headers,
+ "kafka_key_format": assign_var("kafka_key_format"),
+ "kafka_value_format": assign_var("kafka_value_format"),
+ "kafka_target_partitions": kafka_target_partitions, # Deprecated
+ "import_connection_name": assign_var("import_connection_name"),
+ "import_schedule": assign_var("import_schedule"),
+ "import_strategy": import_strategy, # Deprecated, always append
+ "import_bucket_uri": assign_var("import_bucket_uri"),
+ "import_from_timestamp": assign_var("import_from_timestamp"),
+ "import_service": import_service, # Deprecated
+ "import_external_datasource": assign_var("import_external_datasource"), # Deprecated, BQ and SFK
+ "import_query": assign_var("import_query"), # Deprecated, BQ and SFK
+ "import_table_arn": assign_var("import_table_arn"), # Only for DynamoDB
+ "import_export_bucket": assign_var("import_export_bucket"), # For DynamoDB
+ "shared_with": shared_with, # Not supported yet
+ "export_service": export_service, # Deprecated
+ "forward_query": sql("forward_query"),
+ "export_connection_name": assign_var("export_connection_name"),
+ "export_schedule": assign_var("export_schedule"),
+ "export_bucket_uri": assign_var("export_bucket_uri"),
+ "export_file_template": assign_var("export_file_template"),
+ "export_format": assign_var("export_format"),
+ "export_strategy": assign_var("export_strategy"),
+ "export_compression": assign_var("export_compression"),
+ "export_write_strategy": assign_var("export_write_strategy"),
+ "export_kafka_topic": assign_var("export_kafka_topic"),
+ },
+ DatafileKind.pipe: {
+ "node": node,
+ "scope": scope,
+ "description": description,
+ "type": assign_node_var("type"),
+ "datasource": assign_node_var("datasource"),
+ "tags": tags,
+ "target_datasource": assign_node_var("target_datasource"),
+ "copy_schedule": assign_node_var(CopyParameters.COPY_SCHEDULE),
+ "copy_mode": assign_node_var("mode"),
+ "mode": assign_node_var("mode"),
+ "filter": assign_node_var("filter"),
+ "token": add_token,
+ "include": include,
+ "sql": sql("sql"),
+ "version": version,
+ },
+ DatafileKind.connection: {
+ "description": description,
+ "type": assign_node_var("type"),
+ "kafka_bootstrap_servers": assign_var("kafka_bootstrap_servers"),
+ "kafka_key": assign_var("kafka_key"),
+ "kafka_secret": assign_var("kafka_secret"),
+ "kafka_schema_registry_url": assign_var("kafka_schema_registry_url"),
+ "kafka_ssl_ca_pem": assign_var("kafka_ssl_ca_pem"),
+ "kafka_security_protocol": assign_var("kafka_security_protocol"),
+ "kafka_sasl_mechanism": assign_var("kafka_sasl_mechanism"),
+ "kafka_key_avro_deserialization": kafka_key_avro_deserialization,
+ "s3_region": assign_var("s3_region"),
+ "s3_arn": assign_var("s3_arn"),
+ "s3_access_key": assign_var("s3_access_key"),
+ "s3_secret": assign_var("s3_secret"),
+ "gcs_service_account_credentials_json": assign_var_json("gcs_service_account_credentials_json"),
+ "include": include,
+ },
  }

  engine_vars = set()
@@ -1591,11 +1652,19 @@ def parse(
  for o in options:
  engine_vars.add(o.name)
  for v in engine_vars:
- cmds[f"engine_{v}"] = add_engine_var(v)
+ cmds_per_datafile_kind[DatafileKind.datasource][f"engine_{v}"] = add_engine_var(v)

  if default_node:
  node(default_node)

+ def get_all_cmds() -> dict[str, Callable]:
+ all_cmds: dict[str, Callable] = {}
+ for _, cmds in cmds_per_datafile_kind.items():
+ all_cmds.update(cmds)
+ return all_cmds
+
+ cmds = cmds_per_datafile_kind[kind] if kind is not None else get_all_cmds()
+
  lineno = 1
  try:
  while lineno <= len(lines):
@@ -1646,7 +1715,14 @@ def parse(
  if cmd.lower() in cmds:
  cmds[cmd.lower()](*args, lineno=lineno, replace_includes=replace_includes, cmd=cmd)
  else:
- raise click.ClickException(FeedbackManager.error_option(option=cmd.upper()))
+ error_msg = f"{cmd.upper()} is not a valid option"
+ if kind:
+ error_msg += f" in {kind.value} files."
+ raise DatafileSyntaxError(
+ message=error_msg,
+ lineno=lineno,
+ pos=0,
+ )
  else:
  parser_state.multiline_string += line
  lineno += 1
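
Taken together with the per-kind tables above, command lookup is now scoped by datafile kind, falling back to a merged table when the kind is unknown. A trimmed, self-contained sketch of that dispatch, using stand-in kinds, handlers, and error type rather than the package's actual structures:

from enum import Enum
from typing import Callable, Optional


class DatafileKind(Enum):
    datasource = "datasource"
    pipe = "pipe"
    connection = "connection"


class DatafileSyntaxError(Exception):
    def __init__(self, message: str, lineno: int, pos: int):
        super().__init__(f"{message} (line {lineno})")


def assign_var(name: str) -> Callable:
    # Placeholder handler: the real handlers mutate the parsed document.
    return lambda *args, **kwargs: print(f"{name} <- {args}")


# Per-kind command tables, heavily trimmed for illustration.
cmds_per_datafile_kind: dict[DatafileKind, dict[str, Callable]] = {
    DatafileKind.datasource: {"schema": assign_var("schema"), "engine": assign_var("engine")},
    DatafileKind.pipe: {"node": assign_var("node"), "sql": assign_var("sql")},
    DatafileKind.connection: {"kafka_bootstrap_servers": assign_var("kafka_bootstrap_servers")},
}


def get_all_cmds() -> dict[str, Callable]:
    # Unknown kind: accept any command defined for any kind.
    all_cmds: dict[str, Callable] = {}
    for cmds in cmds_per_datafile_kind.values():
        all_cmds.update(cmds)
    return all_cmds


def dispatch(cmd: str, args: tuple, lineno: int, kind: Optional[DatafileKind]) -> None:
    cmds = cmds_per_datafile_kind[kind] if kind is not None else get_all_cmds()
    if cmd.lower() in cmds:
        cmds[cmd.lower()](*args, lineno=lineno, cmd=cmd)
        return
    error_msg = f"{cmd.upper()} is not a valid option"
    if kind:
        error_msg += f" in {kind.value} files."
    raise DatafileSyntaxError(error_msg, lineno=lineno, pos=0)


dispatch("SQL", ("SELECT 1",), lineno=3, kind=DatafileKind.pipe)  # handled
# dispatch("SQL", ("SELECT 1",), lineno=3, kind=DatafileKind.datasource)  # raises: not valid in datasource files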
@@ -1686,7 +1762,6 @@ def parse(

  class ImportReplacements:
  _REPLACEMENTS: Tuple[Tuple[str, str, Optional[str]], ...] = (
- ("import_service", "service", None),
  ("import_strategy", "mode", "replace"),
  ("import_connection_name", "connection", None),
  ("import_schedule", "cron", ON_DEMAND),
tinybird-0.0.1.dev140.dist-info/METADATA → tinybird-0.0.1.dev141.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: tinybird
- Version: 0.0.1.dev140
+ Version: 0.0.1.dev141
  Summary: Tinybird Command Line Tool
  Home-page: https://www.tinybird.co/docs/cli/introduction.html
  Author: Tinybird
tinybird-0.0.1.dev140.dist-info/RECORD → tinybird-0.0.1.dev141.dist-info/RECORD CHANGED
@@ -12,7 +12,7 @@ tinybird/syncasync.py,sha256=IPnOx6lMbf9SNddN1eBtssg8vCLHMt76SuZ6YNYm-Yk,27761
  tinybird/tornado_template.py,sha256=jjNVDMnkYFWXflmT8KU_Ssbo5vR8KQq3EJMk5vYgXRw,41959
  tinybird/ch_utils/constants.py,sha256=aYvg2C_WxYWsnqPdZB1ZFoIr8ZY-XjUXYyHKE9Ansj0,3890
  tinybird/ch_utils/engine.py,sha256=BZuPM7MFS7vaEKK5tOMR2bwSAgJudPrJt27uVEwZmTY,40512
- tinybird/tb/__cli__.py,sha256=ca_E2br4_GaxuhT-XfoJYkuzUd-v6Z8yRK7watCF2hE,252
+ tinybird/tb/__cli__.py,sha256=HatbYmPEFnba3j4qc8NOBm4CILHsE8TZVBx4rD__eLc,252
  tinybird/tb/cli.py,sha256=6X7pMjscB1yDsnzBaZBnF4pCBJ7tZgCC500CtPEP-qQ,1106
  tinybird/tb/client.py,sha256=aaPKq5C77e72kR7IMv9WrvnvNki8mKMOTi9EsCp0eUc,55962
  tinybird/tb/config.py,sha256=HLMHbJg6BjeGZ2KiJA-BCCVnk8w959xsSaDEEePakZg,3981
@@ -59,7 +59,7 @@ tinybird/tb/modules/datafile/build.py,sha256=d_h3pRFDPFrDKGhpFx2iejY25GuB2k8yfNo
  tinybird/tb/modules/datafile/build_common.py,sha256=LU24kAQmxDJIyoIapDaYG-SU3P4FrMG9UBf8m9PgVSI,4565
  tinybird/tb/modules/datafile/build_datasource.py,sha256=nXEQ0qHdq2ai7jJTv8H2d7eeDPBYzLn8VY7zMtOYb8M,17382
  tinybird/tb/modules/datafile/build_pipe.py,sha256=6Cwjf3BKEF3-oQ9PipsQfK-Z43nSwtA4qJAUoysI7Uc,11385
- tinybird/tb/modules/datafile/common.py,sha256=p3EyhmO-zp5ZzdJcG6H5S60TTahhQn1fjK_ovXCk2J8,86185
+ tinybird/tb/modules/datafile/common.py,sha256=NIqXpsj7u8uaeSKcUZM-85Kxg35pd08DVAdbVuHmvTU,89400
  tinybird/tb/modules/datafile/diff.py,sha256=MTmj53RYjER4neLgWVjabn-FKVFgh8h8uYiBo55lFQg,6757
  tinybird/tb/modules/datafile/exceptions.py,sha256=8rw2umdZjtby85QbuRKFO5ETz_eRHwUY5l7eHsy1wnI,556
  tinybird/tb/modules/datafile/fixture.py,sha256=DrRWivcvo_1rn7LlVUnHcXccdgx9yVj63mzBkUwCzk8,1420
@@ -79,8 +79,8 @@ tinybird/tb_cli_modules/config.py,sha256=IsgdtFRnUrkY8-Zo32lmk6O7u3bHie1QCxLwgp4
  tinybird/tb_cli_modules/exceptions.py,sha256=pmucP4kTF4irIt7dXiG-FcnI-o3mvDusPmch1L8RCWk,3367
  tinybird/tb_cli_modules/regions.py,sha256=QjsL5H6Kg-qr0aYVLrvb1STeJ5Sx_sjvbOYO0LrEGMk,166
  tinybird/tb_cli_modules/telemetry.py,sha256=Hh2Io8ZPROSunbOLuMvuIFU4TqwWPmQTqal4WS09K1A,10449
- tinybird-0.0.1.dev140.dist-info/METADATA,sha256=uI3BQRF3MVPNyMo39wVfQl5jVgZd5s1bIRpqoy3dScw,1612
- tinybird-0.0.1.dev140.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- tinybird-0.0.1.dev140.dist-info/entry_points.txt,sha256=LwdHU6TfKx4Qs7BqqtaczEZbImgU7Abe9Lp920zb_fo,43
- tinybird-0.0.1.dev140.dist-info/top_level.txt,sha256=VqqqEmkAy7UNaD8-V51FCoMMWXjLUlR0IstvK7tJYVY,54
- tinybird-0.0.1.dev140.dist-info/RECORD,,
+ tinybird-0.0.1.dev141.dist-info/METADATA,sha256=fKlvi9USAvvAJMIZlfVRRWGxJrAXIPtR3Bv0hSWEivU,1612
+ tinybird-0.0.1.dev141.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ tinybird-0.0.1.dev141.dist-info/entry_points.txt,sha256=LwdHU6TfKx4Qs7BqqtaczEZbImgU7Abe9Lp920zb_fo,43
+ tinybird-0.0.1.dev141.dist-info/top_level.txt,sha256=VqqqEmkAy7UNaD8-V51FCoMMWXjLUlR0IstvK7tJYVY,54
+ tinybird-0.0.1.dev141.dist-info/RECORD,,
+ tinybird-0.0.1.dev141.dist-info/RECORD,,