tinybird 0.0.1.dev291__py3-none-any.whl → 1.0.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tinybird/ch_utils/constants.py +5 -0
- tinybird/connectors.py +1 -7
- tinybird/context.py +3 -3
- tinybird/datafile/common.py +10 -8
- tinybird/datafile/parse_pipe.py +2 -2
- tinybird/feedback_manager.py +3 -0
- tinybird/prompts.py +1 -0
- tinybird/service_datasources.py +223 -0
- tinybird/sql_template.py +26 -11
- tinybird/sql_template_fmt.py +14 -4
- tinybird/tb/__cli__.py +2 -2
- tinybird/tb/cli.py +1 -0
- tinybird/tb/client.py +104 -26
- tinybird/tb/config.py +24 -0
- tinybird/tb/modules/agent/agent.py +103 -67
- tinybird/tb/modules/agent/banner.py +15 -15
- tinybird/tb/modules/agent/explore_agent.py +5 -0
- tinybird/tb/modules/agent/mock_agent.py +5 -1
- tinybird/tb/modules/agent/models.py +6 -2
- tinybird/tb/modules/agent/prompts.py +49 -2
- tinybird/tb/modules/agent/tools/deploy.py +1 -1
- tinybird/tb/modules/agent/tools/execute_query.py +15 -18
- tinybird/tb/modules/agent/tools/request_endpoint.py +1 -1
- tinybird/tb/modules/agent/tools/run_command.py +9 -0
- tinybird/tb/modules/agent/utils.py +38 -48
- tinybird/tb/modules/branch.py +150 -0
- tinybird/tb/modules/build.py +58 -13
- tinybird/tb/modules/build_common.py +209 -25
- tinybird/tb/modules/cli.py +129 -16
- tinybird/tb/modules/common.py +172 -146
- tinybird/tb/modules/connection.py +125 -194
- tinybird/tb/modules/connection_kafka.py +382 -0
- tinybird/tb/modules/copy.py +3 -1
- tinybird/tb/modules/create.py +83 -150
- tinybird/tb/modules/datafile/build.py +27 -38
- tinybird/tb/modules/datafile/build_datasource.py +21 -25
- tinybird/tb/modules/datafile/diff.py +1 -1
- tinybird/tb/modules/datafile/format_pipe.py +46 -7
- tinybird/tb/modules/datafile/playground.py +59 -68
- tinybird/tb/modules/datafile/pull.py +2 -3
- tinybird/tb/modules/datasource.py +477 -308
- tinybird/tb/modules/deployment.py +2 -0
- tinybird/tb/modules/deployment_common.py +84 -44
- tinybird/tb/modules/deprecations.py +4 -4
- tinybird/tb/modules/dev_server.py +33 -12
- tinybird/tb/modules/exceptions.py +14 -0
- tinybird/tb/modules/feedback_manager.py +1 -1
- tinybird/tb/modules/info.py +69 -12
- tinybird/tb/modules/infra.py +4 -5
- tinybird/tb/modules/job_common.py +15 -0
- tinybird/tb/modules/local.py +143 -23
- tinybird/tb/modules/local_common.py +347 -19
- tinybird/tb/modules/local_logs.py +209 -0
- tinybird/tb/modules/login.py +21 -2
- tinybird/tb/modules/login_common.py +254 -12
- tinybird/tb/modules/mock.py +5 -54
- tinybird/tb/modules/mock_common.py +0 -54
- tinybird/tb/modules/open.py +10 -5
- tinybird/tb/modules/project.py +14 -5
- tinybird/tb/modules/shell.py +15 -7
- tinybird/tb/modules/sink.py +3 -1
- tinybird/tb/modules/telemetry.py +11 -3
- tinybird/tb/modules/test.py +13 -9
- tinybird/tb/modules/test_common.py +13 -87
- tinybird/tb/modules/tinyunit/tinyunit.py +0 -14
- tinybird/tb/modules/tinyunit/tinyunit_lib.py +0 -6
- tinybird/tb/modules/watch.py +5 -3
- tinybird/tb_cli_modules/common.py +2 -2
- tinybird/tb_cli_modules/telemetry.py +1 -1
- tinybird/tornado_template.py +6 -7
- {tinybird-0.0.1.dev291.dist-info → tinybird-1.0.5.dist-info}/METADATA +32 -6
- tinybird-1.0.5.dist-info/RECORD +132 -0
- {tinybird-0.0.1.dev291.dist-info → tinybird-1.0.5.dist-info}/WHEEL +1 -1
- tinybird-0.0.1.dev291.dist-info/RECORD +0 -128
- {tinybird-0.0.1.dev291.dist-info → tinybird-1.0.5.dist-info}/entry_points.txt +0 -0
- {tinybird-0.0.1.dev291.dist-info → tinybird-1.0.5.dist-info}/top_level.txt +0 -0
|
@@ -8,9 +8,8 @@ import os
|
|
|
8
8
|
import re
|
|
9
9
|
import time
|
|
10
10
|
import uuid
|
|
11
|
-
from datetime import datetime
|
|
12
11
|
from pathlib import Path
|
|
13
|
-
from typing import
|
|
12
|
+
from typing import Optional
|
|
14
13
|
from urllib.parse import urlparse
|
|
15
14
|
|
|
16
15
|
import click
|
|
@@ -33,7 +32,13 @@ from tinybird.tb.modules.common import (
|
|
|
33
32
|
push_data,
|
|
34
33
|
)
|
|
35
34
|
from tinybird.tb.modules.config import CLIConfig
|
|
36
|
-
from tinybird.tb.modules.
|
|
35
|
+
from tinybird.tb.modules.connection_kafka import (
|
|
36
|
+
connection_create_kafka,
|
|
37
|
+
echo_kafka_data,
|
|
38
|
+
select_connection,
|
|
39
|
+
select_group_id,
|
|
40
|
+
select_topic,
|
|
41
|
+
)
|
|
37
42
|
from tinybird.tb.modules.create import (
|
|
38
43
|
create_resources_from_prompt,
|
|
39
44
|
generate_aws_iamrole_connection_file_with_secret,
|
|
@@ -651,11 +656,19 @@ def datasource_sync(ctx: Context, datasource_name: str, yes: bool):
|
|
|
651
656
|
@click.option("--blank", is_flag=True, default=False, help="Create a blank data source")
|
|
652
657
|
@click.option("--file", type=str, help="Create a data source from a local file")
|
|
653
658
|
@click.option("--url", type=str, help="Create a data source from a remote URL")
|
|
654
|
-
@click.option("--connection", type=str, help="Create a data source from a connection")
|
|
655
659
|
@click.option("--prompt", type=str, help="Create a data source from a prompt")
|
|
660
|
+
@click.option("--connection-name", type=str, help="Create a data source from a connection")
|
|
656
661
|
@click.option("--s3", is_flag=True, default=False, help="Create a data source from a S3 connection")
|
|
657
662
|
@click.option("--gcs", is_flag=True, default=False, help="Create a data source from a GCS connection")
|
|
658
663
|
@click.option("--kafka", is_flag=True, default=False, help="Create a data source from a Kafka connection")
|
|
664
|
+
@click.option("--kafka-topic", type=str, help="Kafka topic")
|
|
665
|
+
@click.option("--kafka-group-id", type=str, help="Kafka group ID")
|
|
666
|
+
@click.option(
|
|
667
|
+
"--kafka-auto-offset-reset",
|
|
668
|
+
type=click.Choice(["latest", "earliest"], case_sensitive=False),
|
|
669
|
+
help="Kafka auto offset reset",
|
|
670
|
+
)
|
|
671
|
+
@click.option("--yes", is_flag=True, default=False, help="Do not ask for confirmation")
|
|
659
672
|
@click.pass_context
|
|
660
673
|
def datasource_create(
|
|
661
674
|
ctx: Context,
|
|
@@ -663,359 +676,515 @@ def datasource_create(
|
|
|
663
676
|
blank: bool,
|
|
664
677
|
file: str,
|
|
665
678
|
url: str,
|
|
666
|
-
|
|
679
|
+
connection_name: Optional[str],
|
|
667
680
|
prompt: str,
|
|
668
681
|
s3: bool,
|
|
669
682
|
gcs: bool,
|
|
670
683
|
kafka: bool,
|
|
684
|
+
kafka_topic: str,
|
|
685
|
+
kafka_group_id: str,
|
|
686
|
+
kafka_auto_offset_reset: str,
|
|
687
|
+
yes: bool,
|
|
671
688
|
):
|
|
672
|
-
|
|
673
|
-
|
|
674
|
-
|
|
675
|
-
|
|
689
|
+
wizard_data: dict[str, str | bool | float] = {
|
|
690
|
+
"wizard": "datasource_create",
|
|
691
|
+
"current_step": "start",
|
|
692
|
+
}
|
|
693
|
+
start_time = time.time()
|
|
676
694
|
|
|
677
|
-
if
|
|
678
|
-
|
|
679
|
-
FeedbackManager.error(message="`tb datasource create` is not available against Tinybird Cloud.")
|
|
680
|
-
)
|
|
695
|
+
if name:
|
|
696
|
+
wizard_data["datasource_name"] = name
|
|
681
697
|
|
|
682
|
-
|
|
683
|
-
|
|
684
|
-
|
|
685
|
-
|
|
686
|
-
|
|
687
|
-
|
|
688
|
-
|
|
689
|
-
|
|
690
|
-
|
|
691
|
-
|
|
692
|
-
|
|
693
|
-
|
|
698
|
+
try:
|
|
699
|
+
project: Project = ctx.ensure_object(dict)["project"]
|
|
700
|
+
client: TinyB = ctx.ensure_object(dict)["client"]
|
|
701
|
+
config = ctx.ensure_object(dict)["config"]
|
|
702
|
+
env: str = ctx.ensure_object(dict)["env"]
|
|
703
|
+
|
|
704
|
+
if env == "cloud":
|
|
705
|
+
raise CLIDatasourceException(
|
|
706
|
+
FeedbackManager.error(message="`tb datasource create` is not available against Tinybird Cloud.")
|
|
707
|
+
)
|
|
708
|
+
|
|
709
|
+
datasource_types = {
|
|
710
|
+
"blank": ("Blank", "Create an empty one"),
|
|
711
|
+
"local_file": ("Local file", "A local file"),
|
|
712
|
+
"remote_url": ("Remote URL", "A remote file"),
|
|
713
|
+
"s3": ("S3", "Files on S3"),
|
|
714
|
+
"gcs": ("GCS", "Files on GCS"),
|
|
715
|
+
"kafka": ("Kafka", "Connect a Kafka topic"),
|
|
716
|
+
"prompt": ("Prompt", "Create a datasource from a prompt"),
|
|
717
|
+
}
|
|
718
|
+
datasource_type: Optional[str] = None
|
|
719
|
+
connection_file: Optional[str] = None
|
|
720
|
+
ds_content = """SCHEMA >
|
|
694
721
|
`data` String `json:$`
|
|
722
|
+
|
|
723
|
+
ENGINE "MergeTree"
|
|
724
|
+
# ENGINE_SORTING_KEY "user_id, timestamp"
|
|
725
|
+
# ENGINE_TTL "timestamp + toIntervalDay(60)"
|
|
726
|
+
# Learn more at https://www.tinybird.co/docs/forward/dev-reference/datafiles/datasource-files
|
|
695
727
|
"""
|
|
696
|
-
|
|
697
|
-
|
|
698
|
-
|
|
699
|
-
|
|
700
|
-
|
|
701
|
-
|
|
702
|
-
|
|
703
|
-
|
|
704
|
-
|
|
705
|
-
|
|
706
|
-
|
|
707
|
-
|
|
708
|
-
|
|
709
|
-
|
|
710
|
-
|
|
711
|
-
|
|
712
|
-
|
|
713
|
-
|
|
714
|
-
|
|
715
|
-
|
|
716
|
-
|
|
717
|
-
|
|
718
|
-
|
|
719
|
-
|
|
720
|
-
|
|
721
|
-
|
|
722
|
-
|
|
723
|
-
|
|
724
|
-
|
|
725
|
-
|
|
726
|
-
|
|
727
|
-
|
|
728
|
-
|
|
729
|
-
|
|
730
|
-
if datasource_type is None:
|
|
731
|
-
click.echo(
|
|
732
|
-
FeedbackManager.highlight(
|
|
733
|
-
message="? This command will create the schema (.datasource) for your data. Choose where from:"
|
|
734
|
-
)
|
|
735
|
-
)
|
|
728
|
+
wizard_mode = True
|
|
729
|
+
if file:
|
|
730
|
+
datasource_type = "local_file"
|
|
731
|
+
wizard_mode = False
|
|
732
|
+
elif url:
|
|
733
|
+
datasource_type = "remote_url"
|
|
734
|
+
wizard_mode = False
|
|
735
|
+
elif blank:
|
|
736
|
+
datasource_type = "blank"
|
|
737
|
+
wizard_mode = False
|
|
738
|
+
elif connection_name:
|
|
739
|
+
connection_files = project.get_connection_files()
|
|
740
|
+
connection_file = next((f for f in connection_files if f.endswith(f"{connection_name}.connection")), None)
|
|
741
|
+
if connection_file:
|
|
742
|
+
connection_content = Path(connection_file).read_text()
|
|
743
|
+
if project.is_kafka_connection(connection_content):
|
|
744
|
+
datasource_type = "kafka"
|
|
745
|
+
elif project.is_s3_connection(connection_content):
|
|
746
|
+
datasource_type = "s3"
|
|
747
|
+
elif project.is_gcs_connection(connection_content):
|
|
748
|
+
datasource_type = "gcs"
|
|
749
|
+
elif s3:
|
|
750
|
+
datasource_type = "s3"
|
|
751
|
+
wizard_mode = False
|
|
752
|
+
elif gcs:
|
|
753
|
+
datasource_type = "gcs"
|
|
754
|
+
wizard_mode = False
|
|
755
|
+
elif kafka:
|
|
756
|
+
datasource_type = "kafka"
|
|
757
|
+
wizard_mode = False
|
|
758
|
+
elif prompt:
|
|
759
|
+
datasource_type = "prompt"
|
|
760
|
+
wizard_mode = False
|
|
736
761
|
datasource_type_index = -1
|
|
737
762
|
|
|
738
|
-
|
|
739
|
-
|
|
740
|
-
|
|
763
|
+
if datasource_type is None:
|
|
764
|
+
wizard_data["current_step"] = "select_datasource_origin"
|
|
765
|
+
click.echo(
|
|
766
|
+
FeedbackManager.highlight(
|
|
767
|
+
message="? This command will create the schema (.datasource) for your data. Choose where from:"
|
|
768
|
+
)
|
|
769
|
+
)
|
|
770
|
+
|
|
771
|
+
dt_keys = list(datasource_types.keys())
|
|
772
|
+
while datasource_type_index == -1:
|
|
773
|
+
for index, key in enumerate(dt_keys):
|
|
774
|
+
click.echo(
|
|
775
|
+
f" [{index + 1}] {FeedbackManager.bold(message=datasource_types[key][0])}: {datasource_types[key][1]}"
|
|
776
|
+
)
|
|
777
|
+
click.echo(FeedbackManager.gray(message="\nFiles can be either NDJSON, CSV or Parquet."))
|
|
741
778
|
click.echo(
|
|
742
|
-
|
|
779
|
+
FeedbackManager.gray(
|
|
780
|
+
message=("Tip: Run `tb datasource create --file | --url | --connection` to skip this step.")
|
|
781
|
+
)
|
|
743
782
|
)
|
|
744
|
-
|
|
783
|
+
datasource_type_index = click.prompt("\nSelect option", default=1)
|
|
784
|
+
|
|
785
|
+
if datasource_type_index == 0:
|
|
786
|
+
click.echo(FeedbackManager.warning(message="Datasource type selection cancelled by user"))
|
|
787
|
+
|
|
788
|
+
wizard_data["exit_reason"] = "user_cancelled_type_selection"
|
|
789
|
+
wizard_data["duration_seconds"] = round(time.time() - start_time, 2)
|
|
790
|
+
add_telemetry_event("system_info", **wizard_data)
|
|
791
|
+
return None
|
|
792
|
+
|
|
793
|
+
try:
|
|
794
|
+
datasource_type = dt_keys[int(datasource_type_index) - 1]
|
|
795
|
+
except Exception:
|
|
796
|
+
datasource_type_index = -1
|
|
797
|
+
|
|
798
|
+
if datasource_type:
|
|
799
|
+
wizard_data["datasource_type"] = datasource_type
|
|
800
|
+
|
|
801
|
+
if not datasource_type:
|
|
745
802
|
click.echo(
|
|
746
|
-
FeedbackManager.
|
|
747
|
-
message=
|
|
803
|
+
FeedbackManager.error(
|
|
804
|
+
message=f"Invalid option: {datasource_type_index}. Please select a valid option from the list above."
|
|
748
805
|
)
|
|
749
806
|
)
|
|
750
|
-
datasource_type_index = click.prompt("\nSelect option", default=1)
|
|
751
807
|
|
|
752
|
-
|
|
753
|
-
|
|
754
|
-
|
|
808
|
+
wizard_data["exit_reason"] = "invalid_type_selection"
|
|
809
|
+
wizard_data["duration_seconds"] = round(time.time() - start_time, 2)
|
|
810
|
+
add_telemetry_event("system_info", **wizard_data)
|
|
811
|
+
return
|
|
755
812
|
|
|
756
|
-
|
|
757
|
-
|
|
758
|
-
|
|
759
|
-
|
|
813
|
+
if datasource_type == "prompt":
|
|
814
|
+
click.echo(FeedbackManager.gray(message="\n» Creating .datasource file..."))
|
|
815
|
+
if not config.get("user_token"):
|
|
816
|
+
raise Exception("This action requires authentication. Run 'tb login' first.")
|
|
760
817
|
|
|
761
|
-
|
|
762
|
-
|
|
763
|
-
|
|
764
|
-
message=f"Invalid option: {datasource_type_index}. Please select a valid option from the list above."
|
|
818
|
+
instructions = (
|
|
819
|
+
"Create or update a Tinybird datasource (.datasource file) for this project. "
|
|
820
|
+
"Do not generate mock data or append data; those steps will run later programmatically."
|
|
765
821
|
)
|
|
766
|
-
|
|
767
|
-
|
|
768
|
-
|
|
769
|
-
|
|
770
|
-
|
|
771
|
-
|
|
772
|
-
|
|
773
|
-
|
|
774
|
-
|
|
775
|
-
|
|
776
|
-
|
|
777
|
-
|
|
778
|
-
|
|
779
|
-
|
|
780
|
-
|
|
781
|
-
|
|
782
|
-
|
|
783
|
-
|
|
784
|
-
def get_connection_files():
|
|
785
|
-
connection_files = []
|
|
786
|
-
if datasource_type == "kafka":
|
|
787
|
-
connection_files = project.get_kafka_connection_files()
|
|
788
|
-
elif datasource_type == "s3":
|
|
789
|
-
connection_files = project.get_s3_connection_files()
|
|
790
|
-
elif datasource_type == "gcs":
|
|
791
|
-
connection_files = project.get_gcs_connection_files()
|
|
792
|
-
return connection_files
|
|
793
|
-
|
|
794
|
-
connection_files = get_connection_files()
|
|
795
|
-
|
|
796
|
-
click.echo(FeedbackManager.gray(message="\n» Selecting connection..."))
|
|
797
|
-
connection_name = ""
|
|
798
|
-
topics: List[str] = []
|
|
799
|
-
if len(connection_files) == 0:
|
|
800
|
-
click.echo(FeedbackManager.error(message=f"✗ No {datasource_types[datasource_type][0]} connections found."))
|
|
801
|
-
if click.confirm(
|
|
802
|
-
FeedbackManager.highlight(
|
|
803
|
-
message=f"\n? Do you want to create a {datasource_types[datasource_type][0]} connection? [Y/n]"
|
|
804
|
-
),
|
|
805
|
-
show_default=False,
|
|
806
|
-
default=True,
|
|
807
|
-
):
|
|
808
|
-
if datasource_type != "kafka":
|
|
809
|
-
click.echo(FeedbackManager.gray(message="\n» Creating .connection file..."))
|
|
810
|
-
default_connection_name = f"{datasource_type}_{generate_short_id()}"
|
|
811
|
-
connection_name = click.prompt(
|
|
812
|
-
FeedbackManager.highlight(message=f"? Connection name [{default_connection_name}]"),
|
|
813
|
-
show_default=False,
|
|
814
|
-
default=default_connection_name,
|
|
815
|
-
)
|
|
816
|
-
if datasource_type == "kafka":
|
|
817
|
-
(
|
|
818
|
-
connection_name,
|
|
819
|
-
bootstrap_servers,
|
|
820
|
-
key,
|
|
821
|
-
secret,
|
|
822
|
-
schema_registry_url,
|
|
823
|
-
auto_offset_reset,
|
|
824
|
-
sasl_mechanism,
|
|
825
|
-
security_protocol,
|
|
826
|
-
topics,
|
|
827
|
-
) = connection_create_kafka(ctx)
|
|
828
|
-
elif datasource_type == "s3":
|
|
829
|
-
generate_aws_iamrole_connection_file_with_secret(
|
|
830
|
-
connection_name,
|
|
831
|
-
service="s3",
|
|
832
|
-
role_arn_secret_name="S3_ARN",
|
|
833
|
-
region="eu-west-1",
|
|
834
|
-
folder=project.folder,
|
|
835
|
-
with_default_secret=True,
|
|
836
|
-
)
|
|
837
|
-
elif datasource_type == "gcs":
|
|
838
|
-
generate_gcs_connection_file_with_secrets(
|
|
839
|
-
connection_name,
|
|
840
|
-
service="gcs",
|
|
841
|
-
svc_account_creds="GCS_SERVICE_ACCOUNT_CREDENTIALS_JSON",
|
|
842
|
-
folder=project.folder,
|
|
843
|
-
)
|
|
844
|
-
if datasource_type != "kafka":
|
|
845
|
-
click.echo(FeedbackManager.info_file_created(file=f"connections/{connection_name}.connection"))
|
|
846
|
-
click.echo(FeedbackManager.success(message="✓ .connection created!"))
|
|
847
|
-
connection_files = get_connection_files()
|
|
822
|
+
if not prompt:
|
|
823
|
+
wizard_data["current_step"] = "enter_prompt"
|
|
824
|
+
prompt = click.prompt(FeedbackManager.highlight(message="? Enter your prompt"))
|
|
825
|
+
wizard_data["prompt"] = prompt
|
|
826
|
+
|
|
827
|
+
if name:
|
|
828
|
+
instructions += f" Name the datasource '{name}'."
|
|
829
|
+
|
|
830
|
+
created_resources = create_resources_from_prompt(
|
|
831
|
+
config,
|
|
832
|
+
project,
|
|
833
|
+
prompt,
|
|
834
|
+
feature="tb_datasource_create",
|
|
835
|
+
instructions=instructions,
|
|
836
|
+
)
|
|
837
|
+
if any(path.suffix == ".datasource" for path in created_resources):
|
|
838
|
+
click.echo(FeedbackManager.success(message="✓ .datasource created!"))
|
|
848
839
|
else:
|
|
849
|
-
click.echo(FeedbackManager.info(message=f"→ Run `tb connection create {datasource_type}` to add one."))
|
|
850
|
-
return
|
|
851
|
-
|
|
852
|
-
if not connection_file:
|
|
853
|
-
if len(connection_files) > 1:
|
|
854
840
|
click.echo(
|
|
855
|
-
FeedbackManager.
|
|
856
|
-
message=
|
|
841
|
+
FeedbackManager.gray(
|
|
842
|
+
message="△ No new datasource file detected. Existing resources may have been updated instead."
|
|
857
843
|
)
|
|
858
844
|
)
|
|
859
|
-
connection_index = -1
|
|
860
|
-
while connection_index == -1:
|
|
861
|
-
for index, conn_file in enumerate(connection_files):
|
|
862
|
-
conn_path = Path(conn_file)
|
|
863
|
-
click.echo(f" [{index + 1}] {conn_path.stem}")
|
|
864
|
-
connection_index = click.prompt("\nSelect option", default=1)
|
|
865
|
-
try:
|
|
866
|
-
connection_file = connection_files[int(connection_index) - 1]
|
|
867
|
-
connection_path = Path(connection_file)
|
|
868
|
-
connection = connection_path.stem
|
|
869
|
-
except Exception:
|
|
870
|
-
connection_index = -1
|
|
871
|
-
else:
|
|
872
|
-
connection_file = connection_files[0]
|
|
873
|
-
connection_path = Path(connection_file)
|
|
874
|
-
connection = connection_path.stem
|
|
875
|
-
click.echo(FeedbackManager.info(message=f"Using connection: {connection}"))
|
|
876
|
-
|
|
877
|
-
click.echo(FeedbackManager.gray(message="\n» Creating .datasource file..."))
|
|
878
|
-
|
|
879
|
-
if datasource_type == "local_file":
|
|
880
|
-
if not file:
|
|
881
|
-
file = click.prompt(FeedbackManager.highlight(message="? Path"))
|
|
882
|
-
if file.startswith("~"):
|
|
883
|
-
file = os.path.expanduser(file)
|
|
884
|
-
|
|
885
|
-
folder_path = project.path
|
|
886
|
-
path = folder_path / file
|
|
887
|
-
if not path.exists():
|
|
888
|
-
path = Path(file)
|
|
889
|
-
|
|
890
|
-
data_format = path.suffix.lstrip(".")
|
|
891
|
-
ds_content = analyze_file(str(path), client, format=data_format)
|
|
892
|
-
default_name = normalize_datasource_name(path.stem)
|
|
893
|
-
name = name or click.prompt(
|
|
894
|
-
FeedbackManager.highlight(message=f"? Data source name [{default_name}]"),
|
|
895
|
-
default=default_name,
|
|
896
|
-
show_default=False,
|
|
897
|
-
)
|
|
898
845
|
|
|
899
|
-
|
|
900
|
-
|
|
901
|
-
|
|
902
|
-
|
|
903
|
-
ds_content = analyze_file(url, client, format)
|
|
904
|
-
default_name = normalize_datasource_name(Path(url).stem)
|
|
905
|
-
name = name or click.prompt(
|
|
906
|
-
FeedbackManager.highlight(message=f"? Data source name [{default_name}]"),
|
|
907
|
-
default=default_name,
|
|
908
|
-
show_default=False,
|
|
909
|
-
)
|
|
846
|
+
wizard_data["current_step"] = "completed"
|
|
847
|
+
wizard_data["duration_seconds"] = round(time.time() - start_time, 2)
|
|
848
|
+
add_telemetry_event("system_info", **wizard_data)
|
|
849
|
+
return
|
|
910
850
|
|
|
911
|
-
|
|
912
|
-
|
|
913
|
-
|
|
914
|
-
|
|
915
|
-
|
|
916
|
-
|
|
917
|
-
|
|
851
|
+
connection_required = datasource_type in ("kafka", "s3", "gcs")
|
|
852
|
+
|
|
853
|
+
if connection_required:
|
|
854
|
+
if env == "local":
|
|
855
|
+
should_build = click.confirm(
|
|
856
|
+
FeedbackManager.highlight(message="\n? Do you want to build the project before continue? [Y/n]"),
|
|
857
|
+
show_default=False,
|
|
858
|
+
default=True,
|
|
859
|
+
)
|
|
860
|
+
if should_build:
|
|
861
|
+
click.echo(FeedbackManager.gray(message="» Building project before continue..."))
|
|
862
|
+
build_project(project=project, tb_client=client, watch=False, config=config, silent=True)
|
|
863
|
+
click.echo(FeedbackManager.success(message="✓ Build completed!\n"))
|
|
864
|
+
else:
|
|
865
|
+
click.echo(FeedbackManager.gray(message="Skipping build...\n"))
|
|
866
|
+
|
|
867
|
+
wizard_data["current_step"] = "select_connection"
|
|
868
|
+
|
|
869
|
+
connections = client.connections(datasource_type)
|
|
870
|
+
connection_type = datasource_types[datasource_type][0]
|
|
871
|
+
new_connection_created = False
|
|
872
|
+
if len(connections) == 0:
|
|
873
|
+
click.echo(FeedbackManager.info(message=f"No {connection_type} connections found."))
|
|
874
|
+
if click.confirm(
|
|
875
|
+
FeedbackManager.highlight(
|
|
876
|
+
message=f"\n? Do you want to create a {connection_type} connection? [Y/n]"
|
|
877
|
+
),
|
|
878
|
+
show_default=False,
|
|
879
|
+
default=True,
|
|
880
|
+
):
|
|
881
|
+
wizard_data["created_new_connection"] = True
|
|
882
|
+
if datasource_type == "kafka":
|
|
883
|
+
result = connection_create_kafka(ctx)
|
|
884
|
+
connection_name = result["name"]
|
|
885
|
+
elif datasource_type == "s3":
|
|
886
|
+
click.echo(FeedbackManager.gray(message="\n» Creating .connection file..."))
|
|
887
|
+
default_connection_name = f"{datasource_type}_{generate_short_id()}"
|
|
888
|
+
s3_connection_name: str = click.prompt(
|
|
889
|
+
FeedbackManager.highlight(message=f"? Connection name [{default_connection_name}]"),
|
|
890
|
+
show_default=False,
|
|
891
|
+
default=default_connection_name,
|
|
892
|
+
)
|
|
893
|
+
connection_name = s3_connection_name
|
|
894
|
+
wizard_data["connection_name"] = s3_connection_name
|
|
895
|
+
generate_aws_iamrole_connection_file_with_secret(
|
|
896
|
+
s3_connection_name,
|
|
897
|
+
service="s3",
|
|
898
|
+
role_arn_secret_name="S3_ARN",
|
|
899
|
+
region="eu-west-1",
|
|
900
|
+
folder=project.folder,
|
|
901
|
+
with_default_secret=True,
|
|
902
|
+
)
|
|
903
|
+
elif datasource_type == "gcs":
|
|
904
|
+
click.echo(FeedbackManager.gray(message="\n» Creating .connection file..."))
|
|
905
|
+
default_connection_name = f"{datasource_type}_{generate_short_id()}"
|
|
906
|
+
gcs_connection_name: str = click.prompt(
|
|
907
|
+
FeedbackManager.highlight(message=f"? Connection name [{default_connection_name}]"),
|
|
908
|
+
show_default=False,
|
|
909
|
+
default=default_connection_name,
|
|
910
|
+
)
|
|
911
|
+
connection_name = gcs_connection_name
|
|
912
|
+
wizard_data["connection_name"] = gcs_connection_name
|
|
913
|
+
generate_gcs_connection_file_with_secrets(
|
|
914
|
+
gcs_connection_name,
|
|
915
|
+
service="gcs",
|
|
916
|
+
svc_account_creds="GCS_SERVICE_ACCOUNT_CREDENTIALS_JSON",
|
|
917
|
+
folder=project.folder,
|
|
918
|
+
)
|
|
919
|
+
new_connection_created = True
|
|
920
|
+
if env == "local" and new_connection_created:
|
|
921
|
+
click.echo(FeedbackManager.gray(message="\n» Building project to access the new connection..."))
|
|
922
|
+
build_project(project=project, tb_client=client, watch=False, config=config, silent=True)
|
|
923
|
+
click.echo(FeedbackManager.success(message="✓ Build completed!"))
|
|
924
|
+
else:
|
|
925
|
+
click.echo(
|
|
926
|
+
FeedbackManager.info(message=f"→ Run `tb connection create {datasource_type}` to add one.")
|
|
927
|
+
)
|
|
928
|
+
wizard_data["exit_reason"] = "user_declined_connection_creation"
|
|
929
|
+
wizard_data["duration_seconds"] = round(time.time() - start_time, 2)
|
|
930
|
+
add_telemetry_event("system_info", **wizard_data)
|
|
931
|
+
return
|
|
932
|
+
|
|
933
|
+
# Only prompt for connection selection if connection_name wasn't provided via CLI
|
|
934
|
+
if not connection_name:
|
|
935
|
+
wizard_data["selected_connection_from_multiple"] = True
|
|
936
|
+
connection = select_connection(None, datasource_type, connections, client)
|
|
937
|
+
connection_id = connection["id"]
|
|
938
|
+
connection_name = connection["name"]
|
|
918
939
|
|
|
919
|
-
|
|
920
|
-
connections = client.connections("kafka")
|
|
921
|
-
kafka_connection_files = project.get_kafka_connection_files()
|
|
940
|
+
click.echo(FeedbackManager.gray(message="\n» Creating .datasource file..."))
|
|
922
941
|
|
|
923
|
-
|
|
924
|
-
|
|
925
|
-
|
|
926
|
-
FeedbackManager.
|
|
942
|
+
if datasource_type == "local_file":
|
|
943
|
+
wizard_data["current_step"] = "file_input"
|
|
944
|
+
if not file:
|
|
945
|
+
file = click.prompt(FeedbackManager.highlight(message="? Path"))
|
|
946
|
+
if file.startswith("~"):
|
|
947
|
+
file = os.path.expanduser(file)
|
|
948
|
+
|
|
949
|
+
folder_path = project.path
|
|
950
|
+
path = folder_path / file
|
|
951
|
+
if not path.exists():
|
|
952
|
+
path = Path(file)
|
|
953
|
+
|
|
954
|
+
data_format = path.suffix.lstrip(".")
|
|
955
|
+
ds_content = analyze_file(str(path), client, format=data_format)
|
|
956
|
+
default_name = normalize_datasource_name(path.stem)
|
|
957
|
+
wizard_data["current_step"] = "enter_name"
|
|
958
|
+
name = name or click.prompt(
|
|
959
|
+
FeedbackManager.highlight(message=f"? Data source name [{default_name}]"),
|
|
960
|
+
default=default_name,
|
|
961
|
+
show_default=False,
|
|
927
962
|
)
|
|
928
|
-
|
|
929
|
-
|
|
963
|
+
wizard_data["datasource_name"] = name
|
|
964
|
+
|
|
965
|
+
if name == default_name:
|
|
966
|
+
wizard_data["used_default_name"] = True
|
|
967
|
+
|
|
968
|
+
if datasource_type == "remote_url":
|
|
969
|
+
wizard_data["current_step"] = "file_input"
|
|
970
|
+
if not url:
|
|
971
|
+
url = click.prompt(FeedbackManager.highlight(message="? URL"))
|
|
972
|
+
format = url.split(".")[-1]
|
|
973
|
+
ds_content = analyze_file(url, client, format)
|
|
974
|
+
default_name = normalize_datasource_name(Path(url).stem)
|
|
975
|
+
wizard_data["current_step"] = "enter_name"
|
|
976
|
+
name = name or click.prompt(
|
|
977
|
+
FeedbackManager.highlight(message=f"? Data source name [{default_name}]"),
|
|
978
|
+
default=default_name,
|
|
930
979
|
show_default=False,
|
|
931
|
-
|
|
932
|
-
|
|
933
|
-
click.echo(FeedbackManager.gray(message="» Building project..."))
|
|
934
|
-
build_project(project=project, tb_client=client, watch=False, silent=True)
|
|
935
|
-
click.echo(FeedbackManager.success(message="✓ Build completed!"))
|
|
936
|
-
connections = client.connections("kafka")
|
|
980
|
+
)
|
|
981
|
+
wizard_data["datasource_name"] = name
|
|
937
982
|
|
|
938
|
-
|
|
983
|
+
if name == default_name:
|
|
984
|
+
wizard_data["used_default_name"] = True
|
|
939
985
|
|
|
940
|
-
if not
|
|
941
|
-
|
|
942
|
-
|
|
943
|
-
|
|
944
|
-
|
|
945
|
-
|
|
946
|
-
|
|
947
|
-
|
|
948
|
-
|
|
949
|
-
|
|
950
|
-
|
|
951
|
-
|
|
952
|
-
topic_index = click.prompt("\nSelect option", default=1)
|
|
953
|
-
try:
|
|
954
|
-
topic = topics[int(topic_index) - 1]
|
|
955
|
-
except Exception:
|
|
956
|
-
topic_index = -1
|
|
957
|
-
else:
|
|
958
|
-
topic = topics[0] if len(topics) > 0 else "topic_0"
|
|
986
|
+
if datasource_type not in ("remote_url", "local_file"):
|
|
987
|
+
wizard_data["current_step"] = "enter_name"
|
|
988
|
+
default_name = f"ds_{generate_short_id()}"
|
|
989
|
+
name = name or click.prompt(
|
|
990
|
+
FeedbackManager.highlight(message=f"? Data source name [{default_name}]"),
|
|
991
|
+
default=default_name,
|
|
992
|
+
show_default=False,
|
|
993
|
+
)
|
|
994
|
+
wizard_data["datasource_name"] = name
|
|
995
|
+
|
|
996
|
+
if name == default_name:
|
|
997
|
+
wizard_data["used_default_name"] = True
|
|
959
998
|
|
|
960
|
-
|
|
999
|
+
if datasource_type == "kafka":
|
|
1000
|
+
if not connection_name:
|
|
1001
|
+
raise CLIDatasourceException(FeedbackManager.error(message="A Kafka connection name is required."))
|
|
961
1002
|
|
|
962
|
-
|
|
963
|
-
|
|
964
|
-
|
|
965
|
-
|
|
1003
|
+
wizard_data["current_step"] = "kafka_configuration"
|
|
1004
|
+
connections = client.connections("kafka")
|
|
1005
|
+
|
|
1006
|
+
# Kafka configuration values - preserve CLI values if provided
|
|
1007
|
+
kafka_topic_value: Optional[str] = kafka_topic or None
|
|
1008
|
+
kafka_group_id_value: Optional[str] = kafka_group_id or None
|
|
1009
|
+
kafka_auto_offset_reset_value: Optional[str] = kafka_auto_offset_reset or None
|
|
1010
|
+
kafka_connection_id: Optional[str] = next(
|
|
1011
|
+
(c["id"] for c in connections if c["name"] == connection_name), None
|
|
1012
|
+
)
|
|
1013
|
+
|
|
1014
|
+
if connection_name and kafka_connection_id is None:
|
|
1015
|
+
raise CLIDatasourceException(
|
|
1016
|
+
FeedbackManager.error(message=f"No Kafka connection found with name '{connection_name}'.")
|
|
1017
|
+
)
|
|
1018
|
+
|
|
1019
|
+
confirmed = yes
|
|
1020
|
+
change_topic = False
|
|
1021
|
+
change_group_id = False
|
|
1022
|
+
change_connection = False
|
|
1023
|
+
change_auto_offset_reset = False
|
|
1024
|
+
|
|
1025
|
+
while not confirmed:
|
|
1026
|
+
# Select connection if not set or if user wants to change it
|
|
1027
|
+
if kafka_connection_id is None or change_connection:
|
|
1028
|
+
selected_connection = select_connection(kafka_connection_id, datasource_type, connections, client)
|
|
1029
|
+
kafka_connection_id = selected_connection["id"]
|
|
1030
|
+
connection_name = selected_connection["name"]
|
|
1031
|
+
change_connection = False
|
|
1032
|
+
|
|
1033
|
+
# Select topic if not set
|
|
1034
|
+
if (kafka_topic_value is None or change_topic) and kafka_connection_id is not None:
|
|
1035
|
+
kafka_topic_value = select_topic(kafka_topic_value, kafka_connection_id, client)
|
|
1036
|
+
change_topic = False
|
|
1037
|
+
|
|
1038
|
+
# Select group ID if not set or if user wants to change it
|
|
1039
|
+
if (
|
|
1040
|
+
(kafka_group_id_value is None or change_group_id)
|
|
1041
|
+
and kafka_connection_id is not None
|
|
1042
|
+
and kafka_topic_value is not None
|
|
1043
|
+
):
|
|
1044
|
+
kafka_group_id_value = select_group_id(
|
|
1045
|
+
kafka_group_id_value, kafka_topic_value, kafka_connection_id, client
|
|
1046
|
+
)
|
|
1047
|
+
change_group_id = False # Reset flag
|
|
1048
|
+
|
|
1049
|
+
# Select auto offset reset if not set or if user wants to change it
|
|
1050
|
+
if kafka_auto_offset_reset_value is None or change_auto_offset_reset:
|
|
1051
|
+
default_auto_offset = kafka_auto_offset_reset_value if kafka_auto_offset_reset_value else "latest"
|
|
1052
|
+
kafka_auto_offset_reset_value = click.prompt(
|
|
1053
|
+
FeedbackManager.highlight(message="? Auto offset reset"),
|
|
1054
|
+
type=click.Choice(["latest", "earliest"], case_sensitive=False),
|
|
1055
|
+
default=default_auto_offset,
|
|
1056
|
+
show_default=True,
|
|
1057
|
+
)
|
|
1058
|
+
change_auto_offset_reset = False # Reset flag
|
|
1059
|
+
|
|
1060
|
+
# Show preview - at this point kafka_connection_id is guaranteed to be set
|
|
1061
|
+
assert kafka_connection_id is not None
|
|
1062
|
+
assert connection_name is not None
|
|
1063
|
+
assert kafka_topic_value is not None
|
|
1064
|
+
assert kafka_group_id_value is not None
|
|
1065
|
+
echo_kafka_data(kafka_connection_id, connection_name, kafka_topic_value, kafka_group_id_value, client)
|
|
1066
|
+
|
|
1067
|
+
# Confirmation step
|
|
1068
|
+
wizard_data["current_step"] = "kafka_confirmation"
|
|
1069
|
+
click.echo(FeedbackManager.highlight(message="\n? What would you like to do?"))
|
|
1070
|
+
click.echo(" [1] Create .datasource file with this configuration")
|
|
1071
|
+
click.echo(" [2] Connection")
|
|
1072
|
+
click.echo(" [3] Topic")
|
|
1073
|
+
click.echo(" [4] Group ID")
|
|
1074
|
+
click.echo(" [5] Auto offset reset")
|
|
1075
|
+
click.echo(" [6] Cancel")
|
|
1076
|
+
|
|
1077
|
+
choice = click.prompt("\nSelect option", default=1, type=int)
|
|
1078
|
+
|
|
1079
|
+
if choice == 1:
|
|
1080
|
+
confirmed = True
|
|
1081
|
+
elif choice == 2:
|
|
1082
|
+
change_connection = True
|
|
1083
|
+
# Reset topic and group ID since they are connection-specific
|
|
1084
|
+
kafka_topic_value = None
|
|
1085
|
+
kafka_group_id_value = None
|
|
1086
|
+
elif choice == 3:
|
|
1087
|
+
kafka_topic_value = None # Reset to prompt again
|
|
1088
|
+
kafka_group_id_value = None # Reset group ID since it's topic-specific
|
|
1089
|
+
elif choice == 4:
|
|
1090
|
+
change_group_id = True # Set flag to re-prompt with current value as default
|
|
1091
|
+
elif choice == 5:
|
|
1092
|
+
change_auto_offset_reset = True # Set flag to re-prompt with current value as default
|
|
1093
|
+
elif choice == 6:
|
|
1094
|
+
wizard_data["exit_reason"] = "user_cancelled_kafka_configuration"
|
|
1095
|
+
wizard_data["duration_seconds"] = round(time.time() - start_time, 2)
|
|
1096
|
+
add_telemetry_event("system_info", **wizard_data)
|
|
1097
|
+
return None
|
|
1098
|
+
else:
|
|
1099
|
+
click.echo(FeedbackManager.error(message="Invalid option. Please select 1-6."))
|
|
1100
|
+
|
|
1101
|
+
ds_content += f"""
|
|
1102
|
+
KAFKA_CONNECTION_NAME {connection_name}
|
|
1103
|
+
KAFKA_TOPIC {kafka_topic_value}
|
|
1104
|
+
KAFKA_GROUP_ID {kafka_group_id_value}
|
|
1105
|
+
KAFKA_AUTO_OFFSET_RESET {kafka_auto_offset_reset_value}
|
|
1106
|
+
# Learn more at https://www.tinybird.co/docs/forward/get-data-in/connectors/kafka#kafka-datasource-settings
|
|
966
1107
|
"""
|
|
967
1108
|
|
|
968
|
-
|
|
969
|
-
|
|
970
|
-
|
|
971
|
-
|
|
972
|
-
|
|
973
|
-
|
|
1109
|
+
if datasource_type == "s3":
|
|
1110
|
+
# Use connection_name from CLI if provided, otherwise look it up from selected connection_id
|
|
1111
|
+
s3_conn_name: Optional[str] = connection_name
|
|
1112
|
+
if not s3_conn_name:
|
|
1113
|
+
s3_connections = client.connections("s3")
|
|
1114
|
+
s3_conn_name = next((c["name"] for c in s3_connections if c["id"] == connection_id), None)
|
|
1115
|
+
ds_content += f"""
|
|
1116
|
+
IMPORT_CONNECTION_NAME "{s3_conn_name}"
|
|
974
1117
|
IMPORT_BUCKET_URI "s3://my-bucket/*.csv"
|
|
975
1118
|
IMPORT_SCHEDULE "@auto"
|
|
976
1119
|
"""
|
|
977
1120
|
|
|
978
|
-
|
|
979
|
-
|
|
980
|
-
|
|
981
|
-
|
|
982
|
-
|
|
983
|
-
|
|
1121
|
+
if datasource_type == "gcs":
|
|
1122
|
+
# Use connection_name from CLI if provided, otherwise look it up from selected connection_id
|
|
1123
|
+
gcs_conn_name: Optional[str] = connection_name
|
|
1124
|
+
if not gcs_conn_name:
|
|
1125
|
+
gcs_connections = client.connections("gcs")
|
|
1126
|
+
gcs_conn_name = next((c["name"] for c in gcs_connections if c["id"] == connection_id), None)
|
|
1127
|
+
ds_content += f"""
|
|
1128
|
+
IMPORT_CONNECTION_NAME "{gcs_conn_name}"
|
|
984
1129
|
IMPORT_BUCKET_URI "gs://my-bucket/*.csv"
|
|
985
1130
|
IMPORT_SCHEDULE "@auto"
|
|
986
1131
|
"""
|
|
987
1132
|
|
|
988
|
-
|
|
989
|
-
|
|
990
|
-
|
|
991
|
-
datasources_path.
|
|
992
|
-
|
|
993
|
-
|
|
994
|
-
ds_file.
|
|
995
|
-
|
|
996
|
-
|
|
997
|
-
|
|
998
|
-
if wizard_mode:
|
|
999
|
-
last_tip_message = "\nTip: To skip the interactive prompts, pass flags to this command, e.g."
|
|
1000
|
-
last_tip_command = ""
|
|
1001
|
-
if datasource_type == "local_file":
|
|
1002
|
-
last_tip_command = f"`tb datasource create --file {file} --name {name}`."
|
|
1003
|
-
elif datasource_type == "remote_url":
|
|
1004
|
-
last_tip_command = f"`tb datasource create --url {url} --name {name}`."
|
|
1005
|
-
elif datasource_type == "blank":
|
|
1006
|
-
last_tip_command = f"`tb datasource create --blank --name {name}`."
|
|
1007
|
-
elif datasource_type in ("s3", "gcs", "kafka"):
|
|
1008
|
-
last_tip_command = f"`tb datasource create --{datasource_type} --name {name} --connection {connection}`."
|
|
1133
|
+
wizard_data["current_step"] = "create_datasource_file"
|
|
1134
|
+
click.echo(FeedbackManager.info(message=f"/datasources/{name}.datasource"))
|
|
1135
|
+
datasources_path = project.path / "datasources"
|
|
1136
|
+
if not datasources_path.exists():
|
|
1137
|
+
datasources_path.mkdir()
|
|
1138
|
+
ds_file = datasources_path / f"{name}.datasource"
|
|
1139
|
+
if not ds_file.exists():
|
|
1140
|
+
ds_file.touch()
|
|
1141
|
+
ds_file.write_text(ds_content)
|
|
1142
|
+
click.echo(FeedbackManager.success(message="✓ .datasource created!"))
|
|
1009
1143
|
|
|
1010
|
-
|
|
1144
|
+
if wizard_mode:
|
|
1145
|
+
last_tip_message = "\nTip: To skip the interactive prompts, pass flags to this command, e.g."
|
|
1146
|
+
last_tip_command = ""
|
|
1147
|
+
if datasource_type == "local_file":
|
|
1148
|
+
last_tip_command = f"`tb datasource create --file {file} --name {name}`."
|
|
1149
|
+
elif datasource_type == "remote_url":
|
|
1150
|
+
last_tip_command = f"`tb datasource create --url {url} --name {name}`."
|
|
1151
|
+
elif datasource_type == "blank":
|
|
1152
|
+
last_tip_command = f"`tb datasource create --blank --name {name}`."
|
|
1153
|
+
elif datasource_type in ("s3", "gcs", "kafka"):
|
|
1154
|
+
if datasource_type == "kafka":
|
|
1155
|
+
conn_name = connection_name
|
|
1156
|
+
elif datasource_type == "s3":
|
|
1157
|
+
conn_name = s3_conn_name
|
|
1158
|
+
else:
|
|
1159
|
+
conn_name = gcs_conn_name
|
|
1160
|
+
last_tip_command = (
|
|
1161
|
+
f"`tb datasource create --{datasource_type} --name {name} --connection-name {conn_name}`."
|
|
1162
|
+
)
|
|
1011
1163
|
|
|
1164
|
+
click.echo(FeedbackManager.gray(message=(f"{last_tip_message} {last_tip_command}")))
|
|
1012
1165
|
|
|
1013
|
-
|
|
1014
|
-
|
|
1166
|
+
wizard_data["current_step"] = "completed"
|
|
1167
|
+
wizard_data["duration_seconds"] = round(time.time() - start_time, 2)
|
|
1168
|
+
add_telemetry_event("system_info", **wizard_data)
|
|
1169
|
+
|
|
1170
|
+
except Exception as e:
|
|
1171
|
+
wizard_data["duration_seconds"] = round(time.time() - start_time, 2)
|
|
1172
|
+
|
|
1173
|
+
current_exception: Optional[BaseException] = e
|
|
1174
|
+
while current_exception:
|
|
1175
|
+
if isinstance(current_exception, KeyboardInterrupt):
|
|
1176
|
+
wizard_data["exit_reason"] = "user_interrupted"
|
|
1177
|
+
add_telemetry_event("system_info", **wizard_data)
|
|
1178
|
+
raise
|
|
1179
|
+
current_exception = current_exception.__cause__ or current_exception.__context__
|
|
1180
|
+
|
|
1181
|
+
wizard_data["error_message"] = str(e)
|
|
1182
|
+
add_telemetry_event("wizard_error", **wizard_data)
|
|
1183
|
+
raise
|
|
1015
1184
|
|
|
1016
1185
|
|
|
1017
|
-
def
|
|
1018
|
-
return
|
|
1186
|
+
def generate_short_id(length: int = 4) -> str:
    """Return a short random identifier, e.g. for suffixing resource names.

    Args:
        length: Number of leading characters to keep from a freshly
            generated UUID4 string. Defaults to 4, preserving the
            original behavior.

    Returns:
        A ``length``-character lowercase hex string (for ``length <= 8``,
        all characters are hex digits of the UUID prefix).

    Note:
        Uniqueness is probabilistic only — with a 4-character prefix
        (16 bits) collisions are plausible at scale; increase ``length``
        if stronger uniqueness is needed.
    """
    return str(uuid.uuid4())[:length]
|
|
1019
1188
|
|
|
1020
1189
|
|
|
1021
1190
|
def analyze_quarantine(datasource_name: str, project: Project, client: TinyB):
|