tinybird 0.0.1.dev5__tar.gz → 0.0.1.dev7__tar.gz

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.

This version of tinybird might be problematic.

Files changed (93)
  1. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/PKG-INFO +1 -1
  2. tinybird-0.0.1.dev7/tinybird/__cli__.py +7 -0
  3. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/datafile.py +7 -541
  4. tinybird-0.0.1.dev7/tinybird/tb/cli.py +28 -0
  5. tinybird-0.0.1.dev7/tinybird/tb/modules/auth.py +252 -0
  6. tinybird-0.0.1.dev7/tinybird/tb/modules/branch.py +1023 -0
  7. {tinybird-0.0.1.dev5/tinybird/tb_cli_modules → tinybird-0.0.1.dev7/tinybird/tb/modules}/build.py +10 -21
  8. tinybird-0.0.1.dev7/tinybird/tb/modules/cicd.py +271 -0
  9. tinybird-0.0.1.dev7/tinybird/tb/modules/cli.py +1451 -0
  10. {tinybird-0.0.1.dev5/tinybird/tb_cli_modules → tinybird-0.0.1.dev7/tinybird/tb/modules}/common.py +20 -8
  11. tinybird-0.0.1.dev7/tinybird/tb/modules/connection.py +803 -0
  12. {tinybird-0.0.1.dev5/tinybird/tb_cli_modules → tinybird-0.0.1.dev7/tinybird/tb/modules}/create.py +20 -20
  13. tinybird-0.0.1.dev7/tinybird/tb/modules/datafile/build.py +2103 -0
  14. tinybird-0.0.1.dev7/tinybird/tb/modules/datafile/build_common.py +118 -0
  15. tinybird-0.0.1.dev7/tinybird/tb/modules/datafile/build_datasource.py +403 -0
  16. tinybird-0.0.1.dev7/tinybird/tb/modules/datafile/build_pipe.py +648 -0
  17. tinybird-0.0.1.dev7/tinybird/tb/modules/datafile/common.py +897 -0
  18. tinybird-0.0.1.dev7/tinybird/tb/modules/datafile/diff.py +197 -0
  19. tinybird-0.0.1.dev7/tinybird/tb/modules/datafile/exceptions.py +23 -0
  20. tinybird-0.0.1.dev7/tinybird/tb/modules/datafile/format_common.py +66 -0
  21. tinybird-0.0.1.dev7/tinybird/tb/modules/datafile/format_datasource.py +160 -0
  22. tinybird-0.0.1.dev7/tinybird/tb/modules/datafile/format_pipe.py +195 -0
  23. tinybird-0.0.1.dev7/tinybird/tb/modules/datafile/parse_datasource.py +41 -0
  24. tinybird-0.0.1.dev7/tinybird/tb/modules/datafile/parse_pipe.py +69 -0
  25. tinybird-0.0.1.dev7/tinybird/tb/modules/datafile/pipe_checker.py +560 -0
  26. tinybird-0.0.1.dev7/tinybird/tb/modules/datafile/pull.py +157 -0
  27. tinybird-0.0.1.dev7/tinybird/tb/modules/datasource.py +909 -0
  28. tinybird-0.0.1.dev7/tinybird/tb/modules/exceptions.py +91 -0
  29. tinybird-0.0.1.dev7/tinybird/tb/modules/fmt.py +94 -0
  30. tinybird-0.0.1.dev7/tinybird/tb/modules/job.py +85 -0
  31. {tinybird-0.0.1.dev5/tinybird/tb_cli_modules → tinybird-0.0.1.dev7/tinybird/tb/modules}/llm.py +1 -1
  32. {tinybird-0.0.1.dev5/tinybird/tb_cli_modules → tinybird-0.0.1.dev7/tinybird/tb/modules}/local.py +9 -5
  33. {tinybird-0.0.1.dev5/tinybird/tb_cli_modules → tinybird-0.0.1.dev7/tinybird/tb/modules}/mock.py +5 -5
  34. tinybird-0.0.1.dev7/tinybird/tb/modules/pipe.py +864 -0
  35. {tinybird-0.0.1.dev5/tinybird/tb_cli_modules → tinybird-0.0.1.dev7/tinybird/tb/modules}/prompts.py +1 -1
  36. tinybird-0.0.1.dev7/tinybird/tb/modules/tag.py +100 -0
  37. tinybird-0.0.1.dev7/tinybird/tb/modules/test.py +107 -0
  38. tinybird-0.0.1.dev7/tinybird/tb/modules/tinyunit/tinyunit.py +340 -0
  39. {tinybird-0.0.1.dev5/tinybird/tb_cli_modules → tinybird-0.0.1.dev7/tinybird/tb/modules}/token.py +3 -3
  40. tinybird-0.0.1.dev7/tinybird/tb/modules/workspace.py +269 -0
  41. tinybird-0.0.1.dev7/tinybird/tb/modules/workspace_members.py +212 -0
  42. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/tb_cli.py +0 -3
  43. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/tb_cli_modules/cli.py +2 -2
  44. tinybird-0.0.1.dev7/tinybird/tb_cli_modules/common.py +2082 -0
  45. tinybird-0.0.1.dev7/tinybird/tb_cli_modules/config.py +344 -0
  46. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/tb_cli_modules/datasource.py +2 -10
  47. tinybird-0.0.1.dev7/tinybird/tb_cli_modules/regions.py +9 -0
  48. tinybird-0.0.1.dev7/tinybird/tb_cli_modules/telemetry.py +310 -0
  49. tinybird-0.0.1.dev7/tinybird/tb_cli_modules/tinyunit/tinyunit_lib.py +71 -0
  50. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird.egg-info/PKG-INFO +1 -1
  51. tinybird-0.0.1.dev7/tinybird.egg-info/SOURCES.txt +88 -0
  52. tinybird-0.0.1.dev7/tinybird.egg-info/entry_points.txt +2 -0
  53. tinybird-0.0.1.dev5/tinybird/__cli__.py +0 -8
  54. tinybird-0.0.1.dev5/tinybird.egg-info/SOURCES.txt +0 -53
  55. tinybird-0.0.1.dev5/tinybird.egg-info/entry_points.txt +0 -2
  56. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/setup.cfg +0 -0
  57. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/ch_utils/constants.py +0 -0
  58. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/ch_utils/engine.py +0 -0
  59. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/check_pypi.py +0 -0
  60. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/client.py +0 -0
  61. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/config.py +0 -0
  62. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/connectors.py +0 -0
  63. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/context.py +0 -0
  64. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/datatypes.py +0 -0
  65. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/feedback_manager.py +0 -0
  66. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/git_settings.py +0 -0
  67. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/sql.py +0 -0
  68. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/sql_template.py +0 -0
  69. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/sql_template_fmt.py +0 -0
  70. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/sql_toolset.py +0 -0
  71. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/syncasync.py +0 -0
  72. {tinybird-0.0.1.dev5/tinybird/tb_cli_modules → tinybird-0.0.1.dev7/tinybird/tb/modules}/config.py +0 -0
  73. {tinybird-0.0.1.dev5/tinybird/tb_cli_modules → tinybird-0.0.1.dev7/tinybird/tb/modules}/regions.py +0 -0
  74. {tinybird-0.0.1.dev5/tinybird/tb_cli_modules → tinybird-0.0.1.dev7/tinybird/tb/modules}/table.py +0 -0
  75. {tinybird-0.0.1.dev5/tinybird/tb_cli_modules → tinybird-0.0.1.dev7/tinybird/tb/modules}/telemetry.py +0 -0
  76. {tinybird-0.0.1.dev5/tinybird/tb_cli_modules → tinybird-0.0.1.dev7/tinybird/tb/modules}/tinyunit/tinyunit_lib.py +0 -0
  77. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/tb_cli_modules/auth.py +0 -0
  78. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/tb_cli_modules/branch.py +0 -0
  79. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/tb_cli_modules/cicd.py +0 -0
  80. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/tb_cli_modules/connection.py +0 -0
  81. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/tb_cli_modules/exceptions.py +0 -0
  82. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/tb_cli_modules/fmt.py +0 -0
  83. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/tb_cli_modules/job.py +0 -0
  84. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/tb_cli_modules/pipe.py +0 -0
  85. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/tb_cli_modules/tag.py +0 -0
  86. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/tb_cli_modules/test.py +0 -0
  87. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/tb_cli_modules/tinyunit/tinyunit.py +0 -0
  88. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/tb_cli_modules/workspace.py +0 -0
  89. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/tb_cli_modules/workspace_members.py +0 -0
  90. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/tornado_template.py +0 -0
  91. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird.egg-info/dependency_links.txt +0 -0
  92. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird.egg-info/requires.txt +0 -0
  93. {tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird.egg-info/top_level.txt +0 -0
{tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: tinybird
-Version: 0.0.1.dev5
+Version: 0.0.1.dev7
 Summary: Tinybird Command Line Tool
 Home-page: https://www.tinybird.co/docs/cli/introduction.html
 Author: Tinybird
tinybird-0.0.1.dev7/tinybird/__cli__.py

@@ -0,0 +1,7 @@
+__name__ = "tinybird-cli"
+__description__ = "Tinybird Command Line Tool"
+__url__ = "https://www.tinybird.co/docs/cli/introduction.html"
+__author__ = "Tinybird"
+__author_email__ = "support@tinybird.co"
+__version__ = "x.y.z"
+__revision__ = "xxxxxxxxx"
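The new __cli__.py is a plain metadata module: the "x.y.z" and "xxxxxxxxx" placeholders are presumably substituted with the real version and revision at release time (the published PKG-INFO above reads 0.0.1.dev7). A minimal sketch of how such a module can be consumed; the version_string helper is hypothetical and not part of the diff:

import tinybird.__cli__ as cli_meta

def version_string() -> str:
    # e.g. "tinybird-cli 0.0.1.dev7 (rev abcdef123)" once the placeholders
    # have been replaced by the release pipeline (an assumption, not shown here)
    return f"{cli_meta.__name__} {cli_meta.__version__} (rev {cli_meta.__revision__})"

print(version_string())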
{tinybird-0.0.1.dev5 → tinybird-0.0.1.dev7}/tinybird/datafile.py

@@ -3066,7 +3066,7 @@ async def new_pipe(
         )
     )
 
-    click.echo(FeedbackManager.success_test_endpoint(host=host, pipe=p["name"], token="$TB_TOKEN"))
+    click.echo(FeedbackManager.success_test_endpoint_no_token(host=host, pipe=p["name"]))
 
     if copy_node:
         pipe_id = data["id"]
@@ -3125,8 +3125,8 @@ async def new_pipe(
     except Exception as e:
         raise click.ClickException(FeedbackManager.error_creating_pipe(error=e))
 
-    if data.get("type") == "endpoint" and t:
-        click.echo(FeedbackManager.success_test_endpoint(host=host, pipe=p["name"], token=t["token"]))
+    if data.get("type") == "endpoint":
+        click.echo(FeedbackManager.success_test_endpoint_no_token(host=host, pipe=p["name"]))
 
 
 async def share_and_unshare_datasource(
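Both new_pipe hunks above swap FeedbackManager.success_test_endpoint, which interpolated an access token into the sample URL, for success_test_endpoint_no_token; the second hunk also drops the "and t" guard, so the hint is printed even when no token object is available. A rough sketch of the template shape this implies, assuming FeedbackManager messages are simple format strings (the exact wording is not in the diff):

def success_test_endpoint(host: str, pipe: str, token: str) -> str:
    # old behaviour: the token ended up in copy-pasteable output
    return f"Test your endpoint at {host}/v0/pipes/{pipe}.json?token={token}"

def success_test_endpoint_no_token(host: str, pipe: str) -> str:
    # new behaviour: same hint, no credential embedded in the URL
    return f"Test your endpoint at {host}/v0/pipes/{pipe}.json"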
@@ -3217,7 +3217,6 @@ async def new_ds(
     fork_downstream: Optional[bool] = False,
     fork: Optional[bool] = False,
     git_release: Optional[bool] = False,
-    build: Optional[bool] = False,
 ):
     ds_name = ds["params"]["name"]
 
@@ -3240,14 +3239,9 @@ async def new_ds(
             scopes.append(sc)
         await client.alter_tokens(token_name, scopes)
 
-    datasource_exists = False
     try:
         existing_ds = await client.get_datasource(ds_name)
-        if build:
-            await client.datasource_delete(ds_name, force=True)
-        else:
-            datasource_exists = True
-
+        datasource_exists = True
     except DoesNotExistException:
         datasource_exists = False
 
@@ -3580,7 +3574,6 @@ async def exec_file(
     fork_downstream: Optional[bool] = False,
     fork: Optional[bool] = False,
     git_release: Optional[bool] = False,
-    build: Optional[bool] = False,
 ):
     if debug:
         click.echo(FeedbackManager.debug_running_file(file=pp.pformat(r)))
@@ -3623,7 +3616,6 @@ async def exec_file(
             fork_downstream=fork_downstream,
             fork=fork,
             git_release=git_release,
-            build=build,
         )
         await update_tags_in_resource(r, "datasource", tb_client)
     elif r["resource"] == "tokens":
@@ -3908,7 +3900,6 @@ async def build_graph(
     only_changes: bool = False,
     fork_downstream: Optional[bool] = False,
     is_internal: Optional[bool] = False,
-    build: Optional[bool] = False,
 ) -> GraphDependencies:
     """
     This method will generate a dependency graph for the given files. It will also return a map of all the resources that are going to be deployed.
@@ -3945,7 +3936,6 @@ async def build_graph(
         changed=None,
         only_changes=False,
         is_internal=is_internal,
-        build=build,
     )
     all_dep_map = all_dependencies_graph.dep_map
     all_resources = all_dependencies_graph.to_run
@@ -4132,10 +4122,8 @@ def get_project_filenames(folder: str, with_vendor=False) -> List[str]:
         f"{folder}/*.pipe",
         f"{folder}/pipes/*.pipe",
         f"{folder}/endpoints/*.pipe",
-        f"{folder}/materializations/*.pipe",
-        f"{folder}/sinks/*.pipe",
-        f"{folder}/copies/*.pipe",
-        f"{folder}/playgrounds/*.pipe",
+        f"{folder}/*.token",
+        f"{folder}/tokens/*.token",
     ]
     if with_vendor:
         folders.append(f"{folder}/vendor/**/**/*.datasource")
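The get_project_filenames change stops collecting materializations/, sinks/, copies/ and playgrounds/ pipes and starts collecting .token files, matching the "tokens" resource branch shown in the exec_file hunk above. A minimal sketch of how such patterns are typically expanded, assuming plain glob.glob resolution as in the surrounding function (the .datasource entries are an assumption, since the hunk only shows the tail of the list):

import glob
from typing import List

def get_project_filenames_sketch(folder: str) -> List[str]:
    patterns = [
        f"{folder}/*.datasource",              # assumed to precede the shown entries
        f"{folder}/datasources/*.datasource",  # assumed
        f"{folder}/*.pipe",
        f"{folder}/pipes/*.pipe",
        f"{folder}/endpoints/*.pipe",
        f"{folder}/*.token",                   # new in dev7
        f"{folder}/tokens/*.token",            # new in dev7
    ]
    filenames: List[str] = []
    for pattern in patterns:
        filenames.extend(glob.glob(pattern))
    return filenames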
@@ -4360,7 +4348,6 @@ async def folder_push(
         dry_run: bool,
         fork_downstream: Optional[bool] = False,
         fork: Optional[bool] = False,
-        build: Optional[bool] = False,
     ):
         if name in to_run:
             resource = to_run[name]["resource"]
@@ -4410,7 +4397,6 @@ async def folder_push(
                     fork_downstream,
                     fork,
                     git_release,
-                    build,
                 )
                 if not run_tests:
                     click.echo(
@@ -4460,13 +4446,7 @@ async def folder_push(
                     version = ""
                     if name in latest_datasource_versions:
                         version = f"(v{latest_datasource_versions[name]})"
-                    if build:
-                        extension = "pipe" if resource == "pipes" else "datasource"
-                        click.echo(
-                            FeedbackManager.info_building_resource(name=f"{name}.{extension}", version=version)
-                        )
-                    else:
-                        click.echo(FeedbackManager.info_dry_processing_new_resource(name=name, version=version))
+                    click.echo(FeedbackManager.info_dry_processing_new_resource(name=name, version=version))
                 else:
                     click.echo(
                         FeedbackManager.info_dry_processing_resource(
@@ -5607,517 +5587,3 @@ def is_file_a_datasource(filename: str) -> bool:
         return True
 
     return False
-
-
-async def folder_build(
-    tb_client: TinyB,
-    workspaces: List[Dict[str, Any]],
-    datasources: List[Dict[str, Any]],
-    pipes: List[Dict[str, Any]],
-    filenames: Optional[List[str]] = None,
-    folder: str = ".",
-    ignore_sql_errors: bool = False,
-    is_internal: bool = False,
-    only_pipes: bool = False,
-):
-    if only_pipes:
-        filenames = [f for f in filenames if f.endswith(".pipe")]
-
-    config = CLIConfig.get_project_config()
-    build = True
-    dry_run = False
-    force = True
-    push_deps = False
-    only_changes = False
-    debug = False
-    check = True
-    populate = False
-    populate_subset = None
-    populate_condition = None
-    tests_to_run = 0
-    user_token = None
-    tests_failfast = True
-    override_datasource = True
-    tests_check_requests_from_branch = False
-    skip_confirmation = True
-    wait = False
-    unlink_on_populate_error = False
-    upload_fixtures = False
-    only_response_times = False
-    workspace_map: Dict[str, Any] = {}
-    tests_sample_by_params = 1
-    tests_ignore_order = False
-    tests_validate_processed_bytes = False
-    run_tests = False
-    verbose = False
-    as_standard = False
-    raise_on_exists = False
-    fork_downstream = False
-    fork = False
-    release_created = False
-    auto_promote = False
-    hide_folders = False
-    tests_relative_change = 0.01
-    tests_sample_by_params = 0
-    tests_filter_by = None
-    tests_failfast = False
-    tests_ignore_order = False
-    tests_validate_processed_bytes = False
-    tests_check_requests_from_branch = False
-    git_release = False
-    workspace_lib_paths = []
-    current_ws: Dict[str, Any] = next(
-        (workspace for workspace in workspaces if config and workspace.get("id", ".") == config.get("id", "..")), {}
-    )
-
-    workspace_lib_paths = list(workspace_lib_paths)
-    # include vendor libs without overriding user ones
-    existing_workspaces = set(x[1] for x in workspace_lib_paths)
-    vendor_path = Path("vendor")
-    if vendor_path.exists():
-        for x in vendor_path.iterdir():
-            if x.is_dir() and x.name not in existing_workspaces:
-                workspace_lib_paths.append((x.name, x))
-
-    datasources: List[Dict[str, Any]] = await tb_client.datasources()
-    pipes: List[Dict[str, Any]] = await tb_client.pipes(dependencies=True)
-
-    existing_resources: List[str] = [x["name"] for x in datasources] + [x["name"] for x in pipes]
-    # replace workspace mapping names
-    for old_ws, new_ws in workspace_map.items():
-        existing_resources = [re.sub(f"^{old_ws}\.", f"{new_ws}.", x) for x in existing_resources]
-
-    remote_resource_names = [get_remote_resource_name_without_version(x) for x in existing_resources]
-
-    # replace workspace mapping names
-    for old_ws, new_ws in workspace_map.items():
-        remote_resource_names = [re.sub(f"^{old_ws}\.", f"{new_ws}.", x) for x in remote_resource_names]
-
-    if not filenames:
-        filenames = get_project_filenames(folder)
-
-    changed = None
-
-    # build graph to get new versions for all the files involved in the query
-    # dependencies need to be processed always to get the versions
-    dependencies_graph = await build_graph(
-        filenames,
-        tb_client,
-        dir_path=folder,
-        process_dependencies=True,
-        workspace_map=workspace_map,
-        skip_connectors=True,
-        workspace_lib_paths=workspace_lib_paths,
-        current_ws=current_ws,
-        changed=changed,
-        only_changes=only_changes,
-        fork_downstream=fork_downstream,
-        is_internal=is_internal,
-        build=build,
-    )
-
-    resource_versions = {}
-    latest_datasource_versions = {}
-
-    # If we have datasources using VERSION, let's try to get the latest version
-    dependencies_graph = await build_graph(
-        filenames,
-        tb_client,
-        dir_path=folder,
-        resource_versions=latest_datasource_versions,
-        workspace_map=workspace_map,
-        process_dependencies=push_deps,
-        verbose=verbose,
-        workspace_lib_paths=workspace_lib_paths,
-        current_ws=current_ws,
-        changed=changed,
-        only_changes=only_changes,
-        fork_downstream=fork_downstream,
-        is_internal=is_internal,
-        build=build,
-    )
-
-    if debug:
-        pp.pprint(dependencies_graph.to_run)
-
-    def should_push_file(
-        name: str,
-        remote_resource_names: List[str],
-        latest_datasource_versions: Dict[str, Any],
-        force: bool,
-        run_tests: bool,
-    ) -> bool:
-        """
-        Function to know if we need to run a file or not
-        """
-        if name not in remote_resource_names:
-            return True
-        # We also need to push a file when its versioned name doesn't exist or differs from the existing one
-        resource_full_name = (
-            f"{name}__v{latest_datasource_versions.get(name)}" if name in latest_datasource_versions else name
-        )
-        if resource_full_name not in existing_resources:
-            return True
-        if force or run_tests:
-            return True
-        return False
-
-    async def push(
-        name: str,
-        to_run: Dict[str, Dict[str, Any]],
-        resource_versions: Dict[str, Any],
-        latest_datasource_versions: Dict[str, Any],
-        dry_run: bool,
-        fork_downstream: Optional[bool] = False,
-        fork: Optional[bool] = False,
-    ):
-        if name in to_run:
-            resource = to_run[name]["resource"]
-            if not dry_run:
-                if should_push_file(name, remote_resource_names, latest_datasource_versions, force, run_tests):
-                    if name not in resource_versions:
-                        version = ""
-                        if name in latest_datasource_versions:
-                            version = f"(v{latest_datasource_versions[name]})"
-                        click.echo(FeedbackManager.info_processing_new_resource(name=name, version=version))
-                    else:
-                        click.echo(
-                            FeedbackManager.info_processing_resource(
-                                name=name,
-                                version=latest_datasource_versions[name],
-                                latest_version=resource_versions.get(name),
-                            )
-                        )
-                    try:
-                        await exec_file(
-                            to_run[name],
-                            tb_client,
-                            force,
-                            check,
-                            debug and verbose,
-                            populate,
-                            populate_subset,
-                            populate_condition,
-                            unlink_on_populate_error,
-                            wait,
-                            user_token,
-                            override_datasource,
-                            ignore_sql_errors,
-                            skip_confirmation,
-                            only_response_times,
-                            run_tests,
-                            as_standard,
-                            tests_to_run,
-                            tests_relative_change,
-                            tests_sample_by_params,
-                            tests_filter_by,
-                            tests_failfast,
-                            tests_ignore_order,
-                            tests_validate_processed_bytes,
-                            tests_check_requests_from_branch,
-                            current_ws,
-                            fork_downstream,
-                            fork,
-                            git_release,
-                            build,
-                        )
-                        if not run_tests:
-                            click.echo(
-                                FeedbackManager.success_create(
-                                    name=(
-                                        name
-                                        if to_run[name]["version"] is None
-                                        else f'{name}__v{to_run[name]["version"]}'
-                                    )
-                                )
-                            )
-                    except Exception as e:
-                        filename = (
-                            os.path.basename(to_run[name]["filename"]) if hide_folders else to_run[name]["filename"]
-                        )
-                        exception = FeedbackManager.error_push_file_exception(
-                            filename=filename,
-                            error=e,
-                        )
-                        raise click.ClickException(exception)
-                else:
-                    if raise_on_exists:
-                        raise AlreadyExistsException(
-                            FeedbackManager.warning_name_already_exists(
-                                name=name if to_run[name]["version"] is None else f'{name}__v{to_run[name]["version"]}'
-                            )
-                        )
-                    else:
-                        if await name_matches_existing_resource(resource, name, tb_client):
-                            if resource == "pipes":
-                                click.echo(FeedbackManager.error_pipe_cannot_be_pushed(name=name))
-                            else:
-                                click.echo(FeedbackManager.error_datasource_cannot_be_pushed(name=name))
-                        else:
-                            click.echo(
-                                FeedbackManager.warning_name_already_exists(
-                                    name=(
-                                        name
-                                        if to_run[name]["version"] is None
-                                        else f'{name}__v{to_run[name]["version"]}'
-                                    )
-                                )
-                            )
-            else:
-                if should_push_file(name, remote_resource_names, latest_datasource_versions, force, run_tests):
-                    if name not in resource_versions:
-                        version = ""
-                        if name in latest_datasource_versions:
-                            version = f"(v{latest_datasource_versions[name]})"
-                        if build:
-                            extension = "pipe" if resource == "pipes" else "datasource"
-                            click.echo(
-                                FeedbackManager.info_building_resource(name=f"{name}.{extension}", version=version)
-                            )
-                        else:
-                            click.echo(FeedbackManager.info_dry_processing_new_resource(name=name, version=version))
-                    else:
-                        click.echo(
-                            FeedbackManager.info_dry_processing_resource(
-                                name=name,
-                                version=latest_datasource_versions[name],
-                                latest_version=resource_versions.get(name),
-                            )
-                        )
-                else:
-                    if await name_matches_existing_resource(resource, name, tb_client):
-                        if resource == "pipes":
-                            click.echo(FeedbackManager.warning_pipe_cannot_be_pushed(name=name))
-                        else:
-                            click.echo(FeedbackManager.warning_datasource_cannot_be_pushed(name=name))
-                    else:
-                        click.echo(FeedbackManager.warning_dry_name_already_exists(name=name))
-
-    async def push_files(
-        dependency_graph: GraphDependencies,
-        dry_run: bool = False,
-        check_backfill_required: bool = False,
-    ):
-        endpoints_dep_map = dict()
-        processed = set()
-
-        dependencies_graph = dependency_graph.dep_map
-        resources_to_run = dependency_graph.to_run
-
-        if not fork_downstream:
-            # First, we will deploy all the resources following the dependency graph except for the endpoints
-            groups = [group for group in toposort(dependencies_graph)]
-            for group in groups:
-                for name in group:
-                    if name in processed:
-                        continue
-
-                    if is_endpoint_with_no_dependencies(
-                        resources_to_run.get(name, {}),
-                        dependencies_graph,
-                        resources_to_run,
-                    ):
-                        endpoints_dep_map[name] = dependencies_graph[name]
-                        continue
-
-                    await push(
-                        name,
-                        resources_to_run,
-                        resource_versions,
-                        latest_datasource_versions,
-                        dry_run,
-                        fork_downstream,
-                        fork,
-                    )
-                    processed.add(name)
-
-            # Then, we will deploy the endpoints that are on the dependency graph
-            groups = [group for group in toposort(endpoints_dep_map)]
-            for group in groups:
-                for name in group:
-                    if name not in processed:
-                        await push(
-                            name,
-                            resources_to_run,
-                            resource_versions,
-                            latest_datasource_versions,
-                            dry_run,
-                            fork_downstream,
-                            fork,
-                        )
-                        processed.add(name)
-        else:
-            # This will generate the graph from right to left and will fill the gaps of the dependencies
-            # If we have a graph like this:
-            # A -> B -> C
-            # If we only modify A, the normal dependencies graph will only contain a node like _{A => B}
-            # But we need a graph that contains A, B and C and the dependencies between them to deploy them in the right order
-            dependencies_graph_fork_downstream, resources_to_run_fork_downstream = generate_forkdownstream_graph(
-                dependency_graph.all_dep_map,
-                dependency_graph.all_resources,
-                resources_to_run,
-                list(dependency_graph.dep_map.keys()),
-            )
-
-            # First, we will deploy the datasources that need to be deployed.
-            # We need to deploy the datasources from left to right as some datasources might have MV that depend on the column types of previous datasources. Ex: `test_change_column_type_landing_datasource` test
-            groups = [group for group in toposort(dependencies_graph_fork_downstream)]
-            groups.reverse()
-            for group in groups:
-                for name in group:
-                    if name in processed or not is_datasource(resources_to_run_fork_downstream[name]):
-                        continue
-
-                    # If the resource is new, we will use the normal resource information to deploy it
-                    # This is mostly used for datasources with connections.
-                    # At the moment, `resources_to_run_fork_downstream` is generated by `all_resources` and this is generated using the parameter `skip_connectors=True`
-                    # TODO: Should the `resources_to_run_fork_downstream` be generated using the `skip_connectors` parameter?
-                    if is_new(name, changed, dependencies_graph_fork_downstream, dependencies_graph_fork_downstream):
-                        await push(
-                            name,
-                            resources_to_run,
-                            resource_versions,
-                            latest_datasource_versions,
-                            dry_run,
-                            fork_downstream,
-                            fork,
-                        )
-                    else:
-                        # If we are trying to modify a Kafka or CDK datasource, we need to inform the user that the resource needs to be post-released
-                        kafka_connection_name = (
-                            resources_to_run_fork_downstream[name].get("params", {}).get("kafka_connection_name")
-                        )
-                        service = resources_to_run_fork_downstream[name].get("params", {}).get("import_service")
-                        if release_created and (kafka_connection_name or service):
-                            connector = "Kafka" if kafka_connection_name else service
-                            error_msg = FeedbackManager.error_connector_require_post_release(connector=connector)
-                            raise click.ClickException(error_msg)
-
-                        # If we are pushing a modified datasource, inform about the backfill
-                        if check_backfill_required and auto_promote and release_created:
-                            error_msg = FeedbackManager.error_check_backfill_required(resource_name=name)
-                            raise click.ClickException(error_msg)
-
-                        await push(
-                            name,
-                            resources_to_run_fork_downstream,
-                            resource_versions,
-                            latest_datasource_versions,
-                            dry_run,
-                            fork_downstream,
-                            fork,
-                        )
-                    processed.add(name)
-
-            # Now, we will create a map of all the endpoints and their dependencies
-            # We are using the forkdownstream graph to get the dependencies of the endpoints as the normal dependencies graph only contains the resources that are going to be deployed
-            # But does not include the missing gaps
-            # If we have ENDPOINT_A ----> MV_PIPE_B -----> DATASOURCE_B ------> ENDPOINT_C
-            # Where endpoint A is being used in the MV_PIPE_B, if we only modify the endpoint A
-            # The dependencies graph will only contain the endpoint A and the MV_PIPE_B, but not the DATASOURCE_B and the ENDPOINT_C
-            groups = [group for group in toposort(dependencies_graph_fork_downstream)]
-            for group in groups:
-                for name in group:
-                    if name in processed or not is_endpoint(resources_to_run_fork_downstream[name]):
-                        continue
-
-                    endpoints_dep_map[name] = dependencies_graph_fork_downstream[name]
-
-            # Now that we have the dependencies of the endpoints, we need to check that the resources have not been deployed yet and only care about the endpoints that depend on endpoints
-            groups = [group for group in toposort(endpoints_dep_map)]
-
-            # As we have used the forkdownstream graph to get the dependencies of the endpoints, we have all the dependencies of the endpoints
-            # But we need to deploy the endpoints and the dependencies of the endpoints from left to right
-            # So we need to reverse the groups
-            groups.reverse()
-            for group in groups:
-                for name in group:
-                    if name in processed or not is_endpoint(resources_to_run_fork_downstream[name]):
-                        continue
-
-                    await push(
-                        name,
-                        resources_to_run_fork_downstream,
-                        resource_versions,
-                        latest_datasource_versions,
-                        dry_run,
-                        fork_downstream,
-                        fork,
-                    )
-                    processed.add(name)
-
-            # Now we should have the endpoints and datasources deployed, we can deploy the rest of the pipes (copy & sinks)
-            # We need to rely on the forkdownstream graph as it contains all the modified pipes as well as the dependencies of the pipes
-            # In this case, we don't need to generate a new graph as we did for the endpoints as the pipes are not going to be used as dependencies and the datasources are already deployed
-            groups = [group for group in toposort(dependencies_graph_fork_downstream)]
-            for group in groups:
-                for name in group:
-                    if name in processed or is_materialized(resources_to_run_fork_downstream.get(name)):
-                        continue
-
-                    await push(
-                        name,
-                        resources_to_run_fork_downstream,
-                        resource_versions,
-                        latest_datasource_versions,
-                        dry_run,
-                        fork_downstream,
-                        fork,
-                    )
-                    processed.add(name)
-
-            # Finally, we need to deploy the materialized views from right to left.
-            # We need to rely on the forkdownstream graph as it contains all the modified materialized views as well as the dependencies of the materialized views
-            # In this case, we don't need to generate a new graph as we did for the endpoints as the pipes are not going to be used as dependencies and the datasources are already deployed
-            groups = [group for group in toposort(dependencies_graph_fork_downstream)]
-            for group in groups:
-                for name in group:
-                    if name in processed or not is_materialized(resources_to_run_fork_downstream.get(name)):
-                        continue
-
-                    await push(
-                        name,
-                        resources_to_run_fork_downstream,
-                        resource_versions,
-                        latest_datasource_versions,
-                        dry_run,
-                        fork_downstream,
-                        fork,
-                    )
-                    processed.add(name)
-
-    await push_files(dependencies_graph, dry_run)
-
-    if not dry_run and not run_tests:
-        if upload_fixtures:
-            click.echo(FeedbackManager.info_pushing_fixtures())
-
-            processed = set()
-            for group in toposort(dependencies_graph.dep_map):
-                for f in group:
-                    name = os.path.basename(f)
-                    if name not in processed and name in dependencies_graph.to_run:
-                        await check_fixtures_data(
-                            tb_client,
-                            dependencies_graph.to_run[name],
-                            debug,
-                            folder,
-                            force,
-                            mode="replace",
-                        )
-                        processed.add(name)
-            for f in dependencies_graph.to_run:
-                if f not in processed:
-                    await check_fixtures_data(
-                        tb_client,
-                        dependencies_graph.to_run[f],
-                        debug,
-                        folder,
-                        force,
-                        mode="replace",
-                    )
-        else:
-            if verbose:
-                click.echo(FeedbackManager.info_not_pushing_fixtures())
-
-    return dependencies_graph.to_run
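The removed folder_build duplicated folder_push's deployment machinery with hard-coded flags (build = True, force = True, dry_run = False, and so on); dev7 deletes it along with the build parameter threaded through new_ds, exec_file, build_graph and folder_push above, presumably superseded by the new tinybird/tb/modules/datafile/build.py listed in the files changed. The ordering logic it relied on is plain topological sorting: dep_map maps each resource to the set of resources it depends on, and toposort yields batches that are safe to deploy together. A minimal standalone sketch, assuming the toposort PyPI package the removed code calls (resource names hypothetical):

from toposort import toposort

# name -> set of names it depends on (mirrors dependencies_graph.dep_map)
dep_map = {
    "mv_pipe_b": {"endpoint_a"},
    "datasource_b": {"mv_pipe_b"},
    "endpoint_c": {"datasource_b"},
}

# Batches come out dependency-first; folder_build reversed the groups when it
# needed to walk the fork-downstream graph from the other end.
for group in toposort(dep_map):
    for name in sorted(group):
        print("deploy", name)
# deploy endpoint_a
# deploy mv_pipe_b
# deploy datasource_b
# deploy endpoint_c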
tinybird-0.0.1.dev7/tinybird/tb/cli.py

@@ -0,0 +1,28 @@
+import asyncio
+import sys
+
+if sys.platform == "win32":
+    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
+
+import tinybird.tb.modules.auth
+import tinybird.tb.modules.branch
+import tinybird.tb.modules.build
+import tinybird.tb.modules.cli
+import tinybird.tb.modules.common
+import tinybird.tb.modules.connection
+import tinybird.tb.modules.create
+import tinybird.tb.modules.datasource
+import tinybird.tb.modules.fmt
+import tinybird.tb.modules.job
+import tinybird.tb.modules.mock
+import tinybird.tb.modules.pipe
+import tinybird.tb.modules.tag
+import tinybird.tb.modules.test
+import tinybird.tb.modules.token
+import tinybird.tb.modules.workspace
+import tinybird.tb.modules.workspace_members
+
+cli = tinybird.tb.modules.cli.cli
+
+if __name__ == "__main__":
+    cli()
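The new entry module shows two patterns worth noting. The sys.platform == "win32" guard installs WindowsSelectorEventLoopPolicy before anything else touches asyncio; the selector loop avoids compatibility problems some libraries have with the default Proactor loop on Windows (the motivation is an inference, the diff only shows the guard). The long import block exists purely for side effects: each tinybird.tb.modules.* module registers its subcommands against the shared Click group at import time. A minimal sketch of that registration pattern, with hypothetical names:

import click

@click.group()
def cli() -> None:
    """Root group; submodules attach commands to it when imported."""

# In a submodule (e.g. a hypothetical modules/auth.py), importing the group
# and decorating a function is all the registration that is needed:
@cli.command(name="auth")
def auth_command() -> None:
    click.echo("authenticate against the API")

if __name__ == "__main__":
    cli()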