tinybird 0.0.1.dev6__py3-none-any.whl → 0.0.1.dev8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of tinybird might be problematic. Click here for more details.

Files changed (31)
  1. tinybird/tb/modules/branch.py +0 -21
  2. tinybird/tb/modules/build.py +7 -18
  3. tinybird/tb/modules/cli.py +11 -131
  4. tinybird/tb/modules/common.py +14 -2
  5. tinybird/tb/modules/create.py +10 -14
  6. tinybird/tb/modules/datafile/build.py +2136 -0
  7. tinybird/tb/modules/datafile/build_common.py +118 -0
  8. tinybird/tb/modules/datafile/build_datasource.py +413 -0
  9. tinybird/tb/modules/datafile/build_pipe.py +648 -0
  10. tinybird/tb/modules/datafile/common.py +898 -0
  11. tinybird/tb/modules/datafile/diff.py +197 -0
  12. tinybird/tb/modules/datafile/exceptions.py +23 -0
  13. tinybird/tb/modules/datafile/format_common.py +66 -0
  14. tinybird/tb/modules/datafile/format_datasource.py +160 -0
  15. tinybird/tb/modules/datafile/format_pipe.py +195 -0
  16. tinybird/tb/modules/datafile/parse_datasource.py +41 -0
  17. tinybird/tb/modules/datafile/parse_pipe.py +69 -0
  18. tinybird/tb/modules/datafile/pipe_checker.py +560 -0
  19. tinybird/tb/modules/datafile/pull.py +157 -0
  20. tinybird/tb/modules/datasource.py +1 -1
  21. tinybird/tb/modules/fmt.py +4 -1
  22. tinybird/tb/modules/local.py +3 -0
  23. tinybird/tb/modules/pipe.py +8 -2
  24. tinybird/tb/modules/prompts.py +1 -1
  25. tinybird/tb/modules/workspace.py +1 -1
  26. {tinybird-0.0.1.dev6.dist-info → tinybird-0.0.1.dev8.dist-info}/METADATA +1 -1
  27. {tinybird-0.0.1.dev6.dist-info → tinybird-0.0.1.dev8.dist-info}/RECORD +30 -17
  28. tinybird/tb/modules/datafile.py +0 -6122
  29. {tinybird-0.0.1.dev6.dist-info → tinybird-0.0.1.dev8.dist-info}/WHEEL +0 -0
  30. {tinybird-0.0.1.dev6.dist-info → tinybird-0.0.1.dev8.dist-info}/entry_points.txt +0 -0
  31. {tinybird-0.0.1.dev6.dist-info → tinybird-0.0.1.dev8.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,118 @@
1
+ from typing import Any, Dict, List, Optional, Tuple
2
+
3
+ import click
4
+
5
+ from tinybird.client import DoesNotExistException, TinyB
6
+ from tinybird.feedback_manager import FeedbackManager
7
+
8
+
9
async def update_tags(resource_id: str, resource_name: str, resource_type: str, tags: List[str], tb_client: TinyB):
    """Reconcile the server-side tags of a resource so they match exactly ``tags``.

    Fetches every tag in the workspace, computes which tags must be added to or
    removed from the resource, then creates or updates tags through ``tb_client``.

    Args:
        resource_id: Server-side id of the resource being tagged.
        resource_name: Server-side name of the resource being tagged.
        resource_type: Resource kind as the API expects it (e.g. "datasource", "pipe").
        tags: The desired, final set of tag names for the resource.
        tb_client: API client used for all tag operations.

    Raises:
        Exception: with a FeedbackManager message when any API call fails.
    """

    def get_tags_for_resource(all_tags: dict, resource_id: str, resource_name: str) -> List[str]:
        # Names of every tag that already references this resource (matched by id OR name).
        tag_names = []

        for tag in all_tags.get("tags", []):
            for resource in tag.get("resources", []):
                if resource.get("id") == resource_id or resource.get("name") == resource_name:
                    tag_names.append(tag.get("name"))
                    break  # No need to check other resources in this tag

        return tag_names

    def get_tag(all_tags: dict, tag_name: str) -> Optional[dict]:
        # Look up a tag object by name; None when the tag does not exist yet.
        for tag in all_tags.get("tags", []):
            if tag.get("name") == tag_name:
                return tag
        return None

    def compare_tags(current_tags: List[str], new_tags: List[str]) -> Tuple[List[str], List[str]]:
        # Set difference in both directions: (tags to add, tags to remove).
        tags_to_add = list(set(new_tags) - set(current_tags))
        tags_to_remove = list(set(current_tags) - set(new_tags))
        return tags_to_add, tags_to_remove

    try:
        all_tags = await tb_client.get_all_tags()
    except Exception as e:
        raise Exception(FeedbackManager.error_getting_tags(error=str(e)))

    # Get all tags of that resource
    current_tags = get_tags_for_resource(all_tags, resource_id, resource_name)

    # Get the tags to add and remove
    tags_to_add, tags_to_remove = compare_tags(current_tags, tags)

    # Tags to add
    for tag_name in tags_to_add:
        tag = get_tag(all_tags, tag_name)

        if not tag:
            # Create new tag
            try:
                await tb_client.create_tag_with_resource(
                    name=tag_name,
                    resource_id=resource_id,
                    resource_name=resource_name,
                    resource_type=resource_type,
                )
            except Exception as e:
                raise Exception(FeedbackManager.error_creating_tag(error=str(e)))
        else:
            # Update tag with new resource
            resources = tag.get("resources", [])
            resources.append({"id": resource_id, "name": resource_name, "type": resource_type})
            try:
                await tb_client.update_tag(tag.get("name", tag_name), resources)
            except Exception as e:
                raise Exception(FeedbackManager.error_updating_tag(error=str(e)))

    # Tags to delete
    for tag_name in tags_to_remove:
        tag = get_tag(all_tags, tag_name)

        if tag:
            # Drop this resource from the tag. Match by id OR name so the removal
            # criteria is consistent with get_tags_for_resource above (previously
            # only the name was checked, which left stale id-only entries behind
            # for resources referenced by id).
            resources = [
                resource
                for resource in tag.get("resources", [])
                if resource.get("id") != resource_id and resource.get("name") != resource_name
            ]
            try:
                await tb_client.update_tag(tag.get("name", tag_name), resources)
            except Exception as e:
                raise Exception(FeedbackManager.error_updating_tag(error=str(e)))
78
+
79
+
80
async def update_tags_in_resource(rs: Dict[str, Any], resource_type: str, client: TinyB):
    """Sync the filtering tags declared in a datafile with the resource's server-side tags.

    Resolves the persisted id/name of the Data Source or Pipe described by ``rs``
    and delegates to ``update_tags``. Failures are reported via ``click.echo``
    rather than raised, so tagging never aborts the surrounding operation.
    """
    filtering_tags = rs.get("filtering_tags", [])

    # Nothing declared in the datafile: nothing to sync.
    if not filtering_tags:
        return

    resource_id = ""
    resource_name = ""

    if resource_type == "datasource":
        try:
            persisted = await client.get_datasource(rs["params"]["name"])
        except DoesNotExistException:
            click.echo(
                FeedbackManager.error_tag_generic("Could not get the latest Data Source info for updating its tags.")
            )
        else:
            resource_id = persisted.get("id", "")
            resource_name = persisted.get("name", "")
    elif resource_type == "pipe":
        try:
            persisted = await client.pipe(rs["name"])
        except DoesNotExistException:
            click.echo(FeedbackManager.error_tag_generic("Could not get the latest Pipe info for updating its tags."))
        else:
            resource_id = persisted.get("id", "")
            resource_name = persisted.get("name", "")

    # Only proceed when the resource was resolved server-side.
    if not (resource_id and resource_name):
        return

    try:
        await update_tags(
            resource_id=resource_id,
            resource_name=resource_name,
            resource_type=resource_type,
            tags=filtering_tags,
            tb_client=client,
        )
    except Exception as e:
        click.echo(FeedbackManager.error_tag_generic(error=str(e)))
@@ -0,0 +1,413 @@
1
+ import os
2
+ from copy import deepcopy
3
+ from dataclasses import asdict
4
+ from typing import Any, Dict, List, Optional
5
+
6
+ import click
7
+
8
+ from tinybird.client import DoesNotExistException, TinyB
9
+ from tinybird.feedback_manager import FeedbackManager
10
+ from tinybird.tb.modules.datafile.common import PREVIEW_CONNECTOR_SERVICES, ImportReplacements
11
+
12
+
13
async def new_ds(
    ds: Dict[str, Any],
    client: TinyB,
    user_token: Optional[str],
    force: bool = False,
    skip_confirmation: bool = False,
    current_ws=None,
    local_ws=None,
    fork_downstream: Optional[bool] = False,
    fork: Optional[bool] = False,
    build: Optional[bool] = False,
    is_vendor: Optional[bool] = False,
):
    """Create or update a Data Source from its parsed datafile representation.

    ``ds`` is the parsed .datasource payload: ``params`` (name, schema, engine,
    connector settings, ...) plus optional ``tokens`` and ``shared_with`` entries.

    Behaviour:
      * If the Data Source does not exist — or ``fork``/``fork_downstream`` is
        set — it is created from scratch (tokens and sharing included) and the
        function returns.
      * If it exists and ``force`` is not set, a ClickException is raised.
      * Otherwise the differences (description, TTL, schema, indexes) are
        applied via an alter operation, with a confirmation prompt unless
        ``skip_confirmation`` is set; connector-backed services are validated
        and/or promoted instead.

    Raises ``click.ClickException`` with a FeedbackManager message on failure.
    """
    ds_name = ds["params"]["name"]

    async def manage_tokens():
        # search for token with specified name and adds it if not found or adds permissions to it
        t = None
        for tk in ds["tokens"]:
            token_name = tk["token_name"]
            t = await client.get_token_by_name(token_name)
            if not t:
                token_name = tk["token_name"]
                # DS == token_origin.Origins.DATASOURCE
                await client.create_token(token_name, [f"DATASOURCES:{tk['permissions']}:{ds_name}"], "DS", ds_name)
            else:
                # Token exists: re-grant the datasource scope plus all scopes it already had.
                scopes = [f"DATASOURCES:{tk['permissions']}:{ds_name}"]
                for x in t["scopes"]:
                    sc = x["type"] if "resource" not in x else f"{x['type']}:{x['resource']}"
                    scopes.append(sc)
                await client.alter_tokens(token_name, scopes)

    # Probe for an existing Data Source with this name. NOTE: existing_ds is only
    # bound when datasource_exists is True; later code relies on the create branch
    # below always returning before existing_ds is read.
    datasource_exists = False
    try:
        existing_ds = await client.get_datasource(ds_name)
        datasource_exists = True
    except DoesNotExistException:
        datasource_exists = False

    engine_param = ds["params"].get("engine", "")

    # DynamoDB sources only support the ReplacingMergeTree engine (or none).
    if (
        ds["params"].get("service") == "dynamodb"
        and engine_param != ""
        and engine_param.lower() != "replacingmergetree"
    ):
        raise click.ClickException(FeedbackManager.error_dynamodb_engine_not_supported(engine=engine_param))

    if engine_param.lower() == "join":
        deprecation_notice = FeedbackManager.warning_deprecated(
            warning="Data Sources with Join engine are deprecated and will be removed in the next major release of tinybird-cli. Use MergeTree instead."
        )
        click.echo(deprecation_notice)

    # --- Creation path (also taken when forking): always returns. ---
    if not datasource_exists or fork_downstream or fork:
        params = ds["params"]

        try:
            # Preview connectors need an explicit file format, derived from the
            # bucket URI extension (looking past a trailing ".gz").
            if (
                params.get("service") in PREVIEW_CONNECTOR_SERVICES
                and params.get("connector")
                and params.get("bucket_uri")
            ):
                bucket_uri = params.get("bucket_uri")
                extension = bucket_uri.split(".")[-1]
                if extension == "gz":
                    extension = bucket_uri.split(".")[-2]
                valid_formats = ["csv", "json", "jsonl", "ndjson", "parquet"]
                if extension not in valid_formats:
                    raise Exception(FeedbackManager.error_format(extension=extension, valid_formats=valid_formats))
                params["format"] = extension
            datasource_response = await client.datasource_create_from_definition(params)
            datasource = datasource_response.get("datasource", {})

            # DynamoDB kicks off an initial import job; surface its URL if still running.
            if datasource.get("service") == "dynamodb":
                job_id = datasource_response.get("import_id", None)
                if job_id:
                    jobs = await client.jobs(status=["waiting", "working"])
                    job_url = next((job["job_url"] for job in jobs if job["id"] == job_id), None)
                    if job_url:
                        click.echo(FeedbackManager.success_dynamodb_initial_load(job_url=job_url))

            if ds.get("tokens"):
                await manage_tokens()

            # SHARED_WITH requires the user token (not the workspace token).
            if ds.get("shared_with") and not build:
                if not user_token:
                    click.echo(FeedbackManager.info_skipping_shared_with_entry())
                else:
                    await share_and_unshare_datasource(
                        client,
                        datasource,
                        user_token,
                        workspaces_current_shared_with=[],
                        workspaces_to_share=ds["shared_with"],
                        current_ws=current_ws,
                    )
            # Vendored resources are additionally shared into the local workspace.
            if is_vendor and user_token and local_ws and current_ws:
                user_client: TinyB = deepcopy(client)
                user_client.token = user_token
                await user_client.datasource_share(
                    datasource_id=datasource.get("id", ""),
                    current_workspace_id=current_ws.get("id", ""),
                    destination_workspace_id=local_ws.get("id", ""),
                )

        except Exception as e:
            raise click.ClickException(FeedbackManager.error_creating_datasource(error=str(e)))
        return

    # --- Update path: the Data Source exists and was not forked. ---
    if not force:
        raise click.ClickException(FeedbackManager.error_datasource_already_exists(datasource=ds_name))

    # Reconcile sharing before altering anything else.
    if ds.get("shared_with", []) or existing_ds.get("shared_with", []):
        if not user_token:
            click.echo(FeedbackManager.info_skipping_shared_with_entry())
        else:
            await share_and_unshare_datasource(
                client,
                existing_ds,
                user_token,
                existing_ds.get("shared_with", []),
                ds.get("shared_with", []),
                current_ws,
            )

    # Diff the datafile against the persisted Data Source; each new_* stays None
    # when that aspect is unchanged.
    alter_response = None
    alter_error_message = None
    new_description = None
    new_schema = None
    new_indices = None
    new_ttl = None

    try:
        if datasource_exists and ds["params"]["description"] != existing_ds["description"]:
            new_description = ds["params"]["description"]

        if datasource_exists and ds["params"].get("engine_ttl") != existing_ds["engine"].get("ttl"):
            new_ttl = ds["params"].get("engine_ttl", "false")

        # Schema fixed by the kafka connector
        if datasource_exists and (
            ds["params"]["schema"].replace(" ", "") != existing_ds["schema"]["sql_schema"].replace(" ", "")
        ):
            new_schema = ds["params"]["schema"]

        # Compare index definitions order-insensitively (sorted by name).
        if datasource_exists:
            new = [asdict(index) for index in ds.get("params", {}).get("indexes_list", [])]
            existing = existing_ds.get("indexes", [])
            new.sort(key=lambda x: x["name"])
            existing.sort(key=lambda x: x["name"])
            if len(existing) != len(new) or any([(d, d2) for d, d2 in zip(new, existing) if d != d2]):
                # "0" signals index removal to the API — TODO confirm against API docs.
                new_indices = ds.get("params", {}).get("indexes") or "0"
        if (
            new_description
            or new_schema
            or new_ttl
            or ((new_indices is not None) and (not fork_downstream or not fork))
        ):
            # Dry run first: collect the operations so the user can confirm them.
            alter_response = await client.alter_datasource(
                ds_name,
                new_schema=new_schema,
                description=new_description,
                ttl=new_ttl,
                dry_run=True,
                indexes=new_indices,
            )
    except Exception as e:
        # A no-op alter is not an error; anything else is surfaced later.
        if "There were no operations to perform" in str(e):
            pass
        else:
            alter_error_message = str(e)

    if alter_response:
        # Show the pending operations (and affected pipes), then confirm and apply.
        click.echo(FeedbackManager.info_datasource_doesnt_match(datasource=ds_name))
        for operation in alter_response["operations"]:
            click.echo(f"** - {operation}")
        if alter_response["operations"] and alter_response.get("dependencies", []):
            click.echo(FeedbackManager.info_datasource_alter_dependent_pipes())
            for dependency in alter_response.get("dependencies", []):
                click.echo(f"** - {dependency}")

        if skip_confirmation:
            make_changes = True
        else:
            make_changes = click.prompt(FeedbackManager.info_ask_for_alter_confirmation()).lower() == "y"

        if make_changes:
            await client.alter_datasource(
                ds_name,
                new_schema=new_schema,
                description=new_description,
                ttl=new_ttl,
                dry_run=False,
                indexes=new_indices,
            )
            click.echo(FeedbackManager.success_datasource_alter())
        else:
            alter_error_message = "Alter datasource cancelled"

    if alter_error_message:
        raise click.ClickException(
            FeedbackManager.error_datasource_already_exists_and_alter_failed(
                datasource=ds_name, alter_error_message=alter_error_message
            )
        )

    # backfill_column changes go through a plain update, not an alter.
    if datasource_exists and ds["params"].get("backfill_column") != existing_ds["tags"].get("backfill_column"):
        params = {
            "backfill_column": ds["params"].get("backfill_column"),
        }

        try:
            click.echo(FeedbackManager.info_update_datasource(datasource=ds_name, params=params))
            await client.update_datasource(ds_name, params)
            click.echo(FeedbackManager.success_update_datasource(datasource=ds_name, params=params))
            make_changes = True
            alter_response = True
        except Exception as e:
            raise click.ClickException(FeedbackManager.error_updating_datasource(datasource=ds_name, error=str(e)))

    connector_data = None
    promote_error_message = None

    # --- Connector-backed services: validate linker settings or promote. ---
    ds_params = ds["params"]
    service = ds_params.get("service")
    DATASOURCE_VALID_SERVICES_TO_UPDATE = ["bigquery", "snowflake"]
    if datasource_exists and service and service in [*DATASOURCE_VALID_SERVICES_TO_UPDATE, *PREVIEW_CONNECTOR_SERVICES]:
        connector_required_params = {
            "bigquery": ["service", "cron", "external_data_source"],
            "snowflake": ["connector", "service", "cron", "external_data_source"],
            "s3": ["connector", "service", "cron", "bucket_uri"],
            "s3_iamrole": ["connector", "service", "cron", "bucket_uri"],
            "gcs": ["connector", "service", "cron", "bucket_uri"],
        }.get(service, [])

        # Incomplete connector definition: silently leave the connector untouched.
        if not all(key in ds_params for key in connector_required_params):
            return

        connector = ds_params.get("connector", None)

        if service in PREVIEW_CONNECTOR_SERVICES:
            # Preview connectors cannot be updated in place: verify that nothing
            # the linker controls has changed, and fail with the datafile-level
            # parameter name if it has.
            connector_id = existing_ds.get("connector", "")
            if not connector_id:
                return

            current_connector = await client.get_connector_by_id(existing_ds.get("connector", ""))
            if not current_connector:
                return

            if current_connector["name"] != ds_params["connection"]:
                param = "connection"
                datafile_param = ImportReplacements.get_datafile_param_for_linker_param(service, param) or param
                raise click.ClickException(FeedbackManager.error_updating_connector_not_supported(param=datafile_param))

            linkers = current_connector.get("linkers", [])
            linker = next((linker for linker in linkers if linker["datasource_id"] == existing_ds["id"]), None)
            if not linker:
                return

            linker_settings = linker.get("settings", {})
            for param, value in linker_settings.items():
                ds_params_value = ds_params.get(param, None)
                if ds_params_value and ds_params_value != value:
                    datafile_param = ImportReplacements.get_datafile_param_for_linker_param(service, param) or param
                    raise Exception(
                        FeedbackManager.error_updating_connector_not_supported(param=datafile_param.upper())
                    )
            return

        # bigquery/snowflake: push the connector settings (a "promote" update).
        connector_data = {
            "connector": connector,
            "service": service,
            "cron": ds_params.get("cron", None),
            "external_data_source": ds_params.get("external_data_source", None),
            "bucket_uri": ds_params.get("bucket_uri", None),
            "mode": ds_params.get("mode", "replace"),
            "query": ds_params.get("query", None),
            "ingest_now": ds_params.get("ingest_now", False),
        }

        try:
            await client.update_datasource(ds_name, connector_data)
            click.echo(FeedbackManager.success_promoting_datasource(datasource=ds_name))
            return
        except Exception as e:
            promote_error_message = str(e)

    if alter_response and make_changes:
        # alter operation finished
        pass
    else:
        # removed replacing by default. When a datasource is removed data is
        # removed and all the references needs to be updated
        if (
            os.getenv("TB_I_KNOW_WHAT_I_AM_DOING")
            and click.prompt(FeedbackManager.info_ask_for_datasource_confirmation()) == ds_name
        ):  # TODO move to CLI
            try:
                await client.datasource_delete(ds_name)
                click.echo(FeedbackManager.success_delete_datasource(datasource=ds_name))
            except Exception:
                raise click.ClickException(FeedbackManager.error_removing_datasource(datasource=ds_name))
            return
        else:
            # Nothing was (or could be) changed: report the most specific failure.
            if alter_error_message:
                raise click.ClickException(
                    FeedbackManager.error_datasource_already_exists_and_alter_failed(
                        datasource=ds_name, alter_error_message=alter_error_message
                    )
                )
            if promote_error_message:
                raise click.ClickException(
                    FeedbackManager.error_promoting_datasource(datasource=ds_name, error=promote_error_message)
                )
            else:
                click.echo(FeedbackManager.warning_datasource_already_exists(datasource=ds_name))
330
+
331
+
332
async def share_and_unshare_datasource(
    client: TinyB,
    datasource: Dict[str, Any],
    user_token: str,
    workspaces_current_shared_with: List[str],
    workspaces_to_share: List[str],
    current_ws: Optional[Dict[str, Any]],
) -> None:
    """Bring the workspaces a Data Source is shared with in line with the datafile.

    When nothing is currently shared, the Data Source is shared with every
    workspace named in ``workspaces_to_share``. Otherwise the current and
    desired lists are diffed and each workspace is shared or unshared
    accordingly. Branch workspaces are skipped entirely.
    """
    datasource_name = datasource.get("name", "")
    datasource_id = datasource.get("id", "")
    workspaces: List[Dict[str, Any]]

    # In case we are pushing to a branch, we don't share the datasource
    # FIXME: Have only once way to get the current workspace
    if current_ws:
        # Force to get all the workspaces the user can access
        workspace = current_ws
        workspaces = (await client.user_workspaces()).get("workspaces", [])
    else:
        workspace = await client.user_workspace_branches()
        workspaces = workspace.get("workspaces", [])

    if workspace.get("is_branch", False):
        click.echo(FeedbackManager.info_skipping_sharing_datasources_branch(datasource=datasource["name"]))
        return

    # Sharing requires the user token, so clone the client and swap tokens.
    user_client: TinyB = deepcopy(client)
    user_client.token = user_token

    workspace_id = workspace.get("id", "")

    if not workspaces_current_shared_with:
        # First share: every requested workspace is looked up by name.
        for target_name in workspaces_to_share:
            target: Optional[Dict[str, Any]] = next((ws for ws in workspaces if ws["name"] == target_name), None)
            if target is None:
                raise Exception(
                    f"Unable to share datasource with the workspace {target_name}. Review that you have the admin permissions on this workspace"
                )

            await user_client.datasource_share(
                datasource_id=datasource_id,
                current_workspace_id=workspace_id,
                destination_workspace_id=target.get("id", ""),
            )
            click.echo(
                FeedbackManager.success_datasource_shared(datasource=datasource_name, workspace=target.get("name", ""))
            )
        return

    def _referenced(entry: Dict[str, Any], references: List[str]) -> bool:
        # A workspace reference may be either its id or its name.
        return any(ref == entry["id"] or ref == entry["name"] for ref in references)

    shared_with = [ws for ws in workspaces if _referenced(ws, workspaces_current_shared_with)]
    defined_to_share_with = [ws for ws in workspaces if _referenced(ws, workspaces_to_share)]
    need_to_share = [ws for ws in defined_to_share_with if ws not in shared_with]
    need_to_unshare = [ws for ws in shared_with if ws not in defined_to_share_with]

    for ws in need_to_share:
        await user_client.datasource_share(
            datasource_id=datasource_id,
            current_workspace_id=workspace_id,
            destination_workspace_id=ws.get("id", ""),
        )
        click.echo(
            FeedbackManager.success_datasource_shared(datasource=datasource["name"], workspace=ws.get("name", ""))
        )

    for ws in need_to_unshare:
        await user_client.datasource_unshare(
            datasource_id=datasource_id,
            current_workspace_id=workspace_id,
            destination_workspace_id=ws.get("id", ""),
        )
        click.echo(
            FeedbackManager.success_datasource_unshared(datasource=datasource_name, workspace=ws.get("name", ""))
        )
408
+
409
+
410
def is_datasource(resource: Optional[Dict[str, Any]]) -> bool:
    """Tell whether a parsed resource entry describes a Data Source.

    A resource qualifies when it is a non-empty mapping whose "resource"
    key equals "datasources".
    """
    if not resource:
        return False
    return resource.get("resource") == "datasources"