semantic-link-labs 0.12.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
- semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
- semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
- semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
- semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
- sempy_labs/__init__.py +606 -0
- sempy_labs/_a_lib_info.py +2 -0
- sempy_labs/_ai.py +437 -0
- sempy_labs/_authentication.py +264 -0
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
- sempy_labs/_capacities.py +1198 -0
- sempy_labs/_capacity_migration.py +660 -0
- sempy_labs/_clear_cache.py +351 -0
- sempy_labs/_connections.py +610 -0
- sempy_labs/_dashboards.py +69 -0
- sempy_labs/_data_access_security.py +98 -0
- sempy_labs/_data_pipelines.py +162 -0
- sempy_labs/_dataflows.py +668 -0
- sempy_labs/_dax.py +501 -0
- sempy_labs/_daxformatter.py +80 -0
- sempy_labs/_delta_analyzer.py +467 -0
- sempy_labs/_delta_analyzer_history.py +301 -0
- sempy_labs/_dictionary_diffs.py +221 -0
- sempy_labs/_documentation.py +147 -0
- sempy_labs/_domains.py +51 -0
- sempy_labs/_eventhouses.py +182 -0
- sempy_labs/_external_data_shares.py +230 -0
- sempy_labs/_gateways.py +521 -0
- sempy_labs/_generate_semantic_model.py +521 -0
- sempy_labs/_get_connection_string.py +84 -0
- sempy_labs/_git.py +543 -0
- sempy_labs/_graphQL.py +90 -0
- sempy_labs/_helper_functions.py +2833 -0
- sempy_labs/_icons.py +149 -0
- sempy_labs/_job_scheduler.py +609 -0
- sempy_labs/_kql_databases.py +149 -0
- sempy_labs/_kql_querysets.py +124 -0
- sempy_labs/_kusto.py +137 -0
- sempy_labs/_labels.py +124 -0
- sempy_labs/_list_functions.py +1720 -0
- sempy_labs/_managed_private_endpoints.py +253 -0
- sempy_labs/_mirrored_databases.py +416 -0
- sempy_labs/_mirrored_warehouses.py +60 -0
- sempy_labs/_ml_experiments.py +113 -0
- sempy_labs/_model_auto_build.py +140 -0
- sempy_labs/_model_bpa.py +557 -0
- sempy_labs/_model_bpa_bulk.py +378 -0
- sempy_labs/_model_bpa_rules.py +859 -0
- sempy_labs/_model_dependencies.py +343 -0
- sempy_labs/_mounted_data_factories.py +123 -0
- sempy_labs/_notebooks.py +441 -0
- sempy_labs/_one_lake_integration.py +151 -0
- sempy_labs/_onelake.py +131 -0
- sempy_labs/_query_scale_out.py +433 -0
- sempy_labs/_refresh_semantic_model.py +435 -0
- sempy_labs/_semantic_models.py +468 -0
- sempy_labs/_spark.py +455 -0
- sempy_labs/_sql.py +241 -0
- sempy_labs/_sql_audit_settings.py +207 -0
- sempy_labs/_sql_endpoints.py +214 -0
- sempy_labs/_tags.py +201 -0
- sempy_labs/_translations.py +43 -0
- sempy_labs/_user_delegation_key.py +44 -0
- sempy_labs/_utils.py +79 -0
- sempy_labs/_vertipaq.py +1021 -0
- sempy_labs/_vpax.py +388 -0
- sempy_labs/_warehouses.py +234 -0
- sempy_labs/_workloads.py +140 -0
- sempy_labs/_workspace_identity.py +72 -0
- sempy_labs/_workspaces.py +595 -0
- sempy_labs/admin/__init__.py +170 -0
- sempy_labs/admin/_activities.py +167 -0
- sempy_labs/admin/_apps.py +145 -0
- sempy_labs/admin/_artifacts.py +65 -0
- sempy_labs/admin/_basic_functions.py +463 -0
- sempy_labs/admin/_capacities.py +508 -0
- sempy_labs/admin/_dataflows.py +45 -0
- sempy_labs/admin/_datasets.py +186 -0
- sempy_labs/admin/_domains.py +522 -0
- sempy_labs/admin/_external_data_share.py +100 -0
- sempy_labs/admin/_git.py +72 -0
- sempy_labs/admin/_items.py +265 -0
- sempy_labs/admin/_labels.py +211 -0
- sempy_labs/admin/_reports.py +241 -0
- sempy_labs/admin/_scanner.py +118 -0
- sempy_labs/admin/_shared.py +82 -0
- sempy_labs/admin/_sharing_links.py +110 -0
- sempy_labs/admin/_tags.py +131 -0
- sempy_labs/admin/_tenant.py +503 -0
- sempy_labs/admin/_tenant_keys.py +89 -0
- sempy_labs/admin/_users.py +140 -0
- sempy_labs/admin/_workspaces.py +236 -0
- sempy_labs/deployment_pipeline/__init__.py +23 -0
- sempy_labs/deployment_pipeline/_items.py +580 -0
- sempy_labs/directlake/__init__.py +57 -0
- sempy_labs/directlake/_autosync.py +58 -0
- sempy_labs/directlake/_directlake_schema_compare.py +120 -0
- sempy_labs/directlake/_directlake_schema_sync.py +161 -0
- sempy_labs/directlake/_dl_helper.py +274 -0
- sempy_labs/directlake/_generate_shared_expression.py +94 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
- sempy_labs/directlake/_get_shared_expression.py +34 -0
- sempy_labs/directlake/_guardrails.py +96 -0
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
- sempy_labs/directlake/_warm_cache.py +236 -0
- sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
- sempy_labs/environment/__init__.py +23 -0
- sempy_labs/environment/_items.py +212 -0
- sempy_labs/environment/_pubstage.py +223 -0
- sempy_labs/eventstream/__init__.py +37 -0
- sempy_labs/eventstream/_items.py +263 -0
- sempy_labs/eventstream/_topology.py +652 -0
- sempy_labs/graph/__init__.py +59 -0
- sempy_labs/graph/_groups.py +651 -0
- sempy_labs/graph/_sensitivity_labels.py +120 -0
- sempy_labs/graph/_teams.py +125 -0
- sempy_labs/graph/_user_licenses.py +96 -0
- sempy_labs/graph/_users.py +516 -0
- sempy_labs/graph_model/__init__.py +15 -0
- sempy_labs/graph_model/_background_jobs.py +63 -0
- sempy_labs/graph_model/_items.py +149 -0
- sempy_labs/lakehouse/__init__.py +67 -0
- sempy_labs/lakehouse/_blobs.py +247 -0
- sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
- sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
- sempy_labs/lakehouse/_helper.py +250 -0
- sempy_labs/lakehouse/_lakehouse.py +351 -0
- sempy_labs/lakehouse/_livy_sessions.py +143 -0
- sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
- sempy_labs/lakehouse/_partitioning.py +165 -0
- sempy_labs/lakehouse/_schemas.py +217 -0
- sempy_labs/lakehouse/_shortcuts.py +440 -0
- sempy_labs/migration/__init__.py +35 -0
- sempy_labs/migration/_create_pqt_file.py +238 -0
- sempy_labs/migration/_direct_lake_to_import.py +105 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
- sempy_labs/migration/_migration_validation.py +71 -0
- sempy_labs/migration/_refresh_calc_tables.py +131 -0
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +55 -0
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_download_report.py +76 -0
- sempy_labs/report/_export_report.py +257 -0
- sempy_labs/report/_generate_report.py +427 -0
- sempy_labs/report/_paginated.py +76 -0
- sempy_labs/report/_report_bpa.py +354 -0
- sempy_labs/report/_report_bpa_rules.py +115 -0
- sempy_labs/report/_report_functions.py +581 -0
- sempy_labs/report/_report_helper.py +227 -0
- sempy_labs/report/_report_list_functions.py +110 -0
- sempy_labs/report/_report_rebind.py +149 -0
- sempy_labs/report/_reportwrapper.py +3100 -0
- sempy_labs/report/_save_report.py +147 -0
- sempy_labs/snowflake_database/__init__.py +10 -0
- sempy_labs/snowflake_database/_items.py +105 -0
- sempy_labs/sql_database/__init__.py +21 -0
- sempy_labs/sql_database/_items.py +201 -0
- sempy_labs/sql_database/_mirroring.py +79 -0
- sempy_labs/theme/__init__.py +12 -0
- sempy_labs/theme/_org_themes.py +129 -0
- sempy_labs/tom/__init__.py +3 -0
- sempy_labs/tom/_model.py +5977 -0
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- sempy_labs/warehouse/__init__.py +28 -0
- sempy_labs/warehouse/_items.py +234 -0
- sempy_labs/warehouse/_restore_points.py +309 -0
|
@@ -0,0 +1,207 @@
|
|
|
1
|
+
from sempy_labs._helper_functions import (
|
|
2
|
+
resolve_item_id,
|
|
3
|
+
resolve_workspace_name_and_id,
|
|
4
|
+
_base_api,
|
|
5
|
+
_create_dataframe,
|
|
6
|
+
_update_dataframe_datatypes,
|
|
7
|
+
resolve_workspace_id,
|
|
8
|
+
)
|
|
9
|
+
import pandas as pd
|
|
10
|
+
from typing import Optional, List, Literal
|
|
11
|
+
import sempy_labs._icons as icons
|
|
12
|
+
from uuid import UUID
|
|
13
|
+
from sempy._utils._log import log
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def _get_base_url(item, type, workspace):
    """
    Build the base REST URL for an item's SQL audit settings endpoints.

    Parameters
    ----------
    item : str | uuid.UUID
        The name or ID of the item (Warehouse or SQLEndpoint).
    type : str
        The type of the item. Must be 'Warehouse' or 'SQLEndpoint'.
    workspace : str | uuid.UUID | None
        The Fabric workspace name or ID (None resolves to the default workspace).

    Returns
    -------
    str
        The base URL '/v1/workspaces/{workspace_id}/{segment}/{item_id}'.

    Raises
    ------
    ValueError
        If the type is not 'Warehouse' or 'SQLEndpoint'.
    """

    # Map the supported item types to their REST API path segment. The dict is
    # the single source of truth for validation, replacing the previous
    # redundant list-membership check.
    type_dict = {
        "Warehouse": "warehouses",
        "SQLEndpoint": "sqlEndpoints",
    }
    type_for_url = type_dict.get(type)

    # Fail fast on an unsupported type before making any resolution API calls.
    if type_for_url is None:
        raise ValueError(
            f"{icons.red_dot} The type must be 'Warehouse' or 'SQLEndpoint'."
        )

    workspace_id = resolve_workspace_id(workspace)
    item_id = resolve_item_id(item=item, type=type, workspace=workspace)

    return f"/v1/workspaces/{workspace_id}/{type_for_url}/{item_id}"
|
35
|
+
|
|
36
|
+
|
|
37
|
+
@log
def get_sql_audit_settings(
    item: str | UUID,
    type: Literal["Warehouse", "SQLEndpoint"],
    workspace: Optional[str | UUID] = None,
) -> pd.DataFrame:
    """
    Shows the SQL audit settings of a Fabric item.

    This is a wrapper function for the following API: `SQL Audit Settings - Get SQL Audit Settings <https://learn.microsoft.com/rest/api/fabric/warehouse/sql-audit-settings/get-sql-audit-settings>`_.

    Service Principal Authentication is supported (see `here <https://learn.microsoft.com/rest/api/fabric/warehouse/sql-audit-settings/get-sql-audit-settings#service-principal-authentication>`_).

    Parameters
    ----------
    item : str | uuid.UUID
        The name or ID of the item (Warehouse or SQLEndpoint).
    type : Literal['Warehouse', 'SQLEndpoint']
        The type of the item. Must be 'Warehouse' or 'SQLEndpoint'.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe containing the SQL audit settings of the specified warehouse.
    """

    columns = {
        "State": "string",
        "Retention Days": "int",
        "Audit Actions And Group": "list",
    }
    df = _create_dataframe(columns=columns)

    # Fetch the audit settings for the resolved item.
    base_url = _get_base_url(item=item, type=type, workspace=workspace)
    settings = _base_api(
        request=f"{base_url}/settings/sqlAudit",
        client="fabric_sp",
    ).json()

    # The API returns a single settings object; keep it as a one-row record set.
    records = [
        {
            "State": settings.get("state"),
            "Retention Days": settings.get("retentionDays"),
            "Audit Actions And Group": settings.get("auditActionsAndGroups"),
        }
    ]

    if records:
        df = pd.DataFrame(records, columns=list(columns.keys()))
        _update_dataframe_datatypes(dataframe=df, column_map=columns)

    return df
|
95
|
+
|
|
96
|
+
|
|
97
|
+
@log
def update_sql_audit_settings(
    item: str | UUID,
    type: Literal["Warehouse", "SQLEndpoint"],
    workspace: Optional[str | UUID] = None,
    retention_days: Optional[int] = None,
    state: Optional[str] = None,
):
    """
    Update settings associated with the item.

    This is a wrapper function for the following API: `SQL Audit Settings - Update SQL Audit Settings <https://learn.microsoft.com/rest/api/fabric/warehouse/sql-audit-settings/update-sql-audit-settings>`_.

    Service Principal Authentication is supported (see `here <https://learn.microsoft.com/rest/api/fabric/warehouse/sql-audit-settings/get-sql-audit-settings#service-principal-authentication>`_).

    Parameters
    ----------
    item : str | uuid.UUID
        The name or ID of the item (Warehouse or SQLEndpoint).
    type : Literal['Warehouse', 'SQLEndpoint']
        The type of the item. Must be 'Warehouse' or 'SQLEndpoint'.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    retention_days : int, default=None
        The number of days to retain audit logs; must be a non-negative integer.
        If None, the retention setting is left unchanged.
    state : str, default=None
        The audit state; either 'Enabled' or 'Disabled' (case-insensitive).
        If None, the state setting is left unchanged.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

    # Build the PATCH payload only from the settings that were provided.
    payload = {}
    if retention_days is not None:
        if not isinstance(retention_days, int) or retention_days < 0:
            raise ValueError(
                f"{icons.red_dot} retention_days must be a non-negative integer."
            )
        payload["retentionDays"] = retention_days
    if state is not None:
        # Normalize casing so values such as 'enabled'/'DISABLED' are accepted.
        state = state.capitalize()
        if state not in ["Enabled", "Disabled"]:
            raise ValueError(
                f"{icons.red_dot} state must be either 'Enabled' or 'Disabled'."
            )
        payload["state"] = state

    # Nothing to change: report and return without calling the API.
    if not payload:
        print(
            f"{icons.info} No updates were made as neither retention_days nor state were provided."
        )
        return

    url = _get_base_url(item=item, type=type, workspace=workspace)
    _base_api(
        request=f"{url}/settings/sqlAudit",
        client="fabric_sp",
        method="patch",
        payload=payload,
    )

    print(
        f"{icons.green_dot} The SQL audit settings for the '{item}' {type.lower()} within the '{workspace_name}' workspace have been updated accordingly."
    )
|
158
|
+
|
|
159
|
+
|
|
160
|
+
@log
def set_audit_actions_and_group(
    item: str | UUID,
    type: Literal["Warehouse", "SQLEndpoint"],
    sql_audit_groups: List[str],
    workspace: Optional[str | UUID] = None,
):
    """
    Update the audit actions and groups for this item.

    This is a wrapper function for the following API: `SQL Audit Settings - Set Audit Actions And Groups <https://learn.microsoft.com/rest/api/fabric/warehouse/sql-audit-settings/set-audit-actions-and-groups>`_.

    Service Principal Authentication is supported (see `here <https://learn.microsoft.com/rest/api/fabric/warehouse/sql-audit-settings/get-sql-audit-settings#service-principal-authentication>`_).

    Parameters
    ----------
    item : str | uuid.UUID
        The name or ID of the item (Warehouse or SQLEndpoint).
    type : Literal['Warehouse', 'SQLEndpoint']
        The type of the item. Must be 'Warehouse' or 'SQLEndpoint'.
    sql_audit_groups : List[str]
        The audit actions and/or audit action groups to set for the item.
        Must be a non-empty list of strings.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

    # Validate before calling the API: a non-empty list of strings is required.
    # The loop variable is named 'group' so it does not shadow the 'item' parameter.
    if (
        not sql_audit_groups
        or not isinstance(sql_audit_groups, list)
        or not all(isinstance(group, str) for group in sql_audit_groups)
    ):
        raise ValueError(
            f"{icons.red_dot} sql_audit_groups must be a non-empty list of strings."
        )

    url = _get_base_url(item=item, type=type, workspace=workspace)
    _base_api(
        request=f"{url}/settings/sqlAudit/setAuditActionsAndGroups",
        client="fabric_sp",
        method="post",
        payload=sql_audit_groups,
    )

    print(
        f"{icons.green_dot} The SQL audit actions and groups for the '{item}' {type.lower()} within the '{workspace_name}' workspace have been updated accordingly."
    )
|
@@ -0,0 +1,214 @@
|
|
|
1
|
+
from typing import Optional, Literal
|
|
2
|
+
from uuid import UUID
|
|
3
|
+
import pandas as pd
|
|
4
|
+
from sempy_labs._helper_functions import (
|
|
5
|
+
_base_api,
|
|
6
|
+
_create_dataframe,
|
|
7
|
+
resolve_workspace_name_and_id,
|
|
8
|
+
resolve_item_name_and_id,
|
|
9
|
+
_update_dataframe_datatypes,
|
|
10
|
+
resolve_workspace_id,
|
|
11
|
+
)
|
|
12
|
+
import sempy_labs._icons as icons
|
|
13
|
+
from sempy._utils._log import log
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
@log
def list_sql_endpoints(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
    """
    Shows the SQL endpoints within a workspace.

    Parameters
    ----------
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the SQL endpoints within a workspace.
    """

    columns = {
        "SQL Endpoint Id": "string",
        "SQL Endpoint Name": "string",
        "Description": "string",
    }
    df = _create_dataframe(columns=columns)

    workspace_id = resolve_workspace_id(workspace)

    # The endpoint is paginated; collect every page before flattening.
    pages = _base_api(
        request=f"/v1/workspaces/{workspace_id}/sqlEndpoints", uses_pagination=True
    )

    records = [
        {
            "SQL Endpoint Id": endpoint.get("id"),
            "SQL Endpoint Name": endpoint.get("displayName"),
            "Description": endpoint.get("description"),
        }
        for page in pages
        for endpoint in page.get("value", [])
    ]

    if records:
        df = pd.DataFrame(records, columns=list(columns.keys()))

    return df
|
62
|
+
|
|
63
|
+
|
|
64
|
+
@log
def refresh_sql_endpoint_metadata(
    item: str | UUID,
    type: Literal["Lakehouse", "MirroredDatabase"],
    workspace: Optional[str | UUID] = None,
    timeout_unit: Literal["Seconds", "Minutes", "Hours", "Days"] = "Minutes",
    timeout_value: int = 15,
) -> pd.DataFrame:
    """
    Refreshes the metadata of a SQL endpoint.

    This is a wrapper function for the following API: `Items - Refresh Sql Endpoint Metadata <https://learn.microsoft.com/rest/api/fabric/sqlendpoint/items/refresh-sql-endpoint-metadata>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    item : str | uuid.UUID
        The name or ID of the item (Lakehouse or MirroredDatabase).
    type : Literal['Lakehouse', 'MirroredDatabase']
        The type of the item. Must be 'Lakehouse' or 'MirroredDatabase'.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    timeout_unit : Literal['Seconds', 'Minutes', 'Hours', 'Days'], default='Minutes'
        The unit of time for the request duration before timing out. Additional duration types may be added over time.
    timeout_value : int, default=15
        The number of time units in the request duration.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the status of the metadata refresh operation.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

    (item_name, item_id) = resolve_item_name_and_id(
        item=item, type=type, workspace=workspace
    )

    # Both item types expose their SQL endpoint id at the same property path;
    # only the REST path segment differs, so the two former duplicated branches
    # are collapsed into one lookup.
    path_segments = {
        "Lakehouse": "lakehouses",
        "MirroredDatabase": "mirroredDatabases",
    }
    segment = path_segments.get(type)
    if segment is None:
        raise ValueError("Invalid type. Must be 'Lakehouse' or 'MirroredDatabase'.")

    response = _base_api(
        request=f"/v1/workspaces/{workspace_id}/{segment}/{item_id}",
        client="fabric_sp",
    )
    sql_endpoint_id = (
        response.json()
        .get("properties", {})
        .get("sqlEndpointProperties", {})
        .get("id")
    )

    # Normalize casing, then enforce the per-unit cap (each bound equals 24 hours).
    timeout_unit = timeout_unit.capitalize()
    max_per_unit = {
        "Seconds": 86400,
        "Minutes": 1440,
        "Hours": 24,
        "Days": 1,
    }
    if timeout_unit not in max_per_unit:
        raise ValueError(
            "Invalid timeout_unit. Must be 'Seconds', 'Minutes', 'Hours', or 'Days'."
        )
    if timeout_value > max_per_unit[timeout_unit]:
        raise ValueError(
            f"timeout_value cannot exceed {max_per_unit[timeout_unit]} when timeout_unit is '{timeout_unit}'."
        )

    payload = {"timeout": {"timeUnit": timeout_unit, "value": timeout_value}}

    # NOTE(review): unlike the other requests in this module, this path has no
    # leading '/'. Kept byte-identical — confirm _base_api normalizes it.
    result = _base_api(
        request=f"v1/workspaces/{workspace_id}/sqlEndpoints/{sql_endpoint_id}/refreshMetadata",
        method="post",
        client="fabric_sp",
        status_codes=[200, 202],
        lro_return_json=True,
        payload=payload,
    )

    columns = {
        "Table Name": "string",
        "Status": "string",
        "Start Time": "datetime",
        "End Time": "datetime",
        "Last Successful Sync Time": "datetime",
        "Error Code": "string",
        "Error Message": "string",
    }

    if result:
        df = pd.json_normalize(result.get("value"))

        # Extract error code and message, set to None if no error
        df["Error Code"] = df.get("error.errorCode", None)
        df["Error Message"] = df.get("error.message", None)

        # Friendly column renaming
        df.rename(
            columns={
                "tableName": "Table Name",
                "startDateTime": "Start Time",
                "endDateTime": "End Time",
                "status": "Status",
                "lastSuccessfulSyncDateTime": "Last Successful Sync Time",
            },
            inplace=True,
        )

        # Drop the original 'error' column if present
        df.drop(columns=[col for col in ["error"] if col in df.columns], inplace=True)

        # Reorder columns. reindex (rather than strict df[column_order]) avoids
        # a KeyError when the API response omits a field; missing columns are
        # filled with NaN instead.
        column_order = [
            "Table Name",
            "Status",
            "Start Time",
            "End Time",
            "Last Successful Sync Time",
            "Error Code",
            "Error Message",
        ]
        df = df.reindex(columns=column_order)

        printout = f"{icons.green_dot} The metadata of the SQL endpoint for the '{item_name}' {type.lower()} within the '{workspace_name}' workspace has been refreshed"
        print(f"{printout} for all tables.")
        _update_dataframe_datatypes(df, columns)
    else:
        # If the target item has no tables to refresh the metadata for
        df = pd.DataFrame(columns=columns.keys())
        print(
            f"{icons.yellow_dot} The SQL endpoint '{item_name}' {type.lower()} within the '{workspace_name}' workspace has no tables to refresh..."
        )

    return df
sempy_labs/_tags.py
ADDED
|
@@ -0,0 +1,201 @@
|
|
|
1
|
+
from sempy_labs._helper_functions import (
|
|
2
|
+
_base_api,
|
|
3
|
+
_create_dataframe,
|
|
4
|
+
_update_dataframe_datatypes,
|
|
5
|
+
resolve_item_name_and_id,
|
|
6
|
+
resolve_workspace_name_and_id,
|
|
7
|
+
_is_valid_uuid,
|
|
8
|
+
)
|
|
9
|
+
import pandas as pd
|
|
10
|
+
from typing import Optional, List
|
|
11
|
+
from uuid import UUID
|
|
12
|
+
import sempy_labs._icons as icons
|
|
13
|
+
from sempy._utils._log import log
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
@log
def list_tags() -> pd.DataFrame:
    """
    Shows a list of all the tenant's tags.

    This is a wrapper function for the following API: `Tags - List Tags <https://learn.microsoft.com/rest/api/fabric/core/tags/list-tags>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of all the tenant's tags.
    """

    columns = {
        "Tag Name": "string",
        "Tag Id": "string",
    }
    df = _create_dataframe(columns=columns)

    # The tags endpoint is paginated; collect every page before flattening.
    pages = _base_api(
        request="/v1/tags",
        uses_pagination=True,
        client="fabric_sp",
    )

    records = [
        {
            "Tag Name": tag.get("displayName"),
            "Tag Id": tag.get("id"),
        }
        for page in pages
        for tag in page.get("value", [])
    ]

    if records:
        df = pd.DataFrame(records, columns=list(columns.keys()))
        _update_dataframe_datatypes(dataframe=df, column_map=columns)

    return df
|
58
|
+
|
|
59
|
+
|
|
60
|
+
@log
def resolve_tags(tags: str | List[str]) -> List[str]:
    """
    Resolves the tags to a list of strings.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    tags : str | List[str]
        The tags to resolve.

    Returns
    -------
    List[str]
        A list of resolved tags.
    """

    if isinstance(tags, str):
        tags = [tags]

    # Fast path: everything is already an ID, so no API round-trip is needed.
    if all(_is_valid_uuid(tag) for tag in tags):
        return tags

    df = list_tags()

    # Build a name -> id lookup; setdefault keeps the FIRST occurrence,
    # matching the original row-filter + iloc[0] behavior on duplicate names.
    name_to_id = {}
    for name, tag_id in zip(df["Tag Name"], df["Tag Id"]):
        name_to_id.setdefault(name, tag_id)

    resolved = []
    for tag in tags:
        if _is_valid_uuid(tag):
            resolved.append(tag)
        elif tag in name_to_id:
            resolved.append(name_to_id[tag])
        else:
            raise ValueError(f"Tag '{tag}' not found in the tenant's tags.")

    return resolved
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
@log
def apply_tags(
    item: str | UUID,
    type: str,
    tags: str | UUID | List[str | UUID],
    workspace: Optional[str | UUID] = None,
):
    """
    Applies the specified tag(s) to an item.

    This is a wrapper function for the following API: `Tags - Apply Tags <https://learn.microsoft.com/rest/api/fabric/core/tags/apply-tags>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    item : str | uuid.UUID
        The name or ID of the item to apply tags to.
    type : str
        The type of the item to apply tags to. For example: "Lakehouse".
    tags : str | uuid.UUID | List[str | uuid.UUID]
        The name or ID of the tag(s) to apply to the item.
    workspace : str | uuid.UUID, default=None
        The workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (item_name, item_id) = resolve_item_name_and_id(item, type, workspace_id)

    # Normalize a single tag into a list before resolution.
    if isinstance(tags, str):
        tags = [tags]

    # Convert tag names to tag IDs (the API only accepts IDs).
    tag_list = resolve_tags(tags)

    payload = {
        "tags": tag_list,
    }

    _base_api(
        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/applyTags",
        client="fabric_sp",
        method="post",
        payload=payload,
    )

    print(
        f"{icons.green_dot} Tags {tags} applied to the '{item_name}' {type.lower()} within the '{workspace_name}' workspace"
    )
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
@log
def unapply_tags(
    item: str | UUID,
    type: str,
    tags: str | UUID | List[str | UUID],
    workspace: Optional[str | UUID] = None,
):
    """
    Removes the specified tag(s) from an item.

    This is a wrapper function for the following API: `Tags - Unapply Tags <https://learn.microsoft.com/rest/api/fabric/core/tags/unapply-tags>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    item : str | uuid.UUID
        The name or ID of the item to remove tags from.
    type : str
        The type of the item to remove tags from. For example: "Lakehouse".
    tags : str | uuid.UUID | List[str | uuid.UUID]
        The name or ID of the tag(s) to remove from the item.
    workspace : str | uuid.UUID, default=None
        The workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (item_name, item_id) = resolve_item_name_and_id(item, type, workspace_id)

    # Normalize a single tag into a list before resolution.
    if isinstance(tags, str):
        tags = [tags]

    # Convert tag names to tag IDs (the API only accepts IDs).
    tag_list = resolve_tags(tags)

    payload = {
        "tags": tag_list,
    }

    _base_api(
        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/unapplyTags",
        client="fabric_sp",
        method="post",
        payload=payload,
    )

    # Fixed message: this endpoint removes tags, the original text said "applied".
    print(
        f"{icons.green_dot} Tags {tags} removed from the '{item_name}' {type.lower()} within the '{workspace_name}' workspace"
    )
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
import sempy.fabric as fabric
|
|
2
|
+
import pandas as pd
|
|
3
|
+
from typing import List, Optional, Union
|
|
4
|
+
from sempy._utils._log import log
|
|
5
|
+
from uuid import UUID
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
@log
def translate_semantic_model(
    dataset: str | UUID,
    languages: Union[str, List[str]],
    exclude_characters: Optional[str] = None,
    workspace: Optional[str | UUID] = None,
) -> pd.DataFrame:
    """
    Translates names, descriptions, display folders for all objects in a semantic model.

    Parameters
    ----------
    dataset : str | uuid.UUID
        Name or ID of the semantic model.
    languages : str, List[str]
        The language code(s) in which to translate the semantic model.
    exclude_characters : str
        A string specifying characters which will be replaced by a space in the translation text when sent to the translation service.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        Shows a pandas dataframe which displays all of the translations in the semantic model.
    """

    # Thin delegation to sempy.fabric; model_readonly=False so the
    # translations are written back to the model.
    call_args = {
        "dataset": dataset,
        "languages": languages,
        "exclude_characters": exclude_characters,
        "workspace": workspace,
        "model_readonly": False,
    }
    return fabric.translate_semantic_model(**call_args)
|