semantic-link-labs 0.12.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
- semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
- semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
- semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
- semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
- sempy_labs/__init__.py +606 -0
- sempy_labs/_a_lib_info.py +2 -0
- sempy_labs/_ai.py +437 -0
- sempy_labs/_authentication.py +264 -0
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
- sempy_labs/_capacities.py +1198 -0
- sempy_labs/_capacity_migration.py +660 -0
- sempy_labs/_clear_cache.py +351 -0
- sempy_labs/_connections.py +610 -0
- sempy_labs/_dashboards.py +69 -0
- sempy_labs/_data_access_security.py +98 -0
- sempy_labs/_data_pipelines.py +162 -0
- sempy_labs/_dataflows.py +668 -0
- sempy_labs/_dax.py +501 -0
- sempy_labs/_daxformatter.py +80 -0
- sempy_labs/_delta_analyzer.py +467 -0
- sempy_labs/_delta_analyzer_history.py +301 -0
- sempy_labs/_dictionary_diffs.py +221 -0
- sempy_labs/_documentation.py +147 -0
- sempy_labs/_domains.py +51 -0
- sempy_labs/_eventhouses.py +182 -0
- sempy_labs/_external_data_shares.py +230 -0
- sempy_labs/_gateways.py +521 -0
- sempy_labs/_generate_semantic_model.py +521 -0
- sempy_labs/_get_connection_string.py +84 -0
- sempy_labs/_git.py +543 -0
- sempy_labs/_graphQL.py +90 -0
- sempy_labs/_helper_functions.py +2833 -0
- sempy_labs/_icons.py +149 -0
- sempy_labs/_job_scheduler.py +609 -0
- sempy_labs/_kql_databases.py +149 -0
- sempy_labs/_kql_querysets.py +124 -0
- sempy_labs/_kusto.py +137 -0
- sempy_labs/_labels.py +124 -0
- sempy_labs/_list_functions.py +1720 -0
- sempy_labs/_managed_private_endpoints.py +253 -0
- sempy_labs/_mirrored_databases.py +416 -0
- sempy_labs/_mirrored_warehouses.py +60 -0
- sempy_labs/_ml_experiments.py +113 -0
- sempy_labs/_model_auto_build.py +140 -0
- sempy_labs/_model_bpa.py +557 -0
- sempy_labs/_model_bpa_bulk.py +378 -0
- sempy_labs/_model_bpa_rules.py +859 -0
- sempy_labs/_model_dependencies.py +343 -0
- sempy_labs/_mounted_data_factories.py +123 -0
- sempy_labs/_notebooks.py +441 -0
- sempy_labs/_one_lake_integration.py +151 -0
- sempy_labs/_onelake.py +131 -0
- sempy_labs/_query_scale_out.py +433 -0
- sempy_labs/_refresh_semantic_model.py +435 -0
- sempy_labs/_semantic_models.py +468 -0
- sempy_labs/_spark.py +455 -0
- sempy_labs/_sql.py +241 -0
- sempy_labs/_sql_audit_settings.py +207 -0
- sempy_labs/_sql_endpoints.py +214 -0
- sempy_labs/_tags.py +201 -0
- sempy_labs/_translations.py +43 -0
- sempy_labs/_user_delegation_key.py +44 -0
- sempy_labs/_utils.py +79 -0
- sempy_labs/_vertipaq.py +1021 -0
- sempy_labs/_vpax.py +388 -0
- sempy_labs/_warehouses.py +234 -0
- sempy_labs/_workloads.py +140 -0
- sempy_labs/_workspace_identity.py +72 -0
- sempy_labs/_workspaces.py +595 -0
- sempy_labs/admin/__init__.py +170 -0
- sempy_labs/admin/_activities.py +167 -0
- sempy_labs/admin/_apps.py +145 -0
- sempy_labs/admin/_artifacts.py +65 -0
- sempy_labs/admin/_basic_functions.py +463 -0
- sempy_labs/admin/_capacities.py +508 -0
- sempy_labs/admin/_dataflows.py +45 -0
- sempy_labs/admin/_datasets.py +186 -0
- sempy_labs/admin/_domains.py +522 -0
- sempy_labs/admin/_external_data_share.py +100 -0
- sempy_labs/admin/_git.py +72 -0
- sempy_labs/admin/_items.py +265 -0
- sempy_labs/admin/_labels.py +211 -0
- sempy_labs/admin/_reports.py +241 -0
- sempy_labs/admin/_scanner.py +118 -0
- sempy_labs/admin/_shared.py +82 -0
- sempy_labs/admin/_sharing_links.py +110 -0
- sempy_labs/admin/_tags.py +131 -0
- sempy_labs/admin/_tenant.py +503 -0
- sempy_labs/admin/_tenant_keys.py +89 -0
- sempy_labs/admin/_users.py +140 -0
- sempy_labs/admin/_workspaces.py +236 -0
- sempy_labs/deployment_pipeline/__init__.py +23 -0
- sempy_labs/deployment_pipeline/_items.py +580 -0
- sempy_labs/directlake/__init__.py +57 -0
- sempy_labs/directlake/_autosync.py +58 -0
- sempy_labs/directlake/_directlake_schema_compare.py +120 -0
- sempy_labs/directlake/_directlake_schema_sync.py +161 -0
- sempy_labs/directlake/_dl_helper.py +274 -0
- sempy_labs/directlake/_generate_shared_expression.py +94 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
- sempy_labs/directlake/_get_shared_expression.py +34 -0
- sempy_labs/directlake/_guardrails.py +96 -0
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
- sempy_labs/directlake/_warm_cache.py +236 -0
- sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
- sempy_labs/environment/__init__.py +23 -0
- sempy_labs/environment/_items.py +212 -0
- sempy_labs/environment/_pubstage.py +223 -0
- sempy_labs/eventstream/__init__.py +37 -0
- sempy_labs/eventstream/_items.py +263 -0
- sempy_labs/eventstream/_topology.py +652 -0
- sempy_labs/graph/__init__.py +59 -0
- sempy_labs/graph/_groups.py +651 -0
- sempy_labs/graph/_sensitivity_labels.py +120 -0
- sempy_labs/graph/_teams.py +125 -0
- sempy_labs/graph/_user_licenses.py +96 -0
- sempy_labs/graph/_users.py +516 -0
- sempy_labs/graph_model/__init__.py +15 -0
- sempy_labs/graph_model/_background_jobs.py +63 -0
- sempy_labs/graph_model/_items.py +149 -0
- sempy_labs/lakehouse/__init__.py +67 -0
- sempy_labs/lakehouse/_blobs.py +247 -0
- sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
- sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
- sempy_labs/lakehouse/_helper.py +250 -0
- sempy_labs/lakehouse/_lakehouse.py +351 -0
- sempy_labs/lakehouse/_livy_sessions.py +143 -0
- sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
- sempy_labs/lakehouse/_partitioning.py +165 -0
- sempy_labs/lakehouse/_schemas.py +217 -0
- sempy_labs/lakehouse/_shortcuts.py +440 -0
- sempy_labs/migration/__init__.py +35 -0
- sempy_labs/migration/_create_pqt_file.py +238 -0
- sempy_labs/migration/_direct_lake_to_import.py +105 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
- sempy_labs/migration/_migration_validation.py +71 -0
- sempy_labs/migration/_refresh_calc_tables.py +131 -0
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +55 -0
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_download_report.py +76 -0
- sempy_labs/report/_export_report.py +257 -0
- sempy_labs/report/_generate_report.py +427 -0
- sempy_labs/report/_paginated.py +76 -0
- sempy_labs/report/_report_bpa.py +354 -0
- sempy_labs/report/_report_bpa_rules.py +115 -0
- sempy_labs/report/_report_functions.py +581 -0
- sempy_labs/report/_report_helper.py +227 -0
- sempy_labs/report/_report_list_functions.py +110 -0
- sempy_labs/report/_report_rebind.py +149 -0
- sempy_labs/report/_reportwrapper.py +3100 -0
- sempy_labs/report/_save_report.py +147 -0
- sempy_labs/snowflake_database/__init__.py +10 -0
- sempy_labs/snowflake_database/_items.py +105 -0
- sempy_labs/sql_database/__init__.py +21 -0
- sempy_labs/sql_database/_items.py +201 -0
- sempy_labs/sql_database/_mirroring.py +79 -0
- sempy_labs/theme/__init__.py +12 -0
- sempy_labs/theme/_org_themes.py +129 -0
- sempy_labs/tom/__init__.py +3 -0
- sempy_labs/tom/_model.py +5977 -0
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- sempy_labs/warehouse/__init__.py +28 -0
- sempy_labs/warehouse/_items.py +234 -0
- sempy_labs/warehouse/_restore_points.py +309 -0
sempy_labs/admin/_external_data_share.py
ADDED
@@ -0,0 +1,100 @@
from uuid import UUID
import sempy_labs._icons as icons
import pandas as pd
from sempy_labs.admin._basic_functions import _resolve_workspace_name_and_id
from sempy_labs._helper_functions import (
    _base_api,
    _create_dataframe,
    _update_dataframe_datatypes,
)
from sempy._utils._log import log


@log
def list_external_data_shares() -> pd.DataFrame:
    """
    Lists external data shares in the tenant. This function is for admins.

    This is a wrapper function for the following API: `External Data Shares - List External Data Shares <https://learn.microsoft.com/rest/api/fabric/admin/external-data-shares/list-external-data-shares>`_.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of external data shares in the tenant.
    """

    columns = {
        "External Data Share Id": "string",
        "Paths": "string",
        "Creater Principal Id": "string",
        "Creater Principal Name": "string",
        "Creater Principal Type": "string",
        "Creater Principal UPN": "string",
        "Recipient UPN": "string",
        "Status": "string",
        "Expiration Time UTC": "datetime",
        "Workspace Id": "string",
        "Item Id": "string",
        "Invitation URL": "string",
    }
    df = _create_dataframe(columns=columns)

    response = _base_api(request="/v1/admin/items/externalDataShares")

    rows = []
    for i in response.json().get("value", []):
        cp = i.get("creatorPrincipal", {})
        rows.append(
            {
                "External Data Share Id": i.get("id"),
                "Paths": [i.get("paths", [])],
                "Creater Principal Id": cp.get("id"),
                "Creater Principal Name": cp.get("displayName"),
                "Creater Principal Type": cp.get("type"),
                "Creater Principal UPN": cp.get("userDetails", {}).get(
                    "userPrincipalName"
                ),
                "Recipient UPN": i.get("recipient", {}).get("userPrincipalName"),
                "Status": i.get("status"),
                "Expiration Time UTC": i.get("expirationTimeUtc"),
                "Workspace Id": i.get("workspaceId"),
                "Item Id": i.get("itemId"),
                "Invitation URL": i.get("invitationUrl"),
            }
        )

    if rows:
        df = pd.DataFrame(rows, columns=list(columns.keys()))
        _update_dataframe_datatypes(dataframe=df, column_map=columns)

    return df


@log
def revoke_external_data_share(
    external_data_share_id: UUID, item_id: UUID, workspace: str | UUID
):
    """
    Revokes the specified external data share. Note: This action cannot be undone.

    This is a wrapper function for the following API: `External Data Shares - Revoke External Data Share <https://learn.microsoft.com/rest/api/fabric/admin/external-data-shares/revoke-external-data-share>`_.

    Parameters
    ----------
    external_data_share_id : uuid.UUID
        The external data share ID.
    item_id : uuid.UUID
        The item ID.
    workspace : str | uuid.UUID
        The Fabric workspace name or id.
    """
    (workspace, workspace_id) = _resolve_workspace_name_and_id(workspace)

    _base_api(
        request=f"/v1/admin/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}/revoke",
        method="post",
    )

    print(
        f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_id}' item within the '{workspace}' workspace has been revoked."
    )
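A minimal usage sketch for the two admin functions above, assuming a Fabric notebook session with tenant-admin permissions. The workspace name and GUIDs are placeholders, not values taken from this package; the imports use the private module path shown in this diff (the admin __init__.py in this wheel may also re-export these functions).

from sempy_labs.admin._external_data_share import (
    list_external_data_shares,
    revoke_external_data_share,
)

# Enumerate every external data share in the tenant (admin-only call).
df_shares = list_external_data_shares()
print(df_shares[["External Data Share Id", "Status", "Workspace Id", "Item Id"]])

# Revoke one share; all three identifiers below are placeholder values.
revoke_external_data_share(
    external_data_share_id="00000000-0000-0000-0000-000000000001",
    item_id="00000000-0000-0000-0000-000000000002",
    workspace="Sales Workspace",  # workspace name or UUID
)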
sempy_labs/admin/_git.py
ADDED
@@ -0,0 +1,72 @@
from sempy_labs._helper_functions import (
    _base_api,
    _create_dataframe,
)
import pandas as pd
from sempy_labs.admin._basic_functions import list_workspaces
from sempy._utils._log import log


@log
def list_git_connections() -> pd.DataFrame:
    """
    Shows a list of Git connections.

    This is a wrapper function for the following API: `Workspaces - List Git Connections <https://learn.microsoft.com/rest/api/fabric/admin/workspaces/list-git-connections>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of Git connections.
    """

    columns = {
        "Workspace Id": "string",
        "Organization Name": "string",
        "Owner Name": "string",
        "Project Name": "string",
        "Git Provider Type": "string",
        "Repository Name": "string",
        "Branch Name": "string",
        "Directory Name": "string",
    }
    df = _create_dataframe(columns=columns)

    responses = _base_api(
        request="/v1/admin/workspaces/discoverGitConnections",
        client="fabric_sp",
        uses_pagination=True,
    )

    rows = []
    for r in responses:
        for v in r.get("value", []):
            git = v.get("gitProviderDetails", {})
            rows.append(
                {
                    "Workspace Id": v.get("workspaceId"),
                    "Organization Name": git.get("organizationName"),
                    "Owner Name": git.get("ownerName"),
                    "Project Name": git.get("projectName"),
                    "Git Provider Type": git.get("gitProviderType"),
                    "Repository Name": git.get("repositoryName"),
                    "Branch Name": git.get("branchName"),
                    "Directory Name": git.get("directoryName"),
                }
            )

    if rows:
        df = pd.DataFrame(rows, columns=list(columns.keys()))
        dfW = list_workspaces()
        df = pd.merge(
            df, dfW[["Id", "Name"]], left_on="Workspace Id", right_on="Id", how="left"
        )
        new_col_name = "Workspace Name"
        df = df.rename(columns={"Name": new_col_name})
        df.insert(1, new_col_name, df.pop(new_col_name))

        df = df.drop(columns=["Id"])

    return df
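A short sketch of how list_git_connections might be used, assuming a tenant admin or a service principal configured for the "fabric_sp" client. The provider and branch values in the filter are illustrative placeholders.

from sempy_labs.admin._git import list_git_connections

# Discover Git connections across all workspaces in the tenant.
df_git = list_git_connections()

# Illustrative filter: connections whose provider and branch match placeholder values.
filtered = df_git[
    (df_git["Git Provider Type"] == "AzureDevOps")
    & (df_git["Branch Name"] == "main")
]
print(filtered[["Workspace Name", "Repository Name", "Directory Name"]])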
sempy_labs/admin/_items.py
ADDED
@@ -0,0 +1,265 @@
import pandas as pd
from typing import Optional, Tuple
from uuid import UUID
import sempy_labs._icons as icons
from sempy_labs.admin._basic_functions import (
    _resolve_workspace_name_and_id,
)
from sempy_labs.admin._capacities import (
    _resolve_capacity_name_and_id,
)
from sempy_labs._helper_functions import (
    _is_valid_uuid,
    _build_url,
    _base_api,
    _create_dataframe,
)
from sempy._utils._log import log


@log
def _resolve_item_id(
    item: str,
    type: Optional[str] = None,
    workspace: Optional[str | UUID] = None,
) -> UUID:
    if _is_valid_uuid(item):
        item_id = item

    else:
        workspace_id = _resolve_workspace_name_and_id(workspace)[1]
        dfI = list_items(workspace=workspace_id, type=type)
        dfI_filt = dfI[dfI["Item Name"] == item]

        if len(dfI_filt) == 0:
            raise ValueError(
                f"The '{item}' {type} does not exist within the '{workspace}' workspace or is not of type '{type}'."
            )

        item_id = dfI_filt["Item Id"].iloc[0]

    return item_id


@log
def _resolve_item_name_and_id(
    item: str,
    type: Optional[str] = None,
    workspace: Optional[str | UUID] = None,
    **kwargs,
) -> Tuple[str, UUID]:
    if "item_name" in kwargs:
        print(
            "The 'item_name' parameter has been deprecated. Please replace this parameter with 'item' from the function going forward."
        )
        item = kwargs["item_name"]
        del kwargs["item_name"]

    dfI = list_items(workspace=workspace, type=type, item=item)

    if len(dfI) > 1:
        raise ValueError(
            f"There are more than 1 item with the name '{item}'. Please specify the 'type' and/or 'workspace' to be more precise."
        )

    try:
        item_name = dfI["Item Name"].iloc[0]
        item_id = dfI["Item Id"].iloc[0]
    except Exception:
        raise ValueError(
            f"The '{item}' {type} does not exist within the '{workspace}' workspace or is not of type '{type}'."
        )

    return item_name, item_id


@log
def list_items(
    capacity: Optional[str | UUID] = None,
    workspace: Optional[str | UUID] = None,
    state: Optional[str] = None,
    type: Optional[str] = None,
    item: Optional[str | UUID] = None,
    **kwargs,
) -> pd.DataFrame:
    """
    Shows a list of active Fabric and Power BI items.

    This is a wrapper function for the following API: `Items - List Items <https://learn.microsoft.com/rest/api/fabric/admin/items/list-items>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    capacity : str | uuid.UUID, default=None
        The capacity name or id.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or id.
        Defaults to None which looks into all the workspaces.
    state : str, default=None
        The item state.
    type : str, default=None
        The item type.
    item : str | UUID, default=None
        Item id or name to filter the list.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of active Fabric and Power BI items.
    """
    if "capacity_name" in kwargs:
        print(
            "The 'capacity_name' parameter has been deprecated. Please replace this parameter with 'capacity' from the function going forward."
        )
        capacity = kwargs["capacity_name"]
        del kwargs["capacity_name"]

    columns = {
        "Item Id": "string",
        "Item Name": "string",
        "Type": "string",
        "Description": "string",
        "State": "string",
        "Last Updated Date": "string",
        "Creator Principal Id": "string",
        "Creator Principal Display Name": "string",
        "Creator Principal Type": "string",
        "Creator User Principal Name": "string",
        "Workspace Id": "string",
        "Capacity Id": "string",
    }
    df = _create_dataframe(columns=columns)

    params = {}
    url = "/v1/admin/items"

    if capacity is not None:
        params["capacityId"] = _resolve_capacity_name_and_id(capacity)[1]

    if workspace is not None:
        params["workspaceId"] = _resolve_workspace_name_and_id(workspace)[1]

    if state is not None:
        params["state"] = state

    if type is not None:
        params["type"] = type

    url = _build_url(url, params)

    responses = _base_api(request=url, client="fabric_sp", uses_pagination=True)

    rows = []
    for r in responses:
        for v in r.get("itemEntities", []):
            rows.append(
                {
                    "Item Id": v.get("id"),
                    "Type": v.get("type"),
                    "Item Name": v.get("name"),
                    "Description": v.get("description"),
                    "State": v.get("state"),
                    "Last Updated Date": v.get("lastUpdatedDate"),
                    "Creator Principal Id": v.get("creatorPrincipal", {}).get("id"),
                    "Creator Principal Display Name": v.get("creatorPrincipal", {}).get(
                        "displayName"
                    ),
                    "Creator Principal Type": v.get("creatorPrincipal", {}).get("type"),
                    "Creator User Principal Name": v.get("creatorPrincipal", {})
                    .get("userDetails", {})
                    .get("userPrincipalName"),
                    "Workspace Id": v.get("workspaceId"),
                    "Capacity Id": v.get("capacityId"),
                }
            )

    if rows:
        df = pd.DataFrame(rows, columns=list(columns.keys()))

    if item is not None:
        if _is_valid_uuid(item):
            df = df[df["Item Id"] == item]
        else:
            df = df[df["Item Name"] == item]

    return df


@log
def list_item_access_details(
    item: str | UUID,
    type: str,
    workspace: Optional[str | UUID] = None,
) -> pd.DataFrame:
    """
    Returns a list of users (including groups and service principals) and lists their workspace roles.

    This is a wrapper function for the following API: `Items - List Item Access Details <https://learn.microsoft.com/rest/api/fabric/admin/items/list-item-access-details>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    item : str
        Name or id of the Fabric item.
    type : str
        Type of Fabric item.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or id.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of users (including groups and service principals) and lists their workspace roles.
    """

    (workspace_name, workspace_id) = _resolve_workspace_name_and_id(workspace)
    (item_name, item_id) = _resolve_item_name_and_id(
        item=item, type=type, workspace=workspace_name
    )

    columns = {
        "User Id": "string",
        "User Name": "string",
        "User Type": "string",
        "User Principal Name": "string",
        "Item Name": "string",
        "Item Type": "string",
        "Item Id": "string",
        "Permissions": "string",
        "Additional Permissions": "string",
    }
    df = _create_dataframe(columns=columns)

    response = _base_api(
        request=f"/v1/admin/workspaces/{workspace_id}/items/{item_id}/users?type={type}",
        client="fabric_sp",
    )

    rows = []
    for v in response.json().get("accessDetails", []):
        rows.append(
            {
                "User Id": v.get("principal", {}).get("id"),
                "User Name": v.get("principal", {}).get("displayName"),
                "User Type": v.get("principal", {}).get("type"),
                "User Principal Name": v.get("principal", {})
                .get("userDetails", {})
                .get("userPrincipalName"),
                "Item Type": v.get("itemAccessDetails", {}).get("type"),
                "Permissions": v.get("itemAccessDetails", {}).get("permissions"),
                "Additional Permissions": v.get("itemAccessDetails", {}).get(
                    "additionalPermissions"
                ),
                "Item Name": item_name,
                "Item Id": item_id,
            }
        )

    if rows:
        df = pd.DataFrame(rows, columns=list(columns.keys()))

    return df
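A brief sketch combining list_items and list_item_access_details, assuming admin or service-principal permissions in a Fabric notebook. The workspace, item name, and type values below are placeholders.

from sempy_labs.admin._items import list_items, list_item_access_details

# Active semantic models in one workspace (name-to-id resolution handled by the wrapper).
df_models = list_items(workspace="Sales Workspace", type="SemanticModel")
print(df_models[["Item Name", "Item Id", "State", "Last Updated Date"]])

# Access details for a single item, resolved by name within that workspace.
df_access = list_item_access_details(
    item="KPI Model",
    type="SemanticModel",
    workspace="Sales Workspace",
)
print(df_access[["User Name", "User Principal Name", "Permissions"]])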
sempy_labs/admin/_labels.py
ADDED
@@ -0,0 +1,211 @@
from typing import Literal, List
from uuid import UUID
import sempy_labs._icons as icons
from sempy_labs.admin._basic_functions import (
    _resolve_workspace_name_and_id,
)
from sempy_labs.admin._items import (
    list_items,
)
from sempy_labs._helper_functions import (
    _is_valid_uuid,
    _base_api,
)
from sempy._utils._log import log


@log
def bulk_set_labels(
    items: List[dict],
    label_id: UUID,
    assignment_method: Literal["Standard", "Priviledged"] = "Standard",
):
    """
    Sets sensitivity labels on Fabric items.

    Note: Please use the sempy_labs.graph.resolve_sensitivity_label_id function to retrieve label IDs.

    This is a wrapper function for the following API: `Labels - Bulk Set Labels <https://learn.microsoft.com/rest/api/fabric/admin/labels/bulk-set-labels>`_.

    Parameters
    ----------
    items : List[dict]
        A list of dictionaries containing the item details.

        Example 1:
        items = [
            {
                "id": "fe472f5e-636e-4c10-a1c6-7e9edc0b542a",
                "type": "Dashboard"
            },
            {
                "id": "fe472f5e-636e-4c10-a1c6-7e9edc0b542c",
                "type": "Report"
            },
            {
                "id": "fe472f5e-636e-4c10-a1c6-7e9edc0b542e",
                "type": "SemanticModel"
            },
        ]

        Example 2:
        items = [
            {
                "id": "Dashboard 1",
                "type": "Dashboard",
                "workspace": "Sales Workspace"
            },
            {
                "id": "Sales Report",
                "type": "Report",
                "workspace": "Sales Workspace"
            },
            {
                "id": "KPI Model",
                "type": "SemanticModel",
                "workspace": "Workspace 2"
            },
        ]

    label_id : uuid.UUID
        The label ID, which must be in the user's label policy.
    assignment_method : Literal["Standard", "Priviledged"], default="Standard"
        Specifies whether the assigned label was set by an automated process or manually. Additional tenant setting property types may be added over time.
    """

    if assignment_method not in ["Standard", "Priviledged"]:
        raise ValueError("assignment_method must be either 'Standard' or 'Priviledged'")

    payload = {"items": []}
    df = list_items()

    for i in items:
        item = i.get("item")
        type = i.get("type")
        workspace = i.get("workspace")
        if _is_valid_uuid(item):
            payload["items"].append(
                {
                    "id": item,
                    "type": type,
                }
            )
        else:
            workspace_id = _resolve_workspace_name_and_id(workspace)[1]
            df_filtered = df[
                (df["Item Name"] == item)
                & (df["Type"] == type)
                & (df["Workspace Id"] == workspace_id)
            ]
            if df_filtered.empty:
                raise ValueError(
                    f"The item '{item}' of type '{type}' does not exist in workspace '{workspace}'."
                )
            else:
                payload["items"].append(
                    {
                        "id": df_filtered["Item Id"].iloc[0],
                        "type": type,
                    }
                )

    payload["labelId"] = label_id
    payload["assignmentMethod"] = assignment_method

    _base_api(request="/v1/admin/items/bulkSetLabels", method="post", payload=payload)

    print(
        f"{icons.green_dot} Labels have been successfully set on the specified items."
    )


@log
def bulk_remove_labels(
    items: List[dict],
):
    """
    Removes sensitivity labels from Fabric items.

    This is a wrapper function for the following API: `Labels - Bulk Remove Labels <https://learn.microsoft.com/rest/api/fabric/admin/labels/bulk-remove-labels>`_.

    Parameters
    ----------
    items : List[dict]
        A list of dictionaries containing the item details.

        Example 1:
        items = [
            {
                "id": "fe472f5e-636e-4c10-a1c6-7e9edc0b542a",
                "type": "Dashboard"
            },
            {
                "id": "fe472f5e-636e-4c10-a1c6-7e9edc0b542c",
                "type": "Report"
            },
            {
                "id": "fe472f5e-636e-4c10-a1c6-7e9edc0b542e",
                "type": "SemanticModel"
            },
        ]

        Example 2:
        items = [
            {
                "id": "Dashboard 1",
                "type": "Dashboard",
                "workspace": "Sales Workspace"
            },
            {
                "id": "Sales Report",
                "type": "Report",
                "workspace": "Sales Workspace"
            },
            {
                "id": "KPI Model",
                "type": "SemanticModel",
                "workspace": "Workspace 2"
            },
        ]
    """

    payload = {"items": []}
    df = list_items()

    for i in items:
        item = i.get("item")
        type = i.get("type")
        workspace = i.get("workspace")
        if _is_valid_uuid(item):
            payload["items"].append(
                {
                    "id": item,
                    "type": type,
                }
            )
        else:
            workspace_id = _resolve_workspace_name_and_id(workspace)[1]
            df_filtered = df[
                (df["Item Name"] == item)
                & (df["Type"] == type)
                & (df["Workspace Id"] == workspace_id)
            ]
            if df_filtered.empty:
                raise ValueError(
                    f"The item '{item}' of type '{type}' does not exist in workspace '{workspace}'."
                )
            else:
                payload["items"].append(
                    {
                        "id": df_filtered["Item Id"].iloc[0],
                        "type": type,
                    }
                )

    _base_api(
        request="/v1/admin/items/bulkRemoveLabels", method="post", payload=payload
    )

    print(
        f"{icons.green_dot} Labels have been successfully removed from the specified items."
    )
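A usage sketch for the label functions above. Note that the loops read each entry with i.get("item"), i.get("type"), and i.get("workspace"), so this sketch keys the dictionaries accordingly; the GUIDs, names, and label ID are placeholders, and the label must belong to the caller's label policy.

from sempy_labs.admin._labels import bulk_set_labels, bulk_remove_labels

# Items may be given by GUID, or by name plus workspace for name resolution.
items = [
    {"item": "fe472f5e-636e-4c10-a1c6-7e9edc0b542a", "type": "Report"},
    {"item": "KPI Model", "type": "SemanticModel", "workspace": "Sales Workspace"},
]

# Apply a sensitivity label (placeholder label ID from the tenant's label policy).
bulk_set_labels(
    items=items,
    label_id="11111111-2222-3333-4444-555555555555",
    assignment_method="Standard",
)

# Remove labels from the same items.
bulk_remove_labels(items=items)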