semantic-link-labs 0.11.2__py3-none-any.whl → 0.12.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registries. It is provided for informational purposes only.
Potentially problematic release: this version of semantic-link-labs has been flagged as potentially problematic; see the registry listing for details.
- {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/METADATA +7 -6
- {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/RECORD +90 -84
- sempy_labs/__init__.py +18 -18
- sempy_labs/_a_lib_info.py +1 -1
- sempy_labs/_authentication.py +81 -32
- sempy_labs/_capacities.py +2 -2
- sempy_labs/_capacity_migration.py +4 -4
- sempy_labs/_clear_cache.py +1 -1
- sempy_labs/_connections.py +107 -70
- sempy_labs/_dashboards.py +6 -2
- sempy_labs/_data_pipelines.py +1 -1
- sempy_labs/_dataflows.py +1 -1
- sempy_labs/_dax.py +3 -3
- sempy_labs/_delta_analyzer.py +4 -4
- sempy_labs/_delta_analyzer_history.py +1 -1
- sempy_labs/_deployment_pipelines.py +1 -1
- sempy_labs/_environments.py +1 -1
- sempy_labs/_eventhouses.py +9 -3
- sempy_labs/_eventstreams.py +1 -1
- sempy_labs/_external_data_shares.py +56 -2
- sempy_labs/_gateways.py +14 -7
- sempy_labs/_generate_semantic_model.py +7 -12
- sempy_labs/_git.py +1 -1
- sempy_labs/_graphQL.py +1 -1
- sempy_labs/_helper_functions.py +293 -22
- sempy_labs/_job_scheduler.py +12 -1
- sempy_labs/_kql_databases.py +1 -1
- sempy_labs/_kql_querysets.py +10 -2
- sempy_labs/_kusto.py +2 -2
- sempy_labs/_labels.py +126 -0
- sempy_labs/_list_functions.py +2 -2
- sempy_labs/_managed_private_endpoints.py +1 -1
- sempy_labs/_mirrored_databases.py +40 -16
- sempy_labs/_mirrored_warehouses.py +1 -1
- sempy_labs/_ml_experiments.py +1 -1
- sempy_labs/_model_bpa.py +6 -6
- sempy_labs/_model_bpa_bulk.py +3 -3
- sempy_labs/_model_dependencies.py +1 -1
- sempy_labs/_mounted_data_factories.py +3 -3
- sempy_labs/_notebooks.py +153 -3
- sempy_labs/_query_scale_out.py +2 -2
- sempy_labs/_refresh_semantic_model.py +1 -1
- sempy_labs/_semantic_models.py +15 -3
- sempy_labs/_spark.py +1 -1
- sempy_labs/_sql.py +3 -3
- sempy_labs/_sql_endpoints.py +5 -3
- sempy_labs/_sqldatabase.py +5 -1
- sempy_labs/_tags.py +3 -1
- sempy_labs/_translations.py +7 -360
- sempy_labs/_user_delegation_key.py +2 -2
- sempy_labs/_utils.py +27 -0
- sempy_labs/_vertipaq.py +3 -3
- sempy_labs/_vpax.py +1 -1
- sempy_labs/_warehouses.py +5 -0
- sempy_labs/_workloads.py +1 -1
- sempy_labs/_workspace_identity.py +1 -1
- sempy_labs/_workspaces.py +145 -11
- sempy_labs/admin/__init__.py +6 -0
- sempy_labs/admin/_capacities.py +34 -11
- sempy_labs/admin/_items.py +2 -2
- sempy_labs/admin/_tenant_keys.py +89 -0
- sempy_labs/directlake/_dl_helper.py +5 -2
- sempy_labs/graph/_users.py +3 -5
- sempy_labs/lakehouse/__init__.py +4 -0
- sempy_labs/lakehouse/_helper.py +18 -9
- sempy_labs/lakehouse/_lakehouse.py +18 -9
- sempy_labs/lakehouse/_materialized_lake_views.py +76 -0
- sempy_labs/lakehouse/_shortcuts.py +8 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +38 -47
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +12 -22
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +7 -11
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +14 -23
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_download_report.py +4 -1
- sempy_labs/report/_export_report.py +12 -5
- sempy_labs/report/_generate_report.py +11 -3
- sempy_labs/report/_paginated.py +21 -15
- sempy_labs/report/_report_functions.py +19 -11
- sempy_labs/report/_report_rebind.py +21 -10
- sempy_labs/report/_reportwrapper.py +1 -1
- sempy_labs/theme/_org_themes.py +5 -6
- sempy_labs/tom/_model.py +13 -19
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- sempy_labs/_dax_query_view.py +0 -57
- sempy_labs/_ml_models.py +0 -111
- sempy_labs/_variable_libraries.py +0 -92
- {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/top_level.txt +0 -0
@@ -19,6 +19,7 @@ from .._helper_functions import (
     _create_spark_session,
     _mount,
     resolve_workspace_id,
+    resolve_item_name_and_id,
 )
 from typing import List, Optional, Union
 from sempy._utils._log import log
@@ -33,14 +34,14 @@ def get_report_json(
     save_to_file_name: Optional[str] = None,
 ) -> dict:
     """
-    Gets the report.json file content of a Power BI report.
+    Gets the report.json file content of a Power BI report. This function only supports reports in the PBIR-Legacy format.

     This is a wrapper function for the following API: `Items - Get Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/get-report-definition>`_.

     Parameters
     ----------
-    report : str
-        Name of the Power BI report.
+    report : str | uuid.UUID
+        Name or ID of the Power BI report.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID in which the report exists.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -55,20 +56,27 @@ def get_report_json(
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    report_id =
-
+    (report_name, report_id) = resolve_item_name_and_id(
+        item=report, type="Report", workspace=workspace_id
+    )

     result = _base_api(
-        request=f"/v1/workspaces/{workspace_id}/reports/{report_id}/getDefinition
+        request=f"/v1/workspaces/{workspace_id}/reports/{report_id}/getDefinition",
         method="post",
         lro_return_json=True,
         status_codes=None,
     )
-
-
-
-
-
+    report_json = None
+    for part in result.get("definition", {}).get("parts", {}):
+        if part.get("path") == "report.json":
+            payload = part.get("payload")
+            report_file = _decode_b64(payload)
+            report_json = json.loads(report_file)
+
+    if not report_json:
+        raise ValueError(
+            f"{icons.red_dot} Unable to retrieve report.json for the '{report_name}' report within the '{workspace_name}' workspace. This function only supports reports in the PBIR-Legacy format."
+        )

     if save_to_file_name is not None:
         if not lakehouse_attached():
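For orientation on the signature change above: `report` now accepts either a display name or an item UUID, and the function raises a ValueError for reports that are not in the PBIR-Legacy format. A minimal, hedged usage sketch (the report and workspace names are placeholders, and the import path assumes the function remains re-exported from sempy_labs.report as in prior releases; it only runs inside a Fabric notebook session):

from uuid import UUID
from sempy_labs.report import get_report_json

# Resolve the report by display name (placeholder values) ...
report_json = get_report_json(report="Sales Report", workspace="Analytics")
# ... or by its item ID (placeholder UUID); both forms are accepted after this change.
report_json = get_report_json(report=UUID("00000000-0000-0000-0000-000000000000"), workspace="Analytics")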
sempy_labs/report/_report_rebind.py
CHANGED
@@ -1,7 +1,6 @@
-from
-    resolve_dataset_id,
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
-
+    resolve_item_name_and_id,
     _base_api,
     resolve_dataset_name_and_id,
 )
@@ -23,12 +22,14 @@ def report_rebind(

     This is a wrapper function for the following API: `Reports - Rebind Report In Group <https://learn.microsoft.com/rest/api/power-bi/reports/rebind-report-in-group>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
-    report : str | List[str]
-        Name(s) of the Power BI report(s).
-    dataset : str
-        Name of the semantic model.
+    report : str | uuid.UUID | List[str | uuid.UUID]
+        Name(s) or ID(s) of the Power BI report(s).
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     report_workspace : str | uuid.UUID, default=None
         The name or ID of the Fabric workspace in which the report resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -46,12 +47,19 @@ def report_rebind(
     if dataset_workspace is None:
         dataset_workspace = report_workspace_name

+    (dataset_workspace_name, dataset_workspace_id) = resolve_workspace_name_and_id(
+        dataset_workspace
+    )
     if isinstance(report, str):
         report = [report]

     for rpt in report:
-        report_id =
-
+        (report_name, report_id) = resolve_item_name_and_id(
+            item=rpt, type="Report", workspace=report_workspace_id
+        )
+        (dataset_name, dataset_id) = resolve_item_name_and_id(
+            item=dataset, type="SemanticModel", workspace=dataset_workspace
+        )

         payload = {"datasetId": dataset_id}

@@ -59,10 +67,11 @@ def report_rebind(
             request=f"v1.0/myorg/groups/{report_workspace_id}/reports/{report_id}/Rebind",
             method="post",
             payload=payload,
+            client="fabric_sp",
         )

         print(
-            f"{icons.green_dot} The '{
+            f"{icons.green_dot} The '{report_name}' report within the '{report_workspace_name}' workspace has been successfully rebinded to the '{dataset_name}' semantic model within the '{dataset_workspace_name}' workspace."
         )


@@ -77,6 +86,8 @@ def report_rebind_all(
     """
     Rebinds all reports across all workspaces which are bound to a specific semantic model to a new semantic model.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     dataset : str
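The rebind path above now resolves reports and semantic models by name or ID and routes the Power BI call through the fabric_sp client, so it can run under Service Principal authentication. A hedged sketch of the new call shape, using placeholder names (the import path assumes report_rebind remains exported from sempy_labs.report):

from sempy_labs.report import report_rebind

# Rebind two reports (names or UUIDs) to a different semantic model.
report_rebind(
    report=["Sales Report", "Executive Summary"],  # placeholder report names or IDs
    dataset="Sales Model v2",                      # placeholder semantic model name or ID
    report_workspace="Analytics",                  # placeholder workspace name or ID
    dataset_workspace="Analytics",
)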
sempy_labs/report/_reportwrapper.py
CHANGED
@@ -2083,7 +2083,7 @@ class ReportWrapper:
                item for item in custom_visuals if item not in cv_remove
            ]
            self.set_json(
-                file_path=self.
+                file_path=self._report_file_path,
                json_path=json_path,
                json_value=updated_custom_visuals,
            )
sempy_labs/theme/_org_themes.py
CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from uuid import UUID
-from
+from sempy_labs._helper_functions import (
     _is_valid_uuid,
     _base_api,
     _update_dataframe_datatypes,
@@ -9,6 +9,7 @@ from .._helper_functions import (


 _pbi_url_prefix = None
+_theme_url_prefix = "metadata/v202409/organization/themes"


 def init_pbi_url_prefix():
@@ -44,7 +45,7 @@ def list_org_themes() -> pd.DataFrame:

     df = _create_dataframe(columns=columns)

-    response = _base_api(request=f"{_pbi_url_prefix}/
+    response = _base_api(request=f"{_pbi_url_prefix}/{_theme_url_prefix}")

     result = response.json().get("orgThemes")
     if result:
@@ -110,9 +111,7 @@ def get_org_theme_json(theme: str | UUID) -> dict:
     init_pbi_url_prefix()

     theme_id = resolve_theme_id(theme)
-    response = _base_api(
-        request=f"{_pbi_url_prefix}/metadata/organization/themes/{theme_id}"
-    )
+    response = _base_api(request=f"{_pbi_url_prefix}/{_theme_url_prefix}/{theme_id}")
     return response.json().get("themeJson", {})


@@ -124,7 +123,7 @@ def delete_org_theme(theme: str | UUID) -> None:

     theme_id = resolve_theme_id(theme)
     _base_api(
-        request=f"{_pbi_url_prefix}/
+        request=f"{_pbi_url_prefix}/{_theme_url_prefix}/{theme_id}",
         method="delete",
         status_codes=204,
     )
sempy_labs/tom/_model.py
CHANGED
@@ -1200,7 +1200,7 @@ class TOMWrapper:
         entity_name: str,
         expression: Optional[str] = None,
         description: Optional[str] = None,
-        schema_name: str =
+        schema_name: str = None,
     ):
         """
         Adds an entity partition to a table within a semantic model.
@@ -1216,7 +1216,7 @@ class TOMWrapper:
            Defaults to None which resolves to the 'DatabaseQuery' expression.
         description : str, default=None
            A description for the partition.
-        schema_name : str, default=
+        schema_name : str, default=None
            The schema name.
         """
         import Microsoft.AnalysisServices.Tabular as TOM
@@ -1228,7 +1228,8 @@ class TOMWrapper:
            ep.ExpressionSource = self.model.Expressions["DatabaseQuery"]
         else:
            ep.ExpressionSource = self.model.Expressions[expression]
-
+        if schema_name:
+            ep.SchemaName = schema_name
         p = TOM.Partition()
         p.Name = table_name
         p.Source = ep
@@ -1236,6 +1237,10 @@ class TOMWrapper:
         if description is not None:
            p.Description = description

+        # For the source lineage tag
+        if schema_name is None:
+            schema_name = "dbo"
+
         self.model.Tables[table_name].Partitions.Add(p)
         self.model.Tables[table_name].SourceLineageTag = (
            f"[{schema_name}].[{entity_name}]"
@@ -5122,25 +5127,14 @@ class TOMWrapper:
            Generates the M expression for the import partition.
            """

-            if artifact_type == "Lakehouse":
-                type_id = "lakehouseId"
-            elif artifact_type == "Warehouse":
-                type_id = "warehouseId"
-            else:
-                raise NotImplementedError
-
            full_table_name = (
-                f"{schema_name}
+                f"{schema_name}/{table_name}" if schema_name else table_name
            )

-            return f"""let
-
-
-
-                result = #"Artifact"{{[Id="{full_table_name}",ItemKind="Table"]}}[Data]
-            in
-                result
-            """
+            return f"""let\n\tSource = AzureStorage.DataLake("https://onelake.dfs.fabric.microsoft.com/{workspace_id}/{artifact_id}", [HierarchicalNavigation=true]),
+                Tables = Source{{[Name = "Tables"]}}[Content],
+                ExpressionTable = Tables{{[Name = "{full_table_name}"]}}[Content],
+                ToDelta = DeltaLake.Table(ExpressionTable)\nin\n\tToDelta"""

         m_expression = _generate_m_expression(
            source_workspace_id,
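The schema_name default above changes from a fixed value to None: the partition's SchemaName is only set when a schema is supplied, while the source lineage tag falls back to "dbo". A hedged sketch of how this is typically driven (the method name add_entity_partition is an assumption inferred from the docstring shown in the hunk, and the dataset/workspace/table names are placeholders):

from sempy_labs.tom import connect_semantic_model

with connect_semantic_model(dataset="Sales Model", workspace="Analytics", readonly=False) as tom:
    # Default schema: SchemaName stays unset and the lineage tag resolves to [dbo].[DimDate]
    tom.add_entity_partition(table_name="DimDate", entity_name="DimDate")
    # Explicit schema: SchemaName is set and the lineage tag resolves to [sales].[FactSales]
    tom.add_entity_partition(table_name="FactSales", entity_name="FactSales", schema_name="sales")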
sempy_labs/variable_library/__init__.py
ADDED
@@ -0,0 +1,19 @@
+from ._functions import (
+    get_variable_library,
+    list_variables,
+    list_variable_libraries,
+    delete_variable_library,
+    get_variable_library_definition,
+    get_variable_values,
+    get_variable_value,
+)
+
+__all__ = [
+    "get_variable_library",
+    "list_variables",
+    "list_variable_libraries",
+    "delete_variable_library",
+    "get_variable_library_definition",
+    "get_variable_values",
+    "get_variable_value",
+]
sempy_labs/variable_library/_functions.py
ADDED
@@ -0,0 +1,403 @@
+from sempy_labs._helper_functions import (
+    resolve_item_id,
+    resolve_workspace_id,
+    _base_api,
+    _create_dataframe,
+    _update_dataframe_datatypes,
+    delete_item,
+    _decode_b64,
+)
+import pandas as pd
+from typing import Any, Optional, List, Union
+from uuid import UUID
+from sempy._utils._log import log
+import json
+import sempy_labs._icons as icons
+
+
+@log
+def get_variable_library(
+    variable_library: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
+    """
+    Returns properties of the specified variable library.
+
+    This is a wrapper function for the following API: `Items - Get Variable Library <https://learn.microsoft.com/rest/api/fabric/variablelibrary/items/get-variable-library>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    variable_library : str | uuid.UUID
+        Name or ID of the variable library.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the properties of the variable library.
+    """
+
+    columns = {
+        "Variable Library Name": "string",
+        "Variable Library Id": "string",
+        "Description": "string",
+        "Active Value Set Name": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    workspace_id = resolve_workspace_id(workspace)
+    variable_library_id = resolve_item_id(
+        item=variable_library, type="VariableLibrary", workspace=workspace
+    )
+
+    response = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/variableLibraries/{variable_library_id}",
+        client="fabric_sp",
+    )
+
+    result = response.json()
+    prop = result.get("properties", {})
+
+    if prop:
+        df = pd.DataFrame(
+            [
+                {
+                    "Variable Library Name": result.get("displayName"),
+                    "Variable Library Id": result.get("id"),
+                    "Description": result.get("description"),
+                    "Active Value Set Name": prop.get("activeValueSetName"),
+                }
+            ],
+            columns=list(columns.keys()),
+        )
+
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+    return df
+
+
+@log
+def list_variable_libraries(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+    """
+    Shows the variable libraries within a workspace.
+
+    This is a wrapper function for the following API: `Items - List Variable Libraries <https://learn.microsoft.com/rest/api/fabric/variablelibrary/items/list-variable-libraries>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the variable libraries within a workspace.
+    """
+
+    columns = {
+        "Variable Library Name": "string",
+        "Variable Library Id": "string",
+        "Description": "string",
+        "Active Value Set Name": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    workspace_id = resolve_workspace_id(workspace)
+
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/VariableLibraries",
+        uses_pagination=True,
+        client="fabric_sp",
+    )
+
+    rows = []
+    for r in responses:
+        for v in r.get("value", []):
+            prop = v.get("properties", {})
+
+            rows.append(
+                {
+                    "Variable Library Name": v.get("displayName"),
+                    "Variable Library Id": v.get("id"),
+                    "Description": v.get("description"),
+                    "Active Value Set Name": prop.get("activeValueSetName"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+    return df
+
+
+@log
+def delete_variable_library(
+    variable_library: str | UUID, workspace: Optional[str | UUID] = None
+):
+    """
+    Deletes a variable library.
+
+    This is a wrapper function for the following API: `Items - Delete Variable Library <https://learn.microsoft.com/rest/api/fabric/variablelibrary/items/delete-variable-library>`_.
+
+    Parameters
+    ----------
+    variable_library : str | uuid.UUID
+        Name or ID of the variable library.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    delete_item(item=variable_library, type="VariableLibrary", workspace=workspace)
+
+
+@log
+def get_variable_library_definition(
+    variable_library: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    decode: bool = True,
+    return_dataframe: bool = False,
+) -> dict | pd.DataFrame:
+    """
+    Gets the definition of a variable library.
+
+    This is a wrapper function for the following API: `Items - Get Variable Library Definition <https://learn.microsoft.com/rest/api/fabric/variablelibrary/items/delete-variable-library>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    dict | pandas.DataFrame
+        A dictionary showing the definition or a pandas dataframe showing the definition.
+    """
+
+    workspace_id = resolve_workspace_id(workspace)
+    variable_library_id = resolve_item_id(
+        item=variable_library, type="VariableLibrary", workspace=workspace
+    )
+
+    result = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/variableLibraries/{variable_library_id}/getDefinition",
+        method="post",
+        client="fabric_sp",
+        status_codes=None,
+        lro_return_json=True,
+    )
+
+    if decode:
+        definition = {"definition": {"parts": []}}
+
+        for part in result.get("definition", {}).get("parts", []):
+            path = part.get("path")
+            payload = _decode_b64(part.get("payload"))
+            definition["definition"]["parts"].append({"path": path, "payload": payload})
+    else:
+        definition = result.copy()
+
+    if return_dataframe:
+        df = pd.DataFrame(definition["definition"]["parts"])
+        df.columns = ["Path", "Payload", "Payload Type"]
+        return df
+    else:
+        return definition
+
+
+@log
+def list_variables(
+    variable_library: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
+    """
+    Lists the variables in a variable library.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    variable_library : str | uuid.UUID
+        Name or ID of the variable library.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the variables within a variable library.
+    """
+
+    result = get_variable_library_definition(
+        variable_library=variable_library,
+        workspace=workspace,
+        decode=True,
+        return_dataframe=False,
+    )
+
+    columns = {
+        "Variable Name": "string",
+        "Note": "string",
+        "Type": "string",
+        "Value": "string",
+    }
+
+    df = _create_dataframe(columns=columns)
+
+    rows = []
+    for part in result.get("definition").get("parts"):
+        path = part.get("path")
+        payload = json.loads(part.get("payload"))
+        if path == "variables.json":
+
+            for variable in payload.get("variables", []):
+                rows.append(
+                    {
+                        "Variable Name": variable.get("name"),
+                        "Note": variable.get("note"),
+                        "Type": variable.get("type"),
+                        "Value": variable.get("value"),
+                    }
+                )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+    for part in result.get("definition", {}).get("parts", []):
+        path = part.get("path")
+        if path.startswith("valueSets") and path.endswith(".json"):
+            payload = json.loads(part.get("payload"))
+            value_set_name = payload.get("name")
+
+            # Initialize the new column with None (or pd.NA)
+            df[value_set_name] = None
+
+            for override in payload.get("variableOverrides", []):
+                variable_name = override.get("name")
+                variable_value = override.get("value")
+
+                # Set the value in the appropriate row and column
+                df.loc[df["Variable Name"] == variable_name, value_set_name] = (
+                    variable_value
+                )
+
+    return df
+
+
+@log
+def get_variable_values(
+    variable_names: List[str],
+    variable_library: Union[str, UUID],
+    workspace: Optional[Union[str, UUID]] = None,
+    value_set: Optional[str] = None,
+) -> dict:
+    """
+    Gets the values of multiple variables from a variable library with a single call to list_variables.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    variable_names : List[str]
+        A list of variable names to retrieve.
+    variable_library : str | uuid.UUID
+        Name or ID of the variable library.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    value_set : str, default=None
+        The name of the value set to use for variable overrides.
+        If None, the active value set of the variable library will be used.
+
+    Returns
+    -------
+    dict
+        Dictionary mapping variable names to their corresponding values.
+    """
+
+    if isinstance(variable_names, str):
+        variable_names = [variable_names]
+
+    if value_set is None:
+        vl_df = get_variable_library(
+            variable_library=variable_library, workspace=workspace
+        )
+        if vl_df.empty:
+            raise ValueError(
+                f"{icons.red_dot} The variable library '{variable_library}' does not exist within the '{workspace}' workspace."
+            )
+        value_set = vl_df["Active Value Set Name"].iloc[0]
+
+    df = list_variables(variable_library=variable_library, workspace=workspace)
+    found_variables = df[df["Variable Name"].isin(variable_names)]
+
+    missing = set(variable_names) - set(found_variables["Variable Name"])
+    if missing:
+        raise ValueError(
+            f"{icons.red_dot} The following variables do not exist in the '{variable_library}' variable library: {', '.join(missing)}"
+        )
+
+    if value_set == "Default value set":
+        value_set = "Value"
+    if value_set not in df.columns:
+        raise ValueError(
+            f"{icons.red_dot} The value set '{value_set}' does not exist in the variable library '{variable_library}' within the '{workspace}' workspace."
+        )
+
+    return dict(zip(found_variables["Variable Name"], found_variables[value_set]))
+
+
+@log
+def get_variable_value(
+    variable_name: str,
+    variable_library: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    value_set: Optional[str] = None,
+) -> Any:
+    """
+    Gets the value of a single variable in a variable library.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    variable_name : str
+        Name of the variable.
+    variable_library : str | uuid.UUID
+        Name or ID of the variable library.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    value_set : str, default=None
+        The name of the value set to use for variable overrides.
+        If None, the active value set of the variable library will be used.
+
+    Returns
+    -------
+    Any
+        The value of the variable.
+    """
+
+    return get_variable_values(
+        variable_names=[variable_name],
+        variable_library=variable_library,
+        workspace=workspace,
+        value_set=value_set,
+    )[variable_name]
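Taken together, the new module gives notebook code a typed lookup into Fabric variable libraries. A hedged usage sketch built only from the functions added above (the workspace, library, variable, and value-set names are placeholders):

import sempy_labs.variable_library as vl

# Tabular view of every variable, plus one column per value set defined in the library.
df = vl.list_variables(variable_library="Environment Settings", workspace="Analytics")

# Single lookup using the library's active value set.
conn = vl.get_variable_value(
    variable_name="SqlConnectionString",
    variable_library="Environment Settings",
    workspace="Analytics",
)

# Batch lookup against a specific value set; returns a {name: value} dictionary.
values = vl.get_variable_values(
    variable_names=["SqlConnectionString", "LakehouseName"],
    variable_library="Environment Settings",
    workspace="Analytics",
    value_set="Production",
)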