semantic-link-labs 0.8.2__py3-none-any.whl → 0.8.4__py3-none-any.whl
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/METADATA +37 -8
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/RECORD +108 -104
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +38 -0
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +24 -5
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +28 -4
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +33 -4
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +36 -4
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +90 -58
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +31 -5
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +28 -4
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +24 -4
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +72 -56
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +95 -71
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +33 -4
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +100 -72
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +23 -5
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +32 -4
- sempy_labs/_capacities.py +138 -25
- sempy_labs/_capacity_migration.py +161 -60
- sempy_labs/_clear_cache.py +3 -3
- sempy_labs/_data_pipelines.py +54 -0
- sempy_labs/_dataflows.py +4 -0
- sempy_labs/_deployment_pipelines.py +13 -7
- sempy_labs/_environments.py +6 -0
- sempy_labs/_eventhouses.py +6 -0
- sempy_labs/_eventstreams.py +6 -0
- sempy_labs/_external_data_shares.py +190 -0
- sempy_labs/_generate_semantic_model.py +26 -4
- sempy_labs/_git.py +15 -15
- sempy_labs/_helper_functions.py +186 -11
- sempy_labs/_icons.py +55 -22
- sempy_labs/_kql_databases.py +6 -0
- sempy_labs/_kql_querysets.py +6 -0
- sempy_labs/_list_functions.py +6 -3
- sempy_labs/_managed_private_endpoints.py +166 -0
- sempy_labs/_mirrored_warehouses.py +2 -0
- sempy_labs/_ml_experiments.py +6 -0
- sempy_labs/_ml_models.py +6 -0
- sempy_labs/_model_bpa.py +11 -6
- sempy_labs/_model_bpa_bulk.py +14 -30
- sempy_labs/_model_bpa_rules.py +8 -3
- sempy_labs/_notebooks.py +111 -15
- sempy_labs/_query_scale_out.py +8 -6
- sempy_labs/_refresh_semantic_model.py +299 -49
- sempy_labs/_spark.py +12 -5
- sempy_labs/_sql.py +2 -2
- sempy_labs/_translations.py +16 -14
- sempy_labs/_vertipaq.py +127 -116
- sempy_labs/_warehouses.py +90 -1
- sempy_labs/_workloads.py +128 -0
- sempy_labs/_workspace_identity.py +4 -4
- sempy_labs/_workspaces.py +14 -1
- sempy_labs/admin/__init__.py +2 -0
- sempy_labs/admin/_basic_functions.py +203 -58
- sempy_labs/admin/_domains.py +18 -18
- sempy_labs/directlake/__init__.py +2 -0
- sempy_labs/directlake/_directlake_schema_sync.py +2 -6
- sempy_labs/directlake/_dl_helper.py +4 -1
- sempy_labs/directlake/_generate_shared_expression.py +1 -1
- sempy_labs/directlake/_get_shared_expression.py +7 -1
- sempy_labs/directlake/_guardrails.py +3 -2
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -8
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +78 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +13 -32
- sempy_labs/lakehouse/_get_lakehouse_tables.py +6 -2
- sempy_labs/lakehouse/_shortcuts.py +4 -0
- sempy_labs/migration/_create_pqt_file.py +2 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +2 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -8
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +17 -0
- sempy_labs/migration/_migration_validation.py +2 -0
- sempy_labs/migration/_refresh_calc_tables.py +1 -0
- sempy_labs/report/__init__.py +4 -1
- sempy_labs/report/_generate_report.py +16 -14
- sempy_labs/report/_paginated.py +74 -0
- sempy_labs/report/_report_bpa.py +8 -10
- sempy_labs/report/_report_functions.py +19 -19
- sempy_labs/report/_report_rebind.py +6 -1
- sempy_labs/report/_reportwrapper.py +3 -3
- sempy_labs/tom/_model.py +173 -67
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/top_level.txt +0 -0
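To move between these two releases in a Fabric notebook, the usual pip upgrade path applies (the version pin is taken from the title above; `%pip` is the notebook magic, not part of this package):

    %pip install semantic-link-labs==0.8.4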
sempy_labs/_model_bpa.py
CHANGED
@@ -12,7 +12,7 @@ from sempy_labs._helper_functions import (
     resolve_workspace_capacity,
     resolve_dataset_id,
     get_language_codes,
-
+    _get_max_run_id,
 )
 from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached
 from sempy_labs.tom import connect_semantic_model
@@ -56,7 +56,7 @@ def run_model_bpa(
     extended : bool, default=False
         If True, runs the set_vertipaq_annotations function to collect Vertipaq Analyzer statistics to be used in the analysis of the semantic model.
     language : str, default=None
-        Specifying a language code (i.e. 'it-IT' for Italian) will auto-translate the Category, Rule Name and Description into the specified language.
+        Specifying a language name or code (i.e. 'it-IT' for Italian) will auto-translate the Category, Rule Name and Description into the specified language.
         Defaults to None which resolves to English.

     Returns
@@ -113,6 +113,7 @@ def run_model_bpa(
     )

     if extended:
+        icons.sll_tags.append("ModelBPAExtended")
         with connect_semantic_model(
             dataset=dataset, workspace=workspace, readonly=False
         ) as tom:
@@ -220,9 +221,9 @@ def run_model_bpa(

         rules = translate_using_spark(rules)

-    rules["Severity"]
-    rules["Severity"]
-    rules["Severity"]
+    rules.loc[rules["Severity"] == "Warning", "Severity"] = icons.warning
+    rules.loc[rules["Severity"] == "Error", "Severity"] = icons.error
+    rules.loc[rules["Severity"] == "Info", "Severity"] = icons.info

     pd.set_option("display.max_colwidth", 1000)

@@ -350,7 +351,7 @@ def run_model_bpa(
         if len(lakeT_filt) == 0:
             runId = 1
         else:
-            max_run_id =
+            max_run_id = _get_max_run_id(
                 lakehouse=lakehouse, table_name=delta_table_name
             )
             runId = max_run_id + 1
@@ -387,10 +388,14 @@ def run_model_bpa(
         dfExport.insert(5, colName, dfExport.pop(colName))

         dfExport.columns = dfExport.columns.str.replace(" ", "_")
+        schema = {
+            key.replace(" ", "_"): value for key, value in icons.bpa_schema.items()
+        }
         save_as_delta_table(
             dataframe=dfExport,
             delta_table_name=delta_table_name,
             write_mode="append",
+            schema=schema,
             merge_schema=True,
         )
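A minimal usage sketch of the updated entry point, based on the docstring shown above (the dataset and workspace names are placeholders):

    from sempy_labs import run_model_bpa

    # Analyze a semantic model; 'it-IT' auto-translates the Category, Rule Name
    # and Description, and extended=True collects Vertipaq Analyzer statistics
    # before the rules run.
    run_model_bpa(
        dataset="Sales Model",   # placeholder semantic model name
        workspace="Analytics",   # placeholder workspace name
        language="it-IT",
        extended=True,
    )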
sempy_labs/_model_bpa_bulk.py
CHANGED
@@ -6,7 +6,7 @@ from sempy_labs._helper_functions import (
     save_as_delta_table,
     resolve_workspace_capacity,
     retry,
-
+    _get_max_run_id,
 )
 from sempy_labs.lakehouse import (
     get_lakehouse_tables,
@@ -49,8 +49,6 @@ def run_model_bpa_bulk(
         The semantic models to always skip when running this analysis.
     """

-    import pyspark.sql.functions as F
-
     if not lakehouse_attached():
         raise ValueError(
             f"{icons.red_dot} No lakehouse is attached to this notebook. Must attach a lakehouse to the notebook."
@@ -61,24 +59,6 @@ def run_model_bpa_bulk(

     skip_models.extend(["ModelBPA", "Fabric Capacity Metrics"])

-    cols = [
-        "Capacity Name",
-        "Capacity Id",
-        "Workspace Name",
-        "Workspace Id",
-        "Dataset Name",
-        "Dataset Id",
-        "Configured By",
-        "Rule Name",
-        "Category",
-        "Severity",
-        "Object Type",
-        "Object Name",
-        "Description",
-        "URL",
-        "RunId",
-        "Timestamp",
-    ]
     now = datetime.datetime.now()
     output_table = "modelbparesults"
     lakehouse_workspace = fabric.resolve_workspace_name()
@@ -92,7 +72,7 @@ def run_model_bpa_bulk(
     if len(lakeT_filt) == 0:
         runId = 1
     else:
-        max_run_id =
+        max_run_id = _get_max_run_id(lakehouse=lakehouse, table_name=output_table)
         runId = max_run_id + 1

     if isinstance(workspace, str):
@@ -108,7 +88,7 @@ def run_model_bpa_bulk(
         wksp = r["Name"]
         wksp_id = r["Id"]
         capacity_id, capacity_name = resolve_workspace_capacity(workspace=wksp)
-        df = pd.DataFrame(columns=
+        df = pd.DataFrame(columns=list(icons.bpa_schema.keys()))
         dfD = fabric.list_datasets(workspace=wksp, mode="rest")

         # Exclude default semantic models
@@ -139,8 +119,8 @@ def run_model_bpa_bulk(
                     rules=rules,
                     extended=extended,
                 )
-                bpa_df["Capacity Id"] = capacity_id
                 bpa_df["Capacity Name"] = capacity_name
+                bpa_df["Capacity Id"] = capacity_id
                 bpa_df["Workspace Name"] = wksp
                 bpa_df["Workspace Id"] = wksp_id
                 bpa_df["Dataset Name"] = dataset_name
@@ -148,7 +128,7 @@ def run_model_bpa_bulk(
                 bpa_df["Configured By"] = config_by
                 bpa_df["Timestamp"] = now
                 bpa_df["RunId"] = runId
-                bpa_df = bpa_df[
+                bpa_df = bpa_df[list(icons.bpa_schema.keys())]

                 bpa_df["RunId"] = bpa_df["RunId"].astype("int")

@@ -162,16 +142,23 @@ def run_model_bpa_bulk(
                 )
                 print(e)

-        df["Severity"].replace(icons.severity_mapping
+        df["Severity"].replace(icons.severity_mapping)

         # Append save results individually for each workspace (so as not to create a giant dataframe)
         print(
             f"{icons.in_progress} Saving the Model BPA results of the '{wksp}' workspace to the '{output_table}' within the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace..."
         )
+
+        schema = {
+            key.replace(" ", "_"): value
+            for key, value in icons.bpa_schema.items()
+        }
+
         save_as_delta_table(
             dataframe=df,
             delta_table_name=output_table,
             write_mode="append",
+            schema=schema,
             merge_schema=True,
         )
         print(
@@ -205,9 +192,6 @@ def create_model_bpa_semantic_model(
         The workspace in which the lakehouse resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-
-    Returns
-    -------
     """

     from sempy_labs._helper_functions import resolve_lakehouse_name
@@ -246,7 +230,7 @@ def create_model_bpa_semantic_model(
         tom.model

     dyn_connect()
-
+    icons.sll_tags.append("ModelBPABulk")
     table_exists = False
     with connect_semantic_model(
         dataset=dataset, readonly=False, workspace=lakehouse_workspace
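A minimal sketch of the bulk run, assuming the function is exported at the package top level and a lakehouse is attached to the notebook (the no-lakehouse ValueError path is visible in the hunks above; the skipped model name is a placeholder):

    import sempy_labs as labs

    # Appends results per workspace to the 'modelbparesults' delta table,
    # now written with an explicit schema derived from icons.bpa_schema.
    labs.run_model_bpa_bulk(
        skip_models=["Contoso Legacy Model"],  # placeholder; models to always skip
    )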
sempy_labs/_model_bpa_rules.py
CHANGED
@@ -158,6 +158,7 @@ def model_bpa_rules(
                 and r.ToTable.Name == obj.Name
                 for r in tom.used_in_relationships(object=obj)
             ),
+            "When using DirectQuery, dimension tables should be set to Dual mode in order to improve query performance.",
            "https://learn.microsoft.com/power-bi/transform-model/desktop-storage-mode#propagation-of-the-dual-setting",
         ),
         (
@@ -492,6 +493,7 @@ def model_bpa_rules(
                 obj.Expression,
                 flags=re.IGNORECASE,
             ),
+            "Adding a constant value may lead to performance degradation.",
         ),
         (
             "DAX Expressions",
@@ -643,14 +645,15 @@ def model_bpa_rules(
             "Calculation groups with no calculation items",
             lambda obj, tom: obj.CalculationGroup is not None
             and not any(obj.CalculationGroup.CalculationItems),
+            "Calculation groups have no function unless they have calculation items.",
         ),
         (
             "Maintenance",
-            "Column",
+            ["Column", "Measure", "Table"],
             "Info",
             "Visible objects with no description",
             lambda obj, tom: obj.IsHidden is False and len(obj.Description) == 0,
-            "
+            "Add descriptions to objects. These descriptions are shown on hover within the Field List in Power BI Desktop. Additionally, you can leverage these descriptions to create an automated data dictionary.",
         ),
         (
             "Formatting",
@@ -710,6 +713,7 @@ def model_bpa_rules(
             "Percentages should be formatted with thousands separators and 1 decimal",
             lambda obj, tom: "%" in obj.FormatString
             and obj.FormatString != "#,0.0%;-#,0.0%;#,0.0%",
+            "For a better user experience, percengage measures should be formatted with a '%' sign.",
         ),
         (
             "Formatting",
@@ -719,6 +723,7 @@ def model_bpa_rules(
             lambda obj, tom: "$" not in obj.FormatString
             and "%" not in obj.FormatString
             and obj.FormatString not in ["#,0", "#,0.0"],
+            "For a better user experience, whole numbers should be formatted with commas.",
         ),
         (
             "Formatting",
@@ -731,7 +736,7 @@ def model_bpa_rules(
             and r.FromCardinality == TOM.RelationshipEndCardinality.Many
             for r in tom.used_in_relationships(object=obj)
         ),
-        "Foreign keys should always be hidden.",
+        "Foreign keys should always be hidden as they should not be used by end users.",
     ),
     (
         "Formatting",
sempy_labs/_notebooks.py
CHANGED
@@ -8,7 +8,6 @@ from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     lro,
     _decode_b64,
-    resolve_notebook_id,
 )
 from sempy.fabric.exceptions import FabricHTTPException

@@ -19,6 +18,8 @@ def get_notebook_definition(
     """
     Obtains the notebook definition.

+    This is a wrapper function for the following API: `Items - Get Notebook Definition <https://learn.microsoft.com/rest/api/fabric/notebook/items/get-notebook-definition>`_.
+
     Parameters
     ----------
     notebook_name : str
@@ -38,10 +39,12 @@ def get_notebook_definition(
     """

     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
+    item_id = fabric.resolve_item_id(
+        item_name=notebook_name, type="Notebook", workspace=workspace
+    )
     client = fabric.FabricRestClient()
     response = client.post(
-        f"v1/workspaces/{workspace_id}/notebooks/{
+        f"v1/workspaces/{workspace_id}/notebooks/{item_id}/getDefinition",
     )

     result = lro(client, response).json()
@@ -62,6 +65,7 @@ def import_notebook_from_web(
     url: str,
     description: Optional[str] = None,
     workspace: Optional[str] = None,
+    overwrite: bool = False,
 ):
     """
     Creates a new notebook within a workspace based on a Jupyter notebook hosted in the web.
@@ -82,16 +86,12 @@ def import_notebook_from_web(
         The name of the workspace.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    overwrite : bool, default=False
+        If set to True, overwrites the existing notebook in the workspace if it exists.
     """

-
-
-    dfI = fabric.list_items(workspace=workspace, type="Notebook")
-    dfI_filt = dfI[dfI["Display Name"] == notebook_name]
-    if len(dfI_filt) > 0:
-        raise ValueError(
-            f"{icons.red_dot} The '{notebook_name}' already exists within the '{workspace}' workspace."
-        )
+    if workspace is None:
+        workspace = fabric.resolve_workspace_name(workspace)

     # Fix links to go to the raw github file
     starting_text = "https://github.com/"
@@ -104,11 +104,56 @@ def import_notebook_from_web(
     response = requests.get(url)
     if response.status_code != 200:
         raise FabricHTTPException(response)
-
-
+
+    dfI = fabric.list_items(workspace=workspace, type="Notebook")
+    dfI_filt = dfI[dfI["Display Name"] == notebook_name]
+    if len(dfI_filt) == 0:
+        create_notebook(
+            name=notebook_name,
+            notebook_content=response.content,
+            workspace=workspace,
+            description=description,
+        )
+    elif len(dfI_filt) > 0 and overwrite:
+        update_notebook_definition(
+            name=notebook_name, notebook_content=response.content, workspace=workspace
+        )
+    else:
+        raise ValueError(
+            f"{icons.red_dot} The '{notebook_name}' already exists within the '{workspace}' workspace and 'overwrite' is set to False."
+        )
+
+
+def create_notebook(
+    name: str,
+    notebook_content: str,
+    description: Optional[str] = None,
+    workspace: Optional[str] = None,
+):
+    """
+    Creates a new notebook with a definition within a workspace.
+
+    Parameters
+    ----------
+    name : str
+        The name of the notebook to be created.
+    notebook_content : str
+        The Jupyter notebook content (not in Base64 format).
+    description : str, default=None
+        The description of the notebook.
+        Defaults to None which does not place a description.
+    workspace : str, default=None
+        The name of the workspace.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    client = fabric.FabricRestClient()
+    notebook_payload = base64.b64encode(notebook_content)

     request_body = {
-        "displayName":
+        "displayName": name,
         "definition": {
             "format": "ipynb",
             "parts": [
@@ -128,5 +173,56 @@ def import_notebook_from_web(
     lro(client, response, status_codes=[201, 202])

     print(
-        f"{icons.green_dot} The '{
+        f"{icons.green_dot} The '{name}' notebook was created within the '{workspace}' workspace."
+    )
+
+
+def update_notebook_definition(
+    name: str, notebook_content: str, workspace: Optional[str] = None
+):
+    """
+    Updates an existing notebook with a new definition.
+
+    Parameters
+    ----------
+    name : str
+        The name of the notebook to be created.
+    notebook_content : str
+        The Jupyter notebook content (not in Base64 format).
+    workspace : str, default=None
+        The name of the workspace.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    client = fabric.FabricRestClient()
+    notebook_payload = base64.b64encode(notebook_content)
+    notebook_id = fabric.resolve_item_id(
+        item_name=name, type="Notebook", workspace=workspace
+    )
+
+    request_body = {
+        "displayName": name,
+        "definition": {
+            "format": "ipynb",
+            "parts": [
+                {
+                    "path": "notebook-content.py",
+                    "payload": notebook_payload,
+                    "payloadType": "InlineBase64",
+                }
+            ],
+        },
+    }
+
+    response = client.post(
+        f"v1/workspaces/{workspace_id}/notebooks/{notebook_id}/updateDefinition",
+        json=request_body,
+    )
+
+    lro(client, response, return_status_code=True)
+
+    print(
+        f"{icons.green_dot} The '{name}' notebook was updated within the '{workspace}' workspace."
     )
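A short sketch of the new overwrite flow, using the signature shown above (URL and names are placeholders; with overwrite=True an existing notebook is routed to update_notebook_definition instead of raising):

    import sempy_labs as labs

    labs.import_notebook_from_web(
        notebook_name="Model BPA",
        url="https://github.com/owner/repo/blob/main/Model%20BPA.ipynb",  # placeholder URL
        workspace="Analytics",   # placeholder workspace
        overwrite=True,          # update the definition if the notebook already exists
    )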
sempy_labs/_query_scale_out.py
CHANGED
@@ -13,6 +13,8 @@ def qso_sync(dataset: str, workspace: Optional[str] = None):
     """
     Triggers a query scale-out sync of read-only replicas for the specified dataset from the specified workspace.

+    This is a wrapper function for the following API: `Datasets - Trigger Query Scale Out Sync In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/trigger-query-scale-out-sync-in-group>`_.
+
     Parameters
     ----------
     dataset : str
@@ -23,8 +25,6 @@ def qso_sync(dataset: str, workspace: Optional[str] = None):
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/trigger-query-scale-out-sync-in-group
-
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
     dataset_id = resolve_dataset_id(dataset, workspace)

@@ -46,6 +46,8 @@ def qso_sync_status(
     """
     Returns the query scale-out sync status for the specified dataset from the specified workspace.

+    This is a wrapper function for the following API: `Datasets - Get Query Scale Out Sync Status In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/get-query-scale-out-sync-status-in-group>`_.
+
     Parameters
     ----------
     dataset : str
@@ -61,8 +63,6 @@ def qso_sync_status(
         2 pandas dataframes showing the query scale-out sync status.
     """

-    # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/get-query-scale-out-sync-status-in-group
-
     df = pd.DataFrame(
         columns=[
             "Scale Out Status",
@@ -143,6 +143,8 @@ def disable_qso(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
     """
     Sets the max read-only replicas to 0, disabling query scale out.

+    This is a wrapper function for the following API: `Datasets - Update Dataset In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/update-dataset-in-group>`_.
+
     Parameters
     ----------
     dataset : str
@@ -188,6 +190,8 @@ def set_qso(
     """
     Sets the query scale out settings for a semantic model.

+    This is a wrapper function for the following API: `Datasets - Update Dataset In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/update-dataset-in-group>`_.
+
     Parameters
     ----------
     dataset : str
@@ -209,8 +213,6 @@ def set_qso(

     from sempy_labs._helper_functions import is_default_semantic_model

-    # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/update-dataset-in-group
-
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
     dataset_id = resolve_dataset_id(dataset, workspace)
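A minimal sketch of the query scale-out wrappers documented above, assuming top-level package exports (dataset and workspace names are placeholders):

    import sempy_labs as labs

    # Trigger a read-only replica sync, then inspect the sync status;
    # qso_sync_status returns two dataframes per its docstring.
    labs.qso_sync(dataset="Sales Model", workspace="Analytics")
    df_status, df_details = labs.qso_sync_status(
        dataset="Sales Model", workspace="Analytics"
    )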