semantic-link-labs 0.5.0__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- semantic_link_labs-0.7.0.dist-info/METADATA +148 -0
- semantic_link_labs-0.7.0.dist-info/RECORD +111 -0
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +45 -15
- sempy_labs/_ai.py +42 -85
- sempy_labs/_bpa_translation/_translations_am-ET.po +828 -0
- sempy_labs/_bpa_translation/_translations_ar-AE.po +860 -0
- sempy_labs/_bpa_translation/_translations_cs-CZ.po +894 -0
- sempy_labs/_bpa_translation/_translations_da-DK.po +894 -0
- sempy_labs/_bpa_translation/_translations_de-DE.po +933 -0
- sempy_labs/_bpa_translation/_translations_el-GR.po +936 -0
- sempy_labs/_bpa_translation/_translations_es-ES.po +915 -0
- sempy_labs/_bpa_translation/_translations_fa-IR.po +883 -0
- sempy_labs/_bpa_translation/_translations_fr-FR.po +938 -0
- sempy_labs/_bpa_translation/_translations_ga-IE.po +912 -0
- sempy_labs/_bpa_translation/_translations_he-IL.po +855 -0
- sempy_labs/_bpa_translation/_translations_hi-IN.po +892 -0
- sempy_labs/_bpa_translation/_translations_hu-HU.po +910 -0
- sempy_labs/_bpa_translation/_translations_is-IS.po +887 -0
- sempy_labs/_bpa_translation/_translations_it-IT.po +931 -0
- sempy_labs/_bpa_translation/_translations_ja-JP.po +805 -0
- sempy_labs/_bpa_translation/_translations_nl-NL.po +924 -0
- sempy_labs/_bpa_translation/_translations_pl-PL.po +913 -0
- sempy_labs/_bpa_translation/_translations_pt-BR.po +909 -0
- sempy_labs/_bpa_translation/_translations_pt-PT.po +904 -0
- sempy_labs/_bpa_translation/_translations_ru-RU.po +909 -0
- sempy_labs/_bpa_translation/_translations_ta-IN.po +922 -0
- sempy_labs/_bpa_translation/_translations_te-IN.po +896 -0
- sempy_labs/_bpa_translation/_translations_th-TH.po +873 -0
- sempy_labs/_bpa_translation/_translations_zh-CN.po +767 -0
- sempy_labs/_bpa_translation/_translations_zu-ZA.po +916 -0
- sempy_labs/_clear_cache.py +12 -8
- sempy_labs/_connections.py +77 -70
- sempy_labs/_dax.py +7 -9
- sempy_labs/_generate_semantic_model.py +75 -90
- sempy_labs/_helper_functions.py +371 -20
- sempy_labs/_icons.py +23 -0
- sempy_labs/_list_functions.py +855 -427
- sempy_labs/_model_auto_build.py +4 -3
- sempy_labs/_model_bpa.py +307 -1118
- sempy_labs/_model_bpa_bulk.py +363 -0
- sempy_labs/_model_bpa_rules.py +831 -0
- sempy_labs/_model_dependencies.py +20 -16
- sempy_labs/_one_lake_integration.py +18 -12
- sempy_labs/_query_scale_out.py +116 -129
- sempy_labs/_refresh_semantic_model.py +23 -10
- sempy_labs/_translations.py +367 -288
- sempy_labs/_vertipaq.py +152 -123
- sempy_labs/directlake/__init__.py +7 -1
- sempy_labs/directlake/_directlake_schema_compare.py +33 -30
- sempy_labs/directlake/_directlake_schema_sync.py +60 -77
- sempy_labs/directlake/_dl_helper.py +233 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +7 -8
- sempy_labs/directlake/_get_shared_expression.py +5 -3
- sempy_labs/directlake/_guardrails.py +20 -16
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +17 -10
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -2
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -5
- sempy_labs/directlake/_update_directlake_partition_entity.py +169 -22
- sempy_labs/directlake/_warm_cache.py +7 -4
- sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
- sempy_labs/lakehouse/_get_lakehouse_tables.py +65 -71
- sempy_labs/lakehouse/_lakehouse.py +5 -3
- sempy_labs/lakehouse/_shortcuts.py +20 -13
- sempy_labs/migration/__init__.py +1 -1
- sempy_labs/migration/_create_pqt_file.py +184 -186
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +240 -269
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +78 -77
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +444 -425
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +96 -102
- sempy_labs/migration/_migration_validation.py +2 -2
- sempy_labs/migration/_refresh_calc_tables.py +94 -100
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +6 -2
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_generate_report.py +260 -139
- sempy_labs/report/_report_functions.py +90 -59
- sempy_labs/report/_report_rebind.py +40 -34
- sempy_labs/tom/__init__.py +1 -4
- sempy_labs/tom/_model.py +601 -181
- semantic_link_labs-0.5.0.dist-info/METADATA +0 -22
- semantic_link_labs-0.5.0.dist-info/RECORD +0 -53
- sempy_labs/directlake/_fallback.py +0 -58
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/top_level.txt +0 -0
sempy_labs/_list_functions.py
CHANGED
|
@@ -1,17 +1,28 @@
|
|
|
1
|
-
import sempy
|
|
2
1
|
import sempy.fabric as fabric
|
|
3
2
|
from sempy_labs._helper_functions import (
|
|
4
|
-
resolve_workspace_name_and_id,
|
|
5
|
-
resolve_lakehouse_name,
|
|
6
|
-
create_relationship_name,
|
|
7
|
-
resolve_lakehouse_id
|
|
3
|
+
resolve_workspace_name_and_id,
|
|
4
|
+
resolve_lakehouse_name,
|
|
5
|
+
create_relationship_name,
|
|
6
|
+
resolve_lakehouse_id,
|
|
7
|
+
resolve_dataset_id,
|
|
8
|
+
_decode_b64,
|
|
9
|
+
pagination,
|
|
10
|
+
lro,
|
|
11
|
+
)
|
|
8
12
|
import pandas as pd
|
|
9
|
-
import
|
|
13
|
+
import base64
|
|
14
|
+
import requests
|
|
15
|
+
import time
|
|
16
|
+
import json
|
|
10
17
|
from pyspark.sql import SparkSession
|
|
11
18
|
from typing import Optional
|
|
12
19
|
import sempy_labs._icons as icons
|
|
20
|
+
from sempy.fabric.exceptions import FabricHTTPException
|
|
13
21
|
|
|
14
|
-
|
|
22
|
+
|
|
23
|
+
def get_object_level_security(
|
|
24
|
+
dataset: str, workspace: Optional[str] = None
|
|
25
|
+
) -> pd.DataFrame:
|
|
15
26
|
"""
|
|
16
27
|
Shows the object level security for the semantic model.
|
|
17
28
|
|
|
@@ -32,12 +43,13 @@ def get_object_level_security(dataset: str, workspace: Optional[str] = None) ->
|
|
|
32
43
|
|
|
33
44
|
from sempy_labs.tom import connect_semantic_model
|
|
34
45
|
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
46
|
+
workspace = fabric.resolve_workspace_name(workspace)
|
|
47
|
+
|
|
38
48
|
df = pd.DataFrame(columns=["Role Name", "Object Type", "Table Name", "Object Name"])
|
|
39
49
|
|
|
40
|
-
with connect_semantic_model(
|
|
50
|
+
with connect_semantic_model(
|
|
51
|
+
dataset=dataset, readonly=True, workspace=workspace
|
|
52
|
+
) as tom:
|
|
41
53
|
|
|
42
54
|
for r in tom.model.Roles:
|
|
43
55
|
for tp in r.TablePermissions:
|
|
@@ -45,7 +57,7 @@ def get_object_level_security(dataset: str, workspace: Optional[str] = None) ->
|
|
|
45
57
|
columnCount = 0
|
|
46
58
|
try:
|
|
47
59
|
columnCount = len(tp.ColumnPermissions)
|
|
48
|
-
except:
|
|
60
|
+
except Exception:
|
|
49
61
|
pass
|
|
50
62
|
objectType = "Table"
|
|
51
63
|
if columnCount == 0:
|
|
@@ -68,7 +80,8 @@ def get_object_level_security(dataset: str, workspace: Optional[str] = None) ->
|
|
|
68
80
|
"Object Name": cp.Name,
|
|
69
81
|
}
|
|
70
82
|
df = pd.concat(
|
|
71
|
-
[df, pd.DataFrame(new_data, index=[0])],
|
|
83
|
+
[df, pd.DataFrame(new_data, index=[0])],
|
|
84
|
+
ignore_index=True,
|
|
72
85
|
)
|
|
73
86
|
|
|
74
87
|
return df
|
|
@@ -93,53 +106,20 @@ def list_tables(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
|
|
|
93
106
|
A pandas dataframe showing the semantic model's tables and their properties.
|
|
94
107
|
"""
|
|
95
108
|
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
if workspace is None:
|
|
99
|
-
workspace = fabric.resolve_workspace_name()
|
|
109
|
+
workspace = fabric.resolve_workspace_name(workspace)
|
|
100
110
|
|
|
101
|
-
df =
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
"Hidden",
|
|
106
|
-
"Data Category",
|
|
107
|
-
"Description",
|
|
108
|
-
"Refresh Policy",
|
|
109
|
-
"Source Expression",
|
|
110
|
-
]
|
|
111
|
+
df = fabric.list_tables(
|
|
112
|
+
dataset=dataset,
|
|
113
|
+
workspace=workspace,
|
|
114
|
+
additional_xmla_properties=["RefreshPolicy", "RefreshPolicy.SourceExpression"],
|
|
111
115
|
)
|
|
112
116
|
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
for t in tom.model.Tables:
|
|
118
|
-
tableType = "Table"
|
|
119
|
-
rPolicy = bool(t.RefreshPolicy)
|
|
120
|
-
sourceExpression = None
|
|
121
|
-
if str(t.CalculationGroup) != "None":
|
|
122
|
-
tableType = "Calculation Group"
|
|
123
|
-
else:
|
|
124
|
-
for p in t.Partitions:
|
|
125
|
-
if p.SourceType == TOM.PartitionSourceType.Calculated:
|
|
126
|
-
tableType = "Calculated Table"
|
|
127
|
-
|
|
128
|
-
if rPolicy:
|
|
129
|
-
sourceExpression = t.RefreshPolicy.SourceExpression
|
|
130
|
-
|
|
131
|
-
new_data = {
|
|
132
|
-
"Name": t.Name,
|
|
133
|
-
"Type": tableType,
|
|
134
|
-
"Hidden": t.IsHidden,
|
|
135
|
-
"Data Category": t.DataCategory,
|
|
136
|
-
"Description": t.Description,
|
|
137
|
-
"Refresh Policy": rPolicy,
|
|
138
|
-
"Source Expression": sourceExpression,
|
|
139
|
-
}
|
|
140
|
-
df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
|
|
117
|
+
df["Refresh Policy"] = df["Refresh Policy"].notna()
|
|
118
|
+
df.rename(
|
|
119
|
+
columns={"Refresh Policy Source Expression": "Source Expression"}, inplace=True
|
|
120
|
+
)
|
|
141
121
|
|
|
142
|
-
|
|
122
|
+
return df
|
|
143
123
|
|
|
144
124
|
|
|
145
125
|
def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
|
|
@@ -163,7 +143,7 @@ def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFr
|
|
|
163
143
|
|
|
164
144
|
from sempy_labs.tom import connect_semantic_model
|
|
165
145
|
|
|
166
|
-
workspace = fabric.resolve_workspace_name()
|
|
146
|
+
workspace = fabric.resolve_workspace_name(workspace)
|
|
167
147
|
|
|
168
148
|
df = pd.DataFrame(
|
|
169
149
|
columns=[
|
|
@@ -175,7 +155,9 @@ def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFr
|
|
|
175
155
|
]
|
|
176
156
|
)
|
|
177
157
|
|
|
178
|
-
with connect_semantic_model(
|
|
158
|
+
with connect_semantic_model(
|
|
159
|
+
dataset=dataset, readonly=True, workspace=workspace
|
|
160
|
+
) as tom:
|
|
179
161
|
|
|
180
162
|
mName = tom.model.Name
|
|
181
163
|
for a in tom.model.Annotations:
|
|
@@ -203,7 +185,9 @@ def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFr
|
|
|
203
185
|
"Annotation Name": taName,
|
|
204
186
|
"Annotation Value": taValue,
|
|
205
187
|
}
|
|
206
|
-
df = pd.concat(
|
|
188
|
+
df = pd.concat(
|
|
189
|
+
[df, pd.DataFrame(new_data, index=[0])], ignore_index=True
|
|
190
|
+
)
|
|
207
191
|
for p in t.Partitions:
|
|
208
192
|
pName = p.Name
|
|
209
193
|
objectType = "Partition"
|
|
@@ -281,7 +265,9 @@ def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFr
|
|
|
281
265
|
"Annotation Name": daName,
|
|
282
266
|
"Annotation Value": daValue,
|
|
283
267
|
}
|
|
284
|
-
df = pd.concat(
|
|
268
|
+
df = pd.concat(
|
|
269
|
+
[df, pd.DataFrame(new_data, index=[0])], ignore_index=True
|
|
270
|
+
)
|
|
285
271
|
for r in tom.model.Relationships:
|
|
286
272
|
rName = r.Name
|
|
287
273
|
objectType = "Relationship"
|
|
@@ -295,7 +281,9 @@ def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFr
|
|
|
295
281
|
"Annotation Name": raName,
|
|
296
282
|
"Annotation Value": raValue,
|
|
297
283
|
}
|
|
298
|
-
df = pd.concat(
|
|
284
|
+
df = pd.concat(
|
|
285
|
+
[df, pd.DataFrame(new_data, index=[0])], ignore_index=True
|
|
286
|
+
)
|
|
299
287
|
for cul in tom.model.Cultures:
|
|
300
288
|
culName = cul.Name
|
|
301
289
|
objectType = "Translation"
|
|
@@ -309,7 +297,9 @@ def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFr
|
|
|
309
297
|
"Annotation Name": culaName,
|
|
310
298
|
"Annotation Value": culaValue,
|
|
311
299
|
}
|
|
312
|
-
df = pd.concat(
|
|
300
|
+
df = pd.concat(
|
|
301
|
+
[df, pd.DataFrame(new_data, index=[0])], ignore_index=True
|
|
302
|
+
)
|
|
313
303
|
for e in tom.model.Expressions:
|
|
314
304
|
eName = e.Name
|
|
315
305
|
objectType = "Expression"
|
|
@@ -323,7 +313,9 @@ def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFr
|
|
|
323
313
|
"Annotation Name": eaName,
|
|
324
314
|
"Annotation Value": eaValue,
|
|
325
315
|
}
|
|
326
|
-
df = pd.concat(
|
|
316
|
+
df = pd.concat(
|
|
317
|
+
[df, pd.DataFrame(new_data, index=[0])], ignore_index=True
|
|
318
|
+
)
|
|
327
319
|
for per in tom.model.Perspectives:
|
|
328
320
|
perName = per.Name
|
|
329
321
|
objectType = "Perspective"
|
|
@@ -337,7 +329,9 @@ def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFr
|
|
|
337
329
|
"Annotation Name": peraName,
|
|
338
330
|
"Annotation Value": peraValue,
|
|
339
331
|
}
|
|
340
|
-
df = pd.concat(
|
|
332
|
+
df = pd.concat(
|
|
333
|
+
[df, pd.DataFrame(new_data, index=[0])], ignore_index=True
|
|
334
|
+
)
|
|
341
335
|
for rol in tom.model.Roles:
|
|
342
336
|
rolName = rol.Name
|
|
343
337
|
objectType = "Role"
|
|
@@ -351,7 +345,9 @@ def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFr
|
|
|
351
345
|
"Annotation Name": rolaName,
|
|
352
346
|
"Annotation Value": rolaValue,
|
|
353
347
|
}
|
|
354
|
-
df = pd.concat(
|
|
348
|
+
df = pd.concat(
|
|
349
|
+
[df, pd.DataFrame(new_data, index=[0])], ignore_index=True
|
|
350
|
+
)
|
|
355
351
|
|
|
356
352
|
return df
|
|
357
353
|
|
|
@@ -390,8 +386,7 @@ def list_columns(
|
|
|
390
386
|
get_direct_lake_lakehouse,
|
|
391
387
|
)
|
|
392
388
|
|
|
393
|
-
|
|
394
|
-
workspace = fabric.resolve_workspace_name()
|
|
389
|
+
workspace = fabric.resolve_workspace_name(workspace)
|
|
395
390
|
|
|
396
391
|
dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
|
|
397
392
|
|
|
@@ -431,10 +426,10 @@ def list_columns(
|
|
|
431
426
|
].iloc[0]
|
|
432
427
|
|
|
433
428
|
# Build the query to be executed dynamically
|
|
434
|
-
query =
|
|
429
|
+
query = f"{query}COUNT(DISTINCT({scName})) AS {scName}, "
|
|
435
430
|
|
|
436
431
|
query = query[:-2]
|
|
437
|
-
query =
|
|
432
|
+
query = f"{query} FROM {lakehouse}.{lakeTName}"
|
|
438
433
|
sql_statements.append((table_name, query))
|
|
439
434
|
|
|
440
435
|
spark = SparkSession.builder.getOrCreate()
|
|
@@ -496,8 +491,10 @@ def list_dashboards(workspace: Optional[str] = None) -> pd.DataFrame:
|
|
|
496
491
|
|
|
497
492
|
client = fabric.PowerBIRestClient()
|
|
498
493
|
response = client.get(f"/v1.0/myorg/groups/{workspace_id}/dashboards")
|
|
494
|
+
if response.status_code != 200:
|
|
495
|
+
raise FabricHTTPException(response)
|
|
499
496
|
|
|
500
|
-
for v in response.json()
|
|
497
|
+
for v in response.json().get("value", []):
|
|
501
498
|
new_data = {
|
|
502
499
|
"Dashboard ID": v.get("id"),
|
|
503
500
|
"Dashboard Name": v.get("displayName"),
|
|
@@ -548,23 +545,29 @@ def list_lakehouses(workspace: Optional[str] = None) -> pd.DataFrame:
|
|
|
548
545
|
(workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
|
|
549
546
|
|
|
550
547
|
client = fabric.FabricRestClient()
|
|
551
|
-
response = client.get(f"/v1/workspaces/{workspace_id}/lakehouses
|
|
548
|
+
response = client.get(f"/v1/workspaces/{workspace_id}/lakehouses")
|
|
552
549
|
|
|
553
|
-
|
|
554
|
-
|
|
555
|
-
sqlEPProp = prop.get("sqlEndpointProperties",{})
|
|
550
|
+
if response.status_code != 200:
|
|
551
|
+
raise FabricHTTPException(response)
|
|
556
552
|
|
|
557
|
-
|
|
558
|
-
|
|
559
|
-
|
|
560
|
-
|
|
561
|
-
|
|
562
|
-
|
|
563
|
-
|
|
564
|
-
|
|
565
|
-
|
|
566
|
-
|
|
567
|
-
|
|
553
|
+
responses = pagination(client, response)
|
|
554
|
+
|
|
555
|
+
for r in responses:
|
|
556
|
+
for v in r.get("value", []):
|
|
557
|
+
prop = v.get("properties", {})
|
|
558
|
+
sqlEPProp = prop.get("sqlEndpointProperties", {})
|
|
559
|
+
|
|
560
|
+
new_data = {
|
|
561
|
+
"Lakehouse Name": v.get("displayName"),
|
|
562
|
+
"Lakehouse ID": v.get("id"),
|
|
563
|
+
"Description": v.get("description"),
|
|
564
|
+
"OneLake Tables Path": prop.get("oneLakeTablesPath"),
|
|
565
|
+
"OneLake Files Path": prop.get("oneLakeFilesPath"),
|
|
566
|
+
"SQL Endpoint Connection String": sqlEPProp.get("connectionString"),
|
|
567
|
+
"SQL Endpoint ID": sqlEPProp.get("id"),
|
|
568
|
+
"SQL Endpoint Provisioning Status": sqlEPProp.get("provisioningStatus"),
|
|
569
|
+
}
|
|
570
|
+
df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
|
|
568
571
|
|
|
569
572
|
return df
|
|
570
573
|
|
|
@@ -600,20 +603,25 @@ def list_warehouses(workspace: Optional[str] = None) -> pd.DataFrame:
|
|
|
600
603
|
(workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
|
|
601
604
|
|
|
602
605
|
client = fabric.FabricRestClient()
|
|
603
|
-
response = client.get(f"/v1/workspaces/{workspace_id}/warehouses
|
|
606
|
+
response = client.get(f"/v1/workspaces/{workspace_id}/warehouses")
|
|
607
|
+
if response.status_code != 200:
|
|
608
|
+
raise FabricHTTPException(response)
|
|
604
609
|
|
|
605
|
-
|
|
606
|
-
prop = v.get("properties",{})
|
|
610
|
+
responses = pagination(client, response)
|
|
607
611
|
|
|
608
|
-
|
|
609
|
-
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
|
|
615
|
-
|
|
616
|
-
|
|
612
|
+
for r in responses:
|
|
613
|
+
for v in r.get("value", []):
|
|
614
|
+
prop = v.get("properties", {})
|
|
615
|
+
|
|
616
|
+
new_data = {
|
|
617
|
+
"Warehouse Name": v.get("displayName"),
|
|
618
|
+
"Warehouse ID": v.get("id"),
|
|
619
|
+
"Description": v.get("description"),
|
|
620
|
+
"Connection Info": prop.get("connectionInfo"),
|
|
621
|
+
"Created Date": prop.get("createdDate"),
|
|
622
|
+
"Last Updated Time": prop.get("lastUpdatedTime"),
|
|
623
|
+
}
|
|
624
|
+
df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
|
|
617
625
|
|
|
618
626
|
return df
|
|
619
627
|
|
|
@@ -640,16 +648,21 @@ def list_sqlendpoints(workspace: Optional[str] = None) -> pd.DataFrame:
|
|
|
640
648
|
(workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
|
|
641
649
|
|
|
642
650
|
client = fabric.FabricRestClient()
|
|
643
|
-
response = client.get(f"/v1/workspaces/{workspace_id}/sqlEndpoints
|
|
651
|
+
response = client.get(f"/v1/workspaces/{workspace_id}/sqlEndpoints")
|
|
652
|
+
if response.status_code != 200:
|
|
653
|
+
raise FabricHTTPException(response)
|
|
644
654
|
|
|
645
|
-
|
|
655
|
+
responses = pagination(client, response)
|
|
646
656
|
|
|
647
|
-
|
|
648
|
-
|
|
649
|
-
|
|
650
|
-
|
|
651
|
-
|
|
652
|
-
|
|
657
|
+
for r in responses:
|
|
658
|
+
for v in r.get("value", []):
|
|
659
|
+
|
|
660
|
+
new_data = {
|
|
661
|
+
"SQL Endpoint ID": v.get("id"),
|
|
662
|
+
"SQL Endpoint Name": v.get("displayName"),
|
|
663
|
+
"Description": v.get("description"),
|
|
664
|
+
}
|
|
665
|
+
df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
|
|
653
666
|
|
|
654
667
|
return df
|
|
655
668
|
|
|
@@ -678,16 +691,21 @@ def list_mirroredwarehouses(workspace: Optional[str] = None) -> pd.DataFrame:
|
|
|
678
691
|
(workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
|
|
679
692
|
|
|
680
693
|
client = fabric.FabricRestClient()
|
|
681
|
-
response = client.get(f"/v1/workspaces/{workspace_id}/mirroredWarehouses
|
|
694
|
+
response = client.get(f"/v1/workspaces/{workspace_id}/mirroredWarehouses")
|
|
695
|
+
if response.status_code != 200:
|
|
696
|
+
raise FabricHTTPException(response)
|
|
682
697
|
|
|
683
|
-
|
|
698
|
+
responses = pagination(client, response)
|
|
684
699
|
|
|
685
|
-
|
|
686
|
-
|
|
687
|
-
|
|
688
|
-
|
|
689
|
-
|
|
690
|
-
|
|
700
|
+
for r in responses:
|
|
701
|
+
for v in r.get("value", []):
|
|
702
|
+
|
|
703
|
+
new_data = {
|
|
704
|
+
"Mirrored Warehouse": v.get("displayName"),
|
|
705
|
+
"Mirrored Warehouse ID": v.get("id"),
|
|
706
|
+
"Description": v.get("description"),
|
|
707
|
+
}
|
|
708
|
+
df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
|
|
691
709
|
|
|
692
710
|
return df
|
|
693
711
|
|
|
@@ -724,21 +742,26 @@ def list_kqldatabases(workspace: Optional[str] = None) -> pd.DataFrame:
|
|
|
724
742
|
(workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
|
|
725
743
|
|
|
726
744
|
client = fabric.FabricRestClient()
|
|
727
|
-
response = client.get(f"/v1/workspaces/{workspace_id}/kqlDatabases
|
|
745
|
+
response = client.get(f"/v1/workspaces/{workspace_id}/kqlDatabases")
|
|
746
|
+
if response.status_code != 200:
|
|
747
|
+
raise FabricHTTPException(response)
|
|
728
748
|
|
|
729
|
-
|
|
730
|
-
prop = v.get("properties",{})
|
|
749
|
+
responses = pagination(client, response)
|
|
731
750
|
|
|
732
|
-
|
|
733
|
-
|
|
734
|
-
|
|
735
|
-
|
|
736
|
-
|
|
737
|
-
|
|
738
|
-
|
|
739
|
-
|
|
740
|
-
|
|
741
|
-
|
|
751
|
+
for r in responses:
|
|
752
|
+
for v in r.get("value", []):
|
|
753
|
+
prop = v.get("properties", {})
|
|
754
|
+
|
|
755
|
+
new_data = {
|
|
756
|
+
"KQL Database Name": v.get("displayName"),
|
|
757
|
+
"KQL Database ID": v.get("id"),
|
|
758
|
+
"Description": v.get("description"),
|
|
759
|
+
"Parent Eventhouse Item ID": prop.get("parentEventhouseItemId"),
|
|
760
|
+
"Query Service URI": prop.get("queryServiceUri"),
|
|
761
|
+
"Ingestion Service URI": prop.get("ingestionServiceUri"),
|
|
762
|
+
"Kusto Database Type": prop.get("kustoDatabaseType"),
|
|
763
|
+
}
|
|
764
|
+
df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
|
|
742
765
|
|
|
743
766
|
return df
|
|
744
767
|
|
|
@@ -765,16 +788,21 @@ def list_kqlquerysets(workspace: Optional[str] = None) -> pd.DataFrame:
|
|
|
765
788
|
(workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
|
|
766
789
|
|
|
767
790
|
client = fabric.FabricRestClient()
|
|
768
|
-
response = client.get(f"/v1/workspaces/{workspace_id}/kqlQuerysets
|
|
791
|
+
response = client.get(f"/v1/workspaces/{workspace_id}/kqlQuerysets")
|
|
792
|
+
if response.status_code != 200:
|
|
793
|
+
raise FabricHTTPException(response)
|
|
769
794
|
|
|
770
|
-
|
|
795
|
+
responses = pagination(client, response)
|
|
771
796
|
|
|
772
|
-
|
|
773
|
-
|
|
774
|
-
|
|
775
|
-
|
|
776
|
-
|
|
777
|
-
|
|
797
|
+
for r in responses:
|
|
798
|
+
for v in r.get("value", []):
|
|
799
|
+
|
|
800
|
+
new_data = {
|
|
801
|
+
"KQL Queryset Name": v.get("displayName"),
|
|
802
|
+
"KQL Queryset ID": v.get("id"),
|
|
803
|
+
"Description": v.get("description"),
|
|
804
|
+
}
|
|
805
|
+
df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
|
|
778
806
|
|
|
779
807
|
return df
|
|
780
808
|
|
|
@@ -801,19 +829,24 @@ def list_mlmodels(workspace: Optional[str] = None) -> pd.DataFrame:
|
|
|
801
829
|
(workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
|
|
802
830
|
|
|
803
831
|
client = fabric.FabricRestClient()
|
|
804
|
-
response = client.get(f"/v1/workspaces/{workspace_id}/mlModels
|
|
832
|
+
response = client.get(f"/v1/workspaces/{workspace_id}/mlModels")
|
|
833
|
+
if response.status_code != 200:
|
|
834
|
+
raise FabricHTTPException(response)
|
|
805
835
|
|
|
806
|
-
|
|
807
|
-
model_id = v.get("id")
|
|
808
|
-
modelName = v.get("displayName")
|
|
809
|
-
desc = v.get("description")
|
|
836
|
+
responses = pagination(client, response)
|
|
810
837
|
|
|
811
|
-
|
|
812
|
-
|
|
813
|
-
|
|
814
|
-
"
|
|
815
|
-
|
|
816
|
-
|
|
838
|
+
for r in responses:
|
|
839
|
+
for v in r.get("value", []):
|
|
840
|
+
model_id = v.get("id")
|
|
841
|
+
modelName = v.get("displayName")
|
|
842
|
+
desc = v.get("description")
|
|
843
|
+
|
|
844
|
+
new_data = {
|
|
845
|
+
"ML Model Name": modelName,
|
|
846
|
+
"ML Model ID": model_id,
|
|
847
|
+
"Description": desc,
|
|
848
|
+
}
|
|
849
|
+
df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
|
|
817
850
|
|
|
818
851
|
return df
|
|
819
852
|
|
|
@@ -840,19 +873,24 @@ def list_eventstreams(workspace: Optional[str] = None) -> pd.DataFrame:
|
|
|
840
873
|
(workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
|
|
841
874
|
|
|
842
875
|
client = fabric.FabricRestClient()
|
|
843
|
-
response = client.get(f"/v1/workspaces/{workspace_id}/eventstreams
|
|
876
|
+
response = client.get(f"/v1/workspaces/{workspace_id}/eventstreams")
|
|
877
|
+
if response.status_code != 200:
|
|
878
|
+
raise FabricHTTPException(response)
|
|
844
879
|
|
|
845
|
-
|
|
846
|
-
model_id = v.get("id")
|
|
847
|
-
modelName = v.get("displayName")
|
|
848
|
-
desc = v.get("description")
|
|
880
|
+
responses = pagination(client, response)
|
|
849
881
|
|
|
850
|
-
|
|
851
|
-
|
|
852
|
-
|
|
853
|
-
"
|
|
854
|
-
|
|
855
|
-
|
|
882
|
+
for r in responses:
|
|
883
|
+
for v in r.get("value", []):
|
|
884
|
+
model_id = v.get("id")
|
|
885
|
+
modelName = v.get("displayName")
|
|
886
|
+
desc = v.get("description")
|
|
887
|
+
|
|
888
|
+
new_data = {
|
|
889
|
+
"Eventstream Name": modelName,
|
|
890
|
+
"Eventstream ID": model_id,
|
|
891
|
+
"Description": desc,
|
|
892
|
+
}
|
|
893
|
+
df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
|
|
856
894
|
|
|
857
895
|
return df
|
|
858
896
|
|
|
@@ -879,19 +917,20 @@ def list_datapipelines(workspace: Optional[str] = None) -> pd.DataFrame:
|
|
|
879
917
|
(workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
|
|
880
918
|
|
|
881
919
|
client = fabric.FabricRestClient()
|
|
882
|
-
response = client.get(f"/v1/workspaces/{workspace_id}/dataPipelines
|
|
920
|
+
response = client.get(f"/v1/workspaces/{workspace_id}/dataPipelines")
|
|
921
|
+
if response.status_code != 200:
|
|
922
|
+
raise FabricHTTPException(response)
|
|
883
923
|
|
|
884
|
-
|
|
885
|
-
model_id = v.get("id")
|
|
886
|
-
modelName = v.get("displayName")
|
|
887
|
-
desc = v.get("description")
|
|
924
|
+
responses = pagination(client, response)
|
|
888
925
|
|
|
889
|
-
|
|
890
|
-
|
|
891
|
-
|
|
892
|
-
|
|
893
|
-
|
|
894
|
-
|
|
926
|
+
for r in responses:
|
|
927
|
+
for v in r.get("value", []):
|
|
928
|
+
new_data = {
|
|
929
|
+
"Data Pipeline Name": v.get("displayName"),
|
|
930
|
+
"Data Pipeline ID": v.get("id"),
|
|
931
|
+
"Description": v.get("description"),
|
|
932
|
+
}
|
|
933
|
+
df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
|
|
895
934
|
|
|
896
935
|
return df
|
|
897
936
|
|
|
@@ -918,16 +957,20 @@ def list_mlexperiments(workspace: Optional[str] = None) -> pd.DataFrame:
|
|
|
918
957
|
(workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
|
|
919
958
|
|
|
920
959
|
client = fabric.FabricRestClient()
|
|
921
|
-
response = client.get(f"/v1/workspaces/{workspace_id}/mlExperiments
|
|
960
|
+
response = client.get(f"/v1/workspaces/{workspace_id}/mlExperiments")
|
|
961
|
+
if response.status_code != 200:
|
|
962
|
+
raise FabricHTTPException(response)
|
|
922
963
|
|
|
923
|
-
|
|
964
|
+
responses = pagination(client, response)
|
|
924
965
|
|
|
925
|
-
|
|
926
|
-
|
|
927
|
-
|
|
928
|
-
|
|
929
|
-
|
|
930
|
-
|
|
966
|
+
for r in responses:
|
|
967
|
+
for v in r.get("value", []):
|
|
968
|
+
new_data = {
|
|
969
|
+
"ML Experiment Name": v.get("displayName"),
|
|
970
|
+
"ML Experiment ID": v.get("id"),
|
|
971
|
+
"Description": v.get("description"),
|
|
972
|
+
}
|
|
973
|
+
df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
|
|
931
974
|
|
|
932
975
|
return df
|
|
933
976
|
|
|
@@ -954,16 +997,20 @@ def list_datamarts(workspace: Optional[str] = None) -> pd.DataFrame:
|
|
|
954
997
|
(workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
|
|
955
998
|
|
|
956
999
|
client = fabric.FabricRestClient()
|
|
957
|
-
response = client.get(f"/v1/workspaces/{workspace_id}/datamarts
|
|
1000
|
+
response = client.get(f"/v1/workspaces/{workspace_id}/datamarts")
|
|
1001
|
+
if response.status_code != 200:
|
|
1002
|
+
raise FabricHTTPException(response)
|
|
958
1003
|
|
|
959
|
-
|
|
1004
|
+
responses = pagination(client, response)
|
|
960
1005
|
|
|
961
|
-
|
|
962
|
-
|
|
963
|
-
|
|
964
|
-
|
|
965
|
-
|
|
966
|
-
|
|
1006
|
+
for r in responses:
|
|
1007
|
+
for v in response.get("value", []):
|
|
1008
|
+
new_data = {
|
|
1009
|
+
"Datamart Name": v.get("displayName"),
|
|
1010
|
+
"Datamart ID": v.get("id"),
|
|
1011
|
+
"Description": v.get("description"),
|
|
1012
|
+
}
|
|
1013
|
+
df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
|
|
967
1014
|
|
|
968
1015
|
return df
|
|
969
1016
|
|
|
@@ -999,27 +1046,15 @@ def create_warehouse(
|
|
|
999
1046
|
|
|
1000
1047
|
client = fabric.FabricRestClient()
|
|
1001
1048
|
response = client.post(
|
|
1002
|
-
f"/v1/workspaces/{workspace_id}/warehouses/", json=request_body
|
|
1049
|
+
f"/v1/workspaces/{workspace_id}/warehouses/", json=request_body, lro_wait=True
|
|
1050
|
+
)
|
|
1051
|
+
|
|
1052
|
+
if response.status_code != 200:
|
|
1053
|
+
raise FabricHTTPException(response)
|
|
1054
|
+
print(
|
|
1055
|
+
f"{icons.green_dot} The '{warehouse}' warehouse has been created within the '{workspace}' workspace."
|
|
1003
1056
|
)
|
|
1004
1057
|
|
|
1005
|
-
if response.status_code == 201:
|
|
1006
|
-
print(
|
|
1007
|
-
f"{icons.green_dot} The '{warehouse}' warehouse has been created within the '{workspace}' workspace."
|
|
1008
|
-
)
|
|
1009
|
-
elif response.status_code == 202:
|
|
1010
|
-
operationId = response.headers["x-ms-operation-id"]
|
|
1011
|
-
response = client.get(f"/v1/operations/{operationId}")
|
|
1012
|
-
response_body = json.loads(response.content)
|
|
1013
|
-
while response_body["status"] != "Succeeded":
|
|
1014
|
-
time.sleep(3)
|
|
1015
|
-
response = client.get(f"/v1/operations/{operationId}")
|
|
1016
|
-
response_body = json.loads(response.content)
|
|
1017
|
-
response = client.get(f"/v1/operations/{operationId}/result")
|
|
1018
|
-
print(
|
|
1019
|
-
f"{icons.green_dot} The '{warehouse}' warehouse has been created within the '{workspace}' workspace."
|
|
1020
|
-
)
|
|
1021
|
-
else:
|
|
1022
|
-
raise ValueError(f"{icons.red_dot} Failed to create the '{warehouse}' warehouse within the '{workspace}' workspace.")
|
|
1023
1058
|
|
|
1024
1059
|
def update_item(
|
|
1025
1060
|
item_type: str,
|
|
@@ -1064,15 +1099,19 @@ def update_item(
|
|
|
1064
1099
|
item_type = item_type.replace(" ", "").capitalize()
|
|
1065
1100
|
|
|
1066
1101
|
if item_type not in itemTypes.keys():
|
|
1067
|
-
raise ValueError(
|
|
1068
|
-
|
|
1102
|
+
raise ValueError(
|
|
1103
|
+
f"{icons.red_dot} The '{item_type}' is not a valid item type. "
|
|
1104
|
+
)
|
|
1105
|
+
|
|
1069
1106
|
itemType = itemTypes[item_type]
|
|
1070
1107
|
|
|
1071
1108
|
dfI = fabric.list_items(workspace=workspace, type=item_type)
|
|
1072
1109
|
dfI_filt = dfI[(dfI["Display Name"] == current_name)]
|
|
1073
1110
|
|
|
1074
1111
|
if len(dfI_filt) == 0:
|
|
1075
|
-
raise ValueError(
|
|
1112
|
+
raise ValueError(
|
|
1113
|
+
f"{icons.red_dot} The '{current_name}' {item_type} does not exist within the '{workspace}' workspace."
|
|
1114
|
+
)
|
|
1076
1115
|
|
|
1077
1116
|
itemId = dfI_filt["Id"].iloc[0]
|
|
1078
1117
|
|
|
@@ -1085,17 +1124,17 @@ def update_item(
|
|
|
1085
1124
|
f"/v1/workspaces/{workspace_id}/{itemType}/{itemId}", json=request_body
|
|
1086
1125
|
)
|
|
1087
1126
|
|
|
1088
|
-
if response.status_code
|
|
1089
|
-
|
|
1090
|
-
|
|
1091
|
-
|
|
1092
|
-
|
|
1093
|
-
|
|
1094
|
-
print(
|
|
1095
|
-
f"{icons.green_dot} The '{current_name}' {item_type} within the '{workspace}' workspace has been updated to be named '{new_name}' and have a description of '{description}'"
|
|
1096
|
-
)
|
|
1127
|
+
if response.status_code != 200:
|
|
1128
|
+
raise FabricHTTPException(response)
|
|
1129
|
+
if description is None:
|
|
1130
|
+
print(
|
|
1131
|
+
f"{icons.green_dot} The '{current_name}' {item_type} within the '{workspace}' workspace has been updated to be named '{new_name}'"
|
|
1132
|
+
)
|
|
1097
1133
|
else:
|
|
1098
|
-
|
|
1134
|
+
print(
|
|
1135
|
+
f"{icons.green_dot} The '{current_name}' {item_type} within the '{workspace}' workspace has been updated to be named '{new_name}' and have a description of '{description}'"
|
|
1136
|
+
)
|
|
1137
|
+
|
|
1099
1138
|
|
|
1100
1139
|
def list_relationships(
|
|
1101
1140
|
dataset: str, workspace: Optional[str] = None, extended: Optional[bool] = False
|
|
@@ -1120,8 +1159,7 @@ def list_relationships(
|
|
|
1120
1159
|
A pandas dataframe showing the object level security for the semantic model.
|
|
1121
1160
|
"""
|
|
1122
1161
|
|
|
1123
|
-
|
|
1124
|
-
workspace = fabric.resolve_workspace_name()
|
|
1162
|
+
workspace = fabric.resolve_workspace_name(workspace)
|
|
1125
1163
|
|
|
1126
1164
|
dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)
|
|
1127
1165
|
|
|
@@ -1133,7 +1171,7 @@ def list_relationships(
|
|
|
1133
1171
|
dax_string="""
|
|
1134
1172
|
SELECT
|
|
1135
1173
|
[ID] AS [RelationshipID]
|
|
1136
|
-
,[Name]
|
|
1174
|
+
,[Name]
|
|
1137
1175
|
FROM $SYSTEM.TMSCHEMA_RELATIONSHIPS
|
|
1138
1176
|
""",
|
|
1139
1177
|
)
|
|
@@ -1143,7 +1181,7 @@ def list_relationships(
|
|
|
1143
1181
|
dataset=dataset,
|
|
1144
1182
|
workspace=workspace,
|
|
1145
1183
|
dax_string="""
|
|
1146
|
-
SELECT
|
|
1184
|
+
SELECT
|
|
1147
1185
|
[TABLE_ID]
|
|
1148
1186
|
,[USED_SIZE]
|
|
1149
1187
|
FROM $SYSTEM.DISCOVER_STORAGE_TABLE_COLUMN_SEGMENTS
|
|
@@ -1200,10 +1238,11 @@ def list_dataflow_storage_accounts() -> pd.DataFrame:
|
|
|
1200
1238
|
]
|
|
1201
1239
|
)
|
|
1202
1240
|
client = fabric.PowerBIRestClient()
|
|
1203
|
-
response = client.get(
|
|
1204
|
-
|
|
1205
|
-
|
|
1241
|
+
response = client.get("/v1.0/myorg/dataflowStorageAccounts")
|
|
1242
|
+
if response.status_code != 200:
|
|
1243
|
+
raise FabricHTTPException(response)
|
|
1206
1244
|
|
|
1245
|
+
for v in response.json().get("value", []):
|
|
1207
1246
|
new_data = {
|
|
1208
1247
|
"Dataflow Storage Account ID": v.get("id"),
|
|
1209
1248
|
"Dataflow Storage Account Name": v.get("name"),
|
|
@@ -1303,24 +1342,28 @@ def list_workspace_role_assignments(workspace: Optional[str] = None) -> pd.DataF
|
|
|
1303
1342
|
|
|
1304
1343
|
client = fabric.FabricRestClient()
|
|
1305
1344
|
response = client.get(f"/v1/workspaces/{workspace_id}/roleAssignments")
|
|
1345
|
+
if response.status_code != 200:
|
|
1346
|
+
raise FabricHTTPException(response)
|
|
1306
1347
|
|
|
1307
|
-
|
|
1308
|
-
user_name = i.get("principal",{}).get("displayName")
|
|
1309
|
-
role_name = i.get("role")
|
|
1310
|
-
user_email = i.get("principal",{}).get("userDetails",{}).get("userPrincipalName")
|
|
1311
|
-
user_type = i.get("principal",{}).get("type")
|
|
1348
|
+
responses = pagination(client, response)
|
|
1312
1349
|
|
|
1313
|
-
|
|
1314
|
-
|
|
1315
|
-
|
|
1316
|
-
|
|
1317
|
-
|
|
1318
|
-
|
|
1319
|
-
|
|
1350
|
+
for r in responses:
|
|
1351
|
+
for i in r.get("value", []):
|
|
1352
|
+
principal = i.get("principal", {})
|
|
1353
|
+
new_data = {
|
|
1354
|
+
"User Name": principal.get("displayName"),
|
|
1355
|
+
"Role Name": i.get("role"),
|
|
1356
|
+
"Type": principal.get("type"),
|
|
1357
|
+
"User Email": principal.get("userDetails", {}).get("userPrincipalName"),
|
|
1358
|
+
}
|
|
1359
|
+
df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
|
|
1320
1360
|
|
|
1321
1361
|
return df
|
|
1322
1362
|
|
|
1323
|
-
|
|
1363
|
+
|
|
1364
|
+
def list_semantic_model_objects(
|
|
1365
|
+
dataset: str, workspace: Optional[str] = None
|
|
1366
|
+
) -> pd.DataFrame:
|
|
1324
1367
|
"""
|
|
1325
1368
|
Shows a list of semantic model objects.
|
|
1326
1369
|
|
|
@@ -1420,11 +1463,11 @@ def list_semantic_model_objects(dataset: str, workspace: Optional[str] = None) -
|
|
|
1420
1463
|
df = pd.concat(
|
|
1421
1464
|
[df, pd.DataFrame(new_data, index=[0])], ignore_index=True
|
|
1422
1465
|
)
|
|
1423
|
-
for
|
|
1466
|
+
for lev in h.Levels:
|
|
1424
1467
|
new_data = {
|
|
1425
|
-
"Parent Name":
|
|
1426
|
-
"Object Name":
|
|
1427
|
-
"Object Type": str(
|
|
1468
|
+
"Parent Name": lev.Parent.Name,
|
|
1469
|
+
"Object Name": lev.Name,
|
|
1470
|
+
"Object Type": str(lev.ObjectType),
|
|
1428
1471
|
}
|
|
1429
1472
|
df = pd.concat(
|
|
1430
1473
|
[df, pd.DataFrame(new_data, index=[0])], ignore_index=True
|
|
@@ -1481,6 +1524,7 @@ def list_semantic_model_objects(dataset: str, workspace: Optional[str] = None) -
|
|
|
1481
1524
|
|
|
1482
1525
|
return df
|
|
1483
1526
|
|
|
1527
|
+
|
|
1484
1528
|
def list_shortcuts(
|
|
1485
1529
|
lakehouse: Optional[str] = None, workspace: Optional[str] = None
|
|
1486
1530
|
) -> pd.DataFrame:
|
|
@@ -1529,8 +1573,13 @@ def list_shortcuts(
|
|
|
1529
1573
|
response = client.get(
|
|
1530
1574
|
f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts"
|
|
1531
1575
|
)
|
|
1532
|
-
if response.status_code
|
|
1533
|
-
|
|
1576
|
+
if response.status_code != 200:
|
|
1577
|
+
raise FabricHTTPException(response)
|
|
1578
|
+
|
|
1579
|
+
responses = pagination(client, response)
|
|
1580
|
+
|
|
1581
|
+
for r in responses:
|
|
1582
|
+
for s in r.get("value", []):
|
|
1534
1583
|
shortcutName = s.get("name")
|
|
1535
1584
|
shortcutPath = s.get("path")
|
|
1536
1585
|
source = list(s["target"].keys())[0]
|
|
@@ -1543,17 +1592,19 @@ def list_shortcuts(
|
|
|
1543
1592
|
subpath,
|
|
1544
1593
|
) = (None, None, None, None, None, None)
|
|
1545
1594
|
if source == "oneLake":
|
|
1546
|
-
sourceLakehouseId = s.get("target",{}).get(source,{}).get("itemId")
|
|
1547
|
-
sourcePath = s.get("target",{}).get(source,{}).get("path")
|
|
1548
|
-
sourceWorkspaceId =
|
|
1595
|
+
sourceLakehouseId = s.get("target", {}).get(source, {}).get("itemId")
|
|
1596
|
+
sourcePath = s.get("target", {}).get(source, {}).get("path")
|
|
1597
|
+
sourceWorkspaceId = (
|
|
1598
|
+
s.get("target", {}).get(source, {}).get("workspaceId")
|
|
1599
|
+
)
|
|
1549
1600
|
sourceWorkspaceName = fabric.resolve_workspace_name(sourceWorkspaceId)
|
|
1550
1601
|
sourceLakehouseName = resolve_lakehouse_name(
|
|
1551
1602
|
sourceLakehouseId, sourceWorkspaceName
|
|
1552
1603
|
)
|
|
1553
1604
|
else:
|
|
1554
|
-
connectionId = s.get("target",{}).get(source,{}).get("connectionId")
|
|
1555
|
-
location = s.get("target",{}).get(source,{}).get("location")
|
|
1556
|
-
subpath = s.get("target",{}).get(source,{}).get("subpath")
|
|
1605
|
+
connectionId = s.get("target", {}).get(source, {}).get("connectionId")
|
|
1606
|
+
location = s.get("target", {}).get(source, {}).get("location")
|
|
1607
|
+
subpath = s.get("target", {}).get(source, {}).get("subpath")
|
|
1557
1608
|
|
|
1558
1609
|
new_data = {
|
|
1559
1610
|
"Shortcut Name": shortcutName,
|
|
@@ -1568,13 +1619,10 @@ def list_shortcuts(
|
|
|
1568
1619
|
}
|
|
1569
1620
|
df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
|
|
1570
1621
|
|
|
1571
|
-
print(
|
|
1572
|
-
f"{icons.warning} This function relies on an API which is not yet official as of May 21, 2024. Once the API becomes official this function will work as expected."
|
|
1573
|
-
)
|
|
1574
1622
|
return df
|
|
1575
1623
|
|
|
1624
|
+
|
|
1576
1625
|
def list_custom_pools(workspace: Optional[str] = None) -> pd.DataFrame:
|
|
1577
|
-
|
|
1578
1626
|
"""
|
|
1579
1627
|
Lists all `custom pools <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.
|
|
1580
1628
|
|
|
@@ -1590,42 +1638,85 @@ def list_custom_pools(workspace: Optional[str] = None) -> pd.DataFrame:
|
|
|
1590
1638
|
pandas.DataFrame
|
|
1591
1639
|
A pandas dataframe showing all the custom pools within the Fabric workspace.
|
|
1592
1640
|
"""
|
|
1593
|
-
|
|
1594
|
-
#https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/list-workspace-custom-pools
|
|
1641
|
+
|
|
1642
|
+
# https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/list-workspace-custom-pools
|
|
1595
1643
|
(workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
|
|
1596
1644
|
|
|
1597
|
-
df = pd.DataFrame(
|
|
1645
|
+
df = pd.DataFrame(
|
|
1646
|
+
columns=[
|
|
1647
|
+
"Custom Pool ID",
|
|
1648
|
+
"Custom Pool Name",
|
|
1649
|
+
"Type",
|
|
1650
|
+
"Node Family",
|
|
1651
|
+
"Node Size",
|
|
1652
|
+
"Auto Scale Enabled",
|
|
1653
|
+
"Auto Scale Min Node Count",
|
|
1654
|
+
"Auto Scale Max Node Count",
|
|
1655
|
+
"Dynamic Executor Allocation Enabled",
|
|
1656
|
+
"Dynamic Executor Allocation Min Executors",
|
|
1657
|
+
"Dynamic Executor Allocation Max Executors",
|
|
1658
|
+
]
|
|
1659
|
+
)
|
|
1598
1660
|
|
|
1599
1661
|
client = fabric.FabricRestClient()
|
|
1600
1662
|
response = client.get(f"/v1/workspaces/{workspace_id}/spark/pools")
|
|
1663
|
+
if response.status_code != 200:
|
|
1664
|
+
raise FabricHTTPException(response)
|
|
1601
1665
|
|
|
1602
|
-
for i in response.json()[
|
|
1666
|
+
for i in response.json()["value"]:
|
|
1603
1667
|
|
|
1604
|
-
aScale = i.get(
|
|
1605
|
-
d = i.get(
|
|
1668
|
+
aScale = i.get("autoScale", {})
|
|
1669
|
+
d = i.get("dynamicExecutorAllocation", {})
|
|
1606
1670
|
|
|
1607
|
-
new_data = {
|
|
1608
|
-
|
|
1609
|
-
|
|
1671
|
+
new_data = {
|
|
1672
|
+
"Custom Pool ID": i.get("id"),
|
|
1673
|
+
"Custom Pool Name": i.get("name"),
|
|
1674
|
+
"Type": i.get("type"),
|
|
1675
|
+
"Node Family": i.get("nodeFamily"),
|
|
1676
|
+
"Node Size": i.get("nodeSize"),
|
|
1677
|
+
"Auto Scale Enabled": aScale.get("enabled"),
|
|
1678
|
+
"Auto Scale Min Node Count": aScale.get("minNodeCount"),
|
|
1679
|
+
"Auto Scale Max Node Count": aScale.get("maxNodeCount"),
|
|
1680
|
+
"Dynamic Executor Allocation Enabled": d.get("enabled"),
|
|
1681
|
+
"Dynamic Executor Allocation Min Executors": d.get("minExecutors"),
|
|
1682
|
+
"Dynamic Executor Allocation Max Executors": d.get("maxExecutors"),
|
|
1683
|
+
}
|
|
1610
1684
|
df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
|
|
1611
1685
|
|
|
1612
|
-
bool_cols = [
|
|
1613
|
-
int_cols = [
|
|
1686
|
+
bool_cols = ["Auto Scale Enabled", "Dynamic Executor Allocation Enabled"]
|
|
1687
|
+
int_cols = [
|
|
1688
|
+
"Auto Scale Min Node Count",
|
|
1689
|
+
"Auto Scale Max Node Count",
|
|
1690
|
+
"Dynamic Executor Allocation Enabled",
|
|
1691
|
+
"Dynamic Executor Allocation Min Executors",
|
|
1692
|
+
"Dynamic Executor Allocation Max Executors",
|
|
1693
|
+
]
|
|
1614
1694
|
|
|
1615
1695
|
df[bool_cols] = df[bool_cols].astype(bool)
|
|
1616
1696
|
df[int_cols] = df[int_cols].astype(int)
|
|
1617
1697
|
|
|
1618
1698
|
return df
|
|
1619
1699
|
|
|
1620
|
-
|
|
1621
|
-
|
|
1700
|
+
|
|
1701
|
+
def create_custom_pool(
|
|
1702
|
+
pool_name: str,
|
|
1703
|
+
node_size: str,
|
|
1704
|
+
min_node_count: int,
|
|
1705
|
+
max_node_count: int,
|
|
1706
|
+
min_executors: int,
|
|
1707
|
+
max_executors: int,
|
|
1708
|
+
node_family: Optional[str] = "MemoryOptimized",
|
|
1709
|
+
auto_scale_enabled: Optional[bool] = True,
|
|
1710
|
+
dynamic_executor_allocation_enabled: Optional[bool] = True,
|
|
1711
|
+
workspace: Optional[str] = None,
|
|
1712
|
+
):
|
|
1622
1713
|
"""
|
|
1623
1714
|
Creates a `custom pool <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.
|
|
1624
1715
|
|
|
1625
1716
|
Parameters
|
|
1626
1717
|
----------
|
|
1627
1718
|
pool_name : str
|
|
1628
|
-
The custom pool name.
|
|
1719
|
+
The custom pool name.
|
|
1629
1720
|
node_size : str
|
|
1630
1721
|
The `node size <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#nodesize>`_.
|
|
1631
1722
|
min_node_count : int
|
|
@@ -1648,10 +1739,10 @@ def create_custom_pool(pool_name: str, node_size: str, min_node_count: int, max_
|
|
|
1648
1739
|
or if no lakehouse attached, resolves to the workspace of the notebook.
|
|
1649
1740
|
|
|
1650
1741
|
Returns
|
|
1651
|
-
-------
|
|
1742
|
+
-------
|
|
1652
1743
|
"""
|
|
1653
1744
|
|
|
1654
|
-
#https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool
|
|
1745
|
+
# https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool
|
|
1655
1746
|
(workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
|
|
1656
1747
|
|
|
1657
1748
|
request_body = {
|
|
@@ -1659,34 +1750,48 @@ def create_custom_pool(pool_name: str, node_size: str, min_node_count: int, max_
|
|
|
1659
1750
|
"nodeFamily": node_family,
|
|
1660
1751
|
"nodeSize": node_size,
|
|
1661
1752
|
"autoScale": {
|
|
1662
|
-
|
|
1663
|
-
|
|
1664
|
-
|
|
1753
|
+
"enabled": auto_scale_enabled,
|
|
1754
|
+
"minNodeCount": min_node_count,
|
|
1755
|
+
"maxNodeCount": max_node_count,
|
|
1665
1756
|
},
|
|
1666
1757
|
"dynamicExecutorAllocation": {
|
|
1667
|
-
|
|
1668
|
-
|
|
1669
|
-
|
|
1670
|
-
}
|
|
1758
|
+
"enabled": dynamic_executor_allocation_enabled,
|
|
1759
|
+
"minExecutors": min_executors,
|
|
1760
|
+
"maxExecutors": max_executors,
|
|
1761
|
+
},
|
|
1671
1762
|
}
|
|
1672
1763
|
|
|
1673
1764
|
client = fabric.FabricRestClient()
|
|
1674
|
-
response = client.post(
|
|
1765
|
+
response = client.post(
|
|
1766
|
+
f"/v1/workspaces/{workspace_id}/spark/pools", json=request_body, lro_wait=True
|
|
1767
|
+
)
|
|
1768
|
+
|
|
1769
|
+
if response.status_code != 200:
|
|
1770
|
+
raise FabricHTTPException(response)
|
|
1771
|
+
print(
|
|
1772
|
+
f"{icons.green_dot} The '{pool_name}' spark pool has been created within the '{workspace}' workspace."
|
|
1773
|
+
)
|
|
1675
1774
|
|
|
1676
|
-
if response.status_code == 201:
|
|
1677
|
-
print(f"{icons.green_dot} The '{pool_name}' spark pool has been created within the '{workspace}' workspace.")
|
|
1678
|
-
else:
|
|
1679
|
-
raise ValueError(f"{icons.red_dot} {response.status_code}")
|
|
1680
|
-
|
|
1681
|
-
def update_custom_pool(pool_name: str, node_size: Optional[str] = None, min_node_count: Optional[int] = None, max_node_count: Optional[int] = None, min_executors: Optional[int] = None, max_executors: Optional[int] = None, node_family: Optional[str] = None, auto_scale_enabled: Optional[bool] = None, dynamic_executor_allocation_enabled: Optional[bool] = None, workspace: Optional[str] = None):
|
|
1682
1775
|
|
|
1776
|
+
def update_custom_pool(
|
|
1777
|
+
pool_name: str,
|
|
1778
|
+
node_size: Optional[str] = None,
|
|
1779
|
+
min_node_count: Optional[int] = None,
|
|
1780
|
+
max_node_count: Optional[int] = None,
|
|
1781
|
+
min_executors: Optional[int] = None,
|
|
1782
|
+
max_executors: Optional[int] = None,
|
|
1783
|
+
node_family: Optional[str] = None,
|
|
1784
|
+
auto_scale_enabled: Optional[bool] = None,
|
|
1785
|
+
dynamic_executor_allocation_enabled: Optional[bool] = None,
|
|
1786
|
+
workspace: Optional[str] = None,
|
|
1787
|
+
):
|
|
1683
1788
|
"""
|
|
1684
1789
|
Updates the properties of a `custom pool <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.
|
|
1685
1790
|
|
|
1686
1791
|
Parameters
|
|
1687
1792
|
----------
|
|
1688
1793
|
pool_name : str
|
|
1689
|
-
The custom pool name.
|
|
1794
|
+
The custom pool name.
|
|
1690
1795
|
node_size : str, default=None
|
|
1691
1796
|
The `node size <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#nodesize>`_.
|
|
1692
1797
|
Defaults to None which keeps the existing property setting.
|
|
@@ -1717,61 +1822,106 @@ def update_custom_pool(pool_name: str, node_size: Optional[str] = None, min_node
|
|
|
1717
1822
|
or if no lakehouse attached, resolves to the workspace of the notebook.
|
|
1718
1823
|
|
|
1719
1824
|
Returns
|
|
1720
|
-
-------
|
|
1825
|
+
-------
|
|
1721
1826
|
"""
|
|
1722
1827
|
|
|
1723
|
-
#https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/update-workspace-custom-pool?tabs=HTTP
|
|
1828
|
+
# https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/update-workspace-custom-pool?tabs=HTTP
|
|
1724
1829
|
(workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
|
|
1725
1830
|
|
|
1726
|
-
df = list_custom_pools(workspace
|
|
1727
|
-
df_pool = df[df[
|
|
1831
|
+
df = list_custom_pools(workspace=workspace)
|
|
1832
|
+
df_pool = df[df["Custom Pool Name"] == pool_name]
|
|
1728
1833
|
|
|
1729
1834
|
if len(df_pool) == 0:
|
|
1730
|
-
raise ValueError(
|
|
1835
|
+
raise ValueError(
|
|
1836
|
+
f"{icons.red_dot} The '{pool_name}' custom pool does not exist within the '{workspace}'. Please choose a valid custom pool."
|
|
1837
|
+
)
|
|
1731
1838
|
|
|
1732
1839
|
if node_family is None:
|
|
1733
|
-
node_family = df_pool[
|
|
1840
|
+
node_family = df_pool["Node Family"].iloc[0]
|
|
1734
1841
|
if node_size is None:
|
|
1735
|
-
node_size = df_pool[
|
|
1842
|
+
node_size = df_pool["Node Size"].iloc[0]
|
|
1736
1843
|
if auto_scale_enabled is None:
|
|
1737
|
-
auto_scale_enabled = bool(df_pool[
|
|
1844
|
+
auto_scale_enabled = bool(df_pool["Auto Scale Enabled"].iloc[0])
|
|
1738
1845
|
if min_node_count is None:
|
|
1739
|
-
min_node_count = int(df_pool[
|
|
1846
|
+
min_node_count = int(df_pool["Min Node Count"].iloc[0])
|
|
1740
1847
|
if max_node_count is None:
|
|
1741
|
-
max_node_count = int(df_pool[
|
|
1848
|
+
max_node_count = int(df_pool["Max Node Count"].iloc[0])
|
|
1742
1849
|
if dynamic_executor_allocation_enabled is None:
|
|
1743
|
-
dynamic_executor_allocation_enabled = bool(
|
|
1850
|
+
dynamic_executor_allocation_enabled = bool(
|
|
1851
|
+
df_pool["Dynami Executor Allocation Enabled"].iloc[0]
|
|
1852
|
+
)
|
|
1744
1853
|
if min_executors is None:
|
|
1745
|
-
min_executors = int(df_pool[
|
|
1854
|
+
min_executors = int(df_pool["Min Executors"].iloc[0])
|
|
1746
1855
|
if max_executors is None:
|
|
1747
|
-
max_executors = int(df_pool[
|
|
1856
|
+
max_executors = int(df_pool["Max Executors"].iloc[0])
|
|
1748
1857
|
|
|
1749
1858
|
request_body = {
|
|
1750
1859
|
"name": pool_name,
|
|
1751
1860
|
"nodeFamily": node_family,
|
|
1752
1861
|
"nodeSize": node_size,
|
|
1753
1862
|
"autoScale": {
|
|
1754
|
-
|
|
1755
|
-
|
|
1756
|
-
|
|
1863
|
+
"enabled": auto_scale_enabled,
|
|
1864
|
+
"minNodeCount": min_node_count,
|
|
1865
|
+
"maxNodeCount": max_node_count,
|
|
1757
1866
|
},
|
|
1758
1867
|
"dynamicExecutorAllocation": {
|
|
1759
|
-
|
|
1760
|
-
|
|
1761
|
-
|
|
1762
|
-
}
|
|
1868
|
+
"enabled": dynamic_executor_allocation_enabled,
|
|
1869
|
+
"minExecutors": min_executors,
|
|
1870
|
+
"maxExecutors": max_executors,
|
|
1871
|
+
},
|
|
1763
1872
|
}
|
|
1764
1873
|
|
|
1765
1874
|
client = fabric.FabricRestClient()
|
|
1766
|
-
response = client.post(
|
|
1875
|
+
response = client.post(
|
|
1876
|
+
f"/v1/workspaces/{workspace_id}/spark/pools", json=request_body
|
|
1877
|
+
)
|
|
1878
|
+
|
|
1879
|
+
if response.status_code != 200:
|
|
1880
|
+
raise FabricHTTPException(response)
|
|
1881
|
+
print(
|
|
1882
|
+
f"{icons.green_dot} The '{pool_name}' spark pool within the '{workspace}' workspace has been updated."
|
|
1883
|
+
)
|
|
1884
|
+
|
|
1885
|
+
|
|
1886
|
+
def delete_custom_pool(pool_name: str, workspace: Optional[str] = None):
|
|
1887
|
+
"""
|
|
1888
|
+
Deletes a `custom pool <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.
|
|
1889
|
+
|
|
1890
|
+
Parameters
|
|
1891
|
+
----------
|
|
1892
|
+
pool_name : str
|
|
1893
|
+
The custom pool name.
|
|
1894
|
+
workspace : str, default=None
|
|
1895
|
+
The name of the Fabric workspace.
|
|
1896
|
+
Defaults to None which resolves to the workspace of the attached lakehouse
|
|
1897
|
+
or if no lakehouse attached, resolves to the workspace of the notebook.
|
|
1898
|
+
|
|
1899
|
+
Returns
|
|
1900
|
+
-------
|
|
1901
|
+
"""
|
|
1902
|
+
|
|
1903
|
+
(workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
|
|
1904
|
+
|
|
1905
|
+
dfL = list_custom_pools(workspace=workspace)
|
|
1906
|
+
dfL_filt = dfL[dfL["Custom Pool Name"] == pool_name]
|
|
1907
|
+
|
|
1908
|
+
if len(dfL_filt) == 0:
|
|
1909
|
+
raise ValueError(
|
|
1910
|
+
f"{icons.red_dot} The '{pool_name}' custom pool does not exist within the '{workspace}' workspace."
|
|
1911
|
+
)
|
|
1912
|
+
poolId = dfL_filt["Custom Pool ID"].iloc[0]
|
|
1913
|
+
|
|
1914
|
+
client = fabric.FabricRestClient()
|
|
1915
|
+
response = client.delete(f"/v1/workspaces/{workspace_id}/spark/pools/{poolId}")
|
|
1916
|
+
|
|
1917
|
+
if response.status_code != 200:
|
|
1918
|
+
raise FabricHTTPException(response)
|
|
1919
|
+
print(
|
|
1920
|
+
f"{icons.green_dot} The '{pool_name}' spark pool has been deleted from the '{workspace}' workspace."
|
|
1921
|
+
)
|
|
1922
|
+
|
|
1767
1923
|
|
|
1768
|
-
if response.status_code == 200:
|
|
1769
|
-
print(f"{icons.green_dot} The '{pool_name}' spark pool within the '{workspace}' workspace has been updated.")
|
|
1770
|
-
else:
|
|
1771
|
-
raise ValueError(f"{icons.red_dot} {response.status_code}")
|
|
1772
|
-
|
|
1773
1924
|
def assign_workspace_to_capacity(capacity_name: str, workspace: Optional[str] = None):
|
|
1774
|
-
|
|
1775
1925
|
"""
|
|
1776
1926
|
Assigns a workspace to a capacity.
|
|
1777
1927
|
|
|
@@ -1788,27 +1938,29 @@ def assign_workspace_to_capacity(capacity_name: str, workspace: Optional[str] =
     -------
     """

-    #https://learn.microsoft.com/en-us/rest/api/fabric/core/workspaces/assign-to-capacity?tabs=HTTP
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

     dfC = fabric.list_capacities()
-    dfC_filt = dfC[dfC[
-    capacity_id = dfC_filt[
+    dfC_filt = dfC[dfC["Display Name"] == capacity_name]
+    capacity_id = dfC_filt["Id"].iloc[0]

-    request_body = {
-        "capacityId": capacity_id
-    }
+    request_body = {"capacityId": capacity_id}

     client = fabric.FabricRestClient()
-    response = client.post(
+    response = client.post(
+        f"/v1/workspaces/{workspace_id}/assignToCapacity",
+        json=request_body,
+        lro_wait=True,
+    )
+
+    if response.status_code not in [200, 202]:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} The '{workspace}' workspace has been assigned to the '{capacity_name}' capacity."
+    )

-    if response.status_code == 202:
-        print(f"{icons.green_dot} The '{workspace}' workspace has been assigned to the '{capacity_name}' capacity.")
-    else:
-        raise ValueError(f"{icons.red_dot} {response.status_code}")

 def unassign_workspace_from_capacity(workspace: Optional[str] = None):
-
     """
     Unassigns a workspace from its assigned capacity.

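A minimal usage sketch for the capacity assignment shown above (illustrative only, assuming a package-level export). The capacity name must match a 'Display Name' returned by fabric.list_capacities(); both names below are placeholders.

import sempy_labs as labs  # assumption: exported from the package root

labs.assign_workspace_to_capacity(
    capacity_name="P1-Capacity", workspace="Sales Workspace"
)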
@@ -1823,19 +1975,22 @@ def unassign_workspace_from_capacity(workspace: Optional[str] = None):
     -------
     """

-    #https://learn.microsoft.com/en-us/rest/api/fabric/core/workspaces/unassign-from-capacity?tabs=HTTP
+    # https://learn.microsoft.com/en-us/rest/api/fabric/core/workspaces/unassign-from-capacity?tabs=HTTP
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
+
     client = fabric.FabricRestClient()
-    response = client.post(
+    response = client.post(
+        f"/v1/workspaces/{workspace_id}/unassignFromCapacity", lro_wait=True
+    )
+
+    if response.status_code not in [200, 202]:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} The '{workspace}' workspace has been unassigned from its capacity."
+    )
+

-    if response.status_code == 202:
-        print(f"{icons.green_dot} The '{workspace}' workspace has been unassigned from its capacity.")
-    else:
-        raise ValueError(f"{icons.red_dot} {response.status_code}")
-
 def get_spark_settings(workspace: Optional[str] = None) -> pd.DataFrame:
-
     """
     Shows the spark settings for a workspace.

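And the counterpart call, again as a hedged sketch with a placeholder workspace name rather than the library's documented example.

import sempy_labs as labs  # assumption: exported from the package root

# Detaches the workspace from whatever capacity it is currently assigned to.
labs.unassign_workspace_from_capacity(workspace="Sales Workspace")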
@@ -1852,35 +2007,73 @@ def get_spark_settings(workspace: Optional[str] = None) -> pd.DataFrame:
         A pandas dataframe showing the spark settings for a workspace.
     """

-    #https://learn.microsoft.com/en-us/rest/api/fabric/spark/workspace-settings/get-spark-settings?tabs=HTTP
+    # https://learn.microsoft.com/en-us/rest/api/fabric/spark/workspace-settings/get-spark-settings?tabs=HTTP
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

-    df = pd.DataFrame(
+    df = pd.DataFrame(
+        columns=[
+            "Automatic Log Enabled",
+            "High Concurrency Enabled",
+            "Customize Compute Enabled",
+            "Default Pool Name",
+            "Default Pool Type",
+            "Max Node Count",
+            "Max Executors",
+            "Environment Name",
+            "Runtime Version",
+        ]
+    )

     client = fabric.FabricRestClient()
     response = client.get(f"/v1/workspaces/{workspace_id}/spark/settings")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)

     i = response.json()
-    p = i.get(
-    dp = i.get(
-    sp = i.get(
-    e = i.get(
-
-    new_data = {
-
-
+    p = i.get("pool")
+    dp = i.get("pool", {}).get("defaultPool", {})
+    sp = i.get("pool", {}).get("starterPool", {})
+    e = i.get("environment", {})
+
+    new_data = {
+        "Automatic Log Enabled": i.get("automaticLog").get("enabled"),
+        "High Concurrency Enabled": i.get("highConcurrency").get(
+            "notebookInteractiveRunEnabled"
+        ),
+        "Customize Compute Enabled": p.get("customizeComputeEnabled"),
+        "Default Pool Name": dp.get("name"),
+        "Default Pool Type": dp.get("type"),
+        "Max Node Count": sp.get("maxNodeCount"),
+        "Max Node Executors": sp.get("maxExecutors"),
+        "Environment Name": e.get("name"),
+        "Runtime Version": e.get("runtimeVersion"),
+    }
     df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

-    bool_cols = [
-
+    bool_cols = [
+        "Automatic Log Enabled",
+        "High Concurrency Enabled",
+        "Customize Compute Enabled",
+    ]
+    int_cols = ["Max Node Count", "Max Executors"]

     df[bool_cols] = df[bool_cols].astype(bool)
     df[int_cols] = df[int_cols].astype(int)

     return df

-
-
+
+def update_spark_settings(
+    automatic_log_enabled: Optional[bool] = None,
+    high_concurrency_enabled: Optional[bool] = None,
+    customize_compute_enabled: Optional[bool] = None,
+    default_pool_name: Optional[str] = None,
+    max_node_count: Optional[int] = None,
+    max_executors: Optional[int] = None,
+    environment_name: Optional[str] = None,
+    runtime_version: Optional[str] = None,
+    workspace: Optional[str] = None,
+):
     """
     Updates the spark settings for a workspace.

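A short reading sketch for get_spark_settings (not from the diff): it assumes the package-level export used in the earlier examples and a placeholder workspace name; the column names come from the new schema above.

import sempy_labs as labs

settings = labs.get_spark_settings(workspace="Sales Workspace")
# One-row DataFrame; pick out a few of the columns defined in the function.
print(settings[["Default Pool Name", "Runtime Version", "Max Node Count"]])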
@@ -1919,62 +2112,57 @@ def update_spark_settings(automatic_log_enabled: Optional[bool] = None, high_con
     -------
     """

-    #https://learn.microsoft.com/en-us/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP
+    # https://learn.microsoft.com/en-us/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

-    dfS = get_spark_settings(workspace
+    dfS = get_spark_settings(workspace=workspace)

     if automatic_log_enabled is None:
-        automatic_log_enabled = bool(dfS[
+        automatic_log_enabled = bool(dfS["Automatic Log Enabled"].iloc[0])
     if high_concurrency_enabled is None:
-        high_concurrency_enabled = bool(dfS[
+        high_concurrency_enabled = bool(dfS["High Concurrency Enabled"].iloc[0])
     if customize_compute_enabled is None:
-        customize_compute_enabled = bool(dfS[
+        customize_compute_enabled = bool(dfS["Customize Compute Enabled"].iloc[0])
     if default_pool_name is None:
-        default_pool_name = dfS[
+        default_pool_name = dfS["Default Pool Name"].iloc[0]
     if max_node_count is None:
-        max_node_count = int(dfS[
+        max_node_count = int(dfS["Max Node Count"].iloc[0])
     if max_executors is None:
-        max_executors = int(dfS[
+        max_executors = int(dfS["Max Executors"].iloc[0])
     if environment_name is None:
-        environment_name = dfS[
+        environment_name = dfS["Environment Name"].iloc[0]
     if runtime_version is None:
-        runtime_version = dfS[
+        runtime_version = dfS["Runtime Version"].iloc[0]

     request_body = {
-
-        "
-
-
-
-
-
-
-            "name": default_pool_name,
-            "type": "Workspace"
+        "automaticLog": {"enabled": automatic_log_enabled},
+        "highConcurrency": {"notebookInteractiveRunEnabled": high_concurrency_enabled},
+        "pool": {
+            "customizeComputeEnabled": customize_compute_enabled,
+            "defaultPool": {"name": default_pool_name, "type": "Workspace"},
+            "starterPool": {
+                "maxNodeCount": max_node_count,
+                "maxExecutors": max_executors,
+            },
         },
-        "
-            "maxNodeCount": max_node_count,
-            "maxExecutors": max_executors
-        }
-        },
-        "environment": {
-            "name": environment_name,
-            "runtimeVersion": runtime_version
-        }
+        "environment": {"name": environment_name, "runtimeVersion": runtime_version},
     }

     client = fabric.FabricRestClient()
-    response = client.patch(
+    response = client.patch(
+        f"/v1/workspaces/{workspace_id}/spark/settings", json=request_body
+    )

-    if response.status_code
-
-
-
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} The spark settings within the '{workspace}' workspace have been updated accordingly."
+    )

-def add_user_to_workspace(email_address: str, role_name: str, workspace: Optional[str] = None):

+def add_user_to_workspace(
+    email_address: str, role_name: str, workspace: Optional[str] = None
+):
     """
     Adds a user to a workspace.

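A minimal usage sketch for update_spark_settings under the same export assumption: only the keyword arguments you pass are changed, since the body above back-fills every omitted value from get_spark_settings(). Names and values are placeholders.

import sempy_labs as labs

labs.update_spark_settings(
    automatic_log_enabled=True,
    max_node_count=10,
    workspace="Sales Workspace",
)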
@@ -1995,28 +2183,30 @@ def add_user_to_workspace(email_address: str, role_name: str, workspace: Optiona

     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

-    role_names = [
+    role_names = ["Admin", "Member", "Viewer", "Contributor"]
     role_name = role_name.capitalize()
     if role_name not in role_names:
-        raise ValueError(
-
+        raise ValueError(
+            f"{icons.red_dot} Invalid role. The 'role_name' parameter must be one of the following: {role_names}."
+        )
+    plural = "n" if role_name == "Admin" else ""

     client = fabric.PowerBIRestClient()

-    request_body = {
-        "emailAddress": email_address,
-        "groupUserAccessRight": role_name
-    }
+    request_body = {"emailAddress": email_address, "groupUserAccessRight": role_name}

-    response = client.post(
-
-
-
-
-
+    response = client.post(
+        f"/v1.0/myorg/groups/{workspace_id}/users", json=request_body
+    )
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} The '{email_address}' user has been added as a{plural} '{role_name}' within the '{workspace}' workspace."
+    )

-def delete_user_from_workspace(email_address : str, workspace : Optional[str] = None):

+def delete_user_from_workspace(email_address: str, workspace: Optional[str] = None):
     """
     Removes a user from a workspace.

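Usage sketch for the role-validated add (same export assumption, placeholder identities): role_name is case-insensitive because of the capitalize() call, and anything outside Admin/Member/Viewer/Contributor raises ValueError.

import sempy_labs as labs

labs.add_user_to_workspace(
    email_address="user@contoso.com", role_name="viewer", workspace="Sales Workspace"
)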
@@ -2037,14 +2227,17 @@ def delete_user_from_workspace(email_address : str, workspace : Optional[str] =

     client = fabric.PowerBIRestClient()
     response = client.delete(f"/v1.0/myorg/groups/{workspace_id}/users/{email_address}")
-
-    if response.status_code == 200:
-        print(f"{icons.green_dot} The '{email_address}' user has been removed from accessing the '{workspace}' workspace.")
-    else:
-        print(f"{icons.red_dot} {response.status_code}")

-
-
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} The '{email_address}' user has been removed from accessing the '{workspace}' workspace."
+    )
+
+
+def update_workspace_user(
+    email_address: str, role_name: str, workspace: Optional[str] = None
+):
     """
     Updates a user's role within a workspace.

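The removal path now raises FabricHTTPException on failure instead of printing a status code, so an illustrative call (same assumptions and placeholders as above) can simply be:

import sempy_labs as labs

labs.delete_user_from_workspace(
    email_address="user@contoso.com", workspace="Sales Workspace"
)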
@@ -2065,26 +2258,26 @@ def update_workspace_user(email_address: str, role_name: str, workspace: Optiona

     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

-    role_names = [
+    role_names = ["Admin", "Member", "Viewer", "Contributor"]
     role_name = role_name.capitalize()
     if role_name not in role_names:
-
+        raise ValueError(
+            f"{icons.red_dot} Invalid role. The 'role_name' parameter must be one of the following: {role_names}."
+        )

-    request_body = {
-        "emailAddress": email_address,
-        "groupUserAccessRight": role_name
-    }
+    request_body = {"emailAddress": email_address, "groupUserAccessRight": role_name}

     client = fabric.PowerBIRestClient()
-    response = client.put(f"/v1.0/myorg/groups/{workspace_id}/users", json
+    response = client.put(f"/v1.0/myorg/groups/{workspace_id}/users", json=request_body)

-    if response.status_code
-
-
-
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} The '{email_address}' user has been updated to a '{role_name}' within the '{workspace}' workspace."
+    )

-def list_workspace_users(workspace: Optional[str] = None) -> pd.DataFrame:

+def list_workspace_users(workspace: Optional[str] = None) -> pd.DataFrame:
     """
     A list of all the users of a workspace and their roles.

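Updating an existing member follows the same pattern and the same role check; again a hedged sketch with placeholder identities and the assumed package-level export.

import sempy_labs as labs

labs.update_workspace_user(
    email_address="user@contoso.com", role_name="admin", workspace="Sales Workspace"
)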
@@ -2103,20 +2296,32 @@ def list_workspace_users(workspace: Optional[str] = None) -> pd.DataFrame:

     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

-    df = pd.DataFrame(columns=[
+    df = pd.DataFrame(columns=["User Name", "Email Address", "Role", "Type", "User ID"])
     client = fabric.FabricRestClient()
     response = client.get(f"/v1/workspaces/{workspace_id}/roleAssignments")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)

-
-        p = v.get('principal',{})
+    responses = pagination(client, response)

-
-
+    for r in responses:
+        for v in r.get("value", []):
+            p = v.get("principal", {})
+            new_data = {
+                "User Name": p.get("displayName"),
+                "User ID": p.get("id"),
+                "Type": p.get("type"),
+                "Role": v.get("role"),
+                "Email Address": p.get("userDetails", {}).get("userPrincipalName"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

     return df

-def assign_workspace_to_dataflow_storage(dataflow_storage_account: str, workspace: Optional[str] = None):

+def assign_workspace_to_dataflow_storage(
+    dataflow_storage_account: str, workspace: Optional[str] = None
+):
     """
     Assigns a dataflow storage account to a workspace.

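The listing now walks paginated role-assignment responses before building the DataFrame, so a sketch under the same assumptions (placeholder workspace) is simply:

import sempy_labs as labs

users = labs.list_workspace_users(workspace="Sales Workspace")
# Columns match the schema above; e.g. collect the admins' email addresses.
admins = users[users["Role"] == "Admin"]["Email Address"].tolist()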
@@ -2136,17 +2341,240 @@ def assign_workspace_to_dataflow_storage(dataflow_storage_account: str, workspac
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

     df = list_dataflow_storage_accounts()
-    df_filt = df[df[
-    dataflow_storage_id = df_filt[
+    df_filt = df[df["Dataflow Storage Account Name"] == dataflow_storage_account]
+    dataflow_storage_id = df_filt["Dataflow Storage Account ID"].iloc[0]

     client = fabric.PowerBIRestClient()

+    request_body = {"dataflowStorageId": dataflow_storage_id}
+
+    response = client.post(
+        f"/v1.0/myorg/groups/{workspace_id}/AssignToDataflowStorage", json=request_body
+    )
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} The '{dataflow_storage_account}' dataflow storage account has been assigned to the '{workspace}' workspacce."
+    )
+
+
|
+
def list_capacities() -> pd.DataFrame:
|
|
2363
|
+
"""
|
|
2364
|
+
Shows the capacities and their properties.
|
|
2365
|
+
|
|
2366
|
+
Parameters
|
|
2367
|
+
----------
|
|
2368
|
+
|
|
2369
|
+
Returns
|
|
2370
|
+
-------
|
|
2371
|
+
pandas.DataFrame
|
|
2372
|
+
A pandas dataframe showing the capacities and their properties
|
|
2373
|
+
"""
|
|
2374
|
+
|
|
2375
|
+
df = pd.DataFrame(
|
|
2376
|
+
columns=["Id", "Display Name", "Sku", "Region", "State", "Admins"]
|
|
2377
|
+
)
|
|
2378
|
+
|
|
2379
|
+
client = fabric.PowerBIRestClient()
|
|
2380
|
+
response = client.get("/v1.0/myorg/capacities")
|
|
2381
|
+
if response.status_code != 200:
|
|
2382
|
+
raise FabricHTTPException(response)
|
|
2383
|
+
|
|
2384
|
+
for i in response.json().get("value", []):
|
|
2385
|
+
new_data = {
|
|
2386
|
+
"Id": i.get("id").lower(),
|
|
2387
|
+
"Display Name": i.get("displayName"),
|
|
2388
|
+
"Sku": i.get("sku"),
|
|
2389
|
+
"Region": i.get("region"),
|
|
2390
|
+
"State": i.get("state"),
|
|
2391
|
+
"Admins": [i.get("admins", [])],
|
|
2392
|
+
}
|
|
2393
|
+
df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
|
|
2394
|
+
|
|
2395
|
+
return df
|
|
2396
|
+
|
|
2397
|
+
|
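Reading the new capacity listing is straightforward; a sketch under the same export assumption, filtering on the columns defined above:

import sempy_labs as labs

capacities = labs.list_capacities()
# Keep only active capacities and a few descriptive columns.
active = capacities[capacities["State"] == "Active"][["Display Name", "Sku", "Region"]]
print(active)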
+def get_notebook_definition(
+    notebook_name: str, workspace: Optional[str] = None, decode: Optional[bool] = True
+):
+    """
+    Obtains the notebook definition.
+
+    Parameters
+    ----------
+    notebook_name : str
+        The name of the notebook.
+    workspace : str, default=None
+        The name of the workspace.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    decode : bool, default=True
+        If True, decodes the notebook definition file into .ipynb format.
+        If False, obtains the notebook definition file in base64 format.
+
+    Returns
+    -------
+    ipynb
+        The notebook definition.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    dfI = fabric.list_items(workspace=workspace, type="Notebook")
+    dfI_filt = dfI[dfI["Display Name"] == notebook_name]
+
+    if len(dfI_filt) == 0:
+        raise ValueError(
+            f"{icons.red_dot} The '{notebook_name}' notebook does not exist within the '{workspace}' workspace."
+        )
+
+    notebook_id = dfI_filt["Id"].iloc[0]
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"v1/workspaces/{workspace_id}/notebooks/{notebook_id}/getDefinition",
+    )
+
+    result = lro(client, response).json()
+    df_items = pd.json_normalize(result["definition"]["parts"])
+    df_items_filt = df_items[df_items["path"] == "notebook-content.py"]
+    payload = df_items_filt["payload"].iloc[0]
+
+    if decode:
+        result = _decode_b64(payload)
+    else:
+        result = payload
+
+    return result
+
+
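A brief retrieval sketch (not from the diff, same export assumption, placeholder names): with the default decode=True the call returns the decoded .ipynb content, while decode=False yields the raw base64 payload.

import sempy_labs as labs

nb_content = labs.get_notebook_definition(
    notebook_name="My Notebook", workspace="Sales Workspace"
)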
+def import_notebook_from_web(
+    notebook_name: str,
+    url: str,
+    description: Optional[str] = None,
+    workspace: Optional[str] = None,
+):
+    """
+    Creates a new notebook within a workspace based on a Jupyter notebook hosted in the web.
+
+    Parameters
+    ----------
+    notebook_name : str
+        The name of the notebook to be created.
+    url : str
+        The url of the Jupyter Notebook (.ipynb)
+    description : str, default=None
+        The description of the notebook.
+        Defaults to None which does not place a description.
+    workspace : str, default=None
+        The name of the workspace.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    client = fabric.FabricRestClient()
+    dfI = fabric.list_items(workspace=workspace, type="Notebook")
+    dfI_filt = dfI[dfI["Display Name"] == notebook_name]
+    if len(dfI_filt) > 0:
+        raise ValueError(
+            f"{icons.red_dot} The '{notebook_name}' already exists within the '{workspace}' workspace."
+        )
+
+    # Fix links to go to the raw github file
+    starting_text = "https://github.com/"
+    starting_text_len = len(starting_text)
+    if url.startswith(starting_text):
+        url = f"https://raw.githubusercontent.com/{url[starting_text_len:]}".replace(
+            "/blob/", "/"
+        )
+
+    response = requests.get(url)
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    file_content = response.content
+    notebook_payload = base64.b64encode(file_content)
+
     request_body = {
-
+        "displayName": notebook_name,
+        "definition": {
+            "format": "ipynb",
+            "parts": [
+                {
+                    "path": "notebook-content.py",
+                    "payload": notebook_payload,
+                    "payloadType": "InlineBase64",
+                }
+            ],
+        },
     }
+    if description is not None:
+        request_body["description"] = description

-    response = client.post(f"
-
-
-
-
+    response = client.post(f"v1/workspaces/{workspace_id}/notebooks", json=request_body)
+
+    lro(client, response, status_codes=[201, 202])
+
+    print(
+        f"{icons.green_dot} The '{notebook_name}' notebook was created within the '{workspace}' workspace."
+    )
+
+
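Import sketch (same assumptions; the URL, notebook name, and workspace are placeholders): a github.com ".../blob/..." link is rewritten to raw.githubusercontent.com before download, as the function body shows.

import sempy_labs as labs

labs.import_notebook_from_web(
    notebook_name="Imported Notebook",
    url="https://github.com/contoso/examples/blob/main/demo.ipynb",
    workspace="Sales Workspace",
)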
+def list_reports_using_semantic_model(
+    dataset: str, workspace: Optional[str] = None
+) -> pd.DataFrame:
+    """
+    Shows a list of all the reports (in all workspaces) which use a given semantic model.
+
+    Parameters
+    ----------
+    dataset : str
+        Name of the semantic model.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the reports which use a given semantic model.
+    """
+
+    df = pd.DataFrame(
+        columns=[
+            "Report Name",
+            "Report Id",
+            "Report Workspace Name",
+            "Report Workspace Id",
+        ]
+    )
+
+    workspace = fabric.resolve_workspace_name(workspace)
+    dataset_id = resolve_dataset_id(dataset, workspace)
+    client = fabric.PowerBIRestClient()
+    response = client.get(
+        f"metadata/relations/downstream/dataset/{dataset_id}?apiVersion=3"
+    )
+
+    response_json = response.json()
+
+    for i in response_json.get("artifacts", []):
+        object_workspace_id = i.get("workspace", {}).get("objectId")
+        object_type = i.get("typeName")
+
+        if object_type == "Report":
+            new_data = {
+                "Report Name": i.get("displayName"),
+                "Report Id": i.get("objectId"),
+                "Report Workspace Name": fabric.resolve_workspace_name(
+                    object_workspace_id
+                ),
+                "Report Workspace Id": object_workspace_id,
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df